1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 markdown::ParsedMarkdown,
5 proto, Grammar, Language, LanguageRegistry,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 language_settings::{language_settings, LanguageSettings},
10 markdown::parse_markdown,
11 outline::OutlineItem,
12 syntax_map::{
13 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatches,
14 SyntaxSnapshot, ToTreeSitterPoint,
15 },
16 task_context::RunnableRange,
17 LanguageScope, Outline, RunnableTag,
18};
19use anyhow::{anyhow, Context, Result};
20pub use clock::ReplicaId;
21use futures::channel::oneshot;
22use gpui::{AppContext, EventEmitter, HighlightStyle, ModelContext, Task, TaskLabel};
23use lazy_static::lazy_static;
24use lsp::LanguageServerId;
25use parking_lot::Mutex;
26use similar::{ChangeTag, TextDiff};
27use smallvec::SmallVec;
28use smol::future::yield_now;
29use std::{
30 any::Any,
31 cmp::{self, Ordering},
32 collections::BTreeMap,
33 ffi::OsStr,
34 future::Future,
35 iter::{self, Iterator, Peekable},
36 mem,
37 ops::{Deref, Range},
38 path::{Path, PathBuf},
39 str,
40 sync::Arc,
41 time::{Duration, Instant, SystemTime},
42 vec,
43};
44use sum_tree::TreeMap;
45use text::operation_queue::OperationQueue;
46use text::*;
47pub use text::{
48 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
49 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
50 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
51 Transaction, TransactionId, Unclipped,
52};
53use theme::SyntaxTheme;
54#[cfg(any(test, feature = "test-support"))]
55use util::RandomCharIter;
56use util::RangeExt;
57
58#[cfg(any(test, feature = "test-support"))]
59pub use {tree_sitter_rust, tree_sitter_typescript};
60
61pub use lsp::DiagnosticSeverity;
62
63lazy_static! {
64 /// A label for the background task spawned by the buffer to compute
65 /// a diff against the contents of its file.
66 pub static ref BUFFER_DIFF_TASK: TaskLabel = TaskLabel::new();
67}
68
/// Indicates whether a [Buffer] has permission to be edited.
70#[derive(PartialEq, Clone, Copy, Debug)]
71pub enum Capability {
72 /// The buffer is a mutable replica.
73 ReadWrite,
74 /// The buffer is a read-only replica.
75 ReadOnly,
76}
77
78pub type BufferRow = u32;
79
80/// An in-memory representation of a source code file, including its text,
81/// syntax trees, git status, and diagnostics.
82pub struct Buffer {
83 text: TextBuffer,
84 diff_base: Option<Rope>,
85 git_diff: git::diff::BufferDiff,
86 file: Option<Arc<dyn File>>,
87 /// The mtime of the file when this buffer was last loaded from
88 /// or saved to disk.
89 saved_mtime: Option<SystemTime>,
90 /// The version vector when this buffer was last loaded from
91 /// or saved to disk.
92 saved_version: clock::Global,
93 transaction_depth: usize,
94 was_dirty_before_starting_transaction: Option<bool>,
95 reload_task: Option<Task<Result<()>>>,
96 language: Option<Arc<Language>>,
97 autoindent_requests: Vec<Arc<AutoindentRequest>>,
98 pending_autoindent: Option<Task<()>>,
99 sync_parse_timeout: Duration,
100 syntax_map: Mutex<SyntaxMap>,
101 parsing_in_background: bool,
102 parse_count: usize,
103 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
104 remote_selections: TreeMap<ReplicaId, SelectionSet>,
105 selections_update_count: usize,
106 diagnostics_update_count: usize,
107 diagnostics_timestamp: clock::Lamport,
108 file_update_count: usize,
109 git_diff_update_count: usize,
110 completion_triggers: Vec<String>,
111 completion_triggers_timestamp: clock::Lamport,
112 deferred_ops: OperationQueue<Operation>,
113 capability: Capability,
114 has_conflict: bool,
115 diff_base_version: usize,
116}
117
118/// An immutable, cheaply cloneable representation of a fixed
119/// state of a buffer.
120pub struct BufferSnapshot {
121 text: text::BufferSnapshot,
122 git_diff: git::diff::BufferDiff,
123 pub(crate) syntax: SyntaxSnapshot,
124 file: Option<Arc<dyn File>>,
125 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
126 diagnostics_update_count: usize,
127 file_update_count: usize,
128 git_diff_update_count: usize,
129 remote_selections: TreeMap<ReplicaId, SelectionSet>,
130 selections_update_count: usize,
131 language: Option<Arc<Language>>,
132 parse_count: usize,
133}
134
135/// The kind and amount of indentation in a particular line. For now,
136/// assumes that indentation is all the same character.
137#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
138pub struct IndentSize {
139 /// The number of bytes that comprise the indentation.
140 pub len: u32,
141 /// The kind of whitespace used for indentation.
142 pub kind: IndentKind,
143}
144
145/// A whitespace character that's used for indentation.
146#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
147pub enum IndentKind {
148 /// An ASCII space character.
149 #[default]
150 Space,
151 /// An ASCII tab character.
152 Tab,
153}
154
155/// The shape of a selection cursor.
156#[derive(Copy, Clone, PartialEq, Eq, Debug, Default)]
157pub enum CursorShape {
158 /// A vertical bar
159 #[default]
160 Bar,
161 /// A block that surrounds the following character
162 Block,
163 /// An underline that runs along the following character
164 Underscore,
165 /// A box drawn around the following character
166 Hollow,
167}
168
169#[derive(Clone, Debug)]
170struct SelectionSet {
171 line_mode: bool,
172 cursor_shape: CursorShape,
173 selections: Arc<[Selection<Anchor>]>,
174 lamport_timestamp: clock::Lamport,
175}
176
177/// A diagnostic associated with a certain range of a buffer.
178#[derive(Clone, Debug, PartialEq, Eq)]
179pub struct Diagnostic {
180 /// The name of the service that produced this diagnostic.
181 pub source: Option<String>,
182 /// A machine-readable code that identifies this diagnostic.
183 pub code: Option<String>,
184 /// Whether this diagnostic is a hint, warning, or error.
185 pub severity: DiagnosticSeverity,
186 /// The human-readable message associated with this diagnostic.
187 pub message: String,
188 /// An id that identifies the group to which this diagnostic belongs.
189 ///
190 /// When a language server produces a diagnostic with
191 /// one or more associated diagnostics, those diagnostics are all
192 /// assigned a single group id.
193 pub group_id: usize,
194 /// Whether this diagnostic is the primary diagnostic for its group.
195 ///
196 /// In a given group, the primary diagnostic is the top-level diagnostic
197 /// returned by the language server. The non-primary diagnostics are the
198 /// associated diagnostics.
199 pub is_primary: bool,
200 /// Whether this diagnostic is considered to originate from an analysis of
201 /// files on disk, as opposed to any unsaved buffer contents. This is a
202 /// property of a given diagnostic source, and is configured for a given
203 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
204 /// for the language server.
205 pub is_disk_based: bool,
206 /// Whether this diagnostic marks unnecessary code.
207 pub is_unnecessary: bool,
208}
209
210/// TODO - move this into the `project` crate and make it private.
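///
/// # Examples
///
/// An illustrative sketch only (not taken from this crate's tests); it assumes
/// an `Arc<LanguageRegistry>` named `language_registry` and an async context:
///
/// ```ignore
/// // A one-line plain-text string is kept as a single line of documentation.
/// let docs = prepare_completion_documentation(
///     &lsp::Documentation::String("Adds two numbers.".into()),
///     &language_registry,
///     None,
/// )
/// .await;
/// assert!(matches!(docs, Documentation::SingleLine(_)));
/// ```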
211pub async fn prepare_completion_documentation(
212 documentation: &lsp::Documentation,
213 language_registry: &Arc<LanguageRegistry>,
214 language: Option<Arc<Language>>,
215) -> Documentation {
216 match documentation {
217 lsp::Documentation::String(text) => {
218 if text.lines().count() <= 1 {
219 Documentation::SingleLine(text.clone())
220 } else {
221 Documentation::MultiLinePlainText(text.clone())
222 }
223 }
224
225 lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
226 lsp::MarkupKind::PlainText => {
227 if value.lines().count() <= 1 {
228 Documentation::SingleLine(value.clone())
229 } else {
230 Documentation::MultiLinePlainText(value.clone())
231 }
232 }
233
234 lsp::MarkupKind::Markdown => {
235 let parsed = parse_markdown(value, language_registry, language).await;
236 Documentation::MultiLineMarkdown(parsed)
237 }
238 },
239 }
240}
241
242/// Documentation associated with a [`Completion`].
243#[derive(Clone, Debug)]
244pub enum Documentation {
245 /// There is no documentation for this completion.
246 Undocumented,
247 /// A single line of documentation.
248 SingleLine(String),
249 /// Multiple lines of plain text documentation.
250 MultiLinePlainText(String),
251 /// Markdown documentation.
252 MultiLineMarkdown(ParsedMarkdown),
253}
254
255/// An operation used to synchronize this buffer with its other replicas.
256#[derive(Clone, Debug, PartialEq)]
257pub enum Operation {
258 /// A text operation.
259 Buffer(text::Operation),
260
261 /// An update to the buffer's diagnostics.
262 UpdateDiagnostics {
263 /// The id of the language server that produced the new diagnostics.
264 server_id: LanguageServerId,
265 /// The diagnostics.
266 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
267 /// The buffer's lamport timestamp.
268 lamport_timestamp: clock::Lamport,
269 },
270
271 /// An update to the most recent selections in this buffer.
272 UpdateSelections {
273 /// The selections.
274 selections: Arc<[Selection<Anchor>]>,
275 /// The buffer's lamport timestamp.
276 lamport_timestamp: clock::Lamport,
277 /// Whether the selections are in 'line mode'.
278 line_mode: bool,
279 /// The [`CursorShape`] associated with these selections.
280 cursor_shape: CursorShape,
281 },
282
283 /// An update to the characters that should trigger autocompletion
284 /// for this buffer.
285 UpdateCompletionTriggers {
286 /// The characters that trigger autocompletion.
287 triggers: Vec<String>,
288 /// The buffer's lamport timestamp.
289 lamport_timestamp: clock::Lamport,
290 },
291}
292
293/// An event that occurs in a buffer.
294#[derive(Clone, Debug, PartialEq)]
295pub enum Event {
296 /// The buffer was changed in a way that must be
297 /// propagated to its other replicas.
298 Operation(Operation),
299 /// The buffer was edited.
300 Edited,
301 /// The buffer's `dirty` bit changed.
302 DirtyChanged,
303 /// The buffer was saved.
304 Saved,
305 /// The buffer's file was changed on disk.
306 FileHandleChanged,
307 /// The buffer was reloaded.
308 Reloaded,
309 /// The buffer's diff_base changed.
310 DiffBaseChanged,
    /// The buffer's diff against its diff base was recalculated.
312 DiffUpdated,
313 /// The buffer's language was changed.
314 LanguageChanged,
315 /// The buffer's syntax trees were updated.
316 Reparsed,
317 /// The buffer's diagnostics were updated.
318 DiagnosticsUpdated,
319 /// The buffer gained or lost editing capabilities.
320 CapabilityChanged,
321 /// The buffer was explicitly requested to close.
322 Closed,
323}
324
325/// The file associated with a buffer.
326pub trait File: Send + Sync {
327 /// Returns the [`LocalFile`] associated with this file, if the
328 /// file is local.
329 fn as_local(&self) -> Option<&dyn LocalFile>;
330
331 /// Returns whether this file is local.
332 fn is_local(&self) -> bool {
333 self.as_local().is_some()
334 }
335
336 /// Returns the file's mtime.
337 fn mtime(&self) -> Option<SystemTime>;
338
339 /// Returns the path of this file relative to the worktree's root directory.
340 fn path(&self) -> &Arc<Path>;
341
342 /// Returns the path of this file relative to the worktree's parent directory (this means it
343 /// includes the name of the worktree's root folder).
344 fn full_path(&self, cx: &AppContext) -> PathBuf;
345
346 /// Returns the last component of this handle's absolute path. If this handle refers to the root
347 /// of its worktree, then this method will return the name of the worktree itself.
348 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
349
350 /// Returns the id of the worktree to which this file belongs.
351 ///
352 /// This is needed for looking up project-specific settings.
353 fn worktree_id(&self) -> usize;
354
355 /// Returns whether the file has been deleted.
356 fn is_deleted(&self) -> bool;
357
    /// Returns whether the file has existed on disk at some point.
359 fn is_created(&self) -> bool {
360 self.mtime().is_some()
361 }
362
363 /// Converts this file into an [`Any`] trait object.
364 fn as_any(&self) -> &dyn Any;
365
366 /// Converts this file into a protobuf message.
367 fn to_proto(&self) -> rpc::proto::File;
368
369 /// Return whether Zed considers this to be a private file.
370 fn is_private(&self) -> bool;
371}
372
373/// The file associated with a buffer, in the case where the file is on the local disk.
374pub trait LocalFile: File {
375 /// Returns the absolute path of this file.
376 fn abs_path(&self, cx: &AppContext) -> PathBuf;
377
378 /// Loads the file's contents from disk.
379 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
380
381 /// Called when the buffer is reloaded from disk.
382 fn buffer_reloaded(
383 &self,
384 buffer_id: BufferId,
385 version: &clock::Global,
386 line_ending: LineEnding,
387 mtime: Option<SystemTime>,
388 cx: &mut AppContext,
389 );
390
391 /// Returns true if the file should not be shared with collaborators.
392 fn is_private(&self, _: &AppContext) -> bool {
393 false
394 }
395}
396
397/// The auto-indent behavior associated with an editing operation.
398/// For some editing operations, each affected line of text has its
399/// indentation recomputed. For other operations, the entire block
400/// of edited text is adjusted uniformly.
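///
/// # Examples
///
/// An illustrative sketch only; it assumes a `&mut Buffer` (e.g. inside a
/// model update) and a `ModelContext` named `cx`:
///
/// ```ignore
/// // Re-indent each inserted line individually, e.g. while typing.
/// buffer.edit([(0..0, "fn main() {\n}\n")], Some(AutoindentMode::EachLine), cx);
///
/// // Shift a pasted block uniformly, preserving its internal structure. The
/// // copied text in this sketch originally started at indent column 4.
/// buffer.edit(
///     [(0..0, "    if x {\n        y();\n    }\n")],
///     Some(AutoindentMode::Block { original_indent_columns: vec![4] }),
///     cx,
/// );
/// ```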
401#[derive(Clone, Debug)]
402pub enum AutoindentMode {
403 /// Indent each line of inserted text.
404 EachLine,
405 /// Apply the same indentation adjustment to all of the lines
406 /// in a given insertion.
407 Block {
408 /// The original indentation level of the first line of each
409 /// insertion, if it has been copied.
410 original_indent_columns: Vec<u32>,
411 },
412}
413
414#[derive(Clone)]
415struct AutoindentRequest {
416 before_edit: BufferSnapshot,
417 entries: Vec<AutoindentRequestEntry>,
418 is_block_mode: bool,
419}
420
421#[derive(Clone)]
422struct AutoindentRequestEntry {
423 /// A range of the buffer whose indentation should be adjusted.
424 range: Range<Anchor>,
425 /// Whether or not these lines should be considered brand new, for the
426 /// purpose of auto-indent. When text is not new, its indentation will
427 /// only be adjusted if the suggested indentation level has *changed*
428 /// since the edit was made.
429 first_line_is_new: bool,
430 indent_size: IndentSize,
431 original_indent_column: Option<u32>,
432}
433
434#[derive(Debug)]
435struct IndentSuggestion {
436 basis_row: u32,
437 delta: Ordering,
438 within_error: bool,
439}
440
441struct BufferChunkHighlights<'a> {
442 captures: SyntaxMapCaptures<'a>,
443 next_capture: Option<SyntaxMapCapture<'a>>,
444 stack: Vec<(usize, HighlightId)>,
445 highlight_maps: Vec<HighlightMap>,
446}
447
448/// An iterator that yields chunks of a buffer's text, along with their
449/// syntax highlights and diagnostic status.
450pub struct BufferChunks<'a> {
451 range: Range<usize>,
452 chunks: text::Chunks<'a>,
453 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
454 error_depth: usize,
455 warning_depth: usize,
456 information_depth: usize,
457 hint_depth: usize,
458 unnecessary_depth: usize,
459 highlights: Option<BufferChunkHighlights<'a>>,
460}
461
462/// A chunk of a buffer's text, along with its syntax highlight and
463/// diagnostic status.
464#[derive(Clone, Copy, Debug, Default)]
465pub struct Chunk<'a> {
466 /// The text of the chunk.
467 pub text: &'a str,
468 /// The syntax highlighting style of the chunk.
469 pub syntax_highlight_id: Option<HighlightId>,
470 /// The highlight style that has been applied to this chunk in
471 /// the editor.
472 pub highlight_style: Option<HighlightStyle>,
473 /// The severity of diagnostic associated with this chunk, if any.
474 pub diagnostic_severity: Option<DiagnosticSeverity>,
475 /// Whether this chunk of text is marked as unnecessary.
476 pub is_unnecessary: bool,
477 /// Whether this chunk of text was originally a tab character.
478 pub is_tab: bool,
479}
480
481/// A set of edits to a given version of a buffer, computed asynchronously.
482pub struct Diff {
483 pub(crate) base_version: clock::Global,
484 line_ending: LineEnding,
485 edits: Vec<(Range<usize>, Arc<str>)>,
486}
487
488#[derive(Clone, Copy)]
489pub(crate) struct DiagnosticEndpoint {
490 offset: usize,
491 is_start: bool,
492 severity: DiagnosticSeverity,
493 is_unnecessary: bool,
494}
495
496/// A class of characters, used for characterizing a run of text.
497#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
498pub enum CharKind {
499 /// Whitespace.
500 Whitespace,
501 /// Punctuation.
502 Punctuation,
503 /// Word.
504 Word,
505}
506
/// A runnable is a set of data about a region of the buffer that can be resolved into a task.
508pub struct Runnable {
509 pub tags: SmallVec<[RunnableTag; 1]>,
510 pub language: Arc<Language>,
511 pub buffer: BufferId,
512}
513
514impl Buffer {
515 /// Create a new buffer with the given base text.
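    ///
    /// # Examples
    ///
    /// An illustrative sketch only; it assumes a gpui `AppContext` named `cx`,
    /// for example in a test:
    ///
    /// ```ignore
    /// let buffer = cx.new_model(|cx| Buffer::local("hello world", cx));
    /// assert_eq!(buffer.read(cx).text(), "hello world");
    /// ```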
516 pub fn local<T: Into<String>>(base_text: T, cx: &mut ModelContext<Self>) -> Self {
517 Self::build(
518 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
519 None,
520 None,
521 Capability::ReadWrite,
522 )
523 }
524
525 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
526 pub fn local_normalized(
527 base_text_normalized: Rope,
528 line_ending: LineEnding,
529 cx: &mut ModelContext<Self>,
530 ) -> Self {
531 Self::build(
532 TextBuffer::new_normalized(
533 0,
534 cx.entity_id().as_non_zero_u64().into(),
535 line_ending,
536 base_text_normalized,
537 ),
538 None,
539 None,
540 Capability::ReadWrite,
541 )
542 }
543
544 /// Create a new buffer that is a replica of a remote buffer.
545 pub fn remote(
546 remote_id: BufferId,
547 replica_id: ReplicaId,
548 capability: Capability,
549 base_text: impl Into<String>,
550 ) -> Self {
551 Self::build(
552 TextBuffer::new(replica_id, remote_id, base_text.into()),
553 None,
554 None,
555 capability,
556 )
557 }
558
559 /// Create a new buffer that is a replica of a remote buffer, populating its
560 /// state from the given protobuf message.
561 pub fn from_proto(
562 replica_id: ReplicaId,
563 capability: Capability,
564 message: proto::BufferState,
565 file: Option<Arc<dyn File>>,
566 ) -> Result<Self> {
567 let buffer_id = BufferId::new(message.id)
568 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
569 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
570 let mut this = Self::build(buffer, message.diff_base, file, capability);
571 this.text.set_line_ending(proto::deserialize_line_ending(
572 rpc::proto::LineEnding::from_i32(message.line_ending)
573 .ok_or_else(|| anyhow!("missing line_ending"))?,
574 ));
575 this.saved_version = proto::deserialize_version(&message.saved_version);
576 this.saved_mtime = message.saved_mtime.map(|time| time.into());
577 Ok(this)
578 }
579
580 /// Serialize the buffer's state to a protobuf message.
581 pub fn to_proto(&self) -> proto::BufferState {
582 proto::BufferState {
583 id: self.remote_id().into(),
584 file: self.file.as_ref().map(|f| f.to_proto()),
585 base_text: self.base_text().to_string(),
586 diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
587 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
588 saved_version: proto::serialize_version(&self.saved_version),
589 saved_mtime: self.saved_mtime.map(|time| time.into()),
590 }
591 }
592
593 /// Serialize as protobufs all of the changes to the buffer since the given version.
594 pub fn serialize_ops(
595 &self,
596 since: Option<clock::Global>,
597 cx: &AppContext,
598 ) -> Task<Vec<proto::Operation>> {
599 let mut operations = Vec::new();
600 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
601
602 operations.extend(self.remote_selections.iter().map(|(_, set)| {
603 proto::serialize_operation(&Operation::UpdateSelections {
604 selections: set.selections.clone(),
605 lamport_timestamp: set.lamport_timestamp,
606 line_mode: set.line_mode,
607 cursor_shape: set.cursor_shape,
608 })
609 }));
610
611 for (server_id, diagnostics) in &self.diagnostics {
612 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
613 lamport_timestamp: self.diagnostics_timestamp,
614 server_id: *server_id,
615 diagnostics: diagnostics.iter().cloned().collect(),
616 }));
617 }
618
619 operations.push(proto::serialize_operation(
620 &Operation::UpdateCompletionTriggers {
621 triggers: self.completion_triggers.clone(),
622 lamport_timestamp: self.completion_triggers_timestamp,
623 },
624 ));
625
626 let text_operations = self.text.operations().clone();
627 cx.background_executor().spawn(async move {
628 let since = since.unwrap_or_default();
629 operations.extend(
630 text_operations
631 .iter()
632 .filter(|(_, op)| !since.observed(op.timestamp()))
633 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
634 );
635 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
636 operations
637 })
638 }
639
640 /// Assign a language to the buffer, returning the buffer.
641 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
642 self.set_language(Some(language), cx);
643 self
644 }
645
646 /// Returns the [Capability] of this buffer.
647 pub fn capability(&self) -> Capability {
648 self.capability
649 }
650
651 /// Whether this buffer can only be read.
652 pub fn read_only(&self) -> bool {
653 self.capability == Capability::ReadOnly
654 }
655
656 /// Builds a [Buffer] with the given underlying [TextBuffer], diff base, [File] and [Capability].
657 pub fn build(
658 buffer: TextBuffer,
659 diff_base: Option<String>,
660 file: Option<Arc<dyn File>>,
661 capability: Capability,
662 ) -> Self {
663 let saved_mtime = file.as_ref().and_then(|file| file.mtime());
664
665 Self {
666 saved_mtime,
667 saved_version: buffer.version(),
668 reload_task: None,
669 transaction_depth: 0,
670 was_dirty_before_starting_transaction: None,
671 text: buffer,
672 diff_base: diff_base
673 .map(|mut raw_diff_base| {
674 LineEnding::normalize(&mut raw_diff_base);
675 raw_diff_base
676 })
677 .map(Rope::from),
678 diff_base_version: 0,
679 git_diff: git::diff::BufferDiff::new(),
680 file,
681 capability,
682 syntax_map: Mutex::new(SyntaxMap::new()),
683 parsing_in_background: false,
684 parse_count: 0,
685 sync_parse_timeout: Duration::from_millis(1),
686 autoindent_requests: Default::default(),
687 pending_autoindent: Default::default(),
688 language: None,
689 remote_selections: Default::default(),
690 selections_update_count: 0,
691 diagnostics: Default::default(),
692 diagnostics_update_count: 0,
693 diagnostics_timestamp: Default::default(),
694 file_update_count: 0,
695 git_diff_update_count: 0,
696 completion_triggers: Default::default(),
697 completion_triggers_timestamp: Default::default(),
698 deferred_ops: OperationQueue::new(),
699 has_conflict: false,
700 }
701 }
702
703 /// Retrieve a snapshot of the buffer's current state. This is computationally
704 /// cheap, and allows reading from the buffer on a background thread.
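    ///
    /// # Examples
    ///
    /// An illustrative sketch only; it assumes a `Model<Buffer>` named `buffer`
    /// and a gpui `AppContext` named `cx`:
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_executor()
    ///     .spawn(async move {
    ///         // The snapshot can be read without holding the buffer model.
    ///         let line_count = snapshot.max_point().row + 1;
    ///         println!("buffer has {line_count} lines");
    ///     })
    ///     .detach();
    /// ```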
705 pub fn snapshot(&self) -> BufferSnapshot {
706 let text = self.text.snapshot();
707 let mut syntax_map = self.syntax_map.lock();
708 syntax_map.interpolate(&text);
709 let syntax = syntax_map.snapshot();
710
711 BufferSnapshot {
712 text,
713 syntax,
714 git_diff: self.git_diff.clone(),
715 file: self.file.clone(),
716 remote_selections: self.remote_selections.clone(),
717 diagnostics: self.diagnostics.clone(),
718 diagnostics_update_count: self.diagnostics_update_count,
719 file_update_count: self.file_update_count,
720 git_diff_update_count: self.git_diff_update_count,
721 language: self.language.clone(),
722 parse_count: self.parse_count,
723 selections_update_count: self.selections_update_count,
724 }
725 }
726
727 #[cfg(test)]
728 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
729 &self.text
730 }
731
732 /// Retrieve a snapshot of the buffer's raw text, without any
733 /// language-related state like the syntax tree or diagnostics.
734 pub fn text_snapshot(&self) -> text::BufferSnapshot {
735 self.text.snapshot()
736 }
737
738 /// The file associated with the buffer, if any.
739 pub fn file(&self) -> Option<&Arc<dyn File>> {
740 self.file.as_ref()
741 }
742
743 /// The version of the buffer that was last saved or reloaded from disk.
744 pub fn saved_version(&self) -> &clock::Global {
745 &self.saved_version
746 }
747
748 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
749 pub fn saved_mtime(&self) -> Option<SystemTime> {
750 self.saved_mtime
751 }
752
753 /// Assign a language to the buffer.
754 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
755 self.parse_count += 1;
756 self.syntax_map.lock().clear();
757 self.language = language;
758 self.reparse(cx);
759 cx.emit(Event::LanguageChanged);
760 }
761
762 /// Assign a language registry to the buffer. This allows the buffer to retrieve
763 /// other languages if parts of the buffer are written in different languages.
764 pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
765 self.syntax_map
766 .lock()
767 .set_language_registry(language_registry);
768 }
769
770 /// Assign the buffer a new [Capability].
771 pub fn set_capability(&mut self, capability: Capability, cx: &mut ModelContext<Self>) {
772 self.capability = capability;
773 cx.emit(Event::CapabilityChanged)
774 }
775
776 /// This method is called to signal that the buffer has been saved.
777 pub fn did_save(
778 &mut self,
779 version: clock::Global,
780 mtime: Option<SystemTime>,
781 cx: &mut ModelContext<Self>,
782 ) {
783 self.saved_version = version;
784 self.has_conflict = false;
785 self.saved_mtime = mtime;
786 cx.emit(Event::Saved);
787 cx.notify();
788 }
789
790 /// Reloads the contents of the buffer from disk.
791 pub fn reload(
792 &mut self,
793 cx: &mut ModelContext<Self>,
794 ) -> oneshot::Receiver<Option<Transaction>> {
795 let (tx, rx) = futures::channel::oneshot::channel();
796 let prev_version = self.text.version();
797 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
798 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
799 let file = this.file.as_ref()?.as_local()?;
800 Some((file.mtime(), file.load(cx)))
801 })?
802 else {
803 return Ok(());
804 };
805
806 let new_text = new_text.await?;
807 let diff = this
808 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
809 .await;
810 this.update(&mut cx, |this, cx| {
811 if this.version() == diff.base_version {
812 this.finalize_last_transaction();
813 this.apply_diff(diff, cx);
814 tx.send(this.finalize_last_transaction().cloned()).ok();
815 this.has_conflict = false;
816 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
817 } else {
818 if !diff.edits.is_empty()
819 || this
820 .edits_since::<usize>(&diff.base_version)
821 .next()
822 .is_some()
823 {
824 this.has_conflict = true;
825 }
826
827 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
828 }
829
830 this.reload_task.take();
831 })
832 }));
833 rx
834 }
835
836 /// This method is called to signal that the buffer has been reloaded.
837 pub fn did_reload(
838 &mut self,
839 version: clock::Global,
840 line_ending: LineEnding,
841 mtime: Option<SystemTime>,
842 cx: &mut ModelContext<Self>,
843 ) {
844 self.saved_version = version;
845 self.text.set_line_ending(line_ending);
846 self.saved_mtime = mtime;
847 if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
848 file.buffer_reloaded(
849 self.remote_id(),
850 &self.saved_version,
851 self.line_ending(),
852 self.saved_mtime,
853 cx,
854 );
855 }
856 cx.emit(Event::Reloaded);
857 cx.notify();
858 }
859
860 /// Updates the [File] backing this buffer. This should be called when
861 /// the file has changed or has been deleted.
862 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut ModelContext<Self>) {
863 let mut file_changed = false;
864
865 if let Some(old_file) = self.file.as_ref() {
866 if new_file.path() != old_file.path() {
867 file_changed = true;
868 }
869
870 if new_file.is_deleted() {
871 if !old_file.is_deleted() {
872 file_changed = true;
873 if !self.is_dirty() {
874 cx.emit(Event::DirtyChanged);
875 }
876 }
877 } else {
878 let new_mtime = new_file.mtime();
879 if new_mtime != old_file.mtime() {
880 file_changed = true;
881
882 if !self.is_dirty() {
883 self.reload(cx).close();
884 }
885 }
886 }
887 } else {
888 file_changed = true;
889 };
890
891 self.file = Some(new_file);
892 if file_changed {
893 self.file_update_count += 1;
894 cx.emit(Event::FileHandleChanged);
895 cx.notify();
896 }
897 }
898
    /// Returns the current diff base; see [Buffer::set_diff_base].
900 pub fn diff_base(&self) -> Option<&Rope> {
901 self.diff_base.as_ref()
902 }
903
904 /// Sets the text that will be used to compute a Git diff
905 /// against the buffer text.
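    ///
    /// # Examples
    ///
    /// An illustrative sketch only; `head_text` stands in for the file's
    /// contents at HEAD, obtained elsewhere:
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.set_diff_base(Some(head_text), cx);
    /// });
    /// // A `DiffBaseChanged` event is emitted once the diff has been recalculated.
    /// ```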
906 pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &mut ModelContext<Self>) {
907 self.diff_base = diff_base
908 .map(|mut raw_diff_base| {
909 LineEnding::normalize(&mut raw_diff_base);
910 raw_diff_base
911 })
912 .map(Rope::from);
913 self.diff_base_version += 1;
914 if let Some(recalc_task) = self.git_diff_recalc(cx) {
915 cx.spawn(|buffer, mut cx| async move {
916 recalc_task.await;
917 buffer
918 .update(&mut cx, |_, cx| {
919 cx.emit(Event::DiffBaseChanged);
920 })
921 .ok();
922 })
923 .detach();
924 }
925 }
926
    /// Returns a version number that is incremented each time a new diff base is set for the buffer.
928 pub fn diff_base_version(&self) -> usize {
929 self.diff_base_version
930 }
931
932 /// Recomputes the Git diff status.
933 pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) -> Option<Task<()>> {
934 let diff_base = self.diff_base.clone()?;
935 let snapshot = self.snapshot();
936
937 let mut diff = self.git_diff.clone();
938 let diff = cx.background_executor().spawn(async move {
939 diff.update(&diff_base, &snapshot).await;
940 diff
941 });
942
943 Some(cx.spawn(|this, mut cx| async move {
944 let buffer_diff = diff.await;
945 this.update(&mut cx, |this, cx| {
946 this.git_diff = buffer_diff;
947 this.git_diff_update_count += 1;
948 cx.emit(Event::DiffUpdated);
949 })
950 .ok();
951 }))
952 }
953
954 /// Returns the primary [Language] assigned to this [Buffer].
955 pub fn language(&self) -> Option<&Arc<Language>> {
956 self.language.as_ref()
957 }
958
959 /// Returns the [Language] at the given location.
960 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
961 let offset = position.to_offset(self);
962 self.syntax_map
963 .lock()
964 .layers_for_range(offset..offset, &self.text)
965 .last()
966 .map(|info| info.language.clone())
967 .or_else(|| self.language.clone())
968 }
969
970 /// The number of times the buffer was parsed.
971 pub fn parse_count(&self) -> usize {
972 self.parse_count
973 }
974
975 /// The number of times selections were updated.
976 pub fn selections_update_count(&self) -> usize {
977 self.selections_update_count
978 }
979
980 /// The number of times diagnostics were updated.
981 pub fn diagnostics_update_count(&self) -> usize {
982 self.diagnostics_update_count
983 }
984
985 /// The number of times the underlying file was updated.
986 pub fn file_update_count(&self) -> usize {
987 self.file_update_count
988 }
989
990 /// The number of times the git diff status was updated.
991 pub fn git_diff_update_count(&self) -> usize {
992 self.git_diff_update_count
993 }
994
995 /// Whether the buffer is being parsed in the background.
996 #[cfg(any(test, feature = "test-support"))]
997 pub fn is_parsing(&self) -> bool {
998 self.parsing_in_background
999 }
1000
1001 /// Indicates whether the buffer contains any regions that may be
1002 /// written in a language that hasn't been loaded yet.
1003 pub fn contains_unknown_injections(&self) -> bool {
1004 self.syntax_map.lock().contains_unknown_injections()
1005 }
1006
1007 #[cfg(test)]
1008 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1009 self.sync_parse_timeout = timeout;
1010 }
1011
1012 /// Called after an edit to synchronize the buffer's main parse tree with
1013 /// the buffer's new underlying state.
1014 ///
1015 /// Locks the syntax map and interpolates the edits since the last reparse
1016 /// into the foreground syntax tree.
1017 ///
1018 /// Then takes a stable snapshot of the syntax map before unlocking it.
1019 /// The snapshot with the interpolated edits is sent to a background thread,
1020 /// where we ask Tree-sitter to perform an incremental parse.
1021 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting for the parse to complete. If it finishes within that time, we
    /// proceed synchronously with the freshly parsed tree.
    ///
    /// If we time out waiting for the parse, we return with the interpolated
    /// tree still in the foreground, and spawn a second task that waits for the
    /// parse to finish. When the background parse completes, it calls back into
    /// the main thread and assigns the new parse state.
1030 ///
1031 /// If the buffer or grammar changed since the start of the background parse,
1032 /// initiate an additional reparse recursively. To avoid concurrent parses
1033 /// for the same buffer, we only initiate a new parse if we are not already
1034 /// parsing in the background.
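    ///
    /// # Examples
    ///
    /// An illustrative sketch only; it assumes a test context and a hypothetical
    /// helper `rust_lang()` returning an `Arc<Language>`:
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     // `set_language` clears the syntax map and calls `reparse` itself.
    ///     buffer.set_language(Some(rust_lang()), cx);
    /// });
    /// // In tests, completion of the parse can be observed via `is_parsing`.
    /// ```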
1035 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1036 if self.parsing_in_background {
1037 return;
1038 }
1039 let language = if let Some(language) = self.language.clone() {
1040 language
1041 } else {
1042 return;
1043 };
1044
1045 let text = self.text_snapshot();
1046 let parsed_version = self.version();
1047
1048 let mut syntax_map = self.syntax_map.lock();
1049 syntax_map.interpolate(&text);
1050 let language_registry = syntax_map.language_registry();
1051 let mut syntax_snapshot = syntax_map.snapshot();
1052 drop(syntax_map);
1053
1054 let parse_task = cx.background_executor().spawn({
1055 let language = language.clone();
1056 let language_registry = language_registry.clone();
1057 async move {
1058 syntax_snapshot.reparse(&text, language_registry, language);
1059 syntax_snapshot
1060 }
1061 });
1062
1063 match cx
1064 .background_executor()
1065 .block_with_timeout(self.sync_parse_timeout, parse_task)
1066 {
1067 Ok(new_syntax_snapshot) => {
1068 self.did_finish_parsing(new_syntax_snapshot, cx);
1069 return;
1070 }
1071 Err(parse_task) => {
1072 self.parsing_in_background = true;
1073 cx.spawn(move |this, mut cx| async move {
1074 let new_syntax_map = parse_task.await;
1075 this.update(&mut cx, move |this, cx| {
1076 let grammar_changed =
1077 this.language.as_ref().map_or(true, |current_language| {
1078 !Arc::ptr_eq(&language, current_language)
1079 });
1080 let language_registry_changed = new_syntax_map
1081 .contains_unknown_injections()
1082 && language_registry.map_or(false, |registry| {
1083 registry.version() != new_syntax_map.language_registry_version()
1084 });
1085 let parse_again = language_registry_changed
1086 || grammar_changed
1087 || this.version.changed_since(&parsed_version);
1088 this.did_finish_parsing(new_syntax_map, cx);
1089 this.parsing_in_background = false;
1090 if parse_again {
1091 this.reparse(cx);
1092 }
1093 })
1094 .ok();
1095 })
1096 .detach();
1097 }
1098 }
1099 }
1100
1101 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
1102 self.parse_count += 1;
1103 self.syntax_map.lock().did_parse(syntax_snapshot);
1104 self.request_autoindent(cx);
1105 cx.emit(Event::Reparsed);
1106 cx.notify();
1107 }
1108
1109 /// Assign to the buffer a set of diagnostics created by a given language server.
1110 pub fn update_diagnostics(
1111 &mut self,
1112 server_id: LanguageServerId,
1113 diagnostics: DiagnosticSet,
1114 cx: &mut ModelContext<Self>,
1115 ) {
1116 let lamport_timestamp = self.text.lamport_clock.tick();
1117 let op = Operation::UpdateDiagnostics {
1118 server_id,
1119 diagnostics: diagnostics.iter().cloned().collect(),
1120 lamport_timestamp,
1121 };
1122 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1123 self.send_operation(op, cx);
1124 }
1125
1126 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
1127 if let Some(indent_sizes) = self.compute_autoindents() {
1128 let indent_sizes = cx.background_executor().spawn(indent_sizes);
1129 match cx
1130 .background_executor()
1131 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1132 {
1133 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1134 Err(indent_sizes) => {
1135 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1136 let indent_sizes = indent_sizes.await;
1137 this.update(&mut cx, |this, cx| {
1138 this.apply_autoindents(indent_sizes, cx);
1139 })
1140 .ok();
1141 }));
1142 }
1143 }
1144 } else {
1145 self.autoindent_requests.clear();
1146 }
1147 }
1148
1149 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
1150 let max_rows_between_yields = 100;
1151 let snapshot = self.snapshot();
1152 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1153 return None;
1154 }
1155
1156 let autoindent_requests = self.autoindent_requests.clone();
1157 Some(async move {
1158 let mut indent_sizes = BTreeMap::new();
1159 for request in autoindent_requests {
1160 // Resolve each edited range to its row in the current buffer and in the
1161 // buffer before this batch of edits.
1162 let mut row_ranges = Vec::new();
1163 let mut old_to_new_rows = BTreeMap::new();
1164 let mut language_indent_sizes_by_new_row = Vec::new();
1165 for entry in &request.entries {
1166 let position = entry.range.start;
1167 let new_row = position.to_point(&snapshot).row;
1168 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1169 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1170
1171 if !entry.first_line_is_new {
1172 let old_row = position.to_point(&request.before_edit).row;
1173 old_to_new_rows.insert(old_row, new_row);
1174 }
1175 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1176 }
1177
1178 // Build a map containing the suggested indentation for each of the edited lines
1179 // with respect to the state of the buffer before these edits. This map is keyed
1180 // by the rows for these lines in the current state of the buffer.
1181 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1182 let old_edited_ranges =
1183 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1184 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1185 let mut language_indent_size = IndentSize::default();
1186 for old_edited_range in old_edited_ranges {
1187 let suggestions = request
1188 .before_edit
1189 .suggest_autoindents(old_edited_range.clone())
1190 .into_iter()
1191 .flatten();
1192 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1193 if let Some(suggestion) = suggestion {
1194 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1195
1196 // Find the indent size based on the language for this row.
1197 while let Some((row, size)) = language_indent_sizes.peek() {
1198 if *row > new_row {
1199 break;
1200 }
1201 language_indent_size = *size;
1202 language_indent_sizes.next();
1203 }
1204
1205 let suggested_indent = old_to_new_rows
1206 .get(&suggestion.basis_row)
1207 .and_then(|from_row| {
1208 Some(old_suggestions.get(from_row).copied()?.0)
1209 })
1210 .unwrap_or_else(|| {
1211 request
1212 .before_edit
1213 .indent_size_for_line(suggestion.basis_row)
1214 })
1215 .with_delta(suggestion.delta, language_indent_size);
1216 old_suggestions
1217 .insert(new_row, (suggested_indent, suggestion.within_error));
1218 }
1219 }
1220 yield_now().await;
1221 }
1222
1223 // In block mode, only compute indentation suggestions for the first line
1224 // of each insertion. Otherwise, compute suggestions for every inserted line.
1225 let new_edited_row_ranges = contiguous_ranges(
1226 row_ranges.iter().flat_map(|(range, _)| {
1227 if request.is_block_mode {
1228 range.start..range.start + 1
1229 } else {
1230 range.clone()
1231 }
1232 }),
1233 max_rows_between_yields,
1234 );
1235
1236 // Compute new suggestions for each line, but only include them in the result
1237 // if they differ from the old suggestion for that line.
1238 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1239 let mut language_indent_size = IndentSize::default();
1240 for new_edited_row_range in new_edited_row_ranges {
1241 let suggestions = snapshot
1242 .suggest_autoindents(new_edited_row_range.clone())
1243 .into_iter()
1244 .flatten();
1245 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1246 if let Some(suggestion) = suggestion {
1247 // Find the indent size based on the language for this row.
1248 while let Some((row, size)) = language_indent_sizes.peek() {
1249 if *row > new_row {
1250 break;
1251 }
1252 language_indent_size = *size;
1253 language_indent_sizes.next();
1254 }
1255
1256 let suggested_indent = indent_sizes
1257 .get(&suggestion.basis_row)
1258 .copied()
1259 .unwrap_or_else(|| {
1260 snapshot.indent_size_for_line(suggestion.basis_row)
1261 })
1262 .with_delta(suggestion.delta, language_indent_size);
1263 if old_suggestions.get(&new_row).map_or(
1264 true,
1265 |(old_indentation, was_within_error)| {
1266 suggested_indent != *old_indentation
1267 && (!suggestion.within_error || *was_within_error)
1268 },
1269 ) {
1270 indent_sizes.insert(new_row, suggested_indent);
1271 }
1272 }
1273 }
1274 yield_now().await;
1275 }
1276
1277 // For each block of inserted text, adjust the indentation of the remaining
1278 // lines of the block by the same amount as the first line was adjusted.
1279 if request.is_block_mode {
1280 for (row_range, original_indent_column) in
1281 row_ranges
1282 .into_iter()
1283 .filter_map(|(range, original_indent_column)| {
1284 if range.len() > 1 {
1285 Some((range, original_indent_column?))
1286 } else {
1287 None
1288 }
1289 })
1290 {
1291 let new_indent = indent_sizes
1292 .get(&row_range.start)
1293 .copied()
1294 .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
1295 let delta = new_indent.len as i64 - original_indent_column as i64;
1296 if delta != 0 {
1297 for row in row_range.skip(1) {
1298 indent_sizes.entry(row).or_insert_with(|| {
1299 let mut size = snapshot.indent_size_for_line(row);
1300 if size.kind == new_indent.kind {
1301 match delta.cmp(&0) {
1302 Ordering::Greater => size.len += delta as u32,
1303 Ordering::Less => {
1304 size.len = size.len.saturating_sub(-delta as u32)
1305 }
1306 Ordering::Equal => {}
1307 }
1308 }
1309 size
1310 });
1311 }
1312 }
1313 }
1314 }
1315 }
1316
1317 indent_sizes
1318 })
1319 }
1320
1321 fn apply_autoindents(
1322 &mut self,
1323 indent_sizes: BTreeMap<u32, IndentSize>,
1324 cx: &mut ModelContext<Self>,
1325 ) {
1326 self.autoindent_requests.clear();
1327
1328 let edits: Vec<_> = indent_sizes
1329 .into_iter()
1330 .filter_map(|(row, indent_size)| {
1331 let current_size = indent_size_for_line(self, row);
1332 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1333 })
1334 .collect();
1335
1336 self.edit(edits, None, cx);
1337 }
1338
1339 /// Create a minimal edit that will cause the given row to be indented
1340 /// with the given size. After applying this edit, the length of the line
1341 /// will always be at least `new_size.len`.
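    ///
    /// # Examples
    ///
    /// An illustrative sketch only:
    ///
    /// ```ignore
    /// // Growing the indentation of row 2 from 4 spaces to 8 inserts four
    /// // spaces at the start of that line.
    /// let current = IndentSize { len: 4, kind: IndentKind::Space };
    /// let new = IndentSize { len: 8, kind: IndentKind::Space };
    /// let (range, text) = Buffer::edit_for_indent_size_adjustment(2, current, new).unwrap();
    /// assert_eq!(range, Point::new(2, 0)..Point::new(2, 0));
    /// assert_eq!(text, "    ");
    /// ```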
1342 pub fn edit_for_indent_size_adjustment(
1343 row: u32,
1344 current_size: IndentSize,
1345 new_size: IndentSize,
1346 ) -> Option<(Range<Point>, String)> {
1347 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1349 Ordering::Greater => {
1350 let point = Point::new(row, 0);
1351 Some((
1352 point..point,
1353 iter::repeat(new_size.char())
1354 .take((new_size.len - current_size.len) as usize)
1355 .collect::<String>(),
1356 ))
1357 }
1358
1359 Ordering::Less => Some((
1360 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1361 String::new(),
1362 )),
1363
1364 Ordering::Equal => None,
1365 }
1366 } else {
1367 Some((
1368 Point::new(row, 0)..Point::new(row, current_size.len),
1369 iter::repeat(new_size.char())
1370 .take(new_size.len as usize)
1371 .collect::<String>(),
1372 ))
1373 }
1374 }
1375
1376 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1377 /// and the given new text.
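    ///
    /// # Examples
    ///
    /// An illustrative sketch only; it assumes an async test context:
    ///
    /// ```ignore
    /// let diff = buffer
    ///     .update(cx, |buffer, cx| buffer.diff("new contents".to_string(), cx))
    ///     .await;
    /// buffer.update(cx, |buffer, cx| {
    ///     // Applies cleanly because the buffer hasn't changed since the diff was computed.
    ///     buffer.apply_diff(diff, cx);
    /// });
    /// ```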
1378 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1379 let old_text = self.as_rope().clone();
1380 let base_version = self.version();
1381 cx.background_executor()
1382 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1383 let old_text = old_text.to_string();
1384 let line_ending = LineEnding::detect(&new_text);
1385 LineEnding::normalize(&mut new_text);
1386
1387 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1388 let empty: Arc<str> = "".into();
1389
1390 let mut edits = Vec::new();
1391 let mut old_offset = 0;
1392 let mut new_offset = 0;
1393 let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
1394 for change in diff.iter_all_changes().map(Some).chain([None]) {
1395 if let Some(change) = &change {
1396 let len = change.value().len();
1397 match change.tag() {
1398 ChangeTag::Equal => {
1399 old_offset += len;
1400 new_offset += len;
1401 }
1402 ChangeTag::Delete => {
1403 let old_end_offset = old_offset + len;
1404 if let Some((last_old_range, _)) = &mut last_edit {
1405 last_old_range.end = old_end_offset;
1406 } else {
1407 last_edit =
1408 Some((old_offset..old_end_offset, new_offset..new_offset));
1409 }
1410 old_offset = old_end_offset;
1411 }
1412 ChangeTag::Insert => {
1413 let new_end_offset = new_offset + len;
1414 if let Some((_, last_new_range)) = &mut last_edit {
1415 last_new_range.end = new_end_offset;
1416 } else {
1417 last_edit =
1418 Some((old_offset..old_offset, new_offset..new_end_offset));
1419 }
1420 new_offset = new_end_offset;
1421 }
1422 }
1423 }
1424
1425 if let Some((old_range, new_range)) = &last_edit {
1426 if old_offset > old_range.end
1427 || new_offset > new_range.end
1428 || change.is_none()
1429 {
1430 let text = if new_range.is_empty() {
1431 empty.clone()
1432 } else {
1433 new_text[new_range.clone()].into()
1434 };
1435 edits.push((old_range.clone(), text));
1436 last_edit.take();
1437 }
1438 }
1439 }
1440
1441 Diff {
1442 base_version,
1443 line_ending,
1444 edits,
1445 }
1446 })
1447 }
1448
1449 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1451 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1452 let old_text = self.as_rope().clone();
1453 let line_ending = self.line_ending();
1454 let base_version = self.version();
1455 cx.background_executor().spawn(async move {
1456 let ranges = trailing_whitespace_ranges(&old_text);
1457 let empty = Arc::<str>::from("");
1458 Diff {
1459 base_version,
1460 line_ending,
1461 edits: ranges
1462 .into_iter()
1463 .map(|range| (range, empty.clone()))
1464 .collect(),
1465 }
1466 })
1467 }
1468
1469 /// Ensures that the buffer ends with a single newline character, and
1470 /// no other whitespace.
1471 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1472 let len = self.len();
1473 let mut offset = len;
1474 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1475 let non_whitespace_len = chunk
1476 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1477 .len();
1478 offset -= chunk.len();
1479 offset += non_whitespace_len;
1480 if non_whitespace_len != 0 {
1481 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1482 return;
1483 }
1484 break;
1485 }
1486 }
1487 self.edit([(offset..len, "\n")], None, cx);
1488 }
1489
1490 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1491 /// calculated, then adjust the diff to account for those changes, and discard any
1492 /// parts of the diff that conflict with those changes.
1493 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1494 // Check for any edits to the buffer that have occurred since this diff
1495 // was computed.
1496 let snapshot = self.snapshot();
1497 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1498 let mut delta = 0;
1499 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1500 while let Some(edit_since) = edits_since.peek() {
1501 // If the edit occurs after a diff hunk, then it does not
1502 // affect that hunk.
1503 if edit_since.old.start > range.end {
1504 break;
1505 }
1506 // If the edit precedes the diff hunk, then adjust the hunk
1507 // to reflect the edit.
1508 else if edit_since.old.end < range.start {
1509 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1510 edits_since.next();
1511 }
1512 // If the edit intersects a diff hunk, then discard that hunk.
1513 else {
1514 return None;
1515 }
1516 }
1517
1518 let start = (range.start as i64 + delta) as usize;
1519 let end = (range.end as i64 + delta) as usize;
1520 Some((start..end, new_text))
1521 });
1522
1523 self.start_transaction();
1524 self.text.set_line_ending(diff.line_ending);
1525 self.edit(adjusted_edits, None, cx);
1526 self.end_transaction(cx)
1527 }
1528
1529 fn changed_since_saved_version(&self) -> bool {
1530 self.edits_since::<usize>(&self.saved_version)
1531 .next()
1532 .is_some()
1533 }
1534 /// Checks if the buffer has unsaved changes.
1535 pub fn is_dirty(&self) -> bool {
1536 (self.has_conflict || self.changed_since_saved_version())
1537 || self
1538 .file
1539 .as_ref()
1540 .map_or(false, |file| file.is_deleted() || !file.is_created())
1541 }
1542
1543 /// Checks if the buffer and its file have both changed since the buffer
1544 /// was last saved or reloaded.
1545 pub fn has_conflict(&self) -> bool {
1546 (self.has_conflict || self.changed_since_saved_version())
1547 && self
1548 .file
1549 .as_ref()
1550 .map_or(false, |file| file.mtime() > self.saved_mtime)
1551 }
1552
1553 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1554 pub fn subscribe(&mut self) -> Subscription {
1555 self.text.subscribe()
1556 }
1557
1558 /// Starts a transaction, if one is not already in-progress. When undoing or
1559 /// redoing edits, all of the edits performed within a transaction are undone
1560 /// or redone together.
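    ///
    /// # Examples
    ///
    /// An illustrative sketch only, grouping two edits so that a single undo
    /// reverts both; it assumes a `&mut Buffer` inside a model update:
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "a")], None, cx);
    /// buffer.edit([(1..1, "b")], None, cx);
    /// buffer.end_transaction(cx);
    /// buffer.undo(cx); // removes both "a" and "b"
    /// ```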
1561 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1562 self.start_transaction_at(Instant::now())
1563 }
1564
1565 /// Starts a transaction, providing the current time. Subsequent transactions
1566 /// that occur within a short period of time will be grouped together. This
1567 /// is controlled by the buffer's undo grouping duration.
1568 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1569 self.transaction_depth += 1;
1570 if self.was_dirty_before_starting_transaction.is_none() {
1571 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1572 }
1573 self.text.start_transaction_at(now)
1574 }
1575
1576 /// Terminates the current transaction, if this is the outermost transaction.
1577 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1578 self.end_transaction_at(Instant::now(), cx)
1579 }
1580
1581 /// Terminates the current transaction, providing the current time. Subsequent transactions
1582 /// that occur within a short period of time will be grouped together. This
1583 /// is controlled by the buffer's undo grouping duration.
1584 pub fn end_transaction_at(
1585 &mut self,
1586 now: Instant,
1587 cx: &mut ModelContext<Self>,
1588 ) -> Option<TransactionId> {
1589 assert!(self.transaction_depth > 0);
1590 self.transaction_depth -= 1;
1591 let was_dirty = if self.transaction_depth == 0 {
1592 self.was_dirty_before_starting_transaction.take().unwrap()
1593 } else {
1594 false
1595 };
1596 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1597 self.did_edit(&start_version, was_dirty, cx);
1598 Some(transaction_id)
1599 } else {
1600 None
1601 }
1602 }
1603
1604 /// Manually add a transaction to the buffer's undo history.
1605 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1606 self.text.push_transaction(transaction, now);
1607 }
1608
1609 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
1611 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1612 self.text.finalize_last_transaction()
1613 }
1614
1615 /// Manually group all changes since a given transaction.
1616 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1617 self.text.group_until_transaction(transaction_id);
1618 }
1619
1620 /// Manually remove a transaction from the buffer's undo history
1621 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1622 self.text.forget_transaction(transaction_id);
1623 }
1624
1625 /// Manually merge two adjacent transactions in the buffer's undo history.
1626 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1627 self.text.merge_transactions(transaction, destination);
1628 }
1629
1630 /// Waits for the buffer to receive operations with the given timestamps.
1631 pub fn wait_for_edits(
1632 &mut self,
1633 edit_ids: impl IntoIterator<Item = clock::Lamport>,
1634 ) -> impl Future<Output = Result<()>> {
1635 self.text.wait_for_edits(edit_ids)
1636 }
1637
1638 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
1639 pub fn wait_for_anchors(
1640 &mut self,
1641 anchors: impl IntoIterator<Item = Anchor>,
1642 ) -> impl 'static + Future<Output = Result<()>> {
1643 self.text.wait_for_anchors(anchors)
1644 }
1645
1646 /// Waits for the buffer to receive operations up to the given version.
1647 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1648 self.text.wait_for_version(version)
1649 }
1650
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
    /// or [`Buffer::wait_for_version`] to resolve with an error.
1653 pub fn give_up_waiting(&mut self) {
1654 self.text.give_up_waiting();
1655 }
1656
1657 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
1658 pub fn set_active_selections(
1659 &mut self,
1660 selections: Arc<[Selection<Anchor>]>,
1661 line_mode: bool,
1662 cursor_shape: CursorShape,
1663 cx: &mut ModelContext<Self>,
1664 ) {
1665 let lamport_timestamp = self.text.lamport_clock.tick();
1666 self.remote_selections.insert(
1667 self.text.replica_id(),
1668 SelectionSet {
1669 selections: selections.clone(),
1670 lamport_timestamp,
1671 line_mode,
1672 cursor_shape,
1673 },
1674 );
1675 self.send_operation(
1676 Operation::UpdateSelections {
1677 selections,
1678 line_mode,
1679 lamport_timestamp,
1680 cursor_shape,
1681 },
1682 cx,
1683 );
1684 }
1685
1686 /// Clears the selections, so that other replicas of the buffer do not see any selections for
1687 /// this replica.
1688 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1689 if self
1690 .remote_selections
1691 .get(&self.text.replica_id())
1692 .map_or(true, |set| !set.selections.is_empty())
1693 {
1694 self.set_active_selections(Arc::from([]), false, Default::default(), cx);
1695 }
1696 }
1697
1698 /// Replaces the buffer's entire text.
1699 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Lamport>
1700 where
1701 T: Into<Arc<str>>,
1702 {
1703 self.autoindent_requests.clear();
1704 self.edit([(0..self.len(), text)], None, cx)
1705 }
1706
1707 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
1708 /// delete, and a string of text to insert at that location.
1709 ///
1710 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
1711 /// request for the edited ranges, which will be processed when the buffer finishes
1712 /// parsing.
1713 ///
1714 /// Parsing takes place at the end of a transaction, and may compute synchronously
1715 /// or asynchronously, depending on the changes.
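    ///
    /// # Examples
    ///
    /// An illustrative sketch only; it assumes a `&mut Buffer` inside a model
    /// update and a `ModelContext` named `cx`:
    ///
    /// ```ignore
    /// // Delete the first three characters and append a comment, without
    /// // requesting auto-indent.
    /// let end = buffer.len();
    /// buffer.edit([(0..3, ""), (end..end, "\n// done\n")], None, cx);
    /// ```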
1716 pub fn edit<I, S, T>(
1717 &mut self,
1718 edits_iter: I,
1719 autoindent_mode: Option<AutoindentMode>,
1720 cx: &mut ModelContext<Self>,
1721 ) -> Option<clock::Lamport>
1722 where
1723 I: IntoIterator<Item = (Range<S>, T)>,
1724 S: ToOffset,
1725 T: Into<Arc<str>>,
1726 {
1727 // Skip invalid edits and coalesce contiguous ones.
1728 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1729 for (range, new_text) in edits_iter {
1730 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1731 if range.start > range.end {
1732 mem::swap(&mut range.start, &mut range.end);
1733 }
1734 let new_text = new_text.into();
1735 if !new_text.is_empty() || !range.is_empty() {
1736 if let Some((prev_range, prev_text)) = edits.last_mut() {
1737 if prev_range.end >= range.start {
1738 prev_range.end = cmp::max(prev_range.end, range.end);
1739 *prev_text = format!("{prev_text}{new_text}").into();
1740 } else {
1741 edits.push((range, new_text));
1742 }
1743 } else {
1744 edits.push((range, new_text));
1745 }
1746 }
1747 }
1748 if edits.is_empty() {
1749 return None;
1750 }
1751
1752 self.start_transaction();
1753 self.pending_autoindent.take();
1754 let autoindent_request = autoindent_mode
1755 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1756
1757 let edit_operation = self.text.edit(edits.iter().cloned());
1758 let edit_id = edit_operation.timestamp();
1759
1760 if let Some((before_edit, mode)) = autoindent_request {
1761 let mut delta = 0isize;
1762 let entries = edits
1763 .into_iter()
1764 .enumerate()
1765 .zip(&edit_operation.as_edit().unwrap().new_text)
1766 .map(|((ix, (range, _)), new_text)| {
1767 let new_text_length = new_text.len();
1768 let old_start = range.start.to_point(&before_edit);
1769 let new_start = (delta + range.start as isize) as usize;
1770 delta += new_text_length as isize - (range.end as isize - range.start as isize);
1771
1772 let mut range_of_insertion_to_indent = 0..new_text_length;
1773 let mut first_line_is_new = false;
1774 let mut original_indent_column = None;
1775
1776 // When inserting an entire line at the beginning of an existing line,
1777 // treat the insertion as new.
1778 if new_text.contains('\n')
1779 && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
1780 {
1781 first_line_is_new = true;
1782 }
1783
1784 // When inserting text starting with a newline, avoid auto-indenting the
1785 // previous line.
1786 if new_text.starts_with('\n') {
1787 range_of_insertion_to_indent.start += 1;
1788 first_line_is_new = true;
1789 }
1790
                    // In block mode, record the original indentation of the inserted text and
                    // avoid auto-indenting the line that follows the insertion.
1792 if let AutoindentMode::Block {
1793 original_indent_columns,
1794 } = &mode
1795 {
1796 original_indent_column =
1797 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
1798 indent_size_for_text(
1799 new_text[range_of_insertion_to_indent.clone()].chars(),
1800 )
1801 .len
1802 }));
1803 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
1804 range_of_insertion_to_indent.end -= 1;
1805 }
1806 }
1807
1808 AutoindentRequestEntry {
1809 first_line_is_new,
1810 original_indent_column,
1811 indent_size: before_edit.language_indent_size_at(range.start, cx),
1812 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
1813 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
1814 }
1815 })
1816 .collect();
1817
1818 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1819 before_edit,
1820 entries,
1821 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
1822 }));
1823 }
1824
1825 self.end_transaction(cx);
1826 self.send_operation(Operation::Buffer(edit_operation), cx);
1827 Some(edit_id)
1828 }
1829
1830 fn did_edit(
1831 &mut self,
1832 old_version: &clock::Global,
1833 was_dirty: bool,
1834 cx: &mut ModelContext<Self>,
1835 ) {
1836 if self.edits_since::<usize>(old_version).next().is_none() {
1837 return;
1838 }
1839
1840 self.reparse(cx);
1841
1842 cx.emit(Event::Edited);
1843 if was_dirty != self.is_dirty() {
1844 cx.emit(Event::DirtyChanged);
1845 }
1846 cx.notify();
1847 }
1848
1849 /// Applies the given remote operations to the buffer.
1850 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1851 &mut self,
1852 ops: I,
1853 cx: &mut ModelContext<Self>,
1854 ) -> Result<()> {
1855 self.pending_autoindent.take();
1856 let was_dirty = self.is_dirty();
1857 let old_version = self.version.clone();
1858 let mut deferred_ops = Vec::new();
1859 let buffer_ops = ops
1860 .into_iter()
1861 .filter_map(|op| match op {
1862 Operation::Buffer(op) => Some(op),
1863 _ => {
1864 if self.can_apply_op(&op) {
1865 self.apply_op(op, cx);
1866 } else {
1867 deferred_ops.push(op);
1868 }
1869 None
1870 }
1871 })
1872 .collect::<Vec<_>>();
1873 self.text.apply_ops(buffer_ops)?;
1874 self.deferred_ops.insert(deferred_ops);
1875 self.flush_deferred_ops(cx);
1876 self.did_edit(&old_version, was_dirty, cx);
        // Notify even if the buffer was not edited, since the applied operations may
        // include a selection update.
1879 cx.notify();
1880 Ok(())
1881 }
1882
1883 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1884 let mut deferred_ops = Vec::new();
1885 for op in self.deferred_ops.drain().iter().cloned() {
1886 if self.can_apply_op(&op) {
1887 self.apply_op(op, cx);
1888 } else {
1889 deferred_ops.push(op);
1890 }
1891 }
1892 self.deferred_ops.insert(deferred_ops);
1893 }
1894
1895 fn can_apply_op(&self, operation: &Operation) -> bool {
1896 match operation {
1897 Operation::Buffer(_) => {
1898 unreachable!("buffer operations should never be applied at this layer")
1899 }
1900 Operation::UpdateDiagnostics {
1901 diagnostics: diagnostic_set,
1902 ..
1903 } => diagnostic_set.iter().all(|diagnostic| {
1904 self.text.can_resolve(&diagnostic.range.start)
1905 && self.text.can_resolve(&diagnostic.range.end)
1906 }),
1907 Operation::UpdateSelections { selections, .. } => selections
1908 .iter()
1909 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1910 Operation::UpdateCompletionTriggers { .. } => true,
1911 }
1912 }
1913
1914 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1915 match operation {
1916 Operation::Buffer(_) => {
1917 unreachable!("buffer operations should never be applied at this layer")
1918 }
1919 Operation::UpdateDiagnostics {
1920 server_id,
1921 diagnostics: diagnostic_set,
1922 lamport_timestamp,
1923 } => {
1924 let snapshot = self.snapshot();
1925 self.apply_diagnostic_update(
1926 server_id,
1927 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1928 lamport_timestamp,
1929 cx,
1930 );
1931 }
1932 Operation::UpdateSelections {
1933 selections,
1934 lamport_timestamp,
1935 line_mode,
1936 cursor_shape,
1937 } => {
1938 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
1939 if set.lamport_timestamp > lamport_timestamp {
1940 return;
1941 }
1942 }
1943
1944 self.remote_selections.insert(
1945 lamport_timestamp.replica_id,
1946 SelectionSet {
1947 selections,
1948 lamport_timestamp,
1949 line_mode,
1950 cursor_shape,
1951 },
1952 );
1953 self.text.lamport_clock.observe(lamport_timestamp);
1954 self.selections_update_count += 1;
1955 }
1956 Operation::UpdateCompletionTriggers {
1957 triggers,
1958 lamport_timestamp,
1959 } => {
1960 self.completion_triggers = triggers;
1961 self.text.lamport_clock.observe(lamport_timestamp);
1962 }
1963 }
1964 }
1965
1966 fn apply_diagnostic_update(
1967 &mut self,
1968 server_id: LanguageServerId,
1969 diagnostics: DiagnosticSet,
1970 lamport_timestamp: clock::Lamport,
1971 cx: &mut ModelContext<Self>,
1972 ) {
1973 if lamport_timestamp > self.diagnostics_timestamp {
1974 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
1975 if diagnostics.len() == 0 {
1976 if let Ok(ix) = ix {
1977 self.diagnostics.remove(ix);
1978 }
1979 } else {
1980 match ix {
1981 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
1982 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
1983 };
1984 }
1985 self.diagnostics_timestamp = lamport_timestamp;
1986 self.diagnostics_update_count += 1;
1987 self.text.lamport_clock.observe(lamport_timestamp);
1988 cx.notify();
1989 cx.emit(Event::DiagnosticsUpdated);
1990 }
1991 }
1992
1993 fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1994 cx.emit(Event::Operation(operation));
1995 }
1996
1997 /// Removes the selections for a given peer.
1998 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
1999 self.remote_selections.remove(&replica_id);
2000 cx.notify();
2001 }
2002
2003 /// Undoes the most recent transaction.
2004 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2005 let was_dirty = self.is_dirty();
2006 let old_version = self.version.clone();
2007
2008 if let Some((transaction_id, operation)) = self.text.undo() {
2009 self.send_operation(Operation::Buffer(operation), cx);
2010 self.did_edit(&old_version, was_dirty, cx);
2011 Some(transaction_id)
2012 } else {
2013 None
2014 }
2015 }
2016
2017 /// Manually undoes a specific transaction in the buffer's undo history.
2018 pub fn undo_transaction(
2019 &mut self,
2020 transaction_id: TransactionId,
2021 cx: &mut ModelContext<Self>,
2022 ) -> bool {
2023 let was_dirty = self.is_dirty();
2024 let old_version = self.version.clone();
2025 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2026 self.send_operation(Operation::Buffer(operation), cx);
2027 self.did_edit(&old_version, was_dirty, cx);
2028 true
2029 } else {
2030 false
2031 }
2032 }
2033
2034 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2035 pub fn undo_to_transaction(
2036 &mut self,
2037 transaction_id: TransactionId,
2038 cx: &mut ModelContext<Self>,
2039 ) -> bool {
2040 let was_dirty = self.is_dirty();
2041 let old_version = self.version.clone();
2042
2043 let operations = self.text.undo_to_transaction(transaction_id);
2044 let undone = !operations.is_empty();
2045 for operation in operations {
2046 self.send_operation(Operation::Buffer(operation), cx);
2047 }
2048 if undone {
2049 self.did_edit(&old_version, was_dirty, cx)
2050 }
2051 undone
2052 }
2053
    /// Redoes the most recently undone transaction.
2055 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2056 let was_dirty = self.is_dirty();
2057 let old_version = self.version.clone();
2058
2059 if let Some((transaction_id, operation)) = self.text.redo() {
2060 self.send_operation(Operation::Buffer(operation), cx);
2061 self.did_edit(&old_version, was_dirty, cx);
2062 Some(transaction_id)
2063 } else {
2064 None
2065 }
2066 }
2067
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2069 pub fn redo_to_transaction(
2070 &mut self,
2071 transaction_id: TransactionId,
2072 cx: &mut ModelContext<Self>,
2073 ) -> bool {
2074 let was_dirty = self.is_dirty();
2075 let old_version = self.version.clone();
2076
2077 let operations = self.text.redo_to_transaction(transaction_id);
2078 let redone = !operations.is_empty();
2079 for operation in operations {
2080 self.send_operation(Operation::Buffer(operation), cx);
2081 }
2082 if redone {
2083 self.did_edit(&old_version, was_dirty, cx)
2084 }
2085 redone
2086 }
2087
    /// Overrides the current completion triggers with the user-provided completion triggers.
2089 pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
2090 self.completion_triggers.clone_from(&triggers);
2091 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2092 self.send_operation(
2093 Operation::UpdateCompletionTriggers {
2094 triggers,
2095 lamport_timestamp: self.completion_triggers_timestamp,
2096 },
2097 cx,
2098 );
2099 cx.notify();
2100 }
2101
2102 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2104 pub fn completion_triggers(&self) -> &[String] {
2105 &self.completion_triggers
2106 }
2107}
2108
2109#[doc(hidden)]
2110#[cfg(any(test, feature = "test-support"))]
2111impl Buffer {
2112 pub fn edit_via_marked_text(
2113 &mut self,
2114 marked_string: &str,
2115 autoindent_mode: Option<AutoindentMode>,
2116 cx: &mut ModelContext<Self>,
2117 ) {
2118 let edits = self.edits_for_marked_text(marked_string);
2119 self.edit(edits, autoindent_mode, cx);
2120 }
2121
2122 pub fn set_group_interval(&mut self, group_interval: Duration) {
2123 self.text.set_group_interval(group_interval);
2124 }
2125
2126 pub fn randomly_edit<T>(
2127 &mut self,
2128 rng: &mut T,
2129 old_range_count: usize,
2130 cx: &mut ModelContext<Self>,
2131 ) where
2132 T: rand::Rng,
2133 {
2134 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2135 let mut last_end = None;
2136 for _ in 0..old_range_count {
2137 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2138 break;
2139 }
2140
2141 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2142 let mut range = self.random_byte_range(new_start, rng);
2143 if rng.gen_bool(0.2) {
2144 mem::swap(&mut range.start, &mut range.end);
2145 }
2146 last_end = Some(range.end);
2147
2148 let new_text_len = rng.gen_range(0..10);
2149 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2150
2151 edits.push((range, new_text));
2152 }
2153 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2154 self.edit(edits, None, cx);
2155 }
2156
2157 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
2158 let was_dirty = self.is_dirty();
2159 let old_version = self.version.clone();
2160
2161 let ops = self.text.randomly_undo_redo(rng);
2162 if !ops.is_empty() {
2163 for op in ops {
2164 self.send_operation(Operation::Buffer(op), cx);
2165 self.did_edit(&old_version, was_dirty, cx);
2166 }
2167 }
2168 }
2169}
2170
2171impl EventEmitter<Event> for Buffer {}
2172
2173impl Deref for Buffer {
2174 type Target = TextBuffer;
2175
2176 fn deref(&self) -> &Self::Target {
2177 &self.text
2178 }
2179}
2180
2181impl BufferSnapshot {
    /// Returns the [`IndentSize`] for a given line.
2183 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2184 indent_size_for_line(self, row)
2185 }
2186 /// Returns [`IndentSize`] for a given position that respects user settings
2187 /// and language preferences.
2188 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
2189 let settings = language_settings(self.language_at(position), self.file(), cx);
2190 if settings.hard_tabs {
2191 IndentSize::tab()
2192 } else {
2193 IndentSize::spaces(settings.tab_size.get())
2194 }
2195 }
2196
2197 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2198 /// is passed in as `single_indent_size`.
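    ///
    /// Illustrative sketch (assumes `snapshot` is a [`BufferSnapshot`]):
    ///
    /// ```ignore
    /// // Ask for suggestions on rows 0 through 4, using four spaces as one indent level.
    /// let suggestions = snapshot.suggested_indents(0..5, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: suggested indent of {} characters", indent.len);
    /// }
    /// ```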
2199 pub fn suggested_indents(
2200 &self,
2201 rows: impl Iterator<Item = u32>,
2202 single_indent_size: IndentSize,
2203 ) -> BTreeMap<u32, IndentSize> {
2204 let mut result = BTreeMap::new();
2205
2206 for row_range in contiguous_ranges(rows, 10) {
2207 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2208 Some(suggestions) => suggestions,
2209 _ => break,
2210 };
2211
2212 for (row, suggestion) in row_range.zip(suggestions) {
2213 let indent_size = if let Some(suggestion) = suggestion {
2214 result
2215 .get(&suggestion.basis_row)
2216 .copied()
2217 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2218 .with_delta(suggestion.delta, single_indent_size)
2219 } else {
2220 self.indent_size_for_line(row)
2221 };
2222
2223 result.insert(row, indent_size);
2224 }
2225 }
2226
2227 result
2228 }
2229
2230 fn suggest_autoindents(
2231 &self,
2232 row_range: Range<u32>,
2233 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2234 let config = &self.language.as_ref()?.config;
2235 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2236
2237 // Find the suggested indentation ranges based on the syntax tree.
2238 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2239 let end = Point::new(row_range.end, 0);
2240 let range = (start..end).to_offset(&self.text);
2241 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2242 Some(&grammar.indents_config.as_ref()?.query)
2243 });
2244 let indent_configs = matches
2245 .grammars()
2246 .iter()
2247 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2248 .collect::<Vec<_>>();
2249
2250 let mut indent_ranges = Vec::<Range<Point>>::new();
2251 let mut outdent_positions = Vec::<Point>::new();
2252 while let Some(mat) = matches.peek() {
2253 let mut start: Option<Point> = None;
2254 let mut end: Option<Point> = None;
2255
2256 let config = &indent_configs[mat.grammar_index];
2257 for capture in mat.captures {
2258 if capture.index == config.indent_capture_ix {
2259 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2260 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2261 } else if Some(capture.index) == config.start_capture_ix {
2262 start = Some(Point::from_ts_point(capture.node.end_position()));
2263 } else if Some(capture.index) == config.end_capture_ix {
2264 end = Some(Point::from_ts_point(capture.node.start_position()));
2265 } else if Some(capture.index) == config.outdent_capture_ix {
2266 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2267 }
2268 }
2269
2270 matches.advance();
2271 if let Some((start, end)) = start.zip(end) {
2272 if start.row == end.row {
2273 continue;
2274 }
2275
2276 let range = start..end;
2277 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2278 Err(ix) => indent_ranges.insert(ix, range),
2279 Ok(ix) => {
2280 let prev_range = &mut indent_ranges[ix];
2281 prev_range.end = prev_range.end.max(range.end);
2282 }
2283 }
2284 }
2285 }
2286
2287 let mut error_ranges = Vec::<Range<Point>>::new();
2288 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2289 Some(&grammar.error_query)
2290 });
2291 while let Some(mat) = matches.peek() {
2292 let node = mat.captures[0].node;
2293 let start = Point::from_ts_point(node.start_position());
2294 let end = Point::from_ts_point(node.end_position());
2295 let range = start..end;
2296 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2297 Ok(ix) | Err(ix) => ix,
2298 };
2299 let mut end_ix = ix;
2300 while let Some(existing_range) = error_ranges.get(end_ix) {
2301 if existing_range.end < end {
2302 end_ix += 1;
2303 } else {
2304 break;
2305 }
2306 }
2307 error_ranges.splice(ix..end_ix, [range]);
2308 matches.advance();
2309 }
2310
2311 outdent_positions.sort();
2312 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and set its end to the outdent position.
2315 if let Some(range_to_truncate) = indent_ranges
2316 .iter_mut()
2317 .filter(|indent_range| indent_range.contains(&outdent_position))
2318 .last()
2319 {
2320 range_to_truncate.end = outdent_position;
2321 }
2322 }
2323
        // Find the suggested indentation increases and decreases based on regexes.
2325 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2326 self.for_each_line(
2327 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2328 ..Point::new(row_range.end, 0),
2329 |row, line| {
2330 if config
2331 .decrease_indent_pattern
2332 .as_ref()
2333 .map_or(false, |regex| regex.is_match(line))
2334 {
2335 indent_change_rows.push((row, Ordering::Less));
2336 }
2337 if config
2338 .increase_indent_pattern
2339 .as_ref()
2340 .map_or(false, |regex| regex.is_match(line))
2341 {
2342 indent_change_rows.push((row + 1, Ordering::Greater));
2343 }
2344 },
2345 );
2346
2347 let mut indent_changes = indent_change_rows.into_iter().peekable();
2348 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2349 prev_non_blank_row.unwrap_or(0)
2350 } else {
2351 row_range.start.saturating_sub(1)
2352 };
2353 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2354 Some(row_range.map(move |row| {
2355 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2356
2357 let mut indent_from_prev_row = false;
2358 let mut outdent_from_prev_row = false;
2359 let mut outdent_to_row = u32::MAX;
2360
2361 while let Some((indent_row, delta)) = indent_changes.peek() {
2362 match indent_row.cmp(&row) {
2363 Ordering::Equal => match delta {
2364 Ordering::Less => outdent_from_prev_row = true,
2365 Ordering::Greater => indent_from_prev_row = true,
2366 _ => {}
2367 },
2368
2369 Ordering::Greater => break,
2370 Ordering::Less => {}
2371 }
2372
2373 indent_changes.next();
2374 }
2375
2376 for range in &indent_ranges {
2377 if range.start.row >= row {
2378 break;
2379 }
2380 if range.start.row == prev_row && range.end > row_start {
2381 indent_from_prev_row = true;
2382 }
2383 if range.end > prev_row_start && range.end <= row_start {
2384 outdent_to_row = outdent_to_row.min(range.start.row);
2385 }
2386 }
2387
2388 let within_error = error_ranges
2389 .iter()
2390 .any(|e| e.start.row < row && e.end > row_start);
2391
2392 let suggestion = if outdent_to_row == prev_row
2393 || (outdent_from_prev_row && indent_from_prev_row)
2394 {
2395 Some(IndentSuggestion {
2396 basis_row: prev_row,
2397 delta: Ordering::Equal,
2398 within_error,
2399 })
2400 } else if indent_from_prev_row {
2401 Some(IndentSuggestion {
2402 basis_row: prev_row,
2403 delta: Ordering::Greater,
2404 within_error,
2405 })
2406 } else if outdent_to_row < prev_row {
2407 Some(IndentSuggestion {
2408 basis_row: outdent_to_row,
2409 delta: Ordering::Equal,
2410 within_error,
2411 })
2412 } else if outdent_from_prev_row {
2413 Some(IndentSuggestion {
2414 basis_row: prev_row,
2415 delta: Ordering::Less,
2416 within_error,
2417 })
2418 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2419 {
2420 Some(IndentSuggestion {
2421 basis_row: prev_row,
2422 delta: Ordering::Equal,
2423 within_error,
2424 })
2425 } else {
2426 None
2427 };
2428
2429 prev_row = row;
2430 prev_row_start = row_start;
2431 suggestion
2432 }))
2433 }
2434
2435 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2436 while row > 0 {
2437 row -= 1;
2438 if !self.is_line_blank(row) {
2439 return Some(row);
2440 }
2441 }
2442 None
2443 }
2444
2445 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
2446 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
2447 /// returned in chunks where each chunk has a single syntax highlighting style and
2448 /// diagnostic status.
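    ///
    /// Illustrative sketch (assumes `snapshot` is a [`BufferSnapshot`]):
    ///
    /// ```ignore
    /// // Walk the syntax-highlighted chunks of the first 100 bytes.
    /// for chunk in snapshot.chunks(0..100, true) {
    ///     if let Some(severity) = chunk.diagnostic_severity {
    ///         // This chunk lies inside a diagnostic of the given severity.
    ///     }
    ///     print!("{}", chunk.text);
    /// }
    /// ```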
2449 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2450 let range = range.start.to_offset(self)..range.end.to_offset(self);
2451
2452 let mut syntax = None;
2453 let mut diagnostic_endpoints = Vec::new();
2454 if language_aware {
2455 let captures = self.syntax.captures(range.clone(), &self.text, |grammar| {
2456 grammar.highlights_query.as_ref()
2457 });
2458 let highlight_maps = captures
2459 .grammars()
2460 .into_iter()
2461 .map(|grammar| grammar.highlight_map())
2462 .collect();
2463 syntax = Some((captures, highlight_maps));
2464 for entry in self.diagnostics_in_range::<_, usize>(range.clone(), false) {
2465 diagnostic_endpoints.push(DiagnosticEndpoint {
2466 offset: entry.range.start,
2467 is_start: true,
2468 severity: entry.diagnostic.severity,
2469 is_unnecessary: entry.diagnostic.is_unnecessary,
2470 });
2471 diagnostic_endpoints.push(DiagnosticEndpoint {
2472 offset: entry.range.end,
2473 is_start: false,
2474 severity: entry.diagnostic.severity,
2475 is_unnecessary: entry.diagnostic.is_unnecessary,
2476 });
2477 }
2478 diagnostic_endpoints
2479 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
2480 }
2481
2482 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostic_endpoints)
2483 }
2484
2485 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a new string for each line.
2487 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2488 let mut line = String::new();
2489 let mut row = range.start.row;
2490 for chunk in self
2491 .as_rope()
2492 .chunks_in_range(range.to_offset(self))
2493 .chain(["\n"])
2494 {
2495 for (newline_ix, text) in chunk.split('\n').enumerate() {
2496 if newline_ix > 0 {
2497 callback(row, &line);
2498 row += 1;
2499 line.clear();
2500 }
2501 line.push_str(text);
2502 }
2503 }
2504 }
2505
2506 /// Iterates over every [`SyntaxLayer`] in the buffer.
2507 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
2508 self.syntax.layers_for_range(0..self.len(), &self.text)
2509 }
2510
2511 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
2512 let offset = position.to_offset(self);
2513 self.syntax
2514 .layers_for_range(offset..offset, &self.text)
2515 .filter(|l| l.node().end_byte() > offset)
2516 .last()
2517 }
2518
    /// Returns the main [Language] of the buffer.
2520 pub fn language(&self) -> Option<&Arc<Language>> {
2521 self.language.as_ref()
2522 }
2523
2524 /// Returns the [Language] at the given location.
2525 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2526 self.syntax_layer_at(position)
2527 .map(|info| info.language)
2528 .or(self.language.as_ref())
2529 }
2530
2531 /// Returns the settings for the language at the given location.
2532 pub fn settings_at<'a, D: ToOffset>(
2533 &self,
2534 position: D,
2535 cx: &'a AppContext,
2536 ) -> &'a LanguageSettings {
2537 language_settings(self.language_at(position), self.file.as_ref(), cx)
2538 }
2539
2540 /// Returns the [LanguageScope] at the given location.
2541 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2542 let offset = position.to_offset(self);
2543 let mut scope = None;
2544 let mut smallest_range: Option<Range<usize>> = None;
2545
2546 // Use the layer that has the smallest node intersecting the given point.
2547 for layer in self.syntax.layers_for_range(offset..offset, &self.text) {
2548 let mut cursor = layer.node().walk();
2549
2550 let mut range = None;
2551 loop {
2552 let child_range = cursor.node().byte_range();
2553 if !child_range.to_inclusive().contains(&offset) {
2554 break;
2555 }
2556
2557 range = Some(child_range);
2558 if cursor.goto_first_child_for_byte(offset).is_none() {
2559 break;
2560 }
2561 }
2562
2563 if let Some(range) = range {
2564 if smallest_range
2565 .as_ref()
2566 .map_or(true, |smallest_range| range.len() < smallest_range.len())
2567 {
2568 smallest_range = Some(range);
2569 scope = Some(LanguageScope {
2570 language: layer.language.clone(),
2571 override_id: layer.override_id(offset, &self.text),
2572 });
2573 }
2574 }
2575 }
2576
2577 scope.or_else(|| {
2578 self.language.clone().map(|language| LanguageScope {
2579 language,
2580 override_id: None,
2581 })
2582 })
2583 }
2584
2585 /// Returns a tuple of the range and character kind of the word
2586 /// surrounding the given position.
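    ///
    /// Illustrative sketch (assumes `snapshot` contains the text `"let foo_bar = 1;"`):
    ///
    /// ```ignore
    /// // An offset inside `foo_bar` expands to the whole identifier.
    /// let (range, kind) = snapshot.surrounding_word(6);
    /// assert_eq!(snapshot.text_for_range(range).collect::<String>(), "foo_bar");
    /// assert!(kind == Some(CharKind::Word));
    /// ```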
2587 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2588 let mut start = start.to_offset(self);
2589 let mut end = start;
2590 let mut next_chars = self.chars_at(start).peekable();
2591 let mut prev_chars = self.reversed_chars_at(start).peekable();
2592
2593 let scope = self.language_scope_at(start);
2594 let kind = |c| char_kind(&scope, c);
2595 let word_kind = cmp::max(
2596 prev_chars.peek().copied().map(kind),
2597 next_chars.peek().copied().map(kind),
2598 );
2599
2600 for ch in prev_chars {
2601 if Some(kind(ch)) == word_kind && ch != '\n' {
2602 start -= ch.len_utf8();
2603 } else {
2604 break;
2605 }
2606 }
2607
2608 for ch in next_chars {
2609 if Some(kind(ch)) == word_kind && ch != '\n' {
2610 end += ch.len_utf8();
2611 } else {
2612 break;
2613 }
2614 }
2615
2616 (start..end, word_kind)
2617 }
2618
    /// Returns the range of the closest syntax node that encloses the given range.
2620 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2621 let range = range.start.to_offset(self)..range.end.to_offset(self);
2622 let mut result: Option<Range<usize>> = None;
2623 'outer: for layer in self.syntax.layers_for_range(range.clone(), &self.text) {
2624 let mut cursor = layer.node().walk();
2625
2626 // Descend to the first leaf that touches the start of the range,
2627 // and if the range is non-empty, extends beyond the start.
2628 while cursor.goto_first_child_for_byte(range.start).is_some() {
2629 if !range.is_empty() && cursor.node().end_byte() == range.start {
2630 cursor.goto_next_sibling();
2631 }
2632 }
2633
2634 // Ascend to the smallest ancestor that strictly contains the range.
2635 loop {
2636 let node_range = cursor.node().byte_range();
2637 if node_range.start <= range.start
2638 && node_range.end >= range.end
2639 && node_range.len() > range.len()
2640 {
2641 break;
2642 }
2643 if !cursor.goto_parent() {
2644 continue 'outer;
2645 }
2646 }
2647
2648 let left_node = cursor.node();
2649 let mut layer_result = left_node.byte_range();
2650
2651 // For an empty range, try to find another node immediately to the right of the range.
2652 if left_node.end_byte() == range.start {
2653 let mut right_node = None;
2654 while !cursor.goto_next_sibling() {
2655 if !cursor.goto_parent() {
2656 break;
2657 }
2658 }
2659
2660 while cursor.node().start_byte() == range.start {
2661 right_node = Some(cursor.node());
2662 if !cursor.goto_first_child() {
2663 break;
2664 }
2665 }
2666
2667 // If there is a candidate node on both sides of the (empty) range, then
2668 // decide between the two by favoring a named node over an anonymous token.
2669 // If both nodes are the same in that regard, favor the right one.
2670 if let Some(right_node) = right_node {
2671 if right_node.is_named() || !left_node.is_named() {
2672 layer_result = right_node.byte_range();
2673 }
2674 }
2675 }
2676
2677 if let Some(previous_result) = &result {
2678 if previous_result.len() < layer_result.len() {
2679 continue;
2680 }
2681 }
2682 result = Some(layer_result);
2683 }
2684
2685 result
2686 }
2687
2688 /// Returns the outline for the buffer.
2689 ///
2690 /// This method allows passing an optional [SyntaxTheme] to
2691 /// syntax-highlight the returned symbols.
2692 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2693 self.outline_items_containing(0..self.len(), true, theme)
2694 .map(Outline::new)
2695 }
2696
2697 /// Returns all the symbols that contain the given position.
2698 ///
2699 /// This method allows passing an optional [SyntaxTheme] to
2700 /// syntax-highlight the returned symbols.
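    ///
    /// Illustrative sketch (assumes `snapshot` is a [`BufferSnapshot`] and `offset` lies
    /// inside a nested symbol such as a method body):
    ///
    /// ```ignore
    /// // Print the enclosing symbols, outermost first.
    /// if let Some(items) = snapshot.symbols_containing(offset, None) {
    ///     for item in items {
    ///         println!("{}{}", "  ".repeat(item.depth), item.text);
    ///     }
    /// }
    /// ```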
2701 pub fn symbols_containing<T: ToOffset>(
2702 &self,
2703 position: T,
2704 theme: Option<&SyntaxTheme>,
2705 ) -> Option<Vec<OutlineItem<Anchor>>> {
2706 let position = position.to_offset(self);
2707 let mut items = self.outline_items_containing(
2708 position.saturating_sub(1)..self.len().min(position + 1),
2709 false,
2710 theme,
2711 )?;
2712 let mut prev_depth = None;
2713 items.retain(|item| {
2714 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
2715 prev_depth = Some(item.depth);
2716 result
2717 });
2718 Some(items)
2719 }
2720
2721 fn outline_items_containing(
2722 &self,
2723 range: Range<usize>,
2724 include_extra_context: bool,
2725 theme: Option<&SyntaxTheme>,
2726 ) -> Option<Vec<OutlineItem<Anchor>>> {
2727 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2728 grammar.outline_config.as_ref().map(|c| &c.query)
2729 });
2730 let configs = matches
2731 .grammars()
2732 .iter()
2733 .map(|g| g.outline_config.as_ref().unwrap())
2734 .collect::<Vec<_>>();
2735
2736 let mut stack = Vec::<Range<usize>>::new();
2737 let mut items = Vec::new();
2738 while let Some(mat) = matches.peek() {
2739 let config = &configs[mat.grammar_index];
2740 let item_node = mat.captures.iter().find_map(|cap| {
2741 if cap.index == config.item_capture_ix {
2742 Some(cap.node)
2743 } else {
2744 None
2745 }
2746 })?;
2747
2748 let item_range = item_node.byte_range();
2749 if item_range.end < range.start || item_range.start > range.end {
2750 matches.advance();
2751 continue;
2752 }
2753
2754 let mut buffer_ranges = Vec::new();
2755 for capture in mat.captures {
2756 let node_is_name;
2757 if capture.index == config.name_capture_ix {
2758 node_is_name = true;
2759 } else if Some(capture.index) == config.context_capture_ix
2760 || (Some(capture.index) == config.extra_context_capture_ix
2761 && include_extra_context)
2762 {
2763 node_is_name = false;
2764 } else {
2765 continue;
2766 }
2767
2768 let mut range = capture.node.start_byte()..capture.node.end_byte();
2769 let start = capture.node.start_position();
2770 if capture.node.end_position().row > start.row {
2771 range.end =
2772 range.start + self.line_len(start.row as u32) as usize - start.column;
2773 }
2774
2775 if !range.is_empty() {
2776 buffer_ranges.push((range, node_is_name));
2777 }
2778 }
2779
2780 if buffer_ranges.is_empty() {
2781 matches.advance();
2782 continue;
2783 }
2784
2785 let mut text = String::new();
2786 let mut highlight_ranges = Vec::new();
2787 let mut name_ranges = Vec::new();
2788 let mut chunks = self.chunks(
2789 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
2790 true,
2791 );
2792 let mut last_buffer_range_end = 0;
2793 for (buffer_range, is_name) in buffer_ranges {
2794 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
2795 text.push(' ');
2796 }
2797 last_buffer_range_end = buffer_range.end;
2798 if is_name {
2799 let mut start = text.len();
2800 let end = start + buffer_range.len();
2801
                    // When multiple names are captured, the matchable text includes
                    // the whitespace in between the names.
2804 if !name_ranges.is_empty() {
2805 start -= 1;
2806 }
2807
2808 name_ranges.push(start..end);
2809 }
2810
2811 let mut offset = buffer_range.start;
2812 chunks.seek(offset);
2813 for mut chunk in chunks.by_ref() {
2814 if chunk.text.len() > buffer_range.end - offset {
2815 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
2816 offset = buffer_range.end;
2817 } else {
2818 offset += chunk.text.len();
2819 }
2820 let style = chunk
2821 .syntax_highlight_id
2822 .zip(theme)
2823 .and_then(|(highlight, theme)| highlight.style(theme));
2824 if let Some(style) = style {
2825 let start = text.len();
2826 let end = start + chunk.text.len();
2827 highlight_ranges.push((start..end, style));
2828 }
2829 text.push_str(chunk.text);
2830 if offset >= buffer_range.end {
2831 break;
2832 }
2833 }
2834 }
2835
2836 matches.advance();
2837 while stack.last().map_or(false, |prev_range| {
2838 prev_range.start > item_range.start || prev_range.end < item_range.end
2839 }) {
2840 stack.pop();
2841 }
2842 stack.push(item_range.clone());
2843
2844 items.push(OutlineItem {
2845 depth: stack.len() - 1,
2846 range: self.anchor_after(item_range.start)..self.anchor_before(item_range.end),
2847 text,
2848 highlight_ranges,
2849 name_ranges,
2850 })
2851 }
2852 Some(items)
2853 }
2854
2855 /// For each grammar in the language, runs the provided
2856 /// [tree_sitter::Query] against the given range.
2857 pub fn matches(
2858 &self,
2859 range: Range<usize>,
2860 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
2861 ) -> SyntaxMapMatches {
2862 self.syntax.matches(range, self, query)
2863 }
2864
2865 /// Returns bracket range pairs overlapping or adjacent to `range`
2866 pub fn bracket_ranges<T: ToOffset>(
2867 &self,
2868 range: Range<T>,
2869 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
2870 // Find bracket pairs that *inclusively* contain the given range.
2871 let range = range.start.to_offset(self).saturating_sub(1)
2872 ..self.len().min(range.end.to_offset(self) + 1);
2873
2874 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2875 grammar.brackets_config.as_ref().map(|c| &c.query)
2876 });
2877 let configs = matches
2878 .grammars()
2879 .iter()
2880 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
2881 .collect::<Vec<_>>();
2882
2883 iter::from_fn(move || {
2884 while let Some(mat) = matches.peek() {
2885 let mut open = None;
2886 let mut close = None;
2887 let config = &configs[mat.grammar_index];
2888 for capture in mat.captures {
2889 if capture.index == config.open_capture_ix {
2890 open = Some(capture.node.byte_range());
2891 } else if capture.index == config.close_capture_ix {
2892 close = Some(capture.node.byte_range());
2893 }
2894 }
2895
2896 matches.advance();
2897
2898 let Some((open, close)) = open.zip(close) else {
2899 continue;
2900 };
2901
2902 let bracket_range = open.start..=close.end;
2903 if !bracket_range.overlaps(&range) {
2904 continue;
2905 }
2906
2907 return Some((open, close));
2908 }
2909 None
2910 })
2911 }
2912
2913 /// Returns enclosing bracket ranges containing the given range
2914 pub fn enclosing_bracket_ranges<T: ToOffset>(
2915 &self,
2916 range: Range<T>,
2917 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
2918 let range = range.start.to_offset(self)..range.end.to_offset(self);
2919
2920 self.bracket_ranges(range.clone())
2921 .filter(move |(open, close)| open.start <= range.start && close.end >= range.end)
2922 }
2923
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
    ///
    /// A `range_filter` can be passed to filter which bracket ranges are considered.
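    ///
    /// Illustrative sketch (assumes `snapshot` is a [`BufferSnapshot`]; the filter shown is
    /// a hypothetical one that ignores empty pairs like `()`, assuming single-byte brackets):
    ///
    /// ```ignore
    /// let filter = |open: Range<usize>, close: Range<usize>| close.end - open.start > 2;
    /// if let Some((open, close)) =
    ///     snapshot.innermost_enclosing_bracket_ranges(10..12, Some(&filter))
    /// {
    ///     // `open` and `close` are the byte ranges of the opening and closing delimiters.
    /// }
    /// ```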
2927 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
2928 &self,
2929 range: Range<T>,
2930 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
2931 ) -> Option<(Range<usize>, Range<usize>)> {
2932 let range = range.start.to_offset(self)..range.end.to_offset(self);
2933
2934 // Get the ranges of the innermost pair of brackets.
2935 let mut result: Option<(Range<usize>, Range<usize>)> = None;
2936
2937 for (open, close) in self.enclosing_bracket_ranges(range.clone()) {
2938 if let Some(range_filter) = range_filter {
2939 if !range_filter(open.clone(), close.clone()) {
2940 continue;
2941 }
2942 }
2943
2944 let len = close.end - open.start;
2945
2946 if let Some((existing_open, existing_close)) = &result {
2947 let existing_len = existing_close.end - existing_open.start;
2948 if len > existing_len {
2949 continue;
2950 }
2951 }
2952
2953 result = Some((open, close));
2954 }
2955
2956 result
2957 }
2958
2959 /// Returns anchor ranges for any matches of the redaction query.
2960 /// The buffer can be associated with multiple languages, and the redaction query associated with each
2961 /// will be run on the relevant section of the buffer.
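    ///
    /// Illustrative sketch (assumes `snapshot` is a [`BufferSnapshot`]):
    ///
    /// ```ignore
    /// // Collect every byte range that a redaction query marks as sensitive.
    /// let redacted: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```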
2962 pub fn redacted_ranges<T: ToOffset>(
2963 &self,
2964 range: Range<T>,
2965 ) -> impl Iterator<Item = Range<usize>> + '_ {
2966 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
2967 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
2968 grammar
2969 .redactions_config
2970 .as_ref()
2971 .map(|config| &config.query)
2972 });
2973
2974 let configs = syntax_matches
2975 .grammars()
2976 .iter()
2977 .map(|grammar| grammar.redactions_config.as_ref())
2978 .collect::<Vec<_>>();
2979
2980 iter::from_fn(move || {
2981 let redacted_range = syntax_matches
2982 .peek()
2983 .and_then(|mat| {
2984 configs[mat.grammar_index].and_then(|config| {
2985 mat.captures
2986 .iter()
2987 .find(|capture| capture.index == config.redaction_capture_ix)
2988 })
2989 })
2990 .map(|mat| mat.node.byte_range());
2991 syntax_matches.advance();
2992 redacted_range
2993 })
2994 }
2995
2996 pub fn runnable_ranges(
2997 &self,
2998 range: Range<Anchor>,
2999 ) -> impl Iterator<Item = RunnableRange> + '_ {
3000 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3001
3002 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3003 grammar.runnable_config.as_ref().map(|config| &config.query)
3004 });
3005
3006 let test_configs = syntax_matches
3007 .grammars()
3008 .iter()
3009 .map(|grammar| grammar.runnable_config.as_ref())
3010 .collect::<Vec<_>>();
3011
3012 iter::from_fn(move || {
3013 let test_range = syntax_matches.peek().and_then(|mat| {
3014 test_configs[mat.grammar_index].and_then(|test_configs| {
3015 let mut tags: SmallVec<[(Range<usize>, RunnableTag); 1]> =
3016 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
3017 test_configs
3018 .runnable_tags
3019 .get(&capture.index)
3020 .cloned()
3021 .map(|tag_name| (capture.node.byte_range(), tag_name))
3022 }));
3023 let maximum_range = tags
3024 .iter()
3025 .max_by_key(|(byte_range, _)| byte_range.len())
3026 .map(|(range, _)| range)?
3027 .clone();
3028 tags.sort_by_key(|(range, _)| range == &maximum_range);
3029 let split_point = tags.partition_point(|(range, _)| range != &maximum_range);
3030 let (extra_captures, tags) = tags.split_at(split_point);
3031 let extra_captures = extra_captures
3032 .into_iter()
3033 .map(|(range, name)| {
3034 (
3035 name.0.to_string(),
3036 self.text_for_range(range.clone()).collect::<String>(),
3037 )
3038 })
3039 .collect();
3040 Some(RunnableRange {
3041 run_range: mat
3042 .captures
3043 .iter()
3044 .find(|capture| capture.index == test_configs.run_capture_ix)
3045 .map(|mat| mat.node.byte_range())?,
3046 runnable: Runnable {
3047 tags: tags.into_iter().cloned().map(|(_, tag)| tag).collect(),
3048 language: mat.language,
3049 buffer: self.remote_id(),
3050 },
3051 extra_captures,
3052 buffer_id: self.remote_id(),
3053 })
3054 })
3055 });
3056 syntax_matches.advance();
3057 test_range
3058 })
3059 }
3060
3061 /// Returns selections for remote peers intersecting the given range.
3062 #[allow(clippy::type_complexity)]
3063 pub fn remote_selections_in_range(
3064 &self,
3065 range: Range<Anchor>,
3066 ) -> impl Iterator<
3067 Item = (
3068 ReplicaId,
3069 bool,
3070 CursorShape,
3071 impl Iterator<Item = &Selection<Anchor>> + '_,
3072 ),
3073 > + '_ {
3074 self.remote_selections
3075 .iter()
3076 .filter(|(replica_id, set)| {
3077 **replica_id != self.text.replica_id() && !set.selections.is_empty()
3078 })
3079 .map(move |(replica_id, set)| {
3080 let start_ix = match set.selections.binary_search_by(|probe| {
3081 probe.end.cmp(&range.start, self).then(Ordering::Greater)
3082 }) {
3083 Ok(ix) | Err(ix) => ix,
3084 };
3085 let end_ix = match set.selections.binary_search_by(|probe| {
3086 probe.start.cmp(&range.end, self).then(Ordering::Less)
3087 }) {
3088 Ok(ix) | Err(ix) => ix,
3089 };
3090
3091 (
3092 *replica_id,
3093 set.line_mode,
3094 set.cursor_shape,
3095 set.selections[start_ix..end_ix].iter(),
3096 )
3097 })
3098 }
3099
3100 /// Whether the buffer contains any git changes.
3101 pub fn has_git_diff(&self) -> bool {
3102 !self.git_diff.is_empty()
3103 }
3104
3105 /// Returns all the Git diff hunks intersecting the given
3106 /// row range.
3107 pub fn git_diff_hunks_in_row_range(
3108 &self,
3109 range: Range<BufferRow>,
3110 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3111 self.git_diff.hunks_in_row_range(range, self)
3112 }
3113
3114 /// Returns all the Git diff hunks intersecting the given
3115 /// range.
3116 pub fn git_diff_hunks_intersecting_range(
3117 &self,
3118 range: Range<Anchor>,
3119 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3120 self.git_diff.hunks_intersecting_range(range, self)
3121 }
3122
3123 /// Returns all the Git diff hunks intersecting the given
3124 /// range, in reverse order.
3125 pub fn git_diff_hunks_intersecting_range_rev(
3126 &self,
3127 range: Range<Anchor>,
3128 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3129 self.git_diff.hunks_intersecting_range_rev(range, self)
3130 }
3131
    /// Returns whether the buffer contains any diagnostics.
3133 pub fn has_diagnostics(&self) -> bool {
3134 !self.diagnostics.is_empty()
3135 }
3136
3137 /// Returns all the diagnostics intersecting the given range.
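    ///
    /// Illustrative sketch (assumes `snapshot` is a [`BufferSnapshot`]):
    ///
    /// ```ignore
    /// // Collect the error-severity diagnostics in the first 500 bytes, in offset order.
    /// let errors: Vec<_> = snapshot
    ///     .diagnostics_in_range::<_, usize>(0..500, false)
    ///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
    ///     .collect();
    /// ```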
3138 pub fn diagnostics_in_range<'a, T, O>(
3139 &'a self,
3140 search_range: Range<T>,
3141 reversed: bool,
3142 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3143 where
3144 T: 'a + Clone + ToOffset,
3145 O: 'a + FromAnchor + Ord,
3146 {
3147 let mut iterators: Vec<_> = self
3148 .diagnostics
3149 .iter()
3150 .map(|(_, collection)| {
3151 collection
3152 .range::<T, O>(search_range.clone(), self, true, reversed)
3153 .peekable()
3154 })
3155 .collect();
3156
3157 std::iter::from_fn(move || {
3158 let (next_ix, _) = iterators
3159 .iter_mut()
3160 .enumerate()
3161 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
3162 .min_by(|(_, a), (_, b)| {
3163 let cmp = a
3164 .range
3165 .start
3166 .cmp(&b.range.start)
3167 // when range is equal, sort by diagnostic severity
3168 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
3169 // and stabilize order with group_id
3170 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
3171 if reversed {
3172 cmp.reverse()
3173 } else {
3174 cmp
3175 }
3176 })?;
3177 iterators[next_ix].next()
3178 })
3179 }
3180
3181 /// Returns all the diagnostic groups associated with the given
3182 /// language server id. If no language server id is provided,
    /// all diagnostic groups are returned.
3184 pub fn diagnostic_groups(
3185 &self,
3186 language_server_id: Option<LanguageServerId>,
3187 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
3188 let mut groups = Vec::new();
3189
3190 if let Some(language_server_id) = language_server_id {
3191 if let Ok(ix) = self
3192 .diagnostics
3193 .binary_search_by_key(&language_server_id, |e| e.0)
3194 {
3195 self.diagnostics[ix]
3196 .1
3197 .groups(language_server_id, &mut groups, self);
3198 }
3199 } else {
3200 for (language_server_id, diagnostics) in self.diagnostics.iter() {
3201 diagnostics.groups(*language_server_id, &mut groups, self);
3202 }
3203 }
3204
3205 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
3206 let a_start = &group_a.entries[group_a.primary_ix].range.start;
3207 let b_start = &group_b.entries[group_b.primary_ix].range.start;
3208 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
3209 });
3210
3211 groups
3212 }
3213
3214 /// Returns an iterator over the diagnostics for the given group.
3215 pub fn diagnostic_group<'a, O>(
3216 &'a self,
3217 group_id: usize,
3218 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3219 where
3220 O: 'a + FromAnchor,
3221 {
3222 self.diagnostics
3223 .iter()
3224 .flat_map(move |(_, set)| set.group(group_id, self))
3225 }
3226
3227 /// The number of times diagnostics were updated.
3228 pub fn diagnostics_update_count(&self) -> usize {
3229 self.diagnostics_update_count
3230 }
3231
3232 /// The number of times the buffer was parsed.
3233 pub fn parse_count(&self) -> usize {
3234 self.parse_count
3235 }
3236
3237 /// The number of times selections were updated.
3238 pub fn selections_update_count(&self) -> usize {
3239 self.selections_update_count
3240 }
3241
    /// Returns a snapshot of the underlying file.
3243 pub fn file(&self) -> Option<&Arc<dyn File>> {
3244 self.file.as_ref()
3245 }
3246
3247 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
3248 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
3249 if let Some(file) = self.file() {
3250 if file.path().file_name().is_none() || include_root {
3251 Some(file.full_path(cx))
3252 } else {
3253 Some(file.path().to_path_buf())
3254 }
3255 } else {
3256 None
3257 }
3258 }
3259
3260 /// The number of times the underlying file was updated.
3261 pub fn file_update_count(&self) -> usize {
3262 self.file_update_count
3263 }
3264
3265 /// The number of times the git diff status was updated.
3266 pub fn git_diff_update_count(&self) -> usize {
3267 self.git_diff_update_count
3268 }
3269}
3270
3271fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
3272 indent_size_for_text(text.chars_at(Point::new(row, 0)))
3273}
3274
3275fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
3276 let mut result = IndentSize::spaces(0);
3277 for c in text {
3278 let kind = match c {
3279 ' ' => IndentKind::Space,
3280 '\t' => IndentKind::Tab,
3281 _ => break,
3282 };
3283 if result.len == 0 {
3284 result.kind = kind;
3285 }
3286 result.len += 1;
3287 }
3288 result
3289}
3290
3291impl Clone for BufferSnapshot {
3292 fn clone(&self) -> Self {
3293 Self {
3294 text: self.text.clone(),
3295 git_diff: self.git_diff.clone(),
3296 syntax: self.syntax.clone(),
3297 file: self.file.clone(),
3298 remote_selections: self.remote_selections.clone(),
3299 diagnostics: self.diagnostics.clone(),
3300 selections_update_count: self.selections_update_count,
3301 diagnostics_update_count: self.diagnostics_update_count,
3302 file_update_count: self.file_update_count,
3303 git_diff_update_count: self.git_diff_update_count,
3304 language: self.language.clone(),
3305 parse_count: self.parse_count,
3306 }
3307 }
3308}
3309
3310impl Deref for BufferSnapshot {
3311 type Target = text::BufferSnapshot;
3312
3313 fn deref(&self) -> &Self::Target {
3314 &self.text
3315 }
3316}
3317
3318unsafe impl<'a> Send for BufferChunks<'a> {}
3319
3320impl<'a> BufferChunks<'a> {
3321 pub(crate) fn new(
3322 text: &'a Rope,
3323 range: Range<usize>,
3324 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
3325 diagnostic_endpoints: Vec<DiagnosticEndpoint>,
3326 ) -> Self {
3327 let mut highlights = None;
3328 if let Some((captures, highlight_maps)) = syntax {
3329 highlights = Some(BufferChunkHighlights {
3330 captures,
3331 next_capture: None,
3332 stack: Default::default(),
3333 highlight_maps,
3334 })
3335 }
3336
3337 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
3338 let chunks = text.chunks_in_range(range.clone());
3339
3340 BufferChunks {
3341 range,
3342 chunks,
3343 diagnostic_endpoints,
3344 error_depth: 0,
3345 warning_depth: 0,
3346 information_depth: 0,
3347 hint_depth: 0,
3348 unnecessary_depth: 0,
3349 highlights,
3350 }
3351 }
3352
3353 /// Seeks to the given byte offset in the buffer.
3354 pub fn seek(&mut self, offset: usize) {
3355 self.range.start = offset;
3356 self.chunks.seek(self.range.start);
3357 if let Some(highlights) = self.highlights.as_mut() {
3358 highlights
3359 .stack
3360 .retain(|(end_offset, _)| *end_offset > offset);
3361 if let Some(capture) = &highlights.next_capture {
3362 if offset >= capture.node.start_byte() {
3363 let next_capture_end = capture.node.end_byte();
3364 if offset < next_capture_end {
3365 highlights.stack.push((
3366 next_capture_end,
3367 highlights.highlight_maps[capture.grammar_index].get(capture.index),
3368 ));
3369 }
3370 highlights.next_capture.take();
3371 }
3372 }
3373 highlights.captures.set_byte_range(self.range.clone());
3374 }
3375 }
3376
3377 /// The current byte offset in the buffer.
3378 pub fn offset(&self) -> usize {
3379 self.range.start
3380 }
3381
3382 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
3383 let depth = match endpoint.severity {
3384 DiagnosticSeverity::ERROR => &mut self.error_depth,
3385 DiagnosticSeverity::WARNING => &mut self.warning_depth,
3386 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
3387 DiagnosticSeverity::HINT => &mut self.hint_depth,
3388 _ => return,
3389 };
3390 if endpoint.is_start {
3391 *depth += 1;
3392 } else {
3393 *depth -= 1;
3394 }
3395
3396 if endpoint.is_unnecessary {
3397 if endpoint.is_start {
3398 self.unnecessary_depth += 1;
3399 } else {
3400 self.unnecessary_depth -= 1;
3401 }
3402 }
3403 }
3404
3405 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
3406 if self.error_depth > 0 {
3407 Some(DiagnosticSeverity::ERROR)
3408 } else if self.warning_depth > 0 {
3409 Some(DiagnosticSeverity::WARNING)
3410 } else if self.information_depth > 0 {
3411 Some(DiagnosticSeverity::INFORMATION)
3412 } else if self.hint_depth > 0 {
3413 Some(DiagnosticSeverity::HINT)
3414 } else {
3415 None
3416 }
3417 }
3418
3419 fn current_code_is_unnecessary(&self) -> bool {
3420 self.unnecessary_depth > 0
3421 }
3422}
3423
3424impl<'a> Iterator for BufferChunks<'a> {
3425 type Item = Chunk<'a>;
3426
3427 fn next(&mut self) -> Option<Self::Item> {
3428 let mut next_capture_start = usize::MAX;
3429 let mut next_diagnostic_endpoint = usize::MAX;
3430
3431 if let Some(highlights) = self.highlights.as_mut() {
3432 while let Some((parent_capture_end, _)) = highlights.stack.last() {
3433 if *parent_capture_end <= self.range.start {
3434 highlights.stack.pop();
3435 } else {
3436 break;
3437 }
3438 }
3439
3440 if highlights.next_capture.is_none() {
3441 highlights.next_capture = highlights.captures.next();
3442 }
3443
3444 while let Some(capture) = highlights.next_capture.as_ref() {
3445 if self.range.start < capture.node.start_byte() {
3446 next_capture_start = capture.node.start_byte();
3447 break;
3448 } else {
3449 let highlight_id =
3450 highlights.highlight_maps[capture.grammar_index].get(capture.index);
3451 highlights
3452 .stack
3453 .push((capture.node.end_byte(), highlight_id));
3454 highlights.next_capture = highlights.captures.next();
3455 }
3456 }
3457 }
3458
3459 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
3460 if endpoint.offset <= self.range.start {
3461 self.update_diagnostic_depths(endpoint);
3462 self.diagnostic_endpoints.next();
3463 } else {
3464 next_diagnostic_endpoint = endpoint.offset;
3465 break;
3466 }
3467 }
3468
3469 if let Some(chunk) = self.chunks.peek() {
3470 let chunk_start = self.range.start;
3471 let mut chunk_end = (self.chunks.offset() + chunk.len())
3472 .min(next_capture_start)
3473 .min(next_diagnostic_endpoint);
3474 let mut highlight_id = None;
3475 if let Some(highlights) = self.highlights.as_ref() {
3476 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
3477 chunk_end = chunk_end.min(*parent_capture_end);
3478 highlight_id = Some(*parent_highlight_id);
3479 }
3480 }
3481
3482 let slice =
3483 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
3484 self.range.start = chunk_end;
3485 if self.range.start == self.chunks.offset() + chunk.len() {
3486 self.chunks.next().unwrap();
3487 }
3488
3489 Some(Chunk {
3490 text: slice,
3491 syntax_highlight_id: highlight_id,
3492 diagnostic_severity: self.current_diagnostic_severity(),
3493 is_unnecessary: self.current_code_is_unnecessary(),
3494 ..Default::default()
3495 })
3496 } else {
3497 None
3498 }
3499 }
3500}
3501
3502impl operation_queue::Operation for Operation {
3503 fn lamport_timestamp(&self) -> clock::Lamport {
3504 match self {
3505 Operation::Buffer(_) => {
3506 unreachable!("buffer operations should never be deferred at this layer")
3507 }
3508 Operation::UpdateDiagnostics {
3509 lamport_timestamp, ..
3510 }
3511 | Operation::UpdateSelections {
3512 lamport_timestamp, ..
3513 }
3514 | Operation::UpdateCompletionTriggers {
3515 lamport_timestamp, ..
3516 } => *lamport_timestamp,
3517 }
3518 }
3519}
3520
3521impl Default for Diagnostic {
3522 fn default() -> Self {
3523 Self {
3524 source: Default::default(),
3525 code: None,
3526 severity: DiagnosticSeverity::ERROR,
3527 message: Default::default(),
3528 group_id: 0,
3529 is_primary: false,
3530 is_disk_based: false,
3531 is_unnecessary: false,
3532 }
3533 }
3534}
3535
3536impl IndentSize {
    /// Returns an [IndentSize] representing the given number of spaces.
3538 pub fn spaces(len: u32) -> Self {
3539 Self {
3540 len,
3541 kind: IndentKind::Space,
3542 }
3543 }
3544
3545 /// Returns an [IndentSize] representing a tab.
3546 pub fn tab() -> Self {
3547 Self {
3548 len: 1,
3549 kind: IndentKind::Tab,
3550 }
3551 }
3552
3553 /// An iterator over the characters represented by this [IndentSize].
3554 pub fn chars(&self) -> impl Iterator<Item = char> {
3555 iter::repeat(self.char()).take(self.len as usize)
3556 }
3557
3558 /// The character representation of this [IndentSize].
3559 pub fn char(&self) -> char {
3560 match self.kind {
3561 IndentKind::Space => ' ',
3562 IndentKind::Tab => '\t',
3563 }
3564 }
3565
3566 /// Consumes the current [IndentSize] and returns a new one that has
3567 /// been shrunk or enlarged by the given size along the given direction.
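    ///
    /// Illustrative sketch:
    ///
    /// ```ignore
    /// let unit = IndentSize::spaces(4);
    /// assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Greater, unit).len, 8);
    /// assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Less, unit).len, 0);
    /// assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Equal, unit).len, 4);
    /// ```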
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }
}

/// A minimal [File] implementation used by tests.
#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<Path>,
    pub root_name: String,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<Path> {
        &self.path
    }

    fn full_path(&self, _: &gpui::AppContext) -> PathBuf {
        PathBuf::from(&self.root_name).join(self.path.as_ref())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        None
    }

    fn mtime(&self) -> Option<SystemTime> {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::AppContext) -> &'a std::ffi::OsStr {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self) -> usize {
        0
    }

    fn is_deleted(&self) -> bool {
        unimplemented!()
    }

    fn as_any(&self) -> &dyn std::any::Any {
        unimplemented!()
    }

    fn to_proto(&self) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }
}

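/// Coalesces consecutive values yielded by `values` into contiguous ranges,
/// closing a range whenever the next value is not adjacent to it or the range
/// already contains `max_len` values.
///
/// An illustrative sketch of the expected output (not run as a doctest, since
/// this function is crate-private):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7, 9..10]);
///
/// // With `max_len` of 2, consecutive runs are split into ranges of at most two values.
/// let capped: Vec<_> = contiguous_ranges([1, 2, 3, 4].into_iter(), 2).collect();
/// assert_eq!(capped, vec![1..3, 3..5]);
/// ```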
pub(crate) fn contiguous_ranges(
    mut values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || loop {
        if let Some(value) = values.next() {
            if let Some(range) = &mut current_range {
                if value == range.end && range.len() < max_len {
                    range.end += 1;
                    continue;
                }
            }

            let prev_range = current_range.clone();
            current_range = Some(value..(value + 1));
            if prev_range.is_some() {
                return prev_range;
            }
        } else {
            return current_range.take();
        }
    })
}

/// Returns the [CharKind] for the given character. When a scope is provided,
/// the function checks if the character is considered a word character
/// based on the language scope's word character settings.
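///
/// A rough illustration of the default classification when no language scope
/// is provided (illustrative only, not taken from the original documentation):
///
/// ```ignore
/// assert!(matches!(char_kind(&None, ' '), CharKind::Whitespace));
/// assert!(matches!(char_kind(&None, 'x'), CharKind::Word));
/// assert!(matches!(char_kind(&None, '_'), CharKind::Word));
/// assert!(matches!(char_kind(&None, '-'), CharKind::Punctuation));
/// ```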
pub fn char_kind(scope: &Option<LanguageScope>, c: char) -> CharKind {
    if c.is_whitespace() {
        return CharKind::Whitespace;
    } else if c.is_alphanumeric() || c == '_' {
        return CharKind::Word;
    }

    if let Some(scope) = scope {
        if let Some(characters) = scope.word_characters() {
            if characters.contains(&c) {
                return CharKind::Word;
            }
        }
    }

    CharKind::Punctuation
}

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
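///
/// An illustrative sketch of the expected output (assumes `Rope` can be built
/// from a `&str`):
///
/// ```ignore
/// let rope = Rope::from("foo  \nbar\t\nbaz");
/// // Byte ranges of the trailing spaces after "foo" and the tab after "bar".
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![3..5, 9..10]);
/// ```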
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches(|c| matches!(c, ' ' | '\t')).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            // The first segment of this chunk continues the last line of the
            // previous chunk (if any). If it is entirely whitespace, extend that
            // line's trailing whitespace range instead of starting a new one.
            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        // The last segment of a chunk has no trailing '\n', so undo the extra
        // byte that was added for it above.
        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}