1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 markdown::ParsedMarkdown,
5 proto, Grammar, Language, LanguageRegistry,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 language_settings::{language_settings, LanguageSettings},
10 markdown::parse_markdown,
11 outline::OutlineItem,
12 syntax_map::{
13 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatches,
14 SyntaxSnapshot, ToTreeSitterPoint,
15 },
16 LanguageScope, Outline, RunnableTag,
17};
18use anyhow::{anyhow, Context, Result};
19pub use clock::ReplicaId;
20use futures::channel::oneshot;
21use gpui::{AppContext, EventEmitter, HighlightStyle, ModelContext, Task, TaskLabel};
22use lazy_static::lazy_static;
23use lsp::LanguageServerId;
24use parking_lot::Mutex;
25use similar::{ChangeTag, TextDiff};
26use smallvec::SmallVec;
27use smol::future::yield_now;
28use std::{
29 any::Any,
30 cmp::{self, Ordering},
31 collections::BTreeMap,
32 ffi::OsStr,
33 future::Future,
34 iter::{self, Iterator, Peekable},
35 mem,
36 ops::{Deref, Range},
37 path::{Path, PathBuf},
38 str,
39 sync::Arc,
40 time::{Duration, Instant, SystemTime},
41 vec,
42};
43use sum_tree::TreeMap;
44use text::operation_queue::OperationQueue;
45use text::*;
46pub use text::{
47 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
48 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
49 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
50 Transaction, TransactionId, Unclipped,
51};
52use theme::SyntaxTheme;
53#[cfg(any(test, feature = "test-support"))]
54use util::RandomCharIter;
55use util::RangeExt;
56
57#[cfg(any(test, feature = "test-support"))]
58pub use {tree_sitter_rust, tree_sitter_typescript};
59
60pub use lsp::DiagnosticSeverity;
61
62lazy_static! {
63 /// A label for the background task spawned by the buffer to compute
64 /// a diff against the contents of its file.
65 pub static ref BUFFER_DIFF_TASK: TaskLabel = TaskLabel::new();
66}
67
/// Indicates whether a [Buffer] has permission to edit.
69#[derive(PartialEq, Clone, Copy, Debug)]
70pub enum Capability {
71 /// The buffer is a mutable replica.
72 ReadWrite,
73 /// The buffer is a read-only replica.
74 ReadOnly,
75}
76
77/// An in-memory representation of a source code file, including its text,
78/// syntax trees, git status, and diagnostics.
79pub struct Buffer {
80 text: TextBuffer,
81 diff_base: Option<Rope>,
82 git_diff: git::diff::BufferDiff,
83 file: Option<Arc<dyn File>>,
84 /// The mtime of the file when this buffer was last loaded from
85 /// or saved to disk.
86 saved_mtime: Option<SystemTime>,
87 /// The version vector when this buffer was last loaded from
88 /// or saved to disk.
89 saved_version: clock::Global,
90 transaction_depth: usize,
91 was_dirty_before_starting_transaction: Option<bool>,
92 reload_task: Option<Task<Result<()>>>,
93 language: Option<Arc<Language>>,
94 autoindent_requests: Vec<Arc<AutoindentRequest>>,
95 pending_autoindent: Option<Task<()>>,
96 sync_parse_timeout: Duration,
97 syntax_map: Mutex<SyntaxMap>,
98 parsing_in_background: bool,
99 parse_count: usize,
100 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
101 remote_selections: TreeMap<ReplicaId, SelectionSet>,
102 selections_update_count: usize,
103 diagnostics_update_count: usize,
104 diagnostics_timestamp: clock::Lamport,
105 file_update_count: usize,
106 git_diff_update_count: usize,
107 completion_triggers: Vec<String>,
108 completion_triggers_timestamp: clock::Lamport,
109 deferred_ops: OperationQueue<Operation>,
110 capability: Capability,
111 has_conflict: bool,
112 diff_base_version: usize,
113}
114
115/// An immutable, cheaply cloneable representation of a fixed
116/// state of a buffer.
117pub struct BufferSnapshot {
118 text: text::BufferSnapshot,
119 git_diff: git::diff::BufferDiff,
120 pub(crate) syntax: SyntaxSnapshot,
121 file: Option<Arc<dyn File>>,
122 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
123 diagnostics_update_count: usize,
124 file_update_count: usize,
125 git_diff_update_count: usize,
126 remote_selections: TreeMap<ReplicaId, SelectionSet>,
127 selections_update_count: usize,
128 language: Option<Arc<Language>>,
129 parse_count: usize,
130}
131
132/// The kind and amount of indentation in a particular line. For now,
133/// assumes that indentation is all the same character.
134#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
135pub struct IndentSize {
136 /// The number of bytes that comprise the indentation.
137 pub len: u32,
138 /// The kind of whitespace used for indentation.
139 pub kind: IndentKind,
140}
141
142/// A whitespace character that's used for indentation.
143#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
144pub enum IndentKind {
145 /// An ASCII space character.
146 #[default]
147 Space,
148 /// An ASCII tab character.
149 Tab,
150}
151
152/// The shape of a selection cursor.
153#[derive(Copy, Clone, PartialEq, Eq, Debug, Default)]
154pub enum CursorShape {
155 /// A vertical bar
156 #[default]
157 Bar,
158 /// A block that surrounds the following character
159 Block,
160 /// An underline that runs along the following character
161 Underscore,
162 /// A box drawn around the following character
163 Hollow,
164}
165
166#[derive(Clone, Debug)]
167struct SelectionSet {
168 line_mode: bool,
169 cursor_shape: CursorShape,
170 selections: Arc<[Selection<Anchor>]>,
171 lamport_timestamp: clock::Lamport,
172}
173
174/// A diagnostic associated with a certain range of a buffer.
175#[derive(Clone, Debug, PartialEq, Eq)]
176pub struct Diagnostic {
177 /// The name of the service that produced this diagnostic.
178 pub source: Option<String>,
179 /// A machine-readable code that identifies this diagnostic.
180 pub code: Option<String>,
181 /// Whether this diagnostic is a hint, warning, or error.
182 pub severity: DiagnosticSeverity,
183 /// The human-readable message associated with this diagnostic.
184 pub message: String,
185 /// An id that identifies the group to which this diagnostic belongs.
186 ///
187 /// When a language server produces a diagnostic with
188 /// one or more associated diagnostics, those diagnostics are all
189 /// assigned a single group id.
190 pub group_id: usize,
191 /// Whether this diagnostic is the primary diagnostic for its group.
192 ///
193 /// In a given group, the primary diagnostic is the top-level diagnostic
194 /// returned by the language server. The non-primary diagnostics are the
195 /// associated diagnostics.
196 pub is_primary: bool,
197 /// Whether this diagnostic is considered to originate from an analysis of
198 /// files on disk, as opposed to any unsaved buffer contents. This is a
199 /// property of a given diagnostic source, and is configured for a given
200 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
201 /// for the language server.
202 pub is_disk_based: bool,
203 /// Whether this diagnostic marks unnecessary code.
204 pub is_unnecessary: bool,
205}
206
207/// TODO - move this into the `project` crate and make it private.
208pub async fn prepare_completion_documentation(
209 documentation: &lsp::Documentation,
210 language_registry: &Arc<LanguageRegistry>,
211 language: Option<Arc<Language>>,
212) -> Documentation {
213 match documentation {
214 lsp::Documentation::String(text) => {
215 if text.lines().count() <= 1 {
216 Documentation::SingleLine(text.clone())
217 } else {
218 Documentation::MultiLinePlainText(text.clone())
219 }
220 }
221
222 lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
223 lsp::MarkupKind::PlainText => {
224 if value.lines().count() <= 1 {
225 Documentation::SingleLine(value.clone())
226 } else {
227 Documentation::MultiLinePlainText(value.clone())
228 }
229 }
230
231 lsp::MarkupKind::Markdown => {
232 let parsed = parse_markdown(value, language_registry, language).await;
233 Documentation::MultiLineMarkdown(parsed)
234 }
235 },
236 }
237}
238
239/// Documentation associated with a [`Completion`].
240#[derive(Clone, Debug)]
241pub enum Documentation {
242 /// There is no documentation for this completion.
243 Undocumented,
244 /// A single line of documentation.
245 SingleLine(String),
246 /// Multiple lines of plain text documentation.
247 MultiLinePlainText(String),
248 /// Markdown documentation.
249 MultiLineMarkdown(ParsedMarkdown),
250}
251
252/// An operation used to synchronize this buffer with its other replicas.
253#[derive(Clone, Debug, PartialEq)]
254pub enum Operation {
255 /// A text operation.
256 Buffer(text::Operation),
257
258 /// An update to the buffer's diagnostics.
259 UpdateDiagnostics {
260 /// The id of the language server that produced the new diagnostics.
261 server_id: LanguageServerId,
262 /// The diagnostics.
263 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
264 /// The buffer's lamport timestamp.
265 lamport_timestamp: clock::Lamport,
266 },
267
268 /// An update to the most recent selections in this buffer.
269 UpdateSelections {
270 /// The selections.
271 selections: Arc<[Selection<Anchor>]>,
272 /// The buffer's lamport timestamp.
273 lamport_timestamp: clock::Lamport,
274 /// Whether the selections are in 'line mode'.
275 line_mode: bool,
276 /// The [`CursorShape`] associated with these selections.
277 cursor_shape: CursorShape,
278 },
279
280 /// An update to the characters that should trigger autocompletion
281 /// for this buffer.
282 UpdateCompletionTriggers {
283 /// The characters that trigger autocompletion.
284 triggers: Vec<String>,
285 /// The buffer's lamport timestamp.
286 lamport_timestamp: clock::Lamport,
287 },
288}
289
290/// An event that occurs in a buffer.
291#[derive(Clone, Debug, PartialEq)]
292pub enum Event {
293 /// The buffer was changed in a way that must be
294 /// propagated to its other replicas.
295 Operation(Operation),
296 /// The buffer was edited.
297 Edited,
298 /// The buffer's `dirty` bit changed.
299 DirtyChanged,
300 /// The buffer was saved.
301 Saved,
302 /// The buffer's file was changed on disk.
303 FileHandleChanged,
304 /// The buffer was reloaded.
305 Reloaded,
306 /// The buffer's diff_base changed.
307 DiffBaseChanged,
    /// The buffer's diff against its diff base was recalculated.
309 DiffUpdated,
310 /// The buffer's language was changed.
311 LanguageChanged,
312 /// The buffer's syntax trees were updated.
313 Reparsed,
314 /// The buffer's diagnostics were updated.
315 DiagnosticsUpdated,
316 /// The buffer gained or lost editing capabilities.
317 CapabilityChanged,
318 /// The buffer was explicitly requested to close.
319 Closed,
320}
321
322/// The file associated with a buffer.
323pub trait File: Send + Sync {
324 /// Returns the [`LocalFile`] associated with this file, if the
325 /// file is local.
326 fn as_local(&self) -> Option<&dyn LocalFile>;
327
328 /// Returns whether this file is local.
329 fn is_local(&self) -> bool {
330 self.as_local().is_some()
331 }
332
333 /// Returns the file's mtime.
334 fn mtime(&self) -> Option<SystemTime>;
335
336 /// Returns the path of this file relative to the worktree's root directory.
337 fn path(&self) -> &Arc<Path>;
338
339 /// Returns the path of this file relative to the worktree's parent directory (this means it
340 /// includes the name of the worktree's root folder).
341 fn full_path(&self, cx: &AppContext) -> PathBuf;
342
343 /// Returns the last component of this handle's absolute path. If this handle refers to the root
344 /// of its worktree, then this method will return the name of the worktree itself.
345 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
346
347 /// Returns the id of the worktree to which this file belongs.
348 ///
349 /// This is needed for looking up project-specific settings.
350 fn worktree_id(&self) -> usize;
351
352 /// Returns whether the file has been deleted.
353 fn is_deleted(&self) -> bool;
354
    /// Returns whether the file existed on disk at some point.
356 fn is_created(&self) -> bool {
357 self.mtime().is_some()
358 }
359
360 /// Converts this file into an [`Any`] trait object.
361 fn as_any(&self) -> &dyn Any;
362
363 /// Converts this file into a protobuf message.
364 fn to_proto(&self) -> rpc::proto::File;
365
    /// Returns whether Zed considers this to be a private file.
367 fn is_private(&self) -> bool;
368}
369
370/// The file associated with a buffer, in the case where the file is on the local disk.
371pub trait LocalFile: File {
372 /// Returns the absolute path of this file.
373 fn abs_path(&self, cx: &AppContext) -> PathBuf;
374
375 /// Loads the file's contents from disk.
376 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
377
378 /// Called when the buffer is reloaded from disk.
379 fn buffer_reloaded(
380 &self,
381 buffer_id: BufferId,
382 version: &clock::Global,
383 line_ending: LineEnding,
384 mtime: Option<SystemTime>,
385 cx: &mut AppContext,
386 );
387
388 /// Returns true if the file should not be shared with collaborators.
389 fn is_private(&self, _: &AppContext) -> bool {
390 false
391 }
392}
393
394/// The auto-indent behavior associated with an editing operation.
395/// For some editing operations, each affected line of text has its
396/// indentation recomputed. For other operations, the entire block
397/// of edited text is adjusted uniformly.
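///
/// # Example
///
/// An illustrative sketch (not compiled here) of passing block mode to
/// [`Buffer::edit`]; `point_range`, the pasted text, and `cx` are assumed to
/// be in scope:
///
/// ```ignore
/// buffer.edit(
///     [(point_range, "    let x = 1;\n    let y = 2;\n")],
///     Some(AutoindentMode::Block {
///         // The first line of the pasted block was originally indented by 4 columns.
///         original_indent_columns: vec![4],
///     }),
///     cx,
/// );
/// ```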
398#[derive(Clone, Debug)]
399pub enum AutoindentMode {
400 /// Indent each line of inserted text.
401 EachLine,
402 /// Apply the same indentation adjustment to all of the lines
403 /// in a given insertion.
404 Block {
405 /// The original indentation level of the first line of each
406 /// insertion, if it has been copied.
407 original_indent_columns: Vec<u32>,
408 },
409}
410
411#[derive(Clone)]
412struct AutoindentRequest {
413 before_edit: BufferSnapshot,
414 entries: Vec<AutoindentRequestEntry>,
415 is_block_mode: bool,
416}
417
418#[derive(Clone)]
419struct AutoindentRequestEntry {
420 /// A range of the buffer whose indentation should be adjusted.
421 range: Range<Anchor>,
422 /// Whether or not these lines should be considered brand new, for the
423 /// purpose of auto-indent. When text is not new, its indentation will
424 /// only be adjusted if the suggested indentation level has *changed*
425 /// since the edit was made.
426 first_line_is_new: bool,
427 indent_size: IndentSize,
428 original_indent_column: Option<u32>,
429}
430
431#[derive(Debug)]
432struct IndentSuggestion {
433 basis_row: u32,
434 delta: Ordering,
435 within_error: bool,
436}
437
438struct BufferChunkHighlights<'a> {
439 captures: SyntaxMapCaptures<'a>,
440 next_capture: Option<SyntaxMapCapture<'a>>,
441 stack: Vec<(usize, HighlightId)>,
442 highlight_maps: Vec<HighlightMap>,
443}
444
445/// An iterator that yields chunks of a buffer's text, along with their
446/// syntax highlights and diagnostic status.
447pub struct BufferChunks<'a> {
448 range: Range<usize>,
449 chunks: text::Chunks<'a>,
450 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
451 error_depth: usize,
452 warning_depth: usize,
453 information_depth: usize,
454 hint_depth: usize,
455 unnecessary_depth: usize,
456 highlights: Option<BufferChunkHighlights<'a>>,
457}
458
459/// A chunk of a buffer's text, along with its syntax highlight and
460/// diagnostic status.
461#[derive(Clone, Copy, Debug, Default)]
462pub struct Chunk<'a> {
463 /// The text of the chunk.
464 pub text: &'a str,
465 /// The syntax highlighting style of the chunk.
466 pub syntax_highlight_id: Option<HighlightId>,
467 /// The highlight style that has been applied to this chunk in
468 /// the editor.
469 pub highlight_style: Option<HighlightStyle>,
470 /// The severity of diagnostic associated with this chunk, if any.
471 pub diagnostic_severity: Option<DiagnosticSeverity>,
472 /// Whether this chunk of text is marked as unnecessary.
473 pub is_unnecessary: bool,
474 /// Whether this chunk of text was originally a tab character.
475 pub is_tab: bool,
476}
477
478/// A set of edits to a given version of a buffer, computed asynchronously.
479pub struct Diff {
480 pub(crate) base_version: clock::Global,
481 line_ending: LineEnding,
482 edits: Vec<(Range<usize>, Arc<str>)>,
483}
484
485#[derive(Clone, Copy)]
486pub(crate) struct DiagnosticEndpoint {
487 offset: usize,
488 is_start: bool,
489 severity: DiagnosticSeverity,
490 is_unnecessary: bool,
491}
492
493/// A class of characters, used for characterizing a run of text.
494#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
495pub enum CharKind {
496 /// Whitespace.
497 Whitespace,
498 /// Punctuation.
499 Punctuation,
500 /// Word.
501 Word,
502}
503
/// A runnable is a set of data about a region of the buffer that could be resolved into a task.
505pub struct Runnable {
506 pub tags: SmallVec<[RunnableTag; 1]>,
507 pub language: Arc<Language>,
508 pub buffer: BufferId,
509}
510
511impl Buffer {
512 /// Create a new buffer with the given base text.
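    ///
    /// # Example
    ///
    /// A minimal sketch (not compiled here); it assumes a gpui `AppContext` is
    /// available so a model can be created:
    ///
    /// ```ignore
    /// let buffer = cx.new_model(|cx| Buffer::local("fn main() {}\n", cx));
    /// ```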
513 pub fn local<T: Into<String>>(base_text: T, cx: &mut ModelContext<Self>) -> Self {
514 Self::build(
515 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
516 None,
517 None,
518 Capability::ReadWrite,
519 )
520 }
521
522 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
523 pub fn local_normalized(
524 base_text_normalized: Rope,
525 line_ending: LineEnding,
526 cx: &mut ModelContext<Self>,
527 ) -> Self {
528 Self::build(
529 TextBuffer::new_normalized(
530 0,
531 cx.entity_id().as_non_zero_u64().into(),
532 line_ending,
533 base_text_normalized,
534 ),
535 None,
536 None,
537 Capability::ReadWrite,
538 )
539 }
540
541 /// Create a new buffer that is a replica of a remote buffer.
542 pub fn remote(
543 remote_id: BufferId,
544 replica_id: ReplicaId,
545 capability: Capability,
546 base_text: impl Into<String>,
547 ) -> Self {
548 Self::build(
549 TextBuffer::new(replica_id, remote_id, base_text.into()),
550 None,
551 None,
552 capability,
553 )
554 }
555
556 /// Create a new buffer that is a replica of a remote buffer, populating its
557 /// state from the given protobuf message.
558 pub fn from_proto(
559 replica_id: ReplicaId,
560 capability: Capability,
561 message: proto::BufferState,
562 file: Option<Arc<dyn File>>,
563 ) -> Result<Self> {
564 let buffer_id = BufferId::new(message.id)
565 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
566 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
567 let mut this = Self::build(
568 buffer,
569 message.diff_base.map(|text| text.into()),
570 file,
571 capability,
572 );
573 this.text.set_line_ending(proto::deserialize_line_ending(
574 rpc::proto::LineEnding::from_i32(message.line_ending)
575 .ok_or_else(|| anyhow!("missing line_ending"))?,
576 ));
577 this.saved_version = proto::deserialize_version(&message.saved_version);
578 this.saved_mtime = message.saved_mtime.map(|time| time.into());
579 Ok(this)
580 }
581
582 /// Serialize the buffer's state to a protobuf message.
583 pub fn to_proto(&self) -> proto::BufferState {
584 proto::BufferState {
585 id: self.remote_id().into(),
586 file: self.file.as_ref().map(|f| f.to_proto()),
587 base_text: self.base_text().to_string(),
588 diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
589 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
590 saved_version: proto::serialize_version(&self.saved_version),
591 saved_mtime: self.saved_mtime.map(|time| time.into()),
592 }
593 }
594
595 /// Serialize as protobufs all of the changes to the buffer since the given version.
596 pub fn serialize_ops(
597 &self,
598 since: Option<clock::Global>,
599 cx: &AppContext,
600 ) -> Task<Vec<proto::Operation>> {
601 let mut operations = Vec::new();
602 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
603
604 operations.extend(self.remote_selections.iter().map(|(_, set)| {
605 proto::serialize_operation(&Operation::UpdateSelections {
606 selections: set.selections.clone(),
607 lamport_timestamp: set.lamport_timestamp,
608 line_mode: set.line_mode,
609 cursor_shape: set.cursor_shape,
610 })
611 }));
612
613 for (server_id, diagnostics) in &self.diagnostics {
614 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
615 lamport_timestamp: self.diagnostics_timestamp,
616 server_id: *server_id,
617 diagnostics: diagnostics.iter().cloned().collect(),
618 }));
619 }
620
621 operations.push(proto::serialize_operation(
622 &Operation::UpdateCompletionTriggers {
623 triggers: self.completion_triggers.clone(),
624 lamport_timestamp: self.completion_triggers_timestamp,
625 },
626 ));
627
628 let text_operations = self.text.operations().clone();
629 cx.background_executor().spawn(async move {
630 let since = since.unwrap_or_default();
631 operations.extend(
632 text_operations
633 .iter()
634 .filter(|(_, op)| !since.observed(op.timestamp()))
635 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
636 );
637 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
638 operations
639 })
640 }
641
642 /// Assign a language to the buffer, returning the buffer.
643 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
644 self.set_language(Some(language), cx);
645 self
646 }
647
648 /// Returns the [Capability] of this buffer.
649 pub fn capability(&self) -> Capability {
650 self.capability
651 }
652
653 /// Whether this buffer can only be read.
654 pub fn read_only(&self) -> bool {
655 self.capability == Capability::ReadOnly
656 }
657
658 /// Builds a [Buffer] with the given underlying [TextBuffer], diff base, [File] and [Capability].
659 pub fn build(
660 buffer: TextBuffer,
661 diff_base: Option<Rope>,
662 file: Option<Arc<dyn File>>,
663 capability: Capability,
664 ) -> Self {
665 let saved_mtime = file.as_ref().and_then(|file| file.mtime());
666
667 Self {
668 saved_mtime,
669 saved_version: buffer.version(),
670 reload_task: None,
671 transaction_depth: 0,
672 was_dirty_before_starting_transaction: None,
673 text: buffer,
674 diff_base,
675 diff_base_version: 0,
676 git_diff: git::diff::BufferDiff::new(),
677 file,
678 capability,
679 syntax_map: Mutex::new(SyntaxMap::new()),
680 parsing_in_background: false,
681 parse_count: 0,
682 sync_parse_timeout: Duration::from_millis(1),
683 autoindent_requests: Default::default(),
684 pending_autoindent: Default::default(),
685 language: None,
686 remote_selections: Default::default(),
687 selections_update_count: 0,
688 diagnostics: Default::default(),
689 diagnostics_update_count: 0,
690 diagnostics_timestamp: Default::default(),
691 file_update_count: 0,
692 git_diff_update_count: 0,
693 completion_triggers: Default::default(),
694 completion_triggers_timestamp: Default::default(),
695 deferred_ops: OperationQueue::new(),
696 has_conflict: false,
697 }
698 }
699
700 /// Retrieve a snapshot of the buffer's current state. This is computationally
701 /// cheap, and allows reading from the buffer on a background thread.
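    ///
    /// # Example
    ///
    /// A minimal sketch (not compiled here) of handing a snapshot to a background
    /// task; `buffer` and a `ModelContext<Buffer>` named `cx` are assumed:
    ///
    /// ```ignore
    /// let snapshot = buffer.snapshot();
    /// cx.background_executor()
    ///     .spawn(async move {
    ///         // The snapshot is immutable, so it can be read off the main thread.
    ///         let line_count = snapshot.max_point().row + 1;
    ///         println!("buffer has {line_count} lines");
    ///     })
    ///     .detach();
    /// ```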
702 pub fn snapshot(&self) -> BufferSnapshot {
703 let text = self.text.snapshot();
704 let mut syntax_map = self.syntax_map.lock();
705 syntax_map.interpolate(&text);
706 let syntax = syntax_map.snapshot();
707
708 BufferSnapshot {
709 text,
710 syntax,
711 git_diff: self.git_diff.clone(),
712 file: self.file.clone(),
713 remote_selections: self.remote_selections.clone(),
714 diagnostics: self.diagnostics.clone(),
715 diagnostics_update_count: self.diagnostics_update_count,
716 file_update_count: self.file_update_count,
717 git_diff_update_count: self.git_diff_update_count,
718 language: self.language.clone(),
719 parse_count: self.parse_count,
720 selections_update_count: self.selections_update_count,
721 }
722 }
723
724 #[cfg(test)]
725 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
726 &self.text
727 }
728
729 /// Retrieve a snapshot of the buffer's raw text, without any
730 /// language-related state like the syntax tree or diagnostics.
731 pub fn text_snapshot(&self) -> text::BufferSnapshot {
732 self.text.snapshot()
733 }
734
735 /// The file associated with the buffer, if any.
736 pub fn file(&self) -> Option<&Arc<dyn File>> {
737 self.file.as_ref()
738 }
739
740 /// The version of the buffer that was last saved or reloaded from disk.
741 pub fn saved_version(&self) -> &clock::Global {
742 &self.saved_version
743 }
744
745 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
746 pub fn saved_mtime(&self) -> Option<SystemTime> {
747 self.saved_mtime
748 }
749
750 /// Assign a language to the buffer.
751 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
752 self.parse_count += 1;
753 self.syntax_map.lock().clear();
754 self.language = language;
755 self.reparse(cx);
756 cx.emit(Event::LanguageChanged);
757 }
758
759 /// Assign a language registry to the buffer. This allows the buffer to retrieve
760 /// other languages if parts of the buffer are written in different languages.
761 pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
762 self.syntax_map
763 .lock()
764 .set_language_registry(language_registry);
765 }
766
767 /// Assign the buffer a new [Capability].
768 pub fn set_capability(&mut self, capability: Capability, cx: &mut ModelContext<Self>) {
769 self.capability = capability;
770 cx.emit(Event::CapabilityChanged)
771 }
772
773 /// This method is called to signal that the buffer has been saved.
774 pub fn did_save(
775 &mut self,
776 version: clock::Global,
777 mtime: Option<SystemTime>,
778 cx: &mut ModelContext<Self>,
779 ) {
780 self.saved_version = version;
781 self.has_conflict = false;
782 self.saved_mtime = mtime;
783 cx.emit(Event::Saved);
784 cx.notify();
785 }
786
787 /// Reloads the contents of the buffer from disk.
788 pub fn reload(
789 &mut self,
790 cx: &mut ModelContext<Self>,
791 ) -> oneshot::Receiver<Option<Transaction>> {
792 let (tx, rx) = futures::channel::oneshot::channel();
793 let prev_version = self.text.version();
794 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
795 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
796 let file = this.file.as_ref()?.as_local()?;
797 Some((file.mtime(), file.load(cx)))
798 })?
799 else {
800 return Ok(());
801 };
802
803 let new_text = new_text.await?;
804 let diff = this
805 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
806 .await;
807 this.update(&mut cx, |this, cx| {
808 if this.version() == diff.base_version {
809 this.finalize_last_transaction();
810 this.apply_diff(diff, cx);
811 tx.send(this.finalize_last_transaction().cloned()).ok();
812 this.has_conflict = false;
813 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
814 } else {
815 if !diff.edits.is_empty()
816 || this
817 .edits_since::<usize>(&diff.base_version)
818 .next()
819 .is_some()
820 {
821 this.has_conflict = true;
822 }
823
824 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
825 }
826
827 this.reload_task.take();
828 })
829 }));
830 rx
831 }
832
833 /// This method is called to signal that the buffer has been reloaded.
834 pub fn did_reload(
835 &mut self,
836 version: clock::Global,
837 line_ending: LineEnding,
838 mtime: Option<SystemTime>,
839 cx: &mut ModelContext<Self>,
840 ) {
841 self.saved_version = version;
842 self.text.set_line_ending(line_ending);
843 self.saved_mtime = mtime;
844 if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
845 file.buffer_reloaded(
846 self.remote_id(),
847 &self.saved_version,
848 self.line_ending(),
849 self.saved_mtime,
850 cx,
851 );
852 }
853 cx.emit(Event::Reloaded);
854 cx.notify();
855 }
856
857 /// Updates the [File] backing this buffer. This should be called when
858 /// the file has changed or has been deleted.
859 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut ModelContext<Self>) {
860 let mut file_changed = false;
861
862 if let Some(old_file) = self.file.as_ref() {
863 if new_file.path() != old_file.path() {
864 file_changed = true;
865 }
866
867 if new_file.is_deleted() {
868 if !old_file.is_deleted() {
869 file_changed = true;
870 if !self.is_dirty() {
871 cx.emit(Event::DirtyChanged);
872 }
873 }
874 } else {
875 let new_mtime = new_file.mtime();
876 if new_mtime != old_file.mtime() {
877 file_changed = true;
878
879 if !self.is_dirty() {
880 self.reload(cx).close();
881 }
882 }
883 }
884 } else {
885 file_changed = true;
886 };
887
888 self.file = Some(new_file);
889 if file_changed {
890 self.file_update_count += 1;
891 cx.emit(Event::FileHandleChanged);
892 cx.notify();
893 }
894 }
895
896 /// Returns the current diff base, see [Buffer::set_diff_base].
897 pub fn diff_base(&self) -> Option<&Rope> {
898 self.diff_base.as_ref()
899 }
900
901 /// Sets the text that will be used to compute a Git diff
902 /// against the buffer text.
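    ///
    /// # Example
    ///
    /// A minimal sketch (not compiled here); the diff base text is illustrative:
    ///
    /// ```ignore
    /// let head_text = Rope::from("fn main() {}\n");
    /// buffer.set_diff_base(Some(head_text), cx);
    /// // A `DiffBaseChanged` event is emitted once the diff has been recomputed.
    /// ```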
903 pub fn set_diff_base(&mut self, diff_base: Option<Rope>, cx: &mut ModelContext<Self>) {
904 self.diff_base = diff_base;
905 self.diff_base_version += 1;
906 if let Some(recalc_task) = self.git_diff_recalc(cx) {
907 cx.spawn(|buffer, mut cx| async move {
908 recalc_task.await;
909 buffer
910 .update(&mut cx, |_, cx| {
911 cx.emit(Event::DiffBaseChanged);
912 })
913 .ok();
914 })
915 .detach();
916 }
917 }
918
    /// Returns a version number that is incremented each time a new diff base is set for the buffer.
920 pub fn diff_base_version(&self) -> usize {
921 self.diff_base_version
922 }
923
924 /// Recomputes the Git diff status.
925 pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) -> Option<Task<()>> {
926 let diff_base = self.diff_base.clone()?; // TODO: Make this an Arc
927 let snapshot = self.snapshot();
928
929 let mut diff = self.git_diff.clone();
930 let diff = cx.background_executor().spawn(async move {
931 diff.update(&diff_base, &snapshot).await;
932 diff
933 });
934
935 Some(cx.spawn(|this, mut cx| async move {
936 let buffer_diff = diff.await;
937 this.update(&mut cx, |this, cx| {
938 this.git_diff = buffer_diff;
939 this.git_diff_update_count += 1;
940 cx.emit(Event::DiffUpdated);
941 })
942 .ok();
943 }))
944 }
945
946 /// Returns the primary [Language] assigned to this [Buffer].
947 pub fn language(&self) -> Option<&Arc<Language>> {
948 self.language.as_ref()
949 }
950
951 /// Returns the [Language] at the given location.
952 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
953 let offset = position.to_offset(self);
954 self.syntax_map
955 .lock()
956 .layers_for_range(offset..offset, &self.text)
957 .last()
958 .map(|info| info.language.clone())
959 .or_else(|| self.language.clone())
960 }
961
962 /// The number of times the buffer was parsed.
963 pub fn parse_count(&self) -> usize {
964 self.parse_count
965 }
966
967 /// The number of times selections were updated.
968 pub fn selections_update_count(&self) -> usize {
969 self.selections_update_count
970 }
971
972 /// The number of times diagnostics were updated.
973 pub fn diagnostics_update_count(&self) -> usize {
974 self.diagnostics_update_count
975 }
976
977 /// The number of times the underlying file was updated.
978 pub fn file_update_count(&self) -> usize {
979 self.file_update_count
980 }
981
982 /// The number of times the git diff status was updated.
983 pub fn git_diff_update_count(&self) -> usize {
984 self.git_diff_update_count
985 }
986
987 /// Whether the buffer is being parsed in the background.
988 #[cfg(any(test, feature = "test-support"))]
989 pub fn is_parsing(&self) -> bool {
990 self.parsing_in_background
991 }
992
993 /// Indicates whether the buffer contains any regions that may be
994 /// written in a language that hasn't been loaded yet.
995 pub fn contains_unknown_injections(&self) -> bool {
996 self.syntax_map.lock().contains_unknown_injections()
997 }
998
999 #[cfg(test)]
1000 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1001 self.sync_parse_timeout = timeout;
1002 }
1003
1004 /// Called after an edit to synchronize the buffer's main parse tree with
1005 /// the buffer's new underlying state.
1006 ///
1007 /// Locks the syntax map and interpolates the edits since the last reparse
1008 /// into the foreground syntax tree.
1009 ///
1010 /// Then takes a stable snapshot of the syntax map before unlocking it.
1011 /// The snapshot with the interpolated edits is sent to a background thread,
1012 /// where we ask Tree-sitter to perform an incremental parse.
1013 ///
1014 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1015 /// waiting on the parse to complete. As soon as it completes, we proceed
1016 /// synchronously, unless a 1ms timeout elapses.
1017 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree
    /// still in the foreground. When the background parse completes, it calls
    /// back into the main thread and assigns the parsed state in the foreground.
1022 ///
1023 /// If the buffer or grammar changed since the start of the background parse,
1024 /// initiate an additional reparse recursively. To avoid concurrent parses
1025 /// for the same buffer, we only initiate a new parse if we are not already
1026 /// parsing in the background.
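    ///
    /// # Sketch of the sync/async fallback
    ///
    /// A simplified sketch of the pattern described above (bookkeeping and
    /// error handling are omitted; see the implementation below):
    ///
    /// ```ignore
    /// let parse_task = cx.background_executor().spawn(async move {
    ///     syntax_snapshot.reparse(&text, language_registry, language);
    ///     syntax_snapshot
    /// });
    /// match cx
    ///     .background_executor()
    ///     .block_with_timeout(self.sync_parse_timeout, parse_task)
    /// {
    ///     // Finished within the timeout: apply the new syntax tree synchronously.
    ///     Ok(snapshot) => self.did_finish_parsing(snapshot, cx),
    ///     // Timed out: keep the interpolated tree and finish in the background.
    ///     Err(parse_task) => { /* spawn a task that awaits `parse_task` */ }
    /// }
    /// ```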
1027 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1028 if self.parsing_in_background {
1029 return;
1030 }
1031 let language = if let Some(language) = self.language.clone() {
1032 language
1033 } else {
1034 return;
1035 };
1036
1037 let text = self.text_snapshot();
1038 let parsed_version = self.version();
1039
1040 let mut syntax_map = self.syntax_map.lock();
1041 syntax_map.interpolate(&text);
1042 let language_registry = syntax_map.language_registry();
1043 let mut syntax_snapshot = syntax_map.snapshot();
1044 drop(syntax_map);
1045
1046 let parse_task = cx.background_executor().spawn({
1047 let language = language.clone();
1048 let language_registry = language_registry.clone();
1049 async move {
1050 syntax_snapshot.reparse(&text, language_registry, language);
1051 syntax_snapshot
1052 }
1053 });
1054
1055 match cx
1056 .background_executor()
1057 .block_with_timeout(self.sync_parse_timeout, parse_task)
1058 {
1059 Ok(new_syntax_snapshot) => {
1060 self.did_finish_parsing(new_syntax_snapshot, cx);
1061 return;
1062 }
1063 Err(parse_task) => {
1064 self.parsing_in_background = true;
1065 cx.spawn(move |this, mut cx| async move {
1066 let new_syntax_map = parse_task.await;
1067 this.update(&mut cx, move |this, cx| {
1068 let grammar_changed =
1069 this.language.as_ref().map_or(true, |current_language| {
1070 !Arc::ptr_eq(&language, current_language)
1071 });
1072 let language_registry_changed = new_syntax_map
1073 .contains_unknown_injections()
1074 && language_registry.map_or(false, |registry| {
1075 registry.version() != new_syntax_map.language_registry_version()
1076 });
1077 let parse_again = language_registry_changed
1078 || grammar_changed
1079 || this.version.changed_since(&parsed_version);
1080 this.did_finish_parsing(new_syntax_map, cx);
1081 this.parsing_in_background = false;
1082 if parse_again {
1083 this.reparse(cx);
1084 }
1085 })
1086 .ok();
1087 })
1088 .detach();
1089 }
1090 }
1091 }
1092
1093 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
1094 self.parse_count += 1;
1095 self.syntax_map.lock().did_parse(syntax_snapshot);
1096 self.request_autoindent(cx);
1097 cx.emit(Event::Reparsed);
1098 cx.notify();
1099 }
1100
1101 /// Assign to the buffer a set of diagnostics created by a given language server.
1102 pub fn update_diagnostics(
1103 &mut self,
1104 server_id: LanguageServerId,
1105 diagnostics: DiagnosticSet,
1106 cx: &mut ModelContext<Self>,
1107 ) {
1108 let lamport_timestamp = self.text.lamport_clock.tick();
1109 let op = Operation::UpdateDiagnostics {
1110 server_id,
1111 diagnostics: diagnostics.iter().cloned().collect(),
1112 lamport_timestamp,
1113 };
1114 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1115 self.send_operation(op, cx);
1116 }
1117
1118 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
1119 if let Some(indent_sizes) = self.compute_autoindents() {
1120 let indent_sizes = cx.background_executor().spawn(indent_sizes);
1121 match cx
1122 .background_executor()
1123 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1124 {
1125 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1126 Err(indent_sizes) => {
1127 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1128 let indent_sizes = indent_sizes.await;
1129 this.update(&mut cx, |this, cx| {
1130 this.apply_autoindents(indent_sizes, cx);
1131 })
1132 .ok();
1133 }));
1134 }
1135 }
1136 } else {
1137 self.autoindent_requests.clear();
1138 }
1139 }
1140
1141 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
1142 let max_rows_between_yields = 100;
1143 let snapshot = self.snapshot();
1144 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1145 return None;
1146 }
1147
1148 let autoindent_requests = self.autoindent_requests.clone();
1149 Some(async move {
1150 let mut indent_sizes = BTreeMap::new();
1151 for request in autoindent_requests {
1152 // Resolve each edited range to its row in the current buffer and in the
1153 // buffer before this batch of edits.
1154 let mut row_ranges = Vec::new();
1155 let mut old_to_new_rows = BTreeMap::new();
1156 let mut language_indent_sizes_by_new_row = Vec::new();
1157 for entry in &request.entries {
1158 let position = entry.range.start;
1159 let new_row = position.to_point(&snapshot).row;
1160 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1161 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1162
1163 if !entry.first_line_is_new {
1164 let old_row = position.to_point(&request.before_edit).row;
1165 old_to_new_rows.insert(old_row, new_row);
1166 }
1167 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1168 }
1169
1170 // Build a map containing the suggested indentation for each of the edited lines
1171 // with respect to the state of the buffer before these edits. This map is keyed
1172 // by the rows for these lines in the current state of the buffer.
1173 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1174 let old_edited_ranges =
1175 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1176 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1177 let mut language_indent_size = IndentSize::default();
1178 for old_edited_range in old_edited_ranges {
1179 let suggestions = request
1180 .before_edit
1181 .suggest_autoindents(old_edited_range.clone())
1182 .into_iter()
1183 .flatten();
1184 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1185 if let Some(suggestion) = suggestion {
1186 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1187
1188 // Find the indent size based on the language for this row.
1189 while let Some((row, size)) = language_indent_sizes.peek() {
1190 if *row > new_row {
1191 break;
1192 }
1193 language_indent_size = *size;
1194 language_indent_sizes.next();
1195 }
1196
1197 let suggested_indent = old_to_new_rows
1198 .get(&suggestion.basis_row)
1199 .and_then(|from_row| {
1200 Some(old_suggestions.get(from_row).copied()?.0)
1201 })
1202 .unwrap_or_else(|| {
1203 request
1204 .before_edit
1205 .indent_size_for_line(suggestion.basis_row)
1206 })
1207 .with_delta(suggestion.delta, language_indent_size);
1208 old_suggestions
1209 .insert(new_row, (suggested_indent, suggestion.within_error));
1210 }
1211 }
1212 yield_now().await;
1213 }
1214
1215 // In block mode, only compute indentation suggestions for the first line
1216 // of each insertion. Otherwise, compute suggestions for every inserted line.
1217 let new_edited_row_ranges = contiguous_ranges(
1218 row_ranges.iter().flat_map(|(range, _)| {
1219 if request.is_block_mode {
1220 range.start..range.start + 1
1221 } else {
1222 range.clone()
1223 }
1224 }),
1225 max_rows_between_yields,
1226 );
1227
1228 // Compute new suggestions for each line, but only include them in the result
1229 // if they differ from the old suggestion for that line.
1230 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1231 let mut language_indent_size = IndentSize::default();
1232 for new_edited_row_range in new_edited_row_ranges {
1233 let suggestions = snapshot
1234 .suggest_autoindents(new_edited_row_range.clone())
1235 .into_iter()
1236 .flatten();
1237 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1238 if let Some(suggestion) = suggestion {
1239 // Find the indent size based on the language for this row.
1240 while let Some((row, size)) = language_indent_sizes.peek() {
1241 if *row > new_row {
1242 break;
1243 }
1244 language_indent_size = *size;
1245 language_indent_sizes.next();
1246 }
1247
1248 let suggested_indent = indent_sizes
1249 .get(&suggestion.basis_row)
1250 .copied()
1251 .unwrap_or_else(|| {
1252 snapshot.indent_size_for_line(suggestion.basis_row)
1253 })
1254 .with_delta(suggestion.delta, language_indent_size);
1255 if old_suggestions.get(&new_row).map_or(
1256 true,
1257 |(old_indentation, was_within_error)| {
1258 suggested_indent != *old_indentation
1259 && (!suggestion.within_error || *was_within_error)
1260 },
1261 ) {
1262 indent_sizes.insert(new_row, suggested_indent);
1263 }
1264 }
1265 }
1266 yield_now().await;
1267 }
1268
1269 // For each block of inserted text, adjust the indentation of the remaining
1270 // lines of the block by the same amount as the first line was adjusted.
1271 if request.is_block_mode {
1272 for (row_range, original_indent_column) in
1273 row_ranges
1274 .into_iter()
1275 .filter_map(|(range, original_indent_column)| {
1276 if range.len() > 1 {
1277 Some((range, original_indent_column?))
1278 } else {
1279 None
1280 }
1281 })
1282 {
1283 let new_indent = indent_sizes
1284 .get(&row_range.start)
1285 .copied()
1286 .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
1287 let delta = new_indent.len as i64 - original_indent_column as i64;
1288 if delta != 0 {
1289 for row in row_range.skip(1) {
1290 indent_sizes.entry(row).or_insert_with(|| {
1291 let mut size = snapshot.indent_size_for_line(row);
1292 if size.kind == new_indent.kind {
1293 match delta.cmp(&0) {
1294 Ordering::Greater => size.len += delta as u32,
1295 Ordering::Less => {
1296 size.len = size.len.saturating_sub(-delta as u32)
1297 }
1298 Ordering::Equal => {}
1299 }
1300 }
1301 size
1302 });
1303 }
1304 }
1305 }
1306 }
1307 }
1308
1309 indent_sizes
1310 })
1311 }
1312
1313 fn apply_autoindents(
1314 &mut self,
1315 indent_sizes: BTreeMap<u32, IndentSize>,
1316 cx: &mut ModelContext<Self>,
1317 ) {
1318 self.autoindent_requests.clear();
1319
1320 let edits: Vec<_> = indent_sizes
1321 .into_iter()
1322 .filter_map(|(row, indent_size)| {
1323 let current_size = indent_size_for_line(self, row);
1324 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1325 })
1326 .collect();
1327
1328 self.edit(edits, None, cx);
1329 }
1330
1331 /// Create a minimal edit that will cause the given row to be indented
1332 /// with the given size. After applying this edit, the length of the line
1333 /// will always be at least `new_size.len`.
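    ///
    /// # Example
    ///
    /// An illustrative sketch (not compiled here): growing a 2-space indent to a
    /// 4-space indent on row 3 produces an insertion of two spaces at the start
    /// of that row.
    ///
    /// ```ignore
    /// let current = IndentSize { len: 2, kind: IndentKind::Space };
    /// let new = IndentSize { len: 4, kind: IndentKind::Space };
    /// let edit = Buffer::edit_for_indent_size_adjustment(3, current, new);
    /// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
    /// ```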
1334 pub fn edit_for_indent_size_adjustment(
1335 row: u32,
1336 current_size: IndentSize,
1337 new_size: IndentSize,
1338 ) -> Option<(Range<Point>, String)> {
1339 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1341 Ordering::Greater => {
1342 let point = Point::new(row, 0);
1343 Some((
1344 point..point,
1345 iter::repeat(new_size.char())
1346 .take((new_size.len - current_size.len) as usize)
1347 .collect::<String>(),
1348 ))
1349 }
1350
1351 Ordering::Less => Some((
1352 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1353 String::new(),
1354 )),
1355
1356 Ordering::Equal => None,
1357 }
1358 } else {
1359 Some((
1360 Point::new(row, 0)..Point::new(row, current_size.len),
1361 iter::repeat(new_size.char())
1362 .take(new_size.len as usize)
1363 .collect::<String>(),
1364 ))
1365 }
1366 }
1367
1368 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1369 /// and the given new text.
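    ///
    /// # Example
    ///
    /// A hedged sketch (not compiled here) of the intended flow, assuming an async
    /// context with access to the buffer's model handle; the same pattern appears in
    /// [`Buffer::reload`]:
    ///
    /// ```ignore
    /// let diff = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx));
    /// let diff = diff.await;
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.apply_diff(diff, cx);
    /// });
    /// ```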
1370 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1371 let old_text = self.as_rope().clone();
1372 let base_version = self.version();
1373 cx.background_executor()
1374 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1375 let old_text = old_text.to_string();
1376 let line_ending = LineEnding::detect(&new_text);
1377 LineEnding::normalize(&mut new_text);
1378
1379 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1380 let empty: Arc<str> = "".into();
1381
1382 let mut edits = Vec::new();
1383 let mut old_offset = 0;
1384 let mut new_offset = 0;
1385 let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
1386 for change in diff.iter_all_changes().map(Some).chain([None]) {
1387 if let Some(change) = &change {
1388 let len = change.value().len();
1389 match change.tag() {
1390 ChangeTag::Equal => {
1391 old_offset += len;
1392 new_offset += len;
1393 }
1394 ChangeTag::Delete => {
1395 let old_end_offset = old_offset + len;
1396 if let Some((last_old_range, _)) = &mut last_edit {
1397 last_old_range.end = old_end_offset;
1398 } else {
1399 last_edit =
1400 Some((old_offset..old_end_offset, new_offset..new_offset));
1401 }
1402 old_offset = old_end_offset;
1403 }
1404 ChangeTag::Insert => {
1405 let new_end_offset = new_offset + len;
1406 if let Some((_, last_new_range)) = &mut last_edit {
1407 last_new_range.end = new_end_offset;
1408 } else {
1409 last_edit =
1410 Some((old_offset..old_offset, new_offset..new_end_offset));
1411 }
1412 new_offset = new_end_offset;
1413 }
1414 }
1415 }
1416
1417 if let Some((old_range, new_range)) = &last_edit {
1418 if old_offset > old_range.end
1419 || new_offset > new_range.end
1420 || change.is_none()
1421 {
1422 let text = if new_range.is_empty() {
1423 empty.clone()
1424 } else {
1425 new_text[new_range.clone()].into()
1426 };
1427 edits.push((old_range.clone(), text));
1428 last_edit.take();
1429 }
1430 }
1431 }
1432
1433 Diff {
1434 base_version,
1435 line_ending,
1436 edits,
1437 }
1438 })
1439 }
1440
1441 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
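    ///
    /// # Example
    ///
    /// A minimal sketch (not compiled here), mirroring the flow used for
    /// [`Buffer::diff`]: compute the `Diff` in the background, then apply it.
    ///
    /// ```ignore
    /// let diff = buffer.update(cx, |buffer, cx| buffer.remove_trailing_whitespace(cx));
    /// let diff = diff.await;
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.apply_diff(diff, cx);
    /// });
    /// ```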
1443 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1444 let old_text = self.as_rope().clone();
1445 let line_ending = self.line_ending();
1446 let base_version = self.version();
1447 cx.background_executor().spawn(async move {
1448 let ranges = trailing_whitespace_ranges(&old_text);
1449 let empty = Arc::<str>::from("");
1450 Diff {
1451 base_version,
1452 line_ending,
1453 edits: ranges
1454 .into_iter()
1455 .map(|range| (range, empty.clone()))
1456 .collect(),
1457 }
1458 })
1459 }
1460
1461 /// Ensures that the buffer ends with a single newline character, and
1462 /// no other whitespace.
1463 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1464 let len = self.len();
1465 let mut offset = len;
1466 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1467 let non_whitespace_len = chunk
1468 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1469 .len();
1470 offset -= chunk.len();
1471 offset += non_whitespace_len;
1472 if non_whitespace_len != 0 {
1473 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1474 return;
1475 }
1476 break;
1477 }
1478 }
1479 self.edit([(offset..len, "\n")], None, cx);
1480 }
1481
1482 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1483 /// calculated, then adjust the diff to account for those changes, and discard any
1484 /// parts of the diff that conflict with those changes.
1485 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1486 // Check for any edits to the buffer that have occurred since this diff
1487 // was computed.
1488 let snapshot = self.snapshot();
1489 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1490 let mut delta = 0;
1491 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1492 while let Some(edit_since) = edits_since.peek() {
1493 // If the edit occurs after a diff hunk, then it does not
1494 // affect that hunk.
1495 if edit_since.old.start > range.end {
1496 break;
1497 }
1498 // If the edit precedes the diff hunk, then adjust the hunk
1499 // to reflect the edit.
1500 else if edit_since.old.end < range.start {
1501 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1502 edits_since.next();
1503 }
1504 // If the edit intersects a diff hunk, then discard that hunk.
1505 else {
1506 return None;
1507 }
1508 }
1509
1510 let start = (range.start as i64 + delta) as usize;
1511 let end = (range.end as i64 + delta) as usize;
1512 Some((start..end, new_text))
1513 });
1514
1515 self.start_transaction();
1516 self.text.set_line_ending(diff.line_ending);
1517 self.edit(adjusted_edits, None, cx);
1518 self.end_transaction(cx)
1519 }
1520
1521 fn changed_since_saved_version(&self) -> bool {
1522 self.edits_since::<usize>(&self.saved_version)
1523 .next()
1524 .is_some()
1525 }
1526 /// Checks if the buffer has unsaved changes.
1527 pub fn is_dirty(&self) -> bool {
1528 (self.has_conflict || self.changed_since_saved_version())
1529 || self
1530 .file
1531 .as_ref()
1532 .map_or(false, |file| file.is_deleted() || !file.is_created())
1533 }
1534
1535 /// Checks if the buffer and its file have both changed since the buffer
1536 /// was last saved or reloaded.
1537 pub fn has_conflict(&self) -> bool {
1538 (self.has_conflict || self.changed_since_saved_version())
1539 && self
1540 .file
1541 .as_ref()
1542 .map_or(false, |file| file.mtime() > self.saved_mtime)
1543 }
1544
1545 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1546 pub fn subscribe(&mut self) -> Subscription {
1547 self.text.subscribe()
1548 }
1549
1550 /// Starts a transaction, if one is not already in-progress. When undoing or
1551 /// redoing edits, all of the edits performed within a transaction are undone
1552 /// or redone together.
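    ///
    /// # Example
    ///
    /// A hedged sketch (not compiled here): both edits below belong to one
    /// transaction, so a single undo reverts them together.
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "// header\n")], None, cx);
    /// buffer.edit([(0..0, "// license\n")], None, cx);
    /// // Both edits above are grouped under this single transaction id.
    /// let transaction_id = buffer.end_transaction(cx);
    /// ```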
1553 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1554 self.start_transaction_at(Instant::now())
1555 }
1556
1557 /// Starts a transaction, providing the current time. Subsequent transactions
1558 /// that occur within a short period of time will be grouped together. This
1559 /// is controlled by the buffer's undo grouping duration.
1560 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1561 self.transaction_depth += 1;
1562 if self.was_dirty_before_starting_transaction.is_none() {
1563 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1564 }
1565 self.text.start_transaction_at(now)
1566 }
1567
1568 /// Terminates the current transaction, if this is the outermost transaction.
1569 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1570 self.end_transaction_at(Instant::now(), cx)
1571 }
1572
1573 /// Terminates the current transaction, providing the current time. Subsequent transactions
1574 /// that occur within a short period of time will be grouped together. This
1575 /// is controlled by the buffer's undo grouping duration.
1576 pub fn end_transaction_at(
1577 &mut self,
1578 now: Instant,
1579 cx: &mut ModelContext<Self>,
1580 ) -> Option<TransactionId> {
1581 assert!(self.transaction_depth > 0);
1582 self.transaction_depth -= 1;
1583 let was_dirty = if self.transaction_depth == 0 {
1584 self.was_dirty_before_starting_transaction.take().unwrap()
1585 } else {
1586 false
1587 };
1588 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1589 self.did_edit(&start_version, was_dirty, cx);
1590 Some(transaction_id)
1591 } else {
1592 None
1593 }
1594 }
1595
1596 /// Manually add a transaction to the buffer's undo history.
1597 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1598 self.text.push_transaction(transaction, now);
1599 }
1600
1601 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
1603 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1604 self.text.finalize_last_transaction()
1605 }
1606
1607 /// Manually group all changes since a given transaction.
1608 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1609 self.text.group_until_transaction(transaction_id);
1610 }
1611
    /// Manually remove a transaction from the buffer's undo history.
1613 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1614 self.text.forget_transaction(transaction_id);
1615 }
1616
1617 /// Manually merge two adjacent transactions in the buffer's undo history.
1618 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1619 self.text.merge_transactions(transaction, destination);
1620 }
1621
1622 /// Waits for the buffer to receive operations with the given timestamps.
1623 pub fn wait_for_edits(
1624 &mut self,
1625 edit_ids: impl IntoIterator<Item = clock::Lamport>,
1626 ) -> impl Future<Output = Result<()>> {
1627 self.text.wait_for_edits(edit_ids)
1628 }
1629
1630 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
1631 pub fn wait_for_anchors(
1632 &mut self,
1633 anchors: impl IntoIterator<Item = Anchor>,
1634 ) -> impl 'static + Future<Output = Result<()>> {
1635 self.text.wait_for_anchors(anchors)
1636 }
1637
1638 /// Waits for the buffer to receive operations up to the given version.
1639 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1640 self.text.wait_for_version(version)
1641 }
1642
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
1645 pub fn give_up_waiting(&mut self) {
1646 self.text.give_up_waiting();
1647 }
1648
1649 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
1650 pub fn set_active_selections(
1651 &mut self,
1652 selections: Arc<[Selection<Anchor>]>,
1653 line_mode: bool,
1654 cursor_shape: CursorShape,
1655 cx: &mut ModelContext<Self>,
1656 ) {
1657 let lamport_timestamp = self.text.lamport_clock.tick();
1658 self.remote_selections.insert(
1659 self.text.replica_id(),
1660 SelectionSet {
1661 selections: selections.clone(),
1662 lamport_timestamp,
1663 line_mode,
1664 cursor_shape,
1665 },
1666 );
1667 self.send_operation(
1668 Operation::UpdateSelections {
1669 selections,
1670 line_mode,
1671 lamport_timestamp,
1672 cursor_shape,
1673 },
1674 cx,
1675 );
1676 }
1677
1678 /// Clears the selections, so that other replicas of the buffer do not see any selections for
1679 /// this replica.
1680 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1681 if self
1682 .remote_selections
1683 .get(&self.text.replica_id())
1684 .map_or(true, |set| !set.selections.is_empty())
1685 {
1686 self.set_active_selections(Arc::from([]), false, Default::default(), cx);
1687 }
1688 }
1689
1690 /// Replaces the buffer's entire text.
1691 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Lamport>
1692 where
1693 T: Into<Arc<str>>,
1694 {
1695 self.autoindent_requests.clear();
1696 self.edit([(0..self.len(), text)], None, cx)
1697 }
1698
1699 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
1700 /// delete, and a string of text to insert at that location.
1701 ///
1702 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
1703 /// request for the edited ranges, which will be processed when the buffer finishes
1704 /// parsing.
1705 ///
    /// Parsing takes place at the end of a transaction, and may be performed
    /// synchronously or asynchronously, depending on the changes.
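    ///
    /// # Example
    ///
    /// A minimal sketch (not compiled here); the offsets and inserted text are
    /// illustrative:
    ///
    /// ```ignore
    /// let end = buffer.len();
    /// // Replace the first two bytes and append to the end, auto-indenting each
    /// // inserted line.
    /// buffer.edit(
    ///     [(0..2, "fn"), (end..end, "\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```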
1708 pub fn edit<I, S, T>(
1709 &mut self,
1710 edits_iter: I,
1711 autoindent_mode: Option<AutoindentMode>,
1712 cx: &mut ModelContext<Self>,
1713 ) -> Option<clock::Lamport>
1714 where
1715 I: IntoIterator<Item = (Range<S>, T)>,
1716 S: ToOffset,
1717 T: Into<Arc<str>>,
1718 {
1719 // Skip invalid edits and coalesce contiguous ones.
1720 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1721 for (range, new_text) in edits_iter {
1722 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1723 if range.start > range.end {
1724 mem::swap(&mut range.start, &mut range.end);
1725 }
1726 let new_text = new_text.into();
1727 if !new_text.is_empty() || !range.is_empty() {
1728 if let Some((prev_range, prev_text)) = edits.last_mut() {
1729 if prev_range.end >= range.start {
1730 prev_range.end = cmp::max(prev_range.end, range.end);
1731 *prev_text = format!("{prev_text}{new_text}").into();
1732 } else {
1733 edits.push((range, new_text));
1734 }
1735 } else {
1736 edits.push((range, new_text));
1737 }
1738 }
1739 }
1740 if edits.is_empty() {
1741 return None;
1742 }
1743
1744 self.start_transaction();
1745 self.pending_autoindent.take();
1746 let autoindent_request = autoindent_mode
1747 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1748
1749 let edit_operation = self.text.edit(edits.iter().cloned());
1750 let edit_id = edit_operation.timestamp();
1751
1752 if let Some((before_edit, mode)) = autoindent_request {
1753 let mut delta = 0isize;
1754 let entries = edits
1755 .into_iter()
1756 .enumerate()
1757 .zip(&edit_operation.as_edit().unwrap().new_text)
1758 .map(|((ix, (range, _)), new_text)| {
1759 let new_text_length = new_text.len();
1760 let old_start = range.start.to_point(&before_edit);
1761 let new_start = (delta + range.start as isize) as usize;
1762 delta += new_text_length as isize - (range.end as isize - range.start as isize);
1763
1764 let mut range_of_insertion_to_indent = 0..new_text_length;
1765 let mut first_line_is_new = false;
1766 let mut original_indent_column = None;
1767
1768 // When inserting an entire line at the beginning of an existing line,
1769 // treat the insertion as new.
1770 if new_text.contains('\n')
1771 && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
1772 {
1773 first_line_is_new = true;
1774 }
1775
1776 // When inserting text starting with a newline, avoid auto-indenting the
1777 // previous line.
1778 if new_text.starts_with('\n') {
1779 range_of_insertion_to_indent.start += 1;
1780 first_line_is_new = true;
1781 }
1782
1783 // Avoid auto-indenting after the insertion.
1784 if let AutoindentMode::Block {
1785 original_indent_columns,
1786 } = &mode
1787 {
1788 original_indent_column =
1789 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
1790 indent_size_for_text(
1791 new_text[range_of_insertion_to_indent.clone()].chars(),
1792 )
1793 .len
1794 }));
1795 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
1796 range_of_insertion_to_indent.end -= 1;
1797 }
1798 }
1799
1800 AutoindentRequestEntry {
1801 first_line_is_new,
1802 original_indent_column,
1803 indent_size: before_edit.language_indent_size_at(range.start, cx),
1804 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
1805 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
1806 }
1807 })
1808 .collect();
1809
1810 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1811 before_edit,
1812 entries,
1813 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
1814 }));
1815 }
1816
1817 self.end_transaction(cx);
1818 self.send_operation(Operation::Buffer(edit_operation), cx);
1819 Some(edit_id)
1820 }
1821
1822 fn did_edit(
1823 &mut self,
1824 old_version: &clock::Global,
1825 was_dirty: bool,
1826 cx: &mut ModelContext<Self>,
1827 ) {
1828 if self.edits_since::<usize>(old_version).next().is_none() {
1829 return;
1830 }
1831
1832 self.reparse(cx);
1833
1834 cx.emit(Event::Edited);
1835 if was_dirty != self.is_dirty() {
1836 cx.emit(Event::DirtyChanged);
1837 }
1838 cx.notify();
1839 }
1840
1841 /// Applies the given remote operations to the buffer.
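    ///
    /// A rough sketch of how replicas are kept in sync (illustrative only, not compiled as
    /// a doc-test); the transport between peers (`send_to_peers`) and the buffer handles
    /// are assumptions:
    ///
    /// ```ignore
    /// // On the replica that made a change, forward emitted operations to peers.
    /// cx.subscribe(&local_buffer, |_, _, event, _| {
    ///     if let Event::Operation(operation) = event {
    ///         send_to_peers(operation.clone()); // hypothetical transport
    ///     }
    /// })
    /// .detach();
    ///
    /// // On a receiving replica, apply the operations as they arrive.
    /// remote_buffer.update(cx, |buffer, cx| buffer.apply_ops([operation], cx))?;
    /// ```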
1842 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1843 &mut self,
1844 ops: I,
1845 cx: &mut ModelContext<Self>,
1846 ) -> Result<()> {
1847 self.pending_autoindent.take();
1848 let was_dirty = self.is_dirty();
1849 let old_version = self.version.clone();
1850 let mut deferred_ops = Vec::new();
1851 let buffer_ops = ops
1852 .into_iter()
1853 .filter_map(|op| match op {
1854 Operation::Buffer(op) => Some(op),
1855 _ => {
1856 if self.can_apply_op(&op) {
1857 self.apply_op(op, cx);
1858 } else {
1859 deferred_ops.push(op);
1860 }
1861 None
1862 }
1863 })
1864 .collect::<Vec<_>>();
1865 self.text.apply_ops(buffer_ops)?;
1866 self.deferred_ops.insert(deferred_ops);
1867 self.flush_deferred_ops(cx);
1868 self.did_edit(&old_version, was_dirty, cx);
1869 // Notify independently of whether the buffer was edited as the operations could include a
1870 // selection update.
1871 cx.notify();
1872 Ok(())
1873 }
1874
1875 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1876 let mut deferred_ops = Vec::new();
1877 for op in self.deferred_ops.drain().iter().cloned() {
1878 if self.can_apply_op(&op) {
1879 self.apply_op(op, cx);
1880 } else {
1881 deferred_ops.push(op);
1882 }
1883 }
1884 self.deferred_ops.insert(deferred_ops);
1885 }
1886
1887 fn can_apply_op(&self, operation: &Operation) -> bool {
1888 match operation {
1889 Operation::Buffer(_) => {
1890 unreachable!("buffer operations should never be applied at this layer")
1891 }
1892 Operation::UpdateDiagnostics {
1893 diagnostics: diagnostic_set,
1894 ..
1895 } => diagnostic_set.iter().all(|diagnostic| {
1896 self.text.can_resolve(&diagnostic.range.start)
1897 && self.text.can_resolve(&diagnostic.range.end)
1898 }),
1899 Operation::UpdateSelections { selections, .. } => selections
1900 .iter()
1901 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1902 Operation::UpdateCompletionTriggers { .. } => true,
1903 }
1904 }
1905
1906 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1907 match operation {
1908 Operation::Buffer(_) => {
1909 unreachable!("buffer operations should never be applied at this layer")
1910 }
1911 Operation::UpdateDiagnostics {
1912 server_id,
1913 diagnostics: diagnostic_set,
1914 lamport_timestamp,
1915 } => {
1916 let snapshot = self.snapshot();
1917 self.apply_diagnostic_update(
1918 server_id,
1919 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1920 lamport_timestamp,
1921 cx,
1922 );
1923 }
1924 Operation::UpdateSelections {
1925 selections,
1926 lamport_timestamp,
1927 line_mode,
1928 cursor_shape,
1929 } => {
1930 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
1931 if set.lamport_timestamp > lamport_timestamp {
1932 return;
1933 }
1934 }
1935
1936 self.remote_selections.insert(
1937 lamport_timestamp.replica_id,
1938 SelectionSet {
1939 selections,
1940 lamport_timestamp,
1941 line_mode,
1942 cursor_shape,
1943 },
1944 );
1945 self.text.lamport_clock.observe(lamport_timestamp);
1946 self.selections_update_count += 1;
1947 }
1948 Operation::UpdateCompletionTriggers {
1949 triggers,
1950 lamport_timestamp,
1951 } => {
1952 self.completion_triggers = triggers;
1953 self.text.lamport_clock.observe(lamport_timestamp);
1954 }
1955 }
1956 }
1957
1958 fn apply_diagnostic_update(
1959 &mut self,
1960 server_id: LanguageServerId,
1961 diagnostics: DiagnosticSet,
1962 lamport_timestamp: clock::Lamport,
1963 cx: &mut ModelContext<Self>,
1964 ) {
1965 if lamport_timestamp > self.diagnostics_timestamp {
1966 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
1967 if diagnostics.len() == 0 {
1968 if let Ok(ix) = ix {
1969 self.diagnostics.remove(ix);
1970 }
1971 } else {
1972 match ix {
1973 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
1974 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
1975 };
1976 }
1977 self.diagnostics_timestamp = lamport_timestamp;
1978 self.diagnostics_update_count += 1;
1979 self.text.lamport_clock.observe(lamport_timestamp);
1980 cx.notify();
1981 cx.emit(Event::DiagnosticsUpdated);
1982 }
1983 }
1984
1985 fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1986 cx.emit(Event::Operation(operation));
1987 }
1988
1989 /// Removes the selections for a given peer.
1990 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
1991 self.remote_selections.remove(&replica_id);
1992 cx.notify();
1993 }
1994
1995 /// Undoes the most recent transaction.
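    ///
    /// A minimal sketch (illustrative only, not compiled as a doc-test), assuming a `buffer`
    /// model handle and a gpui context `cx`:
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     // Reverts the most recent transaction, if any, returning its id.
    ///     if let Some(transaction_id) = buffer.undo(cx) {
    ///         log::info!("undid transaction {transaction_id:?}");
    ///     }
    /// });
    /// ```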
1996 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1997 let was_dirty = self.is_dirty();
1998 let old_version = self.version.clone();
1999
2000 if let Some((transaction_id, operation)) = self.text.undo() {
2001 self.send_operation(Operation::Buffer(operation), cx);
2002 self.did_edit(&old_version, was_dirty, cx);
2003 Some(transaction_id)
2004 } else {
2005 None
2006 }
2007 }
2008
2009 /// Manually undoes a specific transaction in the buffer's undo history.
2010 pub fn undo_transaction(
2011 &mut self,
2012 transaction_id: TransactionId,
2013 cx: &mut ModelContext<Self>,
2014 ) -> bool {
2015 let was_dirty = self.is_dirty();
2016 let old_version = self.version.clone();
2017 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2018 self.send_operation(Operation::Buffer(operation), cx);
2019 self.did_edit(&old_version, was_dirty, cx);
2020 true
2021 } else {
2022 false
2023 }
2024 }
2025
2026 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2027 pub fn undo_to_transaction(
2028 &mut self,
2029 transaction_id: TransactionId,
2030 cx: &mut ModelContext<Self>,
2031 ) -> bool {
2032 let was_dirty = self.is_dirty();
2033 let old_version = self.version.clone();
2034
2035 let operations = self.text.undo_to_transaction(transaction_id);
2036 let undone = !operations.is_empty();
2037 for operation in operations {
2038 self.send_operation(Operation::Buffer(operation), cx);
2039 }
2040 if undone {
2041 self.did_edit(&old_version, was_dirty, cx)
2042 }
2043 undone
2044 }
2045
    /// Redoes the most recently undone transaction.
2047 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2048 let was_dirty = self.is_dirty();
2049 let old_version = self.version.clone();
2050
2051 if let Some((transaction_id, operation)) = self.text.redo() {
2052 self.send_operation(Operation::Buffer(operation), cx);
2053 self.did_edit(&old_version, was_dirty, cx);
2054 Some(transaction_id)
2055 } else {
2056 None
2057 }
2058 }
2059
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2061 pub fn redo_to_transaction(
2062 &mut self,
2063 transaction_id: TransactionId,
2064 cx: &mut ModelContext<Self>,
2065 ) -> bool {
2066 let was_dirty = self.is_dirty();
2067 let old_version = self.version.clone();
2068
2069 let operations = self.text.redo_to_transaction(transaction_id);
2070 let redone = !operations.is_empty();
2071 for operation in operations {
2072 self.send_operation(Operation::Buffer(operation), cx);
2073 }
2074 if redone {
2075 self.did_edit(&old_version, was_dirty, cx)
2076 }
2077 redone
2078 }
2079
    /// Overrides the current completion triggers with the user-provided completion triggers.
2081 pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
2082 self.completion_triggers.clone_from(&triggers);
2083 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2084 self.send_operation(
2085 Operation::UpdateCompletionTriggers {
2086 triggers,
2087 lamport_timestamp: self.completion_triggers_timestamp,
2088 },
2089 cx,
2090 );
2091 cx.notify();
2092 }
2093
    /// Returns the list of strings that trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2096 pub fn completion_triggers(&self) -> &[String] {
2097 &self.completion_triggers
2098 }
2099}
2100
2101#[doc(hidden)]
2102#[cfg(any(test, feature = "test-support"))]
2103impl Buffer {
2104 pub fn edit_via_marked_text(
2105 &mut self,
2106 marked_string: &str,
2107 autoindent_mode: Option<AutoindentMode>,
2108 cx: &mut ModelContext<Self>,
2109 ) {
2110 let edits = self.edits_for_marked_text(marked_string);
2111 self.edit(edits, autoindent_mode, cx);
2112 }
2113
2114 pub fn set_group_interval(&mut self, group_interval: Duration) {
2115 self.text.set_group_interval(group_interval);
2116 }
2117
2118 pub fn randomly_edit<T>(
2119 &mut self,
2120 rng: &mut T,
2121 old_range_count: usize,
2122 cx: &mut ModelContext<Self>,
2123 ) where
2124 T: rand::Rng,
2125 {
2126 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2127 let mut last_end = None;
2128 for _ in 0..old_range_count {
2129 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2130 break;
2131 }
2132
2133 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2134 let mut range = self.random_byte_range(new_start, rng);
2135 if rng.gen_bool(0.2) {
2136 mem::swap(&mut range.start, &mut range.end);
2137 }
2138 last_end = Some(range.end);
2139
2140 let new_text_len = rng.gen_range(0..10);
2141 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2142
2143 edits.push((range, new_text));
2144 }
2145 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2146 self.edit(edits, None, cx);
2147 }
2148
2149 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
2150 let was_dirty = self.is_dirty();
2151 let old_version = self.version.clone();
2152
2153 let ops = self.text.randomly_undo_redo(rng);
2154 if !ops.is_empty() {
2155 for op in ops {
2156 self.send_operation(Operation::Buffer(op), cx);
2157 self.did_edit(&old_version, was_dirty, cx);
2158 }
2159 }
2160 }
2161}
2162
2163impl EventEmitter<Event> for Buffer {}
2164
2165impl Deref for Buffer {
2166 type Target = TextBuffer;
2167
2168 fn deref(&self) -> &Self::Target {
2169 &self.text
2170 }
2171}
2172
2173impl BufferSnapshot {
    /// Returns [`IndentSize`] for a given line that respects user settings
    /// and language preferences.
    pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
        indent_size_for_line(self, row)
    }

    /// Returns [`IndentSize`] for a given position that respects user settings
2179 /// and language preferences.
2180 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
2181 let settings = language_settings(self.language_at(position), self.file(), cx);
2182 if settings.hard_tabs {
2183 IndentSize::tab()
2184 } else {
2185 IndentSize::spaces(settings.tab_size.get())
2186 }
2187 }
2188
2189 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2190 /// is passed in as `single_indent_size`.
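    ///
    /// A minimal sketch (illustrative only, not compiled as a doc-test), assuming a
    /// `snapshot` of a buffer whose language has indentation queries configured:
    ///
    /// ```ignore
    /// // Ask for suggestions for rows 2 and 3, using four spaces as one indent unit.
    /// let suggestions = snapshot.suggested_indents([2, 3].into_iter(), IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: {} column(s) of indentation", indent.len);
    /// }
    /// ```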
2191 pub fn suggested_indents(
2192 &self,
2193 rows: impl Iterator<Item = u32>,
2194 single_indent_size: IndentSize,
2195 ) -> BTreeMap<u32, IndentSize> {
2196 let mut result = BTreeMap::new();
2197
2198 for row_range in contiguous_ranges(rows, 10) {
2199 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2200 Some(suggestions) => suggestions,
2201 _ => break,
2202 };
2203
2204 for (row, suggestion) in row_range.zip(suggestions) {
2205 let indent_size = if let Some(suggestion) = suggestion {
2206 result
2207 .get(&suggestion.basis_row)
2208 .copied()
2209 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2210 .with_delta(suggestion.delta, single_indent_size)
2211 } else {
2212 self.indent_size_for_line(row)
2213 };
2214
2215 result.insert(row, indent_size);
2216 }
2217 }
2218
2219 result
2220 }
2221
2222 fn suggest_autoindents(
2223 &self,
2224 row_range: Range<u32>,
2225 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2226 let config = &self.language.as_ref()?.config;
2227 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2228
2229 // Find the suggested indentation ranges based on the syntax tree.
2230 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2231 let end = Point::new(row_range.end, 0);
2232 let range = (start..end).to_offset(&self.text);
2233 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2234 Some(&grammar.indents_config.as_ref()?.query)
2235 });
2236 let indent_configs = matches
2237 .grammars()
2238 .iter()
2239 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2240 .collect::<Vec<_>>();
2241
2242 let mut indent_ranges = Vec::<Range<Point>>::new();
2243 let mut outdent_positions = Vec::<Point>::new();
2244 while let Some(mat) = matches.peek() {
2245 let mut start: Option<Point> = None;
2246 let mut end: Option<Point> = None;
2247
2248 let config = &indent_configs[mat.grammar_index];
2249 for capture in mat.captures {
2250 if capture.index == config.indent_capture_ix {
2251 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2252 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2253 } else if Some(capture.index) == config.start_capture_ix {
2254 start = Some(Point::from_ts_point(capture.node.end_position()));
2255 } else if Some(capture.index) == config.end_capture_ix {
2256 end = Some(Point::from_ts_point(capture.node.start_position()));
2257 } else if Some(capture.index) == config.outdent_capture_ix {
2258 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2259 }
2260 }
2261
2262 matches.advance();
2263 if let Some((start, end)) = start.zip(end) {
2264 if start.row == end.row {
2265 continue;
2266 }
2267
2268 let range = start..end;
2269 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2270 Err(ix) => indent_ranges.insert(ix, range),
2271 Ok(ix) => {
2272 let prev_range = &mut indent_ranges[ix];
2273 prev_range.end = prev_range.end.max(range.end);
2274 }
2275 }
2276 }
2277 }
2278
2279 let mut error_ranges = Vec::<Range<Point>>::new();
2280 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2281 Some(&grammar.error_query)
2282 });
2283 while let Some(mat) = matches.peek() {
2284 let node = mat.captures[0].node;
2285 let start = Point::from_ts_point(node.start_position());
2286 let end = Point::from_ts_point(node.end_position());
2287 let range = start..end;
2288 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2289 Ok(ix) | Err(ix) => ix,
2290 };
2291 let mut end_ix = ix;
2292 while let Some(existing_range) = error_ranges.get(end_ix) {
2293 if existing_range.end < end {
2294 end_ix += 1;
2295 } else {
2296 break;
2297 }
2298 }
2299 error_ranges.splice(ix..end_ix, [range]);
2300 matches.advance();
2301 }
2302
2303 outdent_positions.sort();
2304 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position
            // and set its end to the outdent position.
2307 if let Some(range_to_truncate) = indent_ranges
2308 .iter_mut()
2309 .filter(|indent_range| indent_range.contains(&outdent_position))
2310 .last()
2311 {
2312 range_to_truncate.end = outdent_position;
2313 }
2314 }
2315
        // Find the suggested indentation increases and decreases based on regexes.
2317 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2318 self.for_each_line(
2319 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2320 ..Point::new(row_range.end, 0),
2321 |row, line| {
2322 if config
2323 .decrease_indent_pattern
2324 .as_ref()
2325 .map_or(false, |regex| regex.is_match(line))
2326 {
2327 indent_change_rows.push((row, Ordering::Less));
2328 }
2329 if config
2330 .increase_indent_pattern
2331 .as_ref()
2332 .map_or(false, |regex| regex.is_match(line))
2333 {
2334 indent_change_rows.push((row + 1, Ordering::Greater));
2335 }
2336 },
2337 );
2338
2339 let mut indent_changes = indent_change_rows.into_iter().peekable();
2340 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2341 prev_non_blank_row.unwrap_or(0)
2342 } else {
2343 row_range.start.saturating_sub(1)
2344 };
2345 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2346 Some(row_range.map(move |row| {
2347 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2348
2349 let mut indent_from_prev_row = false;
2350 let mut outdent_from_prev_row = false;
2351 let mut outdent_to_row = u32::MAX;
2352
2353 while let Some((indent_row, delta)) = indent_changes.peek() {
2354 match indent_row.cmp(&row) {
2355 Ordering::Equal => match delta {
2356 Ordering::Less => outdent_from_prev_row = true,
2357 Ordering::Greater => indent_from_prev_row = true,
2358 _ => {}
2359 },
2360
2361 Ordering::Greater => break,
2362 Ordering::Less => {}
2363 }
2364
2365 indent_changes.next();
2366 }
2367
2368 for range in &indent_ranges {
2369 if range.start.row >= row {
2370 break;
2371 }
2372 if range.start.row == prev_row && range.end > row_start {
2373 indent_from_prev_row = true;
2374 }
2375 if range.end > prev_row_start && range.end <= row_start {
2376 outdent_to_row = outdent_to_row.min(range.start.row);
2377 }
2378 }
2379
2380 let within_error = error_ranges
2381 .iter()
2382 .any(|e| e.start.row < row && e.end > row_start);
2383
2384 let suggestion = if outdent_to_row == prev_row
2385 || (outdent_from_prev_row && indent_from_prev_row)
2386 {
2387 Some(IndentSuggestion {
2388 basis_row: prev_row,
2389 delta: Ordering::Equal,
2390 within_error,
2391 })
2392 } else if indent_from_prev_row {
2393 Some(IndentSuggestion {
2394 basis_row: prev_row,
2395 delta: Ordering::Greater,
2396 within_error,
2397 })
2398 } else if outdent_to_row < prev_row {
2399 Some(IndentSuggestion {
2400 basis_row: outdent_to_row,
2401 delta: Ordering::Equal,
2402 within_error,
2403 })
2404 } else if outdent_from_prev_row {
2405 Some(IndentSuggestion {
2406 basis_row: prev_row,
2407 delta: Ordering::Less,
2408 within_error,
2409 })
2410 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2411 {
2412 Some(IndentSuggestion {
2413 basis_row: prev_row,
2414 delta: Ordering::Equal,
2415 within_error,
2416 })
2417 } else {
2418 None
2419 };
2420
2421 prev_row = row;
2422 prev_row_start = row_start;
2423 suggestion
2424 }))
2425 }
2426
2427 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2428 while row > 0 {
2429 row -= 1;
2430 if !self.is_line_blank(row) {
2431 return Some(row);
2432 }
2433 }
2434 None
2435 }
2436
2437 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
2438 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
2439 /// returned in chunks where each chunk has a single syntax highlighting style and
2440 /// diagnostic status.
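    ///
    /// A minimal sketch (illustrative only, not compiled as a doc-test), assuming a
    /// `snapshot: BufferSnapshot` is in scope:
    ///
    /// ```ignore
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // Each chunk carries at most one syntax highlight id and one diagnostic severity.
    ///     if chunk.syntax_highlight_id.is_some() || chunk.diagnostic_severity.is_some() {
    ///         // ...style the chunk accordingly...
    ///     }
    ///     print!("{}", chunk.text);
    /// }
    /// ```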
2441 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2442 let range = range.start.to_offset(self)..range.end.to_offset(self);
2443
2444 let mut syntax = None;
2445 let mut diagnostic_endpoints = Vec::new();
2446 if language_aware {
2447 let captures = self.syntax.captures(range.clone(), &self.text, |grammar| {
2448 grammar.highlights_query.as_ref()
2449 });
2450 let highlight_maps = captures
2451 .grammars()
2452 .into_iter()
2453 .map(|grammar| grammar.highlight_map())
2454 .collect();
2455 syntax = Some((captures, highlight_maps));
2456 for entry in self.diagnostics_in_range::<_, usize>(range.clone(), false) {
2457 diagnostic_endpoints.push(DiagnosticEndpoint {
2458 offset: entry.range.start,
2459 is_start: true,
2460 severity: entry.diagnostic.severity,
2461 is_unnecessary: entry.diagnostic.is_unnecessary,
2462 });
2463 diagnostic_endpoints.push(DiagnosticEndpoint {
2464 offset: entry.range.end,
2465 is_start: false,
2466 severity: entry.diagnostic.severity,
2467 is_unnecessary: entry.diagnostic.is_unnecessary,
2468 });
2469 }
2470 diagnostic_endpoints
2471 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
2472 }
2473
2474 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostic_endpoints)
2475 }
2476
2477 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a new string for each line.
2479 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2480 let mut line = String::new();
2481 let mut row = range.start.row;
2482 for chunk in self
2483 .as_rope()
2484 .chunks_in_range(range.to_offset(self))
2485 .chain(["\n"])
2486 {
2487 for (newline_ix, text) in chunk.split('\n').enumerate() {
2488 if newline_ix > 0 {
2489 callback(row, &line);
2490 row += 1;
2491 line.clear();
2492 }
2493 line.push_str(text);
2494 }
2495 }
2496 }
2497
2498 /// Iterates over every [`SyntaxLayer`] in the buffer.
2499 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
2500 self.syntax.layers_for_range(0..self.len(), &self.text)
2501 }
2502
    /// Returns the deepest [`SyntaxLayer`] containing the given position, if any.
    pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
2504 let offset = position.to_offset(self);
2505 self.syntax
2506 .layers_for_range(offset..offset, &self.text)
2507 .filter(|l| l.node().end_byte() > offset)
2508 .last()
2509 }
2510
    /// Returns the main [Language] of the buffer.
2512 pub fn language(&self) -> Option<&Arc<Language>> {
2513 self.language.as_ref()
2514 }
2515
2516 /// Returns the [Language] at the given location.
2517 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2518 self.syntax_layer_at(position)
2519 .map(|info| info.language)
2520 .or(self.language.as_ref())
2521 }
2522
2523 /// Returns the settings for the language at the given location.
2524 pub fn settings_at<'a, D: ToOffset>(
2525 &self,
2526 position: D,
2527 cx: &'a AppContext,
2528 ) -> &'a LanguageSettings {
2529 language_settings(self.language_at(position), self.file.as_ref(), cx)
2530 }
2531
2532 /// Returns the [LanguageScope] at the given location.
2533 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2534 let offset = position.to_offset(self);
2535 let mut scope = None;
2536 let mut smallest_range: Option<Range<usize>> = None;
2537
2538 // Use the layer that has the smallest node intersecting the given point.
2539 for layer in self.syntax.layers_for_range(offset..offset, &self.text) {
2540 let mut cursor = layer.node().walk();
2541
2542 let mut range = None;
2543 loop {
2544 let child_range = cursor.node().byte_range();
2545 if !child_range.to_inclusive().contains(&offset) {
2546 break;
2547 }
2548
2549 range = Some(child_range);
2550 if cursor.goto_first_child_for_byte(offset).is_none() {
2551 break;
2552 }
2553 }
2554
2555 if let Some(range) = range {
2556 if smallest_range
2557 .as_ref()
2558 .map_or(true, |smallest_range| range.len() < smallest_range.len())
2559 {
2560 smallest_range = Some(range);
2561 scope = Some(LanguageScope {
2562 language: layer.language.clone(),
2563 override_id: layer.override_id(offset, &self.text),
2564 });
2565 }
2566 }
2567 }
2568
2569 scope.or_else(|| {
2570 self.language.clone().map(|language| LanguageScope {
2571 language,
2572 override_id: None,
2573 })
2574 })
2575 }
2576
2577 /// Returns a tuple of the range and character kind of the word
2578 /// surrounding the given position.
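    ///
    /// A small sketch of the expected behavior (illustrative only, not compiled as a
    /// doc-test), assuming a `snapshot` whose text is `"let foo = 1;"`:
    ///
    /// ```ignore
    /// // With the position inside "foo", the word's range and kind are returned.
    /// let (range, kind) = snapshot.surrounding_word(5);
    /// assert_eq!(range, 4..7);
    /// assert_eq!(kind, Some(CharKind::Word));
    /// ```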
2579 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2580 let mut start = start.to_offset(self);
2581 let mut end = start;
2582 let mut next_chars = self.chars_at(start).peekable();
2583 let mut prev_chars = self.reversed_chars_at(start).peekable();
2584
2585 let scope = self.language_scope_at(start);
2586 let kind = |c| char_kind(&scope, c);
2587 let word_kind = cmp::max(
2588 prev_chars.peek().copied().map(kind),
2589 next_chars.peek().copied().map(kind),
2590 );
2591
2592 for ch in prev_chars {
2593 if Some(kind(ch)) == word_kind && ch != '\n' {
2594 start -= ch.len_utf8();
2595 } else {
2596 break;
2597 }
2598 }
2599
2600 for ch in next_chars {
2601 if Some(kind(ch)) == word_kind && ch != '\n' {
2602 end += ch.len_utf8();
2603 } else {
2604 break;
2605 }
2606 }
2607
2608 (start..end, word_kind)
2609 }
2610
    /// Returns the range for the closest syntax node enclosing the given range.
2612 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2613 let range = range.start.to_offset(self)..range.end.to_offset(self);
2614 let mut result: Option<Range<usize>> = None;
2615 'outer: for layer in self.syntax.layers_for_range(range.clone(), &self.text) {
2616 let mut cursor = layer.node().walk();
2617
2618 // Descend to the first leaf that touches the start of the range,
2619 // and if the range is non-empty, extends beyond the start.
2620 while cursor.goto_first_child_for_byte(range.start).is_some() {
2621 if !range.is_empty() && cursor.node().end_byte() == range.start {
2622 cursor.goto_next_sibling();
2623 }
2624 }
2625
2626 // Ascend to the smallest ancestor that strictly contains the range.
2627 loop {
2628 let node_range = cursor.node().byte_range();
2629 if node_range.start <= range.start
2630 && node_range.end >= range.end
2631 && node_range.len() > range.len()
2632 {
2633 break;
2634 }
2635 if !cursor.goto_parent() {
2636 continue 'outer;
2637 }
2638 }
2639
2640 let left_node = cursor.node();
2641 let mut layer_result = left_node.byte_range();
2642
2643 // For an empty range, try to find another node immediately to the right of the range.
2644 if left_node.end_byte() == range.start {
2645 let mut right_node = None;
2646 while !cursor.goto_next_sibling() {
2647 if !cursor.goto_parent() {
2648 break;
2649 }
2650 }
2651
2652 while cursor.node().start_byte() == range.start {
2653 right_node = Some(cursor.node());
2654 if !cursor.goto_first_child() {
2655 break;
2656 }
2657 }
2658
2659 // If there is a candidate node on both sides of the (empty) range, then
2660 // decide between the two by favoring a named node over an anonymous token.
2661 // If both nodes are the same in that regard, favor the right one.
2662 if let Some(right_node) = right_node {
2663 if right_node.is_named() || !left_node.is_named() {
2664 layer_result = right_node.byte_range();
2665 }
2666 }
2667 }
2668
2669 if let Some(previous_result) = &result {
2670 if previous_result.len() < layer_result.len() {
2671 continue;
2672 }
2673 }
2674 result = Some(layer_result);
2675 }
2676
2677 result
2678 }
2679
2680 /// Returns the outline for the buffer.
2681 ///
2682 /// This method allows passing an optional [SyntaxTheme] to
2683 /// syntax-highlight the returned symbols.
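    ///
    /// A minimal sketch (illustrative only, not compiled as a doc-test), assuming a
    /// `snapshot` of a buffer whose language has an outline query:
    ///
    /// ```ignore
    /// if let Some(outline) = snapshot.outline(None) {
    ///     for item in &outline.items {
    ///         println!("{}{}", "  ".repeat(item.depth), item.text);
    ///     }
    /// }
    /// ```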
2684 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2685 self.outline_items_containing(0..self.len(), true, theme)
2686 .map(Outline::new)
2687 }
2688
2689 /// Returns all the symbols that contain the given position.
2690 ///
2691 /// This method allows passing an optional [SyntaxTheme] to
2692 /// syntax-highlight the returned symbols.
2693 pub fn symbols_containing<T: ToOffset>(
2694 &self,
2695 position: T,
2696 theme: Option<&SyntaxTheme>,
2697 ) -> Option<Vec<OutlineItem<Anchor>>> {
2698 let position = position.to_offset(self);
2699 let mut items = self.outline_items_containing(
2700 position.saturating_sub(1)..self.len().min(position + 1),
2701 false,
2702 theme,
2703 )?;
2704 let mut prev_depth = None;
2705 items.retain(|item| {
2706 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
2707 prev_depth = Some(item.depth);
2708 result
2709 });
2710 Some(items)
2711 }
2712
2713 fn outline_items_containing(
2714 &self,
2715 range: Range<usize>,
2716 include_extra_context: bool,
2717 theme: Option<&SyntaxTheme>,
2718 ) -> Option<Vec<OutlineItem<Anchor>>> {
2719 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2720 grammar.outline_config.as_ref().map(|c| &c.query)
2721 });
2722 let configs = matches
2723 .grammars()
2724 .iter()
2725 .map(|g| g.outline_config.as_ref().unwrap())
2726 .collect::<Vec<_>>();
2727
2728 let mut stack = Vec::<Range<usize>>::new();
2729 let mut items = Vec::new();
2730 while let Some(mat) = matches.peek() {
2731 let config = &configs[mat.grammar_index];
2732 let item_node = mat.captures.iter().find_map(|cap| {
2733 if cap.index == config.item_capture_ix {
2734 Some(cap.node)
2735 } else {
2736 None
2737 }
2738 })?;
2739
2740 let item_range = item_node.byte_range();
2741 if item_range.end < range.start || item_range.start > range.end {
2742 matches.advance();
2743 continue;
2744 }
2745
2746 let mut buffer_ranges = Vec::new();
2747 for capture in mat.captures {
2748 let node_is_name;
2749 if capture.index == config.name_capture_ix {
2750 node_is_name = true;
2751 } else if Some(capture.index) == config.context_capture_ix
2752 || (Some(capture.index) == config.extra_context_capture_ix
2753 && include_extra_context)
2754 {
2755 node_is_name = false;
2756 } else {
2757 continue;
2758 }
2759
2760 let mut range = capture.node.start_byte()..capture.node.end_byte();
2761 let start = capture.node.start_position();
2762 if capture.node.end_position().row > start.row {
2763 range.end =
2764 range.start + self.line_len(start.row as u32) as usize - start.column;
2765 }
2766
2767 if !range.is_empty() {
2768 buffer_ranges.push((range, node_is_name));
2769 }
2770 }
2771
2772 if buffer_ranges.is_empty() {
2773 matches.advance();
2774 continue;
2775 }
2776
2777 let mut text = String::new();
2778 let mut highlight_ranges = Vec::new();
2779 let mut name_ranges = Vec::new();
2780 let mut chunks = self.chunks(
2781 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
2782 true,
2783 );
2784 let mut last_buffer_range_end = 0;
2785 for (buffer_range, is_name) in buffer_ranges {
2786 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
2787 text.push(' ');
2788 }
2789 last_buffer_range_end = buffer_range.end;
2790 if is_name {
2791 let mut start = text.len();
2792 let end = start + buffer_range.len();
2793
                    // When multiple names are captured, the matchable text
                    // includes the whitespace in between the names.
2796 if !name_ranges.is_empty() {
2797 start -= 1;
2798 }
2799
2800 name_ranges.push(start..end);
2801 }
2802
2803 let mut offset = buffer_range.start;
2804 chunks.seek(offset);
2805 for mut chunk in chunks.by_ref() {
2806 if chunk.text.len() > buffer_range.end - offset {
2807 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
2808 offset = buffer_range.end;
2809 } else {
2810 offset += chunk.text.len();
2811 }
2812 let style = chunk
2813 .syntax_highlight_id
2814 .zip(theme)
2815 .and_then(|(highlight, theme)| highlight.style(theme));
2816 if let Some(style) = style {
2817 let start = text.len();
2818 let end = start + chunk.text.len();
2819 highlight_ranges.push((start..end, style));
2820 }
2821 text.push_str(chunk.text);
2822 if offset >= buffer_range.end {
2823 break;
2824 }
2825 }
2826 }
2827
2828 matches.advance();
2829 while stack.last().map_or(false, |prev_range| {
2830 prev_range.start > item_range.start || prev_range.end < item_range.end
2831 }) {
2832 stack.pop();
2833 }
2834 stack.push(item_range.clone());
2835
2836 items.push(OutlineItem {
2837 depth: stack.len() - 1,
2838 range: self.anchor_after(item_range.start)..self.anchor_before(item_range.end),
2839 text,
2840 highlight_ranges,
2841 name_ranges,
2842 })
2843 }
2844 Some(items)
2845 }
2846
2847 /// For each grammar in the language, runs the provided
2848 /// [tree_sitter::Query] against the given range.
2849 pub fn matches(
2850 &self,
2851 range: Range<usize>,
2852 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
2853 ) -> SyntaxMapMatches {
2854 self.syntax.matches(range, self, query)
2855 }
2856
2857 /// Returns bracket range pairs overlapping or adjacent to `range`
2858 pub fn bracket_ranges<T: ToOffset>(
2859 &self,
2860 range: Range<T>,
2861 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
2862 // Find bracket pairs that *inclusively* contain the given range.
2863 let range = range.start.to_offset(self).saturating_sub(1)
2864 ..self.len().min(range.end.to_offset(self) + 1);
2865
2866 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2867 grammar.brackets_config.as_ref().map(|c| &c.query)
2868 });
2869 let configs = matches
2870 .grammars()
2871 .iter()
2872 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
2873 .collect::<Vec<_>>();
2874
2875 iter::from_fn(move || {
2876 while let Some(mat) = matches.peek() {
2877 let mut open = None;
2878 let mut close = None;
2879 let config = &configs[mat.grammar_index];
2880 for capture in mat.captures {
2881 if capture.index == config.open_capture_ix {
2882 open = Some(capture.node.byte_range());
2883 } else if capture.index == config.close_capture_ix {
2884 close = Some(capture.node.byte_range());
2885 }
2886 }
2887
2888 matches.advance();
2889
2890 let Some((open, close)) = open.zip(close) else {
2891 continue;
2892 };
2893
2894 let bracket_range = open.start..=close.end;
2895 if !bracket_range.overlaps(&range) {
2896 continue;
2897 }
2898
2899 return Some((open, close));
2900 }
2901 None
2902 })
2903 }
2904
2905 /// Returns enclosing bracket ranges containing the given range
2906 pub fn enclosing_bracket_ranges<T: ToOffset>(
2907 &self,
2908 range: Range<T>,
2909 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
2910 let range = range.start.to_offset(self)..range.end.to_offset(self);
2911
2912 self.bracket_ranges(range.clone())
2913 .filter(move |(open, close)| open.start <= range.start && close.end >= range.end)
2914 }
2915
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
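    ///
    /// A minimal sketch (illustrative only, not compiled as a doc-test), assuming a
    /// `snapshot` of a buffer whose language has a bracket query:
    ///
    /// ```ignore
    /// // Returns the innermost pair of brackets enclosing the position, if any.
    /// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(10..10, None) {
    ///     println!("open bracket at {open:?}, close bracket at {close:?}");
    /// }
    /// ```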
2919 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
2920 &self,
2921 range: Range<T>,
2922 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
2923 ) -> Option<(Range<usize>, Range<usize>)> {
2924 let range = range.start.to_offset(self)..range.end.to_offset(self);
2925
2926 // Get the ranges of the innermost pair of brackets.
2927 let mut result: Option<(Range<usize>, Range<usize>)> = None;
2928
2929 for (open, close) in self.enclosing_bracket_ranges(range.clone()) {
2930 if let Some(range_filter) = range_filter {
2931 if !range_filter(open.clone(), close.clone()) {
2932 continue;
2933 }
2934 }
2935
2936 let len = close.end - open.start;
2937
2938 if let Some((existing_open, existing_close)) = &result {
2939 let existing_len = existing_close.end - existing_open.start;
2940 if len > existing_len {
2941 continue;
2942 }
2943 }
2944
2945 result = Some((open, close));
2946 }
2947
2948 result
2949 }
2950
2951 /// Returns anchor ranges for any matches of the redaction query.
2952 /// The buffer can be associated with multiple languages, and the redaction query associated with each
2953 /// will be run on the relevant section of the buffer.
2954 pub fn redacted_ranges<T: ToOffset>(
2955 &self,
2956 range: Range<T>,
2957 ) -> impl Iterator<Item = Range<usize>> + '_ {
2958 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
2959 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
2960 grammar
2961 .redactions_config
2962 .as_ref()
2963 .map(|config| &config.query)
2964 });
2965
2966 let configs = syntax_matches
2967 .grammars()
2968 .iter()
2969 .map(|grammar| grammar.redactions_config.as_ref())
2970 .collect::<Vec<_>>();
2971
2972 iter::from_fn(move || {
2973 let redacted_range = syntax_matches
2974 .peek()
2975 .and_then(|mat| {
2976 configs[mat.grammar_index].and_then(|config| {
2977 mat.captures
2978 .iter()
2979 .find(|capture| capture.index == config.redaction_capture_ix)
2980 })
2981 })
2982 .map(|mat| mat.node.byte_range());
2983 syntax_matches.advance();
2984 redacted_range
2985 })
2986 }
2987
    /// Returns the ranges of any runnables (such as tests) captured by the language's
    /// runnable query within the given range.
    pub fn runnable_ranges(
2989 &self,
2990 range: Range<Anchor>,
2991 ) -> impl Iterator<Item = (Range<usize>, Runnable)> + '_ {
2992 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
2993
2994 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
2995 grammar.runnable_config.as_ref().map(|config| &config.query)
2996 });
2997
2998 let test_configs = syntax_matches
2999 .grammars()
3000 .iter()
3001 .map(|grammar| grammar.runnable_config.as_ref())
3002 .collect::<Vec<_>>();
3003
3004 iter::from_fn(move || {
3005 let test_range = syntax_matches
3006 .peek()
3007 .and_then(|mat| {
3008 test_configs[mat.grammar_index].and_then(|test_configs| {
3009 let tags = SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
3010 test_configs.runnable_tags.get(&capture.index).cloned()
3011 }));
3012
3013 if tags.is_empty() {
3014 return None;
3015 }
3016
3017 Some((
3018 mat.captures
3019 .iter()
3020 .find(|capture| capture.index == test_configs.run_capture_ix)?,
3021 Runnable {
3022 tags,
3023 language: mat.language,
3024 buffer: self.remote_id(),
3025 },
3026 ))
3027 })
3028 })
3029 .map(|(mat, test_tags)| (mat.node.byte_range(), test_tags));
3030 syntax_matches.advance();
3031 test_range
3032 })
3033 }
3034
3035 /// Returns selections for remote peers intersecting the given range.
3036 #[allow(clippy::type_complexity)]
3037 pub fn remote_selections_in_range(
3038 &self,
3039 range: Range<Anchor>,
3040 ) -> impl Iterator<
3041 Item = (
3042 ReplicaId,
3043 bool,
3044 CursorShape,
3045 impl Iterator<Item = &Selection<Anchor>> + '_,
3046 ),
3047 > + '_ {
3048 self.remote_selections
3049 .iter()
3050 .filter(|(replica_id, set)| {
3051 **replica_id != self.text.replica_id() && !set.selections.is_empty()
3052 })
3053 .map(move |(replica_id, set)| {
3054 let start_ix = match set.selections.binary_search_by(|probe| {
3055 probe.end.cmp(&range.start, self).then(Ordering::Greater)
3056 }) {
3057 Ok(ix) | Err(ix) => ix,
3058 };
3059 let end_ix = match set.selections.binary_search_by(|probe| {
3060 probe.start.cmp(&range.end, self).then(Ordering::Less)
3061 }) {
3062 Ok(ix) | Err(ix) => ix,
3063 };
3064
3065 (
3066 *replica_id,
3067 set.line_mode,
3068 set.cursor_shape,
3069 set.selections[start_ix..end_ix].iter(),
3070 )
3071 })
3072 }
3073
3074 /// Whether the buffer contains any git changes.
3075 pub fn has_git_diff(&self) -> bool {
3076 !self.git_diff.is_empty()
3077 }
3078
3079 /// Returns all the Git diff hunks intersecting the given
3080 /// row range.
3081 pub fn git_diff_hunks_in_row_range(
3082 &self,
3083 range: Range<u32>,
3084 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3085 self.git_diff.hunks_in_row_range(range, self)
3086 }
3087
3088 /// Returns all the Git diff hunks intersecting the given
3089 /// range.
3090 pub fn git_diff_hunks_intersecting_range(
3091 &self,
3092 range: Range<Anchor>,
3093 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3094 self.git_diff.hunks_intersecting_range(range, self)
3095 }
3096
3097 /// Returns all the Git diff hunks intersecting the given
3098 /// range, in reverse order.
3099 pub fn git_diff_hunks_intersecting_range_rev(
3100 &self,
3101 range: Range<Anchor>,
3102 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3103 self.git_diff.hunks_intersecting_range_rev(range, self)
3104 }
3105
    /// Returns whether the buffer contains any diagnostics.
3107 pub fn has_diagnostics(&self) -> bool {
3108 !self.diagnostics.is_empty()
3109 }
3110
3111 /// Returns all the diagnostics intersecting the given range.
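    ///
    /// A minimal sketch (illustrative only, not compiled as a doc-test), assuming a
    /// `snapshot` that has received diagnostics from a language server:
    ///
    /// ```ignore
    /// // Collect every diagnostic in the buffer, with positions resolved to byte offsets.
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     println!("{:?} at {:?}: {}", entry.diagnostic.severity, entry.range, entry.diagnostic.message);
    /// }
    /// ```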
3112 pub fn diagnostics_in_range<'a, T, O>(
3113 &'a self,
3114 search_range: Range<T>,
3115 reversed: bool,
3116 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3117 where
3118 T: 'a + Clone + ToOffset,
3119 O: 'a + FromAnchor + Ord,
3120 {
3121 let mut iterators: Vec<_> = self
3122 .diagnostics
3123 .iter()
3124 .map(|(_, collection)| {
3125 collection
3126 .range::<T, O>(search_range.clone(), self, true, reversed)
3127 .peekable()
3128 })
3129 .collect();
3130
3131 std::iter::from_fn(move || {
3132 let (next_ix, _) = iterators
3133 .iter_mut()
3134 .enumerate()
3135 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
3136 .min_by(|(_, a), (_, b)| a.range.start.cmp(&b.range.start))?;
3137 iterators[next_ix].next()
3138 })
3139 }
3140
3141 /// Returns all the diagnostic groups associated with the given
3142 /// language server id. If no language server id is provided,
    /// all diagnostic groups are returned.
3144 pub fn diagnostic_groups(
3145 &self,
3146 language_server_id: Option<LanguageServerId>,
3147 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
3148 let mut groups = Vec::new();
3149
3150 if let Some(language_server_id) = language_server_id {
3151 if let Ok(ix) = self
3152 .diagnostics
3153 .binary_search_by_key(&language_server_id, |e| e.0)
3154 {
3155 self.diagnostics[ix]
3156 .1
3157 .groups(language_server_id, &mut groups, self);
3158 }
3159 } else {
3160 for (language_server_id, diagnostics) in self.diagnostics.iter() {
3161 diagnostics.groups(*language_server_id, &mut groups, self);
3162 }
3163 }
3164
3165 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
3166 let a_start = &group_a.entries[group_a.primary_ix].range.start;
3167 let b_start = &group_b.entries[group_b.primary_ix].range.start;
3168 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
3169 });
3170
3171 groups
3172 }
3173
3174 /// Returns an iterator over the diagnostics for the given group.
3175 pub fn diagnostic_group<'a, O>(
3176 &'a self,
3177 group_id: usize,
3178 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3179 where
3180 O: 'a + FromAnchor,
3181 {
3182 self.diagnostics
3183 .iter()
3184 .flat_map(move |(_, set)| set.group(group_id, self))
3185 }
3186
3187 /// The number of times diagnostics were updated.
3188 pub fn diagnostics_update_count(&self) -> usize {
3189 self.diagnostics_update_count
3190 }
3191
3192 /// The number of times the buffer was parsed.
3193 pub fn parse_count(&self) -> usize {
3194 self.parse_count
3195 }
3196
3197 /// The number of times selections were updated.
3198 pub fn selections_update_count(&self) -> usize {
3199 self.selections_update_count
3200 }
3201
    /// Returns a snapshot of the underlying file.
3203 pub fn file(&self) -> Option<&Arc<dyn File>> {
3204 self.file.as_ref()
3205 }
3206
3207 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
3208 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
3209 if let Some(file) = self.file() {
3210 if file.path().file_name().is_none() || include_root {
3211 Some(file.full_path(cx))
3212 } else {
3213 Some(file.path().to_path_buf())
3214 }
3215 } else {
3216 None
3217 }
3218 }
3219
3220 /// The number of times the underlying file was updated.
3221 pub fn file_update_count(&self) -> usize {
3222 self.file_update_count
3223 }
3224
3225 /// The number of times the git diff status was updated.
3226 pub fn git_diff_update_count(&self) -> usize {
3227 self.git_diff_update_count
3228 }
3229}
3230
3231fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
3232 indent_size_for_text(text.chars_at(Point::new(row, 0)))
3233}
3234
3235fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
3236 let mut result = IndentSize::spaces(0);
3237 for c in text {
3238 let kind = match c {
3239 ' ' => IndentKind::Space,
3240 '\t' => IndentKind::Tab,
3241 _ => break,
3242 };
3243 if result.len == 0 {
3244 result.kind = kind;
3245 }
3246 result.len += 1;
3247 }
3248 result
3249}
3250
3251impl Clone for BufferSnapshot {
3252 fn clone(&self) -> Self {
3253 Self {
3254 text: self.text.clone(),
3255 git_diff: self.git_diff.clone(),
3256 syntax: self.syntax.clone(),
3257 file: self.file.clone(),
3258 remote_selections: self.remote_selections.clone(),
3259 diagnostics: self.diagnostics.clone(),
3260 selections_update_count: self.selections_update_count,
3261 diagnostics_update_count: self.diagnostics_update_count,
3262 file_update_count: self.file_update_count,
3263 git_diff_update_count: self.git_diff_update_count,
3264 language: self.language.clone(),
3265 parse_count: self.parse_count,
3266 }
3267 }
3268}
3269
3270impl Deref for BufferSnapshot {
3271 type Target = text::BufferSnapshot;
3272
3273 fn deref(&self) -> &Self::Target {
3274 &self.text
3275 }
3276}
3277
3278unsafe impl<'a> Send for BufferChunks<'a> {}
3279
3280impl<'a> BufferChunks<'a> {
3281 pub(crate) fn new(
3282 text: &'a Rope,
3283 range: Range<usize>,
3284 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
3285 diagnostic_endpoints: Vec<DiagnosticEndpoint>,
3286 ) -> Self {
3287 let mut highlights = None;
3288 if let Some((captures, highlight_maps)) = syntax {
3289 highlights = Some(BufferChunkHighlights {
3290 captures,
3291 next_capture: None,
3292 stack: Default::default(),
3293 highlight_maps,
3294 })
3295 }
3296
3297 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
3298 let chunks = text.chunks_in_range(range.clone());
3299
3300 BufferChunks {
3301 range,
3302 chunks,
3303 diagnostic_endpoints,
3304 error_depth: 0,
3305 warning_depth: 0,
3306 information_depth: 0,
3307 hint_depth: 0,
3308 unnecessary_depth: 0,
3309 highlights,
3310 }
3311 }
3312
3313 /// Seeks to the given byte offset in the buffer.
3314 pub fn seek(&mut self, offset: usize) {
3315 self.range.start = offset;
3316 self.chunks.seek(self.range.start);
3317 if let Some(highlights) = self.highlights.as_mut() {
3318 highlights
3319 .stack
3320 .retain(|(end_offset, _)| *end_offset > offset);
3321 if let Some(capture) = &highlights.next_capture {
3322 if offset >= capture.node.start_byte() {
3323 let next_capture_end = capture.node.end_byte();
3324 if offset < next_capture_end {
3325 highlights.stack.push((
3326 next_capture_end,
3327 highlights.highlight_maps[capture.grammar_index].get(capture.index),
3328 ));
3329 }
3330 highlights.next_capture.take();
3331 }
3332 }
3333 highlights.captures.set_byte_range(self.range.clone());
3334 }
3335 }
3336
3337 /// The current byte offset in the buffer.
3338 pub fn offset(&self) -> usize {
3339 self.range.start
3340 }
3341
3342 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
3343 let depth = match endpoint.severity {
3344 DiagnosticSeverity::ERROR => &mut self.error_depth,
3345 DiagnosticSeverity::WARNING => &mut self.warning_depth,
3346 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
3347 DiagnosticSeverity::HINT => &mut self.hint_depth,
3348 _ => return,
3349 };
3350 if endpoint.is_start {
3351 *depth += 1;
3352 } else {
3353 *depth -= 1;
3354 }
3355
3356 if endpoint.is_unnecessary {
3357 if endpoint.is_start {
3358 self.unnecessary_depth += 1;
3359 } else {
3360 self.unnecessary_depth -= 1;
3361 }
3362 }
3363 }
3364
3365 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
3366 if self.error_depth > 0 {
3367 Some(DiagnosticSeverity::ERROR)
3368 } else if self.warning_depth > 0 {
3369 Some(DiagnosticSeverity::WARNING)
3370 } else if self.information_depth > 0 {
3371 Some(DiagnosticSeverity::INFORMATION)
3372 } else if self.hint_depth > 0 {
3373 Some(DiagnosticSeverity::HINT)
3374 } else {
3375 None
3376 }
3377 }
3378
3379 fn current_code_is_unnecessary(&self) -> bool {
3380 self.unnecessary_depth > 0
3381 }
3382}
3383
3384impl<'a> Iterator for BufferChunks<'a> {
3385 type Item = Chunk<'a>;
3386
3387 fn next(&mut self) -> Option<Self::Item> {
3388 let mut next_capture_start = usize::MAX;
3389 let mut next_diagnostic_endpoint = usize::MAX;
3390
3391 if let Some(highlights) = self.highlights.as_mut() {
3392 while let Some((parent_capture_end, _)) = highlights.stack.last() {
3393 if *parent_capture_end <= self.range.start {
3394 highlights.stack.pop();
3395 } else {
3396 break;
3397 }
3398 }
3399
3400 if highlights.next_capture.is_none() {
3401 highlights.next_capture = highlights.captures.next();
3402 }
3403
3404 while let Some(capture) = highlights.next_capture.as_ref() {
3405 if self.range.start < capture.node.start_byte() {
3406 next_capture_start = capture.node.start_byte();
3407 break;
3408 } else {
3409 let highlight_id =
3410 highlights.highlight_maps[capture.grammar_index].get(capture.index);
3411 highlights
3412 .stack
3413 .push((capture.node.end_byte(), highlight_id));
3414 highlights.next_capture = highlights.captures.next();
3415 }
3416 }
3417 }
3418
3419 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
3420 if endpoint.offset <= self.range.start {
3421 self.update_diagnostic_depths(endpoint);
3422 self.diagnostic_endpoints.next();
3423 } else {
3424 next_diagnostic_endpoint = endpoint.offset;
3425 break;
3426 }
3427 }
3428
3429 if let Some(chunk) = self.chunks.peek() {
3430 let chunk_start = self.range.start;
3431 let mut chunk_end = (self.chunks.offset() + chunk.len())
3432 .min(next_capture_start)
3433 .min(next_diagnostic_endpoint);
3434 let mut highlight_id = None;
3435 if let Some(highlights) = self.highlights.as_ref() {
3436 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
3437 chunk_end = chunk_end.min(*parent_capture_end);
3438 highlight_id = Some(*parent_highlight_id);
3439 }
3440 }
3441
3442 let slice =
3443 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
3444 self.range.start = chunk_end;
3445 if self.range.start == self.chunks.offset() + chunk.len() {
3446 self.chunks.next().unwrap();
3447 }
3448
3449 Some(Chunk {
3450 text: slice,
3451 syntax_highlight_id: highlight_id,
3452 diagnostic_severity: self.current_diagnostic_severity(),
3453 is_unnecessary: self.current_code_is_unnecessary(),
3454 ..Default::default()
3455 })
3456 } else {
3457 None
3458 }
3459 }
3460}
3461
3462impl operation_queue::Operation for Operation {
3463 fn lamport_timestamp(&self) -> clock::Lamport {
3464 match self {
3465 Operation::Buffer(_) => {
3466 unreachable!("buffer operations should never be deferred at this layer")
3467 }
3468 Operation::UpdateDiagnostics {
3469 lamport_timestamp, ..
3470 }
3471 | Operation::UpdateSelections {
3472 lamport_timestamp, ..
3473 }
3474 | Operation::UpdateCompletionTriggers {
3475 lamport_timestamp, ..
3476 } => *lamport_timestamp,
3477 }
3478 }
3479}
3480
3481impl Default for Diagnostic {
3482 fn default() -> Self {
3483 Self {
3484 source: Default::default(),
3485 code: None,
3486 severity: DiagnosticSeverity::ERROR,
3487 message: Default::default(),
3488 group_id: 0,
3489 is_primary: false,
3490 is_disk_based: false,
3491 is_unnecessary: false,
3492 }
3493 }
3494}
3495
3496impl IndentSize {
3497 /// Returns an [IndentSize] representing the given spaces.
3498 pub fn spaces(len: u32) -> Self {
3499 Self {
3500 len,
3501 kind: IndentKind::Space,
3502 }
3503 }
3504
3505 /// Returns an [IndentSize] representing a tab.
3506 pub fn tab() -> Self {
3507 Self {
3508 len: 1,
3509 kind: IndentKind::Tab,
3510 }
3511 }
3512
3513 /// An iterator over the characters represented by this [IndentSize].
3514 pub fn chars(&self) -> impl Iterator<Item = char> {
3515 iter::repeat(self.char()).take(self.len as usize)
3516 }
3517
3518 /// The character representation of this [IndentSize].
3519 pub fn char(&self) -> char {
3520 match self.kind {
3521 IndentKind::Space => ' ',
3522 IndentKind::Tab => '\t',
3523 }
3524 }
3525
3526 /// Consumes the current [IndentSize] and returns a new one that has
3527 /// been shrunk or enlarged by the given size along the given direction.
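    ///
    /// A small sketch of the arithmetic (illustrative only, not compiled as a doc-test):
    ///
    /// ```ignore
    /// let four = IndentSize::spaces(4);
    /// // Growing by one unit adds the lengths when the kinds match.
    /// assert_eq!(four.with_delta(Ordering::Greater, IndentSize::spaces(4)).len, 8);
    /// // Shrinking by one unit subtracts the lengths when the kinds match.
    /// assert_eq!(four.with_delta(Ordering::Less, IndentSize::spaces(4)).len, 0);
    /// // A mismatched kind leaves the size unchanged when shrinking.
    /// assert_eq!(four.with_delta(Ordering::Less, IndentSize::tab()).len, 4);
    /// ```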
3528 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
3529 match direction {
3530 Ordering::Less => {
3531 if self.kind == size.kind && self.len >= size.len {
3532 self.len -= size.len;
3533 }
3534 }
3535 Ordering::Equal => {}
3536 Ordering::Greater => {
3537 if self.len == 0 {
3538 self = size;
3539 } else if self.kind == size.kind {
3540 self.len += size.len;
3541 }
3542 }
3543 }
3544 self
3545 }
3546}
3547
3548#[cfg(any(test, feature = "test-support"))]
3549pub struct TestFile {
3550 pub path: Arc<Path>,
3551 pub root_name: String,
3552}
3553
3554#[cfg(any(test, feature = "test-support"))]
3555impl File for TestFile {
3556 fn path(&self) -> &Arc<Path> {
3557 &self.path
3558 }
3559
3560 fn full_path(&self, _: &gpui::AppContext) -> PathBuf {
3561 PathBuf::from(&self.root_name).join(self.path.as_ref())
3562 }
3563
3564 fn as_local(&self) -> Option<&dyn LocalFile> {
3565 None
3566 }
3567
3568 fn mtime(&self) -> Option<SystemTime> {
3569 unimplemented!()
3570 }
3571
3572 fn file_name<'a>(&'a self, _: &'a gpui::AppContext) -> &'a std::ffi::OsStr {
3573 self.path().file_name().unwrap_or(self.root_name.as_ref())
3574 }
3575
3576 fn worktree_id(&self) -> usize {
3577 0
3578 }
3579
3580 fn is_deleted(&self) -> bool {
3581 unimplemented!()
3582 }
3583
3584 fn as_any(&self) -> &dyn std::any::Any {
3585 unimplemented!()
3586 }
3587
3588 fn to_proto(&self) -> rpc::proto::File {
3589 unimplemented!()
3590 }
3591
3592 fn is_private(&self) -> bool {
3593 false
3594 }
3595}
3596
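/// Groups an ascending sequence of values into contiguous ranges, capping each range at
/// `max_len` items.
///
/// A small sketch of the expected behavior (illustrative only, not compiled as a doc-test):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 7, 8].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 7..9]);
/// ```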
3597pub(crate) fn contiguous_ranges(
3598 values: impl Iterator<Item = u32>,
3599 max_len: usize,
3600) -> impl Iterator<Item = Range<u32>> {
3601 let mut values = values;
3602 let mut current_range: Option<Range<u32>> = None;
3603 std::iter::from_fn(move || loop {
3604 if let Some(value) = values.next() {
3605 if let Some(range) = &mut current_range {
3606 if value == range.end && range.len() < max_len {
3607 range.end += 1;
3608 continue;
3609 }
3610 }
3611
3612 let prev_range = current_range.clone();
3613 current_range = Some(value..(value + 1));
3614 if prev_range.is_some() {
3615 return prev_range;
3616 }
3617 } else {
3618 return current_range.take();
3619 }
3620 })
3621}
3622
3623/// Returns the [CharKind] for the given character. When a scope is provided,
3624/// the function checks if the character is considered a word character
3625/// based on the language scope's word character settings.
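///
/// A small sketch of the default classification with no language scope (illustrative only,
/// not compiled as a doc-test):
///
/// ```ignore
/// assert_eq!(char_kind(&None, 'a'), CharKind::Word);
/// assert_eq!(char_kind(&None, ' '), CharKind::Whitespace);
/// assert_eq!(char_kind(&None, '-'), CharKind::Punctuation);
/// ```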
3626pub fn char_kind(scope: &Option<LanguageScope>, c: char) -> CharKind {
3627 if c.is_whitespace() {
3628 return CharKind::Whitespace;
3629 } else if c.is_alphanumeric() || c == '_' {
3630 return CharKind::Word;
3631 }
3632
3633 if let Some(scope) = scope {
3634 if let Some(characters) = scope.word_characters() {
3635 if characters.contains(&c) {
3636 return CharKind::Word;
3637 }
3638 }
3639 }
3640
3641 CharKind::Punctuation
3642}
3643
3644/// Find all of the ranges of whitespace that occur at the ends of lines
3645/// in the given rope.
3646///
3647/// This could also be done with a regex search, but this implementation
3648/// avoids copying text.
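///
/// A small sketch of the expected output (illustrative only, not compiled as a doc-test);
/// building the [`Rope`] directly from a string literal is an assumption here:
///
/// ```ignore
/// let rope = Rope::from("a  \nb\t\nc");
/// // The two spaces after "a" and the tab after "b" are trailing whitespace.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![1..3, 5..6]);
/// ```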
3649pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
3650 let mut ranges = Vec::new();
3651
3652 let mut offset = 0;
3653 let mut prev_chunk_trailing_whitespace_range = 0..0;
3654 for chunk in rope.chunks() {
3655 let mut prev_line_trailing_whitespace_range = 0..0;
3656 for (i, line) in chunk.split('\n').enumerate() {
3657 let line_end_offset = offset + line.len();
3658 let trimmed_line_len = line.trim_end_matches(|c| matches!(c, ' ' | '\t')).len();
3659 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
3660
3661 if i == 0 && trimmed_line_len == 0 {
3662 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
3663 }
3664 if !prev_line_trailing_whitespace_range.is_empty() {
3665 ranges.push(prev_line_trailing_whitespace_range);
3666 }
3667
3668 offset = line_end_offset + 1;
3669 prev_line_trailing_whitespace_range = trailing_whitespace_range;
3670 }
3671
3672 offset -= 1;
3673 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
3674 }
3675
3676 if !prev_chunk_trailing_whitespace_range.is_empty() {
3677 ranges.push(prev_chunk_trailing_whitespace_range);
3678 }
3679
3680 ranges
3681}