1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 markdown::ParsedMarkdown,
5 proto, Grammar, Language, LanguageRegistry,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 language_settings::{language_settings, LanguageSettings},
10 markdown::parse_markdown,
11 outline::OutlineItem,
12 syntax_map::{
13 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatches,
14 SyntaxSnapshot, ToTreeSitterPoint,
15 },
16 CodeLabel, LanguageScope, Outline,
17};
18use anyhow::{anyhow, Context, Result};
19pub use clock::ReplicaId;
20use futures::channel::oneshot;
21use gpui::{AppContext, EventEmitter, HighlightStyle, ModelContext, Task, TaskLabel};
22use lazy_static::lazy_static;
23use lsp::LanguageServerId;
24use parking_lot::Mutex;
25use similar::{ChangeTag, TextDiff};
26use smallvec::SmallVec;
27use smol::future::yield_now;
28use std::{
29 any::Any,
30 cmp::{self, Ordering},
31 collections::BTreeMap,
32 ffi::OsStr,
33 future::Future,
34 iter::{self, Iterator, Peekable},
35 mem,
36 ops::{Deref, Range},
37 path::{Path, PathBuf},
38 str,
39 sync::Arc,
40 time::{Duration, Instant, SystemTime},
41 vec,
42};
43use sum_tree::TreeMap;
44use text::operation_queue::OperationQueue;
45use text::*;
46pub use text::{
47 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
48 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
49 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
50 Transaction, TransactionId, Unclipped,
51};
52use theme::SyntaxTheme;
53#[cfg(any(test, feature = "test-support"))]
54use util::RandomCharIter;
55use util::RangeExt;
56
57#[cfg(any(test, feature = "test-support"))]
58pub use {tree_sitter_rust, tree_sitter_typescript};
59
60pub use lsp::DiagnosticSeverity;
61
62lazy_static! {
63 /// A label for the background task spawned by the buffer to compute
64 /// a diff against the contents of its file.
65 pub static ref BUFFER_DIFF_TASK: TaskLabel = TaskLabel::new();
66}
67
/// Indicates whether a [Buffer] has permission to edit.
69#[derive(PartialEq, Clone, Copy, Debug)]
70pub enum Capability {
71 /// The buffer is a mutable replica.
72 ReadWrite,
73 /// The buffer is a read-only replica.
74 ReadOnly,
75}
76
77/// An in-memory representation of a source code file, including its text,
78/// syntax trees, git status, and diagnostics.
79pub struct Buffer {
80 text: TextBuffer,
81 diff_base: Option<String>,
82 git_diff: git::diff::BufferDiff,
83 file: Option<Arc<dyn File>>,
84 /// The mtime of the file when this buffer was last loaded from
85 /// or saved to disk.
86 saved_mtime: Option<SystemTime>,
87 /// The version vector when this buffer was last loaded from
88 /// or saved to disk.
89 saved_version: clock::Global,
90 transaction_depth: usize,
91 was_dirty_before_starting_transaction: Option<bool>,
92 reload_task: Option<Task<Result<()>>>,
93 language: Option<Arc<Language>>,
94 autoindent_requests: Vec<Arc<AutoindentRequest>>,
95 pending_autoindent: Option<Task<()>>,
96 sync_parse_timeout: Duration,
97 syntax_map: Mutex<SyntaxMap>,
98 parsing_in_background: bool,
99 parse_count: usize,
100 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
101 remote_selections: TreeMap<ReplicaId, SelectionSet>,
102 selections_update_count: usize,
103 diagnostics_update_count: usize,
104 diagnostics_timestamp: clock::Lamport,
105 file_update_count: usize,
106 git_diff_update_count: usize,
107 completion_triggers: Vec<String>,
108 completion_triggers_timestamp: clock::Lamport,
109 deferred_ops: OperationQueue<Operation>,
110 capability: Capability,
111 has_conflict: bool,
112}
113
114/// An immutable, cheaply cloneable representation of a fixed
115/// state of a buffer.
116pub struct BufferSnapshot {
117 text: text::BufferSnapshot,
118 git_diff: git::diff::BufferDiff,
119 pub(crate) syntax: SyntaxSnapshot,
120 file: Option<Arc<dyn File>>,
121 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
122 diagnostics_update_count: usize,
123 file_update_count: usize,
124 git_diff_update_count: usize,
125 remote_selections: TreeMap<ReplicaId, SelectionSet>,
126 selections_update_count: usize,
127 language: Option<Arc<Language>>,
128 parse_count: usize,
129}
130
131/// The kind and amount of indentation in a particular line. For now,
132/// assumes that indentation is all the same character.
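///
/// For example, `IndentSize { len: 4, kind: IndentKind::Space }` describes an
/// indentation of four space characters.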
133#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
134pub struct IndentSize {
135 /// The number of bytes that comprise the indentation.
136 pub len: u32,
137 /// The kind of whitespace used for indentation.
138 pub kind: IndentKind,
139}
140
141/// A whitespace character that's used for indentation.
142#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
143pub enum IndentKind {
144 /// An ASCII space character.
145 #[default]
146 Space,
147 /// An ASCII tab character.
148 Tab,
149}
150
151/// The shape of a selection cursor.
152#[derive(Copy, Clone, PartialEq, Eq, Debug, Default)]
153pub enum CursorShape {
    /// A vertical bar.
    #[default]
    Bar,
    /// A block that surrounds the following character.
    Block,
    /// An underline that runs along the following character.
    Underscore,
    /// A box drawn around the following character.
    Hollow,
163}
164
165#[derive(Clone, Debug)]
166struct SelectionSet {
167 line_mode: bool,
168 cursor_shape: CursorShape,
169 selections: Arc<[Selection<Anchor>]>,
170 lamport_timestamp: clock::Lamport,
171}
172
173/// A diagnostic associated with a certain range of a buffer.
174#[derive(Clone, Debug, PartialEq, Eq)]
175pub struct Diagnostic {
176 /// The name of the service that produced this diagnostic.
177 pub source: Option<String>,
178 /// A machine-readable code that identifies this diagnostic.
179 pub code: Option<String>,
180 /// Whether this diagnostic is a hint, warning, or error.
181 pub severity: DiagnosticSeverity,
182 /// The human-readable message associated with this diagnostic.
183 pub message: String,
184 /// An id that identifies the group to which this diagnostic belongs.
185 ///
186 /// When a language server produces a diagnostic with
187 /// one or more associated diagnostics, those diagnostics are all
188 /// assigned a single group id.
189 pub group_id: usize,
190 /// Whether this diagnostic is the primary diagnostic for its group.
191 ///
192 /// In a given group, the primary diagnostic is the top-level diagnostic
193 /// returned by the language server. The non-primary diagnostics are the
194 /// associated diagnostics.
195 pub is_primary: bool,
196 /// Whether this diagnostic is considered to originate from an analysis of
197 /// files on disk, as opposed to any unsaved buffer contents. This is a
198 /// property of a given diagnostic source, and is configured for a given
199 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
200 /// for the language server.
201 pub is_disk_based: bool,
202 /// Whether this diagnostic marks unnecessary code.
203 pub is_unnecessary: bool,
204}
205
206/// TODO - move this into the `project` crate and make it private.
207pub async fn prepare_completion_documentation(
208 documentation: &lsp::Documentation,
209 language_registry: &Arc<LanguageRegistry>,
210 language: Option<Arc<Language>>,
211) -> Documentation {
212 match documentation {
213 lsp::Documentation::String(text) => {
214 if text.lines().count() <= 1 {
215 Documentation::SingleLine(text.clone())
216 } else {
217 Documentation::MultiLinePlainText(text.clone())
218 }
219 }
220
221 lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
222 lsp::MarkupKind::PlainText => {
223 if value.lines().count() <= 1 {
224 Documentation::SingleLine(value.clone())
225 } else {
226 Documentation::MultiLinePlainText(value.clone())
227 }
228 }
229
230 lsp::MarkupKind::Markdown => {
231 let parsed = parse_markdown(value, language_registry, language).await;
232 Documentation::MultiLineMarkdown(parsed)
233 }
234 },
235 }
236}
237
238/// Documentation associated with a [`Completion`].
239#[derive(Clone, Debug)]
240pub enum Documentation {
241 /// There is no documentation for this completion.
242 Undocumented,
243 /// A single line of documentation.
244 SingleLine(String),
245 /// Multiple lines of plain text documentation.
246 MultiLinePlainText(String),
247 /// Markdown documentation.
248 MultiLineMarkdown(ParsedMarkdown),
249}
250
/// A completion provided by a language server.
252#[derive(Clone, Debug)]
253pub struct Completion {
254 /// The range of the buffer that will be replaced.
255 pub old_range: Range<Anchor>,
256 /// The new text that will be inserted.
257 pub new_text: String,
258 /// A label for this completion that is shown in the menu.
259 pub label: CodeLabel,
260 /// The id of the language server that produced this completion.
261 pub server_id: LanguageServerId,
262 /// The documentation for this completion.
263 pub documentation: Option<Documentation>,
264 /// The raw completion provided by the language server.
265 pub lsp_completion: lsp::CompletionItem,
266}
267
268/// A code action provided by a language server.
269#[derive(Clone, Debug)]
270pub struct CodeAction {
271 /// The id of the language server that produced this code action.
272 pub server_id: LanguageServerId,
273 /// The range of the buffer where this code action is applicable.
274 pub range: Range<Anchor>,
275 /// The raw code action provided by the language server.
276 pub lsp_action: lsp::CodeAction,
277}
278
279/// An operation used to synchronize this buffer with its other replicas.
280#[derive(Clone, Debug, PartialEq)]
281pub enum Operation {
282 /// A text operation.
283 Buffer(text::Operation),
284
285 /// An update to the buffer's diagnostics.
286 UpdateDiagnostics {
287 /// The id of the language server that produced the new diagnostics.
288 server_id: LanguageServerId,
289 /// The diagnostics.
290 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
291 /// The buffer's lamport timestamp.
292 lamport_timestamp: clock::Lamport,
293 },
294
295 /// An update to the most recent selections in this buffer.
296 UpdateSelections {
297 /// The selections.
298 selections: Arc<[Selection<Anchor>]>,
299 /// The buffer's lamport timestamp.
300 lamport_timestamp: clock::Lamport,
301 /// Whether the selections are in 'line mode'.
302 line_mode: bool,
303 /// The [`CursorShape`] associated with these selections.
304 cursor_shape: CursorShape,
305 },
306
307 /// An update to the characters that should trigger autocompletion
308 /// for this buffer.
309 UpdateCompletionTriggers {
310 /// The characters that trigger autocompletion.
311 triggers: Vec<String>,
312 /// The buffer's lamport timestamp.
313 lamport_timestamp: clock::Lamport,
314 },
315}
316
317/// An event that occurs in a buffer.
318#[derive(Clone, Debug, PartialEq)]
319pub enum Event {
320 /// The buffer was changed in a way that must be
321 /// propagated to its other replicas.
322 Operation(Operation),
323 /// The buffer was edited.
324 Edited,
325 /// The buffer's `dirty` bit changed.
326 DirtyChanged,
327 /// The buffer was saved.
328 Saved,
329 /// The buffer's file was changed on disk.
330 FileHandleChanged,
331 /// The buffer was reloaded.
332 Reloaded,
    /// The buffer's `diff_base` changed.
334 DiffBaseChanged,
335 /// The buffer's language was changed.
336 LanguageChanged,
337 /// The buffer's syntax trees were updated.
338 Reparsed,
339 /// The buffer's diagnostics were updated.
340 DiagnosticsUpdated,
341 /// The buffer gained or lost editing capabilities.
342 CapabilityChanged,
343 /// The buffer was explicitly requested to close.
344 Closed,
345}
346
347/// The file associated with a buffer.
348pub trait File: Send + Sync {
349 /// Returns the [`LocalFile`] associated with this file, if the
350 /// file is local.
351 fn as_local(&self) -> Option<&dyn LocalFile>;
352
353 /// Returns whether this file is local.
354 fn is_local(&self) -> bool {
355 self.as_local().is_some()
356 }
357
358 /// Returns the file's mtime.
359 fn mtime(&self) -> Option<SystemTime>;
360
361 /// Returns the path of this file relative to the worktree's root directory.
362 fn path(&self) -> &Arc<Path>;
363
364 /// Returns the path of this file relative to the worktree's parent directory (this means it
365 /// includes the name of the worktree's root folder).
366 fn full_path(&self, cx: &AppContext) -> PathBuf;
367
368 /// Returns the last component of this handle's absolute path. If this handle refers to the root
369 /// of its worktree, then this method will return the name of the worktree itself.
370 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
371
372 /// Returns the id of the worktree to which this file belongs.
373 ///
374 /// This is needed for looking up project-specific settings.
375 fn worktree_id(&self) -> usize;
376
377 /// Returns whether the file has been deleted.
378 fn is_deleted(&self) -> bool;
379
    /// Returns whether the file existed on disk at one point.
381 fn is_created(&self) -> bool {
382 self.mtime().is_some()
383 }
384
385 /// Converts this file into an [`Any`] trait object.
386 fn as_any(&self) -> &dyn Any;
387
388 /// Converts this file into a protobuf message.
389 fn to_proto(&self) -> rpc::proto::File;
390
    /// Returns whether Zed considers this to be a private file.
392 fn is_private(&self) -> bool;
393}
394
395/// The file associated with a buffer, in the case where the file is on the local disk.
396pub trait LocalFile: File {
397 /// Returns the absolute path of this file.
398 fn abs_path(&self, cx: &AppContext) -> PathBuf;
399
400 /// Loads the file's contents from disk.
401 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
402
403 /// Called when the buffer is reloaded from disk.
404 fn buffer_reloaded(
405 &self,
406 buffer_id: BufferId,
407 version: &clock::Global,
408 line_ending: LineEnding,
409 mtime: Option<SystemTime>,
410 cx: &mut AppContext,
411 );
412
413 /// Returns true if the file should not be shared with collaborators.
414 fn is_private(&self, _: &AppContext) -> bool {
415 false
416 }
417}
418
419/// The auto-indent behavior associated with an editing operation.
420/// For some editing operations, each affected line of text has its
421/// indentation recomputed. For other operations, the entire block
422/// of edited text is adjusted uniformly.
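///
/// # Example (illustrative sketch, not a doctest)
///
/// The snippet below assumes a `buffer`, an anchor `range`, pasted
/// `copied_text`, and a `cx` are in scope; it only shows how the two modes
/// are selected.
///
/// ```ignore
/// // Recompute the indentation of every inserted line individually.
/// buffer.edit(
///     [(range.clone(), "if ok {\n    run()\n}")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
///
/// // Shift a pasted block uniformly, preserving its internal structure.
/// // The original indent column of the copied text tells the buffer how far
/// // the block needs to be shifted.
/// buffer.edit(
///     [(range, copied_text)],
///     Some(AutoindentMode::Block {
///         original_indent_columns: vec![4],
///     }),
///     cx,
/// );
/// ```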
423#[derive(Clone, Debug)]
424pub enum AutoindentMode {
425 /// Indent each line of inserted text.
426 EachLine,
427 /// Apply the same indentation adjustment to all of the lines
428 /// in a given insertion.
429 Block {
430 /// The original indentation level of the first line of each
431 /// insertion, if it has been copied.
432 original_indent_columns: Vec<u32>,
433 },
434}
435
436#[derive(Clone)]
437struct AutoindentRequest {
438 before_edit: BufferSnapshot,
439 entries: Vec<AutoindentRequestEntry>,
440 is_block_mode: bool,
441}
442
443#[derive(Clone)]
444struct AutoindentRequestEntry {
445 /// A range of the buffer whose indentation should be adjusted.
446 range: Range<Anchor>,
447 /// Whether or not these lines should be considered brand new, for the
448 /// purpose of auto-indent. When text is not new, its indentation will
449 /// only be adjusted if the suggested indentation level has *changed*
450 /// since the edit was made.
451 first_line_is_new: bool,
452 indent_size: IndentSize,
453 original_indent_column: Option<u32>,
454}
455
456#[derive(Debug)]
457struct IndentSuggestion {
458 basis_row: u32,
459 delta: Ordering,
460 within_error: bool,
461}
462
463struct BufferChunkHighlights<'a> {
464 captures: SyntaxMapCaptures<'a>,
465 next_capture: Option<SyntaxMapCapture<'a>>,
466 stack: Vec<(usize, HighlightId)>,
467 highlight_maps: Vec<HighlightMap>,
468}
469
470/// An iterator that yields chunks of a buffer's text, along with their
471/// syntax highlights and diagnostic status.
472pub struct BufferChunks<'a> {
473 range: Range<usize>,
474 chunks: text::Chunks<'a>,
475 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
476 error_depth: usize,
477 warning_depth: usize,
478 information_depth: usize,
479 hint_depth: usize,
480 unnecessary_depth: usize,
481 highlights: Option<BufferChunkHighlights<'a>>,
482}
483
484/// A chunk of a buffer's text, along with its syntax highlight and
485/// diagnostic status.
486#[derive(Clone, Copy, Debug, Default)]
487pub struct Chunk<'a> {
488 /// The text of the chunk.
489 pub text: &'a str,
490 /// The syntax highlighting style of the chunk.
491 pub syntax_highlight_id: Option<HighlightId>,
492 /// The highlight style that has been applied to this chunk in
493 /// the editor.
494 pub highlight_style: Option<HighlightStyle>,
495 /// The severity of diagnostic associated with this chunk, if any.
496 pub diagnostic_severity: Option<DiagnosticSeverity>,
497 /// Whether this chunk of text is marked as unnecessary.
498 pub is_unnecessary: bool,
499 /// Whether this chunk of text was originally a tab character.
500 pub is_tab: bool,
501}
502
503/// A set of edits to a given version of a buffer, computed asynchronously.
504pub struct Diff {
505 pub(crate) base_version: clock::Global,
506 line_ending: LineEnding,
507 edits: Vec<(Range<usize>, Arc<str>)>,
508}
509
510#[derive(Clone, Copy)]
511pub(crate) struct DiagnosticEndpoint {
512 offset: usize,
513 is_start: bool,
514 severity: DiagnosticSeverity,
515 is_unnecessary: bool,
516}
517
518/// A class of characters, used for characterizing a run of text.
519#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
520pub enum CharKind {
521 /// Whitespace.
522 Whitespace,
523 /// Punctuation.
524 Punctuation,
525 /// Word.
526 Word,
527}
528
529impl Buffer {
530 /// Create a new buffer with the given base text.
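    ///
    /// # Example (illustrative sketch; the replica id, buffer id, and gpui
    /// context are placeholders)
    ///
    /// ```ignore
    /// let buffer = cx.new_model(|_cx| {
    ///     Buffer::new(0, BufferId::new(1).unwrap(), "fn main() {}\n")
    /// });
    /// ```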
531 pub fn new<T: Into<String>>(replica_id: ReplicaId, id: BufferId, base_text: T) -> Self {
532 Self::build(
533 TextBuffer::new(replica_id, id, base_text.into()),
534 None,
535 None,
536 Capability::ReadWrite,
537 )
538 }
539
540 /// Create a new buffer that is a replica of a remote buffer.
541 pub fn remote(
542 remote_id: BufferId,
543 replica_id: ReplicaId,
544 capability: Capability,
545 base_text: String,
546 ) -> Self {
547 Self::build(
548 TextBuffer::new(replica_id, remote_id, base_text),
549 None,
550 None,
551 capability,
552 )
553 }
554
555 /// Create a new buffer that is a replica of a remote buffer, populating its
556 /// state from the given protobuf message.
557 pub fn from_proto(
558 replica_id: ReplicaId,
559 capability: Capability,
560 message: proto::BufferState,
561 file: Option<Arc<dyn File>>,
562 ) -> Result<Self> {
563 let buffer_id = BufferId::new(message.id)
564 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
565 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
566 let mut this = Self::build(
567 buffer,
568 message.diff_base.map(|text| text.into_boxed_str().into()),
569 file,
570 capability,
571 );
572 this.text.set_line_ending(proto::deserialize_line_ending(
573 rpc::proto::LineEnding::from_i32(message.line_ending)
574 .ok_or_else(|| anyhow!("missing line_ending"))?,
575 ));
576 this.saved_version = proto::deserialize_version(&message.saved_version);
577 this.saved_mtime = message.saved_mtime.map(|time| time.into());
578 Ok(this)
579 }
580
581 /// Serialize the buffer's state to a protobuf message.
582 pub fn to_proto(&self) -> proto::BufferState {
583 proto::BufferState {
584 id: self.remote_id().into(),
585 file: self.file.as_ref().map(|f| f.to_proto()),
586 base_text: self.base_text().to_string(),
587 diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
588 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
589 saved_version: proto::serialize_version(&self.saved_version),
590 saved_mtime: self.saved_mtime.map(|time| time.into()),
591 }
592 }
593
594 /// Serialize as protobufs all of the changes to the buffer since the given version.
595 pub fn serialize_ops(
596 &self,
597 since: Option<clock::Global>,
598 cx: &AppContext,
599 ) -> Task<Vec<proto::Operation>> {
600 let mut operations = Vec::new();
601 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
602
603 operations.extend(self.remote_selections.iter().map(|(_, set)| {
604 proto::serialize_operation(&Operation::UpdateSelections {
605 selections: set.selections.clone(),
606 lamport_timestamp: set.lamport_timestamp,
607 line_mode: set.line_mode,
608 cursor_shape: set.cursor_shape,
609 })
610 }));
611
612 for (server_id, diagnostics) in &self.diagnostics {
613 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
614 lamport_timestamp: self.diagnostics_timestamp,
615 server_id: *server_id,
616 diagnostics: diagnostics.iter().cloned().collect(),
617 }));
618 }
619
620 operations.push(proto::serialize_operation(
621 &Operation::UpdateCompletionTriggers {
622 triggers: self.completion_triggers.clone(),
623 lamport_timestamp: self.completion_triggers_timestamp,
624 },
625 ));
626
627 let text_operations = self.text.operations().clone();
628 cx.background_executor().spawn(async move {
629 let since = since.unwrap_or_default();
630 operations.extend(
631 text_operations
632 .iter()
633 .filter(|(_, op)| !since.observed(op.timestamp()))
634 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
635 );
636 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
637 operations
638 })
639 }
640
641 /// Assign a language to the buffer, returning the buffer.
642 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
643 self.set_language(Some(language), cx);
644 self
645 }
646
647 /// Returns the [Capability] of this buffer.
648 pub fn capability(&self) -> Capability {
649 self.capability
650 }
651
652 /// Whether this buffer can only be read.
653 pub fn read_only(&self) -> bool {
654 self.capability == Capability::ReadOnly
655 }
656
657 /// Builds a [Buffer] with the given underlying [TextBuffer], diff base, [File] and [Capability].
658 pub fn build(
659 buffer: TextBuffer,
660 diff_base: Option<String>,
661 file: Option<Arc<dyn File>>,
662 capability: Capability,
663 ) -> Self {
664 let saved_mtime = file.as_ref().and_then(|file| file.mtime());
665
666 Self {
667 saved_mtime,
668 saved_version: buffer.version(),
669 reload_task: None,
670 transaction_depth: 0,
671 was_dirty_before_starting_transaction: None,
672 text: buffer,
673 diff_base,
674 git_diff: git::diff::BufferDiff::new(),
675 file,
676 capability,
677 syntax_map: Mutex::new(SyntaxMap::new()),
678 parsing_in_background: false,
679 parse_count: 0,
680 sync_parse_timeout: Duration::from_millis(1),
681 autoindent_requests: Default::default(),
682 pending_autoindent: Default::default(),
683 language: None,
684 remote_selections: Default::default(),
685 selections_update_count: 0,
686 diagnostics: Default::default(),
687 diagnostics_update_count: 0,
688 diagnostics_timestamp: Default::default(),
689 file_update_count: 0,
690 git_diff_update_count: 0,
691 completion_triggers: Default::default(),
692 completion_triggers_timestamp: Default::default(),
693 deferred_ops: OperationQueue::new(),
694 has_conflict: false,
695 }
696 }
697
698 /// Retrieve a snapshot of the buffer's current state. This is computationally
699 /// cheap, and allows reading from the buffer on a background thread.
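    ///
    /// # Example (illustrative sketch; `buffer` and `cx` are placeholders)
    ///
    /// ```ignore
    /// let snapshot = buffer.snapshot();
    /// cx.background_executor()
    ///     .spawn(async move {
    ///         // The snapshot can be read freely off the main thread.
    ///         let last_row = snapshot.max_point().row;
    ///         println!("buffer has {} lines", last_row + 1);
    ///     })
    ///     .detach();
    /// ```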
700 pub fn snapshot(&self) -> BufferSnapshot {
701 let text = self.text.snapshot();
702 let mut syntax_map = self.syntax_map.lock();
703 syntax_map.interpolate(&text);
704 let syntax = syntax_map.snapshot();
705
706 BufferSnapshot {
707 text,
708 syntax,
709 git_diff: self.git_diff.clone(),
710 file: self.file.clone(),
711 remote_selections: self.remote_selections.clone(),
712 diagnostics: self.diagnostics.clone(),
713 diagnostics_update_count: self.diagnostics_update_count,
714 file_update_count: self.file_update_count,
715 git_diff_update_count: self.git_diff_update_count,
716 language: self.language.clone(),
717 parse_count: self.parse_count,
718 selections_update_count: self.selections_update_count,
719 }
720 }
721
722 #[cfg(test)]
723 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
724 &self.text
725 }
726
727 /// Retrieve a snapshot of the buffer's raw text, without any
728 /// language-related state like the syntax tree or diagnostics.
729 pub fn text_snapshot(&self) -> text::BufferSnapshot {
730 self.text.snapshot()
731 }
732
733 /// The file associated with the buffer, if any.
734 pub fn file(&self) -> Option<&Arc<dyn File>> {
735 self.file.as_ref()
736 }
737
738 /// The version of the buffer that was last saved or reloaded from disk.
739 pub fn saved_version(&self) -> &clock::Global {
740 &self.saved_version
741 }
742
743 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
744 pub fn saved_mtime(&self) -> Option<SystemTime> {
745 self.saved_mtime
746 }
747
748 /// Assign a language to the buffer.
749 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
750 self.parse_count += 1;
751 self.syntax_map.lock().clear();
752 self.language = language;
753 self.reparse(cx);
754 cx.emit(Event::LanguageChanged);
755 }
756
757 /// Assign a language registry to the buffer. This allows the buffer to retrieve
758 /// other languages if parts of the buffer are written in different languages.
759 pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
760 self.syntax_map
761 .lock()
762 .set_language_registry(language_registry);
763 }
764
765 /// Assign the buffer a new [Capability].
766 pub fn set_capability(&mut self, capability: Capability, cx: &mut ModelContext<Self>) {
767 self.capability = capability;
768 cx.emit(Event::CapabilityChanged)
769 }
770
771 /// This method is called to signal that the buffer has been saved.
772 pub fn did_save(
773 &mut self,
774 version: clock::Global,
775 mtime: Option<SystemTime>,
776 cx: &mut ModelContext<Self>,
777 ) {
778 self.saved_version = version;
779 self.has_conflict = false;
780 self.saved_mtime = mtime;
781 cx.emit(Event::Saved);
782 cx.notify();
783 }
784
785 /// Reloads the contents of the buffer from disk.
786 pub fn reload(
787 &mut self,
788 cx: &mut ModelContext<Self>,
789 ) -> oneshot::Receiver<Option<Transaction>> {
790 let (tx, rx) = futures::channel::oneshot::channel();
791 let prev_version = self.text.version();
792 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
793 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
794 let file = this.file.as_ref()?.as_local()?;
795 Some((file.mtime(), file.load(cx)))
796 })?
797 else {
798 return Ok(());
799 };
800
801 let new_text = new_text.await?;
802 let diff = this
803 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
804 .await;
805 this.update(&mut cx, |this, cx| {
806 if this.version() == diff.base_version {
807 this.finalize_last_transaction();
808 this.apply_diff(diff, cx);
809 tx.send(this.finalize_last_transaction().cloned()).ok();
810 this.has_conflict = false;
811 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
812 } else {
813 if !diff.edits.is_empty()
814 || this
815 .edits_since::<usize>(&diff.base_version)
816 .next()
817 .is_some()
818 {
819 this.has_conflict = true;
820 }
821
822 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
823 }
824
825 this.reload_task.take();
826 })
827 }));
828 rx
829 }
830
831 /// This method is called to signal that the buffer has been reloaded.
832 pub fn did_reload(
833 &mut self,
834 version: clock::Global,
835 line_ending: LineEnding,
836 mtime: Option<SystemTime>,
837 cx: &mut ModelContext<Self>,
838 ) {
839 self.saved_version = version;
840 self.text.set_line_ending(line_ending);
841 self.saved_mtime = mtime;
842 if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
843 file.buffer_reloaded(
844 self.remote_id(),
845 &self.saved_version,
846 self.line_ending(),
847 self.saved_mtime,
848 cx,
849 );
850 }
851 cx.emit(Event::Reloaded);
852 cx.notify();
853 }
854
855 /// Updates the [File] backing this buffer. This should be called when
856 /// the file has changed or has been deleted.
857 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut ModelContext<Self>) {
858 let mut file_changed = false;
859
860 if let Some(old_file) = self.file.as_ref() {
861 if new_file.path() != old_file.path() {
862 file_changed = true;
863 }
864
865 if new_file.is_deleted() {
866 if !old_file.is_deleted() {
867 file_changed = true;
868 if !self.is_dirty() {
869 cx.emit(Event::DirtyChanged);
870 }
871 }
872 } else {
873 let new_mtime = new_file.mtime();
874 if new_mtime != old_file.mtime() {
875 file_changed = true;
876
877 if !self.is_dirty() {
878 self.reload(cx).close();
879 }
880 }
881 }
882 } else {
883 file_changed = true;
884 };
885
886 self.file = Some(new_file);
887 if file_changed {
888 self.file_update_count += 1;
889 cx.emit(Event::FileHandleChanged);
890 cx.notify();
891 }
892 }
893
    /// Returns the current diff base; see [`Buffer::set_diff_base`].
895 pub fn diff_base(&self) -> Option<&str> {
896 self.diff_base.as_deref()
897 }
898
899 /// Sets the text that will be used to compute a Git diff
900 /// against the buffer text.
901 pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &mut ModelContext<Self>) {
902 self.diff_base = diff_base;
903 if let Some(recalc_task) = self.git_diff_recalc(cx) {
904 cx.spawn(|buffer, mut cx| async move {
905 recalc_task.await;
906 buffer
907 .update(&mut cx, |_, cx| {
908 cx.emit(Event::DiffBaseChanged);
909 })
910 .ok();
911 })
912 .detach();
913 }
914 }
915
916 /// Recomputes the Git diff status.
917 pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) -> Option<Task<()>> {
918 let diff_base = self.diff_base.clone()?; // TODO: Make this an Arc
919 let snapshot = self.snapshot();
920
921 let mut diff = self.git_diff.clone();
922 let diff = cx.background_executor().spawn(async move {
923 diff.update(&diff_base, &snapshot).await;
924 diff
925 });
926
927 Some(cx.spawn(|this, mut cx| async move {
928 let buffer_diff = diff.await;
929 this.update(&mut cx, |this, _| {
930 this.git_diff = buffer_diff;
931 this.git_diff_update_count += 1;
932 })
933 .ok();
934 }))
935 }
936
937 /// Returns the primary [Language] assigned to this [Buffer].
938 pub fn language(&self) -> Option<&Arc<Language>> {
939 self.language.as_ref()
940 }
941
942 /// Returns the [Language] at the given location.
943 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
944 let offset = position.to_offset(self);
945 self.syntax_map
946 .lock()
947 .layers_for_range(offset..offset, &self.text)
948 .last()
949 .map(|info| info.language.clone())
950 .or_else(|| self.language.clone())
951 }
952
953 /// The number of times the buffer was parsed.
954 pub fn parse_count(&self) -> usize {
955 self.parse_count
956 }
957
958 /// The number of times selections were updated.
959 pub fn selections_update_count(&self) -> usize {
960 self.selections_update_count
961 }
962
963 /// The number of times diagnostics were updated.
964 pub fn diagnostics_update_count(&self) -> usize {
965 self.diagnostics_update_count
966 }
967
968 /// The number of times the underlying file was updated.
969 pub fn file_update_count(&self) -> usize {
970 self.file_update_count
971 }
972
973 /// The number of times the git diff status was updated.
974 pub fn git_diff_update_count(&self) -> usize {
975 self.git_diff_update_count
976 }
977
978 /// Whether the buffer is being parsed in the background.
979 #[cfg(any(test, feature = "test-support"))]
980 pub fn is_parsing(&self) -> bool {
981 self.parsing_in_background
982 }
983
984 /// Indicates whether the buffer contains any regions that may be
985 /// written in a language that hasn't been loaded yet.
986 pub fn contains_unknown_injections(&self) -> bool {
987 self.syntax_map.lock().contains_unknown_injections()
988 }
989
990 #[cfg(test)]
991 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
992 self.sync_parse_timeout = timeout;
993 }
994
995 /// Called after an edit to synchronize the buffer's main parse tree with
996 /// the buffer's new underlying state.
997 ///
998 /// Locks the syntax map and interpolates the edits since the last reparse
999 /// into the foreground syntax tree.
1000 ///
1001 /// Then takes a stable snapshot of the syntax map before unlocking it.
1002 /// The snapshot with the interpolated edits is sent to a background thread,
1003 /// where we ask Tree-sitter to perform an incremental parse.
1004 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms,
    /// waiting for the parse to complete. If it finishes within that window,
    /// we proceed synchronously with the newly-parsed syntax tree.
    ///
    /// If the timeout elapses, we spawn a second task that waits for the parse
    /// to complete, and return immediately with the interpolated tree still in
    /// the foreground. When the background parse finishes, it calls back into
    /// the main thread and assigns the new parse state.
    ///
    /// If the buffer or grammar changed while the background parse was running,
    /// we initiate an additional reparse. To avoid concurrent parses of the same
    /// buffer, a new parse is only started once no background parse is in flight.
1018 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1019 if self.parsing_in_background {
1020 return;
1021 }
1022 let language = if let Some(language) = self.language.clone() {
1023 language
1024 } else {
1025 return;
1026 };
1027
1028 let text = self.text_snapshot();
1029 let parsed_version = self.version();
1030
1031 let mut syntax_map = self.syntax_map.lock();
1032 syntax_map.interpolate(&text);
1033 let language_registry = syntax_map.language_registry();
1034 let mut syntax_snapshot = syntax_map.snapshot();
1035 drop(syntax_map);
1036
1037 let parse_task = cx.background_executor().spawn({
1038 let language = language.clone();
1039 let language_registry = language_registry.clone();
1040 async move {
1041 syntax_snapshot.reparse(&text, language_registry, language);
1042 syntax_snapshot
1043 }
1044 });
1045
1046 match cx
1047 .background_executor()
1048 .block_with_timeout(self.sync_parse_timeout, parse_task)
1049 {
1050 Ok(new_syntax_snapshot) => {
1051 self.did_finish_parsing(new_syntax_snapshot, cx);
1052 return;
1053 }
1054 Err(parse_task) => {
1055 self.parsing_in_background = true;
1056 cx.spawn(move |this, mut cx| async move {
1057 let new_syntax_map = parse_task.await;
1058 this.update(&mut cx, move |this, cx| {
1059 let grammar_changed =
1060 this.language.as_ref().map_or(true, |current_language| {
1061 !Arc::ptr_eq(&language, current_language)
1062 });
1063 let language_registry_changed = new_syntax_map
1064 .contains_unknown_injections()
1065 && language_registry.map_or(false, |registry| {
1066 registry.version() != new_syntax_map.language_registry_version()
1067 });
1068 let parse_again = language_registry_changed
1069 || grammar_changed
1070 || this.version.changed_since(&parsed_version);
1071 this.did_finish_parsing(new_syntax_map, cx);
1072 this.parsing_in_background = false;
1073 if parse_again {
1074 this.reparse(cx);
1075 }
1076 })
1077 .ok();
1078 })
1079 .detach();
1080 }
1081 }
1082 }
1083
1084 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
1085 self.parse_count += 1;
1086 self.syntax_map.lock().did_parse(syntax_snapshot);
1087 self.request_autoindent(cx);
1088 cx.emit(Event::Reparsed);
1089 cx.notify();
1090 }
1091
1092 /// Assign to the buffer a set of diagnostics created by a given language server.
1093 pub fn update_diagnostics(
1094 &mut self,
1095 server_id: LanguageServerId,
1096 diagnostics: DiagnosticSet,
1097 cx: &mut ModelContext<Self>,
1098 ) {
1099 let lamport_timestamp = self.text.lamport_clock.tick();
1100 let op = Operation::UpdateDiagnostics {
1101 server_id,
1102 diagnostics: diagnostics.iter().cloned().collect(),
1103 lamport_timestamp,
1104 };
1105 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1106 self.send_operation(op, cx);
1107 }
1108
1109 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
1110 if let Some(indent_sizes) = self.compute_autoindents() {
1111 let indent_sizes = cx.background_executor().spawn(indent_sizes);
1112 match cx
1113 .background_executor()
1114 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1115 {
1116 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1117 Err(indent_sizes) => {
1118 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1119 let indent_sizes = indent_sizes.await;
1120 this.update(&mut cx, |this, cx| {
1121 this.apply_autoindents(indent_sizes, cx);
1122 })
1123 .ok();
1124 }));
1125 }
1126 }
1127 } else {
1128 self.autoindent_requests.clear();
1129 }
1130 }
1131
1132 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
1133 let max_rows_between_yields = 100;
1134 let snapshot = self.snapshot();
1135 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1136 return None;
1137 }
1138
1139 let autoindent_requests = self.autoindent_requests.clone();
1140 Some(async move {
1141 let mut indent_sizes = BTreeMap::new();
1142 for request in autoindent_requests {
1143 // Resolve each edited range to its row in the current buffer and in the
1144 // buffer before this batch of edits.
1145 let mut row_ranges = Vec::new();
1146 let mut old_to_new_rows = BTreeMap::new();
1147 let mut language_indent_sizes_by_new_row = Vec::new();
1148 for entry in &request.entries {
1149 let position = entry.range.start;
1150 let new_row = position.to_point(&snapshot).row;
1151 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1152 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1153
1154 if !entry.first_line_is_new {
1155 let old_row = position.to_point(&request.before_edit).row;
1156 old_to_new_rows.insert(old_row, new_row);
1157 }
1158 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1159 }
1160
1161 // Build a map containing the suggested indentation for each of the edited lines
1162 // with respect to the state of the buffer before these edits. This map is keyed
1163 // by the rows for these lines in the current state of the buffer.
1164 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1165 let old_edited_ranges =
1166 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1167 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1168 let mut language_indent_size = IndentSize::default();
1169 for old_edited_range in old_edited_ranges {
1170 let suggestions = request
1171 .before_edit
1172 .suggest_autoindents(old_edited_range.clone())
1173 .into_iter()
1174 .flatten();
1175 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1176 if let Some(suggestion) = suggestion {
1177 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1178
1179 // Find the indent size based on the language for this row.
1180 while let Some((row, size)) = language_indent_sizes.peek() {
1181 if *row > new_row {
1182 break;
1183 }
1184 language_indent_size = *size;
1185 language_indent_sizes.next();
1186 }
1187
1188 let suggested_indent = old_to_new_rows
1189 .get(&suggestion.basis_row)
1190 .and_then(|from_row| {
1191 Some(old_suggestions.get(from_row).copied()?.0)
1192 })
1193 .unwrap_or_else(|| {
1194 request
1195 .before_edit
1196 .indent_size_for_line(suggestion.basis_row)
1197 })
1198 .with_delta(suggestion.delta, language_indent_size);
1199 old_suggestions
1200 .insert(new_row, (suggested_indent, suggestion.within_error));
1201 }
1202 }
1203 yield_now().await;
1204 }
1205
1206 // In block mode, only compute indentation suggestions for the first line
1207 // of each insertion. Otherwise, compute suggestions for every inserted line.
1208 let new_edited_row_ranges = contiguous_ranges(
1209 row_ranges.iter().flat_map(|(range, _)| {
1210 if request.is_block_mode {
1211 range.start..range.start + 1
1212 } else {
1213 range.clone()
1214 }
1215 }),
1216 max_rows_between_yields,
1217 );
1218
1219 // Compute new suggestions for each line, but only include them in the result
1220 // if they differ from the old suggestion for that line.
1221 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1222 let mut language_indent_size = IndentSize::default();
1223 for new_edited_row_range in new_edited_row_ranges {
1224 let suggestions = snapshot
1225 .suggest_autoindents(new_edited_row_range.clone())
1226 .into_iter()
1227 .flatten();
1228 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1229 if let Some(suggestion) = suggestion {
1230 // Find the indent size based on the language for this row.
1231 while let Some((row, size)) = language_indent_sizes.peek() {
1232 if *row > new_row {
1233 break;
1234 }
1235 language_indent_size = *size;
1236 language_indent_sizes.next();
1237 }
1238
1239 let suggested_indent = indent_sizes
1240 .get(&suggestion.basis_row)
1241 .copied()
1242 .unwrap_or_else(|| {
1243 snapshot.indent_size_for_line(suggestion.basis_row)
1244 })
1245 .with_delta(suggestion.delta, language_indent_size);
1246 if old_suggestions.get(&new_row).map_or(
1247 true,
1248 |(old_indentation, was_within_error)| {
1249 suggested_indent != *old_indentation
1250 && (!suggestion.within_error || *was_within_error)
1251 },
1252 ) {
1253 indent_sizes.insert(new_row, suggested_indent);
1254 }
1255 }
1256 }
1257 yield_now().await;
1258 }
1259
1260 // For each block of inserted text, adjust the indentation of the remaining
1261 // lines of the block by the same amount as the first line was adjusted.
1262 if request.is_block_mode {
1263 for (row_range, original_indent_column) in
1264 row_ranges
1265 .into_iter()
1266 .filter_map(|(range, original_indent_column)| {
1267 if range.len() > 1 {
1268 Some((range, original_indent_column?))
1269 } else {
1270 None
1271 }
1272 })
1273 {
1274 let new_indent = indent_sizes
1275 .get(&row_range.start)
1276 .copied()
1277 .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
1278 let delta = new_indent.len as i64 - original_indent_column as i64;
1279 if delta != 0 {
1280 for row in row_range.skip(1) {
1281 indent_sizes.entry(row).or_insert_with(|| {
1282 let mut size = snapshot.indent_size_for_line(row);
1283 if size.kind == new_indent.kind {
1284 match delta.cmp(&0) {
1285 Ordering::Greater => size.len += delta as u32,
1286 Ordering::Less => {
1287 size.len = size.len.saturating_sub(-delta as u32)
1288 }
1289 Ordering::Equal => {}
1290 }
1291 }
1292 size
1293 });
1294 }
1295 }
1296 }
1297 }
1298 }
1299
1300 indent_sizes
1301 })
1302 }
1303
1304 fn apply_autoindents(
1305 &mut self,
1306 indent_sizes: BTreeMap<u32, IndentSize>,
1307 cx: &mut ModelContext<Self>,
1308 ) {
1309 self.autoindent_requests.clear();
1310
1311 let edits: Vec<_> = indent_sizes
1312 .into_iter()
1313 .filter_map(|(row, indent_size)| {
1314 let current_size = indent_size_for_line(self, row);
1315 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1316 })
1317 .collect();
1318
1319 self.edit(edits, None, cx);
1320 }
1321
1322 /// Create a minimal edit that will cause the given row to be indented
1323 /// with the given size. After applying this edit, the length of the line
1324 /// will always be at least `new_size.len`.
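    ///
    /// # Example (illustrative sketch, not a doctest)
    ///
    /// ```ignore
    /// let current = IndentSize { len: 2, kind: IndentKind::Space };
    /// let desired = IndentSize { len: 4, kind: IndentKind::Space };
    /// // Growing the indentation inserts the two missing spaces at column 0.
    /// let (range, text) = Buffer::edit_for_indent_size_adjustment(1, current, desired).unwrap();
    /// assert_eq!(range, Point::new(1, 0)..Point::new(1, 0));
    /// assert_eq!(text, "  ");
    /// ```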
1325 pub fn edit_for_indent_size_adjustment(
1326 row: u32,
1327 current_size: IndentSize,
1328 new_size: IndentSize,
1329 ) -> Option<(Range<Point>, String)> {
1330 if new_size.kind != current_size.kind {
1331 Some((
1332 Point::new(row, 0)..Point::new(row, current_size.len),
1333 iter::repeat(new_size.char())
1334 .take(new_size.len as usize)
1335 .collect::<String>(),
1336 ))
1337 } else {
            match new_size.len.cmp(&current_size.len) {
1339 Ordering::Greater => {
1340 let point = Point::new(row, 0);
1341 Some((
1342 point..point,
1343 iter::repeat(new_size.char())
1344 .take((new_size.len - current_size.len) as usize)
1345 .collect::<String>(),
1346 ))
1347 }
1348
1349 Ordering::Less => Some((
1350 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1351 String::new(),
1352 )),
1353
1354 Ordering::Equal => None,
1355 }
1356 }
1357 }
1358
1359 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1360 /// and the given new text.
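    ///
    /// # Example (illustrative sketch; `buffer` and `cx` are placeholders, and
    /// the executor plumbing around awaiting the task is elided)
    ///
    /// ```ignore
    /// let diff_task = buffer.diff("fn main() {\n}\n".to_string(), cx);
    /// // Once the task completes, apply the result back to the buffer. If the
    /// // buffer changed in the meantime, conflicting hunks are discarded.
    /// let diff = diff_task.await;
    /// buffer.apply_diff(diff, cx);
    /// ```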
1361 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1362 let old_text = self.as_rope().clone();
1363 let base_version = self.version();
1364 cx.background_executor()
1365 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1366 let old_text = old_text.to_string();
1367 let line_ending = LineEnding::detect(&new_text);
1368 LineEnding::normalize(&mut new_text);
1369
1370 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1371 let empty: Arc<str> = "".into();
1372
1373 let mut edits = Vec::new();
1374 let mut old_offset = 0;
1375 let mut new_offset = 0;
1376 let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
1377 for change in diff.iter_all_changes().map(Some).chain([None]) {
1378 if let Some(change) = &change {
1379 let len = change.value().len();
1380 match change.tag() {
1381 ChangeTag::Equal => {
1382 old_offset += len;
1383 new_offset += len;
1384 }
1385 ChangeTag::Delete => {
1386 let old_end_offset = old_offset + len;
1387 if let Some((last_old_range, _)) = &mut last_edit {
1388 last_old_range.end = old_end_offset;
1389 } else {
1390 last_edit =
1391 Some((old_offset..old_end_offset, new_offset..new_offset));
1392 }
1393 old_offset = old_end_offset;
1394 }
1395 ChangeTag::Insert => {
1396 let new_end_offset = new_offset + len;
1397 if let Some((_, last_new_range)) = &mut last_edit {
1398 last_new_range.end = new_end_offset;
1399 } else {
1400 last_edit =
1401 Some((old_offset..old_offset, new_offset..new_end_offset));
1402 }
1403 new_offset = new_end_offset;
1404 }
1405 }
1406 }
1407
1408 if let Some((old_range, new_range)) = &last_edit {
1409 if old_offset > old_range.end
1410 || new_offset > new_range.end
1411 || change.is_none()
1412 {
1413 let text = if new_range.is_empty() {
1414 empty.clone()
1415 } else {
1416 new_text[new_range.clone()].into()
1417 };
1418 edits.push((old_range.clone(), text));
1419 last_edit.take();
1420 }
1421 }
1422 }
1423
1424 Diff {
1425 base_version,
1426 line_ending,
1427 edits,
1428 }
1429 })
1430 }
1431
1432 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1434 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1435 let old_text = self.as_rope().clone();
1436 let line_ending = self.line_ending();
1437 let base_version = self.version();
1438 cx.background_executor().spawn(async move {
1439 let ranges = trailing_whitespace_ranges(&old_text);
1440 let empty = Arc::<str>::from("");
1441 Diff {
1442 base_version,
1443 line_ending,
1444 edits: ranges
1445 .into_iter()
1446 .map(|range| (range, empty.clone()))
1447 .collect(),
1448 }
1449 })
1450 }
1451
1452 /// Ensures that the buffer ends with a single newline character, and
1453 /// no other whitespace.
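    ///
    /// # Example (illustrative sketch; `buffer` and `cx` are placeholders)
    ///
    /// ```ignore
    /// buffer.set_text("fn main() {}  \n\n", cx);
    /// buffer.ensure_final_newline(cx);
    /// assert_eq!(buffer.text(), "fn main() {}\n");
    /// ```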
1454 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1455 let len = self.len();
1456 let mut offset = len;
1457 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1458 let non_whitespace_len = chunk
1459 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1460 .len();
1461 offset -= chunk.len();
1462 offset += non_whitespace_len;
1463 if non_whitespace_len != 0 {
1464 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1465 return;
1466 }
1467 break;
1468 }
1469 }
1470 self.edit([(offset..len, "\n")], None, cx);
1471 }
1472
    /// Applies a diff to the buffer. If the buffer has changed since the diff was
    /// calculated, the diff is adjusted to account for those changes, and any parts
    /// of the diff that conflict with those changes are discarded.
1476 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1477 // Check for any edits to the buffer that have occurred since this diff
1478 // was computed.
1479 let snapshot = self.snapshot();
1480 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1481 let mut delta = 0;
1482 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1483 while let Some(edit_since) = edits_since.peek() {
1484 // If the edit occurs after a diff hunk, then it does not
1485 // affect that hunk.
1486 if edit_since.old.start > range.end {
1487 break;
1488 }
1489 // If the edit precedes the diff hunk, then adjust the hunk
1490 // to reflect the edit.
1491 else if edit_since.old.end < range.start {
1492 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1493 edits_since.next();
1494 }
1495 // If the edit intersects a diff hunk, then discard that hunk.
1496 else {
1497 return None;
1498 }
1499 }
1500
1501 let start = (range.start as i64 + delta) as usize;
1502 let end = (range.end as i64 + delta) as usize;
1503 Some((start..end, new_text))
1504 });
1505
1506 self.start_transaction();
1507 self.text.set_line_ending(diff.line_ending);
1508 self.edit(adjusted_edits, None, cx);
1509 self.end_transaction(cx)
1510 }
1511
1512 fn changed_since_saved_version(&self) -> bool {
1513 self.edits_since::<usize>(&self.saved_version)
1514 .next()
1515 .is_some()
1516 }

    /// Checks if the buffer has unsaved changes.
1518 pub fn is_dirty(&self) -> bool {
1519 (self.has_conflict || self.changed_since_saved_version())
1520 || self
1521 .file
1522 .as_ref()
1523 .map_or(false, |file| file.is_deleted() || !file.is_created())
1524 }
1525
1526 /// Checks if the buffer and its file have both changed since the buffer
1527 /// was last saved or reloaded.
1528 pub fn has_conflict(&self) -> bool {
1529 (self.has_conflict || self.changed_since_saved_version())
1530 && self
1531 .file
1532 .as_ref()
1533 .map_or(false, |file| file.mtime() > self.saved_mtime)
1534 }
1535
1536 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1537 pub fn subscribe(&mut self) -> Subscription {
1538 self.text.subscribe()
1539 }
1540
1541 /// Starts a transaction, if one is not already in-progress. When undoing or
1542 /// redoing edits, all of the edits performed within a transaction are undone
1543 /// or redone together.
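    ///
    /// # Example (illustrative sketch; `buffer` and `cx` are placeholders, and
    /// the buffer's `undo` method is assumed to be available)
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "a")], None, cx);
    /// buffer.edit([(1..1, "b")], None, cx);
    /// buffer.end_transaction(cx);
    /// // Undoing now reverts both edits at once.
    /// buffer.undo(cx);
    /// ```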
1544 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1545 self.start_transaction_at(Instant::now())
1546 }
1547
1548 /// Starts a transaction, providing the current time. Subsequent transactions
1549 /// that occur within a short period of time will be grouped together. This
1550 /// is controlled by the buffer's undo grouping duration.
1551 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1552 self.transaction_depth += 1;
1553 if self.was_dirty_before_starting_transaction.is_none() {
1554 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1555 }
1556 self.text.start_transaction_at(now)
1557 }
1558
1559 /// Terminates the current transaction, if this is the outermost transaction.
1560 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1561 self.end_transaction_at(Instant::now(), cx)
1562 }
1563
1564 /// Terminates the current transaction, providing the current time. Subsequent transactions
1565 /// that occur within a short period of time will be grouped together. This
1566 /// is controlled by the buffer's undo grouping duration.
1567 pub fn end_transaction_at(
1568 &mut self,
1569 now: Instant,
1570 cx: &mut ModelContext<Self>,
1571 ) -> Option<TransactionId> {
1572 assert!(self.transaction_depth > 0);
1573 self.transaction_depth -= 1;
1574 let was_dirty = if self.transaction_depth == 0 {
1575 self.was_dirty_before_starting_transaction.take().unwrap()
1576 } else {
1577 false
1578 };
1579 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1580 self.did_edit(&start_version, was_dirty, cx);
1581 Some(transaction_id)
1582 } else {
1583 None
1584 }
1585 }
1586
1587 /// Manually add a transaction to the buffer's undo history.
1588 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1589 self.text.push_transaction(transaction, now);
1590 }
1591
1592 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
1594 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1595 self.text.finalize_last_transaction()
1596 }
1597
1598 /// Manually group all changes since a given transaction.
1599 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1600 self.text.group_until_transaction(transaction_id);
1601 }
1602
    /// Manually remove a transaction from the buffer's undo history.
1604 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1605 self.text.forget_transaction(transaction_id);
1606 }
1607
1608 /// Manually merge two adjacent transactions in the buffer's undo history.
1609 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1610 self.text.merge_transactions(transaction, destination);
1611 }
1612
1613 /// Waits for the buffer to receive operations with the given timestamps.
1614 pub fn wait_for_edits(
1615 &mut self,
1616 edit_ids: impl IntoIterator<Item = clock::Lamport>,
1617 ) -> impl Future<Output = Result<()>> {
1618 self.text.wait_for_edits(edit_ids)
1619 }
1620
1621 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
1622 pub fn wait_for_anchors(
1623 &mut self,
1624 anchors: impl IntoIterator<Item = Anchor>,
1625 ) -> impl 'static + Future<Output = Result<()>> {
1626 self.text.wait_for_anchors(anchors)
1627 }
1628
1629 /// Waits for the buffer to receive operations up to the given version.
1630 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1631 self.text.wait_for_version(version)
1632 }
1633
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`],
    /// or [`Buffer::wait_for_anchors`] to resolve with an error.
1636 pub fn give_up_waiting(&mut self) {
1637 self.text.give_up_waiting();
1638 }
1639
1640 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
1641 pub fn set_active_selections(
1642 &mut self,
1643 selections: Arc<[Selection<Anchor>]>,
1644 line_mode: bool,
1645 cursor_shape: CursorShape,
1646 cx: &mut ModelContext<Self>,
1647 ) {
1648 let lamport_timestamp = self.text.lamport_clock.tick();
1649 self.remote_selections.insert(
1650 self.text.replica_id(),
1651 SelectionSet {
1652 selections: selections.clone(),
1653 lamport_timestamp,
1654 line_mode,
1655 cursor_shape,
1656 },
1657 );
1658 self.send_operation(
1659 Operation::UpdateSelections {
1660 selections,
1661 line_mode,
1662 lamport_timestamp,
1663 cursor_shape,
1664 },
1665 cx,
1666 );
1667 }
1668
1669 /// Clears the selections, so that other replicas of the buffer do not see any selections for
1670 /// this replica.
1671 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1672 if self
1673 .remote_selections
1674 .get(&self.text.replica_id())
1675 .map_or(true, |set| !set.selections.is_empty())
1676 {
1677 self.set_active_selections(Arc::from([]), false, Default::default(), cx);
1678 }
1679 }
1680
1681 /// Replaces the buffer's entire text.
1682 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Lamport>
1683 where
1684 T: Into<Arc<str>>,
1685 {
1686 self.autoindent_requests.clear();
1687 self.edit([(0..self.len(), text)], None, cx)
1688 }
1689
1690 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
1691 /// delete, and a string of text to insert at that location.
1692 ///
1693 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
1694 /// request for the edited ranges, which will be processed when the buffer finishes
1695 /// parsing.
1696 ///
    /// Parsing takes place at the end of a transaction, and may run synchronously
    /// or asynchronously, depending on the changes.
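    ///
    /// # Example (illustrative sketch; `buffer` and `cx` are placeholders)
    ///
    /// ```ignore
    /// // Replace the first three characters and insert a new line at offset 10,
    /// // re-indenting each inserted line as it lands.
    /// buffer.edit(
    ///     [(0..3, "let"), (10..10, "\n    value\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```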
1699 pub fn edit<I, S, T>(
1700 &mut self,
1701 edits_iter: I,
1702 autoindent_mode: Option<AutoindentMode>,
1703 cx: &mut ModelContext<Self>,
1704 ) -> Option<clock::Lamport>
1705 where
1706 I: IntoIterator<Item = (Range<S>, T)>,
1707 S: ToOffset,
1708 T: Into<Arc<str>>,
1709 {
1710 // Skip invalid edits and coalesce contiguous ones.
1711 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1712 for (range, new_text) in edits_iter {
1713 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1714 if range.start > range.end {
1715 mem::swap(&mut range.start, &mut range.end);
1716 }
1717 let new_text = new_text.into();
1718 if !new_text.is_empty() || !range.is_empty() {
1719 if let Some((prev_range, prev_text)) = edits.last_mut() {
1720 if prev_range.end >= range.start {
1721 prev_range.end = cmp::max(prev_range.end, range.end);
1722 *prev_text = format!("{prev_text}{new_text}").into();
1723 } else {
1724 edits.push((range, new_text));
1725 }
1726 } else {
1727 edits.push((range, new_text));
1728 }
1729 }
1730 }
1731 if edits.is_empty() {
1732 return None;
1733 }
1734
1735 self.start_transaction();
1736 self.pending_autoindent.take();
1737 let autoindent_request = autoindent_mode
1738 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1739
1740 let edit_operation = self.text.edit(edits.iter().cloned());
1741 let edit_id = edit_operation.timestamp();
1742
1743 if let Some((before_edit, mode)) = autoindent_request {
1744 let mut delta = 0isize;
1745 let entries = edits
1746 .into_iter()
1747 .enumerate()
1748 .zip(&edit_operation.as_edit().unwrap().new_text)
1749 .map(|((ix, (range, _)), new_text)| {
1750 let new_text_length = new_text.len();
1751 let old_start = range.start.to_point(&before_edit);
1752 let new_start = (delta + range.start as isize) as usize;
1753 delta += new_text_length as isize - (range.end as isize - range.start as isize);
1754
1755 let mut range_of_insertion_to_indent = 0..new_text_length;
1756 let mut first_line_is_new = false;
1757 let mut original_indent_column = None;
1758
1759 // When inserting an entire line at the beginning of an existing line,
1760 // treat the insertion as new.
1761 if new_text.contains('\n')
1762 && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
1763 {
1764 first_line_is_new = true;
1765 }
1766
1767 // When inserting text starting with a newline, avoid auto-indenting the
1768 // previous line.
1769 if new_text.starts_with('\n') {
1770 range_of_insertion_to_indent.start += 1;
1771 first_line_is_new = true;
1772 }
1773
1774 // Avoid auto-indenting after the insertion.
1775 if let AutoindentMode::Block {
1776 original_indent_columns,
1777 } = &mode
1778 {
1779 original_indent_column =
1780 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
1781 indent_size_for_text(
1782 new_text[range_of_insertion_to_indent.clone()].chars(),
1783 )
1784 .len
1785 }));
1786 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
1787 range_of_insertion_to_indent.end -= 1;
1788 }
1789 }
1790
1791 AutoindentRequestEntry {
1792 first_line_is_new,
1793 original_indent_column,
1794 indent_size: before_edit.language_indent_size_at(range.start, cx),
1795 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
1796 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
1797 }
1798 })
1799 .collect();
1800
1801 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1802 before_edit,
1803 entries,
1804 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
1805 }));
1806 }
1807
1808 self.end_transaction(cx);
1809 self.send_operation(Operation::Buffer(edit_operation), cx);
1810 Some(edit_id)
1811 }
1812
1813 fn did_edit(
1814 &mut self,
1815 old_version: &clock::Global,
1816 was_dirty: bool,
1817 cx: &mut ModelContext<Self>,
1818 ) {
1819 if self.edits_since::<usize>(old_version).next().is_none() {
1820 return;
1821 }
1822
1823 self.reparse(cx);
1824
1825 cx.emit(Event::Edited);
1826 if was_dirty != self.is_dirty() {
1827 cx.emit(Event::DirtyChanged);
1828 }
1829 cx.notify();
1830 }
1831
1832 /// Applies the given remote operations to the buffer.
1833 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1834 &mut self,
1835 ops: I,
1836 cx: &mut ModelContext<Self>,
1837 ) -> Result<()> {
1838 self.pending_autoindent.take();
1839 let was_dirty = self.is_dirty();
1840 let old_version = self.version.clone();
1841 let mut deferred_ops = Vec::new();
1842 let buffer_ops = ops
1843 .into_iter()
1844 .filter_map(|op| match op {
1845 Operation::Buffer(op) => Some(op),
1846 _ => {
1847 if self.can_apply_op(&op) {
1848 self.apply_op(op, cx);
1849 } else {
1850 deferred_ops.push(op);
1851 }
1852 None
1853 }
1854 })
1855 .collect::<Vec<_>>();
1856 self.text.apply_ops(buffer_ops)?;
1857 self.deferred_ops.insert(deferred_ops);
1858 self.flush_deferred_ops(cx);
1859 self.did_edit(&old_version, was_dirty, cx);
1860 // Notify independently of whether the buffer was edited as the operations could include a
1861 // selection update.
1862 cx.notify();
1863 Ok(())
1864 }
1865
1866 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1867 let mut deferred_ops = Vec::new();
1868 for op in self.deferred_ops.drain().iter().cloned() {
1869 if self.can_apply_op(&op) {
1870 self.apply_op(op, cx);
1871 } else {
1872 deferred_ops.push(op);
1873 }
1874 }
1875 self.deferred_ops.insert(deferred_ops);
1876 }
1877
1878 fn can_apply_op(&self, operation: &Operation) -> bool {
1879 match operation {
1880 Operation::Buffer(_) => {
1881 unreachable!("buffer operations should never be applied at this layer")
1882 }
1883 Operation::UpdateDiagnostics {
1884 diagnostics: diagnostic_set,
1885 ..
1886 } => diagnostic_set.iter().all(|diagnostic| {
1887 self.text.can_resolve(&diagnostic.range.start)
1888 && self.text.can_resolve(&diagnostic.range.end)
1889 }),
1890 Operation::UpdateSelections { selections, .. } => selections
1891 .iter()
1892 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1893 Operation::UpdateCompletionTriggers { .. } => true,
1894 }
1895 }
1896
1897 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1898 match operation {
1899 Operation::Buffer(_) => {
1900 unreachable!("buffer operations should never be applied at this layer")
1901 }
1902 Operation::UpdateDiagnostics {
1903 server_id,
1904 diagnostics: diagnostic_set,
1905 lamport_timestamp,
1906 } => {
1907 let snapshot = self.snapshot();
1908 self.apply_diagnostic_update(
1909 server_id,
1910 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1911 lamport_timestamp,
1912 cx,
1913 );
1914 }
1915 Operation::UpdateSelections {
1916 selections,
1917 lamport_timestamp,
1918 line_mode,
1919 cursor_shape,
1920 } => {
1921 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
1922 if set.lamport_timestamp > lamport_timestamp {
1923 return;
1924 }
1925 }
1926
1927 self.remote_selections.insert(
1928 lamport_timestamp.replica_id,
1929 SelectionSet {
1930 selections,
1931 lamport_timestamp,
1932 line_mode,
1933 cursor_shape,
1934 },
1935 );
1936 self.text.lamport_clock.observe(lamport_timestamp);
1937 self.selections_update_count += 1;
1938 }
1939 Operation::UpdateCompletionTriggers {
1940 triggers,
1941 lamport_timestamp,
1942 } => {
1943 self.completion_triggers = triggers;
1944 self.text.lamport_clock.observe(lamport_timestamp);
1945 }
1946 }
1947 }
1948
1949 fn apply_diagnostic_update(
1950 &mut self,
1951 server_id: LanguageServerId,
1952 diagnostics: DiagnosticSet,
1953 lamport_timestamp: clock::Lamport,
1954 cx: &mut ModelContext<Self>,
1955 ) {
1956 if lamport_timestamp > self.diagnostics_timestamp {
1957 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
1958 if diagnostics.len() == 0 {
1959 if let Ok(ix) = ix {
1960 self.diagnostics.remove(ix);
1961 }
1962 } else {
1963 match ix {
1964 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
1965 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
1966 };
1967 }
1968 self.diagnostics_timestamp = lamport_timestamp;
1969 self.diagnostics_update_count += 1;
1970 self.text.lamport_clock.observe(lamport_timestamp);
1971 cx.notify();
1972 cx.emit(Event::DiagnosticsUpdated);
1973 }
1974 }
1975
1976 fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1977 cx.emit(Event::Operation(operation));
1978 }
1979
1980 /// Removes the selections for a given peer.
1981 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
1982 self.remote_selections.remove(&replica_id);
1983 cx.notify();
1984 }
1985
1986 /// Undoes the most recent transaction.
1987 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1988 let was_dirty = self.is_dirty();
1989 let old_version = self.version.clone();
1990
1991 if let Some((transaction_id, operation)) = self.text.undo() {
1992 self.send_operation(Operation::Buffer(operation), cx);
1993 self.did_edit(&old_version, was_dirty, cx);
1994 Some(transaction_id)
1995 } else {
1996 None
1997 }
1998 }
1999
2000 /// Manually undoes a specific transaction in the buffer's undo history.
2001 pub fn undo_transaction(
2002 &mut self,
2003 transaction_id: TransactionId,
2004 cx: &mut ModelContext<Self>,
2005 ) -> bool {
2006 let was_dirty = self.is_dirty();
2007 let old_version = self.version.clone();
2008 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2009 self.send_operation(Operation::Buffer(operation), cx);
2010 self.did_edit(&old_version, was_dirty, cx);
2011 true
2012 } else {
2013 false
2014 }
2015 }
2016
2017 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2018 pub fn undo_to_transaction(
2019 &mut self,
2020 transaction_id: TransactionId,
2021 cx: &mut ModelContext<Self>,
2022 ) -> bool {
2023 let was_dirty = self.is_dirty();
2024 let old_version = self.version.clone();
2025
2026 let operations = self.text.undo_to_transaction(transaction_id);
2027 let undone = !operations.is_empty();
2028 for operation in operations {
2029 self.send_operation(Operation::Buffer(operation), cx);
2030 }
2031 if undone {
2032 self.did_edit(&old_version, was_dirty, cx)
2033 }
2034 undone
2035 }
2036
    /// Redoes the most recently undone transaction.
2038 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2039 let was_dirty = self.is_dirty();
2040 let old_version = self.version.clone();
2041
2042 if let Some((transaction_id, operation)) = self.text.redo() {
2043 self.send_operation(Operation::Buffer(operation), cx);
2044 self.did_edit(&old_version, was_dirty, cx);
2045 Some(transaction_id)
2046 } else {
2047 None
2048 }
2049 }
2050
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2052 pub fn redo_to_transaction(
2053 &mut self,
2054 transaction_id: TransactionId,
2055 cx: &mut ModelContext<Self>,
2056 ) -> bool {
2057 let was_dirty = self.is_dirty();
2058 let old_version = self.version.clone();
2059
2060 let operations = self.text.redo_to_transaction(transaction_id);
2061 let redone = !operations.is_empty();
2062 for operation in operations {
2063 self.send_operation(Operation::Buffer(operation), cx);
2064 }
2065 if redone {
2066 self.did_edit(&old_version, was_dirty, cx)
2067 }
2068 redone
2069 }
2070
    /// Overrides the current completion triggers with the user-provided completion triggers.
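    ///
    /// A minimal sketch (marked `ignore`; assumes a `ModelContext<Buffer>` and trigger characters
    /// reported by a language server):
    ///
    /// ```ignore
    /// buffer.set_completion_triggers(vec![".".to_string(), "::".to_string()], cx);
    /// ```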
2072 pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
2073 self.completion_triggers = triggers.clone();
2074 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2075 self.send_operation(
2076 Operation::UpdateCompletionTriggers {
2077 triggers,
2078 lamport_timestamp: self.completion_triggers_timestamp,
2079 },
2080 cx,
2081 );
2082 cx.notify();
2083 }
2084
    /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2087 pub fn completion_triggers(&self) -> &[String] {
2088 &self.completion_triggers
2089 }
2090}
2091
2092#[doc(hidden)]
2093#[cfg(any(test, feature = "test-support"))]
2094impl Buffer {
2095 pub fn edit_via_marked_text(
2096 &mut self,
2097 marked_string: &str,
2098 autoindent_mode: Option<AutoindentMode>,
2099 cx: &mut ModelContext<Self>,
2100 ) {
2101 let edits = self.edits_for_marked_text(marked_string);
2102 self.edit(edits, autoindent_mode, cx);
2103 }
2104
2105 pub fn set_group_interval(&mut self, group_interval: Duration) {
2106 self.text.set_group_interval(group_interval);
2107 }
2108
2109 pub fn randomly_edit<T>(
2110 &mut self,
2111 rng: &mut T,
2112 old_range_count: usize,
2113 cx: &mut ModelContext<Self>,
2114 ) where
2115 T: rand::Rng,
2116 {
2117 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2118 let mut last_end = None;
2119 for _ in 0..old_range_count {
2120 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2121 break;
2122 }
2123
2124 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2125 let mut range = self.random_byte_range(new_start, rng);
2126 if rng.gen_bool(0.2) {
2127 mem::swap(&mut range.start, &mut range.end);
2128 }
2129 last_end = Some(range.end);
2130
2131 let new_text_len = rng.gen_range(0..10);
2132 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2133
2134 edits.push((range, new_text));
2135 }
2136 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2137 self.edit(edits, None, cx);
2138 }
2139
2140 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
2141 let was_dirty = self.is_dirty();
2142 let old_version = self.version.clone();
2143
2144 let ops = self.text.randomly_undo_redo(rng);
2145 if !ops.is_empty() {
2146 for op in ops {
2147 self.send_operation(Operation::Buffer(op), cx);
2148 self.did_edit(&old_version, was_dirty, cx);
2149 }
2150 }
2151 }
2152}
2153
2154impl EventEmitter<Event> for Buffer {}
2155
2156impl Deref for Buffer {
2157 type Target = TextBuffer;
2158
2159 fn deref(&self) -> &Self::Target {
2160 &self.text
2161 }
2162}
2163
2164impl BufferSnapshot {
    /// Returns [`IndentSize`] for a given line that respects user settings
    /// and language preferences.
    pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
        indent_size_for_line(self, row)
    }

2169 /// Returns [`IndentSize`] for a given position that respects user settings
2170 /// and language preferences.
2171 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
2172 let settings = language_settings(self.language_at(position), self.file(), cx);
2173 if settings.hard_tabs {
2174 IndentSize::tab()
2175 } else {
2176 IndentSize::spaces(settings.tab_size.get())
2177 }
2178 }
2179
2180 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2181 /// is passed in as `single_indent_size`.
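    ///
    /// A minimal sketch (marked `ignore`; assumes a snapshot of a buffer whose language has an
    /// indents query configured):
    ///
    /// ```ignore
    /// // Suggest indentation for rows 0..=4, using four spaces as one indent unit.
    /// let suggestions = snapshot.suggested_indents(0..5, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: {} column(s)", indent.len);
    /// }
    /// ```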
2182 pub fn suggested_indents(
2183 &self,
2184 rows: impl Iterator<Item = u32>,
2185 single_indent_size: IndentSize,
2186 ) -> BTreeMap<u32, IndentSize> {
2187 let mut result = BTreeMap::new();
2188
2189 for row_range in contiguous_ranges(rows, 10) {
2190 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2191 Some(suggestions) => suggestions,
2192 _ => break,
2193 };
2194
2195 for (row, suggestion) in row_range.zip(suggestions) {
2196 let indent_size = if let Some(suggestion) = suggestion {
2197 result
2198 .get(&suggestion.basis_row)
2199 .copied()
2200 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2201 .with_delta(suggestion.delta, single_indent_size)
2202 } else {
2203 self.indent_size_for_line(row)
2204 };
2205
2206 result.insert(row, indent_size);
2207 }
2208 }
2209
2210 result
2211 }
2212
2213 fn suggest_autoindents(
2214 &self,
2215 row_range: Range<u32>,
2216 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2217 let config = &self.language.as_ref()?.config;
2218 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2219
2220 // Find the suggested indentation ranges based on the syntax tree.
2221 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2222 let end = Point::new(row_range.end, 0);
2223 let range = (start..end).to_offset(&self.text);
2224 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2225 Some(&grammar.indents_config.as_ref()?.query)
2226 });
2227 let indent_configs = matches
2228 .grammars()
2229 .iter()
2230 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2231 .collect::<Vec<_>>();
2232
2233 let mut indent_ranges = Vec::<Range<Point>>::new();
2234 let mut outdent_positions = Vec::<Point>::new();
2235 while let Some(mat) = matches.peek() {
2236 let mut start: Option<Point> = None;
2237 let mut end: Option<Point> = None;
2238
2239 let config = &indent_configs[mat.grammar_index];
2240 for capture in mat.captures {
2241 if capture.index == config.indent_capture_ix {
2242 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2243 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2244 } else if Some(capture.index) == config.start_capture_ix {
2245 start = Some(Point::from_ts_point(capture.node.end_position()));
2246 } else if Some(capture.index) == config.end_capture_ix {
2247 end = Some(Point::from_ts_point(capture.node.start_position()));
2248 } else if Some(capture.index) == config.outdent_capture_ix {
2249 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2250 }
2251 }
2252
2253 matches.advance();
2254 if let Some((start, end)) = start.zip(end) {
2255 if start.row == end.row {
2256 continue;
2257 }
2258
2259 let range = start..end;
2260 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2261 Err(ix) => indent_ranges.insert(ix, range),
2262 Ok(ix) => {
2263 let prev_range = &mut indent_ranges[ix];
2264 prev_range.end = prev_range.end.max(range.end);
2265 }
2266 }
2267 }
2268 }
2269
2270 let mut error_ranges = Vec::<Range<Point>>::new();
2271 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2272 Some(&grammar.error_query)
2273 });
2274 while let Some(mat) = matches.peek() {
2275 let node = mat.captures[0].node;
2276 let start = Point::from_ts_point(node.start_position());
2277 let end = Point::from_ts_point(node.end_position());
2278 let range = start..end;
2279 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2280 Ok(ix) | Err(ix) => ix,
2281 };
2282 let mut end_ix = ix;
2283 while let Some(existing_range) = error_ranges.get(end_ix) {
2284 if existing_range.end < end {
2285 end_ix += 1;
2286 } else {
2287 break;
2288 }
2289 }
2290 error_ranges.splice(ix..end_ix, [range]);
2291 matches.advance();
2292 }
2293
2294 outdent_positions.sort();
2295 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and set its end to the outdent position.
2298 if let Some(range_to_truncate) = indent_ranges
2299 .iter_mut()
2300 .filter(|indent_range| indent_range.contains(&outdent_position))
2301 .last()
2302 {
2303 range_to_truncate.end = outdent_position;
2304 }
2305 }
2306
        // Find the suggested indentation increases and decreases based on regexes.
2308 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2309 self.for_each_line(
2310 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2311 ..Point::new(row_range.end, 0),
2312 |row, line| {
2313 if config
2314 .decrease_indent_pattern
2315 .as_ref()
2316 .map_or(false, |regex| regex.is_match(line))
2317 {
2318 indent_change_rows.push((row, Ordering::Less));
2319 }
2320 if config
2321 .increase_indent_pattern
2322 .as_ref()
2323 .map_or(false, |regex| regex.is_match(line))
2324 {
2325 indent_change_rows.push((row + 1, Ordering::Greater));
2326 }
2327 },
2328 );
2329
2330 let mut indent_changes = indent_change_rows.into_iter().peekable();
2331 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2332 prev_non_blank_row.unwrap_or(0)
2333 } else {
2334 row_range.start.saturating_sub(1)
2335 };
2336 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2337 Some(row_range.map(move |row| {
2338 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2339
2340 let mut indent_from_prev_row = false;
2341 let mut outdent_from_prev_row = false;
2342 let mut outdent_to_row = u32::MAX;
2343
2344 while let Some((indent_row, delta)) = indent_changes.peek() {
2345 match indent_row.cmp(&row) {
2346 Ordering::Equal => match delta {
2347 Ordering::Less => outdent_from_prev_row = true,
2348 Ordering::Greater => indent_from_prev_row = true,
2349 _ => {}
2350 },
2351
2352 Ordering::Greater => break,
2353 Ordering::Less => {}
2354 }
2355
2356 indent_changes.next();
2357 }
2358
2359 for range in &indent_ranges {
2360 if range.start.row >= row {
2361 break;
2362 }
2363 if range.start.row == prev_row && range.end > row_start {
2364 indent_from_prev_row = true;
2365 }
2366 if range.end > prev_row_start && range.end <= row_start {
2367 outdent_to_row = outdent_to_row.min(range.start.row);
2368 }
2369 }
2370
2371 let within_error = error_ranges
2372 .iter()
2373 .any(|e| e.start.row < row && e.end > row_start);
2374
2375 let suggestion = if outdent_to_row == prev_row
2376 || (outdent_from_prev_row && indent_from_prev_row)
2377 {
2378 Some(IndentSuggestion {
2379 basis_row: prev_row,
2380 delta: Ordering::Equal,
2381 within_error,
2382 })
2383 } else if indent_from_prev_row {
2384 Some(IndentSuggestion {
2385 basis_row: prev_row,
2386 delta: Ordering::Greater,
2387 within_error,
2388 })
2389 } else if outdent_to_row < prev_row {
2390 Some(IndentSuggestion {
2391 basis_row: outdent_to_row,
2392 delta: Ordering::Equal,
2393 within_error,
2394 })
2395 } else if outdent_from_prev_row {
2396 Some(IndentSuggestion {
2397 basis_row: prev_row,
2398 delta: Ordering::Less,
2399 within_error,
2400 })
2401 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2402 {
2403 Some(IndentSuggestion {
2404 basis_row: prev_row,
2405 delta: Ordering::Equal,
2406 within_error,
2407 })
2408 } else {
2409 None
2410 };
2411
2412 prev_row = row;
2413 prev_row_start = row_start;
2414 suggestion
2415 }))
2416 }
2417
2418 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2419 while row > 0 {
2420 row -= 1;
2421 if !self.is_line_blank(row) {
2422 return Some(row);
2423 }
2424 }
2425 None
2426 }
2427
2428 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
2429 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
2430 /// returned in chunks where each chunk has a single syntax highlighting style and
2431 /// diagnostic status.
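    ///
    /// A minimal sketch (marked `ignore`; `snapshot` is a [`BufferSnapshot`]):
    ///
    /// ```ignore
    /// // Walk the whole buffer, collecting the text of chunks that carry an error diagnostic.
    /// let mut error_text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     if chunk.diagnostic_severity == Some(DiagnosticSeverity::ERROR) {
    ///         error_text.push_str(chunk.text);
    ///     }
    /// }
    /// ```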
2432 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2433 let range = range.start.to_offset(self)..range.end.to_offset(self);
2434
2435 let mut syntax = None;
2436 let mut diagnostic_endpoints = Vec::new();
2437 if language_aware {
2438 let captures = self.syntax.captures(range.clone(), &self.text, |grammar| {
2439 grammar.highlights_query.as_ref()
2440 });
2441 let highlight_maps = captures
2442 .grammars()
2443 .into_iter()
2444 .map(|grammar| grammar.highlight_map())
2445 .collect();
2446 syntax = Some((captures, highlight_maps));
2447 for entry in self.diagnostics_in_range::<_, usize>(range.clone(), false) {
2448 diagnostic_endpoints.push(DiagnosticEndpoint {
2449 offset: entry.range.start,
2450 is_start: true,
2451 severity: entry.diagnostic.severity,
2452 is_unnecessary: entry.diagnostic.is_unnecessary,
2453 });
2454 diagnostic_endpoints.push(DiagnosticEndpoint {
2455 offset: entry.range.end,
2456 is_start: false,
2457 severity: entry.diagnostic.severity,
2458 is_unnecessary: entry.diagnostic.is_unnecessary,
2459 });
2460 }
2461 diagnostic_endpoints
2462 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
2463 }
2464
2465 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostic_endpoints)
2466 }
2467
    /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
2470 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2471 let mut line = String::new();
2472 let mut row = range.start.row;
2473 for chunk in self
2474 .as_rope()
2475 .chunks_in_range(range.to_offset(self))
2476 .chain(["\n"])
2477 {
2478 for (newline_ix, text) in chunk.split('\n').enumerate() {
2479 if newline_ix > 0 {
2480 callback(row, &line);
2481 row += 1;
2482 line.clear();
2483 }
2484 line.push_str(text);
2485 }
2486 }
2487 }
2488
2489 /// Iterates over every [`SyntaxLayer`] in the buffer.
2490 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
2491 self.syntax.layers_for_range(0..self.len(), &self.text)
2492 }
2493
    /// Returns the innermost [`SyntaxLayer`] containing the given position, if any.
    pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
2495 let offset = position.to_offset(self);
2496 self.syntax
2497 .layers_for_range(offset..offset, &self.text)
2498 .filter(|l| l.node().end_byte() > offset)
2499 .last()
2500 }
2501
2502 /// Returns the [Language] at the given location.
2503 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2504 self.syntax_layer_at(position)
2505 .map(|info| info.language)
2506 .or(self.language.as_ref())
2507 }
2508
2509 /// Returns the settings for the language at the given location.
2510 pub fn settings_at<'a, D: ToOffset>(
2511 &self,
2512 position: D,
2513 cx: &'a AppContext,
2514 ) -> &'a LanguageSettings {
2515 language_settings(self.language_at(position), self.file.as_ref(), cx)
2516 }
2517
2518 /// Returns the [LanguageScope] at the given location.
2519 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2520 let offset = position.to_offset(self);
2521 let mut scope = None;
2522 let mut smallest_range: Option<Range<usize>> = None;
2523
2524 // Use the layer that has the smallest node intersecting the given point.
2525 for layer in self.syntax.layers_for_range(offset..offset, &self.text) {
2526 let mut cursor = layer.node().walk();
2527
2528 let mut range = None;
2529 loop {
2530 let child_range = cursor.node().byte_range();
2531 if !child_range.to_inclusive().contains(&offset) {
2532 break;
2533 }
2534
2535 range = Some(child_range);
2536 if cursor.goto_first_child_for_byte(offset).is_none() {
2537 break;
2538 }
2539 }
2540
2541 if let Some(range) = range {
2542 if smallest_range
2543 .as_ref()
2544 .map_or(true, |smallest_range| range.len() < smallest_range.len())
2545 {
2546 smallest_range = Some(range);
2547 scope = Some(LanguageScope {
2548 language: layer.language.clone(),
2549 override_id: layer.override_id(offset, &self.text),
2550 });
2551 }
2552 }
2553 }
2554
2555 scope.or_else(|| {
2556 self.language.clone().map(|language| LanguageScope {
2557 language,
2558 override_id: None,
2559 })
2560 })
2561 }
2562
2563 /// Returns a tuple of the range and character kind of the word
2564 /// surrounding the given position.
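    ///
    /// A minimal sketch (marked `ignore`): for a snapshot containing `let foo = 1;`, the word
    /// around offset 5 is `foo`:
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(5);
    /// assert_eq!(range, 4..7); // the byte range of "foo"
    /// assert!(matches!(kind, Some(CharKind::Word)));
    /// ```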
2565 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2566 let mut start = start.to_offset(self);
2567 let mut end = start;
2568 let mut next_chars = self.chars_at(start).peekable();
2569 let mut prev_chars = self.reversed_chars_at(start).peekable();
2570
2571 let scope = self.language_scope_at(start);
2572 let kind = |c| char_kind(&scope, c);
2573 let word_kind = cmp::max(
2574 prev_chars.peek().copied().map(kind),
2575 next_chars.peek().copied().map(kind),
2576 );
2577
2578 for ch in prev_chars {
2579 if Some(kind(ch)) == word_kind && ch != '\n' {
2580 start -= ch.len_utf8();
2581 } else {
2582 break;
2583 }
2584 }
2585
2586 for ch in next_chars {
2587 if Some(kind(ch)) == word_kind && ch != '\n' {
2588 end += ch.len_utf8();
2589 } else {
2590 break;
2591 }
2592 }
2593
2594 (start..end, word_kind)
2595 }
2596
    /// Returns the range of the closest syntax node enclosing the given range.
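    ///
    /// A minimal sketch (marked `ignore`): calling this repeatedly with the previous result
    /// expands a selection outward, node by node:
    ///
    /// ```ignore
    /// let mut selection = 6..7;
    /// while let Some(larger) = snapshot.range_for_syntax_ancestor(selection.clone()) {
    ///     selection = larger;
    /// }
    /// // `selection` now covers the outermost syntax node containing the original range.
    /// ```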
2598 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2599 let range = range.start.to_offset(self)..range.end.to_offset(self);
2600 let mut result: Option<Range<usize>> = None;
2601 'outer: for layer in self.syntax.layers_for_range(range.clone(), &self.text) {
2602 let mut cursor = layer.node().walk();
2603
2604 // Descend to the first leaf that touches the start of the range,
2605 // and if the range is non-empty, extends beyond the start.
2606 while cursor.goto_first_child_for_byte(range.start).is_some() {
2607 if !range.is_empty() && cursor.node().end_byte() == range.start {
2608 cursor.goto_next_sibling();
2609 }
2610 }
2611
2612 // Ascend to the smallest ancestor that strictly contains the range.
2613 loop {
2614 let node_range = cursor.node().byte_range();
2615 if node_range.start <= range.start
2616 && node_range.end >= range.end
2617 && node_range.len() > range.len()
2618 {
2619 break;
2620 }
2621 if !cursor.goto_parent() {
2622 continue 'outer;
2623 }
2624 }
2625
2626 let left_node = cursor.node();
2627 let mut layer_result = left_node.byte_range();
2628
2629 // For an empty range, try to find another node immediately to the right of the range.
2630 if left_node.end_byte() == range.start {
2631 let mut right_node = None;
2632 while !cursor.goto_next_sibling() {
2633 if !cursor.goto_parent() {
2634 break;
2635 }
2636 }
2637
2638 while cursor.node().start_byte() == range.start {
2639 right_node = Some(cursor.node());
2640 if !cursor.goto_first_child() {
2641 break;
2642 }
2643 }
2644
2645 // If there is a candidate node on both sides of the (empty) range, then
2646 // decide between the two by favoring a named node over an anonymous token.
2647 // If both nodes are the same in that regard, favor the right one.
2648 if let Some(right_node) = right_node {
2649 if right_node.is_named() || !left_node.is_named() {
2650 layer_result = right_node.byte_range();
2651 }
2652 }
2653 }
2654
2655 if let Some(previous_result) = &result {
2656 if previous_result.len() < layer_result.len() {
2657 continue;
2658 }
2659 }
2660 result = Some(layer_result);
2661 }
2662
2663 result
2664 }
2665
2666 /// Returns the outline for the buffer.
2667 ///
2668 /// This method allows passing an optional [SyntaxTheme] to
2669 /// syntax-highlight the returned symbols.
2670 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2671 self.outline_items_containing(0..self.len(), true, theme)
2672 .map(Outline::new)
2673 }
2674
2675 /// Returns all the symbols that contain the given position.
2676 ///
2677 /// This method allows passing an optional [SyntaxTheme] to
2678 /// syntax-highlight the returned symbols.
2679 pub fn symbols_containing<T: ToOffset>(
2680 &self,
2681 position: T,
2682 theme: Option<&SyntaxTheme>,
2683 ) -> Option<Vec<OutlineItem<Anchor>>> {
2684 let position = position.to_offset(self);
2685 let mut items = self.outline_items_containing(
2686 position.saturating_sub(1)..self.len().min(position + 1),
2687 false,
2688 theme,
2689 )?;
2690 let mut prev_depth = None;
2691 items.retain(|item| {
2692 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
2693 prev_depth = Some(item.depth);
2694 result
2695 });
2696 Some(items)
2697 }
2698
2699 fn outline_items_containing(
2700 &self,
2701 range: Range<usize>,
2702 include_extra_context: bool,
2703 theme: Option<&SyntaxTheme>,
2704 ) -> Option<Vec<OutlineItem<Anchor>>> {
2705 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2706 grammar.outline_config.as_ref().map(|c| &c.query)
2707 });
2708 let configs = matches
2709 .grammars()
2710 .iter()
2711 .map(|g| g.outline_config.as_ref().unwrap())
2712 .collect::<Vec<_>>();
2713
2714 let mut stack = Vec::<Range<usize>>::new();
2715 let mut items = Vec::new();
2716 while let Some(mat) = matches.peek() {
2717 let config = &configs[mat.grammar_index];
2718 let item_node = mat.captures.iter().find_map(|cap| {
2719 if cap.index == config.item_capture_ix {
2720 Some(cap.node)
2721 } else {
2722 None
2723 }
2724 })?;
2725
2726 let item_range = item_node.byte_range();
2727 if item_range.end < range.start || item_range.start > range.end {
2728 matches.advance();
2729 continue;
2730 }
2731
2732 let mut buffer_ranges = Vec::new();
2733 for capture in mat.captures {
2734 let node_is_name;
2735 if capture.index == config.name_capture_ix {
2736 node_is_name = true;
2737 } else if Some(capture.index) == config.context_capture_ix
2738 || (Some(capture.index) == config.extra_context_capture_ix
2739 && include_extra_context)
2740 {
2741 node_is_name = false;
2742 } else {
2743 continue;
2744 }
2745
2746 let mut range = capture.node.start_byte()..capture.node.end_byte();
2747 let start = capture.node.start_position();
2748 if capture.node.end_position().row > start.row {
2749 range.end =
2750 range.start + self.line_len(start.row as u32) as usize - start.column;
2751 }
2752
2753 buffer_ranges.push((range, node_is_name));
2754 }
2755
2756 if buffer_ranges.is_empty() {
2757 continue;
2758 }
2759
2760 let mut text = String::new();
2761 let mut highlight_ranges = Vec::new();
2762 let mut name_ranges = Vec::new();
2763 let mut chunks = self.chunks(
2764 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
2765 true,
2766 );
2767 let mut last_buffer_range_end = 0;
2768 for (buffer_range, is_name) in buffer_ranges {
2769 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
2770 text.push(' ');
2771 }
2772 last_buffer_range_end = buffer_range.end;
2773 if is_name {
2774 let mut start = text.len();
2775 let end = start + buffer_range.len();
2776
                    // When multiple names are captured, the matchable text
                    // includes the whitespace in between the names.
2779 if !name_ranges.is_empty() {
2780 start -= 1;
2781 }
2782
2783 name_ranges.push(start..end);
2784 }
2785
2786 let mut offset = buffer_range.start;
2787 chunks.seek(offset);
2788 for mut chunk in chunks.by_ref() {
2789 if chunk.text.len() > buffer_range.end - offset {
2790 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
2791 offset = buffer_range.end;
2792 } else {
2793 offset += chunk.text.len();
2794 }
2795 let style = chunk
2796 .syntax_highlight_id
2797 .zip(theme)
2798 .and_then(|(highlight, theme)| highlight.style(theme));
2799 if let Some(style) = style {
2800 let start = text.len();
2801 let end = start + chunk.text.len();
2802 highlight_ranges.push((start..end, style));
2803 }
2804 text.push_str(chunk.text);
2805 if offset >= buffer_range.end {
2806 break;
2807 }
2808 }
2809 }
2810
2811 matches.advance();
2812 while stack.last().map_or(false, |prev_range| {
2813 prev_range.start > item_range.start || prev_range.end < item_range.end
2814 }) {
2815 stack.pop();
2816 }
2817 stack.push(item_range.clone());
2818
2819 items.push(OutlineItem {
2820 depth: stack.len() - 1,
2821 range: self.anchor_after(item_range.start)..self.anchor_before(item_range.end),
2822 text,
2823 highlight_ranges,
2824 name_ranges,
2825 })
2826 }
2827 Some(items)
2828 }
2829
2830 /// For each grammar in the language, runs the provided
2831 /// [tree_sitter::Query] against the given range.
2832 pub fn matches(
2833 &self,
2834 range: Range<usize>,
2835 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
2836 ) -> SyntaxMapMatches {
2837 self.syntax.matches(range, self, query)
2838 }
2839
2840 /// Returns bracket range pairs overlapping or adjacent to `range`
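    ///
    /// A minimal sketch (marked `ignore`; assumes a language with a brackets query, such as Rust):
    ///
    /// ```ignore
    /// // For a snapshot containing "fn f() { 1 }", this yields the `()` pair and the `{}` pair.
    /// for (open, close) in snapshot.bracket_ranges(0..snapshot.len()) {
    ///     println!("open {open:?} closes at {close:?}");
    /// }
    /// ```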
2841 pub fn bracket_ranges<T: ToOffset>(
2842 &self,
2843 range: Range<T>,
2844 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
2845 // Find bracket pairs that *inclusively* contain the given range.
2846 let range = range.start.to_offset(self).saturating_sub(1)
2847 ..self.len().min(range.end.to_offset(self) + 1);
2848
2849 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2850 grammar.brackets_config.as_ref().map(|c| &c.query)
2851 });
2852 let configs = matches
2853 .grammars()
2854 .iter()
2855 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
2856 .collect::<Vec<_>>();
2857
2858 iter::from_fn(move || {
2859 while let Some(mat) = matches.peek() {
2860 let mut open = None;
2861 let mut close = None;
2862 let config = &configs[mat.grammar_index];
2863 for capture in mat.captures {
2864 if capture.index == config.open_capture_ix {
2865 open = Some(capture.node.byte_range());
2866 } else if capture.index == config.close_capture_ix {
2867 close = Some(capture.node.byte_range());
2868 }
2869 }
2870
2871 matches.advance();
2872
2873 let Some((open, close)) = open.zip(close) else {
2874 continue;
2875 };
2876
2877 let bracket_range = open.start..=close.end;
2878 if !bracket_range.overlaps(&range) {
2879 continue;
2880 }
2881
2882 return Some((open, close));
2883 }
2884 None
2885 })
2886 }
2887
2888 /// Returns enclosing bracket ranges containing the given range
2889 pub fn enclosing_bracket_ranges<T: ToOffset>(
2890 &self,
2891 range: Range<T>,
2892 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
2893 let range = range.start.to_offset(self)..range.end.to_offset(self);
2894
2895 self.bracket_ranges(range.clone())
2896 .filter(move |(open, close)| open.start <= range.start && close.end >= range.end)
2897 }
2898
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
2902 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
2903 &self,
2904 range: Range<T>,
2905 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
2906 ) -> Option<(Range<usize>, Range<usize>)> {
2907 let range = range.start.to_offset(self)..range.end.to_offset(self);
2908
2909 // Get the ranges of the innermost pair of brackets.
2910 let mut result: Option<(Range<usize>, Range<usize>)> = None;
2911
2912 for (open, close) in self.enclosing_bracket_ranges(range.clone()) {
2913 if let Some(range_filter) = range_filter {
2914 if !range_filter(open.clone(), close.clone()) {
2915 continue;
2916 }
2917 }
2918
2919 let len = close.end - open.start;
2920
2921 if let Some((existing_open, existing_close)) = &result {
2922 let existing_len = existing_close.end - existing_open.start;
2923 if len > existing_len {
2924 continue;
2925 }
2926 }
2927
2928 result = Some((open, close));
2929 }
2930
2931 result
2932 }
2933
    /// Returns the byte ranges of any matches of the redaction query.
2935 /// The buffer can be associated with multiple languages, and the redaction query associated with each
2936 /// will be run on the relevant section of the buffer.
2937 pub fn redacted_ranges<T: ToOffset>(
2938 &self,
2939 range: Range<T>,
2940 ) -> impl Iterator<Item = Range<usize>> + '_ {
2941 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
2942 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
2943 grammar
2944 .redactions_config
2945 .as_ref()
2946 .map(|config| &config.query)
2947 });
2948
2949 let configs = syntax_matches
2950 .grammars()
2951 .iter()
2952 .map(|grammar| grammar.redactions_config.as_ref())
2953 .collect::<Vec<_>>();
2954
2955 iter::from_fn(move || {
2956 let redacted_range = syntax_matches
2957 .peek()
2958 .and_then(|mat| {
2959 configs[mat.grammar_index].and_then(|config| {
2960 mat.captures
2961 .iter()
2962 .find(|capture| capture.index == config.redaction_capture_ix)
2963 })
2964 })
2965 .map(|mat| mat.node.byte_range());
2966 syntax_matches.advance();
2967 redacted_range
2968 })
2969 }
2970
2971 /// Returns selections for remote peers intersecting the given range.
2972 #[allow(clippy::type_complexity)]
2973 pub fn remote_selections_in_range(
2974 &self,
2975 range: Range<Anchor>,
2976 ) -> impl Iterator<
2977 Item = (
2978 ReplicaId,
2979 bool,
2980 CursorShape,
2981 impl Iterator<Item = &Selection<Anchor>> + '_,
2982 ),
2983 > + '_ {
2984 self.remote_selections
2985 .iter()
2986 .filter(|(replica_id, set)| {
2987 **replica_id != self.text.replica_id() && !set.selections.is_empty()
2988 })
2989 .map(move |(replica_id, set)| {
2990 let start_ix = match set.selections.binary_search_by(|probe| {
2991 probe.end.cmp(&range.start, self).then(Ordering::Greater)
2992 }) {
2993 Ok(ix) | Err(ix) => ix,
2994 };
2995 let end_ix = match set.selections.binary_search_by(|probe| {
2996 probe.start.cmp(&range.end, self).then(Ordering::Less)
2997 }) {
2998 Ok(ix) | Err(ix) => ix,
2999 };
3000
3001 (
3002 *replica_id,
3003 set.line_mode,
3004 set.cursor_shape,
3005 set.selections[start_ix..end_ix].iter(),
3006 )
3007 })
3008 }
3009
3010 /// Whether the buffer contains any git changes.
3011 pub fn has_git_diff(&self) -> bool {
3012 !self.git_diff.is_empty()
3013 }
3014
3015 /// Returns all the Git diff hunks intersecting the given
3016 /// row range.
3017 pub fn git_diff_hunks_in_row_range(
3018 &self,
3019 range: Range<u32>,
3020 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3021 self.git_diff.hunks_in_row_range(range, self)
3022 }
3023
3024 /// Returns all the Git diff hunks intersecting the given
3025 /// range.
3026 pub fn git_diff_hunks_intersecting_range(
3027 &self,
3028 range: Range<Anchor>,
3029 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3030 self.git_diff.hunks_intersecting_range(range, self)
3031 }
3032
3033 /// Returns all the Git diff hunks intersecting the given
3034 /// range, in reverse order.
3035 pub fn git_diff_hunks_intersecting_range_rev(
3036 &self,
3037 range: Range<Anchor>,
3038 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3039 self.git_diff.hunks_intersecting_range_rev(range, self)
3040 }
3041
    /// Returns whether the buffer contains any diagnostics.
3043 pub fn has_diagnostics(&self) -> bool {
3044 !self.diagnostics.is_empty()
3045 }
3046
3047 /// Returns all the diagnostics intersecting the given range.
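    ///
    /// A minimal sketch (marked `ignore`; resolves each entry's anchors to byte offsets):
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     println!("{:?}: {}", entry.range, entry.diagnostic.message);
    /// }
    /// ```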
3048 pub fn diagnostics_in_range<'a, T, O>(
3049 &'a self,
3050 search_range: Range<T>,
3051 reversed: bool,
3052 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3053 where
3054 T: 'a + Clone + ToOffset,
3055 O: 'a + FromAnchor + Ord,
3056 {
3057 let mut iterators: Vec<_> = self
3058 .diagnostics
3059 .iter()
3060 .map(|(_, collection)| {
3061 collection
3062 .range::<T, O>(search_range.clone(), self, true, reversed)
3063 .peekable()
3064 })
3065 .collect();
3066
3067 std::iter::from_fn(move || {
3068 let (next_ix, _) = iterators
3069 .iter_mut()
3070 .enumerate()
3071 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
3072 .min_by(|(_, a), (_, b)| a.range.start.cmp(&b.range.start))?;
3073 iterators[next_ix].next()
3074 })
3075 }
3076
3077 /// Returns all the diagnostic groups associated with the given
3078 /// language server id. If no language server id is provided,
3079 /// all diagnostics groups are returned.
3080 pub fn diagnostic_groups(
3081 &self,
3082 language_server_id: Option<LanguageServerId>,
3083 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
3084 let mut groups = Vec::new();
3085
3086 if let Some(language_server_id) = language_server_id {
3087 if let Ok(ix) = self
3088 .diagnostics
3089 .binary_search_by_key(&language_server_id, |e| e.0)
3090 {
3091 self.diagnostics[ix]
3092 .1
3093 .groups(language_server_id, &mut groups, self);
3094 }
3095 } else {
3096 for (language_server_id, diagnostics) in self.diagnostics.iter() {
3097 diagnostics.groups(*language_server_id, &mut groups, self);
3098 }
3099 }
3100
3101 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
3102 let a_start = &group_a.entries[group_a.primary_ix].range.start;
3103 let b_start = &group_b.entries[group_b.primary_ix].range.start;
3104 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
3105 });
3106
3107 groups
3108 }
3109
3110 /// Returns an iterator over the diagnostics for the given group.
3111 pub fn diagnostic_group<'a, O>(
3112 &'a self,
3113 group_id: usize,
3114 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3115 where
3116 O: 'a + FromAnchor,
3117 {
3118 self.diagnostics
3119 .iter()
3120 .flat_map(move |(_, set)| set.group(group_id, self))
3121 }
3122
3123 /// The number of times diagnostics were updated.
3124 pub fn diagnostics_update_count(&self) -> usize {
3125 self.diagnostics_update_count
3126 }
3127
3128 /// The number of times the buffer was parsed.
3129 pub fn parse_count(&self) -> usize {
3130 self.parse_count
3131 }
3132
3133 /// The number of times selections were updated.
3134 pub fn selections_update_count(&self) -> usize {
3135 self.selections_update_count
3136 }
3137
    /// Returns a snapshot of the underlying file.
3139 pub fn file(&self) -> Option<&Arc<dyn File>> {
3140 self.file.as_ref()
3141 }
3142
3143 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
3144 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
3145 if let Some(file) = self.file() {
3146 if file.path().file_name().is_none() || include_root {
3147 Some(file.full_path(cx))
3148 } else {
3149 Some(file.path().to_path_buf())
3150 }
3151 } else {
3152 None
3153 }
3154 }
3155
3156 /// The number of times the underlying file was updated.
3157 pub fn file_update_count(&self) -> usize {
3158 self.file_update_count
3159 }
3160
3161 /// The number of times the git diff status was updated.
3162 pub fn git_diff_update_count(&self) -> usize {
3163 self.git_diff_update_count
3164 }
3165}
3166
3167fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
3168 indent_size_for_text(text.chars_at(Point::new(row, 0)))
3169}
3170
3171fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
3172 let mut result = IndentSize::spaces(0);
3173 for c in text {
3174 let kind = match c {
3175 ' ' => IndentKind::Space,
3176 '\t' => IndentKind::Tab,
3177 _ => break,
3178 };
3179 if result.len == 0 {
3180 result.kind = kind;
3181 }
3182 result.len += 1;
3183 }
3184 result
3185}
3186
3187impl Clone for BufferSnapshot {
3188 fn clone(&self) -> Self {
3189 Self {
3190 text: self.text.clone(),
3191 git_diff: self.git_diff.clone(),
3192 syntax: self.syntax.clone(),
3193 file: self.file.clone(),
3194 remote_selections: self.remote_selections.clone(),
3195 diagnostics: self.diagnostics.clone(),
3196 selections_update_count: self.selections_update_count,
3197 diagnostics_update_count: self.diagnostics_update_count,
3198 file_update_count: self.file_update_count,
3199 git_diff_update_count: self.git_diff_update_count,
3200 language: self.language.clone(),
3201 parse_count: self.parse_count,
3202 }
3203 }
3204}
3205
3206impl Deref for BufferSnapshot {
3207 type Target = text::BufferSnapshot;
3208
3209 fn deref(&self) -> &Self::Target {
3210 &self.text
3211 }
3212}
3213
3214unsafe impl<'a> Send for BufferChunks<'a> {}
3215
3216impl<'a> BufferChunks<'a> {
3217 pub(crate) fn new(
3218 text: &'a Rope,
3219 range: Range<usize>,
3220 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
3221 diagnostic_endpoints: Vec<DiagnosticEndpoint>,
3222 ) -> Self {
3223 let mut highlights = None;
3224 if let Some((captures, highlight_maps)) = syntax {
3225 highlights = Some(BufferChunkHighlights {
3226 captures,
3227 next_capture: None,
3228 stack: Default::default(),
3229 highlight_maps,
3230 })
3231 }
3232
3233 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
3234 let chunks = text.chunks_in_range(range.clone());
3235
3236 BufferChunks {
3237 range,
3238 chunks,
3239 diagnostic_endpoints,
3240 error_depth: 0,
3241 warning_depth: 0,
3242 information_depth: 0,
3243 hint_depth: 0,
3244 unnecessary_depth: 0,
3245 highlights,
3246 }
3247 }
3248
3249 /// Seeks to the given byte offset in the buffer.
3250 pub fn seek(&mut self, offset: usize) {
3251 self.range.start = offset;
3252 self.chunks.seek(self.range.start);
3253 if let Some(highlights) = self.highlights.as_mut() {
3254 highlights
3255 .stack
3256 .retain(|(end_offset, _)| *end_offset > offset);
3257 if let Some(capture) = &highlights.next_capture {
3258 if offset >= capture.node.start_byte() {
3259 let next_capture_end = capture.node.end_byte();
3260 if offset < next_capture_end {
3261 highlights.stack.push((
3262 next_capture_end,
3263 highlights.highlight_maps[capture.grammar_index].get(capture.index),
3264 ));
3265 }
3266 highlights.next_capture.take();
3267 }
3268 }
3269 highlights.captures.set_byte_range(self.range.clone());
3270 }
3271 }
3272
3273 /// The current byte offset in the buffer.
3274 pub fn offset(&self) -> usize {
3275 self.range.start
3276 }
3277
3278 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
3279 let depth = match endpoint.severity {
3280 DiagnosticSeverity::ERROR => &mut self.error_depth,
3281 DiagnosticSeverity::WARNING => &mut self.warning_depth,
3282 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
3283 DiagnosticSeverity::HINT => &mut self.hint_depth,
3284 _ => return,
3285 };
3286 if endpoint.is_start {
3287 *depth += 1;
3288 } else {
3289 *depth -= 1;
3290 }
3291
3292 if endpoint.is_unnecessary {
3293 if endpoint.is_start {
3294 self.unnecessary_depth += 1;
3295 } else {
3296 self.unnecessary_depth -= 1;
3297 }
3298 }
3299 }
3300
3301 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
3302 if self.error_depth > 0 {
3303 Some(DiagnosticSeverity::ERROR)
3304 } else if self.warning_depth > 0 {
3305 Some(DiagnosticSeverity::WARNING)
3306 } else if self.information_depth > 0 {
3307 Some(DiagnosticSeverity::INFORMATION)
3308 } else if self.hint_depth > 0 {
3309 Some(DiagnosticSeverity::HINT)
3310 } else {
3311 None
3312 }
3313 }
3314
3315 fn current_code_is_unnecessary(&self) -> bool {
3316 self.unnecessary_depth > 0
3317 }
3318}
3319
3320impl<'a> Iterator for BufferChunks<'a> {
3321 type Item = Chunk<'a>;
3322
3323 fn next(&mut self) -> Option<Self::Item> {
3324 let mut next_capture_start = usize::MAX;
3325 let mut next_diagnostic_endpoint = usize::MAX;
3326
3327 if let Some(highlights) = self.highlights.as_mut() {
3328 while let Some((parent_capture_end, _)) = highlights.stack.last() {
3329 if *parent_capture_end <= self.range.start {
3330 highlights.stack.pop();
3331 } else {
3332 break;
3333 }
3334 }
3335
3336 if highlights.next_capture.is_none() {
3337 highlights.next_capture = highlights.captures.next();
3338 }
3339
3340 while let Some(capture) = highlights.next_capture.as_ref() {
3341 if self.range.start < capture.node.start_byte() {
3342 next_capture_start = capture.node.start_byte();
3343 break;
3344 } else {
3345 let highlight_id =
3346 highlights.highlight_maps[capture.grammar_index].get(capture.index);
3347 highlights
3348 .stack
3349 .push((capture.node.end_byte(), highlight_id));
3350 highlights.next_capture = highlights.captures.next();
3351 }
3352 }
3353 }
3354
3355 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
3356 if endpoint.offset <= self.range.start {
3357 self.update_diagnostic_depths(endpoint);
3358 self.diagnostic_endpoints.next();
3359 } else {
3360 next_diagnostic_endpoint = endpoint.offset;
3361 break;
3362 }
3363 }
3364
3365 if let Some(chunk) = self.chunks.peek() {
3366 let chunk_start = self.range.start;
3367 let mut chunk_end = (self.chunks.offset() + chunk.len())
3368 .min(next_capture_start)
3369 .min(next_diagnostic_endpoint);
3370 let mut highlight_id = None;
3371 if let Some(highlights) = self.highlights.as_ref() {
3372 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
3373 chunk_end = chunk_end.min(*parent_capture_end);
3374 highlight_id = Some(*parent_highlight_id);
3375 }
3376 }
3377
3378 let slice =
3379 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
3380 self.range.start = chunk_end;
3381 if self.range.start == self.chunks.offset() + chunk.len() {
3382 self.chunks.next().unwrap();
3383 }
3384
3385 Some(Chunk {
3386 text: slice,
3387 syntax_highlight_id: highlight_id,
3388 diagnostic_severity: self.current_diagnostic_severity(),
3389 is_unnecessary: self.current_code_is_unnecessary(),
3390 ..Default::default()
3391 })
3392 } else {
3393 None
3394 }
3395 }
3396}
3397
3398impl operation_queue::Operation for Operation {
3399 fn lamport_timestamp(&self) -> clock::Lamport {
3400 match self {
3401 Operation::Buffer(_) => {
3402 unreachable!("buffer operations should never be deferred at this layer")
3403 }
3404 Operation::UpdateDiagnostics {
3405 lamport_timestamp, ..
3406 }
3407 | Operation::UpdateSelections {
3408 lamport_timestamp, ..
3409 }
3410 | Operation::UpdateCompletionTriggers {
3411 lamport_timestamp, ..
3412 } => *lamport_timestamp,
3413 }
3414 }
3415}
3416
3417impl Default for Diagnostic {
3418 fn default() -> Self {
3419 Self {
3420 source: Default::default(),
3421 code: None,
3422 severity: DiagnosticSeverity::ERROR,
3423 message: Default::default(),
3424 group_id: 0,
3425 is_primary: false,
3426 is_disk_based: false,
3427 is_unnecessary: false,
3428 }
3429 }
3430}
3431
3432impl IndentSize {
    /// Returns an [IndentSize] representing the given number of spaces.
3434 pub fn spaces(len: u32) -> Self {
3435 Self {
3436 len,
3437 kind: IndentKind::Space,
3438 }
3439 }
3440
3441 /// Returns an [IndentSize] representing a tab.
3442 pub fn tab() -> Self {
3443 Self {
3444 len: 1,
3445 kind: IndentKind::Tab,
3446 }
3447 }
3448
3449 /// An iterator over the characters represented by this [IndentSize].
3450 pub fn chars(&self) -> impl Iterator<Item = char> {
3451 iter::repeat(self.char()).take(self.len as usize)
3452 }
3453
3454 /// The character representation of this [IndentSize].
3455 pub fn char(&self) -> char {
3456 match self.kind {
3457 IndentKind::Space => ' ',
3458 IndentKind::Tab => '\t',
3459 }
3460 }
3461
3462 /// Consumes the current [IndentSize] and returns a new one that has
3463 /// been shrunk or enlarged by the given size along the given direction.
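    ///
    /// A minimal sketch of the arithmetic involved (marked `ignore`; assumes `std::cmp::Ordering`
    /// is in scope):
    ///
    /// ```ignore
    /// // Indent by one 4-space unit, then outdent by one unit.
    /// assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4)).len, 8);
    /// assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(4)).len, 0);
    /// ```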
3464 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
3465 match direction {
3466 Ordering::Less => {
3467 if self.kind == size.kind && self.len >= size.len {
3468 self.len -= size.len;
3469 }
3470 }
3471 Ordering::Equal => {}
3472 Ordering::Greater => {
3473 if self.len == 0 {
3474 self = size;
3475 } else if self.kind == size.kind {
3476 self.len += size.len;
3477 }
3478 }
3479 }
3480 self
3481 }
3482}
3483
3484impl Completion {
3485 /// A key that can be used to sort completions when displaying
3486 /// them to the user.
3487 pub fn sort_key(&self) -> (usize, &str) {
3488 let kind_key = match self.lsp_completion.kind {
3489 Some(lsp::CompletionItemKind::KEYWORD) => 0,
3490 Some(lsp::CompletionItemKind::VARIABLE) => 1,
3491 _ => 2,
3492 };
3493 (kind_key, &self.label.text[self.label.filter_range.clone()])
3494 }
3495
3496 /// Whether this completion is a snippet.
3497 pub fn is_snippet(&self) -> bool {
3498 self.lsp_completion.insert_text_format == Some(lsp::InsertTextFormat::SNIPPET)
3499 }
3500}
3501
3502#[cfg(any(test, feature = "test-support"))]
3503pub struct TestFile {
3504 pub path: Arc<Path>,
3505 pub root_name: String,
3506}
3507
3508#[cfg(any(test, feature = "test-support"))]
3509impl File for TestFile {
3510 fn path(&self) -> &Arc<Path> {
3511 &self.path
3512 }
3513
3514 fn full_path(&self, _: &gpui::AppContext) -> PathBuf {
3515 PathBuf::from(&self.root_name).join(self.path.as_ref())
3516 }
3517
3518 fn as_local(&self) -> Option<&dyn LocalFile> {
3519 None
3520 }
3521
3522 fn mtime(&self) -> Option<SystemTime> {
3523 unimplemented!()
3524 }
3525
3526 fn file_name<'a>(&'a self, _: &'a gpui::AppContext) -> &'a std::ffi::OsStr {
3527 self.path().file_name().unwrap_or(self.root_name.as_ref())
3528 }
3529
3530 fn worktree_id(&self) -> usize {
3531 0
3532 }
3533
3534 fn is_deleted(&self) -> bool {
3535 unimplemented!()
3536 }
3537
3538 fn as_any(&self) -> &dyn std::any::Any {
3539 unimplemented!()
3540 }
3541
3542 fn to_proto(&self) -> rpc::proto::File {
3543 unimplemented!()
3544 }
3545
3546 fn is_private(&self) -> bool {
3547 false
3548 }
3549}
3550
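/// Groups a sequence of `u32` values into contiguous ranges, starting a new range whenever a
/// value is not one greater than the previous value or when a range reaches `max_len`.
///
/// A minimal sketch (marked `ignore` because the function is crate-private):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 7, 8].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 7..9]);
/// ```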
3551pub(crate) fn contiguous_ranges(
3552 values: impl Iterator<Item = u32>,
3553 max_len: usize,
3554) -> impl Iterator<Item = Range<u32>> {
3555 let mut values = values;
3556 let mut current_range: Option<Range<u32>> = None;
3557 std::iter::from_fn(move || loop {
3558 if let Some(value) = values.next() {
3559 if let Some(range) = &mut current_range {
3560 if value == range.end && range.len() < max_len {
3561 range.end += 1;
3562 continue;
3563 }
3564 }
3565
3566 let prev_range = current_range.clone();
3567 current_range = Some(value..(value + 1));
3568 if prev_range.is_some() {
3569 return prev_range;
3570 }
3571 } else {
3572 return current_range.take();
3573 }
3574 })
3575}
3576
3577/// Returns the [CharKind] for the given character. When a scope is provided,
3578/// the function checks if the character is considered a word character
3579/// based on the language scope's word character settings.
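///
/// A minimal sketch (marked `ignore`):
///
/// ```ignore
/// assert!(matches!(char_kind(&None, 'x'), CharKind::Word));
/// assert!(matches!(char_kind(&None, ' '), CharKind::Whitespace));
/// assert!(matches!(char_kind(&None, '-'), CharKind::Punctuation));
/// // With a scope whose word characters include '-', '-' would be CharKind::Word instead.
/// ```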
3580pub fn char_kind(scope: &Option<LanguageScope>, c: char) -> CharKind {
3581 if c.is_whitespace() {
3582 return CharKind::Whitespace;
3583 } else if c.is_alphanumeric() || c == '_' {
3584 return CharKind::Word;
3585 }
3586
3587 if let Some(scope) = scope {
3588 if let Some(characters) = scope.word_characters() {
3589 if characters.contains(&c) {
3590 return CharKind::Word;
3591 }
3592 }
3593 }
3594
3595 CharKind::Punctuation
3596}
3597
3598/// Find all of the ranges of whitespace that occur at the ends of lines
3599/// in the given rope.
3600///
3601/// This could also be done with a regex search, but this implementation
3602/// avoids copying text.
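///
/// A minimal sketch (marked `ignore`; assumes `Rope` can be built from a `&str`):
///
/// ```ignore
/// let rope = Rope::from("let x = 1;  \nlet y = 2;\t\n");
/// // One byte range per line that ends in spaces or tabs.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![10..12, 23..24]);
/// ```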
3603pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
3604 let mut ranges = Vec::new();
3605
3606 let mut offset = 0;
3607 let mut prev_chunk_trailing_whitespace_range = 0..0;
3608 for chunk in rope.chunks() {
3609 let mut prev_line_trailing_whitespace_range = 0..0;
3610 for (i, line) in chunk.split('\n').enumerate() {
3611 let line_end_offset = offset + line.len();
3612 let trimmed_line_len = line.trim_end_matches(|c| matches!(c, ' ' | '\t')).len();
3613 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
3614
3615 if i == 0 && trimmed_line_len == 0 {
3616 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
3617 }
3618 if !prev_line_trailing_whitespace_range.is_empty() {
3619 ranges.push(prev_line_trailing_whitespace_range);
3620 }
3621
3622 offset = line_end_offset + 1;
3623 prev_line_trailing_whitespace_range = trailing_whitespace_range;
3624 }
3625
3626 offset -= 1;
3627 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
3628 }
3629
3630 if !prev_chunk_trailing_whitespace_range.is_empty() {
3631 ranges.push(prev_chunk_trailing_whitespace_range);
3632 }
3633
3634 ranges
3635}