1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 markdown::ParsedMarkdown,
5 proto, Grammar, Language, LanguageRegistry,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 language_settings::{language_settings, IndentGuideSettings, LanguageSettings},
10 markdown::parse_markdown,
11 outline::OutlineItem,
12 syntax_map::{
13 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
14 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
15 },
16 task_context::RunnableRange,
17 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
18};
19use anyhow::{anyhow, Context, Result};
20use async_watch as watch;
21pub use clock::ReplicaId;
22use futures::channel::oneshot;
23use gpui::{
24 AnyElement, AppContext, EventEmitter, HighlightStyle, ModelContext, Pixels, Task, TaskLabel,
25 WindowContext,
26};
27use lsp::LanguageServerId;
28use parking_lot::Mutex;
29use schemars::JsonSchema;
30use serde::{Deserialize, Serialize};
31use serde_json::Value;
32use settings::WorktreeId;
33use similar::{ChangeTag, TextDiff};
34use smallvec::SmallVec;
35use smol::future::yield_now;
36use std::{
37 any::Any,
38 cell::Cell,
39 cmp::{self, Ordering, Reverse},
40 collections::BTreeMap,
41 ffi::OsStr,
42 fmt,
43 future::Future,
44 iter::{self, Iterator, Peekable},
45 mem,
46 ops::{Deref, DerefMut, Range},
47 path::{Path, PathBuf},
48 str,
49 sync::{Arc, LazyLock},
50 time::{Duration, Instant, SystemTime},
51 vec,
52};
53use sum_tree::TreeMap;
54use text::operation_queue::OperationQueue;
55use text::*;
56pub use text::{
57 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
58 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
59 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
60 Transaction, TransactionId, Unclipped,
61};
62use theme::SyntaxTheme;
63#[cfg(any(test, feature = "test-support"))]
64use util::RandomCharIter;
65use util::RangeExt;
66
67#[cfg(any(test, feature = "test-support"))]
68pub use {tree_sitter_rust, tree_sitter_typescript};
69
70pub use lsp::DiagnosticSeverity;
71
72/// A label for the background task spawned by the buffer to compute
73/// a diff against the contents of its file.
74pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
75
/// Indicates whether a [Buffer] has permission to edit.
77#[derive(PartialEq, Clone, Copy, Debug)]
78pub enum Capability {
79 /// The buffer is a mutable replica.
80 ReadWrite,
81 /// The buffer is a read-only replica.
82 ReadOnly,
83}
84
85pub type BufferRow = u32;
86
87/// An in-memory representation of a source code file, including its text,
88/// syntax trees, git status, and diagnostics.
89pub struct Buffer {
90 text: TextBuffer,
91 diff_base: Option<Rope>,
92 git_diff: git::diff::BufferDiff,
93 file: Option<Arc<dyn File>>,
94 /// The mtime of the file when this buffer was last loaded from
95 /// or saved to disk.
96 saved_mtime: Option<SystemTime>,
97 /// The version vector when this buffer was last loaded from
98 /// or saved to disk.
99 saved_version: clock::Global,
100 preview_version: clock::Global,
101 transaction_depth: usize,
102 was_dirty_before_starting_transaction: Option<bool>,
103 reload_task: Option<Task<Result<()>>>,
104 language: Option<Arc<Language>>,
105 autoindent_requests: Vec<Arc<AutoindentRequest>>,
106 pending_autoindent: Option<Task<()>>,
107 sync_parse_timeout: Duration,
108 syntax_map: Mutex<SyntaxMap>,
109 parsing_in_background: bool,
110 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
111 non_text_state_update_count: usize,
112 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
113 remote_selections: TreeMap<ReplicaId, SelectionSet>,
114 diagnostics_timestamp: clock::Lamport,
115 completion_triggers: Vec<String>,
116 completion_triggers_timestamp: clock::Lamport,
117 deferred_ops: OperationQueue<Operation>,
118 capability: Capability,
119 has_conflict: bool,
120 diff_base_version: usize,
    /// Memoizes the result of checking for edits since `saved_version`.
    /// The cell stores (self.version, has_unsaved_edits) as of the most recent check.
123 has_unsaved_edits: Cell<(clock::Global, bool)>,
124}
125
126#[derive(Copy, Clone, Debug, PartialEq, Eq)]
127pub enum ParseStatus {
128 Idle,
129 Parsing,
130}
131
132/// An immutable, cheaply cloneable representation of a fixed
133/// state of a buffer.
134pub struct BufferSnapshot {
135 text: text::BufferSnapshot,
136 git_diff: git::diff::BufferDiff,
137 pub(crate) syntax: SyntaxSnapshot,
138 file: Option<Arc<dyn File>>,
139 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
140 remote_selections: TreeMap<ReplicaId, SelectionSet>,
141 language: Option<Arc<Language>>,
142 non_text_state_update_count: usize,
143}
144
145/// The kind and amount of indentation in a particular line. For now,
/// it is assumed that the indentation consists of a single repeated character.
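///
/// A small illustrative sketch of the two fields (marked `ignore`; it is not
/// meant to run as a doctest):
///
/// ```ignore
/// // Four spaces of indentation.
/// let spaces = IndentSize { len: 4, kind: IndentKind::Space };
/// // A single tab of indentation.
/// let tab = IndentSize { len: 1, kind: IndentKind::Tab };
/// assert_ne!(spaces.kind, tab.kind);
/// ```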
147#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
148pub struct IndentSize {
149 /// The number of bytes that comprise the indentation.
150 pub len: u32,
151 /// The kind of whitespace used for indentation.
152 pub kind: IndentKind,
153}
154
155/// A whitespace character that's used for indentation.
156#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
157pub enum IndentKind {
158 /// An ASCII space character.
159 #[default]
160 Space,
161 /// An ASCII tab character.
162 Tab,
163}
164
165/// The shape of a selection cursor.
166#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
167#[serde(rename_all = "snake_case")]
168pub enum CursorShape {
169 /// A vertical bar
170 #[default]
171 Bar,
172 /// A block that surrounds the following character
173 Block,
174 /// An underline that runs along the following character
175 Underscore,
176 /// A box drawn around the following character
177 Hollow,
178}
179
180#[derive(Clone, Debug)]
181struct SelectionSet {
182 line_mode: bool,
183 cursor_shape: CursorShape,
184 selections: Arc<[Selection<Anchor>]>,
185 lamport_timestamp: clock::Lamport,
186}
187
188/// A diagnostic associated with a certain range of a buffer.
189#[derive(Clone, Debug, PartialEq, Eq)]
190pub struct Diagnostic {
191 /// The name of the service that produced this diagnostic.
192 pub source: Option<String>,
193 /// A machine-readable code that identifies this diagnostic.
194 pub code: Option<String>,
195 /// Whether this diagnostic is a hint, warning, or error.
196 pub severity: DiagnosticSeverity,
197 /// The human-readable message associated with this diagnostic.
198 pub message: String,
199 /// An id that identifies the group to which this diagnostic belongs.
200 ///
201 /// When a language server produces a diagnostic with
202 /// one or more associated diagnostics, those diagnostics are all
203 /// assigned a single group id.
204 pub group_id: usize,
205 /// Whether this diagnostic is the primary diagnostic for its group.
206 ///
207 /// In a given group, the primary diagnostic is the top-level diagnostic
208 /// returned by the language server. The non-primary diagnostics are the
209 /// associated diagnostics.
210 pub is_primary: bool,
211 /// Whether this diagnostic is considered to originate from an analysis of
212 /// files on disk, as opposed to any unsaved buffer contents. This is a
213 /// property of a given diagnostic source, and is configured for a given
214 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
215 /// for the language server.
216 pub is_disk_based: bool,
217 /// Whether this diagnostic marks unnecessary code.
218 pub is_unnecessary: bool,
    /// Data from the language server that produced this diagnostic. This is passed
    /// back to the language server when code actions are requested for this diagnostic.
220 pub data: Option<Value>,
221}
222
223/// TODO - move this into the `project` crate and make it private.
224pub async fn prepare_completion_documentation(
225 documentation: &lsp::Documentation,
226 language_registry: &Arc<LanguageRegistry>,
227 language: Option<Arc<Language>>,
228) -> Documentation {
229 match documentation {
230 lsp::Documentation::String(text) => {
231 if text.lines().count() <= 1 {
232 Documentation::SingleLine(text.clone())
233 } else {
234 Documentation::MultiLinePlainText(text.clone())
235 }
236 }
237
238 lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
239 lsp::MarkupKind::PlainText => {
240 if value.lines().count() <= 1 {
241 Documentation::SingleLine(value.clone())
242 } else {
243 Documentation::MultiLinePlainText(value.clone())
244 }
245 }
246
247 lsp::MarkupKind::Markdown => {
248 let parsed = parse_markdown(value, language_registry, language).await;
249 Documentation::MultiLineMarkdown(parsed)
250 }
251 },
252 }
253}
254
255/// Documentation associated with a [`Completion`].
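///
/// A rough sketch of how [`prepare_completion_documentation`] maps LSP
/// documentation onto these variants (marked `ignore`; assumes a
/// `language_registry` and an async context are available):
///
/// ```ignore
/// let docs = prepare_completion_documentation(
///     &lsp::Documentation::String("Adds two numbers.".into()),
///     &language_registry,
///     None,
/// )
/// .await;
/// assert!(matches!(docs, Documentation::SingleLine(_)));
/// ```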
256#[derive(Clone, Debug)]
257pub enum Documentation {
258 /// There is no documentation for this completion.
259 Undocumented,
260 /// A single line of documentation.
261 SingleLine(String),
262 /// Multiple lines of plain text documentation.
263 MultiLinePlainText(String),
264 /// Markdown documentation.
265 MultiLineMarkdown(ParsedMarkdown),
266}
267
268/// An operation used to synchronize this buffer with its other replicas.
269#[derive(Clone, Debug, PartialEq)]
270pub enum Operation {
271 /// A text operation.
272 Buffer(text::Operation),
273
274 /// An update to the buffer's diagnostics.
275 UpdateDiagnostics {
276 /// The id of the language server that produced the new diagnostics.
277 server_id: LanguageServerId,
278 /// The diagnostics.
279 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
280 /// The buffer's lamport timestamp.
281 lamport_timestamp: clock::Lamport,
282 },
283
284 /// An update to the most recent selections in this buffer.
285 UpdateSelections {
286 /// The selections.
287 selections: Arc<[Selection<Anchor>]>,
288 /// The buffer's lamport timestamp.
289 lamport_timestamp: clock::Lamport,
290 /// Whether the selections are in 'line mode'.
291 line_mode: bool,
292 /// The [`CursorShape`] associated with these selections.
293 cursor_shape: CursorShape,
294 },
295
296 /// An update to the characters that should trigger autocompletion
297 /// for this buffer.
298 UpdateCompletionTriggers {
299 /// The characters that trigger autocompletion.
300 triggers: Vec<String>,
301 /// The buffer's lamport timestamp.
302 lamport_timestamp: clock::Lamport,
303 },
304}
305
306/// An event that occurs in a buffer.
307#[derive(Clone, Debug, PartialEq)]
308pub enum BufferEvent {
309 /// The buffer was changed in a way that must be
310 /// propagated to its other replicas.
311 Operation(Operation),
312 /// The buffer was edited.
313 Edited,
314 /// The buffer's `dirty` bit changed.
315 DirtyChanged,
316 /// The buffer was saved.
317 Saved,
318 /// The buffer's file was changed on disk.
319 FileHandleChanged,
320 /// The buffer was reloaded.
321 Reloaded,
322 /// The buffer's diff_base changed.
323 DiffBaseChanged,
324 /// Buffer's excerpts for a certain diff base were recalculated.
325 DiffUpdated,
326 /// The buffer's language was changed.
327 LanguageChanged,
328 /// The buffer's syntax trees were updated.
329 Reparsed,
330 /// The buffer's diagnostics were updated.
331 DiagnosticsUpdated,
332 /// The buffer gained or lost editing capabilities.
333 CapabilityChanged,
334 /// The buffer was explicitly requested to close.
335 Closed,
336 /// The buffer was discarded when closing.
337 Discarded,
338}
339
340/// The file associated with a buffer.
341pub trait File: Send + Sync {
342 /// Returns the [`LocalFile`] associated with this file, if the
343 /// file is local.
344 fn as_local(&self) -> Option<&dyn LocalFile>;
345
346 /// Returns whether this file is local.
347 fn is_local(&self) -> bool {
348 self.as_local().is_some()
349 }
350
351 /// Returns the file's mtime.
352 fn mtime(&self) -> Option<SystemTime>;
353
354 /// Returns the path of this file relative to the worktree's root directory.
355 fn path(&self) -> &Arc<Path>;
356
357 /// Returns the path of this file relative to the worktree's parent directory (this means it
358 /// includes the name of the worktree's root folder).
359 fn full_path(&self, cx: &AppContext) -> PathBuf;
360
361 /// Returns the last component of this handle's absolute path. If this handle refers to the root
362 /// of its worktree, then this method will return the name of the worktree itself.
363 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
364
365 /// Returns the id of the worktree to which this file belongs.
366 ///
367 /// This is needed for looking up project-specific settings.
368 fn worktree_id(&self, cx: &AppContext) -> WorktreeId;
369
370 /// Returns whether the file has been deleted.
371 fn is_deleted(&self) -> bool;
372
    /// Returns whether the file has existed on disk at some point.
374 fn is_created(&self) -> bool {
375 self.mtime().is_some()
376 }
377
378 /// Converts this file into an [`Any`] trait object.
379 fn as_any(&self) -> &dyn Any;
380
381 /// Converts this file into a protobuf message.
382 fn to_proto(&self, cx: &AppContext) -> rpc::proto::File;
383
384 /// Return whether Zed considers this to be a private file.
385 fn is_private(&self) -> bool;
386}
387
388/// The file associated with a buffer, in the case where the file is on the local disk.
389pub trait LocalFile: File {
390 /// Returns the absolute path of this file
391 fn abs_path(&self, cx: &AppContext) -> PathBuf;
392
393 /// Loads the file's contents from disk.
394 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
395
396 /// Returns true if the file should not be shared with collaborators.
397 fn is_private(&self, _: &AppContext) -> bool {
398 false
399 }
400}
401
402/// The auto-indent behavior associated with an editing operation.
403/// For some editing operations, each affected line of text has its
404/// indentation recomputed. For other operations, the entire block
405/// of edited text is adjusted uniformly.
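///
/// A rough sketch of passing each mode to [`Buffer::edit`] (marked `ignore`;
/// the `position` and `copied_text` bindings are assumed to exist, and a
/// `ModelContext` is required):
///
/// ```ignore
/// // Recompute the indentation of every inserted line.
/// buffer.edit(
///     [(position..position, "if ok {\n    run();\n}")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
///
/// // Shift a pasted block uniformly, preserving its internal structure.
/// buffer.edit(
///     [(position..position, copied_text)],
///     Some(AutoindentMode::Block { original_indent_columns: vec![4] }),
///     cx,
/// );
/// ```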
406#[derive(Clone, Debug)]
407pub enum AutoindentMode {
408 /// Indent each line of inserted text.
409 EachLine,
410 /// Apply the same indentation adjustment to all of the lines
411 /// in a given insertion.
412 Block {
413 /// The original indentation level of the first line of each
414 /// insertion, if it has been copied.
415 original_indent_columns: Vec<u32>,
416 },
417}
418
419#[derive(Clone)]
420struct AutoindentRequest {
421 before_edit: BufferSnapshot,
422 entries: Vec<AutoindentRequestEntry>,
423 is_block_mode: bool,
424}
425
426#[derive(Clone)]
427struct AutoindentRequestEntry {
428 /// A range of the buffer whose indentation should be adjusted.
429 range: Range<Anchor>,
430 /// Whether or not these lines should be considered brand new, for the
431 /// purpose of auto-indent. When text is not new, its indentation will
432 /// only be adjusted if the suggested indentation level has *changed*
433 /// since the edit was made.
434 first_line_is_new: bool,
435 indent_size: IndentSize,
436 original_indent_column: Option<u32>,
437}
438
439#[derive(Debug)]
440struct IndentSuggestion {
441 basis_row: u32,
442 delta: Ordering,
443 within_error: bool,
444}
445
446struct BufferChunkHighlights<'a> {
447 captures: SyntaxMapCaptures<'a>,
448 next_capture: Option<SyntaxMapCapture<'a>>,
449 stack: Vec<(usize, HighlightId)>,
450 highlight_maps: Vec<HighlightMap>,
451}
452
453/// An iterator that yields chunks of a buffer's text, along with their
454/// syntax highlights and diagnostic status.
455pub struct BufferChunks<'a> {
456 buffer_snapshot: Option<&'a BufferSnapshot>,
457 range: Range<usize>,
458 chunks: text::Chunks<'a>,
459 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
460 error_depth: usize,
461 warning_depth: usize,
462 information_depth: usize,
463 hint_depth: usize,
464 unnecessary_depth: usize,
465 highlights: Option<BufferChunkHighlights<'a>>,
466}
467
468/// A chunk of a buffer's text, along with its syntax highlight and
469/// diagnostic status.
470#[derive(Clone, Debug, Default)]
471pub struct Chunk<'a> {
472 /// The text of the chunk.
473 pub text: &'a str,
474 /// The syntax highlighting style of the chunk.
475 pub syntax_highlight_id: Option<HighlightId>,
476 /// The highlight style that has been applied to this chunk in
477 /// the editor.
478 pub highlight_style: Option<HighlightStyle>,
479 /// The severity of diagnostic associated with this chunk, if any.
480 pub diagnostic_severity: Option<DiagnosticSeverity>,
481 /// Whether this chunk of text is marked as unnecessary.
482 pub is_unnecessary: bool,
483 /// Whether this chunk of text was originally a tab character.
484 pub is_tab: bool,
485 /// An optional recipe for how the chunk should be presented.
486 pub renderer: Option<ChunkRenderer>,
487}
488
489/// A recipe for how the chunk should be presented.
490#[derive(Clone)]
491pub struct ChunkRenderer {
    /// Creates a custom element to represent this chunk.
493 pub render: Arc<dyn Send + Sync + Fn(&mut ChunkRendererContext) -> AnyElement>,
494 /// If true, the element is constrained to the shaped width of the text.
495 pub constrain_width: bool,
496}
497
498pub struct ChunkRendererContext<'a, 'b> {
499 pub context: &'a mut WindowContext<'b>,
500 pub max_width: Pixels,
501}
502
503impl fmt::Debug for ChunkRenderer {
504 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
505 f.debug_struct("ChunkRenderer")
506 .field("constrain_width", &self.constrain_width)
507 .finish()
508 }
509}
510
511impl<'a, 'b> Deref for ChunkRendererContext<'a, 'b> {
512 type Target = WindowContext<'b>;
513
514 fn deref(&self) -> &Self::Target {
515 self.context
516 }
517}
518
519impl<'a, 'b> DerefMut for ChunkRendererContext<'a, 'b> {
520 fn deref_mut(&mut self) -> &mut Self::Target {
521 self.context
522 }
523}
524
525/// A set of edits to a given version of a buffer, computed asynchronously.
526#[derive(Debug)]
527pub struct Diff {
528 pub(crate) base_version: clock::Global,
529 line_ending: LineEnding,
530 edits: Vec<(Range<usize>, Arc<str>)>,
531}
532
533#[derive(Clone, Copy)]
534pub(crate) struct DiagnosticEndpoint {
535 offset: usize,
536 is_start: bool,
537 severity: DiagnosticSeverity,
538 is_unnecessary: bool,
539}
540
541/// A class of characters, used for characterizing a run of text.
542#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
543pub enum CharKind {
544 /// Whitespace.
545 Whitespace,
546 /// Punctuation.
547 Punctuation,
548 /// Word.
549 Word,
550}
551
/// A runnable is a set of data about a buffer region that can be resolved into a task.
553pub struct Runnable {
554 pub tags: SmallVec<[RunnableTag; 1]>,
555 pub language: Arc<Language>,
556 pub buffer: BufferId,
557}
558
559#[derive(Clone, Debug, PartialEq)]
560pub struct IndentGuide {
561 pub buffer_id: BufferId,
562 pub start_row: BufferRow,
563 pub end_row: BufferRow,
564 pub depth: u32,
565 pub tab_size: u32,
566 pub settings: IndentGuideSettings,
567}
568
569impl IndentGuide {
570 pub fn indent_level(&self) -> u32 {
571 self.depth * self.tab_size
572 }
573}
574
575impl Buffer {
576 /// Create a new buffer with the given base text.
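    ///
    /// A minimal usage sketch (marked `ignore`; assumes a gpui `AppContext`,
    /// such as the one available in tests):
    ///
    /// ```ignore
    /// let buffer = cx.new_model(|cx| Buffer::local("hello world", cx));
    /// buffer.update(cx, |buffer, _cx| assert_eq!(buffer.text(), "hello world"));
    /// ```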
577 pub fn local<T: Into<String>>(base_text: T, cx: &mut ModelContext<Self>) -> Self {
578 Self::build(
579 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
580 None,
581 None,
582 Capability::ReadWrite,
583 )
584 }
585
586 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
587 pub fn local_normalized(
588 base_text_normalized: Rope,
589 line_ending: LineEnding,
590 cx: &mut ModelContext<Self>,
591 ) -> Self {
592 Self::build(
593 TextBuffer::new_normalized(
594 0,
595 cx.entity_id().as_non_zero_u64().into(),
596 line_ending,
597 base_text_normalized,
598 ),
599 None,
600 None,
601 Capability::ReadWrite,
602 )
603 }
604
605 /// Create a new buffer that is a replica of a remote buffer.
606 pub fn remote(
607 remote_id: BufferId,
608 replica_id: ReplicaId,
609 capability: Capability,
610 base_text: impl Into<String>,
611 ) -> Self {
612 Self::build(
613 TextBuffer::new(replica_id, remote_id, base_text.into()),
614 None,
615 None,
616 capability,
617 )
618 }
619
620 /// Create a new buffer that is a replica of a remote buffer, populating its
621 /// state from the given protobuf message.
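    ///
    /// A rough replication sketch (marked `ignore`; the transport is elided and
    /// `GUEST_REPLICA_ID` is a placeholder):
    ///
    /// ```ignore
    /// let state = host_buffer.read(cx).to_proto(cx);
    /// let guest_buffer = Buffer::from_proto(GUEST_REPLICA_ID, Capability::ReadWrite, state, None)?;
    /// ```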
622 pub fn from_proto(
623 replica_id: ReplicaId,
624 capability: Capability,
625 message: proto::BufferState,
626 file: Option<Arc<dyn File>>,
627 ) -> Result<Self> {
628 let buffer_id = BufferId::new(message.id)
629 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
630 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
631 let mut this = Self::build(buffer, message.diff_base, file, capability);
632 this.text.set_line_ending(proto::deserialize_line_ending(
633 rpc::proto::LineEnding::from_i32(message.line_ending)
634 .ok_or_else(|| anyhow!("missing line_ending"))?,
635 ));
636 this.saved_version = proto::deserialize_version(&message.saved_version);
637 this.saved_mtime = message.saved_mtime.map(|time| time.into());
638 Ok(this)
639 }
640
641 /// Serialize the buffer's state to a protobuf message.
642 pub fn to_proto(&self, cx: &AppContext) -> proto::BufferState {
643 proto::BufferState {
644 id: self.remote_id().into(),
645 file: self.file.as_ref().map(|f| f.to_proto(cx)),
646 base_text: self.base_text().to_string(),
647 diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
648 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
649 saved_version: proto::serialize_version(&self.saved_version),
650 saved_mtime: self.saved_mtime.map(|time| time.into()),
651 }
652 }
653
654 /// Serialize as protobufs all of the changes to the buffer since the given version.
655 pub fn serialize_ops(
656 &self,
657 since: Option<clock::Global>,
658 cx: &AppContext,
659 ) -> Task<Vec<proto::Operation>> {
660 let mut operations = Vec::new();
661 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
662
663 operations.extend(self.remote_selections.iter().map(|(_, set)| {
664 proto::serialize_operation(&Operation::UpdateSelections {
665 selections: set.selections.clone(),
666 lamport_timestamp: set.lamport_timestamp,
667 line_mode: set.line_mode,
668 cursor_shape: set.cursor_shape,
669 })
670 }));
671
672 for (server_id, diagnostics) in &self.diagnostics {
673 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
674 lamport_timestamp: self.diagnostics_timestamp,
675 server_id: *server_id,
676 diagnostics: diagnostics.iter().cloned().collect(),
677 }));
678 }
679
680 operations.push(proto::serialize_operation(
681 &Operation::UpdateCompletionTriggers {
682 triggers: self.completion_triggers.clone(),
683 lamport_timestamp: self.completion_triggers_timestamp,
684 },
685 ));
686
687 let text_operations = self.text.operations().clone();
688 cx.background_executor().spawn(async move {
689 let since = since.unwrap_or_default();
690 operations.extend(
691 text_operations
692 .iter()
693 .filter(|(_, op)| !since.observed(op.timestamp()))
694 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
695 );
696 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
697 operations
698 })
699 }
700
701 /// Assign a language to the buffer, returning the buffer.
702 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
703 self.set_language(Some(language), cx);
704 self
705 }
706
707 /// Returns the [Capability] of this buffer.
708 pub fn capability(&self) -> Capability {
709 self.capability
710 }
711
712 /// Whether this buffer can only be read.
713 pub fn read_only(&self) -> bool {
714 self.capability == Capability::ReadOnly
715 }
716
717 /// Builds a [Buffer] with the given underlying [TextBuffer], diff base, [File] and [Capability].
718 pub fn build(
719 buffer: TextBuffer,
720 diff_base: Option<String>,
721 file: Option<Arc<dyn File>>,
722 capability: Capability,
723 ) -> Self {
724 let saved_mtime = file.as_ref().and_then(|file| file.mtime());
725 let snapshot = buffer.snapshot();
726 let git_diff = git::diff::BufferDiff::new(&snapshot);
727 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
728 Self {
729 saved_mtime,
730 saved_version: buffer.version(),
731 preview_version: buffer.version(),
732 reload_task: None,
733 transaction_depth: 0,
734 was_dirty_before_starting_transaction: None,
735 has_unsaved_edits: Cell::new((buffer.version(), false)),
736 text: buffer,
737 diff_base: diff_base
738 .map(|mut raw_diff_base| {
739 LineEnding::normalize(&mut raw_diff_base);
740 raw_diff_base
741 })
742 .map(Rope::from),
743 diff_base_version: 0,
744 git_diff,
745 file,
746 capability,
747 syntax_map,
748 parsing_in_background: false,
749 non_text_state_update_count: 0,
750 sync_parse_timeout: Duration::from_millis(1),
751 parse_status: async_watch::channel(ParseStatus::Idle),
752 autoindent_requests: Default::default(),
753 pending_autoindent: Default::default(),
754 language: None,
755 remote_selections: Default::default(),
756 diagnostics: Default::default(),
757 diagnostics_timestamp: Default::default(),
758 completion_triggers: Default::default(),
759 completion_triggers_timestamp: Default::default(),
760 deferred_ops: OperationQueue::new(),
761 has_conflict: false,
762 }
763 }
764
765 /// Retrieve a snapshot of the buffer's current state. This is computationally
766 /// cheap, and allows reading from the buffer on a background thread.
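    ///
    /// A sketch of reading buffer text on a background thread (marked `ignore`;
    /// assumes a `ModelContext` and a valid offset `range`):
    ///
    /// ```ignore
    /// let snapshot = buffer.snapshot();
    /// cx.background_executor()
    ///     .spawn(async move {
    ///         let text: String = snapshot.text_for_range(range).collect();
    ///         text.len()
    ///     })
    ///     .detach();
    /// ```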
767 pub fn snapshot(&self) -> BufferSnapshot {
768 let text = self.text.snapshot();
769 let mut syntax_map = self.syntax_map.lock();
770 syntax_map.interpolate(&text);
771 let syntax = syntax_map.snapshot();
772
773 BufferSnapshot {
774 text,
775 syntax,
776 git_diff: self.git_diff.clone(),
777 file: self.file.clone(),
778 remote_selections: self.remote_selections.clone(),
779 diagnostics: self.diagnostics.clone(),
780 language: self.language.clone(),
781 non_text_state_update_count: self.non_text_state_update_count,
782 }
783 }
784
785 #[cfg(test)]
786 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
787 &self.text
788 }
789
790 /// Retrieve a snapshot of the buffer's raw text, without any
791 /// language-related state like the syntax tree or diagnostics.
792 pub fn text_snapshot(&self) -> text::BufferSnapshot {
793 self.text.snapshot()
794 }
795
796 /// The file associated with the buffer, if any.
797 pub fn file(&self) -> Option<&Arc<dyn File>> {
798 self.file.as_ref()
799 }
800
801 /// The version of the buffer that was last saved or reloaded from disk.
802 pub fn saved_version(&self) -> &clock::Global {
803 &self.saved_version
804 }
805
806 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
807 pub fn saved_mtime(&self) -> Option<SystemTime> {
808 self.saved_mtime
809 }
810
811 /// Assign a language to the buffer.
812 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
813 self.non_text_state_update_count += 1;
814 self.syntax_map.lock().clear(&self.text);
815 self.language = language;
816 self.reparse(cx);
817 cx.emit(BufferEvent::LanguageChanged);
818 }
819
820 /// Assign a language registry to the buffer. This allows the buffer to retrieve
821 /// other languages if parts of the buffer are written in different languages.
822 pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
823 self.syntax_map
824 .lock()
825 .set_language_registry(language_registry);
826 }
827
828 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
829 self.syntax_map.lock().language_registry()
830 }
831
832 /// Assign the buffer a new [Capability].
833 pub fn set_capability(&mut self, capability: Capability, cx: &mut ModelContext<Self>) {
834 self.capability = capability;
835 cx.emit(BufferEvent::CapabilityChanged)
836 }
837
838 /// This method is called to signal that the buffer has been saved.
839 pub fn did_save(
840 &mut self,
841 version: clock::Global,
842 mtime: Option<SystemTime>,
843 cx: &mut ModelContext<Self>,
844 ) {
845 self.saved_version = version;
846 self.has_unsaved_edits
847 .set((self.saved_version().clone(), false));
848 self.has_conflict = false;
849 self.saved_mtime = mtime;
850 cx.emit(BufferEvent::Saved);
851 cx.notify();
852 }
853
854 /// This method is called to signal that the buffer has been discarded.
855 pub fn discarded(&mut self, cx: &mut ModelContext<Self>) {
856 cx.emit(BufferEvent::Discarded);
857 cx.notify();
858 }
859
860 /// Reloads the contents of the buffer from disk.
861 pub fn reload(
862 &mut self,
863 cx: &mut ModelContext<Self>,
864 ) -> oneshot::Receiver<Option<Transaction>> {
865 let (tx, rx) = futures::channel::oneshot::channel();
866 let prev_version = self.text.version();
867 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
868 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
869 let file = this.file.as_ref()?.as_local()?;
870 Some((file.mtime(), file.load(cx)))
871 })?
872 else {
873 return Ok(());
874 };
875
876 let new_text = new_text.await?;
877 let diff = this
878 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
879 .await;
880 this.update(&mut cx, |this, cx| {
881 if this.version() == diff.base_version {
882 this.finalize_last_transaction();
883 this.apply_diff(diff, cx);
884 tx.send(this.finalize_last_transaction().cloned()).ok();
885 this.has_conflict = false;
886 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
887 } else {
888 if !diff.edits.is_empty()
889 || this
890 .edits_since::<usize>(&diff.base_version)
891 .next()
892 .is_some()
893 {
894 this.has_conflict = true;
895 }
896
897 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
898 }
899
900 this.reload_task.take();
901 })
902 }));
903 rx
904 }
905
906 /// This method is called to signal that the buffer has been reloaded.
907 pub fn did_reload(
908 &mut self,
909 version: clock::Global,
910 line_ending: LineEnding,
911 mtime: Option<SystemTime>,
912 cx: &mut ModelContext<Self>,
913 ) {
914 self.saved_version = version;
915 self.has_unsaved_edits
916 .set((self.saved_version.clone(), false));
917 self.text.set_line_ending(line_ending);
918 self.saved_mtime = mtime;
919 cx.emit(BufferEvent::Reloaded);
920 cx.notify();
921 }
922
923 /// Updates the [File] backing this buffer. This should be called when
924 /// the file has changed or has been deleted.
925 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut ModelContext<Self>) {
926 let mut file_changed = false;
927
928 if let Some(old_file) = self.file.as_ref() {
929 if new_file.path() != old_file.path() {
930 file_changed = true;
931 }
932
933 if new_file.is_deleted() {
934 if !old_file.is_deleted() {
935 file_changed = true;
936 if !self.is_dirty() {
937 cx.emit(BufferEvent::DirtyChanged);
938 }
939 }
940 } else {
941 let new_mtime = new_file.mtime();
942 if new_mtime != old_file.mtime() {
943 file_changed = true;
944
945 if !self.is_dirty() {
946 self.reload(cx).close();
947 }
948 }
949 }
950 } else {
951 file_changed = true;
952 };
953
954 self.file = Some(new_file);
955 if file_changed {
956 self.non_text_state_update_count += 1;
957 cx.emit(BufferEvent::FileHandleChanged);
958 cx.notify();
959 }
960 }
961
    /// Returns the current diff base; see [Buffer::set_diff_base].
963 pub fn diff_base(&self) -> Option<&Rope> {
964 self.diff_base.as_ref()
965 }
966
967 /// Sets the text that will be used to compute a Git diff
968 /// against the buffer text.
969 pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &mut ModelContext<Self>) {
970 self.diff_base = diff_base
971 .map(|mut raw_diff_base| {
972 LineEnding::normalize(&mut raw_diff_base);
973 raw_diff_base
974 })
975 .map(Rope::from);
976 self.diff_base_version += 1;
977 if let Some(recalc_task) = self.git_diff_recalc(cx) {
978 cx.spawn(|buffer, mut cx| async move {
979 recalc_task.await;
980 buffer
981 .update(&mut cx, |_, cx| {
982 cx.emit(BufferEvent::DiffBaseChanged);
983 })
984 .ok();
985 })
986 .detach();
987 }
988 }
989
    /// Returns a version number that is incremented each time a new diff base is assigned to the buffer.
991 pub fn diff_base_version(&self) -> usize {
992 self.diff_base_version
993 }
994
995 /// Recomputes the Git diff status.
996 pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) -> Option<Task<()>> {
997 let diff_base = self.diff_base.clone()?;
998 let snapshot = self.snapshot();
999
1000 let mut diff = self.git_diff.clone();
1001 let diff = cx.background_executor().spawn(async move {
1002 diff.update(&diff_base, &snapshot).await;
1003 diff
1004 });
1005
1006 Some(cx.spawn(|this, mut cx| async move {
1007 let buffer_diff = diff.await;
1008 this.update(&mut cx, |this, cx| {
1009 this.git_diff = buffer_diff;
1010 this.non_text_state_update_count += 1;
1011 cx.emit(BufferEvent::DiffUpdated);
1012 })
1013 .ok();
1014 }))
1015 }
1016
1017 /// Returns the primary [Language] assigned to this [Buffer].
1018 pub fn language(&self) -> Option<&Arc<Language>> {
1019 self.language.as_ref()
1020 }
1021
1022 /// Returns the [Language] at the given location.
1023 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1024 let offset = position.to_offset(self);
1025 self.syntax_map
1026 .lock()
1027 .layers_for_range(offset..offset, &self.text, false)
1028 .last()
1029 .map(|info| info.language.clone())
1030 .or_else(|| self.language.clone())
1031 }
1032
1033 /// An integer version number that accounts for all updates besides
1034 /// the buffer's text itself (which is versioned via a version vector).
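    ///
    /// A sketch of using this counter to cheaply detect non-text changes
    /// (marked `ignore`; `buffer` is assumed to be a `Model<Buffer>`):
    ///
    /// ```ignore
    /// let before = buffer.read(cx).non_text_state_update_count();
    /// // ...some time later...
    /// let changed = buffer.read(cx).non_text_state_update_count() != before;
    /// ```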
1035 pub fn non_text_state_update_count(&self) -> usize {
1036 self.non_text_state_update_count
1037 }
1038
1039 /// Whether the buffer is being parsed in the background.
1040 #[cfg(any(test, feature = "test-support"))]
1041 pub fn is_parsing(&self) -> bool {
1042 self.parsing_in_background
1043 }
1044
1045 /// Indicates whether the buffer contains any regions that may be
1046 /// written in a language that hasn't been loaded yet.
1047 pub fn contains_unknown_injections(&self) -> bool {
1048 self.syntax_map.lock().contains_unknown_injections()
1049 }
1050
1051 #[cfg(test)]
1052 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1053 self.sync_parse_timeout = timeout;
1054 }
1055
1056 /// Called after an edit to synchronize the buffer's main parse tree with
1057 /// the buffer's new underlying state.
1058 ///
1059 /// Locks the syntax map and interpolates the edits since the last reparse
1060 /// into the foreground syntax tree.
1061 ///
1062 /// Then takes a stable snapshot of the syntax map before unlocking it.
1063 /// The snapshot with the interpolated edits is sent to a background thread,
1064 /// where we ask Tree-sitter to perform an incremental parse.
1065 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting for the parse to complete. If it finishes within that timeout,
    /// we proceed synchronously.
    ///
    /// If we time out waiting for the parse, we spawn a second task that waits
    /// for the parse to complete, and we return with the interpolated tree still
    /// in the foreground. When the background parse completes, it calls back into
    /// the main thread and assigns the newly parsed state.
1074 ///
1075 /// If the buffer or grammar changed since the start of the background parse,
1076 /// initiate an additional reparse recursively. To avoid concurrent parses
1077 /// for the same buffer, we only initiate a new parse if we are not already
1078 /// parsing in the background.
1079 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1080 if self.parsing_in_background {
1081 return;
1082 }
1083 let language = if let Some(language) = self.language.clone() {
1084 language
1085 } else {
1086 return;
1087 };
1088
1089 let text = self.text_snapshot();
1090 let parsed_version = self.version();
1091
1092 let mut syntax_map = self.syntax_map.lock();
1093 syntax_map.interpolate(&text);
1094 let language_registry = syntax_map.language_registry();
1095 let mut syntax_snapshot = syntax_map.snapshot();
1096 drop(syntax_map);
1097
1098 let parse_task = cx.background_executor().spawn({
1099 let language = language.clone();
1100 let language_registry = language_registry.clone();
1101 async move {
1102 syntax_snapshot.reparse(&text, language_registry, language);
1103 syntax_snapshot
1104 }
1105 });
1106
1107 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1108 match cx
1109 .background_executor()
1110 .block_with_timeout(self.sync_parse_timeout, parse_task)
1111 {
1112 Ok(new_syntax_snapshot) => {
1113 self.did_finish_parsing(new_syntax_snapshot, cx);
1114 }
1115 Err(parse_task) => {
1116 self.parsing_in_background = true;
1117 cx.spawn(move |this, mut cx| async move {
1118 let new_syntax_map = parse_task.await;
1119 this.update(&mut cx, move |this, cx| {
1120 let grammar_changed =
1121 this.language.as_ref().map_or(true, |current_language| {
1122 !Arc::ptr_eq(&language, current_language)
1123 });
1124 let language_registry_changed = new_syntax_map
1125 .contains_unknown_injections()
1126 && language_registry.map_or(false, |registry| {
1127 registry.version() != new_syntax_map.language_registry_version()
1128 });
1129 let parse_again = language_registry_changed
1130 || grammar_changed
1131 || this.version.changed_since(&parsed_version);
1132 this.did_finish_parsing(new_syntax_map, cx);
1133 this.parsing_in_background = false;
1134 if parse_again {
1135 this.reparse(cx);
1136 }
1137 })
1138 .ok();
1139 })
1140 .detach();
1141 }
1142 }
1143 }
1144
1145 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
1146 self.non_text_state_update_count += 1;
1147 self.syntax_map.lock().did_parse(syntax_snapshot);
1148 self.request_autoindent(cx);
1149 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1150 cx.emit(BufferEvent::Reparsed);
1151 cx.notify();
1152 }
1153
1154 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1155 self.parse_status.1.clone()
1156 }
1157
1158 /// Assign to the buffer a set of diagnostics created by a given language server.
1159 pub fn update_diagnostics(
1160 &mut self,
1161 server_id: LanguageServerId,
1162 diagnostics: DiagnosticSet,
1163 cx: &mut ModelContext<Self>,
1164 ) {
1165 let lamport_timestamp = self.text.lamport_clock.tick();
1166 let op = Operation::UpdateDiagnostics {
1167 server_id,
1168 diagnostics: diagnostics.iter().cloned().collect(),
1169 lamport_timestamp,
1170 };
1171 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1172 self.send_operation(op, cx);
1173 }
1174
1175 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
1176 if let Some(indent_sizes) = self.compute_autoindents() {
1177 let indent_sizes = cx.background_executor().spawn(indent_sizes);
1178 match cx
1179 .background_executor()
1180 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1181 {
1182 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1183 Err(indent_sizes) => {
1184 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1185 let indent_sizes = indent_sizes.await;
1186 this.update(&mut cx, |this, cx| {
1187 this.apply_autoindents(indent_sizes, cx);
1188 })
1189 .ok();
1190 }));
1191 }
1192 }
1193 } else {
1194 self.autoindent_requests.clear();
1195 }
1196 }
1197
1198 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
1199 let max_rows_between_yields = 100;
1200 let snapshot = self.snapshot();
1201 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1202 return None;
1203 }
1204
1205 let autoindent_requests = self.autoindent_requests.clone();
1206 Some(async move {
1207 let mut indent_sizes = BTreeMap::new();
1208 for request in autoindent_requests {
1209 // Resolve each edited range to its row in the current buffer and in the
1210 // buffer before this batch of edits.
1211 let mut row_ranges = Vec::new();
1212 let mut old_to_new_rows = BTreeMap::new();
1213 let mut language_indent_sizes_by_new_row = Vec::new();
1214 for entry in &request.entries {
1215 let position = entry.range.start;
1216 let new_row = position.to_point(&snapshot).row;
1217 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1218 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1219
1220 if !entry.first_line_is_new {
1221 let old_row = position.to_point(&request.before_edit).row;
1222 old_to_new_rows.insert(old_row, new_row);
1223 }
1224 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1225 }
1226
1227 // Build a map containing the suggested indentation for each of the edited lines
1228 // with respect to the state of the buffer before these edits. This map is keyed
1229 // by the rows for these lines in the current state of the buffer.
1230 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1231 let old_edited_ranges =
1232 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1233 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1234 let mut language_indent_size = IndentSize::default();
1235 for old_edited_range in old_edited_ranges {
1236 let suggestions = request
1237 .before_edit
1238 .suggest_autoindents(old_edited_range.clone())
1239 .into_iter()
1240 .flatten();
1241 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1242 if let Some(suggestion) = suggestion {
1243 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1244
1245 // Find the indent size based on the language for this row.
1246 while let Some((row, size)) = language_indent_sizes.peek() {
1247 if *row > new_row {
1248 break;
1249 }
1250 language_indent_size = *size;
1251 language_indent_sizes.next();
1252 }
1253
1254 let suggested_indent = old_to_new_rows
1255 .get(&suggestion.basis_row)
1256 .and_then(|from_row| {
1257 Some(old_suggestions.get(from_row).copied()?.0)
1258 })
1259 .unwrap_or_else(|| {
1260 request
1261 .before_edit
1262 .indent_size_for_line(suggestion.basis_row)
1263 })
1264 .with_delta(suggestion.delta, language_indent_size);
1265 old_suggestions
1266 .insert(new_row, (suggested_indent, suggestion.within_error));
1267 }
1268 }
1269 yield_now().await;
1270 }
1271
1272 // In block mode, only compute indentation suggestions for the first line
1273 // of each insertion. Otherwise, compute suggestions for every inserted line.
1274 let new_edited_row_ranges = contiguous_ranges(
1275 row_ranges.iter().flat_map(|(range, _)| {
1276 if request.is_block_mode {
1277 range.start..range.start + 1
1278 } else {
1279 range.clone()
1280 }
1281 }),
1282 max_rows_between_yields,
1283 );
1284
1285 // Compute new suggestions for each line, but only include them in the result
1286 // if they differ from the old suggestion for that line.
1287 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1288 let mut language_indent_size = IndentSize::default();
1289 for new_edited_row_range in new_edited_row_ranges {
1290 let suggestions = snapshot
1291 .suggest_autoindents(new_edited_row_range.clone())
1292 .into_iter()
1293 .flatten();
1294 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1295 if let Some(suggestion) = suggestion {
1296 // Find the indent size based on the language for this row.
1297 while let Some((row, size)) = language_indent_sizes.peek() {
1298 if *row > new_row {
1299 break;
1300 }
1301 language_indent_size = *size;
1302 language_indent_sizes.next();
1303 }
1304
1305 let suggested_indent = indent_sizes
1306 .get(&suggestion.basis_row)
1307 .copied()
1308 .unwrap_or_else(|| {
1309 snapshot.indent_size_for_line(suggestion.basis_row)
1310 })
1311 .with_delta(suggestion.delta, language_indent_size);
1312 if old_suggestions.get(&new_row).map_or(
1313 true,
1314 |(old_indentation, was_within_error)| {
1315 suggested_indent != *old_indentation
1316 && (!suggestion.within_error || *was_within_error)
1317 },
1318 ) {
1319 indent_sizes.insert(new_row, suggested_indent);
1320 }
1321 }
1322 }
1323 yield_now().await;
1324 }
1325
1326 // For each block of inserted text, adjust the indentation of the remaining
1327 // lines of the block by the same amount as the first line was adjusted.
1328 if request.is_block_mode {
1329 for (row_range, original_indent_column) in
1330 row_ranges
1331 .into_iter()
1332 .filter_map(|(range, original_indent_column)| {
1333 if range.len() > 1 {
1334 Some((range, original_indent_column?))
1335 } else {
1336 None
1337 }
1338 })
1339 {
1340 let new_indent = indent_sizes
1341 .get(&row_range.start)
1342 .copied()
1343 .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
1344 let delta = new_indent.len as i64 - original_indent_column as i64;
1345 if delta != 0 {
1346 for row in row_range.skip(1) {
1347 indent_sizes.entry(row).or_insert_with(|| {
1348 let mut size = snapshot.indent_size_for_line(row);
1349 if size.kind == new_indent.kind {
1350 match delta.cmp(&0) {
1351 Ordering::Greater => size.len += delta as u32,
1352 Ordering::Less => {
1353 size.len = size.len.saturating_sub(-delta as u32)
1354 }
1355 Ordering::Equal => {}
1356 }
1357 }
1358 size
1359 });
1360 }
1361 }
1362 }
1363 }
1364 }
1365
1366 indent_sizes
1367 })
1368 }
1369
1370 fn apply_autoindents(
1371 &mut self,
1372 indent_sizes: BTreeMap<u32, IndentSize>,
1373 cx: &mut ModelContext<Self>,
1374 ) {
1375 self.autoindent_requests.clear();
1376
1377 let edits: Vec<_> = indent_sizes
1378 .into_iter()
1379 .filter_map(|(row, indent_size)| {
1380 let current_size = indent_size_for_line(self, row);
1381 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1382 })
1383 .collect();
1384
1385 let preserve_preview = self.preserve_preview();
1386 self.edit(edits, None, cx);
1387 if preserve_preview {
1388 self.refresh_preview();
1389 }
1390 }
1391
1392 /// Create a minimal edit that will cause the given row to be indented
1393 /// with the given size. After applying this edit, the length of the line
1394 /// will always be at least `new_size.len`.
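    ///
    /// A small illustration of the expected result (marked `ignore`; not a doctest):
    ///
    /// ```ignore
    /// // Growing row 3 from two spaces to four inserts two spaces at column 0.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize { len: 2, kind: IndentKind::Space },
    ///     IndentSize { len: 4, kind: IndentKind::Space },
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```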
1395 pub fn edit_for_indent_size_adjustment(
1396 row: u32,
1397 current_size: IndentSize,
1398 new_size: IndentSize,
1399 ) -> Option<(Range<Point>, String)> {
1400 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1402 Ordering::Greater => {
1403 let point = Point::new(row, 0);
1404 Some((
1405 point..point,
1406 iter::repeat(new_size.char())
1407 .take((new_size.len - current_size.len) as usize)
1408 .collect::<String>(),
1409 ))
1410 }
1411
1412 Ordering::Less => Some((
1413 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1414 String::new(),
1415 )),
1416
1417 Ordering::Equal => None,
1418 }
1419 } else {
1420 Some((
1421 Point::new(row, 0)..Point::new(row, current_size.len),
1422 iter::repeat(new_size.char())
1423 .take(new_size.len as usize)
1424 .collect::<String>(),
1425 ))
1426 }
1427 }
1428
1429 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1430 /// and the given new text.
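    ///
    /// A rough usage sketch, pairing this with [`Buffer::apply_diff`]
    /// (marked `ignore`; assumes an async context and a `Model<Buffer>`):
    ///
    /// ```ignore
    /// let diff = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx));
    /// let diff = diff.await;
    /// buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx));
    /// ```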
1431 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1432 let old_text = self.as_rope().clone();
1433 let base_version = self.version();
1434 cx.background_executor()
1435 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1436 let old_text = old_text.to_string();
1437 let line_ending = LineEnding::detect(&new_text);
1438 LineEnding::normalize(&mut new_text);
1439
1440 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1441 let empty: Arc<str> = Arc::default();
1442
1443 let mut edits = Vec::new();
1444 let mut old_offset = 0;
1445 let mut new_offset = 0;
1446 let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
1447 for change in diff.iter_all_changes().map(Some).chain([None]) {
1448 if let Some(change) = &change {
1449 let len = change.value().len();
1450 match change.tag() {
1451 ChangeTag::Equal => {
1452 old_offset += len;
1453 new_offset += len;
1454 }
1455 ChangeTag::Delete => {
1456 let old_end_offset = old_offset + len;
1457 if let Some((last_old_range, _)) = &mut last_edit {
1458 last_old_range.end = old_end_offset;
1459 } else {
1460 last_edit =
1461 Some((old_offset..old_end_offset, new_offset..new_offset));
1462 }
1463 old_offset = old_end_offset;
1464 }
1465 ChangeTag::Insert => {
1466 let new_end_offset = new_offset + len;
1467 if let Some((_, last_new_range)) = &mut last_edit {
1468 last_new_range.end = new_end_offset;
1469 } else {
1470 last_edit =
1471 Some((old_offset..old_offset, new_offset..new_end_offset));
1472 }
1473 new_offset = new_end_offset;
1474 }
1475 }
1476 }
1477
1478 if let Some((old_range, new_range)) = &last_edit {
1479 if old_offset > old_range.end
1480 || new_offset > new_range.end
1481 || change.is_none()
1482 {
1483 let text = if new_range.is_empty() {
1484 empty.clone()
1485 } else {
1486 new_text[new_range.clone()].into()
1487 };
1488 edits.push((old_range.clone(), text));
1489 last_edit.take();
1490 }
1491 }
1492 }
1493
1494 Diff {
1495 base_version,
1496 line_ending,
1497 edits,
1498 }
1499 })
1500 }
1501
    /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1504 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1505 let old_text = self.as_rope().clone();
1506 let line_ending = self.line_ending();
1507 let base_version = self.version();
1508 cx.background_executor().spawn(async move {
1509 let ranges = trailing_whitespace_ranges(&old_text);
1510 let empty = Arc::<str>::from("");
1511 Diff {
1512 base_version,
1513 line_ending,
1514 edits: ranges
1515 .into_iter()
1516 .map(|range| (range, empty.clone()))
1517 .collect(),
1518 }
1519 })
1520 }
1521
1522 /// Ensures that the buffer ends with a single newline character, and
1523 /// no other whitespace.
1524 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1525 let len = self.len();
1526 let mut offset = len;
1527 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1528 let non_whitespace_len = chunk
1529 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1530 .len();
1531 offset -= chunk.len();
1532 offset += non_whitespace_len;
1533 if non_whitespace_len != 0 {
1534 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1535 return;
1536 }
1537 break;
1538 }
1539 }
1540 self.edit([(offset..len, "\n")], None, cx);
1541 }
1542
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, the diff is adjusted to account for those changes, and any parts of the
    /// diff that conflict with those changes are discarded.
1546 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1547 // Check for any edits to the buffer that have occurred since this diff
1548 // was computed.
1549 let snapshot = self.snapshot();
1550 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1551 let mut delta = 0;
1552 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1553 while let Some(edit_since) = edits_since.peek() {
1554 // If the edit occurs after a diff hunk, then it does not
1555 // affect that hunk.
1556 if edit_since.old.start > range.end {
1557 break;
1558 }
1559 // If the edit precedes the diff hunk, then adjust the hunk
1560 // to reflect the edit.
1561 else if edit_since.old.end < range.start {
1562 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1563 edits_since.next();
1564 }
1565 // If the edit intersects a diff hunk, then discard that hunk.
1566 else {
1567 return None;
1568 }
1569 }
1570
1571 let start = (range.start as i64 + delta) as usize;
1572 let end = (range.end as i64 + delta) as usize;
1573 Some((start..end, new_text))
1574 });
1575
1576 self.start_transaction();
1577 self.text.set_line_ending(diff.line_ending);
1578 self.edit(adjusted_edits, None, cx);
1579 self.end_transaction(cx)
1580 }
1581
1582 fn has_unsaved_edits(&self) -> bool {
1583 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1584
1585 if last_version == self.version {
1586 self.has_unsaved_edits
1587 .set((last_version, has_unsaved_edits));
1588 return has_unsaved_edits;
1589 }
1590
1591 let has_edits = self.has_edits_since(&self.saved_version);
1592 self.has_unsaved_edits
1593 .set((self.version.clone(), has_edits));
1594 has_edits
1595 }
1596
1597 /// Checks if the buffer has unsaved changes.
1598 pub fn is_dirty(&self) -> bool {
1599 self.capability != Capability::ReadOnly
1600 && (self.has_conflict
1601 || self.has_unsaved_edits()
1602 || self
1603 .file
1604 .as_ref()
1605 .map_or(false, |file| file.is_deleted() || !file.is_created()))
1606 }
1607
1608 /// Checks if the buffer and its file have both changed since the buffer
1609 /// was last saved or reloaded.
1610 pub fn has_conflict(&self) -> bool {
1611 self.has_conflict
1612 || self.file.as_ref().map_or(false, |file| {
1613 file.mtime() > self.saved_mtime && self.has_unsaved_edits()
1614 })
1615 }
1616
1617 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1618 pub fn subscribe(&mut self) -> Subscription {
1619 self.text.subscribe()
1620 }
1621
1622 /// Starts a transaction, if one is not already in-progress. When undoing or
1623 /// redoing edits, all of the edits performed within a transaction are undone
1624 /// or redone together.
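    ///
    /// A sketch of grouping two edits into a single undo step (marked `ignore`;
    /// assumes a `ModelContext`):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "// header\n")], None, cx);
    /// buffer.edit([(buffer.len()..buffer.len(), "// footer\n")], None, cx);
    /// buffer.end_transaction(cx);
    /// // Undoing now reverts both edits at once.
    /// ```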
1625 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1626 self.start_transaction_at(Instant::now())
1627 }
1628
1629 /// Starts a transaction, providing the current time. Subsequent transactions
1630 /// that occur within a short period of time will be grouped together. This
1631 /// is controlled by the buffer's undo grouping duration.
1632 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1633 self.transaction_depth += 1;
1634 if self.was_dirty_before_starting_transaction.is_none() {
1635 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1636 }
1637 self.text.start_transaction_at(now)
1638 }
1639
1640 /// Terminates the current transaction, if this is the outermost transaction.
1641 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1642 self.end_transaction_at(Instant::now(), cx)
1643 }
1644
1645 /// Terminates the current transaction, providing the current time. Subsequent transactions
1646 /// that occur within a short period of time will be grouped together. This
1647 /// is controlled by the buffer's undo grouping duration.
1648 pub fn end_transaction_at(
1649 &mut self,
1650 now: Instant,
1651 cx: &mut ModelContext<Self>,
1652 ) -> Option<TransactionId> {
1653 assert!(self.transaction_depth > 0);
1654 self.transaction_depth -= 1;
1655 let was_dirty = if self.transaction_depth == 0 {
1656 self.was_dirty_before_starting_transaction.take().unwrap()
1657 } else {
1658 false
1659 };
1660 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1661 self.did_edit(&start_version, was_dirty, cx);
1662 Some(transaction_id)
1663 } else {
1664 None
1665 }
1666 }
1667
1668 /// Manually add a transaction to the buffer's undo history.
1669 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1670 self.text.push_transaction(transaction, now);
1671 }
1672
1673 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
1675 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1676 self.text.finalize_last_transaction()
1677 }
1678
1679 /// Manually group all changes since a given transaction.
1680 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1681 self.text.group_until_transaction(transaction_id);
1682 }
1683
1684 /// Manually remove a transaction from the buffer's undo history
1685 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1686 self.text.forget_transaction(transaction_id);
1687 }
1688
1689 /// Manually merge two adjacent transactions in the buffer's undo history.
1690 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1691 self.text.merge_transactions(transaction, destination);
1692 }
1693
1694 /// Waits for the buffer to receive operations with the given timestamps.
1695 pub fn wait_for_edits(
1696 &mut self,
1697 edit_ids: impl IntoIterator<Item = clock::Lamport>,
1698 ) -> impl Future<Output = Result<()>> {
1699 self.text.wait_for_edits(edit_ids)
1700 }
1701
1702 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
1703 pub fn wait_for_anchors(
1704 &mut self,
1705 anchors: impl IntoIterator<Item = Anchor>,
1706 ) -> impl 'static + Future<Output = Result<()>> {
1707 self.text.wait_for_anchors(anchors)
1708 }
1709
1710 /// Waits for the buffer to receive operations up to the given version.
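    ///
    /// A hedged sketch (ignored doctest), assuming `version` is a `clock::Global`
    /// received from a collaborator and that this runs in an async context:
    ///
    /// ```ignore
    /// let synced = buffer.wait_for_version(version);
    /// synced.await?;
    /// // All operations up to `version` have now been applied.
    /// ```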
1711 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1712 self.text.wait_for_version(version)
1713 }
1714
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
    /// or [`Buffer::wait_for_version`] to resolve with an error.
1717 pub fn give_up_waiting(&mut self) {
1718 self.text.give_up_waiting();
1719 }
1720
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
1722 pub fn set_active_selections(
1723 &mut self,
1724 selections: Arc<[Selection<Anchor>]>,
1725 line_mode: bool,
1726 cursor_shape: CursorShape,
1727 cx: &mut ModelContext<Self>,
1728 ) {
1729 let lamport_timestamp = self.text.lamport_clock.tick();
1730 self.remote_selections.insert(
1731 self.text.replica_id(),
1732 SelectionSet {
1733 selections: selections.clone(),
1734 lamport_timestamp,
1735 line_mode,
1736 cursor_shape,
1737 },
1738 );
1739 self.send_operation(
1740 Operation::UpdateSelections {
1741 selections,
1742 line_mode,
1743 lamport_timestamp,
1744 cursor_shape,
1745 },
1746 cx,
1747 );
1748 self.non_text_state_update_count += 1;
1749 cx.notify();
1750 }
1751
1752 /// Clears the selections, so that other replicas of the buffer do not see any selections for
1753 /// this replica.
1754 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1755 if self
1756 .remote_selections
1757 .get(&self.text.replica_id())
1758 .map_or(true, |set| !set.selections.is_empty())
1759 {
1760 self.set_active_selections(Arc::default(), false, Default::default(), cx);
1761 }
1762 }
1763
1764 /// Replaces the buffer's entire text.
1765 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Lamport>
1766 where
1767 T: Into<Arc<str>>,
1768 {
1769 self.autoindent_requests.clear();
1770 self.edit([(0..self.len(), text)], None, cx)
1771 }
1772
1773 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
1774 /// delete, and a string of text to insert at that location.
1775 ///
1776 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
1777 /// request for the edited ranges, which will be processed when the buffer finishes
1778 /// parsing.
1779 ///
1780 /// Parsing takes place at the end of a transaction, and may compute synchronously
1781 /// or asynchronously, depending on the changes.
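    ///
    /// A hedged sketch (ignored doctest) of replacing a range and appending text,
    /// assuming a `&mut ModelContext<Buffer>` named `cx` and a buffer containing
    /// `let x = 1;`:
    ///
    /// ```ignore
    /// // Replace `1` with `2`, and insert a newline at the end of the buffer.
    /// buffer.edit(
    ///     [(8..9, "2"), (10..10, "\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```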
1782 pub fn edit<I, S, T>(
1783 &mut self,
1784 edits_iter: I,
1785 autoindent_mode: Option<AutoindentMode>,
1786 cx: &mut ModelContext<Self>,
1787 ) -> Option<clock::Lamport>
1788 where
1789 I: IntoIterator<Item = (Range<S>, T)>,
1790 S: ToOffset,
1791 T: Into<Arc<str>>,
1792 {
1793 // Skip invalid edits and coalesce contiguous ones.
1794 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1795 for (range, new_text) in edits_iter {
1796 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1797 if range.start > range.end {
1798 mem::swap(&mut range.start, &mut range.end);
1799 }
1800 let new_text = new_text.into();
1801 if !new_text.is_empty() || !range.is_empty() {
1802 if let Some((prev_range, prev_text)) = edits.last_mut() {
1803 if prev_range.end >= range.start {
1804 prev_range.end = cmp::max(prev_range.end, range.end);
1805 *prev_text = format!("{prev_text}{new_text}").into();
1806 } else {
1807 edits.push((range, new_text));
1808 }
1809 } else {
1810 edits.push((range, new_text));
1811 }
1812 }
1813 }
1814 if edits.is_empty() {
1815 return None;
1816 }
1817
1818 self.start_transaction();
1819 self.pending_autoindent.take();
1820 let autoindent_request = autoindent_mode
1821 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1822
1823 let edit_operation = self.text.edit(edits.iter().cloned());
1824 let edit_id = edit_operation.timestamp();
1825
1826 if let Some((before_edit, mode)) = autoindent_request {
1827 let mut delta = 0isize;
1828 let entries = edits
1829 .into_iter()
1830 .enumerate()
1831 .zip(&edit_operation.as_edit().unwrap().new_text)
1832 .map(|((ix, (range, _)), new_text)| {
1833 let new_text_length = new_text.len();
1834 let old_start = range.start.to_point(&before_edit);
1835 let new_start = (delta + range.start as isize) as usize;
1836 delta += new_text_length as isize - (range.end as isize - range.start as isize);
1837
1838 let mut range_of_insertion_to_indent = 0..new_text_length;
1839 let mut first_line_is_new = false;
1840 let mut original_indent_column = None;
1841
1842 // When inserting an entire line at the beginning of an existing line,
1843 // treat the insertion as new.
1844 if new_text.contains('\n')
1845 && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
1846 {
1847 first_line_is_new = true;
1848 }
1849
1850 // When inserting text starting with a newline, avoid auto-indenting the
1851 // previous line.
1852 if new_text.starts_with('\n') {
1853 range_of_insertion_to_indent.start += 1;
1854 first_line_is_new = true;
1855 }
1856
1857 // Avoid auto-indenting after the insertion.
1858 if let AutoindentMode::Block {
1859 original_indent_columns,
1860 } = &mode
1861 {
1862 original_indent_column =
1863 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
1864 indent_size_for_text(
1865 new_text[range_of_insertion_to_indent.clone()].chars(),
1866 )
1867 .len
1868 }));
1869 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
1870 range_of_insertion_to_indent.end -= 1;
1871 }
1872 }
1873
1874 AutoindentRequestEntry {
1875 first_line_is_new,
1876 original_indent_column,
1877 indent_size: before_edit.language_indent_size_at(range.start, cx),
1878 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
1879 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
1880 }
1881 })
1882 .collect();
1883
1884 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1885 before_edit,
1886 entries,
1887 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
1888 }));
1889 }
1890
1891 self.end_transaction(cx);
1892 self.send_operation(Operation::Buffer(edit_operation), cx);
1893 Some(edit_id)
1894 }
1895
1896 fn did_edit(
1897 &mut self,
1898 old_version: &clock::Global,
1899 was_dirty: bool,
1900 cx: &mut ModelContext<Self>,
1901 ) {
1902 if self.edits_since::<usize>(old_version).next().is_none() {
1903 return;
1904 }
1905
1906 self.reparse(cx);
1907
1908 cx.emit(BufferEvent::Edited);
1909 if was_dirty != self.is_dirty() {
1910 cx.emit(BufferEvent::DirtyChanged);
1911 }
1912 cx.notify();
1913 }
1914
    /// Inserts newlines at the given position to create an empty line, returning
    /// the start of the new line. Additional empty lines can be requested above
    /// and below the line starting at the returned point.
1917 pub fn insert_empty_line(
1918 &mut self,
1919 position: impl ToPoint,
1920 space_above: bool,
1921 space_below: bool,
1922 cx: &mut ModelContext<Self>,
1923 ) -> Point {
1924 let mut position = position.to_point(self);
1925
1926 self.start_transaction();
1927
1928 self.edit(
1929 [(position..position, "\n")],
1930 Some(AutoindentMode::EachLine),
1931 cx,
1932 );
1933
1934 if position.column > 0 {
1935 position += Point::new(1, 0);
1936 }
1937
1938 if !self.is_line_blank(position.row) {
1939 self.edit(
1940 [(position..position, "\n")],
1941 Some(AutoindentMode::EachLine),
1942 cx,
1943 );
1944 }
1945
1946 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
1947 self.edit(
1948 [(position..position, "\n")],
1949 Some(AutoindentMode::EachLine),
1950 cx,
1951 );
1952 position.row += 1;
1953 }
1954
1955 if space_below
1956 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
1957 {
1958 self.edit(
1959 [(position..position, "\n")],
1960 Some(AutoindentMode::EachLine),
1961 cx,
1962 );
1963 }
1964
1965 self.end_transaction(cx);
1966
1967 position
1968 }
1969
1970 /// Applies the given remote operations to the buffer.
1971 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1972 &mut self,
1973 ops: I,
1974 cx: &mut ModelContext<Self>,
1975 ) {
1976 self.pending_autoindent.take();
1977 let was_dirty = self.is_dirty();
1978 let old_version = self.version.clone();
1979 let mut deferred_ops = Vec::new();
1980 let buffer_ops = ops
1981 .into_iter()
1982 .filter_map(|op| match op {
1983 Operation::Buffer(op) => Some(op),
1984 _ => {
1985 if self.can_apply_op(&op) {
1986 self.apply_op(op, cx);
1987 } else {
1988 deferred_ops.push(op);
1989 }
1990 None
1991 }
1992 })
1993 .collect::<Vec<_>>();
1994 self.text.apply_ops(buffer_ops);
1995 self.deferred_ops.insert(deferred_ops);
1996 self.flush_deferred_ops(cx);
1997 self.did_edit(&old_version, was_dirty, cx);
1998 // Notify independently of whether the buffer was edited as the operations could include a
1999 // selection update.
2000 cx.notify();
2001 }
2002
2003 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
2004 let mut deferred_ops = Vec::new();
2005 for op in self.deferred_ops.drain().iter().cloned() {
2006 if self.can_apply_op(&op) {
2007 self.apply_op(op, cx);
2008 } else {
2009 deferred_ops.push(op);
2010 }
2011 }
2012 self.deferred_ops.insert(deferred_ops);
2013 }
2014
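    /// Whether there are any remote operations that are still deferred because
    /// their dependencies have not yet been received.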
2015 pub fn has_deferred_ops(&self) -> bool {
2016 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2017 }
2018
2019 fn can_apply_op(&self, operation: &Operation) -> bool {
2020 match operation {
2021 Operation::Buffer(_) => {
2022 unreachable!("buffer operations should never be applied at this layer")
2023 }
2024 Operation::UpdateDiagnostics {
2025 diagnostics: diagnostic_set,
2026 ..
2027 } => diagnostic_set.iter().all(|diagnostic| {
2028 self.text.can_resolve(&diagnostic.range.start)
2029 && self.text.can_resolve(&diagnostic.range.end)
2030 }),
2031 Operation::UpdateSelections { selections, .. } => selections
2032 .iter()
2033 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2034 Operation::UpdateCompletionTriggers { .. } => true,
2035 }
2036 }
2037
2038 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
2039 match operation {
2040 Operation::Buffer(_) => {
2041 unreachable!("buffer operations should never be applied at this layer")
2042 }
2043 Operation::UpdateDiagnostics {
2044 server_id,
2045 diagnostics: diagnostic_set,
2046 lamport_timestamp,
2047 } => {
2048 let snapshot = self.snapshot();
2049 self.apply_diagnostic_update(
2050 server_id,
2051 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2052 lamport_timestamp,
2053 cx,
2054 );
2055 }
2056 Operation::UpdateSelections {
2057 selections,
2058 lamport_timestamp,
2059 line_mode,
2060 cursor_shape,
2061 } => {
2062 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2063 if set.lamport_timestamp > lamport_timestamp {
2064 return;
2065 }
2066 }
2067
2068 self.remote_selections.insert(
2069 lamport_timestamp.replica_id,
2070 SelectionSet {
2071 selections,
2072 lamport_timestamp,
2073 line_mode,
2074 cursor_shape,
2075 },
2076 );
2077 self.text.lamport_clock.observe(lamport_timestamp);
2078 self.non_text_state_update_count += 1;
2079 }
2080 Operation::UpdateCompletionTriggers {
2081 triggers,
2082 lamport_timestamp,
2083 } => {
2084 self.completion_triggers = triggers;
2085 self.text.lamport_clock.observe(lamport_timestamp);
2086 }
2087 }
2088 }
2089
2090 fn apply_diagnostic_update(
2091 &mut self,
2092 server_id: LanguageServerId,
2093 diagnostics: DiagnosticSet,
2094 lamport_timestamp: clock::Lamport,
2095 cx: &mut ModelContext<Self>,
2096 ) {
2097 if lamport_timestamp > self.diagnostics_timestamp {
2098 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2099 if diagnostics.is_empty() {
2100 if let Ok(ix) = ix {
2101 self.diagnostics.remove(ix);
2102 }
2103 } else {
2104 match ix {
2105 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2106 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2107 };
2108 }
2109 self.diagnostics_timestamp = lamport_timestamp;
2110 self.non_text_state_update_count += 1;
2111 self.text.lamport_clock.observe(lamport_timestamp);
2112 cx.notify();
2113 cx.emit(BufferEvent::DiagnosticsUpdated);
2114 }
2115 }
2116
2117 fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
2118 cx.emit(BufferEvent::Operation(operation));
2119 }
2120
2121 /// Removes the selections for a given peer.
2122 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
2123 self.remote_selections.remove(&replica_id);
2124 cx.notify();
2125 }
2126
2127 /// Undoes the most recent transaction.
2128 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2129 let was_dirty = self.is_dirty();
2130 let old_version = self.version.clone();
2131
2132 if let Some((transaction_id, operation)) = self.text.undo() {
2133 self.send_operation(Operation::Buffer(operation), cx);
2134 self.did_edit(&old_version, was_dirty, cx);
2135 Some(transaction_id)
2136 } else {
2137 None
2138 }
2139 }
2140
2141 /// Manually undoes a specific transaction in the buffer's undo history.
2142 pub fn undo_transaction(
2143 &mut self,
2144 transaction_id: TransactionId,
2145 cx: &mut ModelContext<Self>,
2146 ) -> bool {
2147 let was_dirty = self.is_dirty();
2148 let old_version = self.version.clone();
2149 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2150 self.send_operation(Operation::Buffer(operation), cx);
2151 self.did_edit(&old_version, was_dirty, cx);
2152 true
2153 } else {
2154 false
2155 }
2156 }
2157
2158 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2159 pub fn undo_to_transaction(
2160 &mut self,
2161 transaction_id: TransactionId,
2162 cx: &mut ModelContext<Self>,
2163 ) -> bool {
2164 let was_dirty = self.is_dirty();
2165 let old_version = self.version.clone();
2166
2167 let operations = self.text.undo_to_transaction(transaction_id);
2168 let undone = !operations.is_empty();
2169 for operation in operations {
2170 self.send_operation(Operation::Buffer(operation), cx);
2171 }
2172 if undone {
2173 self.did_edit(&old_version, was_dirty, cx)
2174 }
2175 undone
2176 }
2177
    /// Redoes the most recently undone transaction.
2179 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2180 let was_dirty = self.is_dirty();
2181 let old_version = self.version.clone();
2182
2183 if let Some((transaction_id, operation)) = self.text.redo() {
2184 self.send_operation(Operation::Buffer(operation), cx);
2185 self.did_edit(&old_version, was_dirty, cx);
2186 Some(transaction_id)
2187 } else {
2188 None
2189 }
2190 }
2191
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2193 pub fn redo_to_transaction(
2194 &mut self,
2195 transaction_id: TransactionId,
2196 cx: &mut ModelContext<Self>,
2197 ) -> bool {
2198 let was_dirty = self.is_dirty();
2199 let old_version = self.version.clone();
2200
2201 let operations = self.text.redo_to_transaction(transaction_id);
2202 let redone = !operations.is_empty();
2203 for operation in operations {
2204 self.send_operation(Operation::Buffer(operation), cx);
2205 }
2206 if redone {
2207 self.did_edit(&old_version, was_dirty, cx)
2208 }
2209 redone
2210 }
2211
2212 /// Override current completion triggers with the user-provided completion triggers.
2213 pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
2214 self.completion_triggers.clone_from(&triggers);
2215 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2216 self.send_operation(
2217 Operation::UpdateCompletionTriggers {
2218 triggers,
2219 lamport_timestamp: self.completion_triggers_timestamp,
2220 },
2221 cx,
2222 );
2223 cx.notify();
2224 }
2225
    /// Returns a list of strings that trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2228 pub fn completion_triggers(&self) -> &[String] {
2229 &self.completion_triggers
2230 }
2231
2232 /// Call this directly after performing edits to prevent the preview tab
2233 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2234 /// to return false until there are additional edits.
2235 pub fn refresh_preview(&mut self) {
2236 self.preview_version = self.version.clone();
2237 }
2238
2239 /// Whether we should preserve the preview status of a tab containing this buffer.
2240 pub fn preserve_preview(&self) -> bool {
2241 !self.has_edits_since(&self.preview_version)
2242 }
2243}
2244
2245#[doc(hidden)]
2246#[cfg(any(test, feature = "test-support"))]
2247impl Buffer {
2248 pub fn edit_via_marked_text(
2249 &mut self,
2250 marked_string: &str,
2251 autoindent_mode: Option<AutoindentMode>,
2252 cx: &mut ModelContext<Self>,
2253 ) {
2254 let edits = self.edits_for_marked_text(marked_string);
2255 self.edit(edits, autoindent_mode, cx);
2256 }
2257
2258 pub fn set_group_interval(&mut self, group_interval: Duration) {
2259 self.text.set_group_interval(group_interval);
2260 }
2261
2262 pub fn randomly_edit<T>(
2263 &mut self,
2264 rng: &mut T,
2265 old_range_count: usize,
2266 cx: &mut ModelContext<Self>,
2267 ) where
2268 T: rand::Rng,
2269 {
2270 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2271 let mut last_end = None;
2272 for _ in 0..old_range_count {
2273 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2274 break;
2275 }
2276
2277 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2278 let mut range = self.random_byte_range(new_start, rng);
2279 if rng.gen_bool(0.2) {
2280 mem::swap(&mut range.start, &mut range.end);
2281 }
2282 last_end = Some(range.end);
2283
2284 let new_text_len = rng.gen_range(0..10);
2285 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2286
2287 edits.push((range, new_text));
2288 }
2289 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2290 self.edit(edits, None, cx);
2291 }
2292
2293 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
2294 let was_dirty = self.is_dirty();
2295 let old_version = self.version.clone();
2296
2297 let ops = self.text.randomly_undo_redo(rng);
2298 if !ops.is_empty() {
2299 for op in ops {
2300 self.send_operation(Operation::Buffer(op), cx);
2301 self.did_edit(&old_version, was_dirty, cx);
2302 }
2303 }
2304 }
2305}
2306
2307impl EventEmitter<BufferEvent> for Buffer {}
2308
2309impl Deref for Buffer {
2310 type Target = TextBuffer;
2311
2312 fn deref(&self) -> &Self::Target {
2313 &self.text
2314 }
2315}
2316
2317impl BufferSnapshot {
    /// Returns [`IndentSize`] for a given line that respects user settings and
    /// language preferences.
2319 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2320 indent_size_for_line(self, row)
    }

    /// Returns [`IndentSize`] for a given position that respects user settings
2323 /// and language preferences.
2324 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
2325 let settings = language_settings(self.language_at(position), self.file(), cx);
2326 if settings.hard_tabs {
2327 IndentSize::tab()
2328 } else {
2329 IndentSize::spaces(settings.tab_size.get())
2330 }
2331 }
2332
2333 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2334 /// is passed in as `single_indent_size`.
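    ///
    /// A hedged sketch (ignored doctest), assuming `snapshot` is a [`BufferSnapshot`]
    /// whose language has indentation queries:
    ///
    /// ```ignore
    /// // Suggested indents for rows 1, 2, and 3, using four spaces as the indent unit.
    /// let suggestions = snapshot.suggested_indents(1..4, IndentSize::spaces(4));
    /// for (row, indent) in &suggestions {
    ///     println!("row {row}: {} columns", indent.len);
    /// }
    /// ```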
2335 pub fn suggested_indents(
2336 &self,
2337 rows: impl Iterator<Item = u32>,
2338 single_indent_size: IndentSize,
2339 ) -> BTreeMap<u32, IndentSize> {
2340 let mut result = BTreeMap::new();
2341
2342 for row_range in contiguous_ranges(rows, 10) {
2343 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2344 Some(suggestions) => suggestions,
2345 _ => break,
2346 };
2347
2348 for (row, suggestion) in row_range.zip(suggestions) {
2349 let indent_size = if let Some(suggestion) = suggestion {
2350 result
2351 .get(&suggestion.basis_row)
2352 .copied()
2353 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2354 .with_delta(suggestion.delta, single_indent_size)
2355 } else {
2356 self.indent_size_for_line(row)
2357 };
2358
2359 result.insert(row, indent_size);
2360 }
2361 }
2362
2363 result
2364 }
2365
2366 fn suggest_autoindents(
2367 &self,
2368 row_range: Range<u32>,
2369 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2370 let config = &self.language.as_ref()?.config;
2371 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2372
2373 // Find the suggested indentation ranges based on the syntax tree.
2374 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2375 let end = Point::new(row_range.end, 0);
2376 let range = (start..end).to_offset(&self.text);
2377 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2378 Some(&grammar.indents_config.as_ref()?.query)
2379 });
2380 let indent_configs = matches
2381 .grammars()
2382 .iter()
2383 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2384 .collect::<Vec<_>>();
2385
2386 let mut indent_ranges = Vec::<Range<Point>>::new();
2387 let mut outdent_positions = Vec::<Point>::new();
2388 while let Some(mat) = matches.peek() {
2389 let mut start: Option<Point> = None;
2390 let mut end: Option<Point> = None;
2391
2392 let config = &indent_configs[mat.grammar_index];
2393 for capture in mat.captures {
2394 if capture.index == config.indent_capture_ix {
2395 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2396 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2397 } else if Some(capture.index) == config.start_capture_ix {
2398 start = Some(Point::from_ts_point(capture.node.end_position()));
2399 } else if Some(capture.index) == config.end_capture_ix {
2400 end = Some(Point::from_ts_point(capture.node.start_position()));
2401 } else if Some(capture.index) == config.outdent_capture_ix {
2402 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2403 }
2404 }
2405
2406 matches.advance();
2407 if let Some((start, end)) = start.zip(end) {
2408 if start.row == end.row {
2409 continue;
2410 }
2411
2412 let range = start..end;
2413 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2414 Err(ix) => indent_ranges.insert(ix, range),
2415 Ok(ix) => {
2416 let prev_range = &mut indent_ranges[ix];
2417 prev_range.end = prev_range.end.max(range.end);
2418 }
2419 }
2420 }
2421 }
2422
2423 let mut error_ranges = Vec::<Range<Point>>::new();
2424 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2425 Some(&grammar.error_query)
2426 });
2427 while let Some(mat) = matches.peek() {
2428 let node = mat.captures[0].node;
2429 let start = Point::from_ts_point(node.start_position());
2430 let end = Point::from_ts_point(node.end_position());
2431 let range = start..end;
2432 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2433 Ok(ix) | Err(ix) => ix,
2434 };
2435 let mut end_ix = ix;
2436 while let Some(existing_range) = error_ranges.get(end_ix) {
2437 if existing_range.end < end {
2438 end_ix += 1;
2439 } else {
2440 break;
2441 }
2442 }
2443 error_ranges.splice(ix..end_ix, [range]);
2444 matches.advance();
2445 }
2446
2447 outdent_positions.sort();
2448 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent_position,
            // and set its end to the outdent position.
2451 if let Some(range_to_truncate) = indent_ranges
2452 .iter_mut()
2453 .filter(|indent_range| indent_range.contains(&outdent_position))
2454 .last()
2455 {
2456 range_to_truncate.end = outdent_position;
2457 }
2458 }
2459
        // Find the suggested indentation increases and decreases based on regexes.
2461 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2462 self.for_each_line(
2463 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2464 ..Point::new(row_range.end, 0),
2465 |row, line| {
2466 if config
2467 .decrease_indent_pattern
2468 .as_ref()
2469 .map_or(false, |regex| regex.is_match(line))
2470 {
2471 indent_change_rows.push((row, Ordering::Less));
2472 }
2473 if config
2474 .increase_indent_pattern
2475 .as_ref()
2476 .map_or(false, |regex| regex.is_match(line))
2477 {
2478 indent_change_rows.push((row + 1, Ordering::Greater));
2479 }
2480 },
2481 );
2482
2483 let mut indent_changes = indent_change_rows.into_iter().peekable();
2484 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2485 prev_non_blank_row.unwrap_or(0)
2486 } else {
2487 row_range.start.saturating_sub(1)
2488 };
2489 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2490 Some(row_range.map(move |row| {
2491 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2492
2493 let mut indent_from_prev_row = false;
2494 let mut outdent_from_prev_row = false;
2495 let mut outdent_to_row = u32::MAX;
2496
2497 while let Some((indent_row, delta)) = indent_changes.peek() {
2498 match indent_row.cmp(&row) {
2499 Ordering::Equal => match delta {
2500 Ordering::Less => outdent_from_prev_row = true,
2501 Ordering::Greater => indent_from_prev_row = true,
2502 _ => {}
2503 },
2504
2505 Ordering::Greater => break,
2506 Ordering::Less => {}
2507 }
2508
2509 indent_changes.next();
2510 }
2511
2512 for range in &indent_ranges {
2513 if range.start.row >= row {
2514 break;
2515 }
2516 if range.start.row == prev_row && range.end > row_start {
2517 indent_from_prev_row = true;
2518 }
2519 if range.end > prev_row_start && range.end <= row_start {
2520 outdent_to_row = outdent_to_row.min(range.start.row);
2521 }
2522 }
2523
2524 let within_error = error_ranges
2525 .iter()
2526 .any(|e| e.start.row < row && e.end > row_start);
2527
2528 let suggestion = if outdent_to_row == prev_row
2529 || (outdent_from_prev_row && indent_from_prev_row)
2530 {
2531 Some(IndentSuggestion {
2532 basis_row: prev_row,
2533 delta: Ordering::Equal,
2534 within_error,
2535 })
2536 } else if indent_from_prev_row {
2537 Some(IndentSuggestion {
2538 basis_row: prev_row,
2539 delta: Ordering::Greater,
2540 within_error,
2541 })
2542 } else if outdent_to_row < prev_row {
2543 Some(IndentSuggestion {
2544 basis_row: outdent_to_row,
2545 delta: Ordering::Equal,
2546 within_error,
2547 })
2548 } else if outdent_from_prev_row {
2549 Some(IndentSuggestion {
2550 basis_row: prev_row,
2551 delta: Ordering::Less,
2552 within_error,
2553 })
2554 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2555 {
2556 Some(IndentSuggestion {
2557 basis_row: prev_row,
2558 delta: Ordering::Equal,
2559 within_error,
2560 })
2561 } else {
2562 None
2563 };
2564
2565 prev_row = row;
2566 prev_row_start = row_start;
2567 suggestion
2568 }))
2569 }
2570
2571 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2572 while row > 0 {
2573 row -= 1;
2574 if !self.is_line_blank(row) {
2575 return Some(row);
2576 }
2577 }
2578 None
2579 }
2580
2581 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
2582 let captures = self.syntax.captures(range, &self.text, |grammar| {
2583 grammar.highlights_query.as_ref()
2584 });
2585 let highlight_maps = captures
2586 .grammars()
2587 .iter()
2588 .map(|grammar| grammar.highlight_map())
2589 .collect();
2590 (captures, highlight_maps)
    }

    /// Iterates over chunks of text in the given range of the buffer. Text is chunked
2593 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
2594 /// returned in chunks where each chunk has a single syntax highlighting style and
2595 /// diagnostic status.
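    ///
    /// A hedged sketch (ignored doctest) of walking highlighted chunks, assuming
    /// `snapshot` is a [`BufferSnapshot`] with a language assigned:
    ///
    /// ```ignore
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // `chunk.text` is a slice of the buffer; `chunk.syntax_highlight_id`
    ///     // can be resolved to a style via a `SyntaxTheme`.
    ///     println!("{:?} {:?}", chunk.text, chunk.syntax_highlight_id);
    /// }
    /// ```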
2596 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2597 let range = range.start.to_offset(self)..range.end.to_offset(self);
2598
2599 let mut syntax = None;
2600 if language_aware {
2601 syntax = Some(self.get_highlights(range.clone()));
2602 }
2603 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
2604 let diagnostics = language_aware;
2605 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
2606 }
2607
2608 /// Invokes the given callback for each line of text in the given range of the buffer.
2609 /// Uses callback to avoid allocating a string for each line.
2610 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2611 let mut line = String::new();
2612 let mut row = range.start.row;
2613 for chunk in self
2614 .as_rope()
2615 .chunks_in_range(range.to_offset(self))
2616 .chain(["\n"])
2617 {
2618 for (newline_ix, text) in chunk.split('\n').enumerate() {
2619 if newline_ix > 0 {
2620 callback(row, &line);
2621 row += 1;
2622 line.clear();
2623 }
2624 line.push_str(text);
2625 }
2626 }
2627 }
2628
2629 /// Iterates over every [`SyntaxLayer`] in the buffer.
2630 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
2631 self.syntax
2632 .layers_for_range(0..self.len(), &self.text, true)
2633 }
2634
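    /// Returns the innermost [`SyntaxLayer`] at the given position, if any.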
2635 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
2636 let offset = position.to_offset(self);
2637 self.syntax
2638 .layers_for_range(offset..offset, &self.text, false)
2639 .filter(|l| l.node().end_byte() > offset)
2640 .last()
2641 }
2642
    /// Returns the main [Language] of the buffer.
2644 pub fn language(&self) -> Option<&Arc<Language>> {
2645 self.language.as_ref()
2646 }
2647
2648 /// Returns the [Language] at the given location.
2649 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2650 self.syntax_layer_at(position)
2651 .map(|info| info.language)
2652 .or(self.language.as_ref())
2653 }
2654
2655 /// Returns the settings for the language at the given location.
2656 pub fn settings_at<'a, D: ToOffset>(
2657 &self,
2658 position: D,
2659 cx: &'a AppContext,
2660 ) -> &'a LanguageSettings {
2661 language_settings(self.language_at(position), self.file.as_ref(), cx)
2662 }
2663
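    /// Returns a [`CharClassifier`] configured for the language scope at the given position.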
2664 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
2665 CharClassifier::new(self.language_scope_at(point))
2666 }
2667
2668 /// Returns the [LanguageScope] at the given location.
2669 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2670 let offset = position.to_offset(self);
2671 let mut scope = None;
2672 let mut smallest_range: Option<Range<usize>> = None;
2673
2674 // Use the layer that has the smallest node intersecting the given point.
2675 for layer in self
2676 .syntax
2677 .layers_for_range(offset..offset, &self.text, false)
2678 {
2679 let mut cursor = layer.node().walk();
2680
2681 let mut range = None;
2682 loop {
2683 let child_range = cursor.node().byte_range();
2684 if !child_range.to_inclusive().contains(&offset) {
2685 break;
2686 }
2687
2688 range = Some(child_range);
2689 if cursor.goto_first_child_for_byte(offset).is_none() {
2690 break;
2691 }
2692 }
2693
2694 if let Some(range) = range {
2695 if smallest_range
2696 .as_ref()
2697 .map_or(true, |smallest_range| range.len() < smallest_range.len())
2698 {
2699 smallest_range = Some(range);
2700 scope = Some(LanguageScope {
2701 language: layer.language.clone(),
2702 override_id: layer.override_id(offset, &self.text),
2703 });
2704 }
2705 }
2706 }
2707
2708 scope.or_else(|| {
2709 self.language.clone().map(|language| LanguageScope {
2710 language,
2711 override_id: None,
2712 })
2713 })
2714 }
2715
2716 /// Returns a tuple of the range and character kind of the word
2717 /// surrounding the given position.
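    ///
    /// A hedged sketch (ignored doctest), assuming `snapshot` contains the text
    /// `hello world`:
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(2);
    /// // `range` is 0..5 (covering "hello"); `kind` describes a word character.
    /// ```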
2718 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2719 let mut start = start.to_offset(self);
2720 let mut end = start;
2721 let mut next_chars = self.chars_at(start).peekable();
2722 let mut prev_chars = self.reversed_chars_at(start).peekable();
2723
2724 let classifier = self.char_classifier_at(start);
2725 let word_kind = cmp::max(
2726 prev_chars.peek().copied().map(|c| classifier.kind(c)),
2727 next_chars.peek().copied().map(|c| classifier.kind(c)),
2728 );
2729
2730 for ch in prev_chars {
2731 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
2732 start -= ch.len_utf8();
2733 } else {
2734 break;
2735 }
2736 }
2737
2738 for ch in next_chars {
2739 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
2740 end += ch.len_utf8();
2741 } else {
2742 break;
2743 }
2744 }
2745
2746 (start..end, word_kind)
2747 }
2748
    /// Returns the range of the closest syntax node enclosing the given range.
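    ///
    /// A hedged sketch (ignored doctest) of expanding a selection to the enclosing
    /// syntax node, assuming `snapshot` has a parsed syntax tree:
    ///
    /// ```ignore
    /// let mut selection = 10..12;
    /// if let Some(ancestor) = snapshot.range_for_syntax_ancestor(selection.clone()) {
    ///     selection = ancestor;
    /// }
    /// ```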
2750 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2751 let range = range.start.to_offset(self)..range.end.to_offset(self);
2752 let mut result: Option<Range<usize>> = None;
2753 'outer: for layer in self
2754 .syntax
2755 .layers_for_range(range.clone(), &self.text, true)
2756 {
2757 let mut cursor = layer.node().walk();
2758
2759 // Descend to the first leaf that touches the start of the range,
2760 // and if the range is non-empty, extends beyond the start.
2761 while cursor.goto_first_child_for_byte(range.start).is_some() {
2762 if !range.is_empty() && cursor.node().end_byte() == range.start {
2763 cursor.goto_next_sibling();
2764 }
2765 }
2766
2767 // Ascend to the smallest ancestor that strictly contains the range.
2768 loop {
2769 let node_range = cursor.node().byte_range();
2770 if node_range.start <= range.start
2771 && node_range.end >= range.end
2772 && node_range.len() > range.len()
2773 {
2774 break;
2775 }
2776 if !cursor.goto_parent() {
2777 continue 'outer;
2778 }
2779 }
2780
2781 let left_node = cursor.node();
2782 let mut layer_result = left_node.byte_range();
2783
2784 // For an empty range, try to find another node immediately to the right of the range.
2785 if left_node.end_byte() == range.start {
2786 let mut right_node = None;
2787 while !cursor.goto_next_sibling() {
2788 if !cursor.goto_parent() {
2789 break;
2790 }
2791 }
2792
2793 while cursor.node().start_byte() == range.start {
2794 right_node = Some(cursor.node());
2795 if !cursor.goto_first_child() {
2796 break;
2797 }
2798 }
2799
2800 // If there is a candidate node on both sides of the (empty) range, then
2801 // decide between the two by favoring a named node over an anonymous token.
2802 // If both nodes are the same in that regard, favor the right one.
2803 if let Some(right_node) = right_node {
2804 if right_node.is_named() || !left_node.is_named() {
2805 layer_result = right_node.byte_range();
2806 }
2807 }
2808 }
2809
2810 if let Some(previous_result) = &result {
2811 if previous_result.len() < layer_result.len() {
2812 continue;
2813 }
2814 }
2815 result = Some(layer_result);
2816 }
2817
2818 result
2819 }
2820
2821 /// Returns the outline for the buffer.
2822 ///
2823 /// This method allows passing an optional [SyntaxTheme] to
2824 /// syntax-highlight the returned symbols.
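    ///
    /// A hedged sketch (ignored doctest), assuming `snapshot` has a language with an
    /// outline query and that [`Outline`] exposes an `items` vector:
    ///
    /// ```ignore
    /// if let Some(outline) = snapshot.outline(None) {
    ///     for item in &outline.items {
    ///         println!("{}{}", "  ".repeat(item.depth), item.text);
    ///     }
    /// }
    /// ```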
2825 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2826 self.outline_items_containing(0..self.len(), true, theme)
2827 .map(Outline::new)
2828 }
2829
2830 /// Returns all the symbols that contain the given position.
2831 ///
2832 /// This method allows passing an optional [SyntaxTheme] to
2833 /// syntax-highlight the returned symbols.
2834 pub fn symbols_containing<T: ToOffset>(
2835 &self,
2836 position: T,
2837 theme: Option<&SyntaxTheme>,
2838 ) -> Option<Vec<OutlineItem<Anchor>>> {
2839 let position = position.to_offset(self);
2840 let mut items = self.outline_items_containing(
2841 position.saturating_sub(1)..self.len().min(position + 1),
2842 false,
2843 theme,
2844 )?;
2845 let mut prev_depth = None;
2846 items.retain(|item| {
2847 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
2848 prev_depth = Some(item.depth);
2849 result
2850 });
2851 Some(items)
2852 }
2853
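    /// Returns the outline items intersecting the given range, optionally
    /// syntax-highlighted using the provided [SyntaxTheme].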
2854 pub fn outline_items_containing<T: ToOffset>(
2855 &self,
2856 range: Range<T>,
2857 include_extra_context: bool,
2858 theme: Option<&SyntaxTheme>,
2859 ) -> Option<Vec<OutlineItem<Anchor>>> {
2860 let range = range.to_offset(self);
2861 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2862 grammar.outline_config.as_ref().map(|c| &c.query)
2863 });
2864 let configs = matches
2865 .grammars()
2866 .iter()
2867 .map(|g| g.outline_config.as_ref().unwrap())
2868 .collect::<Vec<_>>();
2869
2870 let mut items = Vec::new();
2871 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
2872 while let Some(mat) = matches.peek() {
2873 let config = &configs[mat.grammar_index];
2874 if let Some(item) =
2875 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
2876 {
2877 items.push(item);
2878 } else if let Some(capture) = mat
2879 .captures
2880 .iter()
2881 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
2882 {
2883 let capture_range = capture.node.start_position()..capture.node.end_position();
2884 let mut capture_row_range =
2885 capture_range.start.row as u32..capture_range.end.row as u32;
2886 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
2887 {
2888 capture_row_range.end -= 1;
2889 }
2890 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
2891 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
2892 last_row_range.end = capture_row_range.end;
2893 } else {
2894 annotation_row_ranges.push(capture_row_range);
2895 }
2896 } else {
2897 annotation_row_ranges.push(capture_row_range);
2898 }
2899 }
2900 matches.advance();
2901 }
2902
2903 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
2904
2905 // Assign depths based on containment relationships and convert to anchors.
2906 let mut item_ends_stack = Vec::<Point>::new();
2907 let mut anchor_items = Vec::new();
2908 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
2909 for item in items {
2910 while let Some(last_end) = item_ends_stack.last().copied() {
2911 if last_end < item.range.end {
2912 item_ends_stack.pop();
2913 } else {
2914 break;
2915 }
2916 }
2917
2918 let mut annotation_row_range = None;
2919 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
2920 let row_preceding_item = item.range.start.row.saturating_sub(1);
2921 if next_annotation_row_range.end < row_preceding_item {
2922 annotation_row_ranges.next();
2923 } else {
2924 if next_annotation_row_range.end == row_preceding_item {
2925 annotation_row_range = Some(next_annotation_row_range.clone());
2926 annotation_row_ranges.next();
2927 }
2928 break;
2929 }
2930 }
2931
2932 anchor_items.push(OutlineItem {
2933 depth: item_ends_stack.len(),
2934 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
2935 text: item.text,
2936 highlight_ranges: item.highlight_ranges,
2937 name_ranges: item.name_ranges,
2938 body_range: item.body_range.map(|body_range| {
2939 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
2940 }),
2941 annotation_range: annotation_row_range.map(|annotation_range| {
2942 self.anchor_after(Point::new(annotation_range.start, 0))
2943 ..self.anchor_before(Point::new(
2944 annotation_range.end,
2945 self.line_len(annotation_range.end),
2946 ))
2947 }),
2948 });
2949 item_ends_stack.push(item.range.end);
2950 }
2951
2952 Some(anchor_items)
2953 }
2954
2955 fn next_outline_item(
2956 &self,
2957 config: &OutlineConfig,
2958 mat: &SyntaxMapMatch,
2959 range: &Range<usize>,
2960 include_extra_context: bool,
2961 theme: Option<&SyntaxTheme>,
2962 ) -> Option<OutlineItem<Point>> {
2963 let item_node = mat.captures.iter().find_map(|cap| {
2964 if cap.index == config.item_capture_ix {
2965 Some(cap.node)
2966 } else {
2967 None
2968 }
2969 })?;
2970
2971 let item_byte_range = item_node.byte_range();
2972 if item_byte_range.end < range.start || item_byte_range.start > range.end {
2973 return None;
2974 }
2975 let item_point_range = Point::from_ts_point(item_node.start_position())
2976 ..Point::from_ts_point(item_node.end_position());
2977
2978 let mut open_point = None;
2979 let mut close_point = None;
2980 let mut buffer_ranges = Vec::new();
2981 for capture in mat.captures {
2982 let node_is_name;
2983 if capture.index == config.name_capture_ix {
2984 node_is_name = true;
2985 } else if Some(capture.index) == config.context_capture_ix
2986 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
2987 {
2988 node_is_name = false;
2989 } else {
2990 if Some(capture.index) == config.open_capture_ix {
2991 open_point = Some(Point::from_ts_point(capture.node.end_position()));
2992 } else if Some(capture.index) == config.close_capture_ix {
2993 close_point = Some(Point::from_ts_point(capture.node.start_position()));
2994 }
2995
2996 continue;
2997 }
2998
2999 let mut range = capture.node.start_byte()..capture.node.end_byte();
3000 let start = capture.node.start_position();
3001 if capture.node.end_position().row > start.row {
3002 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3003 }
3004
3005 if !range.is_empty() {
3006 buffer_ranges.push((range, node_is_name));
3007 }
3008 }
3009 if buffer_ranges.is_empty() {
3010 return None;
3011 }
3012 let mut text = String::new();
3013 let mut highlight_ranges = Vec::new();
3014 let mut name_ranges = Vec::new();
3015 let mut chunks = self.chunks(
3016 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3017 true,
3018 );
3019 let mut last_buffer_range_end = 0;
3020 for (buffer_range, is_name) in buffer_ranges {
3021 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
3022 text.push(' ');
3023 }
3024 last_buffer_range_end = buffer_range.end;
3025 if is_name {
3026 let mut start = text.len();
3027 let end = start + buffer_range.len();
3028
                // When multiple names are captured, the matchable text
                // includes the whitespace between the names.
3031 if !name_ranges.is_empty() {
3032 start -= 1;
3033 }
3034
3035 name_ranges.push(start..end);
3036 }
3037
3038 let mut offset = buffer_range.start;
3039 chunks.seek(buffer_range.clone());
3040 for mut chunk in chunks.by_ref() {
3041 if chunk.text.len() > buffer_range.end - offset {
3042 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3043 offset = buffer_range.end;
3044 } else {
3045 offset += chunk.text.len();
3046 }
3047 let style = chunk
3048 .syntax_highlight_id
3049 .zip(theme)
3050 .and_then(|(highlight, theme)| highlight.style(theme));
3051 if let Some(style) = style {
3052 let start = text.len();
3053 let end = start + chunk.text.len();
3054 highlight_ranges.push((start..end, style));
3055 }
3056 text.push_str(chunk.text);
3057 if offset >= buffer_range.end {
3058 break;
3059 }
3060 }
3061 }
3062
3063 Some(OutlineItem {
3064 depth: 0, // We'll calculate the depth later
3065 range: item_point_range,
3066 text,
3067 highlight_ranges,
3068 name_ranges,
3069 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3070 annotation_range: None,
3071 })
3072 }
3073
3074 /// For each grammar in the language, runs the provided
3075 /// [tree_sitter::Query] against the given range.
3076 pub fn matches(
3077 &self,
3078 range: Range<usize>,
3079 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3080 ) -> SyntaxMapMatches {
3081 self.syntax.matches(range, self, query)
3082 }
3083
3084 /// Returns bracket range pairs overlapping or adjacent to `range`
3085 pub fn bracket_ranges<T: ToOffset>(
3086 &self,
3087 range: Range<T>,
3088 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
3089 // Find bracket pairs that *inclusively* contain the given range.
3090 let range = range.start.to_offset(self).saturating_sub(1)
3091 ..self.len().min(range.end.to_offset(self) + 1);
3092
3093 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3094 grammar.brackets_config.as_ref().map(|c| &c.query)
3095 });
3096 let configs = matches
3097 .grammars()
3098 .iter()
3099 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3100 .collect::<Vec<_>>();
3101
3102 iter::from_fn(move || {
3103 while let Some(mat) = matches.peek() {
3104 let mut open = None;
3105 let mut close = None;
3106 let config = &configs[mat.grammar_index];
3107 for capture in mat.captures {
3108 if capture.index == config.open_capture_ix {
3109 open = Some(capture.node.byte_range());
3110 } else if capture.index == config.close_capture_ix {
3111 close = Some(capture.node.byte_range());
3112 }
3113 }
3114
3115 matches.advance();
3116
3117 let Some((open, close)) = open.zip(close) else {
3118 continue;
3119 };
3120
3121 let bracket_range = open.start..=close.end;
3122 if !bracket_range.overlaps(&range) {
3123 continue;
3124 }
3125
3126 return Some((open, close));
3127 }
3128 None
3129 })
3130 }
3131
3132 /// Returns enclosing bracket ranges containing the given range
3133 pub fn enclosing_bracket_ranges<T: ToOffset>(
3134 &self,
3135 range: Range<T>,
3136 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
3137 let range = range.start.to_offset(self)..range.end.to_offset(self);
3138
3139 self.bracket_ranges(range.clone())
3140 .filter(move |(open, close)| open.start <= range.start && close.end >= range.end)
3141 }
3142
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
3144 ///
3145 /// Can optionally pass a range_filter to filter the ranges of brackets to consider
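    ///
    /// A hedged sketch (ignored doctest), assuming `cursor` is a byte offset that
    /// sits between a pair of brackets:
    ///
    /// ```ignore
    /// if let Some((open, close)) =
    ///     snapshot.innermost_enclosing_bracket_ranges(cursor..cursor, None)
    /// {
    ///     // `open` and `close` are the byte ranges of the innermost bracket pair.
    /// }
    /// ```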
3146 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3147 &self,
3148 range: Range<T>,
3149 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3150 ) -> Option<(Range<usize>, Range<usize>)> {
3151 let range = range.start.to_offset(self)..range.end.to_offset(self);
3152
3153 // Get the ranges of the innermost pair of brackets.
3154 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3155
3156 for (open, close) in self.enclosing_bracket_ranges(range.clone()) {
3157 if let Some(range_filter) = range_filter {
3158 if !range_filter(open.clone(), close.clone()) {
3159 continue;
3160 }
3161 }
3162
3163 let len = close.end - open.start;
3164
3165 if let Some((existing_open, existing_close)) = &result {
3166 let existing_len = existing_close.end - existing_open.start;
3167 if len > existing_len {
3168 continue;
3169 }
3170 }
3171
3172 result = Some((open, close));
3173 }
3174
3175 result
3176 }
3177
3178 /// Returns anchor ranges for any matches of the redaction query.
3179 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3180 /// will be run on the relevant section of the buffer.
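    ///
    /// A hedged sketch (ignored doctest) of collecting every redacted range in the buffer:
    ///
    /// ```ignore
    /// let redactions: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```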
3181 pub fn redacted_ranges<T: ToOffset>(
3182 &self,
3183 range: Range<T>,
3184 ) -> impl Iterator<Item = Range<usize>> + '_ {
3185 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3186 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3187 grammar
3188 .redactions_config
3189 .as_ref()
3190 .map(|config| &config.query)
3191 });
3192
3193 let configs = syntax_matches
3194 .grammars()
3195 .iter()
3196 .map(|grammar| grammar.redactions_config.as_ref())
3197 .collect::<Vec<_>>();
3198
3199 iter::from_fn(move || {
3200 let redacted_range = syntax_matches
3201 .peek()
3202 .and_then(|mat| {
3203 configs[mat.grammar_index].and_then(|config| {
3204 mat.captures
3205 .iter()
3206 .find(|capture| capture.index == config.redaction_capture_ix)
3207 })
3208 })
3209 .map(|mat| mat.node.byte_range());
3210 syntax_matches.advance();
3211 redacted_range
3212 })
3213 }
3214
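    /// Returns the ranges of injected languages (such as code blocks embedded in
    /// another language) intersecting the given range, along with the injected [Language].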
3215 pub fn injections_intersecting_range<T: ToOffset>(
3216 &self,
3217 range: Range<T>,
3218 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3219 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3220
3221 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3222 grammar
3223 .injection_config
3224 .as_ref()
3225 .map(|config| &config.query)
3226 });
3227
3228 let configs = syntax_matches
3229 .grammars()
3230 .iter()
3231 .map(|grammar| grammar.injection_config.as_ref())
3232 .collect::<Vec<_>>();
3233
3234 iter::from_fn(move || {
3235 let ranges = syntax_matches.peek().and_then(|mat| {
3236 let config = &configs[mat.grammar_index]?;
3237 let content_capture_range = mat.captures.iter().find_map(|capture| {
3238 if capture.index == config.content_capture_ix {
3239 Some(capture.node.byte_range())
3240 } else {
3241 None
3242 }
3243 })?;
3244 let language = self.language_at(content_capture_range.start)?;
3245 Some((content_capture_range, language))
3246 });
3247 syntax_matches.advance();
3248 ranges
3249 })
3250 }
3251
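    /// Returns the [`RunnableRange`]s within the given range, based on each
    /// grammar's runnable query (used to surface runnable tasks such as tests).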
3252 pub fn runnable_ranges(
3253 &self,
3254 range: Range<Anchor>,
3255 ) -> impl Iterator<Item = RunnableRange> + '_ {
3256 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3257
3258 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3259 grammar.runnable_config.as_ref().map(|config| &config.query)
3260 });
3261
3262 let test_configs = syntax_matches
3263 .grammars()
3264 .iter()
3265 .map(|grammar| grammar.runnable_config.as_ref())
3266 .collect::<Vec<_>>();
3267
3268 iter::from_fn(move || loop {
3269 let mat = syntax_matches.peek()?;
3270
3271 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
3272 let mut run_range = None;
3273 let full_range = mat.captures.iter().fold(
3274 Range {
3275 start: usize::MAX,
3276 end: 0,
3277 },
3278 |mut acc, next| {
3279 let byte_range = next.node.byte_range();
3280 if acc.start > byte_range.start {
3281 acc.start = byte_range.start;
3282 }
3283 if acc.end < byte_range.end {
3284 acc.end = byte_range.end;
3285 }
3286 acc
3287 },
3288 );
3289 if full_range.start > full_range.end {
3290 // We did not find a full spanning range of this match.
3291 return None;
3292 }
3293 let extra_captures: SmallVec<[_; 1]> =
3294 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
3295 test_configs
3296 .extra_captures
3297 .get(capture.index as usize)
3298 .cloned()
3299 .and_then(|tag_name| match tag_name {
3300 RunnableCapture::Named(name) => {
3301 Some((capture.node.byte_range(), name))
3302 }
3303 RunnableCapture::Run => {
3304 let _ = run_range.insert(capture.node.byte_range());
3305 None
3306 }
3307 })
3308 }));
3309 let run_range = run_range?;
3310 let tags = test_configs
3311 .query
3312 .property_settings(mat.pattern_index)
3313 .iter()
3314 .filter_map(|property| {
3315 if *property.key == *"tag" {
3316 property
3317 .value
3318 .as_ref()
3319 .map(|value| RunnableTag(value.to_string().into()))
3320 } else {
3321 None
3322 }
3323 })
3324 .collect();
3325 let extra_captures = extra_captures
3326 .into_iter()
3327 .map(|(range, name)| {
3328 (
3329 name.to_string(),
3330 self.text_for_range(range.clone()).collect::<String>(),
3331 )
3332 })
3333 .collect();
3334 // All tags should have the same range.
3335 Some(RunnableRange {
3336 run_range,
3337 full_range,
3338 runnable: Runnable {
3339 tags,
3340 language: mat.language,
3341 buffer: self.remote_id(),
3342 },
3343 extra_captures,
3344 buffer_id: self.remote_id(),
3345 })
3346 });
3347
3348 syntax_matches.advance();
3349 if test_range.is_some() {
3350 // It's fine for us to short-circuit on .peek()? returning None. We don't want to return None from this iter if we
3351 // had a capture that did not contain a run marker, hence we'll just loop around for the next capture.
3352 return test_range;
3353 }
3354 })
3355 }
3356
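    /// Computes the [`IndentGuide`]s for the rows intersecting the given range,
    /// honoring the language's indent guide settings unless
    /// `ignore_disabled_for_language` is set.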
3357 pub fn indent_guides_in_range(
3358 &self,
3359 range: Range<Anchor>,
3360 ignore_disabled_for_language: bool,
3361 cx: &AppContext,
3362 ) -> Vec<IndentGuide> {
3363 let language_settings = language_settings(self.language(), self.file.as_ref(), cx);
3364 let settings = language_settings.indent_guides;
3365 if !ignore_disabled_for_language && !settings.enabled {
3366 return Vec::new();
3367 }
3368 let tab_size = language_settings.tab_size.get() as u32;
3369
3370 let start_row = range.start.to_point(self).row;
3371 let end_row = range.end.to_point(self).row;
3372 let row_range = start_row..end_row + 1;
3373
3374 let mut row_indents = self.line_indents_in_row_range(row_range.clone());
3375
3376 let mut result_vec = Vec::new();
3377 let mut indent_stack = SmallVec::<[IndentGuide; 8]>::new();
3378
3379 while let Some((first_row, mut line_indent)) = row_indents.next() {
3380 let current_depth = indent_stack.len() as u32;
3381
            // When encountering an empty line, continue until a line with a useful
            // indent is found, then add it to the indent stack with the depth found.
3384 let mut found_indent = false;
3385 let mut last_row = first_row;
3386 if line_indent.is_line_empty() {
3387 let mut trailing_row = end_row;
3388 while !found_indent {
3389 let (target_row, new_line_indent) =
3390 if let Some(display_row) = row_indents.next() {
3391 display_row
3392 } else {
3393 // This means we reached the end of the given range and found empty lines at the end.
3394 // We need to traverse further until we find a non-empty line to know if we need to add
3395 // an indent guide for the last visible indent.
3396 trailing_row += 1;
3397
3398 const TRAILING_ROW_SEARCH_LIMIT: u32 = 25;
3399 if trailing_row > self.max_point().row
3400 || trailing_row > end_row + TRAILING_ROW_SEARCH_LIMIT
3401 {
3402 break;
3403 }
3404 let new_line_indent = self.line_indent_for_row(trailing_row);
3405 (trailing_row, new_line_indent)
3406 };
3407
3408 if new_line_indent.is_line_empty() {
3409 continue;
3410 }
3411 last_row = target_row.min(end_row);
3412 line_indent = new_line_indent;
3413 found_indent = true;
3414 break;
3415 }
3416 } else {
3417 found_indent = true
3418 }
3419
3420 let depth = if found_indent {
3421 line_indent.len(tab_size) / tab_size
3422 + ((line_indent.len(tab_size) % tab_size) > 0) as u32
3423 } else {
3424 current_depth
3425 };
3426
            match depth.cmp(&current_depth) {
3428 Ordering::Less => {
3429 for _ in 0..(current_depth - depth) {
3430 let mut indent = indent_stack.pop().unwrap();
3431 if last_row != first_row {
                            // In this case, we landed on an empty row, had to seek forward, and
                            // discovered that the indent we were on is ending. This means that the
                            // last display row must be on the line that ends this indent range, so
                            // we should display the range up to the first non-empty line.
3437 indent.end_row = first_row.saturating_sub(1);
3438 }
3439
3440 result_vec.push(indent)
3441 }
3442 }
3443 Ordering::Greater => {
3444 for next_depth in current_depth..depth {
3445 indent_stack.push(IndentGuide {
3446 buffer_id: self.remote_id(),
3447 start_row: first_row,
3448 end_row: last_row,
3449 depth: next_depth,
3450 tab_size,
3451 settings,
3452 });
3453 }
3454 }
3455 _ => {}
3456 }
3457
3458 for indent in indent_stack.iter_mut() {
3459 indent.end_row = last_row;
3460 }
3461 }
3462
3463 result_vec.extend(indent_stack);
3464
3465 result_vec
3466 }
3467
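    /// Finds the block of rows enclosing the given row at its indentation level,
    /// returning the enclosing row range and the indentation of the lines that
    /// delimit it.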
3468 pub async fn enclosing_indent(
3469 &self,
3470 mut buffer_row: BufferRow,
3471 ) -> Option<(Range<BufferRow>, LineIndent)> {
3472 let max_row = self.max_point().row;
3473 if buffer_row >= max_row {
3474 return None;
3475 }
3476
3477 let mut target_indent = self.line_indent_for_row(buffer_row);
3478
3479 // If the current row is at the start of an indented block, we want to return this
3480 // block as the enclosing indent.
3481 if !target_indent.is_line_empty() && buffer_row < max_row {
3482 let next_line_indent = self.line_indent_for_row(buffer_row + 1);
3483 if !next_line_indent.is_line_empty()
3484 && target_indent.raw_len() < next_line_indent.raw_len()
3485 {
3486 target_indent = next_line_indent;
3487 buffer_row += 1;
3488 }
3489 }
3490
3491 const SEARCH_ROW_LIMIT: u32 = 25000;
3492 const SEARCH_WHITESPACE_ROW_LIMIT: u32 = 2500;
3493 const YIELD_INTERVAL: u32 = 100;
3494
3495 let mut accessed_row_counter = 0;
3496
        // If the current row is blank, search for the nearest non-empty lines above and below it.
3498 if target_indent.is_line_empty() {
3499 let start = buffer_row.saturating_sub(SEARCH_WHITESPACE_ROW_LIMIT);
3500 let end = (max_row + 1).min(buffer_row + SEARCH_WHITESPACE_ROW_LIMIT);
3501
3502 let mut non_empty_line_above = None;
3503 for (row, indent) in self
3504 .text
3505 .reversed_line_indents_in_row_range(start..buffer_row)
3506 {
3507 accessed_row_counter += 1;
3508 if accessed_row_counter == YIELD_INTERVAL {
3509 accessed_row_counter = 0;
3510 yield_now().await;
3511 }
3512 if !indent.is_line_empty() {
3513 non_empty_line_above = Some((row, indent));
3514 break;
3515 }
3516 }
3517
3518 let mut non_empty_line_below = None;
3519 for (row, indent) in self.text.line_indents_in_row_range((buffer_row + 1)..end) {
3520 accessed_row_counter += 1;
3521 if accessed_row_counter == YIELD_INTERVAL {
3522 accessed_row_counter = 0;
3523 yield_now().await;
3524 }
3525 if !indent.is_line_empty() {
3526 non_empty_line_below = Some((row, indent));
3527 break;
3528 }
3529 }
3530
3531 let (row, indent) = match (non_empty_line_above, non_empty_line_below) {
3532 (Some((above_row, above_indent)), Some((below_row, below_indent))) => {
3533 if above_indent.raw_len() >= below_indent.raw_len() {
3534 (above_row, above_indent)
3535 } else {
3536 (below_row, below_indent)
3537 }
3538 }
3539 (Some(above), None) => above,
3540 (None, Some(below)) => below,
3541 _ => return None,
3542 };
3543
3544 target_indent = indent;
3545 buffer_row = row;
3546 }
3547
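// Walk outward from the target row, bounded by SEARCH_ROW_LIMIT, for the nearest
// lines above and below with strictly smaller indentation; they delimit the enclosing block.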
3548 let start = buffer_row.saturating_sub(SEARCH_ROW_LIMIT);
3549 let end = (max_row + 1).min(buffer_row + SEARCH_ROW_LIMIT);
3550
3551 let mut start_indent = None;
3552 for (row, indent) in self
3553 .text
3554 .reversed_line_indents_in_row_range(start..buffer_row)
3555 {
3556 accessed_row_counter += 1;
3557 if accessed_row_counter == YIELD_INTERVAL {
3558 accessed_row_counter = 0;
3559 yield_now().await;
3560 }
3561 if !indent.is_line_empty() && indent.raw_len() < target_indent.raw_len() {
3562 start_indent = Some((row, indent));
3563 break;
3564 }
3565 }
3566 let (start_row, start_indent_size) = start_indent?;
3567
3568 let mut end_indent = (end, None);
3569 for (row, indent) in self.text.line_indents_in_row_range((buffer_row + 1)..end) {
3570 accessed_row_counter += 1;
3571 if accessed_row_counter == YIELD_INTERVAL {
3572 accessed_row_counter = 0;
3573 yield_now().await;
3574 }
3575 if !indent.is_line_empty() && indent.raw_len() < target_indent.raw_len() {
3576 end_indent = (row.saturating_sub(1), Some(indent));
3577 break;
3578 }
3579 }
3580 let (end_row, end_indent_size) = end_indent;
3581
3582 let indent = if let Some(end_indent_size) = end_indent_size {
3583 if start_indent_size.raw_len() > end_indent_size.raw_len() {
3584 start_indent_size
3585 } else {
3586 end_indent_size
3587 }
3588 } else {
3589 start_indent_size
3590 };
3591
3592 Some((start_row..end_row, indent))
3593 }
3594
3595 /// Returns the selections intersecting the given range for all remote peers and,
/// if `include_local` is true, for the local replica as well.
3596 #[allow(clippy::type_complexity)]
3597 pub fn selections_in_range(
3598 &self,
3599 range: Range<Anchor>,
3600 include_local: bool,
3601 ) -> impl Iterator<
3602 Item = (
3603 ReplicaId,
3604 bool,
3605 CursorShape,
3606 impl Iterator<Item = &Selection<Anchor>> + '_,
3607 ),
3608 > + '_ {
3609 self.remote_selections
3610 .iter()
3611 .filter(move |(replica_id, set)| {
3612 (include_local || **replica_id != self.text.replica_id())
3613 && !set.selections.is_empty()
3614 })
3615 .map(move |(replica_id, set)| {
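// Binary-search for the slice of this replica's selections that overlaps `range`:
// biasing equal comparisons toward Greater/Less makes the two searches land on the
// first overlapping selection and one past the last, respectively.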
3616 let start_ix = match set.selections.binary_search_by(|probe| {
3617 probe.end.cmp(&range.start, self).then(Ordering::Greater)
3618 }) {
3619 Ok(ix) | Err(ix) => ix,
3620 };
3621 let end_ix = match set.selections.binary_search_by(|probe| {
3622 probe.start.cmp(&range.end, self).then(Ordering::Less)
3623 }) {
3624 Ok(ix) | Err(ix) => ix,
3625 };
3626
3627 (
3628 *replica_id,
3629 set.line_mode,
3630 set.cursor_shape,
3631 set.selections[start_ix..end_ix].iter(),
3632 )
3633 })
3634 }
3635
3636 /// Whether the buffer contains any git changes.
3637 pub fn has_git_diff(&self) -> bool {
3638 !self.git_diff.is_empty()
3639 }
3640
3641 /// Returns all the Git diff hunks intersecting the given
3642 /// row range.
3643 pub fn git_diff_hunks_in_row_range(
3644 &self,
3645 range: Range<BufferRow>,
3646 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3647 self.git_diff.hunks_in_row_range(range, self)
3648 }
3649
3650 /// Returns all the Git diff hunks intersecting the given
3651 /// range.
3652 pub fn git_diff_hunks_intersecting_range(
3653 &self,
3654 range: Range<Anchor>,
3655 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3656 self.git_diff.hunks_intersecting_range(range, self)
3657 }
3658
3659 /// Returns all the Git diff hunks intersecting the given
3660 /// range, in reverse order.
3661 pub fn git_diff_hunks_intersecting_range_rev(
3662 &self,
3663 range: Range<Anchor>,
3664 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3665 self.git_diff.hunks_intersecting_range_rev(range, self)
3666 }
3667
3668 /// Returns whether the buffer contains any diagnostics.
3669 pub fn has_diagnostics(&self) -> bool {
3670 !self.diagnostics.is_empty()
3671 }
3672
3673 /// Returns all the diagnostics intersecting the given range.
3674 pub fn diagnostics_in_range<'a, T, O>(
3675 &'a self,
3676 search_range: Range<T>,
3677 reversed: bool,
3678 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3679 where
3680 T: 'a + Clone + ToOffset,
3681 O: 'a + FromAnchor + Ord,
3682 {
3683 let mut iterators: Vec<_> = self
3684 .diagnostics
3685 .iter()
3686 .map(|(_, collection)| {
3687 collection
3688 .range::<T, O>(search_range.clone(), self, true, reversed)
3689 .peekable()
3690 })
3691 .collect();
3692
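// Merge the per-language-server iterators (each already sorted) by repeatedly
// yielding the entry that sorts first by start position, then severity, then
// group id, honoring the `reversed` flag, so the combined stream stays ordered.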
3693 std::iter::from_fn(move || {
3694 let (next_ix, _) = iterators
3695 .iter_mut()
3696 .enumerate()
3697 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
3698 .min_by(|(_, a), (_, b)| {
3699 let cmp = a
3700 .range
3701 .start
3702 .cmp(&b.range.start)
3703 // when ranges are equal, sort by diagnostic severity
3704 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
3705 // and stabilize order with group_id
3706 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
3707 if reversed {
3708 cmp.reverse()
3709 } else {
3710 cmp
3711 }
3712 })?;
3713 iterators[next_ix].next()
3714 })
3715 }
3716
3717 /// Returns all the diagnostic groups associated with the given
3718 /// language server id. If no language server id is provided,
3719 /// all diagnostic groups are returned.
3720 pub fn diagnostic_groups(
3721 &self,
3722 language_server_id: Option<LanguageServerId>,
3723 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
3724 let mut groups = Vec::new();
3725
3726 if let Some(language_server_id) = language_server_id {
3727 if let Ok(ix) = self
3728 .diagnostics
3729 .binary_search_by_key(&language_server_id, |e| e.0)
3730 {
3731 self.diagnostics[ix]
3732 .1
3733 .groups(language_server_id, &mut groups, self);
3734 }
3735 } else {
3736 for (language_server_id, diagnostics) in self.diagnostics.iter() {
3737 diagnostics.groups(*language_server_id, &mut groups, self);
3738 }
3739 }
3740
3741 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
3742 let a_start = &group_a.entries[group_a.primary_ix].range.start;
3743 let b_start = &group_b.entries[group_b.primary_ix].range.start;
3744 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
3745 });
3746
3747 groups
3748 }
3749
3750 /// Returns an iterator over the diagnostics for the given group.
3751 pub fn diagnostic_group<'a, O>(
3752 &'a self,
3753 group_id: usize,
3754 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3755 where
3756 O: 'a + FromAnchor,
3757 {
3758 self.diagnostics
3759 .iter()
3760 .flat_map(move |(_, set)| set.group(group_id, self))
3761 }
3762
3763 /// An integer version number that accounts for all updates besides
3764 /// the buffer's text itself (which is versioned via a version vector).
3765 pub fn non_text_state_update_count(&self) -> usize {
3766 self.non_text_state_update_count
3767 }
3768
3769 /// Returns a snapshot of the underlying file, if any.
3770 pub fn file(&self) -> Option<&Arc<dyn File>> {
3771 self.file.as_ref()
3772 }
3773
3774 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
3775 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
3776 if let Some(file) = self.file() {
3777 if file.path().file_name().is_none() || include_root {
3778 Some(file.full_path(cx))
3779 } else {
3780 Some(file.path().to_path_buf())
3781 }
3782 } else {
3783 None
3784 }
3785 }
3786}
3787
3788fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
3789 indent_size_for_text(text.chars_at(Point::new(row, 0)))
3790}
3791
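/// Scans leading whitespace and reports its kind and length. A minimal usage
/// sketch (marked `ignore` so it is not run as a doctest); the expected values
/// follow from the loop below, which takes the kind from the first whitespace
/// character and counts every leading space or tab:
///
/// ```ignore
/// let indent = indent_size_for_text("    let x = 1;".chars());
/// assert_eq!(indent.kind, IndentKind::Space);
/// assert_eq!(indent.len, 4);
///
/// let indent = indent_size_for_text("\t\tfn main() {}".chars());
/// assert_eq!(indent.kind, IndentKind::Tab);
/// assert_eq!(indent.len, 2);
/// ```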
3792fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
3793 let mut result = IndentSize::spaces(0);
3794 for c in text {
3795 let kind = match c {
3796 ' ' => IndentKind::Space,
3797 '\t' => IndentKind::Tab,
3798 _ => break,
3799 };
3800 if result.len == 0 {
3801 result.kind = kind;
3802 }
3803 result.len += 1;
3804 }
3805 result
3806}
3807
3808impl Clone for BufferSnapshot {
3809 fn clone(&self) -> Self {
3810 Self {
3811 text: self.text.clone(),
3812 git_diff: self.git_diff.clone(),
3813 syntax: self.syntax.clone(),
3814 file: self.file.clone(),
3815 remote_selections: self.remote_selections.clone(),
3816 diagnostics: self.diagnostics.clone(),
3817 language: self.language.clone(),
3818 non_text_state_update_count: self.non_text_state_update_count,
3819 }
3820 }
3821}
3822
3823impl Deref for BufferSnapshot {
3824 type Target = text::BufferSnapshot;
3825
3826 fn deref(&self) -> &Self::Target {
3827 &self.text
3828 }
3829}
3830
3831unsafe impl<'a> Send for BufferChunks<'a> {}
3832
3833impl<'a> BufferChunks<'a> {
3834 pub(crate) fn new(
3835 text: &'a Rope,
3836 range: Range<usize>,
3837 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
3838 diagnostics: bool,
3839 buffer_snapshot: Option<&'a BufferSnapshot>,
3840 ) -> Self {
3841 let mut highlights = None;
3842 if let Some((captures, highlight_maps)) = syntax {
3843 highlights = Some(BufferChunkHighlights {
3844 captures,
3845 next_capture: None,
3846 stack: Default::default(),
3847 highlight_maps,
3848 })
3849 }
3850
3851 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
3852 let chunks = text.chunks_in_range(range.clone());
3853
3854 let mut this = BufferChunks {
3855 range,
3856 buffer_snapshot,
3857 chunks,
3858 diagnostic_endpoints,
3859 error_depth: 0,
3860 warning_depth: 0,
3861 information_depth: 0,
3862 hint_depth: 0,
3863 unnecessary_depth: 0,
3864 highlights,
3865 };
3866 this.initialize_diagnostic_endpoints();
3867 this
3868 }
3869
3870 /// Seeks to the given byte range in the buffer.
3871 pub fn seek(&mut self, range: Range<usize>) {
3872 let old_range = std::mem::replace(&mut self.range, range.clone());
3873 self.chunks.set_range(self.range.clone());
3874 if let Some(highlights) = self.highlights.as_mut() {
3875 if old_range.start >= self.range.start && old_range.end <= self.range.end {
3876 // Reuse existing highlights stack, as the new range is a subrange of the old one.
3877 highlights
3878 .stack
3879 .retain(|(end_offset, _)| *end_offset > range.start);
3880 if let Some(capture) = &highlights.next_capture {
3881 if range.start >= capture.node.start_byte() {
3882 let next_capture_end = capture.node.end_byte();
3883 if range.start < next_capture_end {
3884 highlights.stack.push((
3885 next_capture_end,
3886 highlights.highlight_maps[capture.grammar_index].get(capture.index),
3887 ));
3888 }
3889 highlights.next_capture.take();
3890 }
3891 }
3892 } else if let Some(snapshot) = self.buffer_snapshot {
3893 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
3894 *highlights = BufferChunkHighlights {
3895 captures,
3896 next_capture: None,
3897 stack: Default::default(),
3898 highlight_maps,
3899 };
3900 } else {
3901 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
3902 // Seeking such BufferChunks is not supported.
3903 debug_assert!(false, "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot");
3904 }
3905
3906 highlights.captures.set_byte_range(self.range.clone());
3907 self.initialize_diagnostic_endpoints();
3908 }
3909 }
3910
3911 fn initialize_diagnostic_endpoints(&mut self) {
3912 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
3913 if let Some(buffer) = self.buffer_snapshot {
3914 let mut diagnostic_endpoints = Vec::new();
3915 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
3916 diagnostic_endpoints.push(DiagnosticEndpoint {
3917 offset: entry.range.start,
3918 is_start: true,
3919 severity: entry.diagnostic.severity,
3920 is_unnecessary: entry.diagnostic.is_unnecessary,
3921 });
3922 diagnostic_endpoints.push(DiagnosticEndpoint {
3923 offset: entry.range.end,
3924 is_start: false,
3925 severity: entry.diagnostic.severity,
3926 is_unnecessary: entry.diagnostic.is_unnecessary,
3927 });
3928 }
3929 diagnostic_endpoints
3930 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
3931 *diagnostics = diagnostic_endpoints.into_iter().peekable();
3932 }
3933 }
3934 }
3935
3936 /// The current byte offset in the buffer.
3937 pub fn offset(&self) -> usize {
3938 self.range.start
3939 }
3940
3941 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
3942 let depth = match endpoint.severity {
3943 DiagnosticSeverity::ERROR => &mut self.error_depth,
3944 DiagnosticSeverity::WARNING => &mut self.warning_depth,
3945 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
3946 DiagnosticSeverity::HINT => &mut self.hint_depth,
3947 _ => return,
3948 };
3949 if endpoint.is_start {
3950 *depth += 1;
3951 } else {
3952 *depth -= 1;
3953 }
3954
3955 if endpoint.is_unnecessary {
3956 if endpoint.is_start {
3957 self.unnecessary_depth += 1;
3958 } else {
3959 self.unnecessary_depth -= 1;
3960 }
3961 }
3962 }
3963
3964 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
3965 if self.error_depth > 0 {
3966 Some(DiagnosticSeverity::ERROR)
3967 } else if self.warning_depth > 0 {
3968 Some(DiagnosticSeverity::WARNING)
3969 } else if self.information_depth > 0 {
3970 Some(DiagnosticSeverity::INFORMATION)
3971 } else if self.hint_depth > 0 {
3972 Some(DiagnosticSeverity::HINT)
3973 } else {
3974 None
3975 }
3976 }
3977
3978 fn current_code_is_unnecessary(&self) -> bool {
3979 self.unnecessary_depth > 0
3980 }
3981}
3982
3983impl<'a> Iterator for BufferChunks<'a> {
3984 type Item = Chunk<'a>;
3985
3986 fn next(&mut self) -> Option<Self::Item> {
3987 let mut next_capture_start = usize::MAX;
3988 let mut next_diagnostic_endpoint = usize::MAX;
3989
3990 if let Some(highlights) = self.highlights.as_mut() {
3991 while let Some((parent_capture_end, _)) = highlights.stack.last() {
3992 if *parent_capture_end <= self.range.start {
3993 highlights.stack.pop();
3994 } else {
3995 break;
3996 }
3997 }
3998
3999 if highlights.next_capture.is_none() {
4000 highlights.next_capture = highlights.captures.next();
4001 }
4002
4003 while let Some(capture) = highlights.next_capture.as_ref() {
4004 if self.range.start < capture.node.start_byte() {
4005 next_capture_start = capture.node.start_byte();
4006 break;
4007 } else {
4008 let highlight_id =
4009 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4010 highlights
4011 .stack
4012 .push((capture.node.end_byte(), highlight_id));
4013 highlights.next_capture = highlights.captures.next();
4014 }
4015 }
4016 }
4017
4018 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4019 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4020 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4021 if endpoint.offset <= self.range.start {
4022 self.update_diagnostic_depths(endpoint);
4023 diagnostic_endpoints.next();
4024 } else {
4025 next_diagnostic_endpoint = endpoint.offset;
4026 break;
4027 }
4028 }
4029 }
4030 self.diagnostic_endpoints = diagnostic_endpoints;
4031
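// Emit the next slice of text, clipped so it ends at the earliest of: the end of
// the current rope chunk, the next highlight-capture boundary (the start of the
// next capture or the end of the enclosing one), and the next diagnostic endpoint.
// This keeps every returned `Chunk` uniformly styled.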
4032 if let Some(chunk) = self.chunks.peek() {
4033 let chunk_start = self.range.start;
4034 let mut chunk_end = (self.chunks.offset() + chunk.len())
4035 .min(next_capture_start)
4036 .min(next_diagnostic_endpoint);
4037 let mut highlight_id = None;
4038 if let Some(highlights) = self.highlights.as_ref() {
4039 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4040 chunk_end = chunk_end.min(*parent_capture_end);
4041 highlight_id = Some(*parent_highlight_id);
4042 }
4043 }
4044
4045 let slice =
4046 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4047 self.range.start = chunk_end;
4048 if self.range.start == self.chunks.offset() + chunk.len() {
4049 self.chunks.next().unwrap();
4050 }
4051
4052 Some(Chunk {
4053 text: slice,
4054 syntax_highlight_id: highlight_id,
4055 diagnostic_severity: self.current_diagnostic_severity(),
4056 is_unnecessary: self.current_code_is_unnecessary(),
4057 ..Default::default()
4058 })
4059 } else {
4060 None
4061 }
4062 }
4063}
4064
4065impl operation_queue::Operation for Operation {
4066 fn lamport_timestamp(&self) -> clock::Lamport {
4067 match self {
4068 Operation::Buffer(_) => {
4069 unreachable!("buffer operations should never be deferred at this layer")
4070 }
4071 Operation::UpdateDiagnostics {
4072 lamport_timestamp, ..
4073 }
4074 | Operation::UpdateSelections {
4075 lamport_timestamp, ..
4076 }
4077 | Operation::UpdateCompletionTriggers {
4078 lamport_timestamp, ..
4079 } => *lamport_timestamp,
4080 }
4081 }
4082}
4083
4084impl Default for Diagnostic {
4085 fn default() -> Self {
4086 Self {
4087 source: Default::default(),
4088 code: None,
4089 severity: DiagnosticSeverity::ERROR,
4090 message: Default::default(),
4091 group_id: 0,
4092 is_primary: false,
4093 is_disk_based: false,
4094 is_unnecessary: false,
4095 data: None,
4096 }
4097 }
4098}
4099
4100impl IndentSize {
4101 /// Returns an [IndentSize] representing the given number of spaces.
4102 pub fn spaces(len: u32) -> Self {
4103 Self {
4104 len,
4105 kind: IndentKind::Space,
4106 }
4107 }
4108
4109 /// Returns an [IndentSize] representing a tab.
4110 pub fn tab() -> Self {
4111 Self {
4112 len: 1,
4113 kind: IndentKind::Tab,
4114 }
4115 }
4116
4117 /// An iterator over the characters represented by this [IndentSize].
4118 pub fn chars(&self) -> impl Iterator<Item = char> {
4119 iter::repeat(self.char()).take(self.len as usize)
4120 }
4121
4122 /// The character representation of this [IndentSize].
4123 pub fn char(&self) -> char {
4124 match self.kind {
4125 IndentKind::Space => ' ',
4126 IndentKind::Tab => '\t',
4127 }
4128 }
4129
4130 /// Consumes the current [IndentSize] and returns a new one that has
4131 /// been shrunk or enlarged by the given size, in the given direction.
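///
/// A small usage sketch (marked `ignore`, not a doctest). Shrinking only applies
/// when the kinds match and the current size is large enough, and growing an
/// empty indent adopts the other indent wholesale:
///
/// ```ignore
/// use std::cmp::Ordering;
///
/// let four_spaces = IndentSize::spaces(4);
/// assert_eq!(four_spaces.with_delta(Ordering::Greater, IndentSize::spaces(2)).len, 6);
/// assert_eq!(four_spaces.with_delta(Ordering::Less, IndentSize::tab()).len, 4); // kind mismatch: unchanged
/// assert_eq!(IndentSize::spaces(0).with_delta(Ordering::Greater, IndentSize::tab()).kind, IndentKind::Tab);
/// ```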
4132 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4133 match direction {
4134 Ordering::Less => {
4135 if self.kind == size.kind && self.len >= size.len {
4136 self.len -= size.len;
4137 }
4138 }
4139 Ordering::Equal => {}
4140 Ordering::Greater => {
4141 if self.len == 0 {
4142 self = size;
4143 } else if self.kind == size.kind {
4144 self.len += size.len;
4145 }
4146 }
4147 }
4148 self
4149 }
4150}
4151
4152#[cfg(any(test, feature = "test-support"))]
4153pub struct TestFile {
4154 pub path: Arc<Path>,
4155 pub root_name: String,
4156}
4157
4158#[cfg(any(test, feature = "test-support"))]
4159impl File for TestFile {
4160 fn path(&self) -> &Arc<Path> {
4161 &self.path
4162 }
4163
4164 fn full_path(&self, _: &gpui::AppContext) -> PathBuf {
4165 PathBuf::from(&self.root_name).join(self.path.as_ref())
4166 }
4167
4168 fn as_local(&self) -> Option<&dyn LocalFile> {
4169 None
4170 }
4171
4172 fn mtime(&self) -> Option<SystemTime> {
4173 unimplemented!()
4174 }
4175
4176 fn file_name<'a>(&'a self, _: &'a gpui::AppContext) -> &'a std::ffi::OsStr {
4177 self.path().file_name().unwrap_or(self.root_name.as_ref())
4178 }
4179
4180 fn worktree_id(&self, _: &AppContext) -> WorktreeId {
4181 WorktreeId::from_usize(0)
4182 }
4183
4184 fn is_deleted(&self) -> bool {
4185 unimplemented!()
4186 }
4187
4188 fn as_any(&self) -> &dyn std::any::Any {
4189 unimplemented!()
4190 }
4191
4192 fn to_proto(&self, _: &AppContext) -> rpc::proto::File {
4193 unimplemented!()
4194 }
4195
4196 fn is_private(&self) -> bool {
4197 false
4198 }
4199}
4200
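/// Collapses an ascending sequence of row numbers into contiguous ranges, each
/// capped at `max_len` rows. A usage sketch (marked `ignore`, not a doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1u32, 2, 3, 5, 6, 10].into_iter(), 2).collect();
/// assert_eq!(ranges, vec![1..3, 3..4, 5..7, 10..11]);
/// ```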
4201pub(crate) fn contiguous_ranges(
4202 values: impl Iterator<Item = u32>,
4203 max_len: usize,
4204) -> impl Iterator<Item = Range<u32>> {
4205 let mut values = values;
4206 let mut current_range: Option<Range<u32>> = None;
4207 std::iter::from_fn(move || loop {
4208 if let Some(value) = values.next() {
4209 if let Some(range) = &mut current_range {
4210 if value == range.end && range.len() < max_len {
4211 range.end += 1;
4212 continue;
4213 }
4214 }
4215
4216 let prev_range = current_range.clone();
4217 current_range = Some(value..(value + 1));
4218 if prev_range.is_some() {
4219 return prev_range;
4220 }
4221 } else {
4222 return current_range.take();
4223 }
4224 })
4225}
4226
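/// Classifies characters as word, whitespace, or punctuation, optionally using the
/// word characters declared by a language scope. A usage sketch (marked `ignore`,
/// not a doctest); with no scope, only alphanumerics and `_` count as word characters:
///
/// ```ignore
/// let classifier = CharClassifier::new(None).ignore_punctuation(false);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind('_'), CharKind::Word);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// assert_eq!(classifier.kind('-'), CharKind::Punctuation);
/// ```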
4227#[derive(Default, Debug)]
4228pub struct CharClassifier {
4229 scope: Option<LanguageScope>,
4230 for_completion: bool,
4231 ignore_punctuation: bool,
4232}
4233
4234impl CharClassifier {
4235 pub fn new(scope: Option<LanguageScope>) -> Self {
4236 Self {
4237 scope,
4238 for_completion: false,
4239 ignore_punctuation: false,
4240 }
4241 }
4242
4243 pub fn for_completion(self, for_completion: bool) -> Self {
4244 Self {
4245 for_completion,
4246 ..self
4247 }
4248 }
4249
4250 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4251 Self {
4252 ignore_punctuation,
4253 ..self
4254 }
4255 }
4256
4257 pub fn is_whitespace(&self, c: char) -> bool {
4258 self.kind(c) == CharKind::Whitespace
4259 }
4260
4261 pub fn is_word(&self, c: char) -> bool {
4262 self.kind(c) == CharKind::Word
4263 }
4264
4265 pub fn is_punctuation(&self, c: char) -> bool {
4266 self.kind(c) == CharKind::Punctuation
4267 }
4268
4269 pub fn kind(&self, c: char) -> CharKind {
4270 if c.is_whitespace() {
4271 return CharKind::Whitespace;
4272 } else if c.is_alphanumeric() || c == '_' {
4273 return CharKind::Word;
4274 }
4275
4276 if let Some(scope) = &self.scope {
4277 if let Some(characters) = scope.word_characters() {
4278 if characters.contains(&c) {
4279 if c == '-' && !self.for_completion && !self.ignore_punctuation {
4280 return CharKind::Punctuation;
4281 }
4282 return CharKind::Word;
4283 }
4284 }
4285 }
4286
4287 if self.ignore_punctuation {
4288 CharKind::Word
4289 } else {
4290 CharKind::Punctuation
4291 }
4292 }
4293}
4294
4295/// Find all of the ranges of whitespace that occur at the ends of lines
4296/// in the given rope.
4297///
4298/// This could also be done with a regex search, but this implementation
4299/// avoids copying text.
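///
/// A usage sketch (marked `ignore`, not a doctest; it assumes `Rope` can be built
/// from a `&str`, e.g. via `From`):
///
/// ```ignore
/// let rope = Rope::from("foo \nbar\t\t\nbaz");
/// // Trailing whitespace after "foo " (offsets 3..4) and after "bar\t\t" (offsets 8..10).
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![3..4, 8..10]);
/// ```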
4300pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4301 let mut ranges = Vec::new();
4302
4303 let mut offset = 0;
4304 let mut prev_chunk_trailing_whitespace_range = 0..0;
4305 for chunk in rope.chunks() {
4306 let mut prev_line_trailing_whitespace_range = 0..0;
4307 for (i, line) in chunk.split('\n').enumerate() {
4308 let line_end_offset = offset + line.len();
4309 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4310 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4311
4312 if i == 0 && trimmed_line_len == 0 {
4313 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4314 }
4315 if !prev_line_trailing_whitespace_range.is_empty() {
4316 ranges.push(prev_line_trailing_whitespace_range);
4317 }
4318
4319 offset = line_end_offset + 1;
4320 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4321 }
4322
4323 offset -= 1;
4324 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4325 }
4326
4327 if !prev_chunk_trailing_whitespace_range.is_empty() {
4328 ranges.push(prev_chunk_trailing_whitespace_range);
4329 }
4330
4331 ranges
4332}