1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 markdown::ParsedMarkdown,
5 proto, Grammar, Language, LanguageRegistry,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 language_settings::{language_settings, IndentGuideSettings, LanguageSettings},
10 markdown::parse_markdown,
11 outline::OutlineItem,
12 syntax_map::{
13 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
14 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
15 },
16 task_context::RunnableRange,
17 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
18};
19use anyhow::{anyhow, Context, Result};
20use async_watch as watch;
21pub use clock::ReplicaId;
22use futures::channel::oneshot;
23use gpui::{
24 AnyElement, AppContext, EventEmitter, HighlightStyle, ModelContext, Pixels, Task, TaskLabel,
25 WindowContext,
26};
27use lsp::LanguageServerId;
28use parking_lot::Mutex;
29use schemars::JsonSchema;
30use serde::{Deserialize, Serialize};
31use serde_json::Value;
32use settings::WorktreeId;
33use similar::{ChangeTag, TextDiff};
34use smallvec::SmallVec;
35use smol::future::yield_now;
36use std::{
37 any::Any,
38 cell::Cell,
39 cmp::{self, Ordering, Reverse},
40 collections::BTreeMap,
41 ffi::OsStr,
42 fmt,
43 future::Future,
44 iter::{self, Iterator, Peekable},
45 mem,
46 ops::{Deref, DerefMut, Range},
47 path::{Path, PathBuf},
48 str,
49 sync::{Arc, LazyLock},
50 time::{Duration, Instant, SystemTime},
51 vec,
52};
53use sum_tree::TreeMap;
54use text::operation_queue::OperationQueue;
55use text::*;
56pub use text::{
57 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
58 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
59 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
60 Transaction, TransactionId, Unclipped,
61};
62use theme::SyntaxTheme;
63#[cfg(any(test, feature = "test-support"))]
64use util::RandomCharIter;
65use util::RangeExt;
66
67#[cfg(any(test, feature = "test-support"))]
68pub use {tree_sitter_rust, tree_sitter_typescript};
69
70pub use lsp::DiagnosticSeverity;
71
72/// A label for the background task spawned by the buffer to compute
73/// a diff against the contents of its file.
74pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
75
/// Indicates whether a [Buffer] has permission to edit.
77#[derive(PartialEq, Clone, Copy, Debug)]
78pub enum Capability {
79 /// The buffer is a mutable replica.
80 ReadWrite,
81 /// The buffer is a read-only replica.
82 ReadOnly,
83}
84
85pub type BufferRow = u32;
86
87/// An in-memory representation of a source code file, including its text,
88/// syntax trees, git status, and diagnostics.
89pub struct Buffer {
90 text: TextBuffer,
91 diff_base: Option<Rope>,
92 git_diff: git::diff::BufferDiff,
93 file: Option<Arc<dyn File>>,
94 /// The mtime of the file when this buffer was last loaded from
95 /// or saved to disk.
96 saved_mtime: Option<SystemTime>,
97 /// The version vector when this buffer was last loaded from
98 /// or saved to disk.
99 saved_version: clock::Global,
100 preview_version: clock::Global,
101 transaction_depth: usize,
102 was_dirty_before_starting_transaction: Option<bool>,
103 reload_task: Option<Task<Result<()>>>,
104 language: Option<Arc<Language>>,
105 autoindent_requests: Vec<Arc<AutoindentRequest>>,
106 pending_autoindent: Option<Task<()>>,
107 sync_parse_timeout: Duration,
108 syntax_map: Mutex<SyntaxMap>,
109 parsing_in_background: bool,
110 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
111 non_text_state_update_count: usize,
112 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
113 remote_selections: TreeMap<ReplicaId, SelectionSet>,
114 diagnostics_timestamp: clock::Lamport,
115 completion_triggers: Vec<String>,
116 completion_triggers_timestamp: clock::Lamport,
117 deferred_ops: OperationQueue<Operation>,
118 capability: Capability,
119 has_conflict: bool,
120 diff_base_version: usize,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The contents of the cell are `(self.version, has_changes)` at the time of the last call.
123 has_unsaved_edits: Cell<(clock::Global, bool)>,
124}
125
126#[derive(Copy, Clone, Debug, PartialEq, Eq)]
127pub enum ParseStatus {
128 Idle,
129 Parsing,
130}
131
132/// An immutable, cheaply cloneable representation of a fixed
133/// state of a buffer.
134pub struct BufferSnapshot {
135 text: text::BufferSnapshot,
136 git_diff: git::diff::BufferDiff,
137 pub(crate) syntax: SyntaxSnapshot,
138 file: Option<Arc<dyn File>>,
139 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
140 remote_selections: TreeMap<ReplicaId, SelectionSet>,
141 language: Option<Arc<Language>>,
142 non_text_state_update_count: usize,
143}
144
145/// The kind and amount of indentation in a particular line. For now,
146/// assumes that indentation is all the same character.
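///
/// An illustrative sketch (not taken from this crate's tests) of a four-space
/// indent and a one-tab indent:
///
/// ```ignore
/// let four_spaces = IndentSize { len: 4, kind: IndentKind::Space };
/// let one_tab = IndentSize { len: 1, kind: IndentKind::Tab };
/// assert_eq!(four_spaces.kind, IndentKind::Space);
/// assert_eq!(one_tab.len, 1);
/// ```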
147#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
148pub struct IndentSize {
149 /// The number of bytes that comprise the indentation.
150 pub len: u32,
151 /// The kind of whitespace used for indentation.
152 pub kind: IndentKind,
153}
154
155/// A whitespace character that's used for indentation.
156#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
157pub enum IndentKind {
158 /// An ASCII space character.
159 #[default]
160 Space,
161 /// An ASCII tab character.
162 Tab,
163}
164
165/// The shape of a selection cursor.
166#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
167#[serde(rename_all = "snake_case")]
168pub enum CursorShape {
169 /// A vertical bar
170 #[default]
171 Bar,
172 /// A block that surrounds the following character
173 Block,
174 /// An underline that runs along the following character
175 Underscore,
176 /// A box drawn around the following character
177 Hollow,
178}
179
180#[derive(Clone, Debug)]
181struct SelectionSet {
182 line_mode: bool,
183 cursor_shape: CursorShape,
184 selections: Arc<[Selection<Anchor>]>,
185 lamport_timestamp: clock::Lamport,
186}
187
188/// A diagnostic associated with a certain range of a buffer.
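///
/// A hypothetical sketch (field values invented for illustration) of a primary
/// diagnostic and an associated diagnostic that share the same `group_id`:
///
/// ```ignore
/// let primary = Diagnostic {
///     source: Some("rustc".to_string()),
///     code: Some("E0308".to_string()),
///     severity: DiagnosticSeverity::ERROR,
///     message: "mismatched types".to_string(),
///     group_id: 1,
///     is_primary: true,
///     is_disk_based: true,
///     is_unnecessary: false,
///     data: None,
/// };
/// let related = Diagnostic {
///     message: "expected `u32` because of this annotation".to_string(),
///     is_primary: false,
///     ..primary.clone()
/// };
/// assert_eq!(primary.group_id, related.group_id);
/// ```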
189#[derive(Clone, Debug, PartialEq, Eq)]
190pub struct Diagnostic {
191 /// The name of the service that produced this diagnostic.
192 pub source: Option<String>,
193 /// A machine-readable code that identifies this diagnostic.
194 pub code: Option<String>,
195 /// Whether this diagnostic is a hint, warning, or error.
196 pub severity: DiagnosticSeverity,
197 /// The human-readable message associated with this diagnostic.
198 pub message: String,
199 /// An id that identifies the group to which this diagnostic belongs.
200 ///
201 /// When a language server produces a diagnostic with
202 /// one or more associated diagnostics, those diagnostics are all
203 /// assigned a single group id.
204 pub group_id: usize,
205 /// Whether this diagnostic is the primary diagnostic for its group.
206 ///
207 /// In a given group, the primary diagnostic is the top-level diagnostic
208 /// returned by the language server. The non-primary diagnostics are the
209 /// associated diagnostics.
210 pub is_primary: bool,
211 /// Whether this diagnostic is considered to originate from an analysis of
212 /// files on disk, as opposed to any unsaved buffer contents. This is a
213 /// property of a given diagnostic source, and is configured for a given
214 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
215 /// for the language server.
216 pub is_disk_based: bool,
217 /// Whether this diagnostic marks unnecessary code.
218 pub is_unnecessary: bool,
    /// Data from the language server that produced this diagnostic. Passed back
    /// to the language server when we request code actions for this diagnostic.
220 pub data: Option<Value>,
221}
222
223/// TODO - move this into the `project` crate and make it private.
224pub async fn prepare_completion_documentation(
225 documentation: &lsp::Documentation,
226 language_registry: &Arc<LanguageRegistry>,
227 language: Option<Arc<Language>>,
228) -> Documentation {
229 match documentation {
230 lsp::Documentation::String(text) => {
231 if text.lines().count() <= 1 {
232 Documentation::SingleLine(text.clone())
233 } else {
234 Documentation::MultiLinePlainText(text.clone())
235 }
236 }
237
238 lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
239 lsp::MarkupKind::PlainText => {
240 if value.lines().count() <= 1 {
241 Documentation::SingleLine(value.clone())
242 } else {
243 Documentation::MultiLinePlainText(value.clone())
244 }
245 }
246
247 lsp::MarkupKind::Markdown => {
248 let parsed = parse_markdown(value, language_registry, language).await;
249 Documentation::MultiLineMarkdown(parsed)
250 }
251 },
252 }
253}
254
255/// Documentation associated with a [`Completion`].
256#[derive(Clone, Debug)]
257pub enum Documentation {
258 /// There is no documentation for this completion.
259 Undocumented,
260 /// A single line of documentation.
261 SingleLine(String),
262 /// Multiple lines of plain text documentation.
263 MultiLinePlainText(String),
264 /// Markdown documentation.
265 MultiLineMarkdown(ParsedMarkdown),
266}
267
268/// An operation used to synchronize this buffer with its other replicas.
269#[derive(Clone, Debug, PartialEq)]
270pub enum Operation {
271 /// A text operation.
272 Buffer(text::Operation),
273
274 /// An update to the buffer's diagnostics.
275 UpdateDiagnostics {
276 /// The id of the language server that produced the new diagnostics.
277 server_id: LanguageServerId,
278 /// The diagnostics.
279 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
280 /// The buffer's lamport timestamp.
281 lamport_timestamp: clock::Lamport,
282 },
283
284 /// An update to the most recent selections in this buffer.
285 UpdateSelections {
286 /// The selections.
287 selections: Arc<[Selection<Anchor>]>,
288 /// The buffer's lamport timestamp.
289 lamport_timestamp: clock::Lamport,
290 /// Whether the selections are in 'line mode'.
291 line_mode: bool,
292 /// The [`CursorShape`] associated with these selections.
293 cursor_shape: CursorShape,
294 },
295
296 /// An update to the characters that should trigger autocompletion
297 /// for this buffer.
298 UpdateCompletionTriggers {
299 /// The characters that trigger autocompletion.
300 triggers: Vec<String>,
301 /// The buffer's lamport timestamp.
302 lamport_timestamp: clock::Lamport,
303 },
304}
305
306/// An event that occurs in a buffer.
307#[derive(Clone, Debug, PartialEq)]
308pub enum BufferEvent {
309 /// The buffer was changed in a way that must be
310 /// propagated to its other replicas.
311 Operation(Operation),
312 /// The buffer was edited.
313 Edited,
314 /// The buffer's `dirty` bit changed.
315 DirtyChanged,
316 /// The buffer was saved.
317 Saved,
318 /// The buffer's file was changed on disk.
319 FileHandleChanged,
320 /// The buffer was reloaded.
321 Reloaded,
322 /// The buffer's diff_base changed.
323 DiffBaseChanged,
    /// The buffer's Git diff against its diff base was recalculated.
325 DiffUpdated,
326 /// The buffer's language was changed.
327 LanguageChanged,
328 /// The buffer's syntax trees were updated.
329 Reparsed,
330 /// The buffer's diagnostics were updated.
331 DiagnosticsUpdated,
332 /// The buffer gained or lost editing capabilities.
333 CapabilityChanged,
334 /// The buffer was explicitly requested to close.
335 Closed,
336 /// The buffer was discarded when closing.
337 Discarded,
338}
339
340/// The file associated with a buffer.
341pub trait File: Send + Sync {
342 /// Returns the [`LocalFile`] associated with this file, if the
343 /// file is local.
344 fn as_local(&self) -> Option<&dyn LocalFile>;
345
346 /// Returns whether this file is local.
347 fn is_local(&self) -> bool {
348 self.as_local().is_some()
349 }
350
351 /// Returns the file's mtime.
352 fn mtime(&self) -> Option<SystemTime>;
353
354 /// Returns the path of this file relative to the worktree's root directory.
355 fn path(&self) -> &Arc<Path>;
356
357 /// Returns the path of this file relative to the worktree's parent directory (this means it
358 /// includes the name of the worktree's root folder).
359 fn full_path(&self, cx: &AppContext) -> PathBuf;
360
361 /// Returns the last component of this handle's absolute path. If this handle refers to the root
362 /// of its worktree, then this method will return the name of the worktree itself.
363 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
364
365 /// Returns the id of the worktree to which this file belongs.
366 ///
367 /// This is needed for looking up project-specific settings.
368 fn worktree_id(&self, cx: &AppContext) -> WorktreeId;
369
370 /// Returns whether the file has been deleted.
371 fn is_deleted(&self) -> bool;
372
    /// Returns whether the file existed on disk at some point.
374 fn is_created(&self) -> bool {
375 self.mtime().is_some()
376 }
377
378 /// Converts this file into an [`Any`] trait object.
379 fn as_any(&self) -> &dyn Any;
380
381 /// Converts this file into a protobuf message.
382 fn to_proto(&self, cx: &AppContext) -> rpc::proto::File;
383
    /// Returns whether Zed considers this to be a private file.
385 fn is_private(&self) -> bool;
386}
387
388/// The file associated with a buffer, in the case where the file is on the local disk.
389pub trait LocalFile: File {
    /// Returns the absolute path of this file.
391 fn abs_path(&self, cx: &AppContext) -> PathBuf;
392
393 /// Loads the file's contents from disk.
394 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
395
396 /// Returns true if the file should not be shared with collaborators.
397 fn is_private(&self, _: &AppContext) -> bool {
398 false
399 }
400}
401
402/// The auto-indent behavior associated with an editing operation.
403/// For some editing operations, each affected line of text has its
404/// indentation recomputed. For other operations, the entire block
405/// of edited text is adjusted uniformly.
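///
/// A minimal usage sketch (assumes a `buffer: &mut Buffer` and a
/// `cx: &mut ModelContext<Buffer>` are in scope; illustrative only):
///
/// ```ignore
/// // Re-indent every inserted line based on the surrounding code.
/// buffer.edit(
///     [(0..0, "if x {\n    y();\n}\n")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
///
/// // Shift a pasted block uniformly, preserving its internal structure.
/// buffer.edit(
///     [(0..0, "match x {\n    Some(_) => {}\n    None => {}\n}\n")],
///     Some(AutoindentMode::Block { original_indent_columns: vec![0] }),
///     cx,
/// );
/// ```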
406#[derive(Clone, Debug)]
407pub enum AutoindentMode {
408 /// Indent each line of inserted text.
409 EachLine,
410 /// Apply the same indentation adjustment to all of the lines
411 /// in a given insertion.
412 Block {
413 /// The original indentation level of the first line of each
414 /// insertion, if it has been copied.
415 original_indent_columns: Vec<u32>,
416 },
417}
418
419#[derive(Clone)]
420struct AutoindentRequest {
421 before_edit: BufferSnapshot,
422 entries: Vec<AutoindentRequestEntry>,
423 is_block_mode: bool,
424}
425
426#[derive(Clone)]
427struct AutoindentRequestEntry {
428 /// A range of the buffer whose indentation should be adjusted.
429 range: Range<Anchor>,
430 /// Whether or not these lines should be considered brand new, for the
431 /// purpose of auto-indent. When text is not new, its indentation will
432 /// only be adjusted if the suggested indentation level has *changed*
433 /// since the edit was made.
434 first_line_is_new: bool,
435 indent_size: IndentSize,
436 original_indent_column: Option<u32>,
437}
438
439#[derive(Debug)]
440struct IndentSuggestion {
441 basis_row: u32,
442 delta: Ordering,
443 within_error: bool,
444}
445
446struct BufferChunkHighlights<'a> {
447 captures: SyntaxMapCaptures<'a>,
448 next_capture: Option<SyntaxMapCapture<'a>>,
449 stack: Vec<(usize, HighlightId)>,
450 highlight_maps: Vec<HighlightMap>,
451}
452
453/// An iterator that yields chunks of a buffer's text, along with their
454/// syntax highlights and diagnostic status.
455pub struct BufferChunks<'a> {
456 buffer_snapshot: Option<&'a BufferSnapshot>,
457 range: Range<usize>,
458 chunks: text::Chunks<'a>,
459 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
460 error_depth: usize,
461 warning_depth: usize,
462 information_depth: usize,
463 hint_depth: usize,
464 unnecessary_depth: usize,
465 highlights: Option<BufferChunkHighlights<'a>>,
466}
467
468/// A chunk of a buffer's text, along with its syntax highlight and
469/// diagnostic status.
470#[derive(Clone, Debug, Default)]
471pub struct Chunk<'a> {
472 /// The text of the chunk.
473 pub text: &'a str,
474 /// The syntax highlighting style of the chunk.
475 pub syntax_highlight_id: Option<HighlightId>,
476 /// The highlight style that has been applied to this chunk in
477 /// the editor.
478 pub highlight_style: Option<HighlightStyle>,
479 /// The severity of diagnostic associated with this chunk, if any.
480 pub diagnostic_severity: Option<DiagnosticSeverity>,
481 /// Whether this chunk of text is marked as unnecessary.
482 pub is_unnecessary: bool,
483 /// Whether this chunk of text was originally a tab character.
484 pub is_tab: bool,
485 /// An optional recipe for how the chunk should be presented.
486 pub renderer: Option<ChunkRenderer>,
487}
488
489/// A recipe for how the chunk should be presented.
490#[derive(Clone)]
491pub struct ChunkRenderer {
    /// Creates a custom element to represent this chunk.
493 pub render: Arc<dyn Send + Sync + Fn(&mut ChunkRendererContext) -> AnyElement>,
494 /// If true, the element is constrained to the shaped width of the text.
495 pub constrain_width: bool,
496}
497
498pub struct ChunkRendererContext<'a, 'b> {
499 pub context: &'a mut WindowContext<'b>,
500 pub max_width: Pixels,
501}
502
503impl fmt::Debug for ChunkRenderer {
504 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
505 f.debug_struct("ChunkRenderer")
506 .field("constrain_width", &self.constrain_width)
507 .finish()
508 }
509}
510
511impl<'a, 'b> Deref for ChunkRendererContext<'a, 'b> {
512 type Target = WindowContext<'b>;
513
514 fn deref(&self) -> &Self::Target {
515 self.context
516 }
517}
518
519impl<'a, 'b> DerefMut for ChunkRendererContext<'a, 'b> {
520 fn deref_mut(&mut self) -> &mut Self::Target {
521 self.context
522 }
523}
524
525/// A set of edits to a given version of a buffer, computed asynchronously.
526#[derive(Debug)]
527pub struct Diff {
528 pub(crate) base_version: clock::Global,
529 line_ending: LineEnding,
530 edits: Vec<(Range<usize>, Arc<str>)>,
531}
532
533#[derive(Clone, Copy)]
534pub(crate) struct DiagnosticEndpoint {
535 offset: usize,
536 is_start: bool,
537 severity: DiagnosticSeverity,
538 is_unnecessary: bool,
539}
540
541/// A class of characters, used for characterizing a run of text.
542#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
543pub enum CharKind {
544 /// Whitespace.
545 Whitespace,
546 /// Punctuation.
547 Punctuation,
548 /// Word.
549 Word,
550}
551
/// A runnable is a set of data about a region that could be resolved into a task.
553pub struct Runnable {
554 pub tags: SmallVec<[RunnableTag; 1]>,
555 pub language: Arc<Language>,
556 pub buffer: BufferId,
557}
558
559#[derive(Clone, Debug, PartialEq)]
560pub struct IndentGuide {
561 pub buffer_id: BufferId,
562 pub start_row: BufferRow,
563 pub end_row: BufferRow,
564 pub depth: u32,
565 pub tab_size: u32,
566 pub settings: IndentGuideSettings,
567}
568
569impl IndentGuide {
570 pub fn indent_level(&self) -> u32 {
571 self.depth * self.tab_size
572 }
573}
574
575impl Buffer {
576 /// Create a new buffer with the given base text.
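    ///
    /// A minimal sketch of constructing one (assumes a gpui `AppContext` with a
    /// `new_model` constructor; illustrative only):
    ///
    /// ```ignore
    /// let buffer = cx.new_model(|cx| Buffer::local("hello world", cx));
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.edit([(5..5, ",")], None, cx);
    /// });
    /// ```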
577 pub fn local<T: Into<String>>(base_text: T, cx: &mut ModelContext<Self>) -> Self {
578 Self::build(
579 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
580 None,
581 None,
582 Capability::ReadWrite,
583 )
584 }
585
586 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
587 pub fn local_normalized(
588 base_text_normalized: Rope,
589 line_ending: LineEnding,
590 cx: &mut ModelContext<Self>,
591 ) -> Self {
592 Self::build(
593 TextBuffer::new_normalized(
594 0,
595 cx.entity_id().as_non_zero_u64().into(),
596 line_ending,
597 base_text_normalized,
598 ),
599 None,
600 None,
601 Capability::ReadWrite,
602 )
603 }
604
605 /// Create a new buffer that is a replica of a remote buffer.
606 pub fn remote(
607 remote_id: BufferId,
608 replica_id: ReplicaId,
609 capability: Capability,
610 base_text: impl Into<String>,
611 ) -> Self {
612 Self::build(
613 TextBuffer::new(replica_id, remote_id, base_text.into()),
614 None,
615 None,
616 capability,
617 )
618 }
619
620 /// Create a new buffer that is a replica of a remote buffer, populating its
621 /// state from the given protobuf message.
622 pub fn from_proto(
623 replica_id: ReplicaId,
624 capability: Capability,
625 message: proto::BufferState,
626 file: Option<Arc<dyn File>>,
627 ) -> Result<Self> {
628 let buffer_id = BufferId::new(message.id)
629 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
630 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
631 let mut this = Self::build(buffer, message.diff_base, file, capability);
632 this.text.set_line_ending(proto::deserialize_line_ending(
633 rpc::proto::LineEnding::from_i32(message.line_ending)
634 .ok_or_else(|| anyhow!("missing line_ending"))?,
635 ));
636 this.saved_version = proto::deserialize_version(&message.saved_version);
637 this.saved_mtime = message.saved_mtime.map(|time| time.into());
638 Ok(this)
639 }
640
641 /// Serialize the buffer's state to a protobuf message.
642 pub fn to_proto(&self, cx: &AppContext) -> proto::BufferState {
643 proto::BufferState {
644 id: self.remote_id().into(),
645 file: self.file.as_ref().map(|f| f.to_proto(cx)),
646 base_text: self.base_text().to_string(),
647 diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
648 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
649 saved_version: proto::serialize_version(&self.saved_version),
650 saved_mtime: self.saved_mtime.map(|time| time.into()),
651 }
652 }
653
654 /// Serialize as protobufs all of the changes to the buffer since the given version.
655 pub fn serialize_ops(
656 &self,
657 since: Option<clock::Global>,
658 cx: &AppContext,
659 ) -> Task<Vec<proto::Operation>> {
660 let mut operations = Vec::new();
661 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
662
663 operations.extend(self.remote_selections.iter().map(|(_, set)| {
664 proto::serialize_operation(&Operation::UpdateSelections {
665 selections: set.selections.clone(),
666 lamport_timestamp: set.lamport_timestamp,
667 line_mode: set.line_mode,
668 cursor_shape: set.cursor_shape,
669 })
670 }));
671
672 for (server_id, diagnostics) in &self.diagnostics {
673 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
674 lamport_timestamp: self.diagnostics_timestamp,
675 server_id: *server_id,
676 diagnostics: diagnostics.iter().cloned().collect(),
677 }));
678 }
679
680 operations.push(proto::serialize_operation(
681 &Operation::UpdateCompletionTriggers {
682 triggers: self.completion_triggers.clone(),
683 lamport_timestamp: self.completion_triggers_timestamp,
684 },
685 ));
686
687 let text_operations = self.text.operations().clone();
688 cx.background_executor().spawn(async move {
689 let since = since.unwrap_or_default();
690 operations.extend(
691 text_operations
692 .iter()
693 .filter(|(_, op)| !since.observed(op.timestamp()))
694 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
695 );
696 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
697 operations
698 })
699 }
700
701 /// Assign a language to the buffer, returning the buffer.
702 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
703 self.set_language(Some(language), cx);
704 self
705 }
706
707 /// Returns the [Capability] of this buffer.
708 pub fn capability(&self) -> Capability {
709 self.capability
710 }
711
712 /// Whether this buffer can only be read.
713 pub fn read_only(&self) -> bool {
714 self.capability == Capability::ReadOnly
715 }
716
717 /// Builds a [Buffer] with the given underlying [TextBuffer], diff base, [File] and [Capability].
718 pub fn build(
719 buffer: TextBuffer,
720 diff_base: Option<String>,
721 file: Option<Arc<dyn File>>,
722 capability: Capability,
723 ) -> Self {
724 let saved_mtime = file.as_ref().and_then(|file| file.mtime());
725 let snapshot = buffer.snapshot();
726 let git_diff = git::diff::BufferDiff::new(&snapshot);
727 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
728 Self {
729 saved_mtime,
730 saved_version: buffer.version(),
731 preview_version: buffer.version(),
732 reload_task: None,
733 transaction_depth: 0,
734 was_dirty_before_starting_transaction: None,
735 has_unsaved_edits: Cell::new((buffer.version(), false)),
736 text: buffer,
737 diff_base: diff_base
738 .map(|mut raw_diff_base| {
739 LineEnding::normalize(&mut raw_diff_base);
740 raw_diff_base
741 })
742 .map(Rope::from),
743 diff_base_version: 0,
744 git_diff,
745 file,
746 capability,
747 syntax_map,
748 parsing_in_background: false,
749 non_text_state_update_count: 0,
750 sync_parse_timeout: Duration::from_millis(1),
751 parse_status: async_watch::channel(ParseStatus::Idle),
752 autoindent_requests: Default::default(),
753 pending_autoindent: Default::default(),
754 language: None,
755 remote_selections: Default::default(),
756 diagnostics: Default::default(),
757 diagnostics_timestamp: Default::default(),
758 completion_triggers: Default::default(),
759 completion_triggers_timestamp: Default::default(),
760 deferred_ops: OperationQueue::new(),
761 has_conflict: false,
762 }
763 }
764
765 /// Retrieve a snapshot of the buffer's current state. This is computationally
766 /// cheap, and allows reading from the buffer on a background thread.
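    ///
    /// A sketch of reading the snapshot off the main thread (assumes the snapshot
    /// exposes `text()` via its underlying text snapshot; illustrative only):
    ///
    /// ```ignore
    /// let snapshot = buffer.snapshot();
    /// cx.background_executor()
    ///     .spawn(async move {
    ///         // The snapshot is immutable, so it can be read from any thread.
    ///         let line_count = snapshot.text().lines().count();
    ///         line_count
    ///     })
    ///     .detach();
    /// ```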
767 pub fn snapshot(&self) -> BufferSnapshot {
768 let text = self.text.snapshot();
769 let mut syntax_map = self.syntax_map.lock();
770 syntax_map.interpolate(&text);
771 let syntax = syntax_map.snapshot();
772
773 BufferSnapshot {
774 text,
775 syntax,
776 git_diff: self.git_diff.clone(),
777 file: self.file.clone(),
778 remote_selections: self.remote_selections.clone(),
779 diagnostics: self.diagnostics.clone(),
780 language: self.language.clone(),
781 non_text_state_update_count: self.non_text_state_update_count,
782 }
783 }
784
785 #[cfg(test)]
786 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
787 &self.text
788 }
789
790 /// Retrieve a snapshot of the buffer's raw text, without any
791 /// language-related state like the syntax tree or diagnostics.
792 pub fn text_snapshot(&self) -> text::BufferSnapshot {
793 self.text.snapshot()
794 }
795
796 /// The file associated with the buffer, if any.
797 pub fn file(&self) -> Option<&Arc<dyn File>> {
798 self.file.as_ref()
799 }
800
801 /// The version of the buffer that was last saved or reloaded from disk.
802 pub fn saved_version(&self) -> &clock::Global {
803 &self.saved_version
804 }
805
806 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
807 pub fn saved_mtime(&self) -> Option<SystemTime> {
808 self.saved_mtime
809 }
810
811 /// Assign a language to the buffer.
812 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
813 self.non_text_state_update_count += 1;
814 self.syntax_map.lock().clear(&self.text);
815 self.language = language;
816 self.reparse(cx);
817 cx.emit(BufferEvent::LanguageChanged);
818 }
819
820 /// Assign a language registry to the buffer. This allows the buffer to retrieve
821 /// other languages if parts of the buffer are written in different languages.
822 pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
823 self.syntax_map
824 .lock()
825 .set_language_registry(language_registry);
826 }
827
828 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
829 self.syntax_map.lock().language_registry()
830 }
831
832 /// Assign the buffer a new [Capability].
833 pub fn set_capability(&mut self, capability: Capability, cx: &mut ModelContext<Self>) {
834 self.capability = capability;
835 cx.emit(BufferEvent::CapabilityChanged)
836 }
837
838 /// This method is called to signal that the buffer has been saved.
839 pub fn did_save(
840 &mut self,
841 version: clock::Global,
842 mtime: Option<SystemTime>,
843 cx: &mut ModelContext<Self>,
844 ) {
845 self.saved_version = version;
846 self.has_unsaved_edits
847 .set((self.saved_version().clone(), false));
848 self.has_conflict = false;
849 self.saved_mtime = mtime;
850 cx.emit(BufferEvent::Saved);
851 cx.notify();
852 }
853
854 /// This method is called to signal that the buffer has been discarded.
855 pub fn discarded(&mut self, cx: &mut ModelContext<Self>) {
856 cx.emit(BufferEvent::Discarded);
857 cx.notify();
858 }
859
860 /// Reloads the contents of the buffer from disk.
861 pub fn reload(
862 &mut self,
863 cx: &mut ModelContext<Self>,
864 ) -> oneshot::Receiver<Option<Transaction>> {
865 let (tx, rx) = futures::channel::oneshot::channel();
866 let prev_version = self.text.version();
867 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
868 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
869 let file = this.file.as_ref()?.as_local()?;
870 Some((file.mtime(), file.load(cx)))
871 })?
872 else {
873 return Ok(());
874 };
875
876 let new_text = new_text.await?;
877 let diff = this
878 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
879 .await;
880 this.update(&mut cx, |this, cx| {
881 if this.version() == diff.base_version {
882 this.finalize_last_transaction();
883 this.apply_diff(diff, cx);
884 tx.send(this.finalize_last_transaction().cloned()).ok();
885 this.has_conflict = false;
886 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
887 } else {
888 if !diff.edits.is_empty()
889 || this
890 .edits_since::<usize>(&diff.base_version)
891 .next()
892 .is_some()
893 {
894 this.has_conflict = true;
895 }
896
897 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
898 }
899
900 this.reload_task.take();
901 })
902 }));
903 rx
904 }
905
906 /// This method is called to signal that the buffer has been reloaded.
907 pub fn did_reload(
908 &mut self,
909 version: clock::Global,
910 line_ending: LineEnding,
911 mtime: Option<SystemTime>,
912 cx: &mut ModelContext<Self>,
913 ) {
914 self.saved_version = version;
915 self.has_unsaved_edits
916 .set((self.saved_version.clone(), false));
917 self.text.set_line_ending(line_ending);
918 self.saved_mtime = mtime;
919 cx.emit(BufferEvent::Reloaded);
920 cx.notify();
921 }
922
923 /// Updates the [File] backing this buffer. This should be called when
924 /// the file has changed or has been deleted.
925 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut ModelContext<Self>) {
926 let mut file_changed = false;
927
928 if let Some(old_file) = self.file.as_ref() {
929 if new_file.path() != old_file.path() {
930 file_changed = true;
931 }
932
933 if new_file.is_deleted() {
934 if !old_file.is_deleted() {
935 file_changed = true;
936 if !self.is_dirty() {
937 cx.emit(BufferEvent::DirtyChanged);
938 }
939 }
940 } else {
941 let new_mtime = new_file.mtime();
942 if new_mtime != old_file.mtime() {
943 file_changed = true;
944
945 if !self.is_dirty() {
946 self.reload(cx).close();
947 }
948 }
949 }
950 } else {
951 file_changed = true;
952 };
953
954 self.file = Some(new_file);
955 if file_changed {
956 self.non_text_state_update_count += 1;
957 cx.emit(BufferEvent::FileHandleChanged);
958 cx.notify();
959 }
960 }
961
    /// Returns the current diff base; see [Buffer::set_diff_base].
963 pub fn diff_base(&self) -> Option<&Rope> {
964 self.diff_base.as_ref()
965 }
966
967 /// Sets the text that will be used to compute a Git diff
968 /// against the buffer text.
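    ///
    /// A minimal sketch (the `head_text` value is illustrative, e.g. the file's
    /// contents at the Git HEAD revision):
    ///
    /// ```ignore
    /// buffer.set_diff_base(Some(head_text), cx);
    /// // Clearing the diff base disables Git diff computation for this buffer.
    /// buffer.set_diff_base(None, cx);
    /// ```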
969 pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &mut ModelContext<Self>) {
970 self.diff_base = diff_base
971 .map(|mut raw_diff_base| {
972 LineEnding::normalize(&mut raw_diff_base);
973 raw_diff_base
974 })
975 .map(Rope::from);
976 self.diff_base_version += 1;
977 if let Some(recalc_task) = self.git_diff_recalc(cx) {
978 cx.spawn(|buffer, mut cx| async move {
979 recalc_task.await;
980 buffer
981 .update(&mut cx, |_, cx| {
982 cx.emit(BufferEvent::DiffBaseChanged);
983 })
984 .ok();
985 })
986 .detach();
987 }
988 }
989
    /// Returns a number that is incremented each time a new diff base is set for the buffer.
991 pub fn diff_base_version(&self) -> usize {
992 self.diff_base_version
993 }
994
995 /// Recomputes the Git diff status.
996 pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) -> Option<Task<()>> {
997 let diff_base = self.diff_base.clone()?;
998 let snapshot = self.snapshot();
999
1000 let mut diff = self.git_diff.clone();
1001 let diff = cx.background_executor().spawn(async move {
1002 diff.update(&diff_base, &snapshot).await;
1003 diff
1004 });
1005
1006 Some(cx.spawn(|this, mut cx| async move {
1007 let buffer_diff = diff.await;
1008 this.update(&mut cx, |this, cx| {
1009 this.git_diff = buffer_diff;
1010 this.non_text_state_update_count += 1;
1011 cx.emit(BufferEvent::DiffUpdated);
1012 })
1013 .ok();
1014 }))
1015 }
1016
1017 /// Returns the primary [Language] assigned to this [Buffer].
1018 pub fn language(&self) -> Option<&Arc<Language>> {
1019 self.language.as_ref()
1020 }
1021
1022 /// Returns the [Language] at the given location.
1023 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1024 let offset = position.to_offset(self);
1025 self.syntax_map
1026 .lock()
1027 .layers_for_range(offset..offset, &self.text, false)
1028 .last()
1029 .map(|info| info.language.clone())
1030 .or_else(|| self.language.clone())
1031 }
1032
1033 /// An integer version number that accounts for all updates besides
1034 /// the buffer's text itself (which is versioned via a version vector).
1035 pub fn non_text_state_update_count(&self) -> usize {
1036 self.non_text_state_update_count
1037 }
1038
1039 /// Whether the buffer is being parsed in the background.
1040 #[cfg(any(test, feature = "test-support"))]
1041 pub fn is_parsing(&self) -> bool {
1042 self.parsing_in_background
1043 }
1044
1045 /// Indicates whether the buffer contains any regions that may be
1046 /// written in a language that hasn't been loaded yet.
1047 pub fn contains_unknown_injections(&self) -> bool {
1048 self.syntax_map.lock().contains_unknown_injections()
1049 }
1050
1051 #[cfg(test)]
1052 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1053 self.sync_parse_timeout = timeout;
1054 }
1055
1056 /// Called after an edit to synchronize the buffer's main parse tree with
1057 /// the buffer's new underlying state.
1058 ///
1059 /// Locks the syntax map and interpolates the edits since the last reparse
1060 /// into the foreground syntax tree.
1061 ///
1062 /// Then takes a stable snapshot of the syntax map before unlocking it.
1063 /// The snapshot with the interpolated edits is sent to a background thread,
1064 /// where we ask Tree-sitter to perform an incremental parse.
1065 ///
1066 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1067 /// waiting on the parse to complete. As soon as it completes, we proceed
1068 /// synchronously, unless a 1ms timeout elapses.
1069 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree
    /// still in the foreground. When the background parse completes, we call back
    /// into the main thread and assign the result as the foreground parse state.
1074 ///
1075 /// If the buffer or grammar changed since the start of the background parse,
1076 /// initiate an additional reparse recursively. To avoid concurrent parses
1077 /// for the same buffer, we only initiate a new parse if we are not already
1078 /// parsing in the background.
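    ///
    /// A sketch of forcing a reparse and waiting for it to settle (assumes the
    /// `async_watch` receiver exposes `borrow` and `changed` like a Tokio watch
    /// channel; illustrative only):
    ///
    /// ```ignore
    /// buffer.reparse(cx);
    /// let mut status = buffer.parse_status();
    /// cx.spawn(|_, _| async move {
    ///     while *status.borrow() != ParseStatus::Idle {
    ///         if status.changed().await.is_err() {
    ///             break; // The buffer was dropped.
    ///         }
    ///     }
    /// })
    /// .detach();
    /// ```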
1079 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1080 if self.parsing_in_background {
1081 return;
1082 }
1083 let language = if let Some(language) = self.language.clone() {
1084 language
1085 } else {
1086 return;
1087 };
1088
1089 let text = self.text_snapshot();
1090 let parsed_version = self.version();
1091
1092 let mut syntax_map = self.syntax_map.lock();
1093 syntax_map.interpolate(&text);
1094 let language_registry = syntax_map.language_registry();
1095 let mut syntax_snapshot = syntax_map.snapshot();
1096 drop(syntax_map);
1097
1098 let parse_task = cx.background_executor().spawn({
1099 let language = language.clone();
1100 let language_registry = language_registry.clone();
1101 async move {
1102 syntax_snapshot.reparse(&text, language_registry, language);
1103 syntax_snapshot
1104 }
1105 });
1106
1107 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1108 match cx
1109 .background_executor()
1110 .block_with_timeout(self.sync_parse_timeout, parse_task)
1111 {
1112 Ok(new_syntax_snapshot) => {
1113 self.did_finish_parsing(new_syntax_snapshot, cx);
1114 }
1115 Err(parse_task) => {
1116 self.parsing_in_background = true;
1117 cx.spawn(move |this, mut cx| async move {
1118 let new_syntax_map = parse_task.await;
1119 this.update(&mut cx, move |this, cx| {
1120 let grammar_changed =
1121 this.language.as_ref().map_or(true, |current_language| {
1122 !Arc::ptr_eq(&language, current_language)
1123 });
1124 let language_registry_changed = new_syntax_map
1125 .contains_unknown_injections()
1126 && language_registry.map_or(false, |registry| {
1127 registry.version() != new_syntax_map.language_registry_version()
1128 });
1129 let parse_again = language_registry_changed
1130 || grammar_changed
1131 || this.version.changed_since(&parsed_version);
1132 this.did_finish_parsing(new_syntax_map, cx);
1133 this.parsing_in_background = false;
1134 if parse_again {
1135 this.reparse(cx);
1136 }
1137 })
1138 .ok();
1139 })
1140 .detach();
1141 }
1142 }
1143 }
1144
1145 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
1146 self.non_text_state_update_count += 1;
1147 self.syntax_map.lock().did_parse(syntax_snapshot);
1148 self.request_autoindent(cx);
1149 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1150 cx.emit(BufferEvent::Reparsed);
1151 cx.notify();
1152 }
1153
1154 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1155 self.parse_status.1.clone()
1156 }
1157
1158 /// Assign to the buffer a set of diagnostics created by a given language server.
1159 pub fn update_diagnostics(
1160 &mut self,
1161 server_id: LanguageServerId,
1162 diagnostics: DiagnosticSet,
1163 cx: &mut ModelContext<Self>,
1164 ) {
1165 let lamport_timestamp = self.text.lamport_clock.tick();
1166 let op = Operation::UpdateDiagnostics {
1167 server_id,
1168 diagnostics: diagnostics.iter().cloned().collect(),
1169 lamport_timestamp,
1170 };
1171 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1172 self.send_operation(op, cx);
1173 }
1174
1175 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
1176 if let Some(indent_sizes) = self.compute_autoindents() {
1177 let indent_sizes = cx.background_executor().spawn(indent_sizes);
1178 match cx
1179 .background_executor()
1180 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1181 {
1182 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1183 Err(indent_sizes) => {
1184 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1185 let indent_sizes = indent_sizes.await;
1186 this.update(&mut cx, |this, cx| {
1187 this.apply_autoindents(indent_sizes, cx);
1188 })
1189 .ok();
1190 }));
1191 }
1192 }
1193 } else {
1194 self.autoindent_requests.clear();
1195 }
1196 }
1197
1198 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
1199 let max_rows_between_yields = 100;
1200 let snapshot = self.snapshot();
1201 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1202 return None;
1203 }
1204
1205 let autoindent_requests = self.autoindent_requests.clone();
1206 Some(async move {
1207 let mut indent_sizes = BTreeMap::new();
1208 for request in autoindent_requests {
1209 // Resolve each edited range to its row in the current buffer and in the
1210 // buffer before this batch of edits.
1211 let mut row_ranges = Vec::new();
1212 let mut old_to_new_rows = BTreeMap::new();
1213 let mut language_indent_sizes_by_new_row = Vec::new();
1214 for entry in &request.entries {
1215 let position = entry.range.start;
1216 let new_row = position.to_point(&snapshot).row;
1217 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1218 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1219
1220 if !entry.first_line_is_new {
1221 let old_row = position.to_point(&request.before_edit).row;
1222 old_to_new_rows.insert(old_row, new_row);
1223 }
1224 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1225 }
1226
1227 // Build a map containing the suggested indentation for each of the edited lines
1228 // with respect to the state of the buffer before these edits. This map is keyed
1229 // by the rows for these lines in the current state of the buffer.
1230 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1231 let old_edited_ranges =
1232 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1233 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1234 let mut language_indent_size = IndentSize::default();
1235 for old_edited_range in old_edited_ranges {
1236 let suggestions = request
1237 .before_edit
1238 .suggest_autoindents(old_edited_range.clone())
1239 .into_iter()
1240 .flatten();
1241 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1242 if let Some(suggestion) = suggestion {
1243 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1244
1245 // Find the indent size based on the language for this row.
1246 while let Some((row, size)) = language_indent_sizes.peek() {
1247 if *row > new_row {
1248 break;
1249 }
1250 language_indent_size = *size;
1251 language_indent_sizes.next();
1252 }
1253
1254 let suggested_indent = old_to_new_rows
1255 .get(&suggestion.basis_row)
1256 .and_then(|from_row| {
1257 Some(old_suggestions.get(from_row).copied()?.0)
1258 })
1259 .unwrap_or_else(|| {
1260 request
1261 .before_edit
1262 .indent_size_for_line(suggestion.basis_row)
1263 })
1264 .with_delta(suggestion.delta, language_indent_size);
1265 old_suggestions
1266 .insert(new_row, (suggested_indent, suggestion.within_error));
1267 }
1268 }
1269 yield_now().await;
1270 }
1271
1272 // In block mode, only compute indentation suggestions for the first line
1273 // of each insertion. Otherwise, compute suggestions for every inserted line.
1274 let new_edited_row_ranges = contiguous_ranges(
1275 row_ranges.iter().flat_map(|(range, _)| {
1276 if request.is_block_mode {
1277 range.start..range.start + 1
1278 } else {
1279 range.clone()
1280 }
1281 }),
1282 max_rows_between_yields,
1283 );
1284
1285 // Compute new suggestions for each line, but only include them in the result
1286 // if they differ from the old suggestion for that line.
1287 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1288 let mut language_indent_size = IndentSize::default();
1289 for new_edited_row_range in new_edited_row_ranges {
1290 let suggestions = snapshot
1291 .suggest_autoindents(new_edited_row_range.clone())
1292 .into_iter()
1293 .flatten();
1294 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1295 if let Some(suggestion) = suggestion {
1296 // Find the indent size based on the language for this row.
1297 while let Some((row, size)) = language_indent_sizes.peek() {
1298 if *row > new_row {
1299 break;
1300 }
1301 language_indent_size = *size;
1302 language_indent_sizes.next();
1303 }
1304
1305 let suggested_indent = indent_sizes
1306 .get(&suggestion.basis_row)
1307 .copied()
1308 .unwrap_or_else(|| {
1309 snapshot.indent_size_for_line(suggestion.basis_row)
1310 })
1311 .with_delta(suggestion.delta, language_indent_size);
1312 if old_suggestions.get(&new_row).map_or(
1313 true,
1314 |(old_indentation, was_within_error)| {
1315 suggested_indent != *old_indentation
1316 && (!suggestion.within_error || *was_within_error)
1317 },
1318 ) {
1319 indent_sizes.insert(new_row, suggested_indent);
1320 }
1321 }
1322 }
1323 yield_now().await;
1324 }
1325
1326 // For each block of inserted text, adjust the indentation of the remaining
1327 // lines of the block by the same amount as the first line was adjusted.
1328 if request.is_block_mode {
1329 for (row_range, original_indent_column) in
1330 row_ranges
1331 .into_iter()
1332 .filter_map(|(range, original_indent_column)| {
1333 if range.len() > 1 {
1334 Some((range, original_indent_column?))
1335 } else {
1336 None
1337 }
1338 })
1339 {
1340 let new_indent = indent_sizes
1341 .get(&row_range.start)
1342 .copied()
1343 .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
1344 let delta = new_indent.len as i64 - original_indent_column as i64;
1345 if delta != 0 {
1346 for row in row_range.skip(1) {
1347 indent_sizes.entry(row).or_insert_with(|| {
1348 let mut size = snapshot.indent_size_for_line(row);
1349 if size.kind == new_indent.kind {
1350 match delta.cmp(&0) {
1351 Ordering::Greater => size.len += delta as u32,
1352 Ordering::Less => {
1353 size.len = size.len.saturating_sub(-delta as u32)
1354 }
1355 Ordering::Equal => {}
1356 }
1357 }
1358 size
1359 });
1360 }
1361 }
1362 }
1363 }
1364 }
1365
1366 indent_sizes
1367 })
1368 }
1369
1370 fn apply_autoindents(
1371 &mut self,
1372 indent_sizes: BTreeMap<u32, IndentSize>,
1373 cx: &mut ModelContext<Self>,
1374 ) {
1375 self.autoindent_requests.clear();
1376
1377 let edits: Vec<_> = indent_sizes
1378 .into_iter()
1379 .filter_map(|(row, indent_size)| {
1380 let current_size = indent_size_for_line(self, row);
1381 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1382 })
1383 .collect();
1384
1385 let preserve_preview = self.preserve_preview();
1386 self.edit(edits, None, cx);
1387 if preserve_preview {
1388 self.refresh_preview();
1389 }
1390 }
1391
1392 /// Create a minimal edit that will cause the given row to be indented
1393 /// with the given size. After applying this edit, the length of the line
1394 /// will always be at least `new_size.len`.
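    ///
    /// A worked illustration (values chosen for this example): growing a
    /// two-space indent to four spaces inserts two spaces at the start of the row.
    ///
    /// ```ignore
    /// let current = IndentSize { len: 2, kind: IndentKind::Space };
    /// let desired = IndentSize { len: 4, kind: IndentKind::Space };
    /// let (range, text) =
    ///     Buffer::edit_for_indent_size_adjustment(0, current, desired).unwrap();
    /// assert_eq!(range, Point::new(0, 0)..Point::new(0, 0));
    /// assert_eq!(text, "  ");
    /// ```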
1395 pub fn edit_for_indent_size_adjustment(
1396 row: u32,
1397 current_size: IndentSize,
1398 new_size: IndentSize,
1399 ) -> Option<(Range<Point>, String)> {
1400 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1402 Ordering::Greater => {
1403 let point = Point::new(row, 0);
1404 Some((
1405 point..point,
1406 iter::repeat(new_size.char())
1407 .take((new_size.len - current_size.len) as usize)
1408 .collect::<String>(),
1409 ))
1410 }
1411
1412 Ordering::Less => Some((
1413 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1414 String::new(),
1415 )),
1416
1417 Ordering::Equal => None,
1418 }
1419 } else {
1420 Some((
1421 Point::new(row, 0)..Point::new(row, current_size.len),
1422 iter::repeat(new_size.char())
1423 .take(new_size.len as usize)
1424 .collect::<String>(),
1425 ))
1426 }
1427 }
1428
1429 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1430 /// and the given new text.
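    ///
    /// A sketch of computing a diff and applying it back to the buffer, mirroring
    /// what [`Buffer::reload`] does (assumes a `Model<Buffer>` handle and an
    /// `AppContext`; illustrative only):
    ///
    /// ```ignore
    /// let diff = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx));
    /// cx.spawn(|mut cx| async move {
    ///     let diff = diff.await;
    ///     buffer.update(&mut cx, |buffer, cx| buffer.apply_diff(diff, cx))
    /// })
    /// .detach();
    /// ```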
1431 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1432 let old_text = self.as_rope().clone();
1433 let base_version = self.version();
1434 cx.background_executor()
1435 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1436 let old_text = old_text.to_string();
1437 let line_ending = LineEnding::detect(&new_text);
1438 LineEnding::normalize(&mut new_text);
1439
1440 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1441 let empty: Arc<str> = Arc::default();
1442
1443 let mut edits = Vec::new();
1444 let mut old_offset = 0;
1445 let mut new_offset = 0;
1446 let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
1447 for change in diff.iter_all_changes().map(Some).chain([None]) {
1448 if let Some(change) = &change {
1449 let len = change.value().len();
1450 match change.tag() {
1451 ChangeTag::Equal => {
1452 old_offset += len;
1453 new_offset += len;
1454 }
1455 ChangeTag::Delete => {
1456 let old_end_offset = old_offset + len;
1457 if let Some((last_old_range, _)) = &mut last_edit {
1458 last_old_range.end = old_end_offset;
1459 } else {
1460 last_edit =
1461 Some((old_offset..old_end_offset, new_offset..new_offset));
1462 }
1463 old_offset = old_end_offset;
1464 }
1465 ChangeTag::Insert => {
1466 let new_end_offset = new_offset + len;
1467 if let Some((_, last_new_range)) = &mut last_edit {
1468 last_new_range.end = new_end_offset;
1469 } else {
1470 last_edit =
1471 Some((old_offset..old_offset, new_offset..new_end_offset));
1472 }
1473 new_offset = new_end_offset;
1474 }
1475 }
1476 }
1477
1478 if let Some((old_range, new_range)) = &last_edit {
1479 if old_offset > old_range.end
1480 || new_offset > new_range.end
1481 || change.is_none()
1482 {
1483 let text = if new_range.is_empty() {
1484 empty.clone()
1485 } else {
1486 new_text[new_range.clone()].into()
1487 };
1488 edits.push((old_range.clone(), text));
1489 last_edit.take();
1490 }
1491 }
1492 }
1493
1494 Diff {
1495 base_version,
1496 line_ending,
1497 edits,
1498 }
1499 })
1500 }
1501
    /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1504 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1505 let old_text = self.as_rope().clone();
1506 let line_ending = self.line_ending();
1507 let base_version = self.version();
1508 cx.background_executor().spawn(async move {
1509 let ranges = trailing_whitespace_ranges(&old_text);
1510 let empty = Arc::<str>::from("");
1511 Diff {
1512 base_version,
1513 line_ending,
1514 edits: ranges
1515 .into_iter()
1516 .map(|range| (range, empty.clone()))
1517 .collect(),
1518 }
1519 })
1520 }
1521
1522 /// Ensures that the buffer ends with a single newline character, and
1523 /// no other whitespace.
1524 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1525 let len = self.len();
1526 let mut offset = len;
1527 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1528 let non_whitespace_len = chunk
1529 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1530 .len();
1531 offset -= chunk.len();
1532 offset += non_whitespace_len;
1533 if non_whitespace_len != 0 {
1534 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1535 return;
1536 }
1537 break;
1538 }
1539 }
1540 self.edit([(offset..len, "\n")], None, cx);
1541 }
1542
1543 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1544 /// calculated, then adjust the diff to account for those changes, and discard any
1545 /// parts of the diff that conflict with those changes.
1546 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1547 // Check for any edits to the buffer that have occurred since this diff
1548 // was computed.
1549 let snapshot = self.snapshot();
1550 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1551 let mut delta = 0;
1552 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1553 while let Some(edit_since) = edits_since.peek() {
1554 // If the edit occurs after a diff hunk, then it does not
1555 // affect that hunk.
1556 if edit_since.old.start > range.end {
1557 break;
1558 }
1559 // If the edit precedes the diff hunk, then adjust the hunk
1560 // to reflect the edit.
1561 else if edit_since.old.end < range.start {
1562 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1563 edits_since.next();
1564 }
1565 // If the edit intersects a diff hunk, then discard that hunk.
1566 else {
1567 return None;
1568 }
1569 }
1570
1571 let start = (range.start as i64 + delta) as usize;
1572 let end = (range.end as i64 + delta) as usize;
1573 Some((start..end, new_text))
1574 });
1575
1576 self.start_transaction();
1577 self.text.set_line_ending(diff.line_ending);
1578 self.edit(adjusted_edits, None, cx);
1579 self.end_transaction(cx)
1580 }
1581
1582 fn has_unsaved_edits(&self) -> bool {
1583 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1584
1585 if last_version == self.version {
1586 self.has_unsaved_edits
1587 .set((last_version, has_unsaved_edits));
1588 return has_unsaved_edits;
1589 }
1590
1591 let has_edits = self.has_edits_since(&self.saved_version);
1592 self.has_unsaved_edits
1593 .set((self.version.clone(), has_edits));
1594 has_edits
1595 }
1596
1597 /// Checks if the buffer has unsaved changes.
1598 pub fn is_dirty(&self) -> bool {
1599 self.capability != Capability::ReadOnly
1600 && (self.has_conflict
1601 || self.has_unsaved_edits()
1602 || self
1603 .file
1604 .as_ref()
1605 .map_or(false, |file| file.is_deleted() || !file.is_created()))
1606 }
1607
1608 /// Checks if the buffer and its file have both changed since the buffer
1609 /// was last saved or reloaded.
1610 pub fn has_conflict(&self) -> bool {
1611 self.has_conflict
1612 || self.file.as_ref().map_or(false, |file| {
1613 file.mtime() > self.saved_mtime && self.has_unsaved_edits()
1614 })
1615 }
1616
1617 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1618 pub fn subscribe(&mut self) -> Subscription {
1619 self.text.subscribe()
1620 }
1621
1622 /// Starts a transaction, if one is not already in-progress. When undoing or
1623 /// redoing edits, all of the edits performed within a transaction are undone
1624 /// or redone together.
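    ///
    /// A minimal sketch of grouping two edits so they undo together (assumes a
    /// `cx: &mut ModelContext<Buffer>`; illustrative only):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// let len = buffer.len();
    /// buffer.edit([(len..len, "}\n")], None, cx);
    /// buffer.end_transaction(cx); // Undo now reverts both edits at once.
    /// ```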
1625 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1626 self.start_transaction_at(Instant::now())
1627 }
1628
1629 /// Starts a transaction, providing the current time. Subsequent transactions
1630 /// that occur within a short period of time will be grouped together. This
1631 /// is controlled by the buffer's undo grouping duration.
1632 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1633 self.transaction_depth += 1;
1634 if self.was_dirty_before_starting_transaction.is_none() {
1635 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1636 }
1637 self.text.start_transaction_at(now)
1638 }
1639
1640 /// Terminates the current transaction, if this is the outermost transaction.
1641 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1642 self.end_transaction_at(Instant::now(), cx)
1643 }
1644
1645 /// Terminates the current transaction, providing the current time. Subsequent transactions
1646 /// that occur within a short period of time will be grouped together. This
1647 /// is controlled by the buffer's undo grouping duration.
1648 pub fn end_transaction_at(
1649 &mut self,
1650 now: Instant,
1651 cx: &mut ModelContext<Self>,
1652 ) -> Option<TransactionId> {
1653 assert!(self.transaction_depth > 0);
1654 self.transaction_depth -= 1;
1655 let was_dirty = if self.transaction_depth == 0 {
1656 self.was_dirty_before_starting_transaction.take().unwrap()
1657 } else {
1658 false
1659 };
1660 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1661 self.did_edit(&start_version, was_dirty, cx);
1662 Some(transaction_id)
1663 } else {
1664 None
1665 }
1666 }
1667
1668 /// Manually add a transaction to the buffer's undo history.
1669 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1670 self.text.push_transaction(transaction, now);
1671 }
1672
    /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
1675 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1676 self.text.finalize_last_transaction()
1677 }
1678
1679 /// Manually group all changes since a given transaction.
1680 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1681 self.text.group_until_transaction(transaction_id);
1682 }
1683
    /// Manually remove a transaction from the buffer's undo history.
1685 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1686 self.text.forget_transaction(transaction_id);
1687 }
1688
1689 /// Manually merge two adjacent transactions in the buffer's undo history.
1690 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1691 self.text.merge_transactions(transaction, destination);
1692 }
1693
1694 /// Waits for the buffer to receive operations with the given timestamps.
1695 pub fn wait_for_edits(
1696 &mut self,
1697 edit_ids: impl IntoIterator<Item = clock::Lamport>,
1698 ) -> impl Future<Output = Result<()>> {
1699 self.text.wait_for_edits(edit_ids)
1700 }
1701
1702 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
1703 pub fn wait_for_anchors(
1704 &mut self,
1705 anchors: impl IntoIterator<Item = Anchor>,
1706 ) -> impl 'static + Future<Output = Result<()>> {
1707 self.text.wait_for_anchors(anchors)
1708 }
1709
1710 /// Waits for the buffer to receive operations up to the given version.
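    ///
    /// # Example
    ///
    /// A hedged sketch (not run as a doctest); `remote_version` is assumed to be a
    /// `clock::Global` received from another replica:
    ///
    /// ```ignore
    /// let caught_up = buffer.wait_for_version(remote_version);
    /// // In some async context (for example, a spawned task):
    /// caught_up.await?;
    /// // The buffer now contains all operations up to `remote_version`, unless
    /// // `give_up_waiting` was called, in which case the future resolves with an error.
    /// ```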
1711 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1712 self.text.wait_for_version(version)
1713 }
1714
1715    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
1716    /// [`Buffer::wait_for_anchors`] to resolve with an error.
1717 pub fn give_up_waiting(&mut self) {
1718 self.text.give_up_waiting();
1719 }
1720
1721    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
1722 pub fn set_active_selections(
1723 &mut self,
1724 selections: Arc<[Selection<Anchor>]>,
1725 line_mode: bool,
1726 cursor_shape: CursorShape,
1727 cx: &mut ModelContext<Self>,
1728 ) {
1729 let lamport_timestamp = self.text.lamport_clock.tick();
1730 self.remote_selections.insert(
1731 self.text.replica_id(),
1732 SelectionSet {
1733 selections: selections.clone(),
1734 lamport_timestamp,
1735 line_mode,
1736 cursor_shape,
1737 },
1738 );
1739 self.send_operation(
1740 Operation::UpdateSelections {
1741 selections,
1742 line_mode,
1743 lamport_timestamp,
1744 cursor_shape,
1745 },
1746 cx,
1747 );
1748 self.non_text_state_update_count += 1;
1749 cx.notify();
1750 }
1751
1752 /// Clears the selections, so that other replicas of the buffer do not see any selections for
1753 /// this replica.
1754 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1755 if self
1756 .remote_selections
1757 .get(&self.text.replica_id())
1758 .map_or(true, |set| !set.selections.is_empty())
1759 {
1760 self.set_active_selections(Arc::default(), false, Default::default(), cx);
1761 }
1762 }
1763
1764 /// Replaces the buffer's entire text.
1765 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Lamport>
1766 where
1767 T: Into<Arc<str>>,
1768 {
1769 self.autoindent_requests.clear();
1770 self.edit([(0..self.len(), text)], None, cx)
1771 }
1772
1773 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
1774 /// delete, and a string of text to insert at that location.
1775 ///
1776 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
1777 /// request for the edited ranges, which will be processed when the buffer finishes
1778 /// parsing.
1779 ///
1780 /// Parsing takes place at the end of a transaction, and may compute synchronously
1781 /// or asynchronously, depending on the changes.
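    ///
    /// # Example
    ///
    /// A minimal sketch (not compiled as a doctest), assuming `buffer` and `cx` come from
    /// an enclosing model update and the buffer currently contains `"fn main() {}"`:
    ///
    /// ```ignore
    /// // Insert a statement between the braces, letting auto-indent pick the
    /// // indentation of the new line once the buffer reparses.
    /// buffer.edit(
    ///     [(11usize..11, "\nprintln!(\"hello\");\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```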
1782 pub fn edit<I, S, T>(
1783 &mut self,
1784 edits_iter: I,
1785 autoindent_mode: Option<AutoindentMode>,
1786 cx: &mut ModelContext<Self>,
1787 ) -> Option<clock::Lamport>
1788 where
1789 I: IntoIterator<Item = (Range<S>, T)>,
1790 S: ToOffset,
1791 T: Into<Arc<str>>,
1792 {
1793 // Skip invalid edits and coalesce contiguous ones.
1794 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1795 for (range, new_text) in edits_iter {
1796 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1797 if range.start > range.end {
1798 mem::swap(&mut range.start, &mut range.end);
1799 }
1800 let new_text = new_text.into();
1801 if !new_text.is_empty() || !range.is_empty() {
1802 if let Some((prev_range, prev_text)) = edits.last_mut() {
1803 if prev_range.end >= range.start {
1804 prev_range.end = cmp::max(prev_range.end, range.end);
1805 *prev_text = format!("{prev_text}{new_text}").into();
1806 } else {
1807 edits.push((range, new_text));
1808 }
1809 } else {
1810 edits.push((range, new_text));
1811 }
1812 }
1813 }
1814 if edits.is_empty() {
1815 return None;
1816 }
1817
1818 self.start_transaction();
1819 self.pending_autoindent.take();
1820 let autoindent_request = autoindent_mode
1821 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1822
1823 let edit_operation = self.text.edit(edits.iter().cloned());
1824 let edit_id = edit_operation.timestamp();
1825
1826 if let Some((before_edit, mode)) = autoindent_request {
1827 let mut delta = 0isize;
1828 let entries = edits
1829 .into_iter()
1830 .enumerate()
1831 .zip(&edit_operation.as_edit().unwrap().new_text)
1832 .map(|((ix, (range, _)), new_text)| {
1833 let new_text_length = new_text.len();
1834 let old_start = range.start.to_point(&before_edit);
1835 let new_start = (delta + range.start as isize) as usize;
1836 delta += new_text_length as isize - (range.end as isize - range.start as isize);
1837
1838 let mut range_of_insertion_to_indent = 0..new_text_length;
1839 let mut first_line_is_new = false;
1840 let mut original_indent_column = None;
1841
1842 // When inserting an entire line at the beginning of an existing line,
1843 // treat the insertion as new.
1844 if new_text.contains('\n')
1845 && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
1846 {
1847 first_line_is_new = true;
1848 }
1849
1850 // When inserting text starting with a newline, avoid auto-indenting the
1851 // previous line.
1852 if new_text.starts_with('\n') {
1853 range_of_insertion_to_indent.start += 1;
1854 first_line_is_new = true;
1855 }
1856
1857 // Avoid auto-indenting after the insertion.
1858 if let AutoindentMode::Block {
1859 original_indent_columns,
1860 } = &mode
1861 {
1862 original_indent_column =
1863 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
1864 indent_size_for_text(
1865 new_text[range_of_insertion_to_indent.clone()].chars(),
1866 )
1867 .len
1868 }));
1869 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
1870 range_of_insertion_to_indent.end -= 1;
1871 }
1872 }
1873
1874 AutoindentRequestEntry {
1875 first_line_is_new,
1876 original_indent_column,
1877 indent_size: before_edit.language_indent_size_at(range.start, cx),
1878 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
1879 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
1880 }
1881 })
1882 .collect();
1883
1884 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1885 before_edit,
1886 entries,
1887 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
1888 }));
1889 }
1890
1891 self.end_transaction(cx);
1892 self.send_operation(Operation::Buffer(edit_operation), cx);
1893 Some(edit_id)
1894 }
1895
1896 fn did_edit(
1897 &mut self,
1898 old_version: &clock::Global,
1899 was_dirty: bool,
1900 cx: &mut ModelContext<Self>,
1901 ) {
1902 if self.edits_since::<usize>(old_version).next().is_none() {
1903 return;
1904 }
1905
1906 self.reparse(cx);
1907
1908 cx.emit(BufferEvent::Edited);
1909 if was_dirty != self.is_dirty() {
1910 cx.emit(BufferEvent::DirtyChanged);
1911 }
1912 cx.notify();
1913 }
1914
1915    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
1916    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
1917 pub fn insert_empty_line(
1918 &mut self,
1919 position: impl ToPoint,
1920 space_above: bool,
1921 space_below: bool,
1922 cx: &mut ModelContext<Self>,
1923 ) -> Point {
1924 let mut position = position.to_point(self);
1925
1926 self.start_transaction();
1927
1928 self.edit(
1929 [(position..position, "\n")],
1930 Some(AutoindentMode::EachLine),
1931 cx,
1932 );
1933
1934 if position.column > 0 {
1935 position += Point::new(1, 0);
1936 }
1937
1938 if !self.is_line_blank(position.row) {
1939 self.edit(
1940 [(position..position, "\n")],
1941 Some(AutoindentMode::EachLine),
1942 cx,
1943 );
1944 }
1945
1946 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
1947 self.edit(
1948 [(position..position, "\n")],
1949 Some(AutoindentMode::EachLine),
1950 cx,
1951 );
1952 position.row += 1;
1953 }
1954
1955 if space_below
1956 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
1957 {
1958 self.edit(
1959 [(position..position, "\n")],
1960 Some(AutoindentMode::EachLine),
1961 cx,
1962 );
1963 }
1964
1965 self.end_transaction(cx);
1966
1967 position
1968 }
1969
1970 /// Applies the given remote operations to the buffer.
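    ///
    /// # Example
    ///
    /// A hedged sketch of the receiving side of collaboration (not run as a doctest);
    /// `incoming_ops` is assumed to be a `Vec<Operation>` deserialized from the network:
    ///
    /// ```ignore
    /// buffer.apply_ops(incoming_ops, cx)?;
    /// // Operations whose dependencies have not arrived yet are deferred and are
    /// // replayed automatically once they become applicable.
    /// ```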
1971 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1972 &mut self,
1973 ops: I,
1974 cx: &mut ModelContext<Self>,
1975 ) -> Result<()> {
1976 self.pending_autoindent.take();
1977 let was_dirty = self.is_dirty();
1978 let old_version = self.version.clone();
1979 let mut deferred_ops = Vec::new();
1980 let buffer_ops = ops
1981 .into_iter()
1982 .filter_map(|op| match op {
1983 Operation::Buffer(op) => Some(op),
1984 _ => {
1985 if self.can_apply_op(&op) {
1986 self.apply_op(op, cx);
1987 } else {
1988 deferred_ops.push(op);
1989 }
1990 None
1991 }
1992 })
1993 .collect::<Vec<_>>();
1994 self.text.apply_ops(buffer_ops)?;
1995 self.deferred_ops.insert(deferred_ops);
1996 self.flush_deferred_ops(cx);
1997 self.did_edit(&old_version, was_dirty, cx);
1998 // Notify independently of whether the buffer was edited as the operations could include a
1999 // selection update.
2000 cx.notify();
2001 Ok(())
2002 }
2003
2004 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
2005 let mut deferred_ops = Vec::new();
2006 for op in self.deferred_ops.drain().iter().cloned() {
2007 if self.can_apply_op(&op) {
2008 self.apply_op(op, cx);
2009 } else {
2010 deferred_ops.push(op);
2011 }
2012 }
2013 self.deferred_ops.insert(deferred_ops);
2014 }
2015
    /// Returns `true` if any received operations have been deferred because their
    /// dependencies have not yet been applied.
2016    pub fn has_deferred_ops(&self) -> bool {
2017 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2018 }
2019
2020 fn can_apply_op(&self, operation: &Operation) -> bool {
2021 match operation {
2022 Operation::Buffer(_) => {
2023 unreachable!("buffer operations should never be applied at this layer")
2024 }
2025 Operation::UpdateDiagnostics {
2026 diagnostics: diagnostic_set,
2027 ..
2028 } => diagnostic_set.iter().all(|diagnostic| {
2029 self.text.can_resolve(&diagnostic.range.start)
2030 && self.text.can_resolve(&diagnostic.range.end)
2031 }),
2032 Operation::UpdateSelections { selections, .. } => selections
2033 .iter()
2034 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2035 Operation::UpdateCompletionTriggers { .. } => true,
2036 }
2037 }
2038
2039 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
2040 match operation {
2041 Operation::Buffer(_) => {
2042 unreachable!("buffer operations should never be applied at this layer")
2043 }
2044 Operation::UpdateDiagnostics {
2045 server_id,
2046 diagnostics: diagnostic_set,
2047 lamport_timestamp,
2048 } => {
2049 let snapshot = self.snapshot();
2050 self.apply_diagnostic_update(
2051 server_id,
2052 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2053 lamport_timestamp,
2054 cx,
2055 );
2056 }
2057 Operation::UpdateSelections {
2058 selections,
2059 lamport_timestamp,
2060 line_mode,
2061 cursor_shape,
2062 } => {
2063 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2064 if set.lamport_timestamp > lamport_timestamp {
2065 return;
2066 }
2067 }
2068
2069 self.remote_selections.insert(
2070 lamport_timestamp.replica_id,
2071 SelectionSet {
2072 selections,
2073 lamport_timestamp,
2074 line_mode,
2075 cursor_shape,
2076 },
2077 );
2078 self.text.lamport_clock.observe(lamport_timestamp);
2079 self.non_text_state_update_count += 1;
2080 }
2081 Operation::UpdateCompletionTriggers {
2082 triggers,
2083 lamport_timestamp,
2084 } => {
2085 self.completion_triggers = triggers;
2086 self.text.lamport_clock.observe(lamport_timestamp);
2087 }
2088 }
2089 }
2090
2091 fn apply_diagnostic_update(
2092 &mut self,
2093 server_id: LanguageServerId,
2094 diagnostics: DiagnosticSet,
2095 lamport_timestamp: clock::Lamport,
2096 cx: &mut ModelContext<Self>,
2097 ) {
2098 if lamport_timestamp > self.diagnostics_timestamp {
2099 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2100 if diagnostics.is_empty() {
2101 if let Ok(ix) = ix {
2102 self.diagnostics.remove(ix);
2103 }
2104 } else {
2105 match ix {
2106 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2107 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2108 };
2109 }
2110 self.diagnostics_timestamp = lamport_timestamp;
2111 self.non_text_state_update_count += 1;
2112 self.text.lamport_clock.observe(lamport_timestamp);
2113 cx.notify();
2114 cx.emit(BufferEvent::DiagnosticsUpdated);
2115 }
2116 }
2117
2118 fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
2119 cx.emit(BufferEvent::Operation(operation));
2120 }
2121
2122 /// Removes the selections for a given peer.
2123 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
2124 self.remote_selections.remove(&replica_id);
2125 cx.notify();
2126 }
2127
2128 /// Undoes the most recent transaction.
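    ///
    /// # Example
    ///
    /// A minimal sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// buffer.set_text("hello", cx);
    /// let undone = buffer.undo(cx); // Reverts the edit and returns its transaction id.
    /// buffer.redo(cx);              // Re-applies it.
    /// ```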
2129 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2130 let was_dirty = self.is_dirty();
2131 let old_version = self.version.clone();
2132
2133 if let Some((transaction_id, operation)) = self.text.undo() {
2134 self.send_operation(Operation::Buffer(operation), cx);
2135 self.did_edit(&old_version, was_dirty, cx);
2136 Some(transaction_id)
2137 } else {
2138 None
2139 }
2140 }
2141
2142 /// Manually undoes a specific transaction in the buffer's undo history.
2143 pub fn undo_transaction(
2144 &mut self,
2145 transaction_id: TransactionId,
2146 cx: &mut ModelContext<Self>,
2147 ) -> bool {
2148 let was_dirty = self.is_dirty();
2149 let old_version = self.version.clone();
2150 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2151 self.send_operation(Operation::Buffer(operation), cx);
2152 self.did_edit(&old_version, was_dirty, cx);
2153 true
2154 } else {
2155 false
2156 }
2157 }
2158
2159 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2160 pub fn undo_to_transaction(
2161 &mut self,
2162 transaction_id: TransactionId,
2163 cx: &mut ModelContext<Self>,
2164 ) -> bool {
2165 let was_dirty = self.is_dirty();
2166 let old_version = self.version.clone();
2167
2168 let operations = self.text.undo_to_transaction(transaction_id);
2169 let undone = !operations.is_empty();
2170 for operation in operations {
2171 self.send_operation(Operation::Buffer(operation), cx);
2172 }
2173 if undone {
2174 self.did_edit(&old_version, was_dirty, cx)
2175 }
2176 undone
2177 }
2178
2179    /// Redoes the most recently undone transaction.
2180 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2181 let was_dirty = self.is_dirty();
2182 let old_version = self.version.clone();
2183
2184 if let Some((transaction_id, operation)) = self.text.redo() {
2185 self.send_operation(Operation::Buffer(operation), cx);
2186 self.did_edit(&old_version, was_dirty, cx);
2187 Some(transaction_id)
2188 } else {
2189 None
2190 }
2191 }
2192
2193    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2194 pub fn redo_to_transaction(
2195 &mut self,
2196 transaction_id: TransactionId,
2197 cx: &mut ModelContext<Self>,
2198 ) -> bool {
2199 let was_dirty = self.is_dirty();
2200 let old_version = self.version.clone();
2201
2202 let operations = self.text.redo_to_transaction(transaction_id);
2203 let redone = !operations.is_empty();
2204 for operation in operations {
2205 self.send_operation(Operation::Buffer(operation), cx);
2206 }
2207 if redone {
2208 self.did_edit(&old_version, was_dirty, cx)
2209 }
2210 redone
2211 }
2212
2213 /// Override current completion triggers with the user-provided completion triggers.
2214 pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
2215 self.completion_triggers.clone_from(&triggers);
2216 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2217 self.send_operation(
2218 Operation::UpdateCompletionTriggers {
2219 triggers,
2220 lamport_timestamp: self.completion_triggers_timestamp,
2221 },
2222 cx,
2223 );
2224 cx.notify();
2225 }
2226
2227 /// Returns a list of strings which trigger a completion menu for this language.
2228    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2229 pub fn completion_triggers(&self) -> &[String] {
2230 &self.completion_triggers
2231 }
2232
2233 /// Call this directly after performing edits to prevent the preview tab
2234 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2235 /// to return false until there are additional edits.
2236 pub fn refresh_preview(&mut self) {
2237 self.preview_version = self.version.clone();
2238 }
2239
2240 /// Whether we should preserve the preview status of a tab containing this buffer.
2241 pub fn preserve_preview(&self) -> bool {
2242 !self.has_edits_since(&self.preview_version)
2243 }
2244}
2245
2246#[doc(hidden)]
2247#[cfg(any(test, feature = "test-support"))]
2248impl Buffer {
2249 pub fn edit_via_marked_text(
2250 &mut self,
2251 marked_string: &str,
2252 autoindent_mode: Option<AutoindentMode>,
2253 cx: &mut ModelContext<Self>,
2254 ) {
2255 let edits = self.edits_for_marked_text(marked_string);
2256 self.edit(edits, autoindent_mode, cx);
2257 }
2258
2259 pub fn set_group_interval(&mut self, group_interval: Duration) {
2260 self.text.set_group_interval(group_interval);
2261 }
2262
2263 pub fn randomly_edit<T>(
2264 &mut self,
2265 rng: &mut T,
2266 old_range_count: usize,
2267 cx: &mut ModelContext<Self>,
2268 ) where
2269 T: rand::Rng,
2270 {
2271 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2272 let mut last_end = None;
2273 for _ in 0..old_range_count {
2274 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2275 break;
2276 }
2277
2278 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2279 let mut range = self.random_byte_range(new_start, rng);
2280 if rng.gen_bool(0.2) {
2281 mem::swap(&mut range.start, &mut range.end);
2282 }
2283 last_end = Some(range.end);
2284
2285 let new_text_len = rng.gen_range(0..10);
2286 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2287
2288 edits.push((range, new_text));
2289 }
2290 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2291 self.edit(edits, None, cx);
2292 }
2293
2294 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
2295 let was_dirty = self.is_dirty();
2296 let old_version = self.version.clone();
2297
2298 let ops = self.text.randomly_undo_redo(rng);
2299 if !ops.is_empty() {
2300 for op in ops {
2301 self.send_operation(Operation::Buffer(op), cx);
2302 self.did_edit(&old_version, was_dirty, cx);
2303 }
2304 }
2305 }
2306}
2307
2308impl EventEmitter<BufferEvent> for Buffer {}
2309
2310impl Deref for Buffer {
2311 type Target = TextBuffer;
2312
2313 fn deref(&self) -> &Self::Target {
2314 &self.text
2315 }
2316}
2317
2318impl BufferSnapshot {
2319    /// Returns [`IndentSize`] for a given line that respects user settings and language preferences.
2320 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2321 indent_size_for_line(self, row)
2322    }

2323    /// Returns [`IndentSize`] for a given position that respects user settings
2324 /// and language preferences.
2325 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
2326 let settings = language_settings(self.language_at(position), self.file(), cx);
2327 if settings.hard_tabs {
2328 IndentSize::tab()
2329 } else {
2330 IndentSize::spaces(settings.tab_size.get())
2331 }
2332 }
2333
2334 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2335 /// is passed in as `single_indent_size`.
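    ///
    /// # Example
    ///
    /// A hedged sketch (not run as a doctest), assuming `snapshot` is a `BufferSnapshot`:
    ///
    /// ```ignore
    /// let suggestions = snapshot.suggested_indents(2..5u32, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     // `indent.len` is the suggested indentation width for `row`,
    ///     // expressed in the unit passed as `single_indent_size`.
    /// }
    /// ```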
2336 pub fn suggested_indents(
2337 &self,
2338 rows: impl Iterator<Item = u32>,
2339 single_indent_size: IndentSize,
2340 ) -> BTreeMap<u32, IndentSize> {
2341 let mut result = BTreeMap::new();
2342
2343 for row_range in contiguous_ranges(rows, 10) {
2344 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2345 Some(suggestions) => suggestions,
2346 _ => break,
2347 };
2348
2349 for (row, suggestion) in row_range.zip(suggestions) {
2350 let indent_size = if let Some(suggestion) = suggestion {
2351 result
2352 .get(&suggestion.basis_row)
2353 .copied()
2354 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2355 .with_delta(suggestion.delta, single_indent_size)
2356 } else {
2357 self.indent_size_for_line(row)
2358 };
2359
2360 result.insert(row, indent_size);
2361 }
2362 }
2363
2364 result
2365 }
2366
2367 fn suggest_autoindents(
2368 &self,
2369 row_range: Range<u32>,
2370 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2371 let config = &self.language.as_ref()?.config;
2372 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2373
2374 // Find the suggested indentation ranges based on the syntax tree.
2375 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2376 let end = Point::new(row_range.end, 0);
2377 let range = (start..end).to_offset(&self.text);
2378 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2379 Some(&grammar.indents_config.as_ref()?.query)
2380 });
2381 let indent_configs = matches
2382 .grammars()
2383 .iter()
2384 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2385 .collect::<Vec<_>>();
2386
2387 let mut indent_ranges = Vec::<Range<Point>>::new();
2388 let mut outdent_positions = Vec::<Point>::new();
2389 while let Some(mat) = matches.peek() {
2390 let mut start: Option<Point> = None;
2391 let mut end: Option<Point> = None;
2392
2393 let config = &indent_configs[mat.grammar_index];
2394 for capture in mat.captures {
2395 if capture.index == config.indent_capture_ix {
2396 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2397 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2398 } else if Some(capture.index) == config.start_capture_ix {
2399 start = Some(Point::from_ts_point(capture.node.end_position()));
2400 } else if Some(capture.index) == config.end_capture_ix {
2401 end = Some(Point::from_ts_point(capture.node.start_position()));
2402 } else if Some(capture.index) == config.outdent_capture_ix {
2403 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2404 }
2405 }
2406
2407 matches.advance();
2408 if let Some((start, end)) = start.zip(end) {
2409 if start.row == end.row {
2410 continue;
2411 }
2412
2413 let range = start..end;
2414 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2415 Err(ix) => indent_ranges.insert(ix, range),
2416 Ok(ix) => {
2417 let prev_range = &mut indent_ranges[ix];
2418 prev_range.end = prev_range.end.max(range.end);
2419 }
2420 }
2421 }
2422 }
2423
2424 let mut error_ranges = Vec::<Range<Point>>::new();
2425 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2426 Some(&grammar.error_query)
2427 });
2428 while let Some(mat) = matches.peek() {
2429 let node = mat.captures[0].node;
2430 let start = Point::from_ts_point(node.start_position());
2431 let end = Point::from_ts_point(node.end_position());
2432 let range = start..end;
2433 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2434 Ok(ix) | Err(ix) => ix,
2435 };
2436 let mut end_ix = ix;
2437 while let Some(existing_range) = error_ranges.get(end_ix) {
2438 if existing_range.end < end {
2439 end_ix += 1;
2440 } else {
2441 break;
2442 }
2443 }
2444 error_ranges.splice(ix..end_ix, [range]);
2445 matches.advance();
2446 }
2447
2448 outdent_positions.sort();
2449 for outdent_position in outdent_positions {
2450 // find the innermost indent range containing this outdent_position
2451 // set its end to the outdent position
2452 if let Some(range_to_truncate) = indent_ranges
2453 .iter_mut()
2454 .filter(|indent_range| indent_range.contains(&outdent_position))
2455 .last()
2456 {
2457 range_to_truncate.end = outdent_position;
2458 }
2459 }
2460
2461        // Find the suggested indentation increases and decreases based on regexes.
2462 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2463 self.for_each_line(
2464 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2465 ..Point::new(row_range.end, 0),
2466 |row, line| {
2467 if config
2468 .decrease_indent_pattern
2469 .as_ref()
2470 .map_or(false, |regex| regex.is_match(line))
2471 {
2472 indent_change_rows.push((row, Ordering::Less));
2473 }
2474 if config
2475 .increase_indent_pattern
2476 .as_ref()
2477 .map_or(false, |regex| regex.is_match(line))
2478 {
2479 indent_change_rows.push((row + 1, Ordering::Greater));
2480 }
2481 },
2482 );
2483
2484 let mut indent_changes = indent_change_rows.into_iter().peekable();
2485 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2486 prev_non_blank_row.unwrap_or(0)
2487 } else {
2488 row_range.start.saturating_sub(1)
2489 };
2490 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2491 Some(row_range.map(move |row| {
2492 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2493
2494 let mut indent_from_prev_row = false;
2495 let mut outdent_from_prev_row = false;
2496 let mut outdent_to_row = u32::MAX;
2497
2498 while let Some((indent_row, delta)) = indent_changes.peek() {
2499 match indent_row.cmp(&row) {
2500 Ordering::Equal => match delta {
2501 Ordering::Less => outdent_from_prev_row = true,
2502 Ordering::Greater => indent_from_prev_row = true,
2503 _ => {}
2504 },
2505
2506 Ordering::Greater => break,
2507 Ordering::Less => {}
2508 }
2509
2510 indent_changes.next();
2511 }
2512
2513 for range in &indent_ranges {
2514 if range.start.row >= row {
2515 break;
2516 }
2517 if range.start.row == prev_row && range.end > row_start {
2518 indent_from_prev_row = true;
2519 }
2520 if range.end > prev_row_start && range.end <= row_start {
2521 outdent_to_row = outdent_to_row.min(range.start.row);
2522 }
2523 }
2524
2525 let within_error = error_ranges
2526 .iter()
2527 .any(|e| e.start.row < row && e.end > row_start);
2528
2529 let suggestion = if outdent_to_row == prev_row
2530 || (outdent_from_prev_row && indent_from_prev_row)
2531 {
2532 Some(IndentSuggestion {
2533 basis_row: prev_row,
2534 delta: Ordering::Equal,
2535 within_error,
2536 })
2537 } else if indent_from_prev_row {
2538 Some(IndentSuggestion {
2539 basis_row: prev_row,
2540 delta: Ordering::Greater,
2541 within_error,
2542 })
2543 } else if outdent_to_row < prev_row {
2544 Some(IndentSuggestion {
2545 basis_row: outdent_to_row,
2546 delta: Ordering::Equal,
2547 within_error,
2548 })
2549 } else if outdent_from_prev_row {
2550 Some(IndentSuggestion {
2551 basis_row: prev_row,
2552 delta: Ordering::Less,
2553 within_error,
2554 })
2555 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2556 {
2557 Some(IndentSuggestion {
2558 basis_row: prev_row,
2559 delta: Ordering::Equal,
2560 within_error,
2561 })
2562 } else {
2563 None
2564 };
2565
2566 prev_row = row;
2567 prev_row_start = row_start;
2568 suggestion
2569 }))
2570 }
2571
2572 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2573 while row > 0 {
2574 row -= 1;
2575 if !self.is_line_blank(row) {
2576 return Some(row);
2577 }
2578 }
2579 None
2580 }
2581
2582 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
2583 let captures = self.syntax.captures(range, &self.text, |grammar| {
2584 grammar.highlights_query.as_ref()
2585 });
2586 let highlight_maps = captures
2587 .grammars()
2588 .iter()
2589 .map(|grammar| grammar.highlight_map())
2590 .collect();
2591 (captures, highlight_maps)
2592    }

2593    /// Iterates over chunks of text in the given range of the buffer. Text is chunked
2594 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
2595 /// returned in chunks where each chunk has a single syntax highlighting style and
2596 /// diagnostic status.
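    ///
    /// # Example
    ///
    /// A minimal sketch (not compiled as a doctest); `snapshot` is assumed to be a `BufferSnapshot`:
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // Each chunk carries at most one syntax highlight id and one diagnostic status.
    ///     if let Some(_highlight_id) = chunk.syntax_highlight_id {
    ///         // Look the id up in a SyntaxTheme, if desired.
    ///     }
    ///     text.push_str(chunk.text);
    /// }
    /// ```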
2597 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2598 let range = range.start.to_offset(self)..range.end.to_offset(self);
2599
2600 let mut syntax = None;
2601 if language_aware {
2602 syntax = Some(self.get_highlights(range.clone()));
2603 }
2604 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
2605 let diagnostics = language_aware;
2606 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
2607 }
2608
2609 /// Invokes the given callback for each line of text in the given range of the buffer.
2610    /// Uses a callback to avoid allocating a string for each line.
2611 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2612 let mut line = String::new();
2613 let mut row = range.start.row;
2614 for chunk in self
2615 .as_rope()
2616 .chunks_in_range(range.to_offset(self))
2617 .chain(["\n"])
2618 {
2619 for (newline_ix, text) in chunk.split('\n').enumerate() {
2620 if newline_ix > 0 {
2621 callback(row, &line);
2622 row += 1;
2623 line.clear();
2624 }
2625 line.push_str(text);
2626 }
2627 }
2628 }
2629
2630 /// Iterates over every [`SyntaxLayer`] in the buffer.
2631 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
2632 self.syntax
2633 .layers_for_range(0..self.len(), &self.text, true)
2634 }
2635
    /// Returns the innermost [`SyntaxLayer`] containing the given position, if any.
2636    pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
2637 let offset = position.to_offset(self);
2638 self.syntax
2639 .layers_for_range(offset..offset, &self.text, false)
2640 .filter(|l| l.node().end_byte() > offset)
2641 .last()
2642 }
2643
2644    /// Returns the buffer's main [Language].
2645 pub fn language(&self) -> Option<&Arc<Language>> {
2646 self.language.as_ref()
2647 }
2648
2649 /// Returns the [Language] at the given location.
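    ///
    /// # Example
    ///
    /// A hedged sketch (not run as a doctest); `offset_inside_code_fence` is an assumed
    /// `usize` offset inside, say, a Rust code fence of a Markdown buffer:
    ///
    /// ```ignore
    /// let injected = snapshot.language_at(offset_inside_code_fence);
    /// // May differ from `snapshot.language()` when the position falls inside an injection.
    /// ```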
2650 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2651 self.syntax_layer_at(position)
2652 .map(|info| info.language)
2653 .or(self.language.as_ref())
2654 }
2655
2656 /// Returns the settings for the language at the given location.
2657 pub fn settings_at<'a, D: ToOffset>(
2658 &self,
2659 position: D,
2660 cx: &'a AppContext,
2661 ) -> &'a LanguageSettings {
2662 language_settings(self.language_at(position), self.file.as_ref(), cx)
2663 }
2664
    /// Returns a `CharClassifier` for the language scope at the given position.
2665    pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
2666 CharClassifier::new(self.language_scope_at(point))
2667 }
2668
2669 /// Returns the [LanguageScope] at the given location.
2670 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2671 let offset = position.to_offset(self);
2672 let mut scope = None;
2673 let mut smallest_range: Option<Range<usize>> = None;
2674
2675 // Use the layer that has the smallest node intersecting the given point.
2676 for layer in self
2677 .syntax
2678 .layers_for_range(offset..offset, &self.text, false)
2679 {
2680 let mut cursor = layer.node().walk();
2681
2682 let mut range = None;
2683 loop {
2684 let child_range = cursor.node().byte_range();
2685 if !child_range.to_inclusive().contains(&offset) {
2686 break;
2687 }
2688
2689 range = Some(child_range);
2690 if cursor.goto_first_child_for_byte(offset).is_none() {
2691 break;
2692 }
2693 }
2694
2695 if let Some(range) = range {
2696 if smallest_range
2697 .as_ref()
2698 .map_or(true, |smallest_range| range.len() < smallest_range.len())
2699 {
2700 smallest_range = Some(range);
2701 scope = Some(LanguageScope {
2702 language: layer.language.clone(),
2703 override_id: layer.override_id(offset, &self.text),
2704 });
2705 }
2706 }
2707 }
2708
2709 scope.or_else(|| {
2710 self.language.clone().map(|language| LanguageScope {
2711 language,
2712 override_id: None,
2713 })
2714 })
2715 }
2716
2717 /// Returns a tuple of the range and character kind of the word
2718 /// surrounding the given position.
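    ///
    /// # Example
    ///
    /// A minimal sketch (not compiled as a doctest); with buffer text `"let value = 42;"`:
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(6usize);
    /// // `range` should cover the word "value" (bytes 4..9), and `kind` its character kind.
    /// ```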
2719 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2720 let mut start = start.to_offset(self);
2721 let mut end = start;
2722 let mut next_chars = self.chars_at(start).peekable();
2723 let mut prev_chars = self.reversed_chars_at(start).peekable();
2724
2725 let classifier = self.char_classifier_at(start);
2726 let word_kind = cmp::max(
2727 prev_chars.peek().copied().map(|c| classifier.kind(c)),
2728 next_chars.peek().copied().map(|c| classifier.kind(c)),
2729 );
2730
2731 for ch in prev_chars {
2732 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
2733 start -= ch.len_utf8();
2734 } else {
2735 break;
2736 }
2737 }
2738
2739 for ch in next_chars {
2740 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
2741 end += ch.len_utf8();
2742 } else {
2743 break;
2744 }
2745 }
2746
2747 (start..end, word_kind)
2748 }
2749
2750    /// Returns the range for the closest syntax node enclosing the given range.
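    ///
    /// # Example
    ///
    /// A hedged sketch (not run as a doctest); in a Rust buffer containing
    /// `"fn f() { 1 + 2 }"`, starting from an empty range between `1` and `+`:
    ///
    /// ```ignore
    /// // Expands to the smallest node strictly containing the range, here the
    /// // binary expression `1 + 2` (assuming the grammar parses it as one node).
    /// let ancestor = snapshot.range_for_syntax_ancestor(10usize..10);
    /// ```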
2751 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2752 let range = range.start.to_offset(self)..range.end.to_offset(self);
2753 let mut result: Option<Range<usize>> = None;
2754 'outer: for layer in self
2755 .syntax
2756 .layers_for_range(range.clone(), &self.text, true)
2757 {
2758 let mut cursor = layer.node().walk();
2759
2760 // Descend to the first leaf that touches the start of the range,
2761 // and if the range is non-empty, extends beyond the start.
2762 while cursor.goto_first_child_for_byte(range.start).is_some() {
2763 if !range.is_empty() && cursor.node().end_byte() == range.start {
2764 cursor.goto_next_sibling();
2765 }
2766 }
2767
2768 // Ascend to the smallest ancestor that strictly contains the range.
2769 loop {
2770 let node_range = cursor.node().byte_range();
2771 if node_range.start <= range.start
2772 && node_range.end >= range.end
2773 && node_range.len() > range.len()
2774 {
2775 break;
2776 }
2777 if !cursor.goto_parent() {
2778 continue 'outer;
2779 }
2780 }
2781
2782 let left_node = cursor.node();
2783 let mut layer_result = left_node.byte_range();
2784
2785 // For an empty range, try to find another node immediately to the right of the range.
2786 if left_node.end_byte() == range.start {
2787 let mut right_node = None;
2788 while !cursor.goto_next_sibling() {
2789 if !cursor.goto_parent() {
2790 break;
2791 }
2792 }
2793
2794 while cursor.node().start_byte() == range.start {
2795 right_node = Some(cursor.node());
2796 if !cursor.goto_first_child() {
2797 break;
2798 }
2799 }
2800
2801 // If there is a candidate node on both sides of the (empty) range, then
2802 // decide between the two by favoring a named node over an anonymous token.
2803 // If both nodes are the same in that regard, favor the right one.
2804 if let Some(right_node) = right_node {
2805 if right_node.is_named() || !left_node.is_named() {
2806 layer_result = right_node.byte_range();
2807 }
2808 }
2809 }
2810
2811 if let Some(previous_result) = &result {
2812 if previous_result.len() < layer_result.len() {
2813 continue;
2814 }
2815 }
2816 result = Some(layer_result);
2817 }
2818
2819 result
2820 }
2821
2822 /// Returns the outline for the buffer.
2823 ///
2824 /// This method allows passing an optional [SyntaxTheme] to
2825 /// syntax-highlight the returned symbols.
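    ///
    /// # Example
    ///
    /// A minimal sketch (not compiled as a doctest), assuming [Outline] exposes its
    /// items and each item carries the `depth` and `text` assigned here:
    ///
    /// ```ignore
    /// if let Some(outline) = snapshot.outline(None) {
    ///     for item in outline.items {
    ///         println!("{}{}", "  ".repeat(item.depth), item.text);
    ///     }
    /// }
    /// ```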
2826 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2827 self.outline_items_containing(0..self.len(), true, theme)
2828 .map(Outline::new)
2829 }
2830
2831 /// Returns all the symbols that contain the given position.
2832 ///
2833 /// This method allows passing an optional [SyntaxTheme] to
2834 /// syntax-highlight the returned symbols.
2835 pub fn symbols_containing<T: ToOffset>(
2836 &self,
2837 position: T,
2838 theme: Option<&SyntaxTheme>,
2839 ) -> Option<Vec<OutlineItem<Anchor>>> {
2840 let position = position.to_offset(self);
2841 let mut items = self.outline_items_containing(
2842 position.saturating_sub(1)..self.len().min(position + 1),
2843 false,
2844 theme,
2845 )?;
2846 let mut prev_depth = None;
2847 items.retain(|item| {
2848 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
2849 prev_depth = Some(item.depth);
2850 result
2851 });
2852 Some(items)
2853 }
2854
    /// Returns the outline items whose ranges intersect the given range, optionally
    /// syntax-highlighted using the provided [SyntaxTheme].
2855    pub fn outline_items_containing<T: ToOffset>(
2856 &self,
2857 range: Range<T>,
2858 include_extra_context: bool,
2859 theme: Option<&SyntaxTheme>,
2860 ) -> Option<Vec<OutlineItem<Anchor>>> {
2861 let range = range.to_offset(self);
2862 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2863 grammar.outline_config.as_ref().map(|c| &c.query)
2864 });
2865 let configs = matches
2866 .grammars()
2867 .iter()
2868 .map(|g| g.outline_config.as_ref().unwrap())
2869 .collect::<Vec<_>>();
2870
2871 let mut items = Vec::new();
2872 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
2873 while let Some(mat) = matches.peek() {
2874 let config = &configs[mat.grammar_index];
2875 if let Some(item) =
2876 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
2877 {
2878 items.push(item);
2879 } else if let Some(capture) = mat
2880 .captures
2881 .iter()
2882 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
2883 {
2884 let capture_range = capture.node.start_position()..capture.node.end_position();
2885 let mut capture_row_range =
2886 capture_range.start.row as u32..capture_range.end.row as u32;
2887 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
2888 {
2889 capture_row_range.end -= 1;
2890 }
2891 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
2892 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
2893 last_row_range.end = capture_row_range.end;
2894 } else {
2895 annotation_row_ranges.push(capture_row_range);
2896 }
2897 } else {
2898 annotation_row_ranges.push(capture_row_range);
2899 }
2900 }
2901 matches.advance();
2902 }
2903
2904 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
2905
2906 // Assign depths based on containment relationships and convert to anchors.
2907 let mut item_ends_stack = Vec::<Point>::new();
2908 let mut anchor_items = Vec::new();
2909 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
2910 for item in items {
2911 while let Some(last_end) = item_ends_stack.last().copied() {
2912 if last_end < item.range.end {
2913 item_ends_stack.pop();
2914 } else {
2915 break;
2916 }
2917 }
2918
2919 let mut annotation_row_range = None;
2920 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
2921 let row_preceding_item = item.range.start.row.saturating_sub(1);
2922 if next_annotation_row_range.end < row_preceding_item {
2923 annotation_row_ranges.next();
2924 } else {
2925 if next_annotation_row_range.end == row_preceding_item {
2926 annotation_row_range = Some(next_annotation_row_range.clone());
2927 annotation_row_ranges.next();
2928 }
2929 break;
2930 }
2931 }
2932
2933 anchor_items.push(OutlineItem {
2934 depth: item_ends_stack.len(),
2935 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
2936 text: item.text,
2937 highlight_ranges: item.highlight_ranges,
2938 name_ranges: item.name_ranges,
2939 body_range: item.body_range.map(|body_range| {
2940 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
2941 }),
2942 annotation_range: annotation_row_range.map(|annotation_range| {
2943 self.anchor_after(Point::new(annotation_range.start, 0))
2944 ..self.anchor_before(Point::new(
2945 annotation_range.end,
2946 self.line_len(annotation_range.end),
2947 ))
2948 }),
2949 });
2950 item_ends_stack.push(item.range.end);
2951 }
2952
2953 Some(anchor_items)
2954 }
2955
2956 fn next_outline_item(
2957 &self,
2958 config: &OutlineConfig,
2959 mat: &SyntaxMapMatch,
2960 range: &Range<usize>,
2961 include_extra_context: bool,
2962 theme: Option<&SyntaxTheme>,
2963 ) -> Option<OutlineItem<Point>> {
2964 let item_node = mat.captures.iter().find_map(|cap| {
2965 if cap.index == config.item_capture_ix {
2966 Some(cap.node)
2967 } else {
2968 None
2969 }
2970 })?;
2971
2972 let item_byte_range = item_node.byte_range();
2973 if item_byte_range.end < range.start || item_byte_range.start > range.end {
2974 return None;
2975 }
2976 let item_point_range = Point::from_ts_point(item_node.start_position())
2977 ..Point::from_ts_point(item_node.end_position());
2978
2979 let mut open_point = None;
2980 let mut close_point = None;
2981 let mut buffer_ranges = Vec::new();
2982 for capture in mat.captures {
2983 let node_is_name;
2984 if capture.index == config.name_capture_ix {
2985 node_is_name = true;
2986 } else if Some(capture.index) == config.context_capture_ix
2987 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
2988 {
2989 node_is_name = false;
2990 } else {
2991 if Some(capture.index) == config.open_capture_ix {
2992 open_point = Some(Point::from_ts_point(capture.node.end_position()));
2993 } else if Some(capture.index) == config.close_capture_ix {
2994 close_point = Some(Point::from_ts_point(capture.node.start_position()));
2995 }
2996
2997 continue;
2998 }
2999
3000 let mut range = capture.node.start_byte()..capture.node.end_byte();
3001 let start = capture.node.start_position();
3002 if capture.node.end_position().row > start.row {
3003 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3004 }
3005
3006 if !range.is_empty() {
3007 buffer_ranges.push((range, node_is_name));
3008 }
3009 }
3010 if buffer_ranges.is_empty() {
3011 return None;
3012 }
3013 let mut text = String::new();
3014 let mut highlight_ranges = Vec::new();
3015 let mut name_ranges = Vec::new();
3016 let mut chunks = self.chunks(
3017 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3018 true,
3019 );
3020 let mut last_buffer_range_end = 0;
3021 for (buffer_range, is_name) in buffer_ranges {
3022 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
3023 text.push(' ');
3024 }
3025 last_buffer_range_end = buffer_range.end;
3026 if is_name {
3027 let mut start = text.len();
3028 let end = start + buffer_range.len();
3029
3030 // When multiple names are captured, then the matchable text
3031 // includes the whitespace in between the names.
3032 if !name_ranges.is_empty() {
3033 start -= 1;
3034 }
3035
3036 name_ranges.push(start..end);
3037 }
3038
3039 let mut offset = buffer_range.start;
3040 chunks.seek(buffer_range.clone());
3041 for mut chunk in chunks.by_ref() {
3042 if chunk.text.len() > buffer_range.end - offset {
3043 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3044 offset = buffer_range.end;
3045 } else {
3046 offset += chunk.text.len();
3047 }
3048 let style = chunk
3049 .syntax_highlight_id
3050 .zip(theme)
3051 .and_then(|(highlight, theme)| highlight.style(theme));
3052 if let Some(style) = style {
3053 let start = text.len();
3054 let end = start + chunk.text.len();
3055 highlight_ranges.push((start..end, style));
3056 }
3057 text.push_str(chunk.text);
3058 if offset >= buffer_range.end {
3059 break;
3060 }
3061 }
3062 }
3063
3064 Some(OutlineItem {
3065 depth: 0, // We'll calculate the depth later
3066 range: item_point_range,
3067 text,
3068 highlight_ranges,
3069 name_ranges,
3070 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3071 annotation_range: None,
3072 })
3073 }
3074
3075 /// For each grammar in the language, runs the provided
3076 /// [tree_sitter::Query] against the given range.
3077 pub fn matches(
3078 &self,
3079 range: Range<usize>,
3080 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3081 ) -> SyntaxMapMatches {
3082 self.syntax.matches(range, self, query)
3083 }
3084
3085 /// Returns bracket range pairs overlapping or adjacent to `range`
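    ///
    /// # Example
    ///
    /// A hedged sketch (not run as a doctest); with buffer text `"fn f() {}"`:
    ///
    /// ```ignore
    /// for (open, close) in snapshot.bracket_ranges(7usize..9) {
    ///     // Depending on the grammar's bracket query, this yields the byte ranges of
    ///     // the `{` and `}` tokens, plus any other pairs adjacent to the range.
    /// }
    /// ```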
3086 pub fn bracket_ranges<T: ToOffset>(
3087 &self,
3088 range: Range<T>,
3089 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
3090 // Find bracket pairs that *inclusively* contain the given range.
3091 let range = range.start.to_offset(self).saturating_sub(1)
3092 ..self.len().min(range.end.to_offset(self) + 1);
3093
3094 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3095 grammar.brackets_config.as_ref().map(|c| &c.query)
3096 });
3097 let configs = matches
3098 .grammars()
3099 .iter()
3100 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3101 .collect::<Vec<_>>();
3102
3103 iter::from_fn(move || {
3104 while let Some(mat) = matches.peek() {
3105 let mut open = None;
3106 let mut close = None;
3107 let config = &configs[mat.grammar_index];
3108 for capture in mat.captures {
3109 if capture.index == config.open_capture_ix {
3110 open = Some(capture.node.byte_range());
3111 } else if capture.index == config.close_capture_ix {
3112 close = Some(capture.node.byte_range());
3113 }
3114 }
3115
3116 matches.advance();
3117
3118 let Some((open, close)) = open.zip(close) else {
3119 continue;
3120 };
3121
3122 let bracket_range = open.start..=close.end;
3123 if !bracket_range.overlaps(&range) {
3124 continue;
3125 }
3126
3127 return Some((open, close));
3128 }
3129 None
3130 })
3131 }
3132
3133 /// Returns enclosing bracket ranges containing the given range
3134 pub fn enclosing_bracket_ranges<T: ToOffset>(
3135 &self,
3136 range: Range<T>,
3137 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
3138 let range = range.start.to_offset(self)..range.end.to_offset(self);
3139
3140 self.bracket_ranges(range.clone())
3141 .filter(move |(open, close)| open.start <= range.start && close.end >= range.end)
3142 }
3143
3144    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no brackets contain the range.
3145    ///
3146    /// Can optionally pass a `range_filter` to filter which bracket ranges are considered.
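    ///
    /// # Example
    ///
    /// A minimal sketch (not compiled as a doctest); `cursor` is an assumed `usize` byte offset:
    ///
    /// ```ignore
    /// // Find the tightest pair around the cursor, skipping pairs whose open
    /// // bracket starts exactly at the cursor.
    /// let innermost = snapshot.innermost_enclosing_bracket_ranges(
    ///     cursor..cursor,
    ///     Some(&|open, _close| open.start != cursor),
    /// );
    /// ```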
3147 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3148 &self,
3149 range: Range<T>,
3150 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3151 ) -> Option<(Range<usize>, Range<usize>)> {
3152 let range = range.start.to_offset(self)..range.end.to_offset(self);
3153
3154 // Get the ranges of the innermost pair of brackets.
3155 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3156
3157 for (open, close) in self.enclosing_bracket_ranges(range.clone()) {
3158 if let Some(range_filter) = range_filter {
3159 if !range_filter(open.clone(), close.clone()) {
3160 continue;
3161 }
3162 }
3163
3164 let len = close.end - open.start;
3165
3166 if let Some((existing_open, existing_close)) = &result {
3167 let existing_len = existing_close.end - existing_open.start;
3168 if len > existing_len {
3169 continue;
3170 }
3171 }
3172
3173 result = Some((open, close));
3174 }
3175
3176 result
3177 }
3178
3179 /// Returns anchor ranges for any matches of the redaction query.
3180 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3181 /// will be run on the relevant section of the buffer.
3182 pub fn redacted_ranges<T: ToOffset>(
3183 &self,
3184 range: Range<T>,
3185 ) -> impl Iterator<Item = Range<usize>> + '_ {
3186 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3187 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3188 grammar
3189 .redactions_config
3190 .as_ref()
3191 .map(|config| &config.query)
3192 });
3193
3194 let configs = syntax_matches
3195 .grammars()
3196 .iter()
3197 .map(|grammar| grammar.redactions_config.as_ref())
3198 .collect::<Vec<_>>();
3199
3200 iter::from_fn(move || {
3201 let redacted_range = syntax_matches
3202 .peek()
3203 .and_then(|mat| {
3204 configs[mat.grammar_index].and_then(|config| {
3205 mat.captures
3206 .iter()
3207 .find(|capture| capture.index == config.redaction_capture_ix)
3208 })
3209 })
3210 .map(|mat| mat.node.byte_range());
3211 syntax_matches.advance();
3212 redacted_range
3213 })
3214 }
3215
    /// Returns the ranges and languages of language injections intersecting the given range.
3216    pub fn injections_intersecting_range<T: ToOffset>(
3217 &self,
3218 range: Range<T>,
3219 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3220 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3221
3222 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3223 grammar
3224 .injection_config
3225 .as_ref()
3226 .map(|config| &config.query)
3227 });
3228
3229 let configs = syntax_matches
3230 .grammars()
3231 .iter()
3232 .map(|grammar| grammar.injection_config.as_ref())
3233 .collect::<Vec<_>>();
3234
3235 iter::from_fn(move || {
3236 let ranges = syntax_matches.peek().and_then(|mat| {
3237 let config = &configs[mat.grammar_index]?;
3238 let content_capture_range = mat.captures.iter().find_map(|capture| {
3239 if capture.index == config.content_capture_ix {
3240 Some(capture.node.byte_range())
3241 } else {
3242 None
3243 }
3244 })?;
3245 let language = self.language_at(content_capture_range.start)?;
3246 Some((content_capture_range, language))
3247 });
3248 syntax_matches.advance();
3249 ranges
3250 })
3251 }
3252
    /// Returns the runnables whose ranges intersect the given range, based on each
    /// grammar's runnables query.
3253    pub fn runnable_ranges(
3254 &self,
3255 range: Range<Anchor>,
3256 ) -> impl Iterator<Item = RunnableRange> + '_ {
3257 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3258
3259 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3260 grammar.runnable_config.as_ref().map(|config| &config.query)
3261 });
3262
3263 let test_configs = syntax_matches
3264 .grammars()
3265 .iter()
3266 .map(|grammar| grammar.runnable_config.as_ref())
3267 .collect::<Vec<_>>();
3268
3269 iter::from_fn(move || loop {
3270 let mat = syntax_matches.peek()?;
3271
3272 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
3273 let mut run_range = None;
3274 let full_range = mat.captures.iter().fold(
3275 Range {
3276 start: usize::MAX,
3277 end: 0,
3278 },
3279 |mut acc, next| {
3280 let byte_range = next.node.byte_range();
3281 if acc.start > byte_range.start {
3282 acc.start = byte_range.start;
3283 }
3284 if acc.end < byte_range.end {
3285 acc.end = byte_range.end;
3286 }
3287 acc
3288 },
3289 );
3290 if full_range.start > full_range.end {
3291 // We did not find a full spanning range of this match.
3292 return None;
3293 }
3294 let extra_captures: SmallVec<[_; 1]> =
3295 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
3296 test_configs
3297 .extra_captures
3298 .get(capture.index as usize)
3299 .cloned()
3300 .and_then(|tag_name| match tag_name {
3301 RunnableCapture::Named(name) => {
3302 Some((capture.node.byte_range(), name))
3303 }
3304 RunnableCapture::Run => {
3305 let _ = run_range.insert(capture.node.byte_range());
3306 None
3307 }
3308 })
3309 }));
3310 let run_range = run_range?;
3311 let tags = test_configs
3312 .query
3313 .property_settings(mat.pattern_index)
3314 .iter()
3315 .filter_map(|property| {
3316 if *property.key == *"tag" {
3317 property
3318 .value
3319 .as_ref()
3320 .map(|value| RunnableTag(value.to_string().into()))
3321 } else {
3322 None
3323 }
3324 })
3325 .collect();
3326 let extra_captures = extra_captures
3327 .into_iter()
3328 .map(|(range, name)| {
3329 (
3330 name.to_string(),
3331 self.text_for_range(range.clone()).collect::<String>(),
3332 )
3333 })
3334 .collect();
3335 // All tags should have the same range.
3336 Some(RunnableRange {
3337 run_range,
3338 full_range,
3339 runnable: Runnable {
3340 tags,
3341 language: mat.language,
3342 buffer: self.remote_id(),
3343 },
3344 extra_captures,
3345 buffer_id: self.remote_id(),
3346 })
3347 });
3348
3349 syntax_matches.advance();
3350 if test_range.is_some() {
3351                // It's fine to short-circuit when `.peek()?` returns `None`. But we don't want this
3352                // iterator to end just because a match lacked a run marker, so in that case we loop around to the next match.
3353 return test_range;
3354 }
3355 })
3356 }
3357
    /// Computes the indent guides for the rows intersecting the given range, honoring the
    /// buffer's indent guide settings unless `ignore_disabled_for_language` is set.
3358    pub fn indent_guides_in_range(
3359 &self,
3360 range: Range<Anchor>,
3361 ignore_disabled_for_language: bool,
3362 cx: &AppContext,
3363 ) -> Vec<IndentGuide> {
3364 let language_settings = language_settings(self.language(), self.file.as_ref(), cx);
3365 let settings = language_settings.indent_guides;
3366 if !ignore_disabled_for_language && !settings.enabled {
3367 return Vec::new();
3368 }
3369 let tab_size = language_settings.tab_size.get() as u32;
3370
3371 let start_row = range.start.to_point(self).row;
3372 let end_row = range.end.to_point(self).row;
3373 let row_range = start_row..end_row + 1;
3374
3375 let mut row_indents = self.line_indents_in_row_range(row_range.clone());
3376
3377 let mut result_vec = Vec::new();
3378 let mut indent_stack = SmallVec::<[IndentGuide; 8]>::new();
3379
3380 while let Some((first_row, mut line_indent)) = row_indents.next() {
3381 let current_depth = indent_stack.len() as u32;
3382
3383            // When encountering an empty line, continue until a useful line indent is found,
3384            // then add it to the indent stack with the discovered depth.
3385 let mut found_indent = false;
3386 let mut last_row = first_row;
3387 if line_indent.is_line_empty() {
3388 let mut trailing_row = end_row;
3389 while !found_indent {
3390 let (target_row, new_line_indent) =
3391 if let Some(display_row) = row_indents.next() {
3392 display_row
3393 } else {
3394 // This means we reached the end of the given range and found empty lines at the end.
3395 // We need to traverse further until we find a non-empty line to know if we need to add
3396 // an indent guide for the last visible indent.
3397 trailing_row += 1;
3398
3399 const TRAILING_ROW_SEARCH_LIMIT: u32 = 25;
3400 if trailing_row > self.max_point().row
3401 || trailing_row > end_row + TRAILING_ROW_SEARCH_LIMIT
3402 {
3403 break;
3404 }
3405 let new_line_indent = self.line_indent_for_row(trailing_row);
3406 (trailing_row, new_line_indent)
3407 };
3408
3409 if new_line_indent.is_line_empty() {
3410 continue;
3411 }
3412 last_row = target_row.min(end_row);
3413 line_indent = new_line_indent;
3414 found_indent = true;
3415 break;
3416 }
3417 } else {
3418 found_indent = true
3419 }
3420
3421 let depth = if found_indent {
3422 line_indent.len(tab_size) / tab_size
3423 + ((line_indent.len(tab_size) % tab_size) > 0) as u32
3424 } else {
3425 current_depth
3426 };
3427
3428            match depth.cmp(&current_depth) {
3429 Ordering::Less => {
3430 for _ in 0..(current_depth - depth) {
3431 let mut indent = indent_stack.pop().unwrap();
3432 if last_row != first_row {
3433                        // In this case, we landed on an empty row, had to seek forward,
3434                        // and discovered that the indent we were on is ending.
3435                        // This means that the last display row must
3436                        // be on the line that ends this indent range, so we
3437                        // should display the range up to the first non-empty line.
3438 indent.end_row = first_row.saturating_sub(1);
3439 }
3440
3441 result_vec.push(indent)
3442 }
3443 }
3444 Ordering::Greater => {
3445 for next_depth in current_depth..depth {
3446 indent_stack.push(IndentGuide {
3447 buffer_id: self.remote_id(),
3448 start_row: first_row,
3449 end_row: last_row,
3450 depth: next_depth,
3451 tab_size,
3452 settings,
3453 });
3454 }
3455 }
3456 _ => {}
3457 }
3458
3459 for indent in indent_stack.iter_mut() {
3460 indent.end_row = last_row;
3461 }
3462 }
3463
3464 result_vec.extend(indent_stack);
3465
3466 result_vec
3467 }
3468
3469 pub async fn enclosing_indent(
3470 &self,
3471 mut buffer_row: BufferRow,
3472 ) -> Option<(Range<BufferRow>, LineIndent)> {
3473 let max_row = self.max_point().row;
3474 if buffer_row >= max_row {
3475 return None;
3476 }
3477
3478 let mut target_indent = self.line_indent_for_row(buffer_row);
3479
3480 // If the current row is at the start of an indented block, we want to return this
3481 // block as the enclosing indent.
3482 if !target_indent.is_line_empty() && buffer_row < max_row {
3483 let next_line_indent = self.line_indent_for_row(buffer_row + 1);
3484 if !next_line_indent.is_line_empty()
3485 && target_indent.raw_len() < next_line_indent.raw_len()
3486 {
3487 target_indent = next_line_indent;
3488 buffer_row += 1;
3489 }
3490 }
3491
3492 const SEARCH_ROW_LIMIT: u32 = 25000;
3493 const SEARCH_WHITESPACE_ROW_LIMIT: u32 = 2500;
3494 const YIELD_INTERVAL: u32 = 100;
3495
3496 let mut accessed_row_counter = 0;
3497
3498        // If there is a blank line at the current row, search for the nearest non-empty lines above and below.
3499 if target_indent.is_line_empty() {
3500 let start = buffer_row.saturating_sub(SEARCH_WHITESPACE_ROW_LIMIT);
3501 let end = (max_row + 1).min(buffer_row + SEARCH_WHITESPACE_ROW_LIMIT);
3502
3503 let mut non_empty_line_above = None;
3504 for (row, indent) in self
3505 .text
3506 .reversed_line_indents_in_row_range(start..buffer_row)
3507 {
3508 accessed_row_counter += 1;
3509 if accessed_row_counter == YIELD_INTERVAL {
3510 accessed_row_counter = 0;
3511 yield_now().await;
3512 }
3513 if !indent.is_line_empty() {
3514 non_empty_line_above = Some((row, indent));
3515 break;
3516 }
3517 }
3518
3519 let mut non_empty_line_below = None;
3520 for (row, indent) in self.text.line_indents_in_row_range((buffer_row + 1)..end) {
3521 accessed_row_counter += 1;
3522 if accessed_row_counter == YIELD_INTERVAL {
3523 accessed_row_counter = 0;
3524 yield_now().await;
3525 }
3526 if !indent.is_line_empty() {
3527 non_empty_line_below = Some((row, indent));
3528 break;
3529 }
3530 }
3531
3532 let (row, indent) = match (non_empty_line_above, non_empty_line_below) {
3533 (Some((above_row, above_indent)), Some((below_row, below_indent))) => {
3534 if above_indent.raw_len() >= below_indent.raw_len() {
3535 (above_row, above_indent)
3536 } else {
3537 (below_row, below_indent)
3538 }
3539 }
3540 (Some(above), None) => above,
3541 (None, Some(below)) => below,
3542 _ => return None,
3543 };
3544
3545 target_indent = indent;
3546 buffer_row = row;
3547 }
3548
3549 let start = buffer_row.saturating_sub(SEARCH_ROW_LIMIT);
3550 let end = (max_row + 1).min(buffer_row + SEARCH_ROW_LIMIT);
3551
3552 let mut start_indent = None;
3553 for (row, indent) in self
3554 .text
3555 .reversed_line_indents_in_row_range(start..buffer_row)
3556 {
3557 accessed_row_counter += 1;
3558 if accessed_row_counter == YIELD_INTERVAL {
3559 accessed_row_counter = 0;
3560 yield_now().await;
3561 }
3562 if !indent.is_line_empty() && indent.raw_len() < target_indent.raw_len() {
3563 start_indent = Some((row, indent));
3564 break;
3565 }
3566 }
3567 let (start_row, start_indent_size) = start_indent?;
3568
3569 let mut end_indent = (end, None);
3570 for (row, indent) in self.text.line_indents_in_row_range((buffer_row + 1)..end) {
3571 accessed_row_counter += 1;
3572 if accessed_row_counter == YIELD_INTERVAL {
3573 accessed_row_counter = 0;
3574 yield_now().await;
3575 }
3576 if !indent.is_line_empty() && indent.raw_len() < target_indent.raw_len() {
3577 end_indent = (row.saturating_sub(1), Some(indent));
3578 break;
3579 }
3580 }
3581 let (end_row, end_indent_size) = end_indent;
3582
3583 let indent = if let Some(end_indent_size) = end_indent_size {
3584 if start_indent_size.raw_len() > end_indent_size.raw_len() {
3585 start_indent_size
3586 } else {
3587 end_indent_size
3588 }
3589 } else {
3590 start_indent_size
3591 };
3592
3593 Some((start_row..end_row, indent))
3594 }
3595
3596 /// Returns selections for remote peers intersecting the given range. If `include_local` is true, the local replica's selections are included as well.
3597 #[allow(clippy::type_complexity)]
3598 pub fn selections_in_range(
3599 &self,
3600 range: Range<Anchor>,
3601 include_local: bool,
3602 ) -> impl Iterator<
3603 Item = (
3604 ReplicaId,
3605 bool,
3606 CursorShape,
3607 impl Iterator<Item = &Selection<Anchor>> + '_,
3608 ),
3609 > + '_ {
3610 self.remote_selections
3611 .iter()
3612 .filter(move |(replica_id, set)| {
3613 (include_local || **replica_id != self.text.replica_id())
3614 && !set.selections.is_empty()
3615 })
3616 .map(move |(replica_id, set)| {
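// Narrow to the sub-slice of selections that can intersect `range` using two binary searches.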
3617 let start_ix = match set.selections.binary_search_by(|probe| {
3618 probe.end.cmp(&range.start, self).then(Ordering::Greater)
3619 }) {
3620 Ok(ix) | Err(ix) => ix,
3621 };
3622 let end_ix = match set.selections.binary_search_by(|probe| {
3623 probe.start.cmp(&range.end, self).then(Ordering::Less)
3624 }) {
3625 Ok(ix) | Err(ix) => ix,
3626 };
3627
3628 (
3629 *replica_id,
3630 set.line_mode,
3631 set.cursor_shape,
3632 set.selections[start_ix..end_ix].iter(),
3633 )
3634 })
3635 }
3636
3637 /// Whether the buffer contains any git changes.
3638 pub fn has_git_diff(&self) -> bool {
3639 !self.git_diff.is_empty()
3640 }
3641
3642 /// Returns all the Git diff hunks intersecting the given
3643 /// row range.
3644 pub fn git_diff_hunks_in_row_range(
3645 &self,
3646 range: Range<BufferRow>,
3647 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3648 self.git_diff.hunks_in_row_range(range, self)
3649 }
3650
3651 /// Returns all the Git diff hunks intersecting the given
3652 /// range.
3653 pub fn git_diff_hunks_intersecting_range(
3654 &self,
3655 range: Range<Anchor>,
3656 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3657 self.git_diff.hunks_intersecting_range(range, self)
3658 }
3659
3660 /// Returns all the Git diff hunks intersecting the given
3661 /// range, in reverse order.
3662 pub fn git_diff_hunks_intersecting_range_rev(
3663 &self,
3664 range: Range<Anchor>,
3665 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3666 self.git_diff.hunks_intersecting_range_rev(range, self)
3667 }
3668
3669 /// Returns whether the buffer contains any diagnostics.
3670 pub fn has_diagnostics(&self) -> bool {
3671 !self.diagnostics.is_empty()
3672 }
3673
3674 /// Returns all the diagnostics intersecting the given range.
3675 pub fn diagnostics_in_range<'a, T, O>(
3676 &'a self,
3677 search_range: Range<T>,
3678 reversed: bool,
3679 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3680 where
3681 T: 'a + Clone + ToOffset,
3682 O: 'a + FromAnchor + Ord,
3683 {
3684 let mut iterators: Vec<_> = self
3685 .diagnostics
3686 .iter()
3687 .map(|(_, collection)| {
3688 collection
3689 .range::<T, O>(search_range.clone(), self, true, reversed)
3690 .peekable()
3691 })
3692 .collect();
3693
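// Merge the per-server iterators, repeatedly yielding whichever entry sorts first (or last, when reversed).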
3694 std::iter::from_fn(move || {
3695 let (next_ix, _) = iterators
3696 .iter_mut()
3697 .enumerate()
3698 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
3699 .min_by(|(_, a), (_, b)| {
3700 let cmp = a
3701 .range
3702 .start
3703 .cmp(&b.range.start)
3704 // when range is equal, sort by diagnostic severity
3705 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
3706 // and stabilize order with group_id
3707 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
3708 if reversed {
3709 cmp.reverse()
3710 } else {
3711 cmp
3712 }
3713 })?;
3714 iterators[next_ix].next()
3715 })
3716 }
3717
3718 /// Returns all the diagnostic groups associated with the given
3719 /// language server id. If no language server id is provided,
3720 /// all diagnostic groups are returned.
3721 pub fn diagnostic_groups(
3722 &self,
3723 language_server_id: Option<LanguageServerId>,
3724 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
3725 let mut groups = Vec::new();
3726
3727 if let Some(language_server_id) = language_server_id {
3728 if let Ok(ix) = self
3729 .diagnostics
3730 .binary_search_by_key(&language_server_id, |e| e.0)
3731 {
3732 self.diagnostics[ix]
3733 .1
3734 .groups(language_server_id, &mut groups, self);
3735 }
3736 } else {
3737 for (language_server_id, diagnostics) in self.diagnostics.iter() {
3738 diagnostics.groups(*language_server_id, &mut groups, self);
3739 }
3740 }
3741
3742 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
3743 let a_start = &group_a.entries[group_a.primary_ix].range.start;
3744 let b_start = &group_b.entries[group_b.primary_ix].range.start;
3745 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
3746 });
3747
3748 groups
3749 }
3750
3751 /// Returns an iterator over the diagnostics for the given group.
3752 pub fn diagnostic_group<'a, O>(
3753 &'a self,
3754 group_id: usize,
3755 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3756 where
3757 O: 'a + FromAnchor,
3758 {
3759 self.diagnostics
3760 .iter()
3761 .flat_map(move |(_, set)| set.group(group_id, self))
3762 }
3763
3764 /// An integer version number that accounts for all updates besides
3765 /// the buffer's text itself (which is versioned via a version vector).
3766 pub fn non_text_state_update_count(&self) -> usize {
3767 self.non_text_state_update_count
3768 }
3769
3770 /// Returns a snapshot of the underlying file.
3771 pub fn file(&self) -> Option<&Arc<dyn File>> {
3772 self.file.as_ref()
3773 }
3774
3775 /// Resolves the path of the underlying file: relative to the worktree root, or prefixed with the root name when `include_root` is true.
3776 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
3777 if let Some(file) = self.file() {
3778 if file.path().file_name().is_none() || include_root {
3779 Some(file.full_path(cx))
3780 } else {
3781 Some(file.path().to_path_buf())
3782 }
3783 } else {
3784 None
3785 }
3786 }
3787}
3788
3789fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
3790 indent_size_for_text(text.chars_at(Point::new(row, 0)))
3791}
3792
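/// Measures the leading indentation of a character stream: counts the run of leading spaces or tabs, taking the indent kind from the first such character, and stops at the first non-indentation character.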
3793fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
3794 let mut result = IndentSize::spaces(0);
3795 for c in text {
3796 let kind = match c {
3797 ' ' => IndentKind::Space,
3798 '\t' => IndentKind::Tab,
3799 _ => break,
3800 };
3801 if result.len == 0 {
3802 result.kind = kind;
3803 }
3804 result.len += 1;
3805 }
3806 result
3807}
3808
3809impl Clone for BufferSnapshot {
3810 fn clone(&self) -> Self {
3811 Self {
3812 text: self.text.clone(),
3813 git_diff: self.git_diff.clone(),
3814 syntax: self.syntax.clone(),
3815 file: self.file.clone(),
3816 remote_selections: self.remote_selections.clone(),
3817 diagnostics: self.diagnostics.clone(),
3818 language: self.language.clone(),
3819 non_text_state_update_count: self.non_text_state_update_count,
3820 }
3821 }
3822}
3823
3824impl Deref for BufferSnapshot {
3825 type Target = text::BufferSnapshot;
3826
3827 fn deref(&self) -> &Self::Target {
3828 &self.text
3829 }
3830}
3831
3832unsafe impl<'a> Send for BufferChunks<'a> {}
3833
3834impl<'a> BufferChunks<'a> {
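/// Creates a chunk iterator over `range`, optionally annotating the chunks with syntax highlights and diagnostic severities.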
3835 pub(crate) fn new(
3836 text: &'a Rope,
3837 range: Range<usize>,
3838 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
3839 diagnostics: bool,
3840 buffer_snapshot: Option<&'a BufferSnapshot>,
3841 ) -> Self {
3842 let mut highlights = None;
3843 if let Some((captures, highlight_maps)) = syntax {
3844 highlights = Some(BufferChunkHighlights {
3845 captures,
3846 next_capture: None,
3847 stack: Default::default(),
3848 highlight_maps,
3849 })
3850 }
3851
3852 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
3853 let chunks = text.chunks_in_range(range.clone());
3854
3855 let mut this = BufferChunks {
3856 range,
3857 buffer_snapshot,
3858 chunks,
3859 diagnostic_endpoints,
3860 error_depth: 0,
3861 warning_depth: 0,
3862 information_depth: 0,
3863 hint_depth: 0,
3864 unnecessary_depth: 0,
3865 highlights,
3866 };
3867 this.initialize_diagnostic_endpoints();
3868 this
3869 }
3870
3871 /// Seeks to the given byte range in the buffer.
3872 pub fn seek(&mut self, range: Range<usize>) {
3873 let old_range = std::mem::replace(&mut self.range, range.clone());
3874 self.chunks.set_range(self.range.clone());
3875 if let Some(highlights) = self.highlights.as_mut() {
3876 if old_range.start >= self.range.start && old_range.end <= self.range.end {
3877 // Reuse existing highlights stack, as the new range is a subrange of the old one.
3878 highlights
3879 .stack
3880 .retain(|(end_offset, _)| *end_offset > range.start);
3881 if let Some(capture) = &highlights.next_capture {
3882 if range.start >= capture.node.start_byte() {
3883 let next_capture_end = capture.node.end_byte();
3884 if range.start < next_capture_end {
3885 highlights.stack.push((
3886 next_capture_end,
3887 highlights.highlight_maps[capture.grammar_index].get(capture.index),
3888 ));
3889 }
3890 highlights.next_capture.take();
3891 }
3892 }
3893 } else if let Some(snapshot) = self.buffer_snapshot {
3894 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
3895 *highlights = BufferChunkHighlights {
3896 captures,
3897 next_capture: None,
3898 stack: Default::default(),
3899 highlight_maps,
3900 };
3901 } else {
3902 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
3903 // Seeking such BufferChunks is not supported.
3904 debug_assert!(false, "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot");
3905 }
3906
3907 highlights.captures.set_byte_range(self.range.clone());
3908 self.initialize_diagnostic_endpoints();
3909 }
3910 }
3911
3912 fn initialize_diagnostic_endpoints(&mut self) {
3913 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
3914 if let Some(buffer) = self.buffer_snapshot {
3915 let mut diagnostic_endpoints = Vec::new();
3916 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
3917 diagnostic_endpoints.push(DiagnosticEndpoint {
3918 offset: entry.range.start,
3919 is_start: true,
3920 severity: entry.diagnostic.severity,
3921 is_unnecessary: entry.diagnostic.is_unnecessary,
3922 });
3923 diagnostic_endpoints.push(DiagnosticEndpoint {
3924 offset: entry.range.end,
3925 is_start: false,
3926 severity: entry.diagnostic.severity,
3927 is_unnecessary: entry.diagnostic.is_unnecessary,
3928 });
3929 }
3930 diagnostic_endpoints
3931 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
3932 *diagnostics = diagnostic_endpoints.into_iter().peekable();
3933 }
3934 }
3935 }
3936
3937 /// The current byte offset in the buffer.
3938 pub fn offset(&self) -> usize {
3939 self.range.start
3940 }
3941
3942 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
3943 let depth = match endpoint.severity {
3944 DiagnosticSeverity::ERROR => &mut self.error_depth,
3945 DiagnosticSeverity::WARNING => &mut self.warning_depth,
3946 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
3947 DiagnosticSeverity::HINT => &mut self.hint_depth,
3948 _ => return,
3949 };
3950 if endpoint.is_start {
3951 *depth += 1;
3952 } else {
3953 *depth -= 1;
3954 }
3955
3956 if endpoint.is_unnecessary {
3957 if endpoint.is_start {
3958 self.unnecessary_depth += 1;
3959 } else {
3960 self.unnecessary_depth -= 1;
3961 }
3962 }
3963 }
3964
3965 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
3966 if self.error_depth > 0 {
3967 Some(DiagnosticSeverity::ERROR)
3968 } else if self.warning_depth > 0 {
3969 Some(DiagnosticSeverity::WARNING)
3970 } else if self.information_depth > 0 {
3971 Some(DiagnosticSeverity::INFORMATION)
3972 } else if self.hint_depth > 0 {
3973 Some(DiagnosticSeverity::HINT)
3974 } else {
3975 None
3976 }
3977 }
3978
3979 fn current_code_is_unnecessary(&self) -> bool {
3980 self.unnecessary_depth > 0
3981 }
3982}
3983
3984impl<'a> Iterator for BufferChunks<'a> {
3985 type Item = Chunk<'a>;
3986
3987 fn next(&mut self) -> Option<Self::Item> {
3988 let mut next_capture_start = usize::MAX;
3989 let mut next_diagnostic_endpoint = usize::MAX;
3990
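// Pop highlight scopes and consume diagnostic endpoints that end at or before the current offset, and note where the next capture and the next diagnostic boundary begin.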
3991 if let Some(highlights) = self.highlights.as_mut() {
3992 while let Some((parent_capture_end, _)) = highlights.stack.last() {
3993 if *parent_capture_end <= self.range.start {
3994 highlights.stack.pop();
3995 } else {
3996 break;
3997 }
3998 }
3999
4000 if highlights.next_capture.is_none() {
4001 highlights.next_capture = highlights.captures.next();
4002 }
4003
4004 while let Some(capture) = highlights.next_capture.as_ref() {
4005 if self.range.start < capture.node.start_byte() {
4006 next_capture_start = capture.node.start_byte();
4007 break;
4008 } else {
4009 let highlight_id =
4010 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4011 highlights
4012 .stack
4013 .push((capture.node.end_byte(), highlight_id));
4014 highlights.next_capture = highlights.captures.next();
4015 }
4016 }
4017 }
4018
4019 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4020 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4021 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4022 if endpoint.offset <= self.range.start {
4023 self.update_diagnostic_depths(endpoint);
4024 diagnostic_endpoints.next();
4025 } else {
4026 next_diagnostic_endpoint = endpoint.offset;
4027 break;
4028 }
4029 }
4030 }
4031 self.diagnostic_endpoints = diagnostic_endpoints;
4032
4033 if let Some(chunk) = self.chunks.peek() {
4034 let chunk_start = self.range.start;
4035 let mut chunk_end = (self.chunks.offset() + chunk.len())
4036 .min(next_capture_start)
4037 .min(next_diagnostic_endpoint);
4038 let mut highlight_id = None;
4039 if let Some(highlights) = self.highlights.as_ref() {
4040 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4041 chunk_end = chunk_end.min(*parent_capture_end);
4042 highlight_id = Some(*parent_highlight_id);
4043 }
4044 }
4045
4046 let slice =
4047 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4048 self.range.start = chunk_end;
4049 if self.range.start == self.chunks.offset() + chunk.len() {
4050 self.chunks.next().unwrap();
4051 }
4052
4053 Some(Chunk {
4054 text: slice,
4055 syntax_highlight_id: highlight_id,
4056 diagnostic_severity: self.current_diagnostic_severity(),
4057 is_unnecessary: self.current_code_is_unnecessary(),
4058 ..Default::default()
4059 })
4060 } else {
4061 None
4062 }
4063 }
4064}
4065
4066impl operation_queue::Operation for Operation {
4067 fn lamport_timestamp(&self) -> clock::Lamport {
4068 match self {
4069 Operation::Buffer(_) => {
4070 unreachable!("buffer operations should never be deferred at this layer")
4071 }
4072 Operation::UpdateDiagnostics {
4073 lamport_timestamp, ..
4074 }
4075 | Operation::UpdateSelections {
4076 lamport_timestamp, ..
4077 }
4078 | Operation::UpdateCompletionTriggers {
4079 lamport_timestamp, ..
4080 } => *lamport_timestamp,
4081 }
4082 }
4083}
4084
4085impl Default for Diagnostic {
4086 fn default() -> Self {
4087 Self {
4088 source: Default::default(),
4089 code: None,
4090 severity: DiagnosticSeverity::ERROR,
4091 message: Default::default(),
4092 group_id: 0,
4093 is_primary: false,
4094 is_disk_based: false,
4095 is_unnecessary: false,
4096 data: None,
4097 }
4098 }
4099}
4100
4101impl IndentSize {
4102 /// Returns an [IndentSize] representing the given number of spaces.
4103 pub fn spaces(len: u32) -> Self {
4104 Self {
4105 len,
4106 kind: IndentKind::Space,
4107 }
4108 }
4109
4110 /// Returns an [IndentSize] representing a tab.
4111 pub fn tab() -> Self {
4112 Self {
4113 len: 1,
4114 kind: IndentKind::Tab,
4115 }
4116 }
4117
4118 /// An iterator over the characters represented by this [IndentSize].
4119 pub fn chars(&self) -> impl Iterator<Item = char> {
4120 iter::repeat(self.char()).take(self.len as usize)
4121 }
4122
4123 /// The character representation of this [IndentSize].
4124 pub fn char(&self) -> char {
4125 match self.kind {
4126 IndentKind::Space => ' ',
4127 IndentKind::Tab => '\t',
4128 }
4129 }
4130
4131 /// Consumes the current [IndentSize] and returns a new one that has
4132 /// been shrunk or enlarged by the given size along the given direction.
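///
/// A minimal illustrative sketch (assuming the `len` field is accessible here):
/// ```ignore
/// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
/// assert_eq!(indent.len, 8);
/// ```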
4133 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4134 match direction {
4135 Ordering::Less => {
4136 if self.kind == size.kind && self.len >= size.len {
4137 self.len -= size.len;
4138 }
4139 }
4140 Ordering::Equal => {}
4141 Ordering::Greater => {
4142 if self.len == 0 {
4143 self = size;
4144 } else if self.kind == size.kind {
4145 self.len += size.len;
4146 }
4147 }
4148 }
4149 self
4150 }
4151}
4152
4153#[cfg(any(test, feature = "test-support"))]
4154pub struct TestFile {
4155 pub path: Arc<Path>,
4156 pub root_name: String,
4157}
4158
4159#[cfg(any(test, feature = "test-support"))]
4160impl File for TestFile {
4161 fn path(&self) -> &Arc<Path> {
4162 &self.path
4163 }
4164
4165 fn full_path(&self, _: &gpui::AppContext) -> PathBuf {
4166 PathBuf::from(&self.root_name).join(self.path.as_ref())
4167 }
4168
4169 fn as_local(&self) -> Option<&dyn LocalFile> {
4170 None
4171 }
4172
4173 fn mtime(&self) -> Option<SystemTime> {
4174 unimplemented!()
4175 }
4176
4177 fn file_name<'a>(&'a self, _: &'a gpui::AppContext) -> &'a std::ffi::OsStr {
4178 self.path().file_name().unwrap_or(self.root_name.as_ref())
4179 }
4180
4181 fn worktree_id(&self, _: &AppContext) -> WorktreeId {
4182 WorktreeId::from_usize(0)
4183 }
4184
4185 fn is_deleted(&self) -> bool {
4186 unimplemented!()
4187 }
4188
4189 fn as_any(&self) -> &dyn std::any::Any {
4190 unimplemented!()
4191 }
4192
4193 fn to_proto(&self, _: &AppContext) -> rpc::proto::File {
4194 unimplemented!()
4195 }
4196
4197 fn is_private(&self) -> bool {
4198 false
4199 }
4200}
4201
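/// Coalesces an ascending sequence of row numbers into contiguous ranges,
/// starting a new range whenever a gap appears or the current range reaches
/// `max_len` values.
///
/// For example (illustrative sketch):
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1u32, 2, 3, 5].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..6]);
/// ```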
4202pub(crate) fn contiguous_ranges(
4203 values: impl Iterator<Item = u32>,
4204 max_len: usize,
4205) -> impl Iterator<Item = Range<u32>> {
4206 let mut values = values;
4207 let mut current_range: Option<Range<u32>> = None;
4208 std::iter::from_fn(move || loop {
4209 if let Some(value) = values.next() {
4210 if let Some(range) = &mut current_range {
4211 if value == range.end && range.len() < max_len {
4212 range.end += 1;
4213 continue;
4214 }
4215 }
4216
4217 let prev_range = current_range.clone();
4218 current_range = Some(value..(value + 1));
4219 if prev_range.is_some() {
4220 return prev_range;
4221 }
4222 } else {
4223 return current_range.take();
4224 }
4225 })
4226}
4227
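/// Classifies characters as word, whitespace, or punctuation, optionally taking
/// a language's extra word characters into account.
///
/// For example (illustrative sketch):
/// ```ignore
/// let classifier = CharClassifier::new(None).ignore_punctuation(true);
/// assert!(classifier.is_word('-'));
/// ```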
4228#[derive(Default, Debug)]
4229pub struct CharClassifier {
4230 scope: Option<LanguageScope>,
4231 for_completion: bool,
4232 ignore_punctuation: bool,
4233}
4234
4235impl CharClassifier {
4236 pub fn new(scope: Option<LanguageScope>) -> Self {
4237 Self {
4238 scope,
4239 for_completion: false,
4240 ignore_punctuation: false,
4241 }
4242 }
4243
4244 pub fn for_completion(self, for_completion: bool) -> Self {
4245 Self {
4246 for_completion,
4247 ..self
4248 }
4249 }
4250
4251 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4252 Self {
4253 ignore_punctuation,
4254 ..self
4255 }
4256 }
4257
4258 pub fn is_whitespace(&self, c: char) -> bool {
4259 self.kind(c) == CharKind::Whitespace
4260 }
4261
4262 pub fn is_word(&self, c: char) -> bool {
4263 self.kind(c) == CharKind::Word
4264 }
4265
4266 pub fn is_punctuation(&self, c: char) -> bool {
4267 self.kind(c) == CharKind::Punctuation
4268 }
4269
4270 pub fn kind(&self, c: char) -> CharKind {
4271 if c.is_whitespace() {
4272 return CharKind::Whitespace;
4273 } else if c.is_alphanumeric() || c == '_' {
4274 return CharKind::Word;
4275 }
4276
4277 if let Some(scope) = &self.scope {
4278 if let Some(characters) = scope.word_characters() {
4279 if characters.contains(&c) {
4280 if c == '-' && !self.for_completion && !self.ignore_punctuation {
4281 return CharKind::Punctuation;
4282 }
4283 return CharKind::Word;
4284 }
4285 }
4286 }
4287
4288 if self.ignore_punctuation {
4289 CharKind::Word
4290 } else {
4291 CharKind::Punctuation
4292 }
4293 }
4294}
4295
4296/// Find all of the ranges of whitespace that occur at the ends of lines
4297/// in the given rope.
4298///
4299/// This could also be done with a regex search, but this implementation
4300/// avoids copying text.
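///
/// A minimal illustrative sketch (assuming `Rope: From<&str>`):
/// ```ignore
/// let rope = Rope::from("let x = 1;  \nlet y = 2;\n");
/// // Only the two spaces after the first line are trailing whitespace.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![10..12]);
/// ```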
4301pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4302 let mut ranges = Vec::new();
4303
4304 let mut offset = 0;
4305 let mut prev_chunk_trailing_whitespace_range = 0..0;
4306 for chunk in rope.chunks() {
4307 let mut prev_line_trailing_whitespace_range = 0..0;
4308 for (i, line) in chunk.split('\n').enumerate() {
4309 let line_end_offset = offset + line.len();
4310 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4311 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4312
4313 if i == 0 && trimmed_line_len == 0 {
4314 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4315 }
4316 if !prev_line_trailing_whitespace_range.is_empty() {
4317 ranges.push(prev_line_trailing_whitespace_range);
4318 }
4319
4320 offset = line_end_offset + 1;
4321 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4322 }
4323
4324 offset -= 1;
4325 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4326 }
4327
4328 if !prev_chunk_trailing_whitespace_range.is_empty() {
4329 ranges.push(prev_chunk_trailing_whitespace_range);
4330 }
4331
4332 ranges
4333}