pub use crate::{
    diagnostic_set::DiagnosticSet,
    highlight_map::{HighlightId, HighlightMap},
    markdown::ParsedMarkdown,
    proto, Grammar, Language, LanguageRegistry,
};
use crate::{
    diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
    language_settings::{language_settings, IndentGuideSettings, LanguageSettings},
    markdown::parse_markdown,
    outline::OutlineItem,
    syntax_map::{
        SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
        SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
    },
    task_context::RunnableRange,
    LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
};
use anyhow::{anyhow, Context, Result};
use async_watch as watch;
pub use clock::ReplicaId;
use futures::channel::oneshot;
use gpui::{
    AnyElement, AppContext, EventEmitter, HighlightStyle, ModelContext, Pixels, Task, TaskLabel,
    WindowContext,
};
use lsp::LanguageServerId;
use parking_lot::Mutex;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use settings::WorktreeId;
use similar::{ChangeTag, TextDiff};
use smallvec::SmallVec;
use smol::future::yield_now;
use std::{
    any::Any,
    cell::Cell,
    cmp::{self, Ordering, Reverse},
    collections::BTreeMap,
    ffi::OsStr,
    fmt,
    future::Future,
    iter::{self, Iterator, Peekable},
    mem,
    ops::{Deref, DerefMut, Range},
    path::{Path, PathBuf},
    str,
    sync::{Arc, LazyLock},
    time::{Duration, Instant, SystemTime},
    vec,
};
use sum_tree::TreeMap;
use text::operation_queue::OperationQueue;
use text::*;
pub use text::{
    Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
    OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
    Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
    Transaction, TransactionId, Unclipped,
};
use theme::SyntaxTheme;
#[cfg(any(test, feature = "test-support"))]
use util::RandomCharIter;
use util::RangeExt;

#[cfg(any(test, feature = "test-support"))]
pub use {tree_sitter_rust, tree_sitter_typescript};

pub use lsp::DiagnosticSeverity;

/// A label for the background task spawned by the buffer to compute
/// a diff against the contents of its file.
pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);

/// Indicate whether a [Buffer] has permissions to edit.
#[derive(PartialEq, Clone, Copy, Debug)]
pub enum Capability {
    /// The buffer is a mutable replica.
    ReadWrite,
    /// The buffer is a read-only replica.
    ReadOnly,
}

pub type BufferRow = u32;

/// An in-memory representation of a source code file, including its text,
/// syntax trees, git status, and diagnostics.
pub struct Buffer {
    text: TextBuffer,
    diff_base: Option<Rope>,
    git_diff: git::diff::BufferDiff,
    file: Option<Arc<dyn File>>,
    /// The mtime of the file when this buffer was last loaded from
    /// or saved to disk.
    saved_mtime: Option<SystemTime>,
    /// The version vector when this buffer was last loaded from
    /// or saved to disk.
    saved_version: clock::Global,
    preview_version: clock::Global,
    transaction_depth: usize,
    was_dirty_before_starting_transaction: Option<bool>,
    reload_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    autoindent_requests: Vec<Arc<AutoindentRequest>>,
    pending_autoindent: Option<Task<()>>,
    sync_parse_timeout: Duration,
    syntax_map: Mutex<SyntaxMap>,
    parsing_in_background: bool,
    parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
    non_text_state_update_count: usize,
    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    diagnostics_timestamp: clock::Lamport,
    completion_triggers: Vec<String>,
    completion_triggers_timestamp: clock::Lamport,
    deferred_ops: OperationQueue<Operation>,
    capability: Capability,
    has_conflict: bool,
    diff_base_version: usize,
    /// Memoize calls to has_changes_since(saved_version).
    /// The contents of the cell are (self.version, has_changes) at the time of the last call.
    has_unsaved_edits: Cell<(clock::Global, bool)>,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ParseStatus {
    Idle,
    Parsing,
}

/// An immutable, cheaply cloneable representation of a fixed
/// state of a buffer.
pub struct BufferSnapshot {
    text: text::BufferSnapshot,
    git_diff: git::diff::BufferDiff,
    pub(crate) syntax: SyntaxSnapshot,
    file: Option<Arc<dyn File>>,
    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    language: Option<Arc<Language>>,
    non_text_state_update_count: usize,
}

/// The kind and amount of indentation in a particular line. For now,
/// assumes that indentation is all the same character.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
pub struct IndentSize {
    /// The number of bytes that comprise the indentation.
    pub len: u32,
    /// The kind of whitespace used for indentation.
    pub kind: IndentKind,
}

/// A whitespace character that's used for indentation.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
pub enum IndentKind {
    /// An ASCII space character.
    #[default]
    Space,
    /// An ASCII tab character.
    Tab,
}

/// The shape of a selection cursor.
#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum CursorShape {
    /// A vertical bar
    #[default]
    Bar,
    /// A block that surrounds the following character
    Block,
    /// An underline that runs along the following character
    Underscore,
    /// A box drawn around the following character
    Hollow,
}

#[derive(Clone, Debug)]
struct SelectionSet {
    line_mode: bool,
    cursor_shape: CursorShape,
    selections: Arc<[Selection<Anchor>]>,
    lamport_timestamp: clock::Lamport,
}

/// A diagnostic associated with a certain range of a buffer.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Diagnostic {
    /// The name of the service that produced this diagnostic.
    pub source: Option<String>,
    /// A machine-readable code that identifies this diagnostic.
    pub code: Option<String>,
    /// Whether this diagnostic is a hint, warning, or error.
    pub severity: DiagnosticSeverity,
    /// The human-readable message associated with this diagnostic.
    pub message: String,
    /// An id that identifies the group to which this diagnostic belongs.
    ///
    /// When a language server produces a diagnostic with
    /// one or more associated diagnostics, those diagnostics are all
    /// assigned a single group id.
    pub group_id: usize,
    /// Whether this diagnostic is the primary diagnostic for its group.
    ///
    /// In a given group, the primary diagnostic is the top-level diagnostic
    /// returned by the language server. The non-primary diagnostics are the
    /// associated diagnostics.
    pub is_primary: bool,
    /// Whether this diagnostic is considered to originate from an analysis of
    /// files on disk, as opposed to any unsaved buffer contents. This is a
    /// property of a given diagnostic source, and is configured for a given
    /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
    /// for the language server.
    pub is_disk_based: bool,
    /// Whether this diagnostic marks unnecessary code.
    pub is_unnecessary: bool,
    /// Data from the language server that produced this diagnostic. Passed back to the
    /// language server when we request code actions for this diagnostic.
    pub data: Option<Value>,
}

/// TODO - move this into the `project` crate and make it private.
pub async fn prepare_completion_documentation(
    documentation: &lsp::Documentation,
    language_registry: &Arc<LanguageRegistry>,
    language: Option<Arc<Language>>,
) -> Documentation {
    match documentation {
        lsp::Documentation::String(text) => {
            if text.lines().count() <= 1 {
                Documentation::SingleLine(text.clone())
            } else {
                Documentation::MultiLinePlainText(text.clone())
            }
        }

        lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
            lsp::MarkupKind::PlainText => {
                if value.lines().count() <= 1 {
                    Documentation::SingleLine(value.clone())
                } else {
                    Documentation::MultiLinePlainText(value.clone())
                }
            }

            lsp::MarkupKind::Markdown => {
                let parsed = parse_markdown(value, language_registry, language).await;
                Documentation::MultiLineMarkdown(parsed)
            }
        },
    }
}

/// Documentation associated with a [`Completion`].
#[derive(Clone, Debug)]
pub enum Documentation {
    /// There is no documentation for this completion.
    Undocumented,
    /// A single line of documentation.
    SingleLine(String),
    /// Multiple lines of plain text documentation.
    MultiLinePlainText(String),
    /// Markdown documentation.
    MultiLineMarkdown(ParsedMarkdown),
}

/// An operation used to synchronize this buffer with its other replicas.
#[derive(Clone, Debug, PartialEq)]
pub enum Operation {
    /// A text operation.
    Buffer(text::Operation),

    /// An update to the buffer's diagnostics.
    UpdateDiagnostics {
        /// The id of the language server that produced the new diagnostics.
        server_id: LanguageServerId,
        /// The diagnostics.
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },

    /// An update to the most recent selections in this buffer.
    UpdateSelections {
        /// The selections.
        selections: Arc<[Selection<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// Whether the selections are in 'line mode'.
        line_mode: bool,
        /// The [`CursorShape`] associated with these selections.
        cursor_shape: CursorShape,
    },

    /// An update to the characters that should trigger autocompletion
    /// for this buffer.
    UpdateCompletionTriggers {
        /// The characters that trigger autocompletion.
        triggers: Vec<String>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },
}

/// An event that occurs in a buffer.
#[derive(Clone, Debug, PartialEq)]
pub enum BufferEvent {
    /// The buffer was changed in a way that must be
    /// propagated to its other replicas.
    Operation(Operation),
    /// The buffer was edited.
    Edited,
    /// The buffer's `dirty` bit changed.
    DirtyChanged,
    /// The buffer was saved.
    Saved,
    /// The buffer's file was changed on disk.
    FileHandleChanged,
    /// The buffer was reloaded.
    Reloaded,
    /// The buffer's diff_base changed.
    DiffBaseChanged,
    /// Buffer's excerpts for a certain diff base were recalculated.
    DiffUpdated,
    /// The buffer's language was changed.
    LanguageChanged,
    /// The buffer's syntax trees were updated.
    Reparsed,
    /// The buffer's diagnostics were updated.
    DiagnosticsUpdated,
    /// The buffer gained or lost editing capabilities.
    CapabilityChanged,
    /// The buffer was explicitly requested to close.
    Closed,
    /// The buffer was discarded when closing.
    Discarded,
}

/// The file associated with a buffer.
pub trait File: Send + Sync {
    /// Returns the [`LocalFile`] associated with this file, if the
    /// file is local.
    fn as_local(&self) -> Option<&dyn LocalFile>;

    /// Returns whether this file is local.
    fn is_local(&self) -> bool {
        self.as_local().is_some()
    }

    /// Returns the file's mtime.
    fn mtime(&self) -> Option<SystemTime>;

    /// Returns the path of this file relative to the worktree's root directory.
    fn path(&self) -> &Arc<Path>;

    /// Returns the path of this file relative to the worktree's parent directory (this means it
    /// includes the name of the worktree's root folder).
    fn full_path(&self, cx: &AppContext) -> PathBuf;

    /// Returns the last component of this handle's absolute path. If this handle refers to the root
    /// of its worktree, then this method will return the name of the worktree itself.
    fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;

    /// Returns the id of the worktree to which this file belongs.
    ///
    /// This is needed for looking up project-specific settings.
    fn worktree_id(&self, cx: &AppContext) -> WorktreeId;

    /// Returns whether the file has been deleted.
    fn is_deleted(&self) -> bool;

    /// Returns whether the file existed on disk at one point.
    fn is_created(&self) -> bool {
        self.mtime().is_some()
    }

    /// Converts this file into an [`Any`] trait object.
    fn as_any(&self) -> &dyn Any;

    /// Converts this file into a protobuf message.
    fn to_proto(&self, cx: &AppContext) -> rpc::proto::File;

    /// Return whether Zed considers this to be a private file.
    fn is_private(&self) -> bool;
}

/// The file associated with a buffer, in the case where the file is on the local disk.
pub trait LocalFile: File {
    /// Returns the absolute path of this file.
    fn abs_path(&self, cx: &AppContext) -> PathBuf;

    /// Loads the file's contents from disk.
    fn load(&self, cx: &AppContext) -> Task<Result<String>>;

    /// Returns true if the file should not be shared with collaborators.
    fn is_private(&self, _: &AppContext) -> bool {
        false
    }
}

/// The auto-indent behavior associated with an editing operation.
/// For some editing operations, each affected line of text has its
/// indentation recomputed. For other operations, the entire block
/// of edited text is adjusted uniformly.
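///
/// # Example
///
/// A hedged, illustrative sketch (not a doctest) of passing each mode to
/// [`Buffer::edit`]; it assumes a `ModelContext<Buffer>` named `cx` and a
/// mutable `buffer`, and the original indent column is an assumed value:
///
/// ```ignore
/// // Re-indent every inserted line individually.
/// buffer.edit([(0..0, "fn main() {\n}\n")], Some(AutoindentMode::EachLine), cx);
///
/// // Shift a pasted block uniformly, preserving its internal structure.
/// buffer.edit(
///     [(0..0, "    let a = 1;\n    let b = 2;\n")],
///     Some(AutoindentMode::Block { original_indent_columns: vec![4] }),
///     cx,
/// );
/// ```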
#[derive(Clone, Debug)]
pub enum AutoindentMode {
    /// Indent each line of inserted text.
    EachLine,
    /// Apply the same indentation adjustment to all of the lines
    /// in a given insertion.
    Block {
        /// The original indentation level of the first line of each
        /// insertion, if it has been copied.
        original_indent_columns: Vec<u32>,
    },
}

#[derive(Clone)]
struct AutoindentRequest {
    before_edit: BufferSnapshot,
    entries: Vec<AutoindentRequestEntry>,
    is_block_mode: bool,
}

#[derive(Clone)]
struct AutoindentRequestEntry {
    /// A range of the buffer whose indentation should be adjusted.
    range: Range<Anchor>,
    /// Whether or not these lines should be considered brand new, for the
    /// purpose of auto-indent. When text is not new, its indentation will
    /// only be adjusted if the suggested indentation level has *changed*
    /// since the edit was made.
    first_line_is_new: bool,
    indent_size: IndentSize,
    original_indent_column: Option<u32>,
}

#[derive(Debug)]
struct IndentSuggestion {
    basis_row: u32,
    delta: Ordering,
    within_error: bool,
}

struct BufferChunkHighlights<'a> {
    captures: SyntaxMapCaptures<'a>,
    next_capture: Option<SyntaxMapCapture<'a>>,
    stack: Vec<(usize, HighlightId)>,
    highlight_maps: Vec<HighlightMap>,
}

/// An iterator that yields chunks of a buffer's text, along with their
/// syntax highlights and diagnostic status.
pub struct BufferChunks<'a> {
    buffer_snapshot: Option<&'a BufferSnapshot>,
    range: Range<usize>,
    chunks: text::Chunks<'a>,
    diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
    error_depth: usize,
    warning_depth: usize,
    information_depth: usize,
    hint_depth: usize,
    unnecessary_depth: usize,
    highlights: Option<BufferChunkHighlights<'a>>,
}

/// A chunk of a buffer's text, along with its syntax highlight and
/// diagnostic status.
#[derive(Clone, Debug, Default)]
pub struct Chunk<'a> {
    /// The text of the chunk.
    pub text: &'a str,
    /// The syntax highlighting style of the chunk.
    pub syntax_highlight_id: Option<HighlightId>,
    /// The highlight style that has been applied to this chunk in
    /// the editor.
    pub highlight_style: Option<HighlightStyle>,
    /// The severity of diagnostic associated with this chunk, if any.
    pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// Whether this chunk of text is marked as unnecessary.
    pub is_unnecessary: bool,
    /// Whether this chunk of text was originally a tab character.
    pub is_tab: bool,
    /// An optional recipe for how the chunk should be presented.
    pub renderer: Option<ChunkRenderer>,
}

/// A recipe for how the chunk should be presented.
#[derive(Clone)]
pub struct ChunkRenderer {
    /// Creates a custom element to represent this chunk.
    pub render: Arc<dyn Send + Sync + Fn(&mut ChunkRendererContext) -> AnyElement>,
    /// If true, the element is constrained to the shaped width of the text.
    pub constrain_width: bool,
}

pub struct ChunkRendererContext<'a, 'b> {
    pub context: &'a mut WindowContext<'b>,
    pub max_width: Pixels,
}

impl fmt::Debug for ChunkRenderer {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("ChunkRenderer")
            .field("constrain_width", &self.constrain_width)
            .finish()
    }
}

impl<'a, 'b> Deref for ChunkRendererContext<'a, 'b> {
    type Target = WindowContext<'b>;

    fn deref(&self) -> &Self::Target {
        self.context
    }
}

impl<'a, 'b> DerefMut for ChunkRendererContext<'a, 'b> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.context
    }
}

/// A set of edits to a given version of a buffer, computed asynchronously.
#[derive(Debug)]
pub struct Diff {
    pub(crate) base_version: clock::Global,
    line_ending: LineEnding,
    edits: Vec<(Range<usize>, Arc<str>)>,
}

#[derive(Clone, Copy)]
pub(crate) struct DiagnosticEndpoint {
    offset: usize,
    is_start: bool,
    severity: DiagnosticSeverity,
    is_unnecessary: bool,
}

/// A class of characters, used for characterizing a run of text.
#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
pub enum CharKind {
    /// Whitespace.
    Whitespace,
    /// Punctuation.
    Punctuation,
    /// Word.
    Word,
}

/// A runnable is a set of data about a region that could be resolved into a task
pub struct Runnable {
    pub tags: SmallVec<[RunnableTag; 1]>,
    pub language: Arc<Language>,
    pub buffer: BufferId,
}

#[derive(Clone, Debug, PartialEq)]
pub struct IndentGuide {
    pub buffer_id: BufferId,
    pub start_row: BufferRow,
    pub end_row: BufferRow,
    pub depth: u32,
    pub tab_size: u32,
    pub settings: IndentGuideSettings,
}

impl IndentGuide {
    pub fn indent_level(&self) -> u32 {
        self.depth * self.tab_size
    }
}

impl Buffer {
    /// Create a new buffer with the given base text.
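    ///
    /// # Example
    ///
    /// A minimal, illustrative sketch (not a doctest); it assumes it runs where
    /// a `gpui::AppContext` named `cx` is available:
    ///
    /// ```ignore
    /// let buffer = cx.new_model(|cx| Buffer::local("hello world", cx));
    /// ```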
    pub fn local<T: Into<String>>(base_text: T, cx: &mut ModelContext<Self>) -> Self {
        Self::build(
            TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
            None,
            None,
            Capability::ReadWrite,
        )
    }

    /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
    pub fn local_normalized(
        base_text_normalized: Rope,
        line_ending: LineEnding,
        cx: &mut ModelContext<Self>,
    ) -> Self {
        Self::build(
            TextBuffer::new_normalized(
                0,
                cx.entity_id().as_non_zero_u64().into(),
                line_ending,
                base_text_normalized,
            ),
            None,
            None,
            Capability::ReadWrite,
        )
    }

    /// Create a new buffer that is a replica of a remote buffer.
    pub fn remote(
        remote_id: BufferId,
        replica_id: ReplicaId,
        capability: Capability,
        base_text: impl Into<String>,
    ) -> Self {
        Self::build(
            TextBuffer::new(replica_id, remote_id, base_text.into()),
            None,
            None,
            capability,
        )
    }

    /// Create a new buffer that is a replica of a remote buffer, populating its
    /// state from the given protobuf message.
    pub fn from_proto(
        replica_id: ReplicaId,
        capability: Capability,
        message: proto::BufferState,
        file: Option<Arc<dyn File>>,
    ) -> Result<Self> {
        let buffer_id = BufferId::new(message.id)
            .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
        let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
        let mut this = Self::build(buffer, message.diff_base, file, capability);
        this.text.set_line_ending(proto::deserialize_line_ending(
            rpc::proto::LineEnding::from_i32(message.line_ending)
                .ok_or_else(|| anyhow!("missing line_ending"))?,
        ));
        this.saved_version = proto::deserialize_version(&message.saved_version);
        this.saved_mtime = message.saved_mtime.map(|time| time.into());
        Ok(this)
    }

    /// Serialize the buffer's state to a protobuf message.
    pub fn to_proto(&self, cx: &AppContext) -> proto::BufferState {
        proto::BufferState {
            id: self.remote_id().into(),
            file: self.file.as_ref().map(|f| f.to_proto(cx)),
            base_text: self.base_text().to_string(),
            diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
            line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
            saved_version: proto::serialize_version(&self.saved_version),
            saved_mtime: self.saved_mtime.map(|time| time.into()),
        }
    }

    /// Serialize as protobufs all of the changes to the buffer since the given version.
    pub fn serialize_ops(
        &self,
        since: Option<clock::Global>,
        cx: &AppContext,
    ) -> Task<Vec<proto::Operation>> {
        let mut operations = Vec::new();
        operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));

        operations.extend(self.remote_selections.iter().map(|(_, set)| {
            proto::serialize_operation(&Operation::UpdateSelections {
                selections: set.selections.clone(),
                lamport_timestamp: set.lamport_timestamp,
                line_mode: set.line_mode,
                cursor_shape: set.cursor_shape,
            })
        }));

        for (server_id, diagnostics) in &self.diagnostics {
            operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
                lamport_timestamp: self.diagnostics_timestamp,
                server_id: *server_id,
                diagnostics: diagnostics.iter().cloned().collect(),
            }));
        }

        operations.push(proto::serialize_operation(
            &Operation::UpdateCompletionTriggers {
                triggers: self.completion_triggers.clone(),
                lamport_timestamp: self.completion_triggers_timestamp,
            },
        ));

        let text_operations = self.text.operations().clone();
        cx.background_executor().spawn(async move {
            let since = since.unwrap_or_default();
            operations.extend(
                text_operations
                    .iter()
                    .filter(|(_, op)| !since.observed(op.timestamp()))
                    .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
            );
            operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
            operations
        })
    }

    /// Assign a language to the buffer, returning the buffer.
    pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
        self.set_language(Some(language), cx);
        self
    }

    /// Returns the [Capability] of this buffer.
    pub fn capability(&self) -> Capability {
        self.capability
    }

    /// Whether this buffer can only be read.
    pub fn read_only(&self) -> bool {
        self.capability == Capability::ReadOnly
    }

    /// Builds a [Buffer] with the given underlying [TextBuffer], diff base, [File] and [Capability].
    pub fn build(
        buffer: TextBuffer,
        diff_base: Option<String>,
        file: Option<Arc<dyn File>>,
        capability: Capability,
    ) -> Self {
        let saved_mtime = file.as_ref().and_then(|file| file.mtime());

        Self {
            saved_mtime,
            saved_version: buffer.version(),
            preview_version: buffer.version(),
            reload_task: None,
            transaction_depth: 0,
            was_dirty_before_starting_transaction: None,
            has_unsaved_edits: Cell::new((buffer.version(), false)),
            text: buffer,
            diff_base: diff_base
                .map(|mut raw_diff_base| {
                    LineEnding::normalize(&mut raw_diff_base);
                    raw_diff_base
                })
                .map(Rope::from),
            diff_base_version: 0,
            git_diff: git::diff::BufferDiff::new(),
            file,
            capability,
            syntax_map: Mutex::new(SyntaxMap::new()),
            parsing_in_background: false,
            non_text_state_update_count: 0,
            sync_parse_timeout: Duration::from_millis(1),
            parse_status: async_watch::channel(ParseStatus::Idle),
            autoindent_requests: Default::default(),
            pending_autoindent: Default::default(),
            language: None,
            remote_selections: Default::default(),
            diagnostics: Default::default(),
            diagnostics_timestamp: Default::default(),
            completion_triggers: Default::default(),
            completion_triggers_timestamp: Default::default(),
            deferred_ops: OperationQueue::new(),
            has_conflict: false,
        }
    }

    /// Retrieve a snapshot of the buffer's current state. This is computationally
    /// cheap, and allows reading from the buffer on a background thread.
    pub fn snapshot(&self) -> BufferSnapshot {
        let text = self.text.snapshot();
        let mut syntax_map = self.syntax_map.lock();
        syntax_map.interpolate(&text);
        let syntax = syntax_map.snapshot();

        BufferSnapshot {
            text,
            syntax,
            git_diff: self.git_diff.clone(),
            file: self.file.clone(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            language: self.language.clone(),
            non_text_state_update_count: self.non_text_state_update_count,
        }
    }

    #[cfg(test)]
    pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
        &self.text
    }

    /// Retrieve a snapshot of the buffer's raw text, without any
    /// language-related state like the syntax tree or diagnostics.
    pub fn text_snapshot(&self) -> text::BufferSnapshot {
        self.text.snapshot()
    }

    /// The file associated with the buffer, if any.
    pub fn file(&self) -> Option<&Arc<dyn File>> {
        self.file.as_ref()
    }

    /// The version of the buffer that was last saved or reloaded from disk.
    pub fn saved_version(&self) -> &clock::Global {
        &self.saved_version
    }

    /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
    pub fn saved_mtime(&self) -> Option<SystemTime> {
        self.saved_mtime
    }

    /// Assign a language to the buffer.
    pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
        self.non_text_state_update_count += 1;
        self.syntax_map.lock().clear();
        self.language = language;
        self.reparse(cx);
        cx.emit(BufferEvent::LanguageChanged);
    }

    /// Assign a language registry to the buffer. This allows the buffer to retrieve
    /// other languages if parts of the buffer are written in different languages.
    pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
        self.syntax_map
            .lock()
            .set_language_registry(language_registry);
    }

    pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
        self.syntax_map.lock().language_registry()
    }

    /// Assign the buffer a new [Capability].
    pub fn set_capability(&mut self, capability: Capability, cx: &mut ModelContext<Self>) {
        self.capability = capability;
        cx.emit(BufferEvent::CapabilityChanged)
    }

    /// This method is called to signal that the buffer has been saved.
    pub fn did_save(
        &mut self,
        version: clock::Global,
        mtime: Option<SystemTime>,
        cx: &mut ModelContext<Self>,
    ) {
        self.saved_version = version;
        self.has_unsaved_edits
            .set((self.saved_version().clone(), false));
        self.has_conflict = false;
        self.saved_mtime = mtime;
        cx.emit(BufferEvent::Saved);
        cx.notify();
    }

    /// This method is called to signal that the buffer has been discarded.
    pub fn discarded(&mut self, cx: &mut ModelContext<Self>) {
        cx.emit(BufferEvent::Discarded);
        cx.notify();
    }

    /// Reloads the contents of the buffer from disk.
    pub fn reload(
        &mut self,
        cx: &mut ModelContext<Self>,
    ) -> oneshot::Receiver<Option<Transaction>> {
        let (tx, rx) = futures::channel::oneshot::channel();
        let prev_version = self.text.version();
        self.reload_task = Some(cx.spawn(|this, mut cx| async move {
            let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
                let file = this.file.as_ref()?.as_local()?;
                Some((file.mtime(), file.load(cx)))
            })?
            else {
                return Ok(());
            };

            let new_text = new_text.await?;
            let diff = this
                .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
                .await;
            this.update(&mut cx, |this, cx| {
                if this.version() == diff.base_version {
                    this.finalize_last_transaction();
                    this.apply_diff(diff, cx);
                    tx.send(this.finalize_last_transaction().cloned()).ok();
                    this.has_conflict = false;
                    this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
                } else {
                    if !diff.edits.is_empty()
                        || this
                            .edits_since::<usize>(&diff.base_version)
                            .next()
                            .is_some()
                    {
                        this.has_conflict = true;
                    }

                    this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
                }

                this.reload_task.take();
            })
        }));
        rx
    }

    /// This method is called to signal that the buffer has been reloaded.
    pub fn did_reload(
        &mut self,
        version: clock::Global,
        line_ending: LineEnding,
        mtime: Option<SystemTime>,
        cx: &mut ModelContext<Self>,
    ) {
        self.saved_version = version;
        self.has_unsaved_edits
            .set((self.saved_version.clone(), false));
        self.text.set_line_ending(line_ending);
        self.saved_mtime = mtime;
        cx.emit(BufferEvent::Reloaded);
        cx.notify();
    }

    /// Updates the [File] backing this buffer. This should be called when
    /// the file has changed or has been deleted.
    pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut ModelContext<Self>) {
        let mut file_changed = false;

        if let Some(old_file) = self.file.as_ref() {
            if new_file.path() != old_file.path() {
                file_changed = true;
            }

            if new_file.is_deleted() {
                if !old_file.is_deleted() {
                    file_changed = true;
                    if !self.is_dirty() {
                        cx.emit(BufferEvent::DirtyChanged);
                    }
                }
            } else {
                let new_mtime = new_file.mtime();
                if new_mtime != old_file.mtime() {
                    file_changed = true;

                    if !self.is_dirty() {
                        self.reload(cx).close();
                    }
                }
            }
        } else {
            file_changed = true;
        };

        self.file = Some(new_file);
        if file_changed {
            self.non_text_state_update_count += 1;
            cx.emit(BufferEvent::FileHandleChanged);
            cx.notify();
        }
    }

    /// Returns the current diff base, see [Buffer::set_diff_base].
    pub fn diff_base(&self) -> Option<&Rope> {
        self.diff_base.as_ref()
    }

    /// Sets the text that will be used to compute a Git diff
    /// against the buffer text.
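    ///
    /// An illustrative sketch (not a doctest) of assigning a diff base, such as
    /// the file's contents at HEAD; `head_text` is an assumed variable loaded
    /// elsewhere:
    ///
    /// ```ignore
    /// buffer.set_diff_base(Some(head_text), cx);
    /// // A `BufferEvent::DiffBaseChanged` event is emitted once the Git diff
    /// // has been recomputed against the new base.
    /// ```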
    pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &mut ModelContext<Self>) {
        self.diff_base = diff_base
            .map(|mut raw_diff_base| {
                LineEnding::normalize(&mut raw_diff_base);
                raw_diff_base
            })
            .map(Rope::from);
        self.diff_base_version += 1;
        if let Some(recalc_task) = self.git_diff_recalc(cx) {
            cx.spawn(|buffer, mut cx| async move {
                recalc_task.await;
                buffer
                    .update(&mut cx, |_, cx| {
                        cx.emit(BufferEvent::DiffBaseChanged);
                    })
                    .ok();
            })
            .detach();
        }
    }

    /// Returns a version number that is incremented each time a new diff base is
    /// assigned to the buffer.
    pub fn diff_base_version(&self) -> usize {
        self.diff_base_version
    }

    /// Recomputes the Git diff status.
    pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) -> Option<Task<()>> {
        let diff_base = self.diff_base.clone()?;
        let snapshot = self.snapshot();

        let mut diff = self.git_diff.clone();
        let diff = cx.background_executor().spawn(async move {
            diff.update(&diff_base, &snapshot).await;
            diff
        });

        Some(cx.spawn(|this, mut cx| async move {
            let buffer_diff = diff.await;
            this.update(&mut cx, |this, cx| {
                this.git_diff = buffer_diff;
                this.non_text_state_update_count += 1;
                cx.emit(BufferEvent::DiffUpdated);
            })
            .ok();
        }))
    }

    /// Returns the primary [Language] assigned to this [Buffer].
    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }

    /// Returns the [Language] at the given location.
    pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
        let offset = position.to_offset(self);
        self.syntax_map
            .lock()
            .layers_for_range(offset..offset, &self.text, false)
            .last()
            .map(|info| info.language.clone())
            .or_else(|| self.language.clone())
    }

    /// An integer version number that accounts for all updates besides
    /// the buffer's text itself (which is versioned via a version vector).
    pub fn non_text_state_update_count(&self) -> usize {
        self.non_text_state_update_count
    }

    /// Whether the buffer is being parsed in the background.
    #[cfg(any(test, feature = "test-support"))]
    pub fn is_parsing(&self) -> bool {
        self.parsing_in_background
    }

    /// Indicates whether the buffer contains any regions that may be
    /// written in a language that hasn't been loaded yet.
    pub fn contains_unknown_injections(&self) -> bool {
        self.syntax_map.lock().contains_unknown_injections()
    }

    #[cfg(test)]
    pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
        self.sync_parse_timeout = timeout;
    }

    /// Called after an edit to synchronize the buffer's main parse tree with
    /// the buffer's new underlying state.
    ///
    /// Locks the syntax map and interpolates the edits since the last reparse
    /// into the foreground syntax tree.
    ///
    /// Then takes a stable snapshot of the syntax map before unlocking it.
    /// The snapshot with the interpolated edits is sent to a background thread,
    /// where we ask Tree-sitter to perform an incremental parse.
    ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting on the parse to complete. As soon as it completes, we proceed
    /// synchronously, unless a 1ms timeout elapses.
    ///
    /// If we time out waiting on the parse, we spawn a second task waiting
    /// until the parse does complete and return with the interpolated tree still
    /// in the foreground. When the background parse completes, call back into
    /// the main thread and assign the foreground parse state.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// initiate an additional reparse recursively. To avoid concurrent parses
    /// for the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
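    ///
    /// A hedged sketch (not a doctest) of observing a reparse, e.g. from test
    /// code; it assumes an async context, a model handle `buffer`, and that a
    /// language has already been assigned:
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| buffer.reparse(cx));
    /// let mut status = buffer.read_with(cx, |buffer, _| buffer.parse_status());
    /// while *status.borrow() != ParseStatus::Idle {
    ///     status.changed().await?;
    /// }
    /// ```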
    pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
        if self.parsing_in_background {
            return;
        }
        let language = if let Some(language) = self.language.clone() {
            language
        } else {
            return;
        };

        let text = self.text_snapshot();
        let parsed_version = self.version();

        let mut syntax_map = self.syntax_map.lock();
        syntax_map.interpolate(&text);
        let language_registry = syntax_map.language_registry();
        let mut syntax_snapshot = syntax_map.snapshot();
        drop(syntax_map);

        let parse_task = cx.background_executor().spawn({
            let language = language.clone();
            let language_registry = language_registry.clone();
            async move {
                syntax_snapshot.reparse(&text, language_registry, language);
                syntax_snapshot
            }
        });

        self.parse_status.0.send(ParseStatus::Parsing).unwrap();
        match cx
            .background_executor()
            .block_with_timeout(self.sync_parse_timeout, parse_task)
        {
            Ok(new_syntax_snapshot) => {
                self.did_finish_parsing(new_syntax_snapshot, cx);
            }
            Err(parse_task) => {
                self.parsing_in_background = true;
                cx.spawn(move |this, mut cx| async move {
                    let new_syntax_map = parse_task.await;
                    this.update(&mut cx, move |this, cx| {
                        let grammar_changed =
                            this.language.as_ref().map_or(true, |current_language| {
                                !Arc::ptr_eq(&language, current_language)
                            });
                        let language_registry_changed = new_syntax_map
                            .contains_unknown_injections()
                            && language_registry.map_or(false, |registry| {
                                registry.version() != new_syntax_map.language_registry_version()
                            });
                        let parse_again = language_registry_changed
                            || grammar_changed
                            || this.version.changed_since(&parsed_version);
                        this.did_finish_parsing(new_syntax_map, cx);
                        this.parsing_in_background = false;
                        if parse_again {
                            this.reparse(cx);
                        }
                    })
                    .ok();
                })
                .detach();
            }
        }
    }

    fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
        self.non_text_state_update_count += 1;
        self.syntax_map.lock().did_parse(syntax_snapshot);
        self.request_autoindent(cx);
        self.parse_status.0.send(ParseStatus::Idle).unwrap();
        cx.emit(BufferEvent::Reparsed);
        cx.notify();
    }

    pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
        self.parse_status.1.clone()
    }

    /// Assign to the buffer a set of diagnostics created by a given language server.
    pub fn update_diagnostics(
        &mut self,
        server_id: LanguageServerId,
        diagnostics: DiagnosticSet,
        cx: &mut ModelContext<Self>,
    ) {
        let lamport_timestamp = self.text.lamport_clock.tick();
        let op = Operation::UpdateDiagnostics {
            server_id,
            diagnostics: diagnostics.iter().cloned().collect(),
            lamport_timestamp,
        };
        self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
        self.send_operation(op, cx);
    }

    fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
        if let Some(indent_sizes) = self.compute_autoindents() {
            let indent_sizes = cx.background_executor().spawn(indent_sizes);
            match cx
                .background_executor()
                .block_with_timeout(Duration::from_micros(500), indent_sizes)
            {
                Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
                Err(indent_sizes) => {
                    self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
                        let indent_sizes = indent_sizes.await;
                        this.update(&mut cx, |this, cx| {
                            this.apply_autoindents(indent_sizes, cx);
                        })
                        .ok();
                    }));
                }
            }
        } else {
            self.autoindent_requests.clear();
        }
    }

    fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
        let max_rows_between_yields = 100;
        let snapshot = self.snapshot();
        if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
            return None;
        }

        let autoindent_requests = self.autoindent_requests.clone();
        Some(async move {
            let mut indent_sizes = BTreeMap::new();
            for request in autoindent_requests {
                // Resolve each edited range to its row in the current buffer and in the
                // buffer before this batch of edits.
                let mut row_ranges = Vec::new();
                let mut old_to_new_rows = BTreeMap::new();
                let mut language_indent_sizes_by_new_row = Vec::new();
                for entry in &request.entries {
                    let position = entry.range.start;
                    let new_row = position.to_point(&snapshot).row;
                    let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
                    language_indent_sizes_by_new_row.push((new_row, entry.indent_size));

                    if !entry.first_line_is_new {
                        let old_row = position.to_point(&request.before_edit).row;
                        old_to_new_rows.insert(old_row, new_row);
                    }
                    row_ranges.push((new_row..new_end_row, entry.original_indent_column));
                }

                // Build a map containing the suggested indentation for each of the edited lines
                // with respect to the state of the buffer before these edits. This map is keyed
                // by the rows for these lines in the current state of the buffer.
                let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
                let old_edited_ranges =
                    contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
                let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
                let mut language_indent_size = IndentSize::default();
                for old_edited_range in old_edited_ranges {
                    let suggestions = request
                        .before_edit
                        .suggest_autoindents(old_edited_range.clone())
                        .into_iter()
                        .flatten();
                    for (old_row, suggestion) in old_edited_range.zip(suggestions) {
                        if let Some(suggestion) = suggestion {
                            let new_row = *old_to_new_rows.get(&old_row).unwrap();

                            // Find the indent size based on the language for this row.
                            while let Some((row, size)) = language_indent_sizes.peek() {
                                if *row > new_row {
                                    break;
                                }
                                language_indent_size = *size;
                                language_indent_sizes.next();
                            }

                            let suggested_indent = old_to_new_rows
                                .get(&suggestion.basis_row)
                                .and_then(|from_row| {
                                    Some(old_suggestions.get(from_row).copied()?.0)
                                })
                                .unwrap_or_else(|| {
                                    request
                                        .before_edit
                                        .indent_size_for_line(suggestion.basis_row)
                                })
                                .with_delta(suggestion.delta, language_indent_size);
                            old_suggestions
                                .insert(new_row, (suggested_indent, suggestion.within_error));
                        }
                    }
                    yield_now().await;
                }

                // In block mode, only compute indentation suggestions for the first line
                // of each insertion. Otherwise, compute suggestions for every inserted line.
                let new_edited_row_ranges = contiguous_ranges(
                    row_ranges.iter().flat_map(|(range, _)| {
                        if request.is_block_mode {
                            range.start..range.start + 1
                        } else {
                            range.clone()
                        }
                    }),
                    max_rows_between_yields,
                );

                // Compute new suggestions for each line, but only include them in the result
                // if they differ from the old suggestion for that line.
                let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
                let mut language_indent_size = IndentSize::default();
                for new_edited_row_range in new_edited_row_ranges {
                    let suggestions = snapshot
                        .suggest_autoindents(new_edited_row_range.clone())
                        .into_iter()
                        .flatten();
                    for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
                        if let Some(suggestion) = suggestion {
                            // Find the indent size based on the language for this row.
                            while let Some((row, size)) = language_indent_sizes.peek() {
                                if *row > new_row {
                                    break;
                                }
                                language_indent_size = *size;
                                language_indent_sizes.next();
                            }

                            let suggested_indent = indent_sizes
                                .get(&suggestion.basis_row)
                                .copied()
                                .unwrap_or_else(|| {
                                    snapshot.indent_size_for_line(suggestion.basis_row)
                                })
                                .with_delta(suggestion.delta, language_indent_size);
                            if old_suggestions.get(&new_row).map_or(
                                true,
                                |(old_indentation, was_within_error)| {
                                    suggested_indent != *old_indentation
                                        && (!suggestion.within_error || *was_within_error)
                                },
                            ) {
                                indent_sizes.insert(new_row, suggested_indent);
                            }
                        }
                    }
                    yield_now().await;
                }

                // For each block of inserted text, adjust the indentation of the remaining
                // lines of the block by the same amount as the first line was adjusted.
                if request.is_block_mode {
                    for (row_range, original_indent_column) in
                        row_ranges
                            .into_iter()
                            .filter_map(|(range, original_indent_column)| {
                                if range.len() > 1 {
                                    Some((range, original_indent_column?))
                                } else {
                                    None
                                }
                            })
                    {
                        let new_indent = indent_sizes
                            .get(&row_range.start)
                            .copied()
                            .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
                        let delta = new_indent.len as i64 - original_indent_column as i64;
                        if delta != 0 {
                            for row in row_range.skip(1) {
                                indent_sizes.entry(row).or_insert_with(|| {
                                    let mut size = snapshot.indent_size_for_line(row);
                                    if size.kind == new_indent.kind {
                                        match delta.cmp(&0) {
                                            Ordering::Greater => size.len += delta as u32,
                                            Ordering::Less => {
                                                size.len = size.len.saturating_sub(-delta as u32)
                                            }
                                            Ordering::Equal => {}
                                        }
                                    }
                                    size
                                });
                            }
                        }
                    }
                }
            }

            indent_sizes
        })
    }

    fn apply_autoindents(
        &mut self,
        indent_sizes: BTreeMap<u32, IndentSize>,
        cx: &mut ModelContext<Self>,
    ) {
        self.autoindent_requests.clear();

        let edits: Vec<_> = indent_sizes
            .into_iter()
            .filter_map(|(row, indent_size)| {
                let current_size = indent_size_for_line(self, row);
                Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
            })
            .collect();

        let preserve_preview = self.preserve_preview();
        self.edit(edits, None, cx);
        if preserve_preview {
            self.refresh_preview();
        }
    }

    /// Create a minimal edit that will cause the given row to be indented
    /// with the given size. After applying this edit, the length of the line
    /// will always be at least `new_size.len`.
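    ///
    /// An illustrative sketch (not a doctest): growing a 2-space indent to
    /// 6 spaces on row 3 produces an insertion of four spaces at the start of
    /// that row:
    ///
    /// ```ignore
    /// let current = IndentSize { len: 2, kind: IndentKind::Space };
    /// let desired = IndentSize { len: 6, kind: IndentKind::Space };
    /// let edit = Buffer::edit_for_indent_size_adjustment(3, current, desired);
    /// // => Some((Point::new(3, 0)..Point::new(3, 0), "    ".to_string()))
    /// ```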
    pub fn edit_for_indent_size_adjustment(
        row: u32,
        current_size: IndentSize,
        new_size: IndentSize,
    ) -> Option<(Range<Point>, String)> {
        if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
                Ordering::Greater => {
                    let point = Point::new(row, 0);
                    Some((
                        point..point,
                        iter::repeat(new_size.char())
                            .take((new_size.len - current_size.len) as usize)
                            .collect::<String>(),
                    ))
                }

                Ordering::Less => Some((
                    Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
                    String::new(),
                )),

                Ordering::Equal => None,
            }
        } else {
            Some((
                Point::new(row, 0)..Point::new(row, current_size.len),
                iter::repeat(new_size.char())
                    .take(new_size.len as usize)
                    .collect::<String>(),
            ))
        }
    }

    /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
    /// and the given new text.
    pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
        let old_text = self.as_rope().clone();
        let base_version = self.version();
        cx.background_executor()
            .spawn_labeled(*BUFFER_DIFF_TASK, async move {
                let old_text = old_text.to_string();
                let line_ending = LineEnding::detect(&new_text);
                LineEnding::normalize(&mut new_text);

                let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
                let empty: Arc<str> = Arc::default();

                let mut edits = Vec::new();
                let mut old_offset = 0;
                let mut new_offset = 0;
                let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
                for change in diff.iter_all_changes().map(Some).chain([None]) {
                    if let Some(change) = &change {
                        let len = change.value().len();
                        match change.tag() {
                            ChangeTag::Equal => {
                                old_offset += len;
                                new_offset += len;
                            }
                            ChangeTag::Delete => {
                                let old_end_offset = old_offset + len;
                                if let Some((last_old_range, _)) = &mut last_edit {
                                    last_old_range.end = old_end_offset;
                                } else {
                                    last_edit =
                                        Some((old_offset..old_end_offset, new_offset..new_offset));
                                }
                                old_offset = old_end_offset;
                            }
                            ChangeTag::Insert => {
                                let new_end_offset = new_offset + len;
                                if let Some((_, last_new_range)) = &mut last_edit {
                                    last_new_range.end = new_end_offset;
                                } else {
                                    last_edit =
                                        Some((old_offset..old_offset, new_offset..new_end_offset));
                                }
                                new_offset = new_end_offset;
                            }
                        }
                    }

                    if let Some((old_range, new_range)) = &last_edit {
                        if old_offset > old_range.end
                            || new_offset > new_range.end
                            || change.is_none()
                        {
                            let text = if new_range.is_empty() {
                                empty.clone()
                            } else {
                                new_text[new_range.clone()].into()
                            };
                            edits.push((old_range.clone(), text));
                            last_edit.take();
                        }
                    }
                }

                Diff {
                    base_version,
                    line_ending,
                    edits,
                }
            })
    }

    /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
    pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
        let old_text = self.as_rope().clone();
        let line_ending = self.line_ending();
        let base_version = self.version();
        cx.background_executor().spawn(async move {
            let ranges = trailing_whitespace_ranges(&old_text);
            let empty = Arc::<str>::from("");
            Diff {
                base_version,
                line_ending,
                edits: ranges
                    .into_iter()
                    .map(|range| (range, empty.clone()))
                    .collect(),
            }
        })
    }

    /// Ensures that the buffer ends with a single newline character, and
    /// no other whitespace.
    pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
        let len = self.len();
        let mut offset = len;
        for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
            let non_whitespace_len = chunk
                .trim_end_matches(|c: char| c.is_ascii_whitespace())
                .len();
            offset -= chunk.len();
            offset += non_whitespace_len;
            if non_whitespace_len != 0 {
                if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
                    return;
                }
                break;
            }
        }
        self.edit([(offset..len, "\n")], None, cx);
    }

    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, then adjust the diff to account for those changes, and discard any
    /// parts of the diff that conflict with those changes.
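    ///
    /// An illustrative sketch (not a doctest) of the typical pairing with
    /// [`Buffer::diff`]; it assumes an async context with a model handle `buffer`:
    ///
    /// ```ignore
    /// let diff = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx)).await;
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.apply_diff(diff, cx);
    /// });
    /// ```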
    pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
        // Check for any edits to the buffer that have occurred since this diff
        // was computed.
        let snapshot = self.snapshot();
        let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
        let mut delta = 0;
        let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
            while let Some(edit_since) = edits_since.peek() {
                // If the edit occurs after a diff hunk, then it does not
                // affect that hunk.
                if edit_since.old.start > range.end {
                    break;
                }
                // If the edit precedes the diff hunk, then adjust the hunk
                // to reflect the edit.
                else if edit_since.old.end < range.start {
                    delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
                    edits_since.next();
                }
                // If the edit intersects a diff hunk, then discard that hunk.
                else {
                    return None;
                }
            }

            let start = (range.start as i64 + delta) as usize;
            let end = (range.end as i64 + delta) as usize;
            Some((start..end, new_text))
        });

        self.start_transaction();
        self.text.set_line_ending(diff.line_ending);
        self.edit(adjusted_edits, None, cx);
        self.end_transaction(cx)
    }

    fn has_unsaved_edits(&self) -> bool {
        let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();

        if last_version == self.version {
            self.has_unsaved_edits
                .set((last_version, has_unsaved_edits));
            return has_unsaved_edits;
        }

        let has_edits = self.has_edits_since(&self.saved_version);
        self.has_unsaved_edits
            .set((self.version.clone(), has_edits));
        has_edits
    }

    /// Checks if the buffer has unsaved changes.
    pub fn is_dirty(&self) -> bool {
        self.capability != Capability::ReadOnly
            && (self.has_conflict
                || self.has_unsaved_edits()
                || self
                    .file
                    .as_ref()
                    .map_or(false, |file| file.is_deleted() || !file.is_created()))
    }

    /// Checks if the buffer and its file have both changed since the buffer
    /// was last saved or reloaded.
    pub fn has_conflict(&self) -> bool {
        self.has_conflict
            || self.file.as_ref().map_or(false, |file| {
                file.mtime() > self.saved_mtime && self.has_unsaved_edits()
            })
    }

    /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
    pub fn subscribe(&mut self) -> Subscription {
        self.text.subscribe()
    }

    /// Starts a transaction, if one is not already in-progress. When undoing or
    /// redoing edits, all of the edits performed within a transaction are undone
    /// or redone together.
    pub fn start_transaction(&mut self) -> Option<TransactionId> {
        self.start_transaction_at(Instant::now())
    }

    /// Starts a transaction, providing the current time. Subsequent transactions
    /// that occur within a short period of time will be grouped together. This
    /// is controlled by the buffer's undo grouping duration.
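    ///
    /// An illustrative sketch (not a doctest): edits made between the start and
    /// end of a transaction are undone together. It assumes an `undo` method on
    /// the buffer, which is not shown in this excerpt:
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "a")], None, cx);
    /// buffer.edit([(1..1, "b")], None, cx);
    /// buffer.end_transaction(cx);
    /// buffer.undo(cx); // reverts both edits at once
    /// ```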
    pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
        self.transaction_depth += 1;
        if self.was_dirty_before_starting_transaction.is_none() {
            self.was_dirty_before_starting_transaction = Some(self.is_dirty());
        }
        self.text.start_transaction_at(now)
    }

    /// Terminates the current transaction, if this is the outermost transaction.
    pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
        self.end_transaction_at(Instant::now(), cx)
    }

    /// Terminates the current transaction, providing the current time. Subsequent transactions
    /// that occur within a short period of time will be grouped together. This
    /// is controlled by the buffer's undo grouping duration.
    pub fn end_transaction_at(
        &mut self,
        now: Instant,
        cx: &mut ModelContext<Self>,
    ) -> Option<TransactionId> {
        assert!(self.transaction_depth > 0);
        self.transaction_depth -= 1;
        let was_dirty = if self.transaction_depth == 0 {
            self.was_dirty_before_starting_transaction.take().unwrap()
        } else {
            false
        };
        if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
            self.did_edit(&start_version, was_dirty, cx);
            Some(transaction_id)
        } else {
            None
        }
    }

    /// Manually add a transaction to the buffer's undo history.
    pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
        self.text.push_transaction(transaction, now);
    }

    /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
1673 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1674 self.text.finalize_last_transaction()
1675 }
1676
1677 /// Manually group all changes since a given transaction.
1678 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1679 self.text.group_until_transaction(transaction_id);
1680 }
1681
1682 /// Manually remove a transaction from the buffer's undo history
1683 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1684 self.text.forget_transaction(transaction_id);
1685 }
1686
1687 /// Manually merge two adjacent transactions in the buffer's undo history.
1688 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1689 self.text.merge_transactions(transaction, destination);
1690 }
1691
1692 /// Waits for the buffer to receive operations with the given timestamps.
1693 pub fn wait_for_edits(
1694 &mut self,
1695 edit_ids: impl IntoIterator<Item = clock::Lamport>,
1696 ) -> impl Future<Output = Result<()>> {
1697 self.text.wait_for_edits(edit_ids)
1698 }
1699
1700 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
1701 pub fn wait_for_anchors(
1702 &mut self,
1703 anchors: impl IntoIterator<Item = Anchor>,
1704 ) -> impl 'static + Future<Output = Result<()>> {
1705 self.text.wait_for_anchors(anchors)
1706 }
1707
1708 /// Waits for the buffer to receive operations up to the given version.
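    ///
    /// Illustrative sketch (not from the original documentation), where `remote_version`
    /// is a version vector received from a collaborator:
    ///
    /// ```ignore
    /// buffer.wait_for_version(remote_version).await?;
    /// ```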
1709 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1710 self.text.wait_for_version(version)
1711 }
1712
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
1715 pub fn give_up_waiting(&mut self) {
1716 self.text.give_up_waiting();
1717 }
1718
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
1720 pub fn set_active_selections(
1721 &mut self,
1722 selections: Arc<[Selection<Anchor>]>,
1723 line_mode: bool,
1724 cursor_shape: CursorShape,
1725 cx: &mut ModelContext<Self>,
1726 ) {
1727 let lamport_timestamp = self.text.lamport_clock.tick();
1728 self.remote_selections.insert(
1729 self.text.replica_id(),
1730 SelectionSet {
1731 selections: selections.clone(),
1732 lamport_timestamp,
1733 line_mode,
1734 cursor_shape,
1735 },
1736 );
1737 self.send_operation(
1738 Operation::UpdateSelections {
1739 selections,
1740 line_mode,
1741 lamport_timestamp,
1742 cursor_shape,
1743 },
1744 cx,
1745 );
1746 self.non_text_state_update_count += 1;
1747 cx.notify();
1748 }
1749
1750 /// Clears the selections, so that other replicas of the buffer do not see any selections for
1751 /// this replica.
1752 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1753 if self
1754 .remote_selections
1755 .get(&self.text.replica_id())
1756 .map_or(true, |set| !set.selections.is_empty())
1757 {
1758 self.set_active_selections(Arc::default(), false, Default::default(), cx);
1759 }
1760 }
1761
1762 /// Replaces the buffer's entire text.
1763 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Lamport>
1764 where
1765 T: Into<Arc<str>>,
1766 {
1767 self.autoindent_requests.clear();
1768 self.edit([(0..self.len(), text)], None, cx)
1769 }
1770
1771 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
1772 /// delete, and a string of text to insert at that location.
1773 ///
1774 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
1775 /// request for the edited ranges, which will be processed when the buffer finishes
1776 /// parsing.
1777 ///
1778 /// Parsing takes place at the end of a transaction, and may compute synchronously
1779 /// or asynchronously, depending on the changes.
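    ///
    /// Illustrative sketch (not from the original documentation); the offsets and text are
    /// arbitrary, and `cx` is the buffer's `ModelContext`:
    ///
    /// ```ignore
    /// // Replace the first three bytes and insert a new line at offset 10, auto-indenting
    /// // each edited line according to the buffer's language.
    /// buffer.edit(
    ///     [(0..3, "let"), (10..10, "\nbody();")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```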
1780 pub fn edit<I, S, T>(
1781 &mut self,
1782 edits_iter: I,
1783 autoindent_mode: Option<AutoindentMode>,
1784 cx: &mut ModelContext<Self>,
1785 ) -> Option<clock::Lamport>
1786 where
1787 I: IntoIterator<Item = (Range<S>, T)>,
1788 S: ToOffset,
1789 T: Into<Arc<str>>,
1790 {
1791 // Skip invalid edits and coalesce contiguous ones.
1792 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1793 for (range, new_text) in edits_iter {
1794 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1795 if range.start > range.end {
1796 mem::swap(&mut range.start, &mut range.end);
1797 }
1798 let new_text = new_text.into();
1799 if !new_text.is_empty() || !range.is_empty() {
1800 if let Some((prev_range, prev_text)) = edits.last_mut() {
1801 if prev_range.end >= range.start {
1802 prev_range.end = cmp::max(prev_range.end, range.end);
1803 *prev_text = format!("{prev_text}{new_text}").into();
1804 } else {
1805 edits.push((range, new_text));
1806 }
1807 } else {
1808 edits.push((range, new_text));
1809 }
1810 }
1811 }
1812 if edits.is_empty() {
1813 return None;
1814 }
1815
1816 self.start_transaction();
1817 self.pending_autoindent.take();
1818 let autoindent_request = autoindent_mode
1819 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1820
1821 let edit_operation = self.text.edit(edits.iter().cloned());
1822 let edit_id = edit_operation.timestamp();
1823
1824 if let Some((before_edit, mode)) = autoindent_request {
1825 let mut delta = 0isize;
1826 let entries = edits
1827 .into_iter()
1828 .enumerate()
1829 .zip(&edit_operation.as_edit().unwrap().new_text)
1830 .map(|((ix, (range, _)), new_text)| {
1831 let new_text_length = new_text.len();
1832 let old_start = range.start.to_point(&before_edit);
1833 let new_start = (delta + range.start as isize) as usize;
1834 delta += new_text_length as isize - (range.end as isize - range.start as isize);
1835
1836 let mut range_of_insertion_to_indent = 0..new_text_length;
1837 let mut first_line_is_new = false;
1838 let mut original_indent_column = None;
1839
1840 // When inserting an entire line at the beginning of an existing line,
1841 // treat the insertion as new.
1842 if new_text.contains('\n')
1843 && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
1844 {
1845 first_line_is_new = true;
1846 }
1847
1848 // When inserting text starting with a newline, avoid auto-indenting the
1849 // previous line.
1850 if new_text.starts_with('\n') {
1851 range_of_insertion_to_indent.start += 1;
1852 first_line_is_new = true;
1853 }
1854
1855 // Avoid auto-indenting after the insertion.
1856 if let AutoindentMode::Block {
1857 original_indent_columns,
1858 } = &mode
1859 {
1860 original_indent_column =
1861 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
1862 indent_size_for_text(
1863 new_text[range_of_insertion_to_indent.clone()].chars(),
1864 )
1865 .len
1866 }));
1867 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
1868 range_of_insertion_to_indent.end -= 1;
1869 }
1870 }
1871
1872 AutoindentRequestEntry {
1873 first_line_is_new,
1874 original_indent_column,
1875 indent_size: before_edit.language_indent_size_at(range.start, cx),
1876 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
1877 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
1878 }
1879 })
1880 .collect();
1881
1882 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1883 before_edit,
1884 entries,
1885 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
1886 }));
1887 }
1888
1889 self.end_transaction(cx);
1890 self.send_operation(Operation::Buffer(edit_operation), cx);
1891 Some(edit_id)
1892 }
1893
1894 fn did_edit(
1895 &mut self,
1896 old_version: &clock::Global,
1897 was_dirty: bool,
1898 cx: &mut ModelContext<Self>,
1899 ) {
1900 if self.edits_since::<usize>(old_version).next().is_none() {
1901 return;
1902 }
1903
1904 self.reparse(cx);
1905
1906 cx.emit(BufferEvent::Edited);
1907 if was_dirty != self.is_dirty() {
1908 cx.emit(BufferEvent::DirtyChanged);
1909 }
1910 cx.notify();
1911 }
1912
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
1915 pub fn insert_empty_line(
1916 &mut self,
1917 position: impl ToPoint,
1918 space_above: bool,
1919 space_below: bool,
1920 cx: &mut ModelContext<Self>,
1921 ) -> Point {
1922 let mut position = position.to_point(self);
1923
1924 self.start_transaction();
1925
1926 self.edit(
1927 [(position..position, "\n")],
1928 Some(AutoindentMode::EachLine),
1929 cx,
1930 );
1931
1932 if position.column > 0 {
1933 position += Point::new(1, 0);
1934 }
1935
1936 if !self.is_line_blank(position.row) {
1937 self.edit(
1938 [(position..position, "\n")],
1939 Some(AutoindentMode::EachLine),
1940 cx,
1941 );
1942 }
1943
1944 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
1945 self.edit(
1946 [(position..position, "\n")],
1947 Some(AutoindentMode::EachLine),
1948 cx,
1949 );
1950 position.row += 1;
1951 }
1952
1953 if space_below
1954 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
1955 {
1956 self.edit(
1957 [(position..position, "\n")],
1958 Some(AutoindentMode::EachLine),
1959 cx,
1960 );
1961 }
1962
1963 self.end_transaction(cx);
1964
1965 position
1966 }
1967
1968 /// Applies the given remote operations to the buffer.
1969 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1970 &mut self,
1971 ops: I,
1972 cx: &mut ModelContext<Self>,
1973 ) -> Result<()> {
1974 self.pending_autoindent.take();
1975 let was_dirty = self.is_dirty();
1976 let old_version = self.version.clone();
1977 let mut deferred_ops = Vec::new();
1978 let buffer_ops = ops
1979 .into_iter()
1980 .filter_map(|op| match op {
1981 Operation::Buffer(op) => Some(op),
1982 _ => {
1983 if self.can_apply_op(&op) {
1984 self.apply_op(op, cx);
1985 } else {
1986 deferred_ops.push(op);
1987 }
1988 None
1989 }
1990 })
1991 .collect::<Vec<_>>();
1992 self.text.apply_ops(buffer_ops)?;
1993 self.deferred_ops.insert(deferred_ops);
1994 self.flush_deferred_ops(cx);
1995 self.did_edit(&old_version, was_dirty, cx);
1996 // Notify independently of whether the buffer was edited as the operations could include a
1997 // selection update.
1998 cx.notify();
1999 Ok(())
2000 }
2001
2002 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
2003 let mut deferred_ops = Vec::new();
2004 for op in self.deferred_ops.drain().iter().cloned() {
2005 if self.can_apply_op(&op) {
2006 self.apply_op(op, cx);
2007 } else {
2008 deferred_ops.push(op);
2009 }
2010 }
2011 self.deferred_ops.insert(deferred_ops);
2012 }
2013
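    /// Returns `true` if any remote operations are still deferred, waiting for the
    /// operations they depend on to arrive.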
2014 pub fn has_deferred_ops(&self) -> bool {
2015 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2016 }
2017
2018 fn can_apply_op(&self, operation: &Operation) -> bool {
2019 match operation {
2020 Operation::Buffer(_) => {
2021 unreachable!("buffer operations should never be applied at this layer")
2022 }
2023 Operation::UpdateDiagnostics {
2024 diagnostics: diagnostic_set,
2025 ..
2026 } => diagnostic_set.iter().all(|diagnostic| {
2027 self.text.can_resolve(&diagnostic.range.start)
2028 && self.text.can_resolve(&diagnostic.range.end)
2029 }),
2030 Operation::UpdateSelections { selections, .. } => selections
2031 .iter()
2032 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2033 Operation::UpdateCompletionTriggers { .. } => true,
2034 }
2035 }
2036
2037 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
2038 match operation {
2039 Operation::Buffer(_) => {
2040 unreachable!("buffer operations should never be applied at this layer")
2041 }
2042 Operation::UpdateDiagnostics {
2043 server_id,
2044 diagnostics: diagnostic_set,
2045 lamport_timestamp,
2046 } => {
2047 let snapshot = self.snapshot();
2048 self.apply_diagnostic_update(
2049 server_id,
2050 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2051 lamport_timestamp,
2052 cx,
2053 );
2054 }
2055 Operation::UpdateSelections {
2056 selections,
2057 lamport_timestamp,
2058 line_mode,
2059 cursor_shape,
2060 } => {
2061 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2062 if set.lamport_timestamp > lamport_timestamp {
2063 return;
2064 }
2065 }
2066
2067 self.remote_selections.insert(
2068 lamport_timestamp.replica_id,
2069 SelectionSet {
2070 selections,
2071 lamport_timestamp,
2072 line_mode,
2073 cursor_shape,
2074 },
2075 );
2076 self.text.lamport_clock.observe(lamport_timestamp);
2077 self.non_text_state_update_count += 1;
2078 }
2079 Operation::UpdateCompletionTriggers {
2080 triggers,
2081 lamport_timestamp,
2082 } => {
2083 self.completion_triggers = triggers;
2084 self.text.lamport_clock.observe(lamport_timestamp);
2085 }
2086 }
2087 }
2088
2089 fn apply_diagnostic_update(
2090 &mut self,
2091 server_id: LanguageServerId,
2092 diagnostics: DiagnosticSet,
2093 lamport_timestamp: clock::Lamport,
2094 cx: &mut ModelContext<Self>,
2095 ) {
2096 if lamport_timestamp > self.diagnostics_timestamp {
2097 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2098 if diagnostics.is_empty() {
2099 if let Ok(ix) = ix {
2100 self.diagnostics.remove(ix);
2101 }
2102 } else {
2103 match ix {
2104 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2105 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2106 };
2107 }
2108 self.diagnostics_timestamp = lamport_timestamp;
2109 self.non_text_state_update_count += 1;
2110 self.text.lamport_clock.observe(lamport_timestamp);
2111 cx.notify();
2112 cx.emit(BufferEvent::DiagnosticsUpdated);
2113 }
2114 }
2115
2116 fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
2117 cx.emit(BufferEvent::Operation(operation));
2118 }
2119
2120 /// Removes the selections for a given peer.
2121 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
2122 self.remote_selections.remove(&replica_id);
2123 cx.notify();
2124 }
2125
2126 /// Undoes the most recent transaction.
2127 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2128 let was_dirty = self.is_dirty();
2129 let old_version = self.version.clone();
2130
2131 if let Some((transaction_id, operation)) = self.text.undo() {
2132 self.send_operation(Operation::Buffer(operation), cx);
2133 self.did_edit(&old_version, was_dirty, cx);
2134 Some(transaction_id)
2135 } else {
2136 None
2137 }
2138 }
2139
2140 /// Manually undoes a specific transaction in the buffer's undo history.
2141 pub fn undo_transaction(
2142 &mut self,
2143 transaction_id: TransactionId,
2144 cx: &mut ModelContext<Self>,
2145 ) -> bool {
2146 let was_dirty = self.is_dirty();
2147 let old_version = self.version.clone();
2148 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2149 self.send_operation(Operation::Buffer(operation), cx);
2150 self.did_edit(&old_version, was_dirty, cx);
2151 true
2152 } else {
2153 false
2154 }
2155 }
2156
2157 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2158 pub fn undo_to_transaction(
2159 &mut self,
2160 transaction_id: TransactionId,
2161 cx: &mut ModelContext<Self>,
2162 ) -> bool {
2163 let was_dirty = self.is_dirty();
2164 let old_version = self.version.clone();
2165
2166 let operations = self.text.undo_to_transaction(transaction_id);
2167 let undone = !operations.is_empty();
2168 for operation in operations {
2169 self.send_operation(Operation::Buffer(operation), cx);
2170 }
2171 if undone {
2172 self.did_edit(&old_version, was_dirty, cx)
2173 }
2174 undone
2175 }
2176
    /// Redoes the most recently undone transaction.
2178 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2179 let was_dirty = self.is_dirty();
2180 let old_version = self.version.clone();
2181
2182 if let Some((transaction_id, operation)) = self.text.redo() {
2183 self.send_operation(Operation::Buffer(operation), cx);
2184 self.did_edit(&old_version, was_dirty, cx);
2185 Some(transaction_id)
2186 } else {
2187 None
2188 }
2189 }
2190
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2192 pub fn redo_to_transaction(
2193 &mut self,
2194 transaction_id: TransactionId,
2195 cx: &mut ModelContext<Self>,
2196 ) -> bool {
2197 let was_dirty = self.is_dirty();
2198 let old_version = self.version.clone();
2199
2200 let operations = self.text.redo_to_transaction(transaction_id);
2201 let redone = !operations.is_empty();
2202 for operation in operations {
2203 self.send_operation(Operation::Buffer(operation), cx);
2204 }
2205 if redone {
2206 self.did_edit(&old_version, was_dirty, cx)
2207 }
2208 redone
2209 }
2210
2211 /// Override current completion triggers with the user-provided completion triggers.
2212 pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
2213 self.completion_triggers.clone_from(&triggers);
2214 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2215 self.send_operation(
2216 Operation::UpdateCompletionTriggers {
2217 triggers,
2218 lamport_timestamp: self.completion_triggers_timestamp,
2219 },
2220 cx,
2221 );
2222 cx.notify();
2223 }
2224
    /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2227 pub fn completion_triggers(&self) -> &[String] {
2228 &self.completion_triggers
2229 }
2230
2231 /// Call this directly after performing edits to prevent the preview tab
2232 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2233 /// to return false until there are additional edits.
2234 pub fn refresh_preview(&mut self) {
2235 self.preview_version = self.version.clone();
2236 }
2237
2238 /// Whether we should preserve the preview status of a tab containing this buffer.
2239 pub fn preserve_preview(&self) -> bool {
2240 !self.has_edits_since(&self.preview_version)
2241 }
2242}
2243
2244#[doc(hidden)]
2245#[cfg(any(test, feature = "test-support"))]
2246impl Buffer {
2247 pub fn edit_via_marked_text(
2248 &mut self,
2249 marked_string: &str,
2250 autoindent_mode: Option<AutoindentMode>,
2251 cx: &mut ModelContext<Self>,
2252 ) {
2253 let edits = self.edits_for_marked_text(marked_string);
2254 self.edit(edits, autoindent_mode, cx);
2255 }
2256
2257 pub fn set_group_interval(&mut self, group_interval: Duration) {
2258 self.text.set_group_interval(group_interval);
2259 }
2260
2261 pub fn randomly_edit<T>(
2262 &mut self,
2263 rng: &mut T,
2264 old_range_count: usize,
2265 cx: &mut ModelContext<Self>,
2266 ) where
2267 T: rand::Rng,
2268 {
2269 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2270 let mut last_end = None;
2271 for _ in 0..old_range_count {
2272 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2273 break;
2274 }
2275
2276 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2277 let mut range = self.random_byte_range(new_start, rng);
2278 if rng.gen_bool(0.2) {
2279 mem::swap(&mut range.start, &mut range.end);
2280 }
2281 last_end = Some(range.end);
2282
2283 let new_text_len = rng.gen_range(0..10);
2284 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2285
2286 edits.push((range, new_text));
2287 }
2288 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2289 self.edit(edits, None, cx);
2290 }
2291
2292 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
2293 let was_dirty = self.is_dirty();
2294 let old_version = self.version.clone();
2295
2296 let ops = self.text.randomly_undo_redo(rng);
2297 if !ops.is_empty() {
2298 for op in ops {
2299 self.send_operation(Operation::Buffer(op), cx);
2300 self.did_edit(&old_version, was_dirty, cx);
2301 }
2302 }
2303 }
2304}
2305
2306impl EventEmitter<BufferEvent> for Buffer {}
2307
2308impl Deref for Buffer {
2309 type Target = TextBuffer;
2310
2311 fn deref(&self) -> &Self::Target {
2312 &self.text
2313 }
2314}
2315
2316impl BufferSnapshot {
    /// Returns [`IndentSize`] for a given line that respects user settings and
    /// language preferences.
    pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
        indent_size_for_line(self, row)
    }

    /// Returns [`IndentSize`] for a given position that respects user settings
2322 /// and language preferences.
2323 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
2324 let settings = language_settings(self.language_at(position), self.file(), cx);
2325 if settings.hard_tabs {
2326 IndentSize::tab()
2327 } else {
2328 IndentSize::spaces(settings.tab_size.get())
2329 }
2330 }
2331
2332 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2333 /// is passed in as `single_indent_size`.
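    ///
    /// Illustrative sketch (not from the original documentation), assuming a four-space
    /// indent unit and rows 1 through 3:
    ///
    /// ```ignore
    /// let suggestions = snapshot.suggested_indents(1..4, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: indent to {} columns", indent.len);
    /// }
    /// ```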
2334 pub fn suggested_indents(
2335 &self,
2336 rows: impl Iterator<Item = u32>,
2337 single_indent_size: IndentSize,
2338 ) -> BTreeMap<u32, IndentSize> {
2339 let mut result = BTreeMap::new();
2340
2341 for row_range in contiguous_ranges(rows, 10) {
2342 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2343 Some(suggestions) => suggestions,
2344 _ => break,
2345 };
2346
2347 for (row, suggestion) in row_range.zip(suggestions) {
2348 let indent_size = if let Some(suggestion) = suggestion {
2349 result
2350 .get(&suggestion.basis_row)
2351 .copied()
2352 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2353 .with_delta(suggestion.delta, single_indent_size)
2354 } else {
2355 self.indent_size_for_line(row)
2356 };
2357
2358 result.insert(row, indent_size);
2359 }
2360 }
2361
2362 result
2363 }
2364
2365 fn suggest_autoindents(
2366 &self,
2367 row_range: Range<u32>,
2368 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2369 let config = &self.language.as_ref()?.config;
2370 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2371
2372 // Find the suggested indentation ranges based on the syntax tree.
2373 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2374 let end = Point::new(row_range.end, 0);
2375 let range = (start..end).to_offset(&self.text);
2376 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2377 Some(&grammar.indents_config.as_ref()?.query)
2378 });
2379 let indent_configs = matches
2380 .grammars()
2381 .iter()
2382 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2383 .collect::<Vec<_>>();
2384
2385 let mut indent_ranges = Vec::<Range<Point>>::new();
2386 let mut outdent_positions = Vec::<Point>::new();
2387 while let Some(mat) = matches.peek() {
2388 let mut start: Option<Point> = None;
2389 let mut end: Option<Point> = None;
2390
2391 let config = &indent_configs[mat.grammar_index];
2392 for capture in mat.captures {
2393 if capture.index == config.indent_capture_ix {
2394 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2395 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2396 } else if Some(capture.index) == config.start_capture_ix {
2397 start = Some(Point::from_ts_point(capture.node.end_position()));
2398 } else if Some(capture.index) == config.end_capture_ix {
2399 end = Some(Point::from_ts_point(capture.node.start_position()));
2400 } else if Some(capture.index) == config.outdent_capture_ix {
2401 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2402 }
2403 }
2404
2405 matches.advance();
2406 if let Some((start, end)) = start.zip(end) {
2407 if start.row == end.row {
2408 continue;
2409 }
2410
2411 let range = start..end;
2412 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2413 Err(ix) => indent_ranges.insert(ix, range),
2414 Ok(ix) => {
2415 let prev_range = &mut indent_ranges[ix];
2416 prev_range.end = prev_range.end.max(range.end);
2417 }
2418 }
2419 }
2420 }
2421
2422 let mut error_ranges = Vec::<Range<Point>>::new();
2423 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2424 Some(&grammar.error_query)
2425 });
2426 while let Some(mat) = matches.peek() {
2427 let node = mat.captures[0].node;
2428 let start = Point::from_ts_point(node.start_position());
2429 let end = Point::from_ts_point(node.end_position());
2430 let range = start..end;
2431 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2432 Ok(ix) | Err(ix) => ix,
2433 };
2434 let mut end_ix = ix;
2435 while let Some(existing_range) = error_ranges.get(end_ix) {
2436 if existing_range.end < end {
2437 end_ix += 1;
2438 } else {
2439 break;
2440 }
2441 }
2442 error_ranges.splice(ix..end_ix, [range]);
2443 matches.advance();
2444 }
2445
2446 outdent_positions.sort();
2447 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position and set
            // its end to the outdent position.
2450 if let Some(range_to_truncate) = indent_ranges
2451 .iter_mut()
2452 .filter(|indent_range| indent_range.contains(&outdent_position))
2453 .last()
2454 {
2455 range_to_truncate.end = outdent_position;
2456 }
2457 }
2458
        // Find the suggested indentation increases and decreases based on regexes.
2460 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2461 self.for_each_line(
2462 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2463 ..Point::new(row_range.end, 0),
2464 |row, line| {
2465 if config
2466 .decrease_indent_pattern
2467 .as_ref()
2468 .map_or(false, |regex| regex.is_match(line))
2469 {
2470 indent_change_rows.push((row, Ordering::Less));
2471 }
2472 if config
2473 .increase_indent_pattern
2474 .as_ref()
2475 .map_or(false, |regex| regex.is_match(line))
2476 {
2477 indent_change_rows.push((row + 1, Ordering::Greater));
2478 }
2479 },
2480 );
2481
2482 let mut indent_changes = indent_change_rows.into_iter().peekable();
2483 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2484 prev_non_blank_row.unwrap_or(0)
2485 } else {
2486 row_range.start.saturating_sub(1)
2487 };
2488 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2489 Some(row_range.map(move |row| {
2490 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2491
2492 let mut indent_from_prev_row = false;
2493 let mut outdent_from_prev_row = false;
2494 let mut outdent_to_row = u32::MAX;
2495
2496 while let Some((indent_row, delta)) = indent_changes.peek() {
2497 match indent_row.cmp(&row) {
2498 Ordering::Equal => match delta {
2499 Ordering::Less => outdent_from_prev_row = true,
2500 Ordering::Greater => indent_from_prev_row = true,
2501 _ => {}
2502 },
2503
2504 Ordering::Greater => break,
2505 Ordering::Less => {}
2506 }
2507
2508 indent_changes.next();
2509 }
2510
2511 for range in &indent_ranges {
2512 if range.start.row >= row {
2513 break;
2514 }
2515 if range.start.row == prev_row && range.end > row_start {
2516 indent_from_prev_row = true;
2517 }
2518 if range.end > prev_row_start && range.end <= row_start {
2519 outdent_to_row = outdent_to_row.min(range.start.row);
2520 }
2521 }
2522
2523 let within_error = error_ranges
2524 .iter()
2525 .any(|e| e.start.row < row && e.end > row_start);
2526
2527 let suggestion = if outdent_to_row == prev_row
2528 || (outdent_from_prev_row && indent_from_prev_row)
2529 {
2530 Some(IndentSuggestion {
2531 basis_row: prev_row,
2532 delta: Ordering::Equal,
2533 within_error,
2534 })
2535 } else if indent_from_prev_row {
2536 Some(IndentSuggestion {
2537 basis_row: prev_row,
2538 delta: Ordering::Greater,
2539 within_error,
2540 })
2541 } else if outdent_to_row < prev_row {
2542 Some(IndentSuggestion {
2543 basis_row: outdent_to_row,
2544 delta: Ordering::Equal,
2545 within_error,
2546 })
2547 } else if outdent_from_prev_row {
2548 Some(IndentSuggestion {
2549 basis_row: prev_row,
2550 delta: Ordering::Less,
2551 within_error,
2552 })
2553 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2554 {
2555 Some(IndentSuggestion {
2556 basis_row: prev_row,
2557 delta: Ordering::Equal,
2558 within_error,
2559 })
2560 } else {
2561 None
2562 };
2563
2564 prev_row = row;
2565 prev_row_start = row_start;
2566 suggestion
2567 }))
2568 }
2569
2570 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2571 while row > 0 {
2572 row -= 1;
2573 if !self.is_line_blank(row) {
2574 return Some(row);
2575 }
2576 }
2577 None
2578 }
2579
2580 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
2581 let captures = self.syntax.captures(range, &self.text, |grammar| {
2582 grammar.highlights_query.as_ref()
2583 });
2584 let highlight_maps = captures
2585 .grammars()
2586 .iter()
2587 .map(|grammar| grammar.highlight_map())
2588 .collect();
2589 (captures, highlight_maps)
    }

    /// Iterates over chunks of text in the given range of the buffer. Text is chunked
2592 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
2593 /// returned in chunks where each chunk has a single syntax highlighting style and
2594 /// diagnostic status.
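    ///
    /// Illustrative sketch (not from the original documentation): collect the text of a
    /// range along with its syntax highlighting.
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..100, true) {
    ///     // `chunk.syntax_highlight_id` can be resolved to a style via a `SyntaxTheme`.
    ///     text.push_str(chunk.text);
    /// }
    /// ```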
2595 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2596 let range = range.start.to_offset(self)..range.end.to_offset(self);
2597
2598 let mut syntax = None;
2599 if language_aware {
2600 syntax = Some(self.get_highlights(range.clone()));
2601 }
2602 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
2603 let diagnostics = language_aware;
2604 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
2605 }
2606
2607 /// Invokes the given callback for each line of text in the given range of the buffer.
2608 /// Uses callback to avoid allocating a string for each line.
2609 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2610 let mut line = String::new();
2611 let mut row = range.start.row;
2612 for chunk in self
2613 .as_rope()
2614 .chunks_in_range(range.to_offset(self))
2615 .chain(["\n"])
2616 {
2617 for (newline_ix, text) in chunk.split('\n').enumerate() {
2618 if newline_ix > 0 {
2619 callback(row, &line);
2620 row += 1;
2621 line.clear();
2622 }
2623 line.push_str(text);
2624 }
2625 }
2626 }
2627
2628 /// Iterates over every [`SyntaxLayer`] in the buffer.
2629 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
2630 self.syntax
2631 .layers_for_range(0..self.len(), &self.text, true)
2632 }
2633
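    /// Returns the [`SyntaxLayer`] at the given position, if any. When multiple layers
    /// contain the position, the last layer reported by the syntax map is returned.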
2634 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
2635 let offset = position.to_offset(self);
2636 self.syntax
2637 .layers_for_range(offset..offset, &self.text, false)
2638 .filter(|l| l.node().end_byte() > offset)
2639 .last()
2640 }
2641
2642 /// Returns the main [Language]
2643 pub fn language(&self) -> Option<&Arc<Language>> {
2644 self.language.as_ref()
2645 }
2646
2647 /// Returns the [Language] at the given location.
2648 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2649 self.syntax_layer_at(position)
2650 .map(|info| info.language)
2651 .or(self.language.as_ref())
2652 }
2653
2654 /// Returns the settings for the language at the given location.
2655 pub fn settings_at<'a, D: ToOffset>(
2656 &self,
2657 position: D,
2658 cx: &'a AppContext,
2659 ) -> &'a LanguageSettings {
2660 language_settings(self.language_at(position), self.file.as_ref(), cx)
2661 }
2662
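    /// Returns a [`CharClassifier`] for the language scope at the given position.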
2663 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
2664 CharClassifier::new(self.language_scope_at(point))
2665 }
2666
2667 /// Returns the [LanguageScope] at the given location.
2668 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2669 let offset = position.to_offset(self);
2670 let mut scope = None;
2671 let mut smallest_range: Option<Range<usize>> = None;
2672
2673 // Use the layer that has the smallest node intersecting the given point.
2674 for layer in self
2675 .syntax
2676 .layers_for_range(offset..offset, &self.text, false)
2677 {
2678 let mut cursor = layer.node().walk();
2679
2680 let mut range = None;
2681 loop {
2682 let child_range = cursor.node().byte_range();
2683 if !child_range.to_inclusive().contains(&offset) {
2684 break;
2685 }
2686
2687 range = Some(child_range);
2688 if cursor.goto_first_child_for_byte(offset).is_none() {
2689 break;
2690 }
2691 }
2692
2693 if let Some(range) = range {
2694 if smallest_range
2695 .as_ref()
2696 .map_or(true, |smallest_range| range.len() < smallest_range.len())
2697 {
2698 smallest_range = Some(range);
2699 scope = Some(LanguageScope {
2700 language: layer.language.clone(),
2701 override_id: layer.override_id(offset, &self.text),
2702 });
2703 }
2704 }
2705 }
2706
2707 scope.or_else(|| {
2708 self.language.clone().map(|language| LanguageScope {
2709 language,
2710 override_id: None,
2711 })
2712 })
2713 }
2714
2715 /// Returns a tuple of the range and character kind of the word
2716 /// surrounding the given position.
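    ///
    /// Illustrative sketch (not from the original documentation), where `offset` is a
    /// cursor position in the buffer:
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(offset);
    /// let word: String = snapshot.text_for_range(range).collect();
    /// ```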
2717 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2718 let mut start = start.to_offset(self);
2719 let mut end = start;
2720 let mut next_chars = self.chars_at(start).peekable();
2721 let mut prev_chars = self.reversed_chars_at(start).peekable();
2722
2723 let classifier = self.char_classifier_at(start);
2724 let word_kind = cmp::max(
2725 prev_chars.peek().copied().map(|c| classifier.kind(c)),
2726 next_chars.peek().copied().map(|c| classifier.kind(c)),
2727 );
2728
2729 for ch in prev_chars {
2730 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
2731 start -= ch.len_utf8();
2732 } else {
2733 break;
2734 }
2735 }
2736
2737 for ch in next_chars {
2738 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
2739 end += ch.len_utf8();
2740 } else {
2741 break;
2742 }
2743 }
2744
2745 (start..end, word_kind)
2746 }
2747
    /// Returns the range for the closest syntax node enclosing the given range.
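    ///
    /// Illustrative sketch (not from the original documentation): grow a selection to the
    /// enclosing syntax node, as a "select larger syntax node" action might.
    ///
    /// ```ignore
    /// if let Some(ancestor) = snapshot.range_for_syntax_ancestor(selected_range.clone()) {
    ///     selected_range = ancestor;
    /// }
    /// ```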
2749 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2750 let range = range.start.to_offset(self)..range.end.to_offset(self);
2751 let mut result: Option<Range<usize>> = None;
2752 'outer: for layer in self
2753 .syntax
2754 .layers_for_range(range.clone(), &self.text, true)
2755 {
2756 let mut cursor = layer.node().walk();
2757
2758 // Descend to the first leaf that touches the start of the range,
2759 // and if the range is non-empty, extends beyond the start.
2760 while cursor.goto_first_child_for_byte(range.start).is_some() {
2761 if !range.is_empty() && cursor.node().end_byte() == range.start {
2762 cursor.goto_next_sibling();
2763 }
2764 }
2765
2766 // Ascend to the smallest ancestor that strictly contains the range.
2767 loop {
2768 let node_range = cursor.node().byte_range();
2769 if node_range.start <= range.start
2770 && node_range.end >= range.end
2771 && node_range.len() > range.len()
2772 {
2773 break;
2774 }
2775 if !cursor.goto_parent() {
2776 continue 'outer;
2777 }
2778 }
2779
2780 let left_node = cursor.node();
2781 let mut layer_result = left_node.byte_range();
2782
2783 // For an empty range, try to find another node immediately to the right of the range.
2784 if left_node.end_byte() == range.start {
2785 let mut right_node = None;
2786 while !cursor.goto_next_sibling() {
2787 if !cursor.goto_parent() {
2788 break;
2789 }
2790 }
2791
2792 while cursor.node().start_byte() == range.start {
2793 right_node = Some(cursor.node());
2794 if !cursor.goto_first_child() {
2795 break;
2796 }
2797 }
2798
2799 // If there is a candidate node on both sides of the (empty) range, then
2800 // decide between the two by favoring a named node over an anonymous token.
2801 // If both nodes are the same in that regard, favor the right one.
2802 if let Some(right_node) = right_node {
2803 if right_node.is_named() || !left_node.is_named() {
2804 layer_result = right_node.byte_range();
2805 }
2806 }
2807 }
2808
2809 if let Some(previous_result) = &result {
2810 if previous_result.len() < layer_result.len() {
2811 continue;
2812 }
2813 }
2814 result = Some(layer_result);
2815 }
2816
2817 result
2818 }
2819
2820 /// Returns the outline for the buffer.
2821 ///
2822 /// This method allows passing an optional [SyntaxTheme] to
2823 /// syntax-highlight the returned symbols.
2824 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2825 self.outline_items_containing(0..self.len(), true, theme)
2826 .map(Outline::new)
2827 }
2828
2829 /// Returns all the symbols that contain the given position.
2830 ///
2831 /// This method allows passing an optional [SyntaxTheme] to
2832 /// syntax-highlight the returned symbols.
2833 pub fn symbols_containing<T: ToOffset>(
2834 &self,
2835 position: T,
2836 theme: Option<&SyntaxTheme>,
2837 ) -> Option<Vec<OutlineItem<Anchor>>> {
2838 let position = position.to_offset(self);
2839 let mut items = self.outline_items_containing(
2840 position.saturating_sub(1)..self.len().min(position + 1),
2841 false,
2842 theme,
2843 )?;
2844 let mut prev_depth = None;
2845 items.retain(|item| {
2846 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
2847 prev_depth = Some(item.depth);
2848 result
2849 });
2850 Some(items)
2851 }
2852
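    /// Returns the outline items intersecting the given range, with their ranges converted
    /// to anchors and their depths computed from containment relationships.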
2853 pub fn outline_items_containing<T: ToOffset>(
2854 &self,
2855 range: Range<T>,
2856 include_extra_context: bool,
2857 theme: Option<&SyntaxTheme>,
2858 ) -> Option<Vec<OutlineItem<Anchor>>> {
2859 let range = range.to_offset(self);
2860 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2861 grammar.outline_config.as_ref().map(|c| &c.query)
2862 });
2863 let configs = matches
2864 .grammars()
2865 .iter()
2866 .map(|g| g.outline_config.as_ref().unwrap())
2867 .collect::<Vec<_>>();
2868
2869 let mut items = Vec::new();
2870 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
2871 while let Some(mat) = matches.peek() {
2872 let config = &configs[mat.grammar_index];
2873 if let Some(item) =
2874 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
2875 {
2876 items.push(item);
2877 } else if let Some(capture) = mat
2878 .captures
2879 .iter()
2880 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
2881 {
2882 let capture_range = capture.node.start_position()..capture.node.end_position();
2883 let mut capture_row_range =
2884 capture_range.start.row as u32..capture_range.end.row as u32;
2885 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
2886 {
2887 capture_row_range.end -= 1;
2888 }
2889 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
2890 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
2891 last_row_range.end = capture_row_range.end;
2892 } else {
2893 annotation_row_ranges.push(capture_row_range);
2894 }
2895 } else {
2896 annotation_row_ranges.push(capture_row_range);
2897 }
2898 }
2899 matches.advance();
2900 }
2901
2902 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
2903
2904 // Assign depths based on containment relationships and convert to anchors.
2905 let mut item_ends_stack = Vec::<Point>::new();
2906 let mut anchor_items = Vec::new();
2907 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
2908 for item in items {
2909 while let Some(last_end) = item_ends_stack.last().copied() {
2910 if last_end < item.range.end {
2911 item_ends_stack.pop();
2912 } else {
2913 break;
2914 }
2915 }
2916
2917 let mut annotation_row_range = None;
2918 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
2919 let row_preceding_item = item.range.start.row.saturating_sub(1);
2920 if next_annotation_row_range.end < row_preceding_item {
2921 annotation_row_ranges.next();
2922 } else {
2923 if next_annotation_row_range.end == row_preceding_item {
2924 annotation_row_range = Some(next_annotation_row_range.clone());
2925 annotation_row_ranges.next();
2926 }
2927 break;
2928 }
2929 }
2930
2931 anchor_items.push(OutlineItem {
2932 depth: item_ends_stack.len(),
2933 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
2934 text: item.text,
2935 highlight_ranges: item.highlight_ranges,
2936 name_ranges: item.name_ranges,
2937 body_range: item.body_range.map(|body_range| {
2938 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
2939 }),
2940 annotation_range: annotation_row_range.map(|annotation_range| {
2941 self.anchor_after(Point::new(annotation_range.start, 0))
2942 ..self.anchor_before(Point::new(
2943 annotation_range.end,
2944 self.line_len(annotation_range.end),
2945 ))
2946 }),
2947 });
2948 item_ends_stack.push(item.range.end);
2949 }
2950
2951 Some(anchor_items)
2952 }
2953
2954 fn next_outline_item(
2955 &self,
2956 config: &OutlineConfig,
2957 mat: &SyntaxMapMatch,
2958 range: &Range<usize>,
2959 include_extra_context: bool,
2960 theme: Option<&SyntaxTheme>,
2961 ) -> Option<OutlineItem<Point>> {
2962 let item_node = mat.captures.iter().find_map(|cap| {
2963 if cap.index == config.item_capture_ix {
2964 Some(cap.node)
2965 } else {
2966 None
2967 }
2968 })?;
2969
2970 let item_byte_range = item_node.byte_range();
2971 if item_byte_range.end < range.start || item_byte_range.start > range.end {
2972 return None;
2973 }
2974 let item_point_range = Point::from_ts_point(item_node.start_position())
2975 ..Point::from_ts_point(item_node.end_position());
2976
2977 let mut open_point = None;
2978 let mut close_point = None;
2979 let mut buffer_ranges = Vec::new();
2980 for capture in mat.captures {
2981 let node_is_name;
2982 if capture.index == config.name_capture_ix {
2983 node_is_name = true;
2984 } else if Some(capture.index) == config.context_capture_ix
2985 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
2986 {
2987 node_is_name = false;
2988 } else {
2989 if Some(capture.index) == config.open_capture_ix {
2990 open_point = Some(Point::from_ts_point(capture.node.end_position()));
2991 } else if Some(capture.index) == config.close_capture_ix {
2992 close_point = Some(Point::from_ts_point(capture.node.start_position()));
2993 }
2994
2995 continue;
2996 }
2997
2998 let mut range = capture.node.start_byte()..capture.node.end_byte();
2999 let start = capture.node.start_position();
3000 if capture.node.end_position().row > start.row {
3001 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3002 }
3003
3004 if !range.is_empty() {
3005 buffer_ranges.push((range, node_is_name));
3006 }
3007 }
3008 if buffer_ranges.is_empty() {
3009 return None;
3010 }
3011 let mut text = String::new();
3012 let mut highlight_ranges = Vec::new();
3013 let mut name_ranges = Vec::new();
3014 let mut chunks = self.chunks(
3015 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3016 true,
3017 );
3018 let mut last_buffer_range_end = 0;
3019 for (buffer_range, is_name) in buffer_ranges {
3020 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
3021 text.push(' ');
3022 }
3023 last_buffer_range_end = buffer_range.end;
3024 if is_name {
3025 let mut start = text.len();
3026 let end = start + buffer_range.len();
3027
                // When multiple names are captured, the matchable text includes
                // the whitespace in between the names.
3030 if !name_ranges.is_empty() {
3031 start -= 1;
3032 }
3033
3034 name_ranges.push(start..end);
3035 }
3036
3037 let mut offset = buffer_range.start;
3038 chunks.seek(buffer_range.clone());
3039 for mut chunk in chunks.by_ref() {
3040 if chunk.text.len() > buffer_range.end - offset {
3041 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3042 offset = buffer_range.end;
3043 } else {
3044 offset += chunk.text.len();
3045 }
3046 let style = chunk
3047 .syntax_highlight_id
3048 .zip(theme)
3049 .and_then(|(highlight, theme)| highlight.style(theme));
3050 if let Some(style) = style {
3051 let start = text.len();
3052 let end = start + chunk.text.len();
3053 highlight_ranges.push((start..end, style));
3054 }
3055 text.push_str(chunk.text);
3056 if offset >= buffer_range.end {
3057 break;
3058 }
3059 }
3060 }
3061
3062 Some(OutlineItem {
3063 depth: 0, // We'll calculate the depth later
3064 range: item_point_range,
3065 text,
3066 highlight_ranges,
3067 name_ranges,
3068 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3069 annotation_range: None,
3070 })
3071 }
3072
3073 /// For each grammar in the language, runs the provided
3074 /// [tree_sitter::Query] against the given range.
3075 pub fn matches(
3076 &self,
3077 range: Range<usize>,
3078 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3079 ) -> SyntaxMapMatches {
3080 self.syntax.matches(range, self, query)
3081 }
3082
3083 /// Returns bracket range pairs overlapping or adjacent to `range`
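    ///
    /// Illustrative sketch (not from the original documentation), where `offset` is a
    /// cursor position:
    ///
    /// ```ignore
    /// for (open, close) in snapshot.bracket_ranges(offset..offset) {
    ///     println!("open bracket at {open:?}, close bracket at {close:?}");
    /// }
    /// ```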
3084 pub fn bracket_ranges<T: ToOffset>(
3085 &self,
3086 range: Range<T>,
3087 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
3088 // Find bracket pairs that *inclusively* contain the given range.
3089 let range = range.start.to_offset(self).saturating_sub(1)
3090 ..self.len().min(range.end.to_offset(self) + 1);
3091
3092 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3093 grammar.brackets_config.as_ref().map(|c| &c.query)
3094 });
3095 let configs = matches
3096 .grammars()
3097 .iter()
3098 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3099 .collect::<Vec<_>>();
3100
3101 iter::from_fn(move || {
3102 while let Some(mat) = matches.peek() {
3103 let mut open = None;
3104 let mut close = None;
3105 let config = &configs[mat.grammar_index];
3106 for capture in mat.captures {
3107 if capture.index == config.open_capture_ix {
3108 open = Some(capture.node.byte_range());
3109 } else if capture.index == config.close_capture_ix {
3110 close = Some(capture.node.byte_range());
3111 }
3112 }
3113
3114 matches.advance();
3115
3116 let Some((open, close)) = open.zip(close) else {
3117 continue;
3118 };
3119
3120 let bracket_range = open.start..=close.end;
3121 if !bracket_range.overlaps(&range) {
3122 continue;
3123 }
3124
3125 return Some((open, close));
3126 }
3127 None
3128 })
3129 }
3130
3131 /// Returns enclosing bracket ranges containing the given range
3132 pub fn enclosing_bracket_ranges<T: ToOffset>(
3133 &self,
3134 range: Range<T>,
3135 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
3136 let range = range.start.to_offset(self)..range.end.to_offset(self);
3137
3138 self.bracket_ranges(range.clone())
3139 .filter(move |(open, close)| open.start <= range.start && close.end >= range.end)
3140 }
3141
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None`
    /// if no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
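    ///
    /// Illustrative sketch (not from the original documentation): find the innermost pair of
    /// brackets around a cursor, ignoring pairs with nothing between the brackets.
    ///
    /// ```ignore
    /// let filter = |open: Range<usize>, close: Range<usize>| close.start > open.end;
    /// let brackets = snapshot.innermost_enclosing_bracket_ranges(cursor..cursor, Some(&filter));
    /// ```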
3145 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3146 &self,
3147 range: Range<T>,
3148 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3149 ) -> Option<(Range<usize>, Range<usize>)> {
3150 let range = range.start.to_offset(self)..range.end.to_offset(self);
3151
3152 // Get the ranges of the innermost pair of brackets.
3153 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3154
3155 for (open, close) in self.enclosing_bracket_ranges(range.clone()) {
3156 if let Some(range_filter) = range_filter {
3157 if !range_filter(open.clone(), close.clone()) {
3158 continue;
3159 }
3160 }
3161
3162 let len = close.end - open.start;
3163
3164 if let Some((existing_open, existing_close)) = &result {
3165 let existing_len = existing_close.end - existing_open.start;
3166 if len > existing_len {
3167 continue;
3168 }
3169 }
3170
3171 result = Some((open, close));
3172 }
3173
3174 result
3175 }
3176
3177 /// Returns anchor ranges for any matches of the redaction query.
3178 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3179 /// will be run on the relevant section of the buffer.
3180 pub fn redacted_ranges<T: ToOffset>(
3181 &self,
3182 range: Range<T>,
3183 ) -> impl Iterator<Item = Range<usize>> + '_ {
3184 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3185 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3186 grammar
3187 .redactions_config
3188 .as_ref()
3189 .map(|config| &config.query)
3190 });
3191
3192 let configs = syntax_matches
3193 .grammars()
3194 .iter()
3195 .map(|grammar| grammar.redactions_config.as_ref())
3196 .collect::<Vec<_>>();
3197
3198 iter::from_fn(move || {
3199 let redacted_range = syntax_matches
3200 .peek()
3201 .and_then(|mat| {
3202 configs[mat.grammar_index].and_then(|config| {
3203 mat.captures
3204 .iter()
3205 .find(|capture| capture.index == config.redaction_capture_ix)
3206 })
3207 })
3208 .map(|mat| mat.node.byte_range());
3209 syntax_matches.advance();
3210 redacted_range
3211 })
3212 }
3213
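    /// Returns the ranges of language injections (for example, embedded code blocks) that
    /// intersect the given range, along with the injected [Language] for each.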
3214 pub fn injections_intersecting_range<T: ToOffset>(
3215 &self,
3216 range: Range<T>,
3217 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3218 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3219
3220 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3221 grammar
3222 .injection_config
3223 .as_ref()
3224 .map(|config| &config.query)
3225 });
3226
3227 let configs = syntax_matches
3228 .grammars()
3229 .iter()
3230 .map(|grammar| grammar.injection_config.as_ref())
3231 .collect::<Vec<_>>();
3232
3233 iter::from_fn(move || {
3234 let ranges = syntax_matches.peek().and_then(|mat| {
3235 let config = &configs[mat.grammar_index]?;
3236 let content_capture_range = mat.captures.iter().find_map(|capture| {
3237 if capture.index == config.content_capture_ix {
3238 Some(capture.node.byte_range())
3239 } else {
3240 None
3241 }
3242 })?;
3243 let language = self.language_at(content_capture_range.start)?;
3244 Some((content_capture_range, language))
3245 });
3246 syntax_matches.advance();
3247 ranges
3248 })
3249 }
3250
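    /// Returns the runnables whose captures intersect the given range, as determined by each
    /// grammar's runnable query.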
3251 pub fn runnable_ranges(
3252 &self,
3253 range: Range<Anchor>,
3254 ) -> impl Iterator<Item = RunnableRange> + '_ {
3255 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3256
3257 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3258 grammar.runnable_config.as_ref().map(|config| &config.query)
3259 });
3260
3261 let test_configs = syntax_matches
3262 .grammars()
3263 .iter()
3264 .map(|grammar| grammar.runnable_config.as_ref())
3265 .collect::<Vec<_>>();
3266
3267 iter::from_fn(move || loop {
3268 let mat = syntax_matches.peek()?;
3269
3270 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
3271 let mut run_range = None;
3272 let full_range = mat.captures.iter().fold(
3273 Range {
3274 start: usize::MAX,
3275 end: 0,
3276 },
3277 |mut acc, next| {
3278 let byte_range = next.node.byte_range();
3279 if acc.start > byte_range.start {
3280 acc.start = byte_range.start;
3281 }
3282 if acc.end < byte_range.end {
3283 acc.end = byte_range.end;
3284 }
3285 acc
3286 },
3287 );
3288 if full_range.start > full_range.end {
3289 // We did not find a full spanning range of this match.
3290 return None;
3291 }
3292 let extra_captures: SmallVec<[_; 1]> =
3293 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
3294 test_configs
3295 .extra_captures
3296 .get(capture.index as usize)
3297 .cloned()
3298 .and_then(|tag_name| match tag_name {
3299 RunnableCapture::Named(name) => {
3300 Some((capture.node.byte_range(), name))
3301 }
3302 RunnableCapture::Run => {
3303 let _ = run_range.insert(capture.node.byte_range());
3304 None
3305 }
3306 })
3307 }));
3308 let run_range = run_range?;
3309 let tags = test_configs
3310 .query
3311 .property_settings(mat.pattern_index)
3312 .iter()
3313 .filter_map(|property| {
3314 if *property.key == *"tag" {
3315 property
3316 .value
3317 .as_ref()
3318 .map(|value| RunnableTag(value.to_string().into()))
3319 } else {
3320 None
3321 }
3322 })
3323 .collect();
3324 let extra_captures = extra_captures
3325 .into_iter()
3326 .map(|(range, name)| {
3327 (
3328 name.to_string(),
3329 self.text_for_range(range.clone()).collect::<String>(),
3330 )
3331 })
3332 .collect();
3333 // All tags should have the same range.
3334 Some(RunnableRange {
3335 run_range,
3336 full_range,
3337 runnable: Runnable {
3338 tags,
3339 language: mat.language,
3340 buffer: self.remote_id(),
3341 },
3342 extra_captures,
3343 buffer_id: self.remote_id(),
3344 })
3345 });
3346
3347 syntax_matches.advance();
3348 if test_range.is_some() {
                // It's fine for us to short-circuit when `.peek()?` returns `None`, but we don't
                // want to return `None` from this iterator just because a capture did not contain
                // a run marker; in that case we loop around to the next capture.
3351 return test_range;
3352 }
3353 })
3354 }
3355
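    /// Computes the indent guides intersecting the given anchor range, based on each line's
    /// indentation and the language's tab size. Returns an empty `Vec` if indent guides are
    /// disabled in the language settings, unless `ignore_disabled_for_language` is set.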
3356 pub fn indent_guides_in_range(
3357 &self,
3358 range: Range<Anchor>,
3359 ignore_disabled_for_language: bool,
3360 cx: &AppContext,
3361 ) -> Vec<IndentGuide> {
3362 let language_settings = language_settings(self.language(), self.file.as_ref(), cx);
3363 let settings = language_settings.indent_guides;
3364 if !ignore_disabled_for_language && !settings.enabled {
3365 return Vec::new();
3366 }
3367 let tab_size = language_settings.tab_size.get() as u32;
3368
3369 let start_row = range.start.to_point(self).row;
3370 let end_row = range.end.to_point(self).row;
3371 let row_range = start_row..end_row + 1;
3372
3373 let mut row_indents = self.line_indents_in_row_range(row_range.clone());
3374
3375 let mut result_vec = Vec::new();
3376 let mut indent_stack = SmallVec::<[IndentGuide; 8]>::new();
3377
3378 while let Some((first_row, mut line_indent)) = row_indents.next() {
3379 let current_depth = indent_stack.len() as u32;
3380
            // When an empty line is encountered, continue until a useful line indent is found,
            // then add to the indent stack with the depth found.
3383 let mut found_indent = false;
3384 let mut last_row = first_row;
3385 if line_indent.is_line_empty() {
3386 let mut trailing_row = end_row;
3387 while !found_indent {
3388 let (target_row, new_line_indent) =
3389 if let Some(display_row) = row_indents.next() {
3390 display_row
3391 } else {
3392 // This means we reached the end of the given range and found empty lines at the end.
3393 // We need to traverse further until we find a non-empty line to know if we need to add
3394 // an indent guide for the last visible indent.
3395 trailing_row += 1;
3396
3397 const TRAILING_ROW_SEARCH_LIMIT: u32 = 25;
3398 if trailing_row > self.max_point().row
3399 || trailing_row > end_row + TRAILING_ROW_SEARCH_LIMIT
3400 {
3401 break;
3402 }
3403 let new_line_indent = self.line_indent_for_row(trailing_row);
3404 (trailing_row, new_line_indent)
3405 };
3406
3407 if new_line_indent.is_line_empty() {
3408 continue;
3409 }
3410 last_row = target_row.min(end_row);
3411 line_indent = new_line_indent;
3412 found_indent = true;
3413 break;
3414 }
3415 } else {
3416 found_indent = true
3417 }
3418
3419 let depth = if found_indent {
3420 line_indent.len(tab_size) / tab_size
3421 + ((line_indent.len(tab_size) % tab_size) > 0) as u32
3422 } else {
3423 current_depth
3424 };
3425
            match depth.cmp(&current_depth) {
3427 Ordering::Less => {
3428 for _ in 0..(current_depth - depth) {
3429 let mut indent = indent_stack.pop().unwrap();
3430 if last_row != first_row {
                            // In this case, we landed on an empty row, had to seek forward, and
                            // discovered that the indent we were on is ending. This means that
                            // the last display row must be on the line that ends this indent
                            // range, so we should display the range up to the first non-empty
                            // line.
3436 indent.end_row = first_row.saturating_sub(1);
3437 }
3438
3439 result_vec.push(indent)
3440 }
3441 }
3442 Ordering::Greater => {
3443 for next_depth in current_depth..depth {
3444 indent_stack.push(IndentGuide {
3445 buffer_id: self.remote_id(),
3446 start_row: first_row,
3447 end_row: last_row,
3448 depth: next_depth,
3449 tab_size,
3450 settings,
3451 });
3452 }
3453 }
3454 _ => {}
3455 }
3456
3457 for indent in indent_stack.iter_mut() {
3458 indent.end_row = last_row;
3459 }
3460 }
3461
3462 result_vec.extend(indent_stack);
3463
3464 result_vec
3465 }
3466
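    /// Finds the rows of the indented block enclosing `buffer_row`, along with the indentation
    /// of its bounding lines. The search is limited to a fixed number of rows in each direction
    /// and periodically yields to the executor, since it may scan many lines.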
3467 pub async fn enclosing_indent(
3468 &self,
3469 mut buffer_row: BufferRow,
3470 ) -> Option<(Range<BufferRow>, LineIndent)> {
3471 let max_row = self.max_point().row;
3472 if buffer_row >= max_row {
3473 return None;
3474 }
3475
3476 let mut target_indent = self.line_indent_for_row(buffer_row);
3477
3478 // If the current row is at the start of an indented block, we want to return this
3479 // block as the enclosing indent.
3480 if !target_indent.is_line_empty() && buffer_row < max_row {
3481 let next_line_indent = self.line_indent_for_row(buffer_row + 1);
3482 if !next_line_indent.is_line_empty()
3483 && target_indent.raw_len() < next_line_indent.raw_len()
3484 {
3485 target_indent = next_line_indent;
3486 buffer_row += 1;
3487 }
3488 }
3489
3490 const SEARCH_ROW_LIMIT: u32 = 25000;
3491 const SEARCH_WHITESPACE_ROW_LIMIT: u32 = 2500;
3492 const YIELD_INTERVAL: u32 = 100;
3493
3494 let mut accessed_row_counter = 0;
3495
        // If the current row is a blank line, search for the nearest non-empty lines above and below.
3497 if target_indent.is_line_empty() {
3498 let start = buffer_row.saturating_sub(SEARCH_WHITESPACE_ROW_LIMIT);
3499 let end = (max_row + 1).min(buffer_row + SEARCH_WHITESPACE_ROW_LIMIT);
3500
3501 let mut non_empty_line_above = None;
3502 for (row, indent) in self
3503 .text
3504 .reversed_line_indents_in_row_range(start..buffer_row)
3505 {
3506 accessed_row_counter += 1;
3507 if accessed_row_counter == YIELD_INTERVAL {
3508 accessed_row_counter = 0;
3509 yield_now().await;
3510 }
3511 if !indent.is_line_empty() {
3512 non_empty_line_above = Some((row, indent));
3513 break;
3514 }
3515 }
3516
3517 let mut non_empty_line_below = None;
3518 for (row, indent) in self.text.line_indents_in_row_range((buffer_row + 1)..end) {
3519 accessed_row_counter += 1;
3520 if accessed_row_counter == YIELD_INTERVAL {
3521 accessed_row_counter = 0;
3522 yield_now().await;
3523 }
3524 if !indent.is_line_empty() {
3525 non_empty_line_below = Some((row, indent));
3526 break;
3527 }
3528 }
3529
3530 let (row, indent) = match (non_empty_line_above, non_empty_line_below) {
3531 (Some((above_row, above_indent)), Some((below_row, below_indent))) => {
3532 if above_indent.raw_len() >= below_indent.raw_len() {
3533 (above_row, above_indent)
3534 } else {
3535 (below_row, below_indent)
3536 }
3537 }
3538 (Some(above), None) => above,
3539 (None, Some(below)) => below,
3540 _ => return None,
3541 };
3542
3543 target_indent = indent;
3544 buffer_row = row;
3545 }
3546
3547 let start = buffer_row.saturating_sub(SEARCH_ROW_LIMIT);
3548 let end = (max_row + 1).min(buffer_row + SEARCH_ROW_LIMIT);
3549
3550 let mut start_indent = None;
3551 for (row, indent) in self
3552 .text
3553 .reversed_line_indents_in_row_range(start..buffer_row)
3554 {
3555 accessed_row_counter += 1;
3556 if accessed_row_counter == YIELD_INTERVAL {
3557 accessed_row_counter = 0;
3558 yield_now().await;
3559 }
3560 if !indent.is_line_empty() && indent.raw_len() < target_indent.raw_len() {
3561 start_indent = Some((row, indent));
3562 break;
3563 }
3564 }
3565 let (start_row, start_indent_size) = start_indent?;
3566
3567 let mut end_indent = (end, None);
3568 for (row, indent) in self.text.line_indents_in_row_range((buffer_row + 1)..end) {
3569 accessed_row_counter += 1;
3570 if accessed_row_counter == YIELD_INTERVAL {
3571 accessed_row_counter = 0;
3572 yield_now().await;
3573 }
3574 if !indent.is_line_empty() && indent.raw_len() < target_indent.raw_len() {
3575 end_indent = (row.saturating_sub(1), Some(indent));
3576 break;
3577 }
3578 }
3579 let (end_row, end_indent_size) = end_indent;
3580
3581 let indent = if let Some(end_indent_size) = end_indent_size {
3582 if start_indent_size.raw_len() > end_indent_size.raw_len() {
3583 start_indent_size
3584 } else {
3585 end_indent_size
3586 }
3587 } else {
3588 start_indent_size
3589 };
3590
3591 Some((start_row..end_row, indent))
3592 }
3593
/// Returns the selections of remote peers that intersect the given range,
/// optionally including the local replica's own selections.
3595 #[allow(clippy::type_complexity)]
3596 pub fn selections_in_range(
3597 &self,
3598 range: Range<Anchor>,
3599 include_local: bool,
3600 ) -> impl Iterator<
3601 Item = (
3602 ReplicaId,
3603 bool,
3604 CursorShape,
3605 impl Iterator<Item = &Selection<Anchor>> + '_,
3606 ),
3607 > + '_ {
3608 self.remote_selections
3609 .iter()
3610 .filter(move |(replica_id, set)| {
3611 (include_local || **replica_id != self.text.replica_id())
3612 && !set.selections.is_empty()
3613 })
3614 .map(move |(replica_id, set)| {
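// Binary-search for the first selection that ends after the range start and
// for the last selection that starts before the range end, then yield the
// selections in between.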
3615 let start_ix = match set.selections.binary_search_by(|probe| {
3616 probe.end.cmp(&range.start, self).then(Ordering::Greater)
3617 }) {
3618 Ok(ix) | Err(ix) => ix,
3619 };
3620 let end_ix = match set.selections.binary_search_by(|probe| {
3621 probe.start.cmp(&range.end, self).then(Ordering::Less)
3622 }) {
3623 Ok(ix) | Err(ix) => ix,
3624 };
3625
3626 (
3627 *replica_id,
3628 set.line_mode,
3629 set.cursor_shape,
3630 set.selections[start_ix..end_ix].iter(),
3631 )
3632 })
3633 }
3634
3635 /// Whether the buffer contains any git changes.
3636 pub fn has_git_diff(&self) -> bool {
3637 !self.git_diff.is_empty()
3638 }
3639
3640 /// Returns all the Git diff hunks intersecting the given
3641 /// row range.
3642 pub fn git_diff_hunks_in_row_range(
3643 &self,
3644 range: Range<BufferRow>,
3645 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3646 self.git_diff.hunks_in_row_range(range, self)
3647 }
3648
3649 /// Returns all the Git diff hunks intersecting the given
3650 /// range.
3651 pub fn git_diff_hunks_intersecting_range(
3652 &self,
3653 range: Range<Anchor>,
3654 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3655 self.git_diff.hunks_intersecting_range(range, self)
3656 }
3657
3658 /// Returns all the Git diff hunks intersecting the given
3659 /// range, in reverse order.
3660 pub fn git_diff_hunks_intersecting_range_rev(
3661 &self,
3662 range: Range<Anchor>,
3663 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk<u32>> {
3664 self.git_diff.hunks_intersecting_range_rev(range, self)
3665 }
3666
/// Returns whether the buffer contains any diagnostics.
3668 pub fn has_diagnostics(&self) -> bool {
3669 !self.diagnostics.is_empty()
3670 }
3671
3672 /// Returns all the diagnostics intersecting the given range.
3673 pub fn diagnostics_in_range<'a, T, O>(
3674 &'a self,
3675 search_range: Range<T>,
3676 reversed: bool,
3677 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3678 where
3679 T: 'a + Clone + ToOffset,
3680 O: 'a + FromAnchor + Ord,
3681 {
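// Collect one peekable iterator of diagnostics per language server, then
// merge them below by repeatedly taking the entry that sorts first.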
3682 let mut iterators: Vec<_> = self
3683 .diagnostics
3684 .iter()
3685 .map(|(_, collection)| {
3686 collection
3687 .range::<T, O>(search_range.clone(), self, true, reversed)
3688 .peekable()
3689 })
3690 .collect();
3691
3692 std::iter::from_fn(move || {
3693 let (next_ix, _) = iterators
3694 .iter_mut()
3695 .enumerate()
3696 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
3697 .min_by(|(_, a), (_, b)| {
3698 let cmp = a
3699 .range
3700 .start
3701 .cmp(&b.range.start)
3702 // when range is equal, sort by diagnostic severity
3703 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
3704 // and stabilize order with group_id
3705 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
3706 if reversed {
3707 cmp.reverse()
3708 } else {
3709 cmp
3710 }
3711 })?;
3712 iterators[next_ix].next()
3713 })
3714 }
3715
/// Returns all the diagnostic groups associated with the given
/// language server id. If no language server id is provided,
/// all diagnostic groups are returned.
3719 pub fn diagnostic_groups(
3720 &self,
3721 language_server_id: Option<LanguageServerId>,
3722 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
3723 let mut groups = Vec::new();
3724
3725 if let Some(language_server_id) = language_server_id {
3726 if let Ok(ix) = self
3727 .diagnostics
3728 .binary_search_by_key(&language_server_id, |e| e.0)
3729 {
3730 self.diagnostics[ix]
3731 .1
3732 .groups(language_server_id, &mut groups, self);
3733 }
3734 } else {
3735 for (language_server_id, diagnostics) in self.diagnostics.iter() {
3736 diagnostics.groups(*language_server_id, &mut groups, self);
3737 }
3738 }
3739
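// Order groups by the position of their primary diagnostic, breaking ties
// by language server id so the ordering is stable.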
3740 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
3741 let a_start = &group_a.entries[group_a.primary_ix].range.start;
3742 let b_start = &group_b.entries[group_b.primary_ix].range.start;
3743 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
3744 });
3745
3746 groups
3747 }
3748
3749 /// Returns an iterator over the diagnostics for the given group.
3750 pub fn diagnostic_group<'a, O>(
3751 &'a self,
3752 group_id: usize,
3753 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3754 where
3755 O: 'a + FromAnchor,
3756 {
3757 self.diagnostics
3758 .iter()
3759 .flat_map(move |(_, set)| set.group(group_id, self))
3760 }
3761
3762 /// An integer version number that accounts for all updates besides
3763 /// the buffer's text itself (which is versioned via a version vector).
3764 pub fn non_text_state_update_count(&self) -> usize {
3765 self.non_text_state_update_count
3766 }
3767
/// Returns a snapshot of the underlying file.
3769 pub fn file(&self) -> Option<&Arc<dyn File>> {
3770 self.file.as_ref()
3771 }
3772
/// Resolves the path of the underlying file: the full path (including the
/// worktree root) when `include_root` is true or the path has no file name,
/// otherwise the path relative to the worktree root.
3774 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
3775 if let Some(file) = self.file() {
3776 if file.path().file_name().is_none() || include_root {
3777 Some(file.full_path(cx))
3778 } else {
3779 Some(file.path().to_path_buf())
3780 }
3781 } else {
3782 None
3783 }
3784 }
3785}
3786
3787fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
3788 indent_size_for_text(text.chars_at(Point::new(row, 0)))
3789}
3790
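/// Computes the indentation of a line from its leading characters: the run of
/// spaces or tabs at the start determines the length, and the first indent
/// character determines the kind of the returned [IndentSize].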
3791fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
3792 let mut result = IndentSize::spaces(0);
3793 for c in text {
3794 let kind = match c {
3795 ' ' => IndentKind::Space,
3796 '\t' => IndentKind::Tab,
3797 _ => break,
3798 };
3799 if result.len == 0 {
3800 result.kind = kind;
3801 }
3802 result.len += 1;
3803 }
3804 result
3805}
3806
3807impl Clone for BufferSnapshot {
3808 fn clone(&self) -> Self {
3809 Self {
3810 text: self.text.clone(),
3811 git_diff: self.git_diff.clone(),
3812 syntax: self.syntax.clone(),
3813 file: self.file.clone(),
3814 remote_selections: self.remote_selections.clone(),
3815 diagnostics: self.diagnostics.clone(),
3816 language: self.language.clone(),
3817 non_text_state_update_count: self.non_text_state_update_count,
3818 }
3819 }
3820}
3821
3822impl Deref for BufferSnapshot {
3823 type Target = text::BufferSnapshot;
3824
3825 fn deref(&self) -> &Self::Target {
3826 &self.text
3827 }
3828}
3829
3830unsafe impl<'a> Send for BufferChunks<'a> {}
3831
3832impl<'a> BufferChunks<'a> {
3833 pub(crate) fn new(
3834 text: &'a Rope,
3835 range: Range<usize>,
3836 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
3837 diagnostics: bool,
3838 buffer_snapshot: Option<&'a BufferSnapshot>,
3839 ) -> Self {
3840 let mut highlights = None;
3841 if let Some((captures, highlight_maps)) = syntax {
3842 highlights = Some(BufferChunkHighlights {
3843 captures,
3844 next_capture: None,
3845 stack: Default::default(),
3846 highlight_maps,
3847 })
3848 }
3849
3850 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
3851 let chunks = text.chunks_in_range(range.clone());
3852
3853 let mut this = BufferChunks {
3854 range,
3855 buffer_snapshot,
3856 chunks,
3857 diagnostic_endpoints,
3858 error_depth: 0,
3859 warning_depth: 0,
3860 information_depth: 0,
3861 hint_depth: 0,
3862 unnecessary_depth: 0,
3863 highlights,
3864 };
3865 this.initialize_diagnostic_endpoints();
3866 this
3867 }
3868
/// Seeks to the given byte range in the buffer.
3870 pub fn seek(&mut self, range: Range<usize>) {
3871 let old_range = std::mem::replace(&mut self.range, range.clone());
3872 self.chunks.set_range(self.range.clone());
3873 if let Some(highlights) = self.highlights.as_mut() {
3874 if old_range.start >= self.range.start && old_range.end <= self.range.end {
3875 // Reuse existing highlights stack, as the new range is a subrange of the old one.
3876 highlights
3877 .stack
3878 .retain(|(end_offset, _)| *end_offset > range.start);
3879 if let Some(capture) = &highlights.next_capture {
3880 if range.start >= capture.node.start_byte() {
3881 let next_capture_end = capture.node.end_byte();
3882 if range.start < next_capture_end {
3883 highlights.stack.push((
3884 next_capture_end,
3885 highlights.highlight_maps[capture.grammar_index].get(capture.index),
3886 ));
3887 }
3888 highlights.next_capture.take();
3889 }
3890 }
3891 } else if let Some(snapshot) = self.buffer_snapshot {
3892 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
3893 *highlights = BufferChunkHighlights {
3894 captures,
3895 next_capture: None,
3896 stack: Default::default(),
3897 highlight_maps,
3898 };
3899 } else {
3900 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
3901 // Seeking such BufferChunks is not supported.
3902 debug_assert!(false, "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot");
3903 }
3904
3905 highlights.captures.set_byte_range(self.range.clone());
3906 self.initialize_diagnostic_endpoints();
3907 }
3908 }
3909
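/// Collects the start and end offsets of every diagnostic intersecting the
/// current range, sorted by offset, so that severity depths can be tracked
/// while iterating over chunks.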
3910 fn initialize_diagnostic_endpoints(&mut self) {
3911 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
3912 if let Some(buffer) = self.buffer_snapshot {
3913 let mut diagnostic_endpoints = Vec::new();
3914 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
3915 diagnostic_endpoints.push(DiagnosticEndpoint {
3916 offset: entry.range.start,
3917 is_start: true,
3918 severity: entry.diagnostic.severity,
3919 is_unnecessary: entry.diagnostic.is_unnecessary,
3920 });
3921 diagnostic_endpoints.push(DiagnosticEndpoint {
3922 offset: entry.range.end,
3923 is_start: false,
3924 severity: entry.diagnostic.severity,
3925 is_unnecessary: entry.diagnostic.is_unnecessary,
3926 });
3927 }
3928 diagnostic_endpoints
3929 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
3930 *diagnostics = diagnostic_endpoints.into_iter().peekable();
3931 }
3932 }
3933 }
3934
3935 /// The current byte offset in the buffer.
3936 pub fn offset(&self) -> usize {
3937 self.range.start
3938 }
3939
3940 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
3941 let depth = match endpoint.severity {
3942 DiagnosticSeverity::ERROR => &mut self.error_depth,
3943 DiagnosticSeverity::WARNING => &mut self.warning_depth,
3944 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
3945 DiagnosticSeverity::HINT => &mut self.hint_depth,
3946 _ => return,
3947 };
3948 if endpoint.is_start {
3949 *depth += 1;
3950 } else {
3951 *depth -= 1;
3952 }
3953
3954 if endpoint.is_unnecessary {
3955 if endpoint.is_start {
3956 self.unnecessary_depth += 1;
3957 } else {
3958 self.unnecessary_depth -= 1;
3959 }
3960 }
3961 }
3962
3963 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
3964 if self.error_depth > 0 {
3965 Some(DiagnosticSeverity::ERROR)
3966 } else if self.warning_depth > 0 {
3967 Some(DiagnosticSeverity::WARNING)
3968 } else if self.information_depth > 0 {
3969 Some(DiagnosticSeverity::INFORMATION)
3970 } else if self.hint_depth > 0 {
3971 Some(DiagnosticSeverity::HINT)
3972 } else {
3973 None
3974 }
3975 }
3976
3977 fn current_code_is_unnecessary(&self) -> bool {
3978 self.unnecessary_depth > 0
3979 }
3980}
3981
3982impl<'a> Iterator for BufferChunks<'a> {
3983 type Item = Chunk<'a>;
3984
3985 fn next(&mut self) -> Option<Self::Item> {
3986 let mut next_capture_start = usize::MAX;
3987 let mut next_diagnostic_endpoint = usize::MAX;
3988
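// Advance the syntax highlighting state: drop captures that ended before the
// current offset, push any captures that start at or before it, and note
// where the next capture begins so the emitted chunk can stop there.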
3989 if let Some(highlights) = self.highlights.as_mut() {
3990 while let Some((parent_capture_end, _)) = highlights.stack.last() {
3991 if *parent_capture_end <= self.range.start {
3992 highlights.stack.pop();
3993 } else {
3994 break;
3995 }
3996 }
3997
3998 if highlights.next_capture.is_none() {
3999 highlights.next_capture = highlights.captures.next();
4000 }
4001
4002 while let Some(capture) = highlights.next_capture.as_ref() {
4003 if self.range.start < capture.node.start_byte() {
4004 next_capture_start = capture.node.start_byte();
4005 break;
4006 } else {
4007 let highlight_id =
4008 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4009 highlights
4010 .stack
4011 .push((capture.node.end_byte(), highlight_id));
4012 highlights.next_capture = highlights.captures.next();
4013 }
4014 }
4015 }
4016
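// Consume diagnostic endpoints that fall at or before the current offset,
// updating the per-severity depths, and remember where the next endpoint
// begins so the emitted chunk does not cross it.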
4017 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4018 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4019 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4020 if endpoint.offset <= self.range.start {
4021 self.update_diagnostic_depths(endpoint);
4022 diagnostic_endpoints.next();
4023 } else {
4024 next_diagnostic_endpoint = endpoint.offset;
4025 break;
4026 }
4027 }
4028 }
4029 self.diagnostic_endpoints = diagnostic_endpoints;
4030
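// Emit a chunk that ends at the earliest of the underlying rope chunk
// boundary, the next syntax capture, the next diagnostic endpoint, or the
// end of the innermost enclosing capture.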
4031 if let Some(chunk) = self.chunks.peek() {
4032 let chunk_start = self.range.start;
4033 let mut chunk_end = (self.chunks.offset() + chunk.len())
4034 .min(next_capture_start)
4035 .min(next_diagnostic_endpoint);
4036 let mut highlight_id = None;
4037 if let Some(highlights) = self.highlights.as_ref() {
4038 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4039 chunk_end = chunk_end.min(*parent_capture_end);
4040 highlight_id = Some(*parent_highlight_id);
4041 }
4042 }
4043
4044 let slice =
4045 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4046 self.range.start = chunk_end;
4047 if self.range.start == self.chunks.offset() + chunk.len() {
4048 self.chunks.next().unwrap();
4049 }
4050
4051 Some(Chunk {
4052 text: slice,
4053 syntax_highlight_id: highlight_id,
4054 diagnostic_severity: self.current_diagnostic_severity(),
4055 is_unnecessary: self.current_code_is_unnecessary(),
4056 ..Default::default()
4057 })
4058 } else {
4059 None
4060 }
4061 }
4062}
4063
4064impl operation_queue::Operation for Operation {
4065 fn lamport_timestamp(&self) -> clock::Lamport {
4066 match self {
4067 Operation::Buffer(_) => {
4068 unreachable!("buffer operations should never be deferred at this layer")
4069 }
4070 Operation::UpdateDiagnostics {
4071 lamport_timestamp, ..
4072 }
4073 | Operation::UpdateSelections {
4074 lamport_timestamp, ..
4075 }
4076 | Operation::UpdateCompletionTriggers {
4077 lamport_timestamp, ..
4078 } => *lamport_timestamp,
4079 }
4080 }
4081}
4082
4083impl Default for Diagnostic {
4084 fn default() -> Self {
4085 Self {
4086 source: Default::default(),
4087 code: None,
4088 severity: DiagnosticSeverity::ERROR,
4089 message: Default::default(),
4090 group_id: 0,
4091 is_primary: false,
4092 is_disk_based: false,
4093 is_unnecessary: false,
4094 data: None,
4095 }
4096 }
4097}
4098
4099impl IndentSize {
/// Returns an [IndentSize] representing the given number of spaces.
4101 pub fn spaces(len: u32) -> Self {
4102 Self {
4103 len,
4104 kind: IndentKind::Space,
4105 }
4106 }
4107
4108 /// Returns an [IndentSize] representing a tab.
4109 pub fn tab() -> Self {
4110 Self {
4111 len: 1,
4112 kind: IndentKind::Tab,
4113 }
4114 }
4115
4116 /// An iterator over the characters represented by this [IndentSize].
4117 pub fn chars(&self) -> impl Iterator<Item = char> {
4118 iter::repeat(self.char()).take(self.len as usize)
4119 }
4120
4121 /// The character representation of this [IndentSize].
4122 pub fn char(&self) -> char {
4123 match self.kind {
4124 IndentKind::Space => ' ',
4125 IndentKind::Tab => '\t',
4126 }
4127 }
4128
/// Consumes this [IndentSize] and returns a new one that has been shrunk or
/// enlarged by the given size, depending on the given direction.
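///
/// A minimal sketch of the behavior (not compiled as a doctest):
///
/// ```ignore
/// // Growing two spaces by two more spaces yields four spaces.
/// let grown = IndentSize::spaces(2).with_delta(Ordering::Greater, IndentSize::spaces(2));
/// // Shrinking a tab by spaces leaves it unchanged, because the kinds differ.
/// let same = IndentSize::tab().with_delta(Ordering::Less, IndentSize::spaces(4));
/// ```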
4131 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4132 match direction {
4133 Ordering::Less => {
4134 if self.kind == size.kind && self.len >= size.len {
4135 self.len -= size.len;
4136 }
4137 }
4138 Ordering::Equal => {}
4139 Ordering::Greater => {
4140 if self.len == 0 {
4141 self = size;
4142 } else if self.kind == size.kind {
4143 self.len += size.len;
4144 }
4145 }
4146 }
4147 self
4148 }
4149}
4150
4151#[cfg(any(test, feature = "test-support"))]
4152pub struct TestFile {
4153 pub path: Arc<Path>,
4154 pub root_name: String,
4155}
4156
4157#[cfg(any(test, feature = "test-support"))]
4158impl File for TestFile {
4159 fn path(&self) -> &Arc<Path> {
4160 &self.path
4161 }
4162
4163 fn full_path(&self, _: &gpui::AppContext) -> PathBuf {
4164 PathBuf::from(&self.root_name).join(self.path.as_ref())
4165 }
4166
4167 fn as_local(&self) -> Option<&dyn LocalFile> {
4168 None
4169 }
4170
4171 fn mtime(&self) -> Option<SystemTime> {
4172 unimplemented!()
4173 }
4174
4175 fn file_name<'a>(&'a self, _: &'a gpui::AppContext) -> &'a std::ffi::OsStr {
4176 self.path().file_name().unwrap_or(self.root_name.as_ref())
4177 }
4178
4179 fn worktree_id(&self, _: &AppContext) -> WorktreeId {
4180 WorktreeId::from_usize(0)
4181 }
4182
4183 fn is_deleted(&self) -> bool {
4184 unimplemented!()
4185 }
4186
4187 fn as_any(&self) -> &dyn std::any::Any {
4188 unimplemented!()
4189 }
4190
4191 fn to_proto(&self, _: &AppContext) -> rpc::proto::File {
4192 unimplemented!()
4193 }
4194
4195 fn is_private(&self) -> bool {
4196 false
4197 }
4198}
4199
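/// Groups an ascending sequence of row numbers into contiguous ranges,
/// starting a new range whenever a gap appears or the current range reaches
/// `max_len` rows.
///
/// A minimal sketch of the behavior (not compiled as a doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 7, 8].into_iter(), 10).collect();
/// assert_eq!(ranges, vec![1..4, 7..9]);
/// ```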
4200pub(crate) fn contiguous_ranges(
4201 values: impl Iterator<Item = u32>,
4202 max_len: usize,
4203) -> impl Iterator<Item = Range<u32>> {
4204 let mut values = values;
4205 let mut current_range: Option<Range<u32>> = None;
4206 std::iter::from_fn(move || loop {
4207 if let Some(value) = values.next() {
4208 if let Some(range) = &mut current_range {
4209 if value == range.end && range.len() < max_len {
4210 range.end += 1;
4211 continue;
4212 }
4213 }
4214
4215 let prev_range = current_range.clone();
4216 current_range = Some(value..(value + 1));
4217 if prev_range.is_some() {
4218 return prev_range;
4219 }
4220 } else {
4221 return current_range.take();
4222 }
4223 })
4224}
4225
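/// Classifies characters as word, whitespace, or punctuation characters,
/// optionally consulting a language scope's extra word characters.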
4226#[derive(Default, Debug)]
4227pub struct CharClassifier {
4228 scope: Option<LanguageScope>,
4229 for_completion: bool,
4230 ignore_punctuation: bool,
4231}
4232
4233impl CharClassifier {
4234 pub fn new(scope: Option<LanguageScope>) -> Self {
4235 Self {
4236 scope,
4237 for_completion: false,
4238 ignore_punctuation: false,
4239 }
4240 }
4241
4242 pub fn for_completion(self, for_completion: bool) -> Self {
4243 Self {
4244 for_completion,
4245 ..self
4246 }
4247 }
4248
4249 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4250 Self {
4251 ignore_punctuation,
4252 ..self
4253 }
4254 }
4255
4256 pub fn is_whitespace(&self, c: char) -> bool {
4257 self.kind(c) == CharKind::Whitespace
4258 }
4259
4260 pub fn is_word(&self, c: char) -> bool {
4261 self.kind(c) == CharKind::Word
4262 }
4263
4264 pub fn is_punctuation(&self, c: char) -> bool {
4265 self.kind(c) == CharKind::Punctuation
4266 }
4267
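/// Classifies a single character: whitespace and alphanumerics (plus `_`) are
/// handled directly, additional word characters come from the language scope
/// (with `-` demoted to punctuation outside of completions), and anything
/// else is punctuation unless `ignore_punctuation` is set.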
4268 pub fn kind(&self, c: char) -> CharKind {
4269 if c.is_whitespace() {
4270 return CharKind::Whitespace;
4271 } else if c.is_alphanumeric() || c == '_' {
4272 return CharKind::Word;
4273 }
4274
4275 if let Some(scope) = &self.scope {
4276 if let Some(characters) = scope.word_characters() {
4277 if characters.contains(&c) {
4278 if c == '-' && !self.for_completion && !self.ignore_punctuation {
4279 return CharKind::Punctuation;
4280 }
4281 return CharKind::Word;
4282 }
4283 }
4284 }
4285
4286 if self.ignore_punctuation {
4287 CharKind::Word
4288 } else {
4289 CharKind::Punctuation
4290 }
4291 }
4292}
4293
4294/// Find all of the ranges of whitespace that occur at the ends of lines
4295/// in the given rope.
4296///
4297/// This could also be done with a regex search, but this implementation
4298/// avoids copying text.
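///
/// A minimal sketch of the behavior (not compiled as a doctest; assumes the
/// rope can be built from a `&str`):
///
/// ```ignore
/// let rope = Rope::from("a  \nb\t\nc");
/// // Reports the two spaces after `a` and the tab after `b`.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![1..3, 5..6]);
/// ```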
4299pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4300 let mut ranges = Vec::new();
4301
4302 let mut offset = 0;
4303 let mut prev_chunk_trailing_whitespace_range = 0..0;
4304 for chunk in rope.chunks() {
4305 let mut prev_line_trailing_whitespace_range = 0..0;
4306 for (i, line) in chunk.split('\n').enumerate() {
4307 let line_end_offset = offset + line.len();
4308 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4309 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4310
4311 if i == 0 && trimmed_line_len == 0 {
4312 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4313 }
4314 if !prev_line_trailing_whitespace_range.is_empty() {
4315 ranges.push(prev_line_trailing_whitespace_range);
4316 }
4317
4318 offset = line_end_offset + 1;
4319 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4320 }
4321
4322 offset -= 1;
4323 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4324 }
4325
4326 if !prev_chunk_trailing_whitespace_range.is_empty() {
4327 ranges.push(prev_chunk_trailing_whitespace_range);
4328 }
4329
4330 ranges
4331}