1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 markdown::ParsedMarkdown,
5 proto, Grammar, Language, LanguageRegistry,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 language_settings::{language_settings, IndentGuideSettings, LanguageSettings},
10 markdown::parse_markdown,
11 outline::OutlineItem,
12 syntax_map::{
13 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
14 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
15 },
16 task_context::RunnableRange,
17 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
18};
19use anyhow::{anyhow, Context, Result};
20use async_watch as watch;
21use clock::Lamport;
22pub use clock::ReplicaId;
23use collections::HashMap;
24use futures::channel::oneshot;
25use gpui::{
26 AnyElement, AppContext, Context as _, EventEmitter, HighlightStyle, Model, ModelContext,
27 Pixels, Task, TaskLabel, WindowContext,
28};
29use lsp::LanguageServerId;
30use parking_lot::Mutex;
31use schemars::JsonSchema;
32use serde::{Deserialize, Serialize};
33use serde_json::Value;
34use settings::WorktreeId;
35use similar::{ChangeTag, TextDiff};
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 ffi::OsStr,
45 fmt,
46 future::Future,
47 iter::{self, Iterator, Peekable},
48 mem,
49 num::NonZeroU32,
50 ops::{Deref, DerefMut, Range},
51 path::{Path, PathBuf},
52 str,
53 sync::{Arc, LazyLock},
54 time::{Duration, Instant, SystemTime},
55 vec,
56};
57use sum_tree::TreeMap;
58use text::operation_queue::OperationQueue;
59use text::*;
60pub use text::{
61 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
62 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
63 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
64 Transaction, TransactionId, Unclipped,
65};
66use theme::SyntaxTheme;
67#[cfg(any(test, feature = "test-support"))]
68use util::RandomCharIter;
69use util::{debug_panic, RangeExt};
70
71#[cfg(any(test, feature = "test-support"))]
72pub use {tree_sitter_rust, tree_sitter_typescript};
73
74pub use lsp::DiagnosticSeverity;
75
76/// A label for the background task spawned by the buffer to compute
77/// a diff against the contents of its file.
78pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
79
/// Indicates whether a [`Buffer`] has permission to edit.
81#[derive(PartialEq, Clone, Copy, Debug)]
82pub enum Capability {
83 /// The buffer is a mutable replica.
84 ReadWrite,
85 /// The buffer is a read-only replica.
86 ReadOnly,
87}
88
89pub type BufferRow = u32;
90
91#[derive(Clone)]
92enum BufferDiffBase {
93 Git(Rope),
94 PastBufferVersion {
95 buffer: Model<Buffer>,
96 rope: Rope,
97 merged_operations: Vec<Lamport>,
98 },
99}
100
101/// An in-memory representation of a source code file, including its text,
102/// syntax trees, git status, and diagnostics.
103pub struct Buffer {
104 text: TextBuffer,
105 diff_base: Option<BufferDiffBase>,
106 git_diff: git::diff::BufferDiff,
107 /// Filesystem state, `None` when there is no path.
108 file: Option<Arc<dyn File>>,
109 /// The mtime of the file when this buffer was last loaded from
110 /// or saved to disk.
111 saved_mtime: Option<SystemTime>,
112 /// The version vector when this buffer was last loaded from
113 /// or saved to disk.
114 saved_version: clock::Global,
115 preview_version: clock::Global,
116 transaction_depth: usize,
117 was_dirty_before_starting_transaction: Option<bool>,
118 reload_task: Option<Task<Result<()>>>,
119 language: Option<Arc<Language>>,
120 autoindent_requests: Vec<Arc<AutoindentRequest>>,
121 pending_autoindent: Option<Task<()>>,
122 sync_parse_timeout: Duration,
123 syntax_map: Mutex<SyntaxMap>,
124 parsing_in_background: bool,
125 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
126 non_text_state_update_count: usize,
127 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
128 remote_selections: TreeMap<ReplicaId, SelectionSet>,
129 diagnostics_timestamp: clock::Lamport,
130 completion_triggers: BTreeSet<String>,
131 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
132 completion_triggers_timestamp: clock::Lamport,
133 deferred_ops: OperationQueue<Operation>,
134 capability: Capability,
135 has_conflict: bool,
136 diff_base_version: usize,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
139 has_unsaved_edits: Cell<(clock::Global, bool)>,
140 _subscriptions: Vec<gpui::Subscription>,
141}
142
143#[derive(Copy, Clone, Debug, PartialEq, Eq)]
144pub enum ParseStatus {
145 Idle,
146 Parsing,
147}
148
149/// An immutable, cheaply cloneable representation of a fixed
150/// state of a buffer.
151pub struct BufferSnapshot {
152 text: text::BufferSnapshot,
153 git_diff: git::diff::BufferDiff,
154 pub(crate) syntax: SyntaxSnapshot,
155 file: Option<Arc<dyn File>>,
156 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
157 remote_selections: TreeMap<ReplicaId, SelectionSet>,
158 language: Option<Arc<Language>>,
159 non_text_state_update_count: usize,
160}
161
162/// The kind and amount of indentation in a particular line. For now,
163/// assumes that indentation is all the same character.
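///
/// For example (illustrative values, shown as an ignored doc sketch):
///
/// ```ignore
/// // A line that begins with four spaces:
/// IndentSize { len: 4, kind: IndentKind::Space };
/// // A line that begins with a single tab character:
/// IndentSize { len: 1, kind: IndentKind::Tab };
/// ```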
164#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
165pub struct IndentSize {
166 /// The number of bytes that comprise the indentation.
167 pub len: u32,
168 /// The kind of whitespace used for indentation.
169 pub kind: IndentKind,
170}
171
172/// A whitespace character that's used for indentation.
173#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
174pub enum IndentKind {
175 /// An ASCII space character.
176 #[default]
177 Space,
178 /// An ASCII tab character.
179 Tab,
180}
181
182/// The shape of a selection cursor.
183#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
184#[serde(rename_all = "snake_case")]
185pub enum CursorShape {
186 /// A vertical bar
187 #[default]
188 Bar,
189 /// A block that surrounds the following character
190 Block,
191 /// An underline that runs along the following character
192 Underline,
193 /// A box drawn around the following character
194 Hollow,
195}
196
197#[derive(Clone, Debug)]
198struct SelectionSet {
199 line_mode: bool,
200 cursor_shape: CursorShape,
201 selections: Arc<[Selection<Anchor>]>,
202 lamport_timestamp: clock::Lamport,
203}
204
205/// A diagnostic associated with a certain range of a buffer.
206#[derive(Clone, Debug, PartialEq, Eq)]
207pub struct Diagnostic {
208 /// The name of the service that produced this diagnostic.
209 pub source: Option<String>,
210 /// A machine-readable code that identifies this diagnostic.
211 pub code: Option<String>,
212 /// Whether this diagnostic is a hint, warning, or error.
213 pub severity: DiagnosticSeverity,
214 /// The human-readable message associated with this diagnostic.
215 pub message: String,
216 /// An id that identifies the group to which this diagnostic belongs.
217 ///
218 /// When a language server produces a diagnostic with
219 /// one or more associated diagnostics, those diagnostics are all
220 /// assigned a single group ID.
221 pub group_id: usize,
222 /// Whether this diagnostic is the primary diagnostic for its group.
223 ///
224 /// In a given group, the primary diagnostic is the top-level diagnostic
225 /// returned by the language server. The non-primary diagnostics are the
226 /// associated diagnostics.
227 pub is_primary: bool,
228 /// Whether this diagnostic is considered to originate from an analysis of
229 /// files on disk, as opposed to any unsaved buffer contents. This is a
230 /// property of a given diagnostic source, and is configured for a given
231 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
232 /// for the language server.
233 pub is_disk_based: bool,
234 /// Whether this diagnostic marks unnecessary code.
235 pub is_unnecessary: bool,
    /// Data from the language server that produced this diagnostic. It is passed back to the language server when we request code actions for this diagnostic.
237 pub data: Option<Value>,
238}
239
240/// TODO - move this into the `project` crate and make it private.
241pub async fn prepare_completion_documentation(
242 documentation: &lsp::Documentation,
243 language_registry: &Arc<LanguageRegistry>,
244 language: Option<Arc<Language>>,
245) -> Documentation {
246 match documentation {
247 lsp::Documentation::String(text) => {
248 if text.lines().count() <= 1 {
249 Documentation::SingleLine(text.clone())
250 } else {
251 Documentation::MultiLinePlainText(text.clone())
252 }
253 }
254
255 lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
256 lsp::MarkupKind::PlainText => {
257 if value.lines().count() <= 1 {
258 Documentation::SingleLine(value.clone())
259 } else {
260 Documentation::MultiLinePlainText(value.clone())
261 }
262 }
263
264 lsp::MarkupKind::Markdown => {
265 let parsed = parse_markdown(value, language_registry, language).await;
266 Documentation::MultiLineMarkdown(parsed)
267 }
268 },
269 }
270}
271
272/// Documentation associated with a [`Completion`].
273#[derive(Clone, Debug)]
274pub enum Documentation {
275 /// There is no documentation for this completion.
276 Undocumented,
277 /// A single line of documentation.
278 SingleLine(String),
279 /// Multiple lines of plain text documentation.
280 MultiLinePlainText(String),
281 /// Markdown documentation.
282 MultiLineMarkdown(ParsedMarkdown),
283}
284
285/// An operation used to synchronize this buffer with its other replicas.
286#[derive(Clone, Debug, PartialEq)]
287pub enum Operation {
288 /// A text operation.
289 Buffer(text::Operation),
290
291 /// An update to the buffer's diagnostics.
292 UpdateDiagnostics {
293 /// The id of the language server that produced the new diagnostics.
294 server_id: LanguageServerId,
295 /// The diagnostics.
296 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
297 /// The buffer's lamport timestamp.
298 lamport_timestamp: clock::Lamport,
299 },
300
301 /// An update to the most recent selections in this buffer.
302 UpdateSelections {
303 /// The selections.
304 selections: Arc<[Selection<Anchor>]>,
305 /// The buffer's lamport timestamp.
306 lamport_timestamp: clock::Lamport,
307 /// Whether the selections are in 'line mode'.
308 line_mode: bool,
309 /// The [`CursorShape`] associated with these selections.
310 cursor_shape: CursorShape,
311 },
312
313 /// An update to the characters that should trigger autocompletion
314 /// for this buffer.
315 UpdateCompletionTriggers {
316 /// The characters that trigger autocompletion.
317 triggers: Vec<String>,
318 /// The buffer's lamport timestamp.
319 lamport_timestamp: clock::Lamport,
320 /// The language server ID.
321 server_id: LanguageServerId,
322 },
323}
324
325/// An event that occurs in a buffer.
326#[derive(Clone, Debug, PartialEq)]
327pub enum BufferEvent {
328 /// The buffer was changed in a way that must be
329 /// propagated to its other replicas.
330 Operation {
331 operation: Operation,
332 is_local: bool,
333 },
334 /// The buffer was edited.
335 Edited,
336 /// The buffer's `dirty` bit changed.
337 DirtyChanged,
338 /// The buffer was saved.
339 Saved,
340 /// The buffer's file was changed on disk.
341 FileHandleChanged,
342 /// The buffer was reloaded.
343 Reloaded,
    /// The buffer needs to be reloaded.
345 ReloadNeeded,
346 /// The buffer's diff_base changed.
347 DiffBaseChanged,
348 /// Buffer's excerpts for a certain diff base were recalculated.
349 DiffUpdated,
350 /// The buffer's language was changed.
351 LanguageChanged,
352 /// The buffer's syntax trees were updated.
353 Reparsed,
354 /// The buffer's diagnostics were updated.
355 DiagnosticsUpdated,
356 /// The buffer gained or lost editing capabilities.
357 CapabilityChanged,
358 /// The buffer was explicitly requested to close.
359 Closed,
360 /// The buffer was discarded when closing.
361 Discarded,
362}
363
364/// The file associated with a buffer.
365pub trait File: Send + Sync {
366 /// Returns the [`LocalFile`] associated with this file, if the
367 /// file is local.
368 fn as_local(&self) -> Option<&dyn LocalFile>;
369
370 /// Returns whether this file is local.
371 fn is_local(&self) -> bool {
372 self.as_local().is_some()
373 }
374
375 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
376 /// only available in some states, such as modification time.
377 fn disk_state(&self) -> DiskState;
378
379 /// Returns the path of this file relative to the worktree's root directory.
380 fn path(&self) -> &Arc<Path>;
381
382 /// Returns the path of this file relative to the worktree's parent directory (this means it
383 /// includes the name of the worktree's root folder).
384 fn full_path(&self, cx: &AppContext) -> PathBuf;
385
386 /// Returns the last component of this handle's absolute path. If this handle refers to the root
387 /// of its worktree, then this method will return the name of the worktree itself.
388 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
389
390 /// Returns the id of the worktree to which this file belongs.
391 ///
392 /// This is needed for looking up project-specific settings.
393 fn worktree_id(&self, cx: &AppContext) -> WorktreeId;
394
395 /// Converts this file into an [`Any`] trait object.
396 fn as_any(&self) -> &dyn Any;
397
398 /// Converts this file into a protobuf message.
399 fn to_proto(&self, cx: &AppContext) -> rpc::proto::File;
400
    /// Returns whether Zed considers this to be a private file.
402 fn is_private(&self) -> bool;
403}
404
405/// The file's storage status - whether it's stored (`Present`), and if so when it was last
406/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
407/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
408/// indicator for new files.
409#[derive(Clone, Copy, Debug, PartialEq)]
410pub enum DiskState {
411 /// File created in Zed that has not been saved.
412 New,
413 /// File present on the filesystem.
414 Present {
415 /// Last known mtime (modification time).
416 mtime: SystemTime,
417 },
418 /// Deleted file that was previously present.
419 Deleted,
420}
421
422impl DiskState {
423 /// Returns the file's last known modification time on disk.
424 pub fn mtime(self) -> Option<SystemTime> {
425 match self {
426 DiskState::New => None,
427 DiskState::Present { mtime } => Some(mtime),
428 DiskState::Deleted => None,
429 }
430 }
431}
432
433/// The file associated with a buffer, in the case where the file is on the local disk.
434pub trait LocalFile: File {
    /// Returns the absolute path of this file.
436 fn abs_path(&self, cx: &AppContext) -> PathBuf;
437
438 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
439 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
440
441 /// Loads the file's contents from disk.
442 fn load_bytes(&self, cx: &AppContext) -> Task<Result<Vec<u8>>>;
443
444 /// Returns true if the file should not be shared with collaborators.
445 fn is_private(&self, _: &AppContext) -> bool {
446 false
447 }
448}
449
450/// The auto-indent behavior associated with an editing operation.
451/// For some editing operations, each affected line of text has its
452/// indentation recomputed. For other operations, the entire block
453/// of edited text is adjusted uniformly.
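///
/// A minimal usage sketch (the ranges and pasted text are placeholders, not
/// taken from the original documentation):
///
/// ```ignore
/// // Recompute indentation for each inserted line individually.
/// buffer.edit([(range, "if x {\n    y\n}")], Some(AutoindentMode::EachLine), cx);
///
/// // Shift a pasted block as a unit, preserving its internal relative indentation.
/// buffer.edit(
///     [(range, pasted_text)],
///     Some(AutoindentMode::Block { original_indent_columns: vec![4] }),
///     cx,
/// );
/// ```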
454#[derive(Clone, Debug)]
455pub enum AutoindentMode {
456 /// Indent each line of inserted text.
457 EachLine,
458 /// Apply the same indentation adjustment to all of the lines
459 /// in a given insertion.
460 Block {
461 /// The original indentation level of the first line of each
462 /// insertion, if it has been copied.
463 original_indent_columns: Vec<u32>,
464 },
465}
466
467#[derive(Clone)]
468struct AutoindentRequest {
469 before_edit: BufferSnapshot,
470 entries: Vec<AutoindentRequestEntry>,
471 is_block_mode: bool,
472}
473
474#[derive(Debug, Clone)]
475struct AutoindentRequestEntry {
476 /// A range of the buffer whose indentation should be adjusted.
477 range: Range<Anchor>,
478 /// Whether or not these lines should be considered brand new, for the
479 /// purpose of auto-indent. When text is not new, its indentation will
480 /// only be adjusted if the suggested indentation level has *changed*
481 /// since the edit was made.
482 first_line_is_new: bool,
483 indent_size: IndentSize,
484 original_indent_column: Option<u32>,
485}
486
487#[derive(Debug)]
488struct IndentSuggestion {
489 basis_row: u32,
490 delta: Ordering,
491 within_error: bool,
492}
493
494struct BufferChunkHighlights<'a> {
495 captures: SyntaxMapCaptures<'a>,
496 next_capture: Option<SyntaxMapCapture<'a>>,
497 stack: Vec<(usize, HighlightId)>,
498 highlight_maps: Vec<HighlightMap>,
499}
500
501/// An iterator that yields chunks of a buffer's text, along with their
502/// syntax highlights and diagnostic status.
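///
/// A hedged sketch of consuming the iterator (`chunks` is assumed to have been
/// obtained from a buffer snapshot):
///
/// ```ignore
/// for chunk in chunks {
///     // Each chunk carries its text along with optional highlight and
///     // diagnostic metadata.
///     println!("{:?} {:?}", chunk.text, chunk.syntax_highlight_id);
/// }
/// ```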
503pub struct BufferChunks<'a> {
504 buffer_snapshot: Option<&'a BufferSnapshot>,
505 range: Range<usize>,
506 chunks: text::Chunks<'a>,
507 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
508 error_depth: usize,
509 warning_depth: usize,
510 information_depth: usize,
511 hint_depth: usize,
512 unnecessary_depth: usize,
513 highlights: Option<BufferChunkHighlights<'a>>,
514}
515
516/// A chunk of a buffer's text, along with its syntax highlight and
517/// diagnostic status.
518#[derive(Clone, Debug, Default)]
519pub struct Chunk<'a> {
520 /// The text of the chunk.
521 pub text: &'a str,
522 /// The syntax highlighting style of the chunk.
523 pub syntax_highlight_id: Option<HighlightId>,
524 /// The highlight style that has been applied to this chunk in
525 /// the editor.
526 pub highlight_style: Option<HighlightStyle>,
527 /// The severity of diagnostic associated with this chunk, if any.
528 pub diagnostic_severity: Option<DiagnosticSeverity>,
529 /// Whether this chunk of text is marked as unnecessary.
530 pub is_unnecessary: bool,
531 /// Whether this chunk of text was originally a tab character.
532 pub is_tab: bool,
533 /// An optional recipe for how the chunk should be presented.
534 pub renderer: Option<ChunkRenderer>,
535}
536
537/// A recipe for how the chunk should be presented.
538#[derive(Clone)]
539pub struct ChunkRenderer {
    /// Creates a custom element to represent this chunk.
541 pub render: Arc<dyn Send + Sync + Fn(&mut ChunkRendererContext) -> AnyElement>,
542 /// If true, the element is constrained to the shaped width of the text.
543 pub constrain_width: bool,
544}
545
546pub struct ChunkRendererContext<'a, 'b> {
547 pub context: &'a mut WindowContext<'b>,
548 pub max_width: Pixels,
549}
550
551impl fmt::Debug for ChunkRenderer {
552 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
553 f.debug_struct("ChunkRenderer")
554 .field("constrain_width", &self.constrain_width)
555 .finish()
556 }
557}
558
559impl<'a, 'b> Deref for ChunkRendererContext<'a, 'b> {
560 type Target = WindowContext<'b>;
561
562 fn deref(&self) -> &Self::Target {
563 self.context
564 }
565}
566
567impl<'a, 'b> DerefMut for ChunkRendererContext<'a, 'b> {
568 fn deref_mut(&mut self) -> &mut Self::Target {
569 self.context
570 }
571}
572
573/// A set of edits to a given version of a buffer, computed asynchronously.
574#[derive(Debug)]
575pub struct Diff {
576 pub(crate) base_version: clock::Global,
577 line_ending: LineEnding,
578 edits: Vec<(Range<usize>, Arc<str>)>,
579}
580
581#[derive(Clone, Copy)]
582pub(crate) struct DiagnosticEndpoint {
583 offset: usize,
584 is_start: bool,
585 severity: DiagnosticSeverity,
586 is_unnecessary: bool,
587}
588
589/// A class of characters, used for characterizing a run of text.
590#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
591pub enum CharKind {
592 /// Whitespace.
593 Whitespace,
594 /// Punctuation.
595 Punctuation,
596 /// Word.
597 Word,
598}
599
/// A runnable is a set of data about a region that could be resolved into a task.
601pub struct Runnable {
602 pub tags: SmallVec<[RunnableTag; 1]>,
603 pub language: Arc<Language>,
604 pub buffer: BufferId,
605}
606
607#[derive(Clone, Debug, PartialEq)]
608pub struct IndentGuide {
609 pub buffer_id: BufferId,
610 pub start_row: BufferRow,
611 pub end_row: BufferRow,
612 pub depth: u32,
613 pub tab_size: u32,
614 pub settings: IndentGuideSettings,
615}
616
617impl IndentGuide {
618 pub fn indent_level(&self) -> u32 {
619 self.depth * self.tab_size
620 }
621}
622
623impl Buffer {
624 /// Create a new buffer with the given base text.
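    ///
    /// A minimal usage sketch (assumes a context that can create models, such as
    /// an `AppContext`):
    ///
    /// ```ignore
    /// let buffer = cx.new_model(|cx| Buffer::local("hello world", cx));
    /// ```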
625 pub fn local<T: Into<String>>(base_text: T, cx: &ModelContext<Self>) -> Self {
626 Self::build(
627 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
628 None,
629 None,
630 Capability::ReadWrite,
631 )
632 }
633
634 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
635 pub fn local_normalized(
636 base_text_normalized: Rope,
637 line_ending: LineEnding,
638 cx: &ModelContext<Self>,
639 ) -> Self {
640 Self::build(
641 TextBuffer::new_normalized(
642 0,
643 cx.entity_id().as_non_zero_u64().into(),
644 line_ending,
645 base_text_normalized,
646 ),
647 None,
648 None,
649 Capability::ReadWrite,
650 )
651 }
652
653 /// Create a new buffer that is a replica of a remote buffer.
654 pub fn remote(
655 remote_id: BufferId,
656 replica_id: ReplicaId,
657 capability: Capability,
658 base_text: impl Into<String>,
659 ) -> Self {
660 Self::build(
661 TextBuffer::new(replica_id, remote_id, base_text.into()),
662 None,
663 None,
664 capability,
665 )
666 }
667
668 /// Create a new buffer that is a replica of a remote buffer, populating its
669 /// state from the given protobuf message.
670 pub fn from_proto(
671 replica_id: ReplicaId,
672 capability: Capability,
673 message: proto::BufferState,
674 file: Option<Arc<dyn File>>,
675 ) -> Result<Self> {
676 let buffer_id = BufferId::new(message.id)
677 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
678 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
679 let mut this = Self::build(buffer, message.diff_base, file, capability);
680 this.text.set_line_ending(proto::deserialize_line_ending(
681 rpc::proto::LineEnding::from_i32(message.line_ending)
682 .ok_or_else(|| anyhow!("missing line_ending"))?,
683 ));
684 this.saved_version = proto::deserialize_version(&message.saved_version);
685 this.saved_mtime = message.saved_mtime.map(|time| time.into());
686 Ok(this)
687 }
688
689 /// Serialize the buffer's state to a protobuf message.
690 pub fn to_proto(&self, cx: &AppContext) -> proto::BufferState {
691 proto::BufferState {
692 id: self.remote_id().into(),
693 file: self.file.as_ref().map(|f| f.to_proto(cx)),
694 base_text: self.base_text().to_string(),
695 diff_base: self.diff_base().as_ref().map(|h| h.to_string()),
696 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
697 saved_version: proto::serialize_version(&self.saved_version),
698 saved_mtime: self.saved_mtime.map(|time| time.into()),
699 }
700 }
701
702 /// Serialize as protobufs all of the changes to the buffer since the given version.
703 pub fn serialize_ops(
704 &self,
705 since: Option<clock::Global>,
706 cx: &AppContext,
707 ) -> Task<Vec<proto::Operation>> {
708 let mut operations = Vec::new();
709 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
710
711 operations.extend(self.remote_selections.iter().map(|(_, set)| {
712 proto::serialize_operation(&Operation::UpdateSelections {
713 selections: set.selections.clone(),
714 lamport_timestamp: set.lamport_timestamp,
715 line_mode: set.line_mode,
716 cursor_shape: set.cursor_shape,
717 })
718 }));
719
720 for (server_id, diagnostics) in &self.diagnostics {
721 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
722 lamport_timestamp: self.diagnostics_timestamp,
723 server_id: *server_id,
724 diagnostics: diagnostics.iter().cloned().collect(),
725 }));
726 }
727
728 for (server_id, completions) in &self.completion_triggers_per_language_server {
729 operations.push(proto::serialize_operation(
730 &Operation::UpdateCompletionTriggers {
731 triggers: completions.iter().cloned().collect(),
732 lamport_timestamp: self.completion_triggers_timestamp,
733 server_id: *server_id,
734 },
735 ));
736 }
737
738 let text_operations = self.text.operations().clone();
739 cx.background_executor().spawn(async move {
740 let since = since.unwrap_or_default();
741 operations.extend(
742 text_operations
743 .iter()
744 .filter(|(_, op)| !since.observed(op.timestamp()))
745 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
746 );
747 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
748 operations
749 })
750 }
751
752 /// Assign a language to the buffer, returning the buffer.
753 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
754 self.set_language(Some(language), cx);
755 self
756 }
757
758 /// Returns the [`Capability`] of this buffer.
759 pub fn capability(&self) -> Capability {
760 self.capability
761 }
762
763 /// Whether this buffer can only be read.
764 pub fn read_only(&self) -> bool {
765 self.capability == Capability::ReadOnly
766 }
767
768 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
769 pub fn build(
770 buffer: TextBuffer,
771 diff_base: Option<String>,
772 file: Option<Arc<dyn File>>,
773 capability: Capability,
774 ) -> Self {
775 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
776 let snapshot = buffer.snapshot();
777 let git_diff = git::diff::BufferDiff::new(&snapshot);
778 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
779 Self {
780 saved_mtime,
781 saved_version: buffer.version(),
782 preview_version: buffer.version(),
783 reload_task: None,
784 transaction_depth: 0,
785 was_dirty_before_starting_transaction: None,
786 has_unsaved_edits: Cell::new((buffer.version(), false)),
787 text: buffer,
788 diff_base: diff_base.map(|mut raw_diff_base| {
789 LineEnding::normalize(&mut raw_diff_base);
790 BufferDiffBase::Git(Rope::from(raw_diff_base))
791 }),
792 diff_base_version: 0,
793 git_diff,
794 file,
795 capability,
796 syntax_map,
797 parsing_in_background: false,
798 non_text_state_update_count: 0,
799 sync_parse_timeout: Duration::from_millis(1),
800 parse_status: async_watch::channel(ParseStatus::Idle),
801 autoindent_requests: Default::default(),
802 pending_autoindent: Default::default(),
803 language: None,
804 remote_selections: Default::default(),
805 diagnostics: Default::default(),
806 diagnostics_timestamp: Default::default(),
807 completion_triggers: Default::default(),
808 completion_triggers_per_language_server: Default::default(),
809 completion_triggers_timestamp: Default::default(),
810 deferred_ops: OperationQueue::new(),
811 has_conflict: false,
812 _subscriptions: Vec::new(),
813 }
814 }
815
816 /// Retrieve a snapshot of the buffer's current state. This is computationally
817 /// cheap, and allows reading from the buffer on a background thread.
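    ///
    /// A hedged sketch of reading from a snapshot on a background thread (the
    /// word-count computation is illustrative):
    ///
    /// ```ignore
    /// let snapshot = buffer.snapshot();
    /// cx.background_executor()
    ///     .spawn(async move {
    ///         let word_count = snapshot.text().split_whitespace().count();
    ///     })
    ///     .detach();
    /// ```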
818 pub fn snapshot(&self) -> BufferSnapshot {
819 let text = self.text.snapshot();
820 let mut syntax_map = self.syntax_map.lock();
821 syntax_map.interpolate(&text);
822 let syntax = syntax_map.snapshot();
823
824 BufferSnapshot {
825 text,
826 syntax,
827 git_diff: self.git_diff.clone(),
828 file: self.file.clone(),
829 remote_selections: self.remote_selections.clone(),
830 diagnostics: self.diagnostics.clone(),
831 language: self.language.clone(),
832 non_text_state_update_count: self.non_text_state_update_count,
833 }
834 }
835
836 pub fn branch(&mut self, cx: &mut ModelContext<Self>) -> Model<Self> {
837 let this = cx.handle();
838 cx.new_model(|cx| {
839 let mut branch = Self {
840 diff_base: Some(BufferDiffBase::PastBufferVersion {
841 buffer: this.clone(),
842 rope: self.as_rope().clone(),
843 merged_operations: Default::default(),
844 }),
845 language: self.language.clone(),
846 has_conflict: self.has_conflict,
847 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
848 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
849 ..Self::build(
850 self.text.branch(),
851 None,
852 self.file.clone(),
853 self.capability(),
854 )
855 };
856 if let Some(language_registry) = self.language_registry() {
857 branch.set_language_registry(language_registry);
858 }
859
860 // Reparse the branch buffer so that we get syntax highlighting immediately.
861 branch.reparse(cx);
862
863 branch
864 })
865 }
866
867 /// Applies all of the changes in this buffer that intersect any of the
868 /// given `ranges` to its base buffer.
869 ///
870 /// If `ranges` is empty, then all changes will be applied. This buffer must
871 /// be a branch buffer to call this method.
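    ///
    /// An illustrative sketch (the branch buffer is assumed to have been created
    /// with [`Buffer::branch`]):
    ///
    /// ```ignore
    /// branch.update(cx, |branch, cx| {
    ///     // An empty range list applies every change back to the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```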
872 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut ModelContext<Self>) {
873 let Some(base_buffer) = self.diff_base_buffer() else {
874 debug_panic!("not a branch buffer");
875 return;
876 };
877
878 let mut ranges = if ranges.is_empty() {
879 &[0..usize::MAX]
880 } else {
881 ranges.as_slice()
882 }
883 .into_iter()
884 .peekable();
885
886 let mut edits = Vec::new();
887 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
888 let mut is_included = false;
889 while let Some(range) = ranges.peek() {
890 if range.end < edit.new.start {
891 ranges.next().unwrap();
892 } else {
893 if range.start <= edit.new.end {
894 is_included = true;
895 }
896 break;
897 }
898 }
899
900 if is_included {
901 edits.push((
902 edit.old.clone(),
903 self.text_for_range(edit.new.clone()).collect::<String>(),
904 ));
905 }
906 }
907
908 let operation = base_buffer.update(cx, |base_buffer, cx| {
909 cx.emit(BufferEvent::DiffBaseChanged);
910 base_buffer.edit(edits, None, cx)
911 });
912
913 if let Some(operation) = operation {
914 if let Some(BufferDiffBase::PastBufferVersion {
915 merged_operations, ..
916 }) = &mut self.diff_base
917 {
918 merged_operations.push(operation);
919 }
920 }
921 }
922
923 fn on_base_buffer_event(
924 &mut self,
925 _: Model<Buffer>,
926 event: &BufferEvent,
927 cx: &mut ModelContext<Self>,
928 ) {
929 let BufferEvent::Operation { operation, .. } = event else {
930 return;
931 };
932 let Some(BufferDiffBase::PastBufferVersion {
933 merged_operations, ..
934 }) = &mut self.diff_base
935 else {
936 return;
937 };
938
939 let mut operation_to_undo = None;
940 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
941 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
942 merged_operations.remove(ix);
943 operation_to_undo = Some(operation.timestamp);
944 }
945 }
946
947 self.apply_ops([operation.clone()], cx);
948
949 if let Some(timestamp) = operation_to_undo {
950 let counts = [(timestamp, u32::MAX)].into_iter().collect();
951 self.undo_operations(counts, cx);
952 }
953
954 self.diff_base_version += 1;
955 }
956
957 #[cfg(test)]
958 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
959 &self.text
960 }
961
962 /// Retrieve a snapshot of the buffer's raw text, without any
963 /// language-related state like the syntax tree or diagnostics.
964 pub fn text_snapshot(&self) -> text::BufferSnapshot {
965 self.text.snapshot()
966 }
967
968 /// The file associated with the buffer, if any.
969 pub fn file(&self) -> Option<&Arc<dyn File>> {
970 self.file.as_ref()
971 }
972
973 /// The version of the buffer that was last saved or reloaded from disk.
974 pub fn saved_version(&self) -> &clock::Global {
975 &self.saved_version
976 }
977
978 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
979 pub fn saved_mtime(&self) -> Option<SystemTime> {
980 self.saved_mtime
981 }
982
983 /// Assign a language to the buffer.
984 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
985 self.non_text_state_update_count += 1;
986 self.syntax_map.lock().clear(&self.text);
987 self.language = language;
988 self.reparse(cx);
989 cx.emit(BufferEvent::LanguageChanged);
990 }
991
992 /// Assign a language registry to the buffer. This allows the buffer to retrieve
993 /// other languages if parts of the buffer are written in different languages.
994 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
995 self.syntax_map
996 .lock()
997 .set_language_registry(language_registry);
998 }
999
1000 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1001 self.syntax_map.lock().language_registry()
1002 }
1003
1004 /// Assign the buffer a new [`Capability`].
1005 pub fn set_capability(&mut self, capability: Capability, cx: &mut ModelContext<Self>) {
1006 self.capability = capability;
1007 cx.emit(BufferEvent::CapabilityChanged)
1008 }
1009
1010 /// This method is called to signal that the buffer has been saved.
1011 pub fn did_save(
1012 &mut self,
1013 version: clock::Global,
1014 mtime: Option<SystemTime>,
1015 cx: &mut ModelContext<Self>,
1016 ) {
1017 self.saved_version = version;
1018 self.has_unsaved_edits
1019 .set((self.saved_version().clone(), false));
1020 self.has_conflict = false;
1021 self.saved_mtime = mtime;
1022 cx.emit(BufferEvent::Saved);
1023 cx.notify();
1024 }
1025
1026 /// This method is called to signal that the buffer has been discarded.
1027 pub fn discarded(&self, cx: &mut ModelContext<Self>) {
1028 cx.emit(BufferEvent::Discarded);
1029 cx.notify();
1030 }
1031
1032 /// Reloads the contents of the buffer from disk.
1033 pub fn reload(&mut self, cx: &ModelContext<Self>) -> oneshot::Receiver<Option<Transaction>> {
1034 let (tx, rx) = futures::channel::oneshot::channel();
1035 let prev_version = self.text.version();
1036 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
1037 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
1038 let file = this.file.as_ref()?.as_local()?;
1039 Some((file.disk_state().mtime(), file.load(cx)))
1040 })?
1041 else {
1042 return Ok(());
1043 };
1044
1045 let new_text = new_text.await?;
1046 let diff = this
1047 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
1048 .await;
1049 this.update(&mut cx, |this, cx| {
1050 if this.version() == diff.base_version {
1051 this.finalize_last_transaction();
1052 this.apply_diff(diff, cx);
1053 tx.send(this.finalize_last_transaction().cloned()).ok();
1054 this.has_conflict = false;
1055 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1056 } else {
1057 if !diff.edits.is_empty()
1058 || this
1059 .edits_since::<usize>(&diff.base_version)
1060 .next()
1061 .is_some()
1062 {
1063 this.has_conflict = true;
1064 }
1065
1066 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1067 }
1068
1069 this.reload_task.take();
1070 })
1071 }));
1072 rx
1073 }
1074
1075 /// This method is called to signal that the buffer has been reloaded.
1076 pub fn did_reload(
1077 &mut self,
1078 version: clock::Global,
1079 line_ending: LineEnding,
1080 mtime: Option<SystemTime>,
1081 cx: &mut ModelContext<Self>,
1082 ) {
1083 self.saved_version = version;
1084 self.has_unsaved_edits
1085 .set((self.saved_version.clone(), false));
1086 self.text.set_line_ending(line_ending);
1087 self.saved_mtime = mtime;
1088 cx.emit(BufferEvent::Reloaded);
1089 cx.notify();
1090 }
1091
1092 /// Updates the [`File`] backing this buffer. This should be called when
1093 /// the file has changed or has been deleted.
1094 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut ModelContext<Self>) {
1095 let was_dirty = self.is_dirty();
1096 let mut file_changed = false;
1097
1098 if let Some(old_file) = self.file.as_ref() {
1099 if new_file.path() != old_file.path() {
1100 file_changed = true;
1101 }
1102
1103 let old_state = old_file.disk_state();
1104 let new_state = new_file.disk_state();
1105 if old_state != new_state {
1106 file_changed = true;
1107 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1108 cx.emit(BufferEvent::ReloadNeeded)
1109 }
1110 }
1111 } else {
1112 file_changed = true;
1113 };
1114
1115 self.file = Some(new_file);
1116 if file_changed {
1117 self.non_text_state_update_count += 1;
1118 if was_dirty != self.is_dirty() {
1119 cx.emit(BufferEvent::DirtyChanged);
1120 }
1121 cx.emit(BufferEvent::FileHandleChanged);
1122 cx.notify();
1123 }
1124 }
1125
1126 /// Returns the current diff base, see [`Buffer::set_diff_base`].
1127 pub fn diff_base(&self) -> Option<&Rope> {
1128 match self.diff_base.as_ref()? {
1129 BufferDiffBase::Git(rope) | BufferDiffBase::PastBufferVersion { rope, .. } => {
1130 Some(rope)
1131 }
1132 }
1133 }
1134
1135 /// Sets the text that will be used to compute a Git diff
1136 /// against the buffer text.
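    ///
    /// A hedged sketch (`head_text` stands in for the file's text at the Git HEAD):
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.set_diff_base(Some(head_text), cx);
    /// });
    /// ```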
1137 pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &ModelContext<Self>) {
1138 self.diff_base = diff_base.map(|mut raw_diff_base| {
1139 LineEnding::normalize(&mut raw_diff_base);
1140 BufferDiffBase::Git(Rope::from(raw_diff_base))
1141 });
1142 self.diff_base_version += 1;
1143 if let Some(recalc_task) = self.recalculate_diff(cx) {
1144 cx.spawn(|buffer, mut cx| async move {
1145 recalc_task.await;
1146 buffer
1147 .update(&mut cx, |_, cx| {
1148 cx.emit(BufferEvent::DiffBaseChanged);
1149 })
1150 .ok();
1151 })
1152 .detach();
1153 }
1154 }
1155
    /// Returns a version number that changes whenever the buffer's diff base is updated.
1157 pub fn diff_base_version(&self) -> usize {
1158 self.diff_base_version
1159 }
1160
1161 pub fn diff_base_buffer(&self) -> Option<Model<Self>> {
1162 match self.diff_base.as_ref()? {
1163 BufferDiffBase::Git(_) => None,
1164 BufferDiffBase::PastBufferVersion { buffer, .. } => Some(buffer.clone()),
1165 }
1166 }
1167
1168 /// Recomputes the diff.
1169 pub fn recalculate_diff(&self, cx: &ModelContext<Self>) -> Option<Task<()>> {
1170 let diff_base_rope = match self.diff_base.as_ref()? {
1171 BufferDiffBase::Git(rope) => rope.clone(),
1172 BufferDiffBase::PastBufferVersion { buffer, .. } => buffer.read(cx).as_rope().clone(),
1173 };
1174
1175 let snapshot = self.snapshot();
1176 let mut diff = self.git_diff.clone();
1177 let diff = cx.background_executor().spawn(async move {
1178 diff.update(&diff_base_rope, &snapshot).await;
1179 (diff, diff_base_rope)
1180 });
1181
1182 Some(cx.spawn(|this, mut cx| async move {
1183 let (buffer_diff, diff_base_rope) = diff.await;
1184 this.update(&mut cx, |this, cx| {
1185 this.git_diff = buffer_diff;
1186 this.non_text_state_update_count += 1;
1187 if let Some(BufferDiffBase::PastBufferVersion { rope, .. }) = &mut this.diff_base {
1188 *rope = diff_base_rope;
1189 }
1190 cx.emit(BufferEvent::DiffUpdated);
1191 })
1192 .ok();
1193 }))
1194 }
1195
1196 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1197 pub fn language(&self) -> Option<&Arc<Language>> {
1198 self.language.as_ref()
1199 }
1200
1201 /// Returns the [`Language`] at the given location.
1202 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1203 let offset = position.to_offset(self);
1204 self.syntax_map
1205 .lock()
1206 .layers_for_range(offset..offset, &self.text, false)
1207 .last()
1208 .map(|info| info.language.clone())
1209 .or_else(|| self.language.clone())
1210 }
1211
1212 /// An integer version number that accounts for all updates besides
1213 /// the buffer's text itself (which is versioned via a version vector).
1214 pub fn non_text_state_update_count(&self) -> usize {
1215 self.non_text_state_update_count
1216 }
1217
1218 /// Whether the buffer is being parsed in the background.
1219 #[cfg(any(test, feature = "test-support"))]
1220 pub fn is_parsing(&self) -> bool {
1221 self.parsing_in_background
1222 }
1223
1224 /// Indicates whether the buffer contains any regions that may be
1225 /// written in a language that hasn't been loaded yet.
1226 pub fn contains_unknown_injections(&self) -> bool {
1227 self.syntax_map.lock().contains_unknown_injections()
1228 }
1229
1230 #[cfg(test)]
1231 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1232 self.sync_parse_timeout = timeout;
1233 }
1234
1235 /// Called after an edit to synchronize the buffer's main parse tree with
1236 /// the buffer's new underlying state.
1237 ///
1238 /// Locks the syntax map and interpolates the edits since the last reparse
1239 /// into the foreground syntax tree.
1240 ///
1241 /// Then takes a stable snapshot of the syntax map before unlocking it.
1242 /// The snapshot with the interpolated edits is sent to a background thread,
1243 /// where we ask Tree-sitter to perform an incremental parse.
1244 ///
1245 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1246 /// waiting on the parse to complete. As soon as it completes, we proceed
1247 /// synchronously, unless a 1ms timeout elapses.
1248 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree
    /// still in the foreground. When the background parse completes, it calls
    /// back into the main thread and assigns the new foreground parse state.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// we initiate an additional reparse recursively. To avoid concurrent parses
    /// for the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
1258 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1259 if self.parsing_in_background {
1260 return;
1261 }
1262 let language = if let Some(language) = self.language.clone() {
1263 language
1264 } else {
1265 return;
1266 };
1267
1268 let text = self.text_snapshot();
1269 let parsed_version = self.version();
1270
1271 let mut syntax_map = self.syntax_map.lock();
1272 syntax_map.interpolate(&text);
1273 let language_registry = syntax_map.language_registry();
1274 let mut syntax_snapshot = syntax_map.snapshot();
1275 drop(syntax_map);
1276
1277 let parse_task = cx.background_executor().spawn({
1278 let language = language.clone();
1279 let language_registry = language_registry.clone();
1280 async move {
1281 syntax_snapshot.reparse(&text, language_registry, language);
1282 syntax_snapshot
1283 }
1284 });
1285
1286 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1287 match cx
1288 .background_executor()
1289 .block_with_timeout(self.sync_parse_timeout, parse_task)
1290 {
1291 Ok(new_syntax_snapshot) => {
1292 self.did_finish_parsing(new_syntax_snapshot, cx);
1293 }
1294 Err(parse_task) => {
1295 self.parsing_in_background = true;
1296 cx.spawn(move |this, mut cx| async move {
1297 let new_syntax_map = parse_task.await;
1298 this.update(&mut cx, move |this, cx| {
1299 let grammar_changed =
1300 this.language.as_ref().map_or(true, |current_language| {
1301 !Arc::ptr_eq(&language, current_language)
1302 });
1303 let language_registry_changed = new_syntax_map
1304 .contains_unknown_injections()
1305 && language_registry.map_or(false, |registry| {
1306 registry.version() != new_syntax_map.language_registry_version()
1307 });
1308 let parse_again = language_registry_changed
1309 || grammar_changed
1310 || this.version.changed_since(&parsed_version);
1311 this.did_finish_parsing(new_syntax_map, cx);
1312 this.parsing_in_background = false;
1313 if parse_again {
1314 this.reparse(cx);
1315 }
1316 })
1317 .ok();
1318 })
1319 .detach();
1320 }
1321 }
1322 }
1323
1324 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
1325 self.non_text_state_update_count += 1;
1326 self.syntax_map.lock().did_parse(syntax_snapshot);
1327 self.request_autoindent(cx);
1328 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1329 cx.emit(BufferEvent::Reparsed);
1330 cx.notify();
1331 }
1332
1333 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1334 self.parse_status.1.clone()
1335 }
1336
1337 /// Assign to the buffer a set of diagnostics created by a given language server.
1338 pub fn update_diagnostics(
1339 &mut self,
1340 server_id: LanguageServerId,
1341 diagnostics: DiagnosticSet,
1342 cx: &mut ModelContext<Self>,
1343 ) {
1344 let lamport_timestamp = self.text.lamport_clock.tick();
1345 let op = Operation::UpdateDiagnostics {
1346 server_id,
1347 diagnostics: diagnostics.iter().cloned().collect(),
1348 lamport_timestamp,
1349 };
1350 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1351 self.send_operation(op, true, cx);
1352 }
1353
1354 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
1355 if let Some(indent_sizes) = self.compute_autoindents() {
1356 let indent_sizes = cx.background_executor().spawn(indent_sizes);
1357 match cx
1358 .background_executor()
1359 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1360 {
1361 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1362 Err(indent_sizes) => {
1363 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1364 let indent_sizes = indent_sizes.await;
1365 this.update(&mut cx, |this, cx| {
1366 this.apply_autoindents(indent_sizes, cx);
1367 })
1368 .ok();
1369 }));
1370 }
1371 }
1372 } else {
1373 self.autoindent_requests.clear();
1374 }
1375 }
1376
1377 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
1378 let max_rows_between_yields = 100;
1379 let snapshot = self.snapshot();
1380 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1381 return None;
1382 }
1383
1384 let autoindent_requests = self.autoindent_requests.clone();
1385 Some(async move {
1386 let mut indent_sizes = BTreeMap::new();
1387 for request in autoindent_requests {
1388 // Resolve each edited range to its row in the current buffer and in the
1389 // buffer before this batch of edits.
1390 let mut row_ranges = Vec::new();
1391 let mut old_to_new_rows = BTreeMap::new();
1392 let mut language_indent_sizes_by_new_row = Vec::new();
1393 for entry in &request.entries {
1394 let position = entry.range.start;
1395 let new_row = position.to_point(&snapshot).row;
1396 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1397 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1398
1399 if !entry.first_line_is_new {
1400 let old_row = position.to_point(&request.before_edit).row;
1401 old_to_new_rows.insert(old_row, new_row);
1402 }
1403 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1404 }
1405
1406 // Build a map containing the suggested indentation for each of the edited lines
1407 // with respect to the state of the buffer before these edits. This map is keyed
1408 // by the rows for these lines in the current state of the buffer.
1409 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1410 let old_edited_ranges =
1411 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1412 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1413 let mut language_indent_size = IndentSize::default();
1414 for old_edited_range in old_edited_ranges {
1415 let suggestions = request
1416 .before_edit
1417 .suggest_autoindents(old_edited_range.clone())
1418 .into_iter()
1419 .flatten();
1420 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1421 if let Some(suggestion) = suggestion {
1422 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1423
1424 // Find the indent size based on the language for this row.
1425 while let Some((row, size)) = language_indent_sizes.peek() {
1426 if *row > new_row {
1427 break;
1428 }
1429 language_indent_size = *size;
1430 language_indent_sizes.next();
1431 }
1432
1433 let suggested_indent = old_to_new_rows
1434 .get(&suggestion.basis_row)
1435 .and_then(|from_row| {
1436 Some(old_suggestions.get(from_row).copied()?.0)
1437 })
1438 .unwrap_or_else(|| {
1439 request
1440 .before_edit
1441 .indent_size_for_line(suggestion.basis_row)
1442 })
1443 .with_delta(suggestion.delta, language_indent_size);
1444 old_suggestions
1445 .insert(new_row, (suggested_indent, suggestion.within_error));
1446 }
1447 }
1448 yield_now().await;
1449 }
1450
1451 // Compute new suggestions for each line, but only include them in the result
1452 // if they differ from the old suggestion for that line.
1453 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1454 let mut language_indent_size = IndentSize::default();
1455 for (row_range, original_indent_column) in row_ranges {
1456 let new_edited_row_range = if request.is_block_mode {
1457 row_range.start..row_range.start + 1
1458 } else {
1459 row_range.clone()
1460 };
1461
1462 let suggestions = snapshot
1463 .suggest_autoindents(new_edited_row_range.clone())
1464 .into_iter()
1465 .flatten();
1466 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1467 if let Some(suggestion) = suggestion {
1468 // Find the indent size based on the language for this row.
1469 while let Some((row, size)) = language_indent_sizes.peek() {
1470 if *row > new_row {
1471 break;
1472 }
1473 language_indent_size = *size;
1474 language_indent_sizes.next();
1475 }
1476
1477 let suggested_indent = indent_sizes
1478 .get(&suggestion.basis_row)
1479 .copied()
1480 .unwrap_or_else(|| {
1481 snapshot.indent_size_for_line(suggestion.basis_row)
1482 })
1483 .with_delta(suggestion.delta, language_indent_size);
1484 if old_suggestions.get(&new_row).map_or(
1485 true,
1486 |(old_indentation, was_within_error)| {
1487 suggested_indent != *old_indentation
1488 && (!suggestion.within_error || *was_within_error)
1489 },
1490 ) {
1491 indent_sizes.insert(new_row, suggested_indent);
1492 }
1493 }
1494 }
1495
1496 if let (true, Some(original_indent_column)) =
1497 (request.is_block_mode, original_indent_column)
1498 {
1499 let new_indent = indent_sizes
1500 .get(&row_range.start)
1501 .copied()
1502 .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
1503 let delta = new_indent.len as i64 - original_indent_column as i64;
1504 if delta != 0 {
1505 for row in row_range.skip(1) {
1506 indent_sizes.entry(row).or_insert_with(|| {
1507 let mut size = snapshot.indent_size_for_line(row);
1508 if size.kind == new_indent.kind {
1509 match delta.cmp(&0) {
1510 Ordering::Greater => size.len += delta as u32,
1511 Ordering::Less => {
1512 size.len = size.len.saturating_sub(-delta as u32)
1513 }
1514 Ordering::Equal => {}
1515 }
1516 }
1517 size
1518 });
1519 }
1520 }
1521 }
1522
1523 yield_now().await;
1524 }
1525 }
1526
1527 indent_sizes
1528 })
1529 }
1530
1531 fn apply_autoindents(
1532 &mut self,
1533 indent_sizes: BTreeMap<u32, IndentSize>,
1534 cx: &mut ModelContext<Self>,
1535 ) {
1536 self.autoindent_requests.clear();
1537
1538 let edits: Vec<_> = indent_sizes
1539 .into_iter()
1540 .filter_map(|(row, indent_size)| {
1541 let current_size = indent_size_for_line(self, row);
1542 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1543 })
1544 .collect();
1545
1546 let preserve_preview = self.preserve_preview();
1547 self.edit(edits, None, cx);
1548 if preserve_preview {
1549 self.refresh_preview();
1550 }
1551 }
1552
1553 /// Create a minimal edit that will cause the given row to be indented
1554 /// with the given size. After applying this edit, the length of the line
1555 /// will always be at least `new_size.len`.
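    ///
    /// A worked example (values are illustrative): growing row 3 from two spaces
    /// to four spaces of indentation produces an insertion at the start of the line:
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize { len: 2, kind: IndentKind::Space },
    ///     IndentSize { len: 4, kind: IndentKind::Space },
    /// );
    /// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
    /// ```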
1556 pub fn edit_for_indent_size_adjustment(
1557 row: u32,
1558 current_size: IndentSize,
1559 new_size: IndentSize,
1560 ) -> Option<(Range<Point>, String)> {
1561 if new_size.kind == current_size.kind {
        match new_size.len.cmp(&current_size.len) {
1563 Ordering::Greater => {
1564 let point = Point::new(row, 0);
1565 Some((
1566 point..point,
1567 iter::repeat(new_size.char())
1568 .take((new_size.len - current_size.len) as usize)
1569 .collect::<String>(),
1570 ))
1571 }
1572
1573 Ordering::Less => Some((
1574 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1575 String::new(),
1576 )),
1577
1578 Ordering::Equal => None,
1579 }
1580 } else {
1581 Some((
1582 Point::new(row, 0)..Point::new(row, current_size.len),
1583 iter::repeat(new_size.char())
1584 .take(new_size.len as usize)
1585 .collect::<String>(),
1586 ))
1587 }
1588 }
1589
1590 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1591 /// and the given new text.
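    ///
    /// A hedged sketch of pairing this with [`Buffer::apply_diff`] (the async
    /// context and `new_text` value are assumed):
    ///
    /// ```ignore
    /// let diff = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx));
    /// let diff = diff.await;
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.apply_diff(diff, cx);
    /// });
    /// ```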
1592 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1593 let old_text = self.as_rope().clone();
1594 let base_version = self.version();
1595 cx.background_executor()
1596 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1597 let old_text = old_text.to_string();
1598 let line_ending = LineEnding::detect(&new_text);
1599 LineEnding::normalize(&mut new_text);
1600
1601 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1602 let empty: Arc<str> = Arc::default();
1603
1604 let mut edits = Vec::new();
1605 let mut old_offset = 0;
1606 let mut new_offset = 0;
1607 let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
1608 for change in diff.iter_all_changes().map(Some).chain([None]) {
1609 if let Some(change) = &change {
1610 let len = change.value().len();
1611 match change.tag() {
1612 ChangeTag::Equal => {
1613 old_offset += len;
1614 new_offset += len;
1615 }
1616 ChangeTag::Delete => {
1617 let old_end_offset = old_offset + len;
1618 if let Some((last_old_range, _)) = &mut last_edit {
1619 last_old_range.end = old_end_offset;
1620 } else {
1621 last_edit =
1622 Some((old_offset..old_end_offset, new_offset..new_offset));
1623 }
1624 old_offset = old_end_offset;
1625 }
1626 ChangeTag::Insert => {
1627 let new_end_offset = new_offset + len;
1628 if let Some((_, last_new_range)) = &mut last_edit {
1629 last_new_range.end = new_end_offset;
1630 } else {
1631 last_edit =
1632 Some((old_offset..old_offset, new_offset..new_end_offset));
1633 }
1634 new_offset = new_end_offset;
1635 }
1636 }
1637 }
1638
1639 if let Some((old_range, new_range)) = &last_edit {
1640 if old_offset > old_range.end
1641 || new_offset > new_range.end
1642 || change.is_none()
1643 {
1644 let text = if new_range.is_empty() {
1645 empty.clone()
1646 } else {
1647 new_text[new_range.clone()].into()
1648 };
1649 edits.push((old_range.clone(), text));
1650 last_edit.take();
1651 }
1652 }
1653 }
1654
1655 Diff {
1656 base_version,
1657 line_ending,
1658 edits,
1659 }
1660 })
1661 }
1662
    /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1665 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1666 let old_text = self.as_rope().clone();
1667 let line_ending = self.line_ending();
1668 let base_version = self.version();
1669 cx.background_executor().spawn(async move {
1670 let ranges = trailing_whitespace_ranges(&old_text);
1671 let empty = Arc::<str>::from("");
1672 Diff {
1673 base_version,
1674 line_ending,
1675 edits: ranges
1676 .into_iter()
1677 .map(|range| (range, empty.clone()))
1678 .collect(),
1679 }
1680 })
1681 }
1682
1683 /// Ensures that the buffer ends with a single newline character, and
1684 /// no other whitespace.
1685 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1686 let len = self.len();
1687 let mut offset = len;
1688 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1689 let non_whitespace_len = chunk
1690 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1691 .len();
1692 offset -= chunk.len();
1693 offset += non_whitespace_len;
1694 if non_whitespace_len != 0 {
1695 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1696 return;
1697 }
1698 break;
1699 }
1700 }
1701 self.edit([(offset..len, "\n")], None, cx);
1702 }
1703
1704 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1705 /// calculated, then adjust the diff to account for those changes, and discard any
1706 /// parts of the diff that conflict with those changes.
1707 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1708 // Check for any edits to the buffer that have occurred since this diff
1709 // was computed.
1710 let snapshot = self.snapshot();
1711 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1712 let mut delta = 0;
1713 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1714 while let Some(edit_since) = edits_since.peek() {
1715 // If the edit occurs after a diff hunk, then it does not
1716 // affect that hunk.
1717 if edit_since.old.start > range.end {
1718 break;
1719 }
1720 // If the edit precedes the diff hunk, then adjust the hunk
1721 // to reflect the edit.
1722 else if edit_since.old.end < range.start {
1723 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1724 edits_since.next();
1725 }
1726 // If the edit intersects a diff hunk, then discard that hunk.
1727 else {
1728 return None;
1729 }
1730 }
1731
1732 let start = (range.start as i64 + delta) as usize;
1733 let end = (range.end as i64 + delta) as usize;
1734 Some((start..end, new_text))
1735 });
1736
1737 self.start_transaction();
1738 self.text.set_line_ending(diff.line_ending);
1739 self.edit(adjusted_edits, None, cx);
1740 self.end_transaction(cx)
1741 }
1742
1743 fn has_unsaved_edits(&self) -> bool {
1744 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1745
1746 if last_version == self.version {
1747 self.has_unsaved_edits
1748 .set((last_version, has_unsaved_edits));
1749 return has_unsaved_edits;
1750 }
1751
1752 let has_edits = self.has_edits_since(&self.saved_version);
1753 self.has_unsaved_edits
1754 .set((self.version.clone(), has_edits));
1755 has_edits
1756 }
1757
1758 /// Checks if the buffer has unsaved changes.
1759 pub fn is_dirty(&self) -> bool {
1760 self.capability != Capability::ReadOnly
1761 && (self.has_conflict
1762 || self.file.as_ref().map_or(false, |file| {
1763 matches!(file.disk_state(), DiskState::New | DiskState::Deleted)
1764 })
1765 || self.has_unsaved_edits())
1766 }
1767
1768 /// Checks if the buffer and its file have both changed since the buffer
1769 /// was last saved or reloaded.
1770 pub fn has_conflict(&self) -> bool {
1771 if self.has_conflict {
1772 return true;
1773 }
1774 let Some(file) = self.file.as_ref() else {
1775 return false;
1776 };
1777 match file.disk_state() {
1778 DiskState::New | DiskState::Deleted => true,
1779 DiskState::Present { mtime } => match self.saved_mtime {
1780 Some(saved_mtime) => mtime > saved_mtime && self.has_unsaved_edits(),
1781 None => true,
1782 },
1783 }
1784 }
1785
1786 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1787 pub fn subscribe(&mut self) -> Subscription {
1788 self.text.subscribe()
1789 }
1790
1791 /// Starts a transaction, if one is not already in-progress. When undoing or
1792 /// redoing edits, all of the edits performed within a transaction are undone
1793 /// or redone together.
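    ///
    /// A hedged sketch of grouping two edits into a single undo step (the offsets and text
    /// are illustrative, and `cx` is assumed to be a `ModelContext<Buffer>`):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(12..12, "}\n")], None, cx);
    /// buffer.end_transaction(cx); // undoing now reverts both edits together
    /// ```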
1794 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1795 self.start_transaction_at(Instant::now())
1796 }
1797
1798 /// Starts a transaction, providing the current time. Subsequent transactions
1799 /// that occur within a short period of time will be grouped together. This
1800 /// is controlled by the buffer's undo grouping duration.
1801 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1802 self.transaction_depth += 1;
1803 if self.was_dirty_before_starting_transaction.is_none() {
1804 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1805 }
1806 self.text.start_transaction_at(now)
1807 }
1808
1809 /// Terminates the current transaction, if this is the outermost transaction.
1810 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1811 self.end_transaction_at(Instant::now(), cx)
1812 }
1813
1814 /// Terminates the current transaction, providing the current time. Subsequent transactions
1815 /// that occur within a short period of time will be grouped together. This
1816 /// is controlled by the buffer's undo grouping duration.
1817 pub fn end_transaction_at(
1818 &mut self,
1819 now: Instant,
1820 cx: &mut ModelContext<Self>,
1821 ) -> Option<TransactionId> {
1822 assert!(self.transaction_depth > 0);
1823 self.transaction_depth -= 1;
1824 let was_dirty = if self.transaction_depth == 0 {
1825 self.was_dirty_before_starting_transaction.take().unwrap()
1826 } else {
1827 false
1828 };
1829 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1830 self.did_edit(&start_version, was_dirty, cx);
1831 Some(transaction_id)
1832 } else {
1833 None
1834 }
1835 }
1836
1837 /// Manually add a transaction to the buffer's undo history.
1838 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1839 self.text.push_transaction(transaction, now);
1840 }
1841
1842 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
1844 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1845 self.text.finalize_last_transaction()
1846 }
1847
1848 /// Manually group all changes since a given transaction.
1849 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1850 self.text.group_until_transaction(transaction_id);
1851 }
1852
    /// Manually remove a transaction from the buffer's undo history.
1854 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1855 self.text.forget_transaction(transaction_id);
1856 }
1857
1858 /// Manually merge two adjacent transactions in the buffer's undo history.
1859 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1860 self.text.merge_transactions(transaction, destination);
1861 }
1862
1863 /// Waits for the buffer to receive operations with the given timestamps.
1864 pub fn wait_for_edits(
1865 &mut self,
1866 edit_ids: impl IntoIterator<Item = clock::Lamport>,
1867 ) -> impl Future<Output = Result<()>> {
1868 self.text.wait_for_edits(edit_ids)
1869 }
1870
1871 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
1872 pub fn wait_for_anchors(
1873 &mut self,
1874 anchors: impl IntoIterator<Item = Anchor>,
1875 ) -> impl 'static + Future<Output = Result<()>> {
1876 self.text.wait_for_anchors(anchors)
1877 }
1878
1879 /// Waits for the buffer to receive operations up to the given version.
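    ///
    /// A hedged sketch (assuming an async context where the future can be awaited, and that
    /// `version` was captured from another replica of this buffer):
    ///
    /// ```ignore
    /// buffer
    ///     .update(cx, |buffer, _| buffer.wait_for_version(version))
    ///     .await?;
    /// ```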
1880 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1881 self.text.wait_for_version(version)
1882 }
1883
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
    /// or [`Buffer::wait_for_version`] to resolve with an error.
1886 pub fn give_up_waiting(&mut self) {
1887 self.text.give_up_waiting();
1888 }
1889
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
1891 pub fn set_active_selections(
1892 &mut self,
1893 selections: Arc<[Selection<Anchor>]>,
1894 line_mode: bool,
1895 cursor_shape: CursorShape,
1896 cx: &mut ModelContext<Self>,
1897 ) {
1898 let lamport_timestamp = self.text.lamport_clock.tick();
1899 self.remote_selections.insert(
1900 self.text.replica_id(),
1901 SelectionSet {
1902 selections: selections.clone(),
1903 lamport_timestamp,
1904 line_mode,
1905 cursor_shape,
1906 },
1907 );
1908 self.send_operation(
1909 Operation::UpdateSelections {
1910 selections,
1911 line_mode,
1912 lamport_timestamp,
1913 cursor_shape,
1914 },
1915 true,
1916 cx,
1917 );
1918 self.non_text_state_update_count += 1;
1919 cx.notify();
1920 }
1921
1922 /// Clears the selections, so that other replicas of the buffer do not see any selections for
1923 /// this replica.
1924 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1925 if self
1926 .remote_selections
1927 .get(&self.text.replica_id())
1928 .map_or(true, |set| !set.selections.is_empty())
1929 {
1930 self.set_active_selections(Arc::default(), false, Default::default(), cx);
1931 }
1932 }
1933
1934 /// Replaces the buffer's entire text.
1935 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Lamport>
1936 where
1937 T: Into<Arc<str>>,
1938 {
1939 self.autoindent_requests.clear();
1940 self.edit([(0..self.len(), text)], None, cx)
1941 }
1942
1943 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
1944 /// delete, and a string of text to insert at that location.
1945 ///
1946 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
1947 /// request for the edited ranges, which will be processed when the buffer finishes
1948 /// parsing.
1949 ///
1950 /// Parsing takes place at the end of a transaction, and may compute synchronously
1951 /// or asynchronously, depending on the changes.
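    ///
    /// A hedged sketch (the offsets and replacement text are illustrative):
    ///
    /// ```ignore
    /// // Replace the first three bytes and insert a snippet at offset 10,
    /// // auto-indenting each edited line.
    /// buffer.edit(
    ///     [(0..3, "let"), (10..10, " = 1;")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```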
1952 pub fn edit<I, S, T>(
1953 &mut self,
1954 edits_iter: I,
1955 autoindent_mode: Option<AutoindentMode>,
1956 cx: &mut ModelContext<Self>,
1957 ) -> Option<clock::Lamport>
1958 where
1959 I: IntoIterator<Item = (Range<S>, T)>,
1960 S: ToOffset,
1961 T: Into<Arc<str>>,
1962 {
1963 // Skip invalid edits and coalesce contiguous ones.
1964 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1965 for (range, new_text) in edits_iter {
1966 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1967 if range.start > range.end {
1968 mem::swap(&mut range.start, &mut range.end);
1969 }
1970 let new_text = new_text.into();
1971 if !new_text.is_empty() || !range.is_empty() {
1972 if let Some((prev_range, prev_text)) = edits.last_mut() {
1973 if prev_range.end >= range.start {
1974 prev_range.end = cmp::max(prev_range.end, range.end);
1975 *prev_text = format!("{prev_text}{new_text}").into();
1976 } else {
1977 edits.push((range, new_text));
1978 }
1979 } else {
1980 edits.push((range, new_text));
1981 }
1982 }
1983 }
1984 if edits.is_empty() {
1985 return None;
1986 }
1987
1988 self.start_transaction();
1989 self.pending_autoindent.take();
1990 let autoindent_request = autoindent_mode
1991 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1992
1993 let edit_operation = self.text.edit(edits.iter().cloned());
1994 let edit_id = edit_operation.timestamp();
1995
1996 if let Some((before_edit, mode)) = autoindent_request {
1997 let mut delta = 0isize;
1998 let entries = edits
1999 .into_iter()
2000 .enumerate()
2001 .zip(&edit_operation.as_edit().unwrap().new_text)
2002 .map(|((ix, (range, _)), new_text)| {
2003 let new_text_length = new_text.len();
2004 let old_start = range.start.to_point(&before_edit);
2005 let new_start = (delta + range.start as isize) as usize;
2006 let range_len = range.end - range.start;
2007 delta += new_text_length as isize - range_len as isize;
2008
2009 // Decide what range of the insertion to auto-indent, and whether
2010 // the first line of the insertion should be considered a newly-inserted line
2011 // or an edit to an existing line.
2012 let mut range_of_insertion_to_indent = 0..new_text_length;
2013 let mut first_line_is_new = true;
2014
2015 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2016 let old_line_end = before_edit.line_len(old_start.row);
2017
2018 if old_start.column > old_line_start {
2019 first_line_is_new = false;
2020 }
2021
2022 if !new_text.contains('\n')
2023 && (old_start.column + (range_len as u32) < old_line_end
2024 || old_line_end == old_line_start)
2025 {
2026 first_line_is_new = false;
2027 }
2028
2029 // When inserting text starting with a newline, avoid auto-indenting the
2030 // previous line.
2031 if new_text.starts_with('\n') {
2032 range_of_insertion_to_indent.start += 1;
2033 first_line_is_new = true;
2034 }
2035
2036 let mut original_indent_column = None;
2037 if let AutoindentMode::Block {
2038 original_indent_columns,
2039 } = &mode
2040 {
2041 original_indent_column =
2042 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
2043 indent_size_for_text(
2044 new_text[range_of_insertion_to_indent.clone()].chars(),
2045 )
2046 .len
2047 }));
2048
2049 // Avoid auto-indenting the line after the edit.
2050 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2051 range_of_insertion_to_indent.end -= 1;
2052 }
2053 }
2054
2055 AutoindentRequestEntry {
2056 first_line_is_new,
2057 original_indent_column,
2058 indent_size: before_edit.language_indent_size_at(range.start, cx),
2059 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2060 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2061 }
2062 })
2063 .collect();
2064
2065 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2066 before_edit,
2067 entries,
2068 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2069 }));
2070 }
2071
2072 self.end_transaction(cx);
2073 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2074 Some(edit_id)
2075 }
2076
2077 fn did_edit(
2078 &mut self,
2079 old_version: &clock::Global,
2080 was_dirty: bool,
2081 cx: &mut ModelContext<Self>,
2082 ) {
2083 if self.edits_since::<usize>(old_version).next().is_none() {
2084 return;
2085 }
2086
2087 self.reparse(cx);
2088
2089 cx.emit(BufferEvent::Edited);
2090 if was_dirty != self.is_dirty() {
2091 cx.emit(BufferEvent::DirtyChanged);
2092 }
2093 cx.notify();
2094 }
2095
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2098 pub fn insert_empty_line(
2099 &mut self,
2100 position: impl ToPoint,
2101 space_above: bool,
2102 space_below: bool,
2103 cx: &mut ModelContext<Self>,
2104 ) -> Point {
2105 let mut position = position.to_point(self);
2106
2107 self.start_transaction();
2108
2109 self.edit(
2110 [(position..position, "\n")],
2111 Some(AutoindentMode::EachLine),
2112 cx,
2113 );
2114
2115 if position.column > 0 {
2116 position += Point::new(1, 0);
2117 }
2118
2119 if !self.is_line_blank(position.row) {
2120 self.edit(
2121 [(position..position, "\n")],
2122 Some(AutoindentMode::EachLine),
2123 cx,
2124 );
2125 }
2126
2127 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2128 self.edit(
2129 [(position..position, "\n")],
2130 Some(AutoindentMode::EachLine),
2131 cx,
2132 );
2133 position.row += 1;
2134 }
2135
2136 if space_below
2137 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2138 {
2139 self.edit(
2140 [(position..position, "\n")],
2141 Some(AutoindentMode::EachLine),
2142 cx,
2143 );
2144 }
2145
2146 self.end_transaction(cx);
2147
2148 position
2149 }
2150
2151 /// Applies the given remote operations to the buffer.
2152 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
2153 &mut self,
2154 ops: I,
2155 cx: &mut ModelContext<Self>,
2156 ) {
2157 self.pending_autoindent.take();
2158 let was_dirty = self.is_dirty();
2159 let old_version = self.version.clone();
2160 let mut deferred_ops = Vec::new();
2161 let buffer_ops = ops
2162 .into_iter()
2163 .filter_map(|op| match op {
2164 Operation::Buffer(op) => Some(op),
2165 _ => {
2166 if self.can_apply_op(&op) {
2167 self.apply_op(op, cx);
2168 } else {
2169 deferred_ops.push(op);
2170 }
2171 None
2172 }
2173 })
2174 .collect::<Vec<_>>();
2175 for operation in buffer_ops.iter() {
2176 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2177 }
2178 self.text.apply_ops(buffer_ops);
2179 self.deferred_ops.insert(deferred_ops);
2180 self.flush_deferred_ops(cx);
2181 self.did_edit(&old_version, was_dirty, cx);
2182 // Notify independently of whether the buffer was edited as the operations could include a
2183 // selection update.
2184 cx.notify();
2185 }
2186
2187 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
2188 let mut deferred_ops = Vec::new();
2189 for op in self.deferred_ops.drain().iter().cloned() {
2190 if self.can_apply_op(&op) {
2191 self.apply_op(op, cx);
2192 } else {
2193 deferred_ops.push(op);
2194 }
2195 }
2196 self.deferred_ops.insert(deferred_ops);
2197 }
2198
2199 pub fn has_deferred_ops(&self) -> bool {
2200 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2201 }
2202
2203 fn can_apply_op(&self, operation: &Operation) -> bool {
2204 match operation {
2205 Operation::Buffer(_) => {
2206 unreachable!("buffer operations should never be applied at this layer")
2207 }
2208 Operation::UpdateDiagnostics {
2209 diagnostics: diagnostic_set,
2210 ..
2211 } => diagnostic_set.iter().all(|diagnostic| {
2212 self.text.can_resolve(&diagnostic.range.start)
2213 && self.text.can_resolve(&diagnostic.range.end)
2214 }),
2215 Operation::UpdateSelections { selections, .. } => selections
2216 .iter()
2217 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2218 Operation::UpdateCompletionTriggers { .. } => true,
2219 }
2220 }
2221
2222 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
2223 match operation {
2224 Operation::Buffer(_) => {
2225 unreachable!("buffer operations should never be applied at this layer")
2226 }
2227 Operation::UpdateDiagnostics {
2228 server_id,
2229 diagnostics: diagnostic_set,
2230 lamport_timestamp,
2231 } => {
2232 let snapshot = self.snapshot();
2233 self.apply_diagnostic_update(
2234 server_id,
2235 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2236 lamport_timestamp,
2237 cx,
2238 );
2239 }
2240 Operation::UpdateSelections {
2241 selections,
2242 lamport_timestamp,
2243 line_mode,
2244 cursor_shape,
2245 } => {
2246 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2247 if set.lamport_timestamp > lamport_timestamp {
2248 return;
2249 }
2250 }
2251
2252 self.remote_selections.insert(
2253 lamport_timestamp.replica_id,
2254 SelectionSet {
2255 selections,
2256 lamport_timestamp,
2257 line_mode,
2258 cursor_shape,
2259 },
2260 );
2261 self.text.lamport_clock.observe(lamport_timestamp);
2262 self.non_text_state_update_count += 1;
2263 }
2264 Operation::UpdateCompletionTriggers {
2265 triggers,
2266 lamport_timestamp,
2267 server_id,
2268 } => {
2269 if triggers.is_empty() {
2270 self.completion_triggers_per_language_server
2271 .remove(&server_id);
2272 self.completion_triggers = self
2273 .completion_triggers_per_language_server
2274 .values()
2275 .flat_map(|triggers| triggers.into_iter().cloned())
2276 .collect();
2277 } else {
2278 self.completion_triggers_per_language_server
2279 .insert(server_id, triggers.iter().cloned().collect());
2280 self.completion_triggers.extend(triggers);
2281 }
2282 self.text.lamport_clock.observe(lamport_timestamp);
2283 }
2284 }
2285 }
2286
2287 fn apply_diagnostic_update(
2288 &mut self,
2289 server_id: LanguageServerId,
2290 diagnostics: DiagnosticSet,
2291 lamport_timestamp: clock::Lamport,
2292 cx: &mut ModelContext<Self>,
2293 ) {
2294 if lamport_timestamp > self.diagnostics_timestamp {
2295 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2296 if diagnostics.is_empty() {
2297 if let Ok(ix) = ix {
2298 self.diagnostics.remove(ix);
2299 }
2300 } else {
2301 match ix {
2302 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2303 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2304 };
2305 }
2306 self.diagnostics_timestamp = lamport_timestamp;
2307 self.non_text_state_update_count += 1;
2308 self.text.lamport_clock.observe(lamport_timestamp);
2309 cx.notify();
2310 cx.emit(BufferEvent::DiagnosticsUpdated);
2311 }
2312 }
2313
2314 fn send_operation(&self, operation: Operation, is_local: bool, cx: &mut ModelContext<Self>) {
2315 cx.emit(BufferEvent::Operation {
2316 operation,
2317 is_local,
2318 });
2319 }
2320
2321 /// Removes the selections for a given peer.
2322 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
2323 self.remote_selections.remove(&replica_id);
2324 cx.notify();
2325 }
2326
2327 /// Undoes the most recent transaction.
2328 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2329 let was_dirty = self.is_dirty();
2330 let old_version = self.version.clone();
2331
2332 if let Some((transaction_id, operation)) = self.text.undo() {
2333 self.send_operation(Operation::Buffer(operation), true, cx);
2334 self.did_edit(&old_version, was_dirty, cx);
2335 Some(transaction_id)
2336 } else {
2337 None
2338 }
2339 }
2340
2341 /// Manually undoes a specific transaction in the buffer's undo history.
2342 pub fn undo_transaction(
2343 &mut self,
2344 transaction_id: TransactionId,
2345 cx: &mut ModelContext<Self>,
2346 ) -> bool {
2347 let was_dirty = self.is_dirty();
2348 let old_version = self.version.clone();
2349 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2350 self.send_operation(Operation::Buffer(operation), true, cx);
2351 self.did_edit(&old_version, was_dirty, cx);
2352 true
2353 } else {
2354 false
2355 }
2356 }
2357
2358 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2359 pub fn undo_to_transaction(
2360 &mut self,
2361 transaction_id: TransactionId,
2362 cx: &mut ModelContext<Self>,
2363 ) -> bool {
2364 let was_dirty = self.is_dirty();
2365 let old_version = self.version.clone();
2366
2367 let operations = self.text.undo_to_transaction(transaction_id);
2368 let undone = !operations.is_empty();
2369 for operation in operations {
2370 self.send_operation(Operation::Buffer(operation), true, cx);
2371 }
2372 if undone {
2373 self.did_edit(&old_version, was_dirty, cx)
2374 }
2375 undone
2376 }
2377
2378 pub fn undo_operations(
2379 &mut self,
2380 counts: HashMap<Lamport, u32>,
2381 cx: &mut ModelContext<Buffer>,
2382 ) {
2383 let was_dirty = self.is_dirty();
2384 let operation = self.text.undo_operations(counts);
2385 let old_version = self.version.clone();
2386 self.send_operation(Operation::Buffer(operation), true, cx);
2387 self.did_edit(&old_version, was_dirty, cx);
2388 }
2389
    /// Redoes the most recently undone transaction.
2391 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2392 let was_dirty = self.is_dirty();
2393 let old_version = self.version.clone();
2394
2395 if let Some((transaction_id, operation)) = self.text.redo() {
2396 self.send_operation(Operation::Buffer(operation), true, cx);
2397 self.did_edit(&old_version, was_dirty, cx);
2398 Some(transaction_id)
2399 } else {
2400 None
2401 }
2402 }
2403
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2405 pub fn redo_to_transaction(
2406 &mut self,
2407 transaction_id: TransactionId,
2408 cx: &mut ModelContext<Self>,
2409 ) -> bool {
2410 let was_dirty = self.is_dirty();
2411 let old_version = self.version.clone();
2412
2413 let operations = self.text.redo_to_transaction(transaction_id);
2414 let redone = !operations.is_empty();
2415 for operation in operations {
2416 self.send_operation(Operation::Buffer(operation), true, cx);
2417 }
2418 if redone {
2419 self.did_edit(&old_version, was_dirty, cx)
2420 }
2421 redone
2422 }
2423
2424 /// Override current completion triggers with the user-provided completion triggers.
2425 pub fn set_completion_triggers(
2426 &mut self,
2427 server_id: LanguageServerId,
2428 triggers: BTreeSet<String>,
2429 cx: &mut ModelContext<Self>,
2430 ) {
2431 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2432 if triggers.is_empty() {
2433 self.completion_triggers_per_language_server
2434 .remove(&server_id);
2435 self.completion_triggers = self
2436 .completion_triggers_per_language_server
2437 .values()
2438 .flat_map(|triggers| triggers.into_iter().cloned())
2439 .collect();
2440 } else {
2441 self.completion_triggers_per_language_server
2442 .insert(server_id, triggers.clone());
2443 self.completion_triggers.extend(triggers.iter().cloned());
2444 }
2445 self.send_operation(
2446 Operation::UpdateCompletionTriggers {
2447 triggers: triggers.iter().cloned().collect(),
2448 lamport_timestamp: self.completion_triggers_timestamp,
2449 server_id,
2450 },
2451 true,
2452 cx,
2453 );
2454 cx.notify();
2455 }
2456
2457 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2459 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2460 &self.completion_triggers
2461 }
2462
    /// Call this directly after performing edits to prevent the preview tab
    /// from being dismissed by those edits. It causes [`Buffer::preserve_preview`]
    /// to return `true` until there are additional edits.
2466 pub fn refresh_preview(&mut self) {
2467 self.preview_version = self.version.clone();
2468 }
2469
2470 /// Whether we should preserve the preview status of a tab containing this buffer.
2471 pub fn preserve_preview(&self) -> bool {
2472 !self.has_edits_since(&self.preview_version)
2473 }
2474}
2475
2476#[doc(hidden)]
2477#[cfg(any(test, feature = "test-support"))]
2478impl Buffer {
2479 pub fn edit_via_marked_text(
2480 &mut self,
2481 marked_string: &str,
2482 autoindent_mode: Option<AutoindentMode>,
2483 cx: &mut ModelContext<Self>,
2484 ) {
2485 let edits = self.edits_for_marked_text(marked_string);
2486 self.edit(edits, autoindent_mode, cx);
2487 }
2488
2489 pub fn set_group_interval(&mut self, group_interval: Duration) {
2490 self.text.set_group_interval(group_interval);
2491 }
2492
2493 pub fn randomly_edit<T>(
2494 &mut self,
2495 rng: &mut T,
2496 old_range_count: usize,
2497 cx: &mut ModelContext<Self>,
2498 ) where
2499 T: rand::Rng,
2500 {
2501 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2502 let mut last_end = None;
2503 for _ in 0..old_range_count {
2504 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2505 break;
2506 }
2507
2508 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2509 let mut range = self.random_byte_range(new_start, rng);
2510 if rng.gen_bool(0.2) {
2511 mem::swap(&mut range.start, &mut range.end);
2512 }
2513 last_end = Some(range.end);
2514
2515 let new_text_len = rng.gen_range(0..10);
2516 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2517
2518 edits.push((range, new_text));
2519 }
2520 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2521 self.edit(edits, None, cx);
2522 }
2523
2524 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
2525 let was_dirty = self.is_dirty();
2526 let old_version = self.version.clone();
2527
2528 let ops = self.text.randomly_undo_redo(rng);
2529 if !ops.is_empty() {
2530 for op in ops {
2531 self.send_operation(Operation::Buffer(op), true, cx);
2532 self.did_edit(&old_version, was_dirty, cx);
2533 }
2534 }
2535 }
2536}
2537
2538impl EventEmitter<BufferEvent> for Buffer {}
2539
2540impl Deref for Buffer {
2541 type Target = TextBuffer;
2542
2543 fn deref(&self) -> &Self::Target {
2544 &self.text
2545 }
2546}
2547
2548impl BufferSnapshot {
    /// Returns [`IndentSize`] for a given line that respects user settings and
    /// language preferences.
    pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
        indent_size_for_line(self, row)
    }

    /// Returns [`IndentSize`] for a given position that respects user settings
2554 /// and language preferences.
2555 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
2556 let settings = language_settings(
2557 self.language_at(position).map(|l| l.name()),
2558 self.file(),
2559 cx,
2560 );
2561 if settings.hard_tabs {
2562 IndentSize::tab()
2563 } else {
2564 IndentSize::spaces(settings.tab_size.get())
2565 }
2566 }
2567
2568 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2569 /// is passed in as `single_indent_size`.
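    ///
    /// A hedged sketch for rows 1 through 3 with four-space indent units (the row range is
    /// illustrative):
    ///
    /// ```ignore
    /// let suggestions = snapshot.suggested_indents(1..4, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: indent to {} columns", indent.len);
    /// }
    /// ```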
2570 pub fn suggested_indents(
2571 &self,
2572 rows: impl Iterator<Item = u32>,
2573 single_indent_size: IndentSize,
2574 ) -> BTreeMap<u32, IndentSize> {
2575 let mut result = BTreeMap::new();
2576
2577 for row_range in contiguous_ranges(rows, 10) {
2578 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2579 Some(suggestions) => suggestions,
2580 _ => break,
2581 };
2582
2583 for (row, suggestion) in row_range.zip(suggestions) {
2584 let indent_size = if let Some(suggestion) = suggestion {
2585 result
2586 .get(&suggestion.basis_row)
2587 .copied()
2588 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2589 .with_delta(suggestion.delta, single_indent_size)
2590 } else {
2591 self.indent_size_for_line(row)
2592 };
2593
2594 result.insert(row, indent_size);
2595 }
2596 }
2597
2598 result
2599 }
2600
2601 fn suggest_autoindents(
2602 &self,
2603 row_range: Range<u32>,
2604 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2605 let config = &self.language.as_ref()?.config;
2606 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2607
2608 // Find the suggested indentation ranges based on the syntax tree.
2609 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2610 let end = Point::new(row_range.end, 0);
2611 let range = (start..end).to_offset(&self.text);
2612 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2613 Some(&grammar.indents_config.as_ref()?.query)
2614 });
2615 let indent_configs = matches
2616 .grammars()
2617 .iter()
2618 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2619 .collect::<Vec<_>>();
2620
2621 let mut indent_ranges = Vec::<Range<Point>>::new();
2622 let mut outdent_positions = Vec::<Point>::new();
2623 while let Some(mat) = matches.peek() {
2624 let mut start: Option<Point> = None;
2625 let mut end: Option<Point> = None;
2626
2627 let config = &indent_configs[mat.grammar_index];
2628 for capture in mat.captures {
2629 if capture.index == config.indent_capture_ix {
2630 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2631 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2632 } else if Some(capture.index) == config.start_capture_ix {
2633 start = Some(Point::from_ts_point(capture.node.end_position()));
2634 } else if Some(capture.index) == config.end_capture_ix {
2635 end = Some(Point::from_ts_point(capture.node.start_position()));
2636 } else if Some(capture.index) == config.outdent_capture_ix {
2637 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2638 }
2639 }
2640
2641 matches.advance();
2642 if let Some((start, end)) = start.zip(end) {
2643 if start.row == end.row {
2644 continue;
2645 }
2646
2647 let range = start..end;
2648 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2649 Err(ix) => indent_ranges.insert(ix, range),
2650 Ok(ix) => {
2651 let prev_range = &mut indent_ranges[ix];
2652 prev_range.end = prev_range.end.max(range.end);
2653 }
2654 }
2655 }
2656 }
2657
2658 let mut error_ranges = Vec::<Range<Point>>::new();
2659 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2660 Some(&grammar.error_query)
2661 });
2662 while let Some(mat) = matches.peek() {
2663 let node = mat.captures[0].node;
2664 let start = Point::from_ts_point(node.start_position());
2665 let end = Point::from_ts_point(node.end_position());
2666 let range = start..end;
2667 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2668 Ok(ix) | Err(ix) => ix,
2669 };
2670 let mut end_ix = ix;
2671 while let Some(existing_range) = error_ranges.get(end_ix) {
2672 if existing_range.end < end {
2673 end_ix += 1;
2674 } else {
2675 break;
2676 }
2677 }
2678 error_ranges.splice(ix..end_ix, [range]);
2679 matches.advance();
2680 }
2681
2682 outdent_positions.sort();
2683 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position
            // and set its end to the outdent position.
2686 if let Some(range_to_truncate) = indent_ranges
2687 .iter_mut()
2688 .filter(|indent_range| indent_range.contains(&outdent_position))
2689 .last()
2690 {
2691 range_to_truncate.end = outdent_position;
2692 }
2693 }
2694
        // Find the suggested indentation increases and decreases based on regexes.
2696 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2697 self.for_each_line(
2698 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2699 ..Point::new(row_range.end, 0),
2700 |row, line| {
2701 if config
2702 .decrease_indent_pattern
2703 .as_ref()
2704 .map_or(false, |regex| regex.is_match(line))
2705 {
2706 indent_change_rows.push((row, Ordering::Less));
2707 }
2708 if config
2709 .increase_indent_pattern
2710 .as_ref()
2711 .map_or(false, |regex| regex.is_match(line))
2712 {
2713 indent_change_rows.push((row + 1, Ordering::Greater));
2714 }
2715 },
2716 );
2717
2718 let mut indent_changes = indent_change_rows.into_iter().peekable();
2719 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2720 prev_non_blank_row.unwrap_or(0)
2721 } else {
2722 row_range.start.saturating_sub(1)
2723 };
2724 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2725 Some(row_range.map(move |row| {
2726 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2727
2728 let mut indent_from_prev_row = false;
2729 let mut outdent_from_prev_row = false;
2730 let mut outdent_to_row = u32::MAX;
2731
2732 while let Some((indent_row, delta)) = indent_changes.peek() {
2733 match indent_row.cmp(&row) {
2734 Ordering::Equal => match delta {
2735 Ordering::Less => outdent_from_prev_row = true,
2736 Ordering::Greater => indent_from_prev_row = true,
2737 _ => {}
2738 },
2739
2740 Ordering::Greater => break,
2741 Ordering::Less => {}
2742 }
2743
2744 indent_changes.next();
2745 }
2746
2747 for range in &indent_ranges {
2748 if range.start.row >= row {
2749 break;
2750 }
2751 if range.start.row == prev_row && range.end > row_start {
2752 indent_from_prev_row = true;
2753 }
2754 if range.end > prev_row_start && range.end <= row_start {
2755 outdent_to_row = outdent_to_row.min(range.start.row);
2756 }
2757 }
2758
2759 let within_error = error_ranges
2760 .iter()
2761 .any(|e| e.start.row < row && e.end > row_start);
2762
2763 let suggestion = if outdent_to_row == prev_row
2764 || (outdent_from_prev_row && indent_from_prev_row)
2765 {
2766 Some(IndentSuggestion {
2767 basis_row: prev_row,
2768 delta: Ordering::Equal,
2769 within_error,
2770 })
2771 } else if indent_from_prev_row {
2772 Some(IndentSuggestion {
2773 basis_row: prev_row,
2774 delta: Ordering::Greater,
2775 within_error,
2776 })
2777 } else if outdent_to_row < prev_row {
2778 Some(IndentSuggestion {
2779 basis_row: outdent_to_row,
2780 delta: Ordering::Equal,
2781 within_error,
2782 })
2783 } else if outdent_from_prev_row {
2784 Some(IndentSuggestion {
2785 basis_row: prev_row,
2786 delta: Ordering::Less,
2787 within_error,
2788 })
2789 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2790 {
2791 Some(IndentSuggestion {
2792 basis_row: prev_row,
2793 delta: Ordering::Equal,
2794 within_error,
2795 })
2796 } else {
2797 None
2798 };
2799
2800 prev_row = row;
2801 prev_row_start = row_start;
2802 suggestion
2803 }))
2804 }
2805
2806 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2807 while row > 0 {
2808 row -= 1;
2809 if !self.is_line_blank(row) {
2810 return Some(row);
2811 }
2812 }
2813 None
2814 }
2815
2816 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
2817 let captures = self.syntax.captures(range, &self.text, |grammar| {
2818 grammar.highlights_query.as_ref()
2819 });
2820 let highlight_maps = captures
2821 .grammars()
2822 .iter()
2823 .map(|grammar| grammar.highlight_map())
2824 .collect();
2825 (captures, highlight_maps)
2826 }
2827
2828 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
2829 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
2830 /// returned in chunks where each chunk has a single syntax highlighting style and
2831 /// diagnostic status.
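    ///
    /// A hedged sketch that collects the full text while observing per-chunk highlight ids
    /// (the range is illustrative):
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // `chunk.syntax_highlight_id` identifies the chunk's highlight style, if any.
    ///     text.push_str(chunk.text);
    /// }
    /// ```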
2832 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2833 let range = range.start.to_offset(self)..range.end.to_offset(self);
2834
2835 let mut syntax = None;
2836 if language_aware {
2837 syntax = Some(self.get_highlights(range.clone()));
2838 }
2839 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
2840 let diagnostics = language_aware;
2841 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
2842 }
2843
2844 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a new string for each line.
2846 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2847 let mut line = String::new();
2848 let mut row = range.start.row;
2849 for chunk in self
2850 .as_rope()
2851 .chunks_in_range(range.to_offset(self))
2852 .chain(["\n"])
2853 {
2854 for (newline_ix, text) in chunk.split('\n').enumerate() {
2855 if newline_ix > 0 {
2856 callback(row, &line);
2857 row += 1;
2858 line.clear();
2859 }
2860 line.push_str(text);
2861 }
2862 }
2863 }
2864
2865 /// Iterates over every [`SyntaxLayer`] in the buffer.
2866 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
2867 self.syntax
2868 .layers_for_range(0..self.len(), &self.text, true)
2869 }
2870
2871 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
2872 let offset = position.to_offset(self);
2873 self.syntax
2874 .layers_for_range(offset..offset, &self.text, false)
2875 .filter(|l| l.node().end_byte() > offset)
2876 .last()
2877 }
2878
2879 /// Returns the main [`Language`].
2880 pub fn language(&self) -> Option<&Arc<Language>> {
2881 self.language.as_ref()
2882 }
2883
2884 /// Returns the [`Language`] at the given location.
2885 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2886 self.syntax_layer_at(position)
2887 .map(|info| info.language)
2888 .or(self.language.as_ref())
2889 }
2890
2891 /// Returns the settings for the language at the given location.
2892 pub fn settings_at<'a, D: ToOffset>(
2893 &'a self,
2894 position: D,
2895 cx: &'a AppContext,
2896 ) -> Cow<'a, LanguageSettings> {
2897 language_settings(
2898 self.language_at(position).map(|l| l.name()),
2899 self.file.as_ref(),
2900 cx,
2901 )
2902 }
2903
2904 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
2905 CharClassifier::new(self.language_scope_at(point))
2906 }
2907
2908 /// Returns the [`LanguageScope`] at the given location.
2909 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2910 let offset = position.to_offset(self);
2911 let mut scope = None;
2912 let mut smallest_range: Option<Range<usize>> = None;
2913
2914 // Use the layer that has the smallest node intersecting the given point.
2915 for layer in self
2916 .syntax
2917 .layers_for_range(offset..offset, &self.text, false)
2918 {
2919 let mut cursor = layer.node().walk();
2920
2921 let mut range = None;
2922 loop {
2923 let child_range = cursor.node().byte_range();
2924 if !child_range.to_inclusive().contains(&offset) {
2925 break;
2926 }
2927
2928 range = Some(child_range);
2929 if cursor.goto_first_child_for_byte(offset).is_none() {
2930 break;
2931 }
2932 }
2933
2934 if let Some(range) = range {
2935 if smallest_range
2936 .as_ref()
2937 .map_or(true, |smallest_range| range.len() < smallest_range.len())
2938 {
2939 smallest_range = Some(range);
2940 scope = Some(LanguageScope {
2941 language: layer.language.clone(),
2942 override_id: layer.override_id(offset, &self.text),
2943 });
2944 }
2945 }
2946 }
2947
2948 scope.or_else(|| {
2949 self.language.clone().map(|language| LanguageScope {
2950 language,
2951 override_id: None,
2952 })
2953 })
2954 }
2955
2956 /// Returns a tuple of the range and character kind of the word
2957 /// surrounding the given position.
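    ///
    /// A hedged sketch that extracts the word under an illustrative offset:
    ///
    /// ```ignore
    /// let (range, _kind) = snapshot.surrounding_word(25);
    /// let word: String = snapshot.text_for_range(range).collect();
    /// ```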
2958 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2959 let mut start = start.to_offset(self);
2960 let mut end = start;
2961 let mut next_chars = self.chars_at(start).peekable();
2962 let mut prev_chars = self.reversed_chars_at(start).peekable();
2963
2964 let classifier = self.char_classifier_at(start);
2965 let word_kind = cmp::max(
2966 prev_chars.peek().copied().map(|c| classifier.kind(c)),
2967 next_chars.peek().copied().map(|c| classifier.kind(c)),
2968 );
2969
2970 for ch in prev_chars {
2971 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
2972 start -= ch.len_utf8();
2973 } else {
2974 break;
2975 }
2976 }
2977
2978 for ch in next_chars {
2979 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
2980 end += ch.len_utf8();
2981 } else {
2982 break;
2983 }
2984 }
2985
2986 (start..end, word_kind)
2987 }
2988
    /// Returns the range for the smallest syntax node enclosing the given range.
2990 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2991 let range = range.start.to_offset(self)..range.end.to_offset(self);
2992 let mut result: Option<Range<usize>> = None;
2993 'outer: for layer in self
2994 .syntax
2995 .layers_for_range(range.clone(), &self.text, true)
2996 {
2997 let mut cursor = layer.node().walk();
2998
2999 // Descend to the first leaf that touches the start of the range,
3000 // and if the range is non-empty, extends beyond the start.
3001 while cursor.goto_first_child_for_byte(range.start).is_some() {
3002 if !range.is_empty() && cursor.node().end_byte() == range.start {
3003 cursor.goto_next_sibling();
3004 }
3005 }
3006
3007 // Ascend to the smallest ancestor that strictly contains the range.
3008 loop {
3009 let node_range = cursor.node().byte_range();
3010 if node_range.start <= range.start
3011 && node_range.end >= range.end
3012 && node_range.len() > range.len()
3013 {
3014 break;
3015 }
3016 if !cursor.goto_parent() {
3017 continue 'outer;
3018 }
3019 }
3020
3021 let left_node = cursor.node();
3022 let mut layer_result = left_node.byte_range();
3023
3024 // For an empty range, try to find another node immediately to the right of the range.
3025 if left_node.end_byte() == range.start {
3026 let mut right_node = None;
3027 while !cursor.goto_next_sibling() {
3028 if !cursor.goto_parent() {
3029 break;
3030 }
3031 }
3032
3033 while cursor.node().start_byte() == range.start {
3034 right_node = Some(cursor.node());
3035 if !cursor.goto_first_child() {
3036 break;
3037 }
3038 }
3039
3040 // If there is a candidate node on both sides of the (empty) range, then
3041 // decide between the two by favoring a named node over an anonymous token.
3042 // If both nodes are the same in that regard, favor the right one.
3043 if let Some(right_node) = right_node {
3044 if right_node.is_named() || !left_node.is_named() {
3045 layer_result = right_node.byte_range();
3046 }
3047 }
3048 }
3049
3050 if let Some(previous_result) = &result {
3051 if previous_result.len() < layer_result.len() {
3052 continue;
3053 }
3054 }
3055 result = Some(layer_result);
3056 }
3057
3058 result
3059 }
3060
3061 /// Returns the outline for the buffer.
3062 ///
3063 /// This method allows passing an optional [`SyntaxTheme`] to
3064 /// syntax-highlight the returned symbols.
3065 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3066 self.outline_items_containing(0..self.len(), true, theme)
3067 .map(Outline::new)
3068 }
3069
3070 /// Returns all the symbols that contain the given position.
3071 ///
3072 /// This method allows passing an optional [`SyntaxTheme`] to
3073 /// syntax-highlight the returned symbols.
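    ///
    /// A hedged sketch that prints the enclosing symbol path for an illustrative offset:
    ///
    /// ```ignore
    /// if let Some(items) = snapshot.symbols_containing(42, None) {
    ///     let path = items
    ///         .iter()
    ///         .map(|item| item.text.as_str())
    ///         .collect::<Vec<_>>()
    ///         .join(" > ");
    ///     println!("{path}");
    /// }
    /// ```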
3074 pub fn symbols_containing<T: ToOffset>(
3075 &self,
3076 position: T,
3077 theme: Option<&SyntaxTheme>,
3078 ) -> Option<Vec<OutlineItem<Anchor>>> {
3079 let position = position.to_offset(self);
3080 let mut items = self.outline_items_containing(
3081 position.saturating_sub(1)..self.len().min(position + 1),
3082 false,
3083 theme,
3084 )?;
3085 let mut prev_depth = None;
3086 items.retain(|item| {
3087 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3088 prev_depth = Some(item.depth);
3089 result
3090 });
3091 Some(items)
3092 }
3093
3094 pub fn outline_items_containing<T: ToOffset>(
3095 &self,
3096 range: Range<T>,
3097 include_extra_context: bool,
3098 theme: Option<&SyntaxTheme>,
3099 ) -> Option<Vec<OutlineItem<Anchor>>> {
3100 let range = range.to_offset(self);
3101 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3102 grammar.outline_config.as_ref().map(|c| &c.query)
3103 });
3104 let configs = matches
3105 .grammars()
3106 .iter()
3107 .map(|g| g.outline_config.as_ref().unwrap())
3108 .collect::<Vec<_>>();
3109
3110 let mut items = Vec::new();
3111 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3112 while let Some(mat) = matches.peek() {
3113 let config = &configs[mat.grammar_index];
3114 if let Some(item) =
3115 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3116 {
3117 items.push(item);
3118 } else if let Some(capture) = mat
3119 .captures
3120 .iter()
3121 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3122 {
3123 let capture_range = capture.node.start_position()..capture.node.end_position();
3124 let mut capture_row_range =
3125 capture_range.start.row as u32..capture_range.end.row as u32;
3126 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3127 {
3128 capture_row_range.end -= 1;
3129 }
3130 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3131 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3132 last_row_range.end = capture_row_range.end;
3133 } else {
3134 annotation_row_ranges.push(capture_row_range);
3135 }
3136 } else {
3137 annotation_row_ranges.push(capture_row_range);
3138 }
3139 }
3140 matches.advance();
3141 }
3142
3143 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3144
3145 // Assign depths based on containment relationships and convert to anchors.
3146 let mut item_ends_stack = Vec::<Point>::new();
3147 let mut anchor_items = Vec::new();
3148 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3149 for item in items {
3150 while let Some(last_end) = item_ends_stack.last().copied() {
3151 if last_end < item.range.end {
3152 item_ends_stack.pop();
3153 } else {
3154 break;
3155 }
3156 }
3157
3158 let mut annotation_row_range = None;
3159 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3160 let row_preceding_item = item.range.start.row.saturating_sub(1);
3161 if next_annotation_row_range.end < row_preceding_item {
3162 annotation_row_ranges.next();
3163 } else {
3164 if next_annotation_row_range.end == row_preceding_item {
3165 annotation_row_range = Some(next_annotation_row_range.clone());
3166 annotation_row_ranges.next();
3167 }
3168 break;
3169 }
3170 }
3171
3172 anchor_items.push(OutlineItem {
3173 depth: item_ends_stack.len(),
3174 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3175 text: item.text,
3176 highlight_ranges: item.highlight_ranges,
3177 name_ranges: item.name_ranges,
3178 body_range: item.body_range.map(|body_range| {
3179 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3180 }),
3181 annotation_range: annotation_row_range.map(|annotation_range| {
3182 self.anchor_after(Point::new(annotation_range.start, 0))
3183 ..self.anchor_before(Point::new(
3184 annotation_range.end,
3185 self.line_len(annotation_range.end),
3186 ))
3187 }),
3188 });
3189 item_ends_stack.push(item.range.end);
3190 }
3191
3192 Some(anchor_items)
3193 }
3194
3195 fn next_outline_item(
3196 &self,
3197 config: &OutlineConfig,
3198 mat: &SyntaxMapMatch,
3199 range: &Range<usize>,
3200 include_extra_context: bool,
3201 theme: Option<&SyntaxTheme>,
3202 ) -> Option<OutlineItem<Point>> {
3203 let item_node = mat.captures.iter().find_map(|cap| {
3204 if cap.index == config.item_capture_ix {
3205 Some(cap.node)
3206 } else {
3207 None
3208 }
3209 })?;
3210
3211 let item_byte_range = item_node.byte_range();
3212 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3213 return None;
3214 }
3215 let item_point_range = Point::from_ts_point(item_node.start_position())
3216 ..Point::from_ts_point(item_node.end_position());
3217
3218 let mut open_point = None;
3219 let mut close_point = None;
3220 let mut buffer_ranges = Vec::new();
3221 for capture in mat.captures {
3222 let node_is_name;
3223 if capture.index == config.name_capture_ix {
3224 node_is_name = true;
3225 } else if Some(capture.index) == config.context_capture_ix
3226 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3227 {
3228 node_is_name = false;
3229 } else {
3230 if Some(capture.index) == config.open_capture_ix {
3231 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3232 } else if Some(capture.index) == config.close_capture_ix {
3233 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3234 }
3235
3236 continue;
3237 }
3238
3239 let mut range = capture.node.start_byte()..capture.node.end_byte();
3240 let start = capture.node.start_position();
3241 if capture.node.end_position().row > start.row {
3242 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3243 }
3244
3245 if !range.is_empty() {
3246 buffer_ranges.push((range, node_is_name));
3247 }
3248 }
3249 if buffer_ranges.is_empty() {
3250 return None;
3251 }
3252 let mut text = String::new();
3253 let mut highlight_ranges = Vec::new();
3254 let mut name_ranges = Vec::new();
3255 let mut chunks = self.chunks(
3256 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3257 true,
3258 );
3259 let mut last_buffer_range_end = 0;
3260 for (buffer_range, is_name) in buffer_ranges {
3261 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
3262 text.push(' ');
3263 }
3264 last_buffer_range_end = buffer_range.end;
3265 if is_name {
3266 let mut start = text.len();
3267 let end = start + buffer_range.len();
3268
                // When multiple names are captured, the matchable text
                // includes the whitespace in between the names.
3271 if !name_ranges.is_empty() {
3272 start -= 1;
3273 }
3274
3275 name_ranges.push(start..end);
3276 }
3277
3278 let mut offset = buffer_range.start;
3279 chunks.seek(buffer_range.clone());
3280 for mut chunk in chunks.by_ref() {
3281 if chunk.text.len() > buffer_range.end - offset {
3282 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3283 offset = buffer_range.end;
3284 } else {
3285 offset += chunk.text.len();
3286 }
3287 let style = chunk
3288 .syntax_highlight_id
3289 .zip(theme)
3290 .and_then(|(highlight, theme)| highlight.style(theme));
3291 if let Some(style) = style {
3292 let start = text.len();
3293 let end = start + chunk.text.len();
3294 highlight_ranges.push((start..end, style));
3295 }
3296 text.push_str(chunk.text);
3297 if offset >= buffer_range.end {
3298 break;
3299 }
3300 }
3301 }
3302
3303 Some(OutlineItem {
3304 depth: 0, // We'll calculate the depth later
3305 range: item_point_range,
3306 text,
3307 highlight_ranges,
3308 name_ranges,
3309 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3310 annotation_range: None,
3311 })
3312 }
3313
3314 /// For each grammar in the language, runs the provided
3315 /// [`tree_sitter::Query`] against the given range.
3316 pub fn matches(
3317 &self,
3318 range: Range<usize>,
3319 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3320 ) -> SyntaxMapMatches {
3321 self.syntax.matches(range, self, query)
3322 }
3323
    /// Returns bracket range pairs overlapping or adjacent to `range`.
3325 pub fn bracket_ranges<T: ToOffset>(
3326 &self,
3327 range: Range<T>,
3328 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
3329 // Find bracket pairs that *inclusively* contain the given range.
3330 let range = range.start.to_offset(self).saturating_sub(1)
3331 ..self.len().min(range.end.to_offset(self) + 1);
3332
3333 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3334 grammar.brackets_config.as_ref().map(|c| &c.query)
3335 });
3336 let configs = matches
3337 .grammars()
3338 .iter()
3339 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3340 .collect::<Vec<_>>();
3341
3342 iter::from_fn(move || {
3343 while let Some(mat) = matches.peek() {
3344 let mut open = None;
3345 let mut close = None;
3346 let config = &configs[mat.grammar_index];
3347 for capture in mat.captures {
3348 if capture.index == config.open_capture_ix {
3349 open = Some(capture.node.byte_range());
3350 } else if capture.index == config.close_capture_ix {
3351 close = Some(capture.node.byte_range());
3352 }
3353 }
3354
3355 matches.advance();
3356
3357 let Some((open, close)) = open.zip(close) else {
3358 continue;
3359 };
3360
3361 let bracket_range = open.start..=close.end;
3362 if !bracket_range.overlaps(&range) {
3363 continue;
3364 }
3365
3366 return Some((open, close));
3367 }
3368 None
3369 })
3370 }
3371
    /// Returns enclosing bracket ranges containing the given range.
3373 pub fn enclosing_bracket_ranges<T: ToOffset>(
3374 &self,
3375 range: Range<T>,
3376 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
3377 let range = range.start.to_offset(self)..range.end.to_offset(self);
3378
3379 self.bracket_ranges(range.clone())
3380 .filter(move |(open, close)| open.start <= range.start && close.end >= range.end)
3381 }
3382
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None`
    /// if no brackets contain the range.
    ///
    /// Can optionally pass a `range_filter` to filter the ranges of brackets to consider.
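    ///
    /// A hedged sketch that only keeps bracket pairs whose delimiters sit on the same row
    /// (the position is illustrative):
    ///
    /// ```ignore
    /// let same_row = |open: Range<usize>, close: Range<usize>| {
    ///     open.start.to_point(&snapshot).row == close.end.to_point(&snapshot).row
    /// };
    /// let innermost = snapshot.innermost_enclosing_bracket_ranges(10..10, Some(&same_row));
    /// ```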
3386 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3387 &self,
3388 range: Range<T>,
3389 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3390 ) -> Option<(Range<usize>, Range<usize>)> {
3391 let range = range.start.to_offset(self)..range.end.to_offset(self);
3392
3393 // Get the ranges of the innermost pair of brackets.
3394 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3395
3396 for (open, close) in self.enclosing_bracket_ranges(range.clone()) {
3397 if let Some(range_filter) = range_filter {
3398 if !range_filter(open.clone(), close.clone()) {
3399 continue;
3400 }
3401 }
3402
3403 let len = close.end - open.start;
3404
3405 if let Some((existing_open, existing_close)) = &result {
3406 let existing_len = existing_close.end - existing_open.start;
3407 if len > existing_len {
3408 continue;
3409 }
3410 }
3411
3412 result = Some((open, close));
3413 }
3414
3415 result
3416 }
3417
3418 /// Returns anchor ranges for any matches of the redaction query.
3419 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3420 /// will be run on the relevant section of the buffer.
3421 pub fn redacted_ranges<T: ToOffset>(
3422 &self,
3423 range: Range<T>,
3424 ) -> impl Iterator<Item = Range<usize>> + '_ {
3425 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3426 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3427 grammar
3428 .redactions_config
3429 .as_ref()
3430 .map(|config| &config.query)
3431 });
3432
3433 let configs = syntax_matches
3434 .grammars()
3435 .iter()
3436 .map(|grammar| grammar.redactions_config.as_ref())
3437 .collect::<Vec<_>>();
3438
3439 iter::from_fn(move || {
3440 let redacted_range = syntax_matches
3441 .peek()
3442 .and_then(|mat| {
3443 configs[mat.grammar_index].and_then(|config| {
3444 mat.captures
3445 .iter()
3446 .find(|capture| capture.index == config.redaction_capture_ix)
3447 })
3448 })
3449 .map(|mat| mat.node.byte_range());
3450 syntax_matches.advance();
3451 redacted_range
3452 })
3453 }
3454
3455 pub fn injections_intersecting_range<T: ToOffset>(
3456 &self,
3457 range: Range<T>,
3458 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3459 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3460
3461 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3462 grammar
3463 .injection_config
3464 .as_ref()
3465 .map(|config| &config.query)
3466 });
3467
3468 let configs = syntax_matches
3469 .grammars()
3470 .iter()
3471 .map(|grammar| grammar.injection_config.as_ref())
3472 .collect::<Vec<_>>();
3473
3474 iter::from_fn(move || {
3475 let ranges = syntax_matches.peek().and_then(|mat| {
3476 let config = &configs[mat.grammar_index]?;
3477 let content_capture_range = mat.captures.iter().find_map(|capture| {
3478 if capture.index == config.content_capture_ix {
3479 Some(capture.node.byte_range())
3480 } else {
3481 None
3482 }
3483 })?;
3484 let language = self.language_at(content_capture_range.start)?;
3485 Some((content_capture_range, language))
3486 });
3487 syntax_matches.advance();
3488 ranges
3489 })
3490 }
3491
3492 pub fn runnable_ranges(
3493 &self,
3494 range: Range<Anchor>,
3495 ) -> impl Iterator<Item = RunnableRange> + '_ {
3496 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3497
3498 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3499 grammar.runnable_config.as_ref().map(|config| &config.query)
3500 });
3501
3502 let test_configs = syntax_matches
3503 .grammars()
3504 .iter()
3505 .map(|grammar| grammar.runnable_config.as_ref())
3506 .collect::<Vec<_>>();
3507
3508 iter::from_fn(move || loop {
3509 let mat = syntax_matches.peek()?;
3510
3511 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
3512 let mut run_range = None;
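// Compute the smallest byte range spanning every capture of this match.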
3513 let full_range = mat.captures.iter().fold(
3514 Range {
3515 start: usize::MAX,
3516 end: 0,
3517 },
3518 |mut acc, next| {
3519 let byte_range = next.node.byte_range();
3520 if acc.start > byte_range.start {
3521 acc.start = byte_range.start;
3522 }
3523 if acc.end < byte_range.end {
3524 acc.end = byte_range.end;
3525 }
3526 acc
3527 },
3528 );
3529 if full_range.start > full_range.end {
3530 // We did not find a full spanning range of this match.
3531 return None;
3532 }
3533 let extra_captures: SmallVec<[_; 1]> =
3534 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
3535 test_configs
3536 .extra_captures
3537 .get(capture.index as usize)
3538 .cloned()
3539 .and_then(|tag_name| match tag_name {
3540 RunnableCapture::Named(name) => {
3541 Some((capture.node.byte_range(), name))
3542 }
3543 RunnableCapture::Run => {
3544 let _ = run_range.insert(capture.node.byte_range());
3545 None
3546 }
3547 })
3548 }));
3549 let run_range = run_range?;
3550 let tags = test_configs
3551 .query
3552 .property_settings(mat.pattern_index)
3553 .iter()
3554 .filter_map(|property| {
3555 if *property.key == *"tag" {
3556 property
3557 .value
3558 .as_ref()
3559 .map(|value| RunnableTag(value.to_string().into()))
3560 } else {
3561 None
3562 }
3563 })
3564 .collect();
3565 let extra_captures = extra_captures
3566 .into_iter()
3567 .map(|(range, name)| {
3568 (
3569 name.to_string(),
3570 self.text_for_range(range.clone()).collect::<String>(),
3571 )
3572 })
3573 .collect();
3574 // All tags should have the same range.
3575 Some(RunnableRange {
3576 run_range,
3577 full_range,
3578 runnable: Runnable {
3579 tags,
3580 language: mat.language,
3581 buffer: self.remote_id(),
3582 },
3583 extra_captures,
3584 buffer_id: self.remote_id(),
3585 })
3586 });
3587
3588 syntax_matches.advance();
3589 if test_range.is_some() {
3590 // It's fine to short-circuit when `.peek()?` returns `None`. We only avoid returning `None` from this
3591 // iterator when a match lacked a run marker; in that case we loop around to the next match.
3592 return test_range;
3593 }
3594 })
3595 }
3596
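/// Computes the indent guides for the rows intersecting the given range, using the
/// buffer's language settings for tab size and indent guide configuration. Unless
/// `ignore_disabled_for_language` is set, an empty list is returned when indent
/// guides are disabled for the buffer's language.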
3597 pub fn indent_guides_in_range(
3598 &self,
3599 range: Range<Anchor>,
3600 ignore_disabled_for_language: bool,
3601 cx: &AppContext,
3602 ) -> Vec<IndentGuide> {
3603 let language_settings =
3604 language_settings(self.language().map(|l| l.name()), self.file.as_ref(), cx);
3605 let settings = language_settings.indent_guides;
3606 if !ignore_disabled_for_language && !settings.enabled {
3607 return Vec::new();
3608 }
3609 let tab_size = language_settings.tab_size.get() as u32;
3610
3611 let start_row = range.start.to_point(self).row;
3612 let end_row = range.end.to_point(self).row;
3613 let row_range = start_row..end_row + 1;
3614
3615 let mut row_indents = self.line_indents_in_row_range(row_range.clone());
3616
3617 let mut result_vec = Vec::new();
3618 let mut indent_stack = SmallVec::<[IndentGuide; 8]>::new();
3619
3620 while let Some((first_row, mut line_indent)) = row_indents.next() {
3621 let current_depth = indent_stack.len() as u32;
3622
3623 // When encountering an empty line, continue until a line with a useful indent is found,
3624 // then add to the indent stack with the depth found.
3625 let mut found_indent = false;
3626 let mut last_row = first_row;
3627 if line_indent.is_line_empty() {
3628 let mut trailing_row = end_row;
3629 while !found_indent {
3630 let (target_row, new_line_indent) =
3631 if let Some(display_row) = row_indents.next() {
3632 display_row
3633 } else {
3634 // This means we reached the end of the given range and found empty lines at the end.
3635 // We need to traverse further until we find a non-empty line to know if we need to add
3636 // an indent guide for the last visible indent.
3637 trailing_row += 1;
3638
3639 const TRAILING_ROW_SEARCH_LIMIT: u32 = 25;
3640 if trailing_row > self.max_point().row
3641 || trailing_row > end_row + TRAILING_ROW_SEARCH_LIMIT
3642 {
3643 break;
3644 }
3645 let new_line_indent = self.line_indent_for_row(trailing_row);
3646 (trailing_row, new_line_indent)
3647 };
3648
3649 if new_line_indent.is_line_empty() {
3650 continue;
3651 }
3652 last_row = target_row.min(end_row);
3653 line_indent = new_line_indent;
3654 found_indent = true;
3655 break;
3656 }
3657 } else {
3658 found_indent = true
3659 }
3660
3661 let depth = if found_indent {
3662 line_indent.len(tab_size) / tab_size
3663 + ((line_indent.len(tab_size) % tab_size) > 0) as u32
3664 } else {
3665 current_depth
3666 };
3667
3668 match depth.cmp(&current_depth) {
3669 Ordering::Less => {
3670 for _ in 0..(current_depth - depth) {
3671 let mut indent = indent_stack.pop().unwrap();
3672 if last_row != first_row {
3673 // In this case, we landed on an empty row, had to seek forward,
3674 // and discovered that the indent we were on is ending.
3675 // This means that the last display row must be on the line that
3676 // ends this indent range, so we should display the range up to
3677 // the first non-empty line.
3678 indent.end_row = first_row.saturating_sub(1);
3679 }
3680
3681 result_vec.push(indent)
3682 }
3683 }
3684 Ordering::Greater => {
3685 for next_depth in current_depth..depth {
3686 indent_stack.push(IndentGuide {
3687 buffer_id: self.remote_id(),
3688 start_row: first_row,
3689 end_row: last_row,
3690 depth: next_depth,
3691 tab_size,
3692 settings,
3693 });
3694 }
3695 }
3696 _ => {}
3697 }
3698
3699 for indent in indent_stack.iter_mut() {
3700 indent.end_row = last_row;
3701 }
3702 }
3703
3704 result_vec.extend(indent_stack);
3705
3706 result_vec
3707 }
3708
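/// Returns the row range and line indent of the indented block that encloses the
/// given row. The search is bounded, and it periodically yields to the async
/// executor so that scanning large files does not block.
///
/// A minimal sketch (illustrative only; `snapshot` is assumed to be an existing
/// `BufferSnapshot` and `cursor_row` a row within it):
///
/// ```ignore
/// if let Some((rows, indent)) = snapshot.enclosing_indent(cursor_row).await {
///     // `rows` spans the enclosing block; `indent` is its indentation.
/// }
/// ```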
3709 pub async fn enclosing_indent(
3710 &self,
3711 mut buffer_row: BufferRow,
3712 ) -> Option<(Range<BufferRow>, LineIndent)> {
3713 let max_row = self.max_point().row;
3714 if buffer_row >= max_row {
3715 return None;
3716 }
3717
3718 let mut target_indent = self.line_indent_for_row(buffer_row);
3719
3720 // If the current row is at the start of an indented block, we want to return this
3721 // block as the enclosing indent.
3722 if !target_indent.is_line_empty() && buffer_row < max_row {
3723 let next_line_indent = self.line_indent_for_row(buffer_row + 1);
3724 if !next_line_indent.is_line_empty()
3725 && target_indent.raw_len() < next_line_indent.raw_len()
3726 {
3727 target_indent = next_line_indent;
3728 buffer_row += 1;
3729 }
3730 }
3731
3732 const SEARCH_ROW_LIMIT: u32 = 25000;
3733 const SEARCH_WHITESPACE_ROW_LIMIT: u32 = 2500;
3734 const YIELD_INTERVAL: u32 = 100;
3735
3736 let mut accessed_row_counter = 0;
3737
3738 // If the line at the current row is blank, search for the nearest non-empty lines above and below
3739 if target_indent.is_line_empty() {
3740 let start = buffer_row.saturating_sub(SEARCH_WHITESPACE_ROW_LIMIT);
3741 let end = (max_row + 1).min(buffer_row + SEARCH_WHITESPACE_ROW_LIMIT);
3742
3743 let mut non_empty_line_above = None;
3744 for (row, indent) in self
3745 .text
3746 .reversed_line_indents_in_row_range(start..buffer_row)
3747 {
3748 accessed_row_counter += 1;
3749 if accessed_row_counter == YIELD_INTERVAL {
3750 accessed_row_counter = 0;
3751 yield_now().await;
3752 }
3753 if !indent.is_line_empty() {
3754 non_empty_line_above = Some((row, indent));
3755 break;
3756 }
3757 }
3758
3759 let mut non_empty_line_below = None;
3760 for (row, indent) in self.text.line_indents_in_row_range((buffer_row + 1)..end) {
3761 accessed_row_counter += 1;
3762 if accessed_row_counter == YIELD_INTERVAL {
3763 accessed_row_counter = 0;
3764 yield_now().await;
3765 }
3766 if !indent.is_line_empty() {
3767 non_empty_line_below = Some((row, indent));
3768 break;
3769 }
3770 }
3771
3772 let (row, indent) = match (non_empty_line_above, non_empty_line_below) {
3773 (Some((above_row, above_indent)), Some((below_row, below_indent))) => {
3774 if above_indent.raw_len() >= below_indent.raw_len() {
3775 (above_row, above_indent)
3776 } else {
3777 (below_row, below_indent)
3778 }
3779 }
3780 (Some(above), None) => above,
3781 (None, Some(below)) => below,
3782 _ => return None,
3783 };
3784
3785 target_indent = indent;
3786 buffer_row = row;
3787 }
3788
3789 let start = buffer_row.saturating_sub(SEARCH_ROW_LIMIT);
3790 let end = (max_row + 1).min(buffer_row + SEARCH_ROW_LIMIT);
3791
3792 let mut start_indent = None;
3793 for (row, indent) in self
3794 .text
3795 .reversed_line_indents_in_row_range(start..buffer_row)
3796 {
3797 accessed_row_counter += 1;
3798 if accessed_row_counter == YIELD_INTERVAL {
3799 accessed_row_counter = 0;
3800 yield_now().await;
3801 }
3802 if !indent.is_line_empty() && indent.raw_len() < target_indent.raw_len() {
3803 start_indent = Some((row, indent));
3804 break;
3805 }
3806 }
3807 let (start_row, start_indent_size) = start_indent?;
3808
3809 let mut end_indent = (end, None);
3810 for (row, indent) in self.text.line_indents_in_row_range((buffer_row + 1)..end) {
3811 accessed_row_counter += 1;
3812 if accessed_row_counter == YIELD_INTERVAL {
3813 accessed_row_counter = 0;
3814 yield_now().await;
3815 }
3816 if !indent.is_line_empty() && indent.raw_len() < target_indent.raw_len() {
3817 end_indent = (row.saturating_sub(1), Some(indent));
3818 break;
3819 }
3820 }
3821 let (end_row, end_indent_size) = end_indent;
3822
3823 let indent = if let Some(end_indent_size) = end_indent_size {
3824 if start_indent_size.raw_len() > end_indent_size.raw_len() {
3825 start_indent_size
3826 } else {
3827 end_indent_size
3828 }
3829 } else {
3830 start_indent_size
3831 };
3832
3833 Some((start_row..end_row, indent))
3834 }
3835
3836 /// Returns selections for remote peers intersecting the given range. If `include_local` is true, the local replica's selections are included as well.
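///
/// A minimal sketch (illustrative only; `snapshot` is assumed to be an existing
/// `BufferSnapshot`):
///
/// ```ignore
/// for (replica_id, line_mode, cursor_shape, selections) in
///     snapshot.selections_in_range(Anchor::MIN..Anchor::MAX, false)
/// {
///     for selection in selections {
///         // Render `selection.start..selection.end` for `replica_id`.
///     }
/// }
/// ```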
3837 #[allow(clippy::type_complexity)]
3838 pub fn selections_in_range(
3839 &self,
3840 range: Range<Anchor>,
3841 include_local: bool,
3842 ) -> impl Iterator<
3843 Item = (
3844 ReplicaId,
3845 bool,
3846 CursorShape,
3847 impl Iterator<Item = &Selection<Anchor>> + '_,
3848 ),
3849 > + '_ {
3850 self.remote_selections
3851 .iter()
3852 .filter(move |(replica_id, set)| {
3853 (include_local || **replica_id != self.text.replica_id())
3854 && !set.selections.is_empty()
3855 })
3856 .map(move |(replica_id, set)| {
3857 let start_ix = match set.selections.binary_search_by(|probe| {
3858 probe.end.cmp(&range.start, self).then(Ordering::Greater)
3859 }) {
3860 Ok(ix) | Err(ix) => ix,
3861 };
3862 let end_ix = match set.selections.binary_search_by(|probe| {
3863 probe.start.cmp(&range.end, self).then(Ordering::Less)
3864 }) {
3865 Ok(ix) | Err(ix) => ix,
3866 };
3867
3868 (
3869 *replica_id,
3870 set.line_mode,
3871 set.cursor_shape,
3872 set.selections[start_ix..end_ix].iter(),
3873 )
3874 })
3875 }
3876
3877 /// Whether the buffer contains any Git changes.
3878 pub fn has_git_diff(&self) -> bool {
3879 !self.git_diff.is_empty()
3880 }
3881
3882 /// Returns all the Git diff hunks intersecting the given row range.
3883 #[cfg(any(test, feature = "test-support"))]
3884 pub fn git_diff_hunks_in_row_range(
3885 &self,
3886 range: Range<BufferRow>,
3887 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk> {
3888 self.git_diff.hunks_in_row_range(range, self)
3889 }
3890
3891 /// Returns all the Git diff hunks intersecting the given
3892 /// range.
3893 pub fn git_diff_hunks_intersecting_range(
3894 &self,
3895 range: Range<Anchor>,
3896 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk> {
3897 self.git_diff.hunks_intersecting_range(range, self)
3898 }
3899
3900 /// Returns all the Git diff hunks intersecting the given
3901 /// range, in reverse order.
3902 pub fn git_diff_hunks_intersecting_range_rev(
3903 &self,
3904 range: Range<Anchor>,
3905 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk> {
3906 self.git_diff.hunks_intersecting_range_rev(range, self)
3907 }
3908
3909 /// Returns whether the buffer contains any diagnostics.
3910 pub fn has_diagnostics(&self) -> bool {
3911 !self.diagnostics.is_empty()
3912 }
3913
3914 /// Returns all the diagnostics intersecting the given range.
3915 pub fn diagnostics_in_range<'a, T, O>(
3916 &'a self,
3917 search_range: Range<T>,
3918 reversed: bool,
3919 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3920 where
3921 T: 'a + Clone + ToOffset,
3922 O: 'a + FromAnchor + Ord,
3923 {
3924 let mut iterators: Vec<_> = self
3925 .diagnostics
3926 .iter()
3927 .map(|(_, collection)| {
3928 collection
3929 .range::<T, O>(search_range.clone(), self, true, reversed)
3930 .peekable()
3931 })
3932 .collect();
3933
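// Merge the per-language-server iterators, repeatedly yielding whichever remaining
// entry sorts first by range start, then severity, then group id (reversed when
// `reversed` is set).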
3934 std::iter::from_fn(move || {
3935 let (next_ix, _) = iterators
3936 .iter_mut()
3937 .enumerate()
3938 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
3939 .min_by(|(_, a), (_, b)| {
3940 let cmp = a
3941 .range
3942 .start
3943 .cmp(&b.range.start)
3944 // when range is equal, sort by diagnostic severity
3945 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
3946 // and stabilize order with group_id
3947 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
3948 if reversed {
3949 cmp.reverse()
3950 } else {
3951 cmp
3952 }
3953 })?;
3954 iterators[next_ix].next()
3955 })
3956 }
3957
3958 /// Returns all the diagnostic groups associated with the given
3959 /// language server ID. If no language server ID is provided,
3960 /// all diagnostic groups are returned.
3961 pub fn diagnostic_groups(
3962 &self,
3963 language_server_id: Option<LanguageServerId>,
3964 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
3965 let mut groups = Vec::new();
3966
3967 if let Some(language_server_id) = language_server_id {
3968 if let Ok(ix) = self
3969 .diagnostics
3970 .binary_search_by_key(&language_server_id, |e| e.0)
3971 {
3972 self.diagnostics[ix]
3973 .1
3974 .groups(language_server_id, &mut groups, self);
3975 }
3976 } else {
3977 for (language_server_id, diagnostics) in self.diagnostics.iter() {
3978 diagnostics.groups(*language_server_id, &mut groups, self);
3979 }
3980 }
3981
3982 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
3983 let a_start = &group_a.entries[group_a.primary_ix].range.start;
3984 let b_start = &group_b.entries[group_b.primary_ix].range.start;
3985 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
3986 });
3987
3988 groups
3989 }
3990
3991 /// Returns an iterator over the diagnostics for the given group.
3992 pub fn diagnostic_group<'a, O>(
3993 &'a self,
3994 group_id: usize,
3995 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3996 where
3997 O: 'a + FromAnchor,
3998 {
3999 self.diagnostics
4000 .iter()
4001 .flat_map(move |(_, set)| set.group(group_id, self))
4002 }
4003
4004 /// An integer version number that accounts for all updates besides
4005 /// the buffer's text itself (which is versioned via a version vector).
4006 pub fn non_text_state_update_count(&self) -> usize {
4007 self.non_text_state_update_count
4008 }
4009
4010 /// Returns a snapshot of underlying file.
4011 pub fn file(&self) -> Option<&Arc<dyn File>> {
4012 self.file.as_ref()
4013 }
4014
4015 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4016 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
4017 if let Some(file) = self.file() {
4018 if file.path().file_name().is_none() || include_root {
4019 Some(file.full_path(cx))
4020 } else {
4021 Some(file.path().to_path_buf())
4022 }
4023 } else {
4024 None
4025 }
4026 }
4027}
4028
4029fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4030 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4031}
4032
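/// Computes the indentation of the given character stream: leading spaces and tabs
/// are counted, and the [`IndentKind`] is taken from the first indent character.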
4033fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4034 let mut result = IndentSize::spaces(0);
4035 for c in text {
4036 let kind = match c {
4037 ' ' => IndentKind::Space,
4038 '\t' => IndentKind::Tab,
4039 _ => break,
4040 };
4041 if result.len == 0 {
4042 result.kind = kind;
4043 }
4044 result.len += 1;
4045 }
4046 result
4047}
4048
4049impl Clone for BufferSnapshot {
4050 fn clone(&self) -> Self {
4051 Self {
4052 text: self.text.clone(),
4053 git_diff: self.git_diff.clone(),
4054 syntax: self.syntax.clone(),
4055 file: self.file.clone(),
4056 remote_selections: self.remote_selections.clone(),
4057 diagnostics: self.diagnostics.clone(),
4058 language: self.language.clone(),
4059 non_text_state_update_count: self.non_text_state_update_count,
4060 }
4061 }
4062}
4063
4064impl Deref for BufferSnapshot {
4065 type Target = text::BufferSnapshot;
4066
4067 fn deref(&self) -> &Self::Target {
4068 &self.text
4069 }
4070}
4071
4072unsafe impl<'a> Send for BufferChunks<'a> {}
4073
4074impl<'a> BufferChunks<'a> {
4075 pub(crate) fn new(
4076 text: &'a Rope,
4077 range: Range<usize>,
4078 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4079 diagnostics: bool,
4080 buffer_snapshot: Option<&'a BufferSnapshot>,
4081 ) -> Self {
4082 let mut highlights = None;
4083 if let Some((captures, highlight_maps)) = syntax {
4084 highlights = Some(BufferChunkHighlights {
4085 captures,
4086 next_capture: None,
4087 stack: Default::default(),
4088 highlight_maps,
4089 })
4090 }
4091
4092 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4093 let chunks = text.chunks_in_range(range.clone());
4094
4095 let mut this = BufferChunks {
4096 range,
4097 buffer_snapshot,
4098 chunks,
4099 diagnostic_endpoints,
4100 error_depth: 0,
4101 warning_depth: 0,
4102 information_depth: 0,
4103 hint_depth: 0,
4104 unnecessary_depth: 0,
4105 highlights,
4106 };
4107 this.initialize_diagnostic_endpoints();
4108 this
4109 }
4110
4111 /// Seeks to the given byte range in the buffer.
4112 pub fn seek(&mut self, range: Range<usize>) {
4113 let old_range = std::mem::replace(&mut self.range, range.clone());
4114 self.chunks.set_range(self.range.clone());
4115 if let Some(highlights) = self.highlights.as_mut() {
4116 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4117 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4118 highlights
4119 .stack
4120 .retain(|(end_offset, _)| *end_offset > range.start);
4121 if let Some(capture) = &highlights.next_capture {
4122 if range.start >= capture.node.start_byte() {
4123 let next_capture_end = capture.node.end_byte();
4124 if range.start < next_capture_end {
4125 highlights.stack.push((
4126 next_capture_end,
4127 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4128 ));
4129 }
4130 highlights.next_capture.take();
4131 }
4132 }
4133 } else if let Some(snapshot) = self.buffer_snapshot {
4134 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4135 *highlights = BufferChunkHighlights {
4136 captures,
4137 next_capture: None,
4138 stack: Default::default(),
4139 highlight_maps,
4140 };
4141 } else {
4142 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4143 // Seeking such BufferChunks is not supported.
4144 debug_assert!(false, "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot");
4145 }
4146
4147 highlights.captures.set_byte_range(self.range.clone());
4148 self.initialize_diagnostic_endpoints();
4149 }
4150 }
4151
4152 fn initialize_diagnostic_endpoints(&mut self) {
4153 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4154 if let Some(buffer) = self.buffer_snapshot {
4155 let mut diagnostic_endpoints = Vec::new();
4156 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4157 diagnostic_endpoints.push(DiagnosticEndpoint {
4158 offset: entry.range.start,
4159 is_start: true,
4160 severity: entry.diagnostic.severity,
4161 is_unnecessary: entry.diagnostic.is_unnecessary,
4162 });
4163 diagnostic_endpoints.push(DiagnosticEndpoint {
4164 offset: entry.range.end,
4165 is_start: false,
4166 severity: entry.diagnostic.severity,
4167 is_unnecessary: entry.diagnostic.is_unnecessary,
4168 });
4169 }
4170 diagnostic_endpoints
4171 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4172 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4173 self.hint_depth = 0;
4174 self.error_depth = 0;
4175 self.warning_depth = 0;
4176 self.information_depth = 0;
4177 }
4178 }
4179 }
4180
4181 /// The current byte offset in the buffer.
4182 pub fn offset(&self) -> usize {
4183 self.range.start
4184 }
4185
4186 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4187 let depth = match endpoint.severity {
4188 DiagnosticSeverity::ERROR => &mut self.error_depth,
4189 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4190 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4191 DiagnosticSeverity::HINT => &mut self.hint_depth,
4192 _ => return,
4193 };
4194 if endpoint.is_start {
4195 *depth += 1;
4196 } else {
4197 *depth -= 1;
4198 }
4199
4200 if endpoint.is_unnecessary {
4201 if endpoint.is_start {
4202 self.unnecessary_depth += 1;
4203 } else {
4204 self.unnecessary_depth -= 1;
4205 }
4206 }
4207 }
4208
4209 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4210 if self.error_depth > 0 {
4211 Some(DiagnosticSeverity::ERROR)
4212 } else if self.warning_depth > 0 {
4213 Some(DiagnosticSeverity::WARNING)
4214 } else if self.information_depth > 0 {
4215 Some(DiagnosticSeverity::INFORMATION)
4216 } else if self.hint_depth > 0 {
4217 Some(DiagnosticSeverity::HINT)
4218 } else {
4219 None
4220 }
4221 }
4222
4223 fn current_code_is_unnecessary(&self) -> bool {
4224 self.unnecessary_depth > 0
4225 }
4226}
4227
4228impl<'a> Iterator for BufferChunks<'a> {
4229 type Item = Chunk<'a>;
4230
4231 fn next(&mut self) -> Option<Self::Item> {
4232 let mut next_capture_start = usize::MAX;
4233 let mut next_diagnostic_endpoint = usize::MAX;
4234
4235 if let Some(highlights) = self.highlights.as_mut() {
4236 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4237 if *parent_capture_end <= self.range.start {
4238 highlights.stack.pop();
4239 } else {
4240 break;
4241 }
4242 }
4243
4244 if highlights.next_capture.is_none() {
4245 highlights.next_capture = highlights.captures.next();
4246 }
4247
4248 while let Some(capture) = highlights.next_capture.as_ref() {
4249 if self.range.start < capture.node.start_byte() {
4250 next_capture_start = capture.node.start_byte();
4251 break;
4252 } else {
4253 let highlight_id =
4254 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4255 highlights
4256 .stack
4257 .push((capture.node.end_byte(), highlight_id));
4258 highlights.next_capture = highlights.captures.next();
4259 }
4260 }
4261 }
4262
4263 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4264 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4265 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4266 if endpoint.offset <= self.range.start {
4267 self.update_diagnostic_depths(endpoint);
4268 diagnostic_endpoints.next();
4269 } else {
4270 next_diagnostic_endpoint = endpoint.offset;
4271 break;
4272 }
4273 }
4274 }
4275 self.diagnostic_endpoints = diagnostic_endpoints;
4276
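// Emit the next slice of the current text chunk, ending at the nearest syntax
// capture boundary, diagnostic endpoint, or end of the enclosing highlight capture.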
4277 if let Some(chunk) = self.chunks.peek() {
4278 let chunk_start = self.range.start;
4279 let mut chunk_end = (self.chunks.offset() + chunk.len())
4280 .min(next_capture_start)
4281 .min(next_diagnostic_endpoint);
4282 let mut highlight_id = None;
4283 if let Some(highlights) = self.highlights.as_ref() {
4284 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4285 chunk_end = chunk_end.min(*parent_capture_end);
4286 highlight_id = Some(*parent_highlight_id);
4287 }
4288 }
4289
4290 let slice =
4291 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4292 self.range.start = chunk_end;
4293 if self.range.start == self.chunks.offset() + chunk.len() {
4294 self.chunks.next().unwrap();
4295 }
4296
4297 Some(Chunk {
4298 text: slice,
4299 syntax_highlight_id: highlight_id,
4300 diagnostic_severity: self.current_diagnostic_severity(),
4301 is_unnecessary: self.current_code_is_unnecessary(),
4302 ..Default::default()
4303 })
4304 } else {
4305 None
4306 }
4307 }
4308}
4309
4310impl operation_queue::Operation for Operation {
4311 fn lamport_timestamp(&self) -> clock::Lamport {
4312 match self {
4313 Operation::Buffer(_) => {
4314 unreachable!("buffer operations should never be deferred at this layer")
4315 }
4316 Operation::UpdateDiagnostics {
4317 lamport_timestamp, ..
4318 }
4319 | Operation::UpdateSelections {
4320 lamport_timestamp, ..
4321 }
4322 | Operation::UpdateCompletionTriggers {
4323 lamport_timestamp, ..
4324 } => *lamport_timestamp,
4325 }
4326 }
4327}
4328
4329impl Default for Diagnostic {
4330 fn default() -> Self {
4331 Self {
4332 source: Default::default(),
4333 code: None,
4334 severity: DiagnosticSeverity::ERROR,
4335 message: Default::default(),
4336 group_id: 0,
4337 is_primary: false,
4338 is_disk_based: false,
4339 is_unnecessary: false,
4340 data: None,
4341 }
4342 }
4343}
4344
4345impl IndentSize {
4346 /// Returns an [`IndentSize`] representing the given number of spaces.
4347 pub fn spaces(len: u32) -> Self {
4348 Self {
4349 len,
4350 kind: IndentKind::Space,
4351 }
4352 }
4353
4354 /// Returns an [`IndentSize`] representing a tab.
4355 pub fn tab() -> Self {
4356 Self {
4357 len: 1,
4358 kind: IndentKind::Tab,
4359 }
4360 }
4361
4362 /// An iterator over the characters represented by this [`IndentSize`].
4363 pub fn chars(&self) -> impl Iterator<Item = char> {
4364 iter::repeat(self.char()).take(self.len as usize)
4365 }
4366
4367 /// The character representation of this [`IndentSize`].
4368 pub fn char(&self) -> char {
4369 match self.kind {
4370 IndentKind::Space => ' ',
4371 IndentKind::Tab => '\t',
4372 }
4373 }
4374
4375 /// Consumes the current [`IndentSize`] and returns a new one that has
4376 /// been shrunk or enlarged by the given size along the given direction.
4377 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4378 match direction {
4379 Ordering::Less => {
4380 if self.kind == size.kind && self.len >= size.len {
4381 self.len -= size.len;
4382 }
4383 }
4384 Ordering::Equal => {}
4385 Ordering::Greater => {
4386 if self.len == 0 {
4387 self = size;
4388 } else if self.kind == size.kind {
4389 self.len += size.len;
4390 }
4391 }
4392 }
4393 self
4394 }
4395
4396 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4397 match self.kind {
4398 IndentKind::Space => self.len as usize,
4399 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4400 }
4401 }
4402}
4403
4404#[cfg(any(test, feature = "test-support"))]
4405pub struct TestFile {
4406 pub path: Arc<Path>,
4407 pub root_name: String,
4408}
4409
4410#[cfg(any(test, feature = "test-support"))]
4411impl File for TestFile {
4412 fn path(&self) -> &Arc<Path> {
4413 &self.path
4414 }
4415
4416 fn full_path(&self, _: &gpui::AppContext) -> PathBuf {
4417 PathBuf::from(&self.root_name).join(self.path.as_ref())
4418 }
4419
4420 fn as_local(&self) -> Option<&dyn LocalFile> {
4421 None
4422 }
4423
4424 fn disk_state(&self) -> DiskState {
4425 unimplemented!()
4426 }
4427
4428 fn file_name<'a>(&'a self, _: &'a gpui::AppContext) -> &'a std::ffi::OsStr {
4429 self.path().file_name().unwrap_or(self.root_name.as_ref())
4430 }
4431
4432 fn worktree_id(&self, _: &AppContext) -> WorktreeId {
4433 WorktreeId::from_usize(0)
4434 }
4435
4436 fn as_any(&self) -> &dyn std::any::Any {
4437 unimplemented!()
4438 }
4439
4440 fn to_proto(&self, _: &AppContext) -> rpc::proto::File {
4441 unimplemented!()
4442 }
4443
4444 fn is_private(&self) -> bool {
4445 false
4446 }
4447}
4448
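/// Coalesces an ascending sequence of row numbers into contiguous ranges, each
/// containing at most `max_len` rows.
///
/// A minimal sketch (illustrative only):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1u32, 2, 3, 7, 8].into_iter(), 2).collect();
/// // Yields 1..3, 3..4, and 7..9.
/// ```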
4449pub(crate) fn contiguous_ranges(
4450 values: impl Iterator<Item = u32>,
4451 max_len: usize,
4452) -> impl Iterator<Item = Range<u32>> {
4453 let mut values = values;
4454 let mut current_range: Option<Range<u32>> = None;
4455 std::iter::from_fn(move || loop {
4456 if let Some(value) = values.next() {
4457 if let Some(range) = &mut current_range {
4458 if value == range.end && range.len() < max_len {
4459 range.end += 1;
4460 continue;
4461 }
4462 }
4463
4464 let prev_range = current_range.clone();
4465 current_range = Some(value..(value + 1));
4466 if prev_range.is_some() {
4467 return prev_range;
4468 }
4469 } else {
4470 return current_range.take();
4471 }
4472 })
4473}
4474
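/// Classifies characters as word, whitespace, or punctuation characters, optionally
/// taking a language scope's word characters into account.
///
/// A minimal sketch (illustrative only):
///
/// ```ignore
/// let classifier = CharClassifier::new(None).ignore_punctuation(false);
/// assert!(classifier.is_word('a'));
/// assert!(classifier.is_whitespace(' '));
/// assert!(classifier.is_punctuation('.'));
/// ```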
4475#[derive(Default, Debug)]
4476pub struct CharClassifier {
4477 scope: Option<LanguageScope>,
4478 for_completion: bool,
4479 ignore_punctuation: bool,
4480}
4481
4482impl CharClassifier {
4483 pub fn new(scope: Option<LanguageScope>) -> Self {
4484 Self {
4485 scope,
4486 for_completion: false,
4487 ignore_punctuation: false,
4488 }
4489 }
4490
4491 pub fn for_completion(self, for_completion: bool) -> Self {
4492 Self {
4493 for_completion,
4494 ..self
4495 }
4496 }
4497
4498 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4499 Self {
4500 ignore_punctuation,
4501 ..self
4502 }
4503 }
4504
4505 pub fn is_whitespace(&self, c: char) -> bool {
4506 self.kind(c) == CharKind::Whitespace
4507 }
4508
4509 pub fn is_word(&self, c: char) -> bool {
4510 self.kind(c) == CharKind::Word
4511 }
4512
4513 pub fn is_punctuation(&self, c: char) -> bool {
4514 self.kind(c) == CharKind::Punctuation
4515 }
4516
4517 pub fn kind(&self, c: char) -> CharKind {
4518 if c.is_whitespace() {
4519 return CharKind::Whitespace;
4520 } else if c.is_alphanumeric() || c == '_' {
4521 return CharKind::Word;
4522 }
4523
4524 if let Some(scope) = &self.scope {
4525 if let Some(characters) = scope.word_characters() {
4526 if characters.contains(&c) {
4527 if c == '-' && !self.for_completion && !self.ignore_punctuation {
4528 return CharKind::Punctuation;
4529 }
4530 return CharKind::Word;
4531 }
4532 }
4533 }
4534
4535 if self.ignore_punctuation {
4536 CharKind::Word
4537 } else {
4538 CharKind::Punctuation
4539 }
4540 }
4541}
4542
4543/// Find all of the ranges of whitespace that occur at the ends of lines
4544/// in the given rope.
4545///
4546/// This could also be done with a regex search, but this implementation
4547/// avoids copying text.
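///
/// A minimal sketch (illustrative only):
///
/// ```ignore
/// let rope = Rope::from("let x = 1;   \nlet y = 2;\t\n");
/// // Yields the byte ranges of the three trailing spaces and the trailing tab.
/// let ranges = trailing_whitespace_ranges(&rope);
/// ```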
4548pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4549 let mut ranges = Vec::new();
4550
4551 let mut offset = 0;
4552 let mut prev_chunk_trailing_whitespace_range = 0..0;
4553 for chunk in rope.chunks() {
4554 let mut prev_line_trailing_whitespace_range = 0..0;
4555 for (i, line) in chunk.split('\n').enumerate() {
4556 let line_end_offset = offset + line.len();
4557 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4558 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4559
4560 if i == 0 && trimmed_line_len == 0 {
4561 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4562 }
4563 if !prev_line_trailing_whitespace_range.is_empty() {
4564 ranges.push(prev_line_trailing_whitespace_range);
4565 }
4566
4567 offset = line_end_offset + 1;
4568 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4569 }
4570
4571 offset -= 1;
4572 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4573 }
4574
4575 if !prev_chunk_trailing_whitespace_range.is_empty() {
4576 ranges.push(prev_chunk_trailing_whitespace_range);
4577 }
4578
4579 ranges
4580}