1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 markdown::ParsedMarkdown,
5 proto, Grammar, Language, LanguageRegistry,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 language_settings::{language_settings, IndentGuideSettings, LanguageSettings},
10 markdown::parse_markdown,
11 outline::OutlineItem,
12 syntax_map::{
13 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
14 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
15 },
16 task_context::RunnableRange,
17 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
18};
19use anyhow::{anyhow, Context, Result};
20use async_watch as watch;
21use clock::Lamport;
22pub use clock::ReplicaId;
23use futures::channel::oneshot;
24use gpui::{
25 AnyElement, AppContext, Context as _, EventEmitter, HighlightStyle, Model, ModelContext,
26 Pixels, Task, TaskLabel, WindowContext,
27};
28use lsp::LanguageServerId;
29use parking_lot::Mutex;
30use schemars::JsonSchema;
31use serde::{Deserialize, Serialize};
32use serde_json::Value;
33use settings::WorktreeId;
34use similar::{ChangeTag, TextDiff};
35use smallvec::SmallVec;
36use smol::future::yield_now;
37use std::{
38 any::Any,
39 cell::Cell,
40 cmp::{self, Ordering, Reverse},
41 collections::BTreeMap,
42 ffi::OsStr,
43 fmt,
44 future::Future,
45 iter::{self, Iterator, Peekable},
46 mem,
47 ops::{Deref, DerefMut, Range},
48 path::{Path, PathBuf},
49 str,
50 sync::{Arc, LazyLock},
51 time::{Duration, Instant, SystemTime},
52 vec,
53};
54use sum_tree::TreeMap;
55use text::operation_queue::OperationQueue;
56use text::*;
57pub use text::{
58 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
59 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
60 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
61 Transaction, TransactionId, Unclipped,
62};
63use theme::SyntaxTheme;
64#[cfg(any(test, feature = "test-support"))]
65use util::RandomCharIter;
66use util::{debug_panic, RangeExt};
67
68#[cfg(any(test, feature = "test-support"))]
69pub use {tree_sitter_rust, tree_sitter_typescript};
70
71pub use lsp::DiagnosticSeverity;
72
73/// A label for the background task spawned by the buffer to compute
74/// a diff against the contents of its file.
75pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
76
/// Indicates whether a [`Buffer`] may be edited.
78#[derive(PartialEq, Clone, Copy, Debug)]
79pub enum Capability {
80 /// The buffer is a mutable replica.
81 ReadWrite,
82 /// The buffer is a read-only replica.
83 ReadOnly,
84}
85
86pub type BufferRow = u32;
87
88#[derive(Clone)]
89enum BufferDiffBase {
90 Git(Rope),
91 PastBufferVersion {
92 buffer: Model<Buffer>,
93 rope: Rope,
94 merged_operations: Vec<Lamport>,
95 },
96}
97
98/// An in-memory representation of a source code file, including its text,
99/// syntax trees, git status, and diagnostics.
100pub struct Buffer {
101 text: TextBuffer,
102 diff_base: Option<BufferDiffBase>,
103 git_diff: git::diff::BufferDiff,
104 file: Option<Arc<dyn File>>,
105 /// The mtime of the file when this buffer was last loaded from
106 /// or saved to disk.
107 saved_mtime: Option<SystemTime>,
108 /// The version vector when this buffer was last loaded from
109 /// or saved to disk.
110 saved_version: clock::Global,
111 preview_version: clock::Global,
112 transaction_depth: usize,
113 was_dirty_before_starting_transaction: Option<bool>,
114 reload_task: Option<Task<Result<()>>>,
115 language: Option<Arc<Language>>,
116 autoindent_requests: Vec<Arc<AutoindentRequest>>,
117 pending_autoindent: Option<Task<()>>,
118 sync_parse_timeout: Duration,
119 syntax_map: Mutex<SyntaxMap>,
120 parsing_in_background: bool,
121 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
122 non_text_state_update_count: usize,
123 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
124 remote_selections: TreeMap<ReplicaId, SelectionSet>,
125 diagnostics_timestamp: clock::Lamport,
126 completion_triggers: Vec<String>,
127 completion_triggers_timestamp: clock::Lamport,
128 deferred_ops: OperationQueue<Operation>,
129 capability: Capability,
130 has_conflict: bool,
131 diff_base_version: usize,
    /// Memoizes the result of `has_edits_since(&self.saved_version)`.
    /// The cell holds `(self.version, has_unsaved_edits)` as of the most recent call.
134 has_unsaved_edits: Cell<(clock::Global, bool)>,
135 _subscriptions: Vec<gpui::Subscription>,
136}
137
138#[derive(Copy, Clone, Debug, PartialEq, Eq)]
139pub enum ParseStatus {
140 Idle,
141 Parsing,
142}
143
144/// An immutable, cheaply cloneable representation of a fixed
145/// state of a buffer.
146pub struct BufferSnapshot {
147 text: text::BufferSnapshot,
148 git_diff: git::diff::BufferDiff,
149 pub(crate) syntax: SyntaxSnapshot,
150 file: Option<Arc<dyn File>>,
151 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
152 remote_selections: TreeMap<ReplicaId, SelectionSet>,
153 language: Option<Arc<Language>>,
154 non_text_state_update_count: usize,
155}
156
157/// The kind and amount of indentation in a particular line. For now,
158/// assumes that indentation is all the same character.
159#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
160pub struct IndentSize {
161 /// The number of bytes that comprise the indentation.
162 pub len: u32,
163 /// The kind of whitespace used for indentation.
164 pub kind: IndentKind,
165}
166
167/// A whitespace character that's used for indentation.
168#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
169pub enum IndentKind {
170 /// An ASCII space character.
171 #[default]
172 Space,
173 /// An ASCII tab character.
174 Tab,
175}
176
177/// The shape of a selection cursor.
178#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
179#[serde(rename_all = "snake_case")]
180pub enum CursorShape {
181 /// A vertical bar
182 #[default]
183 Bar,
184 /// A block that surrounds the following character
185 Block,
186 /// An underline that runs along the following character
187 Underscore,
188 /// A box drawn around the following character
189 Hollow,
190}
191
192#[derive(Clone, Debug)]
193struct SelectionSet {
194 line_mode: bool,
195 cursor_shape: CursorShape,
196 selections: Arc<[Selection<Anchor>]>,
197 lamport_timestamp: clock::Lamport,
198}
199
200/// A diagnostic associated with a certain range of a buffer.
201#[derive(Clone, Debug, PartialEq, Eq)]
202pub struct Diagnostic {
203 /// The name of the service that produced this diagnostic.
204 pub source: Option<String>,
205 /// A machine-readable code that identifies this diagnostic.
206 pub code: Option<String>,
207 /// Whether this diagnostic is a hint, warning, or error.
208 pub severity: DiagnosticSeverity,
209 /// The human-readable message associated with this diagnostic.
210 pub message: String,
211 /// An id that identifies the group to which this diagnostic belongs.
212 ///
213 /// When a language server produces a diagnostic with
214 /// one or more associated diagnostics, those diagnostics are all
215 /// assigned a single group ID.
216 pub group_id: usize,
217 /// Whether this diagnostic is the primary diagnostic for its group.
218 ///
219 /// In a given group, the primary diagnostic is the top-level diagnostic
220 /// returned by the language server. The non-primary diagnostics are the
221 /// associated diagnostics.
222 pub is_primary: bool,
223 /// Whether this diagnostic is considered to originate from an analysis of
224 /// files on disk, as opposed to any unsaved buffer contents. This is a
225 /// property of a given diagnostic source, and is configured for a given
226 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
227 /// for the language server.
228 pub is_disk_based: bool,
229 /// Whether this diagnostic marks unnecessary code.
230 pub is_unnecessary: bool,
    /// Data from the language server that produced this diagnostic. It is passed back to the language server when code actions are requested for this diagnostic.
232 pub data: Option<Value>,
233}
234
235/// TODO - move this into the `project` crate and make it private.
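///
/// A short plain-text string becomes [`Documentation::SingleLine`], while markdown is
/// parsed into [`Documentation::MultiLineMarkdown`]. A sketch (not compiled here;
/// `registry` stands in for an assumed `Arc<LanguageRegistry>`):
///
/// ```ignore
/// let docs = prepare_completion_documentation(
///     &lsp::Documentation::String("Returns the length.".into()),
///     &registry,
///     None,
/// )
/// .await;
/// assert!(matches!(docs, Documentation::SingleLine(_)));
/// ```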
236pub async fn prepare_completion_documentation(
237 documentation: &lsp::Documentation,
238 language_registry: &Arc<LanguageRegistry>,
239 language: Option<Arc<Language>>,
240) -> Documentation {
241 match documentation {
242 lsp::Documentation::String(text) => {
243 if text.lines().count() <= 1 {
244 Documentation::SingleLine(text.clone())
245 } else {
246 Documentation::MultiLinePlainText(text.clone())
247 }
248 }
249
250 lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
251 lsp::MarkupKind::PlainText => {
252 if value.lines().count() <= 1 {
253 Documentation::SingleLine(value.clone())
254 } else {
255 Documentation::MultiLinePlainText(value.clone())
256 }
257 }
258
259 lsp::MarkupKind::Markdown => {
260 let parsed = parse_markdown(value, language_registry, language).await;
261 Documentation::MultiLineMarkdown(parsed)
262 }
263 },
264 }
265}
266
267/// Documentation associated with a [`Completion`].
268#[derive(Clone, Debug)]
269pub enum Documentation {
270 /// There is no documentation for this completion.
271 Undocumented,
272 /// A single line of documentation.
273 SingleLine(String),
274 /// Multiple lines of plain text documentation.
275 MultiLinePlainText(String),
276 /// Markdown documentation.
277 MultiLineMarkdown(ParsedMarkdown),
278}
279
280/// An operation used to synchronize this buffer with its other replicas.
281#[derive(Clone, Debug, PartialEq)]
282pub enum Operation {
283 /// A text operation.
284 Buffer(text::Operation),
285
286 /// An update to the buffer's diagnostics.
287 UpdateDiagnostics {
288 /// The id of the language server that produced the new diagnostics.
289 server_id: LanguageServerId,
290 /// The diagnostics.
291 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
292 /// The buffer's lamport timestamp.
293 lamport_timestamp: clock::Lamport,
294 },
295
296 /// An update to the most recent selections in this buffer.
297 UpdateSelections {
298 /// The selections.
299 selections: Arc<[Selection<Anchor>]>,
300 /// The buffer's lamport timestamp.
301 lamport_timestamp: clock::Lamport,
302 /// Whether the selections are in 'line mode'.
303 line_mode: bool,
304 /// The [`CursorShape`] associated with these selections.
305 cursor_shape: CursorShape,
306 },
307
308 /// An update to the characters that should trigger autocompletion
309 /// for this buffer.
310 UpdateCompletionTriggers {
311 /// The characters that trigger autocompletion.
312 triggers: Vec<String>,
313 /// The buffer's lamport timestamp.
314 lamport_timestamp: clock::Lamport,
315 },
316}
317
318/// An event that occurs in a buffer.
319#[derive(Clone, Debug, PartialEq)]
320pub enum BufferEvent {
321 /// The buffer was changed in a way that must be
322 /// propagated to its other replicas.
323 Operation {
324 operation: Operation,
325 is_local: bool,
326 },
327 /// The buffer was edited.
328 Edited,
329 /// The buffer's `dirty` bit changed.
330 DirtyChanged,
331 /// The buffer was saved.
332 Saved,
333 /// The buffer's file was changed on disk.
334 FileHandleChanged,
335 /// The buffer was reloaded.
336 Reloaded,
337 /// The buffer's diff_base changed.
338 DiffBaseChanged,
    /// The buffer's diff against its diff base was recalculated.
340 DiffUpdated,
341 /// The buffer's language was changed.
342 LanguageChanged,
343 /// The buffer's syntax trees were updated.
344 Reparsed,
345 /// The buffer's diagnostics were updated.
346 DiagnosticsUpdated,
347 /// The buffer gained or lost editing capabilities.
348 CapabilityChanged,
349 /// The buffer was explicitly requested to close.
350 Closed,
351 /// The buffer was discarded when closing.
352 Discarded,
353}
354
355/// The file associated with a buffer.
356pub trait File: Send + Sync {
357 /// Returns the [`LocalFile`] associated with this file, if the
358 /// file is local.
359 fn as_local(&self) -> Option<&dyn LocalFile>;
360
361 /// Returns whether this file is local.
362 fn is_local(&self) -> bool {
363 self.as_local().is_some()
364 }
365
366 /// Returns the file's mtime.
367 fn mtime(&self) -> Option<SystemTime>;
368
369 /// Returns the path of this file relative to the worktree's root directory.
370 fn path(&self) -> &Arc<Path>;
371
372 /// Returns the path of this file relative to the worktree's parent directory (this means it
373 /// includes the name of the worktree's root folder).
374 fn full_path(&self, cx: &AppContext) -> PathBuf;
375
376 /// Returns the last component of this handle's absolute path. If this handle refers to the root
377 /// of its worktree, then this method will return the name of the worktree itself.
378 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
379
380 /// Returns the id of the worktree to which this file belongs.
381 ///
382 /// This is needed for looking up project-specific settings.
383 fn worktree_id(&self, cx: &AppContext) -> WorktreeId;
384
385 /// Returns whether the file has been deleted.
386 fn is_deleted(&self) -> bool;
387
    /// Returns whether the file has existed on disk at some point.
389 fn is_created(&self) -> bool {
390 self.mtime().is_some()
391 }
392
393 /// Converts this file into an [`Any`] trait object.
394 fn as_any(&self) -> &dyn Any;
395
396 /// Converts this file into a protobuf message.
397 fn to_proto(&self, cx: &AppContext) -> rpc::proto::File;
398
    /// Returns whether Zed considers this to be a private file.
400 fn is_private(&self) -> bool;
401}
402
403/// The file associated with a buffer, in the case where the file is on the local disk.
404pub trait LocalFile: File {
    /// Returns the absolute path of this file.
406 fn abs_path(&self, cx: &AppContext) -> PathBuf;
407
408 /// Loads the file's contents from disk.
409 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
410
411 /// Returns true if the file should not be shared with collaborators.
412 fn is_private(&self, _: &AppContext) -> bool {
413 false
414 }
415}
416
417/// The auto-indent behavior associated with an editing operation.
418/// For some editing operations, each affected line of text has its
419/// indentation recomputed. For other operations, the entire block
420/// of edited text is adjusted uniformly.
421#[derive(Clone, Debug)]
422pub enum AutoindentMode {
423 /// Indent each line of inserted text.
424 EachLine,
425 /// Apply the same indentation adjustment to all of the lines
426 /// in a given insertion.
427 Block {
428 /// The original indentation level of the first line of each
429 /// insertion, if it has been copied.
430 original_indent_columns: Vec<u32>,
431 },
432}
433
434#[derive(Clone)]
435struct AutoindentRequest {
436 before_edit: BufferSnapshot,
437 entries: Vec<AutoindentRequestEntry>,
438 is_block_mode: bool,
439}
440
441#[derive(Clone)]
442struct AutoindentRequestEntry {
443 /// A range of the buffer whose indentation should be adjusted.
444 range: Range<Anchor>,
445 /// Whether or not these lines should be considered brand new, for the
446 /// purpose of auto-indent. When text is not new, its indentation will
447 /// only be adjusted if the suggested indentation level has *changed*
448 /// since the edit was made.
449 first_line_is_new: bool,
450 indent_size: IndentSize,
451 original_indent_column: Option<u32>,
452}
453
454#[derive(Debug)]
455struct IndentSuggestion {
456 basis_row: u32,
457 delta: Ordering,
458 within_error: bool,
459}
460
461struct BufferChunkHighlights<'a> {
462 captures: SyntaxMapCaptures<'a>,
463 next_capture: Option<SyntaxMapCapture<'a>>,
464 stack: Vec<(usize, HighlightId)>,
465 highlight_maps: Vec<HighlightMap>,
466}
467
468/// An iterator that yields chunks of a buffer's text, along with their
469/// syntax highlights and diagnostic status.
470pub struct BufferChunks<'a> {
471 buffer_snapshot: Option<&'a BufferSnapshot>,
472 range: Range<usize>,
473 chunks: text::Chunks<'a>,
474 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
475 error_depth: usize,
476 warning_depth: usize,
477 information_depth: usize,
478 hint_depth: usize,
479 unnecessary_depth: usize,
480 highlights: Option<BufferChunkHighlights<'a>>,
481}
482
483/// A chunk of a buffer's text, along with its syntax highlight and
484/// diagnostic status.
485#[derive(Clone, Debug, Default)]
486pub struct Chunk<'a> {
487 /// The text of the chunk.
488 pub text: &'a str,
489 /// The syntax highlighting style of the chunk.
490 pub syntax_highlight_id: Option<HighlightId>,
491 /// The highlight style that has been applied to this chunk in
492 /// the editor.
493 pub highlight_style: Option<HighlightStyle>,
494 /// The severity of diagnostic associated with this chunk, if any.
495 pub diagnostic_severity: Option<DiagnosticSeverity>,
496 /// Whether this chunk of text is marked as unnecessary.
497 pub is_unnecessary: bool,
498 /// Whether this chunk of text was originally a tab character.
499 pub is_tab: bool,
500 /// An optional recipe for how the chunk should be presented.
501 pub renderer: Option<ChunkRenderer>,
502}
503
504/// A recipe for how the chunk should be presented.
505#[derive(Clone)]
506pub struct ChunkRenderer {
    /// Creates a custom element to represent this chunk.
508 pub render: Arc<dyn Send + Sync + Fn(&mut ChunkRendererContext) -> AnyElement>,
509 /// If true, the element is constrained to the shaped width of the text.
510 pub constrain_width: bool,
511}
512
513pub struct ChunkRendererContext<'a, 'b> {
514 pub context: &'a mut WindowContext<'b>,
515 pub max_width: Pixels,
516}
517
518impl fmt::Debug for ChunkRenderer {
519 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
520 f.debug_struct("ChunkRenderer")
521 .field("constrain_width", &self.constrain_width)
522 .finish()
523 }
524}
525
526impl<'a, 'b> Deref for ChunkRendererContext<'a, 'b> {
527 type Target = WindowContext<'b>;
528
529 fn deref(&self) -> &Self::Target {
530 self.context
531 }
532}
533
534impl<'a, 'b> DerefMut for ChunkRendererContext<'a, 'b> {
535 fn deref_mut(&mut self) -> &mut Self::Target {
536 self.context
537 }
538}
539
540/// A set of edits to a given version of a buffer, computed asynchronously.
541#[derive(Debug)]
542pub struct Diff {
543 pub(crate) base_version: clock::Global,
544 line_ending: LineEnding,
545 edits: Vec<(Range<usize>, Arc<str>)>,
546}
547
548#[derive(Clone, Copy)]
549pub(crate) struct DiagnosticEndpoint {
550 offset: usize,
551 is_start: bool,
552 severity: DiagnosticSeverity,
553 is_unnecessary: bool,
554}
555
556/// A class of characters, used for characterizing a run of text.
557#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
558pub enum CharKind {
559 /// Whitespace.
560 Whitespace,
561 /// Punctuation.
562 Punctuation,
563 /// Word.
564 Word,
565}
566
/// A runnable is a set of data about a buffer region that can be resolved into a task.
568pub struct Runnable {
569 pub tags: SmallVec<[RunnableTag; 1]>,
570 pub language: Arc<Language>,
571 pub buffer: BufferId,
572}
573
574#[derive(Clone, Debug, PartialEq)]
575pub struct IndentGuide {
576 pub buffer_id: BufferId,
577 pub start_row: BufferRow,
578 pub end_row: BufferRow,
579 pub depth: u32,
580 pub tab_size: u32,
581 pub settings: IndentGuideSettings,
582}
583
584impl IndentGuide {
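    /// The indentation level of this guide in columns: `depth * tab_size`.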
585 pub fn indent_level(&self) -> u32 {
586 self.depth * self.tab_size
587 }
588}
589
590impl Buffer {
591 /// Create a new buffer with the given base text.
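    ///
    /// A minimal sketch (gpui model context assumed; not compiled here):
    ///
    /// ```ignore
    /// let buffer = cx.new_model(|cx| Buffer::local("fn main() {}\n", cx));
    /// ```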
592 pub fn local<T: Into<String>>(base_text: T, cx: &ModelContext<Self>) -> Self {
593 Self::build(
594 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
595 None,
596 None,
597 Capability::ReadWrite,
598 )
599 }
600
601 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
602 pub fn local_normalized(
603 base_text_normalized: Rope,
604 line_ending: LineEnding,
605 cx: &ModelContext<Self>,
606 ) -> Self {
607 Self::build(
608 TextBuffer::new_normalized(
609 0,
610 cx.entity_id().as_non_zero_u64().into(),
611 line_ending,
612 base_text_normalized,
613 ),
614 None,
615 None,
616 Capability::ReadWrite,
617 )
618 }
619
620 /// Create a new buffer that is a replica of a remote buffer.
621 pub fn remote(
622 remote_id: BufferId,
623 replica_id: ReplicaId,
624 capability: Capability,
625 base_text: impl Into<String>,
626 ) -> Self {
627 Self::build(
628 TextBuffer::new(replica_id, remote_id, base_text.into()),
629 None,
630 None,
631 capability,
632 )
633 }
634
635 /// Create a new buffer that is a replica of a remote buffer, populating its
636 /// state from the given protobuf message.
637 pub fn from_proto(
638 replica_id: ReplicaId,
639 capability: Capability,
640 message: proto::BufferState,
641 file: Option<Arc<dyn File>>,
642 ) -> Result<Self> {
643 let buffer_id = BufferId::new(message.id)
644 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
645 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
646 let mut this = Self::build(buffer, message.diff_base, file, capability);
647 this.text.set_line_ending(proto::deserialize_line_ending(
648 rpc::proto::LineEnding::from_i32(message.line_ending)
649 .ok_or_else(|| anyhow!("missing line_ending"))?,
650 ));
651 this.saved_version = proto::deserialize_version(&message.saved_version);
652 this.saved_mtime = message.saved_mtime.map(|time| time.into());
653 Ok(this)
654 }
655
656 /// Serialize the buffer's state to a protobuf message.
657 pub fn to_proto(&self, cx: &AppContext) -> proto::BufferState {
658 proto::BufferState {
659 id: self.remote_id().into(),
660 file: self.file.as_ref().map(|f| f.to_proto(cx)),
661 base_text: self.base_text().to_string(),
662 diff_base: self.diff_base().as_ref().map(|h| h.to_string()),
663 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
664 saved_version: proto::serialize_version(&self.saved_version),
665 saved_mtime: self.saved_mtime.map(|time| time.into()),
666 }
667 }
668
669 /// Serialize as protobufs all of the changes to the buffer since the given version.
670 pub fn serialize_ops(
671 &self,
672 since: Option<clock::Global>,
673 cx: &AppContext,
674 ) -> Task<Vec<proto::Operation>> {
675 let mut operations = Vec::new();
676 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
677
678 operations.extend(self.remote_selections.iter().map(|(_, set)| {
679 proto::serialize_operation(&Operation::UpdateSelections {
680 selections: set.selections.clone(),
681 lamport_timestamp: set.lamport_timestamp,
682 line_mode: set.line_mode,
683 cursor_shape: set.cursor_shape,
684 })
685 }));
686
687 for (server_id, diagnostics) in &self.diagnostics {
688 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
689 lamport_timestamp: self.diagnostics_timestamp,
690 server_id: *server_id,
691 diagnostics: diagnostics.iter().cloned().collect(),
692 }));
693 }
694
695 operations.push(proto::serialize_operation(
696 &Operation::UpdateCompletionTriggers {
697 triggers: self.completion_triggers.clone(),
698 lamport_timestamp: self.completion_triggers_timestamp,
699 },
700 ));
701
702 let text_operations = self.text.operations().clone();
703 cx.background_executor().spawn(async move {
704 let since = since.unwrap_or_default();
705 operations.extend(
706 text_operations
707 .iter()
708 .filter(|(_, op)| !since.observed(op.timestamp()))
709 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
710 );
711 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
712 operations
713 })
714 }
715
716 /// Assign a language to the buffer, returning the buffer.
717 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
718 self.set_language(Some(language), cx);
719 self
720 }
721
722 /// Returns the [`Capability`] of this buffer.
723 pub fn capability(&self) -> Capability {
724 self.capability
725 }
726
727 /// Whether this buffer can only be read.
728 pub fn read_only(&self) -> bool {
729 self.capability == Capability::ReadOnly
730 }
731
732 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
733 pub fn build(
734 buffer: TextBuffer,
735 diff_base: Option<String>,
736 file: Option<Arc<dyn File>>,
737 capability: Capability,
738 ) -> Self {
739 let saved_mtime = file.as_ref().and_then(|file| file.mtime());
740 let snapshot = buffer.snapshot();
741 let git_diff = git::diff::BufferDiff::new(&snapshot);
742 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
743 Self {
744 saved_mtime,
745 saved_version: buffer.version(),
746 preview_version: buffer.version(),
747 reload_task: None,
748 transaction_depth: 0,
749 was_dirty_before_starting_transaction: None,
750 has_unsaved_edits: Cell::new((buffer.version(), false)),
751 text: buffer,
752 diff_base: diff_base.map(|mut raw_diff_base| {
753 LineEnding::normalize(&mut raw_diff_base);
754 BufferDiffBase::Git(Rope::from(raw_diff_base))
755 }),
756 diff_base_version: 0,
757 git_diff,
758 file,
759 capability,
760 syntax_map,
761 parsing_in_background: false,
762 non_text_state_update_count: 0,
763 sync_parse_timeout: Duration::from_millis(1),
764 parse_status: async_watch::channel(ParseStatus::Idle),
765 autoindent_requests: Default::default(),
766 pending_autoindent: Default::default(),
767 language: None,
768 remote_selections: Default::default(),
769 diagnostics: Default::default(),
770 diagnostics_timestamp: Default::default(),
771 completion_triggers: Default::default(),
772 completion_triggers_timestamp: Default::default(),
773 deferred_ops: OperationQueue::new(),
774 has_conflict: false,
775 _subscriptions: Vec::new(),
776 }
777 }
778
779 /// Retrieve a snapshot of the buffer's current state. This is computationally
780 /// cheap, and allows reading from the buffer on a background thread.
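    ///
    /// For example (a sketch, not compiled here), a snapshot can be moved into a
    /// background task while the buffer itself stays on the main thread:
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_executor()
    ///     .spawn(async move {
    ///         // Reads go through the immutable snapshot.
    ///         let line_count = snapshot.max_point().row + 1;
    ///         dbg!(line_count);
    ///     })
    ///     .detach();
    /// ```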
781 pub fn snapshot(&self) -> BufferSnapshot {
782 let text = self.text.snapshot();
783 let mut syntax_map = self.syntax_map.lock();
784 syntax_map.interpolate(&text);
785 let syntax = syntax_map.snapshot();
786
787 BufferSnapshot {
788 text,
789 syntax,
790 git_diff: self.git_diff.clone(),
791 file: self.file.clone(),
792 remote_selections: self.remote_selections.clone(),
793 diagnostics: self.diagnostics.clone(),
794 language: self.language.clone(),
795 non_text_state_update_count: self.non_text_state_update_count,
796 }
797 }
798
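    /// Creates a branch of this buffer: a new buffer that starts from the current
    /// contents and diffs against them, so edits made in the branch can later be
    /// merged back with [`Buffer::merge_into_base`].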
799 pub fn branch(&mut self, cx: &mut ModelContext<Self>) -> Model<Self> {
800 let this = cx.handle();
801 cx.new_model(|cx| {
802 let mut branch = Self {
803 diff_base: Some(BufferDiffBase::PastBufferVersion {
804 buffer: this.clone(),
805 rope: self.as_rope().clone(),
806 merged_operations: Default::default(),
807 }),
808 language: self.language.clone(),
809 has_conflict: self.has_conflict,
810 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
811 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
812 ..Self::build(
813 self.text.branch(),
814 None,
815 self.file.clone(),
816 self.capability(),
817 )
818 };
819 if let Some(language_registry) = self.language_registry() {
820 branch.set_language_registry(language_registry);
821 }
822
823 // Reparse the branch buffer so that we get syntax highlighting immediately.
824 branch.reparse(cx);
825
826 branch
827 })
828 }
829
830 /// Applies all of the changes in this buffer that intersect any of the
831 /// given `ranges` to its base buffer.
832 ///
833 /// If `ranges` is empty, then all changes will be applied. This buffer must
834 /// be a branch buffer to call this method.
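    ///
    /// A sketch of the branch workflow (gpui context assumed; not compiled here):
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// header\n")], None, cx);
    ///     // An empty range list merges every branch change into the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```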
835 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut ModelContext<Self>) {
836 let Some(base_buffer) = self.diff_base_buffer() else {
837 debug_panic!("not a branch buffer");
838 return;
839 };
840
841 let mut ranges = if ranges.is_empty() {
842 &[0..usize::MAX]
843 } else {
844 ranges.as_slice()
845 }
846 .into_iter()
847 .peekable();
848
849 let mut edits = Vec::new();
850 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
851 let mut is_included = false;
852 while let Some(range) = ranges.peek() {
853 if range.end < edit.new.start {
854 ranges.next().unwrap();
855 } else {
856 if range.start <= edit.new.end {
857 is_included = true;
858 }
859 break;
860 }
861 }
862
863 if is_included {
864 edits.push((
865 edit.old.clone(),
866 self.text_for_range(edit.new.clone()).collect::<String>(),
867 ));
868 }
869 }
870
871 let operation = base_buffer.update(cx, |base_buffer, cx| {
872 cx.emit(BufferEvent::DiffBaseChanged);
873 base_buffer.edit(edits, None, cx)
874 });
875
876 if let Some(operation) = operation {
877 if let Some(BufferDiffBase::PastBufferVersion {
878 merged_operations, ..
879 }) = &mut self.diff_base
880 {
881 merged_operations.push(operation);
882 }
883 }
884 }
885
886 fn on_base_buffer_event(
887 &mut self,
888 _: Model<Buffer>,
889 event: &BufferEvent,
890 cx: &mut ModelContext<Self>,
891 ) {
892 let BufferEvent::Operation { operation, .. } = event else {
893 return;
894 };
895 let Some(BufferDiffBase::PastBufferVersion {
896 merged_operations, ..
897 }) = &mut self.diff_base
898 else {
899 return;
900 };
901
902 let mut operation_to_undo = None;
903 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
904 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
905 merged_operations.remove(ix);
906 operation_to_undo = Some(operation.timestamp);
907 }
908 }
909
910 self.apply_ops([operation.clone()], cx);
911
912 if let Some(timestamp) = operation_to_undo {
913 let operation = self
914 .text
915 .undo_operations([(timestamp, u32::MAX)].into_iter().collect());
916 self.send_operation(Operation::Buffer(operation), true, cx);
917 }
918
919 self.diff_base_version += 1;
920 }
921
922 #[cfg(test)]
923 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
924 &self.text
925 }
926
927 /// Retrieve a snapshot of the buffer's raw text, without any
928 /// language-related state like the syntax tree or diagnostics.
929 pub fn text_snapshot(&self) -> text::BufferSnapshot {
930 self.text.snapshot()
931 }
932
933 /// The file associated with the buffer, if any.
934 pub fn file(&self) -> Option<&Arc<dyn File>> {
935 self.file.as_ref()
936 }
937
938 /// The version of the buffer that was last saved or reloaded from disk.
939 pub fn saved_version(&self) -> &clock::Global {
940 &self.saved_version
941 }
942
943 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
944 pub fn saved_mtime(&self) -> Option<SystemTime> {
945 self.saved_mtime
946 }
947
948 /// Assign a language to the buffer.
949 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
950 self.non_text_state_update_count += 1;
951 self.syntax_map.lock().clear(&self.text);
952 self.language = language;
953 self.reparse(cx);
954 cx.emit(BufferEvent::LanguageChanged);
955 }
956
    /// Assign a language registry to the buffer. This allows the buffer to look up
    /// additional languages when parts of it are written in other languages.
959 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
960 self.syntax_map
961 .lock()
962 .set_language_registry(language_registry);
963 }
964
965 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
966 self.syntax_map.lock().language_registry()
967 }
968
969 /// Assign the buffer a new [`Capability`].
970 pub fn set_capability(&mut self, capability: Capability, cx: &mut ModelContext<Self>) {
971 self.capability = capability;
972 cx.emit(BufferEvent::CapabilityChanged)
973 }
974
975 /// This method is called to signal that the buffer has been saved.
976 pub fn did_save(
977 &mut self,
978 version: clock::Global,
979 mtime: Option<SystemTime>,
980 cx: &mut ModelContext<Self>,
981 ) {
982 self.saved_version = version;
983 self.has_unsaved_edits
984 .set((self.saved_version().clone(), false));
985 self.has_conflict = false;
986 self.saved_mtime = mtime;
987 cx.emit(BufferEvent::Saved);
988 cx.notify();
989 }
990
991 /// This method is called to signal that the buffer has been discarded.
992 pub fn discarded(&self, cx: &mut ModelContext<Self>) {
993 cx.emit(BufferEvent::Discarded);
994 cx.notify();
995 }
996
997 /// Reloads the contents of the buffer from disk.
998 pub fn reload(&mut self, cx: &ModelContext<Self>) -> oneshot::Receiver<Option<Transaction>> {
999 let (tx, rx) = futures::channel::oneshot::channel();
1000 let prev_version = self.text.version();
1001 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
1002 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
1003 let file = this.file.as_ref()?.as_local()?;
1004 Some((file.mtime(), file.load(cx)))
1005 })?
1006 else {
1007 return Ok(());
1008 };
1009
1010 let new_text = new_text.await?;
1011 let diff = this
1012 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
1013 .await;
1014 this.update(&mut cx, |this, cx| {
1015 if this.version() == diff.base_version {
1016 this.finalize_last_transaction();
1017 this.apply_diff(diff, cx);
1018 tx.send(this.finalize_last_transaction().cloned()).ok();
1019 this.has_conflict = false;
1020 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1021 } else {
1022 if !diff.edits.is_empty()
1023 || this
1024 .edits_since::<usize>(&diff.base_version)
1025 .next()
1026 .is_some()
1027 {
1028 this.has_conflict = true;
1029 }
1030
1031 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1032 }
1033
1034 this.reload_task.take();
1035 })
1036 }));
1037 rx
1038 }
1039
1040 /// This method is called to signal that the buffer has been reloaded.
1041 pub fn did_reload(
1042 &mut self,
1043 version: clock::Global,
1044 line_ending: LineEnding,
1045 mtime: Option<SystemTime>,
1046 cx: &mut ModelContext<Self>,
1047 ) {
1048 self.saved_version = version;
1049 self.has_unsaved_edits
1050 .set((self.saved_version.clone(), false));
1051 self.text.set_line_ending(line_ending);
1052 self.saved_mtime = mtime;
1053 cx.emit(BufferEvent::Reloaded);
1054 cx.notify();
1055 }
1056
1057 /// Updates the [`File`] backing this buffer. This should be called when
1058 /// the file has changed or has been deleted.
1059 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut ModelContext<Self>) {
1060 let mut file_changed = false;
1061
1062 if let Some(old_file) = self.file.as_ref() {
1063 if new_file.path() != old_file.path() {
1064 file_changed = true;
1065 }
1066
1067 if new_file.is_deleted() {
1068 if !old_file.is_deleted() {
1069 file_changed = true;
1070 if !self.is_dirty() {
1071 cx.emit(BufferEvent::DirtyChanged);
1072 }
1073 }
1074 } else {
1075 let new_mtime = new_file.mtime();
1076 if new_mtime != old_file.mtime() {
1077 file_changed = true;
1078
1079 if !self.is_dirty() {
1080 self.reload(cx).close();
1081 }
1082 }
1083 }
1084 } else {
1085 file_changed = true;
1086 };
1087
1088 self.file = Some(new_file);
1089 if file_changed {
1090 self.non_text_state_update_count += 1;
1091 cx.emit(BufferEvent::FileHandleChanged);
1092 cx.notify();
1093 }
1094 }
1095
    /// Returns the current diff base; see [`Buffer::set_diff_base`].
1097 pub fn diff_base(&self) -> Option<&Rope> {
1098 match self.diff_base.as_ref()? {
1099 BufferDiffBase::Git(rope) | BufferDiffBase::PastBufferVersion { rope, .. } => {
1100 Some(rope)
1101 }
1102 }
1103 }
1104
1105 /// Sets the text that will be used to compute a Git diff
1106 /// against the buffer text.
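    ///
    /// For example (a sketch, not compiled here), the diff base is typically the
    /// file's contents at HEAD:
    ///
    /// ```ignore
    /// // `head_text` is a hypothetical String holding the file's contents at HEAD.
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.set_diff_base(Some(head_text), cx);
    /// });
    /// ```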
1107 pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &ModelContext<Self>) {
1108 self.diff_base = diff_base.map(|mut raw_diff_base| {
1109 LineEnding::normalize(&mut raw_diff_base);
1110 BufferDiffBase::Git(Rope::from(raw_diff_base))
1111 });
1112 self.diff_base_version += 1;
1113 if let Some(recalc_task) = self.recalculate_diff(cx) {
1114 cx.spawn(|buffer, mut cx| async move {
1115 recalc_task.await;
1116 buffer
1117 .update(&mut cx, |_, cx| {
1118 cx.emit(BufferEvent::DiffBaseChanged);
1119 })
1120 .ok();
1121 })
1122 .detach();
1123 }
1124 }
1125
    /// Returns a number that is incremented whenever the buffer's diff base changes.
1127 pub fn diff_base_version(&self) -> usize {
1128 self.diff_base_version
1129 }
1130
1131 pub fn diff_base_buffer(&self) -> Option<Model<Self>> {
1132 match self.diff_base.as_ref()? {
1133 BufferDiffBase::Git(_) => None,
1134 BufferDiffBase::PastBufferVersion { buffer, .. } => Some(buffer.clone()),
1135 }
1136 }
1137
1138 /// Recomputes the diff.
1139 pub fn recalculate_diff(&self, cx: &ModelContext<Self>) -> Option<Task<()>> {
1140 let diff_base_rope = match self.diff_base.as_ref()? {
1141 BufferDiffBase::Git(rope) => rope.clone(),
1142 BufferDiffBase::PastBufferVersion { buffer, .. } => buffer.read(cx).as_rope().clone(),
1143 };
1144
1145 let snapshot = self.snapshot();
1146 let mut diff = self.git_diff.clone();
1147 let diff = cx.background_executor().spawn(async move {
1148 diff.update(&diff_base_rope, &snapshot).await;
1149 (diff, diff_base_rope)
1150 });
1151
1152 Some(cx.spawn(|this, mut cx| async move {
1153 let (buffer_diff, diff_base_rope) = diff.await;
1154 this.update(&mut cx, |this, cx| {
1155 this.git_diff = buffer_diff;
1156 this.non_text_state_update_count += 1;
1157 if let Some(BufferDiffBase::PastBufferVersion { rope, .. }) = &mut this.diff_base {
1158 *rope = diff_base_rope;
1159 }
1160 cx.emit(BufferEvent::DiffUpdated);
1161 })
1162 .ok();
1163 }))
1164 }
1165
1166 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1167 pub fn language(&self) -> Option<&Arc<Language>> {
1168 self.language.as_ref()
1169 }
1170
1171 /// Returns the [`Language`] at the given location.
1172 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1173 let offset = position.to_offset(self);
1174 self.syntax_map
1175 .lock()
1176 .layers_for_range(offset..offset, &self.text, false)
1177 .last()
1178 .map(|info| info.language.clone())
1179 .or_else(|| self.language.clone())
1180 }
1181
1182 /// An integer version number that accounts for all updates besides
1183 /// the buffer's text itself (which is versioned via a version vector).
1184 pub fn non_text_state_update_count(&self) -> usize {
1185 self.non_text_state_update_count
1186 }
1187
1188 /// Whether the buffer is being parsed in the background.
1189 #[cfg(any(test, feature = "test-support"))]
1190 pub fn is_parsing(&self) -> bool {
1191 self.parsing_in_background
1192 }
1193
1194 /// Indicates whether the buffer contains any regions that may be
1195 /// written in a language that hasn't been loaded yet.
1196 pub fn contains_unknown_injections(&self) -> bool {
1197 self.syntax_map.lock().contains_unknown_injections()
1198 }
1199
1200 #[cfg(test)]
1201 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1202 self.sync_parse_timeout = timeout;
1203 }
1204
1205 /// Called after an edit to synchronize the buffer's main parse tree with
1206 /// the buffer's new underlying state.
1207 ///
1208 /// Locks the syntax map and interpolates the edits since the last reparse
1209 /// into the foreground syntax tree.
1210 ///
1211 /// Then takes a stable snapshot of the syntax map before unlocking it.
1212 /// The snapshot with the interpolated edits is sent to a background thread,
1213 /// where we ask Tree-sitter to perform an incremental parse.
1214 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting for the parse to complete. If it finishes within that window,
    /// we apply the result synchronously.
    ///
    /// If we time out waiting for the parse, we spawn a second task that waits
    /// for it to finish, and we return with the interpolated tree still in the
    /// foreground. When the background parse completes, it calls back into the
    /// main thread and assigns the new parse state.
1223 ///
1224 /// If the buffer or grammar changed since the start of the background parse,
1225 /// initiate an additional reparse recursively. To avoid concurrent parses
1226 /// for the same buffer, we only initiate a new parse if we are not already
1227 /// parsing in the background.
1228 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1229 if self.parsing_in_background {
1230 return;
1231 }
1232 let language = if let Some(language) = self.language.clone() {
1233 language
1234 } else {
1235 return;
1236 };
1237
1238 let text = self.text_snapshot();
1239 let parsed_version = self.version();
1240
1241 let mut syntax_map = self.syntax_map.lock();
1242 syntax_map.interpolate(&text);
1243 let language_registry = syntax_map.language_registry();
1244 let mut syntax_snapshot = syntax_map.snapshot();
1245 drop(syntax_map);
1246
1247 let parse_task = cx.background_executor().spawn({
1248 let language = language.clone();
1249 let language_registry = language_registry.clone();
1250 async move {
1251 syntax_snapshot.reparse(&text, language_registry, language);
1252 syntax_snapshot
1253 }
1254 });
1255
1256 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1257 match cx
1258 .background_executor()
1259 .block_with_timeout(self.sync_parse_timeout, parse_task)
1260 {
1261 Ok(new_syntax_snapshot) => {
1262 self.did_finish_parsing(new_syntax_snapshot, cx);
1263 }
1264 Err(parse_task) => {
1265 self.parsing_in_background = true;
1266 cx.spawn(move |this, mut cx| async move {
1267 let new_syntax_map = parse_task.await;
1268 this.update(&mut cx, move |this, cx| {
1269 let grammar_changed =
1270 this.language.as_ref().map_or(true, |current_language| {
1271 !Arc::ptr_eq(&language, current_language)
1272 });
1273 let language_registry_changed = new_syntax_map
1274 .contains_unknown_injections()
1275 && language_registry.map_or(false, |registry| {
1276 registry.version() != new_syntax_map.language_registry_version()
1277 });
1278 let parse_again = language_registry_changed
1279 || grammar_changed
1280 || this.version.changed_since(&parsed_version);
1281 this.did_finish_parsing(new_syntax_map, cx);
1282 this.parsing_in_background = false;
1283 if parse_again {
1284 this.reparse(cx);
1285 }
1286 })
1287 .ok();
1288 })
1289 .detach();
1290 }
1291 }
1292 }
1293
1294 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
1295 self.non_text_state_update_count += 1;
1296 self.syntax_map.lock().did_parse(syntax_snapshot);
1297 self.request_autoindent(cx);
1298 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1299 cx.emit(BufferEvent::Reparsed);
1300 cx.notify();
1301 }
1302
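    /// Returns a receiver that can be used to await the end of a background parse,
    /// e.g. (a sketch, not compiled here):
    ///
    /// ```ignore
    /// let mut status = buffer.read(cx).parse_status();
    /// while *status.borrow() != ParseStatus::Idle {
    ///     status.changed().await?;
    /// }
    /// ```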
1303 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1304 self.parse_status.1.clone()
1305 }
1306
1307 /// Assign to the buffer a set of diagnostics created by a given language server.
1308 pub fn update_diagnostics(
1309 &mut self,
1310 server_id: LanguageServerId,
1311 diagnostics: DiagnosticSet,
1312 cx: &mut ModelContext<Self>,
1313 ) {
1314 let lamport_timestamp = self.text.lamport_clock.tick();
1315 let op = Operation::UpdateDiagnostics {
1316 server_id,
1317 diagnostics: diagnostics.iter().cloned().collect(),
1318 lamport_timestamp,
1319 };
1320 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1321 self.send_operation(op, true, cx);
1322 }
1323
1324 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
1325 if let Some(indent_sizes) = self.compute_autoindents() {
1326 let indent_sizes = cx.background_executor().spawn(indent_sizes);
1327 match cx
1328 .background_executor()
1329 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1330 {
1331 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1332 Err(indent_sizes) => {
1333 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1334 let indent_sizes = indent_sizes.await;
1335 this.update(&mut cx, |this, cx| {
1336 this.apply_autoindents(indent_sizes, cx);
1337 })
1338 .ok();
1339 }));
1340 }
1341 }
1342 } else {
1343 self.autoindent_requests.clear();
1344 }
1345 }
1346
1347 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
1348 let max_rows_between_yields = 100;
1349 let snapshot = self.snapshot();
1350 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1351 return None;
1352 }
1353
1354 let autoindent_requests = self.autoindent_requests.clone();
1355 Some(async move {
1356 let mut indent_sizes = BTreeMap::new();
1357 for request in autoindent_requests {
1358 // Resolve each edited range to its row in the current buffer and in the
1359 // buffer before this batch of edits.
1360 let mut row_ranges = Vec::new();
1361 let mut old_to_new_rows = BTreeMap::new();
1362 let mut language_indent_sizes_by_new_row = Vec::new();
1363 for entry in &request.entries {
1364 let position = entry.range.start;
1365 let new_row = position.to_point(&snapshot).row;
1366 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1367 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1368
1369 if !entry.first_line_is_new {
1370 let old_row = position.to_point(&request.before_edit).row;
1371 old_to_new_rows.insert(old_row, new_row);
1372 }
1373 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1374 }
1375
1376 // Build a map containing the suggested indentation for each of the edited lines
1377 // with respect to the state of the buffer before these edits. This map is keyed
1378 // by the rows for these lines in the current state of the buffer.
1379 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1380 let old_edited_ranges =
1381 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1382 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1383 let mut language_indent_size = IndentSize::default();
1384 for old_edited_range in old_edited_ranges {
1385 let suggestions = request
1386 .before_edit
1387 .suggest_autoindents(old_edited_range.clone())
1388 .into_iter()
1389 .flatten();
1390 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1391 if let Some(suggestion) = suggestion {
1392 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1393
1394 // Find the indent size based on the language for this row.
1395 while let Some((row, size)) = language_indent_sizes.peek() {
1396 if *row > new_row {
1397 break;
1398 }
1399 language_indent_size = *size;
1400 language_indent_sizes.next();
1401 }
1402
1403 let suggested_indent = old_to_new_rows
1404 .get(&suggestion.basis_row)
1405 .and_then(|from_row| {
1406 Some(old_suggestions.get(from_row).copied()?.0)
1407 })
1408 .unwrap_or_else(|| {
1409 request
1410 .before_edit
1411 .indent_size_for_line(suggestion.basis_row)
1412 })
1413 .with_delta(suggestion.delta, language_indent_size);
1414 old_suggestions
1415 .insert(new_row, (suggested_indent, suggestion.within_error));
1416 }
1417 }
1418 yield_now().await;
1419 }
1420
1421 // In block mode, only compute indentation suggestions for the first line
1422 // of each insertion. Otherwise, compute suggestions for every inserted line.
1423 let new_edited_row_ranges = contiguous_ranges(
1424 row_ranges.iter().flat_map(|(range, _)| {
1425 if request.is_block_mode {
1426 range.start..range.start + 1
1427 } else {
1428 range.clone()
1429 }
1430 }),
1431 max_rows_between_yields,
1432 );
1433
1434 // Compute new suggestions for each line, but only include them in the result
1435 // if they differ from the old suggestion for that line.
1436 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1437 let mut language_indent_size = IndentSize::default();
1438 for new_edited_row_range in new_edited_row_ranges {
1439 let suggestions = snapshot
1440 .suggest_autoindents(new_edited_row_range.clone())
1441 .into_iter()
1442 .flatten();
1443 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1444 if let Some(suggestion) = suggestion {
1445 // Find the indent size based on the language for this row.
1446 while let Some((row, size)) = language_indent_sizes.peek() {
1447 if *row > new_row {
1448 break;
1449 }
1450 language_indent_size = *size;
1451 language_indent_sizes.next();
1452 }
1453
1454 let suggested_indent = indent_sizes
1455 .get(&suggestion.basis_row)
1456 .copied()
1457 .unwrap_or_else(|| {
1458 snapshot.indent_size_for_line(suggestion.basis_row)
1459 })
1460 .with_delta(suggestion.delta, language_indent_size);
1461 if old_suggestions.get(&new_row).map_or(
1462 true,
1463 |(old_indentation, was_within_error)| {
1464 suggested_indent != *old_indentation
1465 && (!suggestion.within_error || *was_within_error)
1466 },
1467 ) {
1468 indent_sizes.insert(new_row, suggested_indent);
1469 }
1470 }
1471 }
1472 yield_now().await;
1473 }
1474
1475 // For each block of inserted text, adjust the indentation of the remaining
1476 // lines of the block by the same amount as the first line was adjusted.
1477 if request.is_block_mode {
1478 for (row_range, original_indent_column) in
1479 row_ranges
1480 .into_iter()
1481 .filter_map(|(range, original_indent_column)| {
1482 if range.len() > 1 {
1483 Some((range, original_indent_column?))
1484 } else {
1485 None
1486 }
1487 })
1488 {
1489 let new_indent = indent_sizes
1490 .get(&row_range.start)
1491 .copied()
1492 .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
1493 let delta = new_indent.len as i64 - original_indent_column as i64;
1494 if delta != 0 {
1495 for row in row_range.skip(1) {
1496 indent_sizes.entry(row).or_insert_with(|| {
1497 let mut size = snapshot.indent_size_for_line(row);
1498 if size.kind == new_indent.kind {
1499 match delta.cmp(&0) {
1500 Ordering::Greater => size.len += delta as u32,
1501 Ordering::Less => {
1502 size.len = size.len.saturating_sub(-delta as u32)
1503 }
1504 Ordering::Equal => {}
1505 }
1506 }
1507 size
1508 });
1509 }
1510 }
1511 }
1512 }
1513 }
1514
1515 indent_sizes
1516 })
1517 }
1518
1519 fn apply_autoindents(
1520 &mut self,
1521 indent_sizes: BTreeMap<u32, IndentSize>,
1522 cx: &mut ModelContext<Self>,
1523 ) {
1524 self.autoindent_requests.clear();
1525
1526 let edits: Vec<_> = indent_sizes
1527 .into_iter()
1528 .filter_map(|(row, indent_size)| {
1529 let current_size = indent_size_for_line(self, row);
1530 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1531 })
1532 .collect();
1533
1534 let preserve_preview = self.preserve_preview();
1535 self.edit(edits, None, cx);
1536 if preserve_preview {
1537 self.refresh_preview();
1538 }
1539 }
1540
1541 /// Create a minimal edit that will cause the given row to be indented
1542 /// with the given size. After applying this edit, the length of the line
1543 /// will always be at least `new_size.len`.
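    ///
    /// # Examples
    ///
    /// A sketch (not compiled here) of growing a two-space indent to four spaces;
    /// the missing spaces are inserted at the start of the row:
    ///
    /// ```ignore
    /// let current = IndentSize { len: 2, kind: IndentKind::Space };
    /// let desired = IndentSize { len: 4, kind: IndentKind::Space };
    /// let (range, text) = Buffer::edit_for_indent_size_adjustment(0, current, desired).unwrap();
    /// assert_eq!(range, Point::new(0, 0)..Point::new(0, 0));
    /// assert_eq!(text, "  ");
    /// ```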
1544 pub fn edit_for_indent_size_adjustment(
1545 row: u32,
1546 current_size: IndentSize,
1547 new_size: IndentSize,
1548 ) -> Option<(Range<Point>, String)> {
1549 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1551 Ordering::Greater => {
1552 let point = Point::new(row, 0);
1553 Some((
1554 point..point,
1555 iter::repeat(new_size.char())
1556 .take((new_size.len - current_size.len) as usize)
1557 .collect::<String>(),
1558 ))
1559 }
1560
1561 Ordering::Less => Some((
1562 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1563 String::new(),
1564 )),
1565
1566 Ordering::Equal => None,
1567 }
1568 } else {
1569 Some((
1570 Point::new(row, 0)..Point::new(row, current_size.len),
1571 iter::repeat(new_size.char())
1572 .take(new_size.len as usize)
1573 .collect::<String>(),
1574 ))
1575 }
1576 }
1577
1578 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1579 /// and the given new text.
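    ///
    /// A sketch of the compute-then-apply flow (it mirrors [`Buffer::reload`]; gpui
    /// context and a `new_text` string are assumed, not compiled here):
    ///
    /// ```ignore
    /// let diff = self.diff(new_text, cx);
    /// cx.spawn(|this, mut cx| async move {
    ///     let diff = diff.await;
    ///     this.update(&mut cx, |this, cx| this.apply_diff(diff, cx))
    /// })
    /// .detach();
    /// ```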
1580 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1581 let old_text = self.as_rope().clone();
1582 let base_version = self.version();
1583 cx.background_executor()
1584 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1585 let old_text = old_text.to_string();
1586 let line_ending = LineEnding::detect(&new_text);
1587 LineEnding::normalize(&mut new_text);
1588
1589 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1590 let empty: Arc<str> = Arc::default();
1591
1592 let mut edits = Vec::new();
1593 let mut old_offset = 0;
1594 let mut new_offset = 0;
1595 let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
1596 for change in diff.iter_all_changes().map(Some).chain([None]) {
1597 if let Some(change) = &change {
1598 let len = change.value().len();
1599 match change.tag() {
1600 ChangeTag::Equal => {
1601 old_offset += len;
1602 new_offset += len;
1603 }
1604 ChangeTag::Delete => {
1605 let old_end_offset = old_offset + len;
1606 if let Some((last_old_range, _)) = &mut last_edit {
1607 last_old_range.end = old_end_offset;
1608 } else {
1609 last_edit =
1610 Some((old_offset..old_end_offset, new_offset..new_offset));
1611 }
1612 old_offset = old_end_offset;
1613 }
1614 ChangeTag::Insert => {
1615 let new_end_offset = new_offset + len;
1616 if let Some((_, last_new_range)) = &mut last_edit {
1617 last_new_range.end = new_end_offset;
1618 } else {
1619 last_edit =
1620 Some((old_offset..old_offset, new_offset..new_end_offset));
1621 }
1622 new_offset = new_end_offset;
1623 }
1624 }
1625 }
1626
1627 if let Some((old_range, new_range)) = &last_edit {
1628 if old_offset > old_range.end
1629 || new_offset > new_range.end
1630 || change.is_none()
1631 {
1632 let text = if new_range.is_empty() {
1633 empty.clone()
1634 } else {
1635 new_text[new_range.clone()].into()
1636 };
1637 edits.push((old_range.clone(), text));
1638 last_edit.take();
1639 }
1640 }
1641 }
1642
1643 Diff {
1644 base_version,
1645 line_ending,
1646 edits,
1647 }
1648 })
1649 }
1650
1651 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1653 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1654 let old_text = self.as_rope().clone();
1655 let line_ending = self.line_ending();
1656 let base_version = self.version();
1657 cx.background_executor().spawn(async move {
1658 let ranges = trailing_whitespace_ranges(&old_text);
1659 let empty = Arc::<str>::from("");
1660 Diff {
1661 base_version,
1662 line_ending,
1663 edits: ranges
1664 .into_iter()
1665 .map(|range| (range, empty.clone()))
1666 .collect(),
1667 }
1668 })
1669 }
1670
1671 /// Ensures that the buffer ends with a single newline character, and
1672 /// no other whitespace.
1673 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1674 let len = self.len();
1675 let mut offset = len;
1676 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1677 let non_whitespace_len = chunk
1678 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1679 .len();
1680 offset -= chunk.len();
1681 offset += non_whitespace_len;
1682 if non_whitespace_len != 0 {
1683 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1684 return;
1685 }
1686 break;
1687 }
1688 }
1689 self.edit([(offset..len, "\n")], None, cx);
1690 }
1691
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, the diff is adjusted to account for those changes, and any parts of
    /// the diff that conflict with those changes are discarded.
1695 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1696 // Check for any edits to the buffer that have occurred since this diff
1697 // was computed.
1698 let snapshot = self.snapshot();
1699 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1700 let mut delta = 0;
1701 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1702 while let Some(edit_since) = edits_since.peek() {
1703 // If the edit occurs after a diff hunk, then it does not
1704 // affect that hunk.
1705 if edit_since.old.start > range.end {
1706 break;
1707 }
1708 // If the edit precedes the diff hunk, then adjust the hunk
1709 // to reflect the edit.
1710 else if edit_since.old.end < range.start {
1711 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1712 edits_since.next();
1713 }
1714 // If the edit intersects a diff hunk, then discard that hunk.
1715 else {
1716 return None;
1717 }
1718 }
1719
1720 let start = (range.start as i64 + delta) as usize;
1721 let end = (range.end as i64 + delta) as usize;
1722 Some((start..end, new_text))
1723 });
1724
1725 self.start_transaction();
1726 self.text.set_line_ending(diff.line_ending);
1727 self.edit(adjusted_edits, None, cx);
1728 self.end_transaction(cx)
1729 }
1730
1731 fn has_unsaved_edits(&self) -> bool {
1732 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1733
1734 if last_version == self.version {
1735 self.has_unsaved_edits
1736 .set((last_version, has_unsaved_edits));
1737 return has_unsaved_edits;
1738 }
1739
1740 let has_edits = self.has_edits_since(&self.saved_version);
1741 self.has_unsaved_edits
1742 .set((self.version.clone(), has_edits));
1743 has_edits
1744 }
1745
1746 /// Checks if the buffer has unsaved changes.
1747 pub fn is_dirty(&self) -> bool {
1748 self.capability != Capability::ReadOnly
1749 && (self.has_conflict
1750 || self.has_unsaved_edits()
1751 || self
1752 .file
1753 .as_ref()
1754 .map_or(false, |file| file.is_deleted() || !file.is_created()))
1755 }
1756
1757 /// Checks if the buffer and its file have both changed since the buffer
1758 /// was last saved or reloaded.
1759 pub fn has_conflict(&self) -> bool {
1760 self.has_conflict
1761 || self.file.as_ref().map_or(false, |file| {
1762 file.mtime() > self.saved_mtime && self.has_unsaved_edits()
1763 })
1764 }
1765
1766 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1767 pub fn subscribe(&mut self) -> Subscription {
1768 self.text.subscribe()
1769 }
1770
1771 /// Starts a transaction, if one is not already in-progress. When undoing or
1772 /// redoing edits, all of the edits performed within a transaction are undone
1773 /// or redone together.
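    ///
    /// A grouping sketch (not compiled as a doctest), assuming `buffer` and `cx` are in
    /// scope; both edits below are undone together by a single call to [`Buffer::undo`]:
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(12..12, "}\n")], None, cx);
    /// buffer.end_transaction(cx);
    /// ```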
1774 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1775 self.start_transaction_at(Instant::now())
1776 }
1777
1778 /// Starts a transaction, providing the current time. Subsequent transactions
1779 /// that occur within a short period of time will be grouped together. This
1780 /// is controlled by the buffer's undo grouping duration.
1781 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1782 self.transaction_depth += 1;
1783 if self.was_dirty_before_starting_transaction.is_none() {
1784 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1785 }
1786 self.text.start_transaction_at(now)
1787 }
1788
1789 /// Terminates the current transaction, if this is the outermost transaction.
1790 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1791 self.end_transaction_at(Instant::now(), cx)
1792 }
1793
1794 /// Terminates the current transaction, providing the current time. Subsequent transactions
1795 /// that occur within a short period of time will be grouped together. This
1796 /// is controlled by the buffer's undo grouping duration.
1797 pub fn end_transaction_at(
1798 &mut self,
1799 now: Instant,
1800 cx: &mut ModelContext<Self>,
1801 ) -> Option<TransactionId> {
1802 assert!(self.transaction_depth > 0);
1803 self.transaction_depth -= 1;
1804 let was_dirty = if self.transaction_depth == 0 {
1805 self.was_dirty_before_starting_transaction.take().unwrap()
1806 } else {
1807 false
1808 };
1809 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1810 self.did_edit(&start_version, was_dirty, cx);
1811 Some(transaction_id)
1812 } else {
1813 None
1814 }
1815 }
1816
1817 /// Manually add a transaction to the buffer's undo history.
1818 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1819 self.text.push_transaction(transaction, now);
1820 }
1821
1822 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
1824 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1825 self.text.finalize_last_transaction()
1826 }
1827
1828 /// Manually group all changes since a given transaction.
1829 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1830 self.text.group_until_transaction(transaction_id);
1831 }
1832
    /// Manually remove a transaction from the buffer's undo history.
1834 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1835 self.text.forget_transaction(transaction_id);
1836 }
1837
1838 /// Manually merge two adjacent transactions in the buffer's undo history.
1839 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1840 self.text.merge_transactions(transaction, destination);
1841 }
1842
1843 /// Waits for the buffer to receive operations with the given timestamps.
1844 pub fn wait_for_edits(
1845 &mut self,
1846 edit_ids: impl IntoIterator<Item = clock::Lamport>,
1847 ) -> impl Future<Output = Result<()>> {
1848 self.text.wait_for_edits(edit_ids)
1849 }
1850
1851 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
1852 pub fn wait_for_anchors(
1853 &mut self,
1854 anchors: impl IntoIterator<Item = Anchor>,
1855 ) -> impl 'static + Future<Output = Result<()>> {
1856 self.text.wait_for_anchors(anchors)
1857 }
1858
1859 /// Waits for the buffer to receive operations up to the given version.
1860 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1861 self.text.wait_for_version(version)
1862 }
1863
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
1866 pub fn give_up_waiting(&mut self) {
1867 self.text.give_up_waiting();
1868 }
1869
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
1871 pub fn set_active_selections(
1872 &mut self,
1873 selections: Arc<[Selection<Anchor>]>,
1874 line_mode: bool,
1875 cursor_shape: CursorShape,
1876 cx: &mut ModelContext<Self>,
1877 ) {
1878 let lamport_timestamp = self.text.lamport_clock.tick();
1879 self.remote_selections.insert(
1880 self.text.replica_id(),
1881 SelectionSet {
1882 selections: selections.clone(),
1883 lamport_timestamp,
1884 line_mode,
1885 cursor_shape,
1886 },
1887 );
1888 self.send_operation(
1889 Operation::UpdateSelections {
1890 selections,
1891 line_mode,
1892 lamport_timestamp,
1893 cursor_shape,
1894 },
1895 true,
1896 cx,
1897 );
1898 self.non_text_state_update_count += 1;
1899 cx.notify();
1900 }
1901
1902 /// Clears the selections, so that other replicas of the buffer do not see any selections for
1903 /// this replica.
1904 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1905 if self
1906 .remote_selections
1907 .get(&self.text.replica_id())
1908 .map_or(true, |set| !set.selections.is_empty())
1909 {
1910 self.set_active_selections(Arc::default(), false, Default::default(), cx);
1911 }
1912 }
1913
1914 /// Replaces the buffer's entire text.
1915 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Lamport>
1916 where
1917 T: Into<Arc<str>>,
1918 {
1919 self.autoindent_requests.clear();
1920 self.edit([(0..self.len(), text)], None, cx)
1921 }
1922
1923 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
1924 /// delete, and a string of text to insert at that location.
1925 ///
1926 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
1927 /// request for the edited ranges, which will be processed when the buffer finishes
1928 /// parsing.
1929 ///
1930 /// Parsing takes place at the end of a transaction, and may compute synchronously
1931 /// or asynchronously, depending on the changes.
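    ///
    /// An illustrative sketch (not compiled as a doctest), assuming `buffer` and `cx`
    /// are in scope:
    ///
    /// ```ignore
    /// // Replace the first three bytes and insert a character at offset 10, as a single operation.
    /// buffer.edit([(0..3, "let"), (10..10, ";")], None, cx);
    ///
    /// // Insert a line and request that it be re-indented once parsing finishes.
    /// buffer.edit([(0..0, "foo()\n")], Some(AutoindentMode::EachLine), cx);
    /// ```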
1932 pub fn edit<I, S, T>(
1933 &mut self,
1934 edits_iter: I,
1935 autoindent_mode: Option<AutoindentMode>,
1936 cx: &mut ModelContext<Self>,
1937 ) -> Option<clock::Lamport>
1938 where
1939 I: IntoIterator<Item = (Range<S>, T)>,
1940 S: ToOffset,
1941 T: Into<Arc<str>>,
1942 {
1943 // Skip invalid edits and coalesce contiguous ones.
1944 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1945 for (range, new_text) in edits_iter {
1946 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1947 if range.start > range.end {
1948 mem::swap(&mut range.start, &mut range.end);
1949 }
1950 let new_text = new_text.into();
1951 if !new_text.is_empty() || !range.is_empty() {
1952 if let Some((prev_range, prev_text)) = edits.last_mut() {
1953 if prev_range.end >= range.start {
1954 prev_range.end = cmp::max(prev_range.end, range.end);
1955 *prev_text = format!("{prev_text}{new_text}").into();
1956 } else {
1957 edits.push((range, new_text));
1958 }
1959 } else {
1960 edits.push((range, new_text));
1961 }
1962 }
1963 }
1964 if edits.is_empty() {
1965 return None;
1966 }
1967
1968 self.start_transaction();
1969 self.pending_autoindent.take();
1970 let autoindent_request = autoindent_mode
1971 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1972
1973 let edit_operation = self.text.edit(edits.iter().cloned());
1974 let edit_id = edit_operation.timestamp();
1975
1976 if let Some((before_edit, mode)) = autoindent_request {
1977 let mut delta = 0isize;
1978 let entries = edits
1979 .into_iter()
1980 .enumerate()
1981 .zip(&edit_operation.as_edit().unwrap().new_text)
1982 .map(|((ix, (range, _)), new_text)| {
1983 let new_text_length = new_text.len();
1984 let old_start = range.start.to_point(&before_edit);
1985 let new_start = (delta + range.start as isize) as usize;
1986 delta += new_text_length as isize - (range.end as isize - range.start as isize);
1987
1988 let mut range_of_insertion_to_indent = 0..new_text_length;
1989 let mut first_line_is_new = false;
1990 let mut original_indent_column = None;
1991
1992 // When inserting an entire line at the beginning of an existing line,
1993 // treat the insertion as new.
1994 if new_text.contains('\n')
1995 && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
1996 {
1997 first_line_is_new = true;
1998 }
1999
2000 // When inserting text starting with a newline, avoid auto-indenting the
2001 // previous line.
2002 if new_text.starts_with('\n') {
2003 range_of_insertion_to_indent.start += 1;
2004 first_line_is_new = true;
2005 }
2006
2007 // Avoid auto-indenting after the insertion.
2008 if let AutoindentMode::Block {
2009 original_indent_columns,
2010 } = &mode
2011 {
2012 original_indent_column =
2013 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
2014 indent_size_for_text(
2015 new_text[range_of_insertion_to_indent.clone()].chars(),
2016 )
2017 .len
2018 }));
2019 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2020 range_of_insertion_to_indent.end -= 1;
2021 }
2022 }
2023
2024 AutoindentRequestEntry {
2025 first_line_is_new,
2026 original_indent_column,
2027 indent_size: before_edit.language_indent_size_at(range.start, cx),
2028 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2029 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2030 }
2031 })
2032 .collect();
2033
2034 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2035 before_edit,
2036 entries,
2037 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2038 }));
2039 }
2040
2041 self.end_transaction(cx);
2042 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2043 Some(edit_id)
2044 }
2045
2046 fn did_edit(
2047 &mut self,
2048 old_version: &clock::Global,
2049 was_dirty: bool,
2050 cx: &mut ModelContext<Self>,
2051 ) {
2052 if self.edits_since::<usize>(old_version).next().is_none() {
2053 return;
2054 }
2055
2056 self.reparse(cx);
2057
2058 cx.emit(BufferEvent::Edited);
2059 if was_dirty != self.is_dirty() {
2060 cx.emit(BufferEvent::DirtyChanged);
2061 }
2062 cx.notify();
2063 }
2064
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
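    ///
    /// A usage sketch (not compiled as a doctest), assuming `buffer` and `cx` are in scope:
    ///
    /// ```ignore
    /// // Insert an empty line at row 3, padded with blank lines above and below it.
    /// let new_line_start = buffer.insert_empty_line(Point::new(3, 0), true, true, cx);
    /// ```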
2067 pub fn insert_empty_line(
2068 &mut self,
2069 position: impl ToPoint,
2070 space_above: bool,
2071 space_below: bool,
2072 cx: &mut ModelContext<Self>,
2073 ) -> Point {
2074 let mut position = position.to_point(self);
2075
2076 self.start_transaction();
2077
2078 self.edit(
2079 [(position..position, "\n")],
2080 Some(AutoindentMode::EachLine),
2081 cx,
2082 );
2083
2084 if position.column > 0 {
2085 position += Point::new(1, 0);
2086 }
2087
2088 if !self.is_line_blank(position.row) {
2089 self.edit(
2090 [(position..position, "\n")],
2091 Some(AutoindentMode::EachLine),
2092 cx,
2093 );
2094 }
2095
2096 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2097 self.edit(
2098 [(position..position, "\n")],
2099 Some(AutoindentMode::EachLine),
2100 cx,
2101 );
2102 position.row += 1;
2103 }
2104
2105 if space_below
2106 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2107 {
2108 self.edit(
2109 [(position..position, "\n")],
2110 Some(AutoindentMode::EachLine),
2111 cx,
2112 );
2113 }
2114
2115 self.end_transaction(cx);
2116
2117 position
2118 }
2119
2120 /// Applies the given remote operations to the buffer.
2121 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
2122 &mut self,
2123 ops: I,
2124 cx: &mut ModelContext<Self>,
2125 ) {
2126 self.pending_autoindent.take();
2127 let was_dirty = self.is_dirty();
2128 let old_version = self.version.clone();
2129 let mut deferred_ops = Vec::new();
2130 let buffer_ops = ops
2131 .into_iter()
2132 .filter_map(|op| match op {
2133 Operation::Buffer(op) => Some(op),
2134 _ => {
2135 if self.can_apply_op(&op) {
2136 self.apply_op(op, cx);
2137 } else {
2138 deferred_ops.push(op);
2139 }
2140 None
2141 }
2142 })
2143 .collect::<Vec<_>>();
2144 for operation in buffer_ops.iter() {
2145 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2146 }
2147 self.text.apply_ops(buffer_ops);
2148 self.deferred_ops.insert(deferred_ops);
2149 self.flush_deferred_ops(cx);
2150 self.did_edit(&old_version, was_dirty, cx);
2151 // Notify independently of whether the buffer was edited as the operations could include a
2152 // selection update.
2153 cx.notify();
2154 }
2155
2156 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
2157 let mut deferred_ops = Vec::new();
2158 for op in self.deferred_ops.drain().iter().cloned() {
2159 if self.can_apply_op(&op) {
2160 self.apply_op(op, cx);
2161 } else {
2162 deferred_ops.push(op);
2163 }
2164 }
2165 self.deferred_ops.insert(deferred_ops);
2166 }
2167
2168 pub fn has_deferred_ops(&self) -> bool {
2169 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2170 }
2171
2172 fn can_apply_op(&self, operation: &Operation) -> bool {
2173 match operation {
2174 Operation::Buffer(_) => {
2175 unreachable!("buffer operations should never be applied at this layer")
2176 }
2177 Operation::UpdateDiagnostics {
2178 diagnostics: diagnostic_set,
2179 ..
2180 } => diagnostic_set.iter().all(|diagnostic| {
2181 self.text.can_resolve(&diagnostic.range.start)
2182 && self.text.can_resolve(&diagnostic.range.end)
2183 }),
2184 Operation::UpdateSelections { selections, .. } => selections
2185 .iter()
2186 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2187 Operation::UpdateCompletionTriggers { .. } => true,
2188 }
2189 }
2190
2191 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
2192 match operation {
2193 Operation::Buffer(_) => {
2194 unreachable!("buffer operations should never be applied at this layer")
2195 }
2196 Operation::UpdateDiagnostics {
2197 server_id,
2198 diagnostics: diagnostic_set,
2199 lamport_timestamp,
2200 } => {
2201 let snapshot = self.snapshot();
2202 self.apply_diagnostic_update(
2203 server_id,
2204 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2205 lamport_timestamp,
2206 cx,
2207 );
2208 }
2209 Operation::UpdateSelections {
2210 selections,
2211 lamport_timestamp,
2212 line_mode,
2213 cursor_shape,
2214 } => {
2215 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2216 if set.lamport_timestamp > lamport_timestamp {
2217 return;
2218 }
2219 }
2220
2221 self.remote_selections.insert(
2222 lamport_timestamp.replica_id,
2223 SelectionSet {
2224 selections,
2225 lamport_timestamp,
2226 line_mode,
2227 cursor_shape,
2228 },
2229 );
2230 self.text.lamport_clock.observe(lamport_timestamp);
2231 self.non_text_state_update_count += 1;
2232 }
2233 Operation::UpdateCompletionTriggers {
2234 triggers,
2235 lamport_timestamp,
2236 } => {
2237 self.completion_triggers = triggers;
2238 self.text.lamport_clock.observe(lamport_timestamp);
2239 }
2240 }
2241 }
2242
2243 fn apply_diagnostic_update(
2244 &mut self,
2245 server_id: LanguageServerId,
2246 diagnostics: DiagnosticSet,
2247 lamport_timestamp: clock::Lamport,
2248 cx: &mut ModelContext<Self>,
2249 ) {
2250 if lamport_timestamp > self.diagnostics_timestamp {
2251 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2252 if diagnostics.is_empty() {
2253 if let Ok(ix) = ix {
2254 self.diagnostics.remove(ix);
2255 }
2256 } else {
2257 match ix {
2258 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2259 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2260 };
2261 }
2262 self.diagnostics_timestamp = lamport_timestamp;
2263 self.non_text_state_update_count += 1;
2264 self.text.lamport_clock.observe(lamport_timestamp);
2265 cx.notify();
2266 cx.emit(BufferEvent::DiagnosticsUpdated);
2267 }
2268 }
2269
2270 fn send_operation(&self, operation: Operation, is_local: bool, cx: &mut ModelContext<Self>) {
2271 cx.emit(BufferEvent::Operation {
2272 operation,
2273 is_local,
2274 });
2275 }
2276
2277 /// Removes the selections for a given peer.
2278 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
2279 self.remote_selections.remove(&replica_id);
2280 cx.notify();
2281 }
2282
2283 /// Undoes the most recent transaction.
2284 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2285 let was_dirty = self.is_dirty();
2286 let old_version = self.version.clone();
2287
2288 if let Some((transaction_id, operation)) = self.text.undo() {
2289 self.send_operation(Operation::Buffer(operation), true, cx);
2290 self.did_edit(&old_version, was_dirty, cx);
2291 Some(transaction_id)
2292 } else {
2293 None
2294 }
2295 }
2296
2297 /// Manually undoes a specific transaction in the buffer's undo history.
2298 pub fn undo_transaction(
2299 &mut self,
2300 transaction_id: TransactionId,
2301 cx: &mut ModelContext<Self>,
2302 ) -> bool {
2303 let was_dirty = self.is_dirty();
2304 let old_version = self.version.clone();
2305 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2306 self.send_operation(Operation::Buffer(operation), true, cx);
2307 self.did_edit(&old_version, was_dirty, cx);
2308 true
2309 } else {
2310 false
2311 }
2312 }
2313
2314 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2315 pub fn undo_to_transaction(
2316 &mut self,
2317 transaction_id: TransactionId,
2318 cx: &mut ModelContext<Self>,
2319 ) -> bool {
2320 let was_dirty = self.is_dirty();
2321 let old_version = self.version.clone();
2322
2323 let operations = self.text.undo_to_transaction(transaction_id);
2324 let undone = !operations.is_empty();
2325 for operation in operations {
2326 self.send_operation(Operation::Buffer(operation), true, cx);
2327 }
2328 if undone {
2329 self.did_edit(&old_version, was_dirty, cx)
2330 }
2331 undone
2332 }
2333
    /// Redoes the most recently undone transaction.
2335 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2336 let was_dirty = self.is_dirty();
2337 let old_version = self.version.clone();
2338
2339 if let Some((transaction_id, operation)) = self.text.redo() {
2340 self.send_operation(Operation::Buffer(operation), true, cx);
2341 self.did_edit(&old_version, was_dirty, cx);
2342 Some(transaction_id)
2343 } else {
2344 None
2345 }
2346 }
2347
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2349 pub fn redo_to_transaction(
2350 &mut self,
2351 transaction_id: TransactionId,
2352 cx: &mut ModelContext<Self>,
2353 ) -> bool {
2354 let was_dirty = self.is_dirty();
2355 let old_version = self.version.clone();
2356
2357 let operations = self.text.redo_to_transaction(transaction_id);
2358 let redone = !operations.is_empty();
2359 for operation in operations {
2360 self.send_operation(Operation::Buffer(operation), true, cx);
2361 }
2362 if redone {
2363 self.did_edit(&old_version, was_dirty, cx)
2364 }
2365 redone
2366 }
2367
2368 /// Override current completion triggers with the user-provided completion triggers.
2369 pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
2370 self.completion_triggers.clone_from(&triggers);
2371 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2372 self.send_operation(
2373 Operation::UpdateCompletionTriggers {
2374 triggers,
2375 lamport_timestamp: self.completion_triggers_timestamp,
2376 },
2377 true,
2378 cx,
2379 );
2380 cx.notify();
2381 }
2382
2383 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2385 pub fn completion_triggers(&self) -> &[String] {
2386 &self.completion_triggers
2387 }
2388
2389 /// Call this directly after performing edits to prevent the preview tab
2390 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2391 /// to return false until there are additional edits.
2392 pub fn refresh_preview(&mut self) {
2393 self.preview_version = self.version.clone();
2394 }
2395
2396 /// Whether we should preserve the preview status of a tab containing this buffer.
2397 pub fn preserve_preview(&self) -> bool {
2398 !self.has_edits_since(&self.preview_version)
2399 }
2400}
2401
2402#[doc(hidden)]
2403#[cfg(any(test, feature = "test-support"))]
2404impl Buffer {
2405 pub fn edit_via_marked_text(
2406 &mut self,
2407 marked_string: &str,
2408 autoindent_mode: Option<AutoindentMode>,
2409 cx: &mut ModelContext<Self>,
2410 ) {
2411 let edits = self.edits_for_marked_text(marked_string);
2412 self.edit(edits, autoindent_mode, cx);
2413 }
2414
2415 pub fn set_group_interval(&mut self, group_interval: Duration) {
2416 self.text.set_group_interval(group_interval);
2417 }
2418
2419 pub fn randomly_edit<T>(
2420 &mut self,
2421 rng: &mut T,
2422 old_range_count: usize,
2423 cx: &mut ModelContext<Self>,
2424 ) where
2425 T: rand::Rng,
2426 {
2427 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2428 let mut last_end = None;
2429 for _ in 0..old_range_count {
2430 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2431 break;
2432 }
2433
2434 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2435 let mut range = self.random_byte_range(new_start, rng);
2436 if rng.gen_bool(0.2) {
2437 mem::swap(&mut range.start, &mut range.end);
2438 }
2439 last_end = Some(range.end);
2440
2441 let new_text_len = rng.gen_range(0..10);
2442 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2443
2444 edits.push((range, new_text));
2445 }
2446 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2447 self.edit(edits, None, cx);
2448 }
2449
2450 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
2451 let was_dirty = self.is_dirty();
2452 let old_version = self.version.clone();
2453
2454 let ops = self.text.randomly_undo_redo(rng);
2455 if !ops.is_empty() {
2456 for op in ops {
2457 self.send_operation(Operation::Buffer(op), true, cx);
2458 self.did_edit(&old_version, was_dirty, cx);
2459 }
2460 }
2461 }
2462}
2463
2464impl EventEmitter<BufferEvent> for Buffer {}
2465
2466impl Deref for Buffer {
2467 type Target = TextBuffer;
2468
2469 fn deref(&self) -> &Self::Target {
2470 &self.text
2471 }
2472}
2473
2474impl BufferSnapshot {
    /// Returns [`IndentSize`] for a given line that respects user settings
    /// and language preferences.
    pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
        indent_size_for_line(self, row)
    }

    /// Returns [`IndentSize`] for a given position that respects user settings
2480 /// and language preferences.
2481 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
2482 let settings = language_settings(self.language_at(position), self.file(), cx);
2483 if settings.hard_tabs {
2484 IndentSize::tab()
2485 } else {
2486 IndentSize::spaces(settings.tab_size.get())
2487 }
2488 }
2489
2490 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2491 /// is passed in as `single_indent_size`.
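    ///
    /// A sketch of intended use (not compiled as a doctest), assuming a snapshot named
    /// `snapshot` is in scope:
    ///
    /// ```ignore
    /// let indents = snapshot.suggested_indents(2..5u32, IndentSize::spaces(4));
    /// for (row, indent) in indents {
    ///     println!("row {row} should be indented by {} columns", indent.len);
    /// }
    /// ```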
2492 pub fn suggested_indents(
2493 &self,
2494 rows: impl Iterator<Item = u32>,
2495 single_indent_size: IndentSize,
2496 ) -> BTreeMap<u32, IndentSize> {
2497 let mut result = BTreeMap::new();
2498
2499 for row_range in contiguous_ranges(rows, 10) {
2500 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2501 Some(suggestions) => suggestions,
2502 _ => break,
2503 };
2504
2505 for (row, suggestion) in row_range.zip(suggestions) {
2506 let indent_size = if let Some(suggestion) = suggestion {
2507 result
2508 .get(&suggestion.basis_row)
2509 .copied()
2510 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2511 .with_delta(suggestion.delta, single_indent_size)
2512 } else {
2513 self.indent_size_for_line(row)
2514 };
2515
2516 result.insert(row, indent_size);
2517 }
2518 }
2519
2520 result
2521 }
2522
2523 fn suggest_autoindents(
2524 &self,
2525 row_range: Range<u32>,
2526 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2527 let config = &self.language.as_ref()?.config;
2528 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2529
2530 // Find the suggested indentation ranges based on the syntax tree.
2531 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2532 let end = Point::new(row_range.end, 0);
2533 let range = (start..end).to_offset(&self.text);
2534 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2535 Some(&grammar.indents_config.as_ref()?.query)
2536 });
2537 let indent_configs = matches
2538 .grammars()
2539 .iter()
2540 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2541 .collect::<Vec<_>>();
2542
2543 let mut indent_ranges = Vec::<Range<Point>>::new();
2544 let mut outdent_positions = Vec::<Point>::new();
2545 while let Some(mat) = matches.peek() {
2546 let mut start: Option<Point> = None;
2547 let mut end: Option<Point> = None;
2548
2549 let config = &indent_configs[mat.grammar_index];
2550 for capture in mat.captures {
2551 if capture.index == config.indent_capture_ix {
2552 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2553 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2554 } else if Some(capture.index) == config.start_capture_ix {
2555 start = Some(Point::from_ts_point(capture.node.end_position()));
2556 } else if Some(capture.index) == config.end_capture_ix {
2557 end = Some(Point::from_ts_point(capture.node.start_position()));
2558 } else if Some(capture.index) == config.outdent_capture_ix {
2559 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2560 }
2561 }
2562
2563 matches.advance();
2564 if let Some((start, end)) = start.zip(end) {
2565 if start.row == end.row {
2566 continue;
2567 }
2568
2569 let range = start..end;
2570 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2571 Err(ix) => indent_ranges.insert(ix, range),
2572 Ok(ix) => {
2573 let prev_range = &mut indent_ranges[ix];
2574 prev_range.end = prev_range.end.max(range.end);
2575 }
2576 }
2577 }
2578 }
2579
2580 let mut error_ranges = Vec::<Range<Point>>::new();
2581 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2582 Some(&grammar.error_query)
2583 });
2584 while let Some(mat) = matches.peek() {
2585 let node = mat.captures[0].node;
2586 let start = Point::from_ts_point(node.start_position());
2587 let end = Point::from_ts_point(node.end_position());
2588 let range = start..end;
2589 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2590 Ok(ix) | Err(ix) => ix,
2591 };
2592 let mut end_ix = ix;
2593 while let Some(existing_range) = error_ranges.get(end_ix) {
2594 if existing_range.end < end {
2595 end_ix += 1;
2596 } else {
2597 break;
2598 }
2599 }
2600 error_ranges.splice(ix..end_ix, [range]);
2601 matches.advance();
2602 }
2603
2604 outdent_positions.sort();
2605 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and set its end to the outdent position.
2608 if let Some(range_to_truncate) = indent_ranges
2609 .iter_mut()
2610 .filter(|indent_range| indent_range.contains(&outdent_position))
2611 .last()
2612 {
2613 range_to_truncate.end = outdent_position;
2614 }
2615 }
2616
        // Find the suggested indentation increases and decreases based on regexes.
2618 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2619 self.for_each_line(
2620 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2621 ..Point::new(row_range.end, 0),
2622 |row, line| {
2623 if config
2624 .decrease_indent_pattern
2625 .as_ref()
2626 .map_or(false, |regex| regex.is_match(line))
2627 {
2628 indent_change_rows.push((row, Ordering::Less));
2629 }
2630 if config
2631 .increase_indent_pattern
2632 .as_ref()
2633 .map_or(false, |regex| regex.is_match(line))
2634 {
2635 indent_change_rows.push((row + 1, Ordering::Greater));
2636 }
2637 },
2638 );
2639
2640 let mut indent_changes = indent_change_rows.into_iter().peekable();
2641 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2642 prev_non_blank_row.unwrap_or(0)
2643 } else {
2644 row_range.start.saturating_sub(1)
2645 };
2646 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2647 Some(row_range.map(move |row| {
2648 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2649
2650 let mut indent_from_prev_row = false;
2651 let mut outdent_from_prev_row = false;
2652 let mut outdent_to_row = u32::MAX;
2653
2654 while let Some((indent_row, delta)) = indent_changes.peek() {
2655 match indent_row.cmp(&row) {
2656 Ordering::Equal => match delta {
2657 Ordering::Less => outdent_from_prev_row = true,
2658 Ordering::Greater => indent_from_prev_row = true,
2659 _ => {}
2660 },
2661
2662 Ordering::Greater => break,
2663 Ordering::Less => {}
2664 }
2665
2666 indent_changes.next();
2667 }
2668
2669 for range in &indent_ranges {
2670 if range.start.row >= row {
2671 break;
2672 }
2673 if range.start.row == prev_row && range.end > row_start {
2674 indent_from_prev_row = true;
2675 }
2676 if range.end > prev_row_start && range.end <= row_start {
2677 outdent_to_row = outdent_to_row.min(range.start.row);
2678 }
2679 }
2680
2681 let within_error = error_ranges
2682 .iter()
2683 .any(|e| e.start.row < row && e.end > row_start);
2684
2685 let suggestion = if outdent_to_row == prev_row
2686 || (outdent_from_prev_row && indent_from_prev_row)
2687 {
2688 Some(IndentSuggestion {
2689 basis_row: prev_row,
2690 delta: Ordering::Equal,
2691 within_error,
2692 })
2693 } else if indent_from_prev_row {
2694 Some(IndentSuggestion {
2695 basis_row: prev_row,
2696 delta: Ordering::Greater,
2697 within_error,
2698 })
2699 } else if outdent_to_row < prev_row {
2700 Some(IndentSuggestion {
2701 basis_row: outdent_to_row,
2702 delta: Ordering::Equal,
2703 within_error,
2704 })
2705 } else if outdent_from_prev_row {
2706 Some(IndentSuggestion {
2707 basis_row: prev_row,
2708 delta: Ordering::Less,
2709 within_error,
2710 })
2711 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2712 {
2713 Some(IndentSuggestion {
2714 basis_row: prev_row,
2715 delta: Ordering::Equal,
2716 within_error,
2717 })
2718 } else {
2719 None
2720 };
2721
2722 prev_row = row;
2723 prev_row_start = row_start;
2724 suggestion
2725 }))
2726 }
2727
2728 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2729 while row > 0 {
2730 row -= 1;
2731 if !self.is_line_blank(row) {
2732 return Some(row);
2733 }
2734 }
2735 None
2736 }
2737
2738 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
2739 let captures = self.syntax.captures(range, &self.text, |grammar| {
2740 grammar.highlights_query.as_ref()
2741 });
2742 let highlight_maps = captures
2743 .grammars()
2744 .iter()
2745 .map(|grammar| grammar.highlight_map())
2746 .collect();
2747 (captures, highlight_maps)
2748 }
2749
2750 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
2751 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
2752 /// returned in chunks where each chunk has a single syntax highlighting style and
2753 /// diagnostic status.
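    ///
    /// A minimal sketch (not compiled as a doctest), assuming a snapshot named `snapshot`:
    ///
    /// ```ignore
    /// let highlighted_text: String = snapshot
    ///     .chunks(0..snapshot.len(), true)
    ///     .map(|chunk| chunk.text)
    ///     .collect();
    /// ```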
2754 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2755 let range = range.start.to_offset(self)..range.end.to_offset(self);
2756
2757 let mut syntax = None;
2758 if language_aware {
2759 syntax = Some(self.get_highlights(range.clone()));
2760 }
2761 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
2762 let diagnostics = language_aware;
2763 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
2764 }
2765
2766 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
2768 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2769 let mut line = String::new();
2770 let mut row = range.start.row;
2771 for chunk in self
2772 .as_rope()
2773 .chunks_in_range(range.to_offset(self))
2774 .chain(["\n"])
2775 {
2776 for (newline_ix, text) in chunk.split('\n').enumerate() {
2777 if newline_ix > 0 {
2778 callback(row, &line);
2779 row += 1;
2780 line.clear();
2781 }
2782 line.push_str(text);
2783 }
2784 }
2785 }
2786
2787 /// Iterates over every [`SyntaxLayer`] in the buffer.
2788 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
2789 self.syntax
2790 .layers_for_range(0..self.len(), &self.text, true)
2791 }
2792
2793 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
2794 let offset = position.to_offset(self);
2795 self.syntax
2796 .layers_for_range(offset..offset, &self.text, false)
2797 .filter(|l| l.node().end_byte() > offset)
2798 .last()
2799 }
2800
2801 /// Returns the main [`Language`].
2802 pub fn language(&self) -> Option<&Arc<Language>> {
2803 self.language.as_ref()
2804 }
2805
2806 /// Returns the [`Language`] at the given location.
2807 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2808 self.syntax_layer_at(position)
2809 .map(|info| info.language)
2810 .or(self.language.as_ref())
2811 }
2812
2813 /// Returns the settings for the language at the given location.
2814 pub fn settings_at<'a, D: ToOffset>(
2815 &self,
2816 position: D,
2817 cx: &'a AppContext,
2818 ) -> &'a LanguageSettings {
2819 language_settings(self.language_at(position), self.file.as_ref(), cx)
2820 }
2821
2822 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
2823 CharClassifier::new(self.language_scope_at(point))
2824 }
2825
2826 /// Returns the [`LanguageScope`] at the given location.
2827 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2828 let offset = position.to_offset(self);
2829 let mut scope = None;
2830 let mut smallest_range: Option<Range<usize>> = None;
2831
2832 // Use the layer that has the smallest node intersecting the given point.
2833 for layer in self
2834 .syntax
2835 .layers_for_range(offset..offset, &self.text, false)
2836 {
2837 let mut cursor = layer.node().walk();
2838
2839 let mut range = None;
2840 loop {
2841 let child_range = cursor.node().byte_range();
2842 if !child_range.to_inclusive().contains(&offset) {
2843 break;
2844 }
2845
2846 range = Some(child_range);
2847 if cursor.goto_first_child_for_byte(offset).is_none() {
2848 break;
2849 }
2850 }
2851
2852 if let Some(range) = range {
2853 if smallest_range
2854 .as_ref()
2855 .map_or(true, |smallest_range| range.len() < smallest_range.len())
2856 {
2857 smallest_range = Some(range);
2858 scope = Some(LanguageScope {
2859 language: layer.language.clone(),
2860 override_id: layer.override_id(offset, &self.text),
2861 });
2862 }
2863 }
2864 }
2865
2866 scope.or_else(|| {
2867 self.language.clone().map(|language| LanguageScope {
2868 language,
2869 override_id: None,
2870 })
2871 })
2872 }
2873
2874 /// Returns a tuple of the range and character kind of the word
2875 /// surrounding the given position.
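    ///
    /// An illustrative sketch (not compiled as a doctest), assuming a snapshot named
    /// `snapshot` whose text is `"hello world"`:
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(2);
    /// assert_eq!(range, 0..5); // the word "hello"
    /// ```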
2876 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2877 let mut start = start.to_offset(self);
2878 let mut end = start;
2879 let mut next_chars = self.chars_at(start).peekable();
2880 let mut prev_chars = self.reversed_chars_at(start).peekable();
2881
2882 let classifier = self.char_classifier_at(start);
2883 let word_kind = cmp::max(
2884 prev_chars.peek().copied().map(|c| classifier.kind(c)),
2885 next_chars.peek().copied().map(|c| classifier.kind(c)),
2886 );
2887
2888 for ch in prev_chars {
2889 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
2890 start -= ch.len_utf8();
2891 } else {
2892 break;
2893 }
2894 }
2895
2896 for ch in next_chars {
2897 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
2898 end += ch.len_utf8();
2899 } else {
2900 break;
2901 }
2902 }
2903
2904 (start..end, word_kind)
2905 }
2906
    /// Returns the range of the closest syntax node enclosing the given range.
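    ///
    /// A usage sketch (not compiled as a doctest), assuming a snapshot named `snapshot`:
    ///
    /// ```ignore
    /// // Grow a selection outward to the nearest enclosing syntax node.
    /// if let Some(ancestor) = snapshot.range_for_syntax_ancestor(10..12) {
    ///     assert!(ancestor.start <= 10 && ancestor.end >= 12);
    /// }
    /// ```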
2908 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2909 let range = range.start.to_offset(self)..range.end.to_offset(self);
2910 let mut result: Option<Range<usize>> = None;
2911 'outer: for layer in self
2912 .syntax
2913 .layers_for_range(range.clone(), &self.text, true)
2914 {
2915 let mut cursor = layer.node().walk();
2916
2917 // Descend to the first leaf that touches the start of the range,
2918 // and if the range is non-empty, extends beyond the start.
2919 while cursor.goto_first_child_for_byte(range.start).is_some() {
2920 if !range.is_empty() && cursor.node().end_byte() == range.start {
2921 cursor.goto_next_sibling();
2922 }
2923 }
2924
2925 // Ascend to the smallest ancestor that strictly contains the range.
2926 loop {
2927 let node_range = cursor.node().byte_range();
2928 if node_range.start <= range.start
2929 && node_range.end >= range.end
2930 && node_range.len() > range.len()
2931 {
2932 break;
2933 }
2934 if !cursor.goto_parent() {
2935 continue 'outer;
2936 }
2937 }
2938
2939 let left_node = cursor.node();
2940 let mut layer_result = left_node.byte_range();
2941
2942 // For an empty range, try to find another node immediately to the right of the range.
2943 if left_node.end_byte() == range.start {
2944 let mut right_node = None;
2945 while !cursor.goto_next_sibling() {
2946 if !cursor.goto_parent() {
2947 break;
2948 }
2949 }
2950
2951 while cursor.node().start_byte() == range.start {
2952 right_node = Some(cursor.node());
2953 if !cursor.goto_first_child() {
2954 break;
2955 }
2956 }
2957
2958 // If there is a candidate node on both sides of the (empty) range, then
2959 // decide between the two by favoring a named node over an anonymous token.
2960 // If both nodes are the same in that regard, favor the right one.
2961 if let Some(right_node) = right_node {
2962 if right_node.is_named() || !left_node.is_named() {
2963 layer_result = right_node.byte_range();
2964 }
2965 }
2966 }
2967
2968 if let Some(previous_result) = &result {
2969 if previous_result.len() < layer_result.len() {
2970 continue;
2971 }
2972 }
2973 result = Some(layer_result);
2974 }
2975
2976 result
2977 }
2978
2979 /// Returns the outline for the buffer.
2980 ///
2981 /// This method allows passing an optional [`SyntaxTheme`] to
2982 /// syntax-highlight the returned symbols.
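    ///
    /// A minimal sketch (not compiled as a doctest), assuming a snapshot named `snapshot`;
    /// it uses the underlying [`Self::outline_items_containing`] call so that the items
    /// can be inspected directly:
    ///
    /// ```ignore
    /// if let Some(items) = snapshot.outline_items_containing(0..snapshot.len(), true, None) {
    ///     for item in &items {
    ///         println!("{}{}", "  ".repeat(item.depth), item.text);
    ///     }
    /// }
    /// ```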
2983 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2984 self.outline_items_containing(0..self.len(), true, theme)
2985 .map(Outline::new)
2986 }
2987
2988 /// Returns all the symbols that contain the given position.
2989 ///
2990 /// This method allows passing an optional [`SyntaxTheme`] to
2991 /// syntax-highlight the returned symbols.
2992 pub fn symbols_containing<T: ToOffset>(
2993 &self,
2994 position: T,
2995 theme: Option<&SyntaxTheme>,
2996 ) -> Option<Vec<OutlineItem<Anchor>>> {
2997 let position = position.to_offset(self);
2998 let mut items = self.outline_items_containing(
2999 position.saturating_sub(1)..self.len().min(position + 1),
3000 false,
3001 theme,
3002 )?;
3003 let mut prev_depth = None;
3004 items.retain(|item| {
3005 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3006 prev_depth = Some(item.depth);
3007 result
3008 });
3009 Some(items)
3010 }
3011
3012 pub fn outline_items_containing<T: ToOffset>(
3013 &self,
3014 range: Range<T>,
3015 include_extra_context: bool,
3016 theme: Option<&SyntaxTheme>,
3017 ) -> Option<Vec<OutlineItem<Anchor>>> {
3018 let range = range.to_offset(self);
3019 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3020 grammar.outline_config.as_ref().map(|c| &c.query)
3021 });
3022 let configs = matches
3023 .grammars()
3024 .iter()
3025 .map(|g| g.outline_config.as_ref().unwrap())
3026 .collect::<Vec<_>>();
3027
3028 let mut items = Vec::new();
3029 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3030 while let Some(mat) = matches.peek() {
3031 let config = &configs[mat.grammar_index];
3032 if let Some(item) =
3033 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3034 {
3035 items.push(item);
3036 } else if let Some(capture) = mat
3037 .captures
3038 .iter()
3039 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3040 {
3041 let capture_range = capture.node.start_position()..capture.node.end_position();
3042 let mut capture_row_range =
3043 capture_range.start.row as u32..capture_range.end.row as u32;
3044 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3045 {
3046 capture_row_range.end -= 1;
3047 }
3048 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3049 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3050 last_row_range.end = capture_row_range.end;
3051 } else {
3052 annotation_row_ranges.push(capture_row_range);
3053 }
3054 } else {
3055 annotation_row_ranges.push(capture_row_range);
3056 }
3057 }
3058 matches.advance();
3059 }
3060
3061 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3062
3063 // Assign depths based on containment relationships and convert to anchors.
3064 let mut item_ends_stack = Vec::<Point>::new();
3065 let mut anchor_items = Vec::new();
3066 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3067 for item in items {
3068 while let Some(last_end) = item_ends_stack.last().copied() {
3069 if last_end < item.range.end {
3070 item_ends_stack.pop();
3071 } else {
3072 break;
3073 }
3074 }
3075
3076 let mut annotation_row_range = None;
3077 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3078 let row_preceding_item = item.range.start.row.saturating_sub(1);
3079 if next_annotation_row_range.end < row_preceding_item {
3080 annotation_row_ranges.next();
3081 } else {
3082 if next_annotation_row_range.end == row_preceding_item {
3083 annotation_row_range = Some(next_annotation_row_range.clone());
3084 annotation_row_ranges.next();
3085 }
3086 break;
3087 }
3088 }
3089
3090 anchor_items.push(OutlineItem {
3091 depth: item_ends_stack.len(),
3092 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3093 text: item.text,
3094 highlight_ranges: item.highlight_ranges,
3095 name_ranges: item.name_ranges,
3096 body_range: item.body_range.map(|body_range| {
3097 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3098 }),
3099 annotation_range: annotation_row_range.map(|annotation_range| {
3100 self.anchor_after(Point::new(annotation_range.start, 0))
3101 ..self.anchor_before(Point::new(
3102 annotation_range.end,
3103 self.line_len(annotation_range.end),
3104 ))
3105 }),
3106 });
3107 item_ends_stack.push(item.range.end);
3108 }
3109
3110 Some(anchor_items)
3111 }
3112
3113 fn next_outline_item(
3114 &self,
3115 config: &OutlineConfig,
3116 mat: &SyntaxMapMatch,
3117 range: &Range<usize>,
3118 include_extra_context: bool,
3119 theme: Option<&SyntaxTheme>,
3120 ) -> Option<OutlineItem<Point>> {
3121 let item_node = mat.captures.iter().find_map(|cap| {
3122 if cap.index == config.item_capture_ix {
3123 Some(cap.node)
3124 } else {
3125 None
3126 }
3127 })?;
3128
3129 let item_byte_range = item_node.byte_range();
3130 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3131 return None;
3132 }
3133 let item_point_range = Point::from_ts_point(item_node.start_position())
3134 ..Point::from_ts_point(item_node.end_position());
3135
3136 let mut open_point = None;
3137 let mut close_point = None;
3138 let mut buffer_ranges = Vec::new();
3139 for capture in mat.captures {
3140 let node_is_name;
3141 if capture.index == config.name_capture_ix {
3142 node_is_name = true;
3143 } else if Some(capture.index) == config.context_capture_ix
3144 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3145 {
3146 node_is_name = false;
3147 } else {
3148 if Some(capture.index) == config.open_capture_ix {
3149 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3150 } else if Some(capture.index) == config.close_capture_ix {
3151 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3152 }
3153
3154 continue;
3155 }
3156
3157 let mut range = capture.node.start_byte()..capture.node.end_byte();
3158 let start = capture.node.start_position();
3159 if capture.node.end_position().row > start.row {
3160 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3161 }
3162
3163 if !range.is_empty() {
3164 buffer_ranges.push((range, node_is_name));
3165 }
3166 }
3167 if buffer_ranges.is_empty() {
3168 return None;
3169 }
3170 let mut text = String::new();
3171 let mut highlight_ranges = Vec::new();
3172 let mut name_ranges = Vec::new();
3173 let mut chunks = self.chunks(
3174 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3175 true,
3176 );
3177 let mut last_buffer_range_end = 0;
3178 for (buffer_range, is_name) in buffer_ranges {
3179 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
3180 text.push(' ');
3181 }
3182 last_buffer_range_end = buffer_range.end;
3183 if is_name {
3184 let mut start = text.len();
3185 let end = start + buffer_range.len();
3186
                // When multiple names are captured, the matchable text
                // includes the whitespace in between the names.
3189 if !name_ranges.is_empty() {
3190 start -= 1;
3191 }
3192
3193 name_ranges.push(start..end);
3194 }
3195
3196 let mut offset = buffer_range.start;
3197 chunks.seek(buffer_range.clone());
3198 for mut chunk in chunks.by_ref() {
3199 if chunk.text.len() > buffer_range.end - offset {
3200 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3201 offset = buffer_range.end;
3202 } else {
3203 offset += chunk.text.len();
3204 }
3205 let style = chunk
3206 .syntax_highlight_id
3207 .zip(theme)
3208 .and_then(|(highlight, theme)| highlight.style(theme));
3209 if let Some(style) = style {
3210 let start = text.len();
3211 let end = start + chunk.text.len();
3212 highlight_ranges.push((start..end, style));
3213 }
3214 text.push_str(chunk.text);
3215 if offset >= buffer_range.end {
3216 break;
3217 }
3218 }
3219 }
3220
3221 Some(OutlineItem {
3222 depth: 0, // We'll calculate the depth later
3223 range: item_point_range,
3224 text,
3225 highlight_ranges,
3226 name_ranges,
3227 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3228 annotation_range: None,
3229 })
3230 }
3231
3232 /// For each grammar in the language, runs the provided
3233 /// [`tree_sitter::Query`] against the given range.
3234 pub fn matches(
3235 &self,
3236 range: Range<usize>,
3237 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3238 ) -> SyntaxMapMatches {
3239 self.syntax.matches(range, self, query)
3240 }
3241
    /// Returns bracket range pairs overlapping or adjacent to `range`.
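    ///
    /// An illustrative sketch (not compiled as a doctest), assuming a snapshot named `snapshot`:
    ///
    /// ```ignore
    /// for (open, close) in snapshot.bracket_ranges(6..7) {
    ///     println!("open bracket at {open:?}, close bracket at {close:?}");
    /// }
    /// ```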
3243 pub fn bracket_ranges<T: ToOffset>(
3244 &self,
3245 range: Range<T>,
3246 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
3247 // Find bracket pairs that *inclusively* contain the given range.
3248 let range = range.start.to_offset(self).saturating_sub(1)
3249 ..self.len().min(range.end.to_offset(self) + 1);
3250
3251 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3252 grammar.brackets_config.as_ref().map(|c| &c.query)
3253 });
3254 let configs = matches
3255 .grammars()
3256 .iter()
3257 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3258 .collect::<Vec<_>>();
3259
3260 iter::from_fn(move || {
3261 while let Some(mat) = matches.peek() {
3262 let mut open = None;
3263 let mut close = None;
3264 let config = &configs[mat.grammar_index];
3265 for capture in mat.captures {
3266 if capture.index == config.open_capture_ix {
3267 open = Some(capture.node.byte_range());
3268 } else if capture.index == config.close_capture_ix {
3269 close = Some(capture.node.byte_range());
3270 }
3271 }
3272
3273 matches.advance();
3274
3275 let Some((open, close)) = open.zip(close) else {
3276 continue;
3277 };
3278
3279 let bracket_range = open.start..=close.end;
3280 if !bracket_range.overlaps(&range) {
3281 continue;
3282 }
3283
3284 return Some((open, close));
3285 }
3286 None
3287 })
3288 }
3289
    /// Returns enclosing bracket ranges containing the given range.
3291 pub fn enclosing_bracket_ranges<T: ToOffset>(
3292 &self,
3293 range: Range<T>,
3294 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
3295 let range = range.start.to_offset(self)..range.end.to_offset(self);
3296
3297 self.bracket_ranges(range.clone())
3298 .filter(move |(open, close)| open.start <= range.start && close.end >= range.end)
3299 }
3300
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to filter the bracket ranges to consider.
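    ///
    /// A usage sketch (not compiled as a doctest), assuming a snapshot named `snapshot`;
    /// the filter shown keeps only pairs with at least one byte between the brackets:
    ///
    /// ```ignore
    /// let innermost = snapshot.innermost_enclosing_bracket_ranges(
    ///     10..12,
    ///     Some(&|open: Range<usize>, close: Range<usize>| open.end < close.start),
    /// );
    /// ```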
3304 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3305 &self,
3306 range: Range<T>,
3307 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3308 ) -> Option<(Range<usize>, Range<usize>)> {
3309 let range = range.start.to_offset(self)..range.end.to_offset(self);
3310
3311 // Get the ranges of the innermost pair of brackets.
3312 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3313
3314 for (open, close) in self.enclosing_bracket_ranges(range.clone()) {
3315 if let Some(range_filter) = range_filter {
3316 if !range_filter(open.clone(), close.clone()) {
3317 continue;
3318 }
3319 }
3320
3321 let len = close.end - open.start;
3322
3323 if let Some((existing_open, existing_close)) = &result {
3324 let existing_len = existing_close.end - existing_open.start;
3325 if len > existing_len {
3326 continue;
3327 }
3328 }
3329
3330 result = Some((open, close));
3331 }
3332
3333 result
3334 }
3335
    /// Returns the offset ranges of any matches of the redaction query.
3337 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3338 /// will be run on the relevant section of the buffer.
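    ///
    /// A minimal sketch (not compiled as a doctest), assuming a snapshot named `snapshot`:
    ///
    /// ```ignore
    /// for redacted in snapshot.redacted_ranges(0..snapshot.len()) {
    ///     // e.g. replace `redacted` with bullet characters before rendering.
    /// }
    /// ```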
3339 pub fn redacted_ranges<T: ToOffset>(
3340 &self,
3341 range: Range<T>,
3342 ) -> impl Iterator<Item = Range<usize>> + '_ {
3343 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3344 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3345 grammar
3346 .redactions_config
3347 .as_ref()
3348 .map(|config| &config.query)
3349 });
3350
3351 let configs = syntax_matches
3352 .grammars()
3353 .iter()
3354 .map(|grammar| grammar.redactions_config.as_ref())
3355 .collect::<Vec<_>>();
3356
3357 iter::from_fn(move || {
3358 let redacted_range = syntax_matches
3359 .peek()
3360 .and_then(|mat| {
3361 configs[mat.grammar_index].and_then(|config| {
3362 mat.captures
3363 .iter()
3364 .find(|capture| capture.index == config.redaction_capture_ix)
3365 })
3366 })
3367 .map(|mat| mat.node.byte_range());
3368 syntax_matches.advance();
3369 redacted_range
3370 })
3371 }
3372
3373 pub fn injections_intersecting_range<T: ToOffset>(
3374 &self,
3375 range: Range<T>,
3376 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3377 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3378
3379 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3380 grammar
3381 .injection_config
3382 .as_ref()
3383 .map(|config| &config.query)
3384 });
3385
3386 let configs = syntax_matches
3387 .grammars()
3388 .iter()
3389 .map(|grammar| grammar.injection_config.as_ref())
3390 .collect::<Vec<_>>();
3391
3392 iter::from_fn(move || {
3393 let ranges = syntax_matches.peek().and_then(|mat| {
3394 let config = &configs[mat.grammar_index]?;
3395 let content_capture_range = mat.captures.iter().find_map(|capture| {
3396 if capture.index == config.content_capture_ix {
3397 Some(capture.node.byte_range())
3398 } else {
3399 None
3400 }
3401 })?;
3402 let language = self.language_at(content_capture_range.start)?;
3403 Some((content_capture_range, language))
3404 });
3405 syntax_matches.advance();
3406 ranges
3407 })
3408 }
3409
3410 pub fn runnable_ranges(
3411 &self,
3412 range: Range<Anchor>,
3413 ) -> impl Iterator<Item = RunnableRange> + '_ {
3414 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3415
3416 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3417 grammar.runnable_config.as_ref().map(|config| &config.query)
3418 });
3419
3420 let test_configs = syntax_matches
3421 .grammars()
3422 .iter()
3423 .map(|grammar| grammar.runnable_config.as_ref())
3424 .collect::<Vec<_>>();
3425
3426 iter::from_fn(move || loop {
3427 let mat = syntax_matches.peek()?;
3428
3429 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
3430 let mut run_range = None;
3431 let full_range = mat.captures.iter().fold(
3432 Range {
3433 start: usize::MAX,
3434 end: 0,
3435 },
3436 |mut acc, next| {
3437 let byte_range = next.node.byte_range();
3438 if acc.start > byte_range.start {
3439 acc.start = byte_range.start;
3440 }
3441 if acc.end < byte_range.end {
3442 acc.end = byte_range.end;
3443 }
3444 acc
3445 },
3446 );
3447 if full_range.start > full_range.end {
3448 // We did not find a full spanning range of this match.
3449 return None;
3450 }
3451 let extra_captures: SmallVec<[_; 1]> =
3452 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
3453 test_configs
3454 .extra_captures
3455 .get(capture.index as usize)
3456 .cloned()
3457 .and_then(|tag_name| match tag_name {
3458 RunnableCapture::Named(name) => {
3459 Some((capture.node.byte_range(), name))
3460 }
3461 RunnableCapture::Run => {
3462 let _ = run_range.insert(capture.node.byte_range());
3463 None
3464 }
3465 })
3466 }));
3467 let run_range = run_range?;
3468 let tags = test_configs
3469 .query
3470 .property_settings(mat.pattern_index)
3471 .iter()
3472 .filter_map(|property| {
3473 if *property.key == *"tag" {
3474 property
3475 .value
3476 .as_ref()
3477 .map(|value| RunnableTag(value.to_string().into()))
3478 } else {
3479 None
3480 }
3481 })
3482 .collect();
3483 let extra_captures = extra_captures
3484 .into_iter()
3485 .map(|(range, name)| {
3486 (
3487 name.to_string(),
3488 self.text_for_range(range.clone()).collect::<String>(),
3489 )
3490 })
3491 .collect();
3492 // All tags should have the same range.
3493 Some(RunnableRange {
3494 run_range,
3495 full_range,
3496 runnable: Runnable {
3497 tags,
3498 language: mat.language,
3499 buffer: self.remote_id(),
3500 },
3501 extra_captures,
3502 buffer_id: self.remote_id(),
3503 })
3504 });
3505
3506 syntax_matches.advance();
3507 if test_range.is_some() {
3508 // It's fine for us to short-circuit on .peek()? returning None. We don't want to return None from this iter if we
3509 // had a capture that did not contain a run marker, hence we'll just loop around for the next capture.
3510 return test_range;
3511 }
3512 })
3513 }
3514
3515 pub fn indent_guides_in_range(
3516 &self,
3517 range: Range<Anchor>,
3518 ignore_disabled_for_language: bool,
3519 cx: &AppContext,
3520 ) -> Vec<IndentGuide> {
3521 let language_settings = language_settings(self.language(), self.file.as_ref(), cx);
3522 let settings = language_settings.indent_guides;
3523 if !ignore_disabled_for_language && !settings.enabled {
3524 return Vec::new();
3525 }
3526 let tab_size = language_settings.tab_size.get() as u32;
3527
3528 let start_row = range.start.to_point(self).row;
3529 let end_row = range.end.to_point(self).row;
3530 let row_range = start_row..end_row + 1;
3531
3532 let mut row_indents = self.line_indents_in_row_range(row_range.clone());
3533
3534 let mut result_vec = Vec::new();
3535 let mut indent_stack = SmallVec::<[IndentGuide; 8]>::new();
3536
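        // Walk the rows, maintaining a stack of currently open indent guides. A guide is closed
        // and emitted whenever the depth decreases; any guides still open at the end are flushed
        // after the loop.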
3537 while let Some((first_row, mut line_indent)) = row_indents.next() {
3538 let current_depth = indent_stack.len() as u32;
3539
            // When the current row is empty, scan ahead until we find a non-empty line,
            // then use that line's indentation to decide the depth to push onto the indent stack.
3542 let mut found_indent = false;
3543 let mut last_row = first_row;
3544 if line_indent.is_line_empty() {
3545 let mut trailing_row = end_row;
3546 while !found_indent {
3547 let (target_row, new_line_indent) =
3548 if let Some(display_row) = row_indents.next() {
3549 display_row
3550 } else {
3551 // This means we reached the end of the given range and found empty lines at the end.
3552 // We need to traverse further until we find a non-empty line to know if we need to add
3553 // an indent guide for the last visible indent.
3554 trailing_row += 1;
3555
3556 const TRAILING_ROW_SEARCH_LIMIT: u32 = 25;
3557 if trailing_row > self.max_point().row
3558 || trailing_row > end_row + TRAILING_ROW_SEARCH_LIMIT
3559 {
3560 break;
3561 }
3562 let new_line_indent = self.line_indent_for_row(trailing_row);
3563 (trailing_row, new_line_indent)
3564 };
3565
3566 if new_line_indent.is_line_empty() {
3567 continue;
3568 }
3569 last_row = target_row.min(end_row);
3570 line_indent = new_line_indent;
3571 found_indent = true;
3572 break;
3573 }
3574 } else {
3575 found_indent = true
3576 }
3577
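            // The guide depth is the indent width in tab-size units, rounded up so that a
            // partial indent level still produces one more guide.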
3578 let depth = if found_indent {
3579 line_indent.len(tab_size) / tab_size
3580 + ((line_indent.len(tab_size) % tab_size) > 0) as u32
3581 } else {
3582 current_depth
3583 };
3584
            match depth.cmp(&current_depth) {
3586 Ordering::Less => {
3587 for _ in 0..(current_depth - depth) {
3588 let mut indent = indent_stack.pop().unwrap();
3589 if last_row != first_row {
                            // In this case, we landed on an empty row, had to seek forward,
                            // and discovered that the indent we were on is ending.
                            // This means that the last display row must be on the line that
                            // ends this indent range, so we should display the range up to
                            // the first non-empty line.
3595 indent.end_row = first_row.saturating_sub(1);
3596 }
3597
3598 result_vec.push(indent)
3599 }
3600 }
3601 Ordering::Greater => {
3602 for next_depth in current_depth..depth {
3603 indent_stack.push(IndentGuide {
3604 buffer_id: self.remote_id(),
3605 start_row: first_row,
3606 end_row: last_row,
3607 depth: next_depth,
3608 tab_size,
3609 settings,
3610 });
3611 }
3612 }
3613 _ => {}
3614 }
3615
3616 for indent in indent_stack.iter_mut() {
3617 indent.end_row = last_row;
3618 }
3619 }
3620
3621 result_vec.extend(indent_stack);
3622
3623 result_vec
3624 }
3625
3626 pub async fn enclosing_indent(
3627 &self,
3628 mut buffer_row: BufferRow,
3629 ) -> Option<(Range<BufferRow>, LineIndent)> {
3630 let max_row = self.max_point().row;
3631 if buffer_row >= max_row {
3632 return None;
3633 }
3634
3635 let mut target_indent = self.line_indent_for_row(buffer_row);
3636
3637 // If the current row is at the start of an indented block, we want to return this
3638 // block as the enclosing indent.
3639 if !target_indent.is_line_empty() && buffer_row < max_row {
3640 let next_line_indent = self.line_indent_for_row(buffer_row + 1);
3641 if !next_line_indent.is_line_empty()
3642 && target_indent.raw_len() < next_line_indent.raw_len()
3643 {
3644 target_indent = next_line_indent;
3645 buffer_row += 1;
3646 }
3647 }
3648
3649 const SEARCH_ROW_LIMIT: u32 = 25000;
3650 const SEARCH_WHITESPACE_ROW_LIMIT: u32 = 2500;
3651 const YIELD_INTERVAL: u32 = 100;
3652
3653 let mut accessed_row_counter = 0;
3654
        // If the current row is blank, search above and below for the nearest non-empty lines.
3656 if target_indent.is_line_empty() {
3657 let start = buffer_row.saturating_sub(SEARCH_WHITESPACE_ROW_LIMIT);
3658 let end = (max_row + 1).min(buffer_row + SEARCH_WHITESPACE_ROW_LIMIT);
3659
3660 let mut non_empty_line_above = None;
3661 for (row, indent) in self
3662 .text
3663 .reversed_line_indents_in_row_range(start..buffer_row)
3664 {
3665 accessed_row_counter += 1;
3666 if accessed_row_counter == YIELD_INTERVAL {
3667 accessed_row_counter = 0;
3668 yield_now().await;
3669 }
3670 if !indent.is_line_empty() {
3671 non_empty_line_above = Some((row, indent));
3672 break;
3673 }
3674 }
3675
3676 let mut non_empty_line_below = None;
3677 for (row, indent) in self.text.line_indents_in_row_range((buffer_row + 1)..end) {
3678 accessed_row_counter += 1;
3679 if accessed_row_counter == YIELD_INTERVAL {
3680 accessed_row_counter = 0;
3681 yield_now().await;
3682 }
3683 if !indent.is_line_empty() {
3684 non_empty_line_below = Some((row, indent));
3685 break;
3686 }
3687 }
3688
3689 let (row, indent) = match (non_empty_line_above, non_empty_line_below) {
3690 (Some((above_row, above_indent)), Some((below_row, below_indent))) => {
3691 if above_indent.raw_len() >= below_indent.raw_len() {
3692 (above_row, above_indent)
3693 } else {
3694 (below_row, below_indent)
3695 }
3696 }
3697 (Some(above), None) => above,
3698 (None, Some(below)) => below,
3699 _ => return None,
3700 };
3701
3702 target_indent = indent;
3703 buffer_row = row;
3704 }
3705
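        // Scan outward for the nearest non-empty rows above and below whose indentation is
        // shallower than the target; these bound the enclosing indented block.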
3706 let start = buffer_row.saturating_sub(SEARCH_ROW_LIMIT);
3707 let end = (max_row + 1).min(buffer_row + SEARCH_ROW_LIMIT);
3708
3709 let mut start_indent = None;
3710 for (row, indent) in self
3711 .text
3712 .reversed_line_indents_in_row_range(start..buffer_row)
3713 {
3714 accessed_row_counter += 1;
3715 if accessed_row_counter == YIELD_INTERVAL {
3716 accessed_row_counter = 0;
3717 yield_now().await;
3718 }
3719 if !indent.is_line_empty() && indent.raw_len() < target_indent.raw_len() {
3720 start_indent = Some((row, indent));
3721 break;
3722 }
3723 }
3724 let (start_row, start_indent_size) = start_indent?;
3725
3726 let mut end_indent = (end, None);
3727 for (row, indent) in self.text.line_indents_in_row_range((buffer_row + 1)..end) {
3728 accessed_row_counter += 1;
3729 if accessed_row_counter == YIELD_INTERVAL {
3730 accessed_row_counter = 0;
3731 yield_now().await;
3732 }
3733 if !indent.is_line_empty() && indent.raw_len() < target_indent.raw_len() {
3734 end_indent = (row.saturating_sub(1), Some(indent));
3735 break;
3736 }
3737 }
3738 let (end_row, end_indent_size) = end_indent;
3739
3740 let indent = if let Some(end_indent_size) = end_indent_size {
3741 if start_indent_size.raw_len() > end_indent_size.raw_len() {
3742 start_indent_size
3743 } else {
3744 end_indent_size
3745 }
3746 } else {
3747 start_indent_size
3748 };
3749
3750 Some((start_row..end_row, indent))
3751 }
3752
    /// Returns the selections of each peer that intersect the given range, optionally
    /// including this replica's own selections.
3754 #[allow(clippy::type_complexity)]
3755 pub fn selections_in_range(
3756 &self,
3757 range: Range<Anchor>,
3758 include_local: bool,
3759 ) -> impl Iterator<
3760 Item = (
3761 ReplicaId,
3762 bool,
3763 CursorShape,
3764 impl Iterator<Item = &Selection<Anchor>> + '_,
3765 ),
3766 > + '_ {
3767 self.remote_selections
3768 .iter()
3769 .filter(move |(replica_id, set)| {
3770 (include_local || **replica_id != self.text.replica_id())
3771 && !set.selections.is_empty()
3772 })
3773 .map(move |(replica_id, set)| {
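                // These comparators never return Equal, so binary_search_by always yields Err(ix)
                // at the partition point: start_ix is the first selection ending at or after
                // range.start, and end_ix is the first selection starting after range.end.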
3774 let start_ix = match set.selections.binary_search_by(|probe| {
3775 probe.end.cmp(&range.start, self).then(Ordering::Greater)
3776 }) {
3777 Ok(ix) | Err(ix) => ix,
3778 };
3779 let end_ix = match set.selections.binary_search_by(|probe| {
3780 probe.start.cmp(&range.end, self).then(Ordering::Less)
3781 }) {
3782 Ok(ix) | Err(ix) => ix,
3783 };
3784
3785 (
3786 *replica_id,
3787 set.line_mode,
3788 set.cursor_shape,
3789 set.selections[start_ix..end_ix].iter(),
3790 )
3791 })
3792 }
3793
3794 /// Whether the buffer contains any Git changes.
3795 pub fn has_git_diff(&self) -> bool {
3796 !self.git_diff.is_empty()
3797 }
3798
3799 /// Returns all the Git diff hunks intersecting the given row range.
3800 #[cfg(any(test, feature = "test-support"))]
3801 pub fn git_diff_hunks_in_row_range(
3802 &self,
3803 range: Range<BufferRow>,
3804 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk> {
3805 self.git_diff.hunks_in_row_range(range, self)
3806 }
3807
3808 /// Returns all the Git diff hunks intersecting the given
3809 /// range.
3810 pub fn git_diff_hunks_intersecting_range(
3811 &self,
3812 range: Range<Anchor>,
3813 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk> {
3814 self.git_diff.hunks_intersecting_range(range, self)
3815 }
3816
3817 /// Returns all the Git diff hunks intersecting the given
3818 /// range, in reverse order.
3819 pub fn git_diff_hunks_intersecting_range_rev(
3820 &self,
3821 range: Range<Anchor>,
3822 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk> {
3823 self.git_diff.hunks_intersecting_range_rev(range, self)
3824 }
3825
    /// Returns whether the buffer contains any diagnostics.
3827 pub fn has_diagnostics(&self) -> bool {
3828 !self.diagnostics.is_empty()
3829 }
3830
3831 /// Returns all the diagnostics intersecting the given range.
3832 pub fn diagnostics_in_range<'a, T, O>(
3833 &'a self,
3834 search_range: Range<T>,
3835 reversed: bool,
3836 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3837 where
3838 T: 'a + Clone + ToOffset,
3839 O: 'a + FromAnchor + Ord,
3840 {
3841 let mut iterators: Vec<_> = self
3842 .diagnostics
3843 .iter()
3844 .map(|(_, collection)| {
3845 collection
3846 .range::<T, O>(search_range.clone(), self, true, reversed)
3847 .peekable()
3848 })
3849 .collect();
3850
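        // Merge the per-server iterators: on each step, yield the peeked entry with the smallest
        // start position, breaking ties by severity and then group id, honoring `reversed`.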
3851 std::iter::from_fn(move || {
3852 let (next_ix, _) = iterators
3853 .iter_mut()
3854 .enumerate()
3855 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
3856 .min_by(|(_, a), (_, b)| {
3857 let cmp = a
3858 .range
3859 .start
3860 .cmp(&b.range.start)
3861 // when range is equal, sort by diagnostic severity
3862 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
3863 // and stabilize order with group_id
3864 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
3865 if reversed {
3866 cmp.reverse()
3867 } else {
3868 cmp
3869 }
3870 })?;
3871 iterators[next_ix].next()
3872 })
3873 }
3874
3875 /// Returns all the diagnostic groups associated with the given
3876 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
3878 pub fn diagnostic_groups(
3879 &self,
3880 language_server_id: Option<LanguageServerId>,
3881 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
3882 let mut groups = Vec::new();
3883
3884 if let Some(language_server_id) = language_server_id {
3885 if let Ok(ix) = self
3886 .diagnostics
3887 .binary_search_by_key(&language_server_id, |e| e.0)
3888 {
3889 self.diagnostics[ix]
3890 .1
3891 .groups(language_server_id, &mut groups, self);
3892 }
3893 } else {
3894 for (language_server_id, diagnostics) in self.diagnostics.iter() {
3895 diagnostics.groups(*language_server_id, &mut groups, self);
3896 }
3897 }
3898
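        // Sort groups by the position of their primary entry, breaking ties by language server id.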
3899 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
3900 let a_start = &group_a.entries[group_a.primary_ix].range.start;
3901 let b_start = &group_b.entries[group_b.primary_ix].range.start;
3902 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
3903 });
3904
3905 groups
3906 }
3907
3908 /// Returns an iterator over the diagnostics for the given group.
3909 pub fn diagnostic_group<'a, O>(
3910 &'a self,
3911 group_id: usize,
3912 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3913 where
3914 O: 'a + FromAnchor,
3915 {
3916 self.diagnostics
3917 .iter()
3918 .flat_map(move |(_, set)| set.group(group_id, self))
3919 }
3920
3921 /// An integer version number that accounts for all updates besides
3922 /// the buffer's text itself (which is versioned via a version vector).
3923 pub fn non_text_state_update_count(&self) -> usize {
3924 self.non_text_state_update_count
3925 }
3926
    /// Returns a snapshot of the underlying file.
3928 pub fn file(&self) -> Option<&Arc<dyn File>> {
3929 self.file.as_ref()
3930 }
3931
3932 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
3933 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
3934 if let Some(file) = self.file() {
3935 if file.path().file_name().is_none() || include_root {
3936 Some(file.full_path(cx))
3937 } else {
3938 Some(file.path().to_path_buf())
3939 }
3940 } else {
3941 None
3942 }
3943 }
3944}
3945
3946fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
3947 indent_size_for_text(text.chars_at(Point::new(row, 0)))
3948}
3949
3950fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
3951 let mut result = IndentSize::spaces(0);
3952 for c in text {
3953 let kind = match c {
3954 ' ' => IndentKind::Space,
3955 '\t' => IndentKind::Tab,
3956 _ => break,
3957 };
3958 if result.len == 0 {
3959 result.kind = kind;
3960 }
3961 result.len += 1;
3962 }
3963 result
3964}
3965
3966impl Clone for BufferSnapshot {
3967 fn clone(&self) -> Self {
3968 Self {
3969 text: self.text.clone(),
3970 git_diff: self.git_diff.clone(),
3971 syntax: self.syntax.clone(),
3972 file: self.file.clone(),
3973 remote_selections: self.remote_selections.clone(),
3974 diagnostics: self.diagnostics.clone(),
3975 language: self.language.clone(),
3976 non_text_state_update_count: self.non_text_state_update_count,
3977 }
3978 }
3979}
3980
3981impl Deref for BufferSnapshot {
3982 type Target = text::BufferSnapshot;
3983
3984 fn deref(&self) -> &Self::Target {
3985 &self.text
3986 }
3987}
3988
3989unsafe impl<'a> Send for BufferChunks<'a> {}
3990
3991impl<'a> BufferChunks<'a> {
3992 pub(crate) fn new(
3993 text: &'a Rope,
3994 range: Range<usize>,
3995 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
3996 diagnostics: bool,
3997 buffer_snapshot: Option<&'a BufferSnapshot>,
3998 ) -> Self {
3999 let mut highlights = None;
4000 if let Some((captures, highlight_maps)) = syntax {
4001 highlights = Some(BufferChunkHighlights {
4002 captures,
4003 next_capture: None,
4004 stack: Default::default(),
4005 highlight_maps,
4006 })
4007 }
4008
4009 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4010 let chunks = text.chunks_in_range(range.clone());
4011
4012 let mut this = BufferChunks {
4013 range,
4014 buffer_snapshot,
4015 chunks,
4016 diagnostic_endpoints,
4017 error_depth: 0,
4018 warning_depth: 0,
4019 information_depth: 0,
4020 hint_depth: 0,
4021 unnecessary_depth: 0,
4022 highlights,
4023 };
4024 this.initialize_diagnostic_endpoints();
4025 this
4026 }
4027
    /// Seeks to the given byte range in the buffer.
4029 pub fn seek(&mut self, range: Range<usize>) {
4030 let old_range = std::mem::replace(&mut self.range, range.clone());
4031 self.chunks.set_range(self.range.clone());
4032 if let Some(highlights) = self.highlights.as_mut() {
4033 if old_range.start >= self.range.start && old_range.end <= self.range.end {
4034 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4035 highlights
4036 .stack
4037 .retain(|(end_offset, _)| *end_offset > range.start);
4038 if let Some(capture) = &highlights.next_capture {
4039 if range.start >= capture.node.start_byte() {
4040 let next_capture_end = capture.node.end_byte();
4041 if range.start < next_capture_end {
4042 highlights.stack.push((
4043 next_capture_end,
4044 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4045 ));
4046 }
4047 highlights.next_capture.take();
4048 }
4049 }
4050 } else if let Some(snapshot) = self.buffer_snapshot {
4051 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4052 *highlights = BufferChunkHighlights {
4053 captures,
4054 next_capture: None,
4055 stack: Default::default(),
4056 highlight_maps,
4057 };
4058 } else {
4059 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4060 // Seeking such BufferChunks is not supported.
4061 debug_assert!(false, "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot");
4062 }
4063
4064 highlights.captures.set_byte_range(self.range.clone());
4065 self.initialize_diagnostic_endpoints();
4066 }
4067 }
4068
4069 fn initialize_diagnostic_endpoints(&mut self) {
4070 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4071 if let Some(buffer) = self.buffer_snapshot {
4072 let mut diagnostic_endpoints = Vec::new();
4073 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4074 diagnostic_endpoints.push(DiagnosticEndpoint {
4075 offset: entry.range.start,
4076 is_start: true,
4077 severity: entry.diagnostic.severity,
4078 is_unnecessary: entry.diagnostic.is_unnecessary,
4079 });
4080 diagnostic_endpoints.push(DiagnosticEndpoint {
4081 offset: entry.range.end,
4082 is_start: false,
4083 severity: entry.diagnostic.severity,
4084 is_unnecessary: entry.diagnostic.is_unnecessary,
4085 });
4086 }
4087 diagnostic_endpoints
4088 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4089 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4090 }
4091 }
4092 }
4093
4094 /// The current byte offset in the buffer.
4095 pub fn offset(&self) -> usize {
4096 self.range.start
4097 }
4098
4099 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4100 let depth = match endpoint.severity {
4101 DiagnosticSeverity::ERROR => &mut self.error_depth,
4102 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4103 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4104 DiagnosticSeverity::HINT => &mut self.hint_depth,
4105 _ => return,
4106 };
4107 if endpoint.is_start {
4108 *depth += 1;
4109 } else {
4110 *depth -= 1;
4111 }
4112
4113 if endpoint.is_unnecessary {
4114 if endpoint.is_start {
4115 self.unnecessary_depth += 1;
4116 } else {
4117 self.unnecessary_depth -= 1;
4118 }
4119 }
4120 }
4121
4122 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4123 if self.error_depth > 0 {
4124 Some(DiagnosticSeverity::ERROR)
4125 } else if self.warning_depth > 0 {
4126 Some(DiagnosticSeverity::WARNING)
4127 } else if self.information_depth > 0 {
4128 Some(DiagnosticSeverity::INFORMATION)
4129 } else if self.hint_depth > 0 {
4130 Some(DiagnosticSeverity::HINT)
4131 } else {
4132 None
4133 }
4134 }
4135
4136 fn current_code_is_unnecessary(&self) -> bool {
4137 self.unnecessary_depth > 0
4138 }
4139}
4140
4141impl<'a> Iterator for BufferChunks<'a> {
4142 type Item = Chunk<'a>;
4143
4144 fn next(&mut self) -> Option<Self::Item> {
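        // Each yielded chunk is clipped so that it ends no later than the next syntax capture
        // boundary or diagnostic endpoint, keeping highlight and diagnostic state uniform within
        // a single chunk.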
4145 let mut next_capture_start = usize::MAX;
4146 let mut next_diagnostic_endpoint = usize::MAX;
4147
4148 if let Some(highlights) = self.highlights.as_mut() {
4149 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4150 if *parent_capture_end <= self.range.start {
4151 highlights.stack.pop();
4152 } else {
4153 break;
4154 }
4155 }
4156
4157 if highlights.next_capture.is_none() {
4158 highlights.next_capture = highlights.captures.next();
4159 }
4160
4161 while let Some(capture) = highlights.next_capture.as_ref() {
4162 if self.range.start < capture.node.start_byte() {
4163 next_capture_start = capture.node.start_byte();
4164 break;
4165 } else {
4166 let highlight_id =
4167 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4168 highlights
4169 .stack
4170 .push((capture.node.end_byte(), highlight_id));
4171 highlights.next_capture = highlights.captures.next();
4172 }
4173 }
4174 }
4175
4176 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4177 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4178 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4179 if endpoint.offset <= self.range.start {
4180 self.update_diagnostic_depths(endpoint);
4181 diagnostic_endpoints.next();
4182 } else {
4183 next_diagnostic_endpoint = endpoint.offset;
4184 break;
4185 }
4186 }
4187 }
4188 self.diagnostic_endpoints = diagnostic_endpoints;
4189
4190 if let Some(chunk) = self.chunks.peek() {
4191 let chunk_start = self.range.start;
4192 let mut chunk_end = (self.chunks.offset() + chunk.len())
4193 .min(next_capture_start)
4194 .min(next_diagnostic_endpoint);
4195 let mut highlight_id = None;
4196 if let Some(highlights) = self.highlights.as_ref() {
4197 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4198 chunk_end = chunk_end.min(*parent_capture_end);
4199 highlight_id = Some(*parent_highlight_id);
4200 }
4201 }
4202
4203 let slice =
4204 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4205 self.range.start = chunk_end;
4206 if self.range.start == self.chunks.offset() + chunk.len() {
4207 self.chunks.next().unwrap();
4208 }
4209
4210 Some(Chunk {
4211 text: slice,
4212 syntax_highlight_id: highlight_id,
4213 diagnostic_severity: self.current_diagnostic_severity(),
4214 is_unnecessary: self.current_code_is_unnecessary(),
4215 ..Default::default()
4216 })
4217 } else {
4218 None
4219 }
4220 }
4221}
4222
4223impl operation_queue::Operation for Operation {
4224 fn lamport_timestamp(&self) -> clock::Lamport {
4225 match self {
4226 Operation::Buffer(_) => {
4227 unreachable!("buffer operations should never be deferred at this layer")
4228 }
4229 Operation::UpdateDiagnostics {
4230 lamport_timestamp, ..
4231 }
4232 | Operation::UpdateSelections {
4233 lamport_timestamp, ..
4234 }
4235 | Operation::UpdateCompletionTriggers {
4236 lamport_timestamp, ..
4237 } => *lamport_timestamp,
4238 }
4239 }
4240}
4241
4242impl Default for Diagnostic {
4243 fn default() -> Self {
4244 Self {
4245 source: Default::default(),
4246 code: None,
4247 severity: DiagnosticSeverity::ERROR,
4248 message: Default::default(),
4249 group_id: 0,
4250 is_primary: false,
4251 is_disk_based: false,
4252 is_unnecessary: false,
4253 data: None,
4254 }
4255 }
4256}
4257
4258impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
4260 pub fn spaces(len: u32) -> Self {
4261 Self {
4262 len,
4263 kind: IndentKind::Space,
4264 }
4265 }
4266
4267 /// Returns an [`IndentSize`] representing a tab.
4268 pub fn tab() -> Self {
4269 Self {
4270 len: 1,
4271 kind: IndentKind::Tab,
4272 }
4273 }
4274
4275 /// An iterator over the characters represented by this [`IndentSize`].
4276 pub fn chars(&self) -> impl Iterator<Item = char> {
4277 iter::repeat(self.char()).take(self.len as usize)
4278 }
4279
4280 /// The character representation of this [`IndentSize`].
4281 pub fn char(&self) -> char {
4282 match self.kind {
4283 IndentKind::Space => ' ',
4284 IndentKind::Tab => '\t',
4285 }
4286 }
4287
4288 /// Consumes the current [`IndentSize`] and returns a new one that has
4289 /// been shrunk or enlarged by the given size along the given direction.
4290 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4291 match direction {
4292 Ordering::Less => {
4293 if self.kind == size.kind && self.len >= size.len {
4294 self.len -= size.len;
4295 }
4296 }
4297 Ordering::Equal => {}
4298 Ordering::Greater => {
4299 if self.len == 0 {
4300 self = size;
4301 } else if self.kind == size.kind {
4302 self.len += size.len;
4303 }
4304 }
4305 }
4306 self
4307 }
4308}
4309
4310#[cfg(any(test, feature = "test-support"))]
4311pub struct TestFile {
4312 pub path: Arc<Path>,
4313 pub root_name: String,
4314}
4315
4316#[cfg(any(test, feature = "test-support"))]
4317impl File for TestFile {
4318 fn path(&self) -> &Arc<Path> {
4319 &self.path
4320 }
4321
4322 fn full_path(&self, _: &gpui::AppContext) -> PathBuf {
4323 PathBuf::from(&self.root_name).join(self.path.as_ref())
4324 }
4325
4326 fn as_local(&self) -> Option<&dyn LocalFile> {
4327 None
4328 }
4329
4330 fn mtime(&self) -> Option<SystemTime> {
4331 unimplemented!()
4332 }
4333
4334 fn file_name<'a>(&'a self, _: &'a gpui::AppContext) -> &'a std::ffi::OsStr {
4335 self.path().file_name().unwrap_or(self.root_name.as_ref())
4336 }
4337
4338 fn worktree_id(&self, _: &AppContext) -> WorktreeId {
4339 WorktreeId::from_usize(0)
4340 }
4341
4342 fn is_deleted(&self) -> bool {
4343 unimplemented!()
4344 }
4345
4346 fn as_any(&self) -> &dyn std::any::Any {
4347 unimplemented!()
4348 }
4349
4350 fn to_proto(&self, _: &AppContext) -> rpc::proto::File {
4351 unimplemented!()
4352 }
4353
4354 fn is_private(&self) -> bool {
4355 false
4356 }
4357}
4358
4359pub(crate) fn contiguous_ranges(
4360 values: impl Iterator<Item = u32>,
4361 max_len: usize,
4362) -> impl Iterator<Item = Range<u32>> {
4363 let mut values = values;
4364 let mut current_range: Option<Range<u32>> = None;
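    // Coalesce consecutive values into half-open ranges, starting a new range whenever there is
    // a gap or the current range reaches `max_len`.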
4365 std::iter::from_fn(move || loop {
4366 if let Some(value) = values.next() {
4367 if let Some(range) = &mut current_range {
4368 if value == range.end && range.len() < max_len {
4369 range.end += 1;
4370 continue;
4371 }
4372 }
4373
4374 let prev_range = current_range.clone();
4375 current_range = Some(value..(value + 1));
4376 if prev_range.is_some() {
4377 return prev_range;
4378 }
4379 } else {
4380 return current_range.take();
4381 }
4382 })
4383}
4384
4385#[derive(Default, Debug)]
4386pub struct CharClassifier {
4387 scope: Option<LanguageScope>,
4388 for_completion: bool,
4389 ignore_punctuation: bool,
4390}
4391
4392impl CharClassifier {
4393 pub fn new(scope: Option<LanguageScope>) -> Self {
4394 Self {
4395 scope,
4396 for_completion: false,
4397 ignore_punctuation: false,
4398 }
4399 }
4400
4401 pub fn for_completion(self, for_completion: bool) -> Self {
4402 Self {
4403 for_completion,
4404 ..self
4405 }
4406 }
4407
4408 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4409 Self {
4410 ignore_punctuation,
4411 ..self
4412 }
4413 }
4414
4415 pub fn is_whitespace(&self, c: char) -> bool {
4416 self.kind(c) == CharKind::Whitespace
4417 }
4418
4419 pub fn is_word(&self, c: char) -> bool {
4420 self.kind(c) == CharKind::Word
4421 }
4422
4423 pub fn is_punctuation(&self, c: char) -> bool {
4424 self.kind(c) == CharKind::Punctuation
4425 }
4426
4427 pub fn kind(&self, c: char) -> CharKind {
4428 if c.is_whitespace() {
4429 return CharKind::Whitespace;
4430 } else if c.is_alphanumeric() || c == '_' {
4431 return CharKind::Word;
4432 }
4433
4434 if let Some(scope) = &self.scope {
4435 if let Some(characters) = scope.word_characters() {
4436 if characters.contains(&c) {
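                    // Even when '-' is configured as a word character, treat it as punctuation
                    // outside of completions (unless punctuation is ignored entirely).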
4437 if c == '-' && !self.for_completion && !self.ignore_punctuation {
4438 return CharKind::Punctuation;
4439 }
4440 return CharKind::Word;
4441 }
4442 }
4443 }
4444
4445 if self.ignore_punctuation {
4446 CharKind::Word
4447 } else {
4448 CharKind::Punctuation
4449 }
4450 }
4451}
4452
4453/// Find all of the ranges of whitespace that occur at the ends of lines
4454/// in the given rope.
4455///
4456/// This could also be done with a regex search, but this implementation
4457/// avoids copying text.
4458pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4459 let mut ranges = Vec::new();
4460
4461 let mut offset = 0;
4462 let mut prev_chunk_trailing_whitespace_range = 0..0;
4463 for chunk in rope.chunks() {
4464 let mut prev_line_trailing_whitespace_range = 0..0;
4465 for (i, line) in chunk.split('\n').enumerate() {
4466 let line_end_offset = offset + line.len();
4467 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4468 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4469
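            // Rope chunks can split a line: if this chunk's first line is entirely whitespace, it
            // may continue trailing whitespace that began in the previous chunk, so extend that range.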
4470 if i == 0 && trimmed_line_len == 0 {
4471 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4472 }
4473 if !prev_line_trailing_whitespace_range.is_empty() {
4474 ranges.push(prev_line_trailing_whitespace_range);
4475 }
4476
4477 offset = line_end_offset + 1;
4478 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4479 }
4480
4481 offset -= 1;
4482 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4483 }
4484
4485 if !prev_chunk_trailing_whitespace_range.is_empty() {
4486 ranges.push(prev_chunk_trailing_whitespace_range);
4487 }
4488
4489 ranges
4490}