1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 markdown::ParsedMarkdown,
5 proto, Grammar, Language, LanguageRegistry,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 language_settings::{language_settings, IndentGuideSettings, LanguageSettings},
10 markdown::parse_markdown,
11 outline::OutlineItem,
12 syntax_map::{
13 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
14 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
15 },
16 task_context::RunnableRange,
17 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
18};
19use anyhow::{anyhow, Context, Result};
20use async_watch as watch;
21use clock::Lamport;
22pub use clock::ReplicaId;
23use collections::HashMap;
24use futures::channel::oneshot;
25use gpui::{
26 AnyElement, AppContext, Context as _, EventEmitter, HighlightStyle, Model, ModelContext,
27 Pixels, Task, TaskLabel, WindowContext,
28};
29use lsp::LanguageServerId;
30use parking_lot::Mutex;
31use schemars::JsonSchema;
32use serde::{Deserialize, Serialize};
33use serde_json::Value;
34use settings::WorktreeId;
35use similar::{ChangeTag, TextDiff};
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::BTreeMap,
44 ffi::OsStr,
45 fmt,
46 future::Future,
47 iter::{self, Iterator, Peekable},
48 mem,
49 ops::{Deref, DerefMut, Range},
50 path::{Path, PathBuf},
51 str,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant, SystemTime},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
62 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
63 Transaction, TransactionId, Unclipped,
64};
65use theme::SyntaxTheme;
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{debug_panic, RangeExt};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
79/// Indicate whether a [`Buffer`] has permissions to edit.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90#[derive(Clone)]
91enum BufferDiffBase {
92 Git(Rope),
93 PastBufferVersion {
94 buffer: Model<Buffer>,
95 rope: Rope,
96 merged_operations: Vec<Lamport>,
97 },
98}
99
100/// An in-memory representation of a source code file, including its text,
101/// syntax trees, git status, and diagnostics.
102pub struct Buffer {
103 text: TextBuffer,
104 diff_base: Option<BufferDiffBase>,
105 git_diff: git::diff::BufferDiff,
106 file: Option<Arc<dyn File>>,
107 /// The mtime of the file when this buffer was last loaded from
108 /// or saved to disk.
109 saved_mtime: Option<SystemTime>,
110 /// The version vector when this buffer was last loaded from
111 /// or saved to disk.
112 saved_version: clock::Global,
113 preview_version: clock::Global,
114 transaction_depth: usize,
115 was_dirty_before_starting_transaction: Option<bool>,
116 reload_task: Option<Task<Result<()>>>,
117 language: Option<Arc<Language>>,
118 autoindent_requests: Vec<Arc<AutoindentRequest>>,
119 pending_autoindent: Option<Task<()>>,
120 sync_parse_timeout: Duration,
121 syntax_map: Mutex<SyntaxMap>,
122 parsing_in_background: bool,
123 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
124 non_text_state_update_count: usize,
125 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
126 remote_selections: TreeMap<ReplicaId, SelectionSet>,
127 diagnostics_timestamp: clock::Lamport,
128 completion_triggers: Vec<String>,
129 completion_triggers_timestamp: clock::Lamport,
130 deferred_ops: OperationQueue<Operation>,
131 capability: Capability,
132 has_conflict: bool,
133 diff_base_version: usize,
134 /// Memoize calls to has_changes_since(saved_version).
135 /// The contents of a cell are (self.version, has_changes) at the time of a last call.
136 has_unsaved_edits: Cell<(clock::Global, bool)>,
137 _subscriptions: Vec<gpui::Subscription>,
138}
139
140#[derive(Copy, Clone, Debug, PartialEq, Eq)]
141pub enum ParseStatus {
142 Idle,
143 Parsing,
144}
145
146/// An immutable, cheaply cloneable representation of a fixed
147/// state of a buffer.
148pub struct BufferSnapshot {
149 text: text::BufferSnapshot,
150 git_diff: git::diff::BufferDiff,
151 pub(crate) syntax: SyntaxSnapshot,
152 file: Option<Arc<dyn File>>,
153 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
154 remote_selections: TreeMap<ReplicaId, SelectionSet>,
155 language: Option<Arc<Language>>,
156 non_text_state_update_count: usize,
157}
158
159/// The kind and amount of indentation in a particular line. For now,
160/// assumes that indentation is all the same character.
161#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
162pub struct IndentSize {
163 /// The number of bytes that comprise the indentation.
164 pub len: u32,
165 /// The kind of whitespace used for indentation.
166 pub kind: IndentKind,
167}
168
169/// A whitespace character that's used for indentation.
170#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
171pub enum IndentKind {
172 /// An ASCII space character.
173 #[default]
174 Space,
175 /// An ASCII tab character.
176 Tab,
177}
178
179/// The shape of a selection cursor.
180#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
181#[serde(rename_all = "snake_case")]
182pub enum CursorShape {
183 /// A vertical bar
184 #[default]
185 Bar,
186 /// A block that surrounds the following character
187 Block,
188 /// An underline that runs along the following character
189 Underline,
190 /// A box drawn around the following character
191 Hollow,
192}
193
194#[derive(Clone, Debug)]
195struct SelectionSet {
196 line_mode: bool,
197 cursor_shape: CursorShape,
198 selections: Arc<[Selection<Anchor>]>,
199 lamport_timestamp: clock::Lamport,
200}
201
202/// A diagnostic associated with a certain range of a buffer.
203#[derive(Clone, Debug, PartialEq, Eq)]
204pub struct Diagnostic {
205 /// The name of the service that produced this diagnostic.
206 pub source: Option<String>,
207 /// A machine-readable code that identifies this diagnostic.
208 pub code: Option<String>,
209 /// Whether this diagnostic is a hint, warning, or error.
210 pub severity: DiagnosticSeverity,
211 /// The human-readable message associated with this diagnostic.
212 pub message: String,
213 /// An id that identifies the group to which this diagnostic belongs.
214 ///
215 /// When a language server produces a diagnostic with
216 /// one or more associated diagnostics, those diagnostics are all
217 /// assigned a single group ID.
218 pub group_id: usize,
219 /// Whether this diagnostic is the primary diagnostic for its group.
220 ///
221 /// In a given group, the primary diagnostic is the top-level diagnostic
222 /// returned by the language server. The non-primary diagnostics are the
223 /// associated diagnostics.
224 pub is_primary: bool,
225 /// Whether this diagnostic is considered to originate from an analysis of
226 /// files on disk, as opposed to any unsaved buffer contents. This is a
227 /// property of a given diagnostic source, and is configured for a given
228 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
229 /// for the language server.
230 pub is_disk_based: bool,
231 /// Whether this diagnostic marks unnecessary code.
232 pub is_unnecessary: bool,
233 /// Data from language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
234 pub data: Option<Value>,
235}
236
237/// TODO - move this into the `project` crate and make it private.
238pub async fn prepare_completion_documentation(
239 documentation: &lsp::Documentation,
240 language_registry: &Arc<LanguageRegistry>,
241 language: Option<Arc<Language>>,
242) -> Documentation {
243 match documentation {
244 lsp::Documentation::String(text) => {
245 if text.lines().count() <= 1 {
246 Documentation::SingleLine(text.clone())
247 } else {
248 Documentation::MultiLinePlainText(text.clone())
249 }
250 }
251
252 lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
253 lsp::MarkupKind::PlainText => {
254 if value.lines().count() <= 1 {
255 Documentation::SingleLine(value.clone())
256 } else {
257 Documentation::MultiLinePlainText(value.clone())
258 }
259 }
260
261 lsp::MarkupKind::Markdown => {
262 let parsed = parse_markdown(value, language_registry, language).await;
263 Documentation::MultiLineMarkdown(parsed)
264 }
265 },
266 }
267}
268
269/// Documentation associated with a [`Completion`].
270#[derive(Clone, Debug)]
271pub enum Documentation {
272 /// There is no documentation for this completion.
273 Undocumented,
274 /// A single line of documentation.
275 SingleLine(String),
276 /// Multiple lines of plain text documentation.
277 MultiLinePlainText(String),
278 /// Markdown documentation.
279 MultiLineMarkdown(ParsedMarkdown),
280}
281
282/// An operation used to synchronize this buffer with its other replicas.
283#[derive(Clone, Debug, PartialEq)]
284pub enum Operation {
285 /// A text operation.
286 Buffer(text::Operation),
287
288 /// An update to the buffer's diagnostics.
289 UpdateDiagnostics {
290 /// The id of the language server that produced the new diagnostics.
291 server_id: LanguageServerId,
292 /// The diagnostics.
293 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
294 /// The buffer's lamport timestamp.
295 lamport_timestamp: clock::Lamport,
296 },
297
298 /// An update to the most recent selections in this buffer.
299 UpdateSelections {
300 /// The selections.
301 selections: Arc<[Selection<Anchor>]>,
302 /// The buffer's lamport timestamp.
303 lamport_timestamp: clock::Lamport,
304 /// Whether the selections are in 'line mode'.
305 line_mode: bool,
306 /// The [`CursorShape`] associated with these selections.
307 cursor_shape: CursorShape,
308 },
309
310 /// An update to the characters that should trigger autocompletion
311 /// for this buffer.
312 UpdateCompletionTriggers {
313 /// The characters that trigger autocompletion.
314 triggers: Vec<String>,
315 /// The buffer's lamport timestamp.
316 lamport_timestamp: clock::Lamport,
317 },
318}
319
320/// An event that occurs in a buffer.
321#[derive(Clone, Debug, PartialEq)]
322pub enum BufferEvent {
323 /// The buffer was changed in a way that must be
324 /// propagated to its other replicas.
325 Operation {
326 operation: Operation,
327 is_local: bool,
328 },
329 /// The buffer was edited.
330 Edited,
331 /// The buffer's `dirty` bit changed.
332 DirtyChanged,
333 /// The buffer was saved.
334 Saved,
335 /// The buffer's file was changed on disk.
336 FileHandleChanged,
337 /// The buffer was reloaded.
338 Reloaded,
339 /// The buffer is in need of a reload
340 ReloadNeeded,
341 /// The buffer's diff_base changed.
342 DiffBaseChanged,
343 /// Buffer's excerpts for a certain diff base were recalculated.
344 DiffUpdated,
345 /// The buffer's language was changed.
346 LanguageChanged,
347 /// The buffer's syntax trees were updated.
348 Reparsed,
349 /// The buffer's diagnostics were updated.
350 DiagnosticsUpdated,
351 /// The buffer gained or lost editing capabilities.
352 CapabilityChanged,
353 /// The buffer was explicitly requested to close.
354 Closed,
355 /// The buffer was discarded when closing.
356 Discarded,
357}
358
359/// The file associated with a buffer.
360pub trait File: Send + Sync {
361 /// Returns the [`LocalFile`] associated with this file, if the
362 /// file is local.
363 fn as_local(&self) -> Option<&dyn LocalFile>;
364
365 /// Returns whether this file is local.
366 fn is_local(&self) -> bool {
367 self.as_local().is_some()
368 }
369
370 /// Returns the file's mtime.
371 fn mtime(&self) -> Option<SystemTime>;
372
373 /// Returns the path of this file relative to the worktree's root directory.
374 fn path(&self) -> &Arc<Path>;
375
376 /// Returns the path of this file relative to the worktree's parent directory (this means it
377 /// includes the name of the worktree's root folder).
378 fn full_path(&self, cx: &AppContext) -> PathBuf;
379
380 /// Returns the last component of this handle's absolute path. If this handle refers to the root
381 /// of its worktree, then this method will return the name of the worktree itself.
382 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
383
384 /// Returns the id of the worktree to which this file belongs.
385 ///
386 /// This is needed for looking up project-specific settings.
387 fn worktree_id(&self, cx: &AppContext) -> WorktreeId;
388
389 /// Returns whether the file has been deleted.
390 fn is_deleted(&self) -> bool;
391
392 /// Returns whether the file existed on disk at one point
393 fn is_created(&self) -> bool {
394 self.mtime().is_some()
395 }
396
397 /// Converts this file into an [`Any`] trait object.
398 fn as_any(&self) -> &dyn Any;
399
400 /// Converts this file into a protobuf message.
401 fn to_proto(&self, cx: &AppContext) -> rpc::proto::File;
402
403 /// Return whether Zed considers this to be a private file.
404 fn is_private(&self) -> bool;
405}
406
407/// The file associated with a buffer, in the case where the file is on the local disk.
408pub trait LocalFile: File {
409 /// Returns the absolute path of this file
410 fn abs_path(&self, cx: &AppContext) -> PathBuf;
411
412 /// Loads the file's contents from disk.
413 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
414
415 /// Returns true if the file should not be shared with collaborators.
416 fn is_private(&self, _: &AppContext) -> bool {
417 false
418 }
419}
420
421/// The auto-indent behavior associated with an editing operation.
422/// For some editing operations, each affected line of text has its
423/// indentation recomputed. For other operations, the entire block
424/// of edited text is adjusted uniformly.
425#[derive(Clone, Debug)]
426pub enum AutoindentMode {
427 /// Indent each line of inserted text.
428 EachLine,
429 /// Apply the same indentation adjustment to all of the lines
430 /// in a given insertion.
431 Block {
432 /// The original indentation level of the first line of each
433 /// insertion, if it has been copied.
434 original_indent_columns: Vec<u32>,
435 },
436}
437
438#[derive(Clone)]
439struct AutoindentRequest {
440 before_edit: BufferSnapshot,
441 entries: Vec<AutoindentRequestEntry>,
442 is_block_mode: bool,
443}
444
445#[derive(Debug, Clone)]
446struct AutoindentRequestEntry {
447 /// A range of the buffer whose indentation should be adjusted.
448 range: Range<Anchor>,
449 /// Whether or not these lines should be considered brand new, for the
450 /// purpose of auto-indent. When text is not new, its indentation will
451 /// only be adjusted if the suggested indentation level has *changed*
452 /// since the edit was made.
453 first_line_is_new: bool,
454 indent_size: IndentSize,
455 original_indent_column: Option<u32>,
456}
457
458#[derive(Debug)]
459struct IndentSuggestion {
460 basis_row: u32,
461 delta: Ordering,
462 within_error: bool,
463}
464
465struct BufferChunkHighlights<'a> {
466 captures: SyntaxMapCaptures<'a>,
467 next_capture: Option<SyntaxMapCapture<'a>>,
468 stack: Vec<(usize, HighlightId)>,
469 highlight_maps: Vec<HighlightMap>,
470}
471
472/// An iterator that yields chunks of a buffer's text, along with their
473/// syntax highlights and diagnostic status.
474pub struct BufferChunks<'a> {
475 buffer_snapshot: Option<&'a BufferSnapshot>,
476 range: Range<usize>,
477 chunks: text::Chunks<'a>,
478 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
479 error_depth: usize,
480 warning_depth: usize,
481 information_depth: usize,
482 hint_depth: usize,
483 unnecessary_depth: usize,
484 highlights: Option<BufferChunkHighlights<'a>>,
485}
486
487/// A chunk of a buffer's text, along with its syntax highlight and
488/// diagnostic status.
489#[derive(Clone, Debug, Default)]
490pub struct Chunk<'a> {
491 /// The text of the chunk.
492 pub text: &'a str,
493 /// The syntax highlighting style of the chunk.
494 pub syntax_highlight_id: Option<HighlightId>,
495 /// The highlight style that has been applied to this chunk in
496 /// the editor.
497 pub highlight_style: Option<HighlightStyle>,
498 /// The severity of diagnostic associated with this chunk, if any.
499 pub diagnostic_severity: Option<DiagnosticSeverity>,
500 /// Whether this chunk of text is marked as unnecessary.
501 pub is_unnecessary: bool,
502 /// Whether this chunk of text was originally a tab character.
503 pub is_tab: bool,
504 /// Whether this chunk of text is an invisible character.
505 pub is_invisible: bool,
506 /// An optional recipe for how the chunk should be presented.
507 pub renderer: Option<ChunkRenderer>,
508}
509
510/// A recipe for how the chunk should be presented.
511#[derive(Clone)]
512pub struct ChunkRenderer {
513 /// creates a custom element to represent this chunk.
514 pub render: Arc<dyn Send + Sync + Fn(&mut ChunkRendererContext) -> AnyElement>,
515 /// If true, the element is constrained to the shaped width of the text.
516 pub constrain_width: bool,
517}
518
519pub struct ChunkRendererContext<'a, 'b> {
520 pub context: &'a mut WindowContext<'b>,
521 pub max_width: Pixels,
522}
523
524impl fmt::Debug for ChunkRenderer {
525 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
526 f.debug_struct("ChunkRenderer")
527 .field("constrain_width", &self.constrain_width)
528 .finish()
529 }
530}
531
532impl<'a, 'b> Deref for ChunkRendererContext<'a, 'b> {
533 type Target = WindowContext<'b>;
534
535 fn deref(&self) -> &Self::Target {
536 self.context
537 }
538}
539
540impl<'a, 'b> DerefMut for ChunkRendererContext<'a, 'b> {
541 fn deref_mut(&mut self) -> &mut Self::Target {
542 self.context
543 }
544}
545
546/// A set of edits to a given version of a buffer, computed asynchronously.
547#[derive(Debug)]
548pub struct Diff {
549 pub(crate) base_version: clock::Global,
550 line_ending: LineEnding,
551 edits: Vec<(Range<usize>, Arc<str>)>,
552}
553
554#[derive(Clone, Copy)]
555pub(crate) struct DiagnosticEndpoint {
556 offset: usize,
557 is_start: bool,
558 severity: DiagnosticSeverity,
559 is_unnecessary: bool,
560}
561
562/// A class of characters, used for characterizing a run of text.
563#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
564pub enum CharKind {
565 /// Whitespace.
566 Whitespace,
567 /// Punctuation.
568 Punctuation,
569 /// Word.
570 Word,
571}
572
573/// A runnable is a set of data about a region that could be resolved into a task
574pub struct Runnable {
575 pub tags: SmallVec<[RunnableTag; 1]>,
576 pub language: Arc<Language>,
577 pub buffer: BufferId,
578}
579
580#[derive(Clone, Debug, PartialEq)]
581pub struct IndentGuide {
582 pub buffer_id: BufferId,
583 pub start_row: BufferRow,
584 pub end_row: BufferRow,
585 pub depth: u32,
586 pub tab_size: u32,
587 pub settings: IndentGuideSettings,
588}
589
590impl IndentGuide {
591 pub fn indent_level(&self) -> u32 {
592 self.depth * self.tab_size
593 }
594}
595
596impl Buffer {
597 /// Create a new buffer with the given base text.
598 pub fn local<T: Into<String>>(base_text: T, cx: &ModelContext<Self>) -> Self {
599 Self::build(
600 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
601 None,
602 None,
603 Capability::ReadWrite,
604 )
605 }
606
607 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
608 pub fn local_normalized(
609 base_text_normalized: Rope,
610 line_ending: LineEnding,
611 cx: &ModelContext<Self>,
612 ) -> Self {
613 Self::build(
614 TextBuffer::new_normalized(
615 0,
616 cx.entity_id().as_non_zero_u64().into(),
617 line_ending,
618 base_text_normalized,
619 ),
620 None,
621 None,
622 Capability::ReadWrite,
623 )
624 }
625
626 /// Create a new buffer that is a replica of a remote buffer.
627 pub fn remote(
628 remote_id: BufferId,
629 replica_id: ReplicaId,
630 capability: Capability,
631 base_text: impl Into<String>,
632 ) -> Self {
633 Self::build(
634 TextBuffer::new(replica_id, remote_id, base_text.into()),
635 None,
636 None,
637 capability,
638 )
639 }
640
641 /// Create a new buffer that is a replica of a remote buffer, populating its
642 /// state from the given protobuf message.
643 pub fn from_proto(
644 replica_id: ReplicaId,
645 capability: Capability,
646 message: proto::BufferState,
647 file: Option<Arc<dyn File>>,
648 ) -> Result<Self> {
649 let buffer_id = BufferId::new(message.id)
650 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
651 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
652 let mut this = Self::build(buffer, message.diff_base, file, capability);
653 this.text.set_line_ending(proto::deserialize_line_ending(
654 rpc::proto::LineEnding::from_i32(message.line_ending)
655 .ok_or_else(|| anyhow!("missing line_ending"))?,
656 ));
657 this.saved_version = proto::deserialize_version(&message.saved_version);
658 this.saved_mtime = message.saved_mtime.map(|time| time.into());
659 Ok(this)
660 }
661
662 /// Serialize the buffer's state to a protobuf message.
663 pub fn to_proto(&self, cx: &AppContext) -> proto::BufferState {
664 proto::BufferState {
665 id: self.remote_id().into(),
666 file: self.file.as_ref().map(|f| f.to_proto(cx)),
667 base_text: self.base_text().to_string(),
668 diff_base: self.diff_base().as_ref().map(|h| h.to_string()),
669 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
670 saved_version: proto::serialize_version(&self.saved_version),
671 saved_mtime: self.saved_mtime.map(|time| time.into()),
672 }
673 }
674
675 /// Serialize as protobufs all of the changes to the buffer since the given version.
676 pub fn serialize_ops(
677 &self,
678 since: Option<clock::Global>,
679 cx: &AppContext,
680 ) -> Task<Vec<proto::Operation>> {
681 let mut operations = Vec::new();
682 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
683
684 operations.extend(self.remote_selections.iter().map(|(_, set)| {
685 proto::serialize_operation(&Operation::UpdateSelections {
686 selections: set.selections.clone(),
687 lamport_timestamp: set.lamport_timestamp,
688 line_mode: set.line_mode,
689 cursor_shape: set.cursor_shape,
690 })
691 }));
692
693 for (server_id, diagnostics) in &self.diagnostics {
694 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
695 lamport_timestamp: self.diagnostics_timestamp,
696 server_id: *server_id,
697 diagnostics: diagnostics.iter().cloned().collect(),
698 }));
699 }
700
701 operations.push(proto::serialize_operation(
702 &Operation::UpdateCompletionTriggers {
703 triggers: self.completion_triggers.clone(),
704 lamport_timestamp: self.completion_triggers_timestamp,
705 },
706 ));
707
708 let text_operations = self.text.operations().clone();
709 cx.background_executor().spawn(async move {
710 let since = since.unwrap_or_default();
711 operations.extend(
712 text_operations
713 .iter()
714 .filter(|(_, op)| !since.observed(op.timestamp()))
715 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
716 );
717 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
718 operations
719 })
720 }
721
722 /// Assign a language to the buffer, returning the buffer.
723 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
724 self.set_language(Some(language), cx);
725 self
726 }
727
728 /// Returns the [`Capability`] of this buffer.
729 pub fn capability(&self) -> Capability {
730 self.capability
731 }
732
733 /// Whether this buffer can only be read.
734 pub fn read_only(&self) -> bool {
735 self.capability == Capability::ReadOnly
736 }
737
738 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
739 pub fn build(
740 buffer: TextBuffer,
741 diff_base: Option<String>,
742 file: Option<Arc<dyn File>>,
743 capability: Capability,
744 ) -> Self {
745 let saved_mtime = file.as_ref().and_then(|file| file.mtime());
746 let snapshot = buffer.snapshot();
747 let git_diff = git::diff::BufferDiff::new(&snapshot);
748 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
749 Self {
750 saved_mtime,
751 saved_version: buffer.version(),
752 preview_version: buffer.version(),
753 reload_task: None,
754 transaction_depth: 0,
755 was_dirty_before_starting_transaction: None,
756 has_unsaved_edits: Cell::new((buffer.version(), false)),
757 text: buffer,
758 diff_base: diff_base.map(|mut raw_diff_base| {
759 LineEnding::normalize(&mut raw_diff_base);
760 BufferDiffBase::Git(Rope::from(raw_diff_base))
761 }),
762 diff_base_version: 0,
763 git_diff,
764 file,
765 capability,
766 syntax_map,
767 parsing_in_background: false,
768 non_text_state_update_count: 0,
769 sync_parse_timeout: Duration::from_millis(1),
770 parse_status: async_watch::channel(ParseStatus::Idle),
771 autoindent_requests: Default::default(),
772 pending_autoindent: Default::default(),
773 language: None,
774 remote_selections: Default::default(),
775 diagnostics: Default::default(),
776 diagnostics_timestamp: Default::default(),
777 completion_triggers: Default::default(),
778 completion_triggers_timestamp: Default::default(),
779 deferred_ops: OperationQueue::new(),
780 has_conflict: false,
781 _subscriptions: Vec::new(),
782 }
783 }
784
785 /// Retrieve a snapshot of the buffer's current state. This is computationally
786 /// cheap, and allows reading from the buffer on a background thread.
787 pub fn snapshot(&self) -> BufferSnapshot {
788 let text = self.text.snapshot();
789 let mut syntax_map = self.syntax_map.lock();
790 syntax_map.interpolate(&text);
791 let syntax = syntax_map.snapshot();
792
793 BufferSnapshot {
794 text,
795 syntax,
796 git_diff: self.git_diff.clone(),
797 file: self.file.clone(),
798 remote_selections: self.remote_selections.clone(),
799 diagnostics: self.diagnostics.clone(),
800 language: self.language.clone(),
801 non_text_state_update_count: self.non_text_state_update_count,
802 }
803 }
804
805 pub fn branch(&mut self, cx: &mut ModelContext<Self>) -> Model<Self> {
806 let this = cx.handle();
807 cx.new_model(|cx| {
808 let mut branch = Self {
809 diff_base: Some(BufferDiffBase::PastBufferVersion {
810 buffer: this.clone(),
811 rope: self.as_rope().clone(),
812 merged_operations: Default::default(),
813 }),
814 language: self.language.clone(),
815 has_conflict: self.has_conflict,
816 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
817 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
818 ..Self::build(
819 self.text.branch(),
820 None,
821 self.file.clone(),
822 self.capability(),
823 )
824 };
825 if let Some(language_registry) = self.language_registry() {
826 branch.set_language_registry(language_registry);
827 }
828
829 // Reparse the branch buffer so that we get syntax highlighting immediately.
830 branch.reparse(cx);
831
832 branch
833 })
834 }
835
836 /// Applies all of the changes in this buffer that intersect any of the
837 /// given `ranges` to its base buffer.
838 ///
839 /// If `ranges` is empty, then all changes will be applied. This buffer must
840 /// be a branch buffer to call this method.
841 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut ModelContext<Self>) {
842 let Some(base_buffer) = self.diff_base_buffer() else {
843 debug_panic!("not a branch buffer");
844 return;
845 };
846
847 let mut ranges = if ranges.is_empty() {
848 &[0..usize::MAX]
849 } else {
850 ranges.as_slice()
851 }
852 .into_iter()
853 .peekable();
854
855 let mut edits = Vec::new();
856 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
857 let mut is_included = false;
858 while let Some(range) = ranges.peek() {
859 if range.end < edit.new.start {
860 ranges.next().unwrap();
861 } else {
862 if range.start <= edit.new.end {
863 is_included = true;
864 }
865 break;
866 }
867 }
868
869 if is_included {
870 edits.push((
871 edit.old.clone(),
872 self.text_for_range(edit.new.clone()).collect::<String>(),
873 ));
874 }
875 }
876
877 let operation = base_buffer.update(cx, |base_buffer, cx| {
878 cx.emit(BufferEvent::DiffBaseChanged);
879 base_buffer.edit(edits, None, cx)
880 });
881
882 if let Some(operation) = operation {
883 if let Some(BufferDiffBase::PastBufferVersion {
884 merged_operations, ..
885 }) = &mut self.diff_base
886 {
887 merged_operations.push(operation);
888 }
889 }
890 }
891
892 fn on_base_buffer_event(
893 &mut self,
894 _: Model<Buffer>,
895 event: &BufferEvent,
896 cx: &mut ModelContext<Self>,
897 ) {
898 let BufferEvent::Operation { operation, .. } = event else {
899 return;
900 };
901 let Some(BufferDiffBase::PastBufferVersion {
902 merged_operations, ..
903 }) = &mut self.diff_base
904 else {
905 return;
906 };
907
908 let mut operation_to_undo = None;
909 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
910 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
911 merged_operations.remove(ix);
912 operation_to_undo = Some(operation.timestamp);
913 }
914 }
915
916 self.apply_ops([operation.clone()], cx);
917
918 if let Some(timestamp) = operation_to_undo {
919 let counts = [(timestamp, u32::MAX)].into_iter().collect();
920 self.undo_operations(counts, cx);
921 }
922
923 self.diff_base_version += 1;
924 }
925
926 #[cfg(test)]
927 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
928 &self.text
929 }
930
931 /// Retrieve a snapshot of the buffer's raw text, without any
932 /// language-related state like the syntax tree or diagnostics.
933 pub fn text_snapshot(&self) -> text::BufferSnapshot {
934 self.text.snapshot()
935 }
936
937 /// The file associated with the buffer, if any.
938 pub fn file(&self) -> Option<&Arc<dyn File>> {
939 self.file.as_ref()
940 }
941
942 /// The version of the buffer that was last saved or reloaded from disk.
943 pub fn saved_version(&self) -> &clock::Global {
944 &self.saved_version
945 }
946
947 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
948 pub fn saved_mtime(&self) -> Option<SystemTime> {
949 self.saved_mtime
950 }
951
952 /// Assign a language to the buffer.
953 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
954 self.non_text_state_update_count += 1;
955 self.syntax_map.lock().clear(&self.text);
956 self.language = language;
957 self.reparse(cx);
958 cx.emit(BufferEvent::LanguageChanged);
959 }
960
961 /// Assign a language registry to the buffer. This allows the buffer to retrieve
962 /// other languages if parts of the buffer are written in different languages.
963 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
964 self.syntax_map
965 .lock()
966 .set_language_registry(language_registry);
967 }
968
969 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
970 self.syntax_map.lock().language_registry()
971 }
972
973 /// Assign the buffer a new [`Capability`].
974 pub fn set_capability(&mut self, capability: Capability, cx: &mut ModelContext<Self>) {
975 self.capability = capability;
976 cx.emit(BufferEvent::CapabilityChanged)
977 }
978
979 /// This method is called to signal that the buffer has been saved.
980 pub fn did_save(
981 &mut self,
982 version: clock::Global,
983 mtime: Option<SystemTime>,
984 cx: &mut ModelContext<Self>,
985 ) {
986 self.saved_version = version;
987 self.has_unsaved_edits
988 .set((self.saved_version().clone(), false));
989 self.has_conflict = false;
990 self.saved_mtime = mtime;
991 cx.emit(BufferEvent::Saved);
992 cx.notify();
993 }
994
995 /// This method is called to signal that the buffer has been discarded.
996 pub fn discarded(&self, cx: &mut ModelContext<Self>) {
997 cx.emit(BufferEvent::Discarded);
998 cx.notify();
999 }
1000
1001 /// Reloads the contents of the buffer from disk.
1002 pub fn reload(&mut self, cx: &ModelContext<Self>) -> oneshot::Receiver<Option<Transaction>> {
1003 let (tx, rx) = futures::channel::oneshot::channel();
1004 let prev_version = self.text.version();
1005 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
1006 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
1007 let file = this.file.as_ref()?.as_local()?;
1008 Some((file.mtime(), file.load(cx)))
1009 })?
1010 else {
1011 return Ok(());
1012 };
1013
1014 let new_text = new_text.await?;
1015 let diff = this
1016 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
1017 .await;
1018 this.update(&mut cx, |this, cx| {
1019 if this.version() == diff.base_version {
1020 this.finalize_last_transaction();
1021 this.apply_diff(diff, cx);
1022 tx.send(this.finalize_last_transaction().cloned()).ok();
1023 this.has_conflict = false;
1024 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1025 } else {
1026 if !diff.edits.is_empty()
1027 || this
1028 .edits_since::<usize>(&diff.base_version)
1029 .next()
1030 .is_some()
1031 {
1032 this.has_conflict = true;
1033 }
1034
1035 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1036 }
1037
1038 this.reload_task.take();
1039 })
1040 }));
1041 rx
1042 }
1043
1044 /// This method is called to signal that the buffer has been reloaded.
1045 pub fn did_reload(
1046 &mut self,
1047 version: clock::Global,
1048 line_ending: LineEnding,
1049 mtime: Option<SystemTime>,
1050 cx: &mut ModelContext<Self>,
1051 ) {
1052 self.saved_version = version;
1053 self.has_unsaved_edits
1054 .set((self.saved_version.clone(), false));
1055 self.text.set_line_ending(line_ending);
1056 self.saved_mtime = mtime;
1057 cx.emit(BufferEvent::Reloaded);
1058 cx.notify();
1059 }
1060
1061 /// Updates the [`File`] backing this buffer. This should be called when
1062 /// the file has changed or has been deleted.
1063 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut ModelContext<Self>) {
1064 let mut file_changed = false;
1065
1066 if let Some(old_file) = self.file.as_ref() {
1067 if new_file.path() != old_file.path() {
1068 file_changed = true;
1069 }
1070
1071 if new_file.is_deleted() {
1072 if !old_file.is_deleted() {
1073 file_changed = true;
1074 if !self.is_dirty() {
1075 cx.emit(BufferEvent::DirtyChanged);
1076 }
1077 }
1078 } else {
1079 let new_mtime = new_file.mtime();
1080 if new_mtime != old_file.mtime() {
1081 file_changed = true;
1082
1083 if !self.is_dirty() {
1084 cx.emit(BufferEvent::ReloadNeeded);
1085 }
1086 }
1087 }
1088 } else {
1089 file_changed = true;
1090 };
1091
1092 self.file = Some(new_file);
1093 if file_changed {
1094 self.non_text_state_update_count += 1;
1095 cx.emit(BufferEvent::FileHandleChanged);
1096 cx.notify();
1097 }
1098 }
1099
1100 /// Returns the current diff base, see [`Buffer::set_diff_base`].
1101 pub fn diff_base(&self) -> Option<&Rope> {
1102 match self.diff_base.as_ref()? {
1103 BufferDiffBase::Git(rope) | BufferDiffBase::PastBufferVersion { rope, .. } => {
1104 Some(rope)
1105 }
1106 }
1107 }
1108
1109 /// Sets the text that will be used to compute a Git diff
1110 /// against the buffer text.
1111 pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &ModelContext<Self>) {
1112 self.diff_base = diff_base.map(|mut raw_diff_base| {
1113 LineEnding::normalize(&mut raw_diff_base);
1114 BufferDiffBase::Git(Rope::from(raw_diff_base))
1115 });
1116 self.diff_base_version += 1;
1117 if let Some(recalc_task) = self.recalculate_diff(cx) {
1118 cx.spawn(|buffer, mut cx| async move {
1119 recalc_task.await;
1120 buffer
1121 .update(&mut cx, |_, cx| {
1122 cx.emit(BufferEvent::DiffBaseChanged);
1123 })
1124 .ok();
1125 })
1126 .detach();
1127 }
1128 }
1129
1130 /// Returns a number, unique per diff base set to the buffer.
1131 pub fn diff_base_version(&self) -> usize {
1132 self.diff_base_version
1133 }
1134
1135 pub fn diff_base_buffer(&self) -> Option<Model<Self>> {
1136 match self.diff_base.as_ref()? {
1137 BufferDiffBase::Git(_) => None,
1138 BufferDiffBase::PastBufferVersion { buffer, .. } => Some(buffer.clone()),
1139 }
1140 }
1141
1142 /// Recomputes the diff.
1143 pub fn recalculate_diff(&self, cx: &ModelContext<Self>) -> Option<Task<()>> {
1144 let diff_base_rope = match self.diff_base.as_ref()? {
1145 BufferDiffBase::Git(rope) => rope.clone(),
1146 BufferDiffBase::PastBufferVersion { buffer, .. } => buffer.read(cx).as_rope().clone(),
1147 };
1148
1149 let snapshot = self.snapshot();
1150 let mut diff = self.git_diff.clone();
1151 let diff = cx.background_executor().spawn(async move {
1152 diff.update(&diff_base_rope, &snapshot).await;
1153 (diff, diff_base_rope)
1154 });
1155
1156 Some(cx.spawn(|this, mut cx| async move {
1157 let (buffer_diff, diff_base_rope) = diff.await;
1158 this.update(&mut cx, |this, cx| {
1159 this.git_diff = buffer_diff;
1160 this.non_text_state_update_count += 1;
1161 if let Some(BufferDiffBase::PastBufferVersion { rope, .. }) = &mut this.diff_base {
1162 *rope = diff_base_rope;
1163 }
1164 cx.emit(BufferEvent::DiffUpdated);
1165 })
1166 .ok();
1167 }))
1168 }
1169
1170 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1171 pub fn language(&self) -> Option<&Arc<Language>> {
1172 self.language.as_ref()
1173 }
1174
1175 /// Returns the [`Language`] at the given location.
1176 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1177 let offset = position.to_offset(self);
1178 self.syntax_map
1179 .lock()
1180 .layers_for_range(offset..offset, &self.text, false)
1181 .last()
1182 .map(|info| info.language.clone())
1183 .or_else(|| self.language.clone())
1184 }
1185
1186 /// An integer version number that accounts for all updates besides
1187 /// the buffer's text itself (which is versioned via a version vector).
1188 pub fn non_text_state_update_count(&self) -> usize {
1189 self.non_text_state_update_count
1190 }
1191
1192 /// Whether the buffer is being parsed in the background.
1193 #[cfg(any(test, feature = "test-support"))]
1194 pub fn is_parsing(&self) -> bool {
1195 self.parsing_in_background
1196 }
1197
1198 /// Indicates whether the buffer contains any regions that may be
1199 /// written in a language that hasn't been loaded yet.
1200 pub fn contains_unknown_injections(&self) -> bool {
1201 self.syntax_map.lock().contains_unknown_injections()
1202 }
1203
1204 #[cfg(test)]
1205 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1206 self.sync_parse_timeout = timeout;
1207 }
1208
1209 /// Called after an edit to synchronize the buffer's main parse tree with
1210 /// the buffer's new underlying state.
1211 ///
1212 /// Locks the syntax map and interpolates the edits since the last reparse
1213 /// into the foreground syntax tree.
1214 ///
1215 /// Then takes a stable snapshot of the syntax map before unlocking it.
1216 /// The snapshot with the interpolated edits is sent to a background thread,
1217 /// where we ask Tree-sitter to perform an incremental parse.
1218 ///
1219 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1220 /// waiting on the parse to complete. As soon as it completes, we proceed
1221 /// synchronously, unless a 1ms timeout elapses.
1222 ///
1223 /// If we time out waiting on the parse, we spawn a second task waiting
1224 /// until the parse does complete and return with the interpolated tree still
1225 /// in the foreground. When the background parse completes, call back into
1226 /// the main thread and assign the foreground parse state.
1227 ///
1228 /// If the buffer or grammar changed since the start of the background parse,
1229 /// initiate an additional reparse recursively. To avoid concurrent parses
1230 /// for the same buffer, we only initiate a new parse if we are not already
1231 /// parsing in the background.
1232 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1233 if self.parsing_in_background {
1234 return;
1235 }
1236 let language = if let Some(language) = self.language.clone() {
1237 language
1238 } else {
1239 return;
1240 };
1241
1242 let text = self.text_snapshot();
1243 let parsed_version = self.version();
1244
1245 let mut syntax_map = self.syntax_map.lock();
1246 syntax_map.interpolate(&text);
1247 let language_registry = syntax_map.language_registry();
1248 let mut syntax_snapshot = syntax_map.snapshot();
1249 drop(syntax_map);
1250
1251 let parse_task = cx.background_executor().spawn({
1252 let language = language.clone();
1253 let language_registry = language_registry.clone();
1254 async move {
1255 syntax_snapshot.reparse(&text, language_registry, language);
1256 syntax_snapshot
1257 }
1258 });
1259
1260 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1261 match cx
1262 .background_executor()
1263 .block_with_timeout(self.sync_parse_timeout, parse_task)
1264 {
1265 Ok(new_syntax_snapshot) => {
1266 self.did_finish_parsing(new_syntax_snapshot, cx);
1267 }
1268 Err(parse_task) => {
1269 self.parsing_in_background = true;
1270 cx.spawn(move |this, mut cx| async move {
1271 let new_syntax_map = parse_task.await;
1272 this.update(&mut cx, move |this, cx| {
1273 let grammar_changed =
1274 this.language.as_ref().map_or(true, |current_language| {
1275 !Arc::ptr_eq(&language, current_language)
1276 });
1277 let language_registry_changed = new_syntax_map
1278 .contains_unknown_injections()
1279 && language_registry.map_or(false, |registry| {
1280 registry.version() != new_syntax_map.language_registry_version()
1281 });
1282 let parse_again = language_registry_changed
1283 || grammar_changed
1284 || this.version.changed_since(&parsed_version);
1285 this.did_finish_parsing(new_syntax_map, cx);
1286 this.parsing_in_background = false;
1287 if parse_again {
1288 this.reparse(cx);
1289 }
1290 })
1291 .ok();
1292 })
1293 .detach();
1294 }
1295 }
1296 }
1297
1298 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
1299 self.non_text_state_update_count += 1;
1300 self.syntax_map.lock().did_parse(syntax_snapshot);
1301 self.request_autoindent(cx);
1302 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1303 cx.emit(BufferEvent::Reparsed);
1304 cx.notify();
1305 }
1306
1307 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1308 self.parse_status.1.clone()
1309 }
1310
1311 /// Assign to the buffer a set of diagnostics created by a given language server.
1312 pub fn update_diagnostics(
1313 &mut self,
1314 server_id: LanguageServerId,
1315 diagnostics: DiagnosticSet,
1316 cx: &mut ModelContext<Self>,
1317 ) {
1318 let lamport_timestamp = self.text.lamport_clock.tick();
1319 let op = Operation::UpdateDiagnostics {
1320 server_id,
1321 diagnostics: diagnostics.iter().cloned().collect(),
1322 lamport_timestamp,
1323 };
1324 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1325 self.send_operation(op, true, cx);
1326 }
1327
1328 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
1329 if let Some(indent_sizes) = self.compute_autoindents() {
1330 let indent_sizes = cx.background_executor().spawn(indent_sizes);
1331 match cx
1332 .background_executor()
1333 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1334 {
1335 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1336 Err(indent_sizes) => {
1337 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1338 let indent_sizes = indent_sizes.await;
1339 this.update(&mut cx, |this, cx| {
1340 this.apply_autoindents(indent_sizes, cx);
1341 })
1342 .ok();
1343 }));
1344 }
1345 }
1346 } else {
1347 self.autoindent_requests.clear();
1348 }
1349 }
1350
1351 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
1352 let max_rows_between_yields = 100;
1353 let snapshot = self.snapshot();
1354 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1355 return None;
1356 }
1357
1358 let autoindent_requests = self.autoindent_requests.clone();
1359 Some(async move {
1360 let mut indent_sizes = BTreeMap::new();
1361 for request in autoindent_requests {
1362 // Resolve each edited range to its row in the current buffer and in the
1363 // buffer before this batch of edits.
1364 let mut row_ranges = Vec::new();
1365 let mut old_to_new_rows = BTreeMap::new();
1366 let mut language_indent_sizes_by_new_row = Vec::new();
1367 for entry in &request.entries {
1368 let position = entry.range.start;
1369 let new_row = position.to_point(&snapshot).row;
1370 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1371 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1372
1373 if !entry.first_line_is_new {
1374 let old_row = position.to_point(&request.before_edit).row;
1375 old_to_new_rows.insert(old_row, new_row);
1376 }
1377 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1378 }
1379
1380 // Build a map containing the suggested indentation for each of the edited lines
1381 // with respect to the state of the buffer before these edits. This map is keyed
1382 // by the rows for these lines in the current state of the buffer.
1383 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1384 let old_edited_ranges =
1385 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1386 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1387 let mut language_indent_size = IndentSize::default();
1388 for old_edited_range in old_edited_ranges {
1389 let suggestions = request
1390 .before_edit
1391 .suggest_autoindents(old_edited_range.clone())
1392 .into_iter()
1393 .flatten();
1394 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1395 if let Some(suggestion) = suggestion {
1396 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1397
1398 // Find the indent size based on the language for this row.
1399 while let Some((row, size)) = language_indent_sizes.peek() {
1400 if *row > new_row {
1401 break;
1402 }
1403 language_indent_size = *size;
1404 language_indent_sizes.next();
1405 }
1406
1407 let suggested_indent = old_to_new_rows
1408 .get(&suggestion.basis_row)
1409 .and_then(|from_row| {
1410 Some(old_suggestions.get(from_row).copied()?.0)
1411 })
1412 .unwrap_or_else(|| {
1413 request
1414 .before_edit
1415 .indent_size_for_line(suggestion.basis_row)
1416 })
1417 .with_delta(suggestion.delta, language_indent_size);
1418 old_suggestions
1419 .insert(new_row, (suggested_indent, suggestion.within_error));
1420 }
1421 }
1422 yield_now().await;
1423 }
1424
1425 // Compute new suggestions for each line, but only include them in the result
1426 // if they differ from the old suggestion for that line.
1427 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1428 let mut language_indent_size = IndentSize::default();
1429 for (row_range, original_indent_column) in row_ranges {
1430 let new_edited_row_range = if request.is_block_mode {
1431 row_range.start..row_range.start + 1
1432 } else {
1433 row_range.clone()
1434 };
1435
1436 let suggestions = snapshot
1437 .suggest_autoindents(new_edited_row_range.clone())
1438 .into_iter()
1439 .flatten();
1440 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1441 if let Some(suggestion) = suggestion {
1442 // Find the indent size based on the language for this row.
1443 while let Some((row, size)) = language_indent_sizes.peek() {
1444 if *row > new_row {
1445 break;
1446 }
1447 language_indent_size = *size;
1448 language_indent_sizes.next();
1449 }
1450
1451 let suggested_indent = indent_sizes
1452 .get(&suggestion.basis_row)
1453 .copied()
1454 .unwrap_or_else(|| {
1455 snapshot.indent_size_for_line(suggestion.basis_row)
1456 })
1457 .with_delta(suggestion.delta, language_indent_size);
1458 if old_suggestions.get(&new_row).map_or(
1459 true,
1460 |(old_indentation, was_within_error)| {
1461 suggested_indent != *old_indentation
1462 && (!suggestion.within_error || *was_within_error)
1463 },
1464 ) {
1465 indent_sizes.insert(new_row, suggested_indent);
1466 }
1467 }
1468 }
1469
1470 if let (true, Some(original_indent_column)) =
1471 (request.is_block_mode, original_indent_column)
1472 {
1473 let new_indent = indent_sizes
1474 .get(&row_range.start)
1475 .copied()
1476 .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
1477 let delta = new_indent.len as i64 - original_indent_column as i64;
1478 if delta != 0 {
1479 for row in row_range.skip(1) {
1480 indent_sizes.entry(row).or_insert_with(|| {
1481 let mut size = snapshot.indent_size_for_line(row);
1482 if size.kind == new_indent.kind {
1483 match delta.cmp(&0) {
1484 Ordering::Greater => size.len += delta as u32,
1485 Ordering::Less => {
1486 size.len = size.len.saturating_sub(-delta as u32)
1487 }
1488 Ordering::Equal => {}
1489 }
1490 }
1491 size
1492 });
1493 }
1494 }
1495 }
1496
1497 yield_now().await;
1498 }
1499 }
1500
1501 indent_sizes
1502 })
1503 }
1504
1505 fn apply_autoindents(
1506 &mut self,
1507 indent_sizes: BTreeMap<u32, IndentSize>,
1508 cx: &mut ModelContext<Self>,
1509 ) {
1510 self.autoindent_requests.clear();
1511
1512 let edits: Vec<_> = indent_sizes
1513 .into_iter()
1514 .filter_map(|(row, indent_size)| {
1515 let current_size = indent_size_for_line(self, row);
1516 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1517 })
1518 .collect();
1519
1520 let preserve_preview = self.preserve_preview();
1521 self.edit(edits, None, cx);
1522 if preserve_preview {
1523 self.refresh_preview();
1524 }
1525 }
1526
1527 /// Create a minimal edit that will cause the given row to be indented
1528 /// with the given size. After applying this edit, the length of the line
1529 /// will always be at least `new_size.len`.
1530 pub fn edit_for_indent_size_adjustment(
1531 row: u32,
1532 current_size: IndentSize,
1533 new_size: IndentSize,
1534 ) -> Option<(Range<Point>, String)> {
1535 if new_size.kind == current_size.kind {
1536 match new_size.len.cmp(¤t_size.len) {
1537 Ordering::Greater => {
1538 let point = Point::new(row, 0);
1539 Some((
1540 point..point,
1541 iter::repeat(new_size.char())
1542 .take((new_size.len - current_size.len) as usize)
1543 .collect::<String>(),
1544 ))
1545 }
1546
1547 Ordering::Less => Some((
1548 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1549 String::new(),
1550 )),
1551
1552 Ordering::Equal => None,
1553 }
1554 } else {
1555 Some((
1556 Point::new(row, 0)..Point::new(row, current_size.len),
1557 iter::repeat(new_size.char())
1558 .take(new_size.len as usize)
1559 .collect::<String>(),
1560 ))
1561 }
1562 }
1563
1564 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1565 /// and the given new text.
1566 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1567 let old_text = self.as_rope().clone();
1568 let base_version = self.version();
1569 cx.background_executor()
1570 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1571 let old_text = old_text.to_string();
1572 let line_ending = LineEnding::detect(&new_text);
1573 LineEnding::normalize(&mut new_text);
1574
1575 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1576 let empty: Arc<str> = Arc::default();
1577
1578 let mut edits = Vec::new();
1579 let mut old_offset = 0;
1580 let mut new_offset = 0;
1581 let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
1582 for change in diff.iter_all_changes().map(Some).chain([None]) {
1583 if let Some(change) = &change {
1584 let len = change.value().len();
1585 match change.tag() {
1586 ChangeTag::Equal => {
1587 old_offset += len;
1588 new_offset += len;
1589 }
1590 ChangeTag::Delete => {
1591 let old_end_offset = old_offset + len;
1592 if let Some((last_old_range, _)) = &mut last_edit {
1593 last_old_range.end = old_end_offset;
1594 } else {
1595 last_edit =
1596 Some((old_offset..old_end_offset, new_offset..new_offset));
1597 }
1598 old_offset = old_end_offset;
1599 }
1600 ChangeTag::Insert => {
1601 let new_end_offset = new_offset + len;
1602 if let Some((_, last_new_range)) = &mut last_edit {
1603 last_new_range.end = new_end_offset;
1604 } else {
1605 last_edit =
1606 Some((old_offset..old_offset, new_offset..new_end_offset));
1607 }
1608 new_offset = new_end_offset;
1609 }
1610 }
1611 }
1612
1613 if let Some((old_range, new_range)) = &last_edit {
1614 if old_offset > old_range.end
1615 || new_offset > new_range.end
1616 || change.is_none()
1617 {
1618 let text = if new_range.is_empty() {
1619 empty.clone()
1620 } else {
1621 new_text[new_range.clone()].into()
1622 };
1623 edits.push((old_range.clone(), text));
1624 last_edit.take();
1625 }
1626 }
1627 }
1628
1629 Diff {
1630 base_version,
1631 line_ending,
1632 edits,
1633 }
1634 })
1635 }
1636
1637 /// Spawns a background task that searches the buffer for any whitespace
1638 /// at the ends of a lines, and returns a `Diff` that removes that whitespace.
1639 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1640 let old_text = self.as_rope().clone();
1641 let line_ending = self.line_ending();
1642 let base_version = self.version();
1643 cx.background_executor().spawn(async move {
1644 let ranges = trailing_whitespace_ranges(&old_text);
1645 let empty = Arc::<str>::from("");
1646 Diff {
1647 base_version,
1648 line_ending,
1649 edits: ranges
1650 .into_iter()
1651 .map(|range| (range, empty.clone()))
1652 .collect(),
1653 }
1654 })
1655 }
1656
1657 /// Ensures that the buffer ends with a single newline character, and
1658 /// no other whitespace.
1659 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1660 let len = self.len();
1661 let mut offset = len;
1662 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1663 let non_whitespace_len = chunk
1664 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1665 .len();
1666 offset -= chunk.len();
1667 offset += non_whitespace_len;
1668 if non_whitespace_len != 0 {
1669 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1670 return;
1671 }
1672 break;
1673 }
1674 }
1675 self.edit([(offset..len, "\n")], None, cx);
1676 }
1677
1678 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1679 /// calculated, then adjust the diff to account for those changes, and discard any
1680 /// parts of the diff that conflict with those changes.
1681 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1682 // Check for any edits to the buffer that have occurred since this diff
1683 // was computed.
1684 let snapshot = self.snapshot();
1685 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1686 let mut delta = 0;
1687 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1688 while let Some(edit_since) = edits_since.peek() {
1689 // If the edit occurs after a diff hunk, then it does not
1690 // affect that hunk.
1691 if edit_since.old.start > range.end {
1692 break;
1693 }
1694 // If the edit precedes the diff hunk, then adjust the hunk
1695 // to reflect the edit.
1696 else if edit_since.old.end < range.start {
1697 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1698 edits_since.next();
1699 }
1700 // If the edit intersects a diff hunk, then discard that hunk.
1701 else {
1702 return None;
1703 }
1704 }
1705
1706 let start = (range.start as i64 + delta) as usize;
1707 let end = (range.end as i64 + delta) as usize;
1708 Some((start..end, new_text))
1709 });
1710
1711 self.start_transaction();
1712 self.text.set_line_ending(diff.line_ending);
1713 self.edit(adjusted_edits, None, cx);
1714 self.end_transaction(cx)
1715 }
1716
1717 fn has_unsaved_edits(&self) -> bool {
1718 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1719
1720 if last_version == self.version {
1721 self.has_unsaved_edits
1722 .set((last_version, has_unsaved_edits));
1723 return has_unsaved_edits;
1724 }
1725
1726 let has_edits = self.has_edits_since(&self.saved_version);
1727 self.has_unsaved_edits
1728 .set((self.version.clone(), has_edits));
1729 has_edits
1730 }
1731
1732 /// Checks if the buffer has unsaved changes.
1733 pub fn is_dirty(&self) -> bool {
1734 self.capability != Capability::ReadOnly
1735 && (self.has_conflict
1736 || self.has_unsaved_edits()
1737 || self
1738 .file
1739 .as_ref()
1740 .map_or(false, |file| file.is_deleted() || !file.is_created()))
1741 }
1742
1743 /// Checks if the buffer and its file have both changed since the buffer
1744 /// was last saved or reloaded.
1745 pub fn has_conflict(&self) -> bool {
1746 self.has_conflict
1747 || self.file.as_ref().map_or(false, |file| {
1748 file.mtime() > self.saved_mtime && self.has_unsaved_edits()
1749 })
1750 }
1751
1752 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1753 pub fn subscribe(&mut self) -> Subscription {
1754 self.text.subscribe()
1755 }
1756
1757 /// Starts a transaction, if one is not already in-progress. When undoing or
1758 /// redoing edits, all of the edits performed within a transaction are undone
1759 /// or redone together.
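    ///
    /// A minimal sketch (not compiled; assumes this runs inside a model update closure with
    /// `buffer: &mut Buffer` and `cx: &mut ModelContext<Buffer>`):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {}\n")], None, cx);
    /// buffer.edit([(0..0, "// entry point\n")], None, cx);
    /// buffer.end_transaction(cx);
    ///
    /// // Both edits were grouped into one transaction, so a single undo reverts them together.
    /// buffer.undo(cx);
    /// ```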
1760 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1761 self.start_transaction_at(Instant::now())
1762 }
1763
1764 /// Starts a transaction, providing the current time. Subsequent transactions
1765 /// that occur within a short period of time will be grouped together. This
1766 /// is controlled by the buffer's undo grouping duration.
1767 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1768 self.transaction_depth += 1;
1769 if self.was_dirty_before_starting_transaction.is_none() {
1770 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1771 }
1772 self.text.start_transaction_at(now)
1773 }
1774
1775 /// Terminates the current transaction, if this is the outermost transaction.
1776 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1777 self.end_transaction_at(Instant::now(), cx)
1778 }
1779
1780 /// Terminates the current transaction, providing the current time. Subsequent transactions
1781 /// that occur within a short period of time will be grouped together. This
1782 /// is controlled by the buffer's undo grouping duration.
1783 pub fn end_transaction_at(
1784 &mut self,
1785 now: Instant,
1786 cx: &mut ModelContext<Self>,
1787 ) -> Option<TransactionId> {
1788 assert!(self.transaction_depth > 0);
1789 self.transaction_depth -= 1;
1790 let was_dirty = if self.transaction_depth == 0 {
1791 self.was_dirty_before_starting_transaction.take().unwrap()
1792 } else {
1793 false
1794 };
1795 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1796 self.did_edit(&start_version, was_dirty, cx);
1797 Some(transaction_id)
1798 } else {
1799 None
1800 }
1801 }
1802
1803 /// Manually add a transaction to the buffer's undo history.
1804 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1805 self.text.push_transaction(transaction, now);
1806 }
1807
1808 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
1810 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1811 self.text.finalize_last_transaction()
1812 }
1813
1814 /// Manually group all changes since a given transaction.
1815 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1816 self.text.group_until_transaction(transaction_id);
1817 }
1818
1819 /// Manually remove a transaction from the buffer's undo history
1820 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1821 self.text.forget_transaction(transaction_id);
1822 }
1823
1824 /// Manually merge two adjacent transactions in the buffer's undo history.
1825 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1826 self.text.merge_transactions(transaction, destination);
1827 }
1828
1829 /// Waits for the buffer to receive operations with the given timestamps.
1830 pub fn wait_for_edits(
1831 &mut self,
1832 edit_ids: impl IntoIterator<Item = clock::Lamport>,
1833 ) -> impl Future<Output = Result<()>> {
1834 self.text.wait_for_edits(edit_ids)
1835 }
1836
1837 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
1838 pub fn wait_for_anchors(
1839 &mut self,
1840 anchors: impl IntoIterator<Item = Anchor>,
1841 ) -> impl 'static + Future<Output = Result<()>> {
1842 self.text.wait_for_anchors(anchors)
1843 }
1844
1845 /// Waits for the buffer to receive operations up to the given version.
1846 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1847 self.text.wait_for_version(version)
1848 }
1849
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
1852 pub fn give_up_waiting(&mut self) {
1853 self.text.give_up_waiting();
1854 }
1855
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
1857 pub fn set_active_selections(
1858 &mut self,
1859 selections: Arc<[Selection<Anchor>]>,
1860 line_mode: bool,
1861 cursor_shape: CursorShape,
1862 cx: &mut ModelContext<Self>,
1863 ) {
1864 let lamport_timestamp = self.text.lamport_clock.tick();
1865 self.remote_selections.insert(
1866 self.text.replica_id(),
1867 SelectionSet {
1868 selections: selections.clone(),
1869 lamport_timestamp,
1870 line_mode,
1871 cursor_shape,
1872 },
1873 );
1874 self.send_operation(
1875 Operation::UpdateSelections {
1876 selections,
1877 line_mode,
1878 lamport_timestamp,
1879 cursor_shape,
1880 },
1881 true,
1882 cx,
1883 );
1884 self.non_text_state_update_count += 1;
1885 cx.notify();
1886 }
1887
1888 /// Clears the selections, so that other replicas of the buffer do not see any selections for
1889 /// this replica.
1890 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1891 if self
1892 .remote_selections
1893 .get(&self.text.replica_id())
1894 .map_or(true, |set| !set.selections.is_empty())
1895 {
1896 self.set_active_selections(Arc::default(), false, Default::default(), cx);
1897 }
1898 }
1899
1900 /// Replaces the buffer's entire text.
1901 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Lamport>
1902 where
1903 T: Into<Arc<str>>,
1904 {
1905 self.autoindent_requests.clear();
1906 self.edit([(0..self.len(), text)], None, cx)
1907 }
1908
1909 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
1910 /// delete, and a string of text to insert at that location.
1911 ///
1912 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
1913 /// request for the edited ranges, which will be processed when the buffer finishes
1914 /// parsing.
1915 ///
1916 /// Parsing takes place at the end of a transaction, and may compute synchronously
1917 /// or asynchronously, depending on the changes.
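    ///
    /// A minimal sketch (not compiled; assumes `buffer: &mut Buffer`,
    /// `cx: &mut ModelContext<Buffer>`, a language with indentation rules, and a buffer with at
    /// least two lines):
    ///
    /// ```ignore
    /// buffer.edit(
    ///     [(Point::new(1, 0)..Point::new(1, 0), "let x = 1;\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```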
1918 pub fn edit<I, S, T>(
1919 &mut self,
1920 edits_iter: I,
1921 autoindent_mode: Option<AutoindentMode>,
1922 cx: &mut ModelContext<Self>,
1923 ) -> Option<clock::Lamport>
1924 where
1925 I: IntoIterator<Item = (Range<S>, T)>,
1926 S: ToOffset,
1927 T: Into<Arc<str>>,
1928 {
1929 // Skip invalid edits and coalesce contiguous ones.
1930 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1931 for (range, new_text) in edits_iter {
1932 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1933 if range.start > range.end {
1934 mem::swap(&mut range.start, &mut range.end);
1935 }
1936 let new_text = new_text.into();
1937 if !new_text.is_empty() || !range.is_empty() {
1938 if let Some((prev_range, prev_text)) = edits.last_mut() {
1939 if prev_range.end >= range.start {
1940 prev_range.end = cmp::max(prev_range.end, range.end);
1941 *prev_text = format!("{prev_text}{new_text}").into();
1942 } else {
1943 edits.push((range, new_text));
1944 }
1945 } else {
1946 edits.push((range, new_text));
1947 }
1948 }
1949 }
1950 if edits.is_empty() {
1951 return None;
1952 }
1953
1954 self.start_transaction();
1955 self.pending_autoindent.take();
1956 let autoindent_request = autoindent_mode
1957 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1958
1959 let edit_operation = self.text.edit(edits.iter().cloned());
1960 let edit_id = edit_operation.timestamp();
1961
1962 if let Some((before_edit, mode)) = autoindent_request {
1963 let mut delta = 0isize;
1964 let entries = edits
1965 .into_iter()
1966 .enumerate()
1967 .zip(&edit_operation.as_edit().unwrap().new_text)
1968 .map(|((ix, (range, _)), new_text)| {
1969 let new_text_length = new_text.len();
1970 let old_start = range.start.to_point(&before_edit);
1971 let new_start = (delta + range.start as isize) as usize;
1972 delta += new_text_length as isize - (range.end as isize - range.start as isize);
1973
1974 let mut range_of_insertion_to_indent = 0..new_text_length;
1975 let mut first_line_is_new = false;
1976 let mut original_indent_column = None;
1977
1978 // When inserting an entire line at the beginning of an existing line,
1979 // treat the insertion as new.
1980 if new_text.contains('\n')
1981 && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
1982 {
1983 first_line_is_new = true;
1984 }
1985
1986 // When inserting text starting with a newline, avoid auto-indenting the
1987 // previous line.
1988 if new_text.starts_with('\n') {
1989 range_of_insertion_to_indent.start += 1;
1990 first_line_is_new = true;
1991 }
1992
1993 // Avoid auto-indenting after the insertion.
1994 if let AutoindentMode::Block {
1995 original_indent_columns,
1996 } = &mode
1997 {
1998 original_indent_column =
1999 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
2000 indent_size_for_text(
2001 new_text[range_of_insertion_to_indent.clone()].chars(),
2002 )
2003 .len
2004 }));
2005 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2006 range_of_insertion_to_indent.end -= 1;
2007 }
2008 }
2009
2010 AutoindentRequestEntry {
2011 first_line_is_new,
2012 original_indent_column,
2013 indent_size: before_edit.language_indent_size_at(range.start, cx),
2014 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2015 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2016 }
2017 })
2018 .collect();
2019
2020 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2021 before_edit,
2022 entries,
2023 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2024 }));
2025 }
2026
2027 self.end_transaction(cx);
2028 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2029 Some(edit_id)
2030 }
2031
2032 fn did_edit(
2033 &mut self,
2034 old_version: &clock::Global,
2035 was_dirty: bool,
2036 cx: &mut ModelContext<Self>,
2037 ) {
2038 if self.edits_since::<usize>(old_version).next().is_none() {
2039 return;
2040 }
2041
2042 self.reparse(cx);
2043
2044 cx.emit(BufferEvent::Edited);
2045 if was_dirty != self.is_dirty() {
2046 cx.emit(BufferEvent::DirtyChanged);
2047 }
2048 cx.notify();
2049 }
2050
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2053 pub fn insert_empty_line(
2054 &mut self,
2055 position: impl ToPoint,
2056 space_above: bool,
2057 space_below: bool,
2058 cx: &mut ModelContext<Self>,
2059 ) -> Point {
2060 let mut position = position.to_point(self);
2061
2062 self.start_transaction();
2063
2064 self.edit(
2065 [(position..position, "\n")],
2066 Some(AutoindentMode::EachLine),
2067 cx,
2068 );
2069
2070 if position.column > 0 {
2071 position += Point::new(1, 0);
2072 }
2073
2074 if !self.is_line_blank(position.row) {
2075 self.edit(
2076 [(position..position, "\n")],
2077 Some(AutoindentMode::EachLine),
2078 cx,
2079 );
2080 }
2081
2082 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2083 self.edit(
2084 [(position..position, "\n")],
2085 Some(AutoindentMode::EachLine),
2086 cx,
2087 );
2088 position.row += 1;
2089 }
2090
2091 if space_below
2092 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2093 {
2094 self.edit(
2095 [(position..position, "\n")],
2096 Some(AutoindentMode::EachLine),
2097 cx,
2098 );
2099 }
2100
2101 self.end_transaction(cx);
2102
2103 position
2104 }
2105
2106 /// Applies the given remote operations to the buffer.
2107 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
2108 &mut self,
2109 ops: I,
2110 cx: &mut ModelContext<Self>,
2111 ) {
2112 self.pending_autoindent.take();
2113 let was_dirty = self.is_dirty();
2114 let old_version = self.version.clone();
2115 let mut deferred_ops = Vec::new();
2116 let buffer_ops = ops
2117 .into_iter()
2118 .filter_map(|op| match op {
2119 Operation::Buffer(op) => Some(op),
2120 _ => {
2121 if self.can_apply_op(&op) {
2122 self.apply_op(op, cx);
2123 } else {
2124 deferred_ops.push(op);
2125 }
2126 None
2127 }
2128 })
2129 .collect::<Vec<_>>();
2130 for operation in buffer_ops.iter() {
2131 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2132 }
2133 self.text.apply_ops(buffer_ops);
2134 self.deferred_ops.insert(deferred_ops);
2135 self.flush_deferred_ops(cx);
2136 self.did_edit(&old_version, was_dirty, cx);
2137 // Notify independently of whether the buffer was edited as the operations could include a
2138 // selection update.
2139 cx.notify();
2140 }
2141
2142 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
2143 let mut deferred_ops = Vec::new();
2144 for op in self.deferred_ops.drain().iter().cloned() {
2145 if self.can_apply_op(&op) {
2146 self.apply_op(op, cx);
2147 } else {
2148 deferred_ops.push(op);
2149 }
2150 }
2151 self.deferred_ops.insert(deferred_ops);
2152 }
2153
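    /// Returns `true` if any received operations are still deferred, waiting for the
    /// operations they depend on to arrive before they can be applied.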
2154 pub fn has_deferred_ops(&self) -> bool {
2155 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2156 }
2157
2158 fn can_apply_op(&self, operation: &Operation) -> bool {
2159 match operation {
2160 Operation::Buffer(_) => {
2161 unreachable!("buffer operations should never be applied at this layer")
2162 }
2163 Operation::UpdateDiagnostics {
2164 diagnostics: diagnostic_set,
2165 ..
2166 } => diagnostic_set.iter().all(|diagnostic| {
2167 self.text.can_resolve(&diagnostic.range.start)
2168 && self.text.can_resolve(&diagnostic.range.end)
2169 }),
2170 Operation::UpdateSelections { selections, .. } => selections
2171 .iter()
2172 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2173 Operation::UpdateCompletionTriggers { .. } => true,
2174 }
2175 }
2176
2177 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
2178 match operation {
2179 Operation::Buffer(_) => {
2180 unreachable!("buffer operations should never be applied at this layer")
2181 }
2182 Operation::UpdateDiagnostics {
2183 server_id,
2184 diagnostics: diagnostic_set,
2185 lamport_timestamp,
2186 } => {
2187 let snapshot = self.snapshot();
2188 self.apply_diagnostic_update(
2189 server_id,
2190 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2191 lamport_timestamp,
2192 cx,
2193 );
2194 }
2195 Operation::UpdateSelections {
2196 selections,
2197 lamport_timestamp,
2198 line_mode,
2199 cursor_shape,
2200 } => {
2201 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2202 if set.lamport_timestamp > lamport_timestamp {
2203 return;
2204 }
2205 }
2206
2207 self.remote_selections.insert(
2208 lamport_timestamp.replica_id,
2209 SelectionSet {
2210 selections,
2211 lamport_timestamp,
2212 line_mode,
2213 cursor_shape,
2214 },
2215 );
2216 self.text.lamport_clock.observe(lamport_timestamp);
2217 self.non_text_state_update_count += 1;
2218 }
2219 Operation::UpdateCompletionTriggers {
2220 triggers,
2221 lamport_timestamp,
2222 } => {
2223 self.completion_triggers = triggers;
2224 self.text.lamport_clock.observe(lamport_timestamp);
2225 }
2226 }
2227 }
2228
2229 fn apply_diagnostic_update(
2230 &mut self,
2231 server_id: LanguageServerId,
2232 diagnostics: DiagnosticSet,
2233 lamport_timestamp: clock::Lamport,
2234 cx: &mut ModelContext<Self>,
2235 ) {
2236 if lamport_timestamp > self.diagnostics_timestamp {
2237 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2238 if diagnostics.is_empty() {
2239 if let Ok(ix) = ix {
2240 self.diagnostics.remove(ix);
2241 }
2242 } else {
2243 match ix {
2244 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2245 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2246 };
2247 }
2248 self.diagnostics_timestamp = lamport_timestamp;
2249 self.non_text_state_update_count += 1;
2250 self.text.lamport_clock.observe(lamport_timestamp);
2251 cx.notify();
2252 cx.emit(BufferEvent::DiagnosticsUpdated);
2253 }
2254 }
2255
2256 fn send_operation(&self, operation: Operation, is_local: bool, cx: &mut ModelContext<Self>) {
2257 cx.emit(BufferEvent::Operation {
2258 operation,
2259 is_local,
2260 });
2261 }
2262
2263 /// Removes the selections for a given peer.
2264 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
2265 self.remote_selections.remove(&replica_id);
2266 cx.notify();
2267 }
2268
2269 /// Undoes the most recent transaction.
2270 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2271 let was_dirty = self.is_dirty();
2272 let old_version = self.version.clone();
2273
2274 if let Some((transaction_id, operation)) = self.text.undo() {
2275 self.send_operation(Operation::Buffer(operation), true, cx);
2276 self.did_edit(&old_version, was_dirty, cx);
2277 Some(transaction_id)
2278 } else {
2279 None
2280 }
2281 }
2282
2283 /// Manually undoes a specific transaction in the buffer's undo history.
2284 pub fn undo_transaction(
2285 &mut self,
2286 transaction_id: TransactionId,
2287 cx: &mut ModelContext<Self>,
2288 ) -> bool {
2289 let was_dirty = self.is_dirty();
2290 let old_version = self.version.clone();
2291 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2292 self.send_operation(Operation::Buffer(operation), true, cx);
2293 self.did_edit(&old_version, was_dirty, cx);
2294 true
2295 } else {
2296 false
2297 }
2298 }
2299
2300 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2301 pub fn undo_to_transaction(
2302 &mut self,
2303 transaction_id: TransactionId,
2304 cx: &mut ModelContext<Self>,
2305 ) -> bool {
2306 let was_dirty = self.is_dirty();
2307 let old_version = self.version.clone();
2308
2309 let operations = self.text.undo_to_transaction(transaction_id);
2310 let undone = !operations.is_empty();
2311 for operation in operations {
2312 self.send_operation(Operation::Buffer(operation), true, cx);
2313 }
2314 if undone {
2315 self.did_edit(&old_version, was_dirty, cx)
2316 }
2317 undone
2318 }
2319
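    /// Manually undoes a specific set of operations, identified by their lamport timestamps.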
2320 pub fn undo_operations(
2321 &mut self,
2322 counts: HashMap<Lamport, u32>,
2323 cx: &mut ModelContext<Buffer>,
2324 ) {
2325 let was_dirty = self.is_dirty();
2326 let operation = self.text.undo_operations(counts);
2327 let old_version = self.version.clone();
2328 self.send_operation(Operation::Buffer(operation), true, cx);
2329 self.did_edit(&old_version, was_dirty, cx);
2330 }
2331
    /// Redoes the most recently undone transaction.
2333 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2334 let was_dirty = self.is_dirty();
2335 let old_version = self.version.clone();
2336
2337 if let Some((transaction_id, operation)) = self.text.redo() {
2338 self.send_operation(Operation::Buffer(operation), true, cx);
2339 self.did_edit(&old_version, was_dirty, cx);
2340 Some(transaction_id)
2341 } else {
2342 None
2343 }
2344 }
2345
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2347 pub fn redo_to_transaction(
2348 &mut self,
2349 transaction_id: TransactionId,
2350 cx: &mut ModelContext<Self>,
2351 ) -> bool {
2352 let was_dirty = self.is_dirty();
2353 let old_version = self.version.clone();
2354
2355 let operations = self.text.redo_to_transaction(transaction_id);
2356 let redone = !operations.is_empty();
2357 for operation in operations {
2358 self.send_operation(Operation::Buffer(operation), true, cx);
2359 }
2360 if redone {
2361 self.did_edit(&old_version, was_dirty, cx)
2362 }
2363 redone
2364 }
2365
2366 /// Override current completion triggers with the user-provided completion triggers.
2367 pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
2368 self.completion_triggers.clone_from(&triggers);
2369 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2370 self.send_operation(
2371 Operation::UpdateCompletionTriggers {
2372 triggers,
2373 lamport_timestamp: self.completion_triggers_timestamp,
2374 },
2375 true,
2376 cx,
2377 );
2378 cx.notify();
2379 }
2380
2381 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2383 pub fn completion_triggers(&self) -> &[String] {
2384 &self.completion_triggers
2385 }
2386
2387 /// Call this directly after performing edits to prevent the preview tab
2388 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2389 /// to return false until there are additional edits.
2390 pub fn refresh_preview(&mut self) {
2391 self.preview_version = self.version.clone();
2392 }
2393
2394 /// Whether we should preserve the preview status of a tab containing this buffer.
2395 pub fn preserve_preview(&self) -> bool {
2396 !self.has_edits_since(&self.preview_version)
2397 }
2398}
2399
2400#[doc(hidden)]
2401#[cfg(any(test, feature = "test-support"))]
2402impl Buffer {
2403 pub fn edit_via_marked_text(
2404 &mut self,
2405 marked_string: &str,
2406 autoindent_mode: Option<AutoindentMode>,
2407 cx: &mut ModelContext<Self>,
2408 ) {
2409 let edits = self.edits_for_marked_text(marked_string);
2410 self.edit(edits, autoindent_mode, cx);
2411 }
2412
2413 pub fn set_group_interval(&mut self, group_interval: Duration) {
2414 self.text.set_group_interval(group_interval);
2415 }
2416
2417 pub fn randomly_edit<T>(
2418 &mut self,
2419 rng: &mut T,
2420 old_range_count: usize,
2421 cx: &mut ModelContext<Self>,
2422 ) where
2423 T: rand::Rng,
2424 {
2425 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2426 let mut last_end = None;
2427 for _ in 0..old_range_count {
2428 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2429 break;
2430 }
2431
2432 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2433 let mut range = self.random_byte_range(new_start, rng);
2434 if rng.gen_bool(0.2) {
2435 mem::swap(&mut range.start, &mut range.end);
2436 }
2437 last_end = Some(range.end);
2438
2439 let new_text_len = rng.gen_range(0..10);
2440 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2441
2442 edits.push((range, new_text));
2443 }
2444 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2445 self.edit(edits, None, cx);
2446 }
2447
2448 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
2449 let was_dirty = self.is_dirty();
2450 let old_version = self.version.clone();
2451
2452 let ops = self.text.randomly_undo_redo(rng);
2453 if !ops.is_empty() {
2454 for op in ops {
2455 self.send_operation(Operation::Buffer(op), true, cx);
2456 self.did_edit(&old_version, was_dirty, cx);
2457 }
2458 }
2459 }
2460}
2461
2462impl EventEmitter<BufferEvent> for Buffer {}
2463
2464impl Deref for Buffer {
2465 type Target = TextBuffer;
2466
2467 fn deref(&self) -> &Self::Target {
2468 &self.text
2469 }
2470}
2471
2472impl BufferSnapshot {
    /// Returns [`IndentSize`] for a given line that respects user settings
    /// and language preferences.
    pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
        indent_size_for_line(self, row)
    }

    /// Returns [`IndentSize`] for a given position that respects user settings
2478 /// and language preferences.
2479 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
2480 let settings = language_settings(
2481 self.language_at(position).map(|l| l.name()),
2482 self.file(),
2483 cx,
2484 );
2485 if settings.hard_tabs {
2486 IndentSize::tab()
2487 } else {
2488 IndentSize::spaces(settings.tab_size.get())
2489 }
2490 }
2491
2492 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2493 /// is passed in as `single_indent_size`.
2494 pub fn suggested_indents(
2495 &self,
2496 rows: impl Iterator<Item = u32>,
2497 single_indent_size: IndentSize,
2498 ) -> BTreeMap<u32, IndentSize> {
2499 let mut result = BTreeMap::new();
2500
2501 for row_range in contiguous_ranges(rows, 10) {
2502 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2503 Some(suggestions) => suggestions,
2504 _ => break,
2505 };
2506
2507 for (row, suggestion) in row_range.zip(suggestions) {
2508 let indent_size = if let Some(suggestion) = suggestion {
2509 result
2510 .get(&suggestion.basis_row)
2511 .copied()
2512 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2513 .with_delta(suggestion.delta, single_indent_size)
2514 } else {
2515 self.indent_size_for_line(row)
2516 };
2517
2518 result.insert(row, indent_size);
2519 }
2520 }
2521
2522 result
2523 }
2524
2525 fn suggest_autoindents(
2526 &self,
2527 row_range: Range<u32>,
2528 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2529 let config = &self.language.as_ref()?.config;
2530 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2531
2532 // Find the suggested indentation ranges based on the syntax tree.
2533 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2534 let end = Point::new(row_range.end, 0);
2535 let range = (start..end).to_offset(&self.text);
2536 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2537 Some(&grammar.indents_config.as_ref()?.query)
2538 });
2539 let indent_configs = matches
2540 .grammars()
2541 .iter()
2542 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2543 .collect::<Vec<_>>();
2544
2545 let mut indent_ranges = Vec::<Range<Point>>::new();
2546 let mut outdent_positions = Vec::<Point>::new();
2547 while let Some(mat) = matches.peek() {
2548 let mut start: Option<Point> = None;
2549 let mut end: Option<Point> = None;
2550
2551 let config = &indent_configs[mat.grammar_index];
2552 for capture in mat.captures {
2553 if capture.index == config.indent_capture_ix {
2554 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2555 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2556 } else if Some(capture.index) == config.start_capture_ix {
2557 start = Some(Point::from_ts_point(capture.node.end_position()));
2558 } else if Some(capture.index) == config.end_capture_ix {
2559 end = Some(Point::from_ts_point(capture.node.start_position()));
2560 } else if Some(capture.index) == config.outdent_capture_ix {
2561 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2562 }
2563 }
2564
2565 matches.advance();
2566 if let Some((start, end)) = start.zip(end) {
2567 if start.row == end.row {
2568 continue;
2569 }
2570
2571 let range = start..end;
2572 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2573 Err(ix) => indent_ranges.insert(ix, range),
2574 Ok(ix) => {
2575 let prev_range = &mut indent_ranges[ix];
2576 prev_range.end = prev_range.end.max(range.end);
2577 }
2578 }
2579 }
2580 }
2581
2582 let mut error_ranges = Vec::<Range<Point>>::new();
2583 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2584 Some(&grammar.error_query)
2585 });
2586 while let Some(mat) = matches.peek() {
2587 let node = mat.captures[0].node;
2588 let start = Point::from_ts_point(node.start_position());
2589 let end = Point::from_ts_point(node.end_position());
2590 let range = start..end;
2591 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2592 Ok(ix) | Err(ix) => ix,
2593 };
2594 let mut end_ix = ix;
2595 while let Some(existing_range) = error_ranges.get(end_ix) {
2596 if existing_range.end < end {
2597 end_ix += 1;
2598 } else {
2599 break;
2600 }
2601 }
2602 error_ranges.splice(ix..end_ix, [range]);
2603 matches.advance();
2604 }
2605
2606 outdent_positions.sort();
2607 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent_position,
            // and set its end to the outdent position.
2610 if let Some(range_to_truncate) = indent_ranges
2611 .iter_mut()
2612 .filter(|indent_range| indent_range.contains(&outdent_position))
2613 .last()
2614 {
2615 range_to_truncate.end = outdent_position;
2616 }
2617 }
2618
        // Find the suggested indentation increases and decreases based on regexes.
2620 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2621 self.for_each_line(
2622 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2623 ..Point::new(row_range.end, 0),
2624 |row, line| {
2625 if config
2626 .decrease_indent_pattern
2627 .as_ref()
2628 .map_or(false, |regex| regex.is_match(line))
2629 {
2630 indent_change_rows.push((row, Ordering::Less));
2631 }
2632 if config
2633 .increase_indent_pattern
2634 .as_ref()
2635 .map_or(false, |regex| regex.is_match(line))
2636 {
2637 indent_change_rows.push((row + 1, Ordering::Greater));
2638 }
2639 },
2640 );
2641
2642 let mut indent_changes = indent_change_rows.into_iter().peekable();
2643 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2644 prev_non_blank_row.unwrap_or(0)
2645 } else {
2646 row_range.start.saturating_sub(1)
2647 };
2648 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2649 Some(row_range.map(move |row| {
2650 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2651
2652 let mut indent_from_prev_row = false;
2653 let mut outdent_from_prev_row = false;
2654 let mut outdent_to_row = u32::MAX;
2655
2656 while let Some((indent_row, delta)) = indent_changes.peek() {
2657 match indent_row.cmp(&row) {
2658 Ordering::Equal => match delta {
2659 Ordering::Less => outdent_from_prev_row = true,
2660 Ordering::Greater => indent_from_prev_row = true,
2661 _ => {}
2662 },
2663
2664 Ordering::Greater => break,
2665 Ordering::Less => {}
2666 }
2667
2668 indent_changes.next();
2669 }
2670
2671 for range in &indent_ranges {
2672 if range.start.row >= row {
2673 break;
2674 }
2675 if range.start.row == prev_row && range.end > row_start {
2676 indent_from_prev_row = true;
2677 }
2678 if range.end > prev_row_start && range.end <= row_start {
2679 outdent_to_row = outdent_to_row.min(range.start.row);
2680 }
2681 }
2682
2683 let within_error = error_ranges
2684 .iter()
2685 .any(|e| e.start.row < row && e.end > row_start);
2686
2687 let suggestion = if outdent_to_row == prev_row
2688 || (outdent_from_prev_row && indent_from_prev_row)
2689 {
2690 Some(IndentSuggestion {
2691 basis_row: prev_row,
2692 delta: Ordering::Equal,
2693 within_error,
2694 })
2695 } else if indent_from_prev_row {
2696 Some(IndentSuggestion {
2697 basis_row: prev_row,
2698 delta: Ordering::Greater,
2699 within_error,
2700 })
2701 } else if outdent_to_row < prev_row {
2702 Some(IndentSuggestion {
2703 basis_row: outdent_to_row,
2704 delta: Ordering::Equal,
2705 within_error,
2706 })
2707 } else if outdent_from_prev_row {
2708 Some(IndentSuggestion {
2709 basis_row: prev_row,
2710 delta: Ordering::Less,
2711 within_error,
2712 })
2713 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2714 {
2715 Some(IndentSuggestion {
2716 basis_row: prev_row,
2717 delta: Ordering::Equal,
2718 within_error,
2719 })
2720 } else {
2721 None
2722 };
2723
2724 prev_row = row;
2725 prev_row_start = row_start;
2726 suggestion
2727 }))
2728 }
2729
2730 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2731 while row > 0 {
2732 row -= 1;
2733 if !self.is_line_blank(row) {
2734 return Some(row);
2735 }
2736 }
2737 None
2738 }
2739
2740 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
2741 let captures = self.syntax.captures(range, &self.text, |grammar| {
2742 grammar.highlights_query.as_ref()
2743 });
2744 let highlight_maps = captures
2745 .grammars()
2746 .iter()
2747 .map(|grammar| grammar.highlight_map())
2748 .collect();
2749 (captures, highlight_maps)
2750 }
2751
2752 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
2753 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
2754 /// returned in chunks where each chunk has a single syntax highlighting style and
2755 /// diagnostic status.
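    ///
    /// A minimal sketch of reassembling the text and inspecting highlights (not compiled;
    /// assumes `snapshot: &BufferSnapshot`):
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     if let Some(highlight_id) = chunk.syntax_highlight_id {
    ///         // `highlight_id` can be resolved to a style with a `SyntaxTheme`.
    ///     }
    ///     text.push_str(chunk.text);
    /// }
    /// ```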
2756 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2757 let range = range.start.to_offset(self)..range.end.to_offset(self);
2758
2759 let mut syntax = None;
2760 if language_aware {
2761 syntax = Some(self.get_highlights(range.clone()));
2762 }
2763 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
2764 let diagnostics = language_aware;
2765 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
2766 }
2767
2768 /// Invokes the given callback for each line of text in the given range of the buffer.
2769 /// Uses callback to avoid allocating a string for each line.
2770 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2771 let mut line = String::new();
2772 let mut row = range.start.row;
2773 for chunk in self
2774 .as_rope()
2775 .chunks_in_range(range.to_offset(self))
2776 .chain(["\n"])
2777 {
2778 for (newline_ix, text) in chunk.split('\n').enumerate() {
2779 if newline_ix > 0 {
2780 callback(row, &line);
2781 row += 1;
2782 line.clear();
2783 }
2784 line.push_str(text);
2785 }
2786 }
2787 }
2788
2789 /// Iterates over every [`SyntaxLayer`] in the buffer.
2790 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
2791 self.syntax
2792 .layers_for_range(0..self.len(), &self.text, true)
2793 }
2794
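    /// Returns the innermost [`SyntaxLayer`] containing the given position, if any.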
2795 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
2796 let offset = position.to_offset(self);
2797 self.syntax
2798 .layers_for_range(offset..offset, &self.text, false)
2799 .filter(|l| l.node().end_byte() > offset)
2800 .last()
2801 }
2802
2803 /// Returns the main [`Language`].
2804 pub fn language(&self) -> Option<&Arc<Language>> {
2805 self.language.as_ref()
2806 }
2807
2808 /// Returns the [`Language`] at the given location.
2809 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2810 self.syntax_layer_at(position)
2811 .map(|info| info.language)
2812 .or(self.language.as_ref())
2813 }
2814
2815 /// Returns the settings for the language at the given location.
2816 pub fn settings_at<'a, D: ToOffset>(
2817 &'a self,
2818 position: D,
2819 cx: &'a AppContext,
2820 ) -> Cow<'a, LanguageSettings> {
2821 language_settings(
2822 self.language_at(position).map(|l| l.name()),
2823 self.file.as_ref(),
2824 cx,
2825 )
2826 }
2827
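    /// Returns a [`CharClassifier`] for the language scope at the given position.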
2828 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
2829 CharClassifier::new(self.language_scope_at(point))
2830 }
2831
2832 /// Returns the [`LanguageScope`] at the given location.
2833 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2834 let offset = position.to_offset(self);
2835 let mut scope = None;
2836 let mut smallest_range: Option<Range<usize>> = None;
2837
2838 // Use the layer that has the smallest node intersecting the given point.
2839 for layer in self
2840 .syntax
2841 .layers_for_range(offset..offset, &self.text, false)
2842 {
2843 let mut cursor = layer.node().walk();
2844
2845 let mut range = None;
2846 loop {
2847 let child_range = cursor.node().byte_range();
2848 if !child_range.to_inclusive().contains(&offset) {
2849 break;
2850 }
2851
2852 range = Some(child_range);
2853 if cursor.goto_first_child_for_byte(offset).is_none() {
2854 break;
2855 }
2856 }
2857
2858 if let Some(range) = range {
2859 if smallest_range
2860 .as_ref()
2861 .map_or(true, |smallest_range| range.len() < smallest_range.len())
2862 {
2863 smallest_range = Some(range);
2864 scope = Some(LanguageScope {
2865 language: layer.language.clone(),
2866 override_id: layer.override_id(offset, &self.text),
2867 });
2868 }
2869 }
2870 }
2871
2872 scope.or_else(|| {
2873 self.language.clone().map(|language| LanguageScope {
2874 language,
2875 override_id: None,
2876 })
2877 })
2878 }
2879
2880 /// Returns a tuple of the range and character kind of the word
2881 /// surrounding the given position.
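    ///
    /// A minimal sketch (not compiled; assumes `snapshot: &BufferSnapshot` whose text is
    /// `"hello world"`):
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(2);
    /// // `range` is expected to cover "hello" (0..5), and `kind` to be `Some(CharKind::Word)`.
    /// ```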
2882 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2883 let mut start = start.to_offset(self);
2884 let mut end = start;
2885 let mut next_chars = self.chars_at(start).peekable();
2886 let mut prev_chars = self.reversed_chars_at(start).peekable();
2887
2888 let classifier = self.char_classifier_at(start);
2889 let word_kind = cmp::max(
2890 prev_chars.peek().copied().map(|c| classifier.kind(c)),
2891 next_chars.peek().copied().map(|c| classifier.kind(c)),
2892 );
2893
2894 for ch in prev_chars {
2895 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
2896 start -= ch.len_utf8();
2897 } else {
2898 break;
2899 }
2900 }
2901
2902 for ch in next_chars {
2903 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
2904 end += ch.len_utf8();
2905 } else {
2906 break;
2907 }
2908 }
2909
2910 (start..end, word_kind)
2911 }
2912
    /// Returns the range for the closest syntax node enclosing the given range.
2914 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2915 let range = range.start.to_offset(self)..range.end.to_offset(self);
2916 let mut result: Option<Range<usize>> = None;
2917 'outer: for layer in self
2918 .syntax
2919 .layers_for_range(range.clone(), &self.text, true)
2920 {
2921 let mut cursor = layer.node().walk();
2922
2923 // Descend to the first leaf that touches the start of the range,
2924 // and if the range is non-empty, extends beyond the start.
2925 while cursor.goto_first_child_for_byte(range.start).is_some() {
2926 if !range.is_empty() && cursor.node().end_byte() == range.start {
2927 cursor.goto_next_sibling();
2928 }
2929 }
2930
2931 // Ascend to the smallest ancestor that strictly contains the range.
2932 loop {
2933 let node_range = cursor.node().byte_range();
2934 if node_range.start <= range.start
2935 && node_range.end >= range.end
2936 && node_range.len() > range.len()
2937 {
2938 break;
2939 }
2940 if !cursor.goto_parent() {
2941 continue 'outer;
2942 }
2943 }
2944
2945 let left_node = cursor.node();
2946 let mut layer_result = left_node.byte_range();
2947
2948 // For an empty range, try to find another node immediately to the right of the range.
2949 if left_node.end_byte() == range.start {
2950 let mut right_node = None;
2951 while !cursor.goto_next_sibling() {
2952 if !cursor.goto_parent() {
2953 break;
2954 }
2955 }
2956
2957 while cursor.node().start_byte() == range.start {
2958 right_node = Some(cursor.node());
2959 if !cursor.goto_first_child() {
2960 break;
2961 }
2962 }
2963
2964 // If there is a candidate node on both sides of the (empty) range, then
2965 // decide between the two by favoring a named node over an anonymous token.
2966 // If both nodes are the same in that regard, favor the right one.
2967 if let Some(right_node) = right_node {
2968 if right_node.is_named() || !left_node.is_named() {
2969 layer_result = right_node.byte_range();
2970 }
2971 }
2972 }
2973
2974 if let Some(previous_result) = &result {
2975 if previous_result.len() < layer_result.len() {
2976 continue;
2977 }
2978 }
2979 result = Some(layer_result);
2980 }
2981
2982 result
2983 }
2984
2985 /// Returns the outline for the buffer.
2986 ///
2987 /// This method allows passing an optional [`SyntaxTheme`] to
2988 /// syntax-highlight the returned symbols.
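    ///
    /// A minimal sketch of printing the outline without syntax highlighting (not compiled;
    /// assumes `snapshot: &BufferSnapshot` whose language defines an outline query, and that
    /// the returned [`Outline`] exposes its items as `items`):
    ///
    /// ```ignore
    /// if let Some(outline) = snapshot.outline(None) {
    ///     for item in &outline.items {
    ///         println!("{}{}", "  ".repeat(item.depth), item.text);
    ///     }
    /// }
    /// ```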
2989 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2990 self.outline_items_containing(0..self.len(), true, theme)
2991 .map(Outline::new)
2992 }
2993
2994 /// Returns all the symbols that contain the given position.
2995 ///
2996 /// This method allows passing an optional [`SyntaxTheme`] to
2997 /// syntax-highlight the returned symbols.
2998 pub fn symbols_containing<T: ToOffset>(
2999 &self,
3000 position: T,
3001 theme: Option<&SyntaxTheme>,
3002 ) -> Option<Vec<OutlineItem<Anchor>>> {
3003 let position = position.to_offset(self);
3004 let mut items = self.outline_items_containing(
3005 position.saturating_sub(1)..self.len().min(position + 1),
3006 false,
3007 theme,
3008 )?;
3009 let mut prev_depth = None;
3010 items.retain(|item| {
3011 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3012 prev_depth = Some(item.depth);
3013 result
3014 });
3015 Some(items)
3016 }
3017
3018 pub fn outline_items_containing<T: ToOffset>(
3019 &self,
3020 range: Range<T>,
3021 include_extra_context: bool,
3022 theme: Option<&SyntaxTheme>,
3023 ) -> Option<Vec<OutlineItem<Anchor>>> {
3024 let range = range.to_offset(self);
3025 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3026 grammar.outline_config.as_ref().map(|c| &c.query)
3027 });
3028 let configs = matches
3029 .grammars()
3030 .iter()
3031 .map(|g| g.outline_config.as_ref().unwrap())
3032 .collect::<Vec<_>>();
3033
3034 let mut items = Vec::new();
3035 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3036 while let Some(mat) = matches.peek() {
3037 let config = &configs[mat.grammar_index];
3038 if let Some(item) =
3039 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3040 {
3041 items.push(item);
3042 } else if let Some(capture) = mat
3043 .captures
3044 .iter()
3045 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3046 {
3047 let capture_range = capture.node.start_position()..capture.node.end_position();
3048 let mut capture_row_range =
3049 capture_range.start.row as u32..capture_range.end.row as u32;
3050 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3051 {
3052 capture_row_range.end -= 1;
3053 }
3054 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3055 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3056 last_row_range.end = capture_row_range.end;
3057 } else {
3058 annotation_row_ranges.push(capture_row_range);
3059 }
3060 } else {
3061 annotation_row_ranges.push(capture_row_range);
3062 }
3063 }
3064 matches.advance();
3065 }
3066
3067 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3068
3069 // Assign depths based on containment relationships and convert to anchors.
3070 let mut item_ends_stack = Vec::<Point>::new();
3071 let mut anchor_items = Vec::new();
3072 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3073 for item in items {
3074 while let Some(last_end) = item_ends_stack.last().copied() {
3075 if last_end < item.range.end {
3076 item_ends_stack.pop();
3077 } else {
3078 break;
3079 }
3080 }
3081
3082 let mut annotation_row_range = None;
3083 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3084 let row_preceding_item = item.range.start.row.saturating_sub(1);
3085 if next_annotation_row_range.end < row_preceding_item {
3086 annotation_row_ranges.next();
3087 } else {
3088 if next_annotation_row_range.end == row_preceding_item {
3089 annotation_row_range = Some(next_annotation_row_range.clone());
3090 annotation_row_ranges.next();
3091 }
3092 break;
3093 }
3094 }
3095
3096 anchor_items.push(OutlineItem {
3097 depth: item_ends_stack.len(),
3098 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3099 text: item.text,
3100 highlight_ranges: item.highlight_ranges,
3101 name_ranges: item.name_ranges,
3102 body_range: item.body_range.map(|body_range| {
3103 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3104 }),
3105 annotation_range: annotation_row_range.map(|annotation_range| {
3106 self.anchor_after(Point::new(annotation_range.start, 0))
3107 ..self.anchor_before(Point::new(
3108 annotation_range.end,
3109 self.line_len(annotation_range.end),
3110 ))
3111 }),
3112 });
3113 item_ends_stack.push(item.range.end);
3114 }
3115
3116 Some(anchor_items)
3117 }
3118
3119 fn next_outline_item(
3120 &self,
3121 config: &OutlineConfig,
3122 mat: &SyntaxMapMatch,
3123 range: &Range<usize>,
3124 include_extra_context: bool,
3125 theme: Option<&SyntaxTheme>,
3126 ) -> Option<OutlineItem<Point>> {
3127 let item_node = mat.captures.iter().find_map(|cap| {
3128 if cap.index == config.item_capture_ix {
3129 Some(cap.node)
3130 } else {
3131 None
3132 }
3133 })?;
3134
3135 let item_byte_range = item_node.byte_range();
3136 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3137 return None;
3138 }
3139 let item_point_range = Point::from_ts_point(item_node.start_position())
3140 ..Point::from_ts_point(item_node.end_position());
3141
3142 let mut open_point = None;
3143 let mut close_point = None;
3144 let mut buffer_ranges = Vec::new();
3145 for capture in mat.captures {
3146 let node_is_name;
3147 if capture.index == config.name_capture_ix {
3148 node_is_name = true;
3149 } else if Some(capture.index) == config.context_capture_ix
3150 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3151 {
3152 node_is_name = false;
3153 } else {
3154 if Some(capture.index) == config.open_capture_ix {
3155 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3156 } else if Some(capture.index) == config.close_capture_ix {
3157 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3158 }
3159
3160 continue;
3161 }
3162
3163 let mut range = capture.node.start_byte()..capture.node.end_byte();
3164 let start = capture.node.start_position();
3165 if capture.node.end_position().row > start.row {
3166 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3167 }
3168
3169 if !range.is_empty() {
3170 buffer_ranges.push((range, node_is_name));
3171 }
3172 }
3173 if buffer_ranges.is_empty() {
3174 return None;
3175 }
3176 let mut text = String::new();
3177 let mut highlight_ranges = Vec::new();
3178 let mut name_ranges = Vec::new();
3179 let mut chunks = self.chunks(
3180 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3181 true,
3182 );
3183 let mut last_buffer_range_end = 0;
3184 for (buffer_range, is_name) in buffer_ranges {
3185 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
3186 text.push(' ');
3187 }
3188 last_buffer_range_end = buffer_range.end;
3189 if is_name {
3190 let mut start = text.len();
3191 let end = start + buffer_range.len();
3192
                // When multiple names are captured, the matchable text
                // includes the whitespace in between the names.
3195 if !name_ranges.is_empty() {
3196 start -= 1;
3197 }
3198
3199 name_ranges.push(start..end);
3200 }
3201
3202 let mut offset = buffer_range.start;
3203 chunks.seek(buffer_range.clone());
3204 for mut chunk in chunks.by_ref() {
3205 if chunk.text.len() > buffer_range.end - offset {
3206 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3207 offset = buffer_range.end;
3208 } else {
3209 offset += chunk.text.len();
3210 }
3211 let style = chunk
3212 .syntax_highlight_id
3213 .zip(theme)
3214 .and_then(|(highlight, theme)| highlight.style(theme));
3215 if let Some(style) = style {
3216 let start = text.len();
3217 let end = start + chunk.text.len();
3218 highlight_ranges.push((start..end, style));
3219 }
3220 text.push_str(chunk.text);
3221 if offset >= buffer_range.end {
3222 break;
3223 }
3224 }
3225 }
3226
3227 Some(OutlineItem {
3228 depth: 0, // We'll calculate the depth later
3229 range: item_point_range,
3230 text,
3231 highlight_ranges,
3232 name_ranges,
3233 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3234 annotation_range: None,
3235 })
3236 }
3237
3238 /// For each grammar in the language, runs the provided
3239 /// [`tree_sitter::Query`] against the given range.
3240 pub fn matches(
3241 &self,
3242 range: Range<usize>,
3243 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3244 ) -> SyntaxMapMatches {
3245 self.syntax.matches(range, self, query)
3246 }
3247
3248 /// Returns bracket range pairs overlapping or adjacent to `range`
3249 pub fn bracket_ranges<T: ToOffset>(
3250 &self,
3251 range: Range<T>,
3252 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
3253 // Find bracket pairs that *inclusively* contain the given range.
3254 let range = range.start.to_offset(self).saturating_sub(1)
3255 ..self.len().min(range.end.to_offset(self) + 1);
3256
3257 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3258 grammar.brackets_config.as_ref().map(|c| &c.query)
3259 });
3260 let configs = matches
3261 .grammars()
3262 .iter()
3263 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3264 .collect::<Vec<_>>();
3265
3266 iter::from_fn(move || {
3267 while let Some(mat) = matches.peek() {
3268 let mut open = None;
3269 let mut close = None;
3270 let config = &configs[mat.grammar_index];
3271 for capture in mat.captures {
3272 if capture.index == config.open_capture_ix {
3273 open = Some(capture.node.byte_range());
3274 } else if capture.index == config.close_capture_ix {
3275 close = Some(capture.node.byte_range());
3276 }
3277 }
3278
3279 matches.advance();
3280
3281 let Some((open, close)) = open.zip(close) else {
3282 continue;
3283 };
3284
3285 let bracket_range = open.start..=close.end;
3286 if !bracket_range.overlaps(&range) {
3287 continue;
3288 }
3289
3290 return Some((open, close));
3291 }
3292 None
3293 })
3294 }
3295
3296 /// Returns enclosing bracket ranges containing the given range
3297 pub fn enclosing_bracket_ranges<T: ToOffset>(
3298 &self,
3299 range: Range<T>,
3300 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
3301 let range = range.start.to_offset(self)..range.end.to_offset(self);
3302
3303 self.bracket_ranges(range.clone())
3304 .filter(move |(open, close)| open.start <= range.start && close.end >= range.end)
3305 }
3306
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket pairs are considered.
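    ///
    /// A minimal sketch (not compiled; assumes `snapshot: &BufferSnapshot` and a `cursor`
    /// offset; the filter shown is an arbitrary example):
    ///
    /// ```ignore
    /// let only_multibyte_pairs = |open: Range<usize>, close: Range<usize>| {
    ///     open.len() > 1 || close.len() > 1
    /// };
    /// if let Some((open, close)) =
    ///     snapshot.innermost_enclosing_bracket_ranges(cursor..cursor, Some(&only_multibyte_pairs))
    /// {
    ///     // `open` and `close` are the byte ranges of the innermost surrounding pair.
    /// }
    /// ```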
3310 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3311 &self,
3312 range: Range<T>,
3313 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3314 ) -> Option<(Range<usize>, Range<usize>)> {
3315 let range = range.start.to_offset(self)..range.end.to_offset(self);
3316
3317 // Get the ranges of the innermost pair of brackets.
3318 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3319
3320 for (open, close) in self.enclosing_bracket_ranges(range.clone()) {
3321 if let Some(range_filter) = range_filter {
3322 if !range_filter(open.clone(), close.clone()) {
3323 continue;
3324 }
3325 }
3326
3327 let len = close.end - open.start;
3328
3329 if let Some((existing_open, existing_close)) = &result {
3330 let existing_len = existing_close.end - existing_open.start;
3331 if len > existing_len {
3332 continue;
3333 }
3334 }
3335
3336 result = Some((open, close));
3337 }
3338
3339 result
3340 }
3341
3342 /// Returns anchor ranges for any matches of the redaction query.
3343 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3344 /// will be run on the relevant section of the buffer.
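    ///
    /// A minimal sketch (not compiled; assumes `snapshot: &BufferSnapshot`):
    ///
    /// ```ignore
    /// let redactions: Vec<Range<usize>> =
    ///     snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```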
3345 pub fn redacted_ranges<T: ToOffset>(
3346 &self,
3347 range: Range<T>,
3348 ) -> impl Iterator<Item = Range<usize>> + '_ {
3349 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3350 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3351 grammar
3352 .redactions_config
3353 .as_ref()
3354 .map(|config| &config.query)
3355 });
3356
3357 let configs = syntax_matches
3358 .grammars()
3359 .iter()
3360 .map(|grammar| grammar.redactions_config.as_ref())
3361 .collect::<Vec<_>>();
3362
3363 iter::from_fn(move || {
3364 let redacted_range = syntax_matches
3365 .peek()
3366 .and_then(|mat| {
3367 configs[mat.grammar_index].and_then(|config| {
3368 mat.captures
3369 .iter()
3370 .find(|capture| capture.index == config.redaction_capture_ix)
3371 })
3372 })
3373 .map(|mat| mat.node.byte_range());
3374 syntax_matches.advance();
3375 redacted_range
3376 })
3377 }
3378
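    /// Returns the ranges of any language injections (such as code blocks embedded in
    /// another language) that intersect the given range, along with the injected language.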
3379 pub fn injections_intersecting_range<T: ToOffset>(
3380 &self,
3381 range: Range<T>,
3382 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3383 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3384
3385 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3386 grammar
3387 .injection_config
3388 .as_ref()
3389 .map(|config| &config.query)
3390 });
3391
3392 let configs = syntax_matches
3393 .grammars()
3394 .iter()
3395 .map(|grammar| grammar.injection_config.as_ref())
3396 .collect::<Vec<_>>();
3397
3398 iter::from_fn(move || {
3399 let ranges = syntax_matches.peek().and_then(|mat| {
3400 let config = &configs[mat.grammar_index]?;
3401 let content_capture_range = mat.captures.iter().find_map(|capture| {
3402 if capture.index == config.content_capture_ix {
3403 Some(capture.node.byte_range())
3404 } else {
3405 None
3406 }
3407 })?;
3408 let language = self.language_at(content_capture_range.start)?;
3409 Some((content_capture_range, language))
3410 });
3411 syntax_matches.advance();
3412 ranges
3413 })
3414 }
3415
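    /// Returns the runnables (such as tests) whose ranges intersect the given range,
    /// as determined by each language's runnables query.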
3416 pub fn runnable_ranges(
3417 &self,
3418 range: Range<Anchor>,
3419 ) -> impl Iterator<Item = RunnableRange> + '_ {
3420 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3421
3422 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3423 grammar.runnable_config.as_ref().map(|config| &config.query)
3424 });
3425
3426 let test_configs = syntax_matches
3427 .grammars()
3428 .iter()
3429 .map(|grammar| grammar.runnable_config.as_ref())
3430 .collect::<Vec<_>>();
3431
3432 iter::from_fn(move || loop {
3433 let mat = syntax_matches.peek()?;
3434
3435 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
3436 let mut run_range = None;
3437 let full_range = mat.captures.iter().fold(
3438 Range {
3439 start: usize::MAX,
3440 end: 0,
3441 },
3442 |mut acc, next| {
3443 let byte_range = next.node.byte_range();
3444 if acc.start > byte_range.start {
3445 acc.start = byte_range.start;
3446 }
3447 if acc.end < byte_range.end {
3448 acc.end = byte_range.end;
3449 }
3450 acc
3451 },
3452 );
3453 if full_range.start > full_range.end {
3454 // We did not find a full spanning range of this match.
3455 return None;
3456 }
3457 let extra_captures: SmallVec<[_; 1]> =
3458 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
3459 test_configs
3460 .extra_captures
3461 .get(capture.index as usize)
3462 .cloned()
3463 .and_then(|tag_name| match tag_name {
3464 RunnableCapture::Named(name) => {
3465 Some((capture.node.byte_range(), name))
3466 }
3467 RunnableCapture::Run => {
3468 let _ = run_range.insert(capture.node.byte_range());
3469 None
3470 }
3471 })
3472 }));
3473 let run_range = run_range?;
3474 let tags = test_configs
3475 .query
3476 .property_settings(mat.pattern_index)
3477 .iter()
3478 .filter_map(|property| {
3479 if *property.key == *"tag" {
3480 property
3481 .value
3482 .as_ref()
3483 .map(|value| RunnableTag(value.to_string().into()))
3484 } else {
3485 None
3486 }
3487 })
3488 .collect();
3489 let extra_captures = extra_captures
3490 .into_iter()
3491 .map(|(range, name)| {
3492 (
3493 name.to_string(),
3494 self.text_for_range(range.clone()).collect::<String>(),
3495 )
3496 })
3497 .collect();
3498 // All tags should have the same range.
3499 Some(RunnableRange {
3500 run_range,
3501 full_range,
3502 runnable: Runnable {
3503 tags,
3504 language: mat.language,
3505 buffer: self.remote_id(),
3506 },
3507 extra_captures,
3508 buffer_id: self.remote_id(),
3509 })
3510 });
3511
3512 syntax_matches.advance();
3513 if test_range.is_some() {
                // It's fine to short-circuit when `.peek()?` returns `None`. We just don't want
                // this iterator to end because a match lacked a run marker, so in that case we
                // loop around and try the next match.
3516 return test_range;
3517 }
3518 })
3519 }
3520
3521 pub fn indent_guides_in_range(
3522 &self,
3523 range: Range<Anchor>,
3524 ignore_disabled_for_language: bool,
3525 cx: &AppContext,
3526 ) -> Vec<IndentGuide> {
3527 let language_settings =
3528 language_settings(self.language().map(|l| l.name()), self.file.as_ref(), cx);
3529 let settings = language_settings.indent_guides;
3530 if !ignore_disabled_for_language && !settings.enabled {
3531 return Vec::new();
3532 }
3533 let tab_size = language_settings.tab_size.get() as u32;
3534
3535 let start_row = range.start.to_point(self).row;
3536 let end_row = range.end.to_point(self).row;
3537 let row_range = start_row..end_row + 1;
3538
3539 let mut row_indents = self.line_indents_in_row_range(row_range.clone());
3540
3541 let mut result_vec = Vec::new();
3542 let mut indent_stack = SmallVec::<[IndentGuide; 8]>::new();
3543
3544 while let Some((first_row, mut line_indent)) = row_indents.next() {
3545 let current_depth = indent_stack.len() as u32;
3546
            // When encountering an empty line, continue until a useful line indent is
            // found, then add it to the indent stack with the depth found.
3549 let mut found_indent = false;
3550 let mut last_row = first_row;
3551 if line_indent.is_line_empty() {
3552 let mut trailing_row = end_row;
3553 while !found_indent {
3554 let (target_row, new_line_indent) =
3555 if let Some(display_row) = row_indents.next() {
3556 display_row
3557 } else {
3558 // This means we reached the end of the given range and found empty lines at the end.
3559 // We need to traverse further until we find a non-empty line to know if we need to add
3560 // an indent guide for the last visible indent.
3561 trailing_row += 1;
3562
3563 const TRAILING_ROW_SEARCH_LIMIT: u32 = 25;
3564 if trailing_row > self.max_point().row
3565 || trailing_row > end_row + TRAILING_ROW_SEARCH_LIMIT
3566 {
3567 break;
3568 }
3569 let new_line_indent = self.line_indent_for_row(trailing_row);
3570 (trailing_row, new_line_indent)
3571 };
3572
3573 if new_line_indent.is_line_empty() {
3574 continue;
3575 }
3576 last_row = target_row.min(end_row);
3577 line_indent = new_line_indent;
3578 found_indent = true;
3579 break;
3580 }
3581 } else {
3582 found_indent = true
3583 }
3584
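            // The guide depth is the indent width divided by the tab size, rounded up.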
3585 let depth = if found_indent {
3586 line_indent.len(tab_size) / tab_size
3587 + ((line_indent.len(tab_size) % tab_size) > 0) as u32
3588 } else {
3589 current_depth
3590 };
3591
            match depth.cmp(&current_depth) {
3593 Ordering::Less => {
3594 for _ in 0..(current_depth - depth) {
3595 let mut indent = indent_stack.pop().unwrap();
3596 if last_row != first_row {
                            // In this case, we landed on an empty row, had to seek
                            // forward, and discovered that the indent we were on is
                            // ending. This means that the last display row must be on
                            // the line that ends this indent range, so we should display
                            // the range up to the first non-empty line.
3602 indent.end_row = first_row.saturating_sub(1);
3603 }
3604
3605 result_vec.push(indent)
3606 }
3607 }
3608 Ordering::Greater => {
3609 for next_depth in current_depth..depth {
3610 indent_stack.push(IndentGuide {
3611 buffer_id: self.remote_id(),
3612 start_row: first_row,
3613 end_row: last_row,
3614 depth: next_depth,
3615 tab_size,
3616 settings,
3617 });
3618 }
3619 }
3620 _ => {}
3621 }
3622
3623 for indent in indent_stack.iter_mut() {
3624 indent.end_row = last_row;
3625 }
3626 }
3627
3628 result_vec.extend(indent_stack);
3629
3630 result_vec
3631 }
3632
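    /// Returns the row range and indentation of the innermost block that encloses the
    /// given row, searching outward for the nearest non-empty lines with a smaller
    /// indent. Yields to the executor periodically, and returns `None` if no enclosing
    /// block is found within the search limits.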
3633 pub async fn enclosing_indent(
3634 &self,
3635 mut buffer_row: BufferRow,
3636 ) -> Option<(Range<BufferRow>, LineIndent)> {
3637 let max_row = self.max_point().row;
3638 if buffer_row >= max_row {
3639 return None;
3640 }
3641
3642 let mut target_indent = self.line_indent_for_row(buffer_row);
3643
3644 // If the current row is at the start of an indented block, we want to return this
3645 // block as the enclosing indent.
3646 if !target_indent.is_line_empty() && buffer_row < max_row {
3647 let next_line_indent = self.line_indent_for_row(buffer_row + 1);
3648 if !next_line_indent.is_line_empty()
3649 && target_indent.raw_len() < next_line_indent.raw_len()
3650 {
3651 target_indent = next_line_indent;
3652 buffer_row += 1;
3653 }
3654 }
3655
3656 const SEARCH_ROW_LIMIT: u32 = 25000;
3657 const SEARCH_WHITESPACE_ROW_LIMIT: u32 = 2500;
3658 const YIELD_INTERVAL: u32 = 100;
3659
3660 let mut accessed_row_counter = 0;
3661
        // If the line at the current row is blank, search for the nearest non-empty
        // lines above and below it.
3663 if target_indent.is_line_empty() {
3664 let start = buffer_row.saturating_sub(SEARCH_WHITESPACE_ROW_LIMIT);
3665 let end = (max_row + 1).min(buffer_row + SEARCH_WHITESPACE_ROW_LIMIT);
3666
3667 let mut non_empty_line_above = None;
3668 for (row, indent) in self
3669 .text
3670 .reversed_line_indents_in_row_range(start..buffer_row)
3671 {
3672 accessed_row_counter += 1;
3673 if accessed_row_counter == YIELD_INTERVAL {
3674 accessed_row_counter = 0;
3675 yield_now().await;
3676 }
3677 if !indent.is_line_empty() {
3678 non_empty_line_above = Some((row, indent));
3679 break;
3680 }
3681 }
3682
3683 let mut non_empty_line_below = None;
3684 for (row, indent) in self.text.line_indents_in_row_range((buffer_row + 1)..end) {
3685 accessed_row_counter += 1;
3686 if accessed_row_counter == YIELD_INTERVAL {
3687 accessed_row_counter = 0;
3688 yield_now().await;
3689 }
3690 if !indent.is_line_empty() {
3691 non_empty_line_below = Some((row, indent));
3692 break;
3693 }
3694 }
3695
3696 let (row, indent) = match (non_empty_line_above, non_empty_line_below) {
3697 (Some((above_row, above_indent)), Some((below_row, below_indent))) => {
3698 if above_indent.raw_len() >= below_indent.raw_len() {
3699 (above_row, above_indent)
3700 } else {
3701 (below_row, below_indent)
3702 }
3703 }
3704 (Some(above), None) => above,
3705 (None, Some(below)) => below,
3706 _ => return None,
3707 };
3708
3709 target_indent = indent;
3710 buffer_row = row;
3711 }
3712
3713 let start = buffer_row.saturating_sub(SEARCH_ROW_LIMIT);
3714 let end = (max_row + 1).min(buffer_row + SEARCH_ROW_LIMIT);
3715
3716 let mut start_indent = None;
3717 for (row, indent) in self
3718 .text
3719 .reversed_line_indents_in_row_range(start..buffer_row)
3720 {
3721 accessed_row_counter += 1;
3722 if accessed_row_counter == YIELD_INTERVAL {
3723 accessed_row_counter = 0;
3724 yield_now().await;
3725 }
3726 if !indent.is_line_empty() && indent.raw_len() < target_indent.raw_len() {
3727 start_indent = Some((row, indent));
3728 break;
3729 }
3730 }
3731 let (start_row, start_indent_size) = start_indent?;
3732
3733 let mut end_indent = (end, None);
3734 for (row, indent) in self.text.line_indents_in_row_range((buffer_row + 1)..end) {
3735 accessed_row_counter += 1;
3736 if accessed_row_counter == YIELD_INTERVAL {
3737 accessed_row_counter = 0;
3738 yield_now().await;
3739 }
3740 if !indent.is_line_empty() && indent.raw_len() < target_indent.raw_len() {
3741 end_indent = (row.saturating_sub(1), Some(indent));
3742 break;
3743 }
3744 }
3745 let (end_row, end_indent_size) = end_indent;
3746
3747 let indent = if let Some(end_indent_size) = end_indent_size {
3748 if start_indent_size.raw_len() > end_indent_size.raw_len() {
3749 start_indent_size
3750 } else {
3751 end_indent_size
3752 }
3753 } else {
3754 start_indent_size
3755 };
3756
3757 Some((start_row..end_row, indent))
3758 }
3759
    /// Returns the selection sets of remote peers (and, if `include_local` is true,
    /// of the local replica) that intersect the given range.
3761 #[allow(clippy::type_complexity)]
3762 pub fn selections_in_range(
3763 &self,
3764 range: Range<Anchor>,
3765 include_local: bool,
3766 ) -> impl Iterator<
3767 Item = (
3768 ReplicaId,
3769 bool,
3770 CursorShape,
3771 impl Iterator<Item = &Selection<Anchor>> + '_,
3772 ),
3773 > + '_ {
3774 self.remote_selections
3775 .iter()
3776 .filter(move |(replica_id, set)| {
3777 (include_local || **replica_id != self.text.replica_id())
3778 && !set.selections.is_empty()
3779 })
3780 .map(move |(replica_id, set)| {
3781 let start_ix = match set.selections.binary_search_by(|probe| {
3782 probe.end.cmp(&range.start, self).then(Ordering::Greater)
3783 }) {
3784 Ok(ix) | Err(ix) => ix,
3785 };
3786 let end_ix = match set.selections.binary_search_by(|probe| {
3787 probe.start.cmp(&range.end, self).then(Ordering::Less)
3788 }) {
3789 Ok(ix) | Err(ix) => ix,
3790 };
3791
3792 (
3793 *replica_id,
3794 set.line_mode,
3795 set.cursor_shape,
3796 set.selections[start_ix..end_ix].iter(),
3797 )
3798 })
3799 }
3800
3801 /// Whether the buffer contains any Git changes.
3802 pub fn has_git_diff(&self) -> bool {
3803 !self.git_diff.is_empty()
3804 }
3805
3806 /// Returns all the Git diff hunks intersecting the given row range.
3807 #[cfg(any(test, feature = "test-support"))]
3808 pub fn git_diff_hunks_in_row_range(
3809 &self,
3810 range: Range<BufferRow>,
3811 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk> {
3812 self.git_diff.hunks_in_row_range(range, self)
3813 }
3814
3815 /// Returns all the Git diff hunks intersecting the given
3816 /// range.
3817 pub fn git_diff_hunks_intersecting_range(
3818 &self,
3819 range: Range<Anchor>,
3820 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk> {
3821 self.git_diff.hunks_intersecting_range(range, self)
3822 }
3823
3824 /// Returns all the Git diff hunks intersecting the given
3825 /// range, in reverse order.
3826 pub fn git_diff_hunks_intersecting_range_rev(
3827 &self,
3828 range: Range<Anchor>,
3829 ) -> impl '_ + Iterator<Item = git::diff::DiffHunk> {
3830 self.git_diff.hunks_intersecting_range_rev(range, self)
3831 }
3832
    /// Whether the buffer contains any diagnostics.
3834 pub fn has_diagnostics(&self) -> bool {
3835 !self.diagnostics.is_empty()
3836 }
3837
3838 /// Returns all the diagnostics intersecting the given range.
3839 pub fn diagnostics_in_range<'a, T, O>(
3840 &'a self,
3841 search_range: Range<T>,
3842 reversed: bool,
3843 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3844 where
3845 T: 'a + Clone + ToOffset,
3846 O: 'a + FromAnchor + Ord,
3847 {
3848 let mut iterators: Vec<_> = self
3849 .diagnostics
3850 .iter()
3851 .map(|(_, collection)| {
3852 collection
3853 .range::<T, O>(search_range.clone(), self, true, reversed)
3854 .peekable()
3855 })
3856 .collect();
3857
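        // Merge the per-language-server iterators, always yielding the entry that sorts
        // first by range start, then by severity, then by group id (or last, when
        // iterating in reverse).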
3858 std::iter::from_fn(move || {
3859 let (next_ix, _) = iterators
3860 .iter_mut()
3861 .enumerate()
3862 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
3863 .min_by(|(_, a), (_, b)| {
3864 let cmp = a
3865 .range
3866 .start
3867 .cmp(&b.range.start)
3868 // when range is equal, sort by diagnostic severity
3869 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
3870 // and stabilize order with group_id
3871 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
3872 if reversed {
3873 cmp.reverse()
3874 } else {
3875 cmp
3876 }
3877 })?;
3878 iterators[next_ix].next()
3879 })
3880 }
3881
3882 /// Returns all the diagnostic groups associated with the given
3883 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
3885 pub fn diagnostic_groups(
3886 &self,
3887 language_server_id: Option<LanguageServerId>,
3888 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
3889 let mut groups = Vec::new();
3890
3891 if let Some(language_server_id) = language_server_id {
3892 if let Ok(ix) = self
3893 .diagnostics
3894 .binary_search_by_key(&language_server_id, |e| e.0)
3895 {
3896 self.diagnostics[ix]
3897 .1
3898 .groups(language_server_id, &mut groups, self);
3899 }
3900 } else {
3901 for (language_server_id, diagnostics) in self.diagnostics.iter() {
3902 diagnostics.groups(*language_server_id, &mut groups, self);
3903 }
3904 }
3905
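        // Sort the groups by the position of their primary diagnostic, breaking ties by
        // language server id.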
3906 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
3907 let a_start = &group_a.entries[group_a.primary_ix].range.start;
3908 let b_start = &group_b.entries[group_b.primary_ix].range.start;
3909 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
3910 });
3911
3912 groups
3913 }
3914
3915 /// Returns an iterator over the diagnostics for the given group.
3916 pub fn diagnostic_group<'a, O>(
3917 &'a self,
3918 group_id: usize,
3919 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3920 where
3921 O: 'a + FromAnchor,
3922 {
3923 self.diagnostics
3924 .iter()
3925 .flat_map(move |(_, set)| set.group(group_id, self))
3926 }
3927
3928 /// An integer version number that accounts for all updates besides
3929 /// the buffer's text itself (which is versioned via a version vector).
3930 pub fn non_text_state_update_count(&self) -> usize {
3931 self.non_text_state_update_count
3932 }
3933
    /// Returns a snapshot of the underlying file.
3935 pub fn file(&self) -> Option<&Arc<dyn File>> {
3936 self.file.as_ref()
3937 }
3938
3939 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
3940 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
3941 if let Some(file) = self.file() {
3942 if file.path().file_name().is_none() || include_root {
3943 Some(file.full_path(cx))
3944 } else {
3945 Some(file.path().to_path_buf())
3946 }
3947 } else {
3948 None
3949 }
3950 }
3951}
3952
3953fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
3954 indent_size_for_text(text.chars_at(Point::new(row, 0)))
3955}
3956
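/// Computes the indentation of a line from its leading whitespace. The kind (spaces vs.
/// tabs) is taken from the first whitespace character, and every leading space or tab
/// contributes to the length.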
3957fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
3958 let mut result = IndentSize::spaces(0);
3959 for c in text {
3960 let kind = match c {
3961 ' ' => IndentKind::Space,
3962 '\t' => IndentKind::Tab,
3963 _ => break,
3964 };
3965 if result.len == 0 {
3966 result.kind = kind;
3967 }
3968 result.len += 1;
3969 }
3970 result
3971}
3972
3973impl Clone for BufferSnapshot {
3974 fn clone(&self) -> Self {
3975 Self {
3976 text: self.text.clone(),
3977 git_diff: self.git_diff.clone(),
3978 syntax: self.syntax.clone(),
3979 file: self.file.clone(),
3980 remote_selections: self.remote_selections.clone(),
3981 diagnostics: self.diagnostics.clone(),
3982 language: self.language.clone(),
3983 non_text_state_update_count: self.non_text_state_update_count,
3984 }
3985 }
3986}
3987
3988impl Deref for BufferSnapshot {
3989 type Target = text::BufferSnapshot;
3990
3991 fn deref(&self) -> &Self::Target {
3992 &self.text
3993 }
3994}
3995
3996unsafe impl<'a> Send for BufferChunks<'a> {}
3997
3998impl<'a> BufferChunks<'a> {
3999 pub(crate) fn new(
4000 text: &'a Rope,
4001 range: Range<usize>,
4002 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4003 diagnostics: bool,
4004 buffer_snapshot: Option<&'a BufferSnapshot>,
4005 ) -> Self {
4006 let mut highlights = None;
4007 if let Some((captures, highlight_maps)) = syntax {
4008 highlights = Some(BufferChunkHighlights {
4009 captures,
4010 next_capture: None,
4011 stack: Default::default(),
4012 highlight_maps,
4013 })
4014 }
4015
4016 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4017 let chunks = text.chunks_in_range(range.clone());
4018
4019 let mut this = BufferChunks {
4020 range,
4021 buffer_snapshot,
4022 chunks,
4023 diagnostic_endpoints,
4024 error_depth: 0,
4025 warning_depth: 0,
4026 information_depth: 0,
4027 hint_depth: 0,
4028 unnecessary_depth: 0,
4029 highlights,
4030 };
4031 this.initialize_diagnostic_endpoints();
4032 this
4033 }
4034
    /// Seeks to the given byte range in the buffer.
4036 pub fn seek(&mut self, range: Range<usize>) {
4037 let old_range = std::mem::replace(&mut self.range, range.clone());
4038 self.chunks.set_range(self.range.clone());
4039 if let Some(highlights) = self.highlights.as_mut() {
4040 if old_range.start >= self.range.start && old_range.end <= self.range.end {
4041 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4042 highlights
4043 .stack
4044 .retain(|(end_offset, _)| *end_offset > range.start);
4045 if let Some(capture) = &highlights.next_capture {
4046 if range.start >= capture.node.start_byte() {
4047 let next_capture_end = capture.node.end_byte();
4048 if range.start < next_capture_end {
4049 highlights.stack.push((
4050 next_capture_end,
4051 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4052 ));
4053 }
4054 highlights.next_capture.take();
4055 }
4056 }
4057 } else if let Some(snapshot) = self.buffer_snapshot {
4058 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4059 *highlights = BufferChunkHighlights {
4060 captures,
4061 next_capture: None,
4062 stack: Default::default(),
4063 highlight_maps,
4064 };
4065 } else {
4066 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4067 // Seeking such BufferChunks is not supported.
4068 debug_assert!(false, "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot");
4069 }
4070
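            // Re-scope the syntax captures to the new byte range and rebuild the
            // diagnostic endpoints for it.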
4071 highlights.captures.set_byte_range(self.range.clone());
4072 self.initialize_diagnostic_endpoints();
4073 }
4074 }
4075
4076 fn initialize_diagnostic_endpoints(&mut self) {
4077 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4078 if let Some(buffer) = self.buffer_snapshot {
4079 let mut diagnostic_endpoints = Vec::new();
4080 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4081 diagnostic_endpoints.push(DiagnosticEndpoint {
4082 offset: entry.range.start,
4083 is_start: true,
4084 severity: entry.diagnostic.severity,
4085 is_unnecessary: entry.diagnostic.is_unnecessary,
4086 });
4087 diagnostic_endpoints.push(DiagnosticEndpoint {
4088 offset: entry.range.end,
4089 is_start: false,
4090 severity: entry.diagnostic.severity,
4091 is_unnecessary: entry.diagnostic.is_unnecessary,
4092 });
4093 }
4094 diagnostic_endpoints
4095 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4096 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4097 }
4098 }
4099 }
4100
4101 /// The current byte offset in the buffer.
4102 pub fn offset(&self) -> usize {
4103 self.range.start
4104 }
4105
4106 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4107 let depth = match endpoint.severity {
4108 DiagnosticSeverity::ERROR => &mut self.error_depth,
4109 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4110 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4111 DiagnosticSeverity::HINT => &mut self.hint_depth,
4112 _ => return,
4113 };
4114 if endpoint.is_start {
4115 *depth += 1;
4116 } else {
4117 *depth -= 1;
4118 }
4119
4120 if endpoint.is_unnecessary {
4121 if endpoint.is_start {
4122 self.unnecessary_depth += 1;
4123 } else {
4124 self.unnecessary_depth -= 1;
4125 }
4126 }
4127 }
4128
4129 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4130 if self.error_depth > 0 {
4131 Some(DiagnosticSeverity::ERROR)
4132 } else if self.warning_depth > 0 {
4133 Some(DiagnosticSeverity::WARNING)
4134 } else if self.information_depth > 0 {
4135 Some(DiagnosticSeverity::INFORMATION)
4136 } else if self.hint_depth > 0 {
4137 Some(DiagnosticSeverity::HINT)
4138 } else {
4139 None
4140 }
4141 }
4142
4143 fn current_code_is_unnecessary(&self) -> bool {
4144 self.unnecessary_depth > 0
4145 }
4146}
4147
4148impl<'a> Iterator for BufferChunks<'a> {
4149 type Item = Chunk<'a>;
4150
4151 fn next(&mut self) -> Option<Self::Item> {
4152 let mut next_capture_start = usize::MAX;
4153 let mut next_diagnostic_endpoint = usize::MAX;
4154
4155 if let Some(highlights) = self.highlights.as_mut() {
4156 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4157 if *parent_capture_end <= self.range.start {
4158 highlights.stack.pop();
4159 } else {
4160 break;
4161 }
4162 }
4163
4164 if highlights.next_capture.is_none() {
4165 highlights.next_capture = highlights.captures.next();
4166 }
4167
4168 while let Some(capture) = highlights.next_capture.as_ref() {
4169 if self.range.start < capture.node.start_byte() {
4170 next_capture_start = capture.node.start_byte();
4171 break;
4172 } else {
4173 let highlight_id =
4174 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4175 highlights
4176 .stack
4177 .push((capture.node.end_byte(), highlight_id));
4178 highlights.next_capture = highlights.captures.next();
4179 }
4180 }
4181 }
4182
4183 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4184 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4185 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4186 if endpoint.offset <= self.range.start {
4187 self.update_diagnostic_depths(endpoint);
4188 diagnostic_endpoints.next();
4189 } else {
4190 next_diagnostic_endpoint = endpoint.offset;
4191 break;
4192 }
4193 }
4194 }
4195 self.diagnostic_endpoints = diagnostic_endpoints;
4196
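        // Emit text up to the nearest of: the end of the current rope chunk, the start
        // of the next syntax capture, the next diagnostic endpoint, or the end of the
        // innermost enclosing capture.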
4197 if let Some(chunk) = self.chunks.peek() {
4198 let chunk_start = self.range.start;
4199 let mut chunk_end = (self.chunks.offset() + chunk.len())
4200 .min(next_capture_start)
4201 .min(next_diagnostic_endpoint);
4202 let mut highlight_id = None;
4203 if let Some(highlights) = self.highlights.as_ref() {
4204 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4205 chunk_end = chunk_end.min(*parent_capture_end);
4206 highlight_id = Some(*parent_highlight_id);
4207 }
4208 }
4209
4210 let slice =
4211 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4212 self.range.start = chunk_end;
4213 if self.range.start == self.chunks.offset() + chunk.len() {
4214 self.chunks.next().unwrap();
4215 }
4216 Some(Chunk {
4217 text: slice,
4218 syntax_highlight_id: highlight_id,
4219 diagnostic_severity: self.current_diagnostic_severity(),
4220 is_unnecessary: self.current_code_is_unnecessary(),
4221 ..Default::default()
4222 })
4223 } else {
4224 None
4225 }
4226 }
4227}
4228
4229impl operation_queue::Operation for Operation {
4230 fn lamport_timestamp(&self) -> clock::Lamport {
4231 match self {
4232 Operation::Buffer(_) => {
4233 unreachable!("buffer operations should never be deferred at this layer")
4234 }
4235 Operation::UpdateDiagnostics {
4236 lamport_timestamp, ..
4237 }
4238 | Operation::UpdateSelections {
4239 lamport_timestamp, ..
4240 }
4241 | Operation::UpdateCompletionTriggers {
4242 lamport_timestamp, ..
4243 } => *lamport_timestamp,
4244 }
4245 }
4246}
4247
4248impl Default for Diagnostic {
4249 fn default() -> Self {
4250 Self {
4251 source: Default::default(),
4252 code: None,
4253 severity: DiagnosticSeverity::ERROR,
4254 message: Default::default(),
4255 group_id: 0,
4256 is_primary: false,
4257 is_disk_based: false,
4258 is_unnecessary: false,
4259 data: None,
4260 }
4261 }
4262}
4263
4264impl IndentSize {
4265 /// Returns an [`IndentSize`] representing the given spaces.
4266 pub fn spaces(len: u32) -> Self {
4267 Self {
4268 len,
4269 kind: IndentKind::Space,
4270 }
4271 }
4272
4273 /// Returns an [`IndentSize`] representing a tab.
4274 pub fn tab() -> Self {
4275 Self {
4276 len: 1,
4277 kind: IndentKind::Tab,
4278 }
4279 }
4280
4281 /// An iterator over the characters represented by this [`IndentSize`].
4282 pub fn chars(&self) -> impl Iterator<Item = char> {
4283 iter::repeat(self.char()).take(self.len as usize)
4284 }
4285
4286 /// The character representation of this [`IndentSize`].
4287 pub fn char(&self) -> char {
4288 match self.kind {
4289 IndentKind::Space => ' ',
4290 IndentKind::Tab => '\t',
4291 }
4292 }
4293
4294 /// Consumes the current [`IndentSize`] and returns a new one that has
4295 /// been shrunk or enlarged by the given size along the given direction.
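    ///
    /// A minimal illustrative sketch of the expected behavior (not a doctest):
    ///
    /// ```ignore
    /// // Growing a four-space indent by another four spaces yields an eight-space indent.
    /// assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4)).len, 8);
    /// // Shrinking a four-space indent by two spaces yields a two-space indent.
    /// assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(2)).len, 2);
    /// ```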
4296 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4297 match direction {
4298 Ordering::Less => {
4299 if self.kind == size.kind && self.len >= size.len {
4300 self.len -= size.len;
4301 }
4302 }
4303 Ordering::Equal => {}
4304 Ordering::Greater => {
4305 if self.len == 0 {
4306 self = size;
4307 } else if self.kind == size.kind {
4308 self.len += size.len;
4309 }
4310 }
4311 }
4312 self
4313 }
4314}
4315
4316#[cfg(any(test, feature = "test-support"))]
4317pub struct TestFile {
4318 pub path: Arc<Path>,
4319 pub root_name: String,
4320}
4321
4322#[cfg(any(test, feature = "test-support"))]
4323impl File for TestFile {
4324 fn path(&self) -> &Arc<Path> {
4325 &self.path
4326 }
4327
4328 fn full_path(&self, _: &gpui::AppContext) -> PathBuf {
4329 PathBuf::from(&self.root_name).join(self.path.as_ref())
4330 }
4331
4332 fn as_local(&self) -> Option<&dyn LocalFile> {
4333 None
4334 }
4335
4336 fn mtime(&self) -> Option<SystemTime> {
4337 unimplemented!()
4338 }
4339
4340 fn file_name<'a>(&'a self, _: &'a gpui::AppContext) -> &'a std::ffi::OsStr {
4341 self.path().file_name().unwrap_or(self.root_name.as_ref())
4342 }
4343
4344 fn worktree_id(&self, _: &AppContext) -> WorktreeId {
4345 WorktreeId::from_usize(0)
4346 }
4347
4348 fn is_deleted(&self) -> bool {
4349 unimplemented!()
4350 }
4351
4352 fn as_any(&self) -> &dyn std::any::Any {
4353 unimplemented!()
4354 }
4355
4356 fn to_proto(&self, _: &AppContext) -> rpc::proto::File {
4357 unimplemented!()
4358 }
4359
4360 fn is_private(&self) -> bool {
4361 false
4362 }
4363}
4364
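/// Groups a sequence of row numbers into contiguous ranges, starting a new range whenever
/// a value is not adjacent to the previous one or the current range reaches `max_len`.
///
/// A minimal illustrative sketch (not a doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7, 9..10]);
/// ```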
4365pub(crate) fn contiguous_ranges(
4366 values: impl Iterator<Item = u32>,
4367 max_len: usize,
4368) -> impl Iterator<Item = Range<u32>> {
4369 let mut values = values;
4370 let mut current_range: Option<Range<u32>> = None;
4371 std::iter::from_fn(move || loop {
4372 if let Some(value) = values.next() {
4373 if let Some(range) = &mut current_range {
4374 if value == range.end && range.len() < max_len {
4375 range.end += 1;
4376 continue;
4377 }
4378 }
4379
4380 let prev_range = current_range.clone();
4381 current_range = Some(value..(value + 1));
4382 if prev_range.is_some() {
4383 return prev_range;
4384 }
4385 } else {
4386 return current_range.take();
4387 }
4388 })
4389}
4390
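/// Classifies characters as word, whitespace, or punctuation, optionally taking a
/// language scope's extra word characters into account.
///
/// A minimal illustrative sketch (not a doctest):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// assert_eq!(classifier.kind('-'), CharKind::Punctuation);
/// ```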
4391#[derive(Default, Debug)]
4392pub struct CharClassifier {
4393 scope: Option<LanguageScope>,
4394 for_completion: bool,
4395 ignore_punctuation: bool,
4396}
4397
4398impl CharClassifier {
4399 pub fn new(scope: Option<LanguageScope>) -> Self {
4400 Self {
4401 scope,
4402 for_completion: false,
4403 ignore_punctuation: false,
4404 }
4405 }
4406
4407 pub fn for_completion(self, for_completion: bool) -> Self {
4408 Self {
4409 for_completion,
4410 ..self
4411 }
4412 }
4413
4414 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4415 Self {
4416 ignore_punctuation,
4417 ..self
4418 }
4419 }
4420
4421 pub fn is_whitespace(&self, c: char) -> bool {
4422 self.kind(c) == CharKind::Whitespace
4423 }
4424
4425 pub fn is_word(&self, c: char) -> bool {
4426 self.kind(c) == CharKind::Word
4427 }
4428
4429 pub fn is_punctuation(&self, c: char) -> bool {
4430 self.kind(c) == CharKind::Punctuation
4431 }
4432
4433 pub fn kind(&self, c: char) -> CharKind {
4434 if c.is_whitespace() {
4435 return CharKind::Whitespace;
4436 } else if c.is_alphanumeric() || c == '_' {
4437 return CharKind::Word;
4438 }
4439
4440 if let Some(scope) = &self.scope {
4441 if let Some(characters) = scope.word_characters() {
4442 if characters.contains(&c) {
4443 if c == '-' && !self.for_completion && !self.ignore_punctuation {
4444 return CharKind::Punctuation;
4445 }
4446 return CharKind::Word;
4447 }
4448 }
4449 }
4450
4451 if self.ignore_punctuation {
4452 CharKind::Word
4453 } else {
4454 CharKind::Punctuation
4455 }
4456 }
4457}
4458
4459/// Find all of the ranges of whitespace that occur at the ends of lines
4460/// in the given rope.
4461///
4462/// This could also be done with a regex search, but this implementation
4463/// avoids copying text.
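///
/// A minimal illustrative sketch (not a doctest), assuming a `Rope` built from a string:
///
/// ```ignore
/// let rope = Rope::from("a  \nb\t\nc");
/// // 1..3 covers the two spaces after "a"; 5..6 covers the tab after "b".
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![1..3, 5..6]);
/// ```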
4464pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4465 let mut ranges = Vec::new();
4466
4467 let mut offset = 0;
4468 let mut prev_chunk_trailing_whitespace_range = 0..0;
4469 for chunk in rope.chunks() {
4470 let mut prev_line_trailing_whitespace_range = 0..0;
4471 for (i, line) in chunk.split('\n').enumerate() {
4472 let line_end_offset = offset + line.len();
4473 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4474 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4475
4476 if i == 0 && trimmed_line_len == 0 {
4477 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4478 }
4479 if !prev_line_trailing_whitespace_range.is_empty() {
4480 ranges.push(prev_line_trailing_whitespace_range);
4481 }
4482
4483 offset = line_end_offset + 1;
4484 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4485 }
4486
4487 offset -= 1;
4488 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4489 }
4490
4491 if !prev_chunk_trailing_whitespace_range.is_empty() {
4492 ranges.push(prev_chunk_trailing_whitespace_range);
4493 }
4494
4495 ranges
4496}