1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 proto, Grammar, Language, LanguageRegistry,
5};
6use crate::{
7 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
8 language_settings::{language_settings, LanguageSettings},
9 outline::OutlineItem,
10 syntax_map::{
11 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
12 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, TextObject,
17 TreeSitterOptions,
18};
19use anyhow::{anyhow, Context as _, Result};
20use async_watch as watch;
21use clock::Lamport;
22pub use clock::ReplicaId;
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 AnyElement, App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, Pixels,
28 SharedString, StyledText, Task, TaskLabel, TextStyle, Window,
29};
30use lsp::{LanguageServerId, NumberOrString};
31use parking_lot::Mutex;
32use schemars::JsonSchema;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 ffi::OsStr,
45 fmt,
46 future::Future,
47 iter::{self, Iterator, Peekable},
48 mem,
49 num::NonZeroU32,
50 ops::{Deref, DerefMut, Range},
51 path::{Path, PathBuf},
52 str,
53 sync::{Arc, LazyLock},
54 time::{Duration, Instant},
55 vec,
56};
57use sum_tree::TreeMap;
58use text::operation_queue::OperationQueue;
59use text::*;
60pub use text::{
61 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
62 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
63 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
64 Transaction, TransactionId, Unclipped,
65};
66use theme::{ActiveTheme as _, SyntaxTheme};
67#[cfg(any(test, feature = "test-support"))]
68use util::RandomCharIter;
69use util::{debug_panic, maybe, RangeExt};
70
71#[cfg(any(test, feature = "test-support"))]
72pub use {tree_sitter_rust, tree_sitter_typescript};
73
74pub use lsp::DiagnosticSeverity;
75
76/// A label for the background task spawned by the buffer to compute
77/// a diff against the contents of its file.
78pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
79
80/// Indicate whether a [`Buffer`] has permissions to edit.
81#[derive(PartialEq, Clone, Copy, Debug)]
82pub enum Capability {
83 /// The buffer is a mutable replica.
84 ReadWrite,
85 /// The buffer is a read-only replica.
86 ReadOnly,
87}
88
89pub type BufferRow = u32;
90
91/// An in-memory representation of a source code file, including its text,
92/// syntax trees, git status, and diagnostics.
93pub struct Buffer {
94 text: TextBuffer,
95 branch_state: Option<BufferBranchState>,
96 /// Filesystem state, `None` when there is no path.
97 file: Option<Arc<dyn File>>,
98 /// The mtime of the file when this buffer was last loaded from
99 /// or saved to disk.
100 saved_mtime: Option<MTime>,
101 /// The version vector when this buffer was last loaded from
102 /// or saved to disk.
103 saved_version: clock::Global,
104 preview_version: clock::Global,
105 transaction_depth: usize,
106 was_dirty_before_starting_transaction: Option<bool>,
107 reload_task: Option<Task<Result<()>>>,
108 language: Option<Arc<Language>>,
109 autoindent_requests: Vec<Arc<AutoindentRequest>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 parsing_in_background: bool,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
125 /// Memoize calls to has_changes_since(saved_version).
126 /// The contents of a cell are (self.version, has_changes) at the time of a last call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 _subscriptions: Vec<gpui::Subscription>,
129}
130
131#[derive(Copy, Clone, Debug, PartialEq, Eq)]
132pub enum ParseStatus {
133 Idle,
134 Parsing,
135}
136
137struct BufferBranchState {
138 base_buffer: Entity<Buffer>,
139 merged_operations: Vec<Lamport>,
140}
141
142/// An immutable, cheaply cloneable representation of a fixed
143/// state of a buffer.
144pub struct BufferSnapshot {
145 pub text: text::BufferSnapshot,
146 pub(crate) syntax: SyntaxSnapshot,
147 file: Option<Arc<dyn File>>,
148 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
149 remote_selections: TreeMap<ReplicaId, SelectionSet>,
150 language: Option<Arc<Language>>,
151 non_text_state_update_count: usize,
152}
153
154/// The kind and amount of indentation in a particular line. For now,
155/// assumes that indentation is all the same character.
156#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
157pub struct IndentSize {
158 /// The number of bytes that comprise the indentation.
159 pub len: u32,
160 /// The kind of whitespace used for indentation.
161 pub kind: IndentKind,
162}
163
164/// A whitespace character that's used for indentation.
165#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
166pub enum IndentKind {
167 /// An ASCII space character.
168 #[default]
169 Space,
170 /// An ASCII tab character.
171 Tab,
172}
173
174/// The shape of a selection cursor.
175#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
176#[serde(rename_all = "snake_case")]
177pub enum CursorShape {
178 /// A vertical bar
179 #[default]
180 Bar,
181 /// A block that surrounds the following character
182 Block,
183 /// An underline that runs along the following character
184 Underline,
185 /// A box drawn around the following character
186 Hollow,
187}
188
189#[derive(Clone, Debug)]
190struct SelectionSet {
191 line_mode: bool,
192 cursor_shape: CursorShape,
193 selections: Arc<[Selection<Anchor>]>,
194 lamport_timestamp: clock::Lamport,
195}
196
197/// A diagnostic associated with a certain range of a buffer.
198#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
199pub struct Diagnostic {
200 /// The name of the service that produced this diagnostic.
201 pub source: Option<String>,
202 /// A machine-readable code that identifies this diagnostic.
203 pub code: Option<NumberOrString>,
204 /// Whether this diagnostic is a hint, warning, or error.
205 pub severity: DiagnosticSeverity,
206 /// The human-readable message associated with this diagnostic.
207 pub message: String,
208 /// An id that identifies the group to which this diagnostic belongs.
209 ///
210 /// When a language server produces a diagnostic with
211 /// one or more associated diagnostics, those diagnostics are all
212 /// assigned a single group ID.
213 pub group_id: usize,
214 /// Whether this diagnostic is the primary diagnostic for its group.
215 ///
216 /// In a given group, the primary diagnostic is the top-level diagnostic
217 /// returned by the language server. The non-primary diagnostics are the
218 /// associated diagnostics.
219 pub is_primary: bool,
220 /// Whether this diagnostic is considered to originate from an analysis of
221 /// files on disk, as opposed to any unsaved buffer contents. This is a
222 /// property of a given diagnostic source, and is configured for a given
223 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
224 /// for the language server.
225 pub is_disk_based: bool,
226 /// Whether this diagnostic marks unnecessary code.
227 pub is_unnecessary: bool,
228 /// Data from language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
229 pub data: Option<Value>,
230}
231
232/// An operation used to synchronize this buffer with its other replicas.
233#[derive(Clone, Debug, PartialEq)]
234pub enum Operation {
235 /// A text operation.
236 Buffer(text::Operation),
237
238 /// An update to the buffer's diagnostics.
239 UpdateDiagnostics {
240 /// The id of the language server that produced the new diagnostics.
241 server_id: LanguageServerId,
242 /// The diagnostics.
243 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
244 /// The buffer's lamport timestamp.
245 lamport_timestamp: clock::Lamport,
246 },
247
248 /// An update to the most recent selections in this buffer.
249 UpdateSelections {
250 /// The selections.
251 selections: Arc<[Selection<Anchor>]>,
252 /// The buffer's lamport timestamp.
253 lamport_timestamp: clock::Lamport,
254 /// Whether the selections are in 'line mode'.
255 line_mode: bool,
256 /// The [`CursorShape`] associated with these selections.
257 cursor_shape: CursorShape,
258 },
259
260 /// An update to the characters that should trigger autocompletion
261 /// for this buffer.
262 UpdateCompletionTriggers {
263 /// The characters that trigger autocompletion.
264 triggers: Vec<String>,
265 /// The buffer's lamport timestamp.
266 lamport_timestamp: clock::Lamport,
267 /// The language server ID.
268 server_id: LanguageServerId,
269 },
270}
271
272/// An event that occurs in a buffer.
273#[derive(Clone, Debug, PartialEq)]
274pub enum BufferEvent {
275 /// The buffer was changed in a way that must be
276 /// propagated to its other replicas.
277 Operation {
278 operation: Operation,
279 is_local: bool,
280 },
281 /// The buffer was edited.
282 Edited,
283 /// The buffer's `dirty` bit changed.
284 DirtyChanged,
285 /// The buffer was saved.
286 Saved,
287 /// The buffer's file was changed on disk.
288 FileHandleChanged,
289 /// The buffer was reloaded.
290 Reloaded,
291 /// The buffer is in need of a reload
292 ReloadNeeded,
293 /// The buffer's language was changed.
294 LanguageChanged,
295 /// The buffer's syntax trees were updated.
296 Reparsed,
297 /// The buffer's diagnostics were updated.
298 DiagnosticsUpdated,
299 /// The buffer gained or lost editing capabilities.
300 CapabilityChanged,
301 /// The buffer was explicitly requested to close.
302 Closed,
303 /// The buffer was discarded when closing.
304 Discarded,
305}
306
307/// The file associated with a buffer.
308pub trait File: Send + Sync {
309 /// Returns the [`LocalFile`] associated with this file, if the
310 /// file is local.
311 fn as_local(&self) -> Option<&dyn LocalFile>;
312
313 /// Returns whether this file is local.
314 fn is_local(&self) -> bool {
315 self.as_local().is_some()
316 }
317
318 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
319 /// only available in some states, such as modification time.
320 fn disk_state(&self) -> DiskState;
321
322 /// Returns the path of this file relative to the worktree's root directory.
323 fn path(&self) -> &Arc<Path>;
324
325 /// Returns the path of this file relative to the worktree's parent directory (this means it
326 /// includes the name of the worktree's root folder).
327 fn full_path(&self, cx: &App) -> PathBuf;
328
329 /// Returns the last component of this handle's absolute path. If this handle refers to the root
330 /// of its worktree, then this method will return the name of the worktree itself.
331 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
332
333 /// Returns the id of the worktree to which this file belongs.
334 ///
335 /// This is needed for looking up project-specific settings.
336 fn worktree_id(&self, cx: &App) -> WorktreeId;
337
338 /// Converts this file into an [`Any`] trait object.
339 fn as_any(&self) -> &dyn Any;
340
341 /// Converts this file into a protobuf message.
342 fn to_proto(&self, cx: &App) -> rpc::proto::File;
343
344 /// Return whether Zed considers this to be a private file.
345 fn is_private(&self) -> bool;
346}
347
348/// The file's storage status - whether it's stored (`Present`), and if so when it was last
349/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
350/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
351/// indicator for new files.
352#[derive(Copy, Clone, Debug, PartialEq)]
353pub enum DiskState {
354 /// File created in Zed that has not been saved.
355 New,
356 /// File present on the filesystem.
357 Present { mtime: MTime },
358 /// Deleted file that was previously present.
359 Deleted,
360}
361
362impl DiskState {
363 /// Returns the file's last known modification time on disk.
364 pub fn mtime(self) -> Option<MTime> {
365 match self {
366 DiskState::New => None,
367 DiskState::Present { mtime } => Some(mtime),
368 DiskState::Deleted => None,
369 }
370 }
371}
372
373/// The file associated with a buffer, in the case where the file is on the local disk.
374pub trait LocalFile: File {
375 /// Returns the absolute path of this file
376 fn abs_path(&self, cx: &App) -> PathBuf;
377
378 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
379 fn load(&self, cx: &App) -> Task<Result<String>>;
380
381 /// Loads the file's contents from disk.
382 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
383}
384
385/// The auto-indent behavior associated with an editing operation.
386/// For some editing operations, each affected line of text has its
387/// indentation recomputed. For other operations, the entire block
388/// of edited text is adjusted uniformly.
389#[derive(Clone, Debug)]
390pub enum AutoindentMode {
391 /// Indent each line of inserted text.
392 EachLine,
393 /// Apply the same indentation adjustment to all of the lines
394 /// in a given insertion.
395 Block {
396 /// The original indentation level of the first line of each
397 /// insertion, if it has been copied.
398 original_indent_columns: Vec<u32>,
399 },
400}
401
402#[derive(Clone)]
403struct AutoindentRequest {
404 before_edit: BufferSnapshot,
405 entries: Vec<AutoindentRequestEntry>,
406 is_block_mode: bool,
407 ignore_empty_lines: bool,
408}
409
410#[derive(Debug, Clone)]
411struct AutoindentRequestEntry {
412 /// A range of the buffer whose indentation should be adjusted.
413 range: Range<Anchor>,
414 /// Whether or not these lines should be considered brand new, for the
415 /// purpose of auto-indent. When text is not new, its indentation will
416 /// only be adjusted if the suggested indentation level has *changed*
417 /// since the edit was made.
418 first_line_is_new: bool,
419 indent_size: IndentSize,
420 original_indent_column: Option<u32>,
421}
422
423#[derive(Debug)]
424struct IndentSuggestion {
425 basis_row: u32,
426 delta: Ordering,
427 within_error: bool,
428}
429
430struct BufferChunkHighlights<'a> {
431 captures: SyntaxMapCaptures<'a>,
432 next_capture: Option<SyntaxMapCapture<'a>>,
433 stack: Vec<(usize, HighlightId)>,
434 highlight_maps: Vec<HighlightMap>,
435}
436
437/// An iterator that yields chunks of a buffer's text, along with their
438/// syntax highlights and diagnostic status.
439pub struct BufferChunks<'a> {
440 buffer_snapshot: Option<&'a BufferSnapshot>,
441 range: Range<usize>,
442 chunks: text::Chunks<'a>,
443 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
444 error_depth: usize,
445 warning_depth: usize,
446 information_depth: usize,
447 hint_depth: usize,
448 unnecessary_depth: usize,
449 highlights: Option<BufferChunkHighlights<'a>>,
450}
451
452/// A chunk of a buffer's text, along with its syntax highlight and
453/// diagnostic status.
454#[derive(Clone, Debug, Default)]
455pub struct Chunk<'a> {
456 /// The text of the chunk.
457 pub text: &'a str,
458 /// The syntax highlighting style of the chunk.
459 pub syntax_highlight_id: Option<HighlightId>,
460 /// The highlight style that has been applied to this chunk in
461 /// the editor.
462 pub highlight_style: Option<HighlightStyle>,
463 /// The severity of diagnostic associated with this chunk, if any.
464 pub diagnostic_severity: Option<DiagnosticSeverity>,
465 /// Whether this chunk of text is marked as unnecessary.
466 pub is_unnecessary: bool,
467 /// Whether this chunk of text was originally a tab character.
468 pub is_tab: bool,
469 /// An optional recipe for how the chunk should be presented.
470 pub renderer: Option<ChunkRenderer>,
471}
472
473/// A recipe for how the chunk should be presented.
474#[derive(Clone)]
475pub struct ChunkRenderer {
476 /// creates a custom element to represent this chunk.
477 pub render: Arc<dyn Send + Sync + Fn(&mut ChunkRendererContext) -> AnyElement>,
478 /// If true, the element is constrained to the shaped width of the text.
479 pub constrain_width: bool,
480}
481
482pub struct ChunkRendererContext<'a, 'b> {
483 pub window: &'a mut Window,
484 pub context: &'b mut App,
485 pub max_width: Pixels,
486}
487
488impl fmt::Debug for ChunkRenderer {
489 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
490 f.debug_struct("ChunkRenderer")
491 .field("constrain_width", &self.constrain_width)
492 .finish()
493 }
494}
495
496impl<'a, 'b> Deref for ChunkRendererContext<'a, 'b> {
497 type Target = App;
498
499 fn deref(&self) -> &Self::Target {
500 self.context
501 }
502}
503
504impl<'a, 'b> DerefMut for ChunkRendererContext<'a, 'b> {
505 fn deref_mut(&mut self) -> &mut Self::Target {
506 self.context
507 }
508}
509
510/// A set of edits to a given version of a buffer, computed asynchronously.
511#[derive(Debug)]
512pub struct Diff {
513 pub(crate) base_version: clock::Global,
514 line_ending: LineEnding,
515 pub edits: Vec<(Range<usize>, Arc<str>)>,
516}
517
518#[derive(Clone, Copy)]
519pub(crate) struct DiagnosticEndpoint {
520 offset: usize,
521 is_start: bool,
522 severity: DiagnosticSeverity,
523 is_unnecessary: bool,
524}
525
526/// A class of characters, used for characterizing a run of text.
527#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
528pub enum CharKind {
529 /// Whitespace.
530 Whitespace,
531 /// Punctuation.
532 Punctuation,
533 /// Word.
534 Word,
535}
536
537/// A runnable is a set of data about a region that could be resolved into a task
538pub struct Runnable {
539 pub tags: SmallVec<[RunnableTag; 1]>,
540 pub language: Arc<Language>,
541 pub buffer: BufferId,
542}
543
544#[derive(Default, Clone, Debug)]
545pub struct HighlightedText {
546 pub text: SharedString,
547 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
548}
549
550#[derive(Default, Debug)]
551struct HighlightedTextBuilder {
552 pub text: String,
553 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
554}
555
556impl HighlightedText {
557 pub fn from_buffer_range<T: ToOffset>(
558 range: Range<T>,
559 snapshot: &text::BufferSnapshot,
560 syntax_snapshot: &SyntaxSnapshot,
561 override_style: Option<HighlightStyle>,
562 syntax_theme: &SyntaxTheme,
563 ) -> Self {
564 let mut highlighted_text = HighlightedTextBuilder::default();
565 highlighted_text.add_text_from_buffer_range(
566 range,
567 snapshot,
568 syntax_snapshot,
569 override_style,
570 syntax_theme,
571 );
572 highlighted_text.build()
573 }
574
575 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
576 gpui::StyledText::new(self.text.clone())
577 .with_highlights(default_style, self.highlights.iter().cloned())
578 }
579
580 /// Returns the first line without leading whitespace unless highlighted
581 /// and a boolean indicating if there are more lines after
582 pub fn first_line_preview(self) -> (Self, bool) {
583 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
584 let first_line = &self.text[..newline_ix];
585
586 // Trim leading whitespace, unless an edit starts prior to it.
587 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
588 if let Some((first_highlight_range, _)) = self.highlights.first() {
589 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
590 }
591
592 let preview_text = &first_line[preview_start_ix..];
593 let preview_highlights = self
594 .highlights
595 .into_iter()
596 .take_while(|(range, _)| range.start < newline_ix)
597 .filter_map(|(mut range, highlight)| {
598 range.start = range.start.saturating_sub(preview_start_ix);
599 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
600 if range.is_empty() {
601 None
602 } else {
603 Some((range, highlight))
604 }
605 });
606
607 let preview = Self {
608 text: SharedString::new(preview_text),
609 highlights: preview_highlights.collect(),
610 };
611
612 (preview, self.text.len() > newline_ix)
613 }
614}
615
616impl HighlightedTextBuilder {
617 pub fn build(self) -> HighlightedText {
618 HighlightedText {
619 text: self.text.into(),
620 highlights: self.highlights,
621 }
622 }
623
624 pub fn add_text_from_buffer_range<T: ToOffset>(
625 &mut self,
626 range: Range<T>,
627 snapshot: &text::BufferSnapshot,
628 syntax_snapshot: &SyntaxSnapshot,
629 override_style: Option<HighlightStyle>,
630 syntax_theme: &SyntaxTheme,
631 ) {
632 let range = range.to_offset(snapshot);
633 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
634 let start = self.text.len();
635 self.text.push_str(chunk.text);
636 let end = self.text.len();
637
638 if let Some(mut highlight_style) = chunk
639 .syntax_highlight_id
640 .and_then(|id| id.style(syntax_theme))
641 {
642 if let Some(override_style) = override_style {
643 highlight_style.highlight(override_style);
644 }
645 self.highlights.push((start..end, highlight_style));
646 } else if let Some(override_style) = override_style {
647 self.highlights.push((start..end, override_style));
648 }
649 }
650 }
651
652 fn highlighted_chunks<'a>(
653 range: Range<usize>,
654 snapshot: &'a text::BufferSnapshot,
655 syntax_snapshot: &'a SyntaxSnapshot,
656 ) -> BufferChunks<'a> {
657 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
658 grammar.highlights_query.as_ref()
659 });
660
661 let highlight_maps = captures
662 .grammars()
663 .iter()
664 .map(|grammar| grammar.highlight_map())
665 .collect();
666
667 BufferChunks::new(
668 snapshot.as_rope(),
669 range,
670 Some((captures, highlight_maps)),
671 false,
672 None,
673 )
674 }
675}
676
677#[derive(Clone)]
678pub struct EditPreview {
679 old_snapshot: text::BufferSnapshot,
680 applied_edits_snapshot: text::BufferSnapshot,
681 syntax_snapshot: SyntaxSnapshot,
682}
683
684impl EditPreview {
685 pub fn highlight_edits(
686 &self,
687 current_snapshot: &BufferSnapshot,
688 edits: &[(Range<Anchor>, String)],
689 include_deletions: bool,
690 cx: &App,
691 ) -> HighlightedText {
692 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
693 return HighlightedText::default();
694 };
695
696 let mut highlighted_text = HighlightedTextBuilder::default();
697
698 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
699
700 let insertion_highlight_style = HighlightStyle {
701 background_color: Some(cx.theme().status().created_background),
702 ..Default::default()
703 };
704 let deletion_highlight_style = HighlightStyle {
705 background_color: Some(cx.theme().status().deleted_background),
706 ..Default::default()
707 };
708 let syntax_theme = cx.theme().syntax();
709
710 for (range, edit_text) in edits {
711 let edit_new_end_in_preview_snapshot = range
712 .end
713 .bias_right(&self.old_snapshot)
714 .to_offset(&self.applied_edits_snapshot);
715 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
716
717 let unchanged_range_in_preview_snapshot =
718 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
719 if !unchanged_range_in_preview_snapshot.is_empty() {
720 highlighted_text.add_text_from_buffer_range(
721 unchanged_range_in_preview_snapshot,
722 &self.applied_edits_snapshot,
723 &self.syntax_snapshot,
724 None,
725 &syntax_theme,
726 );
727 }
728
729 let range_in_current_snapshot = range.to_offset(current_snapshot);
730 if include_deletions && !range_in_current_snapshot.is_empty() {
731 highlighted_text.add_text_from_buffer_range(
732 range_in_current_snapshot,
733 ¤t_snapshot.text,
734 ¤t_snapshot.syntax,
735 Some(deletion_highlight_style),
736 &syntax_theme,
737 );
738 }
739
740 if !edit_text.is_empty() {
741 highlighted_text.add_text_from_buffer_range(
742 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
743 &self.applied_edits_snapshot,
744 &self.syntax_snapshot,
745 Some(insertion_highlight_style),
746 &syntax_theme,
747 );
748 }
749
750 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
751 }
752
753 highlighted_text.add_text_from_buffer_range(
754 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
755 &self.applied_edits_snapshot,
756 &self.syntax_snapshot,
757 None,
758 &syntax_theme,
759 );
760
761 highlighted_text.build()
762 }
763
764 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
765 let (first, _) = edits.first()?;
766 let (last, _) = edits.last()?;
767
768 let start = first
769 .start
770 .bias_left(&self.old_snapshot)
771 .to_point(&self.applied_edits_snapshot);
772 let end = last
773 .end
774 .bias_right(&self.old_snapshot)
775 .to_point(&self.applied_edits_snapshot);
776
777 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
778 let range = Point::new(start.row, 0)
779 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
780
781 Some(range.to_offset(&self.applied_edits_snapshot))
782 }
783}
784
785#[derive(Clone, Debug, PartialEq, Eq)]
786pub struct BracketMatch {
787 pub open_range: Range<usize>,
788 pub close_range: Range<usize>,
789 pub newline_only: bool,
790}
791
792impl Buffer {
793 /// Create a new buffer with the given base text.
794 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
795 Self::build(
796 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
797 None,
798 Capability::ReadWrite,
799 )
800 }
801
802 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
803 pub fn local_normalized(
804 base_text_normalized: Rope,
805 line_ending: LineEnding,
806 cx: &Context<Self>,
807 ) -> Self {
808 Self::build(
809 TextBuffer::new_normalized(
810 0,
811 cx.entity_id().as_non_zero_u64().into(),
812 line_ending,
813 base_text_normalized,
814 ),
815 None,
816 Capability::ReadWrite,
817 )
818 }
819
820 /// Create a new buffer that is a replica of a remote buffer.
821 pub fn remote(
822 remote_id: BufferId,
823 replica_id: ReplicaId,
824 capability: Capability,
825 base_text: impl Into<String>,
826 ) -> Self {
827 Self::build(
828 TextBuffer::new(replica_id, remote_id, base_text.into()),
829 None,
830 capability,
831 )
832 }
833
834 /// Create a new buffer that is a replica of a remote buffer, populating its
835 /// state from the given protobuf message.
836 pub fn from_proto(
837 replica_id: ReplicaId,
838 capability: Capability,
839 message: proto::BufferState,
840 file: Option<Arc<dyn File>>,
841 ) -> Result<Self> {
842 let buffer_id = BufferId::new(message.id)
843 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
844 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
845 let mut this = Self::build(buffer, file, capability);
846 this.text.set_line_ending(proto::deserialize_line_ending(
847 rpc::proto::LineEnding::from_i32(message.line_ending)
848 .ok_or_else(|| anyhow!("missing line_ending"))?,
849 ));
850 this.saved_version = proto::deserialize_version(&message.saved_version);
851 this.saved_mtime = message.saved_mtime.map(|time| time.into());
852 Ok(this)
853 }
854
855 /// Serialize the buffer's state to a protobuf message.
856 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
857 proto::BufferState {
858 id: self.remote_id().into(),
859 file: self.file.as_ref().map(|f| f.to_proto(cx)),
860 base_text: self.base_text().to_string(),
861 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
862 saved_version: proto::serialize_version(&self.saved_version),
863 saved_mtime: self.saved_mtime.map(|time| time.into()),
864 }
865 }
866
867 /// Serialize as protobufs all of the changes to the buffer since the given version.
868 pub fn serialize_ops(
869 &self,
870 since: Option<clock::Global>,
871 cx: &App,
872 ) -> Task<Vec<proto::Operation>> {
873 let mut operations = Vec::new();
874 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
875
876 operations.extend(self.remote_selections.iter().map(|(_, set)| {
877 proto::serialize_operation(&Operation::UpdateSelections {
878 selections: set.selections.clone(),
879 lamport_timestamp: set.lamport_timestamp,
880 line_mode: set.line_mode,
881 cursor_shape: set.cursor_shape,
882 })
883 }));
884
885 for (server_id, diagnostics) in &self.diagnostics {
886 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
887 lamport_timestamp: self.diagnostics_timestamp,
888 server_id: *server_id,
889 diagnostics: diagnostics.iter().cloned().collect(),
890 }));
891 }
892
893 for (server_id, completions) in &self.completion_triggers_per_language_server {
894 operations.push(proto::serialize_operation(
895 &Operation::UpdateCompletionTriggers {
896 triggers: completions.iter().cloned().collect(),
897 lamport_timestamp: self.completion_triggers_timestamp,
898 server_id: *server_id,
899 },
900 ));
901 }
902
903 let text_operations = self.text.operations().clone();
904 cx.background_spawn(async move {
905 let since = since.unwrap_or_default();
906 operations.extend(
907 text_operations
908 .iter()
909 .filter(|(_, op)| !since.observed(op.timestamp()))
910 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
911 );
912 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
913 operations
914 })
915 }
916
917 /// Assign a language to the buffer, returning the buffer.
918 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
919 self.set_language(Some(language), cx);
920 self
921 }
922
923 /// Returns the [`Capability`] of this buffer.
924 pub fn capability(&self) -> Capability {
925 self.capability
926 }
927
928 /// Whether this buffer can only be read.
929 pub fn read_only(&self) -> bool {
930 self.capability == Capability::ReadOnly
931 }
932
933 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
934 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
935 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
936 let snapshot = buffer.snapshot();
937 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
938 Self {
939 saved_mtime,
940 saved_version: buffer.version(),
941 preview_version: buffer.version(),
942 reload_task: None,
943 transaction_depth: 0,
944 was_dirty_before_starting_transaction: None,
945 has_unsaved_edits: Cell::new((buffer.version(), false)),
946 text: buffer,
947 branch_state: None,
948 file,
949 capability,
950 syntax_map,
951 parsing_in_background: false,
952 non_text_state_update_count: 0,
953 sync_parse_timeout: Duration::from_millis(1),
954 parse_status: async_watch::channel(ParseStatus::Idle),
955 autoindent_requests: Default::default(),
956 pending_autoindent: Default::default(),
957 language: None,
958 remote_selections: Default::default(),
959 diagnostics: Default::default(),
960 diagnostics_timestamp: Default::default(),
961 completion_triggers: Default::default(),
962 completion_triggers_per_language_server: Default::default(),
963 completion_triggers_timestamp: Default::default(),
964 deferred_ops: OperationQueue::new(),
965 has_conflict: false,
966 _subscriptions: Vec::new(),
967 }
968 }
969
970 pub fn build_snapshot(
971 text: Rope,
972 language: Option<Arc<Language>>,
973 language_registry: Option<Arc<LanguageRegistry>>,
974 cx: &mut App,
975 ) -> impl Future<Output = BufferSnapshot> {
976 let entity_id = cx.reserve_entity::<Self>().entity_id();
977 let buffer_id = entity_id.as_non_zero_u64().into();
978 async move {
979 let text =
980 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
981 let mut syntax = SyntaxMap::new(&text).snapshot();
982 if let Some(language) = language.clone() {
983 let text = text.clone();
984 let language = language.clone();
985 let language_registry = language_registry.clone();
986 syntax.reparse(&text, language_registry, language);
987 }
988 BufferSnapshot {
989 text,
990 syntax,
991 file: None,
992 diagnostics: Default::default(),
993 remote_selections: Default::default(),
994 language,
995 non_text_state_update_count: 0,
996 }
997 }
998 }
999
1000 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1001 let entity_id = cx.reserve_entity::<Self>().entity_id();
1002 let buffer_id = entity_id.as_non_zero_u64().into();
1003 let text =
1004 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
1005 let syntax = SyntaxMap::new(&text).snapshot();
1006 BufferSnapshot {
1007 text,
1008 syntax,
1009 file: None,
1010 diagnostics: Default::default(),
1011 remote_selections: Default::default(),
1012 language: None,
1013 non_text_state_update_count: 0,
1014 }
1015 }
1016
1017 #[cfg(any(test, feature = "test-support"))]
1018 pub fn build_snapshot_sync(
1019 text: Rope,
1020 language: Option<Arc<Language>>,
1021 language_registry: Option<Arc<LanguageRegistry>>,
1022 cx: &mut App,
1023 ) -> BufferSnapshot {
1024 let entity_id = cx.reserve_entity::<Self>().entity_id();
1025 let buffer_id = entity_id.as_non_zero_u64().into();
1026 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1027 let mut syntax = SyntaxMap::new(&text).snapshot();
1028 if let Some(language) = language.clone() {
1029 let text = text.clone();
1030 let language = language.clone();
1031 let language_registry = language_registry.clone();
1032 syntax.reparse(&text, language_registry, language);
1033 }
1034 BufferSnapshot {
1035 text,
1036 syntax,
1037 file: None,
1038 diagnostics: Default::default(),
1039 remote_selections: Default::default(),
1040 language,
1041 non_text_state_update_count: 0,
1042 }
1043 }
1044
1045 /// Retrieve a snapshot of the buffer's current state. This is computationally
1046 /// cheap, and allows reading from the buffer on a background thread.
1047 pub fn snapshot(&self) -> BufferSnapshot {
1048 let text = self.text.snapshot();
1049 let mut syntax_map = self.syntax_map.lock();
1050 syntax_map.interpolate(&text);
1051 let syntax = syntax_map.snapshot();
1052
1053 BufferSnapshot {
1054 text,
1055 syntax,
1056 file: self.file.clone(),
1057 remote_selections: self.remote_selections.clone(),
1058 diagnostics: self.diagnostics.clone(),
1059 language: self.language.clone(),
1060 non_text_state_update_count: self.non_text_state_update_count,
1061 }
1062 }
1063
1064 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1065 let this = cx.entity();
1066 cx.new(|cx| {
1067 let mut branch = Self {
1068 branch_state: Some(BufferBranchState {
1069 base_buffer: this.clone(),
1070 merged_operations: Default::default(),
1071 }),
1072 language: self.language.clone(),
1073 has_conflict: self.has_conflict,
1074 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1075 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1076 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1077 };
1078 if let Some(language_registry) = self.language_registry() {
1079 branch.set_language_registry(language_registry);
1080 }
1081
1082 // Reparse the branch buffer so that we get syntax highlighting immediately.
1083 branch.reparse(cx);
1084
1085 branch
1086 })
1087 }
1088
1089 pub fn preview_edits(
1090 &self,
1091 edits: Arc<[(Range<Anchor>, String)]>,
1092 cx: &App,
1093 ) -> Task<EditPreview> {
1094 let registry = self.language_registry();
1095 let language = self.language().cloned();
1096 let old_snapshot = self.text.snapshot();
1097 let mut branch_buffer = self.text.branch();
1098 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1099 cx.background_spawn(async move {
1100 if !edits.is_empty() {
1101 if let Some(language) = language.clone() {
1102 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1103 }
1104
1105 branch_buffer.edit(edits.iter().cloned());
1106 let snapshot = branch_buffer.snapshot();
1107 syntax_snapshot.interpolate(&snapshot);
1108
1109 if let Some(language) = language {
1110 syntax_snapshot.reparse(&snapshot, registry, language);
1111 }
1112 }
1113 EditPreview {
1114 old_snapshot,
1115 applied_edits_snapshot: branch_buffer.snapshot(),
1116 syntax_snapshot,
1117 }
1118 })
1119 }
1120
1121 /// Applies all of the changes in this buffer that intersect any of the
1122 /// given `ranges` to its base buffer.
1123 ///
1124 /// If `ranges` is empty, then all changes will be applied. This buffer must
1125 /// be a branch buffer to call this method.
1126 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1127 let Some(base_buffer) = self.base_buffer() else {
1128 debug_panic!("not a branch buffer");
1129 return;
1130 };
1131
1132 let mut ranges = if ranges.is_empty() {
1133 &[0..usize::MAX]
1134 } else {
1135 ranges.as_slice()
1136 }
1137 .into_iter()
1138 .peekable();
1139
1140 let mut edits = Vec::new();
1141 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1142 let mut is_included = false;
1143 while let Some(range) = ranges.peek() {
1144 if range.end < edit.new.start {
1145 ranges.next().unwrap();
1146 } else {
1147 if range.start <= edit.new.end {
1148 is_included = true;
1149 }
1150 break;
1151 }
1152 }
1153
1154 if is_included {
1155 edits.push((
1156 edit.old.clone(),
1157 self.text_for_range(edit.new.clone()).collect::<String>(),
1158 ));
1159 }
1160 }
1161
1162 let operation = base_buffer.update(cx, |base_buffer, cx| {
1163 // cx.emit(BufferEvent::DiffBaseChanged);
1164 base_buffer.edit(edits, None, cx)
1165 });
1166
1167 if let Some(operation) = operation {
1168 if let Some(BufferBranchState {
1169 merged_operations, ..
1170 }) = &mut self.branch_state
1171 {
1172 merged_operations.push(operation);
1173 }
1174 }
1175 }
1176
1177 fn on_base_buffer_event(
1178 &mut self,
1179 _: Entity<Buffer>,
1180 event: &BufferEvent,
1181 cx: &mut Context<Self>,
1182 ) {
1183 let BufferEvent::Operation { operation, .. } = event else {
1184 return;
1185 };
1186 let Some(BufferBranchState {
1187 merged_operations, ..
1188 }) = &mut self.branch_state
1189 else {
1190 return;
1191 };
1192
1193 let mut operation_to_undo = None;
1194 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1195 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1196 merged_operations.remove(ix);
1197 operation_to_undo = Some(operation.timestamp);
1198 }
1199 }
1200
1201 self.apply_ops([operation.clone()], cx);
1202
1203 if let Some(timestamp) = operation_to_undo {
1204 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1205 self.undo_operations(counts, cx);
1206 }
1207 }
1208
1209 #[cfg(test)]
1210 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1211 &self.text
1212 }
1213
1214 /// Retrieve a snapshot of the buffer's raw text, without any
1215 /// language-related state like the syntax tree or diagnostics.
1216 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1217 self.text.snapshot()
1218 }
1219
1220 /// The file associated with the buffer, if any.
1221 pub fn file(&self) -> Option<&Arc<dyn File>> {
1222 self.file.as_ref()
1223 }
1224
1225 /// The version of the buffer that was last saved or reloaded from disk.
1226 pub fn saved_version(&self) -> &clock::Global {
1227 &self.saved_version
1228 }
1229
1230 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1231 pub fn saved_mtime(&self) -> Option<MTime> {
1232 self.saved_mtime
1233 }
1234
1235 /// Assign a language to the buffer.
1236 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1237 self.non_text_state_update_count += 1;
1238 self.syntax_map.lock().clear(&self.text);
1239 self.language = language;
1240 self.reparse(cx);
1241 cx.emit(BufferEvent::LanguageChanged);
1242 }
1243
1244 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1245 /// other languages if parts of the buffer are written in different languages.
1246 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1247 self.syntax_map
1248 .lock()
1249 .set_language_registry(language_registry);
1250 }
1251
1252 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1253 self.syntax_map.lock().language_registry()
1254 }
1255
1256 /// Assign the buffer a new [`Capability`].
1257 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1258 self.capability = capability;
1259 cx.emit(BufferEvent::CapabilityChanged)
1260 }
1261
1262 /// This method is called to signal that the buffer has been saved.
1263 pub fn did_save(
1264 &mut self,
1265 version: clock::Global,
1266 mtime: Option<MTime>,
1267 cx: &mut Context<Self>,
1268 ) {
1269 self.saved_version = version;
1270 self.has_unsaved_edits
1271 .set((self.saved_version().clone(), false));
1272 self.has_conflict = false;
1273 self.saved_mtime = mtime;
1274 cx.emit(BufferEvent::Saved);
1275 cx.notify();
1276 }
1277
1278 /// This method is called to signal that the buffer has been discarded.
1279 pub fn discarded(&self, cx: &mut Context<Self>) {
1280 cx.emit(BufferEvent::Discarded);
1281 cx.notify();
1282 }
1283
1284 /// Reloads the contents of the buffer from disk.
1285 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1286 let (tx, rx) = futures::channel::oneshot::channel();
1287 let prev_version = self.text.version();
1288 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
1289 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
1290 let file = this.file.as_ref()?.as_local()?;
1291 Some((file.disk_state().mtime(), file.load(cx)))
1292 })?
1293 else {
1294 return Ok(());
1295 };
1296
1297 let new_text = new_text.await?;
1298 let diff = this
1299 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
1300 .await;
1301 this.update(&mut cx, |this, cx| {
1302 if this.version() == diff.base_version {
1303 this.finalize_last_transaction();
1304 this.apply_diff(diff, cx);
1305 tx.send(this.finalize_last_transaction().cloned()).ok();
1306 this.has_conflict = false;
1307 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1308 } else {
1309 if !diff.edits.is_empty()
1310 || this
1311 .edits_since::<usize>(&diff.base_version)
1312 .next()
1313 .is_some()
1314 {
1315 this.has_conflict = true;
1316 }
1317
1318 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1319 }
1320
1321 this.reload_task.take();
1322 })
1323 }));
1324 rx
1325 }
1326
1327 /// This method is called to signal that the buffer has been reloaded.
1328 pub fn did_reload(
1329 &mut self,
1330 version: clock::Global,
1331 line_ending: LineEnding,
1332 mtime: Option<MTime>,
1333 cx: &mut Context<Self>,
1334 ) {
1335 self.saved_version = version;
1336 self.has_unsaved_edits
1337 .set((self.saved_version.clone(), false));
1338 self.text.set_line_ending(line_ending);
1339 self.saved_mtime = mtime;
1340 cx.emit(BufferEvent::Reloaded);
1341 cx.notify();
1342 }
1343
1344 /// Updates the [`File`] backing this buffer. This should be called when
1345 /// the file has changed or has been deleted.
1346 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1347 let was_dirty = self.is_dirty();
1348 let mut file_changed = false;
1349
1350 if let Some(old_file) = self.file.as_ref() {
1351 if new_file.path() != old_file.path() {
1352 file_changed = true;
1353 }
1354
1355 let old_state = old_file.disk_state();
1356 let new_state = new_file.disk_state();
1357 if old_state != new_state {
1358 file_changed = true;
1359 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1360 cx.emit(BufferEvent::ReloadNeeded)
1361 }
1362 }
1363 } else {
1364 file_changed = true;
1365 };
1366
1367 self.file = Some(new_file);
1368 if file_changed {
1369 self.non_text_state_update_count += 1;
1370 if was_dirty != self.is_dirty() {
1371 cx.emit(BufferEvent::DirtyChanged);
1372 }
1373 cx.emit(BufferEvent::FileHandleChanged);
1374 cx.notify();
1375 }
1376 }
1377
1378 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1379 Some(self.branch_state.as_ref()?.base_buffer.clone())
1380 }
1381
1382 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1383 pub fn language(&self) -> Option<&Arc<Language>> {
1384 self.language.as_ref()
1385 }
1386
1387 /// Returns the [`Language`] at the given location.
1388 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1389 let offset = position.to_offset(self);
1390 self.syntax_map
1391 .lock()
1392 .layers_for_range(offset..offset, &self.text, false)
1393 .last()
1394 .map(|info| info.language.clone())
1395 .or_else(|| self.language.clone())
1396 }
1397
1398 /// An integer version number that accounts for all updates besides
1399 /// the buffer's text itself (which is versioned via a version vector).
1400 pub fn non_text_state_update_count(&self) -> usize {
1401 self.non_text_state_update_count
1402 }
1403
1404 /// Whether the buffer is being parsed in the background.
1405 #[cfg(any(test, feature = "test-support"))]
1406 pub fn is_parsing(&self) -> bool {
1407 self.parsing_in_background
1408 }
1409
1410 /// Indicates whether the buffer contains any regions that may be
1411 /// written in a language that hasn't been loaded yet.
1412 pub fn contains_unknown_injections(&self) -> bool {
1413 self.syntax_map.lock().contains_unknown_injections()
1414 }
1415
1416 #[cfg(test)]
1417 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1418 self.sync_parse_timeout = timeout;
1419 }
1420
1421 /// Called after an edit to synchronize the buffer's main parse tree with
1422 /// the buffer's new underlying state.
1423 ///
1424 /// Locks the syntax map and interpolates the edits since the last reparse
1425 /// into the foreground syntax tree.
1426 ///
1427 /// Then takes a stable snapshot of the syntax map before unlocking it.
1428 /// The snapshot with the interpolated edits is sent to a background thread,
1429 /// where we ask Tree-sitter to perform an incremental parse.
1430 ///
1431 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1432 /// waiting on the parse to complete. As soon as it completes, we proceed
1433 /// synchronously, unless a 1ms timeout elapses.
1434 ///
1435 /// If we time out waiting on the parse, we spawn a second task waiting
1436 /// until the parse does complete and return with the interpolated tree still
1437 /// in the foreground. When the background parse completes, call back into
1438 /// the main thread and assign the foreground parse state.
1439 ///
1440 /// If the buffer or grammar changed since the start of the background parse,
1441 /// initiate an additional reparse recursively. To avoid concurrent parses
1442 /// for the same buffer, we only initiate a new parse if we are not already
1443 /// parsing in the background.
1444 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1445 if self.parsing_in_background {
1446 return;
1447 }
1448 let language = if let Some(language) = self.language.clone() {
1449 language
1450 } else {
1451 return;
1452 };
1453
1454 let text = self.text_snapshot();
1455 let parsed_version = self.version();
1456
1457 let mut syntax_map = self.syntax_map.lock();
1458 syntax_map.interpolate(&text);
1459 let language_registry = syntax_map.language_registry();
1460 let mut syntax_snapshot = syntax_map.snapshot();
1461 drop(syntax_map);
1462
1463 let parse_task = cx.background_spawn({
1464 let language = language.clone();
1465 let language_registry = language_registry.clone();
1466 async move {
1467 syntax_snapshot.reparse(&text, language_registry, language);
1468 syntax_snapshot
1469 }
1470 });
1471
1472 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1473 match cx
1474 .background_executor()
1475 .block_with_timeout(self.sync_parse_timeout, parse_task)
1476 {
1477 Ok(new_syntax_snapshot) => {
1478 self.did_finish_parsing(new_syntax_snapshot, cx);
1479 }
1480 Err(parse_task) => {
1481 self.parsing_in_background = true;
1482 cx.spawn(move |this, mut cx| async move {
1483 let new_syntax_map = parse_task.await;
1484 this.update(&mut cx, move |this, cx| {
1485 let grammar_changed =
1486 this.language.as_ref().map_or(true, |current_language| {
1487 !Arc::ptr_eq(&language, current_language)
1488 });
1489 let language_registry_changed = new_syntax_map
1490 .contains_unknown_injections()
1491 && language_registry.map_or(false, |registry| {
1492 registry.version() != new_syntax_map.language_registry_version()
1493 });
1494 let parse_again = language_registry_changed
1495 || grammar_changed
1496 || this.version.changed_since(&parsed_version);
1497 this.did_finish_parsing(new_syntax_map, cx);
1498 this.parsing_in_background = false;
1499 if parse_again {
1500 this.reparse(cx);
1501 }
1502 })
1503 .ok();
1504 })
1505 .detach();
1506 }
1507 }
1508 }
1509
1510 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1511 self.non_text_state_update_count += 1;
1512 self.syntax_map.lock().did_parse(syntax_snapshot);
1513 self.request_autoindent(cx);
1514 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1515 cx.emit(BufferEvent::Reparsed);
1516 cx.notify();
1517 }
1518
1519 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1520 self.parse_status.1.clone()
1521 }
1522
1523 /// Assign to the buffer a set of diagnostics created by a given language server.
1524 pub fn update_diagnostics(
1525 &mut self,
1526 server_id: LanguageServerId,
1527 diagnostics: DiagnosticSet,
1528 cx: &mut Context<Self>,
1529 ) {
1530 let lamport_timestamp = self.text.lamport_clock.tick();
1531 let op = Operation::UpdateDiagnostics {
1532 server_id,
1533 diagnostics: diagnostics.iter().cloned().collect(),
1534 lamport_timestamp,
1535 };
1536 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1537 self.send_operation(op, true, cx);
1538 }
1539
1540 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1541 if let Some(indent_sizes) = self.compute_autoindents() {
1542 let indent_sizes = cx.background_spawn(indent_sizes);
1543 match cx
1544 .background_executor()
1545 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1546 {
1547 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1548 Err(indent_sizes) => {
1549 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1550 let indent_sizes = indent_sizes.await;
1551 this.update(&mut cx, |this, cx| {
1552 this.apply_autoindents(indent_sizes, cx);
1553 })
1554 .ok();
1555 }));
1556 }
1557 }
1558 } else {
1559 self.autoindent_requests.clear();
1560 }
1561 }
1562
1563 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
1564 let max_rows_between_yields = 100;
1565 let snapshot = self.snapshot();
1566 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1567 return None;
1568 }
1569
1570 let autoindent_requests = self.autoindent_requests.clone();
1571 Some(async move {
1572 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1573 for request in autoindent_requests {
1574 // Resolve each edited range to its row in the current buffer and in the
1575 // buffer before this batch of edits.
1576 let mut row_ranges = Vec::new();
1577 let mut old_to_new_rows = BTreeMap::new();
1578 let mut language_indent_sizes_by_new_row = Vec::new();
1579 for entry in &request.entries {
1580 let position = entry.range.start;
1581 let new_row = position.to_point(&snapshot).row;
1582 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1583 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1584
1585 if !entry.first_line_is_new {
1586 let old_row = position.to_point(&request.before_edit).row;
1587 old_to_new_rows.insert(old_row, new_row);
1588 }
1589 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1590 }
1591
1592 // Build a map containing the suggested indentation for each of the edited lines
1593 // with respect to the state of the buffer before these edits. This map is keyed
1594 // by the rows for these lines in the current state of the buffer.
1595 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1596 let old_edited_ranges =
1597 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1598 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1599 let mut language_indent_size = IndentSize::default();
1600 for old_edited_range in old_edited_ranges {
1601 let suggestions = request
1602 .before_edit
1603 .suggest_autoindents(old_edited_range.clone())
1604 .into_iter()
1605 .flatten();
1606 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1607 if let Some(suggestion) = suggestion {
1608 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1609
1610 // Find the indent size based on the language for this row.
1611 while let Some((row, size)) = language_indent_sizes.peek() {
1612 if *row > new_row {
1613 break;
1614 }
1615 language_indent_size = *size;
1616 language_indent_sizes.next();
1617 }
1618
1619 let suggested_indent = old_to_new_rows
1620 .get(&suggestion.basis_row)
1621 .and_then(|from_row| {
1622 Some(old_suggestions.get(from_row).copied()?.0)
1623 })
1624 .unwrap_or_else(|| {
1625 request
1626 .before_edit
1627 .indent_size_for_line(suggestion.basis_row)
1628 })
1629 .with_delta(suggestion.delta, language_indent_size);
1630 old_suggestions
1631 .insert(new_row, (suggested_indent, suggestion.within_error));
1632 }
1633 }
1634 yield_now().await;
1635 }
1636
1637 // Compute new suggestions for each line, but only include them in the result
1638 // if they differ from the old suggestion for that line.
1639 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1640 let mut language_indent_size = IndentSize::default();
1641 for (row_range, original_indent_column) in row_ranges {
1642 let new_edited_row_range = if request.is_block_mode {
1643 row_range.start..row_range.start + 1
1644 } else {
1645 row_range.clone()
1646 };
1647
1648 let suggestions = snapshot
1649 .suggest_autoindents(new_edited_row_range.clone())
1650 .into_iter()
1651 .flatten();
1652 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1653 if let Some(suggestion) = suggestion {
1654 // Find the indent size based on the language for this row.
1655 while let Some((row, size)) = language_indent_sizes.peek() {
1656 if *row > new_row {
1657 break;
1658 }
1659 language_indent_size = *size;
1660 language_indent_sizes.next();
1661 }
1662
1663 let suggested_indent = indent_sizes
1664 .get(&suggestion.basis_row)
1665 .copied()
1666 .map(|e| e.0)
1667 .unwrap_or_else(|| {
1668 snapshot.indent_size_for_line(suggestion.basis_row)
1669 })
1670 .with_delta(suggestion.delta, language_indent_size);
1671
1672 if old_suggestions.get(&new_row).map_or(
1673 true,
1674 |(old_indentation, was_within_error)| {
1675 suggested_indent != *old_indentation
1676 && (!suggestion.within_error || *was_within_error)
1677 },
1678 ) {
1679 indent_sizes.insert(
1680 new_row,
1681 (suggested_indent, request.ignore_empty_lines),
1682 );
1683 }
1684 }
1685 }
1686
1687 if let (true, Some(original_indent_column)) =
1688 (request.is_block_mode, original_indent_column)
1689 {
1690 let new_indent =
1691 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1692 *indent
1693 } else {
1694 snapshot.indent_size_for_line(row_range.start)
1695 };
1696 let delta = new_indent.len as i64 - original_indent_column as i64;
1697 if delta != 0 {
1698 for row in row_range.skip(1) {
1699 indent_sizes.entry(row).or_insert_with(|| {
1700 let mut size = snapshot.indent_size_for_line(row);
1701 if size.kind == new_indent.kind {
1702 match delta.cmp(&0) {
1703 Ordering::Greater => size.len += delta as u32,
1704 Ordering::Less => {
1705 size.len = size.len.saturating_sub(-delta as u32)
1706 }
1707 Ordering::Equal => {}
1708 }
1709 }
1710 (size, request.ignore_empty_lines)
1711 });
1712 }
1713 }
1714 }
1715
1716 yield_now().await;
1717 }
1718 }
1719
1720 indent_sizes
1721 .into_iter()
1722 .filter_map(|(row, (indent, ignore_empty_lines))| {
1723 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1724 None
1725 } else {
1726 Some((row, indent))
1727 }
1728 })
1729 .collect()
1730 })
1731 }
1732
1733 fn apply_autoindents(
1734 &mut self,
1735 indent_sizes: BTreeMap<u32, IndentSize>,
1736 cx: &mut Context<Self>,
1737 ) {
1738 self.autoindent_requests.clear();
1739
1740 let edits: Vec<_> = indent_sizes
1741 .into_iter()
1742 .filter_map(|(row, indent_size)| {
1743 let current_size = indent_size_for_line(self, row);
1744 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1745 })
1746 .collect();
1747
1748 let preserve_preview = self.preserve_preview();
1749 self.edit(edits, None, cx);
1750 if preserve_preview {
1751 self.refresh_preview();
1752 }
1753 }
1754
    /// Creates a minimal edit that will cause the given row to be indented
1756 /// with the given size. After applying this edit, the length of the line
1757 /// will always be at least `new_size.len`.
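    ///
    /// A minimal sketch (the row and sizes are illustrative): growing a two-space
    /// indent to four spaces produces an insertion at the start of the row.
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// // Two spaces are inserted at the start of row 3.
    /// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
    /// ```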
1758 pub fn edit_for_indent_size_adjustment(
1759 row: u32,
1760 current_size: IndentSize,
1761 new_size: IndentSize,
1762 ) -> Option<(Range<Point>, String)> {
1763 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1765 Ordering::Greater => {
1766 let point = Point::new(row, 0);
1767 Some((
1768 point..point,
1769 iter::repeat(new_size.char())
1770 .take((new_size.len - current_size.len) as usize)
1771 .collect::<String>(),
1772 ))
1773 }
1774
1775 Ordering::Less => Some((
1776 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1777 String::new(),
1778 )),
1779
1780 Ordering::Equal => None,
1781 }
1782 } else {
1783 Some((
1784 Point::new(row, 0)..Point::new(row, current_size.len),
1785 iter::repeat(new_size.char())
1786 .take(new_size.len as usize)
1787 .collect::<String>(),
1788 ))
1789 }
1790 }
1791
1792 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1793 /// and the given new text.
1794 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1795 let old_text = self.as_rope().clone();
1796 let base_version = self.version();
1797 cx.background_executor()
1798 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1799 let old_text = old_text.to_string();
1800 let line_ending = LineEnding::detect(&new_text);
1801 LineEnding::normalize(&mut new_text);
1802 let edits = text_diff(&old_text, &new_text);
1803 Diff {
1804 base_version,
1805 line_ending,
1806 edits,
1807 }
1808 })
1809 }
1810
    /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1813 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1814 let old_text = self.as_rope().clone();
1815 let line_ending = self.line_ending();
1816 let base_version = self.version();
1817 cx.background_spawn(async move {
1818 let ranges = trailing_whitespace_ranges(&old_text);
1819 let empty = Arc::<str>::from("");
1820 Diff {
1821 base_version,
1822 line_ending,
1823 edits: ranges
1824 .into_iter()
1825 .map(|range| (range, empty.clone()))
1826 .collect(),
1827 }
1828 })
1829 }
1830
1831 /// Ensures that the buffer ends with a single newline character, and
1832 /// no other whitespace.
1833 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1834 let len = self.len();
1835 let mut offset = len;
1836 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1837 let non_whitespace_len = chunk
1838 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1839 .len();
1840 offset -= chunk.len();
1841 offset += non_whitespace_len;
1842 if non_whitespace_len != 0 {
1843 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1844 return;
1845 }
1846 break;
1847 }
1848 }
1849 self.edit([(offset..len, "\n")], None, cx);
1850 }
1851
1852 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1853 /// calculated, then adjust the diff to account for those changes, and discard any
1854 /// parts of the diff that conflict with those changes.
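    ///
    /// A hedged sketch, assuming `diff` was produced earlier by [`Buffer::diff`] against this
    /// same buffer:
    ///
    /// ```ignore
    /// // Hunks that conflict with edits made since `diff.base_version` are dropped;
    /// // the remaining hunks are applied in a single transaction.
    /// if let Some(transaction_id) = buffer.apply_diff(diff, cx) {
    ///     // `transaction_id` can be used later with `undo_transaction`, `merge_transactions`, etc.
    ///     buffer.finalize_last_transaction();
    /// }
    /// ```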
1855 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1856 // Check for any edits to the buffer that have occurred since this diff
1857 // was computed.
1858 let snapshot = self.snapshot();
1859 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1860 let mut delta = 0;
1861 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1862 while let Some(edit_since) = edits_since.peek() {
1863 // If the edit occurs after a diff hunk, then it does not
1864 // affect that hunk.
1865 if edit_since.old.start > range.end {
1866 break;
1867 }
1868 // If the edit precedes the diff hunk, then adjust the hunk
1869 // to reflect the edit.
1870 else if edit_since.old.end < range.start {
1871 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1872 edits_since.next();
1873 }
1874 // If the edit intersects a diff hunk, then discard that hunk.
1875 else {
1876 return None;
1877 }
1878 }
1879
1880 let start = (range.start as i64 + delta) as usize;
1881 let end = (range.end as i64 + delta) as usize;
1882 Some((start..end, new_text))
1883 });
1884
1885 self.start_transaction();
1886 self.text.set_line_ending(diff.line_ending);
1887 self.edit(adjusted_edits, None, cx);
1888 self.end_transaction(cx)
1889 }
1890
1891 fn has_unsaved_edits(&self) -> bool {
1892 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1893
1894 if last_version == self.version {
1895 self.has_unsaved_edits
1896 .set((last_version, has_unsaved_edits));
1897 return has_unsaved_edits;
1898 }
1899
1900 let has_edits = self.has_edits_since(&self.saved_version);
1901 self.has_unsaved_edits
1902 .set((self.version.clone(), has_edits));
1903 has_edits
1904 }
1905
1906 /// Checks if the buffer has unsaved changes.
1907 pub fn is_dirty(&self) -> bool {
1908 if self.capability == Capability::ReadOnly {
1909 return false;
1910 }
1911 if self.has_conflict || self.has_unsaved_edits() {
1912 return true;
1913 }
1914 match self.file.as_ref().map(|f| f.disk_state()) {
1915 Some(DiskState::New) => !self.is_empty(),
1916 Some(DiskState::Deleted) => true,
1917 _ => false,
1918 }
1919 }
1920
1921 /// Checks if the buffer and its file have both changed since the buffer
1922 /// was last saved or reloaded.
1923 pub fn has_conflict(&self) -> bool {
1924 if self.has_conflict {
1925 return true;
1926 }
1927 let Some(file) = self.file.as_ref() else {
1928 return false;
1929 };
1930 match file.disk_state() {
1931 DiskState::New => false,
1932 DiskState::Present { mtime } => match self.saved_mtime {
1933 Some(saved_mtime) => {
1934 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1935 }
1936 None => true,
1937 },
1938 DiskState::Deleted => true,
1939 }
1940 }
1941
1942 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1943 pub fn subscribe(&mut self) -> Subscription {
1944 self.text.subscribe()
1945 }
1946
1947 /// Starts a transaction, if one is not already in-progress. When undoing or
1948 /// redoing edits, all of the edits performed within a transaction are undone
1949 /// or redone together.
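    ///
    /// A minimal sketch (offsets are illustrative), assuming `cx: &mut Context<Buffer>`:
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(12..12, "}\n")], None, cx);
    /// buffer.end_transaction(cx);
    /// // A single undo now reverts both edits.
    /// ```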
1950 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1951 self.start_transaction_at(Instant::now())
1952 }
1953
1954 /// Starts a transaction, providing the current time. Subsequent transactions
1955 /// that occur within a short period of time will be grouped together. This
1956 /// is controlled by the buffer's undo grouping duration.
1957 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1958 self.transaction_depth += 1;
1959 if self.was_dirty_before_starting_transaction.is_none() {
1960 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1961 }
1962 self.text.start_transaction_at(now)
1963 }
1964
1965 /// Terminates the current transaction, if this is the outermost transaction.
1966 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
1967 self.end_transaction_at(Instant::now(), cx)
1968 }
1969
1970 /// Terminates the current transaction, providing the current time. Subsequent transactions
1971 /// that occur within a short period of time will be grouped together. This
1972 /// is controlled by the buffer's undo grouping duration.
1973 pub fn end_transaction_at(
1974 &mut self,
1975 now: Instant,
1976 cx: &mut Context<Self>,
1977 ) -> Option<TransactionId> {
1978 assert!(self.transaction_depth > 0);
1979 self.transaction_depth -= 1;
1980 let was_dirty = if self.transaction_depth == 0 {
1981 self.was_dirty_before_starting_transaction.take().unwrap()
1982 } else {
1983 false
1984 };
1985 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1986 self.did_edit(&start_version, was_dirty, cx);
1987 Some(transaction_id)
1988 } else {
1989 None
1990 }
1991 }
1992
1993 /// Manually add a transaction to the buffer's undo history.
1994 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1995 self.text.push_transaction(transaction, now);
1996 }
1997
    /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2000 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2001 self.text.finalize_last_transaction()
2002 }
2003
2004 /// Manually group all changes since a given transaction.
2005 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2006 self.text.group_until_transaction(transaction_id);
2007 }
2008
    /// Manually remove a transaction from the buffer's undo history.
2010 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
2011 self.text.forget_transaction(transaction_id);
2012 }
2013
2014 /// Manually merge two adjacent transactions in the buffer's undo history.
2015 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2016 self.text.merge_transactions(transaction, destination);
2017 }
2018
2019 /// Waits for the buffer to receive operations with the given timestamps.
2020 pub fn wait_for_edits(
2021 &mut self,
2022 edit_ids: impl IntoIterator<Item = clock::Lamport>,
2023 ) -> impl Future<Output = Result<()>> {
2024 self.text.wait_for_edits(edit_ids)
2025 }
2026
2027 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2028 pub fn wait_for_anchors(
2029 &mut self,
2030 anchors: impl IntoIterator<Item = Anchor>,
2031 ) -> impl 'static + Future<Output = Result<()>> {
2032 self.text.wait_for_anchors(anchors)
2033 }
2034
2035 /// Waits for the buffer to receive operations up to the given version.
2036 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
2037 self.text.wait_for_version(version)
2038 }
2039
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2042 pub fn give_up_waiting(&mut self) {
2043 self.text.give_up_waiting();
2044 }
2045
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2047 pub fn set_active_selections(
2048 &mut self,
2049 selections: Arc<[Selection<Anchor>]>,
2050 line_mode: bool,
2051 cursor_shape: CursorShape,
2052 cx: &mut Context<Self>,
2053 ) {
2054 let lamport_timestamp = self.text.lamport_clock.tick();
2055 self.remote_selections.insert(
2056 self.text.replica_id(),
2057 SelectionSet {
2058 selections: selections.clone(),
2059 lamport_timestamp,
2060 line_mode,
2061 cursor_shape,
2062 },
2063 );
2064 self.send_operation(
2065 Operation::UpdateSelections {
2066 selections,
2067 line_mode,
2068 lamport_timestamp,
2069 cursor_shape,
2070 },
2071 true,
2072 cx,
2073 );
2074 self.non_text_state_update_count += 1;
2075 cx.notify();
2076 }
2077
2078 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2079 /// this replica.
2080 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2081 if self
2082 .remote_selections
2083 .get(&self.text.replica_id())
2084 .map_or(true, |set| !set.selections.is_empty())
2085 {
2086 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2087 }
2088 }
2089
2090 /// Replaces the buffer's entire text.
2091 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2092 where
2093 T: Into<Arc<str>>,
2094 {
2095 self.autoindent_requests.clear();
2096 self.edit([(0..self.len(), text)], None, cx)
2097 }
2098
2099 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2100 /// delete, and a string of text to insert at that location.
2101 ///
2102 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2103 /// request for the edited ranges, which will be processed when the buffer finishes
2104 /// parsing.
2105 ///
    /// Parsing takes place at the end of a transaction, and may run synchronously
    /// or asynchronously, depending on the changes.
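    ///
    /// A minimal sketch (offsets are illustrative): replace one range and insert at another,
    /// auto-indenting each affected line.
    ///
    /// ```ignore
    /// buffer.edit(
    ///     [(0..3, "foo"), (10..10, "\nbar")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```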
2108 pub fn edit<I, S, T>(
2109 &mut self,
2110 edits_iter: I,
2111 autoindent_mode: Option<AutoindentMode>,
2112 cx: &mut Context<Self>,
2113 ) -> Option<clock::Lamport>
2114 where
2115 I: IntoIterator<Item = (Range<S>, T)>,
2116 S: ToOffset,
2117 T: Into<Arc<str>>,
2118 {
2119 // Skip invalid edits and coalesce contiguous ones.
2120 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2121 for (range, new_text) in edits_iter {
2122 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2123 if range.start > range.end {
2124 mem::swap(&mut range.start, &mut range.end);
2125 }
2126 let new_text = new_text.into();
2127 if !new_text.is_empty() || !range.is_empty() {
2128 if let Some((prev_range, prev_text)) = edits.last_mut() {
2129 if prev_range.end >= range.start {
2130 prev_range.end = cmp::max(prev_range.end, range.end);
2131 *prev_text = format!("{prev_text}{new_text}").into();
2132 } else {
2133 edits.push((range, new_text));
2134 }
2135 } else {
2136 edits.push((range, new_text));
2137 }
2138 }
2139 }
2140 if edits.is_empty() {
2141 return None;
2142 }
2143
2144 self.start_transaction();
2145 self.pending_autoindent.take();
2146 let autoindent_request = autoindent_mode
2147 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2148
2149 let edit_operation = self.text.edit(edits.iter().cloned());
2150 let edit_id = edit_operation.timestamp();
2151
2152 if let Some((before_edit, mode)) = autoindent_request {
2153 let mut delta = 0isize;
2154 let entries = edits
2155 .into_iter()
2156 .enumerate()
2157 .zip(&edit_operation.as_edit().unwrap().new_text)
2158 .map(|((ix, (range, _)), new_text)| {
2159 let new_text_length = new_text.len();
2160 let old_start = range.start.to_point(&before_edit);
2161 let new_start = (delta + range.start as isize) as usize;
2162 let range_len = range.end - range.start;
2163 delta += new_text_length as isize - range_len as isize;
2164
2165 // Decide what range of the insertion to auto-indent, and whether
2166 // the first line of the insertion should be considered a newly-inserted line
2167 // or an edit to an existing line.
2168 let mut range_of_insertion_to_indent = 0..new_text_length;
2169 let mut first_line_is_new = true;
2170
2171 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2172 let old_line_end = before_edit.line_len(old_start.row);
2173
2174 if old_start.column > old_line_start {
2175 first_line_is_new = false;
2176 }
2177
2178 if !new_text.contains('\n')
2179 && (old_start.column + (range_len as u32) < old_line_end
2180 || old_line_end == old_line_start)
2181 {
2182 first_line_is_new = false;
2183 }
2184
2185 // When inserting text starting with a newline, avoid auto-indenting the
2186 // previous line.
2187 if new_text.starts_with('\n') {
2188 range_of_insertion_to_indent.start += 1;
2189 first_line_is_new = true;
2190 }
2191
2192 let mut original_indent_column = None;
2193 if let AutoindentMode::Block {
2194 original_indent_columns,
2195 } = &mode
2196 {
2197 original_indent_column =
2198 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
2199 indent_size_for_text(
2200 new_text[range_of_insertion_to_indent.clone()].chars(),
2201 )
2202 .len
2203 }));
2204
2205 // Avoid auto-indenting the line after the edit.
2206 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2207 range_of_insertion_to_indent.end -= 1;
2208 }
2209 }
2210
2211 AutoindentRequestEntry {
2212 first_line_is_new,
2213 original_indent_column,
2214 indent_size: before_edit.language_indent_size_at(range.start, cx),
2215 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2216 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2217 }
2218 })
2219 .collect();
2220
2221 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2222 before_edit,
2223 entries,
2224 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2225 ignore_empty_lines: false,
2226 }));
2227 }
2228
2229 self.end_transaction(cx);
2230 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2231 Some(edit_id)
2232 }
2233
2234 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2235 if self.edits_since::<usize>(old_version).next().is_none() {
2236 return;
2237 }
2238
2239 self.reparse(cx);
2240
2241 cx.emit(BufferEvent::Edited);
2242 if was_dirty != self.is_dirty() {
2243 cx.emit(BufferEvent::DirtyChanged);
2244 }
2245 cx.notify();
2246 }
2247
2248 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2249 where
2250 I: IntoIterator<Item = Range<T>>,
2251 T: ToOffset + Copy,
2252 {
2253 let before_edit = self.snapshot();
2254 let entries = ranges
2255 .into_iter()
2256 .map(|range| AutoindentRequestEntry {
2257 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2258 first_line_is_new: true,
2259 indent_size: before_edit.language_indent_size_at(range.start, cx),
2260 original_indent_column: None,
2261 })
2262 .collect();
2263 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2264 before_edit,
2265 entries,
2266 is_block_mode: false,
2267 ignore_empty_lines: true,
2268 }));
2269 self.request_autoindent(cx);
2270 }
2271
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
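    ///
    /// A hedged sketch (the position is illustrative):
    ///
    /// ```ignore
    /// let start = buffer.insert_empty_line(Point::new(5, 3), true, true, cx);
    /// // `start` is the beginning of the newly created empty line.
    /// ```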
2274 pub fn insert_empty_line(
2275 &mut self,
2276 position: impl ToPoint,
2277 space_above: bool,
2278 space_below: bool,
2279 cx: &mut Context<Self>,
2280 ) -> Point {
2281 let mut position = position.to_point(self);
2282
2283 self.start_transaction();
2284
2285 self.edit(
2286 [(position..position, "\n")],
2287 Some(AutoindentMode::EachLine),
2288 cx,
2289 );
2290
2291 if position.column > 0 {
2292 position += Point::new(1, 0);
2293 }
2294
2295 if !self.is_line_blank(position.row) {
2296 self.edit(
2297 [(position..position, "\n")],
2298 Some(AutoindentMode::EachLine),
2299 cx,
2300 );
2301 }
2302
2303 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2304 self.edit(
2305 [(position..position, "\n")],
2306 Some(AutoindentMode::EachLine),
2307 cx,
2308 );
2309 position.row += 1;
2310 }
2311
2312 if space_below
2313 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2314 {
2315 self.edit(
2316 [(position..position, "\n")],
2317 Some(AutoindentMode::EachLine),
2318 cx,
2319 );
2320 }
2321
2322 self.end_transaction(cx);
2323
2324 position
2325 }
2326
2327 /// Applies the given remote operations to the buffer.
2328 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2329 self.pending_autoindent.take();
2330 let was_dirty = self.is_dirty();
2331 let old_version = self.version.clone();
2332 let mut deferred_ops = Vec::new();
2333 let buffer_ops = ops
2334 .into_iter()
2335 .filter_map(|op| match op {
2336 Operation::Buffer(op) => Some(op),
2337 _ => {
2338 if self.can_apply_op(&op) {
2339 self.apply_op(op, cx);
2340 } else {
2341 deferred_ops.push(op);
2342 }
2343 None
2344 }
2345 })
2346 .collect::<Vec<_>>();
2347 for operation in buffer_ops.iter() {
2348 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2349 }
2350 self.text.apply_ops(buffer_ops);
2351 self.deferred_ops.insert(deferred_ops);
2352 self.flush_deferred_ops(cx);
2353 self.did_edit(&old_version, was_dirty, cx);
        // Notify independently of whether the buffer was edited, as the operations could include a
        // selection update.
2356 cx.notify();
2357 }
2358
2359 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2360 let mut deferred_ops = Vec::new();
2361 for op in self.deferred_ops.drain().iter().cloned() {
2362 if self.can_apply_op(&op) {
2363 self.apply_op(op, cx);
2364 } else {
2365 deferred_ops.push(op);
2366 }
2367 }
2368 self.deferred_ops.insert(deferred_ops);
2369 }
2370
2371 pub fn has_deferred_ops(&self) -> bool {
2372 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2373 }
2374
2375 fn can_apply_op(&self, operation: &Operation) -> bool {
2376 match operation {
2377 Operation::Buffer(_) => {
2378 unreachable!("buffer operations should never be applied at this layer")
2379 }
2380 Operation::UpdateDiagnostics {
2381 diagnostics: diagnostic_set,
2382 ..
2383 } => diagnostic_set.iter().all(|diagnostic| {
2384 self.text.can_resolve(&diagnostic.range.start)
2385 && self.text.can_resolve(&diagnostic.range.end)
2386 }),
2387 Operation::UpdateSelections { selections, .. } => selections
2388 .iter()
2389 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2390 Operation::UpdateCompletionTriggers { .. } => true,
2391 }
2392 }
2393
2394 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2395 match operation {
2396 Operation::Buffer(_) => {
2397 unreachable!("buffer operations should never be applied at this layer")
2398 }
2399 Operation::UpdateDiagnostics {
2400 server_id,
2401 diagnostics: diagnostic_set,
2402 lamport_timestamp,
2403 } => {
2404 let snapshot = self.snapshot();
2405 self.apply_diagnostic_update(
2406 server_id,
2407 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2408 lamport_timestamp,
2409 cx,
2410 );
2411 }
2412 Operation::UpdateSelections {
2413 selections,
2414 lamport_timestamp,
2415 line_mode,
2416 cursor_shape,
2417 } => {
2418 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2419 if set.lamport_timestamp > lamport_timestamp {
2420 return;
2421 }
2422 }
2423
2424 self.remote_selections.insert(
2425 lamport_timestamp.replica_id,
2426 SelectionSet {
2427 selections,
2428 lamport_timestamp,
2429 line_mode,
2430 cursor_shape,
2431 },
2432 );
2433 self.text.lamport_clock.observe(lamport_timestamp);
2434 self.non_text_state_update_count += 1;
2435 }
2436 Operation::UpdateCompletionTriggers {
2437 triggers,
2438 lamport_timestamp,
2439 server_id,
2440 } => {
2441 if triggers.is_empty() {
2442 self.completion_triggers_per_language_server
2443 .remove(&server_id);
2444 self.completion_triggers = self
2445 .completion_triggers_per_language_server
2446 .values()
2447 .flat_map(|triggers| triggers.into_iter().cloned())
2448 .collect();
2449 } else {
2450 self.completion_triggers_per_language_server
2451 .insert(server_id, triggers.iter().cloned().collect());
2452 self.completion_triggers.extend(triggers);
2453 }
2454 self.text.lamport_clock.observe(lamport_timestamp);
2455 }
2456 }
2457 }
2458
2459 fn apply_diagnostic_update(
2460 &mut self,
2461 server_id: LanguageServerId,
2462 diagnostics: DiagnosticSet,
2463 lamport_timestamp: clock::Lamport,
2464 cx: &mut Context<Self>,
2465 ) {
2466 if lamport_timestamp > self.diagnostics_timestamp {
2467 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2468 if diagnostics.is_empty() {
2469 if let Ok(ix) = ix {
2470 self.diagnostics.remove(ix);
2471 }
2472 } else {
2473 match ix {
2474 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2475 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2476 };
2477 }
2478 self.diagnostics_timestamp = lamport_timestamp;
2479 self.non_text_state_update_count += 1;
2480 self.text.lamport_clock.observe(lamport_timestamp);
2481 cx.notify();
2482 cx.emit(BufferEvent::DiagnosticsUpdated);
2483 }
2484 }
2485
2486 fn send_operation(&self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2487 cx.emit(BufferEvent::Operation {
2488 operation,
2489 is_local,
2490 });
2491 }
2492
2493 /// Removes the selections for a given peer.
2494 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2495 self.remote_selections.remove(&replica_id);
2496 cx.notify();
2497 }
2498
2499 /// Undoes the most recent transaction.
2500 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2501 let was_dirty = self.is_dirty();
2502 let old_version = self.version.clone();
2503
2504 if let Some((transaction_id, operation)) = self.text.undo() {
2505 self.send_operation(Operation::Buffer(operation), true, cx);
2506 self.did_edit(&old_version, was_dirty, cx);
2507 Some(transaction_id)
2508 } else {
2509 None
2510 }
2511 }
2512
2513 /// Manually undoes a specific transaction in the buffer's undo history.
2514 pub fn undo_transaction(
2515 &mut self,
2516 transaction_id: TransactionId,
2517 cx: &mut Context<Self>,
2518 ) -> bool {
2519 let was_dirty = self.is_dirty();
2520 let old_version = self.version.clone();
2521 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2522 self.send_operation(Operation::Buffer(operation), true, cx);
2523 self.did_edit(&old_version, was_dirty, cx);
2524 true
2525 } else {
2526 false
2527 }
2528 }
2529
2530 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2531 pub fn undo_to_transaction(
2532 &mut self,
2533 transaction_id: TransactionId,
2534 cx: &mut Context<Self>,
2535 ) -> bool {
2536 let was_dirty = self.is_dirty();
2537 let old_version = self.version.clone();
2538
2539 let operations = self.text.undo_to_transaction(transaction_id);
2540 let undone = !operations.is_empty();
2541 for operation in operations {
2542 self.send_operation(Operation::Buffer(operation), true, cx);
2543 }
2544 if undone {
2545 self.did_edit(&old_version, was_dirty, cx)
2546 }
2547 undone
2548 }
2549
2550 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2551 let was_dirty = self.is_dirty();
2552 let operation = self.text.undo_operations(counts);
2553 let old_version = self.version.clone();
2554 self.send_operation(Operation::Buffer(operation), true, cx);
2555 self.did_edit(&old_version, was_dirty, cx);
2556 }
2557
    /// Redoes the most recently undone transaction.
2559 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2560 let was_dirty = self.is_dirty();
2561 let old_version = self.version.clone();
2562
2563 if let Some((transaction_id, operation)) = self.text.redo() {
2564 self.send_operation(Operation::Buffer(operation), true, cx);
2565 self.did_edit(&old_version, was_dirty, cx);
2566 Some(transaction_id)
2567 } else {
2568 None
2569 }
2570 }
2571
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2573 pub fn redo_to_transaction(
2574 &mut self,
2575 transaction_id: TransactionId,
2576 cx: &mut Context<Self>,
2577 ) -> bool {
2578 let was_dirty = self.is_dirty();
2579 let old_version = self.version.clone();
2580
2581 let operations = self.text.redo_to_transaction(transaction_id);
2582 let redone = !operations.is_empty();
2583 for operation in operations {
2584 self.send_operation(Operation::Buffer(operation), true, cx);
2585 }
2586 if redone {
2587 self.did_edit(&old_version, was_dirty, cx)
2588 }
2589 redone
2590 }
2591
    /// Overrides the current completion triggers with the user-provided completion triggers.
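    ///
    /// A hedged sketch, assuming `server_id` identifies a running language server:
    ///
    /// ```ignore
    /// buffer.set_completion_triggers(
    ///     server_id,
    ///     BTreeSet::from([".".to_string(), "::".to_string()]),
    ///     cx,
    /// );
    /// ```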
2593 pub fn set_completion_triggers(
2594 &mut self,
2595 server_id: LanguageServerId,
2596 triggers: BTreeSet<String>,
2597 cx: &mut Context<Self>,
2598 ) {
2599 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2600 if triggers.is_empty() {
2601 self.completion_triggers_per_language_server
2602 .remove(&server_id);
2603 self.completion_triggers = self
2604 .completion_triggers_per_language_server
2605 .values()
2606 .flat_map(|triggers| triggers.into_iter().cloned())
2607 .collect();
2608 } else {
2609 self.completion_triggers_per_language_server
2610 .insert(server_id, triggers.clone());
2611 self.completion_triggers.extend(triggers.iter().cloned());
2612 }
2613 self.send_operation(
2614 Operation::UpdateCompletionTriggers {
2615 triggers: triggers.iter().cloned().collect(),
2616 lamport_timestamp: self.completion_triggers_timestamp,
2617 server_id,
2618 },
2619 true,
2620 cx,
2621 );
2622 cx.notify();
2623 }
2624
2625 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2627 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2628 &self.completion_triggers
2629 }
2630
    /// Call this directly after performing edits to prevent the preview tab
    /// from being dismissed by those edits. It causes [`Buffer::preserve_preview`]
    /// to return true until there are additional edits.
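    ///
    /// A minimal sketch mirroring how autoindent application preserves preview status
    /// (`edits` stands in for whatever batch you are applying):
    ///
    /// ```ignore
    /// let preserve_preview = buffer.preserve_preview();
    /// buffer.edit(edits, None, cx);
    /// if preserve_preview {
    ///     buffer.refresh_preview();
    /// }
    /// ```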
2634 pub fn refresh_preview(&mut self) {
2635 self.preview_version = self.version.clone();
2636 }
2637
2638 /// Whether we should preserve the preview status of a tab containing this buffer.
2639 pub fn preserve_preview(&self) -> bool {
2640 !self.has_edits_since(&self.preview_version)
2641 }
2642}
2643
2644#[doc(hidden)]
2645#[cfg(any(test, feature = "test-support"))]
2646impl Buffer {
2647 pub fn edit_via_marked_text(
2648 &mut self,
2649 marked_string: &str,
2650 autoindent_mode: Option<AutoindentMode>,
2651 cx: &mut Context<Self>,
2652 ) {
2653 let edits = self.edits_for_marked_text(marked_string);
2654 self.edit(edits, autoindent_mode, cx);
2655 }
2656
2657 pub fn set_group_interval(&mut self, group_interval: Duration) {
2658 self.text.set_group_interval(group_interval);
2659 }
2660
2661 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2662 where
2663 T: rand::Rng,
2664 {
2665 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2666 let mut last_end = None;
2667 for _ in 0..old_range_count {
2668 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2669 break;
2670 }
2671
2672 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2673 let mut range = self.random_byte_range(new_start, rng);
2674 if rng.gen_bool(0.2) {
2675 mem::swap(&mut range.start, &mut range.end);
2676 }
2677 last_end = Some(range.end);
2678
2679 let new_text_len = rng.gen_range(0..10);
2680 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2681 new_text = new_text.to_uppercase();
2682
2683 edits.push((range, new_text));
2684 }
2685 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2686 self.edit(edits, None, cx);
2687 }
2688
2689 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2690 let was_dirty = self.is_dirty();
2691 let old_version = self.version.clone();
2692
2693 let ops = self.text.randomly_undo_redo(rng);
2694 if !ops.is_empty() {
2695 for op in ops {
2696 self.send_operation(Operation::Buffer(op), true, cx);
2697 self.did_edit(&old_version, was_dirty, cx);
2698 }
2699 }
2700 }
2701}
2702
2703impl EventEmitter<BufferEvent> for Buffer {}
2704
2705impl Deref for Buffer {
2706 type Target = TextBuffer;
2707
2708 fn deref(&self) -> &Self::Target {
2709 &self.text
2710 }
2711}
2712
2713impl BufferSnapshot {
2714 /// Returns [`IndentSize`] for a given line that respects user settings and
2715 /// language preferences.
2716 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2717 indent_size_for_line(self, row)
2718 }
2719
2720 /// Returns [`IndentSize`] for a given position that respects user settings
2721 /// and language preferences.
2722 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2723 let settings = language_settings(
2724 self.language_at(position).map(|l| l.name()),
2725 self.file(),
2726 cx,
2727 );
2728 if settings.hard_tabs {
2729 IndentSize::tab()
2730 } else {
2731 IndentSize::spaces(settings.tab_size.get())
2732 }
2733 }
2734
2735 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2736 /// is passed in as `single_indent_size`.
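    ///
    /// A minimal sketch (the rows are illustrative), using a four-space indent unit:
    ///
    /// ```ignore
    /// let indents = snapshot.suggested_indents(1..4, IndentSize::spaces(4));
    /// for (row, indent) in indents {
    ///     println!("row {row}: indent to {} columns", indent.len);
    /// }
    /// ```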
2737 pub fn suggested_indents(
2738 &self,
2739 rows: impl Iterator<Item = u32>,
2740 single_indent_size: IndentSize,
2741 ) -> BTreeMap<u32, IndentSize> {
2742 let mut result = BTreeMap::new();
2743
2744 for row_range in contiguous_ranges(rows, 10) {
2745 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2746 Some(suggestions) => suggestions,
2747 _ => break,
2748 };
2749
2750 for (row, suggestion) in row_range.zip(suggestions) {
2751 let indent_size = if let Some(suggestion) = suggestion {
2752 result
2753 .get(&suggestion.basis_row)
2754 .copied()
2755 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2756 .with_delta(suggestion.delta, single_indent_size)
2757 } else {
2758 self.indent_size_for_line(row)
2759 };
2760
2761 result.insert(row, indent_size);
2762 }
2763 }
2764
2765 result
2766 }
2767
2768 fn suggest_autoindents(
2769 &self,
2770 row_range: Range<u32>,
2771 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2772 let config = &self.language.as_ref()?.config;
2773 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2774
2775 // Find the suggested indentation ranges based on the syntax tree.
2776 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2777 let end = Point::new(row_range.end, 0);
2778 let range = (start..end).to_offset(&self.text);
2779 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2780 Some(&grammar.indents_config.as_ref()?.query)
2781 });
2782 let indent_configs = matches
2783 .grammars()
2784 .iter()
2785 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2786 .collect::<Vec<_>>();
2787
2788 let mut indent_ranges = Vec::<Range<Point>>::new();
2789 let mut outdent_positions = Vec::<Point>::new();
2790 while let Some(mat) = matches.peek() {
2791 let mut start: Option<Point> = None;
2792 let mut end: Option<Point> = None;
2793
2794 let config = &indent_configs[mat.grammar_index];
2795 for capture in mat.captures {
2796 if capture.index == config.indent_capture_ix {
2797 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2798 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2799 } else if Some(capture.index) == config.start_capture_ix {
2800 start = Some(Point::from_ts_point(capture.node.end_position()));
2801 } else if Some(capture.index) == config.end_capture_ix {
2802 end = Some(Point::from_ts_point(capture.node.start_position()));
2803 } else if Some(capture.index) == config.outdent_capture_ix {
2804 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2805 }
2806 }
2807
2808 matches.advance();
2809 if let Some((start, end)) = start.zip(end) {
2810 if start.row == end.row {
2811 continue;
2812 }
2813
2814 let range = start..end;
2815 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2816 Err(ix) => indent_ranges.insert(ix, range),
2817 Ok(ix) => {
2818 let prev_range = &mut indent_ranges[ix];
2819 prev_range.end = prev_range.end.max(range.end);
2820 }
2821 }
2822 }
2823 }
2824
2825 let mut error_ranges = Vec::<Range<Point>>::new();
2826 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2827 grammar.error_query.as_ref()
2828 });
2829 while let Some(mat) = matches.peek() {
2830 let node = mat.captures[0].node;
2831 let start = Point::from_ts_point(node.start_position());
2832 let end = Point::from_ts_point(node.end_position());
2833 let range = start..end;
2834 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2835 Ok(ix) | Err(ix) => ix,
2836 };
2837 let mut end_ix = ix;
2838 while let Some(existing_range) = error_ranges.get(end_ix) {
2839 if existing_range.end < end {
2840 end_ix += 1;
2841 } else {
2842 break;
2843 }
2844 }
2845 error_ranges.splice(ix..end_ix, [range]);
2846 matches.advance();
2847 }
2848
2849 outdent_positions.sort();
2850 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and set its end to the outdent position.
2853 if let Some(range_to_truncate) = indent_ranges
2854 .iter_mut()
2855 .filter(|indent_range| indent_range.contains(&outdent_position))
2856 .last()
2857 {
2858 range_to_truncate.end = outdent_position;
2859 }
2860 }
2861
        // Find the suggested indentation increases and decreases based on regexes.
2863 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2864 self.for_each_line(
2865 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2866 ..Point::new(row_range.end, 0),
2867 |row, line| {
2868 if config
2869 .decrease_indent_pattern
2870 .as_ref()
2871 .map_or(false, |regex| regex.is_match(line))
2872 {
2873 indent_change_rows.push((row, Ordering::Less));
2874 }
2875 if config
2876 .increase_indent_pattern
2877 .as_ref()
2878 .map_or(false, |regex| regex.is_match(line))
2879 {
2880 indent_change_rows.push((row + 1, Ordering::Greater));
2881 }
2882 },
2883 );
2884
2885 let mut indent_changes = indent_change_rows.into_iter().peekable();
2886 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2887 prev_non_blank_row.unwrap_or(0)
2888 } else {
2889 row_range.start.saturating_sub(1)
2890 };
2891 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2892 Some(row_range.map(move |row| {
2893 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2894
2895 let mut indent_from_prev_row = false;
2896 let mut outdent_from_prev_row = false;
2897 let mut outdent_to_row = u32::MAX;
2898 let mut from_regex = false;
2899
2900 while let Some((indent_row, delta)) = indent_changes.peek() {
2901 match indent_row.cmp(&row) {
2902 Ordering::Equal => match delta {
2903 Ordering::Less => {
2904 from_regex = true;
2905 outdent_from_prev_row = true
2906 }
2907 Ordering::Greater => {
2908 indent_from_prev_row = true;
2909 from_regex = true
2910 }
2911 _ => {}
2912 },
2913
2914 Ordering::Greater => break,
2915 Ordering::Less => {}
2916 }
2917
2918 indent_changes.next();
2919 }
2920
2921 for range in &indent_ranges {
2922 if range.start.row >= row {
2923 break;
2924 }
2925 if range.start.row == prev_row && range.end > row_start {
2926 indent_from_prev_row = true;
2927 }
2928 if range.end > prev_row_start && range.end <= row_start {
2929 outdent_to_row = outdent_to_row.min(range.start.row);
2930 }
2931 }
2932
2933 let within_error = error_ranges
2934 .iter()
2935 .any(|e| e.start.row < row && e.end > row_start);
2936
2937 let suggestion = if outdent_to_row == prev_row
2938 || (outdent_from_prev_row && indent_from_prev_row)
2939 {
2940 Some(IndentSuggestion {
2941 basis_row: prev_row,
2942 delta: Ordering::Equal,
2943 within_error: within_error && !from_regex,
2944 })
2945 } else if indent_from_prev_row {
2946 Some(IndentSuggestion {
2947 basis_row: prev_row,
2948 delta: Ordering::Greater,
2949 within_error: within_error && !from_regex,
2950 })
2951 } else if outdent_to_row < prev_row {
2952 Some(IndentSuggestion {
2953 basis_row: outdent_to_row,
2954 delta: Ordering::Equal,
2955 within_error: within_error && !from_regex,
2956 })
2957 } else if outdent_from_prev_row {
2958 Some(IndentSuggestion {
2959 basis_row: prev_row,
2960 delta: Ordering::Less,
2961 within_error: within_error && !from_regex,
2962 })
2963 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2964 {
2965 Some(IndentSuggestion {
2966 basis_row: prev_row,
2967 delta: Ordering::Equal,
2968 within_error: within_error && !from_regex,
2969 })
2970 } else {
2971 None
2972 };
2973
2974 prev_row = row;
2975 prev_row_start = row_start;
2976 suggestion
2977 }))
2978 }
2979
2980 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2981 while row > 0 {
2982 row -= 1;
2983 if !self.is_line_blank(row) {
2984 return Some(row);
2985 }
2986 }
2987 None
2988 }
2989
2990 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
2991 let captures = self.syntax.captures(range, &self.text, |grammar| {
2992 grammar.highlights_query.as_ref()
2993 });
2994 let highlight_maps = captures
2995 .grammars()
2996 .iter()
2997 .map(|grammar| grammar.highlight_map())
2998 .collect();
2999 (captures, highlight_maps)
3000 }
3001
3002 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3003 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3004 /// returned in chunks where each chunk has a single syntax highlighting style and
3005 /// diagnostic status.
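    ///
    /// A minimal sketch of collecting the text and highlights for a range (chunk boundaries
    /// are arbitrary, so callers should not rely on their sizes):
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // `chunk.syntax_highlight_id` carries the highlight for this span, if any.
    ///     text.push_str(chunk.text);
    /// }
    /// ```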
3006 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
3007 let range = range.start.to_offset(self)..range.end.to_offset(self);
3008
3009 let mut syntax = None;
3010 if language_aware {
3011 syntax = Some(self.get_highlights(range.clone()));
3012 }
3013 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3014 let diagnostics = language_aware;
3015 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3016 }
3017
3018 pub fn highlighted_text_for_range<T: ToOffset>(
3019 &self,
3020 range: Range<T>,
3021 override_style: Option<HighlightStyle>,
3022 syntax_theme: &SyntaxTheme,
3023 ) -> HighlightedText {
3024 HighlightedText::from_buffer_range(
3025 range,
3026 &self.text,
3027 &self.syntax,
3028 override_style,
3029 syntax_theme,
3030 )
3031 }
3032
    /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
3035 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3036 let mut line = String::new();
3037 let mut row = range.start.row;
3038 for chunk in self
3039 .as_rope()
3040 .chunks_in_range(range.to_offset(self))
3041 .chain(["\n"])
3042 {
3043 for (newline_ix, text) in chunk.split('\n').enumerate() {
3044 if newline_ix > 0 {
3045 callback(row, &line);
3046 row += 1;
3047 line.clear();
3048 }
3049 line.push_str(text);
3050 }
3051 }
3052 }
3053
3054 /// Iterates over every [`SyntaxLayer`] in the buffer.
3055 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
3056 self.syntax
3057 .layers_for_range(0..self.len(), &self.text, true)
3058 }
3059
3060 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
3061 let offset = position.to_offset(self);
3062 self.syntax
3063 .layers_for_range(offset..offset, &self.text, false)
3064 .filter(|l| l.node().end_byte() > offset)
3065 .last()
3066 }
3067
3068 /// Returns the main [`Language`].
3069 pub fn language(&self) -> Option<&Arc<Language>> {
3070 self.language.as_ref()
3071 }
3072
3073 /// Returns the [`Language`] at the given location.
3074 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3075 self.syntax_layer_at(position)
3076 .map(|info| info.language)
3077 .or(self.language.as_ref())
3078 }
3079
3080 /// Returns the settings for the language at the given location.
3081 pub fn settings_at<'a, D: ToOffset>(
3082 &'a self,
3083 position: D,
3084 cx: &'a App,
3085 ) -> Cow<'a, LanguageSettings> {
3086 language_settings(
3087 self.language_at(position).map(|l| l.name()),
3088 self.file.as_ref(),
3089 cx,
3090 )
3091 }
3092
3093 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3094 CharClassifier::new(self.language_scope_at(point))
3095 }
3096
3097 /// Returns the [`LanguageScope`] at the given location.
3098 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3099 let offset = position.to_offset(self);
3100 let mut scope = None;
3101 let mut smallest_range: Option<Range<usize>> = None;
3102
3103 // Use the layer that has the smallest node intersecting the given point.
3104 for layer in self
3105 .syntax
3106 .layers_for_range(offset..offset, &self.text, false)
3107 {
3108 let mut cursor = layer.node().walk();
3109
3110 let mut range = None;
3111 loop {
3112 let child_range = cursor.node().byte_range();
3113 if !child_range.to_inclusive().contains(&offset) {
3114 break;
3115 }
3116
3117 range = Some(child_range);
3118 if cursor.goto_first_child_for_byte(offset).is_none() {
3119 break;
3120 }
3121 }
3122
3123 if let Some(range) = range {
3124 if smallest_range
3125 .as_ref()
3126 .map_or(true, |smallest_range| range.len() < smallest_range.len())
3127 {
3128 smallest_range = Some(range);
3129 scope = Some(LanguageScope {
3130 language: layer.language.clone(),
3131 override_id: layer.override_id(offset, &self.text),
3132 });
3133 }
3134 }
3135 }
3136
3137 scope.or_else(|| {
3138 self.language.clone().map(|language| LanguageScope {
3139 language,
3140 override_id: None,
3141 })
3142 })
3143 }
3144
3145 /// Returns a tuple of the range and character kind of the word
3146 /// surrounding the given position.
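    ///
    /// A hedged sketch, assuming `offset` is a cursor position in the buffer:
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(offset);
    /// if matches!(kind, Some(CharKind::Word)) {
    ///     // `range` covers the word under (or adjacent to) the cursor.
    /// }
    /// ```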
3147 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3148 let mut start = start.to_offset(self);
3149 let mut end = start;
3150 let mut next_chars = self.chars_at(start).peekable();
3151 let mut prev_chars = self.reversed_chars_at(start).peekable();
3152
3153 let classifier = self.char_classifier_at(start);
3154 let word_kind = cmp::max(
3155 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3156 next_chars.peek().copied().map(|c| classifier.kind(c)),
3157 );
3158
3159 for ch in prev_chars {
3160 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3161 start -= ch.len_utf8();
3162 } else {
3163 break;
3164 }
3165 }
3166
3167 for ch in next_chars {
3168 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3169 end += ch.len_utf8();
3170 } else {
3171 break;
3172 }
3173 }
3174
3175 (start..end, word_kind)
3176 }
3177
3178 /// Returns the closest syntax node enclosing the given range.
3179 pub fn syntax_ancestor<'a, T: ToOffset>(
3180 &'a self,
3181 range: Range<T>,
3182 ) -> Option<tree_sitter::Node<'a>> {
3183 let range = range.start.to_offset(self)..range.end.to_offset(self);
3184 let mut result: Option<tree_sitter::Node<'a>> = None;
3185 'outer: for layer in self
3186 .syntax
3187 .layers_for_range(range.clone(), &self.text, true)
3188 {
3189 let mut cursor = layer.node().walk();
3190
3191 // Descend to the first leaf that touches the start of the range,
3192 // and if the range is non-empty, extends beyond the start.
3193 while cursor.goto_first_child_for_byte(range.start).is_some() {
3194 if !range.is_empty() && cursor.node().end_byte() == range.start {
3195 cursor.goto_next_sibling();
3196 }
3197 }
3198
3199 // Ascend to the smallest ancestor that strictly contains the range.
3200 loop {
3201 let node_range = cursor.node().byte_range();
3202 if node_range.start <= range.start
3203 && node_range.end >= range.end
3204 && node_range.len() > range.len()
3205 {
3206 break;
3207 }
3208 if !cursor.goto_parent() {
3209 continue 'outer;
3210 }
3211 }
3212
3213 let left_node = cursor.node();
3214 let mut layer_result = left_node;
3215
3216 // For an empty range, try to find another node immediately to the right of the range.
3217 if left_node.end_byte() == range.start {
3218 let mut right_node = None;
3219 while !cursor.goto_next_sibling() {
3220 if !cursor.goto_parent() {
3221 break;
3222 }
3223 }
3224
3225 while cursor.node().start_byte() == range.start {
3226 right_node = Some(cursor.node());
3227 if !cursor.goto_first_child() {
3228 break;
3229 }
3230 }
3231
3232 // If there is a candidate node on both sides of the (empty) range, then
3233 // decide between the two by favoring a named node over an anonymous token.
3234 // If both nodes are the same in that regard, favor the right one.
3235 if let Some(right_node) = right_node {
3236 if right_node.is_named() || !left_node.is_named() {
3237 layer_result = right_node;
3238 }
3239 }
3240 }
3241
3242 if let Some(previous_result) = &result {
3243 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3244 continue;
3245 }
3246 }
3247 result = Some(layer_result);
3248 }
3249
3250 result
3251 }
3252
3253 /// Returns the outline for the buffer.
3254 ///
3255 /// This method allows passing an optional [`SyntaxTheme`] to
3256 /// syntax-highlight the returned symbols.
3257 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3258 self.outline_items_containing(0..self.len(), true, theme)
3259 .map(Outline::new)
3260 }
3261
3262 /// Returns all the symbols that contain the given position.
3263 ///
3264 /// This method allows passing an optional [`SyntaxTheme`] to
3265 /// syntax-highlight the returned symbols.
3266 pub fn symbols_containing<T: ToOffset>(
3267 &self,
3268 position: T,
3269 theme: Option<&SyntaxTheme>,
3270 ) -> Option<Vec<OutlineItem<Anchor>>> {
3271 let position = position.to_offset(self);
3272 let mut items = self.outline_items_containing(
3273 position.saturating_sub(1)..self.len().min(position + 1),
3274 false,
3275 theme,
3276 )?;
3277 let mut prev_depth = None;
3278 items.retain(|item| {
3279 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3280 prev_depth = Some(item.depth);
3281 result
3282 });
3283 Some(items)
3284 }
3285
3286 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3287 let range = range.to_offset(self);
3288 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3289 grammar.outline_config.as_ref().map(|c| &c.query)
3290 });
3291 let configs = matches
3292 .grammars()
3293 .iter()
3294 .map(|g| g.outline_config.as_ref().unwrap())
3295 .collect::<Vec<_>>();
3296
3297 while let Some(mat) = matches.peek() {
3298 let config = &configs[mat.grammar_index];
3299 let containing_item_node = maybe!({
3300 let item_node = mat.captures.iter().find_map(|cap| {
3301 if cap.index == config.item_capture_ix {
3302 Some(cap.node)
3303 } else {
3304 None
3305 }
3306 })?;
3307
3308 let item_byte_range = item_node.byte_range();
3309 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3310 None
3311 } else {
3312 Some(item_node)
3313 }
3314 });
3315
3316 if let Some(item_node) = containing_item_node {
3317 return Some(
3318 Point::from_ts_point(item_node.start_position())
3319 ..Point::from_ts_point(item_node.end_position()),
3320 );
3321 }
3322
3323 matches.advance();
3324 }
3325 None
3326 }
3327
3328 pub fn outline_items_containing<T: ToOffset>(
3329 &self,
3330 range: Range<T>,
3331 include_extra_context: bool,
3332 theme: Option<&SyntaxTheme>,
3333 ) -> Option<Vec<OutlineItem<Anchor>>> {
3334 let range = range.to_offset(self);
3335 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3336 grammar.outline_config.as_ref().map(|c| &c.query)
3337 });
3338 let configs = matches
3339 .grammars()
3340 .iter()
3341 .map(|g| g.outline_config.as_ref().unwrap())
3342 .collect::<Vec<_>>();
3343
3344 let mut items = Vec::new();
3345 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3346 while let Some(mat) = matches.peek() {
3347 let config = &configs[mat.grammar_index];
3348 if let Some(item) =
3349 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3350 {
3351 items.push(item);
3352 } else if let Some(capture) = mat
3353 .captures
3354 .iter()
3355 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3356 {
3357 let capture_range = capture.node.start_position()..capture.node.end_position();
3358 let mut capture_row_range =
3359 capture_range.start.row as u32..capture_range.end.row as u32;
3360 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3361 {
3362 capture_row_range.end -= 1;
3363 }
3364 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3365 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3366 last_row_range.end = capture_row_range.end;
3367 } else {
3368 annotation_row_ranges.push(capture_row_range);
3369 }
3370 } else {
3371 annotation_row_ranges.push(capture_row_range);
3372 }
3373 }
3374 matches.advance();
3375 }
3376
3377 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3378
3379 // Assign depths based on containment relationships and convert to anchors.
3380 let mut item_ends_stack = Vec::<Point>::new();
3381 let mut anchor_items = Vec::new();
3382 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3383 for item in items {
3384 while let Some(last_end) = item_ends_stack.last().copied() {
3385 if last_end < item.range.end {
3386 item_ends_stack.pop();
3387 } else {
3388 break;
3389 }
3390 }
3391
3392 let mut annotation_row_range = None;
3393 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3394 let row_preceding_item = item.range.start.row.saturating_sub(1);
3395 if next_annotation_row_range.end < row_preceding_item {
3396 annotation_row_ranges.next();
3397 } else {
3398 if next_annotation_row_range.end == row_preceding_item {
3399 annotation_row_range = Some(next_annotation_row_range.clone());
3400 annotation_row_ranges.next();
3401 }
3402 break;
3403 }
3404 }
3405
3406 anchor_items.push(OutlineItem {
3407 depth: item_ends_stack.len(),
3408 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3409 text: item.text,
3410 highlight_ranges: item.highlight_ranges,
3411 name_ranges: item.name_ranges,
3412 body_range: item.body_range.map(|body_range| {
3413 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3414 }),
3415 annotation_range: annotation_row_range.map(|annotation_range| {
3416 self.anchor_after(Point::new(annotation_range.start, 0))
3417 ..self.anchor_before(Point::new(
3418 annotation_range.end,
3419 self.line_len(annotation_range.end),
3420 ))
3421 }),
3422 });
3423 item_ends_stack.push(item.range.end);
3424 }
3425
3426 Some(anchor_items)
3427 }
3428
3429 fn next_outline_item(
3430 &self,
3431 config: &OutlineConfig,
3432 mat: &SyntaxMapMatch,
3433 range: &Range<usize>,
3434 include_extra_context: bool,
3435 theme: Option<&SyntaxTheme>,
3436 ) -> Option<OutlineItem<Point>> {
3437 let item_node = mat.captures.iter().find_map(|cap| {
3438 if cap.index == config.item_capture_ix {
3439 Some(cap.node)
3440 } else {
3441 None
3442 }
3443 })?;
3444
3445 let item_byte_range = item_node.byte_range();
3446 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3447 return None;
3448 }
3449 let item_point_range = Point::from_ts_point(item_node.start_position())
3450 ..Point::from_ts_point(item_node.end_position());
3451
3452 let mut open_point = None;
3453 let mut close_point = None;
3454 let mut buffer_ranges = Vec::new();
3455 for capture in mat.captures {
3456 let node_is_name;
3457 if capture.index == config.name_capture_ix {
3458 node_is_name = true;
3459 } else if Some(capture.index) == config.context_capture_ix
3460 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3461 {
3462 node_is_name = false;
3463 } else {
3464 if Some(capture.index) == config.open_capture_ix {
3465 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3466 } else if Some(capture.index) == config.close_capture_ix {
3467 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3468 }
3469
3470 continue;
3471 }
3472
3473 let mut range = capture.node.start_byte()..capture.node.end_byte();
3474 let start = capture.node.start_position();
3475 if capture.node.end_position().row > start.row {
3476 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3477 }
3478
3479 if !range.is_empty() {
3480 buffer_ranges.push((range, node_is_name));
3481 }
3482 }
3483 if buffer_ranges.is_empty() {
3484 return None;
3485 }
3486 let mut text = String::new();
3487 let mut highlight_ranges = Vec::new();
3488 let mut name_ranges = Vec::new();
3489 let mut chunks = self.chunks(
3490 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3491 true,
3492 );
3493 let mut last_buffer_range_end = 0;
3494 for (buffer_range, is_name) in buffer_ranges {
3495 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
3496 text.push(' ');
3497 }
3498 last_buffer_range_end = buffer_range.end;
3499 if is_name {
3500 let mut start = text.len();
3501 let end = start + buffer_range.len();
3502
                // When multiple names are captured, the matchable text
                // includes the whitespace in between the names.
3505 if !name_ranges.is_empty() {
3506 start -= 1;
3507 }
3508
3509 name_ranges.push(start..end);
3510 }
3511
3512 let mut offset = buffer_range.start;
3513 chunks.seek(buffer_range.clone());
3514 for mut chunk in chunks.by_ref() {
3515 if chunk.text.len() > buffer_range.end - offset {
3516 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3517 offset = buffer_range.end;
3518 } else {
3519 offset += chunk.text.len();
3520 }
3521 let style = chunk
3522 .syntax_highlight_id
3523 .zip(theme)
3524 .and_then(|(highlight, theme)| highlight.style(theme));
3525 if let Some(style) = style {
3526 let start = text.len();
3527 let end = start + chunk.text.len();
3528 highlight_ranges.push((start..end, style));
3529 }
3530 text.push_str(chunk.text);
3531 if offset >= buffer_range.end {
3532 break;
3533 }
3534 }
3535 }
3536
3537 Some(OutlineItem {
3538 depth: 0, // We'll calculate the depth later
3539 range: item_point_range,
3540 text,
3541 highlight_ranges,
3542 name_ranges,
3543 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3544 annotation_range: None,
3545 })
3546 }
3547
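    /// Returns the ranges of function bodies within the given range, for
    /// use when folding function bodies.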
3548 pub fn function_body_fold_ranges<T: ToOffset>(
3549 &self,
3550 within: Range<T>,
3551 ) -> impl Iterator<Item = Range<usize>> + '_ {
3552 self.text_object_ranges(within, TreeSitterOptions::default())
3553 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3554 }
3555
    /// For each grammar in the language, runs the [`tree_sitter::Query`]
    /// selected by the provided function against the given range.
3558 pub fn matches(
3559 &self,
3560 range: Range<usize>,
3561 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3562 ) -> SyntaxMapMatches {
3563 self.syntax.matches(range, self, query)
3564 }
3565
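    /// Returns all bracket pairs whose ranges overlap the given range,
    /// including newline-only pairs.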
3566 pub fn all_bracket_ranges(
3567 &self,
3568 range: Range<usize>,
3569 ) -> impl Iterator<Item = BracketMatch> + '_ {
3570 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3571 grammar.brackets_config.as_ref().map(|c| &c.query)
3572 });
3573 let configs = matches
3574 .grammars()
3575 .iter()
3576 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3577 .collect::<Vec<_>>();
3578
3579 iter::from_fn(move || {
3580 while let Some(mat) = matches.peek() {
3581 let mut open = None;
3582 let mut close = None;
3583 let config = &configs[mat.grammar_index];
3584 let pattern = &config.patterns[mat.pattern_index];
3585 for capture in mat.captures {
3586 if capture.index == config.open_capture_ix {
3587 open = Some(capture.node.byte_range());
3588 } else if capture.index == config.close_capture_ix {
3589 close = Some(capture.node.byte_range());
3590 }
3591 }
3592
3593 matches.advance();
3594
3595 let Some((open_range, close_range)) = open.zip(close) else {
3596 continue;
3597 };
3598
3599 let bracket_range = open_range.start..=close_range.end;
3600 if !bracket_range.overlaps(&range) {
3601 continue;
3602 }
3603
3604 return Some(BracketMatch {
3605 open_range,
3606 close_range,
3607 newline_only: pattern.newline_only,
3608 });
3609 }
3610 None
3611 })
3612 }
3613
    /// Returns bracket range pairs overlapping or adjacent to `range`,
    /// excluding newline-only pairs.
3615 pub fn bracket_ranges<T: ToOffset>(
3616 &self,
3617 range: Range<T>,
3618 ) -> impl Iterator<Item = BracketMatch> + '_ {
        // Widen the range by one character on each side so that bracket pairs
        // overlapping or adjacent to it are also found.
3620 let range = range.start.to_offset(self).saturating_sub(1)
3621 ..self.len().min(range.end.to_offset(self) + 1);
3622 self.all_bracket_ranges(range)
3623 .filter(|pair| !pair.newline_only)
3624 }
3625
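    /// Returns the ranges of text objects, as captured by each grammar's
    /// text object query, that overlap the given range.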
3626 pub fn text_object_ranges<T: ToOffset>(
3627 &self,
3628 range: Range<T>,
3629 options: TreeSitterOptions,
3630 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3631 let range = range.start.to_offset(self).saturating_sub(1)
3632 ..self.len().min(range.end.to_offset(self) + 1);
3633
3634 let mut matches =
3635 self.syntax
3636 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3637 grammar.text_object_config.as_ref().map(|c| &c.query)
3638 });
3639
3640 let configs = matches
3641 .grammars()
3642 .iter()
3643 .map(|grammar| grammar.text_object_config.as_ref())
3644 .collect::<Vec<_>>();
3645
3646 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3647
3648 iter::from_fn(move || loop {
3649 while let Some(capture) = captures.pop() {
3650 if capture.0.overlaps(&range) {
3651 return Some(capture);
3652 }
3653 }
3654
3655 let mat = matches.peek()?;
3656
3657 let Some(config) = configs[mat.grammar_index].as_ref() else {
3658 matches.advance();
3659 continue;
3660 };
3661
3662 for capture in mat.captures {
3663 let Some(ix) = config
3664 .text_objects_by_capture_ix
3665 .binary_search_by_key(&capture.index, |e| e.0)
3666 .ok()
3667 else {
3668 continue;
3669 };
3670 let text_object = config.text_objects_by_capture_ix[ix].1;
3671 let byte_range = capture.node.byte_range();
3672
3673 let mut found = false;
3674 for (range, existing) in captures.iter_mut() {
3675 if existing == &text_object {
3676 range.start = range.start.min(byte_range.start);
3677 range.end = range.end.max(byte_range.end);
3678 found = true;
3679 break;
3680 }
3681 }
3682
3683 if !found {
3684 captures.push((byte_range, text_object));
3685 }
3686 }
3687
3688 matches.advance();
3689 })
3690 }
3691
    /// Returns enclosing bracket ranges containing the given range.
3693 pub fn enclosing_bracket_ranges<T: ToOffset>(
3694 &self,
3695 range: Range<T>,
3696 ) -> impl Iterator<Item = BracketMatch> + '_ {
3697 let range = range.start.to_offset(self)..range.end.to_offset(self);
3698
3699 self.bracket_ranges(range.clone()).filter(move |pair| {
3700 pair.open_range.start <= range.start && pair.close_range.end >= range.end
3701 })
3702 }
3703
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no bracket pair contains the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket
    /// ranges are considered.
3707 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3708 &self,
3709 range: Range<T>,
3710 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3711 ) -> Option<(Range<usize>, Range<usize>)> {
3712 let range = range.start.to_offset(self)..range.end.to_offset(self);
3713
3714 // Get the ranges of the innermost pair of brackets.
3715 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3716
3717 for pair in self.enclosing_bracket_ranges(range.clone()) {
3718 if let Some(range_filter) = range_filter {
3719 if !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
3720 continue;
3721 }
3722 }
3723
3724 let len = pair.close_range.end - pair.open_range.start;
3725
3726 if let Some((existing_open, existing_close)) = &result {
3727 let existing_len = existing_close.end - existing_open.start;
3728 if len > existing_len {
3729 continue;
3730 }
3731 }
3732
3733 result = Some((pair.open_range, pair.close_range));
3734 }
3735
3736 result
3737 }
3738
3739 /// Returns anchor ranges for any matches of the redaction query.
3740 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3741 /// will be run on the relevant section of the buffer.
3742 pub fn redacted_ranges<T: ToOffset>(
3743 &self,
3744 range: Range<T>,
3745 ) -> impl Iterator<Item = Range<usize>> + '_ {
3746 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3747 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3748 grammar
3749 .redactions_config
3750 .as_ref()
3751 .map(|config| &config.query)
3752 });
3753
3754 let configs = syntax_matches
3755 .grammars()
3756 .iter()
3757 .map(|grammar| grammar.redactions_config.as_ref())
3758 .collect::<Vec<_>>();
3759
3760 iter::from_fn(move || {
3761 let redacted_range = syntax_matches
3762 .peek()
3763 .and_then(|mat| {
3764 configs[mat.grammar_index].and_then(|config| {
3765 mat.captures
3766 .iter()
3767 .find(|capture| capture.index == config.redaction_capture_ix)
3768 })
3769 })
3770 .map(|mat| mat.node.byte_range());
3771 syntax_matches.advance();
3772 redacted_range
3773 })
3774 }
3775
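    /// Returns the ranges of language injections that intersect the given
    /// range, along with the language injected at each range.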
3776 pub fn injections_intersecting_range<T: ToOffset>(
3777 &self,
3778 range: Range<T>,
3779 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3780 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3781
3782 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3783 grammar
3784 .injection_config
3785 .as_ref()
3786 .map(|config| &config.query)
3787 });
3788
3789 let configs = syntax_matches
3790 .grammars()
3791 .iter()
3792 .map(|grammar| grammar.injection_config.as_ref())
3793 .collect::<Vec<_>>();
3794
3795 iter::from_fn(move || {
3796 let ranges = syntax_matches.peek().and_then(|mat| {
3797 let config = &configs[mat.grammar_index]?;
3798 let content_capture_range = mat.captures.iter().find_map(|capture| {
3799 if capture.index == config.content_capture_ix {
3800 Some(capture.node.byte_range())
3801 } else {
3802 None
3803 }
3804 })?;
3805 let language = self.language_at(content_capture_range.start)?;
3806 Some((content_capture_range, language))
3807 });
3808 syntax_matches.advance();
3809 ranges
3810 })
3811 }
3812
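    /// Returns the runnables, as captured by each grammar's runnable query,
    /// within the given range.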
3813 pub fn runnable_ranges(
3814 &self,
3815 offset_range: Range<usize>,
3816 ) -> impl Iterator<Item = RunnableRange> + '_ {
3817 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3818 grammar.runnable_config.as_ref().map(|config| &config.query)
3819 });
3820
3821 let test_configs = syntax_matches
3822 .grammars()
3823 .iter()
3824 .map(|grammar| grammar.runnable_config.as_ref())
3825 .collect::<Vec<_>>();
3826
3827 iter::from_fn(move || loop {
3828 let mat = syntax_matches.peek()?;
3829
3830 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
3831 let mut run_range = None;
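                // Compute the smallest range spanning all of this match's captures.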
3832 let full_range = mat.captures.iter().fold(
3833 Range {
3834 start: usize::MAX,
3835 end: 0,
3836 },
3837 |mut acc, next| {
3838 let byte_range = next.node.byte_range();
3839 if acc.start > byte_range.start {
3840 acc.start = byte_range.start;
3841 }
3842 if acc.end < byte_range.end {
3843 acc.end = byte_range.end;
3844 }
3845 acc
3846 },
3847 );
3848 if full_range.start > full_range.end {
3849 // We did not find a full spanning range of this match.
3850 return None;
3851 }
3852 let extra_captures: SmallVec<[_; 1]> =
3853 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
3854 test_configs
3855 .extra_captures
3856 .get(capture.index as usize)
3857 .cloned()
3858 .and_then(|tag_name| match tag_name {
3859 RunnableCapture::Named(name) => {
3860 Some((capture.node.byte_range(), name))
3861 }
3862 RunnableCapture::Run => {
3863 let _ = run_range.insert(capture.node.byte_range());
3864 None
3865 }
3866 })
3867 }));
3868 let run_range = run_range?;
3869 let tags = test_configs
3870 .query
3871 .property_settings(mat.pattern_index)
3872 .iter()
3873 .filter_map(|property| {
3874 if *property.key == *"tag" {
3875 property
3876 .value
3877 .as_ref()
3878 .map(|value| RunnableTag(value.to_string().into()))
3879 } else {
3880 None
3881 }
3882 })
3883 .collect();
3884 let extra_captures = extra_captures
3885 .into_iter()
3886 .map(|(range, name)| {
3887 (
3888 name.to_string(),
3889 self.text_for_range(range.clone()).collect::<String>(),
3890 )
3891 })
3892 .collect();
3893 // All tags should have the same range.
3894 Some(RunnableRange {
3895 run_range,
3896 full_range,
3897 runnable: Runnable {
3898 tags,
3899 language: mat.language,
3900 buffer: self.remote_id(),
3901 },
3902 extra_captures,
3903 buffer_id: self.remote_id(),
3904 })
3905 });
3906
3907 syntax_matches.advance();
3908 if test_range.is_some() {
                // It's fine to short-circuit when `.peek()?` returns None. If a match did not
                // contain a run marker, we don't end the iterator; we just loop around to the next match.
3911 return test_range;
3912 }
3913 })
3914 }
3915
    /// Returns selections for remote peers intersecting the given range,
    /// optionally including the local replica's selections.
3917 #[allow(clippy::type_complexity)]
3918 pub fn selections_in_range(
3919 &self,
3920 range: Range<Anchor>,
3921 include_local: bool,
3922 ) -> impl Iterator<
3923 Item = (
3924 ReplicaId,
3925 bool,
3926 CursorShape,
3927 impl Iterator<Item = &Selection<Anchor>> + '_,
3928 ),
3929 > + '_ {
3930 self.remote_selections
3931 .iter()
3932 .filter(move |(replica_id, set)| {
3933 (include_local || **replica_id != self.text.replica_id())
3934 && !set.selections.is_empty()
3935 })
3936 .map(move |(replica_id, set)| {
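                // Binary search for the slice of this replica's selections that intersects the range.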
3937 let start_ix = match set.selections.binary_search_by(|probe| {
3938 probe.end.cmp(&range.start, self).then(Ordering::Greater)
3939 }) {
3940 Ok(ix) | Err(ix) => ix,
3941 };
3942 let end_ix = match set.selections.binary_search_by(|probe| {
3943 probe.start.cmp(&range.end, self).then(Ordering::Less)
3944 }) {
3945 Ok(ix) | Err(ix) => ix,
3946 };
3947
3948 (
3949 *replica_id,
3950 set.line_mode,
3951 set.cursor_shape,
3952 set.selections[start_ix..end_ix].iter(),
3953 )
3954 })
3955 }
3956
    /// Returns whether the buffer contains any diagnostics.
3958 pub fn has_diagnostics(&self) -> bool {
3959 !self.diagnostics.is_empty()
3960 }
3961
3962 /// Returns all the diagnostics intersecting the given range.
3963 pub fn diagnostics_in_range<'a, T, O>(
3964 &'a self,
3965 search_range: Range<T>,
3966 reversed: bool,
3967 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3968 where
3969 T: 'a + Clone + ToOffset,
3970 O: 'a + FromAnchor,
3971 {
3972 let mut iterators: Vec<_> = self
3973 .diagnostics
3974 .iter()
3975 .map(|(_, collection)| {
3976 collection
3977 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
3978 .peekable()
3979 })
3980 .collect();
3981
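        // Merge the per-server iterators, yielding entries in sorted order (reversed when requested).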
3982 std::iter::from_fn(move || {
3983 let (next_ix, _) = iterators
3984 .iter_mut()
3985 .enumerate()
3986 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
3987 .min_by(|(_, a), (_, b)| {
3988 let cmp = a
3989 .range
3990 .start
3991 .cmp(&b.range.start, self)
3992 // when range is equal, sort by diagnostic severity
3993 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
3994 // and stabilize order with group_id
3995 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
3996 if reversed {
3997 cmp.reverse()
3998 } else {
3999 cmp
4000 }
4001 })?;
4002 iterators[next_ix]
4003 .next()
4004 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4005 diagnostic,
4006 range: FromAnchor::from_anchor(&range.start, self)
4007 ..FromAnchor::from_anchor(&range.end, self),
4008 })
4009 })
4010 }
4011
4012 /// Returns all the diagnostic groups associated with the given
4013 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4015 pub fn diagnostic_groups(
4016 &self,
4017 language_server_id: Option<LanguageServerId>,
4018 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4019 let mut groups = Vec::new();
4020
4021 if let Some(language_server_id) = language_server_id {
4022 if let Ok(ix) = self
4023 .diagnostics
4024 .binary_search_by_key(&language_server_id, |e| e.0)
4025 {
4026 self.diagnostics[ix]
4027 .1
4028 .groups(language_server_id, &mut groups, self);
4029 }
4030 } else {
4031 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4032 diagnostics.groups(*language_server_id, &mut groups, self);
4033 }
4034 }
4035
4036 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4037 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4038 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4039 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4040 });
4041
4042 groups
4043 }
4044
4045 /// Returns an iterator over the diagnostics for the given group.
4046 pub fn diagnostic_group<O>(
4047 &self,
4048 group_id: usize,
4049 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4050 where
4051 O: FromAnchor + 'static,
4052 {
4053 self.diagnostics
4054 .iter()
4055 .flat_map(move |(_, set)| set.group(group_id, self))
4056 }
4057
4058 /// An integer version number that accounts for all updates besides
4059 /// the buffer's text itself (which is versioned via a version vector).
4060 pub fn non_text_state_update_count(&self) -> usize {
4061 self.non_text_state_update_count
4062 }
4063
    /// Returns a snapshot of the underlying file.
4065 pub fn file(&self) -> Option<&Arc<dyn File>> {
4066 self.file.as_ref()
4067 }
4068
4069 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4070 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4071 if let Some(file) = self.file() {
4072 if file.path().file_name().is_none() || include_root {
4073 Some(file.full_path(cx))
4074 } else {
4075 Some(file.path().to_path_buf())
4076 }
4077 } else {
4078 None
4079 }
4080 }
4081}
4082
4083fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4084 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4085}
4086
4087fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4088 let mut result = IndentSize::spaces(0);
4089 for c in text {
4090 let kind = match c {
4091 ' ' => IndentKind::Space,
4092 '\t' => IndentKind::Tab,
4093 _ => break,
4094 };
4095 if result.len == 0 {
4096 result.kind = kind;
4097 }
4098 result.len += 1;
4099 }
4100 result
4101}
4102
4103impl Clone for BufferSnapshot {
4104 fn clone(&self) -> Self {
4105 Self {
4106 text: self.text.clone(),
4107 syntax: self.syntax.clone(),
4108 file: self.file.clone(),
4109 remote_selections: self.remote_selections.clone(),
4110 diagnostics: self.diagnostics.clone(),
4111 language: self.language.clone(),
4112 non_text_state_update_count: self.non_text_state_update_count,
4113 }
4114 }
4115}
4116
4117impl Deref for BufferSnapshot {
4118 type Target = text::BufferSnapshot;
4119
4120 fn deref(&self) -> &Self::Target {
4121 &self.text
4122 }
4123}
4124
4125unsafe impl<'a> Send for BufferChunks<'a> {}
4126
4127impl<'a> BufferChunks<'a> {
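    /// Creates a chunk iterator over the given byte range, optionally
    /// tracking syntax highlights and diagnostics.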
4128 pub(crate) fn new(
4129 text: &'a Rope,
4130 range: Range<usize>,
4131 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4132 diagnostics: bool,
4133 buffer_snapshot: Option<&'a BufferSnapshot>,
4134 ) -> Self {
4135 let mut highlights = None;
4136 if let Some((captures, highlight_maps)) = syntax {
4137 highlights = Some(BufferChunkHighlights {
4138 captures,
4139 next_capture: None,
4140 stack: Default::default(),
4141 highlight_maps,
4142 })
4143 }
4144
4145 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4146 let chunks = text.chunks_in_range(range.clone());
4147
4148 let mut this = BufferChunks {
4149 range,
4150 buffer_snapshot,
4151 chunks,
4152 diagnostic_endpoints,
4153 error_depth: 0,
4154 warning_depth: 0,
4155 information_depth: 0,
4156 hint_depth: 0,
4157 unnecessary_depth: 0,
4158 highlights,
4159 };
4160 this.initialize_diagnostic_endpoints();
4161 this
4162 }
4163
    /// Seeks to the given byte range in the buffer.
4165 pub fn seek(&mut self, range: Range<usize>) {
4166 let old_range = std::mem::replace(&mut self.range, range.clone());
4167 self.chunks.set_range(self.range.clone());
4168 if let Some(highlights) = self.highlights.as_mut() {
4169 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4170 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4171 highlights
4172 .stack
4173 .retain(|(end_offset, _)| *end_offset > range.start);
4174 if let Some(capture) = &highlights.next_capture {
4175 if range.start >= capture.node.start_byte() {
4176 let next_capture_end = capture.node.end_byte();
4177 if range.start < next_capture_end {
4178 highlights.stack.push((
4179 next_capture_end,
4180 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4181 ));
4182 }
4183 highlights.next_capture.take();
4184 }
4185 }
4186 } else if let Some(snapshot) = self.buffer_snapshot {
4187 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4188 *highlights = BufferChunkHighlights {
4189 captures,
4190 next_capture: None,
4191 stack: Default::default(),
4192 highlight_maps,
4193 };
4194 } else {
4195 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4196 // Seeking such BufferChunks is not supported.
4197 debug_assert!(false, "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot");
4198 }
4199
4200 highlights.captures.set_byte_range(self.range.clone());
4201 self.initialize_diagnostic_endpoints();
4202 }
4203 }
4204
4205 fn initialize_diagnostic_endpoints(&mut self) {
4206 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4207 if let Some(buffer) = self.buffer_snapshot {
4208 let mut diagnostic_endpoints = Vec::new();
4209 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4210 diagnostic_endpoints.push(DiagnosticEndpoint {
4211 offset: entry.range.start,
4212 is_start: true,
4213 severity: entry.diagnostic.severity,
4214 is_unnecessary: entry.diagnostic.is_unnecessary,
4215 });
4216 diagnostic_endpoints.push(DiagnosticEndpoint {
4217 offset: entry.range.end,
4218 is_start: false,
4219 severity: entry.diagnostic.severity,
4220 is_unnecessary: entry.diagnostic.is_unnecessary,
4221 });
4222 }
4223 diagnostic_endpoints
4224 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4225 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4226 self.hint_depth = 0;
4227 self.error_depth = 0;
4228 self.warning_depth = 0;
4229 self.information_depth = 0;
4230 }
4231 }
4232 }
4233
4234 /// The current byte offset in the buffer.
4235 pub fn offset(&self) -> usize {
4236 self.range.start
4237 }
4238
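    /// The byte range this iterator is operating over; the start advances
    /// as chunks are yielded.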
4239 pub fn range(&self) -> Range<usize> {
4240 self.range.clone()
4241 }
4242
4243 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4244 let depth = match endpoint.severity {
4245 DiagnosticSeverity::ERROR => &mut self.error_depth,
4246 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4247 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4248 DiagnosticSeverity::HINT => &mut self.hint_depth,
4249 _ => return,
4250 };
4251 if endpoint.is_start {
4252 *depth += 1;
4253 } else {
4254 *depth -= 1;
4255 }
4256
4257 if endpoint.is_unnecessary {
4258 if endpoint.is_start {
4259 self.unnecessary_depth += 1;
4260 } else {
4261 self.unnecessary_depth -= 1;
4262 }
4263 }
4264 }
4265
4266 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4267 if self.error_depth > 0 {
4268 Some(DiagnosticSeverity::ERROR)
4269 } else if self.warning_depth > 0 {
4270 Some(DiagnosticSeverity::WARNING)
4271 } else if self.information_depth > 0 {
4272 Some(DiagnosticSeverity::INFORMATION)
4273 } else if self.hint_depth > 0 {
4274 Some(DiagnosticSeverity::HINT)
4275 } else {
4276 None
4277 }
4278 }
4279
4280 fn current_code_is_unnecessary(&self) -> bool {
4281 self.unnecessary_depth > 0
4282 }
4283}
4284
4285impl<'a> Iterator for BufferChunks<'a> {
4286 type Item = Chunk<'a>;
4287
4288 fn next(&mut self) -> Option<Self::Item> {
4289 let mut next_capture_start = usize::MAX;
4290 let mut next_diagnostic_endpoint = usize::MAX;
4291
4292 if let Some(highlights) = self.highlights.as_mut() {
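            // Pop any highlight scopes that end at or before the current position.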
4293 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4294 if *parent_capture_end <= self.range.start {
4295 highlights.stack.pop();
4296 } else {
4297 break;
4298 }
4299 }
4300
4301 if highlights.next_capture.is_none() {
4302 highlights.next_capture = highlights.captures.next();
4303 }
4304
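            // Record where the next capture starts, pushing highlights for any captures that have already begun.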
4305 while let Some(capture) = highlights.next_capture.as_ref() {
4306 if self.range.start < capture.node.start_byte() {
4307 next_capture_start = capture.node.start_byte();
4308 break;
4309 } else {
4310 let highlight_id =
4311 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4312 highlights
4313 .stack
4314 .push((capture.node.end_byte(), highlight_id));
4315 highlights.next_capture = highlights.captures.next();
4316 }
4317 }
4318 }
4319
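        // Apply diagnostic endpoints up to the current position and note where the next one begins.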
4320 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4321 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4322 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4323 if endpoint.offset <= self.range.start {
4324 self.update_diagnostic_depths(endpoint);
4325 diagnostic_endpoints.next();
4326 } else {
4327 next_diagnostic_endpoint = endpoint.offset;
4328 break;
4329 }
4330 }
4331 }
4332 self.diagnostic_endpoints = diagnostic_endpoints;
4333
4334 if let Some(chunk) = self.chunks.peek() {
4335 let chunk_start = self.range.start;
4336 let mut chunk_end = (self.chunks.offset() + chunk.len())
4337 .min(next_capture_start)
4338 .min(next_diagnostic_endpoint);
4339 let mut highlight_id = None;
4340 if let Some(highlights) = self.highlights.as_ref() {
4341 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4342 chunk_end = chunk_end.min(*parent_capture_end);
4343 highlight_id = Some(*parent_highlight_id);
4344 }
4345 }
4346
4347 let slice =
4348 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4349 self.range.start = chunk_end;
4350 if self.range.start == self.chunks.offset() + chunk.len() {
4351 self.chunks.next().unwrap();
4352 }
4353
4354 Some(Chunk {
4355 text: slice,
4356 syntax_highlight_id: highlight_id,
4357 diagnostic_severity: self.current_diagnostic_severity(),
4358 is_unnecessary: self.current_code_is_unnecessary(),
4359 ..Default::default()
4360 })
4361 } else {
4362 None
4363 }
4364 }
4365}
4366
4367impl operation_queue::Operation for Operation {
4368 fn lamport_timestamp(&self) -> clock::Lamport {
4369 match self {
4370 Operation::Buffer(_) => {
4371 unreachable!("buffer operations should never be deferred at this layer")
4372 }
4373 Operation::UpdateDiagnostics {
4374 lamport_timestamp, ..
4375 }
4376 | Operation::UpdateSelections {
4377 lamport_timestamp, ..
4378 }
4379 | Operation::UpdateCompletionTriggers {
4380 lamport_timestamp, ..
4381 } => *lamport_timestamp,
4382 }
4383 }
4384}
4385
4386impl Default for Diagnostic {
4387 fn default() -> Self {
4388 Self {
4389 source: Default::default(),
4390 code: None,
4391 severity: DiagnosticSeverity::ERROR,
4392 message: Default::default(),
4393 group_id: 0,
4394 is_primary: false,
4395 is_disk_based: false,
4396 is_unnecessary: false,
4397 data: None,
4398 }
4399 }
4400}
4401
4402impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
4404 pub fn spaces(len: u32) -> Self {
4405 Self {
4406 len,
4407 kind: IndentKind::Space,
4408 }
4409 }
4410
4411 /// Returns an [`IndentSize`] representing a tab.
4412 pub fn tab() -> Self {
4413 Self {
4414 len: 1,
4415 kind: IndentKind::Tab,
4416 }
4417 }
4418
4419 /// An iterator over the characters represented by this [`IndentSize`].
4420 pub fn chars(&self) -> impl Iterator<Item = char> {
4421 iter::repeat(self.char()).take(self.len as usize)
4422 }
4423
4424 /// The character representation of this [`IndentSize`].
4425 pub fn char(&self) -> char {
4426 match self.kind {
4427 IndentKind::Space => ' ',
4428 IndentKind::Tab => '\t',
4429 }
4430 }
4431
4432 /// Consumes the current [`IndentSize`] and returns a new one that has
4433 /// been shrunk or enlarged by the given size along the given direction.
4434 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4435 match direction {
4436 Ordering::Less => {
4437 if self.kind == size.kind && self.len >= size.len {
4438 self.len -= size.len;
4439 }
4440 }
4441 Ordering::Equal => {}
4442 Ordering::Greater => {
4443 if self.len == 0 {
4444 self = size;
4445 } else if self.kind == size.kind {
4446 self.len += size.len;
4447 }
4448 }
4449 }
4450 self
4451 }
4452
4453 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4454 match self.kind {
4455 IndentKind::Space => self.len as usize,
4456 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4457 }
4458 }
4459}
4460
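/// A minimal [`File`] implementation for use in tests.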
4461#[cfg(any(test, feature = "test-support"))]
4462pub struct TestFile {
4463 pub path: Arc<Path>,
4464 pub root_name: String,
4465}
4466
4467#[cfg(any(test, feature = "test-support"))]
4468impl File for TestFile {
4469 fn path(&self) -> &Arc<Path> {
4470 &self.path
4471 }
4472
4473 fn full_path(&self, _: &gpui::App) -> PathBuf {
4474 PathBuf::from(&self.root_name).join(self.path.as_ref())
4475 }
4476
4477 fn as_local(&self) -> Option<&dyn LocalFile> {
4478 None
4479 }
4480
4481 fn disk_state(&self) -> DiskState {
4482 unimplemented!()
4483 }
4484
4485 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4486 self.path().file_name().unwrap_or(self.root_name.as_ref())
4487 }
4488
4489 fn worktree_id(&self, _: &App) -> WorktreeId {
4490 WorktreeId::from_usize(0)
4491 }
4492
4493 fn as_any(&self) -> &dyn std::any::Any {
4494 unimplemented!()
4495 }
4496
4497 fn to_proto(&self, _: &App) -> rpc::proto::File {
4498 unimplemented!()
4499 }
4500
4501 fn is_private(&self) -> bool {
4502 false
4503 }
4504}
4505
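/// Groups an ascending sequence of values into contiguous ranges, each
/// containing at most `max_len` values.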
4506pub(crate) fn contiguous_ranges(
4507 values: impl Iterator<Item = u32>,
4508 max_len: usize,
4509) -> impl Iterator<Item = Range<u32>> {
4510 let mut values = values;
4511 let mut current_range: Option<Range<u32>> = None;
4512 std::iter::from_fn(move || loop {
4513 if let Some(value) = values.next() {
4514 if let Some(range) = &mut current_range {
4515 if value == range.end && range.len() < max_len {
4516 range.end += 1;
4517 continue;
4518 }
4519 }
4520
4521 let prev_range = current_range.clone();
4522 current_range = Some(value..(value + 1));
4523 if prev_range.is_some() {
4524 return prev_range;
4525 }
4526 } else {
4527 return current_range.take();
4528 }
4529 })
4530}
4531
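/// Classifies characters as word, whitespace, or punctuation characters,
/// optionally taking a language scope's word characters into account.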
4532#[derive(Default, Debug)]
4533pub struct CharClassifier {
4534 scope: Option<LanguageScope>,
4535 for_completion: bool,
4536 ignore_punctuation: bool,
4537}
4538
4539impl CharClassifier {
4540 pub fn new(scope: Option<LanguageScope>) -> Self {
4541 Self {
4542 scope,
4543 for_completion: false,
4544 ignore_punctuation: false,
4545 }
4546 }
4547
4548 pub fn for_completion(self, for_completion: bool) -> Self {
4549 Self {
4550 for_completion,
4551 ..self
4552 }
4553 }
4554
4555 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4556 Self {
4557 ignore_punctuation,
4558 ..self
4559 }
4560 }
4561
4562 pub fn is_whitespace(&self, c: char) -> bool {
4563 self.kind(c) == CharKind::Whitespace
4564 }
4565
4566 pub fn is_word(&self, c: char) -> bool {
4567 self.kind(c) == CharKind::Word
4568 }
4569
4570 pub fn is_punctuation(&self, c: char) -> bool {
4571 self.kind(c) == CharKind::Punctuation
4572 }
4573
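    /// Returns the [`CharKind`] of the given character, optionally treating
    /// punctuation as part of words.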
4574 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4575 if c.is_whitespace() {
4576 return CharKind::Whitespace;
4577 } else if c.is_alphanumeric() || c == '_' {
4578 return CharKind::Word;
4579 }
4580
4581 if let Some(scope) = &self.scope {
4582 if let Some(characters) = scope.word_characters() {
4583 if characters.contains(&c) {
4584 if c == '-' && !self.for_completion && !ignore_punctuation {
4585 return CharKind::Punctuation;
4586 }
4587 return CharKind::Word;
4588 }
4589 }
4590 }
4591
4592 if ignore_punctuation {
4593 CharKind::Word
4594 } else {
4595 CharKind::Punctuation
4596 }
4597 }
4598
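    /// Returns the [`CharKind`] of the given character, respecting this
    /// classifier's `ignore_punctuation` setting.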
4599 pub fn kind(&self, c: char) -> CharKind {
4600 self.kind_with(c, self.ignore_punctuation)
4601 }
4602}
4603
4604/// Find all of the ranges of whitespace that occur at the ends of lines
4605/// in the given rope.
4606///
4607/// This could also be done with a regex search, but this implementation
4608/// avoids copying text.
4609pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4610 let mut ranges = Vec::new();
4611
4612 let mut offset = 0;
4613 let mut prev_chunk_trailing_whitespace_range = 0..0;
4614 for chunk in rope.chunks() {
4615 let mut prev_line_trailing_whitespace_range = 0..0;
4616 for (i, line) in chunk.split('\n').enumerate() {
4617 let line_end_offset = offset + line.len();
4618 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4619 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4620
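            // The first line of a chunk may continue a whitespace run that began in the previous chunk.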
4621 if i == 0 && trimmed_line_len == 0 {
4622 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4623 }
4624 if !prev_line_trailing_whitespace_range.is_empty() {
4625 ranges.push(prev_line_trailing_whitespace_range);
4626 }
4627
4628 offset = line_end_offset + 1;
4629 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4630 }
4631
4632 offset -= 1;
4633 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4634 }
4635
4636 if !prev_chunk_trailing_whitespace_range.is_empty() {
4637 ranges.push(prev_chunk_trailing_whitespace_range);
4638 }
4639
4640 ranges
4641}