1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 proto, Grammar, Language, LanguageRegistry,
5};
6use crate::{
7 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
8 language_settings::{language_settings, LanguageSettings},
9 outline::OutlineItem,
10 syntax_map::{
11 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
12 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, TextObject,
17 TreeSitterOptions,
18};
19use anyhow::{anyhow, Context as _, Result};
20use async_watch as watch;
21use clock::Lamport;
22pub use clock::ReplicaId;
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 AnyElement, App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, Pixels,
28 SharedString, StyledText, Task, TaskLabel, TextStyle, Window,
29};
30use lsp::{LanguageServerId, NumberOrString};
31use parking_lot::Mutex;
32use schemars::JsonSchema;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 ffi::OsStr,
45 fmt,
46 future::Future,
47 iter::{self, Iterator, Peekable},
48 mem,
49 num::NonZeroU32,
50 ops::{Deref, DerefMut, Range},
51 path::{Path, PathBuf},
52 str,
53 sync::{Arc, LazyLock},
54 time::{Duration, Instant},
55 vec,
56};
57use sum_tree::TreeMap;
58use text::operation_queue::OperationQueue;
59use text::*;
60pub use text::{
61 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
62 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
63 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
64 Transaction, TransactionId, Unclipped,
65};
66use theme::{ActiveTheme as _, SyntaxTheme};
67#[cfg(any(test, feature = "test-support"))]
68use util::RandomCharIter;
69use util::{debug_panic, maybe, RangeExt};
70
71#[cfg(any(test, feature = "test-support"))]
72pub use {tree_sitter_rust, tree_sitter_typescript};
73
74pub use lsp::DiagnosticSeverity;
75
76/// A label for the background task spawned by the buffer to compute
77/// a diff against the contents of its file.
78pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
79
/// Indicates whether a [`Buffer`] has permission to edit.
81#[derive(PartialEq, Clone, Copy, Debug)]
82pub enum Capability {
83 /// The buffer is a mutable replica.
84 ReadWrite,
85 /// The buffer is a read-only replica.
86 ReadOnly,
87}
88
89pub type BufferRow = u32;
90
91/// An in-memory representation of a source code file, including its text,
92/// syntax trees, git status, and diagnostics.
93pub struct Buffer {
94 text: TextBuffer,
95 branch_state: Option<BufferBranchState>,
96 /// Filesystem state, `None` when there is no path.
97 file: Option<Arc<dyn File>>,
98 /// The mtime of the file when this buffer was last loaded from
99 /// or saved to disk.
100 saved_mtime: Option<MTime>,
101 /// The version vector when this buffer was last loaded from
102 /// or saved to disk.
103 saved_version: clock::Global,
104 preview_version: clock::Global,
105 transaction_depth: usize,
106 was_dirty_before_starting_transaction: Option<bool>,
107 reload_task: Option<Task<Result<()>>>,
108 language: Option<Arc<Language>>,
109 autoindent_requests: Vec<Arc<AutoindentRequest>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 parsing_in_background: bool,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell holds `(self.version, has_changes)` as of the most recent call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 _subscriptions: Vec<gpui::Subscription>,
129}
130
131#[derive(Copy, Clone, Debug, PartialEq, Eq)]
132pub enum ParseStatus {
133 Idle,
134 Parsing,
135}
136
137struct BufferBranchState {
138 base_buffer: Entity<Buffer>,
139 merged_operations: Vec<Lamport>,
140}
141
142/// An immutable, cheaply cloneable representation of a fixed
143/// state of a buffer.
144pub struct BufferSnapshot {
145 pub text: text::BufferSnapshot,
146 pub(crate) syntax: SyntaxSnapshot,
147 file: Option<Arc<dyn File>>,
148 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
149 remote_selections: TreeMap<ReplicaId, SelectionSet>,
150 language: Option<Arc<Language>>,
151 non_text_state_update_count: usize,
152}
153
154/// The kind and amount of indentation in a particular line. For now,
155/// assumes that indentation is all the same character.
156#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
157pub struct IndentSize {
158 /// The number of bytes that comprise the indentation.
159 pub len: u32,
160 /// The kind of whitespace used for indentation.
161 pub kind: IndentKind,
162}
163
164/// A whitespace character that's used for indentation.
165#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
166pub enum IndentKind {
167 /// An ASCII space character.
168 #[default]
169 Space,
170 /// An ASCII tab character.
171 Tab,
172}
173
174/// The shape of a selection cursor.
175#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
176#[serde(rename_all = "snake_case")]
177pub enum CursorShape {
178 /// A vertical bar
179 #[default]
180 Bar,
181 /// A block that surrounds the following character
182 Block,
183 /// An underline that runs along the following character
184 Underline,
185 /// A box drawn around the following character
186 Hollow,
187}
188
189#[derive(Clone, Debug)]
190struct SelectionSet {
191 line_mode: bool,
192 cursor_shape: CursorShape,
193 selections: Arc<[Selection<Anchor>]>,
194 lamport_timestamp: clock::Lamport,
195}
196
197/// A diagnostic associated with a certain range of a buffer.
198#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
199pub struct Diagnostic {
200 /// The name of the service that produced this diagnostic.
201 pub source: Option<String>,
202 /// A machine-readable code that identifies this diagnostic.
203 pub code: Option<NumberOrString>,
204 /// Whether this diagnostic is a hint, warning, or error.
205 pub severity: DiagnosticSeverity,
206 /// The human-readable message associated with this diagnostic.
207 pub message: String,
208 /// An id that identifies the group to which this diagnostic belongs.
209 ///
210 /// When a language server produces a diagnostic with
211 /// one or more associated diagnostics, those diagnostics are all
212 /// assigned a single group ID.
213 pub group_id: usize,
214 /// Whether this diagnostic is the primary diagnostic for its group.
215 ///
216 /// In a given group, the primary diagnostic is the top-level diagnostic
217 /// returned by the language server. The non-primary diagnostics are the
218 /// associated diagnostics.
219 pub is_primary: bool,
220 /// Whether this diagnostic is considered to originate from an analysis of
221 /// files on disk, as opposed to any unsaved buffer contents. This is a
222 /// property of a given diagnostic source, and is configured for a given
223 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
224 /// for the language server.
225 pub is_disk_based: bool,
226 /// Whether this diagnostic marks unnecessary code.
227 pub is_unnecessary: bool,
    /// Data from the language server that produced this diagnostic. It is passed
    /// back to the server when code actions are requested for this diagnostic.
229 pub data: Option<Value>,
230}
231
232/// An operation used to synchronize this buffer with its other replicas.
233#[derive(Clone, Debug, PartialEq)]
234pub enum Operation {
235 /// A text operation.
236 Buffer(text::Operation),
237
238 /// An update to the buffer's diagnostics.
239 UpdateDiagnostics {
240 /// The id of the language server that produced the new diagnostics.
241 server_id: LanguageServerId,
242 /// The diagnostics.
243 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
244 /// The buffer's lamport timestamp.
245 lamport_timestamp: clock::Lamport,
246 },
247
248 /// An update to the most recent selections in this buffer.
249 UpdateSelections {
250 /// The selections.
251 selections: Arc<[Selection<Anchor>]>,
252 /// The buffer's lamport timestamp.
253 lamport_timestamp: clock::Lamport,
254 /// Whether the selections are in 'line mode'.
255 line_mode: bool,
256 /// The [`CursorShape`] associated with these selections.
257 cursor_shape: CursorShape,
258 },
259
260 /// An update to the characters that should trigger autocompletion
261 /// for this buffer.
262 UpdateCompletionTriggers {
263 /// The characters that trigger autocompletion.
264 triggers: Vec<String>,
265 /// The buffer's lamport timestamp.
266 lamport_timestamp: clock::Lamport,
267 /// The language server ID.
268 server_id: LanguageServerId,
269 },
270}
271
272/// An event that occurs in a buffer.
273#[derive(Clone, Debug, PartialEq)]
274pub enum BufferEvent {
275 /// The buffer was changed in a way that must be
276 /// propagated to its other replicas.
277 Operation {
278 operation: Operation,
279 is_local: bool,
280 },
281 /// The buffer was edited.
282 Edited,
283 /// The buffer's `dirty` bit changed.
284 DirtyChanged,
285 /// The buffer was saved.
286 Saved,
287 /// The buffer's file was changed on disk.
288 FileHandleChanged,
289 /// The buffer was reloaded.
290 Reloaded,
    /// The buffer needs to be reloaded.
292 ReloadNeeded,
293 /// The buffer's language was changed.
294 LanguageChanged,
295 /// The buffer's syntax trees were updated.
296 Reparsed,
297 /// The buffer's diagnostics were updated.
298 DiagnosticsUpdated,
299 /// The buffer gained or lost editing capabilities.
300 CapabilityChanged,
301 /// The buffer was explicitly requested to close.
302 Closed,
303 /// The buffer was discarded when closing.
304 Discarded,
305}
306
307/// The file associated with a buffer.
308pub trait File: Send + Sync {
309 /// Returns the [`LocalFile`] associated with this file, if the
310 /// file is local.
311 fn as_local(&self) -> Option<&dyn LocalFile>;
312
313 /// Returns whether this file is local.
314 fn is_local(&self) -> bool {
315 self.as_local().is_some()
316 }
317
318 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
319 /// only available in some states, such as modification time.
320 fn disk_state(&self) -> DiskState;
321
322 /// Returns the path of this file relative to the worktree's root directory.
323 fn path(&self) -> &Arc<Path>;
324
325 /// Returns the path of this file relative to the worktree's parent directory (this means it
326 /// includes the name of the worktree's root folder).
327 fn full_path(&self, cx: &App) -> PathBuf;
328
329 /// Returns the last component of this handle's absolute path. If this handle refers to the root
330 /// of its worktree, then this method will return the name of the worktree itself.
331 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
332
333 /// Returns the id of the worktree to which this file belongs.
334 ///
335 /// This is needed for looking up project-specific settings.
336 fn worktree_id(&self, cx: &App) -> WorktreeId;
337
338 /// Converts this file into an [`Any`] trait object.
339 fn as_any(&self) -> &dyn Any;
340
341 /// Converts this file into a protobuf message.
342 fn to_proto(&self, cx: &App) -> rpc::proto::File;
343
    /// Returns whether Zed considers this to be a private file.
345 fn is_private(&self) -> bool;
346}
347
348/// The file's storage status - whether it's stored (`Present`), and if so when it was last
349/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
350/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
351/// indicator for new files.
352#[derive(Copy, Clone, Debug, PartialEq)]
353pub enum DiskState {
354 /// File created in Zed that has not been saved.
355 New,
356 /// File present on the filesystem.
357 Present { mtime: MTime },
358 /// Deleted file that was previously present.
359 Deleted,
360}
361
362impl DiskState {
363 /// Returns the file's last known modification time on disk.
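    ///
    /// For example, callers may compare this value against [`Buffer::saved_mtime`]
    /// to detect that the file has changed on disk since the buffer was last
    /// saved or reloaded.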
364 pub fn mtime(self) -> Option<MTime> {
365 match self {
366 DiskState::New => None,
367 DiskState::Present { mtime } => Some(mtime),
368 DiskState::Deleted => None,
369 }
370 }
371}
372
373/// The file associated with a buffer, in the case where the file is on the local disk.
374pub trait LocalFile: File {
    /// Returns the absolute path of this file.
376 fn abs_path(&self, cx: &App) -> PathBuf;
377
378 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
379 fn load(&self, cx: &App) -> Task<Result<String>>;
380
381 /// Loads the file's contents from disk.
382 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
383}
384
385/// The auto-indent behavior associated with an editing operation.
386/// For some editing operations, each affected line of text has its
387/// indentation recomputed. For other operations, the entire block
388/// of edited text is adjusted uniformly.
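///
/// A minimal sketch of choosing a mode when editing (not compiled as a doctest;
/// assumes a `Buffer`, a gpui `Context`, and a `pasted_text` string are in
/// scope, and uses the `edit(edits, autoindent_mode, cx)` form seen elsewhere
/// in this file):
///
/// ```ignore
/// // Re-indent each inserted line individually.
/// buffer.edit(
///     [(Point::new(2, 0)..Point::new(2, 0), "if ok {\n    done();\n}\n")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
///
/// // Shift a pasted block uniformly; `4` is a hypothetical column at which
/// // the copied text originally started.
/// buffer.edit(
///     [(Point::new(5, 0)..Point::new(5, 0), pasted_text)],
///     Some(AutoindentMode::Block { original_indent_columns: vec![4] }),
///     cx,
/// );
/// ```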
389#[derive(Clone, Debug)]
390pub enum AutoindentMode {
391 /// Indent each line of inserted text.
392 EachLine,
393 /// Apply the same indentation adjustment to all of the lines
394 /// in a given insertion.
395 Block {
396 /// The original indentation level of the first line of each
397 /// insertion, if it has been copied.
398 original_indent_columns: Vec<u32>,
399 },
400}
401
402#[derive(Clone)]
403struct AutoindentRequest {
404 before_edit: BufferSnapshot,
405 entries: Vec<AutoindentRequestEntry>,
406 is_block_mode: bool,
407 ignore_empty_lines: bool,
408}
409
410#[derive(Debug, Clone)]
411struct AutoindentRequestEntry {
412 /// A range of the buffer whose indentation should be adjusted.
413 range: Range<Anchor>,
414 /// Whether or not these lines should be considered brand new, for the
415 /// purpose of auto-indent. When text is not new, its indentation will
416 /// only be adjusted if the suggested indentation level has *changed*
417 /// since the edit was made.
418 first_line_is_new: bool,
419 indent_size: IndentSize,
420 original_indent_column: Option<u32>,
421}
422
423#[derive(Debug)]
424struct IndentSuggestion {
425 basis_row: u32,
426 delta: Ordering,
427 within_error: bool,
428}
429
430struct BufferChunkHighlights<'a> {
431 captures: SyntaxMapCaptures<'a>,
432 next_capture: Option<SyntaxMapCapture<'a>>,
433 stack: Vec<(usize, HighlightId)>,
434 highlight_maps: Vec<HighlightMap>,
435}
436
437/// An iterator that yields chunks of a buffer's text, along with their
438/// syntax highlights and diagnostic status.
439pub struct BufferChunks<'a> {
440 buffer_snapshot: Option<&'a BufferSnapshot>,
441 range: Range<usize>,
442 chunks: text::Chunks<'a>,
443 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
444 error_depth: usize,
445 warning_depth: usize,
446 information_depth: usize,
447 hint_depth: usize,
448 unnecessary_depth: usize,
449 highlights: Option<BufferChunkHighlights<'a>>,
450}
451
452/// A chunk of a buffer's text, along with its syntax highlight and
453/// diagnostic status.
454#[derive(Clone, Debug, Default)]
455pub struct Chunk<'a> {
456 /// The text of the chunk.
457 pub text: &'a str,
458 /// The syntax highlighting style of the chunk.
459 pub syntax_highlight_id: Option<HighlightId>,
460 /// The highlight style that has been applied to this chunk in
461 /// the editor.
462 pub highlight_style: Option<HighlightStyle>,
463 /// The severity of diagnostic associated with this chunk, if any.
464 pub diagnostic_severity: Option<DiagnosticSeverity>,
465 /// Whether this chunk of text is marked as unnecessary.
466 pub is_unnecessary: bool,
467 /// Whether this chunk of text was originally a tab character.
468 pub is_tab: bool,
469 /// An optional recipe for how the chunk should be presented.
470 pub renderer: Option<ChunkRenderer>,
471}
472
473/// A recipe for how the chunk should be presented.
474#[derive(Clone)]
475pub struct ChunkRenderer {
    /// Creates a custom element to represent this chunk.
477 pub render: Arc<dyn Send + Sync + Fn(&mut ChunkRendererContext) -> AnyElement>,
478 /// If true, the element is constrained to the shaped width of the text.
479 pub constrain_width: bool,
480}
481
482pub struct ChunkRendererContext<'a, 'b> {
483 pub window: &'a mut Window,
484 pub context: &'b mut App,
485 pub max_width: Pixels,
486}
487
488impl fmt::Debug for ChunkRenderer {
489 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
490 f.debug_struct("ChunkRenderer")
491 .field("constrain_width", &self.constrain_width)
492 .finish()
493 }
494}
495
496impl<'a, 'b> Deref for ChunkRendererContext<'a, 'b> {
497 type Target = App;
498
499 fn deref(&self) -> &Self::Target {
500 self.context
501 }
502}
503
504impl<'a, 'b> DerefMut for ChunkRendererContext<'a, 'b> {
505 fn deref_mut(&mut self) -> &mut Self::Target {
506 self.context
507 }
508}
509
510/// A set of edits to a given version of a buffer, computed asynchronously.
511#[derive(Debug)]
512pub struct Diff {
513 pub(crate) base_version: clock::Global,
514 line_ending: LineEnding,
515 pub edits: Vec<(Range<usize>, Arc<str>)>,
516}
517
518#[derive(Clone, Copy)]
519pub(crate) struct DiagnosticEndpoint {
520 offset: usize,
521 is_start: bool,
522 severity: DiagnosticSeverity,
523 is_unnecessary: bool,
524}
525
526/// A class of characters, used for characterizing a run of text.
527#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
528pub enum CharKind {
529 /// Whitespace.
530 Whitespace,
531 /// Punctuation.
532 Punctuation,
533 /// Word.
534 Word,
535}
536
/// A runnable is a set of data about a buffer region that can be resolved into a task.
538pub struct Runnable {
539 pub tags: SmallVec<[RunnableTag; 1]>,
540 pub language: Arc<Language>,
541 pub buffer: BufferId,
542}
543
544#[derive(Default, Clone, Debug)]
545pub struct HighlightedText {
546 pub text: SharedString,
547 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
548}
549
550#[derive(Default, Debug)]
551struct HighlightedTextBuilder {
552 pub text: String,
553 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
554}
555
556impl HighlightedText {
557 pub fn from_buffer_range<T: ToOffset>(
558 range: Range<T>,
559 snapshot: &text::BufferSnapshot,
560 syntax_snapshot: &SyntaxSnapshot,
561 override_style: Option<HighlightStyle>,
562 syntax_theme: &SyntaxTheme,
563 ) -> Self {
564 let mut highlighted_text = HighlightedTextBuilder::default();
565 highlighted_text.add_text_from_buffer_range(
566 range,
567 snapshot,
568 syntax_snapshot,
569 override_style,
570 syntax_theme,
571 );
572 highlighted_text.build()
573 }
574
575 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
576 gpui::StyledText::new(self.text.clone())
577 .with_highlights(default_style, self.highlights.iter().cloned())
578 }
579
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// starts within that whitespace, along with a boolean indicating whether any
    /// lines follow it.
582 pub fn first_line_preview(self) -> (Self, bool) {
583 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
584 let first_line = &self.text[..newline_ix];
585
586 // Trim leading whitespace, unless an edit starts prior to it.
587 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
588 if let Some((first_highlight_range, _)) = self.highlights.first() {
589 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
590 }
591
592 let preview_text = &first_line[preview_start_ix..];
593 let preview_highlights = self
594 .highlights
595 .into_iter()
596 .take_while(|(range, _)| range.start < newline_ix)
597 .filter_map(|(mut range, highlight)| {
598 range.start = range.start.saturating_sub(preview_start_ix);
599 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
600 if range.is_empty() {
601 None
602 } else {
603 Some((range, highlight))
604 }
605 });
606
607 let preview = Self {
608 text: SharedString::new(preview_text),
609 highlights: preview_highlights.collect(),
610 };
611
612 (preview, self.text.len() > newline_ix)
613 }
614}
615
616impl HighlightedTextBuilder {
617 pub fn build(self) -> HighlightedText {
618 HighlightedText {
619 text: self.text.into(),
620 highlights: self.highlights,
621 }
622 }
623
624 pub fn add_text_from_buffer_range<T: ToOffset>(
625 &mut self,
626 range: Range<T>,
627 snapshot: &text::BufferSnapshot,
628 syntax_snapshot: &SyntaxSnapshot,
629 override_style: Option<HighlightStyle>,
630 syntax_theme: &SyntaxTheme,
631 ) {
632 let range = range.to_offset(snapshot);
633 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
634 let start = self.text.len();
635 self.text.push_str(chunk.text);
636 let end = self.text.len();
637
638 if let Some(mut highlight_style) = chunk
639 .syntax_highlight_id
640 .and_then(|id| id.style(syntax_theme))
641 {
642 if let Some(override_style) = override_style {
643 highlight_style.highlight(override_style);
644 }
645 self.highlights.push((start..end, highlight_style));
646 } else if let Some(override_style) = override_style {
647 self.highlights.push((start..end, override_style));
648 }
649 }
650 }
651
652 fn highlighted_chunks<'a>(
653 range: Range<usize>,
654 snapshot: &'a text::BufferSnapshot,
655 syntax_snapshot: &'a SyntaxSnapshot,
656 ) -> BufferChunks<'a> {
657 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
658 grammar.highlights_query.as_ref()
659 });
660
661 let highlight_maps = captures
662 .grammars()
663 .iter()
664 .map(|grammar| grammar.highlight_map())
665 .collect();
666
667 BufferChunks::new(
668 snapshot.as_rope(),
669 range,
670 Some((captures, highlight_maps)),
671 false,
672 None,
673 )
674 }
675}
676
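/// A pair of text snapshots, taken before and after applying a proposed set of
/// edits, along with a reparsed syntax snapshot. Used to render a
/// syntax-highlighted preview of those edits via [`EditPreview::highlight_edits`].
///
/// A rough usage sketch (not compiled as a doctest; assumes an `Entity<Buffer>`,
/// an `edits: Arc<[(Range<Anchor>, String)]>`, and an async gpui context):
///
/// ```ignore
/// let preview = buffer
///     .update(cx, |buffer, cx| buffer.preview_edits(edits.clone(), cx))?
///     .await;
/// let highlighted = buffer.update(cx, |buffer, cx| {
///     preview.highlight_edits(&buffer.snapshot(), &edits, true, cx)
/// })?;
/// ```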
677#[derive(Clone)]
678pub struct EditPreview {
679 old_snapshot: text::BufferSnapshot,
680 applied_edits_snapshot: text::BufferSnapshot,
681 syntax_snapshot: SyntaxSnapshot,
682}
683
684impl EditPreview {
685 pub fn highlight_edits(
686 &self,
687 current_snapshot: &BufferSnapshot,
688 edits: &[(Range<Anchor>, String)],
689 include_deletions: bool,
690 cx: &App,
691 ) -> HighlightedText {
692 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
693 return HighlightedText::default();
694 };
695
696 let mut highlighted_text = HighlightedTextBuilder::default();
697
698 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
699
700 let insertion_highlight_style = HighlightStyle {
701 background_color: Some(cx.theme().status().created_background),
702 ..Default::default()
703 };
704 let deletion_highlight_style = HighlightStyle {
705 background_color: Some(cx.theme().status().deleted_background),
706 ..Default::default()
707 };
708 let syntax_theme = cx.theme().syntax();
709
710 for (range, edit_text) in edits {
711 let edit_new_end_in_preview_snapshot = range
712 .end
713 .bias_right(&self.old_snapshot)
714 .to_offset(&self.applied_edits_snapshot);
715 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
716
717 let unchanged_range_in_preview_snapshot =
718 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
719 if !unchanged_range_in_preview_snapshot.is_empty() {
720 highlighted_text.add_text_from_buffer_range(
721 unchanged_range_in_preview_snapshot,
722 &self.applied_edits_snapshot,
723 &self.syntax_snapshot,
724 None,
725 &syntax_theme,
726 );
727 }
728
729 let range_in_current_snapshot = range.to_offset(current_snapshot);
730 if include_deletions && !range_in_current_snapshot.is_empty() {
731 highlighted_text.add_text_from_buffer_range(
732 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
735 Some(deletion_highlight_style),
736 &syntax_theme,
737 );
738 }
739
740 if !edit_text.is_empty() {
741 highlighted_text.add_text_from_buffer_range(
742 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
743 &self.applied_edits_snapshot,
744 &self.syntax_snapshot,
745 Some(insertion_highlight_style),
746 &syntax_theme,
747 );
748 }
749
750 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
751 }
752
753 highlighted_text.add_text_from_buffer_range(
754 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
755 &self.applied_edits_snapshot,
756 &self.syntax_snapshot,
757 None,
758 &syntax_theme,
759 );
760
761 highlighted_text.build()
762 }
763
764 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
765 let (first, _) = edits.first()?;
766 let (last, _) = edits.last()?;
767
768 let start = first
769 .start
770 .bias_left(&self.old_snapshot)
771 .to_point(&self.applied_edits_snapshot);
772 let end = last
773 .end
774 .bias_right(&self.old_snapshot)
775 .to_point(&self.applied_edits_snapshot);
776
777 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
778 let range = Point::new(start.row, 0)
779 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
780
781 Some(range.to_offset(&self.applied_edits_snapshot))
782 }
783}
784
785impl Buffer {
786 /// Create a new buffer with the given base text.
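    ///
    /// A minimal sketch (not compiled as a doctest; assumes a gpui `App` is in
    /// scope, e.g. in a test):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}\n", cx));
    /// ```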
787 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
788 Self::build(
789 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
790 None,
791 Capability::ReadWrite,
792 )
793 }
794
795 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
796 pub fn local_normalized(
797 base_text_normalized: Rope,
798 line_ending: LineEnding,
799 cx: &Context<Self>,
800 ) -> Self {
801 Self::build(
802 TextBuffer::new_normalized(
803 0,
804 cx.entity_id().as_non_zero_u64().into(),
805 line_ending,
806 base_text_normalized,
807 ),
808 None,
809 Capability::ReadWrite,
810 )
811 }
812
813 /// Create a new buffer that is a replica of a remote buffer.
814 pub fn remote(
815 remote_id: BufferId,
816 replica_id: ReplicaId,
817 capability: Capability,
818 base_text: impl Into<String>,
819 ) -> Self {
820 Self::build(
821 TextBuffer::new(replica_id, remote_id, base_text.into()),
822 None,
823 capability,
824 )
825 }
826
827 /// Create a new buffer that is a replica of a remote buffer, populating its
828 /// state from the given protobuf message.
829 pub fn from_proto(
830 replica_id: ReplicaId,
831 capability: Capability,
832 message: proto::BufferState,
833 file: Option<Arc<dyn File>>,
834 ) -> Result<Self> {
835 let buffer_id = BufferId::new(message.id)
836 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
837 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
838 let mut this = Self::build(buffer, file, capability);
839 this.text.set_line_ending(proto::deserialize_line_ending(
840 rpc::proto::LineEnding::from_i32(message.line_ending)
841 .ok_or_else(|| anyhow!("missing line_ending"))?,
842 ));
843 this.saved_version = proto::deserialize_version(&message.saved_version);
844 this.saved_mtime = message.saved_mtime.map(|time| time.into());
845 Ok(this)
846 }
847
848 /// Serialize the buffer's state to a protobuf message.
849 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
850 proto::BufferState {
851 id: self.remote_id().into(),
852 file: self.file.as_ref().map(|f| f.to_proto(cx)),
853 base_text: self.base_text().to_string(),
854 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
855 saved_version: proto::serialize_version(&self.saved_version),
856 saved_mtime: self.saved_mtime.map(|time| time.into()),
857 }
858 }
859
860 /// Serialize as protobufs all of the changes to the buffer since the given version.
861 pub fn serialize_ops(
862 &self,
863 since: Option<clock::Global>,
864 cx: &App,
865 ) -> Task<Vec<proto::Operation>> {
866 let mut operations = Vec::new();
867 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
868
869 operations.extend(self.remote_selections.iter().map(|(_, set)| {
870 proto::serialize_operation(&Operation::UpdateSelections {
871 selections: set.selections.clone(),
872 lamport_timestamp: set.lamport_timestamp,
873 line_mode: set.line_mode,
874 cursor_shape: set.cursor_shape,
875 })
876 }));
877
878 for (server_id, diagnostics) in &self.diagnostics {
879 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
880 lamport_timestamp: self.diagnostics_timestamp,
881 server_id: *server_id,
882 diagnostics: diagnostics.iter().cloned().collect(),
883 }));
884 }
885
886 for (server_id, completions) in &self.completion_triggers_per_language_server {
887 operations.push(proto::serialize_operation(
888 &Operation::UpdateCompletionTriggers {
889 triggers: completions.iter().cloned().collect(),
890 lamport_timestamp: self.completion_triggers_timestamp,
891 server_id: *server_id,
892 },
893 ));
894 }
895
896 let text_operations = self.text.operations().clone();
897 cx.background_spawn(async move {
898 let since = since.unwrap_or_default();
899 operations.extend(
900 text_operations
901 .iter()
902 .filter(|(_, op)| !since.observed(op.timestamp()))
903 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
904 );
905 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
906 operations
907 })
908 }
909
910 /// Assign a language to the buffer, returning the buffer.
911 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
912 self.set_language(Some(language), cx);
913 self
914 }
915
916 /// Returns the [`Capability`] of this buffer.
917 pub fn capability(&self) -> Capability {
918 self.capability
919 }
920
921 /// Whether this buffer can only be read.
922 pub fn read_only(&self) -> bool {
923 self.capability == Capability::ReadOnly
924 }
925
926 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
927 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
928 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
929 let snapshot = buffer.snapshot();
930 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
931 Self {
932 saved_mtime,
933 saved_version: buffer.version(),
934 preview_version: buffer.version(),
935 reload_task: None,
936 transaction_depth: 0,
937 was_dirty_before_starting_transaction: None,
938 has_unsaved_edits: Cell::new((buffer.version(), false)),
939 text: buffer,
940 branch_state: None,
941 file,
942 capability,
943 syntax_map,
944 parsing_in_background: false,
945 non_text_state_update_count: 0,
946 sync_parse_timeout: Duration::from_millis(1),
947 parse_status: async_watch::channel(ParseStatus::Idle),
948 autoindent_requests: Default::default(),
949 pending_autoindent: Default::default(),
950 language: None,
951 remote_selections: Default::default(),
952 diagnostics: Default::default(),
953 diagnostics_timestamp: Default::default(),
954 completion_triggers: Default::default(),
955 completion_triggers_per_language_server: Default::default(),
956 completion_triggers_timestamp: Default::default(),
957 deferred_ops: OperationQueue::new(),
958 has_conflict: false,
959 _subscriptions: Vec::new(),
960 }
961 }
962
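    /// Asynchronously builds a [`BufferSnapshot`] for the given text, reparsing
    /// it with the given language if one is provided, without constructing a
    /// full `Buffer` entity.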
963 pub fn build_snapshot(
964 text: Rope,
965 language: Option<Arc<Language>>,
966 language_registry: Option<Arc<LanguageRegistry>>,
967 cx: &mut App,
968 ) -> impl Future<Output = BufferSnapshot> {
969 let entity_id = cx.reserve_entity::<Self>().entity_id();
970 let buffer_id = entity_id.as_non_zero_u64().into();
971 async move {
972 let text =
973 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
974 let mut syntax = SyntaxMap::new(&text).snapshot();
975 if let Some(language) = language.clone() {
976 let text = text.clone();
977 let language = language.clone();
978 let language_registry = language_registry.clone();
979 syntax.reparse(&text, language_registry, language);
980 }
981 BufferSnapshot {
982 text,
983 syntax,
984 file: None,
985 diagnostics: Default::default(),
986 remote_selections: Default::default(),
987 language,
988 non_text_state_update_count: 0,
989 }
990 }
991 }
992
993 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
994 let entity_id = cx.reserve_entity::<Self>().entity_id();
995 let buffer_id = entity_id.as_non_zero_u64().into();
996 let text =
997 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
998 let syntax = SyntaxMap::new(&text).snapshot();
999 BufferSnapshot {
1000 text,
1001 syntax,
1002 file: None,
1003 diagnostics: Default::default(),
1004 remote_selections: Default::default(),
1005 language: None,
1006 non_text_state_update_count: 0,
1007 }
1008 }
1009
1010 #[cfg(any(test, feature = "test-support"))]
1011 pub fn build_snapshot_sync(
1012 text: Rope,
1013 language: Option<Arc<Language>>,
1014 language_registry: Option<Arc<LanguageRegistry>>,
1015 cx: &mut App,
1016 ) -> BufferSnapshot {
1017 let entity_id = cx.reserve_entity::<Self>().entity_id();
1018 let buffer_id = entity_id.as_non_zero_u64().into();
1019 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1020 let mut syntax = SyntaxMap::new(&text).snapshot();
1021 if let Some(language) = language.clone() {
1022 let text = text.clone();
1023 let language = language.clone();
1024 let language_registry = language_registry.clone();
1025 syntax.reparse(&text, language_registry, language);
1026 }
1027 BufferSnapshot {
1028 text,
1029 syntax,
1030 file: None,
1031 diagnostics: Default::default(),
1032 remote_selections: Default::default(),
1033 language,
1034 non_text_state_update_count: 0,
1035 }
1036 }
1037
1038 /// Retrieve a snapshot of the buffer's current state. This is computationally
1039 /// cheap, and allows reading from the buffer on a background thread.
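    ///
    /// A rough sketch of reading on a background thread (not compiled as a
    /// doctest; assumes a gpui `App` context):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot is an independent, immutable copy, so it can be
    ///     // inspected off the main thread.
    ///     let line_count = snapshot.max_point().row + 1;
    ///     println!("{line_count} lines");
    /// })
    /// .detach();
    /// ```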
1040 pub fn snapshot(&self) -> BufferSnapshot {
1041 let text = self.text.snapshot();
1042 let mut syntax_map = self.syntax_map.lock();
1043 syntax_map.interpolate(&text);
1044 let syntax = syntax_map.snapshot();
1045
1046 BufferSnapshot {
1047 text,
1048 syntax,
1049 file: self.file.clone(),
1050 remote_selections: self.remote_selections.clone(),
1051 diagnostics: self.diagnostics.clone(),
1052 language: self.language.clone(),
1053 non_text_state_update_count: self.non_text_state_update_count,
1054 }
1055 }
1056
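    /// Creates a branch of this buffer: a new buffer that starts from this
    /// buffer's current state and can be edited independently. Edits made on
    /// the branch can later be applied back with [`Buffer::merge_into_base`].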
1057 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1058 let this = cx.entity();
1059 cx.new(|cx| {
1060 let mut branch = Self {
1061 branch_state: Some(BufferBranchState {
1062 base_buffer: this.clone(),
1063 merged_operations: Default::default(),
1064 }),
1065 language: self.language.clone(),
1066 has_conflict: self.has_conflict,
1067 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1068 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1069 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1070 };
1071 if let Some(language_registry) = self.language_registry() {
1072 branch.set_language_registry(language_registry);
1073 }
1074
1075 // Reparse the branch buffer so that we get syntax highlighting immediately.
1076 branch.reparse(cx);
1077
1078 branch
1079 })
1080 }
1081
1082 pub fn preview_edits(
1083 &self,
1084 edits: Arc<[(Range<Anchor>, String)]>,
1085 cx: &App,
1086 ) -> Task<EditPreview> {
1087 let registry = self.language_registry();
1088 let language = self.language().cloned();
1089 let old_snapshot = self.text.snapshot();
1090 let mut branch_buffer = self.text.branch();
1091 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1092 cx.background_spawn(async move {
1093 if !edits.is_empty() {
1094 if let Some(language) = language.clone() {
1095 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1096 }
1097
1098 branch_buffer.edit(edits.iter().cloned());
1099 let snapshot = branch_buffer.snapshot();
1100 syntax_snapshot.interpolate(&snapshot);
1101
1102 if let Some(language) = language {
1103 syntax_snapshot.reparse(&snapshot, registry, language);
1104 }
1105 }
1106 EditPreview {
1107 old_snapshot,
1108 applied_edits_snapshot: branch_buffer.snapshot(),
1109 syntax_snapshot,
1110 }
1111 })
1112 }
1113
1114 /// Applies all of the changes in this buffer that intersect any of the
1115 /// given `ranges` to its base buffer.
1116 ///
1117 /// If `ranges` is empty, then all changes will be applied. This buffer must
1118 /// be a branch buffer to call this method.
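    ///
    /// A rough sketch of the branch-and-merge flow (not compiled as a doctest;
    /// assumes a gpui `App` and an `Entity<Buffer>`):
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// edited on the branch\n")], None, cx);
    ///     // Apply every change from the branch back to the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```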
1119 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1120 let Some(base_buffer) = self.base_buffer() else {
1121 debug_panic!("not a branch buffer");
1122 return;
1123 };
1124
1125 let mut ranges = if ranges.is_empty() {
1126 &[0..usize::MAX]
1127 } else {
1128 ranges.as_slice()
1129 }
1130 .into_iter()
1131 .peekable();
1132
1133 let mut edits = Vec::new();
1134 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1135 let mut is_included = false;
1136 while let Some(range) = ranges.peek() {
1137 if range.end < edit.new.start {
1138 ranges.next().unwrap();
1139 } else {
1140 if range.start <= edit.new.end {
1141 is_included = true;
1142 }
1143 break;
1144 }
1145 }
1146
1147 if is_included {
1148 edits.push((
1149 edit.old.clone(),
1150 self.text_for_range(edit.new.clone()).collect::<String>(),
1151 ));
1152 }
1153 }
1154
1155 let operation = base_buffer.update(cx, |base_buffer, cx| {
1156 // cx.emit(BufferEvent::DiffBaseChanged);
1157 base_buffer.edit(edits, None, cx)
1158 });
1159
1160 if let Some(operation) = operation {
1161 if let Some(BufferBranchState {
1162 merged_operations, ..
1163 }) = &mut self.branch_state
1164 {
1165 merged_operations.push(operation);
1166 }
1167 }
1168 }
1169
1170 fn on_base_buffer_event(
1171 &mut self,
1172 _: Entity<Buffer>,
1173 event: &BufferEvent,
1174 cx: &mut Context<Self>,
1175 ) {
1176 let BufferEvent::Operation { operation, .. } = event else {
1177 return;
1178 };
1179 let Some(BufferBranchState {
1180 merged_operations, ..
1181 }) = &mut self.branch_state
1182 else {
1183 return;
1184 };
1185
1186 let mut operation_to_undo = None;
1187 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1188 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1189 merged_operations.remove(ix);
1190 operation_to_undo = Some(operation.timestamp);
1191 }
1192 }
1193
1194 self.apply_ops([operation.clone()], cx);
1195
1196 if let Some(timestamp) = operation_to_undo {
1197 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1198 self.undo_operations(counts, cx);
1199 }
1200 }
1201
1202 #[cfg(test)]
1203 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1204 &self.text
1205 }
1206
1207 /// Retrieve a snapshot of the buffer's raw text, without any
1208 /// language-related state like the syntax tree or diagnostics.
1209 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1210 self.text.snapshot()
1211 }
1212
1213 /// The file associated with the buffer, if any.
1214 pub fn file(&self) -> Option<&Arc<dyn File>> {
1215 self.file.as_ref()
1216 }
1217
1218 /// The version of the buffer that was last saved or reloaded from disk.
1219 pub fn saved_version(&self) -> &clock::Global {
1220 &self.saved_version
1221 }
1222
1223 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1224 pub fn saved_mtime(&self) -> Option<MTime> {
1225 self.saved_mtime
1226 }
1227
1228 /// Assign a language to the buffer.
1229 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1230 self.non_text_state_update_count += 1;
1231 self.syntax_map.lock().clear(&self.text);
1232 self.language = language;
1233 self.reparse(cx);
1234 cx.emit(BufferEvent::LanguageChanged);
1235 }
1236
1237 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1238 /// other languages if parts of the buffer are written in different languages.
1239 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1240 self.syntax_map
1241 .lock()
1242 .set_language_registry(language_registry);
1243 }
1244
1245 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1246 self.syntax_map.lock().language_registry()
1247 }
1248
1249 /// Assign the buffer a new [`Capability`].
1250 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1251 self.capability = capability;
1252 cx.emit(BufferEvent::CapabilityChanged)
1253 }
1254
1255 /// This method is called to signal that the buffer has been saved.
1256 pub fn did_save(
1257 &mut self,
1258 version: clock::Global,
1259 mtime: Option<MTime>,
1260 cx: &mut Context<Self>,
1261 ) {
1262 self.saved_version = version;
1263 self.has_unsaved_edits
1264 .set((self.saved_version().clone(), false));
1265 self.has_conflict = false;
1266 self.saved_mtime = mtime;
1267 cx.emit(BufferEvent::Saved);
1268 cx.notify();
1269 }
1270
1271 /// This method is called to signal that the buffer has been discarded.
1272 pub fn discarded(&self, cx: &mut Context<Self>) {
1273 cx.emit(BufferEvent::Discarded);
1274 cx.notify();
1275 }
1276
1277 /// Reloads the contents of the buffer from disk.
1278 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1279 let (tx, rx) = futures::channel::oneshot::channel();
1280 let prev_version = self.text.version();
1281 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
1282 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
1283 let file = this.file.as_ref()?.as_local()?;
1284 Some((file.disk_state().mtime(), file.load(cx)))
1285 })?
1286 else {
1287 return Ok(());
1288 };
1289
1290 let new_text = new_text.await?;
1291 let diff = this
1292 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
1293 .await;
1294 this.update(&mut cx, |this, cx| {
1295 if this.version() == diff.base_version {
1296 this.finalize_last_transaction();
1297 this.apply_diff(diff, cx);
1298 tx.send(this.finalize_last_transaction().cloned()).ok();
1299 this.has_conflict = false;
1300 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1301 } else {
1302 if !diff.edits.is_empty()
1303 || this
1304 .edits_since::<usize>(&diff.base_version)
1305 .next()
1306 .is_some()
1307 {
1308 this.has_conflict = true;
1309 }
1310
1311 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1312 }
1313
1314 this.reload_task.take();
1315 })
1316 }));
1317 rx
1318 }
1319
1320 /// This method is called to signal that the buffer has been reloaded.
1321 pub fn did_reload(
1322 &mut self,
1323 version: clock::Global,
1324 line_ending: LineEnding,
1325 mtime: Option<MTime>,
1326 cx: &mut Context<Self>,
1327 ) {
1328 self.saved_version = version;
1329 self.has_unsaved_edits
1330 .set((self.saved_version.clone(), false));
1331 self.text.set_line_ending(line_ending);
1332 self.saved_mtime = mtime;
1333 cx.emit(BufferEvent::Reloaded);
1334 cx.notify();
1335 }
1336
1337 /// Updates the [`File`] backing this buffer. This should be called when
1338 /// the file has changed or has been deleted.
1339 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1340 let was_dirty = self.is_dirty();
1341 let mut file_changed = false;
1342
1343 if let Some(old_file) = self.file.as_ref() {
1344 if new_file.path() != old_file.path() {
1345 file_changed = true;
1346 }
1347
1348 let old_state = old_file.disk_state();
1349 let new_state = new_file.disk_state();
1350 if old_state != new_state {
1351 file_changed = true;
1352 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1353 cx.emit(BufferEvent::ReloadNeeded)
1354 }
1355 }
1356 } else {
1357 file_changed = true;
1358 };
1359
1360 self.file = Some(new_file);
1361 if file_changed {
1362 self.non_text_state_update_count += 1;
1363 if was_dirty != self.is_dirty() {
1364 cx.emit(BufferEvent::DirtyChanged);
1365 }
1366 cx.emit(BufferEvent::FileHandleChanged);
1367 cx.notify();
1368 }
1369 }
1370
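    /// If this buffer is a branch, returns the buffer it was branched from.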
1371 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1372 Some(self.branch_state.as_ref()?.base_buffer.clone())
1373 }
1374
1375 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1376 pub fn language(&self) -> Option<&Arc<Language>> {
1377 self.language.as_ref()
1378 }
1379
1380 /// Returns the [`Language`] at the given location.
1381 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1382 let offset = position.to_offset(self);
1383 self.syntax_map
1384 .lock()
1385 .layers_for_range(offset..offset, &self.text, false)
1386 .last()
1387 .map(|info| info.language.clone())
1388 .or_else(|| self.language.clone())
1389 }
1390
1391 /// An integer version number that accounts for all updates besides
1392 /// the buffer's text itself (which is versioned via a version vector).
1393 pub fn non_text_state_update_count(&self) -> usize {
1394 self.non_text_state_update_count
1395 }
1396
1397 /// Whether the buffer is being parsed in the background.
1398 #[cfg(any(test, feature = "test-support"))]
1399 pub fn is_parsing(&self) -> bool {
1400 self.parsing_in_background
1401 }
1402
1403 /// Indicates whether the buffer contains any regions that may be
1404 /// written in a language that hasn't been loaded yet.
1405 pub fn contains_unknown_injections(&self) -> bool {
1406 self.syntax_map.lock().contains_unknown_injections()
1407 }
1408
1409 #[cfg(test)]
1410 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1411 self.sync_parse_timeout = timeout;
1412 }
1413
1414 /// Called after an edit to synchronize the buffer's main parse tree with
1415 /// the buffer's new underlying state.
1416 ///
1417 /// Locks the syntax map and interpolates the edits since the last reparse
1418 /// into the foreground syntax tree.
1419 ///
1420 /// Then takes a stable snapshot of the syntax map before unlocking it.
1421 /// The snapshot with the interpolated edits is sent to a background thread,
1422 /// where we ask Tree-sitter to perform an incremental parse.
1423 ///
    /// Meanwhile, in the foreground, we block the main thread for up to
    /// `sync_parse_timeout` (1ms by default) waiting for the parse to complete.
    /// If it finishes within that window, we adopt the new tree synchronously.
    ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// for the parse to complete, and we return with the interpolated tree
    /// still in the foreground. When the background parse finishes, it calls
    /// back into the main thread and assigns the newly parsed state.
1432 ///
1433 /// If the buffer or grammar changed since the start of the background parse,
1434 /// initiate an additional reparse recursively. To avoid concurrent parses
1435 /// for the same buffer, we only initiate a new parse if we are not already
1436 /// parsing in the background.
1437 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1438 if self.parsing_in_background {
1439 return;
1440 }
1441 let language = if let Some(language) = self.language.clone() {
1442 language
1443 } else {
1444 return;
1445 };
1446
1447 let text = self.text_snapshot();
1448 let parsed_version = self.version();
1449
1450 let mut syntax_map = self.syntax_map.lock();
1451 syntax_map.interpolate(&text);
1452 let language_registry = syntax_map.language_registry();
1453 let mut syntax_snapshot = syntax_map.snapshot();
1454 drop(syntax_map);
1455
1456 let parse_task = cx.background_spawn({
1457 let language = language.clone();
1458 let language_registry = language_registry.clone();
1459 async move {
1460 syntax_snapshot.reparse(&text, language_registry, language);
1461 syntax_snapshot
1462 }
1463 });
1464
1465 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1466 match cx
1467 .background_executor()
1468 .block_with_timeout(self.sync_parse_timeout, parse_task)
1469 {
1470 Ok(new_syntax_snapshot) => {
1471 self.did_finish_parsing(new_syntax_snapshot, cx);
1472 }
1473 Err(parse_task) => {
1474 self.parsing_in_background = true;
1475 cx.spawn(move |this, mut cx| async move {
1476 let new_syntax_map = parse_task.await;
1477 this.update(&mut cx, move |this, cx| {
1478 let grammar_changed =
1479 this.language.as_ref().map_or(true, |current_language| {
1480 !Arc::ptr_eq(&language, current_language)
1481 });
1482 let language_registry_changed = new_syntax_map
1483 .contains_unknown_injections()
1484 && language_registry.map_or(false, |registry| {
1485 registry.version() != new_syntax_map.language_registry_version()
1486 });
1487 let parse_again = language_registry_changed
1488 || grammar_changed
1489 || this.version.changed_since(&parsed_version);
1490 this.did_finish_parsing(new_syntax_map, cx);
1491 this.parsing_in_background = false;
1492 if parse_again {
1493 this.reparse(cx);
1494 }
1495 })
1496 .ok();
1497 })
1498 .detach();
1499 }
1500 }
1501 }
1502
1503 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1504 self.non_text_state_update_count += 1;
1505 self.syntax_map.lock().did_parse(syntax_snapshot);
1506 self.request_autoindent(cx);
1507 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1508 cx.emit(BufferEvent::Reparsed);
1509 cx.notify();
1510 }
1511
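    /// Returns a receiver that reports whether the buffer is currently being
    /// reparsed in the background.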
1512 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1513 self.parse_status.1.clone()
1514 }
1515
1516 /// Assign to the buffer a set of diagnostics created by a given language server.
1517 pub fn update_diagnostics(
1518 &mut self,
1519 server_id: LanguageServerId,
1520 diagnostics: DiagnosticSet,
1521 cx: &mut Context<Self>,
1522 ) {
1523 let lamport_timestamp = self.text.lamport_clock.tick();
1524 let op = Operation::UpdateDiagnostics {
1525 server_id,
1526 diagnostics: diagnostics.iter().cloned().collect(),
1527 lamport_timestamp,
1528 };
1529 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1530 self.send_operation(op, true, cx);
1531 }
1532
1533 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1534 if let Some(indent_sizes) = self.compute_autoindents() {
1535 let indent_sizes = cx.background_spawn(indent_sizes);
1536 match cx
1537 .background_executor()
1538 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1539 {
1540 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1541 Err(indent_sizes) => {
1542 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1543 let indent_sizes = indent_sizes.await;
1544 this.update(&mut cx, |this, cx| {
1545 this.apply_autoindents(indent_sizes, cx);
1546 })
1547 .ok();
1548 }));
1549 }
1550 }
1551 } else {
1552 self.autoindent_requests.clear();
1553 }
1554 }
1555
1556 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
1557 let max_rows_between_yields = 100;
1558 let snapshot = self.snapshot();
1559 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1560 return None;
1561 }
1562
1563 let autoindent_requests = self.autoindent_requests.clone();
1564 Some(async move {
1565 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1566 for request in autoindent_requests {
1567 // Resolve each edited range to its row in the current buffer and in the
1568 // buffer before this batch of edits.
1569 let mut row_ranges = Vec::new();
1570 let mut old_to_new_rows = BTreeMap::new();
1571 let mut language_indent_sizes_by_new_row = Vec::new();
1572 for entry in &request.entries {
1573 let position = entry.range.start;
1574 let new_row = position.to_point(&snapshot).row;
1575 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1576 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1577
1578 if !entry.first_line_is_new {
1579 let old_row = position.to_point(&request.before_edit).row;
1580 old_to_new_rows.insert(old_row, new_row);
1581 }
1582 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1583 }
1584
1585 // Build a map containing the suggested indentation for each of the edited lines
1586 // with respect to the state of the buffer before these edits. This map is keyed
1587 // by the rows for these lines in the current state of the buffer.
1588 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1589 let old_edited_ranges =
1590 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1591 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1592 let mut language_indent_size = IndentSize::default();
1593 for old_edited_range in old_edited_ranges {
1594 let suggestions = request
1595 .before_edit
1596 .suggest_autoindents(old_edited_range.clone())
1597 .into_iter()
1598 .flatten();
1599 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1600 if let Some(suggestion) = suggestion {
1601 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1602
1603 // Find the indent size based on the language for this row.
1604 while let Some((row, size)) = language_indent_sizes.peek() {
1605 if *row > new_row {
1606 break;
1607 }
1608 language_indent_size = *size;
1609 language_indent_sizes.next();
1610 }
1611
1612 let suggested_indent = old_to_new_rows
1613 .get(&suggestion.basis_row)
1614 .and_then(|from_row| {
1615 Some(old_suggestions.get(from_row).copied()?.0)
1616 })
1617 .unwrap_or_else(|| {
1618 request
1619 .before_edit
1620 .indent_size_for_line(suggestion.basis_row)
1621 })
1622 .with_delta(suggestion.delta, language_indent_size);
1623 old_suggestions
1624 .insert(new_row, (suggested_indent, suggestion.within_error));
1625 }
1626 }
1627 yield_now().await;
1628 }
1629
1630 // Compute new suggestions for each line, but only include them in the result
1631 // if they differ from the old suggestion for that line.
1632 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1633 let mut language_indent_size = IndentSize::default();
1634 for (row_range, original_indent_column) in row_ranges {
1635 let new_edited_row_range = if request.is_block_mode {
1636 row_range.start..row_range.start + 1
1637 } else {
1638 row_range.clone()
1639 };
1640
1641 let suggestions = snapshot
1642 .suggest_autoindents(new_edited_row_range.clone())
1643 .into_iter()
1644 .flatten();
1645 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1646 if let Some(suggestion) = suggestion {
1647 // Find the indent size based on the language for this row.
1648 while let Some((row, size)) = language_indent_sizes.peek() {
1649 if *row > new_row {
1650 break;
1651 }
1652 language_indent_size = *size;
1653 language_indent_sizes.next();
1654 }
1655
1656 let suggested_indent = indent_sizes
1657 .get(&suggestion.basis_row)
1658 .copied()
1659 .map(|e| e.0)
1660 .unwrap_or_else(|| {
1661 snapshot.indent_size_for_line(suggestion.basis_row)
1662 })
1663 .with_delta(suggestion.delta, language_indent_size);
1664
1665 if old_suggestions.get(&new_row).map_or(
1666 true,
1667 |(old_indentation, was_within_error)| {
1668 suggested_indent != *old_indentation
1669 && (!suggestion.within_error || *was_within_error)
1670 },
1671 ) {
1672 indent_sizes.insert(
1673 new_row,
1674 (suggested_indent, request.ignore_empty_lines),
1675 );
1676 }
1677 }
1678 }
1679
1680 if let (true, Some(original_indent_column)) =
1681 (request.is_block_mode, original_indent_column)
1682 {
1683 let new_indent =
1684 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1685 *indent
1686 } else {
1687 snapshot.indent_size_for_line(row_range.start)
1688 };
1689 let delta = new_indent.len as i64 - original_indent_column as i64;
1690 if delta != 0 {
1691 for row in row_range.skip(1) {
1692 indent_sizes.entry(row).or_insert_with(|| {
1693 let mut size = snapshot.indent_size_for_line(row);
1694 if size.kind == new_indent.kind {
1695 match delta.cmp(&0) {
1696 Ordering::Greater => size.len += delta as u32,
1697 Ordering::Less => {
1698 size.len = size.len.saturating_sub(-delta as u32)
1699 }
1700 Ordering::Equal => {}
1701 }
1702 }
1703 (size, request.ignore_empty_lines)
1704 });
1705 }
1706 }
1707 }
1708
1709 yield_now().await;
1710 }
1711 }
1712
1713 indent_sizes
1714 .into_iter()
1715 .filter_map(|(row, (indent, ignore_empty_lines))| {
1716 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1717 None
1718 } else {
1719 Some((row, indent))
1720 }
1721 })
1722 .collect()
1723 })
1724 }
1725
1726 fn apply_autoindents(
1727 &mut self,
1728 indent_sizes: BTreeMap<u32, IndentSize>,
1729 cx: &mut Context<Self>,
1730 ) {
1731 self.autoindent_requests.clear();
1732
1733 let edits: Vec<_> = indent_sizes
1734 .into_iter()
1735 .filter_map(|(row, indent_size)| {
1736 let current_size = indent_size_for_line(self, row);
1737 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1738 })
1739 .collect();
1740
1741 let preserve_preview = self.preserve_preview();
1742 self.edit(edits, None, cx);
1743 if preserve_preview {
1744 self.refresh_preview();
1745 }
1746 }
1747
1748 /// Create a minimal edit that will cause the given row to be indented
1749 /// with the given size. After applying this edit, the length of the line
1750 /// will always be at least `new_size.len`.
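    ///
    /// A rough illustration of the expected shape of the result (not compiled as
    /// a doctest):
    ///
    /// ```ignore
    /// // Grow a 2-space indent on row 3 to 6 spaces: the edit inserts the
    /// // missing 4 spaces at the start of the line.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(6),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "    ".to_string()))
    /// );
    /// ```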
1751 pub fn edit_for_indent_size_adjustment(
1752 row: u32,
1753 current_size: IndentSize,
1754 new_size: IndentSize,
1755 ) -> Option<(Range<Point>, String)> {
1756 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1758 Ordering::Greater => {
1759 let point = Point::new(row, 0);
1760 Some((
1761 point..point,
1762 iter::repeat(new_size.char())
1763 .take((new_size.len - current_size.len) as usize)
1764 .collect::<String>(),
1765 ))
1766 }
1767
1768 Ordering::Less => Some((
1769 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1770 String::new(),
1771 )),
1772
1773 Ordering::Equal => None,
1774 }
1775 } else {
1776 Some((
1777 Point::new(row, 0)..Point::new(row, current_size.len),
1778 iter::repeat(new_size.char())
1779 .take(new_size.len as usize)
1780 .collect::<String>(),
1781 ))
1782 }
1783 }
1784
1785 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1786 /// and the given new text.
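    ///
    /// A condensed sketch (not compiled as a doctest; `buffer` is assumed to be an
    /// `Entity<Buffer>`, and awaiting the returned task is elided):
    ///
    /// ```ignore
    /// // Kick off the diff on the background executor...
    /// let diff_task = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx));
    /// // ...then, once the task resolves to a `Diff`, apply it. Hunks that
    /// // conflict with edits made in the meantime are dropped by `apply_diff`.
    /// let transaction = buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx));
    /// ```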
1787 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1788 let old_text = self.as_rope().clone();
1789 let base_version = self.version();
1790 cx.background_executor()
1791 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1792 let old_text = old_text.to_string();
1793 let line_ending = LineEnding::detect(&new_text);
1794 LineEnding::normalize(&mut new_text);
1795 let edits = text_diff(&old_text, &new_text);
1796 Diff {
1797 base_version,
1798 line_ending,
1799 edits,
1800 }
1801 })
1802 }
1803
    /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1806 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1807 let old_text = self.as_rope().clone();
1808 let line_ending = self.line_ending();
1809 let base_version = self.version();
1810 cx.background_spawn(async move {
1811 let ranges = trailing_whitespace_ranges(&old_text);
1812 let empty = Arc::<str>::from("");
1813 Diff {
1814 base_version,
1815 line_ending,
1816 edits: ranges
1817 .into_iter()
1818 .map(|range| (range, empty.clone()))
1819 .collect(),
1820 }
1821 })
1822 }
1823
1824 /// Ensures that the buffer ends with a single newline character, and
1825 /// no other whitespace.
1826 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1827 let len = self.len();
1828 let mut offset = len;
1829 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1830 let non_whitespace_len = chunk
1831 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1832 .len();
1833 offset -= chunk.len();
1834 offset += non_whitespace_len;
1835 if non_whitespace_len != 0 {
1836 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1837 return;
1838 }
1839 break;
1840 }
1841 }
1842 self.edit([(offset..len, "\n")], None, cx);
1843 }
1844
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, the diff is adjusted to account for those changes, and any parts of the
    /// diff that conflict with those changes are discarded.
1848 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1849 // Check for any edits to the buffer that have occurred since this diff
1850 // was computed.
1851 let snapshot = self.snapshot();
1852 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1853 let mut delta = 0;
1854 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1855 while let Some(edit_since) = edits_since.peek() {
1856 // If the edit occurs after a diff hunk, then it does not
1857 // affect that hunk.
1858 if edit_since.old.start > range.end {
1859 break;
1860 }
1861 // If the edit precedes the diff hunk, then adjust the hunk
1862 // to reflect the edit.
1863 else if edit_since.old.end < range.start {
1864 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1865 edits_since.next();
1866 }
1867 // If the edit intersects a diff hunk, then discard that hunk.
1868 else {
1869 return None;
1870 }
1871 }
1872
1873 let start = (range.start as i64 + delta) as usize;
1874 let end = (range.end as i64 + delta) as usize;
1875 Some((start..end, new_text))
1876 });
1877
1878 self.start_transaction();
1879 self.text.set_line_ending(diff.line_ending);
1880 self.edit(adjusted_edits, None, cx);
1881 self.end_transaction(cx)
1882 }
1883
1884 fn has_unsaved_edits(&self) -> bool {
1885 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1886
1887 if last_version == self.version {
1888 self.has_unsaved_edits
1889 .set((last_version, has_unsaved_edits));
1890 return has_unsaved_edits;
1891 }
1892
1893 let has_edits = self.has_edits_since(&self.saved_version);
1894 self.has_unsaved_edits
1895 .set((self.version.clone(), has_edits));
1896 has_edits
1897 }
1898
1899 /// Checks if the buffer has unsaved changes.
1900 pub fn is_dirty(&self) -> bool {
1901 if self.capability == Capability::ReadOnly {
1902 return false;
1903 }
1904 if self.has_conflict || self.has_unsaved_edits() {
1905 return true;
1906 }
1907 match self.file.as_ref().map(|f| f.disk_state()) {
1908 Some(DiskState::New) => !self.is_empty(),
1909 Some(DiskState::Deleted) => true,
1910 _ => false,
1911 }
1912 }
1913
1914 /// Checks if the buffer and its file have both changed since the buffer
1915 /// was last saved or reloaded.
1916 pub fn has_conflict(&self) -> bool {
1917 if self.has_conflict {
1918 return true;
1919 }
1920 let Some(file) = self.file.as_ref() else {
1921 return false;
1922 };
1923 match file.disk_state() {
1924 DiskState::New => false,
1925 DiskState::Present { mtime } => match self.saved_mtime {
1926 Some(saved_mtime) => {
1927 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1928 }
1929 None => true,
1930 },
1931 DiskState::Deleted => true,
1932 }
1933 }
1934
1935 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1936 pub fn subscribe(&mut self) -> Subscription {
1937 self.text.subscribe()
1938 }
1939
1940 /// Starts a transaction, if one is not already in-progress. When undoing or
1941 /// redoing edits, all of the edits performed within a transaction are undone
1942 /// or redone together.
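    ///
    /// A small sketch of grouping two edits into one undo step (not compiled as a
    /// doctest; runs inside `Buffer::update`, so `buffer` is a `&mut Buffer` and
    /// `cx` a `&mut Context<Buffer>`):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "// header\n")], None, cx);
    /// buffer.edit([(buffer.len()..buffer.len(), "\n// footer\n")], None, cx);
    /// let transaction_id = buffer.end_transaction(cx);
    /// // A single undo now reverts both edits together.
    /// buffer.undo(cx);
    /// ```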
1943 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1944 self.start_transaction_at(Instant::now())
1945 }
1946
1947 /// Starts a transaction, providing the current time. Subsequent transactions
1948 /// that occur within a short period of time will be grouped together. This
1949 /// is controlled by the buffer's undo grouping duration.
1950 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1951 self.transaction_depth += 1;
1952 if self.was_dirty_before_starting_transaction.is_none() {
1953 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1954 }
1955 self.text.start_transaction_at(now)
1956 }
1957
1958 /// Terminates the current transaction, if this is the outermost transaction.
1959 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
1960 self.end_transaction_at(Instant::now(), cx)
1961 }
1962
1963 /// Terminates the current transaction, providing the current time. Subsequent transactions
1964 /// that occur within a short period of time will be grouped together. This
1965 /// is controlled by the buffer's undo grouping duration.
1966 pub fn end_transaction_at(
1967 &mut self,
1968 now: Instant,
1969 cx: &mut Context<Self>,
1970 ) -> Option<TransactionId> {
1971 assert!(self.transaction_depth > 0);
1972 self.transaction_depth -= 1;
1973 let was_dirty = if self.transaction_depth == 0 {
1974 self.was_dirty_before_starting_transaction.take().unwrap()
1975 } else {
1976 false
1977 };
1978 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1979 self.did_edit(&start_version, was_dirty, cx);
1980 Some(transaction_id)
1981 } else {
1982 None
1983 }
1984 }
1985
1986 /// Manually add a transaction to the buffer's undo history.
1987 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1988 self.text.push_transaction(transaction, now);
1989 }
1990
    /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
1993 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1994 self.text.finalize_last_transaction()
1995 }
1996
1997 /// Manually group all changes since a given transaction.
1998 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1999 self.text.group_until_transaction(transaction_id);
2000 }
2001
    /// Manually remove a transaction from the buffer's undo history.
2003 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
2004 self.text.forget_transaction(transaction_id);
2005 }
2006
2007 /// Manually merge two adjacent transactions in the buffer's undo history.
2008 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2009 self.text.merge_transactions(transaction, destination);
2010 }
2011
2012 /// Waits for the buffer to receive operations with the given timestamps.
2013 pub fn wait_for_edits(
2014 &mut self,
2015 edit_ids: impl IntoIterator<Item = clock::Lamport>,
2016 ) -> impl Future<Output = Result<()>> {
2017 self.text.wait_for_edits(edit_ids)
2018 }
2019
2020 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2021 pub fn wait_for_anchors(
2022 &mut self,
2023 anchors: impl IntoIterator<Item = Anchor>,
2024 ) -> impl 'static + Future<Output = Result<()>> {
2025 self.text.wait_for_anchors(anchors)
2026 }
2027
2028 /// Waits for the buffer to receive operations up to the given version.
2029 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
2030 self.text.wait_for_version(version)
2031 }
2032
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2035 pub fn give_up_waiting(&mut self) {
2036 self.text.give_up_waiting();
2037 }
2038
2039 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2040 pub fn set_active_selections(
2041 &mut self,
2042 selections: Arc<[Selection<Anchor>]>,
2043 line_mode: bool,
2044 cursor_shape: CursorShape,
2045 cx: &mut Context<Self>,
2046 ) {
2047 let lamport_timestamp = self.text.lamport_clock.tick();
2048 self.remote_selections.insert(
2049 self.text.replica_id(),
2050 SelectionSet {
2051 selections: selections.clone(),
2052 lamport_timestamp,
2053 line_mode,
2054 cursor_shape,
2055 },
2056 );
2057 self.send_operation(
2058 Operation::UpdateSelections {
2059 selections,
2060 line_mode,
2061 lamport_timestamp,
2062 cursor_shape,
2063 },
2064 true,
2065 cx,
2066 );
2067 self.non_text_state_update_count += 1;
2068 cx.notify();
2069 }
2070
2071 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2072 /// this replica.
2073 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2074 if self
2075 .remote_selections
2076 .get(&self.text.replica_id())
2077 .map_or(true, |set| !set.selections.is_empty())
2078 {
2079 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2080 }
2081 }
2082
2083 /// Replaces the buffer's entire text.
2084 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2085 where
2086 T: Into<Arc<str>>,
2087 {
2088 self.autoindent_requests.clear();
2089 self.edit([(0..self.len(), text)], None, cx)
2090 }
2091
2092 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2093 /// delete, and a string of text to insert at that location.
2094 ///
2095 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2096 /// request for the edited ranges, which will be processed when the buffer finishes
2097 /// parsing.
2098 ///
2099 /// Parsing takes place at the end of a transaction, and may compute synchronously
2100 /// or asynchronously, depending on the changes.
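    ///
    /// A brief sketch (not compiled as a doctest; runs inside `Buffer::update`,
    /// so `cx` is a `&mut Context<Buffer>`):
    ///
    /// ```ignore
    /// // Insert a statement at the start of the buffer and let the language's
    /// // indent rules determine its indentation.
    /// buffer.edit(
    ///     [(0..0, "let x = 1;\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```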
2101 pub fn edit<I, S, T>(
2102 &mut self,
2103 edits_iter: I,
2104 autoindent_mode: Option<AutoindentMode>,
2105 cx: &mut Context<Self>,
2106 ) -> Option<clock::Lamport>
2107 where
2108 I: IntoIterator<Item = (Range<S>, T)>,
2109 S: ToOffset,
2110 T: Into<Arc<str>>,
2111 {
2112 // Skip invalid edits and coalesce contiguous ones.
2113 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2114 for (range, new_text) in edits_iter {
2115 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2116 if range.start > range.end {
2117 mem::swap(&mut range.start, &mut range.end);
2118 }
2119 let new_text = new_text.into();
2120 if !new_text.is_empty() || !range.is_empty() {
2121 if let Some((prev_range, prev_text)) = edits.last_mut() {
2122 if prev_range.end >= range.start {
2123 prev_range.end = cmp::max(prev_range.end, range.end);
2124 *prev_text = format!("{prev_text}{new_text}").into();
2125 } else {
2126 edits.push((range, new_text));
2127 }
2128 } else {
2129 edits.push((range, new_text));
2130 }
2131 }
2132 }
2133 if edits.is_empty() {
2134 return None;
2135 }
2136
2137 self.start_transaction();
2138 self.pending_autoindent.take();
2139 let autoindent_request = autoindent_mode
2140 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2141
2142 let edit_operation = self.text.edit(edits.iter().cloned());
2143 let edit_id = edit_operation.timestamp();
2144
2145 if let Some((before_edit, mode)) = autoindent_request {
2146 let mut delta = 0isize;
2147 let entries = edits
2148 .into_iter()
2149 .enumerate()
2150 .zip(&edit_operation.as_edit().unwrap().new_text)
2151 .map(|((ix, (range, _)), new_text)| {
2152 let new_text_length = new_text.len();
2153 let old_start = range.start.to_point(&before_edit);
2154 let new_start = (delta + range.start as isize) as usize;
2155 let range_len = range.end - range.start;
2156 delta += new_text_length as isize - range_len as isize;
2157
2158 // Decide what range of the insertion to auto-indent, and whether
2159 // the first line of the insertion should be considered a newly-inserted line
2160 // or an edit to an existing line.
2161 let mut range_of_insertion_to_indent = 0..new_text_length;
2162 let mut first_line_is_new = true;
2163
2164 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2165 let old_line_end = before_edit.line_len(old_start.row);
2166
2167 if old_start.column > old_line_start {
2168 first_line_is_new = false;
2169 }
2170
2171 if !new_text.contains('\n')
2172 && (old_start.column + (range_len as u32) < old_line_end
2173 || old_line_end == old_line_start)
2174 {
2175 first_line_is_new = false;
2176 }
2177
2178 // When inserting text starting with a newline, avoid auto-indenting the
2179 // previous line.
2180 if new_text.starts_with('\n') {
2181 range_of_insertion_to_indent.start += 1;
2182 first_line_is_new = true;
2183 }
2184
2185 let mut original_indent_column = None;
2186 if let AutoindentMode::Block {
2187 original_indent_columns,
2188 } = &mode
2189 {
2190 original_indent_column =
2191 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
2192 indent_size_for_text(
2193 new_text[range_of_insertion_to_indent.clone()].chars(),
2194 )
2195 .len
2196 }));
2197
2198 // Avoid auto-indenting the line after the edit.
2199 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2200 range_of_insertion_to_indent.end -= 1;
2201 }
2202 }
2203
2204 AutoindentRequestEntry {
2205 first_line_is_new,
2206 original_indent_column,
2207 indent_size: before_edit.language_indent_size_at(range.start, cx),
2208 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2209 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2210 }
2211 })
2212 .collect();
2213
2214 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2215 before_edit,
2216 entries,
2217 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2218 ignore_empty_lines: false,
2219 }));
2220 }
2221
2222 self.end_transaction(cx);
2223 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2224 Some(edit_id)
2225 }
2226
2227 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2228 if self.edits_since::<usize>(old_version).next().is_none() {
2229 return;
2230 }
2231
2232 self.reparse(cx);
2233
2234 cx.emit(BufferEvent::Edited);
2235 if was_dirty != self.is_dirty() {
2236 cx.emit(BufferEvent::DirtyChanged);
2237 }
2238 cx.notify();
2239 }
2240
2241 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2242 where
2243 I: IntoIterator<Item = Range<T>>,
2244 T: ToOffset + Copy,
2245 {
2246 let before_edit = self.snapshot();
2247 let entries = ranges
2248 .into_iter()
2249 .map(|range| AutoindentRequestEntry {
2250 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2251 first_line_is_new: true,
2252 indent_size: before_edit.language_indent_size_at(range.start, cx),
2253 original_indent_column: None,
2254 })
2255 .collect();
2256 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2257 before_edit,
2258 entries,
2259 is_block_mode: false,
2260 ignore_empty_lines: true,
2261 }));
2262 self.request_autoindent(cx);
2263 }
2264
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
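    ///
    /// A rough usage sketch (not compiled as a doctest; `cursor` is a hypothetical
    /// `Point` at the current cursor position):
    ///
    /// ```ignore
    /// let new_line_start = buffer.insert_empty_line(cursor, true, true, cx);
    /// // `new_line_start` points at the beginning of the newly created blank line,
    /// // with blank lines ensured above and below it.
    /// ```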
2267 pub fn insert_empty_line(
2268 &mut self,
2269 position: impl ToPoint,
2270 space_above: bool,
2271 space_below: bool,
2272 cx: &mut Context<Self>,
2273 ) -> Point {
2274 let mut position = position.to_point(self);
2275
2276 self.start_transaction();
2277
2278 self.edit(
2279 [(position..position, "\n")],
2280 Some(AutoindentMode::EachLine),
2281 cx,
2282 );
2283
2284 if position.column > 0 {
2285 position += Point::new(1, 0);
2286 }
2287
2288 if !self.is_line_blank(position.row) {
2289 self.edit(
2290 [(position..position, "\n")],
2291 Some(AutoindentMode::EachLine),
2292 cx,
2293 );
2294 }
2295
2296 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2297 self.edit(
2298 [(position..position, "\n")],
2299 Some(AutoindentMode::EachLine),
2300 cx,
2301 );
2302 position.row += 1;
2303 }
2304
2305 if space_below
2306 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2307 {
2308 self.edit(
2309 [(position..position, "\n")],
2310 Some(AutoindentMode::EachLine),
2311 cx,
2312 );
2313 }
2314
2315 self.end_transaction(cx);
2316
2317 position
2318 }
2319
2320 /// Applies the given remote operations to the buffer.
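    ///
    /// A condensed sketch of feeding operations from a remote peer into a local
    /// replica (not compiled as a doctest; deserializing the operations from the
    /// wire format is elided):
    ///
    /// ```ignore
    /// // `ops: Vec<Operation>` were produced by another replica's
    /// // `BufferEvent::Operation` events.
    /// buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx));
    /// ```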
2321 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2322 self.pending_autoindent.take();
2323 let was_dirty = self.is_dirty();
2324 let old_version = self.version.clone();
2325 let mut deferred_ops = Vec::new();
2326 let buffer_ops = ops
2327 .into_iter()
2328 .filter_map(|op| match op {
2329 Operation::Buffer(op) => Some(op),
2330 _ => {
2331 if self.can_apply_op(&op) {
2332 self.apply_op(op, cx);
2333 } else {
2334 deferred_ops.push(op);
2335 }
2336 None
2337 }
2338 })
2339 .collect::<Vec<_>>();
2340 for operation in buffer_ops.iter() {
2341 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2342 }
2343 self.text.apply_ops(buffer_ops);
2344 self.deferred_ops.insert(deferred_ops);
2345 self.flush_deferred_ops(cx);
2346 self.did_edit(&old_version, was_dirty, cx);
2347 // Notify independently of whether the buffer was edited as the operations could include a
2348 // selection update.
2349 cx.notify();
2350 }
2351
2352 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2353 let mut deferred_ops = Vec::new();
2354 for op in self.deferred_ops.drain().iter().cloned() {
2355 if self.can_apply_op(&op) {
2356 self.apply_op(op, cx);
2357 } else {
2358 deferred_ops.push(op);
2359 }
2360 }
2361 self.deferred_ops.insert(deferred_ops);
2362 }
2363
2364 pub fn has_deferred_ops(&self) -> bool {
2365 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2366 }
2367
2368 fn can_apply_op(&self, operation: &Operation) -> bool {
2369 match operation {
2370 Operation::Buffer(_) => {
2371 unreachable!("buffer operations should never be applied at this layer")
2372 }
2373 Operation::UpdateDiagnostics {
2374 diagnostics: diagnostic_set,
2375 ..
2376 } => diagnostic_set.iter().all(|diagnostic| {
2377 self.text.can_resolve(&diagnostic.range.start)
2378 && self.text.can_resolve(&diagnostic.range.end)
2379 }),
2380 Operation::UpdateSelections { selections, .. } => selections
2381 .iter()
2382 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2383 Operation::UpdateCompletionTriggers { .. } => true,
2384 }
2385 }
2386
2387 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2388 match operation {
2389 Operation::Buffer(_) => {
2390 unreachable!("buffer operations should never be applied at this layer")
2391 }
2392 Operation::UpdateDiagnostics {
2393 server_id,
2394 diagnostics: diagnostic_set,
2395 lamport_timestamp,
2396 } => {
2397 let snapshot = self.snapshot();
2398 self.apply_diagnostic_update(
2399 server_id,
2400 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2401 lamport_timestamp,
2402 cx,
2403 );
2404 }
2405 Operation::UpdateSelections {
2406 selections,
2407 lamport_timestamp,
2408 line_mode,
2409 cursor_shape,
2410 } => {
2411 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2412 if set.lamport_timestamp > lamport_timestamp {
2413 return;
2414 }
2415 }
2416
2417 self.remote_selections.insert(
2418 lamport_timestamp.replica_id,
2419 SelectionSet {
2420 selections,
2421 lamport_timestamp,
2422 line_mode,
2423 cursor_shape,
2424 },
2425 );
2426 self.text.lamport_clock.observe(lamport_timestamp);
2427 self.non_text_state_update_count += 1;
2428 }
2429 Operation::UpdateCompletionTriggers {
2430 triggers,
2431 lamport_timestamp,
2432 server_id,
2433 } => {
2434 if triggers.is_empty() {
2435 self.completion_triggers_per_language_server
2436 .remove(&server_id);
2437 self.completion_triggers = self
2438 .completion_triggers_per_language_server
2439 .values()
2440 .flat_map(|triggers| triggers.into_iter().cloned())
2441 .collect();
2442 } else {
2443 self.completion_triggers_per_language_server
2444 .insert(server_id, triggers.iter().cloned().collect());
2445 self.completion_triggers.extend(triggers);
2446 }
2447 self.text.lamport_clock.observe(lamport_timestamp);
2448 }
2449 }
2450 }
2451
2452 fn apply_diagnostic_update(
2453 &mut self,
2454 server_id: LanguageServerId,
2455 diagnostics: DiagnosticSet,
2456 lamport_timestamp: clock::Lamport,
2457 cx: &mut Context<Self>,
2458 ) {
2459 if lamport_timestamp > self.diagnostics_timestamp {
2460 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2461 if diagnostics.is_empty() {
2462 if let Ok(ix) = ix {
2463 self.diagnostics.remove(ix);
2464 }
2465 } else {
2466 match ix {
2467 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2468 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2469 };
2470 }
2471 self.diagnostics_timestamp = lamport_timestamp;
2472 self.non_text_state_update_count += 1;
2473 self.text.lamport_clock.observe(lamport_timestamp);
2474 cx.notify();
2475 cx.emit(BufferEvent::DiagnosticsUpdated);
2476 }
2477 }
2478
2479 fn send_operation(&self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2480 cx.emit(BufferEvent::Operation {
2481 operation,
2482 is_local,
2483 });
2484 }
2485
2486 /// Removes the selections for a given peer.
2487 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2488 self.remote_selections.remove(&replica_id);
2489 cx.notify();
2490 }
2491
2492 /// Undoes the most recent transaction.
2493 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2494 let was_dirty = self.is_dirty();
2495 let old_version = self.version.clone();
2496
2497 if let Some((transaction_id, operation)) = self.text.undo() {
2498 self.send_operation(Operation::Buffer(operation), true, cx);
2499 self.did_edit(&old_version, was_dirty, cx);
2500 Some(transaction_id)
2501 } else {
2502 None
2503 }
2504 }
2505
2506 /// Manually undoes a specific transaction in the buffer's undo history.
2507 pub fn undo_transaction(
2508 &mut self,
2509 transaction_id: TransactionId,
2510 cx: &mut Context<Self>,
2511 ) -> bool {
2512 let was_dirty = self.is_dirty();
2513 let old_version = self.version.clone();
2514 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2515 self.send_operation(Operation::Buffer(operation), true, cx);
2516 self.did_edit(&old_version, was_dirty, cx);
2517 true
2518 } else {
2519 false
2520 }
2521 }
2522
2523 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2524 pub fn undo_to_transaction(
2525 &mut self,
2526 transaction_id: TransactionId,
2527 cx: &mut Context<Self>,
2528 ) -> bool {
2529 let was_dirty = self.is_dirty();
2530 let old_version = self.version.clone();
2531
2532 let operations = self.text.undo_to_transaction(transaction_id);
2533 let undone = !operations.is_empty();
2534 for operation in operations {
2535 self.send_operation(Operation::Buffer(operation), true, cx);
2536 }
2537 if undone {
2538 self.did_edit(&old_version, was_dirty, cx)
2539 }
2540 undone
2541 }
2542
2543 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2544 let was_dirty = self.is_dirty();
2545 let operation = self.text.undo_operations(counts);
2546 let old_version = self.version.clone();
2547 self.send_operation(Operation::Buffer(operation), true, cx);
2548 self.did_edit(&old_version, was_dirty, cx);
2549 }
2550
    /// Redoes the most recently undone transaction.
2552 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2553 let was_dirty = self.is_dirty();
2554 let old_version = self.version.clone();
2555
2556 if let Some((transaction_id, operation)) = self.text.redo() {
2557 self.send_operation(Operation::Buffer(operation), true, cx);
2558 self.did_edit(&old_version, was_dirty, cx);
2559 Some(transaction_id)
2560 } else {
2561 None
2562 }
2563 }
2564
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2566 pub fn redo_to_transaction(
2567 &mut self,
2568 transaction_id: TransactionId,
2569 cx: &mut Context<Self>,
2570 ) -> bool {
2571 let was_dirty = self.is_dirty();
2572 let old_version = self.version.clone();
2573
2574 let operations = self.text.redo_to_transaction(transaction_id);
2575 let redone = !operations.is_empty();
2576 for operation in operations {
2577 self.send_operation(Operation::Buffer(operation), true, cx);
2578 }
2579 if redone {
2580 self.did_edit(&old_version, was_dirty, cx)
2581 }
2582 redone
2583 }
2584
2585 /// Override current completion triggers with the user-provided completion triggers.
2586 pub fn set_completion_triggers(
2587 &mut self,
2588 server_id: LanguageServerId,
2589 triggers: BTreeSet<String>,
2590 cx: &mut Context<Self>,
2591 ) {
2592 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2593 if triggers.is_empty() {
2594 self.completion_triggers_per_language_server
2595 .remove(&server_id);
2596 self.completion_triggers = self
2597 .completion_triggers_per_language_server
2598 .values()
2599 .flat_map(|triggers| triggers.into_iter().cloned())
2600 .collect();
2601 } else {
2602 self.completion_triggers_per_language_server
2603 .insert(server_id, triggers.clone());
2604 self.completion_triggers.extend(triggers.iter().cloned());
2605 }
2606 self.send_operation(
2607 Operation::UpdateCompletionTriggers {
2608 triggers: triggers.iter().cloned().collect(),
2609 lamport_timestamp: self.completion_triggers_timestamp,
2610 server_id,
2611 },
2612 true,
2613 cx,
2614 );
2615 cx.notify();
2616 }
2617
    /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2620 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2621 &self.completion_triggers
2622 }
2623
2624 /// Call this directly after performing edits to prevent the preview tab
2625 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2626 /// to return false until there are additional edits.
2627 pub fn refresh_preview(&mut self) {
2628 self.preview_version = self.version.clone();
2629 }
2630
2631 /// Whether we should preserve the preview status of a tab containing this buffer.
2632 pub fn preserve_preview(&self) -> bool {
2633 !self.has_edits_since(&self.preview_version)
2634 }
2635}
2636
2637#[doc(hidden)]
2638#[cfg(any(test, feature = "test-support"))]
2639impl Buffer {
2640 pub fn edit_via_marked_text(
2641 &mut self,
2642 marked_string: &str,
2643 autoindent_mode: Option<AutoindentMode>,
2644 cx: &mut Context<Self>,
2645 ) {
2646 let edits = self.edits_for_marked_text(marked_string);
2647 self.edit(edits, autoindent_mode, cx);
2648 }
2649
2650 pub fn set_group_interval(&mut self, group_interval: Duration) {
2651 self.text.set_group_interval(group_interval);
2652 }
2653
2654 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2655 where
2656 T: rand::Rng,
2657 {
2658 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2659 let mut last_end = None;
2660 for _ in 0..old_range_count {
2661 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2662 break;
2663 }
2664
2665 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2666 let mut range = self.random_byte_range(new_start, rng);
2667 if rng.gen_bool(0.2) {
2668 mem::swap(&mut range.start, &mut range.end);
2669 }
2670 last_end = Some(range.end);
2671
2672 let new_text_len = rng.gen_range(0..10);
2673 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2674 new_text = new_text.to_uppercase();
2675
2676 edits.push((range, new_text));
2677 }
2678 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2679 self.edit(edits, None, cx);
2680 }
2681
2682 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2683 let was_dirty = self.is_dirty();
2684 let old_version = self.version.clone();
2685
2686 let ops = self.text.randomly_undo_redo(rng);
2687 if !ops.is_empty() {
2688 for op in ops {
2689 self.send_operation(Operation::Buffer(op), true, cx);
2690 self.did_edit(&old_version, was_dirty, cx);
2691 }
2692 }
2693 }
2694}
2695
2696impl EventEmitter<BufferEvent> for Buffer {}
2697
2698impl Deref for Buffer {
2699 type Target = TextBuffer;
2700
2701 fn deref(&self) -> &Self::Target {
2702 &self.text
2703 }
2704}
2705
2706impl BufferSnapshot {
2707 /// Returns [`IndentSize`] for a given line that respects user settings and
2708 /// language preferences.
2709 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2710 indent_size_for_line(self, row)
2711 }
2712
2713 /// Returns [`IndentSize`] for a given position that respects user settings
2714 /// and language preferences.
2715 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2716 let settings = language_settings(
2717 self.language_at(position).map(|l| l.name()),
2718 self.file(),
2719 cx,
2720 );
2721 if settings.hard_tabs {
2722 IndentSize::tab()
2723 } else {
2724 IndentSize::spaces(settings.tab_size.get())
2725 }
2726 }
2727
2728 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2729 /// is passed in as `single_indent_size`.
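    ///
    /// A small sketch (not compiled as a doctest; `snapshot` is assumed to be a
    /// `BufferSnapshot`):
    ///
    /// ```ignore
    /// // Suggest indentation for rows 4 through 7, using 4-space indents.
    /// let suggestions = snapshot.suggested_indents(4..8, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: {} column(s)", indent.len);
    /// }
    /// ```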
2730 pub fn suggested_indents(
2731 &self,
2732 rows: impl Iterator<Item = u32>,
2733 single_indent_size: IndentSize,
2734 ) -> BTreeMap<u32, IndentSize> {
2735 let mut result = BTreeMap::new();
2736
2737 for row_range in contiguous_ranges(rows, 10) {
2738 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2739 Some(suggestions) => suggestions,
2740 _ => break,
2741 };
2742
2743 for (row, suggestion) in row_range.zip(suggestions) {
2744 let indent_size = if let Some(suggestion) = suggestion {
2745 result
2746 .get(&suggestion.basis_row)
2747 .copied()
2748 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2749 .with_delta(suggestion.delta, single_indent_size)
2750 } else {
2751 self.indent_size_for_line(row)
2752 };
2753
2754 result.insert(row, indent_size);
2755 }
2756 }
2757
2758 result
2759 }
2760
2761 fn suggest_autoindents(
2762 &self,
2763 row_range: Range<u32>,
2764 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2765 let config = &self.language.as_ref()?.config;
2766 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2767
2768 // Find the suggested indentation ranges based on the syntax tree.
2769 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2770 let end = Point::new(row_range.end, 0);
2771 let range = (start..end).to_offset(&self.text);
2772 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2773 Some(&grammar.indents_config.as_ref()?.query)
2774 });
2775 let indent_configs = matches
2776 .grammars()
2777 .iter()
2778 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2779 .collect::<Vec<_>>();
2780
2781 let mut indent_ranges = Vec::<Range<Point>>::new();
2782 let mut outdent_positions = Vec::<Point>::new();
2783 while let Some(mat) = matches.peek() {
2784 let mut start: Option<Point> = None;
2785 let mut end: Option<Point> = None;
2786
2787 let config = &indent_configs[mat.grammar_index];
2788 for capture in mat.captures {
2789 if capture.index == config.indent_capture_ix {
2790 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2791 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2792 } else if Some(capture.index) == config.start_capture_ix {
2793 start = Some(Point::from_ts_point(capture.node.end_position()));
2794 } else if Some(capture.index) == config.end_capture_ix {
2795 end = Some(Point::from_ts_point(capture.node.start_position()));
2796 } else if Some(capture.index) == config.outdent_capture_ix {
2797 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2798 }
2799 }
2800
2801 matches.advance();
2802 if let Some((start, end)) = start.zip(end) {
2803 if start.row == end.row {
2804 continue;
2805 }
2806
2807 let range = start..end;
2808 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2809 Err(ix) => indent_ranges.insert(ix, range),
2810 Ok(ix) => {
2811 let prev_range = &mut indent_ranges[ix];
2812 prev_range.end = prev_range.end.max(range.end);
2813 }
2814 }
2815 }
2816 }
2817
2818 let mut error_ranges = Vec::<Range<Point>>::new();
2819 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2820 grammar.error_query.as_ref()
2821 });
2822 while let Some(mat) = matches.peek() {
2823 let node = mat.captures[0].node;
2824 let start = Point::from_ts_point(node.start_position());
2825 let end = Point::from_ts_point(node.end_position());
2826 let range = start..end;
2827 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2828 Ok(ix) | Err(ix) => ix,
2829 };
2830 let mut end_ix = ix;
2831 while let Some(existing_range) = error_ranges.get(end_ix) {
2832 if existing_range.end < end {
2833 end_ix += 1;
2834 } else {
2835 break;
2836 }
2837 }
2838 error_ranges.splice(ix..end_ix, [range]);
2839 matches.advance();
2840 }
2841
2842 outdent_positions.sort();
2843 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent_position
            // and set its end to the outdent position.
2846 if let Some(range_to_truncate) = indent_ranges
2847 .iter_mut()
2848 .filter(|indent_range| indent_range.contains(&outdent_position))
2849 .last()
2850 {
2851 range_to_truncate.end = outdent_position;
2852 }
2853 }
2854
        // Find the suggested indentation increases and decreases based on regexes.
2856 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2857 self.for_each_line(
2858 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2859 ..Point::new(row_range.end, 0),
2860 |row, line| {
2861 if config
2862 .decrease_indent_pattern
2863 .as_ref()
2864 .map_or(false, |regex| regex.is_match(line))
2865 {
2866 indent_change_rows.push((row, Ordering::Less));
2867 }
2868 if config
2869 .increase_indent_pattern
2870 .as_ref()
2871 .map_or(false, |regex| regex.is_match(line))
2872 {
2873 indent_change_rows.push((row + 1, Ordering::Greater));
2874 }
2875 },
2876 );
2877
2878 let mut indent_changes = indent_change_rows.into_iter().peekable();
2879 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2880 prev_non_blank_row.unwrap_or(0)
2881 } else {
2882 row_range.start.saturating_sub(1)
2883 };
2884 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2885 Some(row_range.map(move |row| {
2886 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2887
2888 let mut indent_from_prev_row = false;
2889 let mut outdent_from_prev_row = false;
2890 let mut outdent_to_row = u32::MAX;
2891 let mut from_regex = false;
2892
2893 while let Some((indent_row, delta)) = indent_changes.peek() {
2894 match indent_row.cmp(&row) {
2895 Ordering::Equal => match delta {
2896 Ordering::Less => {
2897 from_regex = true;
2898 outdent_from_prev_row = true
2899 }
2900 Ordering::Greater => {
2901 indent_from_prev_row = true;
2902 from_regex = true
2903 }
2904 _ => {}
2905 },
2906
2907 Ordering::Greater => break,
2908 Ordering::Less => {}
2909 }
2910
2911 indent_changes.next();
2912 }
2913
2914 for range in &indent_ranges {
2915 if range.start.row >= row {
2916 break;
2917 }
2918 if range.start.row == prev_row && range.end > row_start {
2919 indent_from_prev_row = true;
2920 }
2921 if range.end > prev_row_start && range.end <= row_start {
2922 outdent_to_row = outdent_to_row.min(range.start.row);
2923 }
2924 }
2925
2926 let within_error = error_ranges
2927 .iter()
2928 .any(|e| e.start.row < row && e.end > row_start);
2929
2930 let suggestion = if outdent_to_row == prev_row
2931 || (outdent_from_prev_row && indent_from_prev_row)
2932 {
2933 Some(IndentSuggestion {
2934 basis_row: prev_row,
2935 delta: Ordering::Equal,
2936 within_error: within_error && !from_regex,
2937 })
2938 } else if indent_from_prev_row {
2939 Some(IndentSuggestion {
2940 basis_row: prev_row,
2941 delta: Ordering::Greater,
2942 within_error: within_error && !from_regex,
2943 })
2944 } else if outdent_to_row < prev_row {
2945 Some(IndentSuggestion {
2946 basis_row: outdent_to_row,
2947 delta: Ordering::Equal,
2948 within_error: within_error && !from_regex,
2949 })
2950 } else if outdent_from_prev_row {
2951 Some(IndentSuggestion {
2952 basis_row: prev_row,
2953 delta: Ordering::Less,
2954 within_error: within_error && !from_regex,
2955 })
2956 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2957 {
2958 Some(IndentSuggestion {
2959 basis_row: prev_row,
2960 delta: Ordering::Equal,
2961 within_error: within_error && !from_regex,
2962 })
2963 } else {
2964 None
2965 };
2966
2967 prev_row = row;
2968 prev_row_start = row_start;
2969 suggestion
2970 }))
2971 }
2972
2973 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2974 while row > 0 {
2975 row -= 1;
2976 if !self.is_line_blank(row) {
2977 return Some(row);
2978 }
2979 }
2980 None
2981 }
2982
2983 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
2984 let captures = self.syntax.captures(range, &self.text, |grammar| {
2985 grammar.highlights_query.as_ref()
2986 });
2987 let highlight_maps = captures
2988 .grammars()
2989 .iter()
2990 .map(|grammar| grammar.highlight_map())
2991 .collect();
2992 (captures, highlight_maps)
2993 }
2994
2995 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
2996 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
2997 /// returned in chunks where each chunk has a single syntax highlighting style and
2998 /// diagnostic status.
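    ///
    /// A minimal sketch of collecting the text while noting which chunks carry a
    /// highlight (not compiled as a doctest; `snapshot` is a `BufferSnapshot`):
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     if chunk.syntax_highlight_id.is_some() {
    ///         // This chunk is covered by a syntax highlight capture.
    ///     }
    ///     text.push_str(chunk.text);
    /// }
    /// ```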
2999 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
3000 let range = range.start.to_offset(self)..range.end.to_offset(self);
3001
3002 let mut syntax = None;
3003 if language_aware {
3004 syntax = Some(self.get_highlights(range.clone()));
3005 }
3006 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3007 let diagnostics = language_aware;
3008 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3009 }
3010
3011 pub fn highlighted_text_for_range<T: ToOffset>(
3012 &self,
3013 range: Range<T>,
3014 override_style: Option<HighlightStyle>,
3015 syntax_theme: &SyntaxTheme,
3016 ) -> HighlightedText {
3017 HighlightedText::from_buffer_range(
3018 range,
3019 &self.text,
3020 &self.syntax,
3021 override_style,
3022 syntax_theme,
3023 )
3024 }
3025
    /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a new string for each line.
3028 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3029 let mut line = String::new();
3030 let mut row = range.start.row;
3031 for chunk in self
3032 .as_rope()
3033 .chunks_in_range(range.to_offset(self))
3034 .chain(["\n"])
3035 {
3036 for (newline_ix, text) in chunk.split('\n').enumerate() {
3037 if newline_ix > 0 {
3038 callback(row, &line);
3039 row += 1;
3040 line.clear();
3041 }
3042 line.push_str(text);
3043 }
3044 }
3045 }
3046
3047 /// Iterates over every [`SyntaxLayer`] in the buffer.
3048 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
3049 self.syntax
3050 .layers_for_range(0..self.len(), &self.text, true)
3051 }
3052
3053 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
3054 let offset = position.to_offset(self);
3055 self.syntax
3056 .layers_for_range(offset..offset, &self.text, false)
3057 .filter(|l| l.node().end_byte() > offset)
3058 .last()
3059 }
3060
3061 /// Returns the main [`Language`].
3062 pub fn language(&self) -> Option<&Arc<Language>> {
3063 self.language.as_ref()
3064 }
3065
3066 /// Returns the [`Language`] at the given location.
3067 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3068 self.syntax_layer_at(position)
3069 .map(|info| info.language)
3070 .or(self.language.as_ref())
3071 }
3072
3073 /// Returns the settings for the language at the given location.
3074 pub fn settings_at<'a, D: ToOffset>(
3075 &'a self,
3076 position: D,
3077 cx: &'a App,
3078 ) -> Cow<'a, LanguageSettings> {
3079 language_settings(
3080 self.language_at(position).map(|l| l.name()),
3081 self.file.as_ref(),
3082 cx,
3083 )
3084 }
3085
3086 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3087 CharClassifier::new(self.language_scope_at(point))
3088 }
3089
3090 /// Returns the [`LanguageScope`] at the given location.
3091 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3092 let offset = position.to_offset(self);
3093 let mut scope = None;
3094 let mut smallest_range: Option<Range<usize>> = None;
3095
3096 // Use the layer that has the smallest node intersecting the given point.
3097 for layer in self
3098 .syntax
3099 .layers_for_range(offset..offset, &self.text, false)
3100 {
3101 let mut cursor = layer.node().walk();
3102
3103 let mut range = None;
3104 loop {
3105 let child_range = cursor.node().byte_range();
3106 if !child_range.to_inclusive().contains(&offset) {
3107 break;
3108 }
3109
3110 range = Some(child_range);
3111 if cursor.goto_first_child_for_byte(offset).is_none() {
3112 break;
3113 }
3114 }
3115
3116 if let Some(range) = range {
3117 if smallest_range
3118 .as_ref()
3119 .map_or(true, |smallest_range| range.len() < smallest_range.len())
3120 {
3121 smallest_range = Some(range);
3122 scope = Some(LanguageScope {
3123 language: layer.language.clone(),
3124 override_id: layer.override_id(offset, &self.text),
3125 });
3126 }
3127 }
3128 }
3129
3130 scope.or_else(|| {
3131 self.language.clone().map(|language| LanguageScope {
3132 language,
3133 override_id: None,
3134 })
3135 })
3136 }
3137
3138 /// Returns a tuple of the range and character kind of the word
3139 /// surrounding the given position.
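    ///
    /// An illustrative sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// // With buffer text "let foo = 1;" and an offset inside "foo", the
    /// // returned range covers the bytes of "foo" and the kind is
    /// // `Some(CharKind::Word)`.
    /// let (range, kind) = snapshot.surrounding_word(5);
    /// ```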
3140 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3141 let mut start = start.to_offset(self);
3142 let mut end = start;
3143 let mut next_chars = self.chars_at(start).peekable();
3144 let mut prev_chars = self.reversed_chars_at(start).peekable();
3145
3146 let classifier = self.char_classifier_at(start);
3147 let word_kind = cmp::max(
3148 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3149 next_chars.peek().copied().map(|c| classifier.kind(c)),
3150 );
3151
3152 for ch in prev_chars {
3153 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3154 start -= ch.len_utf8();
3155 } else {
3156 break;
3157 }
3158 }
3159
3160 for ch in next_chars {
3161 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3162 end += ch.len_utf8();
3163 } else {
3164 break;
3165 }
3166 }
3167
3168 (start..end, word_kind)
3169 }
3170
3171 /// Returns the closest syntax node enclosing the given range.
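    ///
    /// A rough sketch of expanding a selection to the enclosing syntax node (not
    /// compiled as a doctest; `selection` is a hypothetical `Range<usize>`):
    ///
    /// ```ignore
    /// if let Some(node) = snapshot.syntax_ancestor(selection.clone()) {
    ///     // `node.byte_range()` strictly contains `selection` and can be used
    ///     // as the new, expanded selection.
    ///     let expanded = node.byte_range();
    /// }
    /// ```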
3172 pub fn syntax_ancestor<'a, T: ToOffset>(
3173 &'a self,
3174 range: Range<T>,
3175 ) -> Option<tree_sitter::Node<'a>> {
3176 let range = range.start.to_offset(self)..range.end.to_offset(self);
3177 let mut result: Option<tree_sitter::Node<'a>> = None;
3178 'outer: for layer in self
3179 .syntax
3180 .layers_for_range(range.clone(), &self.text, true)
3181 {
3182 let mut cursor = layer.node().walk();
3183
            // Descend to the first leaf that touches the start of the range and,
            // if the range is non-empty, extends beyond the start.
3186 while cursor.goto_first_child_for_byte(range.start).is_some() {
3187 if !range.is_empty() && cursor.node().end_byte() == range.start {
3188 cursor.goto_next_sibling();
3189 }
3190 }
3191
3192 // Ascend to the smallest ancestor that strictly contains the range.
3193 loop {
3194 let node_range = cursor.node().byte_range();
3195 if node_range.start <= range.start
3196 && node_range.end >= range.end
3197 && node_range.len() > range.len()
3198 {
3199 break;
3200 }
3201 if !cursor.goto_parent() {
3202 continue 'outer;
3203 }
3204 }
3205
3206 let left_node = cursor.node();
3207 let mut layer_result = left_node;
3208
3209 // For an empty range, try to find another node immediately to the right of the range.
3210 if left_node.end_byte() == range.start {
3211 let mut right_node = None;
3212 while !cursor.goto_next_sibling() {
3213 if !cursor.goto_parent() {
3214 break;
3215 }
3216 }
3217
3218 while cursor.node().start_byte() == range.start {
3219 right_node = Some(cursor.node());
3220 if !cursor.goto_first_child() {
3221 break;
3222 }
3223 }
3224
3225 // If there is a candidate node on both sides of the (empty) range, then
3226 // decide between the two by favoring a named node over an anonymous token.
3227 // If both nodes are the same in that regard, favor the right one.
3228 if let Some(right_node) = right_node {
3229 if right_node.is_named() || !left_node.is_named() {
3230 layer_result = right_node;
3231 }
3232 }
3233 }
3234
3235 if let Some(previous_result) = &result {
3236 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3237 continue;
3238 }
3239 }
3240 result = Some(layer_result);
3241 }
3242
3243 result
3244 }
3245
3246 /// Returns the outline for the buffer.
3247 ///
3248 /// This method allows passing an optional [`SyntaxTheme`] to
3249 /// syntax-highlight the returned symbols.
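    ///
    /// A small sketch of printing the outline as an indented tree (not compiled
    /// as a doctest; assumes the outline exposes its items with `depth` and
    /// `text` fields):
    ///
    /// ```ignore
    /// if let Some(outline) = snapshot.outline(None) {
    ///     for item in &outline.items {
    ///         println!("{}{}", "  ".repeat(item.depth), item.text);
    ///     }
    /// }
    /// ```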
3250 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3251 self.outline_items_containing(0..self.len(), true, theme)
3252 .map(Outline::new)
3253 }
3254
3255 /// Returns all the symbols that contain the given position.
3256 ///
3257 /// This method allows passing an optional [`SyntaxTheme`] to
3258 /// syntax-highlight the returned symbols.
3259 pub fn symbols_containing<T: ToOffset>(
3260 &self,
3261 position: T,
3262 theme: Option<&SyntaxTheme>,
3263 ) -> Option<Vec<OutlineItem<Anchor>>> {
3264 let position = position.to_offset(self);
3265 let mut items = self.outline_items_containing(
3266 position.saturating_sub(1)..self.len().min(position + 1),
3267 false,
3268 theme,
3269 )?;
3270 let mut prev_depth = None;
3271 items.retain(|item| {
3272 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3273 prev_depth = Some(item.depth);
3274 result
3275 });
3276 Some(items)
3277 }
3278
3279 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3280 let range = range.to_offset(self);
3281 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3282 grammar.outline_config.as_ref().map(|c| &c.query)
3283 });
3284 let configs = matches
3285 .grammars()
3286 .iter()
3287 .map(|g| g.outline_config.as_ref().unwrap())
3288 .collect::<Vec<_>>();
3289
3290 while let Some(mat) = matches.peek() {
3291 let config = &configs[mat.grammar_index];
3292 let containing_item_node = maybe!({
3293 let item_node = mat.captures.iter().find_map(|cap| {
3294 if cap.index == config.item_capture_ix {
3295 Some(cap.node)
3296 } else {
3297 None
3298 }
3299 })?;
3300
3301 let item_byte_range = item_node.byte_range();
3302 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3303 None
3304 } else {
3305 Some(item_node)
3306 }
3307 });
3308
3309 if let Some(item_node) = containing_item_node {
3310 return Some(
3311 Point::from_ts_point(item_node.start_position())
3312 ..Point::from_ts_point(item_node.end_position()),
3313 );
3314 }
3315
3316 matches.advance();
3317 }
3318 None
3319 }
3320
3321 pub fn outline_items_containing<T: ToOffset>(
3322 &self,
3323 range: Range<T>,
3324 include_extra_context: bool,
3325 theme: Option<&SyntaxTheme>,
3326 ) -> Option<Vec<OutlineItem<Anchor>>> {
3327 let range = range.to_offset(self);
3328 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3329 grammar.outline_config.as_ref().map(|c| &c.query)
3330 });
3331 let configs = matches
3332 .grammars()
3333 .iter()
3334 .map(|g| g.outline_config.as_ref().unwrap())
3335 .collect::<Vec<_>>();
3336
3337 let mut items = Vec::new();
3338 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3339 while let Some(mat) = matches.peek() {
3340 let config = &configs[mat.grammar_index];
3341 if let Some(item) =
3342 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3343 {
3344 items.push(item);
3345 } else if let Some(capture) = mat
3346 .captures
3347 .iter()
3348 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3349 {
3350 let capture_range = capture.node.start_position()..capture.node.end_position();
3351 let mut capture_row_range =
3352 capture_range.start.row as u32..capture_range.end.row as u32;
3353 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3354 {
3355 capture_row_range.end -= 1;
3356 }
3357 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3358 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3359 last_row_range.end = capture_row_range.end;
3360 } else {
3361 annotation_row_ranges.push(capture_row_range);
3362 }
3363 } else {
3364 annotation_row_ranges.push(capture_row_range);
3365 }
3366 }
3367 matches.advance();
3368 }
3369
3370 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3371
3372 // Assign depths based on containment relationships and convert to anchors.
3373 let mut item_ends_stack = Vec::<Point>::new();
3374 let mut anchor_items = Vec::new();
3375 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3376 for item in items {
3377 while let Some(last_end) = item_ends_stack.last().copied() {
3378 if last_end < item.range.end {
3379 item_ends_stack.pop();
3380 } else {
3381 break;
3382 }
3383 }
3384
3385 let mut annotation_row_range = None;
3386 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3387 let row_preceding_item = item.range.start.row.saturating_sub(1);
3388 if next_annotation_row_range.end < row_preceding_item {
3389 annotation_row_ranges.next();
3390 } else {
3391 if next_annotation_row_range.end == row_preceding_item {
3392 annotation_row_range = Some(next_annotation_row_range.clone());
3393 annotation_row_ranges.next();
3394 }
3395 break;
3396 }
3397 }
3398
3399 anchor_items.push(OutlineItem {
3400 depth: item_ends_stack.len(),
3401 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3402 text: item.text,
3403 highlight_ranges: item.highlight_ranges,
3404 name_ranges: item.name_ranges,
3405 body_range: item.body_range.map(|body_range| {
3406 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3407 }),
3408 annotation_range: annotation_row_range.map(|annotation_range| {
3409 self.anchor_after(Point::new(annotation_range.start, 0))
3410 ..self.anchor_before(Point::new(
3411 annotation_range.end,
3412 self.line_len(annotation_range.end),
3413 ))
3414 }),
3415 });
3416 item_ends_stack.push(item.range.end);
3417 }
3418
3419 Some(anchor_items)
3420 }
3421
3422 fn next_outline_item(
3423 &self,
3424 config: &OutlineConfig,
3425 mat: &SyntaxMapMatch,
3426 range: &Range<usize>,
3427 include_extra_context: bool,
3428 theme: Option<&SyntaxTheme>,
3429 ) -> Option<OutlineItem<Point>> {
3430 let item_node = mat.captures.iter().find_map(|cap| {
3431 if cap.index == config.item_capture_ix {
3432 Some(cap.node)
3433 } else {
3434 None
3435 }
3436 })?;
3437
3438 let item_byte_range = item_node.byte_range();
3439 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3440 return None;
3441 }
3442 let item_point_range = Point::from_ts_point(item_node.start_position())
3443 ..Point::from_ts_point(item_node.end_position());
3444
3445 let mut open_point = None;
3446 let mut close_point = None;
3447 let mut buffer_ranges = Vec::new();
3448 for capture in mat.captures {
3449 let node_is_name;
3450 if capture.index == config.name_capture_ix {
3451 node_is_name = true;
3452 } else if Some(capture.index) == config.context_capture_ix
3453 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3454 {
3455 node_is_name = false;
3456 } else {
3457 if Some(capture.index) == config.open_capture_ix {
3458 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3459 } else if Some(capture.index) == config.close_capture_ix {
3460 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3461 }
3462
3463 continue;
3464 }
3465
3466 let mut range = capture.node.start_byte()..capture.node.end_byte();
3467 let start = capture.node.start_position();
3468 if capture.node.end_position().row > start.row {
3469 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3470 }
3471
3472 if !range.is_empty() {
3473 buffer_ranges.push((range, node_is_name));
3474 }
3475 }
3476 if buffer_ranges.is_empty() {
3477 return None;
3478 }
3479 let mut text = String::new();
3480 let mut highlight_ranges = Vec::new();
3481 let mut name_ranges = Vec::new();
3482 let mut chunks = self.chunks(
3483 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3484 true,
3485 );
3486 let mut last_buffer_range_end = 0;
3487 for (buffer_range, is_name) in buffer_ranges {
3488 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
3489 text.push(' ');
3490 }
3491 last_buffer_range_end = buffer_range.end;
3492 if is_name {
3493 let mut start = text.len();
3494 let end = start + buffer_range.len();
3495
                // When multiple names are captured, the matchable text
                // includes the whitespace between them.
3498 if !name_ranges.is_empty() {
3499 start -= 1;
3500 }
3501
3502 name_ranges.push(start..end);
3503 }
3504
3505 let mut offset = buffer_range.start;
3506 chunks.seek(buffer_range.clone());
3507 for mut chunk in chunks.by_ref() {
3508 if chunk.text.len() > buffer_range.end - offset {
3509 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3510 offset = buffer_range.end;
3511 } else {
3512 offset += chunk.text.len();
3513 }
3514 let style = chunk
3515 .syntax_highlight_id
3516 .zip(theme)
3517 .and_then(|(highlight, theme)| highlight.style(theme));
3518 if let Some(style) = style {
3519 let start = text.len();
3520 let end = start + chunk.text.len();
3521 highlight_ranges.push((start..end, style));
3522 }
3523 text.push_str(chunk.text);
3524 if offset >= buffer_range.end {
3525 break;
3526 }
3527 }
3528 }
3529
3530 Some(OutlineItem {
3531 depth: 0, // We'll calculate the depth later
3532 range: item_point_range,
3533 text,
3534 highlight_ranges,
3535 name_ranges,
3536 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3537 annotation_range: None,
3538 })
3539 }
3540
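    /// Returns the ranges of all function bodies intersecting `within`, suitable for folding.
    ///
    /// This is a thin wrapper around [`Self::text_object_ranges`] that keeps only
    /// [`TextObject::InsideFunction`] matches.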
3541 pub fn function_body_fold_ranges<T: ToOffset>(
3542 &self,
3543 within: Range<T>,
3544 ) -> impl Iterator<Item = Range<usize>> + '_ {
3545 self.text_object_ranges(within, TreeSitterOptions::default())
3546 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3547 }
3548
3549 /// For each grammar in the language, runs the provided
3550 /// [`tree_sitter::Query`] against the given range.
3551 pub fn matches(
3552 &self,
3553 range: Range<usize>,
3554 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3555 ) -> SyntaxMapMatches {
3556 self.syntax.matches(range, self, query)
3557 }
3558
    /// Returns bracket range pairs overlapping or adjacent to `range`.
3560 pub fn bracket_ranges<T: ToOffset>(
3561 &self,
3562 range: Range<T>,
3563 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
        // Expand the range by one byte in each direction so that bracket pairs
        // adjacent to the range are also matched.
3565 let range = range.start.to_offset(self).saturating_sub(1)
3566 ..self.len().min(range.end.to_offset(self) + 1);
3567
3568 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3569 grammar.brackets_config.as_ref().map(|c| &c.query)
3570 });
3571 let configs = matches
3572 .grammars()
3573 .iter()
3574 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3575 .collect::<Vec<_>>();
3576
3577 iter::from_fn(move || {
3578 while let Some(mat) = matches.peek() {
3579 let mut open = None;
3580 let mut close = None;
3581 let config = &configs[mat.grammar_index];
3582 for capture in mat.captures {
3583 if capture.index == config.open_capture_ix {
3584 open = Some(capture.node.byte_range());
3585 } else if capture.index == config.close_capture_ix {
3586 close = Some(capture.node.byte_range());
3587 }
3588 }
3589
3590 matches.advance();
3591
3592 let Some((open, close)) = open.zip(close) else {
3593 continue;
3594 };
3595
3596 let bracket_range = open.start..=close.end;
3597 if !bracket_range.overlaps(&range) {
3598 continue;
3599 }
3600
3601 return Some((open, close));
3602 }
3603 None
3604 })
3605 }
3606
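    /// Returns the ranges of all text objects (as defined by each grammar's text object
    /// query) that overlap the given range, along with the [`TextObject`] kind each one
    /// matched.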
3607 pub fn text_object_ranges<T: ToOffset>(
3608 &self,
3609 range: Range<T>,
3610 options: TreeSitterOptions,
3611 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3612 let range = range.start.to_offset(self).saturating_sub(1)
3613 ..self.len().min(range.end.to_offset(self) + 1);
3614
3615 let mut matches =
3616 self.syntax
3617 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3618 grammar.text_object_config.as_ref().map(|c| &c.query)
3619 });
3620
3621 let configs = matches
3622 .grammars()
3623 .iter()
3624 .map(|grammar| grammar.text_object_config.as_ref())
3625 .collect::<Vec<_>>();
3626
3627 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3628
3629 iter::from_fn(move || loop {
3630 while let Some(capture) = captures.pop() {
3631 if capture.0.overlaps(&range) {
3632 return Some(capture);
3633 }
3634 }
3635
3636 let mat = matches.peek()?;
3637
3638 let Some(config) = configs[mat.grammar_index].as_ref() else {
3639 matches.advance();
3640 continue;
3641 };
3642
3643 for capture in mat.captures {
3644 let Some(ix) = config
3645 .text_objects_by_capture_ix
3646 .binary_search_by_key(&capture.index, |e| e.0)
3647 .ok()
3648 else {
3649 continue;
3650 };
3651 let text_object = config.text_objects_by_capture_ix[ix].1;
3652 let byte_range = capture.node.byte_range();
3653
3654 let mut found = false;
3655 for (range, existing) in captures.iter_mut() {
3656 if existing == &text_object {
3657 range.start = range.start.min(byte_range.start);
3658 range.end = range.end.max(byte_range.end);
3659 found = true;
3660 break;
3661 }
3662 }
3663
3664 if !found {
3665 captures.push((byte_range, text_object));
3666 }
3667 }
3668
3669 matches.advance();
3670 })
3671 }
3672
    /// Returns enclosing bracket ranges containing the given range.
3674 pub fn enclosing_bracket_ranges<T: ToOffset>(
3675 &self,
3676 range: Range<T>,
3677 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
3678 let range = range.start.to_offset(self)..range.end.to_offset(self);
3679
3680 self.bracket_ranges(range.clone())
3681 .filter(move |(open, close)| open.start <= range.start && close.end >= range.end)
3682 }
3683
    /// Returns the smallest enclosing pair of bracket ranges containing the given range,
    /// or `None` if no bracket pair contains it.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
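    ///
    /// A rough usage sketch (illustrative only; `snapshot` and `selection` are assumed to be
    /// a `BufferSnapshot` whose language has a bracket query and a byte range, respectively):
    ///
    /// ```ignore
    /// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(selection, None) {
    ///     // `open` and `close` are the byte ranges of the innermost surrounding bracket pair.
    ///     println!("innermost pair: {open:?} .. {close:?}");
    /// }
    /// ```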
3687 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3688 &self,
3689 range: Range<T>,
3690 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3691 ) -> Option<(Range<usize>, Range<usize>)> {
3692 let range = range.start.to_offset(self)..range.end.to_offset(self);
3693
3694 // Get the ranges of the innermost pair of brackets.
3695 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3696
3697 for (open, close) in self.enclosing_bracket_ranges(range.clone()) {
3698 if let Some(range_filter) = range_filter {
3699 if !range_filter(open.clone(), close.clone()) {
3700 continue;
3701 }
3702 }
3703
3704 let len = close.end - open.start;
3705
3706 if let Some((existing_open, existing_close)) = &result {
3707 let existing_len = existing_close.end - existing_open.start;
3708 if len > existing_len {
3709 continue;
3710 }
3711 }
3712
3713 result = Some((open, close));
3714 }
3715
3716 result
3717 }
3718
    /// Returns the byte ranges of any matches of the redaction query.
3720 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3721 /// will be run on the relevant section of the buffer.
3722 pub fn redacted_ranges<T: ToOffset>(
3723 &self,
3724 range: Range<T>,
3725 ) -> impl Iterator<Item = Range<usize>> + '_ {
3726 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3727 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3728 grammar
3729 .redactions_config
3730 .as_ref()
3731 .map(|config| &config.query)
3732 });
3733
3734 let configs = syntax_matches
3735 .grammars()
3736 .iter()
3737 .map(|grammar| grammar.redactions_config.as_ref())
3738 .collect::<Vec<_>>();
3739
3740 iter::from_fn(move || {
3741 let redacted_range = syntax_matches
3742 .peek()
3743 .and_then(|mat| {
3744 configs[mat.grammar_index].and_then(|config| {
3745 mat.captures
3746 .iter()
3747 .find(|capture| capture.index == config.redaction_capture_ix)
3748 })
3749 })
3750 .map(|mat| mat.node.byte_range());
3751 syntax_matches.advance();
3752 redacted_range
3753 })
3754 }
3755
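    /// Returns the byte ranges of language injections (embedded languages, such as code
    /// blocks in Markdown) that intersect the given range, along with the injected
    /// [`Language`] for each.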
3756 pub fn injections_intersecting_range<T: ToOffset>(
3757 &self,
3758 range: Range<T>,
3759 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3760 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3761
3762 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3763 grammar
3764 .injection_config
3765 .as_ref()
3766 .map(|config| &config.query)
3767 });
3768
3769 let configs = syntax_matches
3770 .grammars()
3771 .iter()
3772 .map(|grammar| grammar.injection_config.as_ref())
3773 .collect::<Vec<_>>();
3774
3775 iter::from_fn(move || {
3776 let ranges = syntax_matches.peek().and_then(|mat| {
3777 let config = &configs[mat.grammar_index]?;
3778 let content_capture_range = mat.captures.iter().find_map(|capture| {
3779 if capture.index == config.content_capture_ix {
3780 Some(capture.node.byte_range())
3781 } else {
3782 None
3783 }
3784 })?;
3785 let language = self.language_at(content_capture_range.start)?;
3786 Some((content_capture_range, language))
3787 });
3788 syntax_matches.advance();
3789 ranges
3790 })
3791 }
3792
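    /// Returns the runnables detected by each grammar's runnable query within the given
    /// range, including each runnable's run range, extra captures, and tags.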
3793 pub fn runnable_ranges(
3794 &self,
3795 offset_range: Range<usize>,
3796 ) -> impl Iterator<Item = RunnableRange> + '_ {
3797 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3798 grammar.runnable_config.as_ref().map(|config| &config.query)
3799 });
3800
3801 let test_configs = syntax_matches
3802 .grammars()
3803 .iter()
3804 .map(|grammar| grammar.runnable_config.as_ref())
3805 .collect::<Vec<_>>();
3806
3807 iter::from_fn(move || loop {
3808 let mat = syntax_matches.peek()?;
3809
3810 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
3811 let mut run_range = None;
3812 let full_range = mat.captures.iter().fold(
3813 Range {
3814 start: usize::MAX,
3815 end: 0,
3816 },
3817 |mut acc, next| {
3818 let byte_range = next.node.byte_range();
3819 if acc.start > byte_range.start {
3820 acc.start = byte_range.start;
3821 }
3822 if acc.end < byte_range.end {
3823 acc.end = byte_range.end;
3824 }
3825 acc
3826 },
3827 );
3828 if full_range.start > full_range.end {
3829 // We did not find a full spanning range of this match.
3830 return None;
3831 }
3832 let extra_captures: SmallVec<[_; 1]> =
3833 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
3834 test_configs
3835 .extra_captures
3836 .get(capture.index as usize)
3837 .cloned()
3838 .and_then(|tag_name| match tag_name {
3839 RunnableCapture::Named(name) => {
3840 Some((capture.node.byte_range(), name))
3841 }
3842 RunnableCapture::Run => {
3843 let _ = run_range.insert(capture.node.byte_range());
3844 None
3845 }
3846 })
3847 }));
3848 let run_range = run_range?;
3849 let tags = test_configs
3850 .query
3851 .property_settings(mat.pattern_index)
3852 .iter()
3853 .filter_map(|property| {
3854 if *property.key == *"tag" {
3855 property
3856 .value
3857 .as_ref()
3858 .map(|value| RunnableTag(value.to_string().into()))
3859 } else {
3860 None
3861 }
3862 })
3863 .collect();
3864 let extra_captures = extra_captures
3865 .into_iter()
3866 .map(|(range, name)| {
3867 (
3868 name.to_string(),
3869 self.text_for_range(range.clone()).collect::<String>(),
3870 )
3871 })
3872 .collect();
3873 // All tags should have the same range.
3874 Some(RunnableRange {
3875 run_range,
3876 full_range,
3877 runnable: Runnable {
3878 tags,
3879 language: mat.language,
3880 buffer: self.remote_id(),
3881 },
3882 extra_captures,
3883 buffer_id: self.remote_id(),
3884 })
3885 });
3886
3887 syntax_matches.advance();
3888 if test_range.is_some() {
                // It's fine to end this iterator when `.peek()?` returns `None`. But when a
                // match simply lacked a run marker, `test_range` is `None` and we loop
                // around to try the next match instead of ending the iterator.
3891 return test_range;
3892 }
3893 })
3894 }
3895
    /// Returns the selections of all replicas that intersect the given range,
    /// optionally including the local replica's own selections.
3897 #[allow(clippy::type_complexity)]
3898 pub fn selections_in_range(
3899 &self,
3900 range: Range<Anchor>,
3901 include_local: bool,
3902 ) -> impl Iterator<
3903 Item = (
3904 ReplicaId,
3905 bool,
3906 CursorShape,
3907 impl Iterator<Item = &Selection<Anchor>> + '_,
3908 ),
3909 > + '_ {
3910 self.remote_selections
3911 .iter()
3912 .filter(move |(replica_id, set)| {
3913 (include_local || **replica_id != self.text.replica_id())
3914 && !set.selections.is_empty()
3915 })
3916 .map(move |(replica_id, set)| {
3917 let start_ix = match set.selections.binary_search_by(|probe| {
3918 probe.end.cmp(&range.start, self).then(Ordering::Greater)
3919 }) {
3920 Ok(ix) | Err(ix) => ix,
3921 };
3922 let end_ix = match set.selections.binary_search_by(|probe| {
3923 probe.start.cmp(&range.end, self).then(Ordering::Less)
3924 }) {
3925 Ok(ix) | Err(ix) => ix,
3926 };
3927
3928 (
3929 *replica_id,
3930 set.line_mode,
3931 set.cursor_shape,
3932 set.selections[start_ix..end_ix].iter(),
3933 )
3934 })
3935 }
3936
    /// Returns whether the buffer contains any diagnostics.
3938 pub fn has_diagnostics(&self) -> bool {
3939 !self.diagnostics.is_empty()
3940 }
3941
3942 /// Returns all the diagnostics intersecting the given range.
3943 pub fn diagnostics_in_range<'a, T, O>(
3944 &'a self,
3945 search_range: Range<T>,
3946 reversed: bool,
3947 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3948 where
3949 T: 'a + Clone + ToOffset,
3950 O: 'a + FromAnchor,
3951 {
3952 let mut iterators: Vec<_> = self
3953 .diagnostics
3954 .iter()
3955 .map(|(_, collection)| {
3956 collection
3957 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
3958 .peekable()
3959 })
3960 .collect();
3961
3962 std::iter::from_fn(move || {
3963 let (next_ix, _) = iterators
3964 .iter_mut()
3965 .enumerate()
                .filter_map(|(ix, iter)| Some((ix, iter.peek()?)))
3967 .min_by(|(_, a), (_, b)| {
3968 let cmp = a
3969 .range
3970 .start
3971 .cmp(&b.range.start, self)
3972 // when range is equal, sort by diagnostic severity
3973 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
3974 // and stabilize order with group_id
3975 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
3976 if reversed {
3977 cmp.reverse()
3978 } else {
3979 cmp
3980 }
3981 })?;
3982 iterators[next_ix]
3983 .next()
3984 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
3985 diagnostic,
3986 range: FromAnchor::from_anchor(&range.start, self)
3987 ..FromAnchor::from_anchor(&range.end, self),
3988 })
3989 })
3990 }
3991
3992 /// Returns all the diagnostic groups associated with the given
3993 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
3995 pub fn diagnostic_groups(
3996 &self,
3997 language_server_id: Option<LanguageServerId>,
3998 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
3999 let mut groups = Vec::new();
4000
4001 if let Some(language_server_id) = language_server_id {
4002 if let Ok(ix) = self
4003 .diagnostics
4004 .binary_search_by_key(&language_server_id, |e| e.0)
4005 {
4006 self.diagnostics[ix]
4007 .1
4008 .groups(language_server_id, &mut groups, self);
4009 }
4010 } else {
4011 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4012 diagnostics.groups(*language_server_id, &mut groups, self);
4013 }
4014 }
4015
4016 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4017 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4018 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4019 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4020 });
4021
4022 groups
4023 }
4024
4025 /// Returns an iterator over the diagnostics for the given group.
4026 pub fn diagnostic_group<O>(
4027 &self,
4028 group_id: usize,
4029 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4030 where
4031 O: FromAnchor + 'static,
4032 {
4033 self.diagnostics
4034 .iter()
4035 .flat_map(move |(_, set)| set.group(group_id, self))
4036 }
4037
4038 /// An integer version number that accounts for all updates besides
4039 /// the buffer's text itself (which is versioned via a version vector).
4040 pub fn non_text_state_update_count(&self) -> usize {
4041 self.non_text_state_update_count
4042 }
4043
    /// Returns a snapshot of the underlying file.
4045 pub fn file(&self) -> Option<&Arc<dyn File>> {
4046 self.file.as_ref()
4047 }
4048
4049 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4050 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4051 if let Some(file) = self.file() {
4052 if file.path().file_name().is_none() || include_root {
4053 Some(file.full_path(cx))
4054 } else {
4055 Some(file.path().to_path_buf())
4056 }
4057 } else {
4058 None
4059 }
4060 }
4061}
4062
4063fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4064 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4065}
4066
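/// Computes the [`IndentSize`] of the leading whitespace in `text`.
///
/// The kind is taken from the first whitespace character, and the length counts every
/// leading space or tab. A rough illustration (not run as a doctest):
///
/// ```ignore
/// assert_eq!(indent_size_for_text("\t\tfn main() {}".chars()).len, 2); // kind: Tab
/// assert_eq!(indent_size_for_text("    let x = 1;".chars()).len, 4); // kind: Space
/// ```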
4067fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4068 let mut result = IndentSize::spaces(0);
4069 for c in text {
4070 let kind = match c {
4071 ' ' => IndentKind::Space,
4072 '\t' => IndentKind::Tab,
4073 _ => break,
4074 };
4075 if result.len == 0 {
4076 result.kind = kind;
4077 }
4078 result.len += 1;
4079 }
4080 result
4081}
4082
4083impl Clone for BufferSnapshot {
4084 fn clone(&self) -> Self {
4085 Self {
4086 text: self.text.clone(),
4087 syntax: self.syntax.clone(),
4088 file: self.file.clone(),
4089 remote_selections: self.remote_selections.clone(),
4090 diagnostics: self.diagnostics.clone(),
4091 language: self.language.clone(),
4092 non_text_state_update_count: self.non_text_state_update_count,
4093 }
4094 }
4095}
4096
4097impl Deref for BufferSnapshot {
4098 type Target = text::BufferSnapshot;
4099
4100 fn deref(&self) -> &Self::Target {
4101 &self.text
4102 }
4103}
4104
4105unsafe impl<'a> Send for BufferChunks<'a> {}
4106
4107impl<'a> BufferChunks<'a> {
4108 pub(crate) fn new(
4109 text: &'a Rope,
4110 range: Range<usize>,
4111 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4112 diagnostics: bool,
4113 buffer_snapshot: Option<&'a BufferSnapshot>,
4114 ) -> Self {
4115 let mut highlights = None;
4116 if let Some((captures, highlight_maps)) = syntax {
4117 highlights = Some(BufferChunkHighlights {
4118 captures,
4119 next_capture: None,
4120 stack: Default::default(),
4121 highlight_maps,
4122 })
4123 }
4124
4125 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4126 let chunks = text.chunks_in_range(range.clone());
4127
4128 let mut this = BufferChunks {
4129 range,
4130 buffer_snapshot,
4131 chunks,
4132 diagnostic_endpoints,
4133 error_depth: 0,
4134 warning_depth: 0,
4135 information_depth: 0,
4136 hint_depth: 0,
4137 unnecessary_depth: 0,
4138 highlights,
4139 };
4140 this.initialize_diagnostic_endpoints();
4141 this
4142 }
4143
    /// Seeks to the given byte range in the buffer.
4145 pub fn seek(&mut self, range: Range<usize>) {
4146 let old_range = std::mem::replace(&mut self.range, range.clone());
4147 self.chunks.set_range(self.range.clone());
4148 if let Some(highlights) = self.highlights.as_mut() {
4149 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4150 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4151 highlights
4152 .stack
4153 .retain(|(end_offset, _)| *end_offset > range.start);
4154 if let Some(capture) = &highlights.next_capture {
4155 if range.start >= capture.node.start_byte() {
4156 let next_capture_end = capture.node.end_byte();
4157 if range.start < next_capture_end {
4158 highlights.stack.push((
4159 next_capture_end,
4160 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4161 ));
4162 }
4163 highlights.next_capture.take();
4164 }
4165 }
4166 } else if let Some(snapshot) = self.buffer_snapshot {
4167 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4168 *highlights = BufferChunkHighlights {
4169 captures,
4170 next_capture: None,
4171 stack: Default::default(),
4172 highlight_maps,
4173 };
4174 } else {
4175 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4176 // Seeking such BufferChunks is not supported.
4177 debug_assert!(false, "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot");
4178 }
4179
4180 highlights.captures.set_byte_range(self.range.clone());
4181 self.initialize_diagnostic_endpoints();
4182 }
4183 }
4184
4185 fn initialize_diagnostic_endpoints(&mut self) {
4186 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4187 if let Some(buffer) = self.buffer_snapshot {
4188 let mut diagnostic_endpoints = Vec::new();
4189 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4190 diagnostic_endpoints.push(DiagnosticEndpoint {
4191 offset: entry.range.start,
4192 is_start: true,
4193 severity: entry.diagnostic.severity,
4194 is_unnecessary: entry.diagnostic.is_unnecessary,
4195 });
4196 diagnostic_endpoints.push(DiagnosticEndpoint {
4197 offset: entry.range.end,
4198 is_start: false,
4199 severity: entry.diagnostic.severity,
4200 is_unnecessary: entry.diagnostic.is_unnecessary,
4201 });
4202 }
4203 diagnostic_endpoints
4204 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4205 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4206 self.hint_depth = 0;
4207 self.error_depth = 0;
4208 self.warning_depth = 0;
4209 self.information_depth = 0;
4210 }
4211 }
4212 }
4213
4214 /// The current byte offset in the buffer.
4215 pub fn offset(&self) -> usize {
4216 self.range.start
4217 }
4218
4219 pub fn range(&self) -> Range<usize> {
4220 self.range.clone()
4221 }
4222
4223 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4224 let depth = match endpoint.severity {
4225 DiagnosticSeverity::ERROR => &mut self.error_depth,
4226 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4227 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4228 DiagnosticSeverity::HINT => &mut self.hint_depth,
4229 _ => return,
4230 };
4231 if endpoint.is_start {
4232 *depth += 1;
4233 } else {
4234 *depth -= 1;
4235 }
4236
4237 if endpoint.is_unnecessary {
4238 if endpoint.is_start {
4239 self.unnecessary_depth += 1;
4240 } else {
4241 self.unnecessary_depth -= 1;
4242 }
4243 }
4244 }
4245
4246 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4247 if self.error_depth > 0 {
4248 Some(DiagnosticSeverity::ERROR)
4249 } else if self.warning_depth > 0 {
4250 Some(DiagnosticSeverity::WARNING)
4251 } else if self.information_depth > 0 {
4252 Some(DiagnosticSeverity::INFORMATION)
4253 } else if self.hint_depth > 0 {
4254 Some(DiagnosticSeverity::HINT)
4255 } else {
4256 None
4257 }
4258 }
4259
4260 fn current_code_is_unnecessary(&self) -> bool {
4261 self.unnecessary_depth > 0
4262 }
4263}
4264
4265impl<'a> Iterator for BufferChunks<'a> {
4266 type Item = Chunk<'a>;
4267
4268 fn next(&mut self) -> Option<Self::Item> {
4269 let mut next_capture_start = usize::MAX;
4270 let mut next_diagnostic_endpoint = usize::MAX;
4271
4272 if let Some(highlights) = self.highlights.as_mut() {
4273 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4274 if *parent_capture_end <= self.range.start {
4275 highlights.stack.pop();
4276 } else {
4277 break;
4278 }
4279 }
4280
4281 if highlights.next_capture.is_none() {
4282 highlights.next_capture = highlights.captures.next();
4283 }
4284
4285 while let Some(capture) = highlights.next_capture.as_ref() {
4286 if self.range.start < capture.node.start_byte() {
4287 next_capture_start = capture.node.start_byte();
4288 break;
4289 } else {
4290 let highlight_id =
4291 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4292 highlights
4293 .stack
4294 .push((capture.node.end_byte(), highlight_id));
4295 highlights.next_capture = highlights.captures.next();
4296 }
4297 }
4298 }
4299
4300 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4301 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4302 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4303 if endpoint.offset <= self.range.start {
4304 self.update_diagnostic_depths(endpoint);
4305 diagnostic_endpoints.next();
4306 } else {
4307 next_diagnostic_endpoint = endpoint.offset;
4308 break;
4309 }
4310 }
4311 }
4312 self.diagnostic_endpoints = diagnostic_endpoints;
4313
4314 if let Some(chunk) = self.chunks.peek() {
4315 let chunk_start = self.range.start;
4316 let mut chunk_end = (self.chunks.offset() + chunk.len())
4317 .min(next_capture_start)
4318 .min(next_diagnostic_endpoint);
4319 let mut highlight_id = None;
4320 if let Some(highlights) = self.highlights.as_ref() {
4321 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4322 chunk_end = chunk_end.min(*parent_capture_end);
4323 highlight_id = Some(*parent_highlight_id);
4324 }
4325 }
4326
4327 let slice =
4328 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4329 self.range.start = chunk_end;
4330 if self.range.start == self.chunks.offset() + chunk.len() {
4331 self.chunks.next().unwrap();
4332 }
4333
4334 Some(Chunk {
4335 text: slice,
4336 syntax_highlight_id: highlight_id,
4337 diagnostic_severity: self.current_diagnostic_severity(),
4338 is_unnecessary: self.current_code_is_unnecessary(),
4339 ..Default::default()
4340 })
4341 } else {
4342 None
4343 }
4344 }
4345}
4346
4347impl operation_queue::Operation for Operation {
4348 fn lamport_timestamp(&self) -> clock::Lamport {
4349 match self {
4350 Operation::Buffer(_) => {
4351 unreachable!("buffer operations should never be deferred at this layer")
4352 }
4353 Operation::UpdateDiagnostics {
4354 lamport_timestamp, ..
4355 }
4356 | Operation::UpdateSelections {
4357 lamport_timestamp, ..
4358 }
4359 | Operation::UpdateCompletionTriggers {
4360 lamport_timestamp, ..
4361 } => *lamport_timestamp,
4362 }
4363 }
4364}
4365
4366impl Default for Diagnostic {
4367 fn default() -> Self {
4368 Self {
4369 source: Default::default(),
4370 code: None,
4371 severity: DiagnosticSeverity::ERROR,
4372 message: Default::default(),
4373 group_id: 0,
4374 is_primary: false,
4375 is_disk_based: false,
4376 is_unnecessary: false,
4377 data: None,
4378 }
4379 }
4380}
4381
4382impl IndentSize {
4383 /// Returns an [`IndentSize`] representing the given spaces.
4384 pub fn spaces(len: u32) -> Self {
4385 Self {
4386 len,
4387 kind: IndentKind::Space,
4388 }
4389 }
4390
4391 /// Returns an [`IndentSize`] representing a tab.
4392 pub fn tab() -> Self {
4393 Self {
4394 len: 1,
4395 kind: IndentKind::Tab,
4396 }
4397 }
4398
4399 /// An iterator over the characters represented by this [`IndentSize`].
4400 pub fn chars(&self) -> impl Iterator<Item = char> {
4401 iter::repeat(self.char()).take(self.len as usize)
4402 }
4403
4404 /// The character representation of this [`IndentSize`].
4405 pub fn char(&self) -> char {
4406 match self.kind {
4407 IndentKind::Space => ' ',
4408 IndentKind::Tab => '\t',
4409 }
4410 }
4411
4412 /// Consumes the current [`IndentSize`] and returns a new one that has
4413 /// been shrunk or enlarged by the given size along the given direction.
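    ///
    /// A rough sketch of the behavior (illustrative only):
    ///
    /// ```ignore
    /// let indent = IndentSize::spaces(4);
    /// // Growing by a matching kind adds the lengths.
    /// assert_eq!(indent.with_delta(Ordering::Greater, IndentSize::spaces(2)).len, 6);
    /// // Shrinking by a matching kind subtracts them.
    /// assert_eq!(indent.with_delta(Ordering::Less, IndentSize::spaces(2)).len, 2);
    /// // Growing a non-empty indent by a different kind leaves it unchanged.
    /// assert_eq!(indent.with_delta(Ordering::Greater, IndentSize::tab()).len, 4);
    /// ```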
4414 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4415 match direction {
4416 Ordering::Less => {
4417 if self.kind == size.kind && self.len >= size.len {
4418 self.len -= size.len;
4419 }
4420 }
4421 Ordering::Equal => {}
4422 Ordering::Greater => {
4423 if self.len == 0 {
4424 self = size;
4425 } else if self.kind == size.kind {
4426 self.len += size.len;
4427 }
4428 }
4429 }
4430 self
4431 }
4432
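    /// Returns the visual length of this indent, expanding each tab to `tab_size` columns.
    ///
    /// A small illustration (not run as a doctest):
    ///
    /// ```ignore
    /// let tab_size = NonZeroU32::new(4).unwrap();
    /// assert_eq!(IndentSize::tab().len_with_expanded_tabs(tab_size), 4);
    /// assert_eq!(IndentSize::spaces(3).len_with_expanded_tabs(tab_size), 3);
    /// ```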
4433 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4434 match self.kind {
4435 IndentKind::Space => self.len as usize,
4436 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4437 }
4438 }
4439}
4440
4441#[cfg(any(test, feature = "test-support"))]
4442pub struct TestFile {
4443 pub path: Arc<Path>,
4444 pub root_name: String,
4445}
4446
4447#[cfg(any(test, feature = "test-support"))]
4448impl File for TestFile {
4449 fn path(&self) -> &Arc<Path> {
4450 &self.path
4451 }
4452
4453 fn full_path(&self, _: &gpui::App) -> PathBuf {
4454 PathBuf::from(&self.root_name).join(self.path.as_ref())
4455 }
4456
4457 fn as_local(&self) -> Option<&dyn LocalFile> {
4458 None
4459 }
4460
4461 fn disk_state(&self) -> DiskState {
4462 unimplemented!()
4463 }
4464
4465 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4466 self.path().file_name().unwrap_or(self.root_name.as_ref())
4467 }
4468
4469 fn worktree_id(&self, _: &App) -> WorktreeId {
4470 WorktreeId::from_usize(0)
4471 }
4472
4473 fn as_any(&self) -> &dyn std::any::Any {
4474 unimplemented!()
4475 }
4476
4477 fn to_proto(&self, _: &App) -> rpc::proto::File {
4478 unimplemented!()
4479 }
4480
4481 fn is_private(&self) -> bool {
4482 false
4483 }
4484}
4485
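/// Groups an ascending sequence of row numbers into contiguous ranges, splitting a run
/// whenever it would exceed `max_len` rows.
///
/// A rough sketch of the behavior (illustrative only):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 2).collect();
/// assert_eq!(ranges, vec![1..3, 3..4, 5..7, 9..10]);
/// ```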
4486pub(crate) fn contiguous_ranges(
4487 values: impl Iterator<Item = u32>,
4488 max_len: usize,
4489) -> impl Iterator<Item = Range<u32>> {
4490 let mut values = values;
4491 let mut current_range: Option<Range<u32>> = None;
4492 std::iter::from_fn(move || loop {
4493 if let Some(value) = values.next() {
4494 if let Some(range) = &mut current_range {
4495 if value == range.end && range.len() < max_len {
4496 range.end += 1;
4497 continue;
4498 }
4499 }
4500
4501 let prev_range = current_range.clone();
4502 current_range = Some(value..(value + 1));
4503 if prev_range.is_some() {
4504 return prev_range;
4505 }
4506 } else {
4507 return current_range.take();
4508 }
4509 })
4510}
4511
4512#[derive(Default, Debug)]
4513pub struct CharClassifier {
4514 scope: Option<LanguageScope>,
4515 for_completion: bool,
4516 ignore_punctuation: bool,
4517}
4518
4519impl CharClassifier {
4520 pub fn new(scope: Option<LanguageScope>) -> Self {
4521 Self {
4522 scope,
4523 for_completion: false,
4524 ignore_punctuation: false,
4525 }
4526 }
4527
4528 pub fn for_completion(self, for_completion: bool) -> Self {
4529 Self {
4530 for_completion,
4531 ..self
4532 }
4533 }
4534
4535 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4536 Self {
4537 ignore_punctuation,
4538 ..self
4539 }
4540 }
4541
4542 pub fn is_whitespace(&self, c: char) -> bool {
4543 self.kind(c) == CharKind::Whitespace
4544 }
4545
4546 pub fn is_word(&self, c: char) -> bool {
4547 self.kind(c) == CharKind::Word
4548 }
4549
4550 pub fn is_punctuation(&self, c: char) -> bool {
4551 self.kind(c) == CharKind::Punctuation
4552 }
4553
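    /// Classifies `c`, optionally treating punctuation as part of a word.
    ///
    /// A rough sketch of the behavior with no language scope (illustrative only):
    ///
    /// ```ignore
    /// let classifier = CharClassifier::new(None);
    /// assert_eq!(classifier.kind_with('x', false), CharKind::Word);
    /// assert_eq!(classifier.kind_with(' ', false), CharKind::Whitespace);
    /// assert_eq!(classifier.kind_with('-', false), CharKind::Punctuation);
    /// assert_eq!(classifier.kind_with('-', true), CharKind::Word);
    /// ```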
4554 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4555 if c.is_whitespace() {
4556 return CharKind::Whitespace;
4557 } else if c.is_alphanumeric() || c == '_' {
4558 return CharKind::Word;
4559 }
4560
4561 if let Some(scope) = &self.scope {
4562 if let Some(characters) = scope.word_characters() {
4563 if characters.contains(&c) {
4564 if c == '-' && !self.for_completion && !ignore_punctuation {
4565 return CharKind::Punctuation;
4566 }
4567 return CharKind::Word;
4568 }
4569 }
4570 }
4571
4572 if ignore_punctuation {
4573 CharKind::Word
4574 } else {
4575 CharKind::Punctuation
4576 }
4577 }
4578
4579 pub fn kind(&self, c: char) -> CharKind {
4580 self.kind_with(c, self.ignore_punctuation)
4581 }
4582}
4583
4584/// Find all of the ranges of whitespace that occur at the ends of lines
4585/// in the given rope.
4586///
4587/// This could also be done with a regex search, but this implementation
4588/// avoids copying text.
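///
/// A rough sketch of the output (illustrative only; not run as a doctest):
///
/// ```ignore
/// let rope = Rope::from("a  \nb\t\nc");
/// // Trailing spaces on line 0 and the trailing tab on line 1 are reported;
/// // leading and interior whitespace is not.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![1..3, 5..6]);
/// ```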
4589pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4590 let mut ranges = Vec::new();
4591
4592 let mut offset = 0;
4593 let mut prev_chunk_trailing_whitespace_range = 0..0;
4594 for chunk in rope.chunks() {
4595 let mut prev_line_trailing_whitespace_range = 0..0;
4596 for (i, line) in chunk.split('\n').enumerate() {
4597 let line_end_offset = offset + line.len();
4598 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4599 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4600
4601 if i == 0 && trimmed_line_len == 0 {
4602 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4603 }
4604 if !prev_line_trailing_whitespace_range.is_empty() {
4605 ranges.push(prev_line_trailing_whitespace_range);
4606 }
4607
4608 offset = line_end_offset + 1;
4609 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4610 }
4611
4612 offset -= 1;
4613 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4614 }
4615
4616 if !prev_chunk_trailing_whitespace_range.is_empty() {
4617 ranges.push(prev_chunk_trailing_whitespace_range);
4618 }
4619
4620 ranges
4621}