1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 proto, Grammar, Language, LanguageRegistry,
5};
6use crate::{
7 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
8 language_settings::{language_settings, LanguageSettings},
9 outline::OutlineItem,
10 syntax_map::{
11 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
12 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, TextObject,
17 TreeSitterOptions,
18};
19use anyhow::{anyhow, Context as _, Result};
20use async_watch as watch;
21use clock::Lamport;
22pub use clock::ReplicaId;
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 AnyElement, App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, Pixels,
28 SharedString, StyledText, Task, TaskLabel, TextStyle, Window,
29};
30use lsp::{LanguageServerId, NumberOrString};
31use parking_lot::Mutex;
32use schemars::JsonSchema;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 ffi::OsStr,
45 fmt,
46 future::Future,
47 iter::{self, Iterator, Peekable},
48 mem,
49 num::NonZeroU32,
50 ops::{Deref, DerefMut, Range},
51 path::{Path, PathBuf},
52 str,
53 sync::{Arc, LazyLock},
54 time::{Duration, Instant},
55 vec,
56};
57use sum_tree::TreeMap;
58use text::operation_queue::OperationQueue;
59use text::*;
60pub use text::{
61 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
62 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
63 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
64 Transaction, TransactionId, Unclipped,
65};
66use theme::{ActiveTheme as _, SyntaxTheme};
67#[cfg(any(test, feature = "test-support"))]
68use util::RandomCharIter;
69use util::{debug_panic, maybe, RangeExt};
70
71#[cfg(any(test, feature = "test-support"))]
72pub use {tree_sitter_rust, tree_sitter_typescript};
73
74pub use lsp::DiagnosticSeverity;
75
76/// A label for the background task spawned by the buffer to compute
77/// a diff against the contents of its file.
78pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
79
80 /// Indicates whether a [`Buffer`] has permission to edit.
81#[derive(PartialEq, Clone, Copy, Debug)]
82pub enum Capability {
83 /// The buffer is a mutable replica.
84 ReadWrite,
85 /// The buffer is a read-only replica.
86 ReadOnly,
87}
88
89pub type BufferRow = u32;
90
91/// An in-memory representation of a source code file, including its text,
92/// syntax trees, git status, and diagnostics.
93pub struct Buffer {
94 text: TextBuffer,
95 branch_state: Option<BufferBranchState>,
96 /// Filesystem state, `None` when there is no path.
97 file: Option<Arc<dyn File>>,
98 /// The mtime of the file when this buffer was last loaded from
99 /// or saved to disk.
100 saved_mtime: Option<MTime>,
101 /// The version vector when this buffer was last loaded from
102 /// or saved to disk.
103 saved_version: clock::Global,
104 preview_version: clock::Global,
105 transaction_depth: usize,
106 was_dirty_before_starting_transaction: Option<bool>,
107 reload_task: Option<Task<Result<()>>>,
108 language: Option<Arc<Language>>,
109 autoindent_requests: Vec<Arc<AutoindentRequest>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 parsing_in_background: bool,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
125 /// Memoizes calls to `has_changes_since(saved_version)`.
126 /// The contents of the cell are `(self.version, has_changes)` at the time of the last call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 _subscriptions: Vec<gpui::Subscription>,
129}
130
131#[derive(Copy, Clone, Debug, PartialEq, Eq)]
132pub enum ParseStatus {
133 Idle,
134 Parsing,
135}
136
137struct BufferBranchState {
138 base_buffer: Entity<Buffer>,
139 merged_operations: Vec<Lamport>,
140}
141
142/// An immutable, cheaply cloneable representation of a fixed
143/// state of a buffer.
144pub struct BufferSnapshot {
145 pub text: text::BufferSnapshot,
146 pub(crate) syntax: SyntaxSnapshot,
147 file: Option<Arc<dyn File>>,
148 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
149 remote_selections: TreeMap<ReplicaId, SelectionSet>,
150 language: Option<Arc<Language>>,
151 non_text_state_update_count: usize,
152}
153
154/// The kind and amount of indentation in a particular line. For now,
155/// assumes that indentation is all the same character.
156#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
157pub struct IndentSize {
158 /// The number of bytes that comprise the indentation.
159 pub len: u32,
160 /// The kind of whitespace used for indentation.
161 pub kind: IndentKind,
162}
163
164/// A whitespace character that's used for indentation.
165#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
166pub enum IndentKind {
167 /// An ASCII space character.
168 #[default]
169 Space,
170 /// An ASCII tab character.
171 Tab,
172}
173
174/// The shape of a selection cursor.
175#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
176#[serde(rename_all = "snake_case")]
177pub enum CursorShape {
178 /// A vertical bar
179 #[default]
180 Bar,
181 /// A block that surrounds the following character
182 Block,
183 /// An underline that runs along the following character
184 Underline,
185 /// A box drawn around the following character
186 Hollow,
187}
188
189#[derive(Clone, Debug)]
190struct SelectionSet {
191 line_mode: bool,
192 cursor_shape: CursorShape,
193 selections: Arc<[Selection<Anchor>]>,
194 lamport_timestamp: clock::Lamport,
195}
196
197/// A diagnostic associated with a certain range of a buffer.
198#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
199pub struct Diagnostic {
200 /// The name of the service that produced this diagnostic.
201 pub source: Option<String>,
202 /// A machine-readable code that identifies this diagnostic.
203 pub code: Option<NumberOrString>,
204 /// Whether this diagnostic is a hint, warning, or error.
205 pub severity: DiagnosticSeverity,
206 /// The human-readable message associated with this diagnostic.
207 pub message: String,
208 /// An id that identifies the group to which this diagnostic belongs.
209 ///
210 /// When a language server produces a diagnostic with
211 /// one or more associated diagnostics, those diagnostics are all
212 /// assigned a single group ID.
213 pub group_id: usize,
214 /// Whether this diagnostic is the primary diagnostic for its group.
215 ///
216 /// In a given group, the primary diagnostic is the top-level diagnostic
217 /// returned by the language server. The non-primary diagnostics are the
218 /// associated diagnostics.
219 pub is_primary: bool,
220 /// Whether this diagnostic is considered to originate from an analysis of
221 /// files on disk, as opposed to any unsaved buffer contents. This is a
222 /// property of a given diagnostic source, and is configured for a given
223 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
224 /// for the language server.
225 pub is_disk_based: bool,
226 /// Whether this diagnostic marks unnecessary code.
227 pub is_unnecessary: bool,
228 /// Data from the language server that produced this diagnostic. Passed back to the server when code actions are requested for this diagnostic.
229 pub data: Option<Value>,
230}
231
232/// An operation used to synchronize this buffer with its other replicas.
233#[derive(Clone, Debug, PartialEq)]
234pub enum Operation {
235 /// A text operation.
236 Buffer(text::Operation),
237
238 /// An update to the buffer's diagnostics.
239 UpdateDiagnostics {
240 /// The id of the language server that produced the new diagnostics.
241 server_id: LanguageServerId,
242 /// The diagnostics.
243 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
244 /// The buffer's lamport timestamp.
245 lamport_timestamp: clock::Lamport,
246 },
247
248 /// An update to the most recent selections in this buffer.
249 UpdateSelections {
250 /// The selections.
251 selections: Arc<[Selection<Anchor>]>,
252 /// The buffer's lamport timestamp.
253 lamport_timestamp: clock::Lamport,
254 /// Whether the selections are in 'line mode'.
255 line_mode: bool,
256 /// The [`CursorShape`] associated with these selections.
257 cursor_shape: CursorShape,
258 },
259
260 /// An update to the characters that should trigger autocompletion
261 /// for this buffer.
262 UpdateCompletionTriggers {
263 /// The characters that trigger autocompletion.
264 triggers: Vec<String>,
265 /// The buffer's lamport timestamp.
266 lamport_timestamp: clock::Lamport,
267 /// The language server ID.
268 server_id: LanguageServerId,
269 },
270}
271
272/// An event that occurs in a buffer.
273#[derive(Clone, Debug, PartialEq)]
274pub enum BufferEvent {
275 /// The buffer was changed in a way that must be
276 /// propagated to its other replicas.
277 Operation {
278 operation: Operation,
279 is_local: bool,
280 },
281 /// The buffer was edited.
282 Edited,
283 /// The buffer's `dirty` bit changed.
284 DirtyChanged,
285 /// The buffer was saved.
286 Saved,
287 /// The buffer's file was changed on disk.
288 FileHandleChanged,
289 /// The buffer was reloaded.
290 Reloaded,
291 /// The buffer needs to be reloaded.
292 ReloadNeeded,
293 /// The buffer's language was changed.
294 LanguageChanged,
295 /// The buffer's syntax trees were updated.
296 Reparsed,
297 /// The buffer's diagnostics were updated.
298 DiagnosticsUpdated,
299 /// The buffer gained or lost editing capabilities.
300 CapabilityChanged,
301 /// The buffer was explicitly requested to close.
302 Closed,
303 /// The buffer was discarded when closing.
304 Discarded,
305}
306
307/// The file associated with a buffer.
308pub trait File: Send + Sync {
309 /// Returns the [`LocalFile`] associated with this file, if the
310 /// file is local.
311 fn as_local(&self) -> Option<&dyn LocalFile>;
312
313 /// Returns whether this file is local.
314 fn is_local(&self) -> bool {
315 self.as_local().is_some()
316 }
317
318 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
319 /// only available in some states, such as modification time.
320 fn disk_state(&self) -> DiskState;
321
322 /// Returns the path of this file relative to the worktree's root directory.
323 fn path(&self) -> &Arc<Path>;
324
325 /// Returns the path of this file relative to the worktree's parent directory (this means it
326 /// includes the name of the worktree's root folder).
327 fn full_path(&self, cx: &App) -> PathBuf;
328
329 /// Returns the last component of this handle's absolute path. If this handle refers to the root
330 /// of its worktree, then this method will return the name of the worktree itself.
331 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
332
333 /// Returns the id of the worktree to which this file belongs.
334 ///
335 /// This is needed for looking up project-specific settings.
336 fn worktree_id(&self, cx: &App) -> WorktreeId;
337
338 /// Converts this file into an [`Any`] trait object.
339 fn as_any(&self) -> &dyn Any;
340
341 /// Converts this file into a protobuf message.
342 fn to_proto(&self, cx: &App) -> rpc::proto::File;
343
344 /// Returns whether Zed considers this to be a private file.
345 fn is_private(&self) -> bool;
346}
347
348/// The file's storage status - whether it's stored (`Present`), and if so when it was last
349/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
350/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
351/// indicator for new files.
352#[derive(Copy, Clone, Debug, PartialEq)]
353pub enum DiskState {
354 /// File created in Zed that has not been saved.
355 New,
356 /// File present on the filesystem.
357 Present { mtime: MTime },
358 /// Deleted file that was previously present.
359 Deleted,
360}
361
362impl DiskState {
363 /// Returns the file's last known modification time on disk.
364 pub fn mtime(self) -> Option<MTime> {
365 match self {
366 DiskState::New => None,
367 DiskState::Present { mtime } => Some(mtime),
368 DiskState::Deleted => None,
369 }
370 }
371
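/// Returns whether the file currently exists on the filesystem.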
372 pub fn exists(&self) -> bool {
373 match self {
374 DiskState::New => false,
375 DiskState::Present { .. } => true,
376 DiskState::Deleted => false,
377 }
378 }
379}
380
381/// The file associated with a buffer, in the case where the file is on the local disk.
382pub trait LocalFile: File {
383 /// Returns the absolute path of this file.
384 fn abs_path(&self, cx: &App) -> PathBuf;
385
386 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
387 fn load(&self, cx: &App) -> Task<Result<String>>;
388
389 /// Loads the file's contents from disk.
390 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
391}
392
393/// The auto-indent behavior associated with an editing operation.
394/// For some editing operations, each affected line of text has its
395/// indentation recomputed. For other operations, the entire block
396/// of edited text is adjusted uniformly.
397#[derive(Clone, Debug)]
398pub enum AutoindentMode {
399 /// Indent each line of inserted text.
400 EachLine,
401 /// Apply the same indentation adjustment to all of the lines
402 /// in a given insertion.
403 Block {
404 /// The original start column of each insertion, if it was
405 /// copied from elsewhere.
406 ///
407 /// Knowing this start column makes it possible to preserve the
408 /// relative indentation of every line in the insertion from
409 /// when it was copied.
410 ///
411 /// If the original start column is `a`, and the first line of the
412 /// insertion is then auto-indented to column `b`, then every other line
413 /// of the insertion will have its indentation adjusted by `b - a` columns.
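///
/// For example (an illustrative case): if a block was copied from start
/// column 4 (`a = 4`) and its first line is auto-indented to column 8
/// (`b = 8`), every other line in the block is shifted right by 4 columns,
/// preserving the block's internal relative indentation.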
414 original_start_columns: Vec<u32>,
415 },
416}
417
418#[derive(Clone)]
419struct AutoindentRequest {
420 before_edit: BufferSnapshot,
421 entries: Vec<AutoindentRequestEntry>,
422 is_block_mode: bool,
423 ignore_empty_lines: bool,
424}
425
426#[derive(Debug, Clone)]
427struct AutoindentRequestEntry {
428 /// A range of the buffer whose indentation should be adjusted.
429 range: Range<Anchor>,
430 /// Whether or not these lines should be considered brand new, for the
431 /// purpose of auto-indent. When text is not new, its indentation will
432 /// only be adjusted if the suggested indentation level has *changed*
433 /// since the edit was made.
434 first_line_is_new: bool,
435 indent_size: IndentSize,
436 original_indent_column: Option<u32>,
437}
438
439#[derive(Debug)]
440struct IndentSuggestion {
441 basis_row: u32,
442 delta: Ordering,
443 within_error: bool,
444}
445
446struct BufferChunkHighlights<'a> {
447 captures: SyntaxMapCaptures<'a>,
448 next_capture: Option<SyntaxMapCapture<'a>>,
449 stack: Vec<(usize, HighlightId)>,
450 highlight_maps: Vec<HighlightMap>,
451}
452
453/// An iterator that yields chunks of a buffer's text, along with their
454/// syntax highlights and diagnostic status.
455pub struct BufferChunks<'a> {
456 buffer_snapshot: Option<&'a BufferSnapshot>,
457 range: Range<usize>,
458 chunks: text::Chunks<'a>,
459 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
460 error_depth: usize,
461 warning_depth: usize,
462 information_depth: usize,
463 hint_depth: usize,
464 unnecessary_depth: usize,
465 highlights: Option<BufferChunkHighlights<'a>>,
466}
467
468/// A chunk of a buffer's text, along with its syntax highlight and
469/// diagnostic status.
470#[derive(Clone, Debug, Default)]
471pub struct Chunk<'a> {
472 /// The text of the chunk.
473 pub text: &'a str,
474 /// The syntax highlighting style of the chunk.
475 pub syntax_highlight_id: Option<HighlightId>,
476 /// The highlight style that has been applied to this chunk in
477 /// the editor.
478 pub highlight_style: Option<HighlightStyle>,
479 /// The severity of diagnostic associated with this chunk, if any.
480 pub diagnostic_severity: Option<DiagnosticSeverity>,
481 /// Whether this chunk of text is marked as unnecessary.
482 pub is_unnecessary: bool,
483 /// Whether this chunk of text was originally a tab character.
484 pub is_tab: bool,
485 /// An optional recipe for how the chunk should be presented.
486 pub renderer: Option<ChunkRenderer>,
487}
488
489/// A recipe for how the chunk should be presented.
490#[derive(Clone)]
491pub struct ChunkRenderer {
492 /// Creates a custom element to represent this chunk.
493 pub render: Arc<dyn Send + Sync + Fn(&mut ChunkRendererContext) -> AnyElement>,
494 /// If true, the element is constrained to the shaped width of the text.
495 pub constrain_width: bool,
496}
497
498pub struct ChunkRendererContext<'a, 'b> {
499 pub window: &'a mut Window,
500 pub context: &'b mut App,
501 pub max_width: Pixels,
502}
503
504impl fmt::Debug for ChunkRenderer {
505 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
506 f.debug_struct("ChunkRenderer")
507 .field("constrain_width", &self.constrain_width)
508 .finish()
509 }
510}
511
512impl Deref for ChunkRendererContext<'_, '_> {
513 type Target = App;
514
515 fn deref(&self) -> &Self::Target {
516 self.context
517 }
518}
519
520impl DerefMut for ChunkRendererContext<'_, '_> {
521 fn deref_mut(&mut self) -> &mut Self::Target {
522 self.context
523 }
524}
525
526/// A set of edits to a given version of a buffer, computed asynchronously.
527#[derive(Debug)]
528pub struct Diff {
529 pub(crate) base_version: clock::Global,
530 line_ending: LineEnding,
531 pub edits: Vec<(Range<usize>, Arc<str>)>,
532}
533
534#[derive(Clone, Copy)]
535pub(crate) struct DiagnosticEndpoint {
536 offset: usize,
537 is_start: bool,
538 severity: DiagnosticSeverity,
539 is_unnecessary: bool,
540}
541
542/// A class of characters, used for characterizing a run of text.
543#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
544pub enum CharKind {
545 /// Whitespace.
546 Whitespace,
547 /// Punctuation.
548 Punctuation,
549 /// Word.
550 Word,
551}
552
553 /// A runnable is a set of data about a region that could be resolved into a task.
554pub struct Runnable {
555 pub tags: SmallVec<[RunnableTag; 1]>,
556 pub language: Arc<Language>,
557 pub buffer: BufferId,
558}
559
560#[derive(Default, Clone, Debug)]
561pub struct HighlightedText {
562 pub text: SharedString,
563 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
564}
565
566#[derive(Default, Debug)]
567struct HighlightedTextBuilder {
568 pub text: String,
569 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
570}
571
572impl HighlightedText {
573 pub fn from_buffer_range<T: ToOffset>(
574 range: Range<T>,
575 snapshot: &text::BufferSnapshot,
576 syntax_snapshot: &SyntaxSnapshot,
577 override_style: Option<HighlightStyle>,
578 syntax_theme: &SyntaxTheme,
579 ) -> Self {
580 let mut highlighted_text = HighlightedTextBuilder::default();
581 highlighted_text.add_text_from_buffer_range(
582 range,
583 snapshot,
584 syntax_snapshot,
585 override_style,
586 syntax_theme,
587 );
588 highlighted_text.build()
589 }
590
591 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
592 gpui::StyledText::new(self.text.clone())
593 .with_default_highlights(default_style, self.highlights.iter().cloned())
594 }
595
596 /// Returns the first line with leading whitespace trimmed (unless a highlight
597 /// starts within that whitespace), and a boolean indicating whether more lines follow.
598 pub fn first_line_preview(self) -> (Self, bool) {
599 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
600 let first_line = &self.text[..newline_ix];
601
602 // Trim leading whitespace, unless an edit starts prior to it.
603 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
604 if let Some((first_highlight_range, _)) = self.highlights.first() {
605 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
606 }
607
608 let preview_text = &first_line[preview_start_ix..];
609 let preview_highlights = self
610 .highlights
611 .into_iter()
612 .take_while(|(range, _)| range.start < newline_ix)
613 .filter_map(|(mut range, highlight)| {
614 range.start = range.start.saturating_sub(preview_start_ix);
615 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
616 if range.is_empty() {
617 None
618 } else {
619 Some((range, highlight))
620 }
621 });
622
623 let preview = Self {
624 text: SharedString::new(preview_text),
625 highlights: preview_highlights.collect(),
626 };
627
628 (preview, self.text.len() > newline_ix)
629 }
630}
631
632impl HighlightedTextBuilder {
633 pub fn build(self) -> HighlightedText {
634 HighlightedText {
635 text: self.text.into(),
636 highlights: self.highlights,
637 }
638 }
639
640 pub fn add_text_from_buffer_range<T: ToOffset>(
641 &mut self,
642 range: Range<T>,
643 snapshot: &text::BufferSnapshot,
644 syntax_snapshot: &SyntaxSnapshot,
645 override_style: Option<HighlightStyle>,
646 syntax_theme: &SyntaxTheme,
647 ) {
648 let range = range.to_offset(snapshot);
649 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
650 let start = self.text.len();
651 self.text.push_str(chunk.text);
652 let end = self.text.len();
653
654 if let Some(mut highlight_style) = chunk
655 .syntax_highlight_id
656 .and_then(|id| id.style(syntax_theme))
657 {
658 if let Some(override_style) = override_style {
659 highlight_style.highlight(override_style);
660 }
661 self.highlights.push((start..end, highlight_style));
662 } else if let Some(override_style) = override_style {
663 self.highlights.push((start..end, override_style));
664 }
665 }
666 }
667
668 fn highlighted_chunks<'a>(
669 range: Range<usize>,
670 snapshot: &'a text::BufferSnapshot,
671 syntax_snapshot: &'a SyntaxSnapshot,
672 ) -> BufferChunks<'a> {
673 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
674 grammar.highlights_query.as_ref()
675 });
676
677 let highlight_maps = captures
678 .grammars()
679 .iter()
680 .map(|grammar| grammar.highlight_map())
681 .collect();
682
683 BufferChunks::new(
684 snapshot.as_rope(),
685 range,
686 Some((captures, highlight_maps)),
687 false,
688 None,
689 )
690 }
691}
692
693#[derive(Clone)]
694pub struct EditPreview {
695 old_snapshot: text::BufferSnapshot,
696 applied_edits_snapshot: text::BufferSnapshot,
697 syntax_snapshot: SyntaxSnapshot,
698}
699
700impl EditPreview {
701 pub fn highlight_edits(
702 &self,
703 current_snapshot: &BufferSnapshot,
704 edits: &[(Range<Anchor>, String)],
705 include_deletions: bool,
706 cx: &App,
707 ) -> HighlightedText {
708 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
709 return HighlightedText::default();
710 };
711
712 let mut highlighted_text = HighlightedTextBuilder::default();
713
714 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
715
716 let insertion_highlight_style = HighlightStyle {
717 background_color: Some(cx.theme().status().created_background),
718 ..Default::default()
719 };
720 let deletion_highlight_style = HighlightStyle {
721 background_color: Some(cx.theme().status().deleted_background),
722 ..Default::default()
723 };
724 let syntax_theme = cx.theme().syntax();
725
726 for (range, edit_text) in edits {
727 let edit_new_end_in_preview_snapshot = range
728 .end
729 .bias_right(&self.old_snapshot)
730 .to_offset(&self.applied_edits_snapshot);
731 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
732
733 let unchanged_range_in_preview_snapshot =
734 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
735 if !unchanged_range_in_preview_snapshot.is_empty() {
736 highlighted_text.add_text_from_buffer_range(
737 unchanged_range_in_preview_snapshot,
738 &self.applied_edits_snapshot,
739 &self.syntax_snapshot,
740 None,
741 &syntax_theme,
742 );
743 }
744
745 let range_in_current_snapshot = range.to_offset(current_snapshot);
746 if include_deletions && !range_in_current_snapshot.is_empty() {
747 highlighted_text.add_text_from_buffer_range(
748 range_in_current_snapshot,
749 &current_snapshot.text,
750 &current_snapshot.syntax,
751 Some(deletion_highlight_style),
752 &syntax_theme,
753 );
754 }
755
756 if !edit_text.is_empty() {
757 highlighted_text.add_text_from_buffer_range(
758 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
759 &self.applied_edits_snapshot,
760 &self.syntax_snapshot,
761 Some(insertion_highlight_style),
762 &syntax_theme,
763 );
764 }
765
766 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
767 }
768
769 highlighted_text.add_text_from_buffer_range(
770 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
771 &self.applied_edits_snapshot,
772 &self.syntax_snapshot,
773 None,
774 &syntax_theme,
775 );
776
777 highlighted_text.build()
778 }
779
780 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
781 let (first, _) = edits.first()?;
782 let (last, _) = edits.last()?;
783
784 let start = first
785 .start
786 .bias_left(&self.old_snapshot)
787 .to_point(&self.applied_edits_snapshot);
788 let end = last
789 .end
790 .bias_right(&self.old_snapshot)
791 .to_point(&self.applied_edits_snapshot);
792
793 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
794 let range = Point::new(start.row, 0)
795 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
796
797 Some(range.to_offset(&self.applied_edits_snapshot))
798 }
799}
800
801#[derive(Clone, Debug, PartialEq, Eq)]
802pub struct BracketMatch {
803 pub open_range: Range<usize>,
804 pub close_range: Range<usize>,
805 pub newline_only: bool,
806}
807
808impl Buffer {
809 /// Create a new buffer with the given base text.
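///
/// A minimal usage sketch (illustrative, not a doctest), assuming a gpui
/// context that can create entities:
///
/// ```ignore
/// let buffer = cx.new(|cx| Buffer::local("fn main() {}\n", cx));
/// ```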
810 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
811 Self::build(
812 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
813 None,
814 Capability::ReadWrite,
815 )
816 }
817
818 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
819 pub fn local_normalized(
820 base_text_normalized: Rope,
821 line_ending: LineEnding,
822 cx: &Context<Self>,
823 ) -> Self {
824 Self::build(
825 TextBuffer::new_normalized(
826 0,
827 cx.entity_id().as_non_zero_u64().into(),
828 line_ending,
829 base_text_normalized,
830 ),
831 None,
832 Capability::ReadWrite,
833 )
834 }
835
836 /// Create a new buffer that is a replica of a remote buffer.
837 pub fn remote(
838 remote_id: BufferId,
839 replica_id: ReplicaId,
840 capability: Capability,
841 base_text: impl Into<String>,
842 ) -> Self {
843 Self::build(
844 TextBuffer::new(replica_id, remote_id, base_text.into()),
845 None,
846 capability,
847 )
848 }
849
850 /// Create a new buffer that is a replica of a remote buffer, populating its
851 /// state from the given protobuf message.
852 pub fn from_proto(
853 replica_id: ReplicaId,
854 capability: Capability,
855 message: proto::BufferState,
856 file: Option<Arc<dyn File>>,
857 ) -> Result<Self> {
858 let buffer_id = BufferId::new(message.id)
859 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
860 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
861 let mut this = Self::build(buffer, file, capability);
862 this.text.set_line_ending(proto::deserialize_line_ending(
863 rpc::proto::LineEnding::from_i32(message.line_ending)
864 .ok_or_else(|| anyhow!("missing line_ending"))?,
865 ));
866 this.saved_version = proto::deserialize_version(&message.saved_version);
867 this.saved_mtime = message.saved_mtime.map(|time| time.into());
868 Ok(this)
869 }
870
871 /// Serialize the buffer's state to a protobuf message.
872 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
873 proto::BufferState {
874 id: self.remote_id().into(),
875 file: self.file.as_ref().map(|f| f.to_proto(cx)),
876 base_text: self.base_text().to_string(),
877 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
878 saved_version: proto::serialize_version(&self.saved_version),
879 saved_mtime: self.saved_mtime.map(|time| time.into()),
880 }
881 }
882
883 /// Serialize as protobufs all of the changes to the buffer since the given version.
884 pub fn serialize_ops(
885 &self,
886 since: Option<clock::Global>,
887 cx: &App,
888 ) -> Task<Vec<proto::Operation>> {
889 let mut operations = Vec::new();
890 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
891
892 operations.extend(self.remote_selections.iter().map(|(_, set)| {
893 proto::serialize_operation(&Operation::UpdateSelections {
894 selections: set.selections.clone(),
895 lamport_timestamp: set.lamport_timestamp,
896 line_mode: set.line_mode,
897 cursor_shape: set.cursor_shape,
898 })
899 }));
900
901 for (server_id, diagnostics) in &self.diagnostics {
902 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
903 lamport_timestamp: self.diagnostics_timestamp,
904 server_id: *server_id,
905 diagnostics: diagnostics.iter().cloned().collect(),
906 }));
907 }
908
909 for (server_id, completions) in &self.completion_triggers_per_language_server {
910 operations.push(proto::serialize_operation(
911 &Operation::UpdateCompletionTriggers {
912 triggers: completions.iter().cloned().collect(),
913 lamport_timestamp: self.completion_triggers_timestamp,
914 server_id: *server_id,
915 },
916 ));
917 }
918
919 let text_operations = self.text.operations().clone();
920 cx.background_spawn(async move {
921 let since = since.unwrap_or_default();
922 operations.extend(
923 text_operations
924 .iter()
925 .filter(|(_, op)| !since.observed(op.timestamp()))
926 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
927 );
928 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
929 operations
930 })
931 }
932
933 /// Assign a language to the buffer, returning the buffer.
934 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
935 self.set_language(Some(language), cx);
936 self
937 }
938
939 /// Returns the [`Capability`] of this buffer.
940 pub fn capability(&self) -> Capability {
941 self.capability
942 }
943
944 /// Whether this buffer can only be read.
945 pub fn read_only(&self) -> bool {
946 self.capability == Capability::ReadOnly
947 }
948
949 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
950 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
951 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
952 let snapshot = buffer.snapshot();
953 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
954 Self {
955 saved_mtime,
956 saved_version: buffer.version(),
957 preview_version: buffer.version(),
958 reload_task: None,
959 transaction_depth: 0,
960 was_dirty_before_starting_transaction: None,
961 has_unsaved_edits: Cell::new((buffer.version(), false)),
962 text: buffer,
963 branch_state: None,
964 file,
965 capability,
966 syntax_map,
967 parsing_in_background: false,
968 non_text_state_update_count: 0,
969 sync_parse_timeout: Duration::from_millis(1),
970 parse_status: async_watch::channel(ParseStatus::Idle),
971 autoindent_requests: Default::default(),
972 pending_autoindent: Default::default(),
973 language: None,
974 remote_selections: Default::default(),
975 diagnostics: Default::default(),
976 diagnostics_timestamp: Default::default(),
977 completion_triggers: Default::default(),
978 completion_triggers_per_language_server: Default::default(),
979 completion_triggers_timestamp: Default::default(),
980 deferred_ops: OperationQueue::new(),
981 has_conflict: false,
982 _subscriptions: Vec::new(),
983 }
984 }
985
986 pub fn build_snapshot(
987 text: Rope,
988 language: Option<Arc<Language>>,
989 language_registry: Option<Arc<LanguageRegistry>>,
990 cx: &mut App,
991 ) -> impl Future<Output = BufferSnapshot> {
992 let entity_id = cx.reserve_entity::<Self>().entity_id();
993 let buffer_id = entity_id.as_non_zero_u64().into();
994 async move {
995 let text =
996 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
997 let mut syntax = SyntaxMap::new(&text).snapshot();
998 if let Some(language) = language.clone() {
999 let text = text.clone();
1000 let language = language.clone();
1001 let language_registry = language_registry.clone();
1002 syntax.reparse(&text, language_registry, language);
1003 }
1004 BufferSnapshot {
1005 text,
1006 syntax,
1007 file: None,
1008 diagnostics: Default::default(),
1009 remote_selections: Default::default(),
1010 language,
1011 non_text_state_update_count: 0,
1012 }
1013 }
1014 }
1015
1016 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1017 let entity_id = cx.reserve_entity::<Self>().entity_id();
1018 let buffer_id = entity_id.as_non_zero_u64().into();
1019 let text =
1020 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
1021 let syntax = SyntaxMap::new(&text).snapshot();
1022 BufferSnapshot {
1023 text,
1024 syntax,
1025 file: None,
1026 diagnostics: Default::default(),
1027 remote_selections: Default::default(),
1028 language: None,
1029 non_text_state_update_count: 0,
1030 }
1031 }
1032
1033 #[cfg(any(test, feature = "test-support"))]
1034 pub fn build_snapshot_sync(
1035 text: Rope,
1036 language: Option<Arc<Language>>,
1037 language_registry: Option<Arc<LanguageRegistry>>,
1038 cx: &mut App,
1039 ) -> BufferSnapshot {
1040 let entity_id = cx.reserve_entity::<Self>().entity_id();
1041 let buffer_id = entity_id.as_non_zero_u64().into();
1042 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1043 let mut syntax = SyntaxMap::new(&text).snapshot();
1044 if let Some(language) = language.clone() {
1045 let text = text.clone();
1046 let language = language.clone();
1047 let language_registry = language_registry.clone();
1048 syntax.reparse(&text, language_registry, language);
1049 }
1050 BufferSnapshot {
1051 text,
1052 syntax,
1053 file: None,
1054 diagnostics: Default::default(),
1055 remote_selections: Default::default(),
1056 language,
1057 non_text_state_update_count: 0,
1058 }
1059 }
1060
1061 /// Retrieve a snapshot of the buffer's current state. This is computationally
1062 /// cheap, and allows reading from the buffer on a background thread.
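///
/// An illustrative sketch (not a doctest): take the snapshot on the main
/// thread, then read from it on a background thread.
///
/// ```ignore
/// let snapshot = buffer.read(cx).snapshot();
/// cx.background_spawn(async move {
///     let last_row = snapshot.max_point().row;
///     // ... read from `snapshot` freely without blocking the buffer ...
/// })
/// .detach();
/// ```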
1063 pub fn snapshot(&self) -> BufferSnapshot {
1064 let text = self.text.snapshot();
1065 let mut syntax_map = self.syntax_map.lock();
1066 syntax_map.interpolate(&text);
1067 let syntax = syntax_map.snapshot();
1068
1069 BufferSnapshot {
1070 text,
1071 syntax,
1072 file: self.file.clone(),
1073 remote_selections: self.remote_selections.clone(),
1074 diagnostics: self.diagnostics.clone(),
1075 language: self.language.clone(),
1076 non_text_state_update_count: self.non_text_state_update_count,
1077 }
1078 }
1079
1080 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1081 let this = cx.entity();
1082 cx.new(|cx| {
1083 let mut branch = Self {
1084 branch_state: Some(BufferBranchState {
1085 base_buffer: this.clone(),
1086 merged_operations: Default::default(),
1087 }),
1088 language: self.language.clone(),
1089 has_conflict: self.has_conflict,
1090 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1091 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1092 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1093 };
1094 if let Some(language_registry) = self.language_registry() {
1095 branch.set_language_registry(language_registry);
1096 }
1097
1098 // Reparse the branch buffer so that we get syntax highlighting immediately.
1099 branch.reparse(cx);
1100
1101 branch
1102 })
1103 }
1104
1105 pub fn preview_edits(
1106 &self,
1107 edits: Arc<[(Range<Anchor>, String)]>,
1108 cx: &App,
1109 ) -> Task<EditPreview> {
1110 let registry = self.language_registry();
1111 let language = self.language().cloned();
1112 let old_snapshot = self.text.snapshot();
1113 let mut branch_buffer = self.text.branch();
1114 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1115 cx.background_spawn(async move {
1116 if !edits.is_empty() {
1117 if let Some(language) = language.clone() {
1118 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1119 }
1120
1121 branch_buffer.edit(edits.iter().cloned());
1122 let snapshot = branch_buffer.snapshot();
1123 syntax_snapshot.interpolate(&snapshot);
1124
1125 if let Some(language) = language {
1126 syntax_snapshot.reparse(&snapshot, registry, language);
1127 }
1128 }
1129 EditPreview {
1130 old_snapshot,
1131 applied_edits_snapshot: branch_buffer.snapshot(),
1132 syntax_snapshot,
1133 }
1134 })
1135 }
1136
1137 /// Applies all of the changes in this buffer that intersect any of the
1138 /// given `ranges` to its base buffer.
1139 ///
1140 /// If `ranges` is empty, then all changes will be applied. This buffer must
1141 /// be a branch buffer to call this method.
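///
/// For example (illustrative), passing `Vec::new()` for `ranges` merges every
/// change from the branch back into its base buffer.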
1142 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1143 let Some(base_buffer) = self.base_buffer() else {
1144 debug_panic!("not a branch buffer");
1145 return;
1146 };
1147
1148 let mut ranges = if ranges.is_empty() {
1149 &[0..usize::MAX]
1150 } else {
1151 ranges.as_slice()
1152 }
1153 .into_iter()
1154 .peekable();
1155
1156 let mut edits = Vec::new();
1157 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1158 let mut is_included = false;
1159 while let Some(range) = ranges.peek() {
1160 if range.end < edit.new.start {
1161 ranges.next().unwrap();
1162 } else {
1163 if range.start <= edit.new.end {
1164 is_included = true;
1165 }
1166 break;
1167 }
1168 }
1169
1170 if is_included {
1171 edits.push((
1172 edit.old.clone(),
1173 self.text_for_range(edit.new.clone()).collect::<String>(),
1174 ));
1175 }
1176 }
1177
1178 let operation = base_buffer.update(cx, |base_buffer, cx| {
1179 // cx.emit(BufferEvent::DiffBaseChanged);
1180 base_buffer.edit(edits, None, cx)
1181 });
1182
1183 if let Some(operation) = operation {
1184 if let Some(BufferBranchState {
1185 merged_operations, ..
1186 }) = &mut self.branch_state
1187 {
1188 merged_operations.push(operation);
1189 }
1190 }
1191 }
1192
1193 fn on_base_buffer_event(
1194 &mut self,
1195 _: Entity<Buffer>,
1196 event: &BufferEvent,
1197 cx: &mut Context<Self>,
1198 ) {
1199 let BufferEvent::Operation { operation, .. } = event else {
1200 return;
1201 };
1202 let Some(BufferBranchState {
1203 merged_operations, ..
1204 }) = &mut self.branch_state
1205 else {
1206 return;
1207 };
1208
1209 let mut operation_to_undo = None;
1210 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1211 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1212 merged_operations.remove(ix);
1213 operation_to_undo = Some(operation.timestamp);
1214 }
1215 }
1216
1217 self.apply_ops([operation.clone()], cx);
1218
1219 if let Some(timestamp) = operation_to_undo {
1220 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1221 self.undo_operations(counts, cx);
1222 }
1223 }
1224
1225 #[cfg(test)]
1226 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1227 &self.text
1228 }
1229
1230 /// Retrieve a snapshot of the buffer's raw text, without any
1231 /// language-related state like the syntax tree or diagnostics.
1232 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1233 self.text.snapshot()
1234 }
1235
1236 /// The file associated with the buffer, if any.
1237 pub fn file(&self) -> Option<&Arc<dyn File>> {
1238 self.file.as_ref()
1239 }
1240
1241 /// The version of the buffer that was last saved or reloaded from disk.
1242 pub fn saved_version(&self) -> &clock::Global {
1243 &self.saved_version
1244 }
1245
1246 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1247 pub fn saved_mtime(&self) -> Option<MTime> {
1248 self.saved_mtime
1249 }
1250
1251 /// Assign a language to the buffer.
1252 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1253 self.non_text_state_update_count += 1;
1254 self.syntax_map.lock().clear(&self.text);
1255 self.language = language;
1256 self.reparse(cx);
1257 cx.emit(BufferEvent::LanguageChanged);
1258 }
1259
1260 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1261 /// other languages if parts of the buffer are written in different languages.
1262 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1263 self.syntax_map
1264 .lock()
1265 .set_language_registry(language_registry);
1266 }
1267
1268 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1269 self.syntax_map.lock().language_registry()
1270 }
1271
1272 /// Assign the buffer a new [`Capability`].
1273 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1274 self.capability = capability;
1275 cx.emit(BufferEvent::CapabilityChanged)
1276 }
1277
1278 /// This method is called to signal that the buffer has been saved.
1279 pub fn did_save(
1280 &mut self,
1281 version: clock::Global,
1282 mtime: Option<MTime>,
1283 cx: &mut Context<Self>,
1284 ) {
1285 self.saved_version = version;
1286 self.has_unsaved_edits
1287 .set((self.saved_version().clone(), false));
1288 self.has_conflict = false;
1289 self.saved_mtime = mtime;
1290 cx.emit(BufferEvent::Saved);
1291 cx.notify();
1292 }
1293
1294 /// This method is called to signal that the buffer has been discarded.
1295 pub fn discarded(&self, cx: &mut Context<Self>) {
1296 cx.emit(BufferEvent::Discarded);
1297 cx.notify();
1298 }
1299
1300 /// Reloads the contents of the buffer from disk.
1301 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1302 let (tx, rx) = futures::channel::oneshot::channel();
1303 let prev_version = self.text.version();
1304 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
1305 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
1306 let file = this.file.as_ref()?.as_local()?;
1307 Some((file.disk_state().mtime(), file.load(cx)))
1308 })?
1309 else {
1310 return Ok(());
1311 };
1312
1313 let new_text = new_text.await?;
1314 let diff = this
1315 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
1316 .await;
1317 this.update(&mut cx, |this, cx| {
1318 if this.version() == diff.base_version {
1319 this.finalize_last_transaction();
1320 this.apply_diff(diff, cx);
1321 tx.send(this.finalize_last_transaction().cloned()).ok();
1322 this.has_conflict = false;
1323 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1324 } else {
1325 if !diff.edits.is_empty()
1326 || this
1327 .edits_since::<usize>(&diff.base_version)
1328 .next()
1329 .is_some()
1330 {
1331 this.has_conflict = true;
1332 }
1333
1334 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1335 }
1336
1337 this.reload_task.take();
1338 })
1339 }));
1340 rx
1341 }
1342
1343 /// This method is called to signal that the buffer has been reloaded.
1344 pub fn did_reload(
1345 &mut self,
1346 version: clock::Global,
1347 line_ending: LineEnding,
1348 mtime: Option<MTime>,
1349 cx: &mut Context<Self>,
1350 ) {
1351 self.saved_version = version;
1352 self.has_unsaved_edits
1353 .set((self.saved_version.clone(), false));
1354 self.text.set_line_ending(line_ending);
1355 self.saved_mtime = mtime;
1356 cx.emit(BufferEvent::Reloaded);
1357 cx.notify();
1358 }
1359
1360 /// Updates the [`File`] backing this buffer. This should be called when
1361 /// the file has changed or has been deleted.
1362 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1363 let was_dirty = self.is_dirty();
1364 let mut file_changed = false;
1365
1366 if let Some(old_file) = self.file.as_ref() {
1367 if new_file.path() != old_file.path() {
1368 file_changed = true;
1369 }
1370
1371 let old_state = old_file.disk_state();
1372 let new_state = new_file.disk_state();
1373 if old_state != new_state {
1374 file_changed = true;
1375 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1376 cx.emit(BufferEvent::ReloadNeeded)
1377 }
1378 }
1379 } else {
1380 file_changed = true;
1381 };
1382
1383 self.file = Some(new_file);
1384 if file_changed {
1385 self.non_text_state_update_count += 1;
1386 if was_dirty != self.is_dirty() {
1387 cx.emit(BufferEvent::DirtyChanged);
1388 }
1389 cx.emit(BufferEvent::FileHandleChanged);
1390 cx.notify();
1391 }
1392 }
1393
1394 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1395 Some(self.branch_state.as_ref()?.base_buffer.clone())
1396 }
1397
1398 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1399 pub fn language(&self) -> Option<&Arc<Language>> {
1400 self.language.as_ref()
1401 }
1402
1403 /// Returns the [`Language`] at the given location.
1404 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1405 let offset = position.to_offset(self);
1406 self.syntax_map
1407 .lock()
1408 .layers_for_range(offset..offset, &self.text, false)
1409 .last()
1410 .map(|info| info.language.clone())
1411 .or_else(|| self.language.clone())
1412 }
1413
1414 /// An integer version number that accounts for all updates besides
1415 /// the buffer's text itself (which is versioned via a version vector).
1416 pub fn non_text_state_update_count(&self) -> usize {
1417 self.non_text_state_update_count
1418 }
1419
1420 /// Whether the buffer is being parsed in the background.
1421 #[cfg(any(test, feature = "test-support"))]
1422 pub fn is_parsing(&self) -> bool {
1423 self.parsing_in_background
1424 }
1425
1426 /// Indicates whether the buffer contains any regions that may be
1427 /// written in a language that hasn't been loaded yet.
1428 pub fn contains_unknown_injections(&self) -> bool {
1429 self.syntax_map.lock().contains_unknown_injections()
1430 }
1431
1432 #[cfg(test)]
1433 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1434 self.sync_parse_timeout = timeout;
1435 }
1436
1437 /// Called after an edit to synchronize the buffer's main parse tree with
1438 /// the buffer's new underlying state.
1439 ///
1440 /// Locks the syntax map and interpolates the edits since the last reparse
1441 /// into the foreground syntax tree.
1442 ///
1443 /// Then takes a stable snapshot of the syntax map before unlocking it.
1444 /// The snapshot with the interpolated edits is sent to a background thread,
1445 /// where we ask Tree-sitter to perform an incremental parse.
1446 ///
1447 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1448 /// waiting on the parse to complete. As soon as it completes, we proceed
1449 /// synchronously, unless a 1ms timeout elapses.
1450 ///
1451 /// If we time out waiting on the parse, we spawn a second task waiting
1452 /// until the parse does complete and return with the interpolated tree still
1453 /// in the foreground. When the background parse completes, call back into
1454 /// the main thread and assign the foreground parse state.
1455 ///
1456 /// If the buffer or grammar changed since the start of the background parse,
1457 /// initiate an additional reparse recursively. To avoid concurrent parses
1458 /// for the same buffer, we only initiate a new parse if we are not already
1459 /// parsing in the background.
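///
/// While a background parse is outstanding, `parse_status()` reports
/// `ParseStatus::Parsing`; it returns to `ParseStatus::Idle` once the new
/// syntax snapshot has been applied in `did_finish_parsing`.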
1460 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1461 if self.parsing_in_background {
1462 return;
1463 }
1464 let language = if let Some(language) = self.language.clone() {
1465 language
1466 } else {
1467 return;
1468 };
1469
1470 let text = self.text_snapshot();
1471 let parsed_version = self.version();
1472
1473 let mut syntax_map = self.syntax_map.lock();
1474 syntax_map.interpolate(&text);
1475 let language_registry = syntax_map.language_registry();
1476 let mut syntax_snapshot = syntax_map.snapshot();
1477 drop(syntax_map);
1478
1479 let parse_task = cx.background_spawn({
1480 let language = language.clone();
1481 let language_registry = language_registry.clone();
1482 async move {
1483 syntax_snapshot.reparse(&text, language_registry, language);
1484 syntax_snapshot
1485 }
1486 });
1487
1488 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1489 match cx
1490 .background_executor()
1491 .block_with_timeout(self.sync_parse_timeout, parse_task)
1492 {
1493 Ok(new_syntax_snapshot) => {
1494 self.did_finish_parsing(new_syntax_snapshot, cx);
1495 }
1496 Err(parse_task) => {
1497 self.parsing_in_background = true;
1498 cx.spawn(move |this, mut cx| async move {
1499 let new_syntax_map = parse_task.await;
1500 this.update(&mut cx, move |this, cx| {
1501 let grammar_changed =
1502 this.language.as_ref().map_or(true, |current_language| {
1503 !Arc::ptr_eq(&language, current_language)
1504 });
1505 let language_registry_changed = new_syntax_map
1506 .contains_unknown_injections()
1507 && language_registry.map_or(false, |registry| {
1508 registry.version() != new_syntax_map.language_registry_version()
1509 });
1510 let parse_again = language_registry_changed
1511 || grammar_changed
1512 || this.version.changed_since(&parsed_version);
1513 this.did_finish_parsing(new_syntax_map, cx);
1514 this.parsing_in_background = false;
1515 if parse_again {
1516 this.reparse(cx);
1517 }
1518 })
1519 .ok();
1520 })
1521 .detach();
1522 }
1523 }
1524 }
1525
1526 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1527 self.non_text_state_update_count += 1;
1528 self.syntax_map.lock().did_parse(syntax_snapshot);
1529 self.request_autoindent(cx);
1530 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1531 cx.emit(BufferEvent::Reparsed);
1532 cx.notify();
1533 }
1534
1535 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1536 self.parse_status.1.clone()
1537 }
1538
1539 /// Assign to the buffer a set of diagnostics created by a given language server.
1540 pub fn update_diagnostics(
1541 &mut self,
1542 server_id: LanguageServerId,
1543 diagnostics: DiagnosticSet,
1544 cx: &mut Context<Self>,
1545 ) {
1546 let lamport_timestamp = self.text.lamport_clock.tick();
1547 let op = Operation::UpdateDiagnostics {
1548 server_id,
1549 diagnostics: diagnostics.iter().cloned().collect(),
1550 lamport_timestamp,
1551 };
1552 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1553 self.send_operation(op, true, cx);
1554 }
1555
1556 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1557 if let Some(indent_sizes) = self.compute_autoindents() {
1558 let indent_sizes = cx.background_spawn(indent_sizes);
1559 match cx
1560 .background_executor()
1561 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1562 {
1563 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1564 Err(indent_sizes) => {
1565 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1566 let indent_sizes = indent_sizes.await;
1567 this.update(&mut cx, |this, cx| {
1568 this.apply_autoindents(indent_sizes, cx);
1569 })
1570 .ok();
1571 }));
1572 }
1573 }
1574 } else {
1575 self.autoindent_requests.clear();
1576 }
1577 }
1578
1579 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
1580 let max_rows_between_yields = 100;
1581 let snapshot = self.snapshot();
1582 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1583 return None;
1584 }
1585
1586 let autoindent_requests = self.autoindent_requests.clone();
1587 Some(async move {
1588 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1589 for request in autoindent_requests {
1590 // Resolve each edited range to its row in the current buffer and in the
1591 // buffer before this batch of edits.
1592 let mut row_ranges = Vec::new();
1593 let mut old_to_new_rows = BTreeMap::new();
1594 let mut language_indent_sizes_by_new_row = Vec::new();
1595 for entry in &request.entries {
1596 let position = entry.range.start;
1597 let new_row = position.to_point(&snapshot).row;
1598 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1599 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1600
1601 if !entry.first_line_is_new {
1602 let old_row = position.to_point(&request.before_edit).row;
1603 old_to_new_rows.insert(old_row, new_row);
1604 }
1605 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1606 }
1607
1608 // Build a map containing the suggested indentation for each of the edited lines
1609 // with respect to the state of the buffer before these edits. This map is keyed
1610 // by the rows for these lines in the current state of the buffer.
1611 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1612 let old_edited_ranges =
1613 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1614 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1615 let mut language_indent_size = IndentSize::default();
1616 for old_edited_range in old_edited_ranges {
1617 let suggestions = request
1618 .before_edit
1619 .suggest_autoindents(old_edited_range.clone())
1620 .into_iter()
1621 .flatten();
1622 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1623 if let Some(suggestion) = suggestion {
1624 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1625
1626 // Find the indent size based on the language for this row.
1627 while let Some((row, size)) = language_indent_sizes.peek() {
1628 if *row > new_row {
1629 break;
1630 }
1631 language_indent_size = *size;
1632 language_indent_sizes.next();
1633 }
1634
1635 let suggested_indent = old_to_new_rows
1636 .get(&suggestion.basis_row)
1637 .and_then(|from_row| {
1638 Some(old_suggestions.get(from_row).copied()?.0)
1639 })
1640 .unwrap_or_else(|| {
1641 request
1642 .before_edit
1643 .indent_size_for_line(suggestion.basis_row)
1644 })
1645 .with_delta(suggestion.delta, language_indent_size);
1646 old_suggestions
1647 .insert(new_row, (suggested_indent, suggestion.within_error));
1648 }
1649 }
1650 yield_now().await;
1651 }
1652
1653 // Compute new suggestions for each line, but only include them in the result
1654 // if they differ from the old suggestion for that line.
1655 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1656 let mut language_indent_size = IndentSize::default();
1657 for (row_range, original_indent_column) in row_ranges {
1658 let new_edited_row_range = if request.is_block_mode {
1659 row_range.start..row_range.start + 1
1660 } else {
1661 row_range.clone()
1662 };
1663
1664 let suggestions = snapshot
1665 .suggest_autoindents(new_edited_row_range.clone())
1666 .into_iter()
1667 .flatten();
1668 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1669 if let Some(suggestion) = suggestion {
1670 // Find the indent size based on the language for this row.
1671 while let Some((row, size)) = language_indent_sizes.peek() {
1672 if *row > new_row {
1673 break;
1674 }
1675 language_indent_size = *size;
1676 language_indent_sizes.next();
1677 }
1678
1679 let suggested_indent = indent_sizes
1680 .get(&suggestion.basis_row)
1681 .copied()
1682 .map(|e| e.0)
1683 .unwrap_or_else(|| {
1684 snapshot.indent_size_for_line(suggestion.basis_row)
1685 })
1686 .with_delta(suggestion.delta, language_indent_size);
1687
1688 if old_suggestions.get(&new_row).map_or(
1689 true,
1690 |(old_indentation, was_within_error)| {
1691 suggested_indent != *old_indentation
1692 && (!suggestion.within_error || *was_within_error)
1693 },
1694 ) {
1695 indent_sizes.insert(
1696 new_row,
1697 (suggested_indent, request.ignore_empty_lines),
1698 );
1699 }
1700 }
1701 }
1702
1703 if let (true, Some(original_indent_column)) =
1704 (request.is_block_mode, original_indent_column)
1705 {
1706 let new_indent =
1707 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1708 *indent
1709 } else {
1710 snapshot.indent_size_for_line(row_range.start)
1711 };
1712 let delta = new_indent.len as i64 - original_indent_column as i64;
1713 if delta != 0 {
1714 for row in row_range.skip(1) {
1715 indent_sizes.entry(row).or_insert_with(|| {
1716 let mut size = snapshot.indent_size_for_line(row);
1717 if size.kind == new_indent.kind {
1718 match delta.cmp(&0) {
1719 Ordering::Greater => size.len += delta as u32,
1720 Ordering::Less => {
1721 size.len = size.len.saturating_sub(-delta as u32)
1722 }
1723 Ordering::Equal => {}
1724 }
1725 }
1726 (size, request.ignore_empty_lines)
1727 });
1728 }
1729 }
1730 }
1731
1732 yield_now().await;
1733 }
1734 }
1735
1736 indent_sizes
1737 .into_iter()
1738 .filter_map(|(row, (indent, ignore_empty_lines))| {
1739 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1740 None
1741 } else {
1742 Some((row, indent))
1743 }
1744 })
1745 .collect()
1746 })
1747 }
1748
1749 fn apply_autoindents(
1750 &mut self,
1751 indent_sizes: BTreeMap<u32, IndentSize>,
1752 cx: &mut Context<Self>,
1753 ) {
1754 self.autoindent_requests.clear();
1755
1756 let edits: Vec<_> = indent_sizes
1757 .into_iter()
1758 .filter_map(|(row, indent_size)| {
1759 let current_size = indent_size_for_line(self, row);
1760 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1761 })
1762 .collect();
1763
1764 let preserve_preview = self.preserve_preview();
1765 self.edit(edits, None, cx);
1766 if preserve_preview {
1767 self.refresh_preview();
1768 }
1769 }
1770
1771 /// Create a minimal edit that will cause the given row to be indented
1772 /// with the given size. After applying this edit, the length of the line
1773 /// will always be at least `new_size.len`.
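    ///
    /// A sketch of typical usage (illustrative only, not compiled as a doctest;
    /// the row and indent sizes are arbitrary):
    ///
    /// ```ignore
    /// // Growing a 2-space indent to 4 spaces inserts two spaces at column 0 of row 3.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
    /// ```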
1774 pub fn edit_for_indent_size_adjustment(
1775 row: u32,
1776 current_size: IndentSize,
1777 new_size: IndentSize,
1778 ) -> Option<(Range<Point>, String)> {
1779 if new_size.kind == current_size.kind {
1780            match new_size.len.cmp(&current_size.len) {
1781 Ordering::Greater => {
1782 let point = Point::new(row, 0);
1783 Some((
1784 point..point,
1785 iter::repeat(new_size.char())
1786 .take((new_size.len - current_size.len) as usize)
1787 .collect::<String>(),
1788 ))
1789 }
1790
1791 Ordering::Less => Some((
1792 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1793 String::new(),
1794 )),
1795
1796 Ordering::Equal => None,
1797 }
1798 } else {
1799 Some((
1800 Point::new(row, 0)..Point::new(row, current_size.len),
1801 iter::repeat(new_size.char())
1802 .take(new_size.len as usize)
1803 .collect::<String>(),
1804 ))
1805 }
1806 }
1807
1808 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1809 /// and the given new text.
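    ///
    /// A sketch of typical usage (illustrative only, not compiled as a doctest;
    /// `buffer`, `new_text`, and `cx` stand in for a [`Buffer`], the replacement
    /// text, and an [`App`] reference):
    ///
    /// ```ignore
    /// // Kick off the diff on the background executor, then await the task to get a
    /// // `Diff` that can later be handed to `Buffer::apply_diff`.
    /// let diff_task = buffer.diff(new_text, cx);
    /// ```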
1810 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1811 let old_text = self.as_rope().clone();
1812 let base_version = self.version();
1813 cx.background_executor()
1814 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1815 let old_text = old_text.to_string();
1816 let line_ending = LineEnding::detect(&new_text);
1817 LineEnding::normalize(&mut new_text);
1818 let edits = text_diff(&old_text, &new_text);
1819 Diff {
1820 base_version,
1821 line_ending,
1822 edits,
1823 }
1824 })
1825 }
1826
1827 /// Spawns a background task that searches the buffer for any whitespace
1828    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1829 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1830 let old_text = self.as_rope().clone();
1831 let line_ending = self.line_ending();
1832 let base_version = self.version();
1833 cx.background_spawn(async move {
1834 let ranges = trailing_whitespace_ranges(&old_text);
1835 let empty = Arc::<str>::from("");
1836 Diff {
1837 base_version,
1838 line_ending,
1839 edits: ranges
1840 .into_iter()
1841 .map(|range| (range, empty.clone()))
1842 .collect(),
1843 }
1844 })
1845 }
1846
1847 /// Ensures that the buffer ends with a single newline character, and
1848 /// no other whitespace.
1849 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1850 let len = self.len();
1851 let mut offset = len;
1852 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1853 let non_whitespace_len = chunk
1854 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1855 .len();
1856 offset -= chunk.len();
1857 offset += non_whitespace_len;
1858 if non_whitespace_len != 0 {
1859 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1860 return;
1861 }
1862 break;
1863 }
1864 }
1865 self.edit([(offset..len, "\n")], None, cx);
1866 }
1867
1868 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1869 /// calculated, then adjust the diff to account for those changes, and discard any
1870 /// parts of the diff that conflict with those changes.
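    ///
    /// A sketch of typical usage (illustrative only, not compiled as a doctest;
    /// `diff` is assumed to have been produced earlier by [`Buffer::diff`] or
    /// [`Buffer::remove_trailing_whitespace`]):
    ///
    /// ```ignore
    /// if buffer.apply_diff(diff, cx).is_some() {
    ///     // Keep the applied edits as their own undo step.
    ///     buffer.finalize_last_transaction();
    /// }
    /// ```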
1871 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1872 // Check for any edits to the buffer that have occurred since this diff
1873 // was computed.
1874 let snapshot = self.snapshot();
1875 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1876 let mut delta = 0;
1877 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1878 while let Some(edit_since) = edits_since.peek() {
1879 // If the edit occurs after a diff hunk, then it does not
1880 // affect that hunk.
1881 if edit_since.old.start > range.end {
1882 break;
1883 }
1884 // If the edit precedes the diff hunk, then adjust the hunk
1885 // to reflect the edit.
1886 else if edit_since.old.end < range.start {
1887 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1888 edits_since.next();
1889 }
1890 // If the edit intersects a diff hunk, then discard that hunk.
1891 else {
1892 return None;
1893 }
1894 }
1895
1896 let start = (range.start as i64 + delta) as usize;
1897 let end = (range.end as i64 + delta) as usize;
1898 Some((start..end, new_text))
1899 });
1900
1901 self.start_transaction();
1902 self.text.set_line_ending(diff.line_ending);
1903 self.edit(adjusted_edits, None, cx);
1904 self.end_transaction(cx)
1905 }
1906
1907 fn has_unsaved_edits(&self) -> bool {
1908 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1909
1910 if last_version == self.version {
1911 self.has_unsaved_edits
1912 .set((last_version, has_unsaved_edits));
1913 return has_unsaved_edits;
1914 }
1915
1916 let has_edits = self.has_edits_since(&self.saved_version);
1917 self.has_unsaved_edits
1918 .set((self.version.clone(), has_edits));
1919 has_edits
1920 }
1921
1922 /// Checks if the buffer has unsaved changes.
1923 pub fn is_dirty(&self) -> bool {
1924 if self.capability == Capability::ReadOnly {
1925 return false;
1926 }
1927 if self.has_conflict || self.has_unsaved_edits() {
1928 return true;
1929 }
1930 match self.file.as_ref().map(|f| f.disk_state()) {
1931 Some(DiskState::New) => !self.is_empty(),
1932 Some(DiskState::Deleted) => true,
1933 _ => false,
1934 }
1935 }
1936
1937 /// Checks if the buffer and its file have both changed since the buffer
1938 /// was last saved or reloaded.
1939 pub fn has_conflict(&self) -> bool {
1940 if self.has_conflict {
1941 return true;
1942 }
1943 let Some(file) = self.file.as_ref() else {
1944 return false;
1945 };
1946 match file.disk_state() {
1947 DiskState::New => false,
1948 DiskState::Present { mtime } => match self.saved_mtime {
1949 Some(saved_mtime) => {
1950 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1951 }
1952 None => true,
1953 },
1954 DiskState::Deleted => true,
1955 }
1956 }
1957
1958 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1959 pub fn subscribe(&mut self) -> Subscription {
1960 self.text.subscribe()
1961 }
1962
1963 /// Starts a transaction, if one is not already in-progress. When undoing or
1964 /// redoing edits, all of the edits performed within a transaction are undone
1965 /// or redone together.
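    ///
    /// A sketch of grouping several edits into one undo step (illustrative only,
    /// not compiled as a doctest; `buffer` and `cx` stand in for a [`Buffer`] and
    /// its [`Context`]):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {")], None, cx);
    /// buffer.edit([(11..11, "}")], None, cx);
    /// buffer.end_transaction(cx);
    /// ```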
1966 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1967 self.start_transaction_at(Instant::now())
1968 }
1969
1970 /// Starts a transaction, providing the current time. Subsequent transactions
1971 /// that occur within a short period of time will be grouped together. This
1972 /// is controlled by the buffer's undo grouping duration.
1973 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1974 self.transaction_depth += 1;
1975 if self.was_dirty_before_starting_transaction.is_none() {
1976 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1977 }
1978 self.text.start_transaction_at(now)
1979 }
1980
1981 /// Terminates the current transaction, if this is the outermost transaction.
1982 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
1983 self.end_transaction_at(Instant::now(), cx)
1984 }
1985
1986 /// Terminates the current transaction, providing the current time. Subsequent transactions
1987 /// that occur within a short period of time will be grouped together. This
1988 /// is controlled by the buffer's undo grouping duration.
1989 pub fn end_transaction_at(
1990 &mut self,
1991 now: Instant,
1992 cx: &mut Context<Self>,
1993 ) -> Option<TransactionId> {
1994 assert!(self.transaction_depth > 0);
1995 self.transaction_depth -= 1;
1996 let was_dirty = if self.transaction_depth == 0 {
1997 self.was_dirty_before_starting_transaction.take().unwrap()
1998 } else {
1999 false
2000 };
2001 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2002 self.did_edit(&start_version, was_dirty, cx);
2003 Some(transaction_id)
2004 } else {
2005 None
2006 }
2007 }
2008
2009 /// Manually add a transaction to the buffer's undo history.
2010 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2011 self.text.push_transaction(transaction, now);
2012 }
2013
2014 /// Prevent the last transaction from being grouped with any subsequent transactions,
2015    /// even if they occur within the buffer's undo grouping duration.
2016 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2017 self.text.finalize_last_transaction()
2018 }
2019
2020 /// Manually group all changes since a given transaction.
2021 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2022 self.text.group_until_transaction(transaction_id);
2023 }
2024
2025    /// Manually remove a transaction from the buffer's undo history.
2026 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
2027 self.text.forget_transaction(transaction_id);
2028 }
2029
2030 /// Manually merge two adjacent transactions in the buffer's undo history.
2031 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2032 self.text.merge_transactions(transaction, destination);
2033 }
2034
2035 /// Waits for the buffer to receive operations with the given timestamps.
2036 pub fn wait_for_edits(
2037 &mut self,
2038 edit_ids: impl IntoIterator<Item = clock::Lamport>,
2039 ) -> impl Future<Output = Result<()>> {
2040 self.text.wait_for_edits(edit_ids)
2041 }
2042
2043 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2044 pub fn wait_for_anchors(
2045 &mut self,
2046 anchors: impl IntoIterator<Item = Anchor>,
2047 ) -> impl 'static + Future<Output = Result<()>> {
2048 self.text.wait_for_anchors(anchors)
2049 }
2050
2051 /// Waits for the buffer to receive operations up to the given version.
2052 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
2053 self.text.wait_for_version(version)
2054 }
2055
2056    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`],
2057    /// or [`Buffer::wait_for_anchors`] to resolve with an error.
2058 pub fn give_up_waiting(&mut self) {
2059 self.text.give_up_waiting();
2060 }
2061
2062 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2063 pub fn set_active_selections(
2064 &mut self,
2065 selections: Arc<[Selection<Anchor>]>,
2066 line_mode: bool,
2067 cursor_shape: CursorShape,
2068 cx: &mut Context<Self>,
2069 ) {
2070 let lamport_timestamp = self.text.lamport_clock.tick();
2071 self.remote_selections.insert(
2072 self.text.replica_id(),
2073 SelectionSet {
2074 selections: selections.clone(),
2075 lamport_timestamp,
2076 line_mode,
2077 cursor_shape,
2078 },
2079 );
2080 self.send_operation(
2081 Operation::UpdateSelections {
2082 selections,
2083 line_mode,
2084 lamport_timestamp,
2085 cursor_shape,
2086 },
2087 true,
2088 cx,
2089 );
2090 self.non_text_state_update_count += 1;
2091 cx.notify();
2092 }
2093
2094 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2095 /// this replica.
2096 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2097 if self
2098 .remote_selections
2099 .get(&self.text.replica_id())
2100 .map_or(true, |set| !set.selections.is_empty())
2101 {
2102 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2103 }
2104 }
2105
2106 /// Replaces the buffer's entire text.
2107 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2108 where
2109 T: Into<Arc<str>>,
2110 {
2111 self.autoindent_requests.clear();
2112 self.edit([(0..self.len(), text)], None, cx)
2113 }
2114
2115 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2116 /// delete, and a string of text to insert at that location.
2117 ///
2118 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2119 /// request for the edited ranges, which will be processed when the buffer finishes
2120 /// parsing.
2121 ///
2122    /// Parsing takes place at the end of a transaction, and may happen synchronously
2123 /// or asynchronously, depending on the changes.
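    ///
    /// A sketch of the edit format (illustrative only, not compiled as a doctest;
    /// the offsets are arbitrary):
    ///
    /// ```ignore
    /// // Delete the first three bytes and insert text at offset 10, without
    /// // requesting auto-indentation.
    /// buffer.edit([(0..3, ""), (10..10, "world")], None, cx);
    /// ```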
2124 pub fn edit<I, S, T>(
2125 &mut self,
2126 edits_iter: I,
2127 autoindent_mode: Option<AutoindentMode>,
2128 cx: &mut Context<Self>,
2129 ) -> Option<clock::Lamport>
2130 where
2131 I: IntoIterator<Item = (Range<S>, T)>,
2132 S: ToOffset,
2133 T: Into<Arc<str>>,
2134 {
2135 // Skip invalid edits and coalesce contiguous ones.
2136 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2137 for (range, new_text) in edits_iter {
2138 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2139 if range.start > range.end {
2140 mem::swap(&mut range.start, &mut range.end);
2141 }
2142 let new_text = new_text.into();
2143 if !new_text.is_empty() || !range.is_empty() {
2144 if let Some((prev_range, prev_text)) = edits.last_mut() {
2145 if prev_range.end >= range.start {
2146 prev_range.end = cmp::max(prev_range.end, range.end);
2147 *prev_text = format!("{prev_text}{new_text}").into();
2148 } else {
2149 edits.push((range, new_text));
2150 }
2151 } else {
2152 edits.push((range, new_text));
2153 }
2154 }
2155 }
2156 if edits.is_empty() {
2157 return None;
2158 }
2159
2160 self.start_transaction();
2161 self.pending_autoindent.take();
2162 let autoindent_request = autoindent_mode
2163 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2164
2165 let edit_operation = self.text.edit(edits.iter().cloned());
2166 let edit_id = edit_operation.timestamp();
2167
2168 if let Some((before_edit, mode)) = autoindent_request {
2169 let mut delta = 0isize;
2170 let entries = edits
2171 .into_iter()
2172 .enumerate()
2173 .zip(&edit_operation.as_edit().unwrap().new_text)
2174 .map(|((ix, (range, _)), new_text)| {
2175 let new_text_length = new_text.len();
2176 let old_start = range.start.to_point(&before_edit);
2177 let new_start = (delta + range.start as isize) as usize;
2178 let range_len = range.end - range.start;
2179 delta += new_text_length as isize - range_len as isize;
2180
2181 // Decide what range of the insertion to auto-indent, and whether
2182 // the first line of the insertion should be considered a newly-inserted line
2183 // or an edit to an existing line.
2184 let mut range_of_insertion_to_indent = 0..new_text_length;
2185 let mut first_line_is_new = true;
2186
2187 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2188 let old_line_end = before_edit.line_len(old_start.row);
2189
2190 if old_start.column > old_line_start {
2191 first_line_is_new = false;
2192 }
2193
2194 if !new_text.contains('\n')
2195 && (old_start.column + (range_len as u32) < old_line_end
2196 || old_line_end == old_line_start)
2197 {
2198 first_line_is_new = false;
2199 }
2200
2201 // When inserting text starting with a newline, avoid auto-indenting the
2202 // previous line.
2203 if new_text.starts_with('\n') {
2204 range_of_insertion_to_indent.start += 1;
2205 first_line_is_new = true;
2206 }
2207
2208 let mut original_indent_column = None;
2209 if let AutoindentMode::Block {
2210 original_start_columns,
2211 } = &mode
2212 {
2213 original_indent_column = Some(
2214 original_start_columns.get(ix).copied().unwrap_or(0)
2215 + indent_size_for_text(
2216 new_text[range_of_insertion_to_indent.clone()].chars(),
2217 )
2218 .len,
2219 );
2220
2221 // Avoid auto-indenting the line after the edit.
2222 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2223 range_of_insertion_to_indent.end -= 1;
2224 }
2225 }
2226
2227 AutoindentRequestEntry {
2228 first_line_is_new,
2229 original_indent_column,
2230 indent_size: before_edit.language_indent_size_at(range.start, cx),
2231 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2232 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2233 }
2234 })
2235 .collect();
2236
2237 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2238 before_edit,
2239 entries,
2240 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2241 ignore_empty_lines: false,
2242 }));
2243 }
2244
2245 self.end_transaction(cx);
2246 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2247 Some(edit_id)
2248 }
2249
2250 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2251 if self.edits_since::<usize>(old_version).next().is_none() {
2252 return;
2253 }
2254
2255 self.reparse(cx);
2256
2257 cx.emit(BufferEvent::Edited);
2258 if was_dirty != self.is_dirty() {
2259 cx.emit(BufferEvent::DirtyChanged);
2260 }
2261 cx.notify();
2262 }
2263
2264 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2265 where
2266 I: IntoIterator<Item = Range<T>>,
2267 T: ToOffset + Copy,
2268 {
2269 let before_edit = self.snapshot();
2270 let entries = ranges
2271 .into_iter()
2272 .map(|range| AutoindentRequestEntry {
2273 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2274 first_line_is_new: true,
2275 indent_size: before_edit.language_indent_size_at(range.start, cx),
2276 original_indent_column: None,
2277 })
2278 .collect();
2279 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2280 before_edit,
2281 entries,
2282 is_block_mode: false,
2283 ignore_empty_lines: true,
2284 }));
2285 self.request_autoindent(cx);
2286 }
2287
2288    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2289    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
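    ///
    /// A sketch of typical usage (illustrative only, not compiled as a doctest;
    /// the position is arbitrary):
    ///
    /// ```ignore
    /// // Open an empty line near row 4, padding it with blank lines above and below.
    /// let new_line_start = buffer.insert_empty_line(Point::new(4, 2), true, true, cx);
    /// ```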
2290 pub fn insert_empty_line(
2291 &mut self,
2292 position: impl ToPoint,
2293 space_above: bool,
2294 space_below: bool,
2295 cx: &mut Context<Self>,
2296 ) -> Point {
2297 let mut position = position.to_point(self);
2298
2299 self.start_transaction();
2300
2301 self.edit(
2302 [(position..position, "\n")],
2303 Some(AutoindentMode::EachLine),
2304 cx,
2305 );
2306
2307 if position.column > 0 {
2308 position += Point::new(1, 0);
2309 }
2310
2311 if !self.is_line_blank(position.row) {
2312 self.edit(
2313 [(position..position, "\n")],
2314 Some(AutoindentMode::EachLine),
2315 cx,
2316 );
2317 }
2318
2319 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2320 self.edit(
2321 [(position..position, "\n")],
2322 Some(AutoindentMode::EachLine),
2323 cx,
2324 );
2325 position.row += 1;
2326 }
2327
2328 if space_below
2329 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2330 {
2331 self.edit(
2332 [(position..position, "\n")],
2333 Some(AutoindentMode::EachLine),
2334 cx,
2335 );
2336 }
2337
2338 self.end_transaction(cx);
2339
2340 position
2341 }
2342
2343 /// Applies the given remote operations to the buffer.
2344 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2345 self.pending_autoindent.take();
2346 let was_dirty = self.is_dirty();
2347 let old_version = self.version.clone();
2348 let mut deferred_ops = Vec::new();
2349 let buffer_ops = ops
2350 .into_iter()
2351 .filter_map(|op| match op {
2352 Operation::Buffer(op) => Some(op),
2353 _ => {
2354 if self.can_apply_op(&op) {
2355 self.apply_op(op, cx);
2356 } else {
2357 deferred_ops.push(op);
2358 }
2359 None
2360 }
2361 })
2362 .collect::<Vec<_>>();
2363 for operation in buffer_ops.iter() {
2364 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2365 }
2366 self.text.apply_ops(buffer_ops);
2367 self.deferred_ops.insert(deferred_ops);
2368 self.flush_deferred_ops(cx);
2369 self.did_edit(&old_version, was_dirty, cx);
2370 // Notify independently of whether the buffer was edited as the operations could include a
2371 // selection update.
2372 cx.notify();
2373 }
2374
2375 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2376 let mut deferred_ops = Vec::new();
2377 for op in self.deferred_ops.drain().iter().cloned() {
2378 if self.can_apply_op(&op) {
2379 self.apply_op(op, cx);
2380 } else {
2381 deferred_ops.push(op);
2382 }
2383 }
2384 self.deferred_ops.insert(deferred_ops);
2385 }
2386
2387 pub fn has_deferred_ops(&self) -> bool {
2388 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2389 }
2390
2391 fn can_apply_op(&self, operation: &Operation) -> bool {
2392 match operation {
2393 Operation::Buffer(_) => {
2394 unreachable!("buffer operations should never be applied at this layer")
2395 }
2396 Operation::UpdateDiagnostics {
2397 diagnostics: diagnostic_set,
2398 ..
2399 } => diagnostic_set.iter().all(|diagnostic| {
2400 self.text.can_resolve(&diagnostic.range.start)
2401 && self.text.can_resolve(&diagnostic.range.end)
2402 }),
2403 Operation::UpdateSelections { selections, .. } => selections
2404 .iter()
2405 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2406 Operation::UpdateCompletionTriggers { .. } => true,
2407 }
2408 }
2409
2410 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2411 match operation {
2412 Operation::Buffer(_) => {
2413 unreachable!("buffer operations should never be applied at this layer")
2414 }
2415 Operation::UpdateDiagnostics {
2416 server_id,
2417 diagnostics: diagnostic_set,
2418 lamport_timestamp,
2419 } => {
2420 let snapshot = self.snapshot();
2421 self.apply_diagnostic_update(
2422 server_id,
2423 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2424 lamport_timestamp,
2425 cx,
2426 );
2427 }
2428 Operation::UpdateSelections {
2429 selections,
2430 lamport_timestamp,
2431 line_mode,
2432 cursor_shape,
2433 } => {
2434 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2435 if set.lamport_timestamp > lamport_timestamp {
2436 return;
2437 }
2438 }
2439
2440 self.remote_selections.insert(
2441 lamport_timestamp.replica_id,
2442 SelectionSet {
2443 selections,
2444 lamport_timestamp,
2445 line_mode,
2446 cursor_shape,
2447 },
2448 );
2449 self.text.lamport_clock.observe(lamport_timestamp);
2450 self.non_text_state_update_count += 1;
2451 }
2452 Operation::UpdateCompletionTriggers {
2453 triggers,
2454 lamport_timestamp,
2455 server_id,
2456 } => {
2457 if triggers.is_empty() {
2458 self.completion_triggers_per_language_server
2459 .remove(&server_id);
2460 self.completion_triggers = self
2461 .completion_triggers_per_language_server
2462 .values()
2463 .flat_map(|triggers| triggers.into_iter().cloned())
2464 .collect();
2465 } else {
2466 self.completion_triggers_per_language_server
2467 .insert(server_id, triggers.iter().cloned().collect());
2468 self.completion_triggers.extend(triggers);
2469 }
2470 self.text.lamport_clock.observe(lamport_timestamp);
2471 }
2472 }
2473 }
2474
2475 fn apply_diagnostic_update(
2476 &mut self,
2477 server_id: LanguageServerId,
2478 diagnostics: DiagnosticSet,
2479 lamport_timestamp: clock::Lamport,
2480 cx: &mut Context<Self>,
2481 ) {
2482 if lamport_timestamp > self.diagnostics_timestamp {
2483 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2484 if diagnostics.is_empty() {
2485 if let Ok(ix) = ix {
2486 self.diagnostics.remove(ix);
2487 }
2488 } else {
2489 match ix {
2490 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2491 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2492 };
2493 }
2494 self.diagnostics_timestamp = lamport_timestamp;
2495 self.non_text_state_update_count += 1;
2496 self.text.lamport_clock.observe(lamport_timestamp);
2497 cx.notify();
2498 cx.emit(BufferEvent::DiagnosticsUpdated);
2499 }
2500 }
2501
2502 fn send_operation(&self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2503 cx.emit(BufferEvent::Operation {
2504 operation,
2505 is_local,
2506 });
2507 }
2508
2509 /// Removes the selections for a given peer.
2510 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2511 self.remote_selections.remove(&replica_id);
2512 cx.notify();
2513 }
2514
2515 /// Undoes the most recent transaction.
2516 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2517 let was_dirty = self.is_dirty();
2518 let old_version = self.version.clone();
2519
2520 if let Some((transaction_id, operation)) = self.text.undo() {
2521 self.send_operation(Operation::Buffer(operation), true, cx);
2522 self.did_edit(&old_version, was_dirty, cx);
2523 Some(transaction_id)
2524 } else {
2525 None
2526 }
2527 }
2528
2529 /// Manually undoes a specific transaction in the buffer's undo history.
2530 pub fn undo_transaction(
2531 &mut self,
2532 transaction_id: TransactionId,
2533 cx: &mut Context<Self>,
2534 ) -> bool {
2535 let was_dirty = self.is_dirty();
2536 let old_version = self.version.clone();
2537 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2538 self.send_operation(Operation::Buffer(operation), true, cx);
2539 self.did_edit(&old_version, was_dirty, cx);
2540 true
2541 } else {
2542 false
2543 }
2544 }
2545
2546 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2547 pub fn undo_to_transaction(
2548 &mut self,
2549 transaction_id: TransactionId,
2550 cx: &mut Context<Self>,
2551 ) -> bool {
2552 let was_dirty = self.is_dirty();
2553 let old_version = self.version.clone();
2554
2555 let operations = self.text.undo_to_transaction(transaction_id);
2556 let undone = !operations.is_empty();
2557 for operation in operations {
2558 self.send_operation(Operation::Buffer(operation), true, cx);
2559 }
2560 if undone {
2561 self.did_edit(&old_version, was_dirty, cx)
2562 }
2563 undone
2564 }
2565
2566 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2567 let was_dirty = self.is_dirty();
2568 let operation = self.text.undo_operations(counts);
2569 let old_version = self.version.clone();
2570 self.send_operation(Operation::Buffer(operation), true, cx);
2571 self.did_edit(&old_version, was_dirty, cx);
2572 }
2573
2574    /// Redoes the most recent transaction.
2575 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2576 let was_dirty = self.is_dirty();
2577 let old_version = self.version.clone();
2578
2579 if let Some((transaction_id, operation)) = self.text.redo() {
2580 self.send_operation(Operation::Buffer(operation), true, cx);
2581 self.did_edit(&old_version, was_dirty, cx);
2582 Some(transaction_id)
2583 } else {
2584 None
2585 }
2586 }
2587
2588    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2589 pub fn redo_to_transaction(
2590 &mut self,
2591 transaction_id: TransactionId,
2592 cx: &mut Context<Self>,
2593 ) -> bool {
2594 let was_dirty = self.is_dirty();
2595 let old_version = self.version.clone();
2596
2597 let operations = self.text.redo_to_transaction(transaction_id);
2598 let redone = !operations.is_empty();
2599 for operation in operations {
2600 self.send_operation(Operation::Buffer(operation), true, cx);
2601 }
2602 if redone {
2603 self.did_edit(&old_version, was_dirty, cx)
2604 }
2605 redone
2606 }
2607
2608 /// Override current completion triggers with the user-provided completion triggers.
2609 pub fn set_completion_triggers(
2610 &mut self,
2611 server_id: LanguageServerId,
2612 triggers: BTreeSet<String>,
2613 cx: &mut Context<Self>,
2614 ) {
2615 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2616 if triggers.is_empty() {
2617 self.completion_triggers_per_language_server
2618 .remove(&server_id);
2619 self.completion_triggers = self
2620 .completion_triggers_per_language_server
2621 .values()
2622 .flat_map(|triggers| triggers.into_iter().cloned())
2623 .collect();
2624 } else {
2625 self.completion_triggers_per_language_server
2626 .insert(server_id, triggers.clone());
2627 self.completion_triggers.extend(triggers.iter().cloned());
2628 }
2629 self.send_operation(
2630 Operation::UpdateCompletionTriggers {
2631 triggers: triggers.iter().cloned().collect(),
2632 lamport_timestamp: self.completion_triggers_timestamp,
2633 server_id,
2634 },
2635 true,
2636 cx,
2637 );
2638 cx.notify();
2639 }
2640
2641 /// Returns a list of strings which trigger a completion menu for this language.
2642    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2643 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2644 &self.completion_triggers
2645 }
2646
2647 /// Call this directly after performing edits to prevent the preview tab
2648 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2649 /// to return false until there are additional edits.
2650 pub fn refresh_preview(&mut self) {
2651 self.preview_version = self.version.clone();
2652 }
2653
2654 /// Whether we should preserve the preview status of a tab containing this buffer.
2655 pub fn preserve_preview(&self) -> bool {
2656 !self.has_edits_since(&self.preview_version)
2657 }
2658}
2659
2660#[doc(hidden)]
2661#[cfg(any(test, feature = "test-support"))]
2662impl Buffer {
2663 pub fn edit_via_marked_text(
2664 &mut self,
2665 marked_string: &str,
2666 autoindent_mode: Option<AutoindentMode>,
2667 cx: &mut Context<Self>,
2668 ) {
2669 let edits = self.edits_for_marked_text(marked_string);
2670 self.edit(edits, autoindent_mode, cx);
2671 }
2672
2673 pub fn set_group_interval(&mut self, group_interval: Duration) {
2674 self.text.set_group_interval(group_interval);
2675 }
2676
2677 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2678 where
2679 T: rand::Rng,
2680 {
2681 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2682 let mut last_end = None;
2683 for _ in 0..old_range_count {
2684 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2685 break;
2686 }
2687
2688 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2689 let mut range = self.random_byte_range(new_start, rng);
2690 if rng.gen_bool(0.2) {
2691 mem::swap(&mut range.start, &mut range.end);
2692 }
2693 last_end = Some(range.end);
2694
2695 let new_text_len = rng.gen_range(0..10);
2696 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2697 new_text = new_text.to_uppercase();
2698
2699 edits.push((range, new_text));
2700 }
2701 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2702 self.edit(edits, None, cx);
2703 }
2704
2705 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2706 let was_dirty = self.is_dirty();
2707 let old_version = self.version.clone();
2708
2709 let ops = self.text.randomly_undo_redo(rng);
2710 if !ops.is_empty() {
2711 for op in ops {
2712 self.send_operation(Operation::Buffer(op), true, cx);
2713 self.did_edit(&old_version, was_dirty, cx);
2714 }
2715 }
2716 }
2717}
2718
2719impl EventEmitter<BufferEvent> for Buffer {}
2720
2721impl Deref for Buffer {
2722 type Target = TextBuffer;
2723
2724 fn deref(&self) -> &Self::Target {
2725 &self.text
2726 }
2727}
2728
2729impl BufferSnapshot {
2730 /// Returns [`IndentSize`] for a given line that respects user settings and
2731 /// language preferences.
2732 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2733 indent_size_for_line(self, row)
2734 }
2735
2736 /// Returns [`IndentSize`] for a given position that respects user settings
2737 /// and language preferences.
2738 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2739 let settings = language_settings(
2740 self.language_at(position).map(|l| l.name()),
2741 self.file(),
2742 cx,
2743 );
2744 if settings.hard_tabs {
2745 IndentSize::tab()
2746 } else {
2747 IndentSize::spaces(settings.tab_size.get())
2748 }
2749 }
2750
2751 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2752 /// is passed in as `single_indent_size`.
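    ///
    /// A sketch of typical usage (illustrative only, not compiled as a doctest;
    /// `snapshot` stands in for a [`BufferSnapshot`] and the rows are arbitrary):
    ///
    /// ```ignore
    /// // Suggested indentation for rows 10..20, using a 4-space indent unit.
    /// let indents = snapshot.suggested_indents(10..20, IndentSize::spaces(4));
    /// ```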
2753 pub fn suggested_indents(
2754 &self,
2755 rows: impl Iterator<Item = u32>,
2756 single_indent_size: IndentSize,
2757 ) -> BTreeMap<u32, IndentSize> {
2758 let mut result = BTreeMap::new();
2759
2760 for row_range in contiguous_ranges(rows, 10) {
2761 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2762 Some(suggestions) => suggestions,
2763 _ => break,
2764 };
2765
2766 for (row, suggestion) in row_range.zip(suggestions) {
2767 let indent_size = if let Some(suggestion) = suggestion {
2768 result
2769 .get(&suggestion.basis_row)
2770 .copied()
2771 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2772 .with_delta(suggestion.delta, single_indent_size)
2773 } else {
2774 self.indent_size_for_line(row)
2775 };
2776
2777 result.insert(row, indent_size);
2778 }
2779 }
2780
2781 result
2782 }
2783
2784 fn suggest_autoindents(
2785 &self,
2786 row_range: Range<u32>,
2787 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2788 let config = &self.language.as_ref()?.config;
2789 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2790
2791 // Find the suggested indentation ranges based on the syntax tree.
2792 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2793 let end = Point::new(row_range.end, 0);
2794 let range = (start..end).to_offset(&self.text);
2795 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2796 Some(&grammar.indents_config.as_ref()?.query)
2797 });
2798 let indent_configs = matches
2799 .grammars()
2800 .iter()
2801 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2802 .collect::<Vec<_>>();
2803
2804 let mut indent_ranges = Vec::<Range<Point>>::new();
2805 let mut outdent_positions = Vec::<Point>::new();
2806 while let Some(mat) = matches.peek() {
2807 let mut start: Option<Point> = None;
2808 let mut end: Option<Point> = None;
2809
2810 let config = &indent_configs[mat.grammar_index];
2811 for capture in mat.captures {
2812 if capture.index == config.indent_capture_ix {
2813 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2814 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2815 } else if Some(capture.index) == config.start_capture_ix {
2816 start = Some(Point::from_ts_point(capture.node.end_position()));
2817 } else if Some(capture.index) == config.end_capture_ix {
2818 end = Some(Point::from_ts_point(capture.node.start_position()));
2819 } else if Some(capture.index) == config.outdent_capture_ix {
2820 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2821 }
2822 }
2823
2824 matches.advance();
2825 if let Some((start, end)) = start.zip(end) {
2826 if start.row == end.row {
2827 continue;
2828 }
2829
2830 let range = start..end;
2831 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2832 Err(ix) => indent_ranges.insert(ix, range),
2833 Ok(ix) => {
2834 let prev_range = &mut indent_ranges[ix];
2835 prev_range.end = prev_range.end.max(range.end);
2836 }
2837 }
2838 }
2839 }
2840
2841 let mut error_ranges = Vec::<Range<Point>>::new();
2842 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2843 grammar.error_query.as_ref()
2844 });
2845 while let Some(mat) = matches.peek() {
2846 let node = mat.captures[0].node;
2847 let start = Point::from_ts_point(node.start_position());
2848 let end = Point::from_ts_point(node.end_position());
2849 let range = start..end;
2850 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2851 Ok(ix) | Err(ix) => ix,
2852 };
2853 let mut end_ix = ix;
2854 while let Some(existing_range) = error_ranges.get(end_ix) {
2855 if existing_range.end < end {
2856 end_ix += 1;
2857 } else {
2858 break;
2859 }
2860 }
2861 error_ranges.splice(ix..end_ix, [range]);
2862 matches.advance();
2863 }
2864
2865 outdent_positions.sort();
2866 for outdent_position in outdent_positions {
2867 // find the innermost indent range containing this outdent_position
2868 // set its end to the outdent position
2869 if let Some(range_to_truncate) = indent_ranges
2870 .iter_mut()
2871 .filter(|indent_range| indent_range.contains(&outdent_position))
2872 .last()
2873 {
2874 range_to_truncate.end = outdent_position;
2875 }
2876 }
2877
2878        // Find the suggested indentation increases and decreases based on regexes.
2879 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2880 self.for_each_line(
2881 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2882 ..Point::new(row_range.end, 0),
2883 |row, line| {
2884 if config
2885 .decrease_indent_pattern
2886 .as_ref()
2887 .map_or(false, |regex| regex.is_match(line))
2888 {
2889 indent_change_rows.push((row, Ordering::Less));
2890 }
2891 if config
2892 .increase_indent_pattern
2893 .as_ref()
2894 .map_or(false, |regex| regex.is_match(line))
2895 {
2896 indent_change_rows.push((row + 1, Ordering::Greater));
2897 }
2898 },
2899 );
2900
2901 let mut indent_changes = indent_change_rows.into_iter().peekable();
2902 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2903 prev_non_blank_row.unwrap_or(0)
2904 } else {
2905 row_range.start.saturating_sub(1)
2906 };
2907 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2908 Some(row_range.map(move |row| {
2909 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2910
2911 let mut indent_from_prev_row = false;
2912 let mut outdent_from_prev_row = false;
2913 let mut outdent_to_row = u32::MAX;
2914 let mut from_regex = false;
2915
2916 while let Some((indent_row, delta)) = indent_changes.peek() {
2917 match indent_row.cmp(&row) {
2918 Ordering::Equal => match delta {
2919 Ordering::Less => {
2920 from_regex = true;
2921 outdent_from_prev_row = true
2922 }
2923 Ordering::Greater => {
2924 indent_from_prev_row = true;
2925 from_regex = true
2926 }
2927 _ => {}
2928 },
2929
2930 Ordering::Greater => break,
2931 Ordering::Less => {}
2932 }
2933
2934 indent_changes.next();
2935 }
2936
2937 for range in &indent_ranges {
2938 if range.start.row >= row {
2939 break;
2940 }
2941 if range.start.row == prev_row && range.end > row_start {
2942 indent_from_prev_row = true;
2943 }
2944 if range.end > prev_row_start && range.end <= row_start {
2945 outdent_to_row = outdent_to_row.min(range.start.row);
2946 }
2947 }
2948
2949 let within_error = error_ranges
2950 .iter()
2951 .any(|e| e.start.row < row && e.end > row_start);
2952
2953 let suggestion = if outdent_to_row == prev_row
2954 || (outdent_from_prev_row && indent_from_prev_row)
2955 {
2956 Some(IndentSuggestion {
2957 basis_row: prev_row,
2958 delta: Ordering::Equal,
2959 within_error: within_error && !from_regex,
2960 })
2961 } else if indent_from_prev_row {
2962 Some(IndentSuggestion {
2963 basis_row: prev_row,
2964 delta: Ordering::Greater,
2965 within_error: within_error && !from_regex,
2966 })
2967 } else if outdent_to_row < prev_row {
2968 Some(IndentSuggestion {
2969 basis_row: outdent_to_row,
2970 delta: Ordering::Equal,
2971 within_error: within_error && !from_regex,
2972 })
2973 } else if outdent_from_prev_row {
2974 Some(IndentSuggestion {
2975 basis_row: prev_row,
2976 delta: Ordering::Less,
2977 within_error: within_error && !from_regex,
2978 })
2979 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2980 {
2981 Some(IndentSuggestion {
2982 basis_row: prev_row,
2983 delta: Ordering::Equal,
2984 within_error: within_error && !from_regex,
2985 })
2986 } else {
2987 None
2988 };
2989
2990 prev_row = row;
2991 prev_row_start = row_start;
2992 suggestion
2993 }))
2994 }
2995
2996 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2997 while row > 0 {
2998 row -= 1;
2999 if !self.is_line_blank(row) {
3000 return Some(row);
3001 }
3002 }
3003 None
3004 }
3005
3006 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
3007 let captures = self.syntax.captures(range, &self.text, |grammar| {
3008 grammar.highlights_query.as_ref()
3009 });
3010 let highlight_maps = captures
3011 .grammars()
3012 .iter()
3013 .map(|grammar| grammar.highlight_map())
3014 .collect();
3015 (captures, highlight_maps)
3016 }
3017
3018 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3019 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3020 /// returned in chunks where each chunk has a single syntax highlighting style and
3021 /// diagnostic status.
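    ///
    /// A sketch of typical usage (illustrative only, not compiled as a doctest;
    /// `snapshot` stands in for a [`BufferSnapshot`] and the range is arbitrary):
    ///
    /// ```ignore
    /// // Iterate over syntax-aware chunks covering the first 100 bytes.
    /// for chunk in snapshot.chunks(0..100, true) {
    ///     // `chunk.syntax_highlight_id` is only populated when `language_aware` is true.
    ///     print!("{}", chunk.text);
    /// }
    /// ```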
3022 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
3023 let range = range.start.to_offset(self)..range.end.to_offset(self);
3024
3025 let mut syntax = None;
3026 if language_aware {
3027 syntax = Some(self.get_highlights(range.clone()));
3028 }
3029 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3030 let diagnostics = language_aware;
3031 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3032 }
3033
3034 pub fn highlighted_text_for_range<T: ToOffset>(
3035 &self,
3036 range: Range<T>,
3037 override_style: Option<HighlightStyle>,
3038 syntax_theme: &SyntaxTheme,
3039 ) -> HighlightedText {
3040 HighlightedText::from_buffer_range(
3041 range,
3042 &self.text,
3043 &self.syntax,
3044 override_style,
3045 syntax_theme,
3046 )
3047 }
3048
3049 /// Invokes the given callback for each line of text in the given range of the buffer.
3050 /// Uses callback to avoid allocating a string for each line.
3051 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3052 let mut line = String::new();
3053 let mut row = range.start.row;
3054 for chunk in self
3055 .as_rope()
3056 .chunks_in_range(range.to_offset(self))
3057 .chain(["\n"])
3058 {
3059 for (newline_ix, text) in chunk.split('\n').enumerate() {
3060 if newline_ix > 0 {
3061 callback(row, &line);
3062 row += 1;
3063 line.clear();
3064 }
3065 line.push_str(text);
3066 }
3067 }
3068 }
3069
3070 /// Iterates over every [`SyntaxLayer`] in the buffer.
3071 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
3072 self.syntax
3073 .layers_for_range(0..self.len(), &self.text, true)
3074 }
3075
3076 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
3077 let offset = position.to_offset(self);
3078 self.syntax
3079 .layers_for_range(offset..offset, &self.text, false)
3080 .filter(|l| l.node().end_byte() > offset)
3081 .last()
3082 }
3083
3084 /// Returns the main [`Language`].
3085 pub fn language(&self) -> Option<&Arc<Language>> {
3086 self.language.as_ref()
3087 }
3088
3089 /// Returns the [`Language`] at the given location.
3090 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3091 self.syntax_layer_at(position)
3092 .map(|info| info.language)
3093 .or(self.language.as_ref())
3094 }
3095
3096 /// Returns the settings for the language at the given location.
3097 pub fn settings_at<'a, D: ToOffset>(
3098 &'a self,
3099 position: D,
3100 cx: &'a App,
3101 ) -> Cow<'a, LanguageSettings> {
3102 language_settings(
3103 self.language_at(position).map(|l| l.name()),
3104 self.file.as_ref(),
3105 cx,
3106 )
3107 }
3108
3109 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3110 CharClassifier::new(self.language_scope_at(point))
3111 }
3112
3113 /// Returns the [`LanguageScope`] at the given location.
3114 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3115 let offset = position.to_offset(self);
3116 let mut scope = None;
3117 let mut smallest_range: Option<Range<usize>> = None;
3118
3119 // Use the layer that has the smallest node intersecting the given point.
3120 for layer in self
3121 .syntax
3122 .layers_for_range(offset..offset, &self.text, false)
3123 {
3124 let mut cursor = layer.node().walk();
3125
3126 let mut range = None;
3127 loop {
3128 let child_range = cursor.node().byte_range();
3129 if !child_range.to_inclusive().contains(&offset) {
3130 break;
3131 }
3132
3133 range = Some(child_range);
3134 if cursor.goto_first_child_for_byte(offset).is_none() {
3135 break;
3136 }
3137 }
3138
3139 if let Some(range) = range {
3140 if smallest_range
3141 .as_ref()
3142 .map_or(true, |smallest_range| range.len() < smallest_range.len())
3143 {
3144 smallest_range = Some(range);
3145 scope = Some(LanguageScope {
3146 language: layer.language.clone(),
3147 override_id: layer.override_id(offset, &self.text),
3148 });
3149 }
3150 }
3151 }
3152
3153 scope.or_else(|| {
3154 self.language.clone().map(|language| LanguageScope {
3155 language,
3156 override_id: None,
3157 })
3158 })
3159 }
3160
3161 /// Returns a tuple of the range and character kind of the word
3162 /// surrounding the given position.
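    ///
    /// A sketch of typical usage (illustrative only, not compiled as a doctest;
    /// `snapshot` stands in for a [`BufferSnapshot`] and the offset is arbitrary):
    ///
    /// ```ignore
    /// let (word_range, kind) = snapshot.surrounding_word(42);
    /// // `word_range` spans the run of same-kind characters around byte offset 42.
    /// ```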
3163 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3164 let mut start = start.to_offset(self);
3165 let mut end = start;
3166 let mut next_chars = self.chars_at(start).peekable();
3167 let mut prev_chars = self.reversed_chars_at(start).peekable();
3168
3169 let classifier = self.char_classifier_at(start);
3170 let word_kind = cmp::max(
3171 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3172 next_chars.peek().copied().map(|c| classifier.kind(c)),
3173 );
3174
3175 for ch in prev_chars {
3176 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3177 start -= ch.len_utf8();
3178 } else {
3179 break;
3180 }
3181 }
3182
3183 for ch in next_chars {
3184 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3185 end += ch.len_utf8();
3186 } else {
3187 break;
3188 }
3189 }
3190
3191 (start..end, word_kind)
3192 }
3193
3194 /// Returns the closest syntax node enclosing the given range.
3195 pub fn syntax_ancestor<'a, T: ToOffset>(
3196 &'a self,
3197 range: Range<T>,
3198 ) -> Option<tree_sitter::Node<'a>> {
3199 let range = range.start.to_offset(self)..range.end.to_offset(self);
3200 let mut result: Option<tree_sitter::Node<'a>> = None;
3201 'outer: for layer in self
3202 .syntax
3203 .layers_for_range(range.clone(), &self.text, true)
3204 {
3205 let mut cursor = layer.node().walk();
3206
3207 // Descend to the first leaf that touches the start of the range,
3208 // and if the range is non-empty, extends beyond the start.
3209 while cursor.goto_first_child_for_byte(range.start).is_some() {
3210 if !range.is_empty() && cursor.node().end_byte() == range.start {
3211 cursor.goto_next_sibling();
3212 }
3213 }
3214
3215 // Ascend to the smallest ancestor that strictly contains the range.
3216 loop {
3217 let node_range = cursor.node().byte_range();
3218 if node_range.start <= range.start
3219 && node_range.end >= range.end
3220 && node_range.len() > range.len()
3221 {
3222 break;
3223 }
3224 if !cursor.goto_parent() {
3225 continue 'outer;
3226 }
3227 }
3228
3229 let left_node = cursor.node();
3230 let mut layer_result = left_node;
3231
3232 // For an empty range, try to find another node immediately to the right of the range.
3233 if left_node.end_byte() == range.start {
3234 let mut right_node = None;
3235 while !cursor.goto_next_sibling() {
3236 if !cursor.goto_parent() {
3237 break;
3238 }
3239 }
3240
3241 while cursor.node().start_byte() == range.start {
3242 right_node = Some(cursor.node());
3243 if !cursor.goto_first_child() {
3244 break;
3245 }
3246 }
3247
3248 // If there is a candidate node on both sides of the (empty) range, then
3249 // decide between the two by favoring a named node over an anonymous token.
3250 // If both nodes are the same in that regard, favor the right one.
3251 if let Some(right_node) = right_node {
3252 if right_node.is_named() || !left_node.is_named() {
3253 layer_result = right_node;
3254 }
3255 }
3256 }
3257
3258 if let Some(previous_result) = &result {
3259 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3260 continue;
3261 }
3262 }
3263 result = Some(layer_result);
3264 }
3265
3266 result
3267 }
3268
3269 /// Returns the outline for the buffer.
3270 ///
3271 /// This method allows passing an optional [`SyntaxTheme`] to
3272 /// syntax-highlight the returned symbols.
3273 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3274 self.outline_items_containing(0..self.len(), true, theme)
3275 .map(Outline::new)
3276 }
3277
3278 /// Returns all the symbols that contain the given position.
3279 ///
3280 /// This method allows passing an optional [`SyntaxTheme`] to
3281 /// syntax-highlight the returned symbols.
3282 pub fn symbols_containing<T: ToOffset>(
3283 &self,
3284 position: T,
3285 theme: Option<&SyntaxTheme>,
3286 ) -> Option<Vec<OutlineItem<Anchor>>> {
3287 let position = position.to_offset(self);
3288 let mut items = self.outline_items_containing(
3289 position.saturating_sub(1)..self.len().min(position + 1),
3290 false,
3291 theme,
3292 )?;
3293 let mut prev_depth = None;
3294 items.retain(|item| {
3295 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3296 prev_depth = Some(item.depth);
3297 result
3298 });
3299 Some(items)
3300 }
3301
3302 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3303 let range = range.to_offset(self);
3304 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3305 grammar.outline_config.as_ref().map(|c| &c.query)
3306 });
3307 let configs = matches
3308 .grammars()
3309 .iter()
3310 .map(|g| g.outline_config.as_ref().unwrap())
3311 .collect::<Vec<_>>();
3312
3313 while let Some(mat) = matches.peek() {
3314 let config = &configs[mat.grammar_index];
3315 let containing_item_node = maybe!({
3316 let item_node = mat.captures.iter().find_map(|cap| {
3317 if cap.index == config.item_capture_ix {
3318 Some(cap.node)
3319 } else {
3320 None
3321 }
3322 })?;
3323
3324 let item_byte_range = item_node.byte_range();
3325 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3326 None
3327 } else {
3328 Some(item_node)
3329 }
3330 });
3331
3332 if let Some(item_node) = containing_item_node {
3333 return Some(
3334 Point::from_ts_point(item_node.start_position())
3335 ..Point::from_ts_point(item_node.end_position()),
3336 );
3337 }
3338
3339 matches.advance();
3340 }
3341 None
3342 }
3343
3344 pub fn outline_items_containing<T: ToOffset>(
3345 &self,
3346 range: Range<T>,
3347 include_extra_context: bool,
3348 theme: Option<&SyntaxTheme>,
3349 ) -> Option<Vec<OutlineItem<Anchor>>> {
3350 let range = range.to_offset(self);
3351 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3352 grammar.outline_config.as_ref().map(|c| &c.query)
3353 });
3354 let configs = matches
3355 .grammars()
3356 .iter()
3357 .map(|g| g.outline_config.as_ref().unwrap())
3358 .collect::<Vec<_>>();
3359
3360 let mut items = Vec::new();
3361 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3362 while let Some(mat) = matches.peek() {
3363 let config = &configs[mat.grammar_index];
3364 if let Some(item) =
3365 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3366 {
3367 items.push(item);
3368 } else if let Some(capture) = mat
3369 .captures
3370 .iter()
3371 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3372 {
3373 let capture_range = capture.node.start_position()..capture.node.end_position();
3374 let mut capture_row_range =
3375 capture_range.start.row as u32..capture_range.end.row as u32;
3376 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3377 {
3378 capture_row_range.end -= 1;
3379 }
3380 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3381 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3382 last_row_range.end = capture_row_range.end;
3383 } else {
3384 annotation_row_ranges.push(capture_row_range);
3385 }
3386 } else {
3387 annotation_row_ranges.push(capture_row_range);
3388 }
3389 }
3390 matches.advance();
3391 }
3392
3393 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3394
3395 // Assign depths based on containment relationships and convert to anchors.
3396 let mut item_ends_stack = Vec::<Point>::new();
3397 let mut anchor_items = Vec::new();
3398 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3399 for item in items {
3400 while let Some(last_end) = item_ends_stack.last().copied() {
3401 if last_end < item.range.end {
3402 item_ends_stack.pop();
3403 } else {
3404 break;
3405 }
3406 }
3407
3408 let mut annotation_row_range = None;
3409 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3410 let row_preceding_item = item.range.start.row.saturating_sub(1);
3411 if next_annotation_row_range.end < row_preceding_item {
3412 annotation_row_ranges.next();
3413 } else {
3414 if next_annotation_row_range.end == row_preceding_item {
3415 annotation_row_range = Some(next_annotation_row_range.clone());
3416 annotation_row_ranges.next();
3417 }
3418 break;
3419 }
3420 }
3421
3422 anchor_items.push(OutlineItem {
3423 depth: item_ends_stack.len(),
3424 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3425 text: item.text,
3426 highlight_ranges: item.highlight_ranges,
3427 name_ranges: item.name_ranges,
3428 body_range: item.body_range.map(|body_range| {
3429 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3430 }),
3431 annotation_range: annotation_row_range.map(|annotation_range| {
3432 self.anchor_after(Point::new(annotation_range.start, 0))
3433 ..self.anchor_before(Point::new(
3434 annotation_range.end,
3435 self.line_len(annotation_range.end),
3436 ))
3437 }),
3438 });
3439 item_ends_stack.push(item.range.end);
3440 }
3441
3442 Some(anchor_items)
3443 }
3444
3445 fn next_outline_item(
3446 &self,
3447 config: &OutlineConfig,
3448 mat: &SyntaxMapMatch,
3449 range: &Range<usize>,
3450 include_extra_context: bool,
3451 theme: Option<&SyntaxTheme>,
3452 ) -> Option<OutlineItem<Point>> {
3453 let item_node = mat.captures.iter().find_map(|cap| {
3454 if cap.index == config.item_capture_ix {
3455 Some(cap.node)
3456 } else {
3457 None
3458 }
3459 })?;
3460
3461 let item_byte_range = item_node.byte_range();
3462 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3463 return None;
3464 }
3465 let item_point_range = Point::from_ts_point(item_node.start_position())
3466 ..Point::from_ts_point(item_node.end_position());
3467
3468 let mut open_point = None;
3469 let mut close_point = None;
3470 let mut buffer_ranges = Vec::new();
3471 for capture in mat.captures {
3472 let node_is_name;
3473 if capture.index == config.name_capture_ix {
3474 node_is_name = true;
3475 } else if Some(capture.index) == config.context_capture_ix
3476 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3477 {
3478 node_is_name = false;
3479 } else {
3480 if Some(capture.index) == config.open_capture_ix {
3481 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3482 } else if Some(capture.index) == config.close_capture_ix {
3483 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3484 }
3485
3486 continue;
3487 }
3488
3489 let mut range = capture.node.start_byte()..capture.node.end_byte();
3490 let start = capture.node.start_position();
3491 if capture.node.end_position().row > start.row {
3492 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3493 }
3494
3495 if !range.is_empty() {
3496 buffer_ranges.push((range, node_is_name));
3497 }
3498 }
3499 if buffer_ranges.is_empty() {
3500 return None;
3501 }
3502 let mut text = String::new();
3503 let mut highlight_ranges = Vec::new();
3504 let mut name_ranges = Vec::new();
3505 let mut chunks = self.chunks(
3506 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3507 true,
3508 );
3509 let mut last_buffer_range_end = 0;
3510
3511 for (buffer_range, is_name) in buffer_ranges {
3512 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3513 if space_added {
3514 text.push(' ');
3515 }
3516 let before_append_len = text.len();
3517 let mut offset = buffer_range.start;
3518 chunks.seek(buffer_range.clone());
3519 for mut chunk in chunks.by_ref() {
3520 if chunk.text.len() > buffer_range.end - offset {
3521 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3522 offset = buffer_range.end;
3523 } else {
3524 offset += chunk.text.len();
3525 }
3526 let style = chunk
3527 .syntax_highlight_id
3528 .zip(theme)
3529 .and_then(|(highlight, theme)| highlight.style(theme));
3530 if let Some(style) = style {
3531 let start = text.len();
3532 let end = start + chunk.text.len();
3533 highlight_ranges.push((start..end, style));
3534 }
3535 text.push_str(chunk.text);
3536 if offset >= buffer_range.end {
3537 break;
3538 }
3539 }
3540 if is_name {
3541 let after_append_len = text.len();
3542 let start = if space_added && !name_ranges.is_empty() {
3543 before_append_len - 1
3544 } else {
3545 before_append_len
3546 };
3547 name_ranges.push(start..after_append_len);
3548 }
3549 last_buffer_range_end = buffer_range.end;
3550 }
3551
3552 Some(OutlineItem {
3553 depth: 0, // We'll calculate the depth later
3554 range: item_point_range,
3555 text,
3556 highlight_ranges,
3557 name_ranges,
3558 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3559 annotation_range: None,
3560 })
3561 }
3562
3563 pub fn function_body_fold_ranges<T: ToOffset>(
3564 &self,
3565 within: Range<T>,
3566 ) -> impl Iterator<Item = Range<usize>> + '_ {
3567 self.text_object_ranges(within, TreeSitterOptions::default())
3568 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3569 }
3570
3571 /// For each grammar in the language, runs the provided
3572 /// [`tree_sitter::Query`] against the given range.
3573 pub fn matches(
3574 &self,
3575 range: Range<usize>,
3576 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3577 ) -> SyntaxMapMatches {
3578 self.syntax.matches(range, self, query)
3579 }
3580
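    /// Returns every bracket pair whose overall span (from the start of the open
    /// bracket to the end of the close bracket) overlaps the given range, across
    /// all grammars with a brackets query. Unlike [`Self::bracket_ranges`], this
    /// also includes pairs from newline-only patterns.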
3581 pub fn all_bracket_ranges(
3582 &self,
3583 range: Range<usize>,
3584 ) -> impl Iterator<Item = BracketMatch> + '_ {
3585 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3586 grammar.brackets_config.as_ref().map(|c| &c.query)
3587 });
3588 let configs = matches
3589 .grammars()
3590 .iter()
3591 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3592 .collect::<Vec<_>>();
3593
3594 iter::from_fn(move || {
3595 while let Some(mat) = matches.peek() {
3596 let mut open = None;
3597 let mut close = None;
3598 let config = &configs[mat.grammar_index];
3599 let pattern = &config.patterns[mat.pattern_index];
3600 for capture in mat.captures {
3601 if capture.index == config.open_capture_ix {
3602 open = Some(capture.node.byte_range());
3603 } else if capture.index == config.close_capture_ix {
3604 close = Some(capture.node.byte_range());
3605 }
3606 }
3607
3608 matches.advance();
3609
3610 let Some((open_range, close_range)) = open.zip(close) else {
3611 continue;
3612 };
3613
3614 let bracket_range = open_range.start..=close_range.end;
3615 if !bracket_range.overlaps(&range) {
3616 continue;
3617 }
3618
3619 return Some(BracketMatch {
3620 open_range,
3621 close_range,
3622 newline_only: pattern.newline_only,
3623 });
3624 }
3625 None
3626 })
3627 }
3628
3629 /// Returns bracket range pairs overlapping or adjacent to `range`
3630 pub fn bracket_ranges<T: ToOffset>(
3631 &self,
3632 range: Range<T>,
3633 ) -> impl Iterator<Item = BracketMatch> + '_ {
3634        // Expand the range by one character on each side so that bracket pairs adjacent to it are also found.
3635 let range = range.start.to_offset(self).saturating_sub(1)
3636 ..self.len().min(range.end.to_offset(self) + 1);
3637 self.all_bracket_ranges(range)
3638 .filter(|pair| !pair.newline_only)
3639 }
3640
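    /// Returns the ranges of text objects (e.g. the inside of a function body)
    /// defined by each grammar's text objects query that overlap the given range,
    /// paired with the [`TextObject`] kind of each range.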
3641 pub fn text_object_ranges<T: ToOffset>(
3642 &self,
3643 range: Range<T>,
3644 options: TreeSitterOptions,
3645 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3646 let range = range.start.to_offset(self).saturating_sub(1)
3647 ..self.len().min(range.end.to_offset(self) + 1);
3648
3649 let mut matches =
3650 self.syntax
3651 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3652 grammar.text_object_config.as_ref().map(|c| &c.query)
3653 });
3654
3655 let configs = matches
3656 .grammars()
3657 .iter()
3658 .map(|grammar| grammar.text_object_config.as_ref())
3659 .collect::<Vec<_>>();
3660
3661 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3662
3663 iter::from_fn(move || loop {
3664 while let Some(capture) = captures.pop() {
3665 if capture.0.overlaps(&range) {
3666 return Some(capture);
3667 }
3668 }
3669
3670 let mat = matches.peek()?;
3671
3672 let Some(config) = configs[mat.grammar_index].as_ref() else {
3673 matches.advance();
3674 continue;
3675 };
3676
3677 for capture in mat.captures {
3678 let Some(ix) = config
3679 .text_objects_by_capture_ix
3680 .binary_search_by_key(&capture.index, |e| e.0)
3681 .ok()
3682 else {
3683 continue;
3684 };
3685 let text_object = config.text_objects_by_capture_ix[ix].1;
3686 let byte_range = capture.node.byte_range();
3687
3688 let mut found = false;
3689 for (range, existing) in captures.iter_mut() {
3690 if existing == &text_object {
3691 range.start = range.start.min(byte_range.start);
3692 range.end = range.end.max(byte_range.end);
3693 found = true;
3694 break;
3695 }
3696 }
3697
3698 if !found {
3699 captures.push((byte_range, text_object));
3700 }
3701 }
3702
3703 matches.advance();
3704 })
3705 }
3706
3707 /// Returns enclosing bracket ranges containing the given range
3708 pub fn enclosing_bracket_ranges<T: ToOffset>(
3709 &self,
3710 range: Range<T>,
3711 ) -> impl Iterator<Item = BracketMatch> + '_ {
3712 let range = range.start.to_offset(self)..range.end.to_offset(self);
3713
3714 self.bracket_ranges(range.clone()).filter(move |pair| {
3715 pair.open_range.start <= range.start && pair.close_range.end >= range.end
3716 })
3717 }
3718
3719    /// Returns the open and close ranges of the smallest enclosing bracket pair containing the given range, or `None` if no bracket pair contains the range.
3720    ///
3721    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
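    ///
    /// An illustrative sketch, assuming a language whose bracket query matches both
    /// `(`/`)` and `[`/`]`, and a `snapshot: BufferSnapshot` in scope:
    ///
    /// ```ignore
    /// // For the text `foo(bar[2])` with the cursor between `2` and `]` (offset 9),
    /// // the innermost pair is the square brackets, not the outer parentheses.
    /// let (open, close) = snapshot
    ///     .innermost_enclosing_bracket_ranges(9..9, None)
    ///     .unwrap();
    /// assert_eq!((open, close), (7..8, 9..10));
    /// ```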
3722 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3723 &self,
3724 range: Range<T>,
3725 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3726 ) -> Option<(Range<usize>, Range<usize>)> {
3727 let range = range.start.to_offset(self)..range.end.to_offset(self);
3728
3729 // Get the ranges of the innermost pair of brackets.
3730 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3731
3732 for pair in self.enclosing_bracket_ranges(range.clone()) {
3733 if let Some(range_filter) = range_filter {
3734 if !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
3735 continue;
3736 }
3737 }
3738
3739 let len = pair.close_range.end - pair.open_range.start;
3740
3741 if let Some((existing_open, existing_close)) = &result {
3742 let existing_len = existing_close.end - existing_open.start;
3743 if len > existing_len {
3744 continue;
3745 }
3746 }
3747
3748 result = Some((pair.open_range, pair.close_range));
3749 }
3750
3751 result
3752 }
3753
3754 /// Returns anchor ranges for any matches of the redaction query.
3755 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3756 /// will be run on the relevant section of the buffer.
3757 pub fn redacted_ranges<T: ToOffset>(
3758 &self,
3759 range: Range<T>,
3760 ) -> impl Iterator<Item = Range<usize>> + '_ {
3761 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3762 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3763 grammar
3764 .redactions_config
3765 .as_ref()
3766 .map(|config| &config.query)
3767 });
3768
3769 let configs = syntax_matches
3770 .grammars()
3771 .iter()
3772 .map(|grammar| grammar.redactions_config.as_ref())
3773 .collect::<Vec<_>>();
3774
3775 iter::from_fn(move || {
3776 let redacted_range = syntax_matches
3777 .peek()
3778 .and_then(|mat| {
3779 configs[mat.grammar_index].and_then(|config| {
3780 mat.captures
3781 .iter()
3782 .find(|capture| capture.index == config.redaction_capture_ix)
3783 })
3784 })
3785 .map(|mat| mat.node.byte_range());
3786 syntax_matches.advance();
3787 redacted_range
3788 })
3789 }
3790
3791 pub fn injections_intersecting_range<T: ToOffset>(
3792 &self,
3793 range: Range<T>,
3794 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3795 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3796
3797 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3798 grammar
3799 .injection_config
3800 .as_ref()
3801 .map(|config| &config.query)
3802 });
3803
3804 let configs = syntax_matches
3805 .grammars()
3806 .iter()
3807 .map(|grammar| grammar.injection_config.as_ref())
3808 .collect::<Vec<_>>();
3809
3810 iter::from_fn(move || {
3811 let ranges = syntax_matches.peek().and_then(|mat| {
3812 let config = &configs[mat.grammar_index]?;
3813 let content_capture_range = mat.captures.iter().find_map(|capture| {
3814 if capture.index == config.content_capture_ix {
3815 Some(capture.node.byte_range())
3816 } else {
3817 None
3818 }
3819 })?;
3820 let language = self.language_at(content_capture_range.start)?;
3821 Some((content_capture_range, language))
3822 });
3823 syntax_matches.advance();
3824 ranges
3825 })
3826 }
3827
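    /// Returns the runnable ranges (e.g. tests) detected by each grammar's
    /// runnables query within the given range, along with the tags and extra
    /// captures declared by the matching query pattern.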
3828 pub fn runnable_ranges(
3829 &self,
3830 offset_range: Range<usize>,
3831 ) -> impl Iterator<Item = RunnableRange> + '_ {
3832 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3833 grammar.runnable_config.as_ref().map(|config| &config.query)
3834 });
3835
3836 let test_configs = syntax_matches
3837 .grammars()
3838 .iter()
3839 .map(|grammar| grammar.runnable_config.as_ref())
3840 .collect::<Vec<_>>();
3841
3842 iter::from_fn(move || loop {
3843 let mat = syntax_matches.peek()?;
3844
3845 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
3846 let mut run_range = None;
3847 let full_range = mat.captures.iter().fold(
3848 Range {
3849 start: usize::MAX,
3850 end: 0,
3851 },
3852 |mut acc, next| {
3853 let byte_range = next.node.byte_range();
3854 if acc.start > byte_range.start {
3855 acc.start = byte_range.start;
3856 }
3857 if acc.end < byte_range.end {
3858 acc.end = byte_range.end;
3859 }
3860 acc
3861 },
3862 );
3863 if full_range.start > full_range.end {
3864 // We did not find a full spanning range of this match.
3865 return None;
3866 }
3867 let extra_captures: SmallVec<[_; 1]> =
3868 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
3869 test_configs
3870 .extra_captures
3871 .get(capture.index as usize)
3872 .cloned()
3873 .and_then(|tag_name| match tag_name {
3874 RunnableCapture::Named(name) => {
3875 Some((capture.node.byte_range(), name))
3876 }
3877 RunnableCapture::Run => {
3878 let _ = run_range.insert(capture.node.byte_range());
3879 None
3880 }
3881 })
3882 }));
3883 let run_range = run_range?;
3884 let tags = test_configs
3885 .query
3886 .property_settings(mat.pattern_index)
3887 .iter()
3888 .filter_map(|property| {
3889 if *property.key == *"tag" {
3890 property
3891 .value
3892 .as_ref()
3893 .map(|value| RunnableTag(value.to_string().into()))
3894 } else {
3895 None
3896 }
3897 })
3898 .collect();
3899 let extra_captures = extra_captures
3900 .into_iter()
3901 .map(|(range, name)| {
3902 (
3903 name.to_string(),
3904 self.text_for_range(range.clone()).collect::<String>(),
3905 )
3906 })
3907 .collect();
3908 // All tags should have the same range.
3909 Some(RunnableRange {
3910 run_range,
3911 full_range,
3912 runnable: Runnable {
3913 tags,
3914 language: mat.language,
3915 buffer: self.remote_id(),
3916 },
3917 extra_captures,
3918 buffer_id: self.remote_id(),
3919 })
3920 });
3921
3922 syntax_matches.advance();
3923 if test_range.is_some() {
3924                // It's fine to short-circuit when `.peek()?` returns None. However, a match whose
3925                // captures lack a run marker should not end this iterator, so we loop around and try the next match.
3926 return test_range;
3927 }
3928 })
3929 }
3930
3931    /// Returns the selections of peers intersecting the given range, optionally including the local replica's.
3932 #[allow(clippy::type_complexity)]
3933 pub fn selections_in_range(
3934 &self,
3935 range: Range<Anchor>,
3936 include_local: bool,
3937 ) -> impl Iterator<
3938 Item = (
3939 ReplicaId,
3940 bool,
3941 CursorShape,
3942 impl Iterator<Item = &Selection<Anchor>> + '_,
3943 ),
3944 > + '_ {
3945 self.remote_selections
3946 .iter()
3947 .filter(move |(replica_id, set)| {
3948 (include_local || **replica_id != self.text.replica_id())
3949 && !set.selections.is_empty()
3950 })
3951 .map(move |(replica_id, set)| {
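                // Find the index range of selections that could intersect `range`:
                // `start_ix` is the first selection whose end is not before `range.start`,
                // and `end_ix` is one past the last selection whose start is not after
                // `range.end`. Forcing the comparator to never return `Equal` turns
                // `binary_search_by` into a partition-point search.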
3952 let start_ix = match set.selections.binary_search_by(|probe| {
3953 probe.end.cmp(&range.start, self).then(Ordering::Greater)
3954 }) {
3955 Ok(ix) | Err(ix) => ix,
3956 };
3957 let end_ix = match set.selections.binary_search_by(|probe| {
3958 probe.start.cmp(&range.end, self).then(Ordering::Less)
3959 }) {
3960 Ok(ix) | Err(ix) => ix,
3961 };
3962
3963 (
3964 *replica_id,
3965 set.line_mode,
3966 set.cursor_shape,
3967 set.selections[start_ix..end_ix].iter(),
3968 )
3969 })
3970 }
3971
3972    /// Returns whether the buffer contains any diagnostics.
3973 pub fn has_diagnostics(&self) -> bool {
3974 !self.diagnostics.is_empty()
3975 }
3976
3977 /// Returns all the diagnostics intersecting the given range.
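    ///
    /// Entries from all language servers are merged into a single sequence ordered by
    /// range start, then severity, then group id (reversed when `reversed` is true).
    /// A minimal usage sketch, assuming a `snapshot: BufferSnapshot` in scope:
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, Point>(Point::zero()..snapshot.max_point(), false) {
    ///     println!("{:?}: {}", entry.range, entry.diagnostic.message);
    /// }
    /// ```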
3978 pub fn diagnostics_in_range<'a, T, O>(
3979 &'a self,
3980 search_range: Range<T>,
3981 reversed: bool,
3982 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3983 where
3984 T: 'a + Clone + ToOffset,
3985 O: 'a + FromAnchor,
3986 {
3987 let mut iterators: Vec<_> = self
3988 .diagnostics
3989 .iter()
3990 .map(|(_, collection)| {
3991 collection
3992 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
3993 .peekable()
3994 })
3995 .collect();
3996
3997 std::iter::from_fn(move || {
3998 let (next_ix, _) = iterators
3999 .iter_mut()
4000 .enumerate()
4001 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4002 .min_by(|(_, a), (_, b)| {
4003 let cmp = a
4004 .range
4005 .start
4006 .cmp(&b.range.start, self)
4007 // when range is equal, sort by diagnostic severity
4008 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4009 // and stabilize order with group_id
4010 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4011 if reversed {
4012 cmp.reverse()
4013 } else {
4014 cmp
4015 }
4016 })?;
4017 iterators[next_ix]
4018 .next()
4019 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4020 diagnostic,
4021 range: FromAnchor::from_anchor(&range.start, self)
4022 ..FromAnchor::from_anchor(&range.end, self),
4023 })
4024 })
4025 }
4026
4027 /// Returns all the diagnostic groups associated with the given
4028 /// language server ID. If no language server ID is provided,
4029    /// all diagnostic groups are returned.
4030 pub fn diagnostic_groups(
4031 &self,
4032 language_server_id: Option<LanguageServerId>,
4033 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4034 let mut groups = Vec::new();
4035
4036 if let Some(language_server_id) = language_server_id {
4037 if let Ok(ix) = self
4038 .diagnostics
4039 .binary_search_by_key(&language_server_id, |e| e.0)
4040 {
4041 self.diagnostics[ix]
4042 .1
4043 .groups(language_server_id, &mut groups, self);
4044 }
4045 } else {
4046 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4047 diagnostics.groups(*language_server_id, &mut groups, self);
4048 }
4049 }
4050
4051 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4052 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4053 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4054 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4055 });
4056
4057 groups
4058 }
4059
4060 /// Returns an iterator over the diagnostics for the given group.
4061 pub fn diagnostic_group<O>(
4062 &self,
4063 group_id: usize,
4064 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4065 where
4066 O: FromAnchor + 'static,
4067 {
4068 self.diagnostics
4069 .iter()
4070 .flat_map(move |(_, set)| set.group(group_id, self))
4071 }
4072
4073 /// An integer version number that accounts for all updates besides
4074 /// the buffer's text itself (which is versioned via a version vector).
4075 pub fn non_text_state_update_count(&self) -> usize {
4076 self.non_text_state_update_count
4077 }
4078
4079    /// Returns a snapshot of the underlying file.
4080 pub fn file(&self) -> Option<&Arc<dyn File>> {
4081 self.file.as_ref()
4082 }
4083
4084 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4085 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4086 if let Some(file) = self.file() {
4087 if file.path().file_name().is_none() || include_root {
4088 Some(file.full_path(cx))
4089 } else {
4090 Some(file.path().to_path_buf())
4091 }
4092 } else {
4093 None
4094 }
4095 }
4096}
4097
4098fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4099 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4100}
4101
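/// Computes the indentation of a line from its leading whitespace: the kind
/// (spaces vs. tabs) is taken from the first whitespace character, and the
/// length counts every leading space or tab.
///
/// For example, `indent_size_for_text("\t\tfoo".chars())` yields an indent of
/// two tabs, while a mixed prefix like `"  \tfoo"` yields a length of three
/// with kind [`IndentKind::Space`].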
4102fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4103 let mut result = IndentSize::spaces(0);
4104 for c in text {
4105 let kind = match c {
4106 ' ' => IndentKind::Space,
4107 '\t' => IndentKind::Tab,
4108 _ => break,
4109 };
4110 if result.len == 0 {
4111 result.kind = kind;
4112 }
4113 result.len += 1;
4114 }
4115 result
4116}
4117
4118impl Clone for BufferSnapshot {
4119 fn clone(&self) -> Self {
4120 Self {
4121 text: self.text.clone(),
4122 syntax: self.syntax.clone(),
4123 file: self.file.clone(),
4124 remote_selections: self.remote_selections.clone(),
4125 diagnostics: self.diagnostics.clone(),
4126 language: self.language.clone(),
4127 non_text_state_update_count: self.non_text_state_update_count,
4128 }
4129 }
4130}
4131
4132impl Deref for BufferSnapshot {
4133 type Target = text::BufferSnapshot;
4134
4135 fn deref(&self) -> &Self::Target {
4136 &self.text
4137 }
4138}
4139
4140unsafe impl Send for BufferChunks<'_> {}
4141
4142impl<'a> BufferChunks<'a> {
4143 pub(crate) fn new(
4144 text: &'a Rope,
4145 range: Range<usize>,
4146 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4147 diagnostics: bool,
4148 buffer_snapshot: Option<&'a BufferSnapshot>,
4149 ) -> Self {
4150 let mut highlights = None;
4151 if let Some((captures, highlight_maps)) = syntax {
4152 highlights = Some(BufferChunkHighlights {
4153 captures,
4154 next_capture: None,
4155 stack: Default::default(),
4156 highlight_maps,
4157 })
4158 }
4159
4160 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4161 let chunks = text.chunks_in_range(range.clone());
4162
4163 let mut this = BufferChunks {
4164 range,
4165 buffer_snapshot,
4166 chunks,
4167 diagnostic_endpoints,
4168 error_depth: 0,
4169 warning_depth: 0,
4170 information_depth: 0,
4171 hint_depth: 0,
4172 unnecessary_depth: 0,
4173 highlights,
4174 };
4175 this.initialize_diagnostic_endpoints();
4176 this
4177 }
4178
4179    /// Seeks to the given byte range in the buffer, reusing the existing highlight state when the new range is a subrange of the current one.
4180 pub fn seek(&mut self, range: Range<usize>) {
4181 let old_range = std::mem::replace(&mut self.range, range.clone());
4182 self.chunks.set_range(self.range.clone());
4183 if let Some(highlights) = self.highlights.as_mut() {
4184 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4185 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4186 highlights
4187 .stack
4188 .retain(|(end_offset, _)| *end_offset > range.start);
4189 if let Some(capture) = &highlights.next_capture {
4190 if range.start >= capture.node.start_byte() {
4191 let next_capture_end = capture.node.end_byte();
4192 if range.start < next_capture_end {
4193 highlights.stack.push((
4194 next_capture_end,
4195 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4196 ));
4197 }
4198 highlights.next_capture.take();
4199 }
4200 }
4201 } else if let Some(snapshot) = self.buffer_snapshot {
4202 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4203 *highlights = BufferChunkHighlights {
4204 captures,
4205 next_capture: None,
4206 stack: Default::default(),
4207 highlight_maps,
4208 };
4209 } else {
4210 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4211 // Seeking such BufferChunks is not supported.
4212 debug_assert!(false, "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot");
4213 }
4214
4215 highlights.captures.set_byte_range(self.range.clone());
4216 self.initialize_diagnostic_endpoints();
4217 }
4218 }
4219
4220 fn initialize_diagnostic_endpoints(&mut self) {
4221 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4222 if let Some(buffer) = self.buffer_snapshot {
4223 let mut diagnostic_endpoints = Vec::new();
4224 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4225 diagnostic_endpoints.push(DiagnosticEndpoint {
4226 offset: entry.range.start,
4227 is_start: true,
4228 severity: entry.diagnostic.severity,
4229 is_unnecessary: entry.diagnostic.is_unnecessary,
4230 });
4231 diagnostic_endpoints.push(DiagnosticEndpoint {
4232 offset: entry.range.end,
4233 is_start: false,
4234 severity: entry.diagnostic.severity,
4235 is_unnecessary: entry.diagnostic.is_unnecessary,
4236 });
4237 }
4238 diagnostic_endpoints
4239 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4240 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4241 self.hint_depth = 0;
4242 self.error_depth = 0;
4243 self.warning_depth = 0;
4244 self.information_depth = 0;
4245 }
4246 }
4247 }
4248
4249 /// The current byte offset in the buffer.
4250 pub fn offset(&self) -> usize {
4251 self.range.start
4252 }
4253
4254 pub fn range(&self) -> Range<usize> {
4255 self.range.clone()
4256 }
4257
4258 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4259 let depth = match endpoint.severity {
4260 DiagnosticSeverity::ERROR => &mut self.error_depth,
4261 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4262 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4263 DiagnosticSeverity::HINT => &mut self.hint_depth,
4264 _ => return,
4265 };
4266 if endpoint.is_start {
4267 *depth += 1;
4268 } else {
4269 *depth -= 1;
4270 }
4271
4272 if endpoint.is_unnecessary {
4273 if endpoint.is_start {
4274 self.unnecessary_depth += 1;
4275 } else {
4276 self.unnecessary_depth -= 1;
4277 }
4278 }
4279 }
4280
4281 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4282 if self.error_depth > 0 {
4283 Some(DiagnosticSeverity::ERROR)
4284 } else if self.warning_depth > 0 {
4285 Some(DiagnosticSeverity::WARNING)
4286 } else if self.information_depth > 0 {
4287 Some(DiagnosticSeverity::INFORMATION)
4288 } else if self.hint_depth > 0 {
4289 Some(DiagnosticSeverity::HINT)
4290 } else {
4291 None
4292 }
4293 }
4294
4295 fn current_code_is_unnecessary(&self) -> bool {
4296 self.unnecessary_depth > 0
4297 }
4298}
4299
4300impl<'a> Iterator for BufferChunks<'a> {
4301 type Item = Chunk<'a>;
4302
4303 fn next(&mut self) -> Option<Self::Item> {
4304 let mut next_capture_start = usize::MAX;
4305 let mut next_diagnostic_endpoint = usize::MAX;
4306
4307 if let Some(highlights) = self.highlights.as_mut() {
4308 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4309 if *parent_capture_end <= self.range.start {
4310 highlights.stack.pop();
4311 } else {
4312 break;
4313 }
4314 }
4315
4316 if highlights.next_capture.is_none() {
4317 highlights.next_capture = highlights.captures.next();
4318 }
4319
4320 while let Some(capture) = highlights.next_capture.as_ref() {
4321 if self.range.start < capture.node.start_byte() {
4322 next_capture_start = capture.node.start_byte();
4323 break;
4324 } else {
4325 let highlight_id =
4326 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4327 highlights
4328 .stack
4329 .push((capture.node.end_byte(), highlight_id));
4330 highlights.next_capture = highlights.captures.next();
4331 }
4332 }
4333 }
4334
4335 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4336 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4337 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4338 if endpoint.offset <= self.range.start {
4339 self.update_diagnostic_depths(endpoint);
4340 diagnostic_endpoints.next();
4341 } else {
4342 next_diagnostic_endpoint = endpoint.offset;
4343 break;
4344 }
4345 }
4346 }
4347 self.diagnostic_endpoints = diagnostic_endpoints;
4348
4349 if let Some(chunk) = self.chunks.peek() {
4350 let chunk_start = self.range.start;
4351 let mut chunk_end = (self.chunks.offset() + chunk.len())
4352 .min(next_capture_start)
4353 .min(next_diagnostic_endpoint);
4354 let mut highlight_id = None;
4355 if let Some(highlights) = self.highlights.as_ref() {
4356 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4357 chunk_end = chunk_end.min(*parent_capture_end);
4358 highlight_id = Some(*parent_highlight_id);
4359 }
4360 }
4361
4362 let slice =
4363 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4364 self.range.start = chunk_end;
4365 if self.range.start == self.chunks.offset() + chunk.len() {
4366 self.chunks.next().unwrap();
4367 }
4368
4369 Some(Chunk {
4370 text: slice,
4371 syntax_highlight_id: highlight_id,
4372 diagnostic_severity: self.current_diagnostic_severity(),
4373 is_unnecessary: self.current_code_is_unnecessary(),
4374 ..Default::default()
4375 })
4376 } else {
4377 None
4378 }
4379 }
4380}
4381
4382impl operation_queue::Operation for Operation {
4383 fn lamport_timestamp(&self) -> clock::Lamport {
4384 match self {
4385 Operation::Buffer(_) => {
4386 unreachable!("buffer operations should never be deferred at this layer")
4387 }
4388 Operation::UpdateDiagnostics {
4389 lamport_timestamp, ..
4390 }
4391 | Operation::UpdateSelections {
4392 lamport_timestamp, ..
4393 }
4394 | Operation::UpdateCompletionTriggers {
4395 lamport_timestamp, ..
4396 } => *lamport_timestamp,
4397 }
4398 }
4399}
4400
4401impl Default for Diagnostic {
4402 fn default() -> Self {
4403 Self {
4404 source: Default::default(),
4405 code: None,
4406 severity: DiagnosticSeverity::ERROR,
4407 message: Default::default(),
4408 group_id: 0,
4409 is_primary: false,
4410 is_disk_based: false,
4411 is_unnecessary: false,
4412 data: None,
4413 }
4414 }
4415}
4416
4417impl IndentSize {
4418    /// Returns an [`IndentSize`] representing the given number of spaces.
4419 pub fn spaces(len: u32) -> Self {
4420 Self {
4421 len,
4422 kind: IndentKind::Space,
4423 }
4424 }
4425
4426 /// Returns an [`IndentSize`] representing a tab.
4427 pub fn tab() -> Self {
4428 Self {
4429 len: 1,
4430 kind: IndentKind::Tab,
4431 }
4432 }
4433
4434 /// An iterator over the characters represented by this [`IndentSize`].
4435 pub fn chars(&self) -> impl Iterator<Item = char> {
4436 iter::repeat(self.char()).take(self.len as usize)
4437 }
4438
4439 /// The character representation of this [`IndentSize`].
4440 pub fn char(&self) -> char {
4441 match self.kind {
4442 IndentKind::Space => ' ',
4443 IndentKind::Tab => '\t',
4444 }
4445 }
4446
4447 /// Consumes the current [`IndentSize`] and returns a new one that has
4448 /// been shrunk or enlarged by the given size along the given direction.
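    ///
    /// A minimal sketch of the behavior (marked `ignore` since it depends on crate paths):
    ///
    /// ```ignore
    /// // Shrinking four spaces by two spaces leaves two spaces.
    /// assert_eq!(
    ///     IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(2)).len,
    ///     2
    /// );
    /// // Growing an empty indent adopts the added indent wholesale.
    /// assert_eq!(
    ///     IndentSize::spaces(0).with_delta(Ordering::Greater, IndentSize::tab()).kind,
    ///     IndentKind::Tab
    /// );
    /// // Mismatched kinds leave the indent unchanged.
    /// assert_eq!(
    ///     IndentSize::tab().with_delta(Ordering::Less, IndentSize::spaces(2)).len,
    ///     1
    /// );
    /// ```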
4449 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4450 match direction {
4451 Ordering::Less => {
4452 if self.kind == size.kind && self.len >= size.len {
4453 self.len -= size.len;
4454 }
4455 }
4456 Ordering::Equal => {}
4457 Ordering::Greater => {
4458 if self.len == 0 {
4459 self = size;
4460 } else if self.kind == size.kind {
4461 self.len += size.len;
4462 }
4463 }
4464 }
4465 self
4466 }
4467
4468 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4469 match self.kind {
4470 IndentKind::Space => self.len as usize,
4471 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4472 }
4473 }
4474}
4475
4476#[cfg(any(test, feature = "test-support"))]
4477pub struct TestFile {
4478 pub path: Arc<Path>,
4479 pub root_name: String,
4480 pub local_root: Option<PathBuf>,
4481}
4482
4483#[cfg(any(test, feature = "test-support"))]
4484impl File for TestFile {
4485 fn path(&self) -> &Arc<Path> {
4486 &self.path
4487 }
4488
4489 fn full_path(&self, _: &gpui::App) -> PathBuf {
4490 PathBuf::from(&self.root_name).join(self.path.as_ref())
4491 }
4492
4493 fn as_local(&self) -> Option<&dyn LocalFile> {
4494 if self.local_root.is_some() {
4495 Some(self)
4496 } else {
4497 None
4498 }
4499 }
4500
4501 fn disk_state(&self) -> DiskState {
4502 unimplemented!()
4503 }
4504
4505 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4506 self.path().file_name().unwrap_or(self.root_name.as_ref())
4507 }
4508
4509 fn worktree_id(&self, _: &App) -> WorktreeId {
4510 WorktreeId::from_usize(0)
4511 }
4512
4513 fn as_any(&self) -> &dyn std::any::Any {
4514 unimplemented!()
4515 }
4516
4517 fn to_proto(&self, _: &App) -> rpc::proto::File {
4518 unimplemented!()
4519 }
4520
4521 fn is_private(&self) -> bool {
4522 false
4523 }
4524}
4525
4526#[cfg(any(test, feature = "test-support"))]
4527impl LocalFile for TestFile {
4528 fn abs_path(&self, _cx: &App) -> PathBuf {
4529 PathBuf::from(self.local_root.as_ref().unwrap())
4530 .join(&self.root_name)
4531 .join(self.path.as_ref())
4532 }
4533
4534 fn load(&self, _cx: &App) -> Task<Result<String>> {
4535 unimplemented!()
4536 }
4537
4538 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4539 unimplemented!()
4540 }
4541}
4542
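/// Groups an ascending sequence of `u32` values into contiguous ranges, splitting a
/// range whenever it would exceed `max_len` values.
///
/// For example, `contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 2)` yields
/// `1..3`, `3..4`, `5..7`, and `9..10`.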
4543pub(crate) fn contiguous_ranges(
4544 values: impl Iterator<Item = u32>,
4545 max_len: usize,
4546) -> impl Iterator<Item = Range<u32>> {
4547 let mut values = values;
4548 let mut current_range: Option<Range<u32>> = None;
4549 std::iter::from_fn(move || loop {
4550 if let Some(value) = values.next() {
4551 if let Some(range) = &mut current_range {
4552 if value == range.end && range.len() < max_len {
4553 range.end += 1;
4554 continue;
4555 }
4556 }
4557
4558 let prev_range = current_range.clone();
4559 current_range = Some(value..(value + 1));
4560 if prev_range.is_some() {
4561 return prev_range;
4562 }
4563 } else {
4564 return current_range.take();
4565 }
4566 })
4567}
4568
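/// Classifies characters as word, whitespace, or punctuation, taking the language
/// scope's `word_characters` into account when a scope is present.
///
/// An illustrative sketch, constructed here without a language scope:
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// assert_eq!(classifier.kind('-'), CharKind::Punctuation);
/// // With punctuation ignored, non-word characters are treated as word characters.
/// assert_eq!(classifier.ignore_punctuation(true).kind('-'), CharKind::Word);
/// ```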
4569#[derive(Default, Debug)]
4570pub struct CharClassifier {
4571 scope: Option<LanguageScope>,
4572 for_completion: bool,
4573 ignore_punctuation: bool,
4574}
4575
4576impl CharClassifier {
4577 pub fn new(scope: Option<LanguageScope>) -> Self {
4578 Self {
4579 scope,
4580 for_completion: false,
4581 ignore_punctuation: false,
4582 }
4583 }
4584
4585 pub fn for_completion(self, for_completion: bool) -> Self {
4586 Self {
4587 for_completion,
4588 ..self
4589 }
4590 }
4591
4592 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4593 Self {
4594 ignore_punctuation,
4595 ..self
4596 }
4597 }
4598
4599 pub fn is_whitespace(&self, c: char) -> bool {
4600 self.kind(c) == CharKind::Whitespace
4601 }
4602
4603 pub fn is_word(&self, c: char) -> bool {
4604 self.kind(c) == CharKind::Word
4605 }
4606
4607 pub fn is_punctuation(&self, c: char) -> bool {
4608 self.kind(c) == CharKind::Punctuation
4609 }
4610
4611 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4612 if c.is_whitespace() {
4613 return CharKind::Whitespace;
4614 } else if c.is_alphanumeric() || c == '_' {
4615 return CharKind::Word;
4616 }
4617
4618 if let Some(scope) = &self.scope {
4619 if let Some(characters) = scope.word_characters() {
4620 if characters.contains(&c) {
4621 if c == '-' && !self.for_completion && !ignore_punctuation {
4622 return CharKind::Punctuation;
4623 }
4624 return CharKind::Word;
4625 }
4626 }
4627 }
4628
4629 if ignore_punctuation {
4630 CharKind::Word
4631 } else {
4632 CharKind::Punctuation
4633 }
4634 }
4635
4636 pub fn kind(&self, c: char) -> CharKind {
4637 self.kind_with(c, self.ignore_punctuation)
4638 }
4639}
4640
4641/// Find all of the ranges of whitespace that occur at the ends of lines
4642/// in the given rope.
4643///
4644/// This could also be done with a regex search, but this implementation
4645/// avoids copying text.
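///
/// For example, `trailing_whitespace_ranges(&Rope::from("a  \nb\t\nc"))` returns
/// `[1..3, 5..6]`: the two spaces after `a` and the tab after `b`.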
4646pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4647 let mut ranges = Vec::new();
4648
4649 let mut offset = 0;
4650 let mut prev_chunk_trailing_whitespace_range = 0..0;
4651 for chunk in rope.chunks() {
4652 let mut prev_line_trailing_whitespace_range = 0..0;
4653 for (i, line) in chunk.split('\n').enumerate() {
4654 let line_end_offset = offset + line.len();
4655 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4656 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4657
4658 if i == 0 && trimmed_line_len == 0 {
4659 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4660 }
4661 if !prev_line_trailing_whitespace_range.is_empty() {
4662 ranges.push(prev_line_trailing_whitespace_range);
4663 }
4664
4665 offset = line_end_offset + 1;
4666 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4667 }
4668
4669 offset -= 1;
4670 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4671 }
4672
4673 if !prev_chunk_trailing_whitespace_range.is_empty() {
4674 ranges.push(prev_chunk_trailing_whitespace_range);
4675 }
4676
4677 ranges
4678}