1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 proto, Grammar, Language, LanguageRegistry,
5};
6use crate::{
7 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
8 language_settings::{language_settings, LanguageSettings},
9 outline::OutlineItem,
10 syntax_map::{
11 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
12 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, TextObject,
17 TreeSitterOptions,
18};
19use anyhow::{anyhow, Context as _, Result};
20use async_watch as watch;
21use clock::Lamport;
22pub use clock::ReplicaId;
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 AnyElement, App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, Pixels,
28 SharedString, StyledText, Task, TaskLabel, TextStyle, Window,
29};
30use lsp::{LanguageServerId, NumberOrString};
31use parking_lot::Mutex;
32use schemars::JsonSchema;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 ffi::OsStr,
45 fmt,
46 future::Future,
47 iter::{self, Iterator, Peekable},
48 mem,
49 num::NonZeroU32,
50 ops::{Deref, DerefMut, Range},
51 path::{Path, PathBuf},
52 str,
53 sync::{Arc, LazyLock},
54 time::{Duration, Instant},
55 vec,
56};
57use sum_tree::TreeMap;
58use text::operation_queue::OperationQueue;
59use text::*;
60pub use text::{
61 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
62 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
63 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
64 Transaction, TransactionId, Unclipped,
65};
66use theme::{ActiveTheme as _, SyntaxTheme};
67#[cfg(any(test, feature = "test-support"))]
68use util::RandomCharIter;
69use util::{debug_panic, maybe, RangeExt};
70
71#[cfg(any(test, feature = "test-support"))]
72pub use {tree_sitter_rust, tree_sitter_typescript};
73
74pub use lsp::DiagnosticSeverity;
75
76/// A label for the background task spawned by the buffer to compute
77/// a diff against the contents of its file.
78pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
79
/// Indicates whether a [`Buffer`] has permission to edit.
81#[derive(PartialEq, Clone, Copy, Debug)]
82pub enum Capability {
83 /// The buffer is a mutable replica.
84 ReadWrite,
85 /// The buffer is a read-only replica.
86 ReadOnly,
87}
88
89pub type BufferRow = u32;
90
91/// An in-memory representation of a source code file, including its text,
92/// syntax trees, git status, and diagnostics.
93pub struct Buffer {
94 text: TextBuffer,
95 branch_state: Option<BufferBranchState>,
96 /// Filesystem state, `None` when there is no path.
97 file: Option<Arc<dyn File>>,
98 /// The mtime of the file when this buffer was last loaded from
99 /// or saved to disk.
100 saved_mtime: Option<MTime>,
101 /// The version vector when this buffer was last loaded from
102 /// or saved to disk.
103 saved_version: clock::Global,
104 preview_version: clock::Global,
105 transaction_depth: usize,
106 was_dirty_before_starting_transaction: Option<bool>,
107 reload_task: Option<Task<Result<()>>>,
108 language: Option<Arc<Language>>,
109 autoindent_requests: Vec<Arc<AutoindentRequest>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 _subscriptions: Vec<gpui::Subscription>,
129}
130
131#[derive(Copy, Clone, Debug, PartialEq, Eq)]
132pub enum ParseStatus {
133 Idle,
134 Parsing,
135}
136
137struct BufferBranchState {
138 base_buffer: Entity<Buffer>,
139 merged_operations: Vec<Lamport>,
140}
141
142/// An immutable, cheaply cloneable representation of a fixed
143/// state of a buffer.
144pub struct BufferSnapshot {
145 pub text: text::BufferSnapshot,
146 pub(crate) syntax: SyntaxSnapshot,
147 file: Option<Arc<dyn File>>,
148 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
149 remote_selections: TreeMap<ReplicaId, SelectionSet>,
150 language: Option<Arc<Language>>,
151 non_text_state_update_count: usize,
152}
153
154/// The kind and amount of indentation in a particular line. For now,
155/// assumes that indentation is all the same character.
156#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
157pub struct IndentSize {
158 /// The number of bytes that comprise the indentation.
159 pub len: u32,
160 /// The kind of whitespace used for indentation.
161 pub kind: IndentKind,
162}
163
164/// A whitespace character that's used for indentation.
165#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
166pub enum IndentKind {
167 /// An ASCII space character.
168 #[default]
169 Space,
170 /// An ASCII tab character.
171 Tab,
172}
173
174/// The shape of a selection cursor.
175#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
176#[serde(rename_all = "snake_case")]
177pub enum CursorShape {
178 /// A vertical bar
179 #[default]
180 Bar,
181 /// A block that surrounds the following character
182 Block,
183 /// An underline that runs along the following character
184 Underline,
185 /// A box drawn around the following character
186 Hollow,
187}
188
189#[derive(Clone, Debug)]
190struct SelectionSet {
191 line_mode: bool,
192 cursor_shape: CursorShape,
193 selections: Arc<[Selection<Anchor>]>,
194 lamport_timestamp: clock::Lamport,
195}
196
197/// A diagnostic associated with a certain range of a buffer.
198#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
199pub struct Diagnostic {
200 /// The name of the service that produced this diagnostic.
201 pub source: Option<String>,
202 /// A machine-readable code that identifies this diagnostic.
203 pub code: Option<NumberOrString>,
204 /// Whether this diagnostic is a hint, warning, or error.
205 pub severity: DiagnosticSeverity,
206 /// The human-readable message associated with this diagnostic.
207 pub message: String,
208 /// An id that identifies the group to which this diagnostic belongs.
209 ///
210 /// When a language server produces a diagnostic with
211 /// one or more associated diagnostics, those diagnostics are all
212 /// assigned a single group ID.
213 pub group_id: usize,
214 /// Whether this diagnostic is the primary diagnostic for its group.
215 ///
216 /// In a given group, the primary diagnostic is the top-level diagnostic
217 /// returned by the language server. The non-primary diagnostics are the
218 /// associated diagnostics.
219 pub is_primary: bool,
220 /// Whether this diagnostic is considered to originate from an analysis of
221 /// files on disk, as opposed to any unsaved buffer contents. This is a
222 /// property of a given diagnostic source, and is configured for a given
223 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
224 /// for the language server.
225 pub is_disk_based: bool,
226 /// Whether this diagnostic marks unnecessary code.
227 pub is_unnecessary: bool,
    /// Data from the language server that produced this diagnostic. It is passed back to the
    /// server when code actions are requested for this diagnostic.
229 pub data: Option<Value>,
230}
231
232/// An operation used to synchronize this buffer with its other replicas.
233#[derive(Clone, Debug, PartialEq)]
234pub enum Operation {
235 /// A text operation.
236 Buffer(text::Operation),
237
238 /// An update to the buffer's diagnostics.
239 UpdateDiagnostics {
240 /// The id of the language server that produced the new diagnostics.
241 server_id: LanguageServerId,
242 /// The diagnostics.
243 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
244 /// The buffer's lamport timestamp.
245 lamport_timestamp: clock::Lamport,
246 },
247
248 /// An update to the most recent selections in this buffer.
249 UpdateSelections {
250 /// The selections.
251 selections: Arc<[Selection<Anchor>]>,
252 /// The buffer's lamport timestamp.
253 lamport_timestamp: clock::Lamport,
254 /// Whether the selections are in 'line mode'.
255 line_mode: bool,
256 /// The [`CursorShape`] associated with these selections.
257 cursor_shape: CursorShape,
258 },
259
260 /// An update to the characters that should trigger autocompletion
261 /// for this buffer.
262 UpdateCompletionTriggers {
263 /// The characters that trigger autocompletion.
264 triggers: Vec<String>,
265 /// The buffer's lamport timestamp.
266 lamport_timestamp: clock::Lamport,
267 /// The language server ID.
268 server_id: LanguageServerId,
269 },
270}
271
272/// An event that occurs in a buffer.
273#[derive(Clone, Debug, PartialEq)]
274pub enum BufferEvent {
275 /// The buffer was changed in a way that must be
276 /// propagated to its other replicas.
277 Operation {
278 operation: Operation,
279 is_local: bool,
280 },
281 /// The buffer was edited.
282 Edited,
283 /// The buffer's `dirty` bit changed.
284 DirtyChanged,
285 /// The buffer was saved.
286 Saved,
287 /// The buffer's file was changed on disk.
288 FileHandleChanged,
289 /// The buffer was reloaded.
290 Reloaded,
    /// The buffer needs to be reloaded.
292 ReloadNeeded,
293 /// The buffer's language was changed.
294 LanguageChanged,
295 /// The buffer's syntax trees were updated.
296 Reparsed,
297 /// The buffer's diagnostics were updated.
298 DiagnosticsUpdated,
299 /// The buffer gained or lost editing capabilities.
300 CapabilityChanged,
301 /// The buffer was explicitly requested to close.
302 Closed,
303 /// The buffer was discarded when closing.
304 Discarded,
305}
306
307/// The file associated with a buffer.
308pub trait File: Send + Sync {
309 /// Returns the [`LocalFile`] associated with this file, if the
310 /// file is local.
311 fn as_local(&self) -> Option<&dyn LocalFile>;
312
313 /// Returns whether this file is local.
314 fn is_local(&self) -> bool {
315 self.as_local().is_some()
316 }
317
318 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
319 /// only available in some states, such as modification time.
320 fn disk_state(&self) -> DiskState;
321
322 /// Returns the path of this file relative to the worktree's root directory.
323 fn path(&self) -> &Arc<Path>;
324
325 /// Returns the path of this file relative to the worktree's parent directory (this means it
326 /// includes the name of the worktree's root folder).
327 fn full_path(&self, cx: &App) -> PathBuf;
328
329 /// Returns the last component of this handle's absolute path. If this handle refers to the root
330 /// of its worktree, then this method will return the name of the worktree itself.
331 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
332
333 /// Returns the id of the worktree to which this file belongs.
334 ///
335 /// This is needed for looking up project-specific settings.
336 fn worktree_id(&self, cx: &App) -> WorktreeId;
337
338 /// Converts this file into an [`Any`] trait object.
339 fn as_any(&self) -> &dyn Any;
340
341 /// Converts this file into a protobuf message.
342 fn to_proto(&self, cx: &App) -> rpc::proto::File;
343
344 /// Return whether Zed considers this to be a private file.
345 fn is_private(&self) -> bool;
346}
347
/// The file's storage status: whether it's stored (`Present`), and if so, when it was last
/// modified. If the file is not stored, it is either `New` or `Deleted`. In the UI these two
/// states are distinguished; for example, the buffer tab does not display a deletion
/// indicator for new files.
352#[derive(Copy, Clone, Debug, PartialEq)]
353pub enum DiskState {
354 /// File created in Zed that has not been saved.
355 New,
356 /// File present on the filesystem.
357 Present { mtime: MTime },
358 /// Deleted file that was previously present.
359 Deleted,
360}
361
362impl DiskState {
363 /// Returns the file's last known modification time on disk.
364 pub fn mtime(self) -> Option<MTime> {
365 match self {
366 DiskState::New => None,
367 DiskState::Present { mtime } => Some(mtime),
368 DiskState::Deleted => None,
369 }
370 }
371
372 pub fn exists(&self) -> bool {
373 match self {
374 DiskState::New => false,
375 DiskState::Present { .. } => true,
376 DiskState::Deleted => false,
377 }
378 }
379}
380
381/// The file associated with a buffer, in the case where the file is on the local disk.
382pub trait LocalFile: File {
    /// Returns the absolute path of this file.
384 fn abs_path(&self, cx: &App) -> PathBuf;
385
386 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
387 fn load(&self, cx: &App) -> Task<Result<String>>;
388
389 /// Loads the file's contents from disk.
390 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
391}
392
393/// The auto-indent behavior associated with an editing operation.
394/// For some editing operations, each affected line of text has its
395/// indentation recomputed. For other operations, the entire block
396/// of edited text is adjusted uniformly.
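///
/// As a rough sketch (assuming `buffer` is a [`Buffer`] and `cx` its
/// [`Context`]; the inserted text and location are made up for illustration):
///
/// ```ignore
/// // Re-indent each inserted line independently.
/// buffer.edit(
///     [(Point::new(2, 0)..Point::new(2, 0), "if ok {\n    run();\n}\n")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// ```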
397#[derive(Clone, Debug)]
398pub enum AutoindentMode {
399 /// Indent each line of inserted text.
400 EachLine,
401 /// Apply the same indentation adjustment to all of the lines
402 /// in a given insertion.
403 Block {
404 /// The original start column of each insertion, if it was
405 /// copied from elsewhere.
406 ///
407 /// Knowing this start column makes it possible to preserve the
408 /// relative indentation of every line in the insertion from
409 /// when it was copied.
410 ///
        /// If the start column is `a`, and the first line of the insertion
        /// is auto-indented to column `b`, then every other line of the
        /// insertion is shifted by `b - a` columns.
414 original_start_columns: Vec<u32>,
415 },
416}
417
418#[derive(Clone)]
419struct AutoindentRequest {
420 before_edit: BufferSnapshot,
421 entries: Vec<AutoindentRequestEntry>,
422 is_block_mode: bool,
423 ignore_empty_lines: bool,
424}
425
426#[derive(Debug, Clone)]
427struct AutoindentRequestEntry {
428 /// A range of the buffer whose indentation should be adjusted.
429 range: Range<Anchor>,
430 /// Whether or not these lines should be considered brand new, for the
431 /// purpose of auto-indent. When text is not new, its indentation will
432 /// only be adjusted if the suggested indentation level has *changed*
433 /// since the edit was made.
434 first_line_is_new: bool,
435 indent_size: IndentSize,
436 original_indent_column: Option<u32>,
437}
438
439#[derive(Debug)]
440struct IndentSuggestion {
441 basis_row: u32,
442 delta: Ordering,
443 within_error: bool,
444}
445
446struct BufferChunkHighlights<'a> {
447 captures: SyntaxMapCaptures<'a>,
448 next_capture: Option<SyntaxMapCapture<'a>>,
449 stack: Vec<(usize, HighlightId)>,
450 highlight_maps: Vec<HighlightMap>,
451}
452
453/// An iterator that yields chunks of a buffer's text, along with their
454/// syntax highlights and diagnostic status.
455pub struct BufferChunks<'a> {
456 buffer_snapshot: Option<&'a BufferSnapshot>,
457 range: Range<usize>,
458 chunks: text::Chunks<'a>,
459 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
460 error_depth: usize,
461 warning_depth: usize,
462 information_depth: usize,
463 hint_depth: usize,
464 unnecessary_depth: usize,
465 highlights: Option<BufferChunkHighlights<'a>>,
466}
467
468/// A chunk of a buffer's text, along with its syntax highlight and
469/// diagnostic status.
470#[derive(Clone, Debug, Default)]
471pub struct Chunk<'a> {
472 /// The text of the chunk.
473 pub text: &'a str,
474 /// The syntax highlighting style of the chunk.
475 pub syntax_highlight_id: Option<HighlightId>,
476 /// The highlight style that has been applied to this chunk in
477 /// the editor.
478 pub highlight_style: Option<HighlightStyle>,
479 /// The severity of diagnostic associated with this chunk, if any.
480 pub diagnostic_severity: Option<DiagnosticSeverity>,
481 /// Whether this chunk of text is marked as unnecessary.
482 pub is_unnecessary: bool,
483 /// Whether this chunk of text was originally a tab character.
484 pub is_tab: bool,
485 /// An optional recipe for how the chunk should be presented.
486 pub renderer: Option<ChunkRenderer>,
487}
488
489/// A recipe for how the chunk should be presented.
490#[derive(Clone)]
491pub struct ChunkRenderer {
    /// Creates a custom element to represent this chunk.
493 pub render: Arc<dyn Send + Sync + Fn(&mut ChunkRendererContext) -> AnyElement>,
494 /// If true, the element is constrained to the shaped width of the text.
495 pub constrain_width: bool,
496}
497
498pub struct ChunkRendererContext<'a, 'b> {
499 pub window: &'a mut Window,
500 pub context: &'b mut App,
501 pub max_width: Pixels,
502}
503
504impl fmt::Debug for ChunkRenderer {
505 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
506 f.debug_struct("ChunkRenderer")
507 .field("constrain_width", &self.constrain_width)
508 .finish()
509 }
510}
511
512impl Deref for ChunkRendererContext<'_, '_> {
513 type Target = App;
514
515 fn deref(&self) -> &Self::Target {
516 self.context
517 }
518}
519
520impl DerefMut for ChunkRendererContext<'_, '_> {
521 fn deref_mut(&mut self) -> &mut Self::Target {
522 self.context
523 }
524}
525
526/// A set of edits to a given version of a buffer, computed asynchronously.
527#[derive(Debug)]
528pub struct Diff {
529 pub(crate) base_version: clock::Global,
530 line_ending: LineEnding,
531 pub edits: Vec<(Range<usize>, Arc<str>)>,
532}
533
534#[derive(Clone, Copy)]
535pub(crate) struct DiagnosticEndpoint {
536 offset: usize,
537 is_start: bool,
538 severity: DiagnosticSeverity,
539 is_unnecessary: bool,
540}
541
542/// A class of characters, used for characterizing a run of text.
543#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
544pub enum CharKind {
545 /// Whitespace.
546 Whitespace,
547 /// Punctuation.
548 Punctuation,
549 /// Word.
550 Word,
551}
552
/// A runnable is a set of data about a region that can be resolved into a task.
554pub struct Runnable {
555 pub tags: SmallVec<[RunnableTag; 1]>,
556 pub language: Arc<Language>,
557 pub buffer: BufferId,
558}
559
560#[derive(Default, Clone, Debug)]
561pub struct HighlightedText {
562 pub text: SharedString,
563 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
564}
565
566#[derive(Default, Debug)]
567struct HighlightedTextBuilder {
568 pub text: String,
569 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
570}
571
572impl HighlightedText {
573 pub fn from_buffer_range<T: ToOffset>(
574 range: Range<T>,
575 snapshot: &text::BufferSnapshot,
576 syntax_snapshot: &SyntaxSnapshot,
577 override_style: Option<HighlightStyle>,
578 syntax_theme: &SyntaxTheme,
579 ) -> Self {
580 let mut highlighted_text = HighlightedTextBuilder::default();
581 highlighted_text.add_text_from_buffer_range(
582 range,
583 snapshot,
584 syntax_snapshot,
585 override_style,
586 syntax_theme,
587 );
588 highlighted_text.build()
589 }
590
591 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
592 gpui::StyledText::new(self.text.clone())
593 .with_default_highlights(default_style, self.highlights.iter().cloned())
594 }
595
    /// Returns the first line, with leading whitespace trimmed unless a
    /// highlight starts within it, along with a boolean indicating whether
    /// more lines follow.
598 pub fn first_line_preview(self) -> (Self, bool) {
599 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
600 let first_line = &self.text[..newline_ix];
601
602 // Trim leading whitespace, unless an edit starts prior to it.
603 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
604 if let Some((first_highlight_range, _)) = self.highlights.first() {
605 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
606 }
607
608 let preview_text = &first_line[preview_start_ix..];
609 let preview_highlights = self
610 .highlights
611 .into_iter()
612 .take_while(|(range, _)| range.start < newline_ix)
613 .filter_map(|(mut range, highlight)| {
614 range.start = range.start.saturating_sub(preview_start_ix);
615 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
616 if range.is_empty() {
617 None
618 } else {
619 Some((range, highlight))
620 }
621 });
622
623 let preview = Self {
624 text: SharedString::new(preview_text),
625 highlights: preview_highlights.collect(),
626 };
627
628 (preview, self.text.len() > newline_ix)
629 }
630}
631
632impl HighlightedTextBuilder {
633 pub fn build(self) -> HighlightedText {
634 HighlightedText {
635 text: self.text.into(),
636 highlights: self.highlights,
637 }
638 }
639
640 pub fn add_text_from_buffer_range<T: ToOffset>(
641 &mut self,
642 range: Range<T>,
643 snapshot: &text::BufferSnapshot,
644 syntax_snapshot: &SyntaxSnapshot,
645 override_style: Option<HighlightStyle>,
646 syntax_theme: &SyntaxTheme,
647 ) {
648 let range = range.to_offset(snapshot);
649 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
650 let start = self.text.len();
651 self.text.push_str(chunk.text);
652 let end = self.text.len();
653
654 if let Some(mut highlight_style) = chunk
655 .syntax_highlight_id
656 .and_then(|id| id.style(syntax_theme))
657 {
658 if let Some(override_style) = override_style {
659 highlight_style.highlight(override_style);
660 }
661 self.highlights.push((start..end, highlight_style));
662 } else if let Some(override_style) = override_style {
663 self.highlights.push((start..end, override_style));
664 }
665 }
666 }
667
668 fn highlighted_chunks<'a>(
669 range: Range<usize>,
670 snapshot: &'a text::BufferSnapshot,
671 syntax_snapshot: &'a SyntaxSnapshot,
672 ) -> BufferChunks<'a> {
673 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
674 grammar.highlights_query.as_ref()
675 });
676
677 let highlight_maps = captures
678 .grammars()
679 .iter()
680 .map(|grammar| grammar.highlight_map())
681 .collect();
682
683 BufferChunks::new(
684 snapshot.as_rope(),
685 range,
686 Some((captures, highlight_maps)),
687 false,
688 None,
689 )
690 }
691}
692
693#[derive(Clone)]
694pub struct EditPreview {
695 old_snapshot: text::BufferSnapshot,
696 applied_edits_snapshot: text::BufferSnapshot,
697 syntax_snapshot: SyntaxSnapshot,
698}
699
700impl EditPreview {
701 pub fn highlight_edits(
702 &self,
703 current_snapshot: &BufferSnapshot,
704 edits: &[(Range<Anchor>, String)],
705 include_deletions: bool,
706 cx: &App,
707 ) -> HighlightedText {
708 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
709 return HighlightedText::default();
710 };
711
712 let mut highlighted_text = HighlightedTextBuilder::default();
713
714 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
715
716 let insertion_highlight_style = HighlightStyle {
717 background_color: Some(cx.theme().status().created_background),
718 ..Default::default()
719 };
720 let deletion_highlight_style = HighlightStyle {
721 background_color: Some(cx.theme().status().deleted_background),
722 ..Default::default()
723 };
724 let syntax_theme = cx.theme().syntax();
725
726 for (range, edit_text) in edits {
727 let edit_new_end_in_preview_snapshot = range
728 .end
729 .bias_right(&self.old_snapshot)
730 .to_offset(&self.applied_edits_snapshot);
731 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
732
733 let unchanged_range_in_preview_snapshot =
734 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
735 if !unchanged_range_in_preview_snapshot.is_empty() {
736 highlighted_text.add_text_from_buffer_range(
737 unchanged_range_in_preview_snapshot,
738 &self.applied_edits_snapshot,
739 &self.syntax_snapshot,
740 None,
741 &syntax_theme,
742 );
743 }
744
745 let range_in_current_snapshot = range.to_offset(current_snapshot);
746 if include_deletions && !range_in_current_snapshot.is_empty() {
747 highlighted_text.add_text_from_buffer_range(
748 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
751 Some(deletion_highlight_style),
752 &syntax_theme,
753 );
754 }
755
756 if !edit_text.is_empty() {
757 highlighted_text.add_text_from_buffer_range(
758 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
759 &self.applied_edits_snapshot,
760 &self.syntax_snapshot,
761 Some(insertion_highlight_style),
762 &syntax_theme,
763 );
764 }
765
766 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
767 }
768
769 highlighted_text.add_text_from_buffer_range(
770 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
771 &self.applied_edits_snapshot,
772 &self.syntax_snapshot,
773 None,
774 &syntax_theme,
775 );
776
777 highlighted_text.build()
778 }
779
780 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
781 let (first, _) = edits.first()?;
782 let (last, _) = edits.last()?;
783
784 let start = first
785 .start
786 .bias_left(&self.old_snapshot)
787 .to_point(&self.applied_edits_snapshot);
788 let end = last
789 .end
790 .bias_right(&self.old_snapshot)
791 .to_point(&self.applied_edits_snapshot);
792
793 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
794 let range = Point::new(start.row, 0)
795 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
796
797 Some(range.to_offset(&self.applied_edits_snapshot))
798 }
799}
800
801#[derive(Clone, Debug, PartialEq, Eq)]
802pub struct BracketMatch {
803 pub open_range: Range<usize>,
804 pub close_range: Range<usize>,
805 pub newline_only: bool,
806}
807
808impl Buffer {
809 /// Create a new buffer with the given base text.
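    ///
    /// A minimal sketch of constructing one as a GPUI entity (assuming an app
    /// context value named `cx` is in scope):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("hello world", cx));
    /// ```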
810 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
811 Self::build(
812 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
813 None,
814 Capability::ReadWrite,
815 )
816 }
817
818 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
819 pub fn local_normalized(
820 base_text_normalized: Rope,
821 line_ending: LineEnding,
822 cx: &Context<Self>,
823 ) -> Self {
824 Self::build(
825 TextBuffer::new_normalized(
826 0,
827 cx.entity_id().as_non_zero_u64().into(),
828 line_ending,
829 base_text_normalized,
830 ),
831 None,
832 Capability::ReadWrite,
833 )
834 }
835
836 /// Create a new buffer that is a replica of a remote buffer.
837 pub fn remote(
838 remote_id: BufferId,
839 replica_id: ReplicaId,
840 capability: Capability,
841 base_text: impl Into<String>,
842 ) -> Self {
843 Self::build(
844 TextBuffer::new(replica_id, remote_id, base_text.into()),
845 None,
846 capability,
847 )
848 }
849
850 /// Create a new buffer that is a replica of a remote buffer, populating its
851 /// state from the given protobuf message.
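    ///
    /// A hedged sketch (the `replica_id`, `state`, and `cx` values are assumed
    /// to come from the collaboration layer):
    ///
    /// ```ignore
    /// let buffer = cx.new(|_| {
    ///     Buffer::from_proto(replica_id, Capability::ReadWrite, state, None).unwrap()
    /// });
    /// ```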
852 pub fn from_proto(
853 replica_id: ReplicaId,
854 capability: Capability,
855 message: proto::BufferState,
856 file: Option<Arc<dyn File>>,
857 ) -> Result<Self> {
858 let buffer_id = BufferId::new(message.id)
859 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
860 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
861 let mut this = Self::build(buffer, file, capability);
862 this.text.set_line_ending(proto::deserialize_line_ending(
863 rpc::proto::LineEnding::from_i32(message.line_ending)
864 .ok_or_else(|| anyhow!("missing line_ending"))?,
865 ));
866 this.saved_version = proto::deserialize_version(&message.saved_version);
867 this.saved_mtime = message.saved_mtime.map(|time| time.into());
868 Ok(this)
869 }
870
871 /// Serialize the buffer's state to a protobuf message.
872 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
873 proto::BufferState {
874 id: self.remote_id().into(),
875 file: self.file.as_ref().map(|f| f.to_proto(cx)),
876 base_text: self.base_text().to_string(),
877 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
878 saved_version: proto::serialize_version(&self.saved_version),
879 saved_mtime: self.saved_mtime.map(|time| time.into()),
880 }
881 }
882
883 /// Serialize as protobufs all of the changes to the buffer since the given version.
884 pub fn serialize_ops(
885 &self,
886 since: Option<clock::Global>,
887 cx: &App,
888 ) -> Task<Vec<proto::Operation>> {
889 let mut operations = Vec::new();
890 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
891
892 operations.extend(self.remote_selections.iter().map(|(_, set)| {
893 proto::serialize_operation(&Operation::UpdateSelections {
894 selections: set.selections.clone(),
895 lamport_timestamp: set.lamport_timestamp,
896 line_mode: set.line_mode,
897 cursor_shape: set.cursor_shape,
898 })
899 }));
900
901 for (server_id, diagnostics) in &self.diagnostics {
902 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
903 lamport_timestamp: self.diagnostics_timestamp,
904 server_id: *server_id,
905 diagnostics: diagnostics.iter().cloned().collect(),
906 }));
907 }
908
909 for (server_id, completions) in &self.completion_triggers_per_language_server {
910 operations.push(proto::serialize_operation(
911 &Operation::UpdateCompletionTriggers {
912 triggers: completions.iter().cloned().collect(),
913 lamport_timestamp: self.completion_triggers_timestamp,
914 server_id: *server_id,
915 },
916 ));
917 }
918
919 let text_operations = self.text.operations().clone();
920 cx.background_spawn(async move {
921 let since = since.unwrap_or_default();
922 operations.extend(
923 text_operations
924 .iter()
925 .filter(|(_, op)| !since.observed(op.timestamp()))
926 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
927 );
928 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
929 operations
930 })
931 }
932
933 /// Assign a language to the buffer, returning the buffer.
934 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
935 self.set_language(Some(language), cx);
936 self
937 }
938
939 /// Returns the [`Capability`] of this buffer.
940 pub fn capability(&self) -> Capability {
941 self.capability
942 }
943
944 /// Whether this buffer can only be read.
945 pub fn read_only(&self) -> bool {
946 self.capability == Capability::ReadOnly
947 }
948
949 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
950 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
951 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
952 let snapshot = buffer.snapshot();
953 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
954 Self {
955 saved_mtime,
956 saved_version: buffer.version(),
957 preview_version: buffer.version(),
958 reload_task: None,
959 transaction_depth: 0,
960 was_dirty_before_starting_transaction: None,
961 has_unsaved_edits: Cell::new((buffer.version(), false)),
962 text: buffer,
963 branch_state: None,
964 file,
965 capability,
966 syntax_map,
967 reparse: None,
968 non_text_state_update_count: 0,
969 sync_parse_timeout: Duration::from_millis(1),
970 parse_status: async_watch::channel(ParseStatus::Idle),
971 autoindent_requests: Default::default(),
972 pending_autoindent: Default::default(),
973 language: None,
974 remote_selections: Default::default(),
975 diagnostics: Default::default(),
976 diagnostics_timestamp: Default::default(),
977 completion_triggers: Default::default(),
978 completion_triggers_per_language_server: Default::default(),
979 completion_triggers_timestamp: Default::default(),
980 deferred_ops: OperationQueue::new(),
981 has_conflict: false,
982 _subscriptions: Vec::new(),
983 }
984 }
985
986 pub fn build_snapshot(
987 text: Rope,
988 language: Option<Arc<Language>>,
989 language_registry: Option<Arc<LanguageRegistry>>,
990 cx: &mut App,
991 ) -> impl Future<Output = BufferSnapshot> {
992 let entity_id = cx.reserve_entity::<Self>().entity_id();
993 let buffer_id = entity_id.as_non_zero_u64().into();
994 async move {
995 let text =
996 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
997 let mut syntax = SyntaxMap::new(&text).snapshot();
998 if let Some(language) = language.clone() {
999 let text = text.clone();
1000 let language = language.clone();
1001 let language_registry = language_registry.clone();
1002 syntax.reparse(&text, language_registry, language);
1003 }
1004 BufferSnapshot {
1005 text,
1006 syntax,
1007 file: None,
1008 diagnostics: Default::default(),
1009 remote_selections: Default::default(),
1010 language,
1011 non_text_state_update_count: 0,
1012 }
1013 }
1014 }
1015
1016 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1017 let entity_id = cx.reserve_entity::<Self>().entity_id();
1018 let buffer_id = entity_id.as_non_zero_u64().into();
1019 let text =
1020 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
1021 let syntax = SyntaxMap::new(&text).snapshot();
1022 BufferSnapshot {
1023 text,
1024 syntax,
1025 file: None,
1026 diagnostics: Default::default(),
1027 remote_selections: Default::default(),
1028 language: None,
1029 non_text_state_update_count: 0,
1030 }
1031 }
1032
1033 #[cfg(any(test, feature = "test-support"))]
1034 pub fn build_snapshot_sync(
1035 text: Rope,
1036 language: Option<Arc<Language>>,
1037 language_registry: Option<Arc<LanguageRegistry>>,
1038 cx: &mut App,
1039 ) -> BufferSnapshot {
1040 let entity_id = cx.reserve_entity::<Self>().entity_id();
1041 let buffer_id = entity_id.as_non_zero_u64().into();
1042 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1043 let mut syntax = SyntaxMap::new(&text).snapshot();
1044 if let Some(language) = language.clone() {
1045 let text = text.clone();
1046 let language = language.clone();
1047 let language_registry = language_registry.clone();
1048 syntax.reparse(&text, language_registry, language);
1049 }
1050 BufferSnapshot {
1051 text,
1052 syntax,
1053 file: None,
1054 diagnostics: Default::default(),
1055 remote_selections: Default::default(),
1056 language,
1057 non_text_state_update_count: 0,
1058 }
1059 }
1060
1061 /// Retrieve a snapshot of the buffer's current state. This is computationally
1062 /// cheap, and allows reading from the buffer on a background thread.
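    ///
    /// For instance (an illustrative sketch; `buffer` and `cx` are assumed to
    /// be in scope, and the snapshot is moved onto a background task):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // Reading from the snapshot does not block the main thread.
    ///     let len = snapshot.len();
    ///     println!("buffer is {len} bytes long");
    /// })
    /// .detach();
    /// ```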
1063 pub fn snapshot(&self) -> BufferSnapshot {
1064 let text = self.text.snapshot();
1065 let mut syntax_map = self.syntax_map.lock();
1066 syntax_map.interpolate(&text);
1067 let syntax = syntax_map.snapshot();
1068
1069 BufferSnapshot {
1070 text,
1071 syntax,
1072 file: self.file.clone(),
1073 remote_selections: self.remote_selections.clone(),
1074 diagnostics: self.diagnostics.clone(),
1075 language: self.language.clone(),
1076 non_text_state_update_count: self.non_text_state_update_count,
1077 }
1078 }
1079
1080 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1081 let this = cx.entity();
1082 cx.new(|cx| {
1083 let mut branch = Self {
1084 branch_state: Some(BufferBranchState {
1085 base_buffer: this.clone(),
1086 merged_operations: Default::default(),
1087 }),
1088 language: self.language.clone(),
1089 has_conflict: self.has_conflict,
1090 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1091 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1092 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1093 };
1094 if let Some(language_registry) = self.language_registry() {
1095 branch.set_language_registry(language_registry);
1096 }
1097
1098 // Reparse the branch buffer so that we get syntax highlighting immediately.
1099 branch.reparse(cx);
1100
1101 branch
1102 })
1103 }
1104
1105 pub fn preview_edits(
1106 &self,
1107 edits: Arc<[(Range<Anchor>, String)]>,
1108 cx: &App,
1109 ) -> Task<EditPreview> {
1110 let registry = self.language_registry();
1111 let language = self.language().cloned();
1112 let old_snapshot = self.text.snapshot();
1113 let mut branch_buffer = self.text.branch();
1114 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1115 cx.background_spawn(async move {
1116 if !edits.is_empty() {
1117 if let Some(language) = language.clone() {
1118 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1119 }
1120
1121 branch_buffer.edit(edits.iter().cloned());
1122 let snapshot = branch_buffer.snapshot();
1123 syntax_snapshot.interpolate(&snapshot);
1124
1125 if let Some(language) = language {
1126 syntax_snapshot.reparse(&snapshot, registry, language);
1127 }
1128 }
1129 EditPreview {
1130 old_snapshot,
1131 applied_edits_snapshot: branch_buffer.snapshot(),
1132 syntax_snapshot,
1133 }
1134 })
1135 }
1136
1137 /// Applies all of the changes in this buffer that intersect any of the
1138 /// given `ranges` to its base buffer.
1139 ///
1140 /// If `ranges` is empty, then all changes will be applied. This buffer must
1141 /// be a branch buffer to call this method.
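    ///
    /// A rough usage sketch (assuming `base` is an `Entity<Buffer>` and `cx`
    /// an app context):
    ///
    /// ```ignore
    /// let branch = base.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |buffer, cx| {
    ///     buffer.edit(
    ///         [(Point::new(0, 0)..Point::new(0, 0), "// prepended comment\n")],
    ///         None,
    ///         cx,
    ///     );
    ///     // Apply every branch edit back to the base buffer.
    ///     buffer.merge_into_base(Vec::new(), cx);
    /// });
    /// ```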
1142 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1143 let Some(base_buffer) = self.base_buffer() else {
1144 debug_panic!("not a branch buffer");
1145 return;
1146 };
1147
1148 let mut ranges = if ranges.is_empty() {
1149 &[0..usize::MAX]
1150 } else {
1151 ranges.as_slice()
1152 }
1153 .into_iter()
1154 .peekable();
1155
1156 let mut edits = Vec::new();
1157 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1158 let mut is_included = false;
1159 while let Some(range) = ranges.peek() {
1160 if range.end < edit.new.start {
1161 ranges.next().unwrap();
1162 } else {
1163 if range.start <= edit.new.end {
1164 is_included = true;
1165 }
1166 break;
1167 }
1168 }
1169
1170 if is_included {
1171 edits.push((
1172 edit.old.clone(),
1173 self.text_for_range(edit.new.clone()).collect::<String>(),
1174 ));
1175 }
1176 }
1177
1178 let operation = base_buffer.update(cx, |base_buffer, cx| {
1179 // cx.emit(BufferEvent::DiffBaseChanged);
1180 base_buffer.edit(edits, None, cx)
1181 });
1182
1183 if let Some(operation) = operation {
1184 if let Some(BufferBranchState {
1185 merged_operations, ..
1186 }) = &mut self.branch_state
1187 {
1188 merged_operations.push(operation);
1189 }
1190 }
1191 }
1192
1193 fn on_base_buffer_event(
1194 &mut self,
1195 _: Entity<Buffer>,
1196 event: &BufferEvent,
1197 cx: &mut Context<Self>,
1198 ) {
1199 let BufferEvent::Operation { operation, .. } = event else {
1200 return;
1201 };
1202 let Some(BufferBranchState {
1203 merged_operations, ..
1204 }) = &mut self.branch_state
1205 else {
1206 return;
1207 };
1208
1209 let mut operation_to_undo = None;
1210 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1211 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1212 merged_operations.remove(ix);
1213 operation_to_undo = Some(operation.timestamp);
1214 }
1215 }
1216
1217 self.apply_ops([operation.clone()], cx);
1218
1219 if let Some(timestamp) = operation_to_undo {
1220 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1221 self.undo_operations(counts, cx);
1222 }
1223 }
1224
1225 #[cfg(test)]
1226 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1227 &self.text
1228 }
1229
1230 /// Retrieve a snapshot of the buffer's raw text, without any
1231 /// language-related state like the syntax tree or diagnostics.
1232 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1233 self.text.snapshot()
1234 }
1235
1236 /// The file associated with the buffer, if any.
1237 pub fn file(&self) -> Option<&Arc<dyn File>> {
1238 self.file.as_ref()
1239 }
1240
1241 /// The version of the buffer that was last saved or reloaded from disk.
1242 pub fn saved_version(&self) -> &clock::Global {
1243 &self.saved_version
1244 }
1245
1246 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1247 pub fn saved_mtime(&self) -> Option<MTime> {
1248 self.saved_mtime
1249 }
1250
1251 /// Assign a language to the buffer.
1252 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1253 self.non_text_state_update_count += 1;
1254 self.syntax_map.lock().clear(&self.text);
1255 self.language = language;
1256 self.reparse(cx);
1257 cx.emit(BufferEvent::LanguageChanged);
1258 }
1259
1260 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1261 /// other languages if parts of the buffer are written in different languages.
1262 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1263 self.syntax_map
1264 .lock()
1265 .set_language_registry(language_registry);
1266 }
1267
1268 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1269 self.syntax_map.lock().language_registry()
1270 }
1271
1272 /// Assign the buffer a new [`Capability`].
1273 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1274 self.capability = capability;
1275 cx.emit(BufferEvent::CapabilityChanged)
1276 }
1277
1278 /// This method is called to signal that the buffer has been saved.
1279 pub fn did_save(
1280 &mut self,
1281 version: clock::Global,
1282 mtime: Option<MTime>,
1283 cx: &mut Context<Self>,
1284 ) {
1285 self.saved_version = version;
1286 self.has_unsaved_edits
1287 .set((self.saved_version().clone(), false));
1288 self.has_conflict = false;
1289 self.saved_mtime = mtime;
1290 cx.emit(BufferEvent::Saved);
1291 cx.notify();
1292 }
1293
1294 /// This method is called to signal that the buffer has been discarded.
1295 pub fn discarded(&self, cx: &mut Context<Self>) {
1296 cx.emit(BufferEvent::Discarded);
1297 cx.notify();
1298 }
1299
1300 /// Reloads the contents of the buffer from disk.
1301 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1302 let (tx, rx) = futures::channel::oneshot::channel();
1303 let prev_version = self.text.version();
1304 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
1305 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
1306 let file = this.file.as_ref()?.as_local()?;
1307 Some((file.disk_state().mtime(), file.load(cx)))
1308 })?
1309 else {
1310 return Ok(());
1311 };
1312
1313 let new_text = new_text.await?;
1314 let diff = this
1315 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
1316 .await;
1317 this.update(&mut cx, |this, cx| {
1318 if this.version() == diff.base_version {
1319 this.finalize_last_transaction();
1320 this.apply_diff(diff, cx);
1321 tx.send(this.finalize_last_transaction().cloned()).ok();
1322 this.has_conflict = false;
1323 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1324 } else {
1325 if !diff.edits.is_empty()
1326 || this
1327 .edits_since::<usize>(&diff.base_version)
1328 .next()
1329 .is_some()
1330 {
1331 this.has_conflict = true;
1332 }
1333
1334 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1335 }
1336
1337 this.reload_task.take();
1338 })
1339 }));
1340 rx
1341 }
1342
1343 /// This method is called to signal that the buffer has been reloaded.
1344 pub fn did_reload(
1345 &mut self,
1346 version: clock::Global,
1347 line_ending: LineEnding,
1348 mtime: Option<MTime>,
1349 cx: &mut Context<Self>,
1350 ) {
1351 self.saved_version = version;
1352 self.has_unsaved_edits
1353 .set((self.saved_version.clone(), false));
1354 self.text.set_line_ending(line_ending);
1355 self.saved_mtime = mtime;
1356 cx.emit(BufferEvent::Reloaded);
1357 cx.notify();
1358 }
1359
1360 /// Updates the [`File`] backing this buffer. This should be called when
1361 /// the file has changed or has been deleted.
1362 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1363 let was_dirty = self.is_dirty();
1364 let mut file_changed = false;
1365
1366 if let Some(old_file) = self.file.as_ref() {
1367 if new_file.path() != old_file.path() {
1368 file_changed = true;
1369 }
1370
1371 let old_state = old_file.disk_state();
1372 let new_state = new_file.disk_state();
1373 if old_state != new_state {
1374 file_changed = true;
1375 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1376 cx.emit(BufferEvent::ReloadNeeded)
1377 }
1378 }
1379 } else {
1380 file_changed = true;
1381 };
1382
1383 self.file = Some(new_file);
1384 if file_changed {
1385 self.non_text_state_update_count += 1;
1386 if was_dirty != self.is_dirty() {
1387 cx.emit(BufferEvent::DirtyChanged);
1388 }
1389 cx.emit(BufferEvent::FileHandleChanged);
1390 cx.notify();
1391 }
1392 }
1393
1394 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1395 Some(self.branch_state.as_ref()?.base_buffer.clone())
1396 }
1397
1398 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1399 pub fn language(&self) -> Option<&Arc<Language>> {
1400 self.language.as_ref()
1401 }
1402
1403 /// Returns the [`Language`] at the given location.
1404 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1405 let offset = position.to_offset(self);
1406 self.syntax_map
1407 .lock()
1408 .layers_for_range(offset..offset, &self.text, false)
1409 .last()
1410 .map(|info| info.language.clone())
1411 .or_else(|| self.language.clone())
1412 }
1413
1414 /// An integer version number that accounts for all updates besides
1415 /// the buffer's text itself (which is versioned via a version vector).
1416 pub fn non_text_state_update_count(&self) -> usize {
1417 self.non_text_state_update_count
1418 }
1419
1420 /// Whether the buffer is being parsed in the background.
1421 #[cfg(any(test, feature = "test-support"))]
1422 pub fn is_parsing(&self) -> bool {
1423 self.reparse.is_some()
1424 }
1425
1426 /// Indicates whether the buffer contains any regions that may be
1427 /// written in a language that hasn't been loaded yet.
1428 pub fn contains_unknown_injections(&self) -> bool {
1429 self.syntax_map.lock().contains_unknown_injections()
1430 }
1431
1432 #[cfg(test)]
1433 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1434 self.sync_parse_timeout = timeout;
1435 }
1436
1437 /// Called after an edit to synchronize the buffer's main parse tree with
1438 /// the buffer's new underlying state.
1439 ///
1440 /// Locks the syntax map and interpolates the edits since the last reparse
1441 /// into the foreground syntax tree.
1442 ///
1443 /// Then takes a stable snapshot of the syntax map before unlocking it.
1444 /// The snapshot with the interpolated edits is sent to a background thread,
1445 /// where we ask Tree-sitter to perform an incremental parse.
1446 ///
1447 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1448 /// waiting on the parse to complete. As soon as it completes, we proceed
1449 /// synchronously, unless a 1ms timeout elapses.
1450 ///
1451 /// If we time out waiting on the parse, we spawn a second task waiting
1452 /// until the parse does complete and return with the interpolated tree still
1453 /// in the foreground. When the background parse completes, call back into
1454 /// the main thread and assign the foreground parse state.
1455 ///
1456 /// If the buffer or grammar changed since the start of the background parse,
1457 /// initiate an additional reparse recursively. To avoid concurrent parses
1458 /// for the same buffer, we only initiate a new parse if we are not already
1459 /// parsing in the background.
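    ///
    /// Callers that need to wait for parsing to settle can watch
    /// [`Buffer::parse_status`]. A hedged sketch, assuming the `async_watch`
    /// receiver mirrors tokio's watch API (`borrow`/`changed`):
    ///
    /// ```ignore
    /// let mut status = buffer.read(cx).parse_status();
    /// while *status.borrow() != ParseStatus::Idle {
    ///     status.changed().await?;
    /// }
    /// ```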
1460 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1461 if self.reparse.is_some() {
1462 return;
1463 }
1464 let language = if let Some(language) = self.language.clone() {
1465 language
1466 } else {
1467 return;
1468 };
1469
1470 let text = self.text_snapshot();
1471 let parsed_version = self.version();
1472
1473 let mut syntax_map = self.syntax_map.lock();
1474 syntax_map.interpolate(&text);
1475 let language_registry = syntax_map.language_registry();
1476 let mut syntax_snapshot = syntax_map.snapshot();
1477 drop(syntax_map);
1478
1479 let parse_task = cx.background_spawn({
1480 let language = language.clone();
1481 let language_registry = language_registry.clone();
1482 async move {
1483 syntax_snapshot.reparse(&text, language_registry, language);
1484 syntax_snapshot
1485 }
1486 });
1487
1488 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1489 match cx
1490 .background_executor()
1491 .block_with_timeout(self.sync_parse_timeout, parse_task)
1492 {
1493 Ok(new_syntax_snapshot) => {
1494 self.did_finish_parsing(new_syntax_snapshot, cx);
1495 self.reparse = None;
1496 }
1497 Err(parse_task) => {
1498 self.reparse = Some(cx.spawn(move |this, mut cx| async move {
1499 let new_syntax_map = parse_task.await;
1500 this.update(&mut cx, move |this, cx| {
1501 let grammar_changed =
1502 this.language.as_ref().map_or(true, |current_language| {
1503 !Arc::ptr_eq(&language, current_language)
1504 });
1505 let language_registry_changed = new_syntax_map
1506 .contains_unknown_injections()
1507 && language_registry.map_or(false, |registry| {
1508 registry.version() != new_syntax_map.language_registry_version()
1509 });
1510 let parse_again = language_registry_changed
1511 || grammar_changed
1512 || this.version.changed_since(&parsed_version);
1513 this.did_finish_parsing(new_syntax_map, cx);
1514 this.reparse = None;
1515 if parse_again {
1516 this.reparse(cx);
1517 }
1518 })
1519 .ok();
1520 }));
1521 }
1522 }
1523 }
1524
1525 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1526 self.non_text_state_update_count += 1;
1527 self.syntax_map.lock().did_parse(syntax_snapshot);
1528 self.request_autoindent(cx);
1529 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1530 cx.emit(BufferEvent::Reparsed);
1531 cx.notify();
1532 }
1533
1534 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1535 self.parse_status.1.clone()
1536 }
1537
1538 /// Assign to the buffer a set of diagnostics created by a given language server.
1539 pub fn update_diagnostics(
1540 &mut self,
1541 server_id: LanguageServerId,
1542 diagnostics: DiagnosticSet,
1543 cx: &mut Context<Self>,
1544 ) {
1545 let lamport_timestamp = self.text.lamport_clock.tick();
1546 let op = Operation::UpdateDiagnostics {
1547 server_id,
1548 diagnostics: diagnostics.iter().cloned().collect(),
1549 lamport_timestamp,
1550 };
1551 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1552 self.send_operation(op, true, cx);
1553 }
1554
1555 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1556 if let Some(indent_sizes) = self.compute_autoindents() {
1557 let indent_sizes = cx.background_spawn(indent_sizes);
1558 match cx
1559 .background_executor()
1560 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1561 {
1562 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1563 Err(indent_sizes) => {
1564 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1565 let indent_sizes = indent_sizes.await;
1566 this.update(&mut cx, |this, cx| {
1567 this.apply_autoindents(indent_sizes, cx);
1568 })
1569 .ok();
1570 }));
1571 }
1572 }
1573 } else {
1574 self.autoindent_requests.clear();
1575 }
1576 }
1577
1578 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
1579 let max_rows_between_yields = 100;
1580 let snapshot = self.snapshot();
1581 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1582 return None;
1583 }
1584
1585 let autoindent_requests = self.autoindent_requests.clone();
1586 Some(async move {
1587 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1588 for request in autoindent_requests {
1589 // Resolve each edited range to its row in the current buffer and in the
1590 // buffer before this batch of edits.
1591 let mut row_ranges = Vec::new();
1592 let mut old_to_new_rows = BTreeMap::new();
1593 let mut language_indent_sizes_by_new_row = Vec::new();
1594 for entry in &request.entries {
1595 let position = entry.range.start;
1596 let new_row = position.to_point(&snapshot).row;
1597 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1598 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1599
1600 if !entry.first_line_is_new {
1601 let old_row = position.to_point(&request.before_edit).row;
1602 old_to_new_rows.insert(old_row, new_row);
1603 }
1604 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1605 }
1606
1607 // Build a map containing the suggested indentation for each of the edited lines
1608 // with respect to the state of the buffer before these edits. This map is keyed
1609 // by the rows for these lines in the current state of the buffer.
1610 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1611 let old_edited_ranges =
1612 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1613 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1614 let mut language_indent_size = IndentSize::default();
1615 for old_edited_range in old_edited_ranges {
1616 let suggestions = request
1617 .before_edit
1618 .suggest_autoindents(old_edited_range.clone())
1619 .into_iter()
1620 .flatten();
1621 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1622 if let Some(suggestion) = suggestion {
1623 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1624
1625 // Find the indent size based on the language for this row.
1626 while let Some((row, size)) = language_indent_sizes.peek() {
1627 if *row > new_row {
1628 break;
1629 }
1630 language_indent_size = *size;
1631 language_indent_sizes.next();
1632 }
1633
1634 let suggested_indent = old_to_new_rows
1635 .get(&suggestion.basis_row)
1636 .and_then(|from_row| {
1637 Some(old_suggestions.get(from_row).copied()?.0)
1638 })
1639 .unwrap_or_else(|| {
1640 request
1641 .before_edit
1642 .indent_size_for_line(suggestion.basis_row)
1643 })
1644 .with_delta(suggestion.delta, language_indent_size);
1645 old_suggestions
1646 .insert(new_row, (suggested_indent, suggestion.within_error));
1647 }
1648 }
1649 yield_now().await;
1650 }
1651
1652 // Compute new suggestions for each line, but only include them in the result
1653 // if they differ from the old suggestion for that line.
1654 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1655 let mut language_indent_size = IndentSize::default();
1656 for (row_range, original_indent_column) in row_ranges {
1657 let new_edited_row_range = if request.is_block_mode {
1658 row_range.start..row_range.start + 1
1659 } else {
1660 row_range.clone()
1661 };
1662
1663 let suggestions = snapshot
1664 .suggest_autoindents(new_edited_row_range.clone())
1665 .into_iter()
1666 .flatten();
1667 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1668 if let Some(suggestion) = suggestion {
1669 // Find the indent size based on the language for this row.
1670 while let Some((row, size)) = language_indent_sizes.peek() {
1671 if *row > new_row {
1672 break;
1673 }
1674 language_indent_size = *size;
1675 language_indent_sizes.next();
1676 }
1677
1678 let suggested_indent = indent_sizes
1679 .get(&suggestion.basis_row)
1680 .copied()
1681 .map(|e| e.0)
1682 .unwrap_or_else(|| {
1683 snapshot.indent_size_for_line(suggestion.basis_row)
1684 })
1685 .with_delta(suggestion.delta, language_indent_size);
1686
1687 if old_suggestions.get(&new_row).map_or(
1688 true,
1689 |(old_indentation, was_within_error)| {
1690 suggested_indent != *old_indentation
1691 && (!suggestion.within_error || *was_within_error)
1692 },
1693 ) {
1694 indent_sizes.insert(
1695 new_row,
1696 (suggested_indent, request.ignore_empty_lines),
1697 );
1698 }
1699 }
1700 }
1701
1702 if let (true, Some(original_indent_column)) =
1703 (request.is_block_mode, original_indent_column)
1704 {
1705 let new_indent =
1706 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1707 *indent
1708 } else {
1709 snapshot.indent_size_for_line(row_range.start)
1710 };
1711 let delta = new_indent.len as i64 - original_indent_column as i64;
1712 if delta != 0 {
1713 for row in row_range.skip(1) {
1714 indent_sizes.entry(row).or_insert_with(|| {
1715 let mut size = snapshot.indent_size_for_line(row);
1716 if size.kind == new_indent.kind {
1717 match delta.cmp(&0) {
1718 Ordering::Greater => size.len += delta as u32,
1719 Ordering::Less => {
1720 size.len = size.len.saturating_sub(-delta as u32)
1721 }
1722 Ordering::Equal => {}
1723 }
1724 }
1725 (size, request.ignore_empty_lines)
1726 });
1727 }
1728 }
1729 }
1730
1731 yield_now().await;
1732 }
1733 }
1734
1735 indent_sizes
1736 .into_iter()
1737 .filter_map(|(row, (indent, ignore_empty_lines))| {
1738 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1739 None
1740 } else {
1741 Some((row, indent))
1742 }
1743 })
1744 .collect()
1745 })
1746 }
1747
1748 fn apply_autoindents(
1749 &mut self,
1750 indent_sizes: BTreeMap<u32, IndentSize>,
1751 cx: &mut Context<Self>,
1752 ) {
1753 self.autoindent_requests.clear();
1754
1755 let edits: Vec<_> = indent_sizes
1756 .into_iter()
1757 .filter_map(|(row, indent_size)| {
1758 let current_size = indent_size_for_line(self, row);
1759 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1760 })
1761 .collect();
1762
1763 let preserve_preview = self.preserve_preview();
1764 self.edit(edits, None, cx);
1765 if preserve_preview {
1766 self.refresh_preview();
1767 }
1768 }
1769
1770 /// Create a minimal edit that will cause the given row to be indented
1771 /// with the given size. After applying this edit, the length of the line
1772 /// will always be at least `new_size.len`.
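///
/// # Example
///
/// A minimal sketch (not compiled); the row and indent sizes are illustrative
/// values only:
///
/// ```ignore
/// // Grow a 2-space indent to 4 spaces on row 0: the resulting edit inserts
/// // two more spaces at the start of the line.
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     0,
///     IndentSize::spaces(2),
///     IndentSize::spaces(4),
/// );
/// assert_eq!(
///     edit,
///     Some((Point::new(0, 0)..Point::new(0, 0), "  ".to_string()))
/// );
/// ```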
1773 pub fn edit_for_indent_size_adjustment(
1774 row: u32,
1775 current_size: IndentSize,
1776 new_size: IndentSize,
1777 ) -> Option<(Range<Point>, String)> {
1778 if new_size.kind == current_size.kind {
1779 match new_size.len.cmp(&current_size.len) {
1780 Ordering::Greater => {
1781 let point = Point::new(row, 0);
1782 Some((
1783 point..point,
1784 iter::repeat(new_size.char())
1785 .take((new_size.len - current_size.len) as usize)
1786 .collect::<String>(),
1787 ))
1788 }
1789
1790 Ordering::Less => Some((
1791 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1792 String::new(),
1793 )),
1794
1795 Ordering::Equal => None,
1796 }
1797 } else {
1798 Some((
1799 Point::new(row, 0)..Point::new(row, current_size.len),
1800 iter::repeat(new_size.char())
1801 .take(new_size.len as usize)
1802 .collect::<String>(),
1803 ))
1804 }
1805 }
1806
1807 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1808 /// and the given new text.
1809 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1810 let old_text = self.as_rope().clone();
1811 let base_version = self.version();
1812 cx.background_executor()
1813 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1814 let old_text = old_text.to_string();
1815 let line_ending = LineEnding::detect(&new_text);
1816 LineEnding::normalize(&mut new_text);
1817 let edits = text_diff(&old_text, &new_text);
1818 Diff {
1819 base_version,
1820 line_ending,
1821 edits,
1822 }
1823 })
1824 }
1825
1826 /// Spawns a background task that searches the buffer for any whitespace
1827 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1828 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1829 let old_text = self.as_rope().clone();
1830 let line_ending = self.line_ending();
1831 let base_version = self.version();
1832 cx.background_spawn(async move {
1833 let ranges = trailing_whitespace_ranges(&old_text);
1834 let empty = Arc::<str>::from("");
1835 Diff {
1836 base_version,
1837 line_ending,
1838 edits: ranges
1839 .into_iter()
1840 .map(|range| (range, empty.clone()))
1841 .collect(),
1842 }
1843 })
1844 }
1845
1846 /// Ensures that the buffer ends with a single newline character, and
1847 /// no other trailing whitespace.
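///
/// # Example
///
/// A rough sketch (not compiled); assumes `buffer` currently contains
/// `"fn main() {}  \n\n"` and that `cx` is a `&mut Context<Buffer>` supplied
/// by the caller:
///
/// ```ignore
/// buffer.ensure_final_newline(cx);
/// // All trailing whitespace is replaced by a single newline.
/// assert_eq!(buffer.as_rope().to_string(), "fn main() {}\n");
/// ```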
1848 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1849 let len = self.len();
1850 let mut offset = len;
1851 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1852 let non_whitespace_len = chunk
1853 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1854 .len();
1855 offset -= chunk.len();
1856 offset += non_whitespace_len;
1857 if non_whitespace_len != 0 {
1858 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1859 return;
1860 }
1861 break;
1862 }
1863 }
1864 self.edit([(offset..len, "\n")], None, cx);
1865 }
1866
1867 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1868 /// calculated, then adjust the diff to account for those changes, and discard any
1869 /// parts of the diff that conflict with those changes.
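///
/// # Example
///
/// A sketch of the intended round trip (not compiled); assumes `buffer` is an
/// `Entity<Buffer>` and that the surrounding code runs where entities can be
/// updated and tasks awaited:
///
/// ```ignore
/// let diff = buffer
///     .update(cx, |buffer, cx| buffer.diff("new contents\n".to_string(), cx))
///     .await;
/// buffer.update(cx, |buffer, cx| {
///     // Hunks that conflict with edits made since the diff was computed
///     // are discarded; the rest are applied in one transaction.
///     buffer.apply_diff(diff, cx);
/// });
/// ```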
1870 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1871 // Check for any edits to the buffer that have occurred since this diff
1872 // was computed.
1873 let snapshot = self.snapshot();
1874 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1875 let mut delta = 0;
1876 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1877 while let Some(edit_since) = edits_since.peek() {
1878 // If the edit occurs after a diff hunk, then it does not
1879 // affect that hunk.
1880 if edit_since.old.start > range.end {
1881 break;
1882 }
1883 // If the edit precedes the diff hunk, then adjust the hunk
1884 // to reflect the edit.
1885 else if edit_since.old.end < range.start {
1886 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1887 edits_since.next();
1888 }
1889 // If the edit intersects a diff hunk, then discard that hunk.
1890 else {
1891 return None;
1892 }
1893 }
1894
1895 let start = (range.start as i64 + delta) as usize;
1896 let end = (range.end as i64 + delta) as usize;
1897 Some((start..end, new_text))
1898 });
1899
1900 self.start_transaction();
1901 self.text.set_line_ending(diff.line_ending);
1902 self.edit(adjusted_edits, None, cx);
1903 self.end_transaction(cx)
1904 }
1905
1906 fn has_unsaved_edits(&self) -> bool {
1907 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1908
1909 if last_version == self.version {
1910 self.has_unsaved_edits
1911 .set((last_version, has_unsaved_edits));
1912 return has_unsaved_edits;
1913 }
1914
1915 let has_edits = self.has_edits_since(&self.saved_version);
1916 self.has_unsaved_edits
1917 .set((self.version.clone(), has_edits));
1918 has_edits
1919 }
1920
1921 /// Checks if the buffer has unsaved changes.
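///
/// # Example
///
/// Illustrative only (not compiled); assumes a freshly loaded, unmodified
/// buffer and a `cx: &mut Context<Buffer>`:
///
/// ```ignore
/// assert!(!buffer.is_dirty());
/// buffer.edit([(0..0, "x")], None, cx);
/// assert!(buffer.is_dirty());
/// ```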
1922 pub fn is_dirty(&self) -> bool {
1923 if self.capability == Capability::ReadOnly {
1924 return false;
1925 }
1926 if self.has_conflict || self.has_unsaved_edits() {
1927 return true;
1928 }
1929 match self.file.as_ref().map(|f| f.disk_state()) {
1930 Some(DiskState::New) => !self.is_empty(),
1931 Some(DiskState::Deleted) => true,
1932 _ => false,
1933 }
1934 }
1935
1936 /// Checks if the buffer and its file have both changed since the buffer
1937 /// was last saved or reloaded.
1938 pub fn has_conflict(&self) -> bool {
1939 if self.has_conflict {
1940 return true;
1941 }
1942 let Some(file) = self.file.as_ref() else {
1943 return false;
1944 };
1945 match file.disk_state() {
1946 DiskState::New => false,
1947 DiskState::Present { mtime } => match self.saved_mtime {
1948 Some(saved_mtime) => {
1949 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1950 }
1951 None => true,
1952 },
1953 DiskState::Deleted => true,
1954 }
1955 }
1956
1957 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1958 pub fn subscribe(&mut self) -> Subscription {
1959 self.text.subscribe()
1960 }
1961
1962 /// Starts a transaction, if one is not already in-progress. When undoing or
1963 /// redoing edits, all of the edits performed within a transaction are undone
1964 /// or redone together.
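///
/// # Example
///
/// A sketch (not compiled) of grouping two edits so that a single undo
/// reverts both; `buffer` and `cx` are assumed to come from the caller:
///
/// ```ignore
/// buffer.start_transaction();
/// buffer.edit([(0..0, "fn main() {")], None, cx);
/// buffer.edit([(11..11, "}")], None, cx);
/// buffer.end_transaction(cx);
/// buffer.undo(cx); // reverts both edits together
/// ```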
1965 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1966 self.start_transaction_at(Instant::now())
1967 }
1968
1969 /// Starts a transaction, providing the current time. Subsequent transactions
1970 /// that occur within a short period of time will be grouped together. This
1971 /// is controlled by the buffer's undo grouping duration.
1972 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1973 self.transaction_depth += 1;
1974 if self.was_dirty_before_starting_transaction.is_none() {
1975 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1976 }
1977 self.text.start_transaction_at(now)
1978 }
1979
1980 /// Terminates the current transaction, if this is the outermost transaction.
1981 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
1982 self.end_transaction_at(Instant::now(), cx)
1983 }
1984
1985 /// Terminates the current transaction, providing the current time. Subsequent transactions
1986 /// that occur within a short period of time will be grouped together. This
1987 /// is controlled by the buffer's undo grouping duration.
1988 pub fn end_transaction_at(
1989 &mut self,
1990 now: Instant,
1991 cx: &mut Context<Self>,
1992 ) -> Option<TransactionId> {
1993 assert!(self.transaction_depth > 0);
1994 self.transaction_depth -= 1;
1995 let was_dirty = if self.transaction_depth == 0 {
1996 self.was_dirty_before_starting_transaction.take().unwrap()
1997 } else {
1998 false
1999 };
2000 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2001 self.did_edit(&start_version, was_dirty, cx);
2002 Some(transaction_id)
2003 } else {
2004 None
2005 }
2006 }
2007
2008 /// Manually add a transaction to the buffer's undo history.
2009 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2010 self.text.push_transaction(transaction, now);
2011 }
2012
2013 /// Prevent the last transaction from being grouped with any subsequent transactions,
2014 /// even if they occur within the buffer's undo grouping duration.
2015 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2016 self.text.finalize_last_transaction()
2017 }
2018
2019 /// Manually group all changes since a given transaction.
2020 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2021 self.text.group_until_transaction(transaction_id);
2022 }
2023
2024 /// Manually remove a transaction from the buffer's undo history.
2025 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
2026 self.text.forget_transaction(transaction_id);
2027 }
2028
2029 /// Manually merge two adjacent transactions in the buffer's undo history.
2030 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2031 self.text.merge_transactions(transaction, destination);
2032 }
2033
2034 /// Waits for the buffer to receive operations with the given timestamps.
2035 pub fn wait_for_edits(
2036 &mut self,
2037 edit_ids: impl IntoIterator<Item = clock::Lamport>,
2038 ) -> impl Future<Output = Result<()>> {
2039 self.text.wait_for_edits(edit_ids)
2040 }
2041
2042 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2043 pub fn wait_for_anchors(
2044 &mut self,
2045 anchors: impl IntoIterator<Item = Anchor>,
2046 ) -> impl 'static + Future<Output = Result<()>> {
2047 self.text.wait_for_anchors(anchors)
2048 }
2049
2050 /// Waits for the buffer to receive operations up to the given version.
2051 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
2052 self.text.wait_for_version(version)
2053 }
2054
2055 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2056 /// [`Buffer::wait_for_anchors`] to resolve with an error.
2057 pub fn give_up_waiting(&mut self) {
2058 self.text.give_up_waiting();
2059 }
2060
2061 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2062 pub fn set_active_selections(
2063 &mut self,
2064 selections: Arc<[Selection<Anchor>]>,
2065 line_mode: bool,
2066 cursor_shape: CursorShape,
2067 cx: &mut Context<Self>,
2068 ) {
2069 let lamport_timestamp = self.text.lamport_clock.tick();
2070 self.remote_selections.insert(
2071 self.text.replica_id(),
2072 SelectionSet {
2073 selections: selections.clone(),
2074 lamport_timestamp,
2075 line_mode,
2076 cursor_shape,
2077 },
2078 );
2079 self.send_operation(
2080 Operation::UpdateSelections {
2081 selections,
2082 line_mode,
2083 lamport_timestamp,
2084 cursor_shape,
2085 },
2086 true,
2087 cx,
2088 );
2089 self.non_text_state_update_count += 1;
2090 cx.notify();
2091 }
2092
2093 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2094 /// this replica.
2095 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2096 if self
2097 .remote_selections
2098 .get(&self.text.replica_id())
2099 .map_or(true, |set| !set.selections.is_empty())
2100 {
2101 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2102 }
2103 }
2104
2105 /// Replaces the buffer's entire text.
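///
/// # Example
///
/// Not compiled; assumes `buffer` and `cx` are supplied by the caller:
///
/// ```ignore
/// buffer.set_text("let x = 1;\n", cx);
/// assert_eq!(buffer.as_rope().to_string(), "let x = 1;\n");
/// ```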
2106 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2107 where
2108 T: Into<Arc<str>>,
2109 {
2110 self.autoindent_requests.clear();
2111 self.edit([(0..self.len(), text)], None, cx)
2112 }
2113
2114 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2115 /// delete, and a string of text to insert at that location.
2116 ///
2117 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2118 /// request for the edited ranges, which will be processed when the buffer finishes
2119 /// parsing.
2120 ///
2121 /// Parsing takes place at the end of a transaction, and may run synchronously
2122 /// or asynchronously, depending on the changes.
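///
/// # Example
///
/// A sketch (not compiled); the offsets are illustrative and assume the buffer
/// already contains `"fn main() {\n}\n"`:
///
/// ```ignore
/// // Insert a new statement inside the function body and let the language's
/// // indentation rules indent the inserted line.
/// buffer.edit(
///     [(11..11, "\ntodo!();")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// ```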
2123 pub fn edit<I, S, T>(
2124 &mut self,
2125 edits_iter: I,
2126 autoindent_mode: Option<AutoindentMode>,
2127 cx: &mut Context<Self>,
2128 ) -> Option<clock::Lamport>
2129 where
2130 I: IntoIterator<Item = (Range<S>, T)>,
2131 S: ToOffset,
2132 T: Into<Arc<str>>,
2133 {
2134 // Skip invalid edits and coalesce contiguous ones.
2135 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2136 for (range, new_text) in edits_iter {
2137 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2138 if range.start > range.end {
2139 mem::swap(&mut range.start, &mut range.end);
2140 }
2141 let new_text = new_text.into();
2142 if !new_text.is_empty() || !range.is_empty() {
2143 if let Some((prev_range, prev_text)) = edits.last_mut() {
2144 if prev_range.end >= range.start {
2145 prev_range.end = cmp::max(prev_range.end, range.end);
2146 *prev_text = format!("{prev_text}{new_text}").into();
2147 } else {
2148 edits.push((range, new_text));
2149 }
2150 } else {
2151 edits.push((range, new_text));
2152 }
2153 }
2154 }
2155 if edits.is_empty() {
2156 return None;
2157 }
2158
2159 self.start_transaction();
2160 self.pending_autoindent.take();
2161 let autoindent_request = autoindent_mode
2162 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2163
2164 let edit_operation = self.text.edit(edits.iter().cloned());
2165 let edit_id = edit_operation.timestamp();
2166
2167 if let Some((before_edit, mode)) = autoindent_request {
2168 let mut delta = 0isize;
2169 let entries = edits
2170 .into_iter()
2171 .enumerate()
2172 .zip(&edit_operation.as_edit().unwrap().new_text)
2173 .map(|((ix, (range, _)), new_text)| {
2174 let new_text_length = new_text.len();
2175 let old_start = range.start.to_point(&before_edit);
2176 let new_start = (delta + range.start as isize) as usize;
2177 let range_len = range.end - range.start;
2178 delta += new_text_length as isize - range_len as isize;
2179
2180 // Decide what range of the insertion to auto-indent, and whether
2181 // the first line of the insertion should be considered a newly-inserted line
2182 // or an edit to an existing line.
2183 let mut range_of_insertion_to_indent = 0..new_text_length;
2184 let mut first_line_is_new = true;
2185
2186 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2187 let old_line_end = before_edit.line_len(old_start.row);
2188
2189 if old_start.column > old_line_start {
2190 first_line_is_new = false;
2191 }
2192
2193 if !new_text.contains('\n')
2194 && (old_start.column + (range_len as u32) < old_line_end
2195 || old_line_end == old_line_start)
2196 {
2197 first_line_is_new = false;
2198 }
2199
2200 // When inserting text starting with a newline, avoid auto-indenting the
2201 // previous line.
2202 if new_text.starts_with('\n') {
2203 range_of_insertion_to_indent.start += 1;
2204 first_line_is_new = true;
2205 }
2206
2207 let mut original_indent_column = None;
2208 if let AutoindentMode::Block {
2209 original_start_columns,
2210 } = &mode
2211 {
2212 original_indent_column = Some(
2213 original_start_columns.get(ix).copied().unwrap_or(0)
2214 + indent_size_for_text(
2215 new_text[range_of_insertion_to_indent.clone()].chars(),
2216 )
2217 .len,
2218 );
2219
2220 // Avoid auto-indenting the line after the edit.
2221 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2222 range_of_insertion_to_indent.end -= 1;
2223 }
2224 }
2225
2226 AutoindentRequestEntry {
2227 first_line_is_new,
2228 original_indent_column,
2229 indent_size: before_edit.language_indent_size_at(range.start, cx),
2230 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2231 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2232 }
2233 })
2234 .collect();
2235
2236 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2237 before_edit,
2238 entries,
2239 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2240 ignore_empty_lines: false,
2241 }));
2242 }
2243
2244 self.end_transaction(cx);
2245 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2246 Some(edit_id)
2247 }
2248
2249 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2250 if self.edits_since::<usize>(old_version).next().is_none() {
2251 return;
2252 }
2253
2254 self.reparse(cx);
2255
2256 cx.emit(BufferEvent::Edited);
2257 if was_dirty != self.is_dirty() {
2258 cx.emit(BufferEvent::DirtyChanged);
2259 }
2260 cx.notify();
2261 }
2262
2263 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2264 where
2265 I: IntoIterator<Item = Range<T>>,
2266 T: ToOffset + Copy,
2267 {
2268 let before_edit = self.snapshot();
2269 let entries = ranges
2270 .into_iter()
2271 .map(|range| AutoindentRequestEntry {
2272 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2273 first_line_is_new: true,
2274 indent_size: before_edit.language_indent_size_at(range.start, cx),
2275 original_indent_column: None,
2276 })
2277 .collect();
2278 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2279 before_edit,
2280 entries,
2281 is_block_mode: false,
2282 ignore_empty_lines: true,
2283 }));
2284 self.request_autoindent(cx);
2285 }
2286
2287 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2288 // You can also request the insertion of empty lines above and below the line starting at the returned point.
2289 pub fn insert_empty_line(
2290 &mut self,
2291 position: impl ToPoint,
2292 space_above: bool,
2293 space_below: bool,
2294 cx: &mut Context<Self>,
2295 ) -> Point {
2296 let mut position = position.to_point(self);
2297
2298 self.start_transaction();
2299
2300 self.edit(
2301 [(position..position, "\n")],
2302 Some(AutoindentMode::EachLine),
2303 cx,
2304 );
2305
2306 if position.column > 0 {
2307 position += Point::new(1, 0);
2308 }
2309
2310 if !self.is_line_blank(position.row) {
2311 self.edit(
2312 [(position..position, "\n")],
2313 Some(AutoindentMode::EachLine),
2314 cx,
2315 );
2316 }
2317
2318 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2319 self.edit(
2320 [(position..position, "\n")],
2321 Some(AutoindentMode::EachLine),
2322 cx,
2323 );
2324 position.row += 1;
2325 }
2326
2327 if space_below
2328 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2329 {
2330 self.edit(
2331 [(position..position, "\n")],
2332 Some(AutoindentMode::EachLine),
2333 cx,
2334 );
2335 }
2336
2337 self.end_transaction(cx);
2338
2339 position
2340 }
2341
2342 /// Applies the given remote operations to the buffer.
2343 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2344 self.pending_autoindent.take();
2345 let was_dirty = self.is_dirty();
2346 let old_version = self.version.clone();
2347 let mut deferred_ops = Vec::new();
2348 let buffer_ops = ops
2349 .into_iter()
2350 .filter_map(|op| match op {
2351 Operation::Buffer(op) => Some(op),
2352 _ => {
2353 if self.can_apply_op(&op) {
2354 self.apply_op(op, cx);
2355 } else {
2356 deferred_ops.push(op);
2357 }
2358 None
2359 }
2360 })
2361 .collect::<Vec<_>>();
2362 for operation in buffer_ops.iter() {
2363 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2364 }
2365 self.text.apply_ops(buffer_ops);
2366 self.deferred_ops.insert(deferred_ops);
2367 self.flush_deferred_ops(cx);
2368 self.did_edit(&old_version, was_dirty, cx);
2369 // Notify regardless of whether the buffer was edited, as the operations could include a
2370 // selection update.
2371 cx.notify();
2372 }
2373
2374 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2375 let mut deferred_ops = Vec::new();
2376 for op in self.deferred_ops.drain().iter().cloned() {
2377 if self.can_apply_op(&op) {
2378 self.apply_op(op, cx);
2379 } else {
2380 deferred_ops.push(op);
2381 }
2382 }
2383 self.deferred_ops.insert(deferred_ops);
2384 }
2385
2386 pub fn has_deferred_ops(&self) -> bool {
2387 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2388 }
2389
2390 fn can_apply_op(&self, operation: &Operation) -> bool {
2391 match operation {
2392 Operation::Buffer(_) => {
2393 unreachable!("buffer operations should never be applied at this layer")
2394 }
2395 Operation::UpdateDiagnostics {
2396 diagnostics: diagnostic_set,
2397 ..
2398 } => diagnostic_set.iter().all(|diagnostic| {
2399 self.text.can_resolve(&diagnostic.range.start)
2400 && self.text.can_resolve(&diagnostic.range.end)
2401 }),
2402 Operation::UpdateSelections { selections, .. } => selections
2403 .iter()
2404 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2405 Operation::UpdateCompletionTriggers { .. } => true,
2406 }
2407 }
2408
2409 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2410 match operation {
2411 Operation::Buffer(_) => {
2412 unreachable!("buffer operations should never be applied at this layer")
2413 }
2414 Operation::UpdateDiagnostics {
2415 server_id,
2416 diagnostics: diagnostic_set,
2417 lamport_timestamp,
2418 } => {
2419 let snapshot = self.snapshot();
2420 self.apply_diagnostic_update(
2421 server_id,
2422 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2423 lamport_timestamp,
2424 cx,
2425 );
2426 }
2427 Operation::UpdateSelections {
2428 selections,
2429 lamport_timestamp,
2430 line_mode,
2431 cursor_shape,
2432 } => {
2433 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2434 if set.lamport_timestamp > lamport_timestamp {
2435 return;
2436 }
2437 }
2438
2439 self.remote_selections.insert(
2440 lamport_timestamp.replica_id,
2441 SelectionSet {
2442 selections,
2443 lamport_timestamp,
2444 line_mode,
2445 cursor_shape,
2446 },
2447 );
2448 self.text.lamport_clock.observe(lamport_timestamp);
2449 self.non_text_state_update_count += 1;
2450 }
2451 Operation::UpdateCompletionTriggers {
2452 triggers,
2453 lamport_timestamp,
2454 server_id,
2455 } => {
2456 if triggers.is_empty() {
2457 self.completion_triggers_per_language_server
2458 .remove(&server_id);
2459 self.completion_triggers = self
2460 .completion_triggers_per_language_server
2461 .values()
2462 .flat_map(|triggers| triggers.into_iter().cloned())
2463 .collect();
2464 } else {
2465 self.completion_triggers_per_language_server
2466 .insert(server_id, triggers.iter().cloned().collect());
2467 self.completion_triggers.extend(triggers);
2468 }
2469 self.text.lamport_clock.observe(lamport_timestamp);
2470 }
2471 }
2472 }
2473
2474 fn apply_diagnostic_update(
2475 &mut self,
2476 server_id: LanguageServerId,
2477 diagnostics: DiagnosticSet,
2478 lamport_timestamp: clock::Lamport,
2479 cx: &mut Context<Self>,
2480 ) {
2481 if lamport_timestamp > self.diagnostics_timestamp {
2482 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2483 if diagnostics.is_empty() {
2484 if let Ok(ix) = ix {
2485 self.diagnostics.remove(ix);
2486 }
2487 } else {
2488 match ix {
2489 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2490 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2491 };
2492 }
2493 self.diagnostics_timestamp = lamport_timestamp;
2494 self.non_text_state_update_count += 1;
2495 self.text.lamport_clock.observe(lamport_timestamp);
2496 cx.notify();
2497 cx.emit(BufferEvent::DiagnosticsUpdated);
2498 }
2499 }
2500
2501 fn send_operation(&self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2502 cx.emit(BufferEvent::Operation {
2503 operation,
2504 is_local,
2505 });
2506 }
2507
2508 /// Removes the selections for a given peer.
2509 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2510 self.remote_selections.remove(&replica_id);
2511 cx.notify();
2512 }
2513
2514 /// Undoes the most recent transaction.
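///
/// # Example
///
/// Not compiled; assumes an initially empty buffer:
///
/// ```ignore
/// buffer.edit([(0..0, "hello")], None, cx);
/// let transaction_id = buffer.undo(cx);
/// assert!(transaction_id.is_some());
/// assert_eq!(buffer.len(), 0);
/// ```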
2515 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2516 let was_dirty = self.is_dirty();
2517 let old_version = self.version.clone();
2518
2519 if let Some((transaction_id, operation)) = self.text.undo() {
2520 self.send_operation(Operation::Buffer(operation), true, cx);
2521 self.did_edit(&old_version, was_dirty, cx);
2522 Some(transaction_id)
2523 } else {
2524 None
2525 }
2526 }
2527
2528 /// Manually undoes a specific transaction in the buffer's undo history.
2529 pub fn undo_transaction(
2530 &mut self,
2531 transaction_id: TransactionId,
2532 cx: &mut Context<Self>,
2533 ) -> bool {
2534 let was_dirty = self.is_dirty();
2535 let old_version = self.version.clone();
2536 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2537 self.send_operation(Operation::Buffer(operation), true, cx);
2538 self.did_edit(&old_version, was_dirty, cx);
2539 true
2540 } else {
2541 false
2542 }
2543 }
2544
2545 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2546 pub fn undo_to_transaction(
2547 &mut self,
2548 transaction_id: TransactionId,
2549 cx: &mut Context<Self>,
2550 ) -> bool {
2551 let was_dirty = self.is_dirty();
2552 let old_version = self.version.clone();
2553
2554 let operations = self.text.undo_to_transaction(transaction_id);
2555 let undone = !operations.is_empty();
2556 for operation in operations {
2557 self.send_operation(Operation::Buffer(operation), true, cx);
2558 }
2559 if undone {
2560 self.did_edit(&old_version, was_dirty, cx)
2561 }
2562 undone
2563 }
2564
2565 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2566 let was_dirty = self.is_dirty();
2567 let operation = self.text.undo_operations(counts);
2568 let old_version = self.version.clone();
2569 self.send_operation(Operation::Buffer(operation), true, cx);
2570 self.did_edit(&old_version, was_dirty, cx);
2571 }
2572
2573 /// Redoes the most recently undone transaction.
2574 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2575 let was_dirty = self.is_dirty();
2576 let old_version = self.version.clone();
2577
2578 if let Some((transaction_id, operation)) = self.text.redo() {
2579 self.send_operation(Operation::Buffer(operation), true, cx);
2580 self.did_edit(&old_version, was_dirty, cx);
2581 Some(transaction_id)
2582 } else {
2583 None
2584 }
2585 }
2586
2587 /// Manually redoes all changes until a given transaction in the buffer's redo history.
2588 pub fn redo_to_transaction(
2589 &mut self,
2590 transaction_id: TransactionId,
2591 cx: &mut Context<Self>,
2592 ) -> bool {
2593 let was_dirty = self.is_dirty();
2594 let old_version = self.version.clone();
2595
2596 let operations = self.text.redo_to_transaction(transaction_id);
2597 let redone = !operations.is_empty();
2598 for operation in operations {
2599 self.send_operation(Operation::Buffer(operation), true, cx);
2600 }
2601 if redone {
2602 self.did_edit(&old_version, was_dirty, cx)
2603 }
2604 redone
2605 }
2606
2607 /// Override current completion triggers with the user-provided completion triggers.
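///
/// # Example
///
/// A sketch (not compiled); the server id and trigger strings are placeholder
/// values:
///
/// ```ignore
/// buffer.set_completion_triggers(
///     LanguageServerId(0),
///     BTreeSet::from([".".to_string(), "::".to_string()]),
///     cx,
/// );
/// assert!(buffer.completion_triggers().contains("."));
/// ```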
2608 pub fn set_completion_triggers(
2609 &mut self,
2610 server_id: LanguageServerId,
2611 triggers: BTreeSet<String>,
2612 cx: &mut Context<Self>,
2613 ) {
2614 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2615 if triggers.is_empty() {
2616 self.completion_triggers_per_language_server
2617 .remove(&server_id);
2618 self.completion_triggers = self
2619 .completion_triggers_per_language_server
2620 .values()
2621 .flat_map(|triggers| triggers.into_iter().cloned())
2622 .collect();
2623 } else {
2624 self.completion_triggers_per_language_server
2625 .insert(server_id, triggers.clone());
2626 self.completion_triggers.extend(triggers.iter().cloned());
2627 }
2628 self.send_operation(
2629 Operation::UpdateCompletionTriggers {
2630 triggers: triggers.iter().cloned().collect(),
2631 lamport_timestamp: self.completion_triggers_timestamp,
2632 server_id,
2633 },
2634 true,
2635 cx,
2636 );
2637 cx.notify();
2638 }
2639
2640 /// Returns a list of strings which trigger a completion menu for this language.
2641 /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2642 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2643 &self.completion_triggers
2644 }
2645
2646 /// Call this directly after performing edits to prevent the preview tab
2647 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2648 /// to return false until there are additional edits.
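///
/// # Example
///
/// Not compiled; assumes `buffer` and `cx` are provided:
///
/// ```ignore
/// buffer.edit([(0..0, "// formatted\n")], None, cx);
/// buffer.refresh_preview();
/// // No edits have occurred since the preview version was refreshed.
/// assert!(buffer.preserve_preview());
/// ```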
2649 pub fn refresh_preview(&mut self) {
2650 self.preview_version = self.version.clone();
2651 }
2652
2653 /// Whether we should preserve the preview status of a tab containing this buffer.
2654 pub fn preserve_preview(&self) -> bool {
2655 !self.has_edits_since(&self.preview_version)
2656 }
2657}
2658
2659#[doc(hidden)]
2660#[cfg(any(test, feature = "test-support"))]
2661impl Buffer {
2662 pub fn edit_via_marked_text(
2663 &mut self,
2664 marked_string: &str,
2665 autoindent_mode: Option<AutoindentMode>,
2666 cx: &mut Context<Self>,
2667 ) {
2668 let edits = self.edits_for_marked_text(marked_string);
2669 self.edit(edits, autoindent_mode, cx);
2670 }
2671
2672 pub fn set_group_interval(&mut self, group_interval: Duration) {
2673 self.text.set_group_interval(group_interval);
2674 }
2675
2676 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2677 where
2678 T: rand::Rng,
2679 {
2680 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2681 let mut last_end = None;
2682 for _ in 0..old_range_count {
2683 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2684 break;
2685 }
2686
2687 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2688 let mut range = self.random_byte_range(new_start, rng);
2689 if rng.gen_bool(0.2) {
2690 mem::swap(&mut range.start, &mut range.end);
2691 }
2692 last_end = Some(range.end);
2693
2694 let new_text_len = rng.gen_range(0..10);
2695 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2696 new_text = new_text.to_uppercase();
2697
2698 edits.push((range, new_text));
2699 }
2700 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2701 self.edit(edits, None, cx);
2702 }
2703
2704 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2705 let was_dirty = self.is_dirty();
2706 let old_version = self.version.clone();
2707
2708 let ops = self.text.randomly_undo_redo(rng);
2709 if !ops.is_empty() {
2710 for op in ops {
2711 self.send_operation(Operation::Buffer(op), true, cx);
2712 self.did_edit(&old_version, was_dirty, cx);
2713 }
2714 }
2715 }
2716}
2717
2718impl EventEmitter<BufferEvent> for Buffer {}
2719
2720impl Deref for Buffer {
2721 type Target = TextBuffer;
2722
2723 fn deref(&self) -> &Self::Target {
2724 &self.text
2725 }
2726}
2727
2728impl BufferSnapshot {
2729 /// Returns [`IndentSize`] for a given line that respects user settings and
2730 /// language preferences.
2731 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2732 indent_size_for_line(self, row)
2733 }
2734
2735 /// Returns [`IndentSize`] for a given position that respects user settings
2736 /// and language preferences.
2737 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2738 let settings = language_settings(
2739 self.language_at(position).map(|l| l.name()),
2740 self.file(),
2741 cx,
2742 );
2743 if settings.hard_tabs {
2744 IndentSize::tab()
2745 } else {
2746 IndentSize::spaces(settings.tab_size.get())
2747 }
2748 }
2749
2750 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2751 /// is passed in as `single_indent_size`.
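///
/// # Example
///
/// A sketch (not compiled); assumes `snapshot` is a `BufferSnapshot` whose
/// language has indentation queries configured:
///
/// ```ignore
/// let suggestions = snapshot.suggested_indents(1..3, IndentSize::spaces(4));
/// for (row, indent) in suggestions {
///     println!("row {row}: indent to {} columns", indent.len);
/// }
/// ```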
2752 pub fn suggested_indents(
2753 &self,
2754 rows: impl Iterator<Item = u32>,
2755 single_indent_size: IndentSize,
2756 ) -> BTreeMap<u32, IndentSize> {
2757 let mut result = BTreeMap::new();
2758
2759 for row_range in contiguous_ranges(rows, 10) {
2760 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2761 Some(suggestions) => suggestions,
2762 _ => break,
2763 };
2764
2765 for (row, suggestion) in row_range.zip(suggestions) {
2766 let indent_size = if let Some(suggestion) = suggestion {
2767 result
2768 .get(&suggestion.basis_row)
2769 .copied()
2770 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2771 .with_delta(suggestion.delta, single_indent_size)
2772 } else {
2773 self.indent_size_for_line(row)
2774 };
2775
2776 result.insert(row, indent_size);
2777 }
2778 }
2779
2780 result
2781 }
2782
2783 fn suggest_autoindents(
2784 &self,
2785 row_range: Range<u32>,
2786 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2787 let config = &self.language.as_ref()?.config;
2788 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2789
2790 // Find the suggested indentation ranges based on the syntax tree.
2791 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2792 let end = Point::new(row_range.end, 0);
2793 let range = (start..end).to_offset(&self.text);
2794 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2795 Some(&grammar.indents_config.as_ref()?.query)
2796 });
2797 let indent_configs = matches
2798 .grammars()
2799 .iter()
2800 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2801 .collect::<Vec<_>>();
2802
2803 let mut indent_ranges = Vec::<Range<Point>>::new();
2804 let mut outdent_positions = Vec::<Point>::new();
2805 while let Some(mat) = matches.peek() {
2806 let mut start: Option<Point> = None;
2807 let mut end: Option<Point> = None;
2808
2809 let config = &indent_configs[mat.grammar_index];
2810 for capture in mat.captures {
2811 if capture.index == config.indent_capture_ix {
2812 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2813 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2814 } else if Some(capture.index) == config.start_capture_ix {
2815 start = Some(Point::from_ts_point(capture.node.end_position()));
2816 } else if Some(capture.index) == config.end_capture_ix {
2817 end = Some(Point::from_ts_point(capture.node.start_position()));
2818 } else if Some(capture.index) == config.outdent_capture_ix {
2819 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2820 }
2821 }
2822
2823 matches.advance();
2824 if let Some((start, end)) = start.zip(end) {
2825 if start.row == end.row {
2826 continue;
2827 }
2828
2829 let range = start..end;
2830 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2831 Err(ix) => indent_ranges.insert(ix, range),
2832 Ok(ix) => {
2833 let prev_range = &mut indent_ranges[ix];
2834 prev_range.end = prev_range.end.max(range.end);
2835 }
2836 }
2837 }
2838 }
2839
2840 let mut error_ranges = Vec::<Range<Point>>::new();
2841 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2842 grammar.error_query.as_ref()
2843 });
2844 while let Some(mat) = matches.peek() {
2845 let node = mat.captures[0].node;
2846 let start = Point::from_ts_point(node.start_position());
2847 let end = Point::from_ts_point(node.end_position());
2848 let range = start..end;
2849 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2850 Ok(ix) | Err(ix) => ix,
2851 };
2852 let mut end_ix = ix;
2853 while let Some(existing_range) = error_ranges.get(end_ix) {
2854 if existing_range.end < end {
2855 end_ix += 1;
2856 } else {
2857 break;
2858 }
2859 }
2860 error_ranges.splice(ix..end_ix, [range]);
2861 matches.advance();
2862 }
2863
2864 outdent_positions.sort();
2865 for outdent_position in outdent_positions {
2866 // Find the innermost indent range containing this outdent_position
2867 // and set its end to the outdent position.
2868 if let Some(range_to_truncate) = indent_ranges
2869 .iter_mut()
2870 .filter(|indent_range| indent_range.contains(&outdent_position))
2871 .last()
2872 {
2873 range_to_truncate.end = outdent_position;
2874 }
2875 }
2876
2877 // Find the suggested indentation increases and decreases based on regexes.
2878 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2879 self.for_each_line(
2880 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2881 ..Point::new(row_range.end, 0),
2882 |row, line| {
2883 if config
2884 .decrease_indent_pattern
2885 .as_ref()
2886 .map_or(false, |regex| regex.is_match(line))
2887 {
2888 indent_change_rows.push((row, Ordering::Less));
2889 }
2890 if config
2891 .increase_indent_pattern
2892 .as_ref()
2893 .map_or(false, |regex| regex.is_match(line))
2894 {
2895 indent_change_rows.push((row + 1, Ordering::Greater));
2896 }
2897 },
2898 );
2899
2900 let mut indent_changes = indent_change_rows.into_iter().peekable();
2901 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2902 prev_non_blank_row.unwrap_or(0)
2903 } else {
2904 row_range.start.saturating_sub(1)
2905 };
2906 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2907 Some(row_range.map(move |row| {
2908 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2909
2910 let mut indent_from_prev_row = false;
2911 let mut outdent_from_prev_row = false;
2912 let mut outdent_to_row = u32::MAX;
2913 let mut from_regex = false;
2914
2915 while let Some((indent_row, delta)) = indent_changes.peek() {
2916 match indent_row.cmp(&row) {
2917 Ordering::Equal => match delta {
2918 Ordering::Less => {
2919 from_regex = true;
2920 outdent_from_prev_row = true
2921 }
2922 Ordering::Greater => {
2923 indent_from_prev_row = true;
2924 from_regex = true
2925 }
2926 _ => {}
2927 },
2928
2929 Ordering::Greater => break,
2930 Ordering::Less => {}
2931 }
2932
2933 indent_changes.next();
2934 }
2935
2936 for range in &indent_ranges {
2937 if range.start.row >= row {
2938 break;
2939 }
2940 if range.start.row == prev_row && range.end > row_start {
2941 indent_from_prev_row = true;
2942 }
2943 if range.end > prev_row_start && range.end <= row_start {
2944 outdent_to_row = outdent_to_row.min(range.start.row);
2945 }
2946 }
2947
2948 let within_error = error_ranges
2949 .iter()
2950 .any(|e| e.start.row < row && e.end > row_start);
2951
2952 let suggestion = if outdent_to_row == prev_row
2953 || (outdent_from_prev_row && indent_from_prev_row)
2954 {
2955 Some(IndentSuggestion {
2956 basis_row: prev_row,
2957 delta: Ordering::Equal,
2958 within_error: within_error && !from_regex,
2959 })
2960 } else if indent_from_prev_row {
2961 Some(IndentSuggestion {
2962 basis_row: prev_row,
2963 delta: Ordering::Greater,
2964 within_error: within_error && !from_regex,
2965 })
2966 } else if outdent_to_row < prev_row {
2967 Some(IndentSuggestion {
2968 basis_row: outdent_to_row,
2969 delta: Ordering::Equal,
2970 within_error: within_error && !from_regex,
2971 })
2972 } else if outdent_from_prev_row {
2973 Some(IndentSuggestion {
2974 basis_row: prev_row,
2975 delta: Ordering::Less,
2976 within_error: within_error && !from_regex,
2977 })
2978 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2979 {
2980 Some(IndentSuggestion {
2981 basis_row: prev_row,
2982 delta: Ordering::Equal,
2983 within_error: within_error && !from_regex,
2984 })
2985 } else {
2986 None
2987 };
2988
2989 prev_row = row;
2990 prev_row_start = row_start;
2991 suggestion
2992 }))
2993 }
2994
2995 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2996 while row > 0 {
2997 row -= 1;
2998 if !self.is_line_blank(row) {
2999 return Some(row);
3000 }
3001 }
3002 None
3003 }
3004
3005 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
3006 let captures = self.syntax.captures(range, &self.text, |grammar| {
3007 grammar.highlights_query.as_ref()
3008 });
3009 let highlight_maps = captures
3010 .grammars()
3011 .iter()
3012 .map(|grammar| grammar.highlight_map())
3013 .collect();
3014 (captures, highlight_maps)
3015 }
3016
3017 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3018 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3019 /// returned in chunks where each chunk has a single syntax highlighting style and
3020 /// diagnostic status.
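///
/// # Example
///
/// Not compiled; concatenating the chunk texts reproduces the requested range:
///
/// ```ignore
/// let mut text = String::new();
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     // `chunk.syntax_highlight_id` carries the highlight for this span.
///     text.push_str(chunk.text);
/// }
/// assert_eq!(text, snapshot.as_rope().to_string());
/// ```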
3021 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
3022 let range = range.start.to_offset(self)..range.end.to_offset(self);
3023
3024 let mut syntax = None;
3025 if language_aware {
3026 syntax = Some(self.get_highlights(range.clone()));
3027 }
3028 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3029 let diagnostics = language_aware;
3030 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3031 }
3032
3033 pub fn highlighted_text_for_range<T: ToOffset>(
3034 &self,
3035 range: Range<T>,
3036 override_style: Option<HighlightStyle>,
3037 syntax_theme: &SyntaxTheme,
3038 ) -> HighlightedText {
3039 HighlightedText::from_buffer_range(
3040 range,
3041 &self.text,
3042 &self.syntax,
3043 override_style,
3044 syntax_theme,
3045 )
3046 }
3047
3048 /// Invokes the given callback for each line of text in the given range of the buffer.
3049 /// Uses a callback to avoid allocating a string for each line.
3050 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3051 let mut line = String::new();
3052 let mut row = range.start.row;
3053 for chunk in self
3054 .as_rope()
3055 .chunks_in_range(range.to_offset(self))
3056 .chain(["\n"])
3057 {
3058 for (newline_ix, text) in chunk.split('\n').enumerate() {
3059 if newline_ix > 0 {
3060 callback(row, &line);
3061 row += 1;
3062 line.clear();
3063 }
3064 line.push_str(text);
3065 }
3066 }
3067 }
3068
3069 /// Iterates over every [`SyntaxLayer`] in the buffer.
3070 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
3071 self.syntax
3072 .layers_for_range(0..self.len(), &self.text, true)
3073 }
3074
3075 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
3076 let offset = position.to_offset(self);
3077 self.syntax
3078 .layers_for_range(offset..offset, &self.text, false)
3079 .filter(|l| l.node().end_byte() > offset)
3080 .last()
3081 }
3082
3083 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3084 &self,
3085 range: Range<D>,
3086 ) -> Option<SyntaxLayer> {
3087 let range = range.to_offset(self);
3088 return self
3089 .syntax
3090 .layers_for_range(range, &self.text, false)
3091 .max_by(|a, b| {
3092 if a.depth != b.depth {
3093 a.depth.cmp(&b.depth)
3094 } else if a.offset.0 != b.offset.0 {
3095 a.offset.0.cmp(&b.offset.0)
3096 } else {
3097 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3098 }
3099 });
3100 }
3101
3102 /// Returns the main [`Language`].
3103 pub fn language(&self) -> Option<&Arc<Language>> {
3104 self.language.as_ref()
3105 }
3106
3107 /// Returns the [`Language`] at the given location.
3108 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3109 self.syntax_layer_at(position)
3110 .map(|info| info.language)
3111 .or(self.language.as_ref())
3112 }
3113
3114 /// Returns the settings for the language at the given location.
3115 pub fn settings_at<'a, D: ToOffset>(
3116 &'a self,
3117 position: D,
3118 cx: &'a App,
3119 ) -> Cow<'a, LanguageSettings> {
3120 language_settings(
3121 self.language_at(position).map(|l| l.name()),
3122 self.file.as_ref(),
3123 cx,
3124 )
3125 }
3126
3127 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3128 CharClassifier::new(self.language_scope_at(point))
3129 }
3130
3131 /// Returns the [`LanguageScope`] at the given location.
3132 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3133 let offset = position.to_offset(self);
3134 let mut scope = None;
3135 let mut smallest_range: Option<Range<usize>> = None;
3136
3137 // Use the layer that has the smallest node intersecting the given point.
3138 for layer in self
3139 .syntax
3140 .layers_for_range(offset..offset, &self.text, false)
3141 {
3142 let mut cursor = layer.node().walk();
3143
3144 let mut range = None;
3145 loop {
3146 let child_range = cursor.node().byte_range();
3147 if !child_range.to_inclusive().contains(&offset) {
3148 break;
3149 }
3150
3151 range = Some(child_range);
3152 if cursor.goto_first_child_for_byte(offset).is_none() {
3153 break;
3154 }
3155 }
3156
3157 if let Some(range) = range {
3158 if smallest_range
3159 .as_ref()
3160 .map_or(true, |smallest_range| range.len() < smallest_range.len())
3161 {
3162 smallest_range = Some(range);
3163 scope = Some(LanguageScope {
3164 language: layer.language.clone(),
3165 override_id: layer.override_id(offset, &self.text),
3166 });
3167 }
3168 }
3169 }
3170
3171 scope.or_else(|| {
3172 self.language.clone().map(|language| LanguageScope {
3173 language,
3174 override_id: None,
3175 })
3176 })
3177 }
3178
3179 /// Returns a tuple of the range and character kind of the word
3180 /// surrounding the given position.
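///
/// # Example
///
/// Illustrative only (not compiled); assumes the snapshot contains
/// `"let foo_bar = 1;"` and that `_` is classified as a word character:
///
/// ```ignore
/// let (range, kind) = snapshot.surrounding_word(6);
/// // Offset 6 falls inside `foo_bar`, so the whole identifier is returned.
/// assert_eq!(range, 4..11);
/// assert_eq!(kind, Some(CharKind::Word));
/// ```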
3181 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3182 let mut start = start.to_offset(self);
3183 let mut end = start;
3184 let mut next_chars = self.chars_at(start).peekable();
3185 let mut prev_chars = self.reversed_chars_at(start).peekable();
3186
3187 let classifier = self.char_classifier_at(start);
3188 let word_kind = cmp::max(
3189 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3190 next_chars.peek().copied().map(|c| classifier.kind(c)),
3191 );
3192
3193 for ch in prev_chars {
3194 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3195 start -= ch.len_utf8();
3196 } else {
3197 break;
3198 }
3199 }
3200
3201 for ch in next_chars {
3202 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3203 end += ch.len_utf8();
3204 } else {
3205 break;
3206 }
3207 }
3208
3209 (start..end, word_kind)
3210 }
3211
3212 /// Returns the closest syntax node enclosing the given range.
3213 pub fn syntax_ancestor<'a, T: ToOffset>(
3214 &'a self,
3215 range: Range<T>,
3216 ) -> Option<tree_sitter::Node<'a>> {
3217 let range = range.start.to_offset(self)..range.end.to_offset(self);
3218 let mut result: Option<tree_sitter::Node<'a>> = None;
3219 'outer: for layer in self
3220 .syntax
3221 .layers_for_range(range.clone(), &self.text, true)
3222 {
3223 let mut cursor = layer.node().walk();
3224
3225 // Descend to the first leaf that touches the start of the range,
3226 // and if the range is non-empty, extends beyond the start.
3227 while cursor.goto_first_child_for_byte(range.start).is_some() {
3228 if !range.is_empty() && cursor.node().end_byte() == range.start {
3229 cursor.goto_next_sibling();
3230 }
3231 }
3232
3233 // Ascend to the smallest ancestor that strictly contains the range.
3234 loop {
3235 let node_range = cursor.node().byte_range();
3236 if node_range.start <= range.start
3237 && node_range.end >= range.end
3238 && node_range.len() > range.len()
3239 {
3240 break;
3241 }
3242 if !cursor.goto_parent() {
3243 continue 'outer;
3244 }
3245 }
3246
3247 let left_node = cursor.node();
3248 let mut layer_result = left_node;
3249
3250 // For an empty range, try to find another node immediately to the right of the range.
3251 if left_node.end_byte() == range.start {
3252 let mut right_node = None;
3253 while !cursor.goto_next_sibling() {
3254 if !cursor.goto_parent() {
3255 break;
3256 }
3257 }
3258
3259 while cursor.node().start_byte() == range.start {
3260 right_node = Some(cursor.node());
3261 if !cursor.goto_first_child() {
3262 break;
3263 }
3264 }
3265
3266 // If there is a candidate node on both sides of the (empty) range, then
3267 // decide between the two by favoring a named node over an anonymous token.
3268 // If both nodes are the same in that regard, favor the right one.
3269 if let Some(right_node) = right_node {
3270 if right_node.is_named() || !left_node.is_named() {
3271 layer_result = right_node;
3272 }
3273 }
3274 }
3275
3276 if let Some(previous_result) = &result {
3277 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3278 continue;
3279 }
3280 }
3281 result = Some(layer_result);
3282 }
3283
3284 result
3285 }
3286
3287 /// Returns the outline for the buffer.
3288 ///
3289 /// This method allows passing an optional [`SyntaxTheme`] to
3290 /// syntax-highlight the returned symbols.
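///
/// # Example
///
/// A sketch (not compiled); assumes the buffer's language has an outline query
/// and that `Outline` exposes its items as a field (illustrative access only):
///
/// ```ignore
/// if let Some(outline) = snapshot.outline(None) {
///     for item in &outline.items {
///         println!("{}{}", "  ".repeat(item.depth), item.text);
///     }
/// }
/// ```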
3291 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3292 self.outline_items_containing(0..self.len(), true, theme)
3293 .map(Outline::new)
3294 }
3295
3296 /// Returns all the symbols that contain the given position.
3297 ///
3298 /// This method allows passing an optional [`SyntaxTheme`] to
3299 /// syntax-highlight the returned symbols.
3300 pub fn symbols_containing<T: ToOffset>(
3301 &self,
3302 position: T,
3303 theme: Option<&SyntaxTheme>,
3304 ) -> Option<Vec<OutlineItem<Anchor>>> {
3305 let position = position.to_offset(self);
3306 let mut items = self.outline_items_containing(
3307 position.saturating_sub(1)..self.len().min(position + 1),
3308 false,
3309 theme,
3310 )?;
3311 let mut prev_depth = None;
3312 items.retain(|item| {
3313 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3314 prev_depth = Some(item.depth);
3315 result
3316 });
3317 Some(items)
3318 }
3319
3320 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3321 let range = range.to_offset(self);
3322 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3323 grammar.outline_config.as_ref().map(|c| &c.query)
3324 });
3325 let configs = matches
3326 .grammars()
3327 .iter()
3328 .map(|g| g.outline_config.as_ref().unwrap())
3329 .collect::<Vec<_>>();
3330
3331 while let Some(mat) = matches.peek() {
3332 let config = &configs[mat.grammar_index];
3333 let containing_item_node = maybe!({
3334 let item_node = mat.captures.iter().find_map(|cap| {
3335 if cap.index == config.item_capture_ix {
3336 Some(cap.node)
3337 } else {
3338 None
3339 }
3340 })?;
3341
3342 let item_byte_range = item_node.byte_range();
3343 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3344 None
3345 } else {
3346 Some(item_node)
3347 }
3348 });
3349
3350 if let Some(item_node) = containing_item_node {
3351 return Some(
3352 Point::from_ts_point(item_node.start_position())
3353 ..Point::from_ts_point(item_node.end_position()),
3354 );
3355 }
3356
3357 matches.advance();
3358 }
3359 None
3360 }
3361
3362 pub fn outline_items_containing<T: ToOffset>(
3363 &self,
3364 range: Range<T>,
3365 include_extra_context: bool,
3366 theme: Option<&SyntaxTheme>,
3367 ) -> Option<Vec<OutlineItem<Anchor>>> {
3368 let range = range.to_offset(self);
3369 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3370 grammar.outline_config.as_ref().map(|c| &c.query)
3371 });
3372 let configs = matches
3373 .grammars()
3374 .iter()
3375 .map(|g| g.outline_config.as_ref().unwrap())
3376 .collect::<Vec<_>>();
3377
3378 let mut items = Vec::new();
3379 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3380 while let Some(mat) = matches.peek() {
3381 let config = &configs[mat.grammar_index];
3382 if let Some(item) =
3383 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3384 {
3385 items.push(item);
3386 } else if let Some(capture) = mat
3387 .captures
3388 .iter()
3389 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3390 {
3391 let capture_range = capture.node.start_position()..capture.node.end_position();
3392 let mut capture_row_range =
3393 capture_range.start.row as u32..capture_range.end.row as u32;
3394 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3395 {
3396 capture_row_range.end -= 1;
3397 }
3398 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3399 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3400 last_row_range.end = capture_row_range.end;
3401 } else {
3402 annotation_row_ranges.push(capture_row_range);
3403 }
3404 } else {
3405 annotation_row_ranges.push(capture_row_range);
3406 }
3407 }
3408 matches.advance();
3409 }
3410
3411 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3412
3413 // Assign depths based on containment relationships and convert to anchors.
3414 let mut item_ends_stack = Vec::<Point>::new();
3415 let mut anchor_items = Vec::new();
3416 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3417 for item in items {
3418 while let Some(last_end) = item_ends_stack.last().copied() {
3419 if last_end < item.range.end {
3420 item_ends_stack.pop();
3421 } else {
3422 break;
3423 }
3424 }
3425
3426 let mut annotation_row_range = None;
3427 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3428 let row_preceding_item = item.range.start.row.saturating_sub(1);
3429 if next_annotation_row_range.end < row_preceding_item {
3430 annotation_row_ranges.next();
3431 } else {
3432 if next_annotation_row_range.end == row_preceding_item {
3433 annotation_row_range = Some(next_annotation_row_range.clone());
3434 annotation_row_ranges.next();
3435 }
3436 break;
3437 }
3438 }
3439
3440 anchor_items.push(OutlineItem {
3441 depth: item_ends_stack.len(),
3442 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3443 text: item.text,
3444 highlight_ranges: item.highlight_ranges,
3445 name_ranges: item.name_ranges,
3446 body_range: item.body_range.map(|body_range| {
3447 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3448 }),
3449 annotation_range: annotation_row_range.map(|annotation_range| {
3450 self.anchor_after(Point::new(annotation_range.start, 0))
3451 ..self.anchor_before(Point::new(
3452 annotation_range.end,
3453 self.line_len(annotation_range.end),
3454 ))
3455 }),
3456 });
3457 item_ends_stack.push(item.range.end);
3458 }
3459
3460 Some(anchor_items)
3461 }
3462
3463 fn next_outline_item(
3464 &self,
3465 config: &OutlineConfig,
3466 mat: &SyntaxMapMatch,
3467 range: &Range<usize>,
3468 include_extra_context: bool,
3469 theme: Option<&SyntaxTheme>,
3470 ) -> Option<OutlineItem<Point>> {
3471 let item_node = mat.captures.iter().find_map(|cap| {
3472 if cap.index == config.item_capture_ix {
3473 Some(cap.node)
3474 } else {
3475 None
3476 }
3477 })?;
3478
3479 let item_byte_range = item_node.byte_range();
3480 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3481 return None;
3482 }
3483 let item_point_range = Point::from_ts_point(item_node.start_position())
3484 ..Point::from_ts_point(item_node.end_position());
3485
3486 let mut open_point = None;
3487 let mut close_point = None;
3488 let mut buffer_ranges = Vec::new();
3489 for capture in mat.captures {
3490 let node_is_name;
3491 if capture.index == config.name_capture_ix {
3492 node_is_name = true;
3493 } else if Some(capture.index) == config.context_capture_ix
3494 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3495 {
3496 node_is_name = false;
3497 } else {
3498 if Some(capture.index) == config.open_capture_ix {
3499 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3500 } else if Some(capture.index) == config.close_capture_ix {
3501 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3502 }
3503
3504 continue;
3505 }
3506
3507 let mut range = capture.node.start_byte()..capture.node.end_byte();
3508 let start = capture.node.start_position();
3509 if capture.node.end_position().row > start.row {
3510 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3511 }
3512
3513 if !range.is_empty() {
3514 buffer_ranges.push((range, node_is_name));
3515 }
3516 }
3517 if buffer_ranges.is_empty() {
3518 return None;
3519 }
3520 let mut text = String::new();
3521 let mut highlight_ranges = Vec::new();
3522 let mut name_ranges = Vec::new();
3523 let mut chunks = self.chunks(
3524 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3525 true,
3526 );
3527 let mut last_buffer_range_end = 0;
3528
3529 for (buffer_range, is_name) in buffer_ranges {
3530 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3531 if space_added {
3532 text.push(' ');
3533 }
3534 let before_append_len = text.len();
3535 let mut offset = buffer_range.start;
3536 chunks.seek(buffer_range.clone());
3537 for mut chunk in chunks.by_ref() {
3538 if chunk.text.len() > buffer_range.end - offset {
3539 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3540 offset = buffer_range.end;
3541 } else {
3542 offset += chunk.text.len();
3543 }
3544 let style = chunk
3545 .syntax_highlight_id
3546 .zip(theme)
3547 .and_then(|(highlight, theme)| highlight.style(theme));
3548 if let Some(style) = style {
3549 let start = text.len();
3550 let end = start + chunk.text.len();
3551 highlight_ranges.push((start..end, style));
3552 }
3553 text.push_str(chunk.text);
3554 if offset >= buffer_range.end {
3555 break;
3556 }
3557 }
3558 if is_name {
3559 let after_append_len = text.len();
3560 let start = if space_added && !name_ranges.is_empty() {
3561 before_append_len - 1
3562 } else {
3563 before_append_len
3564 };
3565 name_ranges.push(start..after_append_len);
3566 }
3567 last_buffer_range_end = buffer_range.end;
3568 }
3569
3570 Some(OutlineItem {
3571 depth: 0, // We'll calculate the depth later
3572 range: item_point_range,
3573 text,
3574 highlight_ranges,
3575 name_ranges,
3576 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3577 annotation_range: None,
3578 })
3579 }
3580
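    /// Returns the byte ranges of function bodies that intersect `within`, as determined
    /// by each language's `InsideFunction` text object, e.g. for folding function bodies
    /// in the editor.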
3581 pub fn function_body_fold_ranges<T: ToOffset>(
3582 &self,
3583 within: Range<T>,
3584 ) -> impl Iterator<Item = Range<usize>> + '_ {
3585 self.text_object_ranges(within, TreeSitterOptions::default())
3586 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3587 }
3588
3589 /// For each grammar in the language, runs the provided
3590 /// [`tree_sitter::Query`] against the given range.
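    ///
    /// A rough usage sketch (assumes `snapshot` is a `BufferSnapshot`; the closure mirrors
    /// how the brackets query is looked up elsewhere in this file; not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.brackets_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` for this match, then move on to the next one.
    ///     matches.advance();
    /// }
    /// ```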
3591 pub fn matches(
3592 &self,
3593 range: Range<usize>,
3594 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3595 ) -> SyntaxMapMatches {
3596 self.syntax.matches(range, self, query)
3597 }
3598
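    /// Returns all bracket pairs whose overall range (from the start of the opening bracket
    /// to the end of the closing bracket) overlaps the given range, including pairs produced
    /// by `newline_only` patterns, which [`Self::bracket_ranges`] filters out.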
3599 pub fn all_bracket_ranges(
3600 &self,
3601 range: Range<usize>,
3602 ) -> impl Iterator<Item = BracketMatch> + '_ {
3603 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3604 grammar.brackets_config.as_ref().map(|c| &c.query)
3605 });
3606 let configs = matches
3607 .grammars()
3608 .iter()
3609 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3610 .collect::<Vec<_>>();
3611
3612 iter::from_fn(move || {
3613 while let Some(mat) = matches.peek() {
3614 let mut open = None;
3615 let mut close = None;
3616 let config = &configs[mat.grammar_index];
3617 let pattern = &config.patterns[mat.pattern_index];
3618 for capture in mat.captures {
3619 if capture.index == config.open_capture_ix {
3620 open = Some(capture.node.byte_range());
3621 } else if capture.index == config.close_capture_ix {
3622 close = Some(capture.node.byte_range());
3623 }
3624 }
3625
3626 matches.advance();
3627
3628 let Some((open_range, close_range)) = open.zip(close) else {
3629 continue;
3630 };
3631
3632 let bracket_range = open_range.start..=close_range.end;
3633 if !bracket_range.overlaps(&range) {
3634 continue;
3635 }
3636
3637 return Some(BracketMatch {
3638 open_range,
3639 close_range,
3640 newline_only: pattern.newline_only,
3641 });
3642 }
3643 None
3644 })
3645 }
3646
    /// Returns bracket range pairs overlapping or adjacent to `range`.
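    ///
    /// A rough sketch (assumes `snapshot` is a `BufferSnapshot` whose language has a brackets
    /// query, and `offset` is some byte offset; not compiled as a doctest):
    ///
    /// ```ignore
    /// for pair in snapshot.bracket_ranges(offset..offset) {
    ///     // `open_range` and `close_range` are byte ranges of the bracket tokens.
    ///     let _ = (&pair.open_range, &pair.close_range);
    /// }
    /// ```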
3648 pub fn bracket_ranges<T: ToOffset>(
3649 &self,
3650 range: Range<T>,
3651 ) -> impl Iterator<Item = BracketMatch> + '_ {
        // Expand the range by one character on each side so that bracket pairs adjacent
        // to `range` are found as well as pairs that contain it.
3653 let range = range.start.to_offset(self).saturating_sub(1)
3654 ..self.len().min(range.end.to_offset(self) + 1);
3655 self.all_bracket_ranges(range)
3656 .filter(|pair| !pair.newline_only)
3657 }
3658
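    /// Returns the ranges of text objects (as defined by each language's text-object query)
    /// that overlap the given range, together with the matched [`TextObject`] kind.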
3659 pub fn text_object_ranges<T: ToOffset>(
3660 &self,
3661 range: Range<T>,
3662 options: TreeSitterOptions,
3663 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3664 let range = range.start.to_offset(self).saturating_sub(1)
3665 ..self.len().min(range.end.to_offset(self) + 1);
3666
3667 let mut matches =
3668 self.syntax
3669 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3670 grammar.text_object_config.as_ref().map(|c| &c.query)
3671 });
3672
3673 let configs = matches
3674 .grammars()
3675 .iter()
3676 .map(|grammar| grammar.text_object_config.as_ref())
3677 .collect::<Vec<_>>();
3678
3679 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3680
3681 iter::from_fn(move || loop {
3682 while let Some(capture) = captures.pop() {
3683 if capture.0.overlaps(&range) {
3684 return Some(capture);
3685 }
3686 }
3687
3688 let mat = matches.peek()?;
3689
3690 let Some(config) = configs[mat.grammar_index].as_ref() else {
3691 matches.advance();
3692 continue;
3693 };
3694
3695 for capture in mat.captures {
3696 let Some(ix) = config
3697 .text_objects_by_capture_ix
3698 .binary_search_by_key(&capture.index, |e| e.0)
3699 .ok()
3700 else {
3701 continue;
3702 };
3703 let text_object = config.text_objects_by_capture_ix[ix].1;
3704 let byte_range = capture.node.byte_range();
3705
3706 let mut found = false;
3707 for (range, existing) in captures.iter_mut() {
3708 if existing == &text_object {
3709 range.start = range.start.min(byte_range.start);
3710 range.end = range.end.max(byte_range.end);
3711 found = true;
3712 break;
3713 }
3714 }
3715
3716 if !found {
3717 captures.push((byte_range, text_object));
3718 }
3719 }
3720
3721 matches.advance();
3722 })
3723 }
3724
    /// Returns enclosing bracket ranges containing the given range.
3726 pub fn enclosing_bracket_ranges<T: ToOffset>(
3727 &self,
3728 range: Range<T>,
3729 ) -> impl Iterator<Item = BracketMatch> + '_ {
3730 let range = range.start.to_offset(self)..range.end.to_offset(self);
3731
3732 self.bracket_ranges(range.clone()).filter(move |pair| {
3733 pair.open_range.start <= range.start && pair.close_range.end >= range.end
3734 })
3735 }
3736
    /// Returns the open and close ranges of the smallest bracket pair enclosing the given
    /// range, or `None` if no bracket pair contains it.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
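    ///
    /// A rough sketch (assumes `snapshot` is a `BufferSnapshot`; the filter shown is a
    /// hypothetical one that ignores empty bracket tokens; not compiled as a doctest):
    ///
    /// ```ignore
    /// let filter = |open: Range<usize>, close: Range<usize>| !open.is_empty() && !close.is_empty();
    /// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(10..12, Some(&filter)) {
    ///     // `open` and `close` are the byte ranges of the tightest enclosing bracket pair.
    /// }
    /// ```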
3740 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3741 &self,
3742 range: Range<T>,
3743 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3744 ) -> Option<(Range<usize>, Range<usize>)> {
3745 let range = range.start.to_offset(self)..range.end.to_offset(self);
3746
3747 // Get the ranges of the innermost pair of brackets.
3748 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3749
3750 for pair in self.enclosing_bracket_ranges(range.clone()) {
3751 if let Some(range_filter) = range_filter {
3752 if !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
3753 continue;
3754 }
3755 }
3756
3757 let len = pair.close_range.end - pair.open_range.start;
3758
3759 if let Some((existing_open, existing_close)) = &result {
3760 let existing_len = existing_close.end - existing_open.start;
3761 if len > existing_len {
3762 continue;
3763 }
3764 }
3765
3766 result = Some((pair.open_range, pair.close_range));
3767 }
3768
3769 result
3770 }
3771
    /// Returns the byte offset ranges of any matches of the redaction query.
3773 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3774 /// will be run on the relevant section of the buffer.
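    ///
    /// A rough sketch (assumes `snapshot` is a `BufferSnapshot` for a language with a
    /// redaction query; not compiled as a doctest):
    ///
    /// ```ignore
    /// for range in snapshot.redacted_ranges(0..snapshot.len()) {
    ///     // The text within `range` should be obscured when rendered, e.g. in a shared session.
    /// }
    /// ```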
3775 pub fn redacted_ranges<T: ToOffset>(
3776 &self,
3777 range: Range<T>,
3778 ) -> impl Iterator<Item = Range<usize>> + '_ {
3779 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3780 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3781 grammar
3782 .redactions_config
3783 .as_ref()
3784 .map(|config| &config.query)
3785 });
3786
3787 let configs = syntax_matches
3788 .grammars()
3789 .iter()
3790 .map(|grammar| grammar.redactions_config.as_ref())
3791 .collect::<Vec<_>>();
3792
3793 iter::from_fn(move || {
3794 let redacted_range = syntax_matches
3795 .peek()
3796 .and_then(|mat| {
3797 configs[mat.grammar_index].and_then(|config| {
3798 mat.captures
3799 .iter()
3800 .find(|capture| capture.index == config.redaction_capture_ix)
3801 })
3802 })
3803 .map(|mat| mat.node.byte_range());
3804 syntax_matches.advance();
3805 redacted_range
3806 })
3807 }
3808
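    /// Returns the content ranges of language injections (e.g. a code fence embedded in
    /// Markdown) that intersect the given range, along with the injected [`Language`].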
3809 pub fn injections_intersecting_range<T: ToOffset>(
3810 &self,
3811 range: Range<T>,
3812 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3813 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3814
3815 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3816 grammar
3817 .injection_config
3818 .as_ref()
3819 .map(|config| &config.query)
3820 });
3821
3822 let configs = syntax_matches
3823 .grammars()
3824 .iter()
3825 .map(|grammar| grammar.injection_config.as_ref())
3826 .collect::<Vec<_>>();
3827
3828 iter::from_fn(move || {
3829 let ranges = syntax_matches.peek().and_then(|mat| {
3830 let config = &configs[mat.grammar_index]?;
3831 let content_capture_range = mat.captures.iter().find_map(|capture| {
3832 if capture.index == config.content_capture_ix {
3833 Some(capture.node.byte_range())
3834 } else {
3835 None
3836 }
3837 })?;
3838 let language = self.language_at(content_capture_range.start)?;
3839 Some((content_capture_range, language))
3840 });
3841 syntax_matches.advance();
3842 ranges
3843 })
3844 }
3845
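    /// Returns the runnables (e.g. tests) detected by each language's runnable query within
    /// the given range, along with their run markers and captured text.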
3846 pub fn runnable_ranges(
3847 &self,
3848 offset_range: Range<usize>,
3849 ) -> impl Iterator<Item = RunnableRange> + '_ {
3850 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3851 grammar.runnable_config.as_ref().map(|config| &config.query)
3852 });
3853
3854 let test_configs = syntax_matches
3855 .grammars()
3856 .iter()
3857 .map(|grammar| grammar.runnable_config.as_ref())
3858 .collect::<Vec<_>>();
3859
3860 iter::from_fn(move || loop {
3861 let mat = syntax_matches.peek()?;
3862
3863 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
3864 let mut run_range = None;
3865 let full_range = mat.captures.iter().fold(
3866 Range {
3867 start: usize::MAX,
3868 end: 0,
3869 },
3870 |mut acc, next| {
3871 let byte_range = next.node.byte_range();
3872 if acc.start > byte_range.start {
3873 acc.start = byte_range.start;
3874 }
3875 if acc.end < byte_range.end {
3876 acc.end = byte_range.end;
3877 }
3878 acc
3879 },
3880 );
3881 if full_range.start > full_range.end {
3882 // We did not find a full spanning range of this match.
3883 return None;
3884 }
3885 let extra_captures: SmallVec<[_; 1]> =
3886 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
3887 test_configs
3888 .extra_captures
3889 .get(capture.index as usize)
3890 .cloned()
3891 .and_then(|tag_name| match tag_name {
3892 RunnableCapture::Named(name) => {
3893 Some((capture.node.byte_range(), name))
3894 }
3895 RunnableCapture::Run => {
3896 let _ = run_range.insert(capture.node.byte_range());
3897 None
3898 }
3899 })
3900 }));
3901 let run_range = run_range?;
3902 let tags = test_configs
3903 .query
3904 .property_settings(mat.pattern_index)
3905 .iter()
3906 .filter_map(|property| {
3907 if *property.key == *"tag" {
3908 property
3909 .value
3910 .as_ref()
3911 .map(|value| RunnableTag(value.to_string().into()))
3912 } else {
3913 None
3914 }
3915 })
3916 .collect();
3917 let extra_captures = extra_captures
3918 .into_iter()
3919 .map(|(range, name)| {
3920 (
3921 name.to_string(),
3922 self.text_for_range(range.clone()).collect::<String>(),
3923 )
3924 })
3925 .collect();
3926 // All tags should have the same range.
3927 Some(RunnableRange {
3928 run_range,
3929 full_range,
3930 runnable: Runnable {
3931 tags,
3932 language: mat.language,
3933 buffer: self.remote_id(),
3934 },
3935 extra_captures,
3936 buffer_id: self.remote_id(),
3937 })
3938 });
3939
3940 syntax_matches.advance();
3941 if test_range.is_some() {
                // It's fine to short-circuit when `.peek()?` returns `None`. We just don't want
                // to return `None` from this iterator when a match lacked a run marker, so in
                // that case we loop around and try the next match instead.
3944 return test_range;
3945 }
3946 })
3947 }
3948
    /// Returns the selections of all peers that intersect the given range, optionally
    /// including the local replica's selections.
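    ///
    /// A rough sketch (assumes `snapshot` is a `BufferSnapshot`; `Anchor::MIN..Anchor::MAX`
    /// spans the whole buffer; not compiled as a doctest):
    ///
    /// ```ignore
    /// for (replica_id, _line_mode, _cursor_shape, selections) in
    ///     snapshot.selections_in_range(Anchor::MIN..Anchor::MAX, false)
    /// {
    ///     for selection in selections {
    ///         // `selection.start` and `selection.end` are `Anchor`s into this buffer.
    ///     }
    /// }
    /// ```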
3950 #[allow(clippy::type_complexity)]
3951 pub fn selections_in_range(
3952 &self,
3953 range: Range<Anchor>,
3954 include_local: bool,
3955 ) -> impl Iterator<
3956 Item = (
3957 ReplicaId,
3958 bool,
3959 CursorShape,
3960 impl Iterator<Item = &Selection<Anchor>> + '_,
3961 ),
3962 > + '_ {
3963 self.remote_selections
3964 .iter()
3965 .filter(move |(replica_id, set)| {
3966 (include_local || **replica_id != self.text.replica_id())
3967 && !set.selections.is_empty()
3968 })
3969 .map(move |(replica_id, set)| {
3970 let start_ix = match set.selections.binary_search_by(|probe| {
3971 probe.end.cmp(&range.start, self).then(Ordering::Greater)
3972 }) {
3973 Ok(ix) | Err(ix) => ix,
3974 };
3975 let end_ix = match set.selections.binary_search_by(|probe| {
3976 probe.start.cmp(&range.end, self).then(Ordering::Less)
3977 }) {
3978 Ok(ix) | Err(ix) => ix,
3979 };
3980
3981 (
3982 *replica_id,
3983 set.line_mode,
3984 set.cursor_shape,
3985 set.selections[start_ix..end_ix].iter(),
3986 )
3987 })
3988 }
3989
    /// Returns whether the buffer contains any diagnostics.
3991 pub fn has_diagnostics(&self) -> bool {
3992 !self.diagnostics.is_empty()
3993 }
3994
3995 /// Returns all the diagnostics intersecting the given range.
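    ///
    /// A rough sketch (assumes `snapshot` is a `BufferSnapshot`; the second type parameter
    /// selects the coordinate type of the returned ranges; not compiled as a doctest):
    ///
    /// ```ignore
    /// for entry in
    ///     snapshot.diagnostics_in_range::<_, Point>(Point::zero()..snapshot.max_point(), false)
    /// {
    ///     // `entry.range` is a `Range<Point>`; `entry.diagnostic` holds severity, message, etc.
    /// }
    /// ```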
3996 pub fn diagnostics_in_range<'a, T, O>(
3997 &'a self,
3998 search_range: Range<T>,
3999 reversed: bool,
4000 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4001 where
4002 T: 'a + Clone + ToOffset,
4003 O: 'a + FromAnchor,
4004 {
4005 let mut iterators: Vec<_> = self
4006 .diagnostics
4007 .iter()
4008 .map(|(_, collection)| {
4009 collection
4010 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4011 .peekable()
4012 })
4013 .collect();
4014
4015 std::iter::from_fn(move || {
4016 let (next_ix, _) = iterators
4017 .iter_mut()
4018 .enumerate()
4019 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4020 .min_by(|(_, a), (_, b)| {
4021 let cmp = a
4022 .range
4023 .start
4024 .cmp(&b.range.start, self)
4025 // when range is equal, sort by diagnostic severity
4026 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4027 // and stabilize order with group_id
4028 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4029 if reversed {
4030 cmp.reverse()
4031 } else {
4032 cmp
4033 }
4034 })?;
4035 iterators[next_ix]
4036 .next()
4037 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4038 diagnostic,
4039 range: FromAnchor::from_anchor(&range.start, self)
4040 ..FromAnchor::from_anchor(&range.end, self),
4041 })
4042 })
4043 }
4044
4045 /// Returns all the diagnostic groups associated with the given
4046 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4048 pub fn diagnostic_groups(
4049 &self,
4050 language_server_id: Option<LanguageServerId>,
4051 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4052 let mut groups = Vec::new();
4053
4054 if let Some(language_server_id) = language_server_id {
4055 if let Ok(ix) = self
4056 .diagnostics
4057 .binary_search_by_key(&language_server_id, |e| e.0)
4058 {
4059 self.diagnostics[ix]
4060 .1
4061 .groups(language_server_id, &mut groups, self);
4062 }
4063 } else {
4064 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4065 diagnostics.groups(*language_server_id, &mut groups, self);
4066 }
4067 }
4068
4069 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4070 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4071 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4072 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4073 });
4074
4075 groups
4076 }
4077
4078 /// Returns an iterator over the diagnostics for the given group.
4079 pub fn diagnostic_group<O>(
4080 &self,
4081 group_id: usize,
4082 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4083 where
4084 O: FromAnchor + 'static,
4085 {
4086 self.diagnostics
4087 .iter()
4088 .flat_map(move |(_, set)| set.group(group_id, self))
4089 }
4090
4091 /// An integer version number that accounts for all updates besides
4092 /// the buffer's text itself (which is versioned via a version vector).
4093 pub fn non_text_state_update_count(&self) -> usize {
4094 self.non_text_state_update_count
4095 }
4096
    /// Returns a snapshot of the underlying file.
4098 pub fn file(&self) -> Option<&Arc<dyn File>> {
4099 self.file.as_ref()
4100 }
4101
    /// Resolves the path of the underlying file, relative to its worktree root, or prefixed
    /// with the worktree root's name when `include_root` is true.
4103 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4104 if let Some(file) = self.file() {
4105 if file.path().file_name().is_none() || include_root {
4106 Some(file.full_path(cx))
4107 } else {
4108 Some(file.path().to_path_buf())
4109 }
4110 } else {
4111 None
4112 }
4113 }
4114}
4115
4116fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4117 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4118}
4119
4120fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4121 let mut result = IndentSize::spaces(0);
4122 for c in text {
4123 let kind = match c {
4124 ' ' => IndentKind::Space,
4125 '\t' => IndentKind::Tab,
4126 _ => break,
4127 };
4128 if result.len == 0 {
4129 result.kind = kind;
4130 }
4131 result.len += 1;
4132 }
4133 result
4134}
4135
4136impl Clone for BufferSnapshot {
4137 fn clone(&self) -> Self {
4138 Self {
4139 text: self.text.clone(),
4140 syntax: self.syntax.clone(),
4141 file: self.file.clone(),
4142 remote_selections: self.remote_selections.clone(),
4143 diagnostics: self.diagnostics.clone(),
4144 language: self.language.clone(),
4145 non_text_state_update_count: self.non_text_state_update_count,
4146 }
4147 }
4148}
4149
4150impl Deref for BufferSnapshot {
4151 type Target = text::BufferSnapshot;
4152
4153 fn deref(&self) -> &Self::Target {
4154 &self.text
4155 }
4156}
4157
4158unsafe impl Send for BufferChunks<'_> {}
4159
4160impl<'a> BufferChunks<'a> {
4161 pub(crate) fn new(
4162 text: &'a Rope,
4163 range: Range<usize>,
4164 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4165 diagnostics: bool,
4166 buffer_snapshot: Option<&'a BufferSnapshot>,
4167 ) -> Self {
4168 let mut highlights = None;
4169 if let Some((captures, highlight_maps)) = syntax {
4170 highlights = Some(BufferChunkHighlights {
4171 captures,
4172 next_capture: None,
4173 stack: Default::default(),
4174 highlight_maps,
4175 })
4176 }
4177
4178 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4179 let chunks = text.chunks_in_range(range.clone());
4180
4181 let mut this = BufferChunks {
4182 range,
4183 buffer_snapshot,
4184 chunks,
4185 diagnostic_endpoints,
4186 error_depth: 0,
4187 warning_depth: 0,
4188 information_depth: 0,
4189 hint_depth: 0,
4190 unnecessary_depth: 0,
4191 highlights,
4192 };
4193 this.initialize_diagnostic_endpoints();
4194 this
4195 }
4196
    /// Seeks to the given byte range in the buffer.
4198 pub fn seek(&mut self, range: Range<usize>) {
4199 let old_range = std::mem::replace(&mut self.range, range.clone());
4200 self.chunks.set_range(self.range.clone());
4201 if let Some(highlights) = self.highlights.as_mut() {
4202 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4203 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4204 highlights
4205 .stack
4206 .retain(|(end_offset, _)| *end_offset > range.start);
4207 if let Some(capture) = &highlights.next_capture {
4208 if range.start >= capture.node.start_byte() {
4209 let next_capture_end = capture.node.end_byte();
4210 if range.start < next_capture_end {
4211 highlights.stack.push((
4212 next_capture_end,
4213 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4214 ));
4215 }
4216 highlights.next_capture.take();
4217 }
4218 }
4219 } else if let Some(snapshot) = self.buffer_snapshot {
4220 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4221 *highlights = BufferChunkHighlights {
4222 captures,
4223 next_capture: None,
4224 stack: Default::default(),
4225 highlight_maps,
4226 };
4227 } else {
4228 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4229 // Seeking such BufferChunks is not supported.
                debug_assert!(
                    false,
                    "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
                );
4231 }
4232
4233 highlights.captures.set_byte_range(self.range.clone());
4234 self.initialize_diagnostic_endpoints();
4235 }
4236 }
4237
4238 fn initialize_diagnostic_endpoints(&mut self) {
4239 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4240 if let Some(buffer) = self.buffer_snapshot {
4241 let mut diagnostic_endpoints = Vec::new();
4242 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4243 diagnostic_endpoints.push(DiagnosticEndpoint {
4244 offset: entry.range.start,
4245 is_start: true,
4246 severity: entry.diagnostic.severity,
4247 is_unnecessary: entry.diagnostic.is_unnecessary,
4248 });
4249 diagnostic_endpoints.push(DiagnosticEndpoint {
4250 offset: entry.range.end,
4251 is_start: false,
4252 severity: entry.diagnostic.severity,
4253 is_unnecessary: entry.diagnostic.is_unnecessary,
4254 });
4255 }
4256 diagnostic_endpoints
4257 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4258 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4259 self.hint_depth = 0;
4260 self.error_depth = 0;
4261 self.warning_depth = 0;
4262 self.information_depth = 0;
4263 }
4264 }
4265 }
4266
4267 /// The current byte offset in the buffer.
4268 pub fn offset(&self) -> usize {
4269 self.range.start
4270 }
4271
4272 pub fn range(&self) -> Range<usize> {
4273 self.range.clone()
4274 }
4275
4276 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4277 let depth = match endpoint.severity {
4278 DiagnosticSeverity::ERROR => &mut self.error_depth,
4279 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4280 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4281 DiagnosticSeverity::HINT => &mut self.hint_depth,
4282 _ => return,
4283 };
4284 if endpoint.is_start {
4285 *depth += 1;
4286 } else {
4287 *depth -= 1;
4288 }
4289
4290 if endpoint.is_unnecessary {
4291 if endpoint.is_start {
4292 self.unnecessary_depth += 1;
4293 } else {
4294 self.unnecessary_depth -= 1;
4295 }
4296 }
4297 }
4298
4299 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4300 if self.error_depth > 0 {
4301 Some(DiagnosticSeverity::ERROR)
4302 } else if self.warning_depth > 0 {
4303 Some(DiagnosticSeverity::WARNING)
4304 } else if self.information_depth > 0 {
4305 Some(DiagnosticSeverity::INFORMATION)
4306 } else if self.hint_depth > 0 {
4307 Some(DiagnosticSeverity::HINT)
4308 } else {
4309 None
4310 }
4311 }
4312
4313 fn current_code_is_unnecessary(&self) -> bool {
4314 self.unnecessary_depth > 0
4315 }
4316}
4317
4318impl<'a> Iterator for BufferChunks<'a> {
4319 type Item = Chunk<'a>;
4320
4321 fn next(&mut self) -> Option<Self::Item> {
4322 let mut next_capture_start = usize::MAX;
4323 let mut next_diagnostic_endpoint = usize::MAX;
4324
4325 if let Some(highlights) = self.highlights.as_mut() {
4326 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4327 if *parent_capture_end <= self.range.start {
4328 highlights.stack.pop();
4329 } else {
4330 break;
4331 }
4332 }
4333
4334 if highlights.next_capture.is_none() {
4335 highlights.next_capture = highlights.captures.next();
4336 }
4337
4338 while let Some(capture) = highlights.next_capture.as_ref() {
4339 if self.range.start < capture.node.start_byte() {
4340 next_capture_start = capture.node.start_byte();
4341 break;
4342 } else {
4343 let highlight_id =
4344 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4345 highlights
4346 .stack
4347 .push((capture.node.end_byte(), highlight_id));
4348 highlights.next_capture = highlights.captures.next();
4349 }
4350 }
4351 }
4352
4353 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4354 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4355 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4356 if endpoint.offset <= self.range.start {
4357 self.update_diagnostic_depths(endpoint);
4358 diagnostic_endpoints.next();
4359 } else {
4360 next_diagnostic_endpoint = endpoint.offset;
4361 break;
4362 }
4363 }
4364 }
4365 self.diagnostic_endpoints = diagnostic_endpoints;
4366
4367 if let Some(chunk) = self.chunks.peek() {
4368 let chunk_start = self.range.start;
4369 let mut chunk_end = (self.chunks.offset() + chunk.len())
4370 .min(next_capture_start)
4371 .min(next_diagnostic_endpoint);
4372 let mut highlight_id = None;
4373 if let Some(highlights) = self.highlights.as_ref() {
4374 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4375 chunk_end = chunk_end.min(*parent_capture_end);
4376 highlight_id = Some(*parent_highlight_id);
4377 }
4378 }
4379
4380 let slice =
4381 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4382 self.range.start = chunk_end;
4383 if self.range.start == self.chunks.offset() + chunk.len() {
4384 self.chunks.next().unwrap();
4385 }
4386
4387 Some(Chunk {
4388 text: slice,
4389 syntax_highlight_id: highlight_id,
4390 diagnostic_severity: self.current_diagnostic_severity(),
4391 is_unnecessary: self.current_code_is_unnecessary(),
4392 ..Default::default()
4393 })
4394 } else {
4395 None
4396 }
4397 }
4398}
4399
4400impl operation_queue::Operation for Operation {
4401 fn lamport_timestamp(&self) -> clock::Lamport {
4402 match self {
4403 Operation::Buffer(_) => {
4404 unreachable!("buffer operations should never be deferred at this layer")
4405 }
4406 Operation::UpdateDiagnostics {
4407 lamport_timestamp, ..
4408 }
4409 | Operation::UpdateSelections {
4410 lamport_timestamp, ..
4411 }
4412 | Operation::UpdateCompletionTriggers {
4413 lamport_timestamp, ..
4414 } => *lamport_timestamp,
4415 }
4416 }
4417}
4418
4419impl Default for Diagnostic {
4420 fn default() -> Self {
4421 Self {
4422 source: Default::default(),
4423 code: None,
4424 severity: DiagnosticSeverity::ERROR,
4425 message: Default::default(),
4426 group_id: 0,
4427 is_primary: false,
4428 is_disk_based: false,
4429 is_unnecessary: false,
4430 data: None,
4431 }
4432 }
4433}
4434
4435impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
4437 pub fn spaces(len: u32) -> Self {
4438 Self {
4439 len,
4440 kind: IndentKind::Space,
4441 }
4442 }
4443
4444 /// Returns an [`IndentSize`] representing a tab.
4445 pub fn tab() -> Self {
4446 Self {
4447 len: 1,
4448 kind: IndentKind::Tab,
4449 }
4450 }
4451
4452 /// An iterator over the characters represented by this [`IndentSize`].
4453 pub fn chars(&self) -> impl Iterator<Item = char> {
4454 iter::repeat(self.char()).take(self.len as usize)
4455 }
4456
4457 /// The character representation of this [`IndentSize`].
4458 pub fn char(&self) -> char {
4459 match self.kind {
4460 IndentKind::Space => ' ',
4461 IndentKind::Tab => '\t',
4462 }
4463 }
4464
4465 /// Consumes the current [`IndentSize`] and returns a new one that has
4466 /// been shrunk or enlarged by the given size along the given direction.
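    ///
    /// A rough sketch of the behavior (the values follow from the match arms below; not
    /// compiled as a doctest):
    ///
    /// ```ignore
    /// // Growing with a matching kind adds the lengths.
    /// assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(2)).len, 6);
    /// // Shrinking subtracts the lengths, but only when there is enough indentation to remove.
    /// assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(2)).len, 2);
    /// ```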
4467 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4468 match direction {
4469 Ordering::Less => {
4470 if self.kind == size.kind && self.len >= size.len {
4471 self.len -= size.len;
4472 }
4473 }
4474 Ordering::Equal => {}
4475 Ordering::Greater => {
4476 if self.len == 0 {
4477 self = size;
4478 } else if self.kind == size.kind {
4479 self.len += size.len;
4480 }
4481 }
4482 }
4483 self
4484 }
4485
4486 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4487 match self.kind {
4488 IndentKind::Space => self.len as usize,
4489 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4490 }
4491 }
4492}
4493
4494#[cfg(any(test, feature = "test-support"))]
4495pub struct TestFile {
4496 pub path: Arc<Path>,
4497 pub root_name: String,
4498 pub local_root: Option<PathBuf>,
4499}
4500
4501#[cfg(any(test, feature = "test-support"))]
4502impl File for TestFile {
4503 fn path(&self) -> &Arc<Path> {
4504 &self.path
4505 }
4506
4507 fn full_path(&self, _: &gpui::App) -> PathBuf {
4508 PathBuf::from(&self.root_name).join(self.path.as_ref())
4509 }
4510
4511 fn as_local(&self) -> Option<&dyn LocalFile> {
4512 if self.local_root.is_some() {
4513 Some(self)
4514 } else {
4515 None
4516 }
4517 }
4518
4519 fn disk_state(&self) -> DiskState {
4520 unimplemented!()
4521 }
4522
4523 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4524 self.path().file_name().unwrap_or(self.root_name.as_ref())
4525 }
4526
4527 fn worktree_id(&self, _: &App) -> WorktreeId {
4528 WorktreeId::from_usize(0)
4529 }
4530
4531 fn as_any(&self) -> &dyn std::any::Any {
4532 unimplemented!()
4533 }
4534
4535 fn to_proto(&self, _: &App) -> rpc::proto::File {
4536 unimplemented!()
4537 }
4538
4539 fn is_private(&self) -> bool {
4540 false
4541 }
4542}
4543
4544#[cfg(any(test, feature = "test-support"))]
4545impl LocalFile for TestFile {
4546 fn abs_path(&self, _cx: &App) -> PathBuf {
4547 PathBuf::from(self.local_root.as_ref().unwrap())
4548 .join(&self.root_name)
4549 .join(self.path.as_ref())
4550 }
4551
4552 fn load(&self, _cx: &App) -> Task<Result<String>> {
4553 unimplemented!()
4554 }
4555
4556 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4557 unimplemented!()
4558 }
4559}
4560
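/// Collapses an iterator of `u32` values into ranges of consecutive values, starting a new
/// range whenever the next value is not contiguous or the current range has reached `max_len`.
///
/// A rough sketch of the behavior (the values follow from the implementation below; not
/// compiled as a doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7, 9..10]);
/// ```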
4561pub(crate) fn contiguous_ranges(
4562 values: impl Iterator<Item = u32>,
4563 max_len: usize,
4564) -> impl Iterator<Item = Range<u32>> {
4565 let mut values = values;
4566 let mut current_range: Option<Range<u32>> = None;
4567 std::iter::from_fn(move || loop {
4568 if let Some(value) = values.next() {
4569 if let Some(range) = &mut current_range {
4570 if value == range.end && range.len() < max_len {
4571 range.end += 1;
4572 continue;
4573 }
4574 }
4575
4576 let prev_range = current_range.clone();
4577 current_range = Some(value..(value + 1));
4578 if prev_range.is_some() {
4579 return prev_range;
4580 }
4581 } else {
4582 return current_range.take();
4583 }
4584 })
4585}
4586
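/// Classifies characters as words, whitespace, or punctuation, optionally taking a
/// [`LanguageScope`]'s additional word characters into account.
///
/// A rough sketch (assumes `CharKind` as used elsewhere in this crate; not compiled as a doctest):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert_eq!(classifier.kind('x'), CharKind::Word);
/// assert_eq!(classifier.kind('.'), CharKind::Punctuation);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// ```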
4587#[derive(Default, Debug)]
4588pub struct CharClassifier {
4589 scope: Option<LanguageScope>,
4590 for_completion: bool,
4591 ignore_punctuation: bool,
4592}
4593
4594impl CharClassifier {
4595 pub fn new(scope: Option<LanguageScope>) -> Self {
4596 Self {
4597 scope,
4598 for_completion: false,
4599 ignore_punctuation: false,
4600 }
4601 }
4602
4603 pub fn for_completion(self, for_completion: bool) -> Self {
4604 Self {
4605 for_completion,
4606 ..self
4607 }
4608 }
4609
4610 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4611 Self {
4612 ignore_punctuation,
4613 ..self
4614 }
4615 }
4616
4617 pub fn is_whitespace(&self, c: char) -> bool {
4618 self.kind(c) == CharKind::Whitespace
4619 }
4620
4621 pub fn is_word(&self, c: char) -> bool {
4622 self.kind(c) == CharKind::Word
4623 }
4624
4625 pub fn is_punctuation(&self, c: char) -> bool {
4626 self.kind(c) == CharKind::Punctuation
4627 }
4628
4629 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4630 if c.is_whitespace() {
4631 return CharKind::Whitespace;
4632 } else if c.is_alphanumeric() || c == '_' {
4633 return CharKind::Word;
4634 }
4635
4636 if let Some(scope) = &self.scope {
4637 if let Some(characters) = scope.word_characters() {
4638 if characters.contains(&c) {
4639 if c == '-' && !self.for_completion && !ignore_punctuation {
4640 return CharKind::Punctuation;
4641 }
4642 return CharKind::Word;
4643 }
4644 }
4645 }
4646
4647 if ignore_punctuation {
4648 CharKind::Word
4649 } else {
4650 CharKind::Punctuation
4651 }
4652 }
4653
4654 pub fn kind(&self, c: char) -> CharKind {
4655 self.kind_with(c, self.ignore_punctuation)
4656 }
4657}
4658
4659/// Find all of the ranges of whitespace that occur at the ends of lines
4660/// in the given rope.
4661///
4662/// This could also be done with a regex search, but this implementation
4663/// avoids copying text.
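///
/// A rough sketch (the expected ranges follow from the definition above; not compiled as a doctest):
///
/// ```ignore
/// let rope = Rope::from("fn main() {  \n    let x = 1;\t\n}\n");
/// // One range for the spaces after `{`, one for the tab after `1;`.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..13, 28..29]);
/// ```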
4664pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4665 let mut ranges = Vec::new();
4666
4667 let mut offset = 0;
4668 let mut prev_chunk_trailing_whitespace_range = 0..0;
4669 for chunk in rope.chunks() {
4670 let mut prev_line_trailing_whitespace_range = 0..0;
4671 for (i, line) in chunk.split('\n').enumerate() {
4672 let line_end_offset = offset + line.len();
4673 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4674 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4675
4676 if i == 0 && trimmed_line_len == 0 {
4677 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4678 }
4679 if !prev_line_trailing_whitespace_range.is_empty() {
4680 ranges.push(prev_line_trailing_whitespace_range);
4681 }
4682
4683 offset = line_end_offset + 1;
4684 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4685 }
4686
4687 offset -= 1;
4688 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4689 }
4690
4691 if !prev_chunk_trailing_whitespace_range.is_empty() {
4692 ranges.push(prev_chunk_trailing_whitespace_range);
4693 }
4694
4695 ranges
4696}