1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 markdown::ParsedMarkdown,
5 proto, Grammar, Language, LanguageRegistry,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 language_settings::{language_settings, LanguageSettings},
10 markdown::parse_markdown,
11 outline::OutlineItem,
12 syntax_map::{
13 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
14 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
15 },
16 task_context::RunnableRange,
17 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, TextObject,
18 TreeSitterOptions,
19};
20use anyhow::{anyhow, Context as _, Result};
21use async_watch as watch;
22use clock::Lamport;
23pub use clock::ReplicaId;
24use collections::HashMap;
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 AnyElement, App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, Pixels,
29 SharedString, StyledText, Task, TaskLabel, TextStyle, Window,
30};
31use lsp::LanguageServerId;
32use parking_lot::Mutex;
33use schemars::JsonSchema;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use similar::{ChangeTag, TextDiff};
38use smallvec::SmallVec;
39use smol::future::yield_now;
40use std::{
41 any::Any,
42 borrow::Cow,
43 cell::Cell,
44 cmp::{self, Ordering, Reverse},
45 collections::{BTreeMap, BTreeSet},
46 ffi::OsStr,
47 fmt,
48 future::Future,
49 iter::{self, Iterator, Peekable},
50 mem,
51 num::NonZeroU32,
52 ops::{Deref, DerefMut, Range},
53 path::{Path, PathBuf},
54 str,
55 sync::{Arc, LazyLock},
56 time::{Duration, Instant},
57 vec,
58};
59use sum_tree::TreeMap;
60use text::operation_queue::OperationQueue;
61use text::*;
62pub use text::{
63 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
64 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
65 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
66 Transaction, TransactionId, Unclipped,
67};
68use theme::{ActiveTheme as _, SyntaxTheme};
69#[cfg(any(test, feature = "test-support"))]
70use util::RandomCharIter;
71use util::{debug_panic, maybe, RangeExt};
72
73#[cfg(any(test, feature = "test-support"))]
74pub use {tree_sitter_rust, tree_sitter_typescript};
75
76pub use lsp::DiagnosticSeverity;
77
78/// A label for the background task spawned by the buffer to compute
79/// a diff against the contents of its file.
80pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
81
/// Indicates whether a [`Buffer`] has permission to be edited.
83#[derive(PartialEq, Clone, Copy, Debug)]
84pub enum Capability {
85 /// The buffer is a mutable replica.
86 ReadWrite,
87 /// The buffer is a read-only replica.
88 ReadOnly,
89}
90
91pub type BufferRow = u32;
92
93/// An in-memory representation of a source code file, including its text,
94/// syntax trees, git status, and diagnostics.
95pub struct Buffer {
96 text: TextBuffer,
97 branch_state: Option<BufferBranchState>,
98 /// Filesystem state, `None` when there is no path.
99 file: Option<Arc<dyn File>>,
100 /// The mtime of the file when this buffer was last loaded from
101 /// or saved to disk.
102 saved_mtime: Option<MTime>,
103 /// The version vector when this buffer was last loaded from
104 /// or saved to disk.
105 saved_version: clock::Global,
106 preview_version: clock::Global,
107 transaction_depth: usize,
108 was_dirty_before_starting_transaction: Option<bool>,
109 reload_task: Option<Task<Result<()>>>,
110 language: Option<Arc<Language>>,
111 autoindent_requests: Vec<Arc<AutoindentRequest>>,
112 pending_autoindent: Option<Task<()>>,
113 sync_parse_timeout: Duration,
114 syntax_map: Mutex<SyntaxMap>,
115 parsing_in_background: bool,
116 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
117 non_text_state_update_count: usize,
118 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
119 remote_selections: TreeMap<ReplicaId, SelectionSet>,
120 diagnostics_timestamp: clock::Lamport,
121 completion_triggers: BTreeSet<String>,
122 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
123 completion_triggers_timestamp: clock::Lamport,
124 deferred_ops: OperationQueue<Operation>,
125 capability: Capability,
126 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell's contents are `(self.version, has_changes)` as of the most recent call.
129 has_unsaved_edits: Cell<(clock::Global, bool)>,
130 _subscriptions: Vec<gpui::Subscription>,
131}
132
133#[derive(Copy, Clone, Debug, PartialEq, Eq)]
134pub enum ParseStatus {
135 Idle,
136 Parsing,
137}
138
139struct BufferBranchState {
140 base_buffer: Entity<Buffer>,
141 merged_operations: Vec<Lamport>,
142}
143
144/// An immutable, cheaply cloneable representation of a fixed
145/// state of a buffer.
146pub struct BufferSnapshot {
147 pub text: text::BufferSnapshot,
148 pub(crate) syntax: SyntaxSnapshot,
149 file: Option<Arc<dyn File>>,
150 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
151 remote_selections: TreeMap<ReplicaId, SelectionSet>,
152 language: Option<Arc<Language>>,
153 non_text_state_update_count: usize,
154}
155
156/// The kind and amount of indentation in a particular line. For now,
157/// assumes that indentation is all the same character.
158#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
159pub struct IndentSize {
160 /// The number of bytes that comprise the indentation.
161 pub len: u32,
162 /// The kind of whitespace used for indentation.
163 pub kind: IndentKind,
164}
165
166/// A whitespace character that's used for indentation.
167#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
168pub enum IndentKind {
169 /// An ASCII space character.
170 #[default]
171 Space,
172 /// An ASCII tab character.
173 Tab,
174}
175
176/// The shape of a selection cursor.
177#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
178#[serde(rename_all = "snake_case")]
179pub enum CursorShape {
180 /// A vertical bar
181 #[default]
182 Bar,
183 /// A block that surrounds the following character
184 Block,
185 /// An underline that runs along the following character
186 Underline,
187 /// A box drawn around the following character
188 Hollow,
189}
190
191#[derive(Clone, Debug)]
192struct SelectionSet {
193 line_mode: bool,
194 cursor_shape: CursorShape,
195 selections: Arc<[Selection<Anchor>]>,
196 lamport_timestamp: clock::Lamport,
197}
198
199/// A diagnostic associated with a certain range of a buffer.
200#[derive(Clone, Debug, PartialEq, Eq)]
201pub struct Diagnostic {
202 /// The name of the service that produced this diagnostic.
203 pub source: Option<String>,
204 /// A machine-readable code that identifies this diagnostic.
205 pub code: Option<String>,
206 /// Whether this diagnostic is a hint, warning, or error.
207 pub severity: DiagnosticSeverity,
208 /// The human-readable message associated with this diagnostic.
209 pub message: String,
210 /// An id that identifies the group to which this diagnostic belongs.
211 ///
212 /// When a language server produces a diagnostic with
213 /// one or more associated diagnostics, those diagnostics are all
214 /// assigned a single group ID.
215 pub group_id: usize,
216 /// Whether this diagnostic is the primary diagnostic for its group.
217 ///
218 /// In a given group, the primary diagnostic is the top-level diagnostic
219 /// returned by the language server. The non-primary diagnostics are the
220 /// associated diagnostics.
221 pub is_primary: bool,
222 /// Whether this diagnostic is considered to originate from an analysis of
223 /// files on disk, as opposed to any unsaved buffer contents. This is a
224 /// property of a given diagnostic source, and is configured for a given
225 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
226 /// for the language server.
227 pub is_disk_based: bool,
228 /// Whether this diagnostic marks unnecessary code.
229 pub is_unnecessary: bool,
    /// Data from the language server that produced this diagnostic, passed back to
    /// the server when code actions are requested for this diagnostic.
231 pub data: Option<Value>,
232}
233
234/// TODO - move this into the `project` crate and make it private.
235pub async fn prepare_completion_documentation(
236 documentation: &lsp::Documentation,
237 language_registry: &Arc<LanguageRegistry>,
238 language: Option<Arc<Language>>,
239) -> CompletionDocumentation {
240 match documentation {
241 lsp::Documentation::String(text) => {
242 if text.lines().count() <= 1 {
243 CompletionDocumentation::SingleLine(text.clone())
244 } else {
245 CompletionDocumentation::MultiLinePlainText(text.clone())
246 }
247 }
248
249 lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
250 lsp::MarkupKind::PlainText => {
251 if value.lines().count() <= 1 {
252 CompletionDocumentation::SingleLine(value.clone())
253 } else {
254 CompletionDocumentation::MultiLinePlainText(value.clone())
255 }
256 }
257
258 lsp::MarkupKind::Markdown => {
259 let parsed = parse_markdown(value, Some(language_registry), language).await;
260 CompletionDocumentation::MultiLineMarkdown(parsed)
261 }
262 },
263 }
264}
265
266#[derive(Clone, Debug)]
267pub enum CompletionDocumentation {
268 /// There is no documentation for this completion.
269 Undocumented,
270 /// A single line of documentation.
271 SingleLine(String),
272 /// Multiple lines of plain text documentation.
273 MultiLinePlainText(String),
274 /// Markdown documentation.
275 MultiLineMarkdown(ParsedMarkdown),
276}
277
278/// An operation used to synchronize this buffer with its other replicas.
279#[derive(Clone, Debug, PartialEq)]
280pub enum Operation {
281 /// A text operation.
282 Buffer(text::Operation),
283
284 /// An update to the buffer's diagnostics.
285 UpdateDiagnostics {
286 /// The id of the language server that produced the new diagnostics.
287 server_id: LanguageServerId,
288 /// The diagnostics.
289 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
290 /// The buffer's lamport timestamp.
291 lamport_timestamp: clock::Lamport,
292 },
293
294 /// An update to the most recent selections in this buffer.
295 UpdateSelections {
296 /// The selections.
297 selections: Arc<[Selection<Anchor>]>,
298 /// The buffer's lamport timestamp.
299 lamport_timestamp: clock::Lamport,
300 /// Whether the selections are in 'line mode'.
301 line_mode: bool,
302 /// The [`CursorShape`] associated with these selections.
303 cursor_shape: CursorShape,
304 },
305
306 /// An update to the characters that should trigger autocompletion
307 /// for this buffer.
308 UpdateCompletionTriggers {
309 /// The characters that trigger autocompletion.
310 triggers: Vec<String>,
311 /// The buffer's lamport timestamp.
312 lamport_timestamp: clock::Lamport,
313 /// The language server ID.
314 server_id: LanguageServerId,
315 },
316}
317
318/// An event that occurs in a buffer.
319#[derive(Clone, Debug, PartialEq)]
320pub enum BufferEvent {
321 /// The buffer was changed in a way that must be
322 /// propagated to its other replicas.
323 Operation {
324 operation: Operation,
325 is_local: bool,
326 },
327 /// The buffer was edited.
328 Edited,
329 /// The buffer's `dirty` bit changed.
330 DirtyChanged,
331 /// The buffer was saved.
332 Saved,
333 /// The buffer's file was changed on disk.
334 FileHandleChanged,
335 /// The buffer was reloaded.
336 Reloaded,
    /// The buffer needs to be reloaded.
338 ReloadNeeded,
339 /// The buffer's language was changed.
340 LanguageChanged,
341 /// The buffer's syntax trees were updated.
342 Reparsed,
343 /// The buffer's diagnostics were updated.
344 DiagnosticsUpdated,
345 /// The buffer gained or lost editing capabilities.
346 CapabilityChanged,
347 /// The buffer was explicitly requested to close.
348 Closed,
349 /// The buffer was discarded when closing.
350 Discarded,
351}
352
353/// The file associated with a buffer.
354pub trait File: Send + Sync {
355 /// Returns the [`LocalFile`] associated with this file, if the
356 /// file is local.
357 fn as_local(&self) -> Option<&dyn LocalFile>;
358
359 /// Returns whether this file is local.
360 fn is_local(&self) -> bool {
361 self.as_local().is_some()
362 }
363
364 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
365 /// only available in some states, such as modification time.
366 fn disk_state(&self) -> DiskState;
367
368 /// Returns the path of this file relative to the worktree's root directory.
369 fn path(&self) -> &Arc<Path>;
370
371 /// Returns the path of this file relative to the worktree's parent directory (this means it
372 /// includes the name of the worktree's root folder).
373 fn full_path(&self, cx: &App) -> PathBuf;
374
375 /// Returns the last component of this handle's absolute path. If this handle refers to the root
376 /// of its worktree, then this method will return the name of the worktree itself.
377 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
378
379 /// Returns the id of the worktree to which this file belongs.
380 ///
381 /// This is needed for looking up project-specific settings.
382 fn worktree_id(&self, cx: &App) -> WorktreeId;
383
384 /// Converts this file into an [`Any`] trait object.
385 fn as_any(&self) -> &dyn Any;
386
387 /// Converts this file into a protobuf message.
388 fn to_proto(&self, cx: &App) -> rpc::proto::File;
389
    /// Returns whether Zed considers this to be a private file.
391 fn is_private(&self) -> bool;
392}
393
394/// The file's storage status - whether it's stored (`Present`), and if so when it was last
395/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
396/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
397/// indicator for new files.
398#[derive(Copy, Clone, Debug, PartialEq)]
399pub enum DiskState {
400 /// File created in Zed that has not been saved.
401 New,
402 /// File present on the filesystem.
403 Present { mtime: MTime },
404 /// Deleted file that was previously present.
405 Deleted,
406}
407
408impl DiskState {
409 /// Returns the file's last known modification time on disk.
410 pub fn mtime(self) -> Option<MTime> {
411 match self {
412 DiskState::New => None,
413 DiskState::Present { mtime } => Some(mtime),
414 DiskState::Deleted => None,
415 }
416 }
417}
418
419/// The file associated with a buffer, in the case where the file is on the local disk.
420pub trait LocalFile: File {
    /// Returns the absolute path of this file.
422 fn abs_path(&self, cx: &App) -> PathBuf;
423
424 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
425 fn load(&self, cx: &App) -> Task<Result<String>>;
426
427 /// Loads the file's contents from disk.
428 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
429}
430
431/// The auto-indent behavior associated with an editing operation.
432/// For some editing operations, each affected line of text has its
433/// indentation recomputed. For other operations, the entire block
434/// of edited text is adjusted uniformly.
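///
/// A rough usage sketch (not compiled; `buffer`, `cx`, and `copied_text` with its
/// original indent column are assumptions for illustration):
///
/// ```ignore
/// // Recompute the indentation of each inserted line independently.
/// buffer.edit(
///     [(0..0, "fn main() {\n    body();\n}")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
///
/// // Shift a pasted block uniformly, preserving its internal relative indentation.
/// buffer.edit(
///     [(0..0, copied_text)],
///     Some(AutoindentMode::Block { original_indent_columns: vec![4] }),
///     cx,
/// );
/// ```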
435#[derive(Clone, Debug)]
436pub enum AutoindentMode {
437 /// Indent each line of inserted text.
438 EachLine,
439 /// Apply the same indentation adjustment to all of the lines
440 /// in a given insertion.
441 Block {
442 /// The original indentation level of the first line of each
443 /// insertion, if it has been copied.
444 original_indent_columns: Vec<u32>,
445 },
446}
447
448#[derive(Clone)]
449struct AutoindentRequest {
450 before_edit: BufferSnapshot,
451 entries: Vec<AutoindentRequestEntry>,
452 is_block_mode: bool,
453 ignore_empty_lines: bool,
454}
455
456#[derive(Debug, Clone)]
457struct AutoindentRequestEntry {
458 /// A range of the buffer whose indentation should be adjusted.
459 range: Range<Anchor>,
460 /// Whether or not these lines should be considered brand new, for the
461 /// purpose of auto-indent. When text is not new, its indentation will
462 /// only be adjusted if the suggested indentation level has *changed*
463 /// since the edit was made.
464 first_line_is_new: bool,
465 indent_size: IndentSize,
466 original_indent_column: Option<u32>,
467}
468
469#[derive(Debug)]
470struct IndentSuggestion {
471 basis_row: u32,
472 delta: Ordering,
473 within_error: bool,
474}
475
476struct BufferChunkHighlights<'a> {
477 captures: SyntaxMapCaptures<'a>,
478 next_capture: Option<SyntaxMapCapture<'a>>,
479 stack: Vec<(usize, HighlightId)>,
480 highlight_maps: Vec<HighlightMap>,
481}
482
483/// An iterator that yields chunks of a buffer's text, along with their
484/// syntax highlights and diagnostic status.
485pub struct BufferChunks<'a> {
486 buffer_snapshot: Option<&'a BufferSnapshot>,
487 range: Range<usize>,
488 chunks: text::Chunks<'a>,
489 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
490 error_depth: usize,
491 warning_depth: usize,
492 information_depth: usize,
493 hint_depth: usize,
494 unnecessary_depth: usize,
495 highlights: Option<BufferChunkHighlights<'a>>,
496}
497
498/// A chunk of a buffer's text, along with its syntax highlight and
499/// diagnostic status.
500#[derive(Clone, Debug, Default)]
501pub struct Chunk<'a> {
502 /// The text of the chunk.
503 pub text: &'a str,
504 /// The syntax highlighting style of the chunk.
505 pub syntax_highlight_id: Option<HighlightId>,
506 /// The highlight style that has been applied to this chunk in
507 /// the editor.
508 pub highlight_style: Option<HighlightStyle>,
509 /// The severity of diagnostic associated with this chunk, if any.
510 pub diagnostic_severity: Option<DiagnosticSeverity>,
511 /// Whether this chunk of text is marked as unnecessary.
512 pub is_unnecessary: bool,
513 /// Whether this chunk of text was originally a tab character.
514 pub is_tab: bool,
515 /// An optional recipe for how the chunk should be presented.
516 pub renderer: Option<ChunkRenderer>,
517}
518
519/// A recipe for how the chunk should be presented.
520#[derive(Clone)]
521pub struct ChunkRenderer {
    /// Creates a custom element to represent this chunk.
523 pub render: Arc<dyn Send + Sync + Fn(&mut ChunkRendererContext) -> AnyElement>,
524 /// If true, the element is constrained to the shaped width of the text.
525 pub constrain_width: bool,
526}
527
528pub struct ChunkRendererContext<'a, 'b> {
529 pub window: &'a mut Window,
530 pub context: &'b mut App,
531 pub max_width: Pixels,
532}
533
534impl fmt::Debug for ChunkRenderer {
535 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
536 f.debug_struct("ChunkRenderer")
537 .field("constrain_width", &self.constrain_width)
538 .finish()
539 }
540}
541
542impl<'a, 'b> Deref for ChunkRendererContext<'a, 'b> {
543 type Target = App;
544
545 fn deref(&self) -> &Self::Target {
546 self.context
547 }
548}
549
550impl<'a, 'b> DerefMut for ChunkRendererContext<'a, 'b> {
551 fn deref_mut(&mut self) -> &mut Self::Target {
552 self.context
553 }
554}
555
556/// A set of edits to a given version of a buffer, computed asynchronously.
557#[derive(Debug)]
558pub struct Diff {
559 pub(crate) base_version: clock::Global,
560 line_ending: LineEnding,
561 pub edits: Vec<(Range<usize>, Arc<str>)>,
562}
563
564#[derive(Clone, Copy)]
565pub(crate) struct DiagnosticEndpoint {
566 offset: usize,
567 is_start: bool,
568 severity: DiagnosticSeverity,
569 is_unnecessary: bool,
570}
571
572/// A class of characters, used for characterizing a run of text.
573#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
574pub enum CharKind {
575 /// Whitespace.
576 Whitespace,
577 /// Punctuation.
578 Punctuation,
579 /// Word.
580 Word,
581}
582
/// A runnable is a set of data about a buffer region that can be resolved into a task.
584pub struct Runnable {
585 pub tags: SmallVec<[RunnableTag; 1]>,
586 pub language: Arc<Language>,
587 pub buffer: BufferId,
588}
589
590#[derive(Default, Clone, Debug)]
591pub struct HighlightedText {
592 pub text: SharedString,
593 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
594}
595
596#[derive(Default, Debug)]
597struct HighlightedTextBuilder {
598 pub text: String,
599 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
600}
601
602impl HighlightedText {
603 pub fn from_buffer_range<T: ToOffset>(
604 range: Range<T>,
605 snapshot: &text::BufferSnapshot,
606 syntax_snapshot: &SyntaxSnapshot,
607 override_style: Option<HighlightStyle>,
608 syntax_theme: &SyntaxTheme,
609 ) -> Self {
610 let mut highlighted_text = HighlightedTextBuilder::default();
611 highlighted_text.add_text_from_buffer_range(
612 range,
613 snapshot,
614 syntax_snapshot,
615 override_style,
616 syntax_theme,
617 );
618 highlighted_text.build()
619 }
620
621 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
622 gpui::StyledText::new(self.text.clone())
623 .with_highlights(default_style, self.highlights.iter().cloned())
624 }
625}
626
627impl HighlightedTextBuilder {
628 pub fn build(self) -> HighlightedText {
629 HighlightedText {
630 text: self.text.into(),
631 highlights: self.highlights,
632 }
633 }
634
635 pub fn add_text_from_buffer_range<T: ToOffset>(
636 &mut self,
637 range: Range<T>,
638 snapshot: &text::BufferSnapshot,
639 syntax_snapshot: &SyntaxSnapshot,
640 override_style: Option<HighlightStyle>,
641 syntax_theme: &SyntaxTheme,
642 ) {
643 let range = range.to_offset(snapshot);
644 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
645 let start = self.text.len();
646 self.text.push_str(chunk.text);
647 let end = self.text.len();
648
649 if let Some(mut highlight_style) = chunk
650 .syntax_highlight_id
651 .and_then(|id| id.style(syntax_theme))
652 {
653 if let Some(override_style) = override_style {
654 highlight_style.highlight(override_style);
655 }
656 self.highlights.push((start..end, highlight_style));
657 } else if let Some(override_style) = override_style {
658 self.highlights.push((start..end, override_style));
659 }
660 }
661 }
662
663 fn highlighted_chunks<'a>(
664 range: Range<usize>,
665 snapshot: &'a text::BufferSnapshot,
666 syntax_snapshot: &'a SyntaxSnapshot,
667 ) -> BufferChunks<'a> {
668 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
669 grammar.highlights_query.as_ref()
670 });
671
672 let highlight_maps = captures
673 .grammars()
674 .iter()
675 .map(|grammar| grammar.highlight_map())
676 .collect();
677
678 BufferChunks::new(
679 snapshot.as_rope(),
680 range,
681 Some((captures, highlight_maps)),
682 false,
683 None,
684 )
685 }
686}
687
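/// A preview of how a set of proposed edits would look once applied, holding the
/// pre-edit and post-edit text snapshots along with a reparsed syntax snapshot
/// used for highlighting.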
688#[derive(Clone)]
689pub struct EditPreview {
690 old_snapshot: text::BufferSnapshot,
691 applied_edits_snapshot: text::BufferSnapshot,
692 syntax_snapshot: SyntaxSnapshot,
693}
694
695impl EditPreview {
696 pub fn highlight_edits(
697 &self,
698 current_snapshot: &BufferSnapshot,
699 edits: &[(Range<Anchor>, String)],
700 include_deletions: bool,
701 cx: &App,
702 ) -> HighlightedText {
703 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
704 return HighlightedText::default();
705 };
706
707 let mut highlighted_text = HighlightedTextBuilder::default();
708
709 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
710
711 let insertion_highlight_style = HighlightStyle {
712 background_color: Some(cx.theme().status().created_background),
713 ..Default::default()
714 };
715 let deletion_highlight_style = HighlightStyle {
716 background_color: Some(cx.theme().status().deleted_background),
717 ..Default::default()
718 };
719 let syntax_theme = cx.theme().syntax();
720
721 for (range, edit_text) in edits {
722 let edit_new_end_in_preview_snapshot = range
723 .end
724 .bias_right(&self.old_snapshot)
725 .to_offset(&self.applied_edits_snapshot);
726 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
727
728 let unchanged_range_in_preview_snapshot =
729 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
730 if !unchanged_range_in_preview_snapshot.is_empty() {
731 highlighted_text.add_text_from_buffer_range(
732 unchanged_range_in_preview_snapshot,
733 &self.applied_edits_snapshot,
734 &self.syntax_snapshot,
735 None,
736 &syntax_theme,
737 );
738 }
739
740 let range_in_current_snapshot = range.to_offset(current_snapshot);
741 if include_deletions && !range_in_current_snapshot.is_empty() {
742 highlighted_text.add_text_from_buffer_range(
743 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
746 Some(deletion_highlight_style),
747 &syntax_theme,
748 );
749 }
750
751 if !edit_text.is_empty() {
752 highlighted_text.add_text_from_buffer_range(
753 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
754 &self.applied_edits_snapshot,
755 &self.syntax_snapshot,
756 Some(insertion_highlight_style),
757 &syntax_theme,
758 );
759 }
760
761 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
762 }
763
764 highlighted_text.add_text_from_buffer_range(
765 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
766 &self.applied_edits_snapshot,
767 &self.syntax_snapshot,
768 None,
769 &syntax_theme,
770 );
771
772 highlighted_text.build()
773 }
774
775 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
776 let (first, _) = edits.first()?;
777 let (last, _) = edits.last()?;
778
779 let start = first
780 .start
781 .bias_left(&self.old_snapshot)
782 .to_point(&self.applied_edits_snapshot);
783 let end = last
784 .end
785 .bias_right(&self.old_snapshot)
786 .to_point(&self.applied_edits_snapshot);
787
788 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
789 let range = Point::new(start.row, 0)
790 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
791
792 Some(range.to_offset(&self.applied_edits_snapshot))
793 }
794}
795
796impl Buffer {
797 /// Create a new buffer with the given base text.
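    ///
    /// A minimal sketch (not compiled; assumes a gpui context `cx` that can create entities):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("Hello, world!", cx));
    /// ```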
798 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
799 Self::build(
800 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
801 None,
802 Capability::ReadWrite,
803 )
804 }
805
806 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
807 pub fn local_normalized(
808 base_text_normalized: Rope,
809 line_ending: LineEnding,
810 cx: &Context<Self>,
811 ) -> Self {
812 Self::build(
813 TextBuffer::new_normalized(
814 0,
815 cx.entity_id().as_non_zero_u64().into(),
816 line_ending,
817 base_text_normalized,
818 ),
819 None,
820 Capability::ReadWrite,
821 )
822 }
823
824 /// Create a new buffer that is a replica of a remote buffer.
825 pub fn remote(
826 remote_id: BufferId,
827 replica_id: ReplicaId,
828 capability: Capability,
829 base_text: impl Into<String>,
830 ) -> Self {
831 Self::build(
832 TextBuffer::new(replica_id, remote_id, base_text.into()),
833 None,
834 capability,
835 )
836 }
837
838 /// Create a new buffer that is a replica of a remote buffer, populating its
839 /// state from the given protobuf message.
840 pub fn from_proto(
841 replica_id: ReplicaId,
842 capability: Capability,
843 message: proto::BufferState,
844 file: Option<Arc<dyn File>>,
845 ) -> Result<Self> {
846 let buffer_id = BufferId::new(message.id)
847 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
848 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
849 let mut this = Self::build(buffer, file, capability);
850 this.text.set_line_ending(proto::deserialize_line_ending(
851 rpc::proto::LineEnding::from_i32(message.line_ending)
852 .ok_or_else(|| anyhow!("missing line_ending"))?,
853 ));
854 this.saved_version = proto::deserialize_version(&message.saved_version);
855 this.saved_mtime = message.saved_mtime.map(|time| time.into());
856 Ok(this)
857 }
858
859 /// Serialize the buffer's state to a protobuf message.
860 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
861 proto::BufferState {
862 id: self.remote_id().into(),
863 file: self.file.as_ref().map(|f| f.to_proto(cx)),
864 base_text: self.base_text().to_string(),
865 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
866 saved_version: proto::serialize_version(&self.saved_version),
867 saved_mtime: self.saved_mtime.map(|time| time.into()),
868 }
869 }
870
871 /// Serialize as protobufs all of the changes to the buffer since the given version.
872 pub fn serialize_ops(
873 &self,
874 since: Option<clock::Global>,
875 cx: &App,
876 ) -> Task<Vec<proto::Operation>> {
877 let mut operations = Vec::new();
878 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
879
880 operations.extend(self.remote_selections.iter().map(|(_, set)| {
881 proto::serialize_operation(&Operation::UpdateSelections {
882 selections: set.selections.clone(),
883 lamport_timestamp: set.lamport_timestamp,
884 line_mode: set.line_mode,
885 cursor_shape: set.cursor_shape,
886 })
887 }));
888
889 for (server_id, diagnostics) in &self.diagnostics {
890 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
891 lamport_timestamp: self.diagnostics_timestamp,
892 server_id: *server_id,
893 diagnostics: diagnostics.iter().cloned().collect(),
894 }));
895 }
896
897 for (server_id, completions) in &self.completion_triggers_per_language_server {
898 operations.push(proto::serialize_operation(
899 &Operation::UpdateCompletionTriggers {
900 triggers: completions.iter().cloned().collect(),
901 lamport_timestamp: self.completion_triggers_timestamp,
902 server_id: *server_id,
903 },
904 ));
905 }
906
907 let text_operations = self.text.operations().clone();
908 cx.background_executor().spawn(async move {
909 let since = since.unwrap_or_default();
910 operations.extend(
911 text_operations
912 .iter()
913 .filter(|(_, op)| !since.observed(op.timestamp()))
914 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
915 );
916 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
917 operations
918 })
919 }
920
921 /// Assign a language to the buffer, returning the buffer.
922 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
923 self.set_language(Some(language), cx);
924 self
925 }
926
927 /// Returns the [`Capability`] of this buffer.
928 pub fn capability(&self) -> Capability {
929 self.capability
930 }
931
932 /// Whether this buffer can only be read.
933 pub fn read_only(&self) -> bool {
934 self.capability == Capability::ReadOnly
935 }
936
937 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
938 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
939 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
940 let snapshot = buffer.snapshot();
941 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
942 Self {
943 saved_mtime,
944 saved_version: buffer.version(),
945 preview_version: buffer.version(),
946 reload_task: None,
947 transaction_depth: 0,
948 was_dirty_before_starting_transaction: None,
949 has_unsaved_edits: Cell::new((buffer.version(), false)),
950 text: buffer,
951 branch_state: None,
952 file,
953 capability,
954 syntax_map,
955 parsing_in_background: false,
956 non_text_state_update_count: 0,
957 sync_parse_timeout: Duration::from_millis(1),
958 parse_status: async_watch::channel(ParseStatus::Idle),
959 autoindent_requests: Default::default(),
960 pending_autoindent: Default::default(),
961 language: None,
962 remote_selections: Default::default(),
963 diagnostics: Default::default(),
964 diagnostics_timestamp: Default::default(),
965 completion_triggers: Default::default(),
966 completion_triggers_per_language_server: Default::default(),
967 completion_triggers_timestamp: Default::default(),
968 deferred_ops: OperationQueue::new(),
969 has_conflict: false,
970 _subscriptions: Vec::new(),
971 }
972 }
973
974 pub fn build_snapshot(
975 text: Rope,
976 language: Option<Arc<Language>>,
977 language_registry: Option<Arc<LanguageRegistry>>,
978 cx: &mut App,
979 ) -> impl Future<Output = BufferSnapshot> {
980 let entity_id = cx.reserve_entity::<Self>().entity_id();
981 let buffer_id = entity_id.as_non_zero_u64().into();
982 async move {
983 let text =
984 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
985 let mut syntax = SyntaxMap::new(&text).snapshot();
986 if let Some(language) = language.clone() {
987 let text = text.clone();
988 let language = language.clone();
989 let language_registry = language_registry.clone();
990 syntax.reparse(&text, language_registry, language);
991 }
992 BufferSnapshot {
993 text,
994 syntax,
995 file: None,
996 diagnostics: Default::default(),
997 remote_selections: Default::default(),
998 language,
999 non_text_state_update_count: 0,
1000 }
1001 }
1002 }
1003
1004 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1005 let entity_id = cx.reserve_entity::<Self>().entity_id();
1006 let buffer_id = entity_id.as_non_zero_u64().into();
1007 let text =
1008 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
1009 let syntax = SyntaxMap::new(&text).snapshot();
1010 BufferSnapshot {
1011 text,
1012 syntax,
1013 file: None,
1014 diagnostics: Default::default(),
1015 remote_selections: Default::default(),
1016 language: None,
1017 non_text_state_update_count: 0,
1018 }
1019 }
1020
1021 #[cfg(any(test, feature = "test-support"))]
1022 pub fn build_snapshot_sync(
1023 text: Rope,
1024 language: Option<Arc<Language>>,
1025 language_registry: Option<Arc<LanguageRegistry>>,
1026 cx: &mut App,
1027 ) -> BufferSnapshot {
1028 let entity_id = cx.reserve_entity::<Self>().entity_id();
1029 let buffer_id = entity_id.as_non_zero_u64().into();
1030 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1031 let mut syntax = SyntaxMap::new(&text).snapshot();
1032 if let Some(language) = language.clone() {
1033 let text = text.clone();
1034 let language = language.clone();
1035 let language_registry = language_registry.clone();
1036 syntax.reparse(&text, language_registry, language);
1037 }
1038 BufferSnapshot {
1039 text,
1040 syntax,
1041 file: None,
1042 diagnostics: Default::default(),
1043 remote_selections: Default::default(),
1044 language,
1045 non_text_state_update_count: 0,
1046 }
1047 }
1048
1049 /// Retrieve a snapshot of the buffer's current state. This is computationally
1050 /// cheap, and allows reading from the buffer on a background thread.
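    ///
    /// An illustrative sketch (not compiled; assumes an `Entity<Buffer>` named `buffer`
    /// and an app context `cx`):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_executor()
    ///     .spawn(async move {
    ///         // The snapshot is immutable, so it can be inspected off the main thread.
    ///         let _len_of_first_line = snapshot.line_len(0);
    ///     })
    ///     .detach();
    /// ```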
1051 pub fn snapshot(&self) -> BufferSnapshot {
1052 let text = self.text.snapshot();
1053 let mut syntax_map = self.syntax_map.lock();
1054 syntax_map.interpolate(&text);
1055 let syntax = syntax_map.snapshot();
1056
1057 BufferSnapshot {
1058 text,
1059 syntax,
1060 file: self.file.clone(),
1061 remote_selections: self.remote_selections.clone(),
1062 diagnostics: self.diagnostics.clone(),
1063 language: self.language.clone(),
1064 non_text_state_update_count: self.non_text_state_update_count,
1065 }
1066 }
1067
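    /// Creates a branch of this buffer: a new buffer that starts from this buffer's
    /// current state and keeps a handle to it as its base. Edits made in the branch
    /// can later be applied back to the base via [`Buffer::merge_into_base`].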
1068 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1069 let this = cx.entity();
1070 cx.new(|cx| {
1071 let mut branch = Self {
1072 branch_state: Some(BufferBranchState {
1073 base_buffer: this.clone(),
1074 merged_operations: Default::default(),
1075 }),
1076 language: self.language.clone(),
1077 has_conflict: self.has_conflict,
1078 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1079 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1080 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1081 };
1082 if let Some(language_registry) = self.language_registry() {
1083 branch.set_language_registry(language_registry);
1084 }
1085
1086 // Reparse the branch buffer so that we get syntax highlighting immediately.
1087 branch.reparse(cx);
1088
1089 branch
1090 })
1091 }
1092
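    /// Spawns a background task that applies the given edits to a branch of this
    /// buffer's text and reparses the result, producing an [`EditPreview`] that can
    /// be used to render a highlighted preview of the proposed changes.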
1093 pub fn preview_edits(
1094 &self,
1095 edits: Arc<[(Range<Anchor>, String)]>,
1096 cx: &App,
1097 ) -> Task<EditPreview> {
1098 let registry = self.language_registry();
1099 let language = self.language().cloned();
1100 let old_snapshot = self.text.snapshot();
1101 let mut branch_buffer = self.text.branch();
1102 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1103 cx.background_executor().spawn(async move {
1104 if !edits.is_empty() {
1105 branch_buffer.edit(edits.iter().cloned());
1106 let snapshot = branch_buffer.snapshot();
1107 syntax_snapshot.interpolate(&snapshot);
1108
1109 if let Some(language) = language {
1110 syntax_snapshot.reparse(&snapshot, registry, language);
1111 }
1112 }
1113 EditPreview {
1114 old_snapshot,
1115 applied_edits_snapshot: branch_buffer.snapshot(),
1116 syntax_snapshot,
1117 }
1118 })
1119 }
1120
1121 /// Applies all of the changes in this buffer that intersect any of the
1122 /// given `ranges` to its base buffer.
1123 ///
1124 /// If `ranges` is empty, then all changes will be applied. This buffer must
1125 /// be a branch buffer to call this method.
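    ///
    /// A rough usage sketch (not compiled; assumes `branch` was created with
    /// [`Buffer::branch`] and `cx` is an app context):
    ///
    /// ```ignore
    /// // Apply every change made in the branch back onto its base buffer.
    /// branch.update(cx, |branch, cx| branch.merge_into_base(Vec::new(), cx));
    /// ```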
1126 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1127 let Some(base_buffer) = self.base_buffer() else {
1128 debug_panic!("not a branch buffer");
1129 return;
1130 };
1131
1132 let mut ranges = if ranges.is_empty() {
1133 &[0..usize::MAX]
1134 } else {
1135 ranges.as_slice()
1136 }
1137 .into_iter()
1138 .peekable();
1139
1140 let mut edits = Vec::new();
1141 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1142 let mut is_included = false;
1143 while let Some(range) = ranges.peek() {
1144 if range.end < edit.new.start {
1145 ranges.next().unwrap();
1146 } else {
1147 if range.start <= edit.new.end {
1148 is_included = true;
1149 }
1150 break;
1151 }
1152 }
1153
1154 if is_included {
1155 edits.push((
1156 edit.old.clone(),
1157 self.text_for_range(edit.new.clone()).collect::<String>(),
1158 ));
1159 }
1160 }
1161
1162 let operation = base_buffer.update(cx, |base_buffer, cx| {
1163 // cx.emit(BufferEvent::DiffBaseChanged);
1164 base_buffer.edit(edits, None, cx)
1165 });
1166
1167 if let Some(operation) = operation {
1168 if let Some(BufferBranchState {
1169 merged_operations, ..
1170 }) = &mut self.branch_state
1171 {
1172 merged_operations.push(operation);
1173 }
1174 }
1175 }
1176
1177 fn on_base_buffer_event(
1178 &mut self,
1179 _: Entity<Buffer>,
1180 event: &BufferEvent,
1181 cx: &mut Context<Self>,
1182 ) {
1183 let BufferEvent::Operation { operation, .. } = event else {
1184 return;
1185 };
1186 let Some(BufferBranchState {
1187 merged_operations, ..
1188 }) = &mut self.branch_state
1189 else {
1190 return;
1191 };
1192
1193 let mut operation_to_undo = None;
1194 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1195 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1196 merged_operations.remove(ix);
1197 operation_to_undo = Some(operation.timestamp);
1198 }
1199 }
1200
1201 self.apply_ops([operation.clone()], cx);
1202
1203 if let Some(timestamp) = operation_to_undo {
1204 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1205 self.undo_operations(counts, cx);
1206 }
1207 }
1208
1209 #[cfg(test)]
1210 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1211 &self.text
1212 }
1213
1214 /// Retrieve a snapshot of the buffer's raw text, without any
1215 /// language-related state like the syntax tree or diagnostics.
1216 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1217 self.text.snapshot()
1218 }
1219
1220 /// The file associated with the buffer, if any.
1221 pub fn file(&self) -> Option<&Arc<dyn File>> {
1222 self.file.as_ref()
1223 }
1224
1225 /// The version of the buffer that was last saved or reloaded from disk.
1226 pub fn saved_version(&self) -> &clock::Global {
1227 &self.saved_version
1228 }
1229
1230 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1231 pub fn saved_mtime(&self) -> Option<MTime> {
1232 self.saved_mtime
1233 }
1234
1235 /// Assign a language to the buffer.
1236 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1237 self.non_text_state_update_count += 1;
1238 self.syntax_map.lock().clear(&self.text);
1239 self.language = language;
1240 self.reparse(cx);
1241 cx.emit(BufferEvent::LanguageChanged);
1242 }
1243
1244 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1245 /// other languages if parts of the buffer are written in different languages.
1246 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1247 self.syntax_map
1248 .lock()
1249 .set_language_registry(language_registry);
1250 }
1251
1252 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1253 self.syntax_map.lock().language_registry()
1254 }
1255
1256 /// Assign the buffer a new [`Capability`].
1257 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1258 self.capability = capability;
1259 cx.emit(BufferEvent::CapabilityChanged)
1260 }
1261
1262 /// This method is called to signal that the buffer has been saved.
1263 pub fn did_save(
1264 &mut self,
1265 version: clock::Global,
1266 mtime: Option<MTime>,
1267 cx: &mut Context<Self>,
1268 ) {
1269 self.saved_version = version;
1270 self.has_unsaved_edits
1271 .set((self.saved_version().clone(), false));
1272 self.has_conflict = false;
1273 self.saved_mtime = mtime;
1274 cx.emit(BufferEvent::Saved);
1275 cx.notify();
1276 }
1277
1278 /// This method is called to signal that the buffer has been discarded.
1279 pub fn discarded(&self, cx: &mut Context<Self>) {
1280 cx.emit(BufferEvent::Discarded);
1281 cx.notify();
1282 }
1283
1284 /// Reloads the contents of the buffer from disk.
1285 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1286 let (tx, rx) = futures::channel::oneshot::channel();
1287 let prev_version = self.text.version();
1288 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
1289 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
1290 let file = this.file.as_ref()?.as_local()?;
1291 Some((file.disk_state().mtime(), file.load(cx)))
1292 })?
1293 else {
1294 return Ok(());
1295 };
1296
1297 let new_text = new_text.await?;
1298 let diff = this
1299 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
1300 .await;
1301 this.update(&mut cx, |this, cx| {
1302 if this.version() == diff.base_version {
1303 this.finalize_last_transaction();
1304 this.apply_diff(diff, cx);
1305 tx.send(this.finalize_last_transaction().cloned()).ok();
1306 this.has_conflict = false;
1307 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1308 } else {
1309 if !diff.edits.is_empty()
1310 || this
1311 .edits_since::<usize>(&diff.base_version)
1312 .next()
1313 .is_some()
1314 {
1315 this.has_conflict = true;
1316 }
1317
1318 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1319 }
1320
1321 this.reload_task.take();
1322 })
1323 }));
1324 rx
1325 }
1326
1327 /// This method is called to signal that the buffer has been reloaded.
1328 pub fn did_reload(
1329 &mut self,
1330 version: clock::Global,
1331 line_ending: LineEnding,
1332 mtime: Option<MTime>,
1333 cx: &mut Context<Self>,
1334 ) {
1335 self.saved_version = version;
1336 self.has_unsaved_edits
1337 .set((self.saved_version.clone(), false));
1338 self.text.set_line_ending(line_ending);
1339 self.saved_mtime = mtime;
1340 cx.emit(BufferEvent::Reloaded);
1341 cx.notify();
1342 }
1343
1344 /// Updates the [`File`] backing this buffer. This should be called when
1345 /// the file has changed or has been deleted.
1346 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1347 let was_dirty = self.is_dirty();
1348 let mut file_changed = false;
1349
1350 if let Some(old_file) = self.file.as_ref() {
1351 if new_file.path() != old_file.path() {
1352 file_changed = true;
1353 }
1354
1355 let old_state = old_file.disk_state();
1356 let new_state = new_file.disk_state();
1357 if old_state != new_state {
1358 file_changed = true;
1359 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1360 cx.emit(BufferEvent::ReloadNeeded)
1361 }
1362 }
1363 } else {
1364 file_changed = true;
1365 };
1366
1367 self.file = Some(new_file);
1368 if file_changed {
1369 self.non_text_state_update_count += 1;
1370 if was_dirty != self.is_dirty() {
1371 cx.emit(BufferEvent::DirtyChanged);
1372 }
1373 cx.emit(BufferEvent::FileHandleChanged);
1374 cx.notify();
1375 }
1376 }
1377
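    /// Returns the base buffer of this branch, or `None` if this buffer is not a branch.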
1378 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1379 Some(self.branch_state.as_ref()?.base_buffer.clone())
1380 }
1381
1382 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1383 pub fn language(&self) -> Option<&Arc<Language>> {
1384 self.language.as_ref()
1385 }
1386
1387 /// Returns the [`Language`] at the given location.
1388 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1389 let offset = position.to_offset(self);
1390 self.syntax_map
1391 .lock()
1392 .layers_for_range(offset..offset, &self.text, false)
1393 .last()
1394 .map(|info| info.language.clone())
1395 .or_else(|| self.language.clone())
1396 }
1397
1398 /// An integer version number that accounts for all updates besides
1399 /// the buffer's text itself (which is versioned via a version vector).
1400 pub fn non_text_state_update_count(&self) -> usize {
1401 self.non_text_state_update_count
1402 }
1403
1404 /// Whether the buffer is being parsed in the background.
1405 #[cfg(any(test, feature = "test-support"))]
1406 pub fn is_parsing(&self) -> bool {
1407 self.parsing_in_background
1408 }
1409
1410 /// Indicates whether the buffer contains any regions that may be
1411 /// written in a language that hasn't been loaded yet.
1412 pub fn contains_unknown_injections(&self) -> bool {
1413 self.syntax_map.lock().contains_unknown_injections()
1414 }
1415
1416 #[cfg(test)]
1417 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1418 self.sync_parse_timeout = timeout;
1419 }
1420
1421 /// Called after an edit to synchronize the buffer's main parse tree with
1422 /// the buffer's new underlying state.
1423 ///
1424 /// Locks the syntax map and interpolates the edits since the last reparse
1425 /// into the foreground syntax tree.
1426 ///
1427 /// Then takes a stable snapshot of the syntax map before unlocking it.
1428 /// The snapshot with the interpolated edits is sent to a background thread,
1429 /// where we ask Tree-sitter to perform an incremental parse.
1430 ///
1431 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1432 /// waiting on the parse to complete. As soon as it completes, we proceed
1433 /// synchronously, unless a 1ms timeout elapses.
1434 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree
    /// still in the foreground. When the background parse completes, it calls
    /// back into the main thread and assigns the new parse state.
1439 ///
1440 /// If the buffer or grammar changed since the start of the background parse,
1441 /// initiate an additional reparse recursively. To avoid concurrent parses
1442 /// for the same buffer, we only initiate a new parse if we are not already
1443 /// parsing in the background.
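    ///
    /// Reparsing is normally initiated by the buffer itself after an edit or a
    /// language change, so a direct call is rarely needed. A rough sketch of a
    /// change that triggers it (not compiled; `rust_language` is an assumed
    /// `Arc<Language>`):
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.set_language(Some(rust_language), cx); // calls `reparse` internally
    /// });
    /// ```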
1444 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1445 if self.parsing_in_background {
1446 return;
1447 }
1448 let language = if let Some(language) = self.language.clone() {
1449 language
1450 } else {
1451 return;
1452 };
1453
1454 let text = self.text_snapshot();
1455 let parsed_version = self.version();
1456
1457 let mut syntax_map = self.syntax_map.lock();
1458 syntax_map.interpolate(&text);
1459 let language_registry = syntax_map.language_registry();
1460 let mut syntax_snapshot = syntax_map.snapshot();
1461 drop(syntax_map);
1462
1463 let parse_task = cx.background_executor().spawn({
1464 let language = language.clone();
1465 let language_registry = language_registry.clone();
1466 async move {
1467 syntax_snapshot.reparse(&text, language_registry, language);
1468 syntax_snapshot
1469 }
1470 });
1471
1472 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1473 match cx
1474 .background_executor()
1475 .block_with_timeout(self.sync_parse_timeout, parse_task)
1476 {
1477 Ok(new_syntax_snapshot) => {
1478 self.did_finish_parsing(new_syntax_snapshot, cx);
1479 }
1480 Err(parse_task) => {
1481 self.parsing_in_background = true;
1482 cx.spawn(move |this, mut cx| async move {
1483 let new_syntax_map = parse_task.await;
1484 this.update(&mut cx, move |this, cx| {
1485 let grammar_changed =
1486 this.language.as_ref().map_or(true, |current_language| {
1487 !Arc::ptr_eq(&language, current_language)
1488 });
1489 let language_registry_changed = new_syntax_map
1490 .contains_unknown_injections()
1491 && language_registry.map_or(false, |registry| {
1492 registry.version() != new_syntax_map.language_registry_version()
1493 });
1494 let parse_again = language_registry_changed
1495 || grammar_changed
1496 || this.version.changed_since(&parsed_version);
1497 this.did_finish_parsing(new_syntax_map, cx);
1498 this.parsing_in_background = false;
1499 if parse_again {
1500 this.reparse(cx);
1501 }
1502 })
1503 .ok();
1504 })
1505 .detach();
1506 }
1507 }
1508 }
1509
1510 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1511 self.non_text_state_update_count += 1;
1512 self.syntax_map.lock().did_parse(syntax_snapshot);
1513 self.request_autoindent(cx);
1514 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1515 cx.emit(BufferEvent::Reparsed);
1516 cx.notify();
1517 }
1518
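    /// Returns a watch receiver that reports whether the buffer is currently being parsed.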
1519 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1520 self.parse_status.1.clone()
1521 }
1522
1523 /// Assign to the buffer a set of diagnostics created by a given language server.
1524 pub fn update_diagnostics(
1525 &mut self,
1526 server_id: LanguageServerId,
1527 diagnostics: DiagnosticSet,
1528 cx: &mut Context<Self>,
1529 ) {
1530 let lamport_timestamp = self.text.lamport_clock.tick();
1531 let op = Operation::UpdateDiagnostics {
1532 server_id,
1533 diagnostics: diagnostics.iter().cloned().collect(),
1534 lamport_timestamp,
1535 };
1536 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1537 self.send_operation(op, true, cx);
1538 }
1539
1540 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1541 if let Some(indent_sizes) = self.compute_autoindents() {
1542 let indent_sizes = cx.background_executor().spawn(indent_sizes);
1543 match cx
1544 .background_executor()
1545 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1546 {
1547 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1548 Err(indent_sizes) => {
1549 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1550 let indent_sizes = indent_sizes.await;
1551 this.update(&mut cx, |this, cx| {
1552 this.apply_autoindents(indent_sizes, cx);
1553 })
1554 .ok();
1555 }));
1556 }
1557 }
1558 } else {
1559 self.autoindent_requests.clear();
1560 }
1561 }
1562
1563 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
1564 let max_rows_between_yields = 100;
1565 let snapshot = self.snapshot();
1566 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1567 return None;
1568 }
1569
1570 let autoindent_requests = self.autoindent_requests.clone();
1571 Some(async move {
1572 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1573 for request in autoindent_requests {
1574 // Resolve each edited range to its row in the current buffer and in the
1575 // buffer before this batch of edits.
1576 let mut row_ranges = Vec::new();
1577 let mut old_to_new_rows = BTreeMap::new();
1578 let mut language_indent_sizes_by_new_row = Vec::new();
1579 for entry in &request.entries {
1580 let position = entry.range.start;
1581 let new_row = position.to_point(&snapshot).row;
1582 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1583 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1584
1585 if !entry.first_line_is_new {
1586 let old_row = position.to_point(&request.before_edit).row;
1587 old_to_new_rows.insert(old_row, new_row);
1588 }
1589 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1590 }
1591
1592 // Build a map containing the suggested indentation for each of the edited lines
1593 // with respect to the state of the buffer before these edits. This map is keyed
1594 // by the rows for these lines in the current state of the buffer.
1595 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1596 let old_edited_ranges =
1597 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1598 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1599 let mut language_indent_size = IndentSize::default();
1600 for old_edited_range in old_edited_ranges {
1601 let suggestions = request
1602 .before_edit
1603 .suggest_autoindents(old_edited_range.clone())
1604 .into_iter()
1605 .flatten();
1606 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1607 if let Some(suggestion) = suggestion {
1608 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1609
1610 // Find the indent size based on the language for this row.
1611 while let Some((row, size)) = language_indent_sizes.peek() {
1612 if *row > new_row {
1613 break;
1614 }
1615 language_indent_size = *size;
1616 language_indent_sizes.next();
1617 }
1618
1619 let suggested_indent = old_to_new_rows
1620 .get(&suggestion.basis_row)
1621 .and_then(|from_row| {
1622 Some(old_suggestions.get(from_row).copied()?.0)
1623 })
1624 .unwrap_or_else(|| {
1625 request
1626 .before_edit
1627 .indent_size_for_line(suggestion.basis_row)
1628 })
1629 .with_delta(suggestion.delta, language_indent_size);
1630 old_suggestions
1631 .insert(new_row, (suggested_indent, suggestion.within_error));
1632 }
1633 }
1634 yield_now().await;
1635 }
1636
1637 // Compute new suggestions for each line, but only include them in the result
1638 // if they differ from the old suggestion for that line.
1639 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1640 let mut language_indent_size = IndentSize::default();
1641 for (row_range, original_indent_column) in row_ranges {
1642 let new_edited_row_range = if request.is_block_mode {
1643 row_range.start..row_range.start + 1
1644 } else {
1645 row_range.clone()
1646 };
1647
1648 let suggestions = snapshot
1649 .suggest_autoindents(new_edited_row_range.clone())
1650 .into_iter()
1651 .flatten();
1652 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1653 if let Some(suggestion) = suggestion {
1654 // Find the indent size based on the language for this row.
1655 while let Some((row, size)) = language_indent_sizes.peek() {
1656 if *row > new_row {
1657 break;
1658 }
1659 language_indent_size = *size;
1660 language_indent_sizes.next();
1661 }
1662
1663 let suggested_indent = indent_sizes
1664 .get(&suggestion.basis_row)
1665 .copied()
1666 .map(|e| e.0)
1667 .unwrap_or_else(|| {
1668 snapshot.indent_size_for_line(suggestion.basis_row)
1669 })
1670 .with_delta(suggestion.delta, language_indent_size);
1671
1672 if old_suggestions.get(&new_row).map_or(
1673 true,
1674 |(old_indentation, was_within_error)| {
1675 suggested_indent != *old_indentation
1676 && (!suggestion.within_error || *was_within_error)
1677 },
1678 ) {
1679 indent_sizes.insert(
1680 new_row,
1681 (suggested_indent, request.ignore_empty_lines),
1682 );
1683 }
1684 }
1685 }
1686
1687 if let (true, Some(original_indent_column)) =
1688 (request.is_block_mode, original_indent_column)
1689 {
1690 let new_indent =
1691 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1692 *indent
1693 } else {
1694 snapshot.indent_size_for_line(row_range.start)
1695 };
1696 let delta = new_indent.len as i64 - original_indent_column as i64;
1697 if delta != 0 {
1698 for row in row_range.skip(1) {
1699 indent_sizes.entry(row).or_insert_with(|| {
1700 let mut size = snapshot.indent_size_for_line(row);
1701 if size.kind == new_indent.kind {
1702 match delta.cmp(&0) {
1703 Ordering::Greater => size.len += delta as u32,
1704 Ordering::Less => {
1705 size.len = size.len.saturating_sub(-delta as u32)
1706 }
1707 Ordering::Equal => {}
1708 }
1709 }
1710 (size, request.ignore_empty_lines)
1711 });
1712 }
1713 }
1714 }
1715
1716 yield_now().await;
1717 }
1718 }
1719
1720 indent_sizes
1721 .into_iter()
1722 .filter_map(|(row, (indent, ignore_empty_lines))| {
1723 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1724 None
1725 } else {
1726 Some((row, indent))
1727 }
1728 })
1729 .collect()
1730 })
1731 }
1732
1733 fn apply_autoindents(
1734 &mut self,
1735 indent_sizes: BTreeMap<u32, IndentSize>,
1736 cx: &mut Context<Self>,
1737 ) {
1738 self.autoindent_requests.clear();
1739
1740 let edits: Vec<_> = indent_sizes
1741 .into_iter()
1742 .filter_map(|(row, indent_size)| {
1743 let current_size = indent_size_for_line(self, row);
1744 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1745 })
1746 .collect();
1747
1748 let preserve_preview = self.preserve_preview();
1749 self.edit(edits, None, cx);
1750 if preserve_preview {
1751 self.refresh_preview();
1752 }
1753 }
1754
    /// Creates a minimal edit that will cause the given row to be indented
1756 /// with the given size. After applying this edit, the length of the line
1757 /// will always be at least `new_size.len`.
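    ///
    /// A minimal sketch of the expected result (not a compiled doctest; the row
    /// and indent sizes below are hypothetical):
    ///
    /// ```rust,ignore
    /// // Growing a 2-space indent to 4 spaces prepends two spaces at column 0.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
    /// ```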
1758 pub fn edit_for_indent_size_adjustment(
1759 row: u32,
1760 current_size: IndentSize,
1761 new_size: IndentSize,
1762 ) -> Option<(Range<Point>, String)> {
1763 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1765 Ordering::Greater => {
1766 let point = Point::new(row, 0);
1767 Some((
1768 point..point,
1769 iter::repeat(new_size.char())
1770 .take((new_size.len - current_size.len) as usize)
1771 .collect::<String>(),
1772 ))
1773 }
1774
1775 Ordering::Less => Some((
1776 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1777 String::new(),
1778 )),
1779
1780 Ordering::Equal => None,
1781 }
1782 } else {
1783 Some((
1784 Point::new(row, 0)..Point::new(row, current_size.len),
1785 iter::repeat(new_size.char())
1786 .take(new_size.len as usize)
1787 .collect::<String>(),
1788 ))
1789 }
1790 }
1791
1792 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1793 /// and the given new text.
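    ///
    /// A minimal usage sketch (not a compiled doctest; assumes `buffer` is an
    /// `Entity<Buffer>`, `new_text` is a `String`, and the surrounding code is async):
    ///
    /// ```rust,ignore
    /// let task = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx));
    /// let diff = task.await;
    /// buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx));
    /// ```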
1794 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1795 let old_text = self.as_rope().clone();
1796 let base_version = self.version();
1797 cx.background_executor()
1798 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1799 let old_text = old_text.to_string();
1800 let line_ending = LineEnding::detect(&new_text);
1801 LineEnding::normalize(&mut new_text);
1802
1803 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1804 let empty: Arc<str> = Arc::default();
1805
1806 let mut edits = Vec::new();
1807 let mut old_offset = 0;
1808 let mut new_offset = 0;
1809 let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
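                // Walk the character diff, coalescing adjacent deletions and insertions
                // into a single (old_range, new_text) edit; the trailing `None` flushes
                // the final pending edit.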
1810 for change in diff.iter_all_changes().map(Some).chain([None]) {
1811 if let Some(change) = &change {
1812 let len = change.value().len();
1813 match change.tag() {
1814 ChangeTag::Equal => {
1815 old_offset += len;
1816 new_offset += len;
1817 }
1818 ChangeTag::Delete => {
1819 let old_end_offset = old_offset + len;
1820 if let Some((last_old_range, _)) = &mut last_edit {
1821 last_old_range.end = old_end_offset;
1822 } else {
1823 last_edit =
1824 Some((old_offset..old_end_offset, new_offset..new_offset));
1825 }
1826 old_offset = old_end_offset;
1827 }
1828 ChangeTag::Insert => {
1829 let new_end_offset = new_offset + len;
1830 if let Some((_, last_new_range)) = &mut last_edit {
1831 last_new_range.end = new_end_offset;
1832 } else {
1833 last_edit =
1834 Some((old_offset..old_offset, new_offset..new_end_offset));
1835 }
1836 new_offset = new_end_offset;
1837 }
1838 }
1839 }
1840
1841 if let Some((old_range, new_range)) = &last_edit {
1842 if old_offset > old_range.end
1843 || new_offset > new_range.end
1844 || change.is_none()
1845 {
1846 let text = if new_range.is_empty() {
1847 empty.clone()
1848 } else {
1849 new_text[new_range.clone()].into()
1850 };
1851 edits.push((old_range.clone(), text));
1852 last_edit.take();
1853 }
1854 }
1855 }
1856
1857 Diff {
1858 base_version,
1859 line_ending,
1860 edits,
1861 }
1862 })
1863 }
1864
1865 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1867 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1868 let old_text = self.as_rope().clone();
1869 let line_ending = self.line_ending();
1870 let base_version = self.version();
1871 cx.background_executor().spawn(async move {
1872 let ranges = trailing_whitespace_ranges(&old_text);
1873 let empty = Arc::<str>::from("");
1874 Diff {
1875 base_version,
1876 line_ending,
1877 edits: ranges
1878 .into_iter()
1879 .map(|range| (range, empty.clone()))
1880 .collect(),
1881 }
1882 })
1883 }
1884
1885 /// Ensures that the buffer ends with a single newline character, and
1886 /// no other whitespace.
1887 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1888 let len = self.len();
1889 let mut offset = len;
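        // Scan backwards to find where the trailing whitespace begins. If the buffer
        // already ends with content followed by exactly one newline, there is nothing to do.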
1890 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1891 let non_whitespace_len = chunk
1892 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1893 .len();
1894 offset -= chunk.len();
1895 offset += non_whitespace_len;
1896 if non_whitespace_len != 0 {
1897 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1898 return;
1899 }
1900 break;
1901 }
1902 }
1903 self.edit([(offset..len, "\n")], None, cx);
1904 }
1905
1906 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1907 /// calculated, then adjust the diff to account for those changes, and discard any
1908 /// parts of the diff that conflict with those changes.
1909 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1910 // Check for any edits to the buffer that have occurred since this diff
1911 // was computed.
1912 let snapshot = self.snapshot();
1913 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1914 let mut delta = 0;
1915 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1916 while let Some(edit_since) = edits_since.peek() {
1917 // If the edit occurs after a diff hunk, then it does not
1918 // affect that hunk.
1919 if edit_since.old.start > range.end {
1920 break;
1921 }
1922 // If the edit precedes the diff hunk, then adjust the hunk
1923 // to reflect the edit.
1924 else if edit_since.old.end < range.start {
1925 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1926 edits_since.next();
1927 }
1928 // If the edit intersects a diff hunk, then discard that hunk.
1929 else {
1930 return None;
1931 }
1932 }
1933
1934 let start = (range.start as i64 + delta) as usize;
1935 let end = (range.end as i64 + delta) as usize;
1936 Some((start..end, new_text))
1937 });
1938
1939 self.start_transaction();
1940 self.text.set_line_ending(diff.line_ending);
1941 self.edit(adjusted_edits, None, cx);
1942 self.end_transaction(cx)
1943 }
1944
1945 fn has_unsaved_edits(&self) -> bool {
1946 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1947
1948 if last_version == self.version {
1949 self.has_unsaved_edits
1950 .set((last_version, has_unsaved_edits));
1951 return has_unsaved_edits;
1952 }
1953
1954 let has_edits = self.has_edits_since(&self.saved_version);
1955 self.has_unsaved_edits
1956 .set((self.version.clone(), has_edits));
1957 has_edits
1958 }
1959
1960 /// Checks if the buffer has unsaved changes.
1961 pub fn is_dirty(&self) -> bool {
1962 if self.capability == Capability::ReadOnly {
1963 return false;
1964 }
1965 if self.has_conflict || self.has_unsaved_edits() {
1966 return true;
1967 }
1968 match self.file.as_ref().map(|f| f.disk_state()) {
1969 Some(DiskState::New) => !self.is_empty(),
1970 Some(DiskState::Deleted) => true,
1971 _ => false,
1972 }
1973 }
1974
1975 /// Checks if the buffer and its file have both changed since the buffer
1976 /// was last saved or reloaded.
1977 pub fn has_conflict(&self) -> bool {
1978 if self.has_conflict {
1979 return true;
1980 }
1981 let Some(file) = self.file.as_ref() else {
1982 return false;
1983 };
1984 match file.disk_state() {
1985 DiskState::New => false,
1986 DiskState::Present { mtime } => match self.saved_mtime {
1987 Some(saved_mtime) => {
1988 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1989 }
1990 None => true,
1991 },
1992 DiskState::Deleted => true,
1993 }
1994 }
1995
1996 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1997 pub fn subscribe(&mut self) -> Subscription {
1998 self.text.subscribe()
1999 }
2000
2001 /// Starts a transaction, if one is not already in-progress. When undoing or
2002 /// redoing edits, all of the edits performed within a transaction are undone
2003 /// or redone together.
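    ///
    /// A minimal grouping sketch (not a compiled doctest; `cx` and the offsets
    /// are hypothetical):
    ///
    /// ```rust,ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(12..12, "}\n")], None, cx);
    /// // Undoing once now reverts both edits together.
    /// buffer.end_transaction(cx);
    /// ```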
2004 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2005 self.start_transaction_at(Instant::now())
2006 }
2007
2008 /// Starts a transaction, providing the current time. Subsequent transactions
2009 /// that occur within a short period of time will be grouped together. This
2010 /// is controlled by the buffer's undo grouping duration.
2011 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2012 self.transaction_depth += 1;
2013 if self.was_dirty_before_starting_transaction.is_none() {
2014 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2015 }
2016 self.text.start_transaction_at(now)
2017 }
2018
2019 /// Terminates the current transaction, if this is the outermost transaction.
2020 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2021 self.end_transaction_at(Instant::now(), cx)
2022 }
2023
2024 /// Terminates the current transaction, providing the current time. Subsequent transactions
2025 /// that occur within a short period of time will be grouped together. This
2026 /// is controlled by the buffer's undo grouping duration.
2027 pub fn end_transaction_at(
2028 &mut self,
2029 now: Instant,
2030 cx: &mut Context<Self>,
2031 ) -> Option<TransactionId> {
2032 assert!(self.transaction_depth > 0);
2033 self.transaction_depth -= 1;
2034 let was_dirty = if self.transaction_depth == 0 {
2035 self.was_dirty_before_starting_transaction.take().unwrap()
2036 } else {
2037 false
2038 };
2039 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2040 self.did_edit(&start_version, was_dirty, cx);
2041 Some(transaction_id)
2042 } else {
2043 None
2044 }
2045 }
2046
2047 /// Manually add a transaction to the buffer's undo history.
2048 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2049 self.text.push_transaction(transaction, now);
2050 }
2051
2052 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2054 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2055 self.text.finalize_last_transaction()
2056 }
2057
2058 /// Manually group all changes since a given transaction.
2059 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2060 self.text.group_until_transaction(transaction_id);
2061 }
2062
    /// Manually remove a transaction from the buffer's undo history.
2064 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
2065 self.text.forget_transaction(transaction_id);
2066 }
2067
2068 /// Manually merge two adjacent transactions in the buffer's undo history.
2069 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2070 self.text.merge_transactions(transaction, destination);
2071 }
2072
2073 /// Waits for the buffer to receive operations with the given timestamps.
2074 pub fn wait_for_edits(
2075 &mut self,
2076 edit_ids: impl IntoIterator<Item = clock::Lamport>,
2077 ) -> impl Future<Output = Result<()>> {
2078 self.text.wait_for_edits(edit_ids)
2079 }
2080
2081 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2082 pub fn wait_for_anchors(
2083 &mut self,
2084 anchors: impl IntoIterator<Item = Anchor>,
2085 ) -> impl 'static + Future<Output = Result<()>> {
2086 self.text.wait_for_anchors(anchors)
2087 }
2088
2089 /// Waits for the buffer to receive operations up to the given version.
2090 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
2091 self.text.wait_for_version(version)
2092 }
2093
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2096 pub fn give_up_waiting(&mut self) {
2097 self.text.give_up_waiting();
2098 }
2099
2100 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2101 pub fn set_active_selections(
2102 &mut self,
2103 selections: Arc<[Selection<Anchor>]>,
2104 line_mode: bool,
2105 cursor_shape: CursorShape,
2106 cx: &mut Context<Self>,
2107 ) {
2108 let lamport_timestamp = self.text.lamport_clock.tick();
2109 self.remote_selections.insert(
2110 self.text.replica_id(),
2111 SelectionSet {
2112 selections: selections.clone(),
2113 lamport_timestamp,
2114 line_mode,
2115 cursor_shape,
2116 },
2117 );
2118 self.send_operation(
2119 Operation::UpdateSelections {
2120 selections,
2121 line_mode,
2122 lamport_timestamp,
2123 cursor_shape,
2124 },
2125 true,
2126 cx,
2127 );
2128 self.non_text_state_update_count += 1;
2129 cx.notify();
2130 }
2131
2132 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2133 /// this replica.
2134 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2135 if self
2136 .remote_selections
2137 .get(&self.text.replica_id())
2138 .map_or(true, |set| !set.selections.is_empty())
2139 {
2140 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2141 }
2142 }
2143
2144 /// Replaces the buffer's entire text.
2145 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2146 where
2147 T: Into<Arc<str>>,
2148 {
2149 self.autoindent_requests.clear();
2150 self.edit([(0..self.len(), text)], None, cx)
2151 }
2152
2153 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2154 /// delete, and a string of text to insert at that location.
2155 ///
2156 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2157 /// request for the edited ranges, which will be processed when the buffer finishes
2158 /// parsing.
2159 ///
2160 /// Parsing takes place at the end of a transaction, and may compute synchronously
2161 /// or asynchronously, depending on the changes.
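    ///
    /// A minimal usage sketch (not a compiled doctest; the ranges and text are
    /// hypothetical):
    ///
    /// ```rust,ignore
    /// // Replace bytes 2..5 with "foo", without requesting auto-indent.
    /// buffer.edit([(2..5, "foo")], None, cx);
    ///
    /// // Insert a new line at the start of row 1 and auto-indent it.
    /// buffer.edit(
    ///     [(Point::new(1, 0)..Point::new(1, 0), "bar();\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```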
2162 pub fn edit<I, S, T>(
2163 &mut self,
2164 edits_iter: I,
2165 autoindent_mode: Option<AutoindentMode>,
2166 cx: &mut Context<Self>,
2167 ) -> Option<clock::Lamport>
2168 where
2169 I: IntoIterator<Item = (Range<S>, T)>,
2170 S: ToOffset,
2171 T: Into<Arc<str>>,
2172 {
2173 // Skip invalid edits and coalesce contiguous ones.
2174 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2175 for (range, new_text) in edits_iter {
2176 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2177 if range.start > range.end {
2178 mem::swap(&mut range.start, &mut range.end);
2179 }
2180 let new_text = new_text.into();
2181 if !new_text.is_empty() || !range.is_empty() {
2182 if let Some((prev_range, prev_text)) = edits.last_mut() {
2183 if prev_range.end >= range.start {
2184 prev_range.end = cmp::max(prev_range.end, range.end);
2185 *prev_text = format!("{prev_text}{new_text}").into();
2186 } else {
2187 edits.push((range, new_text));
2188 }
2189 } else {
2190 edits.push((range, new_text));
2191 }
2192 }
2193 }
2194 if edits.is_empty() {
2195 return None;
2196 }
2197
2198 self.start_transaction();
2199 self.pending_autoindent.take();
2200 let autoindent_request = autoindent_mode
2201 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2202
2203 let edit_operation = self.text.edit(edits.iter().cloned());
2204 let edit_id = edit_operation.timestamp();
2205
2206 if let Some((before_edit, mode)) = autoindent_request {
2207 let mut delta = 0isize;
2208 let entries = edits
2209 .into_iter()
2210 .enumerate()
2211 .zip(&edit_operation.as_edit().unwrap().new_text)
2212 .map(|((ix, (range, _)), new_text)| {
2213 let new_text_length = new_text.len();
2214 let old_start = range.start.to_point(&before_edit);
2215 let new_start = (delta + range.start as isize) as usize;
2216 let range_len = range.end - range.start;
2217 delta += new_text_length as isize - range_len as isize;
2218
2219 // Decide what range of the insertion to auto-indent, and whether
2220 // the first line of the insertion should be considered a newly-inserted line
2221 // or an edit to an existing line.
2222 let mut range_of_insertion_to_indent = 0..new_text_length;
2223 let mut first_line_is_new = true;
2224
2225 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2226 let old_line_end = before_edit.line_len(old_start.row);
2227
2228 if old_start.column > old_line_start {
2229 first_line_is_new = false;
2230 }
2231
2232 if !new_text.contains('\n')
2233 && (old_start.column + (range_len as u32) < old_line_end
2234 || old_line_end == old_line_start)
2235 {
2236 first_line_is_new = false;
2237 }
2238
2239 // When inserting text starting with a newline, avoid auto-indenting the
2240 // previous line.
2241 if new_text.starts_with('\n') {
2242 range_of_insertion_to_indent.start += 1;
2243 first_line_is_new = true;
2244 }
2245
2246 let mut original_indent_column = None;
2247 if let AutoindentMode::Block {
2248 original_indent_columns,
2249 } = &mode
2250 {
2251 original_indent_column =
2252 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
2253 indent_size_for_text(
2254 new_text[range_of_insertion_to_indent.clone()].chars(),
2255 )
2256 .len
2257 }));
2258
2259 // Avoid auto-indenting the line after the edit.
2260 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2261 range_of_insertion_to_indent.end -= 1;
2262 }
2263 }
2264
2265 AutoindentRequestEntry {
2266 first_line_is_new,
2267 original_indent_column,
2268 indent_size: before_edit.language_indent_size_at(range.start, cx),
2269 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2270 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2271 }
2272 })
2273 .collect();
2274
2275 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2276 before_edit,
2277 entries,
2278 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2279 ignore_empty_lines: false,
2280 }));
2281 }
2282
2283 self.end_transaction(cx);
2284 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2285 Some(edit_id)
2286 }
2287
2288 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2289 if self.edits_since::<usize>(old_version).next().is_none() {
2290 return;
2291 }
2292
2293 self.reparse(cx);
2294
2295 cx.emit(BufferEvent::Edited);
2296 if was_dirty != self.is_dirty() {
2297 cx.emit(BufferEvent::DirtyChanged);
2298 }
2299 cx.notify();
2300 }
2301
2302 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2303 where
2304 I: IntoIterator<Item = Range<T>>,
2305 T: ToOffset + Copy,
2306 {
2307 let before_edit = self.snapshot();
2308 let entries = ranges
2309 .into_iter()
2310 .map(|range| AutoindentRequestEntry {
2311 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2312 first_line_is_new: true,
2313 indent_size: before_edit.language_indent_size_at(range.start, cx),
2314 original_indent_column: None,
2315 })
2316 .collect();
2317 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2318 before_edit,
2319 entries,
2320 is_block_mode: false,
2321 ignore_empty_lines: true,
2322 }));
2323 self.request_autoindent(cx);
2324 }
2325
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
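    ///
    /// A minimal usage sketch (not a compiled doctest; the position is hypothetical):
    ///
    /// ```rust,ignore
    /// // Open an empty line near row 4, padding with blank lines above and below
    /// // when the neighboring lines are not already blank.
    /// let start = buffer.insert_empty_line(Point::new(4, 2), true, true, cx);
    /// ```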
2328 pub fn insert_empty_line(
2329 &mut self,
2330 position: impl ToPoint,
2331 space_above: bool,
2332 space_below: bool,
2333 cx: &mut Context<Self>,
2334 ) -> Point {
2335 let mut position = position.to_point(self);
2336
2337 self.start_transaction();
2338
2339 self.edit(
2340 [(position..position, "\n")],
2341 Some(AutoindentMode::EachLine),
2342 cx,
2343 );
2344
2345 if position.column > 0 {
2346 position += Point::new(1, 0);
2347 }
2348
2349 if !self.is_line_blank(position.row) {
2350 self.edit(
2351 [(position..position, "\n")],
2352 Some(AutoindentMode::EachLine),
2353 cx,
2354 );
2355 }
2356
2357 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2358 self.edit(
2359 [(position..position, "\n")],
2360 Some(AutoindentMode::EachLine),
2361 cx,
2362 );
2363 position.row += 1;
2364 }
2365
2366 if space_below
2367 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2368 {
2369 self.edit(
2370 [(position..position, "\n")],
2371 Some(AutoindentMode::EachLine),
2372 cx,
2373 );
2374 }
2375
2376 self.end_transaction(cx);
2377
2378 position
2379 }
2380
2381 /// Applies the given remote operations to the buffer.
2382 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2383 self.pending_autoindent.take();
2384 let was_dirty = self.is_dirty();
2385 let old_version = self.version.clone();
2386 let mut deferred_ops = Vec::new();
2387 let buffer_ops = ops
2388 .into_iter()
2389 .filter_map(|op| match op {
2390 Operation::Buffer(op) => Some(op),
2391 _ => {
2392 if self.can_apply_op(&op) {
2393 self.apply_op(op, cx);
2394 } else {
2395 deferred_ops.push(op);
2396 }
2397 None
2398 }
2399 })
2400 .collect::<Vec<_>>();
2401 for operation in buffer_ops.iter() {
2402 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2403 }
2404 self.text.apply_ops(buffer_ops);
2405 self.deferred_ops.insert(deferred_ops);
2406 self.flush_deferred_ops(cx);
2407 self.did_edit(&old_version, was_dirty, cx);
        // Notify regardless of whether the buffer was edited, since the operations
        // could include a selection update.
2410 cx.notify();
2411 }
2412
2413 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2414 let mut deferred_ops = Vec::new();
2415 for op in self.deferred_ops.drain().iter().cloned() {
2416 if self.can_apply_op(&op) {
2417 self.apply_op(op, cx);
2418 } else {
2419 deferred_ops.push(op);
2420 }
2421 }
2422 self.deferred_ops.insert(deferred_ops);
2423 }
2424
2425 pub fn has_deferred_ops(&self) -> bool {
2426 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2427 }
2428
2429 fn can_apply_op(&self, operation: &Operation) -> bool {
2430 match operation {
2431 Operation::Buffer(_) => {
2432 unreachable!("buffer operations should never be applied at this layer")
2433 }
2434 Operation::UpdateDiagnostics {
2435 diagnostics: diagnostic_set,
2436 ..
2437 } => diagnostic_set.iter().all(|diagnostic| {
2438 self.text.can_resolve(&diagnostic.range.start)
2439 && self.text.can_resolve(&diagnostic.range.end)
2440 }),
2441 Operation::UpdateSelections { selections, .. } => selections
2442 .iter()
2443 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2444 Operation::UpdateCompletionTriggers { .. } => true,
2445 }
2446 }
2447
2448 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2449 match operation {
2450 Operation::Buffer(_) => {
2451 unreachable!("buffer operations should never be applied at this layer")
2452 }
2453 Operation::UpdateDiagnostics {
2454 server_id,
2455 diagnostics: diagnostic_set,
2456 lamport_timestamp,
2457 } => {
2458 let snapshot = self.snapshot();
2459 self.apply_diagnostic_update(
2460 server_id,
2461 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2462 lamport_timestamp,
2463 cx,
2464 );
2465 }
2466 Operation::UpdateSelections {
2467 selections,
2468 lamport_timestamp,
2469 line_mode,
2470 cursor_shape,
2471 } => {
2472 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2473 if set.lamport_timestamp > lamport_timestamp {
2474 return;
2475 }
2476 }
2477
2478 self.remote_selections.insert(
2479 lamport_timestamp.replica_id,
2480 SelectionSet {
2481 selections,
2482 lamport_timestamp,
2483 line_mode,
2484 cursor_shape,
2485 },
2486 );
2487 self.text.lamport_clock.observe(lamport_timestamp);
2488 self.non_text_state_update_count += 1;
2489 }
2490 Operation::UpdateCompletionTriggers {
2491 triggers,
2492 lamport_timestamp,
2493 server_id,
2494 } => {
2495 if triggers.is_empty() {
2496 self.completion_triggers_per_language_server
2497 .remove(&server_id);
2498 self.completion_triggers = self
2499 .completion_triggers_per_language_server
2500 .values()
2501 .flat_map(|triggers| triggers.into_iter().cloned())
2502 .collect();
2503 } else {
2504 self.completion_triggers_per_language_server
2505 .insert(server_id, triggers.iter().cloned().collect());
2506 self.completion_triggers.extend(triggers);
2507 }
2508 self.text.lamport_clock.observe(lamport_timestamp);
2509 }
2510 }
2511 }
2512
2513 fn apply_diagnostic_update(
2514 &mut self,
2515 server_id: LanguageServerId,
2516 diagnostics: DiagnosticSet,
2517 lamport_timestamp: clock::Lamport,
2518 cx: &mut Context<Self>,
2519 ) {
2520 if lamport_timestamp > self.diagnostics_timestamp {
2521 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2522 if diagnostics.is_empty() {
2523 if let Ok(ix) = ix {
2524 self.diagnostics.remove(ix);
2525 }
2526 } else {
2527 match ix {
2528 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2529 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2530 };
2531 }
2532 self.diagnostics_timestamp = lamport_timestamp;
2533 self.non_text_state_update_count += 1;
2534 self.text.lamport_clock.observe(lamport_timestamp);
2535 cx.notify();
2536 cx.emit(BufferEvent::DiagnosticsUpdated);
2537 }
2538 }
2539
2540 fn send_operation(&self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2541 cx.emit(BufferEvent::Operation {
2542 operation,
2543 is_local,
2544 });
2545 }
2546
2547 /// Removes the selections for a given peer.
2548 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2549 self.remote_selections.remove(&replica_id);
2550 cx.notify();
2551 }
2552
2553 /// Undoes the most recent transaction.
2554 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2555 let was_dirty = self.is_dirty();
2556 let old_version = self.version.clone();
2557
2558 if let Some((transaction_id, operation)) = self.text.undo() {
2559 self.send_operation(Operation::Buffer(operation), true, cx);
2560 self.did_edit(&old_version, was_dirty, cx);
2561 Some(transaction_id)
2562 } else {
2563 None
2564 }
2565 }
2566
2567 /// Manually undoes a specific transaction in the buffer's undo history.
2568 pub fn undo_transaction(
2569 &mut self,
2570 transaction_id: TransactionId,
2571 cx: &mut Context<Self>,
2572 ) -> bool {
2573 let was_dirty = self.is_dirty();
2574 let old_version = self.version.clone();
2575 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2576 self.send_operation(Operation::Buffer(operation), true, cx);
2577 self.did_edit(&old_version, was_dirty, cx);
2578 true
2579 } else {
2580 false
2581 }
2582 }
2583
2584 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2585 pub fn undo_to_transaction(
2586 &mut self,
2587 transaction_id: TransactionId,
2588 cx: &mut Context<Self>,
2589 ) -> bool {
2590 let was_dirty = self.is_dirty();
2591 let old_version = self.version.clone();
2592
2593 let operations = self.text.undo_to_transaction(transaction_id);
2594 let undone = !operations.is_empty();
2595 for operation in operations {
2596 self.send_operation(Operation::Buffer(operation), true, cx);
2597 }
2598 if undone {
2599 self.did_edit(&old_version, was_dirty, cx)
2600 }
2601 undone
2602 }
2603
2604 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2605 let was_dirty = self.is_dirty();
2606 let operation = self.text.undo_operations(counts);
2607 let old_version = self.version.clone();
2608 self.send_operation(Operation::Buffer(operation), true, cx);
2609 self.did_edit(&old_version, was_dirty, cx);
2610 }
2611
    /// Redoes the most recently undone transaction.
2613 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2614 let was_dirty = self.is_dirty();
2615 let old_version = self.version.clone();
2616
2617 if let Some((transaction_id, operation)) = self.text.redo() {
2618 self.send_operation(Operation::Buffer(operation), true, cx);
2619 self.did_edit(&old_version, was_dirty, cx);
2620 Some(transaction_id)
2621 } else {
2622 None
2623 }
2624 }
2625
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2627 pub fn redo_to_transaction(
2628 &mut self,
2629 transaction_id: TransactionId,
2630 cx: &mut Context<Self>,
2631 ) -> bool {
2632 let was_dirty = self.is_dirty();
2633 let old_version = self.version.clone();
2634
2635 let operations = self.text.redo_to_transaction(transaction_id);
2636 let redone = !operations.is_empty();
2637 for operation in operations {
2638 self.send_operation(Operation::Buffer(operation), true, cx);
2639 }
2640 if redone {
2641 self.did_edit(&old_version, was_dirty, cx)
2642 }
2643 redone
2644 }
2645
2646 /// Override current completion triggers with the user-provided completion triggers.
2647 pub fn set_completion_triggers(
2648 &mut self,
2649 server_id: LanguageServerId,
2650 triggers: BTreeSet<String>,
2651 cx: &mut Context<Self>,
2652 ) {
2653 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2654 if triggers.is_empty() {
2655 self.completion_triggers_per_language_server
2656 .remove(&server_id);
2657 self.completion_triggers = self
2658 .completion_triggers_per_language_server
2659 .values()
2660 .flat_map(|triggers| triggers.into_iter().cloned())
2661 .collect();
2662 } else {
2663 self.completion_triggers_per_language_server
2664 .insert(server_id, triggers.clone());
2665 self.completion_triggers.extend(triggers.iter().cloned());
2666 }
2667 self.send_operation(
2668 Operation::UpdateCompletionTriggers {
2669 triggers: triggers.iter().cloned().collect(),
2670 lamport_timestamp: self.completion_triggers_timestamp,
2671 server_id,
2672 },
2673 true,
2674 cx,
2675 );
2676 cx.notify();
2677 }
2678
2679 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2681 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2682 &self.completion_triggers
2683 }
2684
2685 /// Call this directly after performing edits to prevent the preview tab
2686 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2687 /// to return false until there are additional edits.
2688 pub fn refresh_preview(&mut self) {
2689 self.preview_version = self.version.clone();
2690 }
2691
2692 /// Whether we should preserve the preview status of a tab containing this buffer.
2693 pub fn preserve_preview(&self) -> bool {
2694 !self.has_edits_since(&self.preview_version)
2695 }
2696}
2697
2698#[doc(hidden)]
2699#[cfg(any(test, feature = "test-support"))]
2700impl Buffer {
2701 pub fn edit_via_marked_text(
2702 &mut self,
2703 marked_string: &str,
2704 autoindent_mode: Option<AutoindentMode>,
2705 cx: &mut Context<Self>,
2706 ) {
2707 let edits = self.edits_for_marked_text(marked_string);
2708 self.edit(edits, autoindent_mode, cx);
2709 }
2710
2711 pub fn set_group_interval(&mut self, group_interval: Duration) {
2712 self.text.set_group_interval(group_interval);
2713 }
2714
2715 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2716 where
2717 T: rand::Rng,
2718 {
2719 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2720 let mut last_end = None;
2721 for _ in 0..old_range_count {
2722 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2723 break;
2724 }
2725
2726 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2727 let mut range = self.random_byte_range(new_start, rng);
2728 if rng.gen_bool(0.2) {
2729 mem::swap(&mut range.start, &mut range.end);
2730 }
2731 last_end = Some(range.end);
2732
2733 let new_text_len = rng.gen_range(0..10);
2734 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2735 new_text = new_text.to_uppercase();
2736
2737 edits.push((range, new_text));
2738 }
2739 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2740 self.edit(edits, None, cx);
2741 }
2742
2743 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2744 let was_dirty = self.is_dirty();
2745 let old_version = self.version.clone();
2746
2747 let ops = self.text.randomly_undo_redo(rng);
2748 if !ops.is_empty() {
2749 for op in ops {
2750 self.send_operation(Operation::Buffer(op), true, cx);
2751 self.did_edit(&old_version, was_dirty, cx);
2752 }
2753 }
2754 }
2755}
2756
2757impl EventEmitter<BufferEvent> for Buffer {}
2758
2759impl Deref for Buffer {
2760 type Target = TextBuffer;
2761
2762 fn deref(&self) -> &Self::Target {
2763 &self.text
2764 }
2765}
2766
2767impl BufferSnapshot {
    /// Returns [`IndentSize`] for a given line that respects user settings and
    /// language preferences.
2769 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2770 indent_size_for_line(self, row)
2771 }
2772 /// Returns [`IndentSize`] for a given position that respects user settings
2773 /// and language preferences.
2774 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2775 let settings = language_settings(
2776 self.language_at(position).map(|l| l.name()),
2777 self.file(),
2778 cx,
2779 );
2780 if settings.hard_tabs {
2781 IndentSize::tab()
2782 } else {
2783 IndentSize::spaces(settings.tab_size.get())
2784 }
2785 }
2786
2787 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2788 /// is passed in as `single_indent_size`.
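    ///
    /// A minimal usage sketch (not a compiled doctest; the rows and indent unit
    /// are hypothetical):
    ///
    /// ```rust,ignore
    /// let suggestions = snapshot.suggested_indents(0..5, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: indent to column {}", indent.len);
    /// }
    /// ```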
2789 pub fn suggested_indents(
2790 &self,
2791 rows: impl Iterator<Item = u32>,
2792 single_indent_size: IndentSize,
2793 ) -> BTreeMap<u32, IndentSize> {
2794 let mut result = BTreeMap::new();
2795
2796 for row_range in contiguous_ranges(rows, 10) {
2797 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2798 Some(suggestions) => suggestions,
2799 _ => break,
2800 };
2801
2802 for (row, suggestion) in row_range.zip(suggestions) {
2803 let indent_size = if let Some(suggestion) = suggestion {
2804 result
2805 .get(&suggestion.basis_row)
2806 .copied()
2807 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2808 .with_delta(suggestion.delta, single_indent_size)
2809 } else {
2810 self.indent_size_for_line(row)
2811 };
2812
2813 result.insert(row, indent_size);
2814 }
2815 }
2816
2817 result
2818 }
2819
2820 fn suggest_autoindents(
2821 &self,
2822 row_range: Range<u32>,
2823 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2824 let config = &self.language.as_ref()?.config;
2825 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2826
2827 // Find the suggested indentation ranges based on the syntax tree.
2828 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2829 let end = Point::new(row_range.end, 0);
2830 let range = (start..end).to_offset(&self.text);
2831 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2832 Some(&grammar.indents_config.as_ref()?.query)
2833 });
2834 let indent_configs = matches
2835 .grammars()
2836 .iter()
2837 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2838 .collect::<Vec<_>>();
2839
2840 let mut indent_ranges = Vec::<Range<Point>>::new();
2841 let mut outdent_positions = Vec::<Point>::new();
2842 while let Some(mat) = matches.peek() {
2843 let mut start: Option<Point> = None;
2844 let mut end: Option<Point> = None;
2845
2846 let config = &indent_configs[mat.grammar_index];
2847 for capture in mat.captures {
2848 if capture.index == config.indent_capture_ix {
2849 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2850 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2851 } else if Some(capture.index) == config.start_capture_ix {
2852 start = Some(Point::from_ts_point(capture.node.end_position()));
2853 } else if Some(capture.index) == config.end_capture_ix {
2854 end = Some(Point::from_ts_point(capture.node.start_position()));
2855 } else if Some(capture.index) == config.outdent_capture_ix {
2856 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2857 }
2858 }
2859
2860 matches.advance();
2861 if let Some((start, end)) = start.zip(end) {
2862 if start.row == end.row {
2863 continue;
2864 }
2865
2866 let range = start..end;
2867 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2868 Err(ix) => indent_ranges.insert(ix, range),
2869 Ok(ix) => {
2870 let prev_range = &mut indent_ranges[ix];
2871 prev_range.end = prev_range.end.max(range.end);
2872 }
2873 }
2874 }
2875 }
2876
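        // Collect the ranges of syntax errors, merging overlapping ranges, so that
        // suggestions for rows inside malformed code can be marked `within_error`.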
2877 let mut error_ranges = Vec::<Range<Point>>::new();
2878 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2879 Some(&grammar.error_query)
2880 });
2881 while let Some(mat) = matches.peek() {
2882 let node = mat.captures[0].node;
2883 let start = Point::from_ts_point(node.start_position());
2884 let end = Point::from_ts_point(node.end_position());
2885 let range = start..end;
2886 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2887 Ok(ix) | Err(ix) => ix,
2888 };
2889 let mut end_ix = ix;
2890 while let Some(existing_range) = error_ranges.get(end_ix) {
2891 if existing_range.end < end {
2892 end_ix += 1;
2893 } else {
2894 break;
2895 }
2896 }
2897 error_ranges.splice(ix..end_ix, [range]);
2898 matches.advance();
2899 }
2900
2901 outdent_positions.sort();
2902 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and set its end to the outdent position.
2905 if let Some(range_to_truncate) = indent_ranges
2906 .iter_mut()
2907 .filter(|indent_range| indent_range.contains(&outdent_position))
2908 .last()
2909 {
2910 range_to_truncate.end = outdent_position;
2911 }
2912 }
2913
        // Find the suggested indentation increases and decreases based on regexes.
2915 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2916 self.for_each_line(
2917 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2918 ..Point::new(row_range.end, 0),
2919 |row, line| {
2920 if config
2921 .decrease_indent_pattern
2922 .as_ref()
2923 .map_or(false, |regex| regex.is_match(line))
2924 {
2925 indent_change_rows.push((row, Ordering::Less));
2926 }
2927 if config
2928 .increase_indent_pattern
2929 .as_ref()
2930 .map_or(false, |regex| regex.is_match(line))
2931 {
2932 indent_change_rows.push((row + 1, Ordering::Greater));
2933 }
2934 },
2935 );
2936
2937 let mut indent_changes = indent_change_rows.into_iter().peekable();
2938 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2939 prev_non_blank_row.unwrap_or(0)
2940 } else {
2941 row_range.start.saturating_sub(1)
2942 };
2943 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2944 Some(row_range.map(move |row| {
2945 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2946
2947 let mut indent_from_prev_row = false;
2948 let mut outdent_from_prev_row = false;
2949 let mut outdent_to_row = u32::MAX;
2950 let mut from_regex = false;
2951
2952 while let Some((indent_row, delta)) = indent_changes.peek() {
2953 match indent_row.cmp(&row) {
2954 Ordering::Equal => match delta {
2955 Ordering::Less => {
2956 from_regex = true;
2957 outdent_from_prev_row = true
2958 }
2959 Ordering::Greater => {
2960 indent_from_prev_row = true;
2961 from_regex = true
2962 }
2963 _ => {}
2964 },
2965
2966 Ordering::Greater => break,
2967 Ordering::Less => {}
2968 }
2969
2970 indent_changes.next();
2971 }
2972
2973 for range in &indent_ranges {
2974 if range.start.row >= row {
2975 break;
2976 }
2977 if range.start.row == prev_row && range.end > row_start {
2978 indent_from_prev_row = true;
2979 }
2980 if range.end > prev_row_start && range.end <= row_start {
2981 outdent_to_row = outdent_to_row.min(range.start.row);
2982 }
2983 }
2984
2985 let within_error = error_ranges
2986 .iter()
2987 .any(|e| e.start.row < row && e.end > row_start);
2988
2989 let suggestion = if outdent_to_row == prev_row
2990 || (outdent_from_prev_row && indent_from_prev_row)
2991 {
2992 Some(IndentSuggestion {
2993 basis_row: prev_row,
2994 delta: Ordering::Equal,
2995 within_error: within_error && !from_regex,
2996 })
2997 } else if indent_from_prev_row {
2998 Some(IndentSuggestion {
2999 basis_row: prev_row,
3000 delta: Ordering::Greater,
3001 within_error: within_error && !from_regex,
3002 })
3003 } else if outdent_to_row < prev_row {
3004 Some(IndentSuggestion {
3005 basis_row: outdent_to_row,
3006 delta: Ordering::Equal,
3007 within_error: within_error && !from_regex,
3008 })
3009 } else if outdent_from_prev_row {
3010 Some(IndentSuggestion {
3011 basis_row: prev_row,
3012 delta: Ordering::Less,
3013 within_error: within_error && !from_regex,
3014 })
3015 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3016 {
3017 Some(IndentSuggestion {
3018 basis_row: prev_row,
3019 delta: Ordering::Equal,
3020 within_error: within_error && !from_regex,
3021 })
3022 } else {
3023 None
3024 };
3025
3026 prev_row = row;
3027 prev_row_start = row_start;
3028 suggestion
3029 }))
3030 }
3031
3032 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3033 while row > 0 {
3034 row -= 1;
3035 if !self.is_line_blank(row) {
3036 return Some(row);
3037 }
3038 }
3039 None
3040 }
3041
3042 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
3043 let captures = self.syntax.captures(range, &self.text, |grammar| {
3044 grammar.highlights_query.as_ref()
3045 });
3046 let highlight_maps = captures
3047 .grammars()
3048 .iter()
3049 .map(|grammar| grammar.highlight_map())
3050 .collect();
3051 (captures, highlight_maps)
3052 }
3053
3054 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3055 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3056 /// returned in chunks where each chunk has a single syntax highlighting style and
3057 /// diagnostic status.
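    ///
    /// A minimal usage sketch (not a compiled doctest; the byte range is hypothetical):
    ///
    /// ```rust,ignore
    /// for chunk in snapshot.chunks(0..100, true) {
    ///     if let Some(highlight) = chunk.syntax_highlight_id {
    ///         // Map `highlight` to a style via a `SyntaxTheme` before rendering.
    ///     }
    ///     print!("{}", chunk.text);
    /// }
    /// ```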
3058 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
3059 let range = range.start.to_offset(self)..range.end.to_offset(self);
3060
3061 let mut syntax = None;
3062 if language_aware {
3063 syntax = Some(self.get_highlights(range.clone()));
3064 }
3065 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3066 let diagnostics = language_aware;
3067 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3068 }
3069
3070 pub fn highlighted_text_for_range<T: ToOffset>(
3071 &self,
3072 range: Range<T>,
3073 override_style: Option<HighlightStyle>,
3074 syntax_theme: &SyntaxTheme,
3075 ) -> HighlightedText {
3076 HighlightedText::from_buffer_range(
3077 range,
3078 &self.text,
3079 &self.syntax,
3080 override_style,
3081 syntax_theme,
3082 )
3083 }
3084
3085 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a new string for each line.
3087 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3088 let mut line = String::new();
3089 let mut row = range.start.row;
3090 for chunk in self
3091 .as_rope()
3092 .chunks_in_range(range.to_offset(self))
3093 .chain(["\n"])
3094 {
3095 for (newline_ix, text) in chunk.split('\n').enumerate() {
3096 if newline_ix > 0 {
3097 callback(row, &line);
3098 row += 1;
3099 line.clear();
3100 }
3101 line.push_str(text);
3102 }
3103 }
3104 }
3105
3106 /// Iterates over every [`SyntaxLayer`] in the buffer.
3107 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
3108 self.syntax
3109 .layers_for_range(0..self.len(), &self.text, true)
3110 }
3111
3112 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
3113 let offset = position.to_offset(self);
3114 self.syntax
3115 .layers_for_range(offset..offset, &self.text, false)
3116 .filter(|l| l.node().end_byte() > offset)
3117 .last()
3118 }
3119
3120 /// Returns the main [`Language`].
3121 pub fn language(&self) -> Option<&Arc<Language>> {
3122 self.language.as_ref()
3123 }
3124
3125 /// Returns the [`Language`] at the given location.
3126 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3127 self.syntax_layer_at(position)
3128 .map(|info| info.language)
3129 .or(self.language.as_ref())
3130 }
3131
3132 /// Returns the settings for the language at the given location.
3133 pub fn settings_at<'a, D: ToOffset>(
3134 &'a self,
3135 position: D,
3136 cx: &'a App,
3137 ) -> Cow<'a, LanguageSettings> {
3138 language_settings(
3139 self.language_at(position).map(|l| l.name()),
3140 self.file.as_ref(),
3141 cx,
3142 )
3143 }
3144
3145 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3146 CharClassifier::new(self.language_scope_at(point))
3147 }
3148
3149 /// Returns the [`LanguageScope`] at the given location.
3150 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3151 let offset = position.to_offset(self);
3152 let mut scope = None;
3153 let mut smallest_range: Option<Range<usize>> = None;
3154
3155 // Use the layer that has the smallest node intersecting the given point.
3156 for layer in self
3157 .syntax
3158 .layers_for_range(offset..offset, &self.text, false)
3159 {
3160 let mut cursor = layer.node().walk();
3161
3162 let mut range = None;
3163 loop {
3164 let child_range = cursor.node().byte_range();
3165 if !child_range.to_inclusive().contains(&offset) {
3166 break;
3167 }
3168
3169 range = Some(child_range);
3170 if cursor.goto_first_child_for_byte(offset).is_none() {
3171 break;
3172 }
3173 }
3174
3175 if let Some(range) = range {
3176 if smallest_range
3177 .as_ref()
3178 .map_or(true, |smallest_range| range.len() < smallest_range.len())
3179 {
3180 smallest_range = Some(range);
3181 scope = Some(LanguageScope {
3182 language: layer.language.clone(),
3183 override_id: layer.override_id(offset, &self.text),
3184 });
3185 }
3186 }
3187 }
3188
3189 scope.or_else(|| {
3190 self.language.clone().map(|language| LanguageScope {
3191 language,
3192 override_id: None,
3193 })
3194 })
3195 }
3196
3197 /// Returns a tuple of the range and character kind of the word
3198 /// surrounding the given position.
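    ///
    /// A minimal usage sketch (not a compiled doctest; the offset is hypothetical,
    /// and `text_for_range` is assumed from the underlying text snapshot):
    ///
    /// ```rust,ignore
    /// let (range, kind) = snapshot.surrounding_word(42);
    /// let word: String = snapshot.text_for_range(range).collect();
    /// ```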
3199 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3200 let mut start = start.to_offset(self);
3201 let mut end = start;
3202 let mut next_chars = self.chars_at(start).peekable();
3203 let mut prev_chars = self.reversed_chars_at(start).peekable();
3204
3205 let classifier = self.char_classifier_at(start);
3206 let word_kind = cmp::max(
3207 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3208 next_chars.peek().copied().map(|c| classifier.kind(c)),
3209 );
3210
3211 for ch in prev_chars {
3212 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3213 start -= ch.len_utf8();
3214 } else {
3215 break;
3216 }
3217 }
3218
3219 for ch in next_chars {
3220 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3221 end += ch.len_utf8();
3222 } else {
3223 break;
3224 }
3225 }
3226
3227 (start..end, word_kind)
3228 }
3229
3230 /// Returns the closest syntax node enclosing the given range.
3231 pub fn syntax_ancestor<'a, T: ToOffset>(
3232 &'a self,
3233 range: Range<T>,
3234 ) -> Option<tree_sitter::Node<'a>> {
3235 let range = range.start.to_offset(self)..range.end.to_offset(self);
3236 let mut result: Option<tree_sitter::Node<'a>> = None;
3237 'outer: for layer in self
3238 .syntax
3239 .layers_for_range(range.clone(), &self.text, true)
3240 {
3241 let mut cursor = layer.node().walk();
3242
3243 // Descend to the first leaf that touches the start of the range,
3244 // and if the range is non-empty, extends beyond the start.
3245 while cursor.goto_first_child_for_byte(range.start).is_some() {
3246 if !range.is_empty() && cursor.node().end_byte() == range.start {
3247 cursor.goto_next_sibling();
3248 }
3249 }
3250
3251 // Ascend to the smallest ancestor that strictly contains the range.
3252 loop {
3253 let node_range = cursor.node().byte_range();
3254 if node_range.start <= range.start
3255 && node_range.end >= range.end
3256 && node_range.len() > range.len()
3257 {
3258 break;
3259 }
3260 if !cursor.goto_parent() {
3261 continue 'outer;
3262 }
3263 }
3264
3265 let left_node = cursor.node();
3266 let mut layer_result = left_node;
3267
3268 // For an empty range, try to find another node immediately to the right of the range.
3269 if left_node.end_byte() == range.start {
3270 let mut right_node = None;
3271 while !cursor.goto_next_sibling() {
3272 if !cursor.goto_parent() {
3273 break;
3274 }
3275 }
3276
3277 while cursor.node().start_byte() == range.start {
3278 right_node = Some(cursor.node());
3279 if !cursor.goto_first_child() {
3280 break;
3281 }
3282 }
3283
3284 // If there is a candidate node on both sides of the (empty) range, then
3285 // decide between the two by favoring a named node over an anonymous token.
3286 // If both nodes are the same in that regard, favor the right one.
3287 if let Some(right_node) = right_node {
3288 if right_node.is_named() || !left_node.is_named() {
3289 layer_result = right_node;
3290 }
3291 }
3292 }
3293
3294 if let Some(previous_result) = &result {
3295 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3296 continue;
3297 }
3298 }
3299 result = Some(layer_result);
3300 }
3301
3302 result
3303 }
3304
3305 /// Returns the outline for the buffer.
3306 ///
3307 /// This method allows passing an optional [`SyntaxTheme`] to
3308 /// syntax-highlight the returned symbols.
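    ///
    /// A minimal usage sketch (not a compiled doctest) that walks the items the
    /// outline is built from:
    ///
    /// ```rust,ignore
    /// if let Some(items) = snapshot.outline_items_containing(0..snapshot.len(), true, None) {
    ///     for item in items {
    ///         println!("{}{}", "  ".repeat(item.depth), item.text);
    ///     }
    /// }
    /// ```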
3309 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3310 self.outline_items_containing(0..self.len(), true, theme)
3311 .map(Outline::new)
3312 }
3313
3314 /// Returns all the symbols that contain the given position.
3315 ///
3316 /// This method allows passing an optional [`SyntaxTheme`] to
3317 /// syntax-highlight the returned symbols.
3318 pub fn symbols_containing<T: ToOffset>(
3319 &self,
3320 position: T,
3321 theme: Option<&SyntaxTheme>,
3322 ) -> Option<Vec<OutlineItem<Anchor>>> {
3323 let position = position.to_offset(self);
3324 let mut items = self.outline_items_containing(
3325 position.saturating_sub(1)..self.len().min(position + 1),
3326 false,
3327 theme,
3328 )?;
3329 let mut prev_depth = None;
3330 items.retain(|item| {
3331 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3332 prev_depth = Some(item.depth);
3333 result
3334 });
3335 Some(items)
3336 }
3337
3338 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3339 let range = range.to_offset(self);
3340 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3341 grammar.outline_config.as_ref().map(|c| &c.query)
3342 });
3343 let configs = matches
3344 .grammars()
3345 .iter()
3346 .map(|g| g.outline_config.as_ref().unwrap())
3347 .collect::<Vec<_>>();
3348
3349 while let Some(mat) = matches.peek() {
3350 let config = &configs[mat.grammar_index];
3351 let containing_item_node = maybe!({
3352 let item_node = mat.captures.iter().find_map(|cap| {
3353 if cap.index == config.item_capture_ix {
3354 Some(cap.node)
3355 } else {
3356 None
3357 }
3358 })?;
3359
3360 let item_byte_range = item_node.byte_range();
3361 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3362 None
3363 } else {
3364 Some(item_node)
3365 }
3366 });
3367
3368 if let Some(item_node) = containing_item_node {
3369 return Some(
3370 Point::from_ts_point(item_node.start_position())
3371 ..Point::from_ts_point(item_node.end_position()),
3372 );
3373 }
3374
3375 matches.advance();
3376 }
3377 None
3378 }
3379
3380 pub fn outline_items_containing<T: ToOffset>(
3381 &self,
3382 range: Range<T>,
3383 include_extra_context: bool,
3384 theme: Option<&SyntaxTheme>,
3385 ) -> Option<Vec<OutlineItem<Anchor>>> {
3386 let range = range.to_offset(self);
3387 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3388 grammar.outline_config.as_ref().map(|c| &c.query)
3389 });
3390 let configs = matches
3391 .grammars()
3392 .iter()
3393 .map(|g| g.outline_config.as_ref().unwrap())
3394 .collect::<Vec<_>>();
3395
3396 let mut items = Vec::new();
3397 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3398 while let Some(mat) = matches.peek() {
3399 let config = &configs[mat.grammar_index];
3400 if let Some(item) =
3401 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3402 {
3403 items.push(item);
3404 } else if let Some(capture) = mat
3405 .captures
3406 .iter()
3407 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3408 {
3409 let capture_range = capture.node.start_position()..capture.node.end_position();
3410 let mut capture_row_range =
3411 capture_range.start.row as u32..capture_range.end.row as u32;
3412 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3413 {
3414 capture_row_range.end -= 1;
3415 }
3416 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3417 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3418 last_row_range.end = capture_row_range.end;
3419 } else {
3420 annotation_row_ranges.push(capture_row_range);
3421 }
3422 } else {
3423 annotation_row_ranges.push(capture_row_range);
3424 }
3425 }
3426 matches.advance();
3427 }
3428
3429 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3430
3431 // Assign depths based on containment relationships and convert to anchors.
3432 let mut item_ends_stack = Vec::<Point>::new();
3433 let mut anchor_items = Vec::new();
3434 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3435 for item in items {
3436 while let Some(last_end) = item_ends_stack.last().copied() {
3437 if last_end < item.range.end {
3438 item_ends_stack.pop();
3439 } else {
3440 break;
3441 }
3442 }
3443
3444 let mut annotation_row_range = None;
3445 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3446 let row_preceding_item = item.range.start.row.saturating_sub(1);
3447 if next_annotation_row_range.end < row_preceding_item {
3448 annotation_row_ranges.next();
3449 } else {
3450 if next_annotation_row_range.end == row_preceding_item {
3451 annotation_row_range = Some(next_annotation_row_range.clone());
3452 annotation_row_ranges.next();
3453 }
3454 break;
3455 }
3456 }
3457
3458 anchor_items.push(OutlineItem {
3459 depth: item_ends_stack.len(),
3460 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3461 text: item.text,
3462 highlight_ranges: item.highlight_ranges,
3463 name_ranges: item.name_ranges,
3464 body_range: item.body_range.map(|body_range| {
3465 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3466 }),
3467 annotation_range: annotation_row_range.map(|annotation_range| {
3468 self.anchor_after(Point::new(annotation_range.start, 0))
3469 ..self.anchor_before(Point::new(
3470 annotation_range.end,
3471 self.line_len(annotation_range.end),
3472 ))
3473 }),
3474 });
3475 item_ends_stack.push(item.range.end);
3476 }
3477
3478 Some(anchor_items)
3479 }
3480
3481 fn next_outline_item(
3482 &self,
3483 config: &OutlineConfig,
3484 mat: &SyntaxMapMatch,
3485 range: &Range<usize>,
3486 include_extra_context: bool,
3487 theme: Option<&SyntaxTheme>,
3488 ) -> Option<OutlineItem<Point>> {
3489 let item_node = mat.captures.iter().find_map(|cap| {
3490 if cap.index == config.item_capture_ix {
3491 Some(cap.node)
3492 } else {
3493 None
3494 }
3495 })?;
3496
3497 let item_byte_range = item_node.byte_range();
3498 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3499 return None;
3500 }
3501 let item_point_range = Point::from_ts_point(item_node.start_position())
3502 ..Point::from_ts_point(item_node.end_position());
3503
3504 let mut open_point = None;
3505 let mut close_point = None;
3506 let mut buffer_ranges = Vec::new();
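        // Name and context captures contribute to the item's displayed text (extra context
        // only when requested); open and close captures delimit the item's body range.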
3507 for capture in mat.captures {
3508 let node_is_name;
3509 if capture.index == config.name_capture_ix {
3510 node_is_name = true;
3511 } else if Some(capture.index) == config.context_capture_ix
3512 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3513 {
3514 node_is_name = false;
3515 } else {
3516 if Some(capture.index) == config.open_capture_ix {
3517 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3518 } else if Some(capture.index) == config.close_capture_ix {
3519 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3520 }
3521
3522 continue;
3523 }
3524
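            // Truncate multi-line captures to their first line so the outline text stays single-line.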
3525 let mut range = capture.node.start_byte()..capture.node.end_byte();
3526 let start = capture.node.start_position();
3527 if capture.node.end_position().row > start.row {
3528 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3529 }
3530
3531 if !range.is_empty() {
3532 buffer_ranges.push((range, node_is_name));
3533 }
3534 }
3535 if buffer_ranges.is_empty() {
3536 return None;
3537 }
3538 let mut text = String::new();
3539 let mut highlight_ranges = Vec::new();
3540 let mut name_ranges = Vec::new();
3541 let mut chunks = self.chunks(
3542 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3543 true,
3544 );
3545 let mut last_buffer_range_end = 0;
3546 for (buffer_range, is_name) in buffer_ranges {
3547 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
3548 text.push(' ');
3549 }
3550 last_buffer_range_end = buffer_range.end;
3551 if is_name {
3552 let mut start = text.len();
3553 let end = start + buffer_range.len();
3554
                // When multiple names are captured, the matchable text
                // includes the whitespace in between the names.
3557 if !name_ranges.is_empty() {
3558 start -= 1;
3559 }
3560
3561 name_ranges.push(start..end);
3562 }
3563
3564 let mut offset = buffer_range.start;
3565 chunks.seek(buffer_range.clone());
3566 for mut chunk in chunks.by_ref() {
3567 if chunk.text.len() > buffer_range.end - offset {
3568 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3569 offset = buffer_range.end;
3570 } else {
3571 offset += chunk.text.len();
3572 }
3573 let style = chunk
3574 .syntax_highlight_id
3575 .zip(theme)
3576 .and_then(|(highlight, theme)| highlight.style(theme));
3577 if let Some(style) = style {
3578 let start = text.len();
3579 let end = start + chunk.text.len();
3580 highlight_ranges.push((start..end, style));
3581 }
3582 text.push_str(chunk.text);
3583 if offset >= buffer_range.end {
3584 break;
3585 }
3586 }
3587 }
3588
3589 Some(OutlineItem {
3590 depth: 0, // We'll calculate the depth later
3591 range: item_point_range,
3592 text,
3593 highlight_ranges,
3594 name_ranges,
3595 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3596 annotation_range: None,
3597 })
3598 }
3599
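    /// Returns the byte ranges of function bodies intersecting the given range,
    /// as captured by each language's text-object query, e.g. for code folding.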
3600 pub fn function_body_fold_ranges<T: ToOffset>(
3601 &self,
3602 within: Range<T>,
3603 ) -> impl Iterator<Item = Range<usize>> + '_ {
3604 self.text_object_ranges(within, TreeSitterOptions::default())
3605 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3606 }
3607
3608 /// For each grammar in the language, runs the provided
3609 /// [`tree_sitter::Query`] against the given range.
3610 pub fn matches(
3611 &self,
3612 range: Range<usize>,
3613 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3614 ) -> SyntaxMapMatches {
3615 self.syntax.matches(range, self, query)
3616 }
3617
3618 /// Returns bracket range pairs overlapping or adjacent to `range`
3619 pub fn bracket_ranges<T: ToOffset>(
3620 &self,
3621 range: Range<T>,
3622 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
        // Expand the range by one byte on each side so that bracket pairs that merely
        // touch (are adjacent to) the given range are also found.
3624 let range = range.start.to_offset(self).saturating_sub(1)
3625 ..self.len().min(range.end.to_offset(self) + 1);
3626
3627 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3628 grammar.brackets_config.as_ref().map(|c| &c.query)
3629 });
3630 let configs = matches
3631 .grammars()
3632 .iter()
3633 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3634 .collect::<Vec<_>>();
3635
3636 iter::from_fn(move || {
3637 while let Some(mat) = matches.peek() {
3638 let mut open = None;
3639 let mut close = None;
3640 let config = &configs[mat.grammar_index];
3641 for capture in mat.captures {
3642 if capture.index == config.open_capture_ix {
3643 open = Some(capture.node.byte_range());
3644 } else if capture.index == config.close_capture_ix {
3645 close = Some(capture.node.byte_range());
3646 }
3647 }
3648
3649 matches.advance();
3650
3651 let Some((open, close)) = open.zip(close) else {
3652 continue;
3653 };
3654
3655 let bracket_range = open.start..=close.end;
3656 if !bracket_range.overlaps(&range) {
3657 continue;
3658 }
3659
3660 return Some((open, close));
3661 }
3662 None
3663 })
3664 }
3665
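    /// Returns the text objects (as defined by each language's text-object query)
    /// whose ranges overlap the given range.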
3666 pub fn text_object_ranges<T: ToOffset>(
3667 &self,
3668 range: Range<T>,
3669 options: TreeSitterOptions,
3670 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3671 let range = range.start.to_offset(self).saturating_sub(1)
3672 ..self.len().min(range.end.to_offset(self) + 1);
3673
3674 let mut matches =
3675 self.syntax
3676 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3677 grammar.text_object_config.as_ref().map(|c| &c.query)
3678 });
3679
3680 let configs = matches
3681 .grammars()
3682 .iter()
3683 .map(|grammar| grammar.text_object_config.as_ref())
3684 .collect::<Vec<_>>();
3685
3686 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3687
3688 iter::from_fn(move || loop {
3689 while let Some(capture) = captures.pop() {
3690 if capture.0.overlaps(&range) {
3691 return Some(capture);
3692 }
3693 }
3694
3695 let mat = matches.peek()?;
3696
3697 let Some(config) = configs[mat.grammar_index].as_ref() else {
3698 matches.advance();
3699 continue;
3700 };
3701
3702 for capture in mat.captures {
3703 let Some(ix) = config
3704 .text_objects_by_capture_ix
3705 .binary_search_by_key(&capture.index, |e| e.0)
3706 .ok()
3707 else {
3708 continue;
3709 };
3710 let text_object = config.text_objects_by_capture_ix[ix].1;
3711 let byte_range = capture.node.byte_range();
3712
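                // Captures within one match that map to the same text object are merged
                // into a single range spanning all of them.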
3713 let mut found = false;
3714 for (range, existing) in captures.iter_mut() {
3715 if existing == &text_object {
3716 range.start = range.start.min(byte_range.start);
3717 range.end = range.end.max(byte_range.end);
3718 found = true;
3719 break;
3720 }
3721 }
3722
3723 if !found {
3724 captures.push((byte_range, text_object));
3725 }
3726 }
3727
3728 matches.advance();
3729 })
3730 }
3731
3732 /// Returns enclosing bracket ranges containing the given range
3733 pub fn enclosing_bracket_ranges<T: ToOffset>(
3734 &self,
3735 range: Range<T>,
3736 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
3737 let range = range.start.to_offset(self)..range.end.to_offset(self);
3738
3739 self.bracket_ranges(range.clone())
3740 .filter(move |(open, close)| open.start <= range.start && close.end >= range.end)
3741 }
3742
    /// Returns the smallest pair of enclosing bracket ranges containing the given range,
    /// or `None` if no bracket pair contains the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
3746 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3747 &self,
3748 range: Range<T>,
3749 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3750 ) -> Option<(Range<usize>, Range<usize>)> {
3751 let range = range.start.to_offset(self)..range.end.to_offset(self);
3752
3753 // Get the ranges of the innermost pair of brackets.
3754 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3755
3756 for (open, close) in self.enclosing_bracket_ranges(range.clone()) {
3757 if let Some(range_filter) = range_filter {
3758 if !range_filter(open.clone(), close.clone()) {
3759 continue;
3760 }
3761 }
3762
3763 let len = close.end - open.start;
3764
3765 if let Some((existing_open, existing_close)) = &result {
3766 let existing_len = existing_close.end - existing_open.start;
3767 if len > existing_len {
3768 continue;
3769 }
3770 }
3771
3772 result = Some((open, close));
3773 }
3774
3775 result
3776 }
3777
    /// Returns offset ranges for any matches of the redaction query.
3779 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3780 /// will be run on the relevant section of the buffer.
3781 pub fn redacted_ranges<T: ToOffset>(
3782 &self,
3783 range: Range<T>,
3784 ) -> impl Iterator<Item = Range<usize>> + '_ {
3785 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3786 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3787 grammar
3788 .redactions_config
3789 .as_ref()
3790 .map(|config| &config.query)
3791 });
3792
3793 let configs = syntax_matches
3794 .grammars()
3795 .iter()
3796 .map(|grammar| grammar.redactions_config.as_ref())
3797 .collect::<Vec<_>>();
3798
3799 iter::from_fn(move || {
3800 let redacted_range = syntax_matches
3801 .peek()
3802 .and_then(|mat| {
3803 configs[mat.grammar_index].and_then(|config| {
3804 mat.captures
3805 .iter()
3806 .find(|capture| capture.index == config.redaction_capture_ix)
3807 })
3808 })
3809 .map(|mat| mat.node.byte_range());
3810 syntax_matches.advance();
3811 redacted_range
3812 })
3813 }
3814
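    /// Returns the ranges of language injections intersecting the given range,
    /// along with the language injected into each range.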
3815 pub fn injections_intersecting_range<T: ToOffset>(
3816 &self,
3817 range: Range<T>,
3818 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3819 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3820
3821 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3822 grammar
3823 .injection_config
3824 .as_ref()
3825 .map(|config| &config.query)
3826 });
3827
3828 let configs = syntax_matches
3829 .grammars()
3830 .iter()
3831 .map(|grammar| grammar.injection_config.as_ref())
3832 .collect::<Vec<_>>();
3833
3834 iter::from_fn(move || {
3835 let ranges = syntax_matches.peek().and_then(|mat| {
3836 let config = &configs[mat.grammar_index]?;
3837 let content_capture_range = mat.captures.iter().find_map(|capture| {
3838 if capture.index == config.content_capture_ix {
3839 Some(capture.node.byte_range())
3840 } else {
3841 None
3842 }
3843 })?;
3844 let language = self.language_at(content_capture_range.start)?;
3845 Some((content_capture_range, language))
3846 });
3847 syntax_matches.advance();
3848 ranges
3849 })
3850 }
3851
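    /// Returns the runnables captured by each language's runnables query within the
    /// given range, along with their run markers and extra captures.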
3852 pub fn runnable_ranges(
3853 &self,
3854 offset_range: Range<usize>,
3855 ) -> impl Iterator<Item = RunnableRange> + '_ {
3856 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3857 grammar.runnable_config.as_ref().map(|config| &config.query)
3858 });
3859
3860 let test_configs = syntax_matches
3861 .grammars()
3862 .iter()
3863 .map(|grammar| grammar.runnable_config.as_ref())
3864 .collect::<Vec<_>>();
3865
3866 iter::from_fn(move || loop {
3867 let mat = syntax_matches.peek()?;
3868
3869 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
3870 let mut run_range = None;
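                // Compute the smallest range that spans every capture of this match.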
3871 let full_range = mat.captures.iter().fold(
3872 Range {
3873 start: usize::MAX,
3874 end: 0,
3875 },
3876 |mut acc, next| {
3877 let byte_range = next.node.byte_range();
3878 if acc.start > byte_range.start {
3879 acc.start = byte_range.start;
3880 }
3881 if acc.end < byte_range.end {
3882 acc.end = byte_range.end;
3883 }
3884 acc
3885 },
3886 );
3887 if full_range.start > full_range.end {
3888 // We did not find a full spanning range of this match.
3889 return None;
3890 }
3891 let extra_captures: SmallVec<[_; 1]> =
3892 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
3893 test_configs
3894 .extra_captures
3895 .get(capture.index as usize)
3896 .cloned()
3897 .and_then(|tag_name| match tag_name {
3898 RunnableCapture::Named(name) => {
3899 Some((capture.node.byte_range(), name))
3900 }
3901 RunnableCapture::Run => {
3902 let _ = run_range.insert(capture.node.byte_range());
3903 None
3904 }
3905 })
3906 }));
3907 let run_range = run_range?;
3908 let tags = test_configs
3909 .query
3910 .property_settings(mat.pattern_index)
3911 .iter()
3912 .filter_map(|property| {
3913 if *property.key == *"tag" {
3914 property
3915 .value
3916 .as_ref()
3917 .map(|value| RunnableTag(value.to_string().into()))
3918 } else {
3919 None
3920 }
3921 })
3922 .collect();
3923 let extra_captures = extra_captures
3924 .into_iter()
3925 .map(|(range, name)| {
3926 (
3927 name.to_string(),
3928 self.text_for_range(range.clone()).collect::<String>(),
3929 )
3930 })
3931 .collect();
3932 // All tags should have the same range.
3933 Some(RunnableRange {
3934 run_range,
3935 full_range,
3936 runnable: Runnable {
3937 tags,
3938 language: mat.language,
3939 buffer: self.remote_id(),
3940 },
3941 extra_captures,
3942 buffer_id: self.remote_id(),
3943 })
3944 });
3945
3946 syntax_matches.advance();
3947 if test_range.is_some() {
                // It's fine to short-circuit when `.peek()?` returns None. However, we don't want
                // to return None from this iterator just because a match lacked a run marker, so in
                // that case we loop around and try the next match instead.
3950 return test_range;
3951 }
3952 })
3953 }
3954
    /// Returns the selections of remote peers intersecting the given range, optionally
    /// including the local replica's selections when `include_local` is true.
3956 #[allow(clippy::type_complexity)]
3957 pub fn selections_in_range(
3958 &self,
3959 range: Range<Anchor>,
3960 include_local: bool,
3961 ) -> impl Iterator<
3962 Item = (
3963 ReplicaId,
3964 bool,
3965 CursorShape,
3966 impl Iterator<Item = &Selection<Anchor>> + '_,
3967 ),
3968 > + '_ {
3969 self.remote_selections
3970 .iter()
3971 .filter(move |(replica_id, set)| {
3972 (include_local || **replica_id != self.text.replica_id())
3973 && !set.selections.is_empty()
3974 })
3975 .map(move |(replica_id, set)| {
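                // Binary-search for the sub-slice of this replica's selections that
                // overlaps the requested anchor range.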
3976 let start_ix = match set.selections.binary_search_by(|probe| {
3977 probe.end.cmp(&range.start, self).then(Ordering::Greater)
3978 }) {
3979 Ok(ix) | Err(ix) => ix,
3980 };
3981 let end_ix = match set.selections.binary_search_by(|probe| {
3982 probe.start.cmp(&range.end, self).then(Ordering::Less)
3983 }) {
3984 Ok(ix) | Err(ix) => ix,
3985 };
3986
3987 (
3988 *replica_id,
3989 set.line_mode,
3990 set.cursor_shape,
3991 set.selections[start_ix..end_ix].iter(),
3992 )
3993 })
3994 }
3995
    /// Returns whether the buffer contains any diagnostics.
3997 pub fn has_diagnostics(&self) -> bool {
3998 !self.diagnostics.is_empty()
3999 }
4000
4001 /// Returns all the diagnostics intersecting the given range.
4002 pub fn diagnostics_in_range<'a, T, O>(
4003 &'a self,
4004 search_range: Range<T>,
4005 reversed: bool,
4006 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4007 where
4008 T: 'a + Clone + ToOffset,
4009 O: 'a + FromAnchor,
4010 {
4011 let mut iterators: Vec<_> = self
4012 .diagnostics
4013 .iter()
4014 .map(|(_, collection)| {
4015 collection
4016 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4017 .peekable()
4018 })
4019 .collect();
4020
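        // Merge the per-server iterators, always yielding the entry with the smallest start
        // (or the largest, when iterating in reverse).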
4021 std::iter::from_fn(move || {
4022 let (next_ix, _) = iterators
4023 .iter_mut()
4024 .enumerate()
4025 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4026 .min_by(|(_, a), (_, b)| {
4027 let cmp = a
4028 .range
4029 .start
4030 .cmp(&b.range.start, self)
4031 // when range is equal, sort by diagnostic severity
4032 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4033 // and stabilize order with group_id
4034 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4035 if reversed {
4036 cmp.reverse()
4037 } else {
4038 cmp
4039 }
4040 })?;
4041 iterators[next_ix]
4042 .next()
4043 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4044 diagnostic,
4045 range: FromAnchor::from_anchor(&range.start, self)
4046 ..FromAnchor::from_anchor(&range.end, self),
4047 })
4048 })
4049 }
4050
4051 /// Returns all the diagnostic groups associated with the given
4052 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4054 pub fn diagnostic_groups(
4055 &self,
4056 language_server_id: Option<LanguageServerId>,
4057 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4058 let mut groups = Vec::new();
4059
4060 if let Some(language_server_id) = language_server_id {
4061 if let Ok(ix) = self
4062 .diagnostics
4063 .binary_search_by_key(&language_server_id, |e| e.0)
4064 {
4065 self.diagnostics[ix]
4066 .1
4067 .groups(language_server_id, &mut groups, self);
4068 }
4069 } else {
4070 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4071 diagnostics.groups(*language_server_id, &mut groups, self);
4072 }
4073 }
4074
4075 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4076 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4077 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4078 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4079 });
4080
4081 groups
4082 }
4083
4084 /// Returns an iterator over the diagnostics for the given group.
4085 pub fn diagnostic_group<O>(
4086 &self,
4087 group_id: usize,
4088 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4089 where
4090 O: FromAnchor + 'static,
4091 {
4092 self.diagnostics
4093 .iter()
4094 .flat_map(move |(_, set)| set.group(group_id, self))
4095 }
4096
4097 /// An integer version number that accounts for all updates besides
4098 /// the buffer's text itself (which is versioned via a version vector).
4099 pub fn non_text_state_update_count(&self) -> usize {
4100 self.non_text_state_update_count
4101 }
4102
    /// Returns a snapshot of the underlying file.
4104 pub fn file(&self) -> Option<&Arc<dyn File>> {
4105 self.file.as_ref()
4106 }
4107
4108 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4109 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4110 if let Some(file) = self.file() {
4111 if file.path().file_name().is_none() || include_root {
4112 Some(file.full_path(cx))
4113 } else {
4114 Some(file.path().to_path_buf())
4115 }
4116 } else {
4117 None
4118 }
4119 }
4120}
4121
4122fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4123 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4124}
4125
4126fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4127 let mut result = IndentSize::spaces(0);
4128 for c in text {
4129 let kind = match c {
4130 ' ' => IndentKind::Space,
4131 '\t' => IndentKind::Tab,
4132 _ => break,
4133 };
4134 if result.len == 0 {
4135 result.kind = kind;
4136 }
4137 result.len += 1;
4138 }
4139 result
4140}
4141
4142impl Clone for BufferSnapshot {
4143 fn clone(&self) -> Self {
4144 Self {
4145 text: self.text.clone(),
4146 syntax: self.syntax.clone(),
4147 file: self.file.clone(),
4148 remote_selections: self.remote_selections.clone(),
4149 diagnostics: self.diagnostics.clone(),
4150 language: self.language.clone(),
4151 non_text_state_update_count: self.non_text_state_update_count,
4152 }
4153 }
4154}
4155
4156impl Deref for BufferSnapshot {
4157 type Target = text::BufferSnapshot;
4158
4159 fn deref(&self) -> &Self::Target {
4160 &self.text
4161 }
4162}
4163
4164unsafe impl<'a> Send for BufferChunks<'a> {}
4165
4166impl<'a> BufferChunks<'a> {
4167 pub(crate) fn new(
4168 text: &'a Rope,
4169 range: Range<usize>,
4170 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4171 diagnostics: bool,
4172 buffer_snapshot: Option<&'a BufferSnapshot>,
4173 ) -> Self {
4174 let mut highlights = None;
4175 if let Some((captures, highlight_maps)) = syntax {
4176 highlights = Some(BufferChunkHighlights {
4177 captures,
4178 next_capture: None,
4179 stack: Default::default(),
4180 highlight_maps,
4181 })
4182 }
4183
4184 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4185 let chunks = text.chunks_in_range(range.clone());
4186
4187 let mut this = BufferChunks {
4188 range,
4189 buffer_snapshot,
4190 chunks,
4191 diagnostic_endpoints,
4192 error_depth: 0,
4193 warning_depth: 0,
4194 information_depth: 0,
4195 hint_depth: 0,
4196 unnecessary_depth: 0,
4197 highlights,
4198 };
4199 this.initialize_diagnostic_endpoints();
4200 this
4201 }
4202
    /// Seeks to the given byte range in the buffer.
4204 pub fn seek(&mut self, range: Range<usize>) {
4205 let old_range = std::mem::replace(&mut self.range, range.clone());
4206 self.chunks.set_range(self.range.clone());
4207 if let Some(highlights) = self.highlights.as_mut() {
4208 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4209 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4210 highlights
4211 .stack
4212 .retain(|(end_offset, _)| *end_offset > range.start);
4213 if let Some(capture) = &highlights.next_capture {
4214 if range.start >= capture.node.start_byte() {
4215 let next_capture_end = capture.node.end_byte();
4216 if range.start < next_capture_end {
4217 highlights.stack.push((
4218 next_capture_end,
4219 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4220 ));
4221 }
4222 highlights.next_capture.take();
4223 }
4224 }
4225 } else if let Some(snapshot) = self.buffer_snapshot {
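                // The new range isn't contained in the old one, so the existing captures
                // can't be reused; re-query the syntax captures from the snapshot.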
4226 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4227 *highlights = BufferChunkHighlights {
4228 captures,
4229 next_capture: None,
4230 stack: Default::default(),
4231 highlight_maps,
4232 };
4233 } else {
4234 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4235 // Seeking such BufferChunks is not supported.
4236 debug_assert!(false, "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot");
4237 }
4238
4239 highlights.captures.set_byte_range(self.range.clone());
4240 self.initialize_diagnostic_endpoints();
4241 }
4242 }
4243
4244 fn initialize_diagnostic_endpoints(&mut self) {
4245 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4246 if let Some(buffer) = self.buffer_snapshot {
4247 let mut diagnostic_endpoints = Vec::new();
4248 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4249 diagnostic_endpoints.push(DiagnosticEndpoint {
4250 offset: entry.range.start,
4251 is_start: true,
4252 severity: entry.diagnostic.severity,
4253 is_unnecessary: entry.diagnostic.is_unnecessary,
4254 });
4255 diagnostic_endpoints.push(DiagnosticEndpoint {
4256 offset: entry.range.end,
4257 is_start: false,
4258 severity: entry.diagnostic.severity,
4259 is_unnecessary: entry.diagnostic.is_unnecessary,
4260 });
4261 }
4262 diagnostic_endpoints
4263 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4264 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4265 self.hint_depth = 0;
4266 self.error_depth = 0;
4267 self.warning_depth = 0;
4268 self.information_depth = 0;
4269 }
4270 }
4271 }
4272
4273 /// The current byte offset in the buffer.
4274 pub fn offset(&self) -> usize {
4275 self.range.start
4276 }
4277
4278 pub fn range(&self) -> Range<usize> {
4279 self.range.clone()
4280 }
4281
4282 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4283 let depth = match endpoint.severity {
4284 DiagnosticSeverity::ERROR => &mut self.error_depth,
4285 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4286 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4287 DiagnosticSeverity::HINT => &mut self.hint_depth,
4288 _ => return,
4289 };
4290 if endpoint.is_start {
4291 *depth += 1;
4292 } else {
4293 *depth -= 1;
4294 }
4295
4296 if endpoint.is_unnecessary {
4297 if endpoint.is_start {
4298 self.unnecessary_depth += 1;
4299 } else {
4300 self.unnecessary_depth -= 1;
4301 }
4302 }
4303 }
4304
4305 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4306 if self.error_depth > 0 {
4307 Some(DiagnosticSeverity::ERROR)
4308 } else if self.warning_depth > 0 {
4309 Some(DiagnosticSeverity::WARNING)
4310 } else if self.information_depth > 0 {
4311 Some(DiagnosticSeverity::INFORMATION)
4312 } else if self.hint_depth > 0 {
4313 Some(DiagnosticSeverity::HINT)
4314 } else {
4315 None
4316 }
4317 }
4318
4319 fn current_code_is_unnecessary(&self) -> bool {
4320 self.unnecessary_depth > 0
4321 }
4322}
4323
4324impl<'a> Iterator for BufferChunks<'a> {
4325 type Item = Chunk<'a>;
4326
4327 fn next(&mut self) -> Option<Self::Item> {
4328 let mut next_capture_start = usize::MAX;
4329 let mut next_diagnostic_endpoint = usize::MAX;
4330
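        // Pop highlight scopes that ended at or before the current offset, then push any
        // captures that have already begun as active scopes, stopping at the first capture
        // that starts beyond the current offset.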
4331 if let Some(highlights) = self.highlights.as_mut() {
4332 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4333 if *parent_capture_end <= self.range.start {
4334 highlights.stack.pop();
4335 } else {
4336 break;
4337 }
4338 }
4339
4340 if highlights.next_capture.is_none() {
4341 highlights.next_capture = highlights.captures.next();
4342 }
4343
4344 while let Some(capture) = highlights.next_capture.as_ref() {
4345 if self.range.start < capture.node.start_byte() {
4346 next_capture_start = capture.node.start_byte();
4347 break;
4348 } else {
4349 let highlight_id =
4350 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4351 highlights
4352 .stack
4353 .push((capture.node.end_byte(), highlight_id));
4354 highlights.next_capture = highlights.captures.next();
4355 }
4356 }
4357 }
4358
4359 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4360 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4361 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4362 if endpoint.offset <= self.range.start {
4363 self.update_diagnostic_depths(endpoint);
4364 diagnostic_endpoints.next();
4365 } else {
4366 next_diagnostic_endpoint = endpoint.offset;
4367 break;
4368 }
4369 }
4370 }
4371 self.diagnostic_endpoints = diagnostic_endpoints;
4372
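        // Emit the next chunk of text, clipped at the nearest upcoming capture start,
        // diagnostic endpoint, or end of the innermost highlight scope.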
4373 if let Some(chunk) = self.chunks.peek() {
4374 let chunk_start = self.range.start;
4375 let mut chunk_end = (self.chunks.offset() + chunk.len())
4376 .min(next_capture_start)
4377 .min(next_diagnostic_endpoint);
4378 let mut highlight_id = None;
4379 if let Some(highlights) = self.highlights.as_ref() {
4380 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4381 chunk_end = chunk_end.min(*parent_capture_end);
4382 highlight_id = Some(*parent_highlight_id);
4383 }
4384 }
4385
4386 let slice =
4387 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4388 self.range.start = chunk_end;
4389 if self.range.start == self.chunks.offset() + chunk.len() {
4390 self.chunks.next().unwrap();
4391 }
4392
4393 Some(Chunk {
4394 text: slice,
4395 syntax_highlight_id: highlight_id,
4396 diagnostic_severity: self.current_diagnostic_severity(),
4397 is_unnecessary: self.current_code_is_unnecessary(),
4398 ..Default::default()
4399 })
4400 } else {
4401 None
4402 }
4403 }
4404}
4405
4406impl operation_queue::Operation for Operation {
4407 fn lamport_timestamp(&self) -> clock::Lamport {
4408 match self {
4409 Operation::Buffer(_) => {
4410 unreachable!("buffer operations should never be deferred at this layer")
4411 }
4412 Operation::UpdateDiagnostics {
4413 lamport_timestamp, ..
4414 }
4415 | Operation::UpdateSelections {
4416 lamport_timestamp, ..
4417 }
4418 | Operation::UpdateCompletionTriggers {
4419 lamport_timestamp, ..
4420 } => *lamport_timestamp,
4421 }
4422 }
4423}
4424
4425impl Default for Diagnostic {
4426 fn default() -> Self {
4427 Self {
4428 source: Default::default(),
4429 code: None,
4430 severity: DiagnosticSeverity::ERROR,
4431 message: Default::default(),
4432 group_id: 0,
4433 is_primary: false,
4434 is_disk_based: false,
4435 is_unnecessary: false,
4436 data: None,
4437 }
4438 }
4439}
4440
4441impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
4443 pub fn spaces(len: u32) -> Self {
4444 Self {
4445 len,
4446 kind: IndentKind::Space,
4447 }
4448 }
4449
4450 /// Returns an [`IndentSize`] representing a tab.
4451 pub fn tab() -> Self {
4452 Self {
4453 len: 1,
4454 kind: IndentKind::Tab,
4455 }
4456 }
4457
4458 /// An iterator over the characters represented by this [`IndentSize`].
4459 pub fn chars(&self) -> impl Iterator<Item = char> {
4460 iter::repeat(self.char()).take(self.len as usize)
4461 }
4462
4463 /// The character representation of this [`IndentSize`].
4464 pub fn char(&self) -> char {
4465 match self.kind {
4466 IndentKind::Space => ' ',
4467 IndentKind::Tab => '\t',
4468 }
4469 }
4470
4471 /// Consumes the current [`IndentSize`] and returns a new one that has
4472 /// been shrunk or enlarged by the given size along the given direction.
4473 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4474 match direction {
4475 Ordering::Less => {
4476 if self.kind == size.kind && self.len >= size.len {
4477 self.len -= size.len;
4478 }
4479 }
4480 Ordering::Equal => {}
4481 Ordering::Greater => {
4482 if self.len == 0 {
4483 self = size;
4484 } else if self.kind == size.kind {
4485 self.len += size.len;
4486 }
4487 }
4488 }
4489 self
4490 }
4491
4492 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4493 match self.kind {
4494 IndentKind::Space => self.len as usize,
4495 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4496 }
4497 }
4498}
4499
4500#[cfg(any(test, feature = "test-support"))]
4501pub struct TestFile {
4502 pub path: Arc<Path>,
4503 pub root_name: String,
4504}
4505
4506#[cfg(any(test, feature = "test-support"))]
4507impl File for TestFile {
4508 fn path(&self) -> &Arc<Path> {
4509 &self.path
4510 }
4511
4512 fn full_path(&self, _: &gpui::App) -> PathBuf {
4513 PathBuf::from(&self.root_name).join(self.path.as_ref())
4514 }
4515
4516 fn as_local(&self) -> Option<&dyn LocalFile> {
4517 None
4518 }
4519
4520 fn disk_state(&self) -> DiskState {
4521 unimplemented!()
4522 }
4523
4524 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4525 self.path().file_name().unwrap_or(self.root_name.as_ref())
4526 }
4527
4528 fn worktree_id(&self, _: &App) -> WorktreeId {
4529 WorktreeId::from_usize(0)
4530 }
4531
4532 fn as_any(&self) -> &dyn std::any::Any {
4533 unimplemented!()
4534 }
4535
4536 fn to_proto(&self, _: &App) -> rpc::proto::File {
4537 unimplemented!()
4538 }
4539
4540 fn is_private(&self) -> bool {
4541 false
4542 }
4543}
4544
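/// Groups a sequence of row numbers into contiguous ranges, starting a new range whenever
/// the next value is not adjacent or the current range would exceed `max_len` rows.
///
/// For example, the rows `[1, 2, 3, 5]` with a `max_len` of 2 yield `1..3`, `3..4`, and `5..6`.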
pub(crate) fn contiguous_ranges(
    mut values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
4550 let mut current_range: Option<Range<u32>> = None;
4551 std::iter::from_fn(move || loop {
4552 if let Some(value) = values.next() {
4553 if let Some(range) = &mut current_range {
4554 if value == range.end && range.len() < max_len {
4555 range.end += 1;
4556 continue;
4557 }
4558 }
4559
4560 let prev_range = current_range.clone();
4561 current_range = Some(value..(value + 1));
4562 if prev_range.is_some() {
4563 return prev_range;
4564 }
4565 } else {
4566 return current_range.take();
4567 }
4568 })
4569}
4570
4571#[derive(Default, Debug)]
4572pub struct CharClassifier {
4573 scope: Option<LanguageScope>,
4574 for_completion: bool,
4575 ignore_punctuation: bool,
4576}
4577
4578impl CharClassifier {
4579 pub fn new(scope: Option<LanguageScope>) -> Self {
4580 Self {
4581 scope,
4582 for_completion: false,
4583 ignore_punctuation: false,
4584 }
4585 }
4586
4587 pub fn for_completion(self, for_completion: bool) -> Self {
4588 Self {
4589 for_completion,
4590 ..self
4591 }
4592 }
4593
4594 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4595 Self {
4596 ignore_punctuation,
4597 ..self
4598 }
4599 }
4600
4601 pub fn is_whitespace(&self, c: char) -> bool {
4602 self.kind(c) == CharKind::Whitespace
4603 }
4604
4605 pub fn is_word(&self, c: char) -> bool {
4606 self.kind(c) == CharKind::Word
4607 }
4608
4609 pub fn is_punctuation(&self, c: char) -> bool {
4610 self.kind(c) == CharKind::Punctuation
4611 }
4612
4613 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4614 if c.is_whitespace() {
4615 return CharKind::Whitespace;
4616 } else if c.is_alphanumeric() || c == '_' {
4617 return CharKind::Word;
4618 }
4619
4620 if let Some(scope) = &self.scope {
4621 if let Some(characters) = scope.word_characters() {
4622 if characters.contains(&c) {
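                    // Even when a language lists `-` as a word character, treat it as
                    // punctuation for ordinary word motions; it only counts as a word
                    // character when classifying for completions (or when punctuation
                    // is ignored entirely).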
4623 if c == '-' && !self.for_completion && !ignore_punctuation {
4624 return CharKind::Punctuation;
4625 }
4626 return CharKind::Word;
4627 }
4628 }
4629 }
4630
4631 if ignore_punctuation {
4632 CharKind::Word
4633 } else {
4634 CharKind::Punctuation
4635 }
4636 }
4637
4638 pub fn kind(&self, c: char) -> CharKind {
4639 self.kind_with(c, self.ignore_punctuation)
4640 }
4641}
4642
4643/// Find all of the ranges of whitespace that occur at the ends of lines
4644/// in the given rope.
4645///
4646/// This could also be done with a regex search, but this implementation
4647/// avoids copying text.
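///
/// For example, the text `"a  \nb\t\n"` yields the ranges `1..3` and `5..6`.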
4648pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4649 let mut ranges = Vec::new();
4650
4651 let mut offset = 0;
4652 let mut prev_chunk_trailing_whitespace_range = 0..0;
4653 for chunk in rope.chunks() {
4654 let mut prev_line_trailing_whitespace_range = 0..0;
4655 for (i, line) in chunk.split('\n').enumerate() {
4656 let line_end_offset = offset + line.len();
4657 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4658 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4659
4660 if i == 0 && trimmed_line_len == 0 {
4661 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4662 }
4663 if !prev_line_trailing_whitespace_range.is_empty() {
4664 ranges.push(prev_line_trailing_whitespace_range);
4665 }
4666
4667 offset = line_end_offset + 1;
4668 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4669 }
4670
4671 offset -= 1;
4672 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4673 }
4674
4675 if !prev_chunk_trailing_whitespace_range.is_empty() {
4676 ranges.push(prev_chunk_trailing_whitespace_range);
4677 }
4678
4679 ranges
4680}