1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 markdown::ParsedMarkdown,
5 proto, Grammar, Language, LanguageRegistry,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 language_settings::{language_settings, LanguageSettings},
10 markdown::parse_markdown,
11 outline::OutlineItem,
12 syntax_map::{
13 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
14 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
15 },
16 task_context::RunnableRange,
17 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, TextObject,
18 TreeSitterOptions,
19};
20use anyhow::{anyhow, Context as _, Result};
21use async_watch as watch;
22use clock::Lamport;
23pub use clock::ReplicaId;
24use collections::HashMap;
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 AnyElement, App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, Pixels,
29 SharedString, Task, TaskLabel, Window,
30};
31use lsp::LanguageServerId;
32use parking_lot::Mutex;
33use schemars::JsonSchema;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use similar::{ChangeTag, TextDiff};
38use smallvec::SmallVec;
39use smol::future::yield_now;
40use std::{
41 any::Any,
42 borrow::Cow,
43 cell::Cell,
44 cmp::{self, Ordering, Reverse},
45 collections::{BTreeMap, BTreeSet},
46 ffi::OsStr,
47 fmt,
48 future::Future,
49 iter::{self, Iterator, Peekable},
50 mem,
51 num::NonZeroU32,
52 ops::{Deref, DerefMut, Range},
53 path::{Path, PathBuf},
54 str,
55 sync::{Arc, LazyLock},
56 time::{Duration, Instant},
57 vec,
58};
59use sum_tree::TreeMap;
60use text::operation_queue::OperationQueue;
61use text::*;
62pub use text::{
63 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
64 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
65 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
66 Transaction, TransactionId, Unclipped,
67};
68use theme::{ActiveTheme as _, SyntaxTheme};
69#[cfg(any(test, feature = "test-support"))]
70use util::RandomCharIter;
71use util::{debug_panic, maybe, RangeExt};
72
73#[cfg(any(test, feature = "test-support"))]
74pub use {tree_sitter_rust, tree_sitter_typescript};
75
76pub use lsp::DiagnosticSeverity;
77
78/// A label for the background task spawned by the buffer to compute
79/// a diff against the contents of its file.
80pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
81
/// Indicates whether a [`Buffer`] has permission to edit.
83#[derive(PartialEq, Clone, Copy, Debug)]
84pub enum Capability {
85 /// The buffer is a mutable replica.
86 ReadWrite,
87 /// The buffer is a read-only replica.
88 ReadOnly,
89}
90
91pub type BufferRow = u32;
92
93/// An in-memory representation of a source code file, including its text,
94/// syntax trees, git status, and diagnostics.
95pub struct Buffer {
96 text: TextBuffer,
97 branch_state: Option<BufferBranchState>,
98 /// Filesystem state, `None` when there is no path.
99 file: Option<Arc<dyn File>>,
100 /// The mtime of the file when this buffer was last loaded from
101 /// or saved to disk.
102 saved_mtime: Option<MTime>,
103 /// The version vector when this buffer was last loaded from
104 /// or saved to disk.
105 saved_version: clock::Global,
106 preview_version: clock::Global,
107 transaction_depth: usize,
108 was_dirty_before_starting_transaction: Option<bool>,
109 reload_task: Option<Task<Result<()>>>,
110 language: Option<Arc<Language>>,
111 autoindent_requests: Vec<Arc<AutoindentRequest>>,
112 pending_autoindent: Option<Task<()>>,
113 sync_parse_timeout: Duration,
114 syntax_map: Mutex<SyntaxMap>,
115 parsing_in_background: bool,
116 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
117 non_text_state_update_count: usize,
118 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
119 remote_selections: TreeMap<ReplicaId, SelectionSet>,
120 diagnostics_timestamp: clock::Lamport,
121 completion_triggers: BTreeSet<String>,
122 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
123 completion_triggers_timestamp: clock::Lamport,
124 deferred_ops: OperationQueue<Operation>,
125 capability: Capability,
126 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell holds `(self.version, has_changes)` as of the most recent call.
129 has_unsaved_edits: Cell<(clock::Global, bool)>,
130 _subscriptions: Vec<gpui::Subscription>,
131}
132
133#[derive(Copy, Clone, Debug, PartialEq, Eq)]
134pub enum ParseStatus {
135 Idle,
136 Parsing,
137}
138
139struct BufferBranchState {
140 base_buffer: Entity<Buffer>,
141 merged_operations: Vec<Lamport>,
142}
143
144/// An immutable, cheaply cloneable representation of a fixed
145/// state of a buffer.
146pub struct BufferSnapshot {
147 pub text: text::BufferSnapshot,
148 pub(crate) syntax: SyntaxSnapshot,
149 file: Option<Arc<dyn File>>,
150 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
151 remote_selections: TreeMap<ReplicaId, SelectionSet>,
152 language: Option<Arc<Language>>,
153 non_text_state_update_count: usize,
154}
155
/// The kind and amount of indentation in a particular line. For now, this
/// assumes that a line's indentation uses a single kind of whitespace character.
158#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
159pub struct IndentSize {
160 /// The number of bytes that comprise the indentation.
161 pub len: u32,
162 /// The kind of whitespace used for indentation.
163 pub kind: IndentKind,
164}
165
166/// A whitespace character that's used for indentation.
167#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
168pub enum IndentKind {
169 /// An ASCII space character.
170 #[default]
171 Space,
172 /// An ASCII tab character.
173 Tab,
174}
175
176/// The shape of a selection cursor.
177#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
178#[serde(rename_all = "snake_case")]
179pub enum CursorShape {
180 /// A vertical bar
181 #[default]
182 Bar,
183 /// A block that surrounds the following character
184 Block,
185 /// An underline that runs along the following character
186 Underline,
187 /// A box drawn around the following character
188 Hollow,
189}
190
191#[derive(Clone, Debug)]
192struct SelectionSet {
193 line_mode: bool,
194 cursor_shape: CursorShape,
195 selections: Arc<[Selection<Anchor>]>,
196 lamport_timestamp: clock::Lamport,
197}
198
199/// A diagnostic associated with a certain range of a buffer.
200#[derive(Clone, Debug, PartialEq, Eq)]
201pub struct Diagnostic {
202 /// The name of the service that produced this diagnostic.
203 pub source: Option<String>,
204 /// A machine-readable code that identifies this diagnostic.
205 pub code: Option<String>,
206 /// Whether this diagnostic is a hint, warning, or error.
207 pub severity: DiagnosticSeverity,
208 /// The human-readable message associated with this diagnostic.
209 pub message: String,
210 /// An id that identifies the group to which this diagnostic belongs.
211 ///
212 /// When a language server produces a diagnostic with
213 /// one or more associated diagnostics, those diagnostics are all
214 /// assigned a single group ID.
215 pub group_id: usize,
216 /// Whether this diagnostic is the primary diagnostic for its group.
217 ///
218 /// In a given group, the primary diagnostic is the top-level diagnostic
219 /// returned by the language server. The non-primary diagnostics are the
220 /// associated diagnostics.
221 pub is_primary: bool,
222 /// Whether this diagnostic is considered to originate from an analysis of
223 /// files on disk, as opposed to any unsaved buffer contents. This is a
224 /// property of a given diagnostic source, and is configured for a given
225 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
226 /// for the language server.
227 pub is_disk_based: bool,
228 /// Whether this diagnostic marks unnecessary code.
229 pub is_unnecessary: bool,
    /// Data from the language server that produced this diagnostic, passed back to
    /// the server when we request code actions for this diagnostic.
231 pub data: Option<Value>,
232}
233
234/// TODO - move this into the `project` crate and make it private.
235pub async fn prepare_completion_documentation(
236 documentation: &lsp::Documentation,
237 language_registry: &Arc<LanguageRegistry>,
238 language: Option<Arc<Language>>,
239) -> Documentation {
240 match documentation {
241 lsp::Documentation::String(text) => {
242 if text.lines().count() <= 1 {
243 Documentation::SingleLine(text.clone())
244 } else {
245 Documentation::MultiLinePlainText(text.clone())
246 }
247 }
248
249 lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
250 lsp::MarkupKind::PlainText => {
251 if value.lines().count() <= 1 {
252 Documentation::SingleLine(value.clone())
253 } else {
254 Documentation::MultiLinePlainText(value.clone())
255 }
256 }
257
258 lsp::MarkupKind::Markdown => {
259 let parsed = parse_markdown(value, Some(language_registry), language).await;
260 Documentation::MultiLineMarkdown(parsed)
261 }
262 },
263 }
264}
265
266/// Documentation associated with a [`Completion`].
267#[derive(Clone, Debug)]
268pub enum Documentation {
269 /// There is no documentation for this completion.
270 Undocumented,
271 /// A single line of documentation.
272 SingleLine(String),
273 /// Multiple lines of plain text documentation.
274 MultiLinePlainText(String),
275 /// Markdown documentation.
276 MultiLineMarkdown(ParsedMarkdown),
277}
278
279/// An operation used to synchronize this buffer with its other replicas.
280#[derive(Clone, Debug, PartialEq)]
281pub enum Operation {
282 /// A text operation.
283 Buffer(text::Operation),
284
285 /// An update to the buffer's diagnostics.
286 UpdateDiagnostics {
287 /// The id of the language server that produced the new diagnostics.
288 server_id: LanguageServerId,
289 /// The diagnostics.
290 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
291 /// The buffer's lamport timestamp.
292 lamport_timestamp: clock::Lamport,
293 },
294
295 /// An update to the most recent selections in this buffer.
296 UpdateSelections {
297 /// The selections.
298 selections: Arc<[Selection<Anchor>]>,
299 /// The buffer's lamport timestamp.
300 lamport_timestamp: clock::Lamport,
301 /// Whether the selections are in 'line mode'.
302 line_mode: bool,
303 /// The [`CursorShape`] associated with these selections.
304 cursor_shape: CursorShape,
305 },
306
307 /// An update to the characters that should trigger autocompletion
308 /// for this buffer.
309 UpdateCompletionTriggers {
310 /// The characters that trigger autocompletion.
311 triggers: Vec<String>,
312 /// The buffer's lamport timestamp.
313 lamport_timestamp: clock::Lamport,
314 /// The language server ID.
315 server_id: LanguageServerId,
316 },
317}
318
319/// An event that occurs in a buffer.
320#[derive(Clone, Debug, PartialEq)]
321pub enum BufferEvent {
322 /// The buffer was changed in a way that must be
323 /// propagated to its other replicas.
324 Operation {
325 operation: Operation,
326 is_local: bool,
327 },
328 /// The buffer was edited.
329 Edited,
330 /// The buffer's `dirty` bit changed.
331 DirtyChanged,
332 /// The buffer was saved.
333 Saved,
334 /// The buffer's file was changed on disk.
335 FileHandleChanged,
336 /// The buffer was reloaded.
337 Reloaded,
338 /// The buffer is in need of a reload
339 ReloadNeeded,
340 /// The buffer's language was changed.
341 LanguageChanged,
342 /// The buffer's syntax trees were updated.
343 Reparsed,
344 /// The buffer's diagnostics were updated.
345 DiagnosticsUpdated,
346 /// The buffer gained or lost editing capabilities.
347 CapabilityChanged,
348 /// The buffer was explicitly requested to close.
349 Closed,
350 /// The buffer was discarded when closing.
351 Discarded,
352}
353
354/// The file associated with a buffer.
355pub trait File: Send + Sync {
356 /// Returns the [`LocalFile`] associated with this file, if the
357 /// file is local.
358 fn as_local(&self) -> Option<&dyn LocalFile>;
359
360 /// Returns whether this file is local.
361 fn is_local(&self) -> bool {
362 self.as_local().is_some()
363 }
364
365 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
366 /// only available in some states, such as modification time.
367 fn disk_state(&self) -> DiskState;
368
369 /// Returns the path of this file relative to the worktree's root directory.
370 fn path(&self) -> &Arc<Path>;
371
372 /// Returns the path of this file relative to the worktree's parent directory (this means it
373 /// includes the name of the worktree's root folder).
374 fn full_path(&self, cx: &App) -> PathBuf;
375
376 /// Returns the last component of this handle's absolute path. If this handle refers to the root
377 /// of its worktree, then this method will return the name of the worktree itself.
378 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
379
380 /// Returns the id of the worktree to which this file belongs.
381 ///
382 /// This is needed for looking up project-specific settings.
383 fn worktree_id(&self, cx: &App) -> WorktreeId;
384
385 /// Converts this file into an [`Any`] trait object.
386 fn as_any(&self) -> &dyn Any;
387
388 /// Converts this file into a protobuf message.
389 fn to_proto(&self, cx: &App) -> rpc::proto::File;
390
391 /// Return whether Zed considers this to be a private file.
392 fn is_private(&self) -> bool;
393}
394
/// The file's storage status: whether it is stored (`Present`) and, if so, when it was
/// last modified. When the file is not stored, it is either `New` or `Deleted`. The UI
/// distinguishes these two states; for example, the buffer tab does not display a
/// deletion indicator for new files.
399#[derive(Copy, Clone, Debug, PartialEq)]
400pub enum DiskState {
401 /// File created in Zed that has not been saved.
402 New,
403 /// File present on the filesystem.
404 Present { mtime: MTime },
405 /// Deleted file that was previously present.
406 Deleted,
407}
408
409impl DiskState {
410 /// Returns the file's last known modification time on disk.
411 pub fn mtime(self) -> Option<MTime> {
412 match self {
413 DiskState::New => None,
414 DiskState::Present { mtime } => Some(mtime),
415 DiskState::Deleted => None,
416 }
417 }
418}
419
420/// The file associated with a buffer, in the case where the file is on the local disk.
421pub trait LocalFile: File {
    /// Returns the absolute path of this file.
423 fn abs_path(&self, cx: &App) -> PathBuf;
424
425 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
426 fn load(&self, cx: &App) -> Task<Result<String>>;
427
428 /// Loads the file's contents from disk.
429 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
430}
431
432/// The auto-indent behavior associated with an editing operation.
433/// For some editing operations, each affected line of text has its
434/// indentation recomputed. For other operations, the entire block
435/// of edited text is adjusted uniformly.
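///
/// A hedged illustration of the difference (not a compiled doctest; it assumes
/// an existing buffer, a `point` within it, and a `cx: &mut Context<Buffer>`):
///
/// ```ignore
/// // Re-indent every inserted line relative to its new surroundings.
/// buffer.edit(
///     [(point..point, "if ok {\n    run();\n}")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
///
/// // Shift a pasted block as a single unit. `original_indent_columns` records
/// // the indentation the block had where it was copied from, so the whole
/// // block is adjusted by one uniform delta.
/// buffer.edit(
///     [(point..point, copied_text)],
///     Some(AutoindentMode::Block { original_indent_columns: vec![4] }),
///     cx,
/// );
/// ```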
436#[derive(Clone, Debug)]
437pub enum AutoindentMode {
438 /// Indent each line of inserted text.
439 EachLine,
440 /// Apply the same indentation adjustment to all of the lines
441 /// in a given insertion.
442 Block {
443 /// The original indentation level of the first line of each
444 /// insertion, if it has been copied.
445 original_indent_columns: Vec<u32>,
446 },
447}
448
449#[derive(Clone)]
450struct AutoindentRequest {
451 before_edit: BufferSnapshot,
452 entries: Vec<AutoindentRequestEntry>,
453 is_block_mode: bool,
454 ignore_empty_lines: bool,
455}
456
457#[derive(Debug, Clone)]
458struct AutoindentRequestEntry {
459 /// A range of the buffer whose indentation should be adjusted.
460 range: Range<Anchor>,
461 /// Whether or not these lines should be considered brand new, for the
462 /// purpose of auto-indent. When text is not new, its indentation will
463 /// only be adjusted if the suggested indentation level has *changed*
464 /// since the edit was made.
465 first_line_is_new: bool,
466 indent_size: IndentSize,
467 original_indent_column: Option<u32>,
468}
469
470#[derive(Debug)]
471struct IndentSuggestion {
472 basis_row: u32,
473 delta: Ordering,
474 within_error: bool,
475}
476
477struct BufferChunkHighlights<'a> {
478 captures: SyntaxMapCaptures<'a>,
479 next_capture: Option<SyntaxMapCapture<'a>>,
480 stack: Vec<(usize, HighlightId)>,
481 highlight_maps: Vec<HighlightMap>,
482}
483
484/// An iterator that yields chunks of a buffer's text, along with their
485/// syntax highlights and diagnostic status.
486pub struct BufferChunks<'a> {
487 buffer_snapshot: Option<&'a BufferSnapshot>,
488 range: Range<usize>,
489 chunks: text::Chunks<'a>,
490 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
491 error_depth: usize,
492 warning_depth: usize,
493 information_depth: usize,
494 hint_depth: usize,
495 unnecessary_depth: usize,
496 highlights: Option<BufferChunkHighlights<'a>>,
497}
498
499/// A chunk of a buffer's text, along with its syntax highlight and
500/// diagnostic status.
501#[derive(Clone, Debug, Default)]
502pub struct Chunk<'a> {
503 /// The text of the chunk.
504 pub text: &'a str,
505 /// The syntax highlighting style of the chunk.
506 pub syntax_highlight_id: Option<HighlightId>,
507 /// The highlight style that has been applied to this chunk in
508 /// the editor.
509 pub highlight_style: Option<HighlightStyle>,
510 /// The severity of diagnostic associated with this chunk, if any.
511 pub diagnostic_severity: Option<DiagnosticSeverity>,
512 /// Whether this chunk of text is marked as unnecessary.
513 pub is_unnecessary: bool,
514 /// Whether this chunk of text was originally a tab character.
515 pub is_tab: bool,
516 /// An optional recipe for how the chunk should be presented.
517 pub renderer: Option<ChunkRenderer>,
518}
519
520/// A recipe for how the chunk should be presented.
521#[derive(Clone)]
522pub struct ChunkRenderer {
    /// Creates a custom element to represent this chunk.
524 pub render: Arc<dyn Send + Sync + Fn(&mut ChunkRendererContext) -> AnyElement>,
525 /// If true, the element is constrained to the shaped width of the text.
526 pub constrain_width: bool,
527}
528
529pub struct ChunkRendererContext<'a, 'b> {
530 pub window: &'a mut Window,
531 pub context: &'b mut App,
532 pub max_width: Pixels,
533}
534
535impl fmt::Debug for ChunkRenderer {
536 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
537 f.debug_struct("ChunkRenderer")
538 .field("constrain_width", &self.constrain_width)
539 .finish()
540 }
541}
542
543impl<'a, 'b> Deref for ChunkRendererContext<'a, 'b> {
544 type Target = App;
545
546 fn deref(&self) -> &Self::Target {
547 self.context
548 }
549}
550
551impl<'a, 'b> DerefMut for ChunkRendererContext<'a, 'b> {
552 fn deref_mut(&mut self) -> &mut Self::Target {
553 self.context
554 }
555}
556
557/// A set of edits to a given version of a buffer, computed asynchronously.
558#[derive(Debug)]
559pub struct Diff {
560 pub(crate) base_version: clock::Global,
561 line_ending: LineEnding,
562 pub edits: Vec<(Range<usize>, Arc<str>)>,
563}
564
565#[derive(Clone, Copy)]
566pub(crate) struct DiagnosticEndpoint {
567 offset: usize,
568 is_start: bool,
569 severity: DiagnosticSeverity,
570 is_unnecessary: bool,
571}
572
573/// A class of characters, used for characterizing a run of text.
574#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
575pub enum CharKind {
576 /// Whitespace.
577 Whitespace,
578 /// Punctuation.
579 Punctuation,
580 /// Word.
581 Word,
582}
583
/// A runnable is a set of data about a buffer region that can be resolved into a task.
585pub struct Runnable {
586 pub tags: SmallVec<[RunnableTag; 1]>,
587 pub language: Arc<Language>,
588 pub buffer: BufferId,
589}
590
591#[derive(Clone)]
592pub struct EditPreview {
593 applied_edits_snapshot: text::BufferSnapshot,
594 syntax_snapshot: SyntaxSnapshot,
595}
596
597#[derive(Default, Clone, Debug)]
598pub struct HighlightedEdits {
599 pub text: SharedString,
600 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
601}
602
603impl EditPreview {
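    /// Renders the given `edits` against `current_snapshot` as a single highlighted
    /// string: unchanged text keeps its syntax highlighting, inserted text is marked
    /// with the theme's `created_background`, and, when `include_deletions` is true,
    /// deleted text is also included and marked with `deleted_background`.
    ///
    /// A hedged usage sketch (not a compiled doctest; it assumes an existing buffer,
    /// `edits: Arc<[(Range<Anchor>, String)]>`, and an async gpui context):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// let preview = buffer.read(cx).preview_edits(edits.clone(), cx).await;
    /// let HighlightedEdits { text, highlights } =
    ///     preview.highlight_edits(&snapshot, &edits, /* include_deletions */ true, cx);
    /// ```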
604 pub fn highlight_edits(
605 &self,
606 current_snapshot: &BufferSnapshot,
607 edits: &[(Range<Anchor>, String)],
608 include_deletions: bool,
609 cx: &App,
610 ) -> HighlightedEdits {
611 let mut text = String::new();
612 let mut highlights = Vec::new();
613 let Some(range) = self.compute_visible_range(edits, current_snapshot) else {
614 return HighlightedEdits::default();
615 };
616 let mut offset = range.start;
617 let mut delta = 0isize;
618
619 let status_colors = cx.theme().status();
620
621 for (range, edit_text) in edits {
622 let edit_range = range.to_offset(current_snapshot);
623 let new_edit_start = (edit_range.start as isize + delta) as usize;
624 let new_edit_range = new_edit_start..new_edit_start + edit_text.len();
625
626 let prev_range = offset..new_edit_start;
627
628 if !prev_range.is_empty() {
629 let start = text.len();
630 self.highlight_text(prev_range, &mut text, &mut highlights, None, cx);
631 offset += text.len() - start;
632 }
633
634 if include_deletions && !edit_range.is_empty() {
635 let start = text.len();
636 text.extend(current_snapshot.text_for_range(edit_range.clone()));
637 let end = text.len();
638
639 highlights.push((
640 start..end,
641 HighlightStyle {
642 background_color: Some(status_colors.deleted_background),
643 ..Default::default()
644 },
645 ));
646 }
647
648 if !edit_text.is_empty() {
649 self.highlight_text(
650 new_edit_range,
651 &mut text,
652 &mut highlights,
653 Some(HighlightStyle {
654 background_color: Some(status_colors.created_background),
655 ..Default::default()
656 }),
657 cx,
658 );
659
660 offset += edit_text.len();
661 }
662
663 delta += edit_text.len() as isize - edit_range.len() as isize;
664 }
665
666 self.highlight_text(
667 offset..(range.end as isize + delta) as usize,
668 &mut text,
669 &mut highlights,
670 None,
671 cx,
672 );
673
674 HighlightedEdits {
675 text: text.into(),
676 highlights,
677 }
678 }
679
680 fn highlight_text(
681 &self,
682 range: Range<usize>,
683 text: &mut String,
684 highlights: &mut Vec<(Range<usize>, HighlightStyle)>,
685 override_style: Option<HighlightStyle>,
686 cx: &App,
687 ) {
688 for chunk in self.highlighted_chunks(range) {
689 let start = text.len();
690 text.push_str(chunk.text);
691 let end = text.len();
692
693 if let Some(mut highlight_style) = chunk
694 .syntax_highlight_id
695 .and_then(|id| id.style(cx.theme().syntax()))
696 {
697 if let Some(override_style) = override_style {
698 highlight_style.highlight(override_style);
699 }
700 highlights.push((start..end, highlight_style));
701 } else if let Some(override_style) = override_style {
702 highlights.push((start..end, override_style));
703 }
704 }
705 }
706
707 fn highlighted_chunks(&self, range: Range<usize>) -> BufferChunks {
708 let captures =
709 self.syntax_snapshot
710 .captures(range.clone(), &self.applied_edits_snapshot, |grammar| {
711 grammar.highlights_query.as_ref()
712 });
713
714 let highlight_maps = captures
715 .grammars()
716 .iter()
717 .map(|grammar| grammar.highlight_map())
718 .collect();
719
720 BufferChunks::new(
721 self.applied_edits_snapshot.as_rope(),
722 range,
723 Some((captures, highlight_maps)),
724 false,
725 None,
726 )
727 }
728
729 fn compute_visible_range(
730 &self,
731 edits: &[(Range<Anchor>, String)],
732 snapshot: &BufferSnapshot,
733 ) -> Option<Range<usize>> {
734 let (first, _) = edits.first()?;
735 let (last, _) = edits.last()?;
736
737 let start = first.start.to_point(snapshot);
738 let end = last.end.to_point(snapshot);
739
740 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
741 let range = Point::new(start.row, 0)..Point::new(end.row, snapshot.line_len(end.row));
742
743 Some(range.to_offset(&snapshot))
744 }
745}
746
747impl Buffer {
748 /// Create a new buffer with the given base text.
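    ///
    /// A hedged construction sketch (not a compiled doctest; it assumes a
    /// `cx: &mut App` obtained from a gpui context):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("Hello, world!", cx));
    /// assert_eq!(buffer.read(cx).text(), "Hello, world!");
    /// ```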
749 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
750 Self::build(
751 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
752 None,
753 Capability::ReadWrite,
754 )
755 }
756
757 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
758 pub fn local_normalized(
759 base_text_normalized: Rope,
760 line_ending: LineEnding,
761 cx: &Context<Self>,
762 ) -> Self {
763 Self::build(
764 TextBuffer::new_normalized(
765 0,
766 cx.entity_id().as_non_zero_u64().into(),
767 line_ending,
768 base_text_normalized,
769 ),
770 None,
771 Capability::ReadWrite,
772 )
773 }
774
775 /// Create a new buffer that is a replica of a remote buffer.
776 pub fn remote(
777 remote_id: BufferId,
778 replica_id: ReplicaId,
779 capability: Capability,
780 base_text: impl Into<String>,
781 ) -> Self {
782 Self::build(
783 TextBuffer::new(replica_id, remote_id, base_text.into()),
784 None,
785 capability,
786 )
787 }
788
789 /// Create a new buffer that is a replica of a remote buffer, populating its
790 /// state from the given protobuf message.
791 pub fn from_proto(
792 replica_id: ReplicaId,
793 capability: Capability,
794 message: proto::BufferState,
795 file: Option<Arc<dyn File>>,
796 ) -> Result<Self> {
797 let buffer_id = BufferId::new(message.id)
798 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
799 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
800 let mut this = Self::build(buffer, file, capability);
801 this.text.set_line_ending(proto::deserialize_line_ending(
802 rpc::proto::LineEnding::from_i32(message.line_ending)
803 .ok_or_else(|| anyhow!("missing line_ending"))?,
804 ));
805 this.saved_version = proto::deserialize_version(&message.saved_version);
806 this.saved_mtime = message.saved_mtime.map(|time| time.into());
807 Ok(this)
808 }
809
810 /// Serialize the buffer's state to a protobuf message.
811 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
812 proto::BufferState {
813 id: self.remote_id().into(),
814 file: self.file.as_ref().map(|f| f.to_proto(cx)),
815 base_text: self.base_text().to_string(),
816 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
817 saved_version: proto::serialize_version(&self.saved_version),
818 saved_mtime: self.saved_mtime.map(|time| time.into()),
819 }
820 }
821
822 /// Serialize as protobufs all of the changes to the buffer since the given version.
823 pub fn serialize_ops(
824 &self,
825 since: Option<clock::Global>,
826 cx: &App,
827 ) -> Task<Vec<proto::Operation>> {
828 let mut operations = Vec::new();
829 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
830
831 operations.extend(self.remote_selections.iter().map(|(_, set)| {
832 proto::serialize_operation(&Operation::UpdateSelections {
833 selections: set.selections.clone(),
834 lamport_timestamp: set.lamport_timestamp,
835 line_mode: set.line_mode,
836 cursor_shape: set.cursor_shape,
837 })
838 }));
839
840 for (server_id, diagnostics) in &self.diagnostics {
841 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
842 lamport_timestamp: self.diagnostics_timestamp,
843 server_id: *server_id,
844 diagnostics: diagnostics.iter().cloned().collect(),
845 }));
846 }
847
848 for (server_id, completions) in &self.completion_triggers_per_language_server {
849 operations.push(proto::serialize_operation(
850 &Operation::UpdateCompletionTriggers {
851 triggers: completions.iter().cloned().collect(),
852 lamport_timestamp: self.completion_triggers_timestamp,
853 server_id: *server_id,
854 },
855 ));
856 }
857
858 let text_operations = self.text.operations().clone();
859 cx.background_executor().spawn(async move {
860 let since = since.unwrap_or_default();
861 operations.extend(
862 text_operations
863 .iter()
864 .filter(|(_, op)| !since.observed(op.timestamp()))
865 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
866 );
867 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
868 operations
869 })
870 }
871
872 /// Assign a language to the buffer, returning the buffer.
873 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
874 self.set_language(Some(language), cx);
875 self
876 }
877
878 /// Returns the [`Capability`] of this buffer.
879 pub fn capability(&self) -> Capability {
880 self.capability
881 }
882
883 /// Whether this buffer can only be read.
884 pub fn read_only(&self) -> bool {
885 self.capability == Capability::ReadOnly
886 }
887
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
889 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
890 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
891 let snapshot = buffer.snapshot();
892 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
893 Self {
894 saved_mtime,
895 saved_version: buffer.version(),
896 preview_version: buffer.version(),
897 reload_task: None,
898 transaction_depth: 0,
899 was_dirty_before_starting_transaction: None,
900 has_unsaved_edits: Cell::new((buffer.version(), false)),
901 text: buffer,
902 branch_state: None,
903 file,
904 capability,
905 syntax_map,
906 parsing_in_background: false,
907 non_text_state_update_count: 0,
908 sync_parse_timeout: Duration::from_millis(1),
909 parse_status: async_watch::channel(ParseStatus::Idle),
910 autoindent_requests: Default::default(),
911 pending_autoindent: Default::default(),
912 language: None,
913 remote_selections: Default::default(),
914 diagnostics: Default::default(),
915 diagnostics_timestamp: Default::default(),
916 completion_triggers: Default::default(),
917 completion_triggers_per_language_server: Default::default(),
918 completion_triggers_timestamp: Default::default(),
919 deferred_ops: OperationQueue::new(),
920 has_conflict: false,
921 _subscriptions: Vec::new(),
922 }
923 }
924
925 pub fn build_snapshot(
926 text: Rope,
927 language: Option<Arc<Language>>,
928 language_registry: Option<Arc<LanguageRegistry>>,
929 cx: &mut App,
930 ) -> impl Future<Output = BufferSnapshot> {
931 let entity_id = cx.reserve_entity::<Self>().entity_id();
932 let buffer_id = entity_id.as_non_zero_u64().into();
933 async move {
934 let text =
935 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
936 let mut syntax = SyntaxMap::new(&text).snapshot();
937 if let Some(language) = language.clone() {
938 let text = text.clone();
939 let language = language.clone();
940 let language_registry = language_registry.clone();
941 syntax.reparse(&text, language_registry, language);
942 }
943 BufferSnapshot {
944 text,
945 syntax,
946 file: None,
947 diagnostics: Default::default(),
948 remote_selections: Default::default(),
949 language,
950 non_text_state_update_count: 0,
951 }
952 }
953 }
954
955 /// Retrieve a snapshot of the buffer's current state. This is computationally
956 /// cheap, and allows reading from the buffer on a background thread.
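    ///
    /// A hedged sketch of reading a snapshot off the main thread (not a compiled
    /// doctest; it assumes `buffer: Entity<Buffer>` and `cx: &mut App`):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_executor()
    ///     .spawn(async move {
    ///         // The snapshot is immutable, so it is safe to read here.
    ///         snapshot.text().len()
    ///     })
    ///     .detach();
    /// ```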
957 pub fn snapshot(&self) -> BufferSnapshot {
958 let text = self.text.snapshot();
959 let mut syntax_map = self.syntax_map.lock();
960 syntax_map.interpolate(&text);
961 let syntax = syntax_map.snapshot();
962
963 BufferSnapshot {
964 text,
965 syntax,
966 file: self.file.clone(),
967 remote_selections: self.remote_selections.clone(),
968 diagnostics: self.diagnostics.clone(),
969 language: self.language.clone(),
970 non_text_state_update_count: self.non_text_state_update_count,
971 }
972 }
973
974 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
975 let this = cx.model();
976 cx.new(|cx| {
977 let mut branch = Self {
978 branch_state: Some(BufferBranchState {
979 base_buffer: this.clone(),
980 merged_operations: Default::default(),
981 }),
982 language: self.language.clone(),
983 has_conflict: self.has_conflict,
984 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
985 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
986 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
987 };
988 if let Some(language_registry) = self.language_registry() {
989 branch.set_language_registry(language_registry);
990 }
991
992 // Reparse the branch buffer so that we get syntax highlighting immediately.
993 branch.reparse(cx);
994
995 branch
996 })
997 }
998
999 pub fn preview_edits(
1000 &self,
1001 edits: Arc<[(Range<Anchor>, String)]>,
1002 cx: &App,
1003 ) -> Task<EditPreview> {
1004 let registry = self.language_registry();
1005 let language = self.language().cloned();
1006
1007 let mut branch_buffer = self.text.branch();
1008 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1009 cx.background_executor().spawn(async move {
1010 if !edits.is_empty() {
1011 branch_buffer.edit(edits.iter().cloned());
1012 let snapshot = branch_buffer.snapshot();
1013 syntax_snapshot.interpolate(&snapshot);
1014
1015 if let Some(language) = language {
1016 syntax_snapshot.reparse(&snapshot, registry, language);
1017 }
1018 }
1019 EditPreview {
1020 applied_edits_snapshot: branch_buffer.snapshot(),
1021 syntax_snapshot,
1022 }
1023 })
1024 }
1025
1026 /// Applies all of the changes in this buffer that intersect any of the
1027 /// given `ranges` to its base buffer.
1028 ///
1029 /// If `ranges` is empty, then all changes will be applied. This buffer must
1030 /// be a branch buffer to call this method.
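    ///
    /// A hedged sketch of the branch-and-merge flow (not a compiled doctest; it
    /// assumes `buffer: Entity<Buffer>` and `cx: &mut App`):
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// new header\n")], None, cx);
    ///     // Passing no ranges applies every branch edit back to the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```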
1031 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1032 let Some(base_buffer) = self.base_buffer() else {
1033 debug_panic!("not a branch buffer");
1034 return;
1035 };
1036
1037 let mut ranges = if ranges.is_empty() {
1038 &[0..usize::MAX]
1039 } else {
1040 ranges.as_slice()
1041 }
1042 .into_iter()
1043 .peekable();
1044
1045 let mut edits = Vec::new();
1046 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1047 let mut is_included = false;
1048 while let Some(range) = ranges.peek() {
1049 if range.end < edit.new.start {
1050 ranges.next().unwrap();
1051 } else {
1052 if range.start <= edit.new.end {
1053 is_included = true;
1054 }
1055 break;
1056 }
1057 }
1058
1059 if is_included {
1060 edits.push((
1061 edit.old.clone(),
1062 self.text_for_range(edit.new.clone()).collect::<String>(),
1063 ));
1064 }
1065 }
1066
1067 let operation = base_buffer.update(cx, |base_buffer, cx| {
1068 // cx.emit(BufferEvent::DiffBaseChanged);
1069 base_buffer.edit(edits, None, cx)
1070 });
1071
1072 if let Some(operation) = operation {
1073 if let Some(BufferBranchState {
1074 merged_operations, ..
1075 }) = &mut self.branch_state
1076 {
1077 merged_operations.push(operation);
1078 }
1079 }
1080 }
1081
1082 fn on_base_buffer_event(
1083 &mut self,
1084 _: Entity<Buffer>,
1085 event: &BufferEvent,
1086 cx: &mut Context<Self>,
1087 ) {
1088 let BufferEvent::Operation { operation, .. } = event else {
1089 return;
1090 };
1091 let Some(BufferBranchState {
1092 merged_operations, ..
1093 }) = &mut self.branch_state
1094 else {
1095 return;
1096 };
1097
1098 let mut operation_to_undo = None;
1099 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1100 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1101 merged_operations.remove(ix);
1102 operation_to_undo = Some(operation.timestamp);
1103 }
1104 }
1105
1106 self.apply_ops([operation.clone()], cx);
1107
1108 if let Some(timestamp) = operation_to_undo {
1109 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1110 self.undo_operations(counts, cx);
1111 }
1112 }
1113
1114 #[cfg(test)]
1115 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1116 &self.text
1117 }
1118
1119 /// Retrieve a snapshot of the buffer's raw text, without any
1120 /// language-related state like the syntax tree or diagnostics.
1121 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1122 self.text.snapshot()
1123 }
1124
1125 /// The file associated with the buffer, if any.
1126 pub fn file(&self) -> Option<&Arc<dyn File>> {
1127 self.file.as_ref()
1128 }
1129
1130 /// The version of the buffer that was last saved or reloaded from disk.
1131 pub fn saved_version(&self) -> &clock::Global {
1132 &self.saved_version
1133 }
1134
1135 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1136 pub fn saved_mtime(&self) -> Option<MTime> {
1137 self.saved_mtime
1138 }
1139
1140 /// Assign a language to the buffer.
1141 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1142 self.non_text_state_update_count += 1;
1143 self.syntax_map.lock().clear(&self.text);
1144 self.language = language;
1145 self.reparse(cx);
1146 cx.emit(BufferEvent::LanguageChanged);
1147 }
1148
1149 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1150 /// other languages if parts of the buffer are written in different languages.
1151 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1152 self.syntax_map
1153 .lock()
1154 .set_language_registry(language_registry);
1155 }
1156
1157 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1158 self.syntax_map.lock().language_registry()
1159 }
1160
1161 /// Assign the buffer a new [`Capability`].
1162 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1163 self.capability = capability;
1164 cx.emit(BufferEvent::CapabilityChanged)
1165 }
1166
1167 /// This method is called to signal that the buffer has been saved.
1168 pub fn did_save(
1169 &mut self,
1170 version: clock::Global,
1171 mtime: Option<MTime>,
1172 cx: &mut Context<Self>,
1173 ) {
1174 self.saved_version = version;
1175 self.has_unsaved_edits
1176 .set((self.saved_version().clone(), false));
1177 self.has_conflict = false;
1178 self.saved_mtime = mtime;
1179 cx.emit(BufferEvent::Saved);
1180 cx.notify();
1181 }
1182
1183 /// This method is called to signal that the buffer has been discarded.
1184 pub fn discarded(&self, cx: &mut Context<Self>) {
1185 cx.emit(BufferEvent::Discarded);
1186 cx.notify();
1187 }
1188
1189 /// Reloads the contents of the buffer from disk.
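    ///
    /// A hedged usage sketch (not a compiled doctest; it assumes an async gpui
    /// context with `buffer: Entity<Buffer>`):
    ///
    /// ```ignore
    /// let reloaded = buffer.update(cx, |buffer, cx| buffer.reload(cx))?;
    /// // Resolves to the reload transaction, or `None` if nothing changed on disk.
    /// let _transaction: Option<Transaction> = reloaded.await?;
    /// ```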
1190 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1191 let (tx, rx) = futures::channel::oneshot::channel();
1192 let prev_version = self.text.version();
1193 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
1194 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
1195 let file = this.file.as_ref()?.as_local()?;
1196 Some((file.disk_state().mtime(), file.load(cx)))
1197 })?
1198 else {
1199 return Ok(());
1200 };
1201
1202 let new_text = new_text.await?;
1203 let diff = this
1204 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
1205 .await;
1206 this.update(&mut cx, |this, cx| {
1207 if this.version() == diff.base_version {
1208 this.finalize_last_transaction();
1209 this.apply_diff(diff, cx);
1210 tx.send(this.finalize_last_transaction().cloned()).ok();
1211 this.has_conflict = false;
1212 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1213 } else {
1214 if !diff.edits.is_empty()
1215 || this
1216 .edits_since::<usize>(&diff.base_version)
1217 .next()
1218 .is_some()
1219 {
1220 this.has_conflict = true;
1221 }
1222
1223 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1224 }
1225
1226 this.reload_task.take();
1227 })
1228 }));
1229 rx
1230 }
1231
1232 /// This method is called to signal that the buffer has been reloaded.
1233 pub fn did_reload(
1234 &mut self,
1235 version: clock::Global,
1236 line_ending: LineEnding,
1237 mtime: Option<MTime>,
1238 cx: &mut Context<Self>,
1239 ) {
1240 self.saved_version = version;
1241 self.has_unsaved_edits
1242 .set((self.saved_version.clone(), false));
1243 self.text.set_line_ending(line_ending);
1244 self.saved_mtime = mtime;
1245 cx.emit(BufferEvent::Reloaded);
1246 cx.notify();
1247 }
1248
1249 /// Updates the [`File`] backing this buffer. This should be called when
1250 /// the file has changed or has been deleted.
1251 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1252 let was_dirty = self.is_dirty();
1253 let mut file_changed = false;
1254
1255 if let Some(old_file) = self.file.as_ref() {
1256 if new_file.path() != old_file.path() {
1257 file_changed = true;
1258 }
1259
1260 let old_state = old_file.disk_state();
1261 let new_state = new_file.disk_state();
1262 if old_state != new_state {
1263 file_changed = true;
1264 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1265 cx.emit(BufferEvent::ReloadNeeded)
1266 }
1267 }
1268 } else {
1269 file_changed = true;
1270 };
1271
1272 self.file = Some(new_file);
1273 if file_changed {
1274 self.non_text_state_update_count += 1;
1275 if was_dirty != self.is_dirty() {
1276 cx.emit(BufferEvent::DirtyChanged);
1277 }
1278 cx.emit(BufferEvent::FileHandleChanged);
1279 cx.notify();
1280 }
1281 }
1282
1283 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1284 Some(self.branch_state.as_ref()?.base_buffer.clone())
1285 }
1286
1287 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1288 pub fn language(&self) -> Option<&Arc<Language>> {
1289 self.language.as_ref()
1290 }
1291
1292 /// Returns the [`Language`] at the given location.
1293 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1294 let offset = position.to_offset(self);
1295 self.syntax_map
1296 .lock()
1297 .layers_for_range(offset..offset, &self.text, false)
1298 .last()
1299 .map(|info| info.language.clone())
1300 .or_else(|| self.language.clone())
1301 }
1302
1303 /// An integer version number that accounts for all updates besides
1304 /// the buffer's text itself (which is versioned via a version vector).
1305 pub fn non_text_state_update_count(&self) -> usize {
1306 self.non_text_state_update_count
1307 }
1308
1309 /// Whether the buffer is being parsed in the background.
1310 #[cfg(any(test, feature = "test-support"))]
1311 pub fn is_parsing(&self) -> bool {
1312 self.parsing_in_background
1313 }
1314
1315 /// Indicates whether the buffer contains any regions that may be
1316 /// written in a language that hasn't been loaded yet.
1317 pub fn contains_unknown_injections(&self) -> bool {
1318 self.syntax_map.lock().contains_unknown_injections()
1319 }
1320
1321 #[cfg(test)]
1322 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1323 self.sync_parse_timeout = timeout;
1324 }
1325
1326 /// Called after an edit to synchronize the buffer's main parse tree with
1327 /// the buffer's new underlying state.
1328 ///
1329 /// Locks the syntax map and interpolates the edits since the last reparse
1330 /// into the foreground syntax tree.
1331 ///
1332 /// Then takes a stable snapshot of the syntax map before unlocking it.
1333 /// The snapshot with the interpolated edits is sent to a background thread,
1334 /// where we ask Tree-sitter to perform an incremental parse.
1335 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting for the parse to complete. If it finishes within that time, we
    /// proceed synchronously.
    ///
    /// If the 1ms timeout elapses, we spawn a second task that waits for the
    /// background parse to complete, and we return immediately with the
    /// interpolated tree still in the foreground. When the background parse
    /// finishes, it calls back into the main thread and assigns the new parse
    /// state.
1344 ///
1345 /// If the buffer or grammar changed since the start of the background parse,
1346 /// initiate an additional reparse recursively. To avoid concurrent parses
1347 /// for the same buffer, we only initiate a new parse if we are not already
1348 /// parsing in the background.
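    ///
    /// Callers that need to know when parsing has settled can watch
    /// [`Buffer::parse_status`]. A hedged sketch (not a compiled doctest; it
    /// assumes an async gpui context and `async_watch::Receiver::changed`):
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| buffer.reparse(cx))?;
    /// let mut status = buffer.read_with(cx, |buffer, _| buffer.parse_status())?;
    /// while *status.borrow() != ParseStatus::Idle {
    ///     status.changed().await?;
    /// }
    /// ```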
1349 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1350 if self.parsing_in_background {
1351 return;
1352 }
1353 let language = if let Some(language) = self.language.clone() {
1354 language
1355 } else {
1356 return;
1357 };
1358
1359 let text = self.text_snapshot();
1360 let parsed_version = self.version();
1361
1362 let mut syntax_map = self.syntax_map.lock();
1363 syntax_map.interpolate(&text);
1364 let language_registry = syntax_map.language_registry();
1365 let mut syntax_snapshot = syntax_map.snapshot();
1366 drop(syntax_map);
1367
1368 let parse_task = cx.background_executor().spawn({
1369 let language = language.clone();
1370 let language_registry = language_registry.clone();
1371 async move {
1372 syntax_snapshot.reparse(&text, language_registry, language);
1373 syntax_snapshot
1374 }
1375 });
1376
1377 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1378 match cx
1379 .background_executor()
1380 .block_with_timeout(self.sync_parse_timeout, parse_task)
1381 {
1382 Ok(new_syntax_snapshot) => {
1383 self.did_finish_parsing(new_syntax_snapshot, cx);
1384 }
1385 Err(parse_task) => {
1386 self.parsing_in_background = true;
1387 cx.spawn(move |this, mut cx| async move {
1388 let new_syntax_map = parse_task.await;
1389 this.update(&mut cx, move |this, cx| {
1390 let grammar_changed =
1391 this.language.as_ref().map_or(true, |current_language| {
1392 !Arc::ptr_eq(&language, current_language)
1393 });
1394 let language_registry_changed = new_syntax_map
1395 .contains_unknown_injections()
1396 && language_registry.map_or(false, |registry| {
1397 registry.version() != new_syntax_map.language_registry_version()
1398 });
1399 let parse_again = language_registry_changed
1400 || grammar_changed
1401 || this.version.changed_since(&parsed_version);
1402 this.did_finish_parsing(new_syntax_map, cx);
1403 this.parsing_in_background = false;
1404 if parse_again {
1405 this.reparse(cx);
1406 }
1407 })
1408 .ok();
1409 })
1410 .detach();
1411 }
1412 }
1413 }
1414
1415 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1416 self.non_text_state_update_count += 1;
1417 self.syntax_map.lock().did_parse(syntax_snapshot);
1418 self.request_autoindent(cx);
1419 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1420 cx.emit(BufferEvent::Reparsed);
1421 cx.notify();
1422 }
1423
1424 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1425 self.parse_status.1.clone()
1426 }
1427
1428 /// Assign to the buffer a set of diagnostics created by a given language server.
1429 pub fn update_diagnostics(
1430 &mut self,
1431 server_id: LanguageServerId,
1432 diagnostics: DiagnosticSet,
1433 cx: &mut Context<Self>,
1434 ) {
1435 let lamport_timestamp = self.text.lamport_clock.tick();
1436 let op = Operation::UpdateDiagnostics {
1437 server_id,
1438 diagnostics: diagnostics.iter().cloned().collect(),
1439 lamport_timestamp,
1440 };
1441 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1442 self.send_operation(op, true, cx);
1443 }
1444
1445 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1446 if let Some(indent_sizes) = self.compute_autoindents() {
1447 let indent_sizes = cx.background_executor().spawn(indent_sizes);
1448 match cx
1449 .background_executor()
1450 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1451 {
1452 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1453 Err(indent_sizes) => {
1454 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1455 let indent_sizes = indent_sizes.await;
1456 this.update(&mut cx, |this, cx| {
1457 this.apply_autoindents(indent_sizes, cx);
1458 })
1459 .ok();
1460 }));
1461 }
1462 }
1463 } else {
1464 self.autoindent_requests.clear();
1465 }
1466 }
1467
1468 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
1469 let max_rows_between_yields = 100;
1470 let snapshot = self.snapshot();
1471 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1472 return None;
1473 }
1474
1475 let autoindent_requests = self.autoindent_requests.clone();
1476 Some(async move {
1477 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1478 for request in autoindent_requests {
1479 // Resolve each edited range to its row in the current buffer and in the
1480 // buffer before this batch of edits.
1481 let mut row_ranges = Vec::new();
1482 let mut old_to_new_rows = BTreeMap::new();
1483 let mut language_indent_sizes_by_new_row = Vec::new();
1484 for entry in &request.entries {
1485 let position = entry.range.start;
1486 let new_row = position.to_point(&snapshot).row;
1487 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1488 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1489
1490 if !entry.first_line_is_new {
1491 let old_row = position.to_point(&request.before_edit).row;
1492 old_to_new_rows.insert(old_row, new_row);
1493 }
1494 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1495 }
1496
1497 // Build a map containing the suggested indentation for each of the edited lines
1498 // with respect to the state of the buffer before these edits. This map is keyed
1499 // by the rows for these lines in the current state of the buffer.
1500 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1501 let old_edited_ranges =
1502 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1503 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1504 let mut language_indent_size = IndentSize::default();
1505 for old_edited_range in old_edited_ranges {
1506 let suggestions = request
1507 .before_edit
1508 .suggest_autoindents(old_edited_range.clone())
1509 .into_iter()
1510 .flatten();
1511 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1512 if let Some(suggestion) = suggestion {
1513 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1514
1515 // Find the indent size based on the language for this row.
1516 while let Some((row, size)) = language_indent_sizes.peek() {
1517 if *row > new_row {
1518 break;
1519 }
1520 language_indent_size = *size;
1521 language_indent_sizes.next();
1522 }
1523
1524 let suggested_indent = old_to_new_rows
1525 .get(&suggestion.basis_row)
1526 .and_then(|from_row| {
1527 Some(old_suggestions.get(from_row).copied()?.0)
1528 })
1529 .unwrap_or_else(|| {
1530 request
1531 .before_edit
1532 .indent_size_for_line(suggestion.basis_row)
1533 })
1534 .with_delta(suggestion.delta, language_indent_size);
1535 old_suggestions
1536 .insert(new_row, (suggested_indent, suggestion.within_error));
1537 }
1538 }
1539 yield_now().await;
1540 }
1541
1542 // Compute new suggestions for each line, but only include them in the result
1543 // if they differ from the old suggestion for that line.
1544 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1545 let mut language_indent_size = IndentSize::default();
1546 for (row_range, original_indent_column) in row_ranges {
1547 let new_edited_row_range = if request.is_block_mode {
1548 row_range.start..row_range.start + 1
1549 } else {
1550 row_range.clone()
1551 };
1552
1553 let suggestions = snapshot
1554 .suggest_autoindents(new_edited_row_range.clone())
1555 .into_iter()
1556 .flatten();
1557 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1558 if let Some(suggestion) = suggestion {
1559 // Find the indent size based on the language for this row.
1560 while let Some((row, size)) = language_indent_sizes.peek() {
1561 if *row > new_row {
1562 break;
1563 }
1564 language_indent_size = *size;
1565 language_indent_sizes.next();
1566 }
1567
1568 let suggested_indent = indent_sizes
1569 .get(&suggestion.basis_row)
1570 .copied()
1571 .map(|e| e.0)
1572 .unwrap_or_else(|| {
1573 snapshot.indent_size_for_line(suggestion.basis_row)
1574 })
1575 .with_delta(suggestion.delta, language_indent_size);
1576
1577 if old_suggestions.get(&new_row).map_or(
1578 true,
1579 |(old_indentation, was_within_error)| {
1580 suggested_indent != *old_indentation
1581 && (!suggestion.within_error || *was_within_error)
1582 },
1583 ) {
1584 indent_sizes.insert(
1585 new_row,
1586 (suggested_indent, request.ignore_empty_lines),
1587 );
1588 }
1589 }
1590 }
1591
1592 if let (true, Some(original_indent_column)) =
1593 (request.is_block_mode, original_indent_column)
1594 {
1595 let new_indent =
1596 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1597 *indent
1598 } else {
1599 snapshot.indent_size_for_line(row_range.start)
1600 };
1601 let delta = new_indent.len as i64 - original_indent_column as i64;
1602 if delta != 0 {
1603 for row in row_range.skip(1) {
1604 indent_sizes.entry(row).or_insert_with(|| {
1605 let mut size = snapshot.indent_size_for_line(row);
1606 if size.kind == new_indent.kind {
1607 match delta.cmp(&0) {
1608 Ordering::Greater => size.len += delta as u32,
1609 Ordering::Less => {
1610 size.len = size.len.saturating_sub(-delta as u32)
1611 }
1612 Ordering::Equal => {}
1613 }
1614 }
1615 (size, request.ignore_empty_lines)
1616 });
1617 }
1618 }
1619 }
1620
1621 yield_now().await;
1622 }
1623 }
1624
1625 indent_sizes
1626 .into_iter()
1627 .filter_map(|(row, (indent, ignore_empty_lines))| {
1628 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1629 None
1630 } else {
1631 Some((row, indent))
1632 }
1633 })
1634 .collect()
1635 })
1636 }
1637
1638 fn apply_autoindents(
1639 &mut self,
1640 indent_sizes: BTreeMap<u32, IndentSize>,
1641 cx: &mut Context<Self>,
1642 ) {
1643 self.autoindent_requests.clear();
1644
1645 let edits: Vec<_> = indent_sizes
1646 .into_iter()
1647 .filter_map(|(row, indent_size)| {
1648 let current_size = indent_size_for_line(self, row);
1649 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1650 })
1651 .collect();
1652
1653 let preserve_preview = self.preserve_preview();
1654 self.edit(edits, None, cx);
1655 if preserve_preview {
1656 self.refresh_preview();
1657 }
1658 }
1659
1660 /// Create a minimal edit that will cause the given row to be indented
1661 /// with the given size. After applying this edit, the length of the line
1662 /// will always be at least `new_size.len`.
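    ///
    /// A hedged example of the intended arithmetic (not a compiled doctest):
    ///
    /// ```ignore
    /// // Grow a 2-space indent to 4 spaces: insert two spaces at the start of row 3.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize { len: 2, kind: IndentKind::Space },
    ///     IndentSize { len: 4, kind: IndentKind::Space },
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```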
1663 pub fn edit_for_indent_size_adjustment(
1664 row: u32,
1665 current_size: IndentSize,
1666 new_size: IndentSize,
1667 ) -> Option<(Range<Point>, String)> {
1668 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1670 Ordering::Greater => {
1671 let point = Point::new(row, 0);
1672 Some((
1673 point..point,
1674 iter::repeat(new_size.char())
1675 .take((new_size.len - current_size.len) as usize)
1676 .collect::<String>(),
1677 ))
1678 }
1679
1680 Ordering::Less => Some((
1681 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1682 String::new(),
1683 )),
1684
1685 Ordering::Equal => None,
1686 }
1687 } else {
1688 Some((
1689 Point::new(row, 0)..Point::new(row, current_size.len),
1690 iter::repeat(new_size.char())
1691 .take(new_size.len as usize)
1692 .collect::<String>(),
1693 ))
1694 }
1695 }
1696
1697 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1698 /// and the given new text.
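    ///
    /// A hedged sketch of the compute-then-apply flow (not a compiled doctest; it
    /// assumes an async gpui context with `buffer: Entity<Buffer>`):
    ///
    /// ```ignore
    /// let diff = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx))?.await;
    /// buffer.update(cx, |buffer, cx| {
    ///     // Hunks that conflict with edits made since `diff` was computed are dropped.
    ///     buffer.apply_diff(diff, cx)
    /// })?;
    /// ```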
1699 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1700 let old_text = self.as_rope().clone();
1701 let base_version = self.version();
1702 cx.background_executor()
1703 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1704 let old_text = old_text.to_string();
1705 let line_ending = LineEnding::detect(&new_text);
1706 LineEnding::normalize(&mut new_text);
1707
1708 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1709 let empty: Arc<str> = Arc::default();
1710
1711 let mut edits = Vec::new();
1712 let mut old_offset = 0;
1713 let mut new_offset = 0;
1714 let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
1715 for change in diff.iter_all_changes().map(Some).chain([None]) {
1716 if let Some(change) = &change {
1717 let len = change.value().len();
1718 match change.tag() {
1719 ChangeTag::Equal => {
1720 old_offset += len;
1721 new_offset += len;
1722 }
1723 ChangeTag::Delete => {
1724 let old_end_offset = old_offset + len;
1725 if let Some((last_old_range, _)) = &mut last_edit {
1726 last_old_range.end = old_end_offset;
1727 } else {
1728 last_edit =
1729 Some((old_offset..old_end_offset, new_offset..new_offset));
1730 }
1731 old_offset = old_end_offset;
1732 }
1733 ChangeTag::Insert => {
1734 let new_end_offset = new_offset + len;
1735 if let Some((_, last_new_range)) = &mut last_edit {
1736 last_new_range.end = new_end_offset;
1737 } else {
1738 last_edit =
1739 Some((old_offset..old_offset, new_offset..new_end_offset));
1740 }
1741 new_offset = new_end_offset;
1742 }
1743 }
1744 }
1745
1746 if let Some((old_range, new_range)) = &last_edit {
1747 if old_offset > old_range.end
1748 || new_offset > new_range.end
1749 || change.is_none()
1750 {
1751 let text = if new_range.is_empty() {
1752 empty.clone()
1753 } else {
1754 new_text[new_range.clone()].into()
1755 };
1756 edits.push((old_range.clone(), text));
1757 last_edit.take();
1758 }
1759 }
1760 }
1761
1762 Diff {
1763 base_version,
1764 line_ending,
1765 edits,
1766 }
1767 })
1768 }
1769
1770 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1772 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1773 let old_text = self.as_rope().clone();
1774 let line_ending = self.line_ending();
1775 let base_version = self.version();
1776 cx.background_executor().spawn(async move {
1777 let ranges = trailing_whitespace_ranges(&old_text);
1778 let empty = Arc::<str>::from("");
1779 Diff {
1780 base_version,
1781 line_ending,
1782 edits: ranges
1783 .into_iter()
1784 .map(|range| (range, empty.clone()))
1785 .collect(),
1786 }
1787 })
1788 }
1789
    /// Ensures that the buffer ends with a single newline character, and that
    /// the newline is not preceded by any other trailing whitespace.
1792 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1793 let len = self.len();
1794 let mut offset = len;
1795 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1796 let non_whitespace_len = chunk
1797 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1798 .len();
1799 offset -= chunk.len();
1800 offset += non_whitespace_len;
1801 if non_whitespace_len != 0 {
1802 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1803 return;
1804 }
1805 break;
1806 }
1807 }
1808 self.edit([(offset..len, "\n")], None, cx);
1809 }
1810
1811 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, the diff is adjusted to account for those changes, and any
    /// parts of the diff that conflict with those changes are discarded.
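    ///
    /// An illustrative sketch (not compiled here); `diff` stands for a value
    /// produced earlier by [`Buffer::diff`] or [`Buffer::remove_trailing_whitespace`]:
    ///
    /// ```ignore
    /// let transaction_id = buffer.apply_diff(diff, cx);
    /// // Hunks that conflict with edits made after `diff.base_version` are dropped.
    /// ```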
1814 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1815 // Check for any edits to the buffer that have occurred since this diff
1816 // was computed.
1817 let snapshot = self.snapshot();
1818 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1819 let mut delta = 0;
1820 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1821 while let Some(edit_since) = edits_since.peek() {
1822 // If the edit occurs after a diff hunk, then it does not
1823 // affect that hunk.
1824 if edit_since.old.start > range.end {
1825 break;
1826 }
1827 // If the edit precedes the diff hunk, then adjust the hunk
1828 // to reflect the edit.
1829 else if edit_since.old.end < range.start {
1830 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1831 edits_since.next();
1832 }
1833 // If the edit intersects a diff hunk, then discard that hunk.
1834 else {
1835 return None;
1836 }
1837 }
1838
1839 let start = (range.start as i64 + delta) as usize;
1840 let end = (range.end as i64 + delta) as usize;
1841 Some((start..end, new_text))
1842 });
1843
1844 self.start_transaction();
1845 self.text.set_line_ending(diff.line_ending);
1846 self.edit(adjusted_edits, None, cx);
1847 self.end_transaction(cx)
1848 }
1849
1850 fn has_unsaved_edits(&self) -> bool {
1851 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1852
1853 if last_version == self.version {
1854 self.has_unsaved_edits
1855 .set((last_version, has_unsaved_edits));
1856 return has_unsaved_edits;
1857 }
1858
1859 let has_edits = self.has_edits_since(&self.saved_version);
1860 self.has_unsaved_edits
1861 .set((self.version.clone(), has_edits));
1862 has_edits
1863 }
1864
1865 /// Checks if the buffer has unsaved changes.
1866 pub fn is_dirty(&self) -> bool {
1867 self.capability != Capability::ReadOnly
1868 && (self.has_conflict
1869 || self.file.as_ref().map_or(false, |file| {
1870 matches!(file.disk_state(), DiskState::New | DiskState::Deleted)
1871 })
1872 || self.has_unsaved_edits())
1873 }
1874
1875 /// Checks if the buffer and its file have both changed since the buffer
1876 /// was last saved or reloaded.
1877 pub fn has_conflict(&self) -> bool {
1878 if self.has_conflict {
1879 return true;
1880 }
1881 let Some(file) = self.file.as_ref() else {
1882 return false;
1883 };
1884 match file.disk_state() {
1885 DiskState::New => false,
1886 DiskState::Present { mtime } => match self.saved_mtime {
1887 Some(saved_mtime) => {
1888 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1889 }
1890 None => true,
1891 },
1892 DiskState::Deleted => true,
1893 }
1894 }
1895
1896 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1897 pub fn subscribe(&mut self) -> Subscription {
1898 self.text.subscribe()
1899 }
1900
1901 /// Starts a transaction, if one is not already in-progress. When undoing or
1902 /// redoing edits, all of the edits performed within a transaction are undone
1903 /// or redone together.
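    ///
    /// An illustrative sketch (not compiled here): grouping two edits so that a
    /// single undo reverts both.
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "a")], None, cx);
    /// buffer.edit([(1..1, "b")], None, cx);
    /// buffer.end_transaction(cx);
    /// ```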
1904 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1905 self.start_transaction_at(Instant::now())
1906 }
1907
1908 /// Starts a transaction, providing the current time. Subsequent transactions
1909 /// that occur within a short period of time will be grouped together. This
1910 /// is controlled by the buffer's undo grouping duration.
1911 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1912 self.transaction_depth += 1;
1913 if self.was_dirty_before_starting_transaction.is_none() {
1914 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1915 }
1916 self.text.start_transaction_at(now)
1917 }
1918
1919 /// Terminates the current transaction, if this is the outermost transaction.
1920 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
1921 self.end_transaction_at(Instant::now(), cx)
1922 }
1923
1924 /// Terminates the current transaction, providing the current time. Subsequent transactions
1925 /// that occur within a short period of time will be grouped together. This
1926 /// is controlled by the buffer's undo grouping duration.
1927 pub fn end_transaction_at(
1928 &mut self,
1929 now: Instant,
1930 cx: &mut Context<Self>,
1931 ) -> Option<TransactionId> {
1932 assert!(self.transaction_depth > 0);
1933 self.transaction_depth -= 1;
1934 let was_dirty = if self.transaction_depth == 0 {
1935 self.was_dirty_before_starting_transaction.take().unwrap()
1936 } else {
1937 false
1938 };
1939 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1940 self.did_edit(&start_version, was_dirty, cx);
1941 Some(transaction_id)
1942 } else {
1943 None
1944 }
1945 }
1946
1947 /// Manually add a transaction to the buffer's undo history.
1948 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1949 self.text.push_transaction(transaction, now);
1950 }
1951
1952 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
1954 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1955 self.text.finalize_last_transaction()
1956 }
1957
1958 /// Manually group all changes since a given transaction.
1959 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1960 self.text.group_until_transaction(transaction_id);
1961 }
1962
    /// Manually remove a transaction from the buffer's undo history.
1964 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1965 self.text.forget_transaction(transaction_id);
1966 }
1967
1968 /// Manually merge two adjacent transactions in the buffer's undo history.
1969 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1970 self.text.merge_transactions(transaction, destination);
1971 }
1972
1973 /// Waits for the buffer to receive operations with the given timestamps.
1974 pub fn wait_for_edits(
1975 &mut self,
1976 edit_ids: impl IntoIterator<Item = clock::Lamport>,
1977 ) -> impl Future<Output = Result<()>> {
1978 self.text.wait_for_edits(edit_ids)
1979 }
1980
1981 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
1982 pub fn wait_for_anchors(
1983 &mut self,
1984 anchors: impl IntoIterator<Item = Anchor>,
1985 ) -> impl 'static + Future<Output = Result<()>> {
1986 self.text.wait_for_anchors(anchors)
1987 }
1988
1989 /// Waits for the buffer to receive operations up to the given version.
1990 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1991 self.text.wait_for_version(version)
1992 }
1993
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`],
    /// or [`Buffer::wait_for_anchors`] to resolve with an error.
1996 pub fn give_up_waiting(&mut self) {
1997 self.text.give_up_waiting();
1998 }
1999
2000 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2001 pub fn set_active_selections(
2002 &mut self,
2003 selections: Arc<[Selection<Anchor>]>,
2004 line_mode: bool,
2005 cursor_shape: CursorShape,
2006 cx: &mut Context<Self>,
2007 ) {
2008 let lamport_timestamp = self.text.lamport_clock.tick();
2009 self.remote_selections.insert(
2010 self.text.replica_id(),
2011 SelectionSet {
2012 selections: selections.clone(),
2013 lamport_timestamp,
2014 line_mode,
2015 cursor_shape,
2016 },
2017 );
2018 self.send_operation(
2019 Operation::UpdateSelections {
2020 selections,
2021 line_mode,
2022 lamport_timestamp,
2023 cursor_shape,
2024 },
2025 true,
2026 cx,
2027 );
2028 self.non_text_state_update_count += 1;
2029 cx.notify();
2030 }
2031
2032 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2033 /// this replica.
2034 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2035 if self
2036 .remote_selections
2037 .get(&self.text.replica_id())
2038 .map_or(true, |set| !set.selections.is_empty())
2039 {
2040 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2041 }
2042 }
2043
2044 /// Replaces the buffer's entire text.
2045 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2046 where
2047 T: Into<Arc<str>>,
2048 {
2049 self.autoindent_requests.clear();
2050 self.edit([(0..self.len(), text)], None, cx)
2051 }
2052
2053 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2054 /// delete, and a string of text to insert at that location.
2055 ///
2056 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2057 /// request for the edited ranges, which will be processed when the buffer finishes
2058 /// parsing.
2059 ///
2060 /// Parsing takes place at the end of a transaction, and may compute synchronously
2061 /// or asynchronously, depending on the changes.
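    ///
    /// An illustrative sketch (not compiled here): replacing the first five columns
    /// of row zero and letting the language re-indent each edited line.
    ///
    /// ```ignore
    /// buffer.edit(
    ///     [(Point::new(0, 0)..Point::new(0, 5), "if ok {\n    done()\n}")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```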
2062 pub fn edit<I, S, T>(
2063 &mut self,
2064 edits_iter: I,
2065 autoindent_mode: Option<AutoindentMode>,
2066 cx: &mut Context<Self>,
2067 ) -> Option<clock::Lamport>
2068 where
2069 I: IntoIterator<Item = (Range<S>, T)>,
2070 S: ToOffset,
2071 T: Into<Arc<str>>,
2072 {
2073 // Skip invalid edits and coalesce contiguous ones.
2074 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2075 for (range, new_text) in edits_iter {
2076 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2077 if range.start > range.end {
2078 mem::swap(&mut range.start, &mut range.end);
2079 }
2080 let new_text = new_text.into();
2081 if !new_text.is_empty() || !range.is_empty() {
2082 if let Some((prev_range, prev_text)) = edits.last_mut() {
2083 if prev_range.end >= range.start {
2084 prev_range.end = cmp::max(prev_range.end, range.end);
2085 *prev_text = format!("{prev_text}{new_text}").into();
2086 } else {
2087 edits.push((range, new_text));
2088 }
2089 } else {
2090 edits.push((range, new_text));
2091 }
2092 }
2093 }
2094 if edits.is_empty() {
2095 return None;
2096 }
2097
2098 self.start_transaction();
2099 self.pending_autoindent.take();
2100 let autoindent_request = autoindent_mode
2101 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2102
2103 let edit_operation = self.text.edit(edits.iter().cloned());
2104 let edit_id = edit_operation.timestamp();
2105
2106 if let Some((before_edit, mode)) = autoindent_request {
2107 let mut delta = 0isize;
2108 let entries = edits
2109 .into_iter()
2110 .enumerate()
2111 .zip(&edit_operation.as_edit().unwrap().new_text)
2112 .map(|((ix, (range, _)), new_text)| {
2113 let new_text_length = new_text.len();
2114 let old_start = range.start.to_point(&before_edit);
2115 let new_start = (delta + range.start as isize) as usize;
2116 let range_len = range.end - range.start;
2117 delta += new_text_length as isize - range_len as isize;
2118
2119 // Decide what range of the insertion to auto-indent, and whether
2120 // the first line of the insertion should be considered a newly-inserted line
2121 // or an edit to an existing line.
2122 let mut range_of_insertion_to_indent = 0..new_text_length;
2123 let mut first_line_is_new = true;
2124
2125 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2126 let old_line_end = before_edit.line_len(old_start.row);
2127
2128 if old_start.column > old_line_start {
2129 first_line_is_new = false;
2130 }
2131
2132 if !new_text.contains('\n')
2133 && (old_start.column + (range_len as u32) < old_line_end
2134 || old_line_end == old_line_start)
2135 {
2136 first_line_is_new = false;
2137 }
2138
2139 // When inserting text starting with a newline, avoid auto-indenting the
2140 // previous line.
2141 if new_text.starts_with('\n') {
2142 range_of_insertion_to_indent.start += 1;
2143 first_line_is_new = true;
2144 }
2145
2146 let mut original_indent_column = None;
2147 if let AutoindentMode::Block {
2148 original_indent_columns,
2149 } = &mode
2150 {
2151 original_indent_column =
2152 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
2153 indent_size_for_text(
2154 new_text[range_of_insertion_to_indent.clone()].chars(),
2155 )
2156 .len
2157 }));
2158
2159 // Avoid auto-indenting the line after the edit.
2160 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2161 range_of_insertion_to_indent.end -= 1;
2162 }
2163 }
2164
2165 AutoindentRequestEntry {
2166 first_line_is_new,
2167 original_indent_column,
2168 indent_size: before_edit.language_indent_size_at(range.start, cx),
2169 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2170 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2171 }
2172 })
2173 .collect();
2174
2175 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2176 before_edit,
2177 entries,
2178 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2179 ignore_empty_lines: false,
2180 }));
2181 }
2182
2183 self.end_transaction(cx);
2184 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2185 Some(edit_id)
2186 }
2187
2188 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2189 if self.edits_since::<usize>(old_version).next().is_none() {
2190 return;
2191 }
2192
2193 self.reparse(cx);
2194
2195 cx.emit(BufferEvent::Edited);
2196 if was_dirty != self.is_dirty() {
2197 cx.emit(BufferEvent::DirtyChanged);
2198 }
2199 cx.notify();
2200 }
2201
2202 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2203 where
2204 I: IntoIterator<Item = Range<T>>,
2205 T: ToOffset + Copy,
2206 {
2207 let before_edit = self.snapshot();
2208 let entries = ranges
2209 .into_iter()
2210 .map(|range| AutoindentRequestEntry {
2211 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2212 first_line_is_new: true,
2213 indent_size: before_edit.language_indent_size_at(range.start, cx),
2214 original_indent_column: None,
2215 })
2216 .collect();
2217 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2218 before_edit,
2219 entries,
2220 is_block_mode: false,
2221 ignore_empty_lines: true,
2222 }));
2223 self.request_autoindent(cx);
2224 }
2225
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// Empty lines above and below the new line can also be requested via `space_above` and `space_below`.
2228 pub fn insert_empty_line(
2229 &mut self,
2230 position: impl ToPoint,
2231 space_above: bool,
2232 space_below: bool,
2233 cx: &mut Context<Self>,
2234 ) -> Point {
2235 let mut position = position.to_point(self);
2236
2237 self.start_transaction();
2238
2239 self.edit(
2240 [(position..position, "\n")],
2241 Some(AutoindentMode::EachLine),
2242 cx,
2243 );
2244
2245 if position.column > 0 {
2246 position += Point::new(1, 0);
2247 }
2248
2249 if !self.is_line_blank(position.row) {
2250 self.edit(
2251 [(position..position, "\n")],
2252 Some(AutoindentMode::EachLine),
2253 cx,
2254 );
2255 }
2256
2257 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2258 self.edit(
2259 [(position..position, "\n")],
2260 Some(AutoindentMode::EachLine),
2261 cx,
2262 );
2263 position.row += 1;
2264 }
2265
2266 if space_below
2267 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2268 {
2269 self.edit(
2270 [(position..position, "\n")],
2271 Some(AutoindentMode::EachLine),
2272 cx,
2273 );
2274 }
2275
2276 self.end_transaction(cx);
2277
2278 position
2279 }
2280
2281 /// Applies the given remote operations to the buffer.
2282 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2283 self.pending_autoindent.take();
2284 let was_dirty = self.is_dirty();
2285 let old_version = self.version.clone();
2286 let mut deferred_ops = Vec::new();
2287 let buffer_ops = ops
2288 .into_iter()
2289 .filter_map(|op| match op {
2290 Operation::Buffer(op) => Some(op),
2291 _ => {
2292 if self.can_apply_op(&op) {
2293 self.apply_op(op, cx);
2294 } else {
2295 deferred_ops.push(op);
2296 }
2297 None
2298 }
2299 })
2300 .collect::<Vec<_>>();
2301 for operation in buffer_ops.iter() {
2302 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2303 }
2304 self.text.apply_ops(buffer_ops);
2305 self.deferred_ops.insert(deferred_ops);
2306 self.flush_deferred_ops(cx);
2307 self.did_edit(&old_version, was_dirty, cx);
2308 // Notify independently of whether the buffer was edited as the operations could include a
2309 // selection update.
2310 cx.notify();
2311 }
2312
2313 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2314 let mut deferred_ops = Vec::new();
2315 for op in self.deferred_ops.drain().iter().cloned() {
2316 if self.can_apply_op(&op) {
2317 self.apply_op(op, cx);
2318 } else {
2319 deferred_ops.push(op);
2320 }
2321 }
2322 self.deferred_ops.insert(deferred_ops);
2323 }
2324
2325 pub fn has_deferred_ops(&self) -> bool {
2326 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2327 }
2328
2329 fn can_apply_op(&self, operation: &Operation) -> bool {
2330 match operation {
2331 Operation::Buffer(_) => {
2332 unreachable!("buffer operations should never be applied at this layer")
2333 }
2334 Operation::UpdateDiagnostics {
2335 diagnostics: diagnostic_set,
2336 ..
2337 } => diagnostic_set.iter().all(|diagnostic| {
2338 self.text.can_resolve(&diagnostic.range.start)
2339 && self.text.can_resolve(&diagnostic.range.end)
2340 }),
2341 Operation::UpdateSelections { selections, .. } => selections
2342 .iter()
2343 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2344 Operation::UpdateCompletionTriggers { .. } => true,
2345 }
2346 }
2347
2348 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2349 match operation {
2350 Operation::Buffer(_) => {
2351 unreachable!("buffer operations should never be applied at this layer")
2352 }
2353 Operation::UpdateDiagnostics {
2354 server_id,
2355 diagnostics: diagnostic_set,
2356 lamport_timestamp,
2357 } => {
2358 let snapshot = self.snapshot();
2359 self.apply_diagnostic_update(
2360 server_id,
2361 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2362 lamport_timestamp,
2363 cx,
2364 );
2365 }
2366 Operation::UpdateSelections {
2367 selections,
2368 lamport_timestamp,
2369 line_mode,
2370 cursor_shape,
2371 } => {
2372 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2373 if set.lamport_timestamp > lamport_timestamp {
2374 return;
2375 }
2376 }
2377
2378 self.remote_selections.insert(
2379 lamport_timestamp.replica_id,
2380 SelectionSet {
2381 selections,
2382 lamport_timestamp,
2383 line_mode,
2384 cursor_shape,
2385 },
2386 );
2387 self.text.lamport_clock.observe(lamport_timestamp);
2388 self.non_text_state_update_count += 1;
2389 }
2390 Operation::UpdateCompletionTriggers {
2391 triggers,
2392 lamport_timestamp,
2393 server_id,
2394 } => {
2395 if triggers.is_empty() {
2396 self.completion_triggers_per_language_server
2397 .remove(&server_id);
2398 self.completion_triggers = self
2399 .completion_triggers_per_language_server
2400 .values()
2401 .flat_map(|triggers| triggers.into_iter().cloned())
2402 .collect();
2403 } else {
2404 self.completion_triggers_per_language_server
2405 .insert(server_id, triggers.iter().cloned().collect());
2406 self.completion_triggers.extend(triggers);
2407 }
2408 self.text.lamport_clock.observe(lamport_timestamp);
2409 }
2410 }
2411 }
2412
2413 fn apply_diagnostic_update(
2414 &mut self,
2415 server_id: LanguageServerId,
2416 diagnostics: DiagnosticSet,
2417 lamport_timestamp: clock::Lamport,
2418 cx: &mut Context<Self>,
2419 ) {
2420 if lamport_timestamp > self.diagnostics_timestamp {
2421 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2422 if diagnostics.is_empty() {
2423 if let Ok(ix) = ix {
2424 self.diagnostics.remove(ix);
2425 }
2426 } else {
2427 match ix {
2428 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2429 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2430 };
2431 }
2432 self.diagnostics_timestamp = lamport_timestamp;
2433 self.non_text_state_update_count += 1;
2434 self.text.lamport_clock.observe(lamport_timestamp);
2435 cx.notify();
2436 cx.emit(BufferEvent::DiagnosticsUpdated);
2437 }
2438 }
2439
2440 fn send_operation(&self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2441 cx.emit(BufferEvent::Operation {
2442 operation,
2443 is_local,
2444 });
2445 }
2446
2447 /// Removes the selections for a given peer.
2448 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2449 self.remote_selections.remove(&replica_id);
2450 cx.notify();
2451 }
2452
2453 /// Undoes the most recent transaction.
2454 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2455 let was_dirty = self.is_dirty();
2456 let old_version = self.version.clone();
2457
2458 if let Some((transaction_id, operation)) = self.text.undo() {
2459 self.send_operation(Operation::Buffer(operation), true, cx);
2460 self.did_edit(&old_version, was_dirty, cx);
2461 Some(transaction_id)
2462 } else {
2463 None
2464 }
2465 }
2466
2467 /// Manually undoes a specific transaction in the buffer's undo history.
2468 pub fn undo_transaction(
2469 &mut self,
2470 transaction_id: TransactionId,
2471 cx: &mut Context<Self>,
2472 ) -> bool {
2473 let was_dirty = self.is_dirty();
2474 let old_version = self.version.clone();
2475 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2476 self.send_operation(Operation::Buffer(operation), true, cx);
2477 self.did_edit(&old_version, was_dirty, cx);
2478 true
2479 } else {
2480 false
2481 }
2482 }
2483
2484 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2485 pub fn undo_to_transaction(
2486 &mut self,
2487 transaction_id: TransactionId,
2488 cx: &mut Context<Self>,
2489 ) -> bool {
2490 let was_dirty = self.is_dirty();
2491 let old_version = self.version.clone();
2492
2493 let operations = self.text.undo_to_transaction(transaction_id);
2494 let undone = !operations.is_empty();
2495 for operation in operations {
2496 self.send_operation(Operation::Buffer(operation), true, cx);
2497 }
2498 if undone {
2499 self.did_edit(&old_version, was_dirty, cx)
2500 }
2501 undone
2502 }
2503
2504 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2505 let was_dirty = self.is_dirty();
2506 let operation = self.text.undo_operations(counts);
2507 let old_version = self.version.clone();
2508 self.send_operation(Operation::Buffer(operation), true, cx);
2509 self.did_edit(&old_version, was_dirty, cx);
2510 }
2511
    /// Redoes the most recently undone transaction.
2513 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2514 let was_dirty = self.is_dirty();
2515 let old_version = self.version.clone();
2516
2517 if let Some((transaction_id, operation)) = self.text.redo() {
2518 self.send_operation(Operation::Buffer(operation), true, cx);
2519 self.did_edit(&old_version, was_dirty, cx);
2520 Some(transaction_id)
2521 } else {
2522 None
2523 }
2524 }
2525
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2527 pub fn redo_to_transaction(
2528 &mut self,
2529 transaction_id: TransactionId,
2530 cx: &mut Context<Self>,
2531 ) -> bool {
2532 let was_dirty = self.is_dirty();
2533 let old_version = self.version.clone();
2534
2535 let operations = self.text.redo_to_transaction(transaction_id);
2536 let redone = !operations.is_empty();
2537 for operation in operations {
2538 self.send_operation(Operation::Buffer(operation), true, cx);
2539 }
2540 if redone {
2541 self.did_edit(&old_version, was_dirty, cx)
2542 }
2543 redone
2544 }
2545
    /// Overrides the current completion triggers with the user-provided completion triggers.
2547 pub fn set_completion_triggers(
2548 &mut self,
2549 server_id: LanguageServerId,
2550 triggers: BTreeSet<String>,
2551 cx: &mut Context<Self>,
2552 ) {
2553 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2554 if triggers.is_empty() {
2555 self.completion_triggers_per_language_server
2556 .remove(&server_id);
2557 self.completion_triggers = self
2558 .completion_triggers_per_language_server
2559 .values()
2560 .flat_map(|triggers| triggers.into_iter().cloned())
2561 .collect();
2562 } else {
2563 self.completion_triggers_per_language_server
2564 .insert(server_id, triggers.clone());
2565 self.completion_triggers.extend(triggers.iter().cloned());
2566 }
2567 self.send_operation(
2568 Operation::UpdateCompletionTriggers {
2569 triggers: triggers.iter().cloned().collect(),
2570 lamport_timestamp: self.completion_triggers_timestamp,
2571 server_id,
2572 },
2573 true,
2574 cx,
2575 );
2576 cx.notify();
2577 }
2578
2579 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2581 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2582 &self.completion_triggers
2583 }
2584
2585 /// Call this directly after performing edits to prevent the preview tab
2586 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2587 /// to return false until there are additional edits.
2588 pub fn refresh_preview(&mut self) {
2589 self.preview_version = self.version.clone();
2590 }
2591
2592 /// Whether we should preserve the preview status of a tab containing this buffer.
2593 pub fn preserve_preview(&self) -> bool {
2594 !self.has_edits_since(&self.preview_version)
2595 }
2596}
2597
2598#[doc(hidden)]
2599#[cfg(any(test, feature = "test-support"))]
2600impl Buffer {
2601 pub fn edit_via_marked_text(
2602 &mut self,
2603 marked_string: &str,
2604 autoindent_mode: Option<AutoindentMode>,
2605 cx: &mut Context<Self>,
2606 ) {
2607 let edits = self.edits_for_marked_text(marked_string);
2608 self.edit(edits, autoindent_mode, cx);
2609 }
2610
2611 pub fn set_group_interval(&mut self, group_interval: Duration) {
2612 self.text.set_group_interval(group_interval);
2613 }
2614
2615 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2616 where
2617 T: rand::Rng,
2618 {
2619 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2620 let mut last_end = None;
2621 for _ in 0..old_range_count {
2622 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2623 break;
2624 }
2625
2626 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2627 let mut range = self.random_byte_range(new_start, rng);
2628 if rng.gen_bool(0.2) {
2629 mem::swap(&mut range.start, &mut range.end);
2630 }
2631 last_end = Some(range.end);
2632
2633 let new_text_len = rng.gen_range(0..10);
2634 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2635 new_text = new_text.to_uppercase();
2636
2637 edits.push((range, new_text));
2638 }
2639 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2640 self.edit(edits, None, cx);
2641 }
2642
2643 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2644 let was_dirty = self.is_dirty();
2645 let old_version = self.version.clone();
2646
2647 let ops = self.text.randomly_undo_redo(rng);
2648 if !ops.is_empty() {
2649 for op in ops {
2650 self.send_operation(Operation::Buffer(op), true, cx);
2651 self.did_edit(&old_version, was_dirty, cx);
2652 }
2653 }
2654 }
2655}
2656
2657impl EventEmitter<BufferEvent> for Buffer {}
2658
2659impl Deref for Buffer {
2660 type Target = TextBuffer;
2661
2662 fn deref(&self) -> &Self::Target {
2663 &self.text
2664 }
2665}
2666
2667impl BufferSnapshot {
    /// Returns [`IndentSize`] for a given line that respects user settings and
    /// language preferences.
    pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
        indent_size_for_line(self, row)
    }

2672 /// Returns [`IndentSize`] for a given position that respects user settings
2673 /// and language preferences.
2674 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2675 let settings = language_settings(
2676 self.language_at(position).map(|l| l.name()),
2677 self.file(),
2678 cx,
2679 );
2680 if settings.hard_tabs {
2681 IndentSize::tab()
2682 } else {
2683 IndentSize::spaces(settings.tab_size.get())
2684 }
2685 }
2686
2687 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2688 /// is passed in as `single_indent_size`.
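    ///
    /// An illustrative sketch (not compiled here): suggesting indents for rows 1
    /// through 3 with a four-space indentation unit.
    ///
    /// ```ignore
    /// let suggestions = snapshot.suggested_indents(1..4u32, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     // `indent.len` is the suggested indent width for `row`.
    /// }
    /// ```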
2689 pub fn suggested_indents(
2690 &self,
2691 rows: impl Iterator<Item = u32>,
2692 single_indent_size: IndentSize,
2693 ) -> BTreeMap<u32, IndentSize> {
2694 let mut result = BTreeMap::new();
2695
2696 for row_range in contiguous_ranges(rows, 10) {
2697 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2698 Some(suggestions) => suggestions,
2699 _ => break,
2700 };
2701
2702 for (row, suggestion) in row_range.zip(suggestions) {
2703 let indent_size = if let Some(suggestion) = suggestion {
2704 result
2705 .get(&suggestion.basis_row)
2706 .copied()
2707 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2708 .with_delta(suggestion.delta, single_indent_size)
2709 } else {
2710 self.indent_size_for_line(row)
2711 };
2712
2713 result.insert(row, indent_size);
2714 }
2715 }
2716
2717 result
2718 }
2719
2720 fn suggest_autoindents(
2721 &self,
2722 row_range: Range<u32>,
2723 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2724 let config = &self.language.as_ref()?.config;
2725 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2726
2727 // Find the suggested indentation ranges based on the syntax tree.
2728 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2729 let end = Point::new(row_range.end, 0);
2730 let range = (start..end).to_offset(&self.text);
2731 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2732 Some(&grammar.indents_config.as_ref()?.query)
2733 });
2734 let indent_configs = matches
2735 .grammars()
2736 .iter()
2737 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2738 .collect::<Vec<_>>();
2739
2740 let mut indent_ranges = Vec::<Range<Point>>::new();
2741 let mut outdent_positions = Vec::<Point>::new();
2742 while let Some(mat) = matches.peek() {
2743 let mut start: Option<Point> = None;
2744 let mut end: Option<Point> = None;
2745
2746 let config = &indent_configs[mat.grammar_index];
2747 for capture in mat.captures {
2748 if capture.index == config.indent_capture_ix {
2749 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2750 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2751 } else if Some(capture.index) == config.start_capture_ix {
2752 start = Some(Point::from_ts_point(capture.node.end_position()));
2753 } else if Some(capture.index) == config.end_capture_ix {
2754 end = Some(Point::from_ts_point(capture.node.start_position()));
2755 } else if Some(capture.index) == config.outdent_capture_ix {
2756 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2757 }
2758 }
2759
2760 matches.advance();
2761 if let Some((start, end)) = start.zip(end) {
2762 if start.row == end.row {
2763 continue;
2764 }
2765
2766 let range = start..end;
2767 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2768 Err(ix) => indent_ranges.insert(ix, range),
2769 Ok(ix) => {
2770 let prev_range = &mut indent_ranges[ix];
2771 prev_range.end = prev_range.end.max(range.end);
2772 }
2773 }
2774 }
2775 }
2776
2777 let mut error_ranges = Vec::<Range<Point>>::new();
2778 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2779 Some(&grammar.error_query)
2780 });
2781 while let Some(mat) = matches.peek() {
2782 let node = mat.captures[0].node;
2783 let start = Point::from_ts_point(node.start_position());
2784 let end = Point::from_ts_point(node.end_position());
2785 let range = start..end;
2786 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2787 Ok(ix) | Err(ix) => ix,
2788 };
2789 let mut end_ix = ix;
2790 while let Some(existing_range) = error_ranges.get(end_ix) {
2791 if existing_range.end < end {
2792 end_ix += 1;
2793 } else {
2794 break;
2795 }
2796 }
2797 error_ranges.splice(ix..end_ix, [range]);
2798 matches.advance();
2799 }
2800
2801 outdent_positions.sort();
2802 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and set its end to the outdent position.
2805 if let Some(range_to_truncate) = indent_ranges
2806 .iter_mut()
2807 .filter(|indent_range| indent_range.contains(&outdent_position))
2808 .last()
2809 {
2810 range_to_truncate.end = outdent_position;
2811 }
2812 }
2813
        // Find the suggested indentation increases and decreases based on regexes.
2815 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2816 self.for_each_line(
2817 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2818 ..Point::new(row_range.end, 0),
2819 |row, line| {
2820 if config
2821 .decrease_indent_pattern
2822 .as_ref()
2823 .map_or(false, |regex| regex.is_match(line))
2824 {
2825 indent_change_rows.push((row, Ordering::Less));
2826 }
2827 if config
2828 .increase_indent_pattern
2829 .as_ref()
2830 .map_or(false, |regex| regex.is_match(line))
2831 {
2832 indent_change_rows.push((row + 1, Ordering::Greater));
2833 }
2834 },
2835 );
2836
2837 let mut indent_changes = indent_change_rows.into_iter().peekable();
2838 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2839 prev_non_blank_row.unwrap_or(0)
2840 } else {
2841 row_range.start.saturating_sub(1)
2842 };
2843 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2844 Some(row_range.map(move |row| {
2845 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2846
2847 let mut indent_from_prev_row = false;
2848 let mut outdent_from_prev_row = false;
2849 let mut outdent_to_row = u32::MAX;
2850
2851 while let Some((indent_row, delta)) = indent_changes.peek() {
2852 match indent_row.cmp(&row) {
2853 Ordering::Equal => match delta {
2854 Ordering::Less => outdent_from_prev_row = true,
2855 Ordering::Greater => indent_from_prev_row = true,
2856 _ => {}
2857 },
2858
2859 Ordering::Greater => break,
2860 Ordering::Less => {}
2861 }
2862
2863 indent_changes.next();
2864 }
2865
2866 for range in &indent_ranges {
2867 if range.start.row >= row {
2868 break;
2869 }
2870 if range.start.row == prev_row && range.end > row_start {
2871 indent_from_prev_row = true;
2872 }
2873 if range.end > prev_row_start && range.end <= row_start {
2874 outdent_to_row = outdent_to_row.min(range.start.row);
2875 }
2876 }
2877
2878 let within_error = error_ranges
2879 .iter()
2880 .any(|e| e.start.row < row && e.end > row_start);
2881
2882 let suggestion = if outdent_to_row == prev_row
2883 || (outdent_from_prev_row && indent_from_prev_row)
2884 {
2885 Some(IndentSuggestion {
2886 basis_row: prev_row,
2887 delta: Ordering::Equal,
2888 within_error,
2889 })
2890 } else if indent_from_prev_row {
2891 Some(IndentSuggestion {
2892 basis_row: prev_row,
2893 delta: Ordering::Greater,
2894 within_error,
2895 })
2896 } else if outdent_to_row < prev_row {
2897 Some(IndentSuggestion {
2898 basis_row: outdent_to_row,
2899 delta: Ordering::Equal,
2900 within_error,
2901 })
2902 } else if outdent_from_prev_row {
2903 Some(IndentSuggestion {
2904 basis_row: prev_row,
2905 delta: Ordering::Less,
2906 within_error,
2907 })
2908 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2909 {
2910 Some(IndentSuggestion {
2911 basis_row: prev_row,
2912 delta: Ordering::Equal,
2913 within_error,
2914 })
2915 } else {
2916 None
2917 };
2918
2919 prev_row = row;
2920 prev_row_start = row_start;
2921 suggestion
2922 }))
2923 }
2924
2925 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2926 while row > 0 {
2927 row -= 1;
2928 if !self.is_line_blank(row) {
2929 return Some(row);
2930 }
2931 }
2932 None
2933 }
2934
2935 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
2936 let captures = self.syntax.captures(range, &self.text, |grammar| {
2937 grammar.highlights_query.as_ref()
2938 });
2939 let highlight_maps = captures
2940 .grammars()
2941 .iter()
2942 .map(|grammar| grammar.highlight_map())
2943 .collect();
2944 (captures, highlight_maps)
2945 }
2946
2947 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
2948 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
2949 /// returned in chunks where each chunk has a single syntax highlighting style and
2950 /// diagnostic status.
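    ///
    /// An illustrative sketch (not compiled here): reassembling the highlighted text
    /// of the whole buffer; `snapshot` stands for a `BufferSnapshot`.
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // `chunk.syntax_highlight_id` identifies the highlight style, if any.
    ///     text.push_str(chunk.text);
    /// }
    /// ```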
2951 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2952 let range = range.start.to_offset(self)..range.end.to_offset(self);
2953
2954 let mut syntax = None;
2955 if language_aware {
2956 syntax = Some(self.get_highlights(range.clone()));
2957 }
2958 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
2959 let diagnostics = language_aware;
2960 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
2961 }
2962
2963 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a new string for each line.
2965 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2966 let mut line = String::new();
2967 let mut row = range.start.row;
2968 for chunk in self
2969 .as_rope()
2970 .chunks_in_range(range.to_offset(self))
2971 .chain(["\n"])
2972 {
2973 for (newline_ix, text) in chunk.split('\n').enumerate() {
2974 if newline_ix > 0 {
2975 callback(row, &line);
2976 row += 1;
2977 line.clear();
2978 }
2979 line.push_str(text);
2980 }
2981 }
2982 }
2983
2984 /// Iterates over every [`SyntaxLayer`] in the buffer.
2985 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
2986 self.syntax
2987 .layers_for_range(0..self.len(), &self.text, true)
2988 }
2989
2990 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
2991 let offset = position.to_offset(self);
2992 self.syntax
2993 .layers_for_range(offset..offset, &self.text, false)
2994 .filter(|l| l.node().end_byte() > offset)
2995 .last()
2996 }
2997
2998 /// Returns the main [`Language`].
2999 pub fn language(&self) -> Option<&Arc<Language>> {
3000 self.language.as_ref()
3001 }
3002
3003 /// Returns the [`Language`] at the given location.
3004 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3005 self.syntax_layer_at(position)
3006 .map(|info| info.language)
3007 .or(self.language.as_ref())
3008 }
3009
3010 /// Returns the settings for the language at the given location.
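    ///
    /// An illustrative sketch (not compiled here); `offset` stands for any position
    /// in the buffer:
    ///
    /// ```ignore
    /// let settings = snapshot.settings_at(offset, cx);
    /// let tab_size = settings.tab_size.get();
    /// let hard_tabs = settings.hard_tabs;
    /// ```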
3011 pub fn settings_at<'a, D: ToOffset>(
3012 &'a self,
3013 position: D,
3014 cx: &'a App,
3015 ) -> Cow<'a, LanguageSettings> {
3016 language_settings(
3017 self.language_at(position).map(|l| l.name()),
3018 self.file.as_ref(),
3019 cx,
3020 )
3021 }
3022
3023 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3024 CharClassifier::new(self.language_scope_at(point))
3025 }
3026
3027 /// Returns the [`LanguageScope`] at the given location.
3028 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3029 let offset = position.to_offset(self);
3030 let mut scope = None;
3031 let mut smallest_range: Option<Range<usize>> = None;
3032
3033 // Use the layer that has the smallest node intersecting the given point.
3034 for layer in self
3035 .syntax
3036 .layers_for_range(offset..offset, &self.text, false)
3037 {
3038 let mut cursor = layer.node().walk();
3039
3040 let mut range = None;
3041 loop {
3042 let child_range = cursor.node().byte_range();
3043 if !child_range.to_inclusive().contains(&offset) {
3044 break;
3045 }
3046
3047 range = Some(child_range);
3048 if cursor.goto_first_child_for_byte(offset).is_none() {
3049 break;
3050 }
3051 }
3052
3053 if let Some(range) = range {
3054 if smallest_range
3055 .as_ref()
3056 .map_or(true, |smallest_range| range.len() < smallest_range.len())
3057 {
3058 smallest_range = Some(range);
3059 scope = Some(LanguageScope {
3060 language: layer.language.clone(),
3061 override_id: layer.override_id(offset, &self.text),
3062 });
3063 }
3064 }
3065 }
3066
3067 scope.or_else(|| {
3068 self.language.clone().map(|language| LanguageScope {
3069 language,
3070 override_id: None,
3071 })
3072 })
3073 }
3074
3075 /// Returns a tuple of the range and character kind of the word
3076 /// surrounding the given position.
3077 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3078 let mut start = start.to_offset(self);
3079 let mut end = start;
3080 let mut next_chars = self.chars_at(start).peekable();
3081 let mut prev_chars = self.reversed_chars_at(start).peekable();
3082
3083 let classifier = self.char_classifier_at(start);
3084 let word_kind = cmp::max(
3085 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3086 next_chars.peek().copied().map(|c| classifier.kind(c)),
3087 );
3088
3089 for ch in prev_chars {
3090 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3091 start -= ch.len_utf8();
3092 } else {
3093 break;
3094 }
3095 }
3096
3097 for ch in next_chars {
3098 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3099 end += ch.len_utf8();
3100 } else {
3101 break;
3102 }
3103 }
3104
3105 (start..end, word_kind)
3106 }
3107
3108 /// Returns the closest syntax node enclosing the given range.
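    ///
    /// An illustrative sketch (not compiled here): expanding a selection to the
    /// nearest enclosing syntax node; `selection` stands for a byte range.
    ///
    /// ```ignore
    /// if let Some(node) = snapshot.syntax_ancestor(selection.clone()) {
    ///     let expanded_selection = node.byte_range();
    /// }
    /// ```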
3109 pub fn syntax_ancestor<'a, T: ToOffset>(
3110 &'a self,
3111 range: Range<T>,
3112 ) -> Option<tree_sitter::Node<'a>> {
3113 let range = range.start.to_offset(self)..range.end.to_offset(self);
3114 let mut result: Option<tree_sitter::Node<'a>> = None;
3115 'outer: for layer in self
3116 .syntax
3117 .layers_for_range(range.clone(), &self.text, true)
3118 {
3119 let mut cursor = layer.node().walk();
3120
3121 // Descend to the first leaf that touches the start of the range,
3122 // and if the range is non-empty, extends beyond the start.
3123 while cursor.goto_first_child_for_byte(range.start).is_some() {
3124 if !range.is_empty() && cursor.node().end_byte() == range.start {
3125 cursor.goto_next_sibling();
3126 }
3127 }
3128
3129 // Ascend to the smallest ancestor that strictly contains the range.
3130 loop {
3131 let node_range = cursor.node().byte_range();
3132 if node_range.start <= range.start
3133 && node_range.end >= range.end
3134 && node_range.len() > range.len()
3135 {
3136 break;
3137 }
3138 if !cursor.goto_parent() {
3139 continue 'outer;
3140 }
3141 }
3142
3143 let left_node = cursor.node();
3144 let mut layer_result = left_node;
3145
3146 // For an empty range, try to find another node immediately to the right of the range.
3147 if left_node.end_byte() == range.start {
3148 let mut right_node = None;
3149 while !cursor.goto_next_sibling() {
3150 if !cursor.goto_parent() {
3151 break;
3152 }
3153 }
3154
3155 while cursor.node().start_byte() == range.start {
3156 right_node = Some(cursor.node());
3157 if !cursor.goto_first_child() {
3158 break;
3159 }
3160 }
3161
3162 // If there is a candidate node on both sides of the (empty) range, then
3163 // decide between the two by favoring a named node over an anonymous token.
3164 // If both nodes are the same in that regard, favor the right one.
3165 if let Some(right_node) = right_node {
3166 if right_node.is_named() || !left_node.is_named() {
3167 layer_result = right_node;
3168 }
3169 }
3170 }
3171
3172 if let Some(previous_result) = &result {
3173 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3174 continue;
3175 }
3176 }
3177 result = Some(layer_result);
3178 }
3179
3180 result
3181 }
3182
3183 /// Returns the outline for the buffer.
3184 ///
3185 /// This method allows passing an optional [`SyntaxTheme`] to
3186 /// syntax-highlight the returned symbols.
3187 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3188 self.outline_items_containing(0..self.len(), true, theme)
3189 .map(Outline::new)
3190 }
3191
3192 /// Returns all the symbols that contain the given position.
3193 ///
3194 /// This method allows passing an optional [`SyntaxTheme`] to
3195 /// syntax-highlight the returned symbols.
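    ///
    /// An illustrative sketch (not compiled here): listing the symbols that contain
    /// a cursor position, from outermost to innermost.
    ///
    /// ```ignore
    /// if let Some(symbols) = snapshot.symbols_containing(cursor_offset, None) {
    ///     for item in symbols {
    ///         // `item.depth` increases with nesting; `item.text` is the label.
    ///     }
    /// }
    /// ```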
3196 pub fn symbols_containing<T: ToOffset>(
3197 &self,
3198 position: T,
3199 theme: Option<&SyntaxTheme>,
3200 ) -> Option<Vec<OutlineItem<Anchor>>> {
3201 let position = position.to_offset(self);
3202 let mut items = self.outline_items_containing(
3203 position.saturating_sub(1)..self.len().min(position + 1),
3204 false,
3205 theme,
3206 )?;
3207 let mut prev_depth = None;
3208 items.retain(|item| {
3209 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3210 prev_depth = Some(item.depth);
3211 result
3212 });
3213 Some(items)
3214 }
3215
3216 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3217 let range = range.to_offset(self);
3218 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3219 grammar.outline_config.as_ref().map(|c| &c.query)
3220 });
3221 let configs = matches
3222 .grammars()
3223 .iter()
3224 .map(|g| g.outline_config.as_ref().unwrap())
3225 .collect::<Vec<_>>();
3226
3227 while let Some(mat) = matches.peek() {
3228 let config = &configs[mat.grammar_index];
3229 let containing_item_node = maybe!({
3230 let item_node = mat.captures.iter().find_map(|cap| {
3231 if cap.index == config.item_capture_ix {
3232 Some(cap.node)
3233 } else {
3234 None
3235 }
3236 })?;
3237
3238 let item_byte_range = item_node.byte_range();
3239 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3240 None
3241 } else {
3242 Some(item_node)
3243 }
3244 });
3245
3246 if let Some(item_node) = containing_item_node {
3247 return Some(
3248 Point::from_ts_point(item_node.start_position())
3249 ..Point::from_ts_point(item_node.end_position()),
3250 );
3251 }
3252
3253 matches.advance();
3254 }
3255 None
3256 }
3257
3258 pub fn outline_items_containing<T: ToOffset>(
3259 &self,
3260 range: Range<T>,
3261 include_extra_context: bool,
3262 theme: Option<&SyntaxTheme>,
3263 ) -> Option<Vec<OutlineItem<Anchor>>> {
3264 let range = range.to_offset(self);
3265 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3266 grammar.outline_config.as_ref().map(|c| &c.query)
3267 });
3268 let configs = matches
3269 .grammars()
3270 .iter()
3271 .map(|g| g.outline_config.as_ref().unwrap())
3272 .collect::<Vec<_>>();
3273
3274 let mut items = Vec::new();
3275 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3276 while let Some(mat) = matches.peek() {
3277 let config = &configs[mat.grammar_index];
3278 if let Some(item) =
3279 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3280 {
3281 items.push(item);
3282 } else if let Some(capture) = mat
3283 .captures
3284 .iter()
3285 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3286 {
3287 let capture_range = capture.node.start_position()..capture.node.end_position();
3288 let mut capture_row_range =
3289 capture_range.start.row as u32..capture_range.end.row as u32;
3290 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3291 {
3292 capture_row_range.end -= 1;
3293 }
3294 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3295 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3296 last_row_range.end = capture_row_range.end;
3297 } else {
3298 annotation_row_ranges.push(capture_row_range);
3299 }
3300 } else {
3301 annotation_row_ranges.push(capture_row_range);
3302 }
3303 }
3304 matches.advance();
3305 }
3306
3307 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3308
3309 // Assign depths based on containment relationships and convert to anchors.
3310 let mut item_ends_stack = Vec::<Point>::new();
3311 let mut anchor_items = Vec::new();
3312 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3313 for item in items {
3314 while let Some(last_end) = item_ends_stack.last().copied() {
3315 if last_end < item.range.end {
3316 item_ends_stack.pop();
3317 } else {
3318 break;
3319 }
3320 }
3321
3322 let mut annotation_row_range = None;
3323 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3324 let row_preceding_item = item.range.start.row.saturating_sub(1);
3325 if next_annotation_row_range.end < row_preceding_item {
3326 annotation_row_ranges.next();
3327 } else {
3328 if next_annotation_row_range.end == row_preceding_item {
3329 annotation_row_range = Some(next_annotation_row_range.clone());
3330 annotation_row_ranges.next();
3331 }
3332 break;
3333 }
3334 }
3335
3336 anchor_items.push(OutlineItem {
3337 depth: item_ends_stack.len(),
3338 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3339 text: item.text,
3340 highlight_ranges: item.highlight_ranges,
3341 name_ranges: item.name_ranges,
3342 body_range: item.body_range.map(|body_range| {
3343 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3344 }),
3345 annotation_range: annotation_row_range.map(|annotation_range| {
3346 self.anchor_after(Point::new(annotation_range.start, 0))
3347 ..self.anchor_before(Point::new(
3348 annotation_range.end,
3349 self.line_len(annotation_range.end),
3350 ))
3351 }),
3352 });
3353 item_ends_stack.push(item.range.end);
3354 }
3355
3356 Some(anchor_items)
3357 }
3358
3359 fn next_outline_item(
3360 &self,
3361 config: &OutlineConfig,
3362 mat: &SyntaxMapMatch,
3363 range: &Range<usize>,
3364 include_extra_context: bool,
3365 theme: Option<&SyntaxTheme>,
3366 ) -> Option<OutlineItem<Point>> {
3367 let item_node = mat.captures.iter().find_map(|cap| {
3368 if cap.index == config.item_capture_ix {
3369 Some(cap.node)
3370 } else {
3371 None
3372 }
3373 })?;
3374
3375 let item_byte_range = item_node.byte_range();
3376 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3377 return None;
3378 }
3379 let item_point_range = Point::from_ts_point(item_node.start_position())
3380 ..Point::from_ts_point(item_node.end_position());
3381
3382 let mut open_point = None;
3383 let mut close_point = None;
3384 let mut buffer_ranges = Vec::new();
3385 for capture in mat.captures {
3386 let node_is_name;
3387 if capture.index == config.name_capture_ix {
3388 node_is_name = true;
3389 } else if Some(capture.index) == config.context_capture_ix
3390 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3391 {
3392 node_is_name = false;
3393 } else {
3394 if Some(capture.index) == config.open_capture_ix {
3395 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3396 } else if Some(capture.index) == config.close_capture_ix {
3397 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3398 }
3399
3400 continue;
3401 }
3402
3403 let mut range = capture.node.start_byte()..capture.node.end_byte();
3404 let start = capture.node.start_position();
3405 if capture.node.end_position().row > start.row {
3406 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3407 }
3408
3409 if !range.is_empty() {
3410 buffer_ranges.push((range, node_is_name));
3411 }
3412 }
3413 if buffer_ranges.is_empty() {
3414 return None;
3415 }
3416 let mut text = String::new();
3417 let mut highlight_ranges = Vec::new();
3418 let mut name_ranges = Vec::new();
3419 let mut chunks = self.chunks(
3420 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3421 true,
3422 );
3423 let mut last_buffer_range_end = 0;
3424 for (buffer_range, is_name) in buffer_ranges {
3425 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
3426 text.push(' ');
3427 }
3428 last_buffer_range_end = buffer_range.end;
3429 if is_name {
3430 let mut start = text.len();
3431 let end = start + buffer_range.len();
3432
3433 // When multiple names are captured, then the matchable text
3434 // includes the whitespace in between the names.
3435 if !name_ranges.is_empty() {
3436 start -= 1;
3437 }
3438
3439 name_ranges.push(start..end);
3440 }
3441
3442 let mut offset = buffer_range.start;
3443 chunks.seek(buffer_range.clone());
3444 for mut chunk in chunks.by_ref() {
3445 if chunk.text.len() > buffer_range.end - offset {
3446 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3447 offset = buffer_range.end;
3448 } else {
3449 offset += chunk.text.len();
3450 }
3451 let style = chunk
3452 .syntax_highlight_id
3453 .zip(theme)
3454 .and_then(|(highlight, theme)| highlight.style(theme));
3455 if let Some(style) = style {
3456 let start = text.len();
3457 let end = start + chunk.text.len();
3458 highlight_ranges.push((start..end, style));
3459 }
3460 text.push_str(chunk.text);
3461 if offset >= buffer_range.end {
3462 break;
3463 }
3464 }
3465 }
3466
3467 Some(OutlineItem {
3468 depth: 0, // We'll calculate the depth later
3469 range: item_point_range,
3470 text,
3471 highlight_ranges,
3472 name_ranges,
3473 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3474 annotation_range: None,
3475 })
3476 }
3477
3478 pub fn function_body_fold_ranges<T: ToOffset>(
3479 &self,
3480 within: Range<T>,
3481 ) -> impl Iterator<Item = Range<usize>> + '_ {
3482 self.text_object_ranges(within, TreeSitterOptions::default())
3483 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3484 }
3485
3486 /// For each grammar in the language, runs the provided
3487 /// [`tree_sitter::Query`] against the given range.
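    ///
    /// An illustrative sketch (not compiled here): running each grammar's outline
    /// query over the entire buffer.
    ///
    /// ```ignore
    /// let matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|c| &c.query)
    /// });
    /// ```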
3488 pub fn matches(
3489 &self,
3490 range: Range<usize>,
3491 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3492 ) -> SyntaxMapMatches {
3493 self.syntax.matches(range, self, query)
3494 }
3495
    /// Returns bracket range pairs overlapping or adjacent to `range`.
3497 pub fn bracket_ranges<T: ToOffset>(
3498 &self,
3499 range: Range<T>,
3500 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
3501 // Find bracket pairs that *inclusively* contain the given range.
3502 let range = range.start.to_offset(self).saturating_sub(1)
3503 ..self.len().min(range.end.to_offset(self) + 1);
3504
3505 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3506 grammar.brackets_config.as_ref().map(|c| &c.query)
3507 });
3508 let configs = matches
3509 .grammars()
3510 .iter()
3511 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3512 .collect::<Vec<_>>();
3513
3514 iter::from_fn(move || {
3515 while let Some(mat) = matches.peek() {
3516 let mut open = None;
3517 let mut close = None;
3518 let config = &configs[mat.grammar_index];
3519 for capture in mat.captures {
3520 if capture.index == config.open_capture_ix {
3521 open = Some(capture.node.byte_range());
3522 } else if capture.index == config.close_capture_ix {
3523 close = Some(capture.node.byte_range());
3524 }
3525 }
3526
3527 matches.advance();
3528
3529 let Some((open, close)) = open.zip(close) else {
3530 continue;
3531 };
3532
3533 let bracket_range = open.start..=close.end;
3534 if !bracket_range.overlaps(&range) {
3535 continue;
3536 }
3537
3538 return Some((open, close));
3539 }
3540 None
3541 })
3542 }
3543
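    /// Returns the byte ranges of text objects, as captured by each grammar's text
    /// object query, that overlap the given range, along with each text object's kind.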
3544 pub fn text_object_ranges<T: ToOffset>(
3545 &self,
3546 range: Range<T>,
3547 options: TreeSitterOptions,
3548 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3549 let range = range.start.to_offset(self).saturating_sub(1)
3550 ..self.len().min(range.end.to_offset(self) + 1);
3551
3552 let mut matches =
3553 self.syntax
3554 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3555 grammar.text_object_config.as_ref().map(|c| &c.query)
3556 });
3557
3558 let configs = matches
3559 .grammars()
3560 .iter()
3561 .map(|grammar| grammar.text_object_config.as_ref())
3562 .collect::<Vec<_>>();
3563
3564 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3565
3566 iter::from_fn(move || loop {
3567 while let Some(capture) = captures.pop() {
3568 if capture.0.overlaps(&range) {
3569 return Some(capture);
3570 }
3571 }
3572
3573 let mat = matches.peek()?;
3574
3575 let Some(config) = configs[mat.grammar_index].as_ref() else {
3576 matches.advance();
3577 continue;
3578 };
3579
3580 for capture in mat.captures {
3581 let Some(ix) = config
3582 .text_objects_by_capture_ix
3583 .binary_search_by_key(&capture.index, |e| e.0)
3584 .ok()
3585 else {
3586 continue;
3587 };
3588 let text_object = config.text_objects_by_capture_ix[ix].1;
3589 let byte_range = capture.node.byte_range();
3590
3591 let mut found = false;
3592 for (range, existing) in captures.iter_mut() {
3593 if existing == &text_object {
3594 range.start = range.start.min(byte_range.start);
3595 range.end = range.end.max(byte_range.end);
3596 found = true;
3597 break;
3598 }
3599 }
3600
3601 if !found {
3602 captures.push((byte_range, text_object));
3603 }
3604 }
3605
3606 matches.advance();
3607 })
3608 }
3609
    /// Returns enclosing bracket ranges containing the given range.
3611 pub fn enclosing_bracket_ranges<T: ToOffset>(
3612 &self,
3613 range: Range<T>,
3614 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
3615 let range = range.start.to_offset(self)..range.end.to_offset(self);
3616
3617 self.bracket_ranges(range.clone())
3618 .filter(move |(open, close)| open.start <= range.start && close.end >= range.end)
3619 }
3620
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
3624 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3625 &self,
3626 range: Range<T>,
3627 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3628 ) -> Option<(Range<usize>, Range<usize>)> {
3629 let range = range.start.to_offset(self)..range.end.to_offset(self);
3630
3631 // Get the ranges of the innermost pair of brackets.
3632 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3633
3634 for (open, close) in self.enclosing_bracket_ranges(range.clone()) {
3635 if let Some(range_filter) = range_filter {
3636 if !range_filter(open.clone(), close.clone()) {
3637 continue;
3638 }
3639 }
3640
3641 let len = close.end - open.start;
3642
3643 if let Some((existing_open, existing_close)) = &result {
3644 let existing_len = existing_close.end - existing_open.start;
3645 if len > existing_len {
3646 continue;
3647 }
3648 }
3649
3650 result = Some((open, close));
3651 }
3652
3653 result
3654 }
3655
    /// Returns the byte ranges of any matches of the redaction query.
    /// The buffer can be associated with multiple languages, and the redaction query
    /// associated with each will be run on the relevant section of the buffer.
3659 pub fn redacted_ranges<T: ToOffset>(
3660 &self,
3661 range: Range<T>,
3662 ) -> impl Iterator<Item = Range<usize>> + '_ {
3663 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3664 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3665 grammar
3666 .redactions_config
3667 .as_ref()
3668 .map(|config| &config.query)
3669 });
3670
3671 let configs = syntax_matches
3672 .grammars()
3673 .iter()
3674 .map(|grammar| grammar.redactions_config.as_ref())
3675 .collect::<Vec<_>>();
3676
3677 iter::from_fn(move || {
3678 let redacted_range = syntax_matches
3679 .peek()
3680 .and_then(|mat| {
3681 configs[mat.grammar_index].and_then(|config| {
3682 mat.captures
3683 .iter()
3684 .find(|capture| capture.index == config.redaction_capture_ix)
3685 })
3686 })
3687 .map(|mat| mat.node.byte_range());
3688 syntax_matches.advance();
3689 redacted_range
3690 })
3691 }
3692
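    /// Returns the byte ranges of language injections intersecting the given range,
    /// along with the language injected into each range.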
3693 pub fn injections_intersecting_range<T: ToOffset>(
3694 &self,
3695 range: Range<T>,
3696 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3697 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3698
3699 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3700 grammar
3701 .injection_config
3702 .as_ref()
3703 .map(|config| &config.query)
3704 });
3705
3706 let configs = syntax_matches
3707 .grammars()
3708 .iter()
3709 .map(|grammar| grammar.injection_config.as_ref())
3710 .collect::<Vec<_>>();
3711
3712 iter::from_fn(move || {
3713 let ranges = syntax_matches.peek().and_then(|mat| {
3714 let config = &configs[mat.grammar_index]?;
3715 let content_capture_range = mat.captures.iter().find_map(|capture| {
3716 if capture.index == config.content_capture_ix {
3717 Some(capture.node.byte_range())
3718 } else {
3719 None
3720 }
3721 })?;
3722 let language = self.language_at(content_capture_range.start)?;
3723 Some((content_capture_range, language))
3724 });
3725 syntax_matches.advance();
3726 ranges
3727 })
3728 }
3729
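    /// Returns the runnables (such as tests) whose ranges are captured by each
    /// grammar's runnable query within the given offset range.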
3730 pub fn runnable_ranges(
3731 &self,
3732 offset_range: Range<usize>,
3733 ) -> impl Iterator<Item = RunnableRange> + '_ {
3734 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3735 grammar.runnable_config.as_ref().map(|config| &config.query)
3736 });
3737
3738 let test_configs = syntax_matches
3739 .grammars()
3740 .iter()
3741 .map(|grammar| grammar.runnable_config.as_ref())
3742 .collect::<Vec<_>>();
3743
3744 iter::from_fn(move || loop {
3745 let mat = syntax_matches.peek()?;
3746
3747 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
3748 let mut run_range = None;
3749 let full_range = mat.captures.iter().fold(
3750 Range {
3751 start: usize::MAX,
3752 end: 0,
3753 },
3754 |mut acc, next| {
3755 let byte_range = next.node.byte_range();
3756 if acc.start > byte_range.start {
3757 acc.start = byte_range.start;
3758 }
3759 if acc.end < byte_range.end {
3760 acc.end = byte_range.end;
3761 }
3762 acc
3763 },
3764 );
3765 if full_range.start > full_range.end {
3766 // We did not find a full spanning range of this match.
3767 return None;
3768 }
3769 let extra_captures: SmallVec<[_; 1]> =
3770 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
3771 test_configs
3772 .extra_captures
3773 .get(capture.index as usize)
3774 .cloned()
3775 .and_then(|tag_name| match tag_name {
3776 RunnableCapture::Named(name) => {
3777 Some((capture.node.byte_range(), name))
3778 }
3779 RunnableCapture::Run => {
3780 let _ = run_range.insert(capture.node.byte_range());
3781 None
3782 }
3783 })
3784 }));
3785 let run_range = run_range?;
3786 let tags = test_configs
3787 .query
3788 .property_settings(mat.pattern_index)
3789 .iter()
3790 .filter_map(|property| {
3791 if *property.key == *"tag" {
3792 property
3793 .value
3794 .as_ref()
3795 .map(|value| RunnableTag(value.to_string().into()))
3796 } else {
3797 None
3798 }
3799 })
3800 .collect();
3801 let extra_captures = extra_captures
3802 .into_iter()
3803 .map(|(range, name)| {
3804 (
3805 name.to_string(),
3806 self.text_for_range(range.clone()).collect::<String>(),
3807 )
3808 })
3809 .collect();
3810 // All tags should have the same range.
3811 Some(RunnableRange {
3812 run_range,
3813 full_range,
3814 runnable: Runnable {
3815 tags,
3816 language: mat.language,
3817 buffer: self.remote_id(),
3818 },
3819 extra_captures,
3820 buffer_id: self.remote_id(),
3821 })
3822 });
3823
3824 syntax_matches.advance();
3825 if test_range.is_some() {
                // Short-circuiting when `.peek()?` returns `None` is fine, but a match whose
                // captures contain no run marker should not end this iterator, so in that case
                // we loop around and consider the next match instead of returning `None`.
3828 return test_range;
3829 }
3830 })
3831 }
3832
    /// Returns selections for remote peers intersecting the given range. If
    /// `include_local` is true, selections for the local replica are included as well.
3834 #[allow(clippy::type_complexity)]
3835 pub fn selections_in_range(
3836 &self,
3837 range: Range<Anchor>,
3838 include_local: bool,
3839 ) -> impl Iterator<
3840 Item = (
3841 ReplicaId,
3842 bool,
3843 CursorShape,
3844 impl Iterator<Item = &Selection<Anchor>> + '_,
3845 ),
3846 > + '_ {
3847 self.remote_selections
3848 .iter()
3849 .filter(move |(replica_id, set)| {
3850 (include_local || **replica_id != self.text.replica_id())
3851 && !set.selections.is_empty()
3852 })
3853 .map(move |(replica_id, set)| {
3854 let start_ix = match set.selections.binary_search_by(|probe| {
3855 probe.end.cmp(&range.start, self).then(Ordering::Greater)
3856 }) {
3857 Ok(ix) | Err(ix) => ix,
3858 };
3859 let end_ix = match set.selections.binary_search_by(|probe| {
3860 probe.start.cmp(&range.end, self).then(Ordering::Less)
3861 }) {
3862 Ok(ix) | Err(ix) => ix,
3863 };
3864
3865 (
3866 *replica_id,
3867 set.line_mode,
3868 set.cursor_shape,
3869 set.selections[start_ix..end_ix].iter(),
3870 )
3871 })
3872 }
3873
    /// Returns whether the buffer contains any diagnostics.
3875 pub fn has_diagnostics(&self) -> bool {
3876 !self.diagnostics.is_empty()
3877 }
3878
3879 /// Returns all the diagnostics intersecting the given range.
3880 pub fn diagnostics_in_range<'a, T, O>(
3881 &'a self,
3882 search_range: Range<T>,
3883 reversed: bool,
3884 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3885 where
3886 T: 'a + Clone + ToOffset,
3887 O: 'a + FromAnchor,
3888 {
3889 let mut iterators: Vec<_> = self
3890 .diagnostics
3891 .iter()
3892 .map(|(_, collection)| {
3893 collection
3894 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
3895 .peekable()
3896 })
3897 .collect();
3898
3899 std::iter::from_fn(move || {
3900 let (next_ix, _) = iterators
3901 .iter_mut()
3902 .enumerate()
3903 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
3904 .min_by(|(_, a), (_, b)| {
3905 let cmp = a
3906 .range
3907 .start
3908 .cmp(&b.range.start, self)
3909 // when range is equal, sort by diagnostic severity
3910 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
3911 // and stabilize order with group_id
3912 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
3913 if reversed {
3914 cmp.reverse()
3915 } else {
3916 cmp
3917 }
3918 })?;
3919 iterators[next_ix]
3920 .next()
3921 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
3922 diagnostic,
3923 range: FromAnchor::from_anchor(&range.start, self)
3924 ..FromAnchor::from_anchor(&range.end, self),
3925 })
3926 })
3927 }
3928
    /// Returns all the diagnostic groups associated with the given
    /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
3932 pub fn diagnostic_groups(
3933 &self,
3934 language_server_id: Option<LanguageServerId>,
3935 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
3936 let mut groups = Vec::new();
3937
3938 if let Some(language_server_id) = language_server_id {
3939 if let Ok(ix) = self
3940 .diagnostics
3941 .binary_search_by_key(&language_server_id, |e| e.0)
3942 {
3943 self.diagnostics[ix]
3944 .1
3945 .groups(language_server_id, &mut groups, self);
3946 }
3947 } else {
3948 for (language_server_id, diagnostics) in self.diagnostics.iter() {
3949 diagnostics.groups(*language_server_id, &mut groups, self);
3950 }
3951 }
3952
3953 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
3954 let a_start = &group_a.entries[group_a.primary_ix].range.start;
3955 let b_start = &group_b.entries[group_b.primary_ix].range.start;
3956 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
3957 });
3958
3959 groups
3960 }
3961
3962 /// Returns an iterator over the diagnostics for the given group.
3963 pub fn diagnostic_group<O>(
3964 &self,
3965 group_id: usize,
3966 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
3967 where
3968 O: FromAnchor + 'static,
3969 {
3970 self.diagnostics
3971 .iter()
3972 .flat_map(move |(_, set)| set.group(group_id, self))
3973 }
3974
3975 /// An integer version number that accounts for all updates besides
3976 /// the buffer's text itself (which is versioned via a version vector).
3977 pub fn non_text_state_update_count(&self) -> usize {
3978 self.non_text_state_update_count
3979 }
3980
    /// Returns a snapshot of the underlying file.
3982 pub fn file(&self) -> Option<&Arc<dyn File>> {
3983 self.file.as_ref()
3984 }
3985
3986 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
3987 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
3988 if let Some(file) = self.file() {
3989 if file.path().file_name().is_none() || include_root {
3990 Some(file.full_path(cx))
3991 } else {
3992 Some(file.path().to_path_buf())
3993 }
3994 } else {
3995 None
3996 }
3997 }
3998}
3999
4000fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4001 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4002}
4003
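/// Computes the indentation at the start of the given text: the run of leading spaces
/// or tabs, whose kind is determined by the first indent character encountered.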
4004fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4005 let mut result = IndentSize::spaces(0);
4006 for c in text {
4007 let kind = match c {
4008 ' ' => IndentKind::Space,
4009 '\t' => IndentKind::Tab,
4010 _ => break,
4011 };
4012 if result.len == 0 {
4013 result.kind = kind;
4014 }
4015 result.len += 1;
4016 }
4017 result
4018}
4019
4020impl Clone for BufferSnapshot {
4021 fn clone(&self) -> Self {
4022 Self {
4023 text: self.text.clone(),
4024 syntax: self.syntax.clone(),
4025 file: self.file.clone(),
4026 remote_selections: self.remote_selections.clone(),
4027 diagnostics: self.diagnostics.clone(),
4028 language: self.language.clone(),
4029 non_text_state_update_count: self.non_text_state_update_count,
4030 }
4031 }
4032}
4033
4034impl Deref for BufferSnapshot {
4035 type Target = text::BufferSnapshot;
4036
4037 fn deref(&self) -> &Self::Target {
4038 &self.text
4039 }
4040}
4041
4042unsafe impl<'a> Send for BufferChunks<'a> {}
4043
4044impl<'a> BufferChunks<'a> {
4045 pub(crate) fn new(
4046 text: &'a Rope,
4047 range: Range<usize>,
4048 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4049 diagnostics: bool,
4050 buffer_snapshot: Option<&'a BufferSnapshot>,
4051 ) -> Self {
4052 let mut highlights = None;
4053 if let Some((captures, highlight_maps)) = syntax {
4054 highlights = Some(BufferChunkHighlights {
4055 captures,
4056 next_capture: None,
4057 stack: Default::default(),
4058 highlight_maps,
4059 })
4060 }
4061
4062 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4063 let chunks = text.chunks_in_range(range.clone());
4064
4065 let mut this = BufferChunks {
4066 range,
4067 buffer_snapshot,
4068 chunks,
4069 diagnostic_endpoints,
4070 error_depth: 0,
4071 warning_depth: 0,
4072 information_depth: 0,
4073 hint_depth: 0,
4074 unnecessary_depth: 0,
4075 highlights,
4076 };
4077 this.initialize_diagnostic_endpoints();
4078 this
4079 }
4080
    /// Seeks to the given byte range in the buffer.
4082 pub fn seek(&mut self, range: Range<usize>) {
4083 let old_range = std::mem::replace(&mut self.range, range.clone());
4084 self.chunks.set_range(self.range.clone());
4085 if let Some(highlights) = self.highlights.as_mut() {
4086 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4087 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4088 highlights
4089 .stack
4090 .retain(|(end_offset, _)| *end_offset > range.start);
4091 if let Some(capture) = &highlights.next_capture {
4092 if range.start >= capture.node.start_byte() {
4093 let next_capture_end = capture.node.end_byte();
4094 if range.start < next_capture_end {
4095 highlights.stack.push((
4096 next_capture_end,
4097 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4098 ));
4099 }
4100 highlights.next_capture.take();
4101 }
4102 }
4103 } else if let Some(snapshot) = self.buffer_snapshot {
4104 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4105 *highlights = BufferChunkHighlights {
4106 captures,
4107 next_capture: None,
4108 stack: Default::default(),
4109 highlight_maps,
4110 };
4111 } else {
4112 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4113 // Seeking such BufferChunks is not supported.
4114 debug_assert!(false, "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot");
4115 }
4116
4117 highlights.captures.set_byte_range(self.range.clone());
4118 self.initialize_diagnostic_endpoints();
4119 }
4120 }
4121
4122 fn initialize_diagnostic_endpoints(&mut self) {
4123 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4124 if let Some(buffer) = self.buffer_snapshot {
4125 let mut diagnostic_endpoints = Vec::new();
4126 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4127 diagnostic_endpoints.push(DiagnosticEndpoint {
4128 offset: entry.range.start,
4129 is_start: true,
4130 severity: entry.diagnostic.severity,
4131 is_unnecessary: entry.diagnostic.is_unnecessary,
4132 });
4133 diagnostic_endpoints.push(DiagnosticEndpoint {
4134 offset: entry.range.end,
4135 is_start: false,
4136 severity: entry.diagnostic.severity,
4137 is_unnecessary: entry.diagnostic.is_unnecessary,
4138 });
4139 }
4140 diagnostic_endpoints
4141 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4142 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4143 self.hint_depth = 0;
4144 self.error_depth = 0;
4145 self.warning_depth = 0;
4146 self.information_depth = 0;
4147 }
4148 }
4149 }
4150
4151 /// The current byte offset in the buffer.
4152 pub fn offset(&self) -> usize {
4153 self.range.start
4154 }
4155
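    /// The byte range remaining to be iterated, starting at the current offset.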
4156 pub fn range(&self) -> Range<usize> {
4157 self.range.clone()
4158 }
4159
4160 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4161 let depth = match endpoint.severity {
4162 DiagnosticSeverity::ERROR => &mut self.error_depth,
4163 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4164 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4165 DiagnosticSeverity::HINT => &mut self.hint_depth,
4166 _ => return,
4167 };
4168 if endpoint.is_start {
4169 *depth += 1;
4170 } else {
4171 *depth -= 1;
4172 }
4173
4174 if endpoint.is_unnecessary {
4175 if endpoint.is_start {
4176 self.unnecessary_depth += 1;
4177 } else {
4178 self.unnecessary_depth -= 1;
4179 }
4180 }
4181 }
4182
4183 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4184 if self.error_depth > 0 {
4185 Some(DiagnosticSeverity::ERROR)
4186 } else if self.warning_depth > 0 {
4187 Some(DiagnosticSeverity::WARNING)
4188 } else if self.information_depth > 0 {
4189 Some(DiagnosticSeverity::INFORMATION)
4190 } else if self.hint_depth > 0 {
4191 Some(DiagnosticSeverity::HINT)
4192 } else {
4193 None
4194 }
4195 }
4196
4197 fn current_code_is_unnecessary(&self) -> bool {
4198 self.unnecessary_depth > 0
4199 }
4200}
4201
4202impl<'a> Iterator for BufferChunks<'a> {
4203 type Item = Chunk<'a>;
4204
4205 fn next(&mut self) -> Option<Self::Item> {
4206 let mut next_capture_start = usize::MAX;
4207 let mut next_diagnostic_endpoint = usize::MAX;
4208
4209 if let Some(highlights) = self.highlights.as_mut() {
4210 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4211 if *parent_capture_end <= self.range.start {
4212 highlights.stack.pop();
4213 } else {
4214 break;
4215 }
4216 }
4217
4218 if highlights.next_capture.is_none() {
4219 highlights.next_capture = highlights.captures.next();
4220 }
4221
4222 while let Some(capture) = highlights.next_capture.as_ref() {
4223 if self.range.start < capture.node.start_byte() {
4224 next_capture_start = capture.node.start_byte();
4225 break;
4226 } else {
4227 let highlight_id =
4228 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4229 highlights
4230 .stack
4231 .push((capture.node.end_byte(), highlight_id));
4232 highlights.next_capture = highlights.captures.next();
4233 }
4234 }
4235 }
4236
4237 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4238 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4239 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4240 if endpoint.offset <= self.range.start {
4241 self.update_diagnostic_depths(endpoint);
4242 diagnostic_endpoints.next();
4243 } else {
4244 next_diagnostic_endpoint = endpoint.offset;
4245 break;
4246 }
4247 }
4248 }
4249 self.diagnostic_endpoints = diagnostic_endpoints;
4250
4251 if let Some(chunk) = self.chunks.peek() {
4252 let chunk_start = self.range.start;
4253 let mut chunk_end = (self.chunks.offset() + chunk.len())
4254 .min(next_capture_start)
4255 .min(next_diagnostic_endpoint);
4256 let mut highlight_id = None;
4257 if let Some(highlights) = self.highlights.as_ref() {
4258 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4259 chunk_end = chunk_end.min(*parent_capture_end);
4260 highlight_id = Some(*parent_highlight_id);
4261 }
4262 }
4263
4264 let slice =
4265 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4266 self.range.start = chunk_end;
4267 if self.range.start == self.chunks.offset() + chunk.len() {
4268 self.chunks.next().unwrap();
4269 }
4270
4271 Some(Chunk {
4272 text: slice,
4273 syntax_highlight_id: highlight_id,
4274 diagnostic_severity: self.current_diagnostic_severity(),
4275 is_unnecessary: self.current_code_is_unnecessary(),
4276 ..Default::default()
4277 })
4278 } else {
4279 None
4280 }
4281 }
4282}
4283
4284impl operation_queue::Operation for Operation {
4285 fn lamport_timestamp(&self) -> clock::Lamport {
4286 match self {
4287 Operation::Buffer(_) => {
4288 unreachable!("buffer operations should never be deferred at this layer")
4289 }
4290 Operation::UpdateDiagnostics {
4291 lamport_timestamp, ..
4292 }
4293 | Operation::UpdateSelections {
4294 lamport_timestamp, ..
4295 }
4296 | Operation::UpdateCompletionTriggers {
4297 lamport_timestamp, ..
4298 } => *lamport_timestamp,
4299 }
4300 }
4301}
4302
4303impl Default for Diagnostic {
4304 fn default() -> Self {
4305 Self {
4306 source: Default::default(),
4307 code: None,
4308 severity: DiagnosticSeverity::ERROR,
4309 message: Default::default(),
4310 group_id: 0,
4311 is_primary: false,
4312 is_disk_based: false,
4313 is_unnecessary: false,
4314 data: None,
4315 }
4316 }
4317}
4318
4319impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
4321 pub fn spaces(len: u32) -> Self {
4322 Self {
4323 len,
4324 kind: IndentKind::Space,
4325 }
4326 }
4327
4328 /// Returns an [`IndentSize`] representing a tab.
4329 pub fn tab() -> Self {
4330 Self {
4331 len: 1,
4332 kind: IndentKind::Tab,
4333 }
4334 }
4335
4336 /// An iterator over the characters represented by this [`IndentSize`].
4337 pub fn chars(&self) -> impl Iterator<Item = char> {
4338 iter::repeat(self.char()).take(self.len as usize)
4339 }
4340
4341 /// The character representation of this [`IndentSize`].
4342 pub fn char(&self) -> char {
4343 match self.kind {
4344 IndentKind::Space => ' ',
4345 IndentKind::Tab => '\t',
4346 }
4347 }
4348
4349 /// Consumes the current [`IndentSize`] and returns a new one that has
4350 /// been shrunk or enlarged by the given size along the given direction.
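    ///
    /// A minimal sketch of the expected behavior (not compiled as a doctest), assuming the
    /// `len` field is accessible:
    ///
    /// ```ignore
    /// // Growing a four-space indent by two spaces yields a six-space indent.
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(2));
    /// assert_eq!(indent.len, 6);
    /// ```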
4351 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4352 match direction {
4353 Ordering::Less => {
4354 if self.kind == size.kind && self.len >= size.len {
4355 self.len -= size.len;
4356 }
4357 }
4358 Ordering::Equal => {}
4359 Ordering::Greater => {
4360 if self.len == 0 {
4361 self = size;
4362 } else if self.kind == size.kind {
4363 self.len += size.len;
4364 }
4365 }
4366 }
4367 self
4368 }
4369
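    /// The length of this indent in columns, expanding each tab to `tab_size` columns.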
4370 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4371 match self.kind {
4372 IndentKind::Space => self.len as usize,
4373 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4374 }
4375 }
4376}
4377
4378#[cfg(any(test, feature = "test-support"))]
4379pub struct TestFile {
4380 pub path: Arc<Path>,
4381 pub root_name: String,
4382}
4383
4384#[cfg(any(test, feature = "test-support"))]
4385impl File for TestFile {
4386 fn path(&self) -> &Arc<Path> {
4387 &self.path
4388 }
4389
4390 fn full_path(&self, _: &gpui::App) -> PathBuf {
4391 PathBuf::from(&self.root_name).join(self.path.as_ref())
4392 }
4393
4394 fn as_local(&self) -> Option<&dyn LocalFile> {
4395 None
4396 }
4397
4398 fn disk_state(&self) -> DiskState {
4399 unimplemented!()
4400 }
4401
4402 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4403 self.path().file_name().unwrap_or(self.root_name.as_ref())
4404 }
4405
4406 fn worktree_id(&self, _: &App) -> WorktreeId {
4407 WorktreeId::from_usize(0)
4408 }
4409
4410 fn as_any(&self) -> &dyn std::any::Any {
4411 unimplemented!()
4412 }
4413
4414 fn to_proto(&self, _: &App) -> rpc::proto::File {
4415 unimplemented!()
4416 }
4417
4418 fn is_private(&self) -> bool {
4419 false
4420 }
4421}
4422
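/// Groups an ascending sequence of values into contiguous ranges, starting a new range
/// whenever a gap occurs or the current range reaches `max_len` items.
///
/// A minimal sketch of the expected behavior (not compiled, since this helper is crate-private):
///
/// ```ignore
/// // 1, 2, 3 are contiguous; 5 and 6 start a new range after the gap.
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7]);
/// ```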
pub(crate) fn contiguous_ranges(
    mut values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
4428 let mut current_range: Option<Range<u32>> = None;
4429 std::iter::from_fn(move || loop {
4430 if let Some(value) = values.next() {
4431 if let Some(range) = &mut current_range {
4432 if value == range.end && range.len() < max_len {
4433 range.end += 1;
4434 continue;
4435 }
4436 }
4437
4438 let prev_range = current_range.clone();
4439 current_range = Some(value..(value + 1));
4440 if prev_range.is_some() {
4441 return prev_range;
4442 }
4443 } else {
4444 return current_range.take();
4445 }
4446 })
4447}
4448
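/// Classifies characters as word, whitespace, or punctuation, taking a language
/// scope's configured word characters into account.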
4449#[derive(Default, Debug)]
4450pub struct CharClassifier {
4451 scope: Option<LanguageScope>,
4452 for_completion: bool,
4453 ignore_punctuation: bool,
4454}
4455
4456impl CharClassifier {
4457 pub fn new(scope: Option<LanguageScope>) -> Self {
4458 Self {
4459 scope,
4460 for_completion: false,
4461 ignore_punctuation: false,
4462 }
4463 }
4464
4465 pub fn for_completion(self, for_completion: bool) -> Self {
4466 Self {
4467 for_completion,
4468 ..self
4469 }
4470 }
4471
4472 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4473 Self {
4474 ignore_punctuation,
4475 ..self
4476 }
4477 }
4478
4479 pub fn is_whitespace(&self, c: char) -> bool {
4480 self.kind(c) == CharKind::Whitespace
4481 }
4482
4483 pub fn is_word(&self, c: char) -> bool {
4484 self.kind(c) == CharKind::Word
4485 }
4486
4487 pub fn is_punctuation(&self, c: char) -> bool {
4488 self.kind(c) == CharKind::Punctuation
4489 }
4490
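    /// Returns the [`CharKind`] of `c`, taking the language scope's additional word
    /// characters into account. When `ignore_punctuation` is true, punctuation
    /// characters are classified as word characters.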
4491 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4492 if c.is_whitespace() {
4493 return CharKind::Whitespace;
4494 } else if c.is_alphanumeric() || c == '_' {
4495 return CharKind::Word;
4496 }
4497
4498 if let Some(scope) = &self.scope {
4499 if let Some(characters) = scope.word_characters() {
4500 if characters.contains(&c) {
4501 if c == '-' && !self.for_completion && !ignore_punctuation {
4502 return CharKind::Punctuation;
4503 }
4504 return CharKind::Word;
4505 }
4506 }
4507 }
4508
4509 if ignore_punctuation {
4510 CharKind::Word
4511 } else {
4512 CharKind::Punctuation
4513 }
4514 }
4515
4516 pub fn kind(&self, c: char) -> CharKind {
4517 self.kind_with(c, self.ignore_punctuation)
4518 }
4519}
4520
4521/// Find all of the ranges of whitespace that occur at the ends of lines
4522/// in the given rope.
4523///
4524/// This could also be done with a regex search, but this implementation
4525/// avoids copying text.
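///
/// A minimal sketch (not compiled as a doctest), assuming `Rope` implements `From<&str>`:
///
/// ```ignore
/// let rope = Rope::from("let x = 1;  \nfoo\t\n");
/// // The two trailing spaces on line one and the trailing tab on line two are reported.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![10..12, 16..17]);
/// ```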
4526pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4527 let mut ranges = Vec::new();
4528
4529 let mut offset = 0;
4530 let mut prev_chunk_trailing_whitespace_range = 0..0;
4531 for chunk in rope.chunks() {
4532 let mut prev_line_trailing_whitespace_range = 0..0;
4533 for (i, line) in chunk.split('\n').enumerate() {
4534 let line_end_offset = offset + line.len();
4535 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4536 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4537
4538 if i == 0 && trimmed_line_len == 0 {
4539 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4540 }
4541 if !prev_line_trailing_whitespace_range.is_empty() {
4542 ranges.push(prev_line_trailing_whitespace_range);
4543 }
4544
4545 offset = line_end_offset + 1;
4546 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4547 }
4548
4549 offset -= 1;
4550 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4551 }
4552
4553 if !prev_chunk_trailing_whitespace_range.is_empty() {
4554 ranges.push(prev_chunk_trailing_whitespace_range);
4555 }
4556
4557 ranges
4558}