1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 markdown::ParsedMarkdown,
5 proto, Grammar, Language, LanguageRegistry,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 language_settings::{language_settings, LanguageSettings},
10 markdown::parse_markdown,
11 outline::OutlineItem,
12 syntax_map::{
13 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
14 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
15 },
16 task_context::RunnableRange,
17 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, TextObject,
18 TreeSitterOptions,
19};
20use anyhow::{anyhow, Context, Result};
21use async_watch as watch;
22use clock::Lamport;
23pub use clock::ReplicaId;
24use collections::HashMap;
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 AnyElement, AppContext, Context as _, EventEmitter, HighlightStyle, Model, ModelContext,
29 Pixels, SharedString, Task, TaskLabel, WindowContext,
30};
31use lsp::LanguageServerId;
32use parking_lot::Mutex;
33use schemars::JsonSchema;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use similar::{ChangeTag, TextDiff};
38use smallvec::SmallVec;
39use smol::future::yield_now;
40use std::{
41 any::Any,
42 borrow::Cow,
43 cell::Cell,
44 cmp::{self, Ordering, Reverse},
45 collections::{BTreeMap, BTreeSet},
46 ffi::OsStr,
47 fmt,
48 future::Future,
49 iter::{self, Iterator, Peekable},
50 mem,
51 num::NonZeroU32,
52 ops::{Deref, DerefMut, Range},
53 path::{Path, PathBuf},
54 str,
55 sync::{Arc, LazyLock},
56 time::{Duration, Instant},
57 vec,
58};
59use sum_tree::TreeMap;
60use text::operation_queue::OperationQueue;
61use text::*;
62pub use text::{
63 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
64 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
65 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
66 Transaction, TransactionId, Unclipped,
67};
68use theme::{ActiveTheme as _, SyntaxTheme};
69#[cfg(any(test, feature = "test-support"))]
70use util::RandomCharIter;
71use util::{debug_panic, maybe, RangeExt};
72
73#[cfg(any(test, feature = "test-support"))]
74pub use {tree_sitter_rust, tree_sitter_typescript};
75
76pub use lsp::DiagnosticSeverity;
77
78/// A label for the background task spawned by the buffer to compute
79/// a diff against the contents of its file.
80pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
81
82 /// Indicates whether a [`Buffer`] has permission to edit.
83#[derive(PartialEq, Clone, Copy, Debug)]
84pub enum Capability {
85 /// The buffer is a mutable replica.
86 ReadWrite,
87 /// The buffer is a read-only replica.
88 ReadOnly,
89}
90
91pub type BufferRow = u32;
92
93/// An in-memory representation of a source code file, including its text,
94/// syntax trees, git status, and diagnostics.
95pub struct Buffer {
96 text: TextBuffer,
97 branch_state: Option<BufferBranchState>,
98 /// Filesystem state, `None` when there is no path.
99 file: Option<Arc<dyn File>>,
100 /// The mtime of the file when this buffer was last loaded from
101 /// or saved to disk.
102 saved_mtime: Option<MTime>,
103 /// The version vector when this buffer was last loaded from
104 /// or saved to disk.
105 saved_version: clock::Global,
106 preview_version: clock::Global,
107 transaction_depth: usize,
108 was_dirty_before_starting_transaction: Option<bool>,
109 reload_task: Option<Task<Result<()>>>,
110 language: Option<Arc<Language>>,
111 autoindent_requests: Vec<Arc<AutoindentRequest>>,
112 pending_autoindent: Option<Task<()>>,
113 sync_parse_timeout: Duration,
114 syntax_map: Mutex<SyntaxMap>,
115 parsing_in_background: bool,
116 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
117 non_text_state_update_count: usize,
118 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
119 remote_selections: TreeMap<ReplicaId, SelectionSet>,
120 diagnostics_timestamp: clock::Lamport,
121 completion_triggers: BTreeSet<String>,
122 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
123 completion_triggers_timestamp: clock::Lamport,
124 deferred_ops: OperationQueue<Operation>,
125 capability: Capability,
126 has_conflict: bool,
127 /// Memoizes calls to has_changes_since(saved_version).
128 /// The contents of the cell are (self.version, has_changes) at the time of the last call.
129 has_unsaved_edits: Cell<(clock::Global, bool)>,
130 _subscriptions: Vec<gpui::Subscription>,
131}
132
133#[derive(Copy, Clone, Debug, PartialEq, Eq)]
134pub enum ParseStatus {
135 Idle,
136 Parsing,
137}
138
139struct BufferBranchState {
140 base_buffer: Model<Buffer>,
141 merged_operations: Vec<Lamport>,
142}
143
144/// An immutable, cheaply cloneable representation of a fixed
145/// state of a buffer.
146pub struct BufferSnapshot {
147 pub text: text::BufferSnapshot,
148 pub(crate) syntax: SyntaxSnapshot,
149 file: Option<Arc<dyn File>>,
150 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
151 remote_selections: TreeMap<ReplicaId, SelectionSet>,
152 language: Option<Arc<Language>>,
153 non_text_state_update_count: usize,
154}
155
156/// The kind and amount of indentation in a particular line. For now,
157/// assumes that indentation is all the same character.
158#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
159pub struct IndentSize {
160 /// The number of bytes that comprise the indentation.
161 pub len: u32,
162 /// The kind of whitespace used for indentation.
163 pub kind: IndentKind,
164}
165
166/// A whitespace character that's used for indentation.
167#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
168pub enum IndentKind {
169 /// An ASCII space character.
170 #[default]
171 Space,
172 /// An ASCII tab character.
173 Tab,
174}
175
176/// The shape of a selection cursor.
177#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
178#[serde(rename_all = "snake_case")]
179pub enum CursorShape {
180 /// A vertical bar
181 #[default]
182 Bar,
183 /// A block that surrounds the following character
184 Block,
185 /// An underline that runs along the following character
186 Underline,
187 /// A box drawn around the following character
188 Hollow,
189}
190
191#[derive(Clone, Debug)]
192struct SelectionSet {
193 line_mode: bool,
194 cursor_shape: CursorShape,
195 selections: Arc<[Selection<Anchor>]>,
196 lamport_timestamp: clock::Lamport,
197}
198
199/// A diagnostic associated with a certain range of a buffer.
200#[derive(Clone, Debug, PartialEq, Eq)]
201pub struct Diagnostic {
202 /// The name of the service that produced this diagnostic.
203 pub source: Option<String>,
204 /// A machine-readable code that identifies this diagnostic.
205 pub code: Option<String>,
206 /// Whether this diagnostic is a hint, warning, or error.
207 pub severity: DiagnosticSeverity,
208 /// The human-readable message associated with this diagnostic.
209 pub message: String,
210 /// An id that identifies the group to which this diagnostic belongs.
211 ///
212 /// When a language server produces a diagnostic with
213 /// one or more associated diagnostics, those diagnostics are all
214 /// assigned a single group ID.
215 pub group_id: usize,
216 /// Whether this diagnostic is the primary diagnostic for its group.
217 ///
218 /// In a given group, the primary diagnostic is the top-level diagnostic
219 /// returned by the language server. The non-primary diagnostics are the
220 /// associated diagnostics.
221 pub is_primary: bool,
222 /// Whether this diagnostic is considered to originate from an analysis of
223 /// files on disk, as opposed to any unsaved buffer contents. This is a
224 /// property of a given diagnostic source, and is configured for a given
225 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
226 /// for the language server.
227 pub is_disk_based: bool,
228 /// Whether this diagnostic marks unnecessary code.
229 pub is_unnecessary: bool,
230 /// Data from the language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
231 pub data: Option<Value>,
232}
233
234/// TODO - move this into the `project` crate and make it private.
235pub async fn prepare_completion_documentation(
236 documentation: &lsp::Documentation,
237 language_registry: &Arc<LanguageRegistry>,
238 language: Option<Arc<Language>>,
239) -> Documentation {
240 match documentation {
241 lsp::Documentation::String(text) => {
242 if text.lines().count() <= 1 {
243 Documentation::SingleLine(text.clone())
244 } else {
245 Documentation::MultiLinePlainText(text.clone())
246 }
247 }
248
249 lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
250 lsp::MarkupKind::PlainText => {
251 if value.lines().count() <= 1 {
252 Documentation::SingleLine(value.clone())
253 } else {
254 Documentation::MultiLinePlainText(value.clone())
255 }
256 }
257
258 lsp::MarkupKind::Markdown => {
259 let parsed = parse_markdown(value, Some(language_registry), language).await;
260 Documentation::MultiLineMarkdown(parsed)
261 }
262 },
263 }
264}
265
266/// Documentation associated with a [`Completion`].
267#[derive(Clone, Debug)]
268pub enum Documentation {
269 /// There is no documentation for this completion.
270 Undocumented,
271 /// A single line of documentation.
272 SingleLine(String),
273 /// Multiple lines of plain text documentation.
274 MultiLinePlainText(String),
275 /// Markdown documentation.
276 MultiLineMarkdown(ParsedMarkdown),
277}
278
279/// An operation used to synchronize this buffer with its other replicas.
280#[derive(Clone, Debug, PartialEq)]
281pub enum Operation {
282 /// A text operation.
283 Buffer(text::Operation),
284
285 /// An update to the buffer's diagnostics.
286 UpdateDiagnostics {
287 /// The id of the language server that produced the new diagnostics.
288 server_id: LanguageServerId,
289 /// The diagnostics.
290 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
291 /// The buffer's lamport timestamp.
292 lamport_timestamp: clock::Lamport,
293 },
294
295 /// An update to the most recent selections in this buffer.
296 UpdateSelections {
297 /// The selections.
298 selections: Arc<[Selection<Anchor>]>,
299 /// The buffer's lamport timestamp.
300 lamport_timestamp: clock::Lamport,
301 /// Whether the selections are in 'line mode'.
302 line_mode: bool,
303 /// The [`CursorShape`] associated with these selections.
304 cursor_shape: CursorShape,
305 },
306
307 /// An update to the characters that should trigger autocompletion
308 /// for this buffer.
309 UpdateCompletionTriggers {
310 /// The characters that trigger autocompletion.
311 triggers: Vec<String>,
312 /// The buffer's lamport timestamp.
313 lamport_timestamp: clock::Lamport,
314 /// The language server ID.
315 server_id: LanguageServerId,
316 },
317}
318
319/// An event that occurs in a buffer.
320#[derive(Clone, Debug, PartialEq)]
321pub enum BufferEvent {
322 /// The buffer was changed in a way that must be
323 /// propagated to its other replicas.
324 Operation {
325 operation: Operation,
326 is_local: bool,
327 },
328 /// The buffer was edited.
329 Edited,
330 /// The buffer's `dirty` bit changed.
331 DirtyChanged,
332 /// The buffer was saved.
333 Saved,
334 /// The buffer's file was changed on disk.
335 FileHandleChanged,
336 /// The buffer was reloaded.
337 Reloaded,
338 /// The buffer needs to be reloaded.
339 ReloadNeeded,
340 /// The buffer's language was changed.
341 LanguageChanged,
342 /// The buffer's syntax trees were updated.
343 Reparsed,
344 /// The buffer's diagnostics were updated.
345 DiagnosticsUpdated,
346 /// The buffer gained or lost editing capabilities.
347 CapabilityChanged,
348 /// The buffer was explicitly requested to close.
349 Closed,
350 /// The buffer was discarded when closing.
351 Discarded,
352}
353
354/// The file associated with a buffer.
355pub trait File: Send + Sync {
356 /// Returns the [`LocalFile`] associated with this file, if the
357 /// file is local.
358 fn as_local(&self) -> Option<&dyn LocalFile>;
359
360 /// Returns whether this file is local.
361 fn is_local(&self) -> bool {
362 self.as_local().is_some()
363 }
364
365 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
366 /// only available in some states, such as modification time.
367 fn disk_state(&self) -> DiskState;
368
369 /// Returns the path of this file relative to the worktree's root directory.
370 fn path(&self) -> &Arc<Path>;
371
372 /// Returns the path of this file relative to the worktree's parent directory (this means it
373 /// includes the name of the worktree's root folder).
374 fn full_path(&self, cx: &AppContext) -> PathBuf;
375
376 /// Returns the last component of this handle's absolute path. If this handle refers to the root
377 /// of its worktree, then this method will return the name of the worktree itself.
378 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
379
380 /// Returns the id of the worktree to which this file belongs.
381 ///
382 /// This is needed for looking up project-specific settings.
383 fn worktree_id(&self, cx: &AppContext) -> WorktreeId;
384
385 /// Converts this file into an [`Any`] trait object.
386 fn as_any(&self) -> &dyn Any;
387
388 /// Converts this file into a protobuf message.
389 fn to_proto(&self, cx: &AppContext) -> rpc::proto::File;
390
391 /// Returns whether Zed considers this to be a private file.
392 fn is_private(&self) -> bool;
393}
394
395/// The file's storage status - whether it's stored (`Present`), and if so when it was last
396/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
397/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
398/// indicator for new files.
399#[derive(Copy, Clone, Debug, PartialEq)]
400pub enum DiskState {
401 /// File created in Zed that has not been saved.
402 New,
403 /// File present on the filesystem.
404 Present { mtime: MTime },
405 /// Deleted file that was previously present.
406 Deleted,
407}
408
409impl DiskState {
410 /// Returns the file's last known modification time on disk.
411 pub fn mtime(self) -> Option<MTime> {
412 match self {
413 DiskState::New => None,
414 DiskState::Present { mtime } => Some(mtime),
415 DiskState::Deleted => None,
416 }
417 }
418}
419
420/// The file associated with a buffer, in the case where the file is on the local disk.
421pub trait LocalFile: File {
422 /// Returns the absolute path of this file.
423 fn abs_path(&self, cx: &AppContext) -> PathBuf;
424
425 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
426 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
427
428 /// Loads the file's contents from disk.
429 fn load_bytes(&self, cx: &AppContext) -> Task<Result<Vec<u8>>>;
430}
431
432/// The auto-indent behavior associated with an editing operation.
433/// For some editing operations, each affected line of text has its
434/// indentation recomputed. For other operations, the entire block
435/// of edited text is adjusted uniformly.
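///
/// An illustrative sketch of both modes (assumes a gpui `ModelContext`
/// named `cx` and a mutable `Buffer`; not compiled as a doctest):
///
/// ```ignore
/// // Re-indent each inserted line independently.
/// buffer.edit(
///     [(0..0, "if condition {\n    do_thing();\n}\n")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
///
/// // Shift a pasted block uniformly, preserving its internal indentation.
/// // The caller supplies the original indent column of the first line of
/// // each insertion.
/// buffer.edit(
///     [(0..0, "    foo();\n        bar();\n")],
///     Some(AutoindentMode::Block { original_indent_columns: vec![4] }),
///     cx,
/// );
/// ```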
436#[derive(Clone, Debug)]
437pub enum AutoindentMode {
438 /// Indent each line of inserted text.
439 EachLine,
440 /// Apply the same indentation adjustment to all of the lines
441 /// in a given insertion.
442 Block {
443 /// The original indentation level of the first line of each
444 /// insertion, if it has been copied.
445 original_indent_columns: Vec<u32>,
446 },
447}
448
449#[derive(Clone)]
450struct AutoindentRequest {
451 before_edit: BufferSnapshot,
452 entries: Vec<AutoindentRequestEntry>,
453 is_block_mode: bool,
454 ignore_empty_lines: bool,
455}
456
457#[derive(Debug, Clone)]
458struct AutoindentRequestEntry {
459 /// A range of the buffer whose indentation should be adjusted.
460 range: Range<Anchor>,
461 /// Whether or not these lines should be considered brand new, for the
462 /// purpose of auto-indent. When text is not new, its indentation will
463 /// only be adjusted if the suggested indentation level has *changed*
464 /// since the edit was made.
465 first_line_is_new: bool,
466 indent_size: IndentSize,
467 original_indent_column: Option<u32>,
468}
469
470#[derive(Debug)]
471struct IndentSuggestion {
472 basis_row: u32,
473 delta: Ordering,
474 within_error: bool,
475}
476
477struct BufferChunkHighlights<'a> {
478 captures: SyntaxMapCaptures<'a>,
479 next_capture: Option<SyntaxMapCapture<'a>>,
480 stack: Vec<(usize, HighlightId)>,
481 highlight_maps: Vec<HighlightMap>,
482}
483
484/// An iterator that yields chunks of a buffer's text, along with their
485/// syntax highlights and diagnostic status.
486pub struct BufferChunks<'a> {
487 buffer_snapshot: Option<&'a BufferSnapshot>,
488 range: Range<usize>,
489 chunks: text::Chunks<'a>,
490 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
491 error_depth: usize,
492 warning_depth: usize,
493 information_depth: usize,
494 hint_depth: usize,
495 unnecessary_depth: usize,
496 highlights: Option<BufferChunkHighlights<'a>>,
497}
498
499/// A chunk of a buffer's text, along with its syntax highlight and
500/// diagnostic status.
501#[derive(Clone, Debug, Default)]
502pub struct Chunk<'a> {
503 /// The text of the chunk.
504 pub text: &'a str,
505 /// The syntax highlighting style of the chunk.
506 pub syntax_highlight_id: Option<HighlightId>,
507 /// The highlight style that has been applied to this chunk in
508 /// the editor.
509 pub highlight_style: Option<HighlightStyle>,
510 /// The severity of diagnostic associated with this chunk, if any.
511 pub diagnostic_severity: Option<DiagnosticSeverity>,
512 /// Whether this chunk of text is marked as unnecessary.
513 pub is_unnecessary: bool,
514 /// Whether this chunk of text was originally a tab character.
515 pub is_tab: bool,
516 /// An optional recipe for how the chunk should be presented.
517 pub renderer: Option<ChunkRenderer>,
518}
519
520/// A recipe for how the chunk should be presented.
521#[derive(Clone)]
522pub struct ChunkRenderer {
523 /// Creates a custom element to represent this chunk.
524 pub render: Arc<dyn Send + Sync + Fn(&mut ChunkRendererContext) -> AnyElement>,
525 /// If true, the element is constrained to the shaped width of the text.
526 pub constrain_width: bool,
527}
528
529pub struct ChunkRendererContext<'a, 'b> {
530 pub context: &'a mut WindowContext<'b>,
531 pub max_width: Pixels,
532}
533
534impl fmt::Debug for ChunkRenderer {
535 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
536 f.debug_struct("ChunkRenderer")
537 .field("constrain_width", &self.constrain_width)
538 .finish()
539 }
540}
541
542impl<'a, 'b> Deref for ChunkRendererContext<'a, 'b> {
543 type Target = WindowContext<'b>;
544
545 fn deref(&self) -> &Self::Target {
546 self.context
547 }
548}
549
550impl<'a, 'b> DerefMut for ChunkRendererContext<'a, 'b> {
551 fn deref_mut(&mut self) -> &mut Self::Target {
552 self.context
553 }
554}
555
556/// A set of edits to a given version of a buffer, computed asynchronously.
557#[derive(Debug)]
558pub struct Diff {
559 pub(crate) base_version: clock::Global,
560 line_ending: LineEnding,
561 pub edits: Vec<(Range<usize>, Arc<str>)>,
562}
563
564#[derive(Clone, Copy)]
565pub(crate) struct DiagnosticEndpoint {
566 offset: usize,
567 is_start: bool,
568 severity: DiagnosticSeverity,
569 is_unnecessary: bool,
570}
571
572/// A class of characters, used for characterizing a run of text.
573#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
574pub enum CharKind {
575 /// Whitespace.
576 Whitespace,
577 /// Punctuation.
578 Punctuation,
579 /// Word.
580 Word,
581}
582
583 /// A runnable is a set of data about a region that could be resolved into a task.
584pub struct Runnable {
585 pub tags: SmallVec<[RunnableTag; 1]>,
586 pub language: Arc<Language>,
587 pub buffer: BufferId,
588}
589
590#[derive(Clone)]
591pub struct EditPreview {
592 applied_edits_snapshot: text::BufferSnapshot,
593 syntax_snapshot: SyntaxSnapshot,
594}
595
596#[derive(Default, Clone, Debug)]
597pub struct HighlightedEdits {
598 pub text: SharedString,
599 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
600}
601
602impl EditPreview {
603 pub fn highlight_edits(
604 &self,
605 current_snapshot: &BufferSnapshot,
606 edits: &[(Range<Anchor>, String)],
607 include_deletions: bool,
608 cx: &AppContext,
609 ) -> HighlightedEdits {
610 let mut text = String::new();
611 let mut highlights = Vec::new();
612 let Some(range) = self.compute_visible_range(edits, current_snapshot) else {
613 return HighlightedEdits::default();
614 };
615 let mut offset = range.start;
616 let mut delta = 0isize;
617
618 let status_colors = cx.theme().status();
619
620 for (range, edit_text) in edits {
621 let edit_range = range.to_offset(current_snapshot);
622 let new_edit_start = (edit_range.start as isize + delta) as usize;
623 let new_edit_range = new_edit_start..new_edit_start + edit_text.len();
624
625 let prev_range = offset..new_edit_start;
626
627 if !prev_range.is_empty() {
628 let start = text.len();
629 self.highlight_text(prev_range, &mut text, &mut highlights, None, cx);
630 offset += text.len() - start;
631 }
632
633 if include_deletions && !edit_range.is_empty() {
634 let start = text.len();
635 text.extend(current_snapshot.text_for_range(edit_range.clone()));
636 let end = text.len();
637
638 highlights.push((
639 start..end,
640 HighlightStyle {
641 background_color: Some(status_colors.deleted_background),
642 ..Default::default()
643 },
644 ));
645 }
646
647 if !edit_text.is_empty() {
648 self.highlight_text(
649 new_edit_range,
650 &mut text,
651 &mut highlights,
652 Some(HighlightStyle {
653 background_color: Some(status_colors.created_background),
654 ..Default::default()
655 }),
656 cx,
657 );
658
659 offset += edit_text.len();
660 }
661
662 delta += edit_text.len() as isize - edit_range.len() as isize;
663 }
664
665 self.highlight_text(
666 offset..(range.end as isize + delta) as usize,
667 &mut text,
668 &mut highlights,
669 None,
670 cx,
671 );
672
673 HighlightedEdits {
674 text: text.into(),
675 highlights,
676 }
677 }
678
679 fn highlight_text(
680 &self,
681 range: Range<usize>,
682 text: &mut String,
683 highlights: &mut Vec<(Range<usize>, HighlightStyle)>,
684 override_style: Option<HighlightStyle>,
685 cx: &AppContext,
686 ) {
687 for chunk in self.highlighted_chunks(range) {
688 let start = text.len();
689 text.push_str(chunk.text);
690 let end = text.len();
691
692 if let Some(mut highlight_style) = chunk
693 .syntax_highlight_id
694 .and_then(|id| id.style(cx.theme().syntax()))
695 {
696 if let Some(override_style) = override_style {
697 highlight_style.highlight(override_style);
698 }
699 highlights.push((start..end, highlight_style));
700 } else if let Some(override_style) = override_style {
701 highlights.push((start..end, override_style));
702 }
703 }
704 }
705
706 fn highlighted_chunks(&self, range: Range<usize>) -> BufferChunks {
707 let captures =
708 self.syntax_snapshot
709 .captures(range.clone(), &self.applied_edits_snapshot, |grammar| {
710 grammar.highlights_query.as_ref()
711 });
712
713 let highlight_maps = captures
714 .grammars()
715 .iter()
716 .map(|grammar| grammar.highlight_map())
717 .collect();
718
719 BufferChunks::new(
720 self.applied_edits_snapshot.as_rope(),
721 range,
722 Some((captures, highlight_maps)),
723 false,
724 None,
725 )
726 }
727
728 fn compute_visible_range(
729 &self,
730 edits: &[(Range<Anchor>, String)],
731 snapshot: &BufferSnapshot,
732 ) -> Option<Range<usize>> {
733 let (first, _) = edits.first()?;
734 let (last, _) = edits.last()?;
735
736 let start = first.start.to_point(snapshot);
737 let end = last.end.to_point(snapshot);
738
739 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
740 let range = Point::new(start.row, 0)..Point::new(end.row, snapshot.line_len(end.row));
741
742 Some(range.to_offset(&snapshot))
743 }
744}
745
746impl Buffer {
747 /// Create a new buffer with the given base text.
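///
/// An illustrative sketch (assumes a gpui `AppContext` named `cx`; not
/// compiled as a doctest):
///
/// ```ignore
/// let buffer = cx.new_model(|cx| Buffer::local("fn main() {}\n", cx));
/// ```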
748 pub fn local<T: Into<String>>(base_text: T, cx: &ModelContext<Self>) -> Self {
749 Self::build(
750 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
751 None,
752 Capability::ReadWrite,
753 )
754 }
755
756 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
757 pub fn local_normalized(
758 base_text_normalized: Rope,
759 line_ending: LineEnding,
760 cx: &ModelContext<Self>,
761 ) -> Self {
762 Self::build(
763 TextBuffer::new_normalized(
764 0,
765 cx.entity_id().as_non_zero_u64().into(),
766 line_ending,
767 base_text_normalized,
768 ),
769 None,
770 Capability::ReadWrite,
771 )
772 }
773
774 /// Create a new buffer that is a replica of a remote buffer.
775 pub fn remote(
776 remote_id: BufferId,
777 replica_id: ReplicaId,
778 capability: Capability,
779 base_text: impl Into<String>,
780 ) -> Self {
781 Self::build(
782 TextBuffer::new(replica_id, remote_id, base_text.into()),
783 None,
784 capability,
785 )
786 }
787
788 /// Create a new buffer that is a replica of a remote buffer, populating its
789 /// state from the given protobuf message.
790 pub fn from_proto(
791 replica_id: ReplicaId,
792 capability: Capability,
793 message: proto::BufferState,
794 file: Option<Arc<dyn File>>,
795 ) -> Result<Self> {
796 let buffer_id = BufferId::new(message.id)
797 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
798 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
799 let mut this = Self::build(buffer, file, capability);
800 this.text.set_line_ending(proto::deserialize_line_ending(
801 rpc::proto::LineEnding::from_i32(message.line_ending)
802 .ok_or_else(|| anyhow!("missing line_ending"))?,
803 ));
804 this.saved_version = proto::deserialize_version(&message.saved_version);
805 this.saved_mtime = message.saved_mtime.map(|time| time.into());
806 Ok(this)
807 }
808
809 /// Serialize the buffer's state to a protobuf message.
810 pub fn to_proto(&self, cx: &AppContext) -> proto::BufferState {
811 proto::BufferState {
812 id: self.remote_id().into(),
813 file: self.file.as_ref().map(|f| f.to_proto(cx)),
814 base_text: self.base_text().to_string(),
815 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
816 saved_version: proto::serialize_version(&self.saved_version),
817 saved_mtime: self.saved_mtime.map(|time| time.into()),
818 }
819 }
820
821 /// Serialize as protobufs all of the changes to the buffer since the given version.
822 pub fn serialize_ops(
823 &self,
824 since: Option<clock::Global>,
825 cx: &AppContext,
826 ) -> Task<Vec<proto::Operation>> {
827 let mut operations = Vec::new();
828 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
829
830 operations.extend(self.remote_selections.iter().map(|(_, set)| {
831 proto::serialize_operation(&Operation::UpdateSelections {
832 selections: set.selections.clone(),
833 lamport_timestamp: set.lamport_timestamp,
834 line_mode: set.line_mode,
835 cursor_shape: set.cursor_shape,
836 })
837 }));
838
839 for (server_id, diagnostics) in &self.diagnostics {
840 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
841 lamport_timestamp: self.diagnostics_timestamp,
842 server_id: *server_id,
843 diagnostics: diagnostics.iter().cloned().collect(),
844 }));
845 }
846
847 for (server_id, completions) in &self.completion_triggers_per_language_server {
848 operations.push(proto::serialize_operation(
849 &Operation::UpdateCompletionTriggers {
850 triggers: completions.iter().cloned().collect(),
851 lamport_timestamp: self.completion_triggers_timestamp,
852 server_id: *server_id,
853 },
854 ));
855 }
856
857 let text_operations = self.text.operations().clone();
858 cx.background_executor().spawn(async move {
859 let since = since.unwrap_or_default();
860 operations.extend(
861 text_operations
862 .iter()
863 .filter(|(_, op)| !since.observed(op.timestamp()))
864 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
865 );
866 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
867 operations
868 })
869 }
870
871 /// Assign a language to the buffer, returning the buffer.
872 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
873 self.set_language(Some(language), cx);
874 self
875 }
876
877 /// Returns the [`Capability`] of this buffer.
878 pub fn capability(&self) -> Capability {
879 self.capability
880 }
881
882 /// Whether this buffer can only be read.
883 pub fn read_only(&self) -> bool {
884 self.capability == Capability::ReadOnly
885 }
886
887 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
888 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
889 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
890 let snapshot = buffer.snapshot();
891 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
892 Self {
893 saved_mtime,
894 saved_version: buffer.version(),
895 preview_version: buffer.version(),
896 reload_task: None,
897 transaction_depth: 0,
898 was_dirty_before_starting_transaction: None,
899 has_unsaved_edits: Cell::new((buffer.version(), false)),
900 text: buffer,
901 branch_state: None,
902 file,
903 capability,
904 syntax_map,
905 parsing_in_background: false,
906 non_text_state_update_count: 0,
907 sync_parse_timeout: Duration::from_millis(1),
908 parse_status: async_watch::channel(ParseStatus::Idle),
909 autoindent_requests: Default::default(),
910 pending_autoindent: Default::default(),
911 language: None,
912 remote_selections: Default::default(),
913 diagnostics: Default::default(),
914 diagnostics_timestamp: Default::default(),
915 completion_triggers: Default::default(),
916 completion_triggers_per_language_server: Default::default(),
917 completion_triggers_timestamp: Default::default(),
918 deferred_ops: OperationQueue::new(),
919 has_conflict: false,
920 _subscriptions: Vec::new(),
921 }
922 }
923
924 pub fn build_snapshot(
925 text: Rope,
926 language: Option<Arc<Language>>,
927 language_registry: Option<Arc<LanguageRegistry>>,
928 cx: &mut AppContext,
929 ) -> impl Future<Output = BufferSnapshot> {
930 let entity_id = cx.reserve_model::<Self>().entity_id();
931 let buffer_id = entity_id.as_non_zero_u64().into();
932 async move {
933 let text =
934 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
935 let mut syntax = SyntaxMap::new(&text).snapshot();
936 if let Some(language) = language.clone() {
937 let text = text.clone();
938 let language = language.clone();
939 let language_registry = language_registry.clone();
940 syntax.reparse(&text, language_registry, language);
941 }
942 BufferSnapshot {
943 text,
944 syntax,
945 file: None,
946 diagnostics: Default::default(),
947 remote_selections: Default::default(),
948 language,
949 non_text_state_update_count: 0,
950 }
951 }
952 }
953
954 /// Retrieve a snapshot of the buffer's current state. This is computationally
955 /// cheap, and allows reading from the buffer on a background thread.
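///
/// A sketch of reading from a snapshot on a background thread (assumes a
/// gpui `AppContext` named `cx`; not compiled as a doctest):
///
/// ```ignore
/// let snapshot = buffer.snapshot();
/// let read_task = cx.background_executor().spawn(async move {
///     // The snapshot is immutable, so it can be read off the main thread.
///     snapshot.text_for_range(0..snapshot.len()).collect::<String>()
/// });
/// ```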
956 pub fn snapshot(&self) -> BufferSnapshot {
957 let text = self.text.snapshot();
958 let mut syntax_map = self.syntax_map.lock();
959 syntax_map.interpolate(&text);
960 let syntax = syntax_map.snapshot();
961
962 BufferSnapshot {
963 text,
964 syntax,
965 file: self.file.clone(),
966 remote_selections: self.remote_selections.clone(),
967 diagnostics: self.diagnostics.clone(),
968 language: self.language.clone(),
969 non_text_state_update_count: self.non_text_state_update_count,
970 }
971 }
972
973 pub fn branch(&mut self, cx: &mut ModelContext<Self>) -> Model<Self> {
974 let this = cx.handle();
975 cx.new_model(|cx| {
976 let mut branch = Self {
977 branch_state: Some(BufferBranchState {
978 base_buffer: this.clone(),
979 merged_operations: Default::default(),
980 }),
981 language: self.language.clone(),
982 has_conflict: self.has_conflict,
983 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
984 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
985 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
986 };
987 if let Some(language_registry) = self.language_registry() {
988 branch.set_language_registry(language_registry);
989 }
990
991 // Reparse the branch buffer so that we get syntax highlighting immediately.
992 branch.reparse(cx);
993
994 branch
995 })
996 }
997
998 pub fn preview_edits(
999 &self,
1000 edits: Arc<[(Range<Anchor>, String)]>,
1001 cx: &AppContext,
1002 ) -> Task<EditPreview> {
1003 let registry = self.language_registry();
1004 let language = self.language().cloned();
1005
1006 let mut branch_buffer = self.text.branch();
1007 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1008 cx.background_executor().spawn(async move {
1009 if !edits.is_empty() {
1010 branch_buffer.edit(edits.iter().cloned());
1011 let snapshot = branch_buffer.snapshot();
1012 syntax_snapshot.interpolate(&snapshot);
1013
1014 if let Some(language) = language {
1015 syntax_snapshot.reparse(&snapshot, registry, language);
1016 }
1017 }
1018 EditPreview {
1019 applied_edits_snapshot: branch_buffer.snapshot(),
1020 syntax_snapshot,
1021 }
1022 })
1023 }
1024
1025 /// Applies all of the changes in this buffer that intersect any of the
1026 /// given `ranges` to its base buffer.
1027 ///
1028 /// If `ranges` is empty, then all changes will be applied. This buffer must
1029 /// be a branch buffer to call this method.
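///
/// An illustrative sketch (assumes `branch` was created with
/// [`Buffer::branch`] and `cx` is a `ModelContext<Buffer>`; not compiled as
/// a doctest):
///
/// ```ignore
/// // Apply every edit made in the branch back to its base buffer.
/// branch.merge_into_base(Vec::new(), cx);
///
/// // Or apply only the edits intersecting a specific byte range.
/// branch.merge_into_base(vec![0..100], cx);
/// ```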
1030 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut ModelContext<Self>) {
1031 let Some(base_buffer) = self.base_buffer() else {
1032 debug_panic!("not a branch buffer");
1033 return;
1034 };
1035
1036 let mut ranges = if ranges.is_empty() {
1037 &[0..usize::MAX]
1038 } else {
1039 ranges.as_slice()
1040 }
1041 .into_iter()
1042 .peekable();
1043
1044 let mut edits = Vec::new();
1045 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1046 let mut is_included = false;
1047 while let Some(range) = ranges.peek() {
1048 if range.end < edit.new.start {
1049 ranges.next().unwrap();
1050 } else {
1051 if range.start <= edit.new.end {
1052 is_included = true;
1053 }
1054 break;
1055 }
1056 }
1057
1058 if is_included {
1059 edits.push((
1060 edit.old.clone(),
1061 self.text_for_range(edit.new.clone()).collect::<String>(),
1062 ));
1063 }
1064 }
1065
1066 let operation = base_buffer.update(cx, |base_buffer, cx| {
1067 // cx.emit(BufferEvent::DiffBaseChanged);
1068 base_buffer.edit(edits, None, cx)
1069 });
1070
1071 if let Some(operation) = operation {
1072 if let Some(BufferBranchState {
1073 merged_operations, ..
1074 }) = &mut self.branch_state
1075 {
1076 merged_operations.push(operation);
1077 }
1078 }
1079 }
1080
1081 fn on_base_buffer_event(
1082 &mut self,
1083 _: Model<Buffer>,
1084 event: &BufferEvent,
1085 cx: &mut ModelContext<Self>,
1086 ) {
1087 let BufferEvent::Operation { operation, .. } = event else {
1088 return;
1089 };
1090 let Some(BufferBranchState {
1091 merged_operations, ..
1092 }) = &mut self.branch_state
1093 else {
1094 return;
1095 };
1096
1097 let mut operation_to_undo = None;
1098 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1099 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1100 merged_operations.remove(ix);
1101 operation_to_undo = Some(operation.timestamp);
1102 }
1103 }
1104
1105 self.apply_ops([operation.clone()], cx);
1106
1107 if let Some(timestamp) = operation_to_undo {
1108 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1109 self.undo_operations(counts, cx);
1110 }
1111 }
1112
1113 #[cfg(test)]
1114 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1115 &self.text
1116 }
1117
1118 /// Retrieve a snapshot of the buffer's raw text, without any
1119 /// language-related state like the syntax tree or diagnostics.
1120 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1121 self.text.snapshot()
1122 }
1123
1124 /// The file associated with the buffer, if any.
1125 pub fn file(&self) -> Option<&Arc<dyn File>> {
1126 self.file.as_ref()
1127 }
1128
1129 /// The version of the buffer that was last saved or reloaded from disk.
1130 pub fn saved_version(&self) -> &clock::Global {
1131 &self.saved_version
1132 }
1133
1134 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1135 pub fn saved_mtime(&self) -> Option<MTime> {
1136 self.saved_mtime
1137 }
1138
1139 /// Assign a language to the buffer.
1140 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
1141 self.non_text_state_update_count += 1;
1142 self.syntax_map.lock().clear(&self.text);
1143 self.language = language;
1144 self.reparse(cx);
1145 cx.emit(BufferEvent::LanguageChanged);
1146 }
1147
1148 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1149 /// other languages if parts of the buffer are written in different languages.
1150 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1151 self.syntax_map
1152 .lock()
1153 .set_language_registry(language_registry);
1154 }
1155
1156 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1157 self.syntax_map.lock().language_registry()
1158 }
1159
1160 /// Assign the buffer a new [`Capability`].
1161 pub fn set_capability(&mut self, capability: Capability, cx: &mut ModelContext<Self>) {
1162 self.capability = capability;
1163 cx.emit(BufferEvent::CapabilityChanged)
1164 }
1165
1166 /// This method is called to signal that the buffer has been saved.
1167 pub fn did_save(
1168 &mut self,
1169 version: clock::Global,
1170 mtime: Option<MTime>,
1171 cx: &mut ModelContext<Self>,
1172 ) {
1173 self.saved_version = version;
1174 self.has_unsaved_edits
1175 .set((self.saved_version().clone(), false));
1176 self.has_conflict = false;
1177 self.saved_mtime = mtime;
1178 cx.emit(BufferEvent::Saved);
1179 cx.notify();
1180 }
1181
1182 /// This method is called to signal that the buffer has been discarded.
1183 pub fn discarded(&self, cx: &mut ModelContext<Self>) {
1184 cx.emit(BufferEvent::Discarded);
1185 cx.notify();
1186 }
1187
1188 /// Reloads the contents of the buffer from disk.
1189 pub fn reload(&mut self, cx: &ModelContext<Self>) -> oneshot::Receiver<Option<Transaction>> {
1190 let (tx, rx) = futures::channel::oneshot::channel();
1191 let prev_version = self.text.version();
1192 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
1193 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
1194 let file = this.file.as_ref()?.as_local()?;
1195 Some((file.disk_state().mtime(), file.load(cx)))
1196 })?
1197 else {
1198 return Ok(());
1199 };
1200
1201 let new_text = new_text.await?;
1202 let diff = this
1203 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
1204 .await;
1205 this.update(&mut cx, |this, cx| {
1206 if this.version() == diff.base_version {
1207 this.finalize_last_transaction();
1208 this.apply_diff(diff, cx);
1209 tx.send(this.finalize_last_transaction().cloned()).ok();
1210 this.has_conflict = false;
1211 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1212 } else {
1213 if !diff.edits.is_empty()
1214 || this
1215 .edits_since::<usize>(&diff.base_version)
1216 .next()
1217 .is_some()
1218 {
1219 this.has_conflict = true;
1220 }
1221
1222 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1223 }
1224
1225 this.reload_task.take();
1226 })
1227 }));
1228 rx
1229 }
1230
1231 /// This method is called to signal that the buffer has been reloaded.
1232 pub fn did_reload(
1233 &mut self,
1234 version: clock::Global,
1235 line_ending: LineEnding,
1236 mtime: Option<MTime>,
1237 cx: &mut ModelContext<Self>,
1238 ) {
1239 self.saved_version = version;
1240 self.has_unsaved_edits
1241 .set((self.saved_version.clone(), false));
1242 self.text.set_line_ending(line_ending);
1243 self.saved_mtime = mtime;
1244 cx.emit(BufferEvent::Reloaded);
1245 cx.notify();
1246 }
1247
1248 /// Updates the [`File`] backing this buffer. This should be called when
1249 /// the file has changed or has been deleted.
1250 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut ModelContext<Self>) {
1251 let was_dirty = self.is_dirty();
1252 let mut file_changed = false;
1253
1254 if let Some(old_file) = self.file.as_ref() {
1255 if new_file.path() != old_file.path() {
1256 file_changed = true;
1257 }
1258
1259 let old_state = old_file.disk_state();
1260 let new_state = new_file.disk_state();
1261 if old_state != new_state {
1262 file_changed = true;
1263 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1264 cx.emit(BufferEvent::ReloadNeeded)
1265 }
1266 }
1267 } else {
1268 file_changed = true;
1269 };
1270
1271 self.file = Some(new_file);
1272 if file_changed {
1273 self.non_text_state_update_count += 1;
1274 if was_dirty != self.is_dirty() {
1275 cx.emit(BufferEvent::DirtyChanged);
1276 }
1277 cx.emit(BufferEvent::FileHandleChanged);
1278 cx.notify();
1279 }
1280 }
1281
1282 pub fn base_buffer(&self) -> Option<Model<Self>> {
1283 Some(self.branch_state.as_ref()?.base_buffer.clone())
1284 }
1285
1286 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1287 pub fn language(&self) -> Option<&Arc<Language>> {
1288 self.language.as_ref()
1289 }
1290
1291 /// Returns the [`Language`] at the given location.
1292 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1293 let offset = position.to_offset(self);
1294 self.syntax_map
1295 .lock()
1296 .layers_for_range(offset..offset, &self.text, false)
1297 .last()
1298 .map(|info| info.language.clone())
1299 .or_else(|| self.language.clone())
1300 }
1301
1302 /// An integer version number that accounts for all updates besides
1303 /// the buffer's text itself (which is versioned via a version vector).
1304 pub fn non_text_state_update_count(&self) -> usize {
1305 self.non_text_state_update_count
1306 }
1307
1308 /// Whether the buffer is being parsed in the background.
1309 #[cfg(any(test, feature = "test-support"))]
1310 pub fn is_parsing(&self) -> bool {
1311 self.parsing_in_background
1312 }
1313
1314 /// Indicates whether the buffer contains any regions that may be
1315 /// written in a language that hasn't been loaded yet.
1316 pub fn contains_unknown_injections(&self) -> bool {
1317 self.syntax_map.lock().contains_unknown_injections()
1318 }
1319
1320 #[cfg(test)]
1321 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1322 self.sync_parse_timeout = timeout;
1323 }
1324
1325 /// Called after an edit to synchronize the buffer's main parse tree with
1326 /// the buffer's new underlying state.
1327 ///
1328 /// Locks the syntax map and interpolates the edits since the last reparse
1329 /// into the foreground syntax tree.
1330 ///
1331 /// Then takes a stable snapshot of the syntax map before unlocking it.
1332 /// The snapshot with the interpolated edits is sent to a background thread,
1333 /// where we ask Tree-sitter to perform an incremental parse.
1334 ///
1335 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1336 /// waiting for the parse to complete. If it finishes within that window,
1337 /// we apply the new syntax snapshot synchronously.
1338 ///
1339 /// If we time out waiting for the parse, we spawn a second task that waits
1340 /// for the background parse to finish, and return with the interpolated tree
1341 /// still in the foreground. When the background parse completes, it calls
1342 /// back into the main thread and assigns the newly parsed state.
1343 ///
1344 /// If the buffer or grammar changed since the start of the background parse,
1345 /// initiate an additional reparse recursively. To avoid concurrent parses
1346 /// for the same buffer, we only initiate a new parse if we are not already
1347 /// parsing in the background.
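///
/// An illustrative sketch (assumes a `ModelContext<Buffer>` named `cx`; not
/// compiled as a doctest). Edits normally schedule a reparse themselves, but
/// one can also be requested explicitly:
///
/// ```ignore
/// buffer.edit([(0..0, "fn main() {}\n")], None, cx);
/// buffer.reparse(cx);
/// // `parse_status()` yields `Parsing` until any background parse finishes,
/// // then `Idle`.
/// let status = buffer.parse_status();
/// ```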
1348 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1349 if self.parsing_in_background {
1350 return;
1351 }
1352 let language = if let Some(language) = self.language.clone() {
1353 language
1354 } else {
1355 return;
1356 };
1357
1358 let text = self.text_snapshot();
1359 let parsed_version = self.version();
1360
1361 let mut syntax_map = self.syntax_map.lock();
1362 syntax_map.interpolate(&text);
1363 let language_registry = syntax_map.language_registry();
1364 let mut syntax_snapshot = syntax_map.snapshot();
1365 drop(syntax_map);
1366
1367 let parse_task = cx.background_executor().spawn({
1368 let language = language.clone();
1369 let language_registry = language_registry.clone();
1370 async move {
1371 syntax_snapshot.reparse(&text, language_registry, language);
1372 syntax_snapshot
1373 }
1374 });
1375
1376 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1377 match cx
1378 .background_executor()
1379 .block_with_timeout(self.sync_parse_timeout, parse_task)
1380 {
1381 Ok(new_syntax_snapshot) => {
1382 self.did_finish_parsing(new_syntax_snapshot, cx);
1383 }
1384 Err(parse_task) => {
1385 self.parsing_in_background = true;
1386 cx.spawn(move |this, mut cx| async move {
1387 let new_syntax_map = parse_task.await;
1388 this.update(&mut cx, move |this, cx| {
1389 let grammar_changed =
1390 this.language.as_ref().map_or(true, |current_language| {
1391 !Arc::ptr_eq(&language, current_language)
1392 });
1393 let language_registry_changed = new_syntax_map
1394 .contains_unknown_injections()
1395 && language_registry.map_or(false, |registry| {
1396 registry.version() != new_syntax_map.language_registry_version()
1397 });
1398 let parse_again = language_registry_changed
1399 || grammar_changed
1400 || this.version.changed_since(&parsed_version);
1401 this.did_finish_parsing(new_syntax_map, cx);
1402 this.parsing_in_background = false;
1403 if parse_again {
1404 this.reparse(cx);
1405 }
1406 })
1407 .ok();
1408 })
1409 .detach();
1410 }
1411 }
1412 }
1413
1414 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
1415 self.non_text_state_update_count += 1;
1416 self.syntax_map.lock().did_parse(syntax_snapshot);
1417 self.request_autoindent(cx);
1418 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1419 cx.emit(BufferEvent::Reparsed);
1420 cx.notify();
1421 }
1422
1423 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1424 self.parse_status.1.clone()
1425 }
1426
1427 /// Assign to the buffer a set of diagnostics created by a given language server.
1428 pub fn update_diagnostics(
1429 &mut self,
1430 server_id: LanguageServerId,
1431 diagnostics: DiagnosticSet,
1432 cx: &mut ModelContext<Self>,
1433 ) {
1434 let lamport_timestamp = self.text.lamport_clock.tick();
1435 let op = Operation::UpdateDiagnostics {
1436 server_id,
1437 diagnostics: diagnostics.iter().cloned().collect(),
1438 lamport_timestamp,
1439 };
1440 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1441 self.send_operation(op, true, cx);
1442 }
1443
1444 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
1445 if let Some(indent_sizes) = self.compute_autoindents() {
1446 let indent_sizes = cx.background_executor().spawn(indent_sizes);
1447 match cx
1448 .background_executor()
1449 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1450 {
1451 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1452 Err(indent_sizes) => {
1453 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1454 let indent_sizes = indent_sizes.await;
1455 this.update(&mut cx, |this, cx| {
1456 this.apply_autoindents(indent_sizes, cx);
1457 })
1458 .ok();
1459 }));
1460 }
1461 }
1462 } else {
1463 self.autoindent_requests.clear();
1464 }
1465 }
1466
1467 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
1468 let max_rows_between_yields = 100;
1469 let snapshot = self.snapshot();
1470 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1471 return None;
1472 }
1473
1474 let autoindent_requests = self.autoindent_requests.clone();
1475 Some(async move {
1476 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1477 for request in autoindent_requests {
1478 // Resolve each edited range to its row in the current buffer and in the
1479 // buffer before this batch of edits.
1480 let mut row_ranges = Vec::new();
1481 let mut old_to_new_rows = BTreeMap::new();
1482 let mut language_indent_sizes_by_new_row = Vec::new();
1483 for entry in &request.entries {
1484 let position = entry.range.start;
1485 let new_row = position.to_point(&snapshot).row;
1486 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1487 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1488
1489 if !entry.first_line_is_new {
1490 let old_row = position.to_point(&request.before_edit).row;
1491 old_to_new_rows.insert(old_row, new_row);
1492 }
1493 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1494 }
1495
1496 // Build a map containing the suggested indentation for each of the edited lines
1497 // with respect to the state of the buffer before these edits. This map is keyed
1498 // by the rows for these lines in the current state of the buffer.
1499 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1500 let old_edited_ranges =
1501 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1502 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1503 let mut language_indent_size = IndentSize::default();
1504 for old_edited_range in old_edited_ranges {
1505 let suggestions = request
1506 .before_edit
1507 .suggest_autoindents(old_edited_range.clone())
1508 .into_iter()
1509 .flatten();
1510 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1511 if let Some(suggestion) = suggestion {
1512 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1513
1514 // Find the indent size based on the language for this row.
1515 while let Some((row, size)) = language_indent_sizes.peek() {
1516 if *row > new_row {
1517 break;
1518 }
1519 language_indent_size = *size;
1520 language_indent_sizes.next();
1521 }
1522
1523 let suggested_indent = old_to_new_rows
1524 .get(&suggestion.basis_row)
1525 .and_then(|from_row| {
1526 Some(old_suggestions.get(from_row).copied()?.0)
1527 })
1528 .unwrap_or_else(|| {
1529 request
1530 .before_edit
1531 .indent_size_for_line(suggestion.basis_row)
1532 })
1533 .with_delta(suggestion.delta, language_indent_size);
1534 old_suggestions
1535 .insert(new_row, (suggested_indent, suggestion.within_error));
1536 }
1537 }
1538 yield_now().await;
1539 }
1540
1541 // Compute new suggestions for each line, but only include them in the result
1542 // if they differ from the old suggestion for that line.
1543 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1544 let mut language_indent_size = IndentSize::default();
1545 for (row_range, original_indent_column) in row_ranges {
1546 let new_edited_row_range = if request.is_block_mode {
1547 row_range.start..row_range.start + 1
1548 } else {
1549 row_range.clone()
1550 };
1551
1552 let suggestions = snapshot
1553 .suggest_autoindents(new_edited_row_range.clone())
1554 .into_iter()
1555 .flatten();
1556 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1557 if let Some(suggestion) = suggestion {
1558 // Find the indent size based on the language for this row.
1559 while let Some((row, size)) = language_indent_sizes.peek() {
1560 if *row > new_row {
1561 break;
1562 }
1563 language_indent_size = *size;
1564 language_indent_sizes.next();
1565 }
1566
1567 let suggested_indent = indent_sizes
1568 .get(&suggestion.basis_row)
1569 .copied()
1570 .map(|e| e.0)
1571 .unwrap_or_else(|| {
1572 snapshot.indent_size_for_line(suggestion.basis_row)
1573 })
1574 .with_delta(suggestion.delta, language_indent_size);
1575
1576 if old_suggestions.get(&new_row).map_or(
1577 true,
1578 |(old_indentation, was_within_error)| {
1579 suggested_indent != *old_indentation
1580 && (!suggestion.within_error || *was_within_error)
1581 },
1582 ) {
1583 indent_sizes.insert(
1584 new_row,
1585 (suggested_indent, request.ignore_empty_lines),
1586 );
1587 }
1588 }
1589 }
1590
1591 if let (true, Some(original_indent_column)) =
1592 (request.is_block_mode, original_indent_column)
1593 {
1594 let new_indent =
1595 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1596 *indent
1597 } else {
1598 snapshot.indent_size_for_line(row_range.start)
1599 };
1600 let delta = new_indent.len as i64 - original_indent_column as i64;
1601 if delta != 0 {
1602 for row in row_range.skip(1) {
1603 indent_sizes.entry(row).or_insert_with(|| {
1604 let mut size = snapshot.indent_size_for_line(row);
1605 if size.kind == new_indent.kind {
1606 match delta.cmp(&0) {
1607 Ordering::Greater => size.len += delta as u32,
1608 Ordering::Less => {
1609 size.len = size.len.saturating_sub(-delta as u32)
1610 }
1611 Ordering::Equal => {}
1612 }
1613 }
1614 (size, request.ignore_empty_lines)
1615 });
1616 }
1617 }
1618 }
1619
1620 yield_now().await;
1621 }
1622 }
1623
1624 indent_sizes
1625 .into_iter()
1626 .filter_map(|(row, (indent, ignore_empty_lines))| {
1627 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1628 None
1629 } else {
1630 Some((row, indent))
1631 }
1632 })
1633 .collect()
1634 })
1635 }
1636
1637 fn apply_autoindents(
1638 &mut self,
1639 indent_sizes: BTreeMap<u32, IndentSize>,
1640 cx: &mut ModelContext<Self>,
1641 ) {
1642 self.autoindent_requests.clear();
1643
1644 let edits: Vec<_> = indent_sizes
1645 .into_iter()
1646 .filter_map(|(row, indent_size)| {
1647 let current_size = indent_size_for_line(self, row);
1648 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1649 })
1650 .collect();
1651
1652 let preserve_preview = self.preserve_preview();
1653 self.edit(edits, None, cx);
1654 if preserve_preview {
1655 self.refresh_preview();
1656 }
1657 }
1658
1659 /// Create a minimal edit that will cause the given row to be indented
1660 /// with the given size. After applying this edit, the length of the line
1661 /// will always be at least `new_size.len`.
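///
/// A small sketch of the computation (not compiled as a doctest):
///
/// ```ignore
/// let current = IndentSize { len: 2, kind: IndentKind::Space };
/// let desired = IndentSize { len: 4, kind: IndentKind::Space };
/// // Produces an edit that inserts two spaces at the start of row 5.
/// let edit = Buffer::edit_for_indent_size_adjustment(5, current, desired);
/// assert_eq!(
///     edit,
///     Some((Point::new(5, 0)..Point::new(5, 0), "  ".to_string()))
/// );
/// ```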
1662 pub fn edit_for_indent_size_adjustment(
1663 row: u32,
1664 current_size: IndentSize,
1665 new_size: IndentSize,
1666 ) -> Option<(Range<Point>, String)> {
1667 if new_size.kind == current_size.kind {
1668 match new_size.len.cmp(&current_size.len) {
1669 Ordering::Greater => {
1670 let point = Point::new(row, 0);
1671 Some((
1672 point..point,
1673 iter::repeat(new_size.char())
1674 .take((new_size.len - current_size.len) as usize)
1675 .collect::<String>(),
1676 ))
1677 }
1678
1679 Ordering::Less => Some((
1680 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1681 String::new(),
1682 )),
1683
1684 Ordering::Equal => None,
1685 }
1686 } else {
1687 Some((
1688 Point::new(row, 0)..Point::new(row, current_size.len),
1689 iter::repeat(new_size.char())
1690 .take(new_size.len as usize)
1691 .collect::<String>(),
1692 ))
1693 }
1694 }
1695
1696 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1697 /// and the given new text.
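///
/// A sketch of computing and applying a diff, mirroring what
/// [`Buffer::reload`] does internally (assumes an async context holding a
/// `Model<Buffer>` named `buffer`; not compiled as a doctest):
///
/// ```ignore
/// let diff = buffer
///     .update(cx, |buffer, cx| buffer.diff(new_text, cx))?
///     .await;
/// buffer.update(cx, |buffer, cx| {
///     buffer.apply_diff(diff, cx);
/// })?;
/// ```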
1698 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1699 let old_text = self.as_rope().clone();
1700 let base_version = self.version();
1701 cx.background_executor()
1702 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1703 let old_text = old_text.to_string();
1704 let line_ending = LineEnding::detect(&new_text);
1705 LineEnding::normalize(&mut new_text);
1706
1707 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1708 let empty: Arc<str> = Arc::default();
1709
1710 let mut edits = Vec::new();
1711 let mut old_offset = 0;
1712 let mut new_offset = 0;
1713 let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
1714 for change in diff.iter_all_changes().map(Some).chain([None]) {
1715 if let Some(change) = &change {
1716 let len = change.value().len();
1717 match change.tag() {
1718 ChangeTag::Equal => {
1719 old_offset += len;
1720 new_offset += len;
1721 }
1722 ChangeTag::Delete => {
1723 let old_end_offset = old_offset + len;
1724 if let Some((last_old_range, _)) = &mut last_edit {
1725 last_old_range.end = old_end_offset;
1726 } else {
1727 last_edit =
1728 Some((old_offset..old_end_offset, new_offset..new_offset));
1729 }
1730 old_offset = old_end_offset;
1731 }
1732 ChangeTag::Insert => {
1733 let new_end_offset = new_offset + len;
1734 if let Some((_, last_new_range)) = &mut last_edit {
1735 last_new_range.end = new_end_offset;
1736 } else {
1737 last_edit =
1738 Some((old_offset..old_offset, new_offset..new_end_offset));
1739 }
1740 new_offset = new_end_offset;
1741 }
1742 }
1743 }
1744
1745 if let Some((old_range, new_range)) = &last_edit {
1746 if old_offset > old_range.end
1747 || new_offset > new_range.end
1748 || change.is_none()
1749 {
1750 let text = if new_range.is_empty() {
1751 empty.clone()
1752 } else {
1753 new_text[new_range.clone()].into()
1754 };
1755 edits.push((old_range.clone(), text));
1756 last_edit.take();
1757 }
1758 }
1759 }
1760
1761 Diff {
1762 base_version,
1763 line_ending,
1764 edits,
1765 }
1766 })
1767 }
1768
1769 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1771 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1772 let old_text = self.as_rope().clone();
1773 let line_ending = self.line_ending();
1774 let base_version = self.version();
1775 cx.background_executor().spawn(async move {
1776 let ranges = trailing_whitespace_ranges(&old_text);
1777 let empty = Arc::<str>::from("");
1778 Diff {
1779 base_version,
1780 line_ending,
1781 edits: ranges
1782 .into_iter()
1783 .map(|range| (range, empty.clone()))
1784 .collect(),
1785 }
1786 })
1787 }
1788
1789 /// Ensures that the buffer ends with a single newline character, and
1790 /// no other whitespace.
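    ///
    /// Illustrative sketch (hypothetical content; `cx` is assumed to be a
    /// `ModelContext<Buffer>`):
    ///
    /// ```ignore
    /// // Before: "hello  \n\n"
    /// buffer.ensure_final_newline(cx);
    /// // After:  "hello\n"
    /// ```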
1791 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1792 let len = self.len();
1793 let mut offset = len;
1794 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1795 let non_whitespace_len = chunk
1796 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1797 .len();
1798 offset -= chunk.len();
1799 offset += non_whitespace_len;
1800 if non_whitespace_len != 0 {
1801 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1802 return;
1803 }
1804 break;
1805 }
1806 }
1807 self.edit([(offset..len, "\n")], None, cx);
1808 }
1809
1810 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1811 /// calculated, then adjust the diff to account for those changes, and discard any
1812 /// parts of the diff that conflict with those changes.
1813 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1814 // Check for any edits to the buffer that have occurred since this diff
1815 // was computed.
1816 let snapshot = self.snapshot();
1817 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1818 let mut delta = 0;
1819 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1820 while let Some(edit_since) = edits_since.peek() {
1821 // If the edit occurs after a diff hunk, then it does not
1822 // affect that hunk.
1823 if edit_since.old.start > range.end {
1824 break;
1825 }
1826 // If the edit precedes the diff hunk, then adjust the hunk
1827 // to reflect the edit.
1828 else if edit_since.old.end < range.start {
1829 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1830 edits_since.next();
1831 }
1832 // If the edit intersects a diff hunk, then discard that hunk.
1833 else {
1834 return None;
1835 }
1836 }
1837
1838 let start = (range.start as i64 + delta) as usize;
1839 let end = (range.end as i64 + delta) as usize;
1840 Some((start..end, new_text))
1841 });
1842
1843 self.start_transaction();
1844 self.text.set_line_ending(diff.line_ending);
1845 self.edit(adjusted_edits, None, cx);
1846 self.end_transaction(cx)
1847 }
1848
1849 fn has_unsaved_edits(&self) -> bool {
1850 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1851
1852 if last_version == self.version {
1853 self.has_unsaved_edits
1854 .set((last_version, has_unsaved_edits));
1855 return has_unsaved_edits;
1856 }
1857
1858 let has_edits = self.has_edits_since(&self.saved_version);
1859 self.has_unsaved_edits
1860 .set((self.version.clone(), has_edits));
1861 has_edits
1862 }
1863
1864 /// Checks if the buffer has unsaved changes.
1865 pub fn is_dirty(&self) -> bool {
1866 self.capability != Capability::ReadOnly
1867 && (self.has_conflict
1868 || self.file.as_ref().map_or(false, |file| {
1869 matches!(file.disk_state(), DiskState::New | DiskState::Deleted)
1870 })
1871 || self.has_unsaved_edits())
1872 }
1873
1874 /// Checks if the buffer and its file have both changed since the buffer
1875 /// was last saved or reloaded.
1876 pub fn has_conflict(&self) -> bool {
1877 if self.has_conflict {
1878 return true;
1879 }
1880 let Some(file) = self.file.as_ref() else {
1881 return false;
1882 };
1883 match file.disk_state() {
1884 DiskState::New => false,
1885 DiskState::Present { mtime } => match self.saved_mtime {
1886 Some(saved_mtime) => {
1887 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1888 }
1889 None => true,
1890 },
1891 DiskState::Deleted => true,
1892 }
1893 }
1894
1895 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1896 pub fn subscribe(&mut self) -> Subscription {
1897 self.text.subscribe()
1898 }
1899
    /// Starts a transaction, if one is not already in progress. When undoing or
1901 /// redoing edits, all of the edits performed within a transaction are undone
1902 /// or redone together.
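    ///
    /// Illustrative sketch (`buffer` and `cx` are assumed to be a `Buffer` and a
    /// `ModelContext<Buffer>`): edits performed between `start_transaction` and
    /// `end_transaction` are undone as a single step.
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "a")], None, cx);
    /// buffer.edit([(1..1, "b")], None, cx);
    /// buffer.end_transaction(cx);
    /// buffer.undo(cx); // reverts both edits together
    /// ```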
1903 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1904 self.start_transaction_at(Instant::now())
1905 }
1906
1907 /// Starts a transaction, providing the current time. Subsequent transactions
1908 /// that occur within a short period of time will be grouped together. This
1909 /// is controlled by the buffer's undo grouping duration.
1910 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1911 self.transaction_depth += 1;
1912 if self.was_dirty_before_starting_transaction.is_none() {
1913 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1914 }
1915 self.text.start_transaction_at(now)
1916 }
1917
1918 /// Terminates the current transaction, if this is the outermost transaction.
1919 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1920 self.end_transaction_at(Instant::now(), cx)
1921 }
1922
1923 /// Terminates the current transaction, providing the current time. Subsequent transactions
1924 /// that occur within a short period of time will be grouped together. This
1925 /// is controlled by the buffer's undo grouping duration.
1926 pub fn end_transaction_at(
1927 &mut self,
1928 now: Instant,
1929 cx: &mut ModelContext<Self>,
1930 ) -> Option<TransactionId> {
1931 assert!(self.transaction_depth > 0);
1932 self.transaction_depth -= 1;
1933 let was_dirty = if self.transaction_depth == 0 {
1934 self.was_dirty_before_starting_transaction.take().unwrap()
1935 } else {
1936 false
1937 };
1938 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1939 self.did_edit(&start_version, was_dirty, cx);
1940 Some(transaction_id)
1941 } else {
1942 None
1943 }
1944 }
1945
1946 /// Manually add a transaction to the buffer's undo history.
1947 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1948 self.text.push_transaction(transaction, now);
1949 }
1950
1951 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
1953 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1954 self.text.finalize_last_transaction()
1955 }
1956
1957 /// Manually group all changes since a given transaction.
1958 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1959 self.text.group_until_transaction(transaction_id);
1960 }
1961
    /// Manually remove a transaction from the buffer's undo history.
1963 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1964 self.text.forget_transaction(transaction_id);
1965 }
1966
1967 /// Manually merge two adjacent transactions in the buffer's undo history.
1968 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1969 self.text.merge_transactions(transaction, destination);
1970 }
1971
1972 /// Waits for the buffer to receive operations with the given timestamps.
1973 pub fn wait_for_edits(
1974 &mut self,
1975 edit_ids: impl IntoIterator<Item = clock::Lamport>,
1976 ) -> impl Future<Output = Result<()>> {
1977 self.text.wait_for_edits(edit_ids)
1978 }
1979
1980 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
1981 pub fn wait_for_anchors(
1982 &mut self,
1983 anchors: impl IntoIterator<Item = Anchor>,
1984 ) -> impl 'static + Future<Output = Result<()>> {
1985 self.text.wait_for_anchors(anchors)
1986 }
1987
1988 /// Waits for the buffer to receive operations up to the given version.
1989 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1990 self.text.wait_for_version(version)
1991 }
1992
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
    /// or [`Buffer::wait_for_version`] to resolve with an error.
1995 pub fn give_up_waiting(&mut self) {
1996 self.text.give_up_waiting();
1997 }
1998
1999 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2000 pub fn set_active_selections(
2001 &mut self,
2002 selections: Arc<[Selection<Anchor>]>,
2003 line_mode: bool,
2004 cursor_shape: CursorShape,
2005 cx: &mut ModelContext<Self>,
2006 ) {
2007 let lamport_timestamp = self.text.lamport_clock.tick();
2008 self.remote_selections.insert(
2009 self.text.replica_id(),
2010 SelectionSet {
2011 selections: selections.clone(),
2012 lamport_timestamp,
2013 line_mode,
2014 cursor_shape,
2015 },
2016 );
2017 self.send_operation(
2018 Operation::UpdateSelections {
2019 selections,
2020 line_mode,
2021 lamport_timestamp,
2022 cursor_shape,
2023 },
2024 true,
2025 cx,
2026 );
2027 self.non_text_state_update_count += 1;
2028 cx.notify();
2029 }
2030
2031 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2032 /// this replica.
2033 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
2034 if self
2035 .remote_selections
2036 .get(&self.text.replica_id())
2037 .map_or(true, |set| !set.selections.is_empty())
2038 {
2039 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2040 }
2041 }
2042
2043 /// Replaces the buffer's entire text.
2044 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Lamport>
2045 where
2046 T: Into<Arc<str>>,
2047 {
2048 self.autoindent_requests.clear();
2049 self.edit([(0..self.len(), text)], None, cx)
2050 }
2051
2052 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2053 /// delete, and a string of text to insert at that location.
2054 ///
2055 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2056 /// request for the edited ranges, which will be processed when the buffer finishes
2057 /// parsing.
2058 ///
2059 /// Parsing takes place at the end of a transaction, and may compute synchronously
2060 /// or asynchronously, depending on the changes.
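    ///
    /// Illustrative sketch (`cx` is assumed to be a `ModelContext<Buffer>`; the
    /// offsets are hypothetical):
    ///
    /// ```ignore
    /// // Replace the first three bytes and insert a new line at offset 10,
    /// // auto-indenting each edited line.
    /// buffer.edit(
    ///     [(0..3, "foo"), (10..10, "\nbar")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```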
2061 pub fn edit<I, S, T>(
2062 &mut self,
2063 edits_iter: I,
2064 autoindent_mode: Option<AutoindentMode>,
2065 cx: &mut ModelContext<Self>,
2066 ) -> Option<clock::Lamport>
2067 where
2068 I: IntoIterator<Item = (Range<S>, T)>,
2069 S: ToOffset,
2070 T: Into<Arc<str>>,
2071 {
2072 // Skip invalid edits and coalesce contiguous ones.
2073 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2074 for (range, new_text) in edits_iter {
2075 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2076 if range.start > range.end {
2077 mem::swap(&mut range.start, &mut range.end);
2078 }
2079 let new_text = new_text.into();
2080 if !new_text.is_empty() || !range.is_empty() {
2081 if let Some((prev_range, prev_text)) = edits.last_mut() {
2082 if prev_range.end >= range.start {
2083 prev_range.end = cmp::max(prev_range.end, range.end);
2084 *prev_text = format!("{prev_text}{new_text}").into();
2085 } else {
2086 edits.push((range, new_text));
2087 }
2088 } else {
2089 edits.push((range, new_text));
2090 }
2091 }
2092 }
2093 if edits.is_empty() {
2094 return None;
2095 }
2096
2097 self.start_transaction();
2098 self.pending_autoindent.take();
2099 let autoindent_request = autoindent_mode
2100 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2101
2102 let edit_operation = self.text.edit(edits.iter().cloned());
2103 let edit_id = edit_operation.timestamp();
2104
2105 if let Some((before_edit, mode)) = autoindent_request {
2106 let mut delta = 0isize;
2107 let entries = edits
2108 .into_iter()
2109 .enumerate()
2110 .zip(&edit_operation.as_edit().unwrap().new_text)
2111 .map(|((ix, (range, _)), new_text)| {
2112 let new_text_length = new_text.len();
2113 let old_start = range.start.to_point(&before_edit);
2114 let new_start = (delta + range.start as isize) as usize;
2115 let range_len = range.end - range.start;
2116 delta += new_text_length as isize - range_len as isize;
2117
2118 // Decide what range of the insertion to auto-indent, and whether
2119 // the first line of the insertion should be considered a newly-inserted line
2120 // or an edit to an existing line.
2121 let mut range_of_insertion_to_indent = 0..new_text_length;
2122 let mut first_line_is_new = true;
2123
2124 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2125 let old_line_end = before_edit.line_len(old_start.row);
2126
2127 if old_start.column > old_line_start {
2128 first_line_is_new = false;
2129 }
2130
2131 if !new_text.contains('\n')
2132 && (old_start.column + (range_len as u32) < old_line_end
2133 || old_line_end == old_line_start)
2134 {
2135 first_line_is_new = false;
2136 }
2137
2138 // When inserting text starting with a newline, avoid auto-indenting the
2139 // previous line.
2140 if new_text.starts_with('\n') {
2141 range_of_insertion_to_indent.start += 1;
2142 first_line_is_new = true;
2143 }
2144
2145 let mut original_indent_column = None;
2146 if let AutoindentMode::Block {
2147 original_indent_columns,
2148 } = &mode
2149 {
2150 original_indent_column =
2151 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
2152 indent_size_for_text(
2153 new_text[range_of_insertion_to_indent.clone()].chars(),
2154 )
2155 .len
2156 }));
2157
2158 // Avoid auto-indenting the line after the edit.
2159 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2160 range_of_insertion_to_indent.end -= 1;
2161 }
2162 }
2163
2164 AutoindentRequestEntry {
2165 first_line_is_new,
2166 original_indent_column,
2167 indent_size: before_edit.language_indent_size_at(range.start, cx),
2168 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2169 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2170 }
2171 })
2172 .collect();
2173
2174 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2175 before_edit,
2176 entries,
2177 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2178 ignore_empty_lines: false,
2179 }));
2180 }
2181
2182 self.end_transaction(cx);
2183 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2184 Some(edit_id)
2185 }
2186
2187 fn did_edit(
2188 &mut self,
2189 old_version: &clock::Global,
2190 was_dirty: bool,
2191 cx: &mut ModelContext<Self>,
2192 ) {
2193 if self.edits_since::<usize>(old_version).next().is_none() {
2194 return;
2195 }
2196
2197 self.reparse(cx);
2198
2199 cx.emit(BufferEvent::Edited);
2200 if was_dirty != self.is_dirty() {
2201 cx.emit(BufferEvent::DirtyChanged);
2202 }
2203 cx.notify();
2204 }
2205
2206 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut ModelContext<Self>)
2207 where
2208 I: IntoIterator<Item = Range<T>>,
2209 T: ToOffset + Copy,
2210 {
2211 let before_edit = self.snapshot();
2212 let entries = ranges
2213 .into_iter()
2214 .map(|range| AutoindentRequestEntry {
2215 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2216 first_line_is_new: true,
2217 indent_size: before_edit.language_indent_size_at(range.start, cx),
2218 original_indent_column: None,
2219 })
2220 .collect();
2221 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2222 before_edit,
2223 entries,
2224 is_block_mode: false,
2225 ignore_empty_lines: true,
2226 }));
2227 self.request_autoindent(cx);
2228 }
2229
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2232 pub fn insert_empty_line(
2233 &mut self,
2234 position: impl ToPoint,
2235 space_above: bool,
2236 space_below: bool,
2237 cx: &mut ModelContext<Self>,
2238 ) -> Point {
2239 let mut position = position.to_point(self);
2240
2241 self.start_transaction();
2242
2243 self.edit(
2244 [(position..position, "\n")],
2245 Some(AutoindentMode::EachLine),
2246 cx,
2247 );
2248
2249 if position.column > 0 {
2250 position += Point::new(1, 0);
2251 }
2252
2253 if !self.is_line_blank(position.row) {
2254 self.edit(
2255 [(position..position, "\n")],
2256 Some(AutoindentMode::EachLine),
2257 cx,
2258 );
2259 }
2260
2261 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2262 self.edit(
2263 [(position..position, "\n")],
2264 Some(AutoindentMode::EachLine),
2265 cx,
2266 );
2267 position.row += 1;
2268 }
2269
2270 if space_below
2271 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2272 {
2273 self.edit(
2274 [(position..position, "\n")],
2275 Some(AutoindentMode::EachLine),
2276 cx,
2277 );
2278 }
2279
2280 self.end_transaction(cx);
2281
2282 position
2283 }
2284
2285 /// Applies the given remote operations to the buffer.
2286 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
2287 &mut self,
2288 ops: I,
2289 cx: &mut ModelContext<Self>,
2290 ) {
2291 self.pending_autoindent.take();
2292 let was_dirty = self.is_dirty();
2293 let old_version = self.version.clone();
2294 let mut deferred_ops = Vec::new();
2295 let buffer_ops = ops
2296 .into_iter()
2297 .filter_map(|op| match op {
2298 Operation::Buffer(op) => Some(op),
2299 _ => {
2300 if self.can_apply_op(&op) {
2301 self.apply_op(op, cx);
2302 } else {
2303 deferred_ops.push(op);
2304 }
2305 None
2306 }
2307 })
2308 .collect::<Vec<_>>();
2309 for operation in buffer_ops.iter() {
2310 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2311 }
2312 self.text.apply_ops(buffer_ops);
2313 self.deferred_ops.insert(deferred_ops);
2314 self.flush_deferred_ops(cx);
2315 self.did_edit(&old_version, was_dirty, cx);
2316 // Notify independently of whether the buffer was edited as the operations could include a
2317 // selection update.
2318 cx.notify();
2319 }
2320
2321 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
2322 let mut deferred_ops = Vec::new();
2323 for op in self.deferred_ops.drain().iter().cloned() {
2324 if self.can_apply_op(&op) {
2325 self.apply_op(op, cx);
2326 } else {
2327 deferred_ops.push(op);
2328 }
2329 }
2330 self.deferred_ops.insert(deferred_ops);
2331 }
2332
2333 pub fn has_deferred_ops(&self) -> bool {
2334 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2335 }
2336
2337 fn can_apply_op(&self, operation: &Operation) -> bool {
2338 match operation {
2339 Operation::Buffer(_) => {
2340 unreachable!("buffer operations should never be applied at this layer")
2341 }
2342 Operation::UpdateDiagnostics {
2343 diagnostics: diagnostic_set,
2344 ..
2345 } => diagnostic_set.iter().all(|diagnostic| {
2346 self.text.can_resolve(&diagnostic.range.start)
2347 && self.text.can_resolve(&diagnostic.range.end)
2348 }),
2349 Operation::UpdateSelections { selections, .. } => selections
2350 .iter()
2351 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2352 Operation::UpdateCompletionTriggers { .. } => true,
2353 }
2354 }
2355
2356 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
2357 match operation {
2358 Operation::Buffer(_) => {
2359 unreachable!("buffer operations should never be applied at this layer")
2360 }
2361 Operation::UpdateDiagnostics {
2362 server_id,
2363 diagnostics: diagnostic_set,
2364 lamport_timestamp,
2365 } => {
2366 let snapshot = self.snapshot();
2367 self.apply_diagnostic_update(
2368 server_id,
2369 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2370 lamport_timestamp,
2371 cx,
2372 );
2373 }
2374 Operation::UpdateSelections {
2375 selections,
2376 lamport_timestamp,
2377 line_mode,
2378 cursor_shape,
2379 } => {
2380 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2381 if set.lamport_timestamp > lamport_timestamp {
2382 return;
2383 }
2384 }
2385
2386 self.remote_selections.insert(
2387 lamport_timestamp.replica_id,
2388 SelectionSet {
2389 selections,
2390 lamport_timestamp,
2391 line_mode,
2392 cursor_shape,
2393 },
2394 );
2395 self.text.lamport_clock.observe(lamport_timestamp);
2396 self.non_text_state_update_count += 1;
2397 }
2398 Operation::UpdateCompletionTriggers {
2399 triggers,
2400 lamport_timestamp,
2401 server_id,
2402 } => {
2403 if triggers.is_empty() {
2404 self.completion_triggers_per_language_server
2405 .remove(&server_id);
2406 self.completion_triggers = self
2407 .completion_triggers_per_language_server
2408 .values()
2409 .flat_map(|triggers| triggers.into_iter().cloned())
2410 .collect();
2411 } else {
2412 self.completion_triggers_per_language_server
2413 .insert(server_id, triggers.iter().cloned().collect());
2414 self.completion_triggers.extend(triggers);
2415 }
2416 self.text.lamport_clock.observe(lamport_timestamp);
2417 }
2418 }
2419 }
2420
2421 fn apply_diagnostic_update(
2422 &mut self,
2423 server_id: LanguageServerId,
2424 diagnostics: DiagnosticSet,
2425 lamport_timestamp: clock::Lamport,
2426 cx: &mut ModelContext<Self>,
2427 ) {
2428 if lamport_timestamp > self.diagnostics_timestamp {
2429 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2430 if diagnostics.is_empty() {
2431 if let Ok(ix) = ix {
2432 self.diagnostics.remove(ix);
2433 }
2434 } else {
2435 match ix {
2436 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2437 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2438 };
2439 }
2440 self.diagnostics_timestamp = lamport_timestamp;
2441 self.non_text_state_update_count += 1;
2442 self.text.lamport_clock.observe(lamport_timestamp);
2443 cx.notify();
2444 cx.emit(BufferEvent::DiagnosticsUpdated);
2445 }
2446 }
2447
2448 fn send_operation(&self, operation: Operation, is_local: bool, cx: &mut ModelContext<Self>) {
2449 cx.emit(BufferEvent::Operation {
2450 operation,
2451 is_local,
2452 });
2453 }
2454
2455 /// Removes the selections for a given peer.
2456 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
2457 self.remote_selections.remove(&replica_id);
2458 cx.notify();
2459 }
2460
2461 /// Undoes the most recent transaction.
2462 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2463 let was_dirty = self.is_dirty();
2464 let old_version = self.version.clone();
2465
2466 if let Some((transaction_id, operation)) = self.text.undo() {
2467 self.send_operation(Operation::Buffer(operation), true, cx);
2468 self.did_edit(&old_version, was_dirty, cx);
2469 Some(transaction_id)
2470 } else {
2471 None
2472 }
2473 }
2474
2475 /// Manually undoes a specific transaction in the buffer's undo history.
2476 pub fn undo_transaction(
2477 &mut self,
2478 transaction_id: TransactionId,
2479 cx: &mut ModelContext<Self>,
2480 ) -> bool {
2481 let was_dirty = self.is_dirty();
2482 let old_version = self.version.clone();
2483 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2484 self.send_operation(Operation::Buffer(operation), true, cx);
2485 self.did_edit(&old_version, was_dirty, cx);
2486 true
2487 } else {
2488 false
2489 }
2490 }
2491
2492 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2493 pub fn undo_to_transaction(
2494 &mut self,
2495 transaction_id: TransactionId,
2496 cx: &mut ModelContext<Self>,
2497 ) -> bool {
2498 let was_dirty = self.is_dirty();
2499 let old_version = self.version.clone();
2500
2501 let operations = self.text.undo_to_transaction(transaction_id);
2502 let undone = !operations.is_empty();
2503 for operation in operations {
2504 self.send_operation(Operation::Buffer(operation), true, cx);
2505 }
2506 if undone {
2507 self.did_edit(&old_version, was_dirty, cx)
2508 }
2509 undone
2510 }
2511
2512 pub fn undo_operations(
2513 &mut self,
2514 counts: HashMap<Lamport, u32>,
2515 cx: &mut ModelContext<Buffer>,
2516 ) {
2517 let was_dirty = self.is_dirty();
2518 let operation = self.text.undo_operations(counts);
2519 let old_version = self.version.clone();
2520 self.send_operation(Operation::Buffer(operation), true, cx);
2521 self.did_edit(&old_version, was_dirty, cx);
2522 }
2523
    /// Redoes the most recently undone transaction.
2525 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2526 let was_dirty = self.is_dirty();
2527 let old_version = self.version.clone();
2528
2529 if let Some((transaction_id, operation)) = self.text.redo() {
2530 self.send_operation(Operation::Buffer(operation), true, cx);
2531 self.did_edit(&old_version, was_dirty, cx);
2532 Some(transaction_id)
2533 } else {
2534 None
2535 }
2536 }
2537
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2539 pub fn redo_to_transaction(
2540 &mut self,
2541 transaction_id: TransactionId,
2542 cx: &mut ModelContext<Self>,
2543 ) -> bool {
2544 let was_dirty = self.is_dirty();
2545 let old_version = self.version.clone();
2546
2547 let operations = self.text.redo_to_transaction(transaction_id);
2548 let redone = !operations.is_empty();
2549 for operation in operations {
2550 self.send_operation(Operation::Buffer(operation), true, cx);
2551 }
2552 if redone {
2553 self.did_edit(&old_version, was_dirty, cx)
2554 }
2555 redone
2556 }
2557
2558 /// Override current completion triggers with the user-provided completion triggers.
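    ///
    /// Illustrative sketch (`server_id` is a hypothetical `LanguageServerId` and
    /// `cx` a `ModelContext<Buffer>`):
    ///
    /// ```ignore
    /// let triggers = BTreeSet::from([".".to_string(), "::".to_string()]);
    /// buffer.set_completion_triggers(server_id, triggers, cx);
    /// ```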
2559 pub fn set_completion_triggers(
2560 &mut self,
2561 server_id: LanguageServerId,
2562 triggers: BTreeSet<String>,
2563 cx: &mut ModelContext<Self>,
2564 ) {
2565 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2566 if triggers.is_empty() {
2567 self.completion_triggers_per_language_server
2568 .remove(&server_id);
2569 self.completion_triggers = self
2570 .completion_triggers_per_language_server
2571 .values()
2572 .flat_map(|triggers| triggers.into_iter().cloned())
2573 .collect();
2574 } else {
2575 self.completion_triggers_per_language_server
2576 .insert(server_id, triggers.clone());
2577 self.completion_triggers.extend(triggers.iter().cloned());
2578 }
2579 self.send_operation(
2580 Operation::UpdateCompletionTriggers {
2581 triggers: triggers.iter().cloned().collect(),
2582 lamport_timestamp: self.completion_triggers_timestamp,
2583 server_id,
2584 },
2585 true,
2586 cx,
2587 );
2588 cx.notify();
2589 }
2590
2591 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2593 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2594 &self.completion_triggers
2595 }
2596
2597 /// Call this directly after performing edits to prevent the preview tab
2598 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2599 /// to return false until there are additional edits.
2600 pub fn refresh_preview(&mut self) {
2601 self.preview_version = self.version.clone();
2602 }
2603
2604 /// Whether we should preserve the preview status of a tab containing this buffer.
2605 pub fn preserve_preview(&self) -> bool {
2606 !self.has_edits_since(&self.preview_version)
2607 }
2608}
2609
2610#[doc(hidden)]
2611#[cfg(any(test, feature = "test-support"))]
2612impl Buffer {
2613 pub fn edit_via_marked_text(
2614 &mut self,
2615 marked_string: &str,
2616 autoindent_mode: Option<AutoindentMode>,
2617 cx: &mut ModelContext<Self>,
2618 ) {
2619 let edits = self.edits_for_marked_text(marked_string);
2620 self.edit(edits, autoindent_mode, cx);
2621 }
2622
2623 pub fn set_group_interval(&mut self, group_interval: Duration) {
2624 self.text.set_group_interval(group_interval);
2625 }
2626
2627 pub fn randomly_edit<T>(
2628 &mut self,
2629 rng: &mut T,
2630 old_range_count: usize,
2631 cx: &mut ModelContext<Self>,
2632 ) where
2633 T: rand::Rng,
2634 {
2635 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2636 let mut last_end = None;
2637 for _ in 0..old_range_count {
2638 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2639 break;
2640 }
2641
2642 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2643 let mut range = self.random_byte_range(new_start, rng);
2644 if rng.gen_bool(0.2) {
2645 mem::swap(&mut range.start, &mut range.end);
2646 }
2647 last_end = Some(range.end);
2648
2649 let new_text_len = rng.gen_range(0..10);
2650 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2651 new_text = new_text.to_uppercase();
2652
2653 edits.push((range, new_text));
2654 }
2655 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2656 self.edit(edits, None, cx);
2657 }
2658
2659 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
2660 let was_dirty = self.is_dirty();
2661 let old_version = self.version.clone();
2662
2663 let ops = self.text.randomly_undo_redo(rng);
2664 if !ops.is_empty() {
2665 for op in ops {
2666 self.send_operation(Operation::Buffer(op), true, cx);
2667 self.did_edit(&old_version, was_dirty, cx);
2668 }
2669 }
2670 }
2671}
2672
2673impl EventEmitter<BufferEvent> for Buffer {}
2674
2675impl Deref for Buffer {
2676 type Target = TextBuffer;
2677
2678 fn deref(&self) -> &Self::Target {
2679 &self.text
2680 }
2681}
2682
2683impl BufferSnapshot {
    /// Returns [`IndentSize`] for a given line that respects user settings
    /// and language preferences.
    pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
        indent_size_for_line(self, row)
    }

    /// Returns [`IndentSize`] for a given position that respects user settings
2689 /// and language preferences.
2690 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
2691 let settings = language_settings(
2692 self.language_at(position).map(|l| l.name()),
2693 self.file(),
2694 cx,
2695 );
2696 if settings.hard_tabs {
2697 IndentSize::tab()
2698 } else {
2699 IndentSize::spaces(settings.tab_size.get())
2700 }
2701 }
2702
2703 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2704 /// is passed in as `single_indent_size`.
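    ///
    /// Illustrative sketch (hypothetical rows; assumes `IndentSize::spaces`, which
    /// is used elsewhere in this file):
    ///
    /// ```ignore
    /// // Suggest indents for rows 2 through 4, using four spaces per indent level.
    /// let suggestions = snapshot.suggested_indents(2..5, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     // `indent.len` is the suggested indentation width for `row`.
    /// }
    /// ```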
2705 pub fn suggested_indents(
2706 &self,
2707 rows: impl Iterator<Item = u32>,
2708 single_indent_size: IndentSize,
2709 ) -> BTreeMap<u32, IndentSize> {
2710 let mut result = BTreeMap::new();
2711
2712 for row_range in contiguous_ranges(rows, 10) {
2713 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2714 Some(suggestions) => suggestions,
2715 _ => break,
2716 };
2717
2718 for (row, suggestion) in row_range.zip(suggestions) {
2719 let indent_size = if let Some(suggestion) = suggestion {
2720 result
2721 .get(&suggestion.basis_row)
2722 .copied()
2723 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2724 .with_delta(suggestion.delta, single_indent_size)
2725 } else {
2726 self.indent_size_for_line(row)
2727 };
2728
2729 result.insert(row, indent_size);
2730 }
2731 }
2732
2733 result
2734 }
2735
2736 fn suggest_autoindents(
2737 &self,
2738 row_range: Range<u32>,
2739 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2740 let config = &self.language.as_ref()?.config;
2741 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2742
2743 // Find the suggested indentation ranges based on the syntax tree.
2744 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2745 let end = Point::new(row_range.end, 0);
2746 let range = (start..end).to_offset(&self.text);
2747 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2748 Some(&grammar.indents_config.as_ref()?.query)
2749 });
2750 let indent_configs = matches
2751 .grammars()
2752 .iter()
2753 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2754 .collect::<Vec<_>>();
2755
2756 let mut indent_ranges = Vec::<Range<Point>>::new();
2757 let mut outdent_positions = Vec::<Point>::new();
2758 while let Some(mat) = matches.peek() {
2759 let mut start: Option<Point> = None;
2760 let mut end: Option<Point> = None;
2761
2762 let config = &indent_configs[mat.grammar_index];
2763 for capture in mat.captures {
2764 if capture.index == config.indent_capture_ix {
2765 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2766 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2767 } else if Some(capture.index) == config.start_capture_ix {
2768 start = Some(Point::from_ts_point(capture.node.end_position()));
2769 } else if Some(capture.index) == config.end_capture_ix {
2770 end = Some(Point::from_ts_point(capture.node.start_position()));
2771 } else if Some(capture.index) == config.outdent_capture_ix {
2772 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2773 }
2774 }
2775
2776 matches.advance();
2777 if let Some((start, end)) = start.zip(end) {
2778 if start.row == end.row {
2779 continue;
2780 }
2781
2782 let range = start..end;
2783 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2784 Err(ix) => indent_ranges.insert(ix, range),
2785 Ok(ix) => {
2786 let prev_range = &mut indent_ranges[ix];
2787 prev_range.end = prev_range.end.max(range.end);
2788 }
2789 }
2790 }
2791 }
2792
2793 let mut error_ranges = Vec::<Range<Point>>::new();
2794 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2795 Some(&grammar.error_query)
2796 });
2797 while let Some(mat) = matches.peek() {
2798 let node = mat.captures[0].node;
2799 let start = Point::from_ts_point(node.start_position());
2800 let end = Point::from_ts_point(node.end_position());
2801 let range = start..end;
2802 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2803 Ok(ix) | Err(ix) => ix,
2804 };
2805 let mut end_ix = ix;
2806 while let Some(existing_range) = error_ranges.get(end_ix) {
2807 if existing_range.end < end {
2808 end_ix += 1;
2809 } else {
2810 break;
2811 }
2812 }
2813 error_ranges.splice(ix..end_ix, [range]);
2814 matches.advance();
2815 }
2816
2817 outdent_positions.sort();
2818 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and set its end to the outdent position.
2821 if let Some(range_to_truncate) = indent_ranges
2822 .iter_mut()
2823 .filter(|indent_range| indent_range.contains(&outdent_position))
2824 .last()
2825 {
2826 range_to_truncate.end = outdent_position;
2827 }
2828 }
2829
        // Find the suggested indentation increases and decreases based on regexes.
2831 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2832 self.for_each_line(
2833 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2834 ..Point::new(row_range.end, 0),
2835 |row, line| {
2836 if config
2837 .decrease_indent_pattern
2838 .as_ref()
2839 .map_or(false, |regex| regex.is_match(line))
2840 {
2841 indent_change_rows.push((row, Ordering::Less));
2842 }
2843 if config
2844 .increase_indent_pattern
2845 .as_ref()
2846 .map_or(false, |regex| regex.is_match(line))
2847 {
2848 indent_change_rows.push((row + 1, Ordering::Greater));
2849 }
2850 },
2851 );
2852
2853 let mut indent_changes = indent_change_rows.into_iter().peekable();
2854 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2855 prev_non_blank_row.unwrap_or(0)
2856 } else {
2857 row_range.start.saturating_sub(1)
2858 };
2859 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2860 Some(row_range.map(move |row| {
2861 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2862
2863 let mut indent_from_prev_row = false;
2864 let mut outdent_from_prev_row = false;
2865 let mut outdent_to_row = u32::MAX;
2866
2867 while let Some((indent_row, delta)) = indent_changes.peek() {
2868 match indent_row.cmp(&row) {
2869 Ordering::Equal => match delta {
2870 Ordering::Less => outdent_from_prev_row = true,
2871 Ordering::Greater => indent_from_prev_row = true,
2872 _ => {}
2873 },
2874
2875 Ordering::Greater => break,
2876 Ordering::Less => {}
2877 }
2878
2879 indent_changes.next();
2880 }
2881
2882 for range in &indent_ranges {
2883 if range.start.row >= row {
2884 break;
2885 }
2886 if range.start.row == prev_row && range.end > row_start {
2887 indent_from_prev_row = true;
2888 }
2889 if range.end > prev_row_start && range.end <= row_start {
2890 outdent_to_row = outdent_to_row.min(range.start.row);
2891 }
2892 }
2893
2894 let within_error = error_ranges
2895 .iter()
2896 .any(|e| e.start.row < row && e.end > row_start);
2897
2898 let suggestion = if outdent_to_row == prev_row
2899 || (outdent_from_prev_row && indent_from_prev_row)
2900 {
2901 Some(IndentSuggestion {
2902 basis_row: prev_row,
2903 delta: Ordering::Equal,
2904 within_error,
2905 })
2906 } else if indent_from_prev_row {
2907 Some(IndentSuggestion {
2908 basis_row: prev_row,
2909 delta: Ordering::Greater,
2910 within_error,
2911 })
2912 } else if outdent_to_row < prev_row {
2913 Some(IndentSuggestion {
2914 basis_row: outdent_to_row,
2915 delta: Ordering::Equal,
2916 within_error,
2917 })
2918 } else if outdent_from_prev_row {
2919 Some(IndentSuggestion {
2920 basis_row: prev_row,
2921 delta: Ordering::Less,
2922 within_error,
2923 })
2924 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2925 {
2926 Some(IndentSuggestion {
2927 basis_row: prev_row,
2928 delta: Ordering::Equal,
2929 within_error,
2930 })
2931 } else {
2932 None
2933 };
2934
2935 prev_row = row;
2936 prev_row_start = row_start;
2937 suggestion
2938 }))
2939 }
2940
2941 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2942 while row > 0 {
2943 row -= 1;
2944 if !self.is_line_blank(row) {
2945 return Some(row);
2946 }
2947 }
2948 None
2949 }
2950
2951 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
2952 let captures = self.syntax.captures(range, &self.text, |grammar| {
2953 grammar.highlights_query.as_ref()
2954 });
2955 let highlight_maps = captures
2956 .grammars()
2957 .iter()
2958 .map(|grammar| grammar.highlight_map())
2959 .collect();
2960 (captures, highlight_maps)
2961 }
2962
2963 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
2964 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
2965 /// returned in chunks where each chunk has a single syntax highlighting style and
2966 /// diagnostic status.
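    ///
    /// Illustrative sketch (`snapshot` is assumed to be a `BufferSnapshot`):
    ///
    /// ```ignore
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // `chunk.text` is a plain string slice, and `chunk.syntax_highlight_id`
    ///     // can be resolved to a style via a `SyntaxTheme`.
    /// }
    /// ```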
2967 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2968 let range = range.start.to_offset(self)..range.end.to_offset(self);
2969
2970 let mut syntax = None;
2971 if language_aware {
2972 syntax = Some(self.get_highlights(range.clone()));
2973 }
2974 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
2975 let diagnostics = language_aware;
2976 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
2977 }
2978
2979 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
2981 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2982 let mut line = String::new();
2983 let mut row = range.start.row;
2984 for chunk in self
2985 .as_rope()
2986 .chunks_in_range(range.to_offset(self))
2987 .chain(["\n"])
2988 {
2989 for (newline_ix, text) in chunk.split('\n').enumerate() {
2990 if newline_ix > 0 {
2991 callback(row, &line);
2992 row += 1;
2993 line.clear();
2994 }
2995 line.push_str(text);
2996 }
2997 }
2998 }
2999
3000 /// Iterates over every [`SyntaxLayer`] in the buffer.
3001 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
3002 self.syntax
3003 .layers_for_range(0..self.len(), &self.text, true)
3004 }
3005
3006 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
3007 let offset = position.to_offset(self);
3008 self.syntax
3009 .layers_for_range(offset..offset, &self.text, false)
3010 .filter(|l| l.node().end_byte() > offset)
3011 .last()
3012 }
3013
3014 /// Returns the main [`Language`].
3015 pub fn language(&self) -> Option<&Arc<Language>> {
3016 self.language.as_ref()
3017 }
3018
3019 /// Returns the [`Language`] at the given location.
3020 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3021 self.syntax_layer_at(position)
3022 .map(|info| info.language)
3023 .or(self.language.as_ref())
3024 }
3025
3026 /// Returns the settings for the language at the given location.
3027 pub fn settings_at<'a, D: ToOffset>(
3028 &'a self,
3029 position: D,
3030 cx: &'a AppContext,
3031 ) -> Cow<'a, LanguageSettings> {
3032 language_settings(
3033 self.language_at(position).map(|l| l.name()),
3034 self.file.as_ref(),
3035 cx,
3036 )
3037 }
3038
3039 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3040 CharClassifier::new(self.language_scope_at(point))
3041 }
3042
3043 /// Returns the [`LanguageScope`] at the given location.
3044 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3045 let offset = position.to_offset(self);
3046 let mut scope = None;
3047 let mut smallest_range: Option<Range<usize>> = None;
3048
3049 // Use the layer that has the smallest node intersecting the given point.
3050 for layer in self
3051 .syntax
3052 .layers_for_range(offset..offset, &self.text, false)
3053 {
3054 let mut cursor = layer.node().walk();
3055
3056 let mut range = None;
3057 loop {
3058 let child_range = cursor.node().byte_range();
3059 if !child_range.to_inclusive().contains(&offset) {
3060 break;
3061 }
3062
3063 range = Some(child_range);
3064 if cursor.goto_first_child_for_byte(offset).is_none() {
3065 break;
3066 }
3067 }
3068
3069 if let Some(range) = range {
3070 if smallest_range
3071 .as_ref()
3072 .map_or(true, |smallest_range| range.len() < smallest_range.len())
3073 {
3074 smallest_range = Some(range);
3075 scope = Some(LanguageScope {
3076 language: layer.language.clone(),
3077 override_id: layer.override_id(offset, &self.text),
3078 });
3079 }
3080 }
3081 }
3082
3083 scope.or_else(|| {
3084 self.language.clone().map(|language| LanguageScope {
3085 language,
3086 override_id: None,
3087 })
3088 })
3089 }
3090
3091 /// Returns a tuple of the range and character kind of the word
3092 /// surrounding the given position.
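    ///
    /// Illustrative sketch (hypothetical content): with buffer text `"hello world"`,
    /// an offset inside the first word yields the byte range of `"hello"`.
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(2);
    /// // range == 0..5; `kind` is the character kind of the word, if any.
    /// ```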
3093 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3094 let mut start = start.to_offset(self);
3095 let mut end = start;
3096 let mut next_chars = self.chars_at(start).peekable();
3097 let mut prev_chars = self.reversed_chars_at(start).peekable();
3098
3099 let classifier = self.char_classifier_at(start);
3100 let word_kind = cmp::max(
3101 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3102 next_chars.peek().copied().map(|c| classifier.kind(c)),
3103 );
3104
3105 for ch in prev_chars {
3106 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3107 start -= ch.len_utf8();
3108 } else {
3109 break;
3110 }
3111 }
3112
3113 for ch in next_chars {
3114 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3115 end += ch.len_utf8();
3116 } else {
3117 break;
3118 }
3119 }
3120
3121 (start..end, word_kind)
3122 }
3123
3124 /// Returns the closest syntax node enclosing the given range.
3125 pub fn syntax_ancestor<'a, T: ToOffset>(
3126 &'a self,
3127 range: Range<T>,
3128 ) -> Option<tree_sitter::Node<'a>> {
3129 let range = range.start.to_offset(self)..range.end.to_offset(self);
3130 let mut result: Option<tree_sitter::Node<'a>> = None;
3131 'outer: for layer in self
3132 .syntax
3133 .layers_for_range(range.clone(), &self.text, true)
3134 {
3135 let mut cursor = layer.node().walk();
3136
3137 // Descend to the first leaf that touches the start of the range,
3138 // and if the range is non-empty, extends beyond the start.
3139 while cursor.goto_first_child_for_byte(range.start).is_some() {
3140 if !range.is_empty() && cursor.node().end_byte() == range.start {
3141 cursor.goto_next_sibling();
3142 }
3143 }
3144
3145 // Ascend to the smallest ancestor that strictly contains the range.
3146 loop {
3147 let node_range = cursor.node().byte_range();
3148 if node_range.start <= range.start
3149 && node_range.end >= range.end
3150 && node_range.len() > range.len()
3151 {
3152 break;
3153 }
3154 if !cursor.goto_parent() {
3155 continue 'outer;
3156 }
3157 }
3158
3159 let left_node = cursor.node();
3160 let mut layer_result = left_node;
3161
3162 // For an empty range, try to find another node immediately to the right of the range.
3163 if left_node.end_byte() == range.start {
3164 let mut right_node = None;
3165 while !cursor.goto_next_sibling() {
3166 if !cursor.goto_parent() {
3167 break;
3168 }
3169 }
3170
3171 while cursor.node().start_byte() == range.start {
3172 right_node = Some(cursor.node());
3173 if !cursor.goto_first_child() {
3174 break;
3175 }
3176 }
3177
3178 // If there is a candidate node on both sides of the (empty) range, then
3179 // decide between the two by favoring a named node over an anonymous token.
3180 // If both nodes are the same in that regard, favor the right one.
3181 if let Some(right_node) = right_node {
3182 if right_node.is_named() || !left_node.is_named() {
3183 layer_result = right_node;
3184 }
3185 }
3186 }
3187
3188 if let Some(previous_result) = &result {
3189 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3190 continue;
3191 }
3192 }
3193 result = Some(layer_result);
3194 }
3195
3196 result
3197 }
3198
3199 /// Returns the outline for the buffer.
3200 ///
3201 /// This method allows passing an optional [`SyntaxTheme`] to
3202 /// syntax-highlight the returned symbols.
3203 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3204 self.outline_items_containing(0..self.len(), true, theme)
3205 .map(Outline::new)
3206 }
3207
3208 /// Returns all the symbols that contain the given position.
3209 ///
3210 /// This method allows passing an optional [`SyntaxTheme`] to
3211 /// syntax-highlight the returned symbols.
3212 pub fn symbols_containing<T: ToOffset>(
3213 &self,
3214 position: T,
3215 theme: Option<&SyntaxTheme>,
3216 ) -> Option<Vec<OutlineItem<Anchor>>> {
3217 let position = position.to_offset(self);
3218 let mut items = self.outline_items_containing(
3219 position.saturating_sub(1)..self.len().min(position + 1),
3220 false,
3221 theme,
3222 )?;
3223 let mut prev_depth = None;
3224 items.retain(|item| {
3225 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3226 prev_depth = Some(item.depth);
3227 result
3228 });
3229 Some(items)
3230 }
3231
3232 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3233 let range = range.to_offset(self);
3234 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3235 grammar.outline_config.as_ref().map(|c| &c.query)
3236 });
3237 let configs = matches
3238 .grammars()
3239 .iter()
3240 .map(|g| g.outline_config.as_ref().unwrap())
3241 .collect::<Vec<_>>();
3242
3243 while let Some(mat) = matches.peek() {
3244 let config = &configs[mat.grammar_index];
3245 let containing_item_node = maybe!({
3246 let item_node = mat.captures.iter().find_map(|cap| {
3247 if cap.index == config.item_capture_ix {
3248 Some(cap.node)
3249 } else {
3250 None
3251 }
3252 })?;
3253
3254 let item_byte_range = item_node.byte_range();
3255 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3256 None
3257 } else {
3258 Some(item_node)
3259 }
3260 });
3261
3262 if let Some(item_node) = containing_item_node {
3263 return Some(
3264 Point::from_ts_point(item_node.start_position())
3265 ..Point::from_ts_point(item_node.end_position()),
3266 );
3267 }
3268
3269 matches.advance();
3270 }
3271 None
3272 }
3273
3274 pub fn outline_items_containing<T: ToOffset>(
3275 &self,
3276 range: Range<T>,
3277 include_extra_context: bool,
3278 theme: Option<&SyntaxTheme>,
3279 ) -> Option<Vec<OutlineItem<Anchor>>> {
3280 let range = range.to_offset(self);
3281 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3282 grammar.outline_config.as_ref().map(|c| &c.query)
3283 });
3284 let configs = matches
3285 .grammars()
3286 .iter()
3287 .map(|g| g.outline_config.as_ref().unwrap())
3288 .collect::<Vec<_>>();
3289
3290 let mut items = Vec::new();
3291 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3292 while let Some(mat) = matches.peek() {
3293 let config = &configs[mat.grammar_index];
3294 if let Some(item) =
3295 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3296 {
3297 items.push(item);
3298 } else if let Some(capture) = mat
3299 .captures
3300 .iter()
3301 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3302 {
3303 let capture_range = capture.node.start_position()..capture.node.end_position();
3304 let mut capture_row_range =
3305 capture_range.start.row as u32..capture_range.end.row as u32;
3306 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3307 {
3308 capture_row_range.end -= 1;
3309 }
3310 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3311 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3312 last_row_range.end = capture_row_range.end;
3313 } else {
3314 annotation_row_ranges.push(capture_row_range);
3315 }
3316 } else {
3317 annotation_row_ranges.push(capture_row_range);
3318 }
3319 }
3320 matches.advance();
3321 }
3322
3323 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3324
3325 // Assign depths based on containment relationships and convert to anchors.
3326 let mut item_ends_stack = Vec::<Point>::new();
3327 let mut anchor_items = Vec::new();
3328 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3329 for item in items {
3330 while let Some(last_end) = item_ends_stack.last().copied() {
3331 if last_end < item.range.end {
3332 item_ends_stack.pop();
3333 } else {
3334 break;
3335 }
3336 }
3337
3338 let mut annotation_row_range = None;
3339 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3340 let row_preceding_item = item.range.start.row.saturating_sub(1);
3341 if next_annotation_row_range.end < row_preceding_item {
3342 annotation_row_ranges.next();
3343 } else {
3344 if next_annotation_row_range.end == row_preceding_item {
3345 annotation_row_range = Some(next_annotation_row_range.clone());
3346 annotation_row_ranges.next();
3347 }
3348 break;
3349 }
3350 }
3351
3352 anchor_items.push(OutlineItem {
3353 depth: item_ends_stack.len(),
3354 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3355 text: item.text,
3356 highlight_ranges: item.highlight_ranges,
3357 name_ranges: item.name_ranges,
3358 body_range: item.body_range.map(|body_range| {
3359 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3360 }),
3361 annotation_range: annotation_row_range.map(|annotation_range| {
3362 self.anchor_after(Point::new(annotation_range.start, 0))
3363 ..self.anchor_before(Point::new(
3364 annotation_range.end,
3365 self.line_len(annotation_range.end),
3366 ))
3367 }),
3368 });
3369 item_ends_stack.push(item.range.end);
3370 }
3371
3372 Some(anchor_items)
3373 }
3374
3375 fn next_outline_item(
3376 &self,
3377 config: &OutlineConfig,
3378 mat: &SyntaxMapMatch,
3379 range: &Range<usize>,
3380 include_extra_context: bool,
3381 theme: Option<&SyntaxTheme>,
3382 ) -> Option<OutlineItem<Point>> {
3383 let item_node = mat.captures.iter().find_map(|cap| {
3384 if cap.index == config.item_capture_ix {
3385 Some(cap.node)
3386 } else {
3387 None
3388 }
3389 })?;
3390
3391 let item_byte_range = item_node.byte_range();
3392 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3393 return None;
3394 }
3395 let item_point_range = Point::from_ts_point(item_node.start_position())
3396 ..Point::from_ts_point(item_node.end_position());
3397
3398 let mut open_point = None;
3399 let mut close_point = None;
3400 let mut buffer_ranges = Vec::new();
3401 for capture in mat.captures {
3402 let node_is_name;
3403 if capture.index == config.name_capture_ix {
3404 node_is_name = true;
3405 } else if Some(capture.index) == config.context_capture_ix
3406 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3407 {
3408 node_is_name = false;
3409 } else {
3410 if Some(capture.index) == config.open_capture_ix {
3411 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3412 } else if Some(capture.index) == config.close_capture_ix {
3413 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3414 }
3415
3416 continue;
3417 }
3418
3419 let mut range = capture.node.start_byte()..capture.node.end_byte();
3420 let start = capture.node.start_position();
3421 if capture.node.end_position().row > start.row {
3422 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3423 }
3424
3425 if !range.is_empty() {
3426 buffer_ranges.push((range, node_is_name));
3427 }
3428 }
3429 if buffer_ranges.is_empty() {
3430 return None;
3431 }
3432 let mut text = String::new();
3433 let mut highlight_ranges = Vec::new();
3434 let mut name_ranges = Vec::new();
3435 let mut chunks = self.chunks(
3436 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3437 true,
3438 );
3439 let mut last_buffer_range_end = 0;
3440 for (buffer_range, is_name) in buffer_ranges {
3441 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
3442 text.push(' ');
3443 }
3444 last_buffer_range_end = buffer_range.end;
3445 if is_name {
3446 let mut start = text.len();
3447 let end = start + buffer_range.len();
3448
                // When multiple names are captured, the matchable text
                // includes the whitespace in between the names.
3451 if !name_ranges.is_empty() {
3452 start -= 1;
3453 }
3454
3455 name_ranges.push(start..end);
3456 }
3457
3458 let mut offset = buffer_range.start;
3459 chunks.seek(buffer_range.clone());
3460 for mut chunk in chunks.by_ref() {
3461 if chunk.text.len() > buffer_range.end - offset {
3462 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3463 offset = buffer_range.end;
3464 } else {
3465 offset += chunk.text.len();
3466 }
3467 let style = chunk
3468 .syntax_highlight_id
3469 .zip(theme)
3470 .and_then(|(highlight, theme)| highlight.style(theme));
3471 if let Some(style) = style {
3472 let start = text.len();
3473 let end = start + chunk.text.len();
3474 highlight_ranges.push((start..end, style));
3475 }
3476 text.push_str(chunk.text);
3477 if offset >= buffer_range.end {
3478 break;
3479 }
3480 }
3481 }
3482
3483 Some(OutlineItem {
3484 depth: 0, // We'll calculate the depth later
3485 range: item_point_range,
3486 text,
3487 highlight_ranges,
3488 name_ranges,
3489 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3490 annotation_range: None,
3491 })
3492 }
3493
3494 pub fn function_body_fold_ranges<T: ToOffset>(
3495 &self,
3496 within: Range<T>,
3497 ) -> impl Iterator<Item = Range<usize>> + '_ {
3498 self.text_object_ranges(within, TreeSitterOptions::default())
3499 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3500 }
3501
3502 /// For each grammar in the language, runs the provided
3503 /// [`tree_sitter::Query`] against the given range.
3504 pub fn matches(
3505 &self,
3506 range: Range<usize>,
3507 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3508 ) -> SyntaxMapMatches {
3509 self.syntax.matches(range, self, query)
3510 }
3511
3512 /// Returns bracket range pairs overlapping or adjacent to `range`
3513 pub fn bracket_ranges<T: ToOffset>(
3514 &self,
3515 range: Range<T>,
3516 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
3517 // Find bracket pairs that *inclusively* contain the given range.
3518 let range = range.start.to_offset(self).saturating_sub(1)
3519 ..self.len().min(range.end.to_offset(self) + 1);
3520
3521 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3522 grammar.brackets_config.as_ref().map(|c| &c.query)
3523 });
3524 let configs = matches
3525 .grammars()
3526 .iter()
3527 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3528 .collect::<Vec<_>>();
3529
3530 iter::from_fn(move || {
3531 while let Some(mat) = matches.peek() {
3532 let mut open = None;
3533 let mut close = None;
3534 let config = &configs[mat.grammar_index];
3535 for capture in mat.captures {
3536 if capture.index == config.open_capture_ix {
3537 open = Some(capture.node.byte_range());
3538 } else if capture.index == config.close_capture_ix {
3539 close = Some(capture.node.byte_range());
3540 }
3541 }
3542
3543 matches.advance();
3544
3545 let Some((open, close)) = open.zip(close) else {
3546 continue;
3547 };
3548
3549 let bracket_range = open.start..=close.end;
3550 if !bracket_range.overlaps(&range) {
3551 continue;
3552 }
3553
3554 return Some((open, close));
3555 }
3556 None
3557 })
3558 }
3559
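/// Returns the ranges of syntactic text objects (such as
/// [`TextObject::InsideFunction`]) that overlap the given range, according to
/// each grammar's text object query.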
3560 pub fn text_object_ranges<T: ToOffset>(
3561 &self,
3562 range: Range<T>,
3563 options: TreeSitterOptions,
3564 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3565 let range = range.start.to_offset(self).saturating_sub(1)
3566 ..self.len().min(range.end.to_offset(self) + 1);
3567
3568 let mut matches =
3569 self.syntax
3570 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3571 grammar.text_object_config.as_ref().map(|c| &c.query)
3572 });
3573
3574 let configs = matches
3575 .grammars()
3576 .iter()
3577 .map(|grammar| grammar.text_object_config.as_ref())
3578 .collect::<Vec<_>>();
3579
3580 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3581
3582 iter::from_fn(move || loop {
3583 while let Some(capture) = captures.pop() {
3584 if capture.0.overlaps(&range) {
3585 return Some(capture);
3586 }
3587 }
3588
3589 let mat = matches.peek()?;
3590
3591 let Some(config) = configs[mat.grammar_index].as_ref() else {
3592 matches.advance();
3593 continue;
3594 };
3595
3596 for capture in mat.captures {
3597 let Some(ix) = config
3598 .text_objects_by_capture_ix
3599 .binary_search_by_key(&capture.index, |e| e.0)
3600 .ok()
3601 else {
3602 continue;
3603 };
3604 let text_object = config.text_objects_by_capture_ix[ix].1;
3605 let byte_range = capture.node.byte_range();
3606
3607 let mut found = false;
3608 for (range, existing) in captures.iter_mut() {
3609 if existing == &text_object {
3610 range.start = range.start.min(byte_range.start);
3611 range.end = range.end.max(byte_range.end);
3612 found = true;
3613 break;
3614 }
3615 }
3616
3617 if !found {
3618 captures.push((byte_range, text_object));
3619 }
3620 }
3621
3622 matches.advance();
3623 })
3624 }
3625
3626 /// Returns enclosing bracket ranges containing the given range.
3627 pub fn enclosing_bracket_ranges<T: ToOffset>(
3628 &self,
3629 range: Range<T>,
3630 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
3631 let range = range.start.to_offset(self)..range.end.to_offset(self);
3632
3633 self.bracket_ranges(range.clone())
3634 .filter(move |(open, close)| open.start <= range.start && close.end >= range.end)
3635 }
3636
3637 /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no brackets contain the range.
3638 ///
3639 /// A `range_filter` can optionally be passed to restrict which bracket pairs are considered.
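///
/// A rough sketch, assuming `snapshot` is a [`BufferSnapshot`] and `offset` is a
/// byte offset within it; the filter shown here is purely illustrative:
///
/// ```ignore
/// let filter = |open: Range<usize>, close: Range<usize>| open.len() == 1 && close.len() == 1;
/// if let Some((open, close)) = snapshot
///     .innermost_enclosing_bracket_ranges(offset..offset, Some(&filter))
/// {
///     // `open.start..close.end` spans the innermost bracket pair around `offset`.
/// }
/// ```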
3640 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3641 &self,
3642 range: Range<T>,
3643 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3644 ) -> Option<(Range<usize>, Range<usize>)> {
3645 let range = range.start.to_offset(self)..range.end.to_offset(self);
3646
3647 // Get the ranges of the innermost pair of brackets.
3648 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3649
3650 for (open, close) in self.enclosing_bracket_ranges(range.clone()) {
3651 if let Some(range_filter) = range_filter {
3652 if !range_filter(open.clone(), close.clone()) {
3653 continue;
3654 }
3655 }
3656
3657 let len = close.end - open.start;
3658
3659 if let Some((existing_open, existing_close)) = &result {
3660 let existing_len = existing_close.end - existing_open.start;
3661 if len > existing_len {
3662 continue;
3663 }
3664 }
3665
3666 result = Some((open, close));
3667 }
3668
3669 result
3670 }
3671
3672 /// Returns anchor ranges for any matches of the redaction query.
3673 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3674 /// will be run on the relevant section of the buffer.
3675 pub fn redacted_ranges<T: ToOffset>(
3676 &self,
3677 range: Range<T>,
3678 ) -> impl Iterator<Item = Range<usize>> + '_ {
3679 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3680 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3681 grammar
3682 .redactions_config
3683 .as_ref()
3684 .map(|config| &config.query)
3685 });
3686
3687 let configs = syntax_matches
3688 .grammars()
3689 .iter()
3690 .map(|grammar| grammar.redactions_config.as_ref())
3691 .collect::<Vec<_>>();
3692
3693 iter::from_fn(move || {
3694 let redacted_range = syntax_matches
3695 .peek()
3696 .and_then(|mat| {
3697 configs[mat.grammar_index].and_then(|config| {
3698 mat.captures
3699 .iter()
3700 .find(|capture| capture.index == config.redaction_capture_ix)
3701 })
3702 })
3703 .map(|mat| mat.node.byte_range());
3704 syntax_matches.advance();
3705 redacted_range
3706 })
3707 }
3708
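/// Returns the ranges of language injections that intersect the given range,
/// along with the language injected into each of them.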
3709 pub fn injections_intersecting_range<T: ToOffset>(
3710 &self,
3711 range: Range<T>,
3712 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3713 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3714
3715 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3716 grammar
3717 .injection_config
3718 .as_ref()
3719 .map(|config| &config.query)
3720 });
3721
3722 let configs = syntax_matches
3723 .grammars()
3724 .iter()
3725 .map(|grammar| grammar.injection_config.as_ref())
3726 .collect::<Vec<_>>();
3727
3728 iter::from_fn(move || {
3729 let ranges = syntax_matches.peek().and_then(|mat| {
3730 let config = &configs[mat.grammar_index]?;
3731 let content_capture_range = mat.captures.iter().find_map(|capture| {
3732 if capture.index == config.content_capture_ix {
3733 Some(capture.node.byte_range())
3734 } else {
3735 None
3736 }
3737 })?;
3738 let language = self.language_at(content_capture_range.start)?;
3739 Some((content_capture_range, language))
3740 });
3741 syntax_matches.advance();
3742 ranges
3743 })
3744 }
3745
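/// Returns the [`RunnableRange`]s (e.g. runnable tests or tasks) detected by each
/// grammar's runnable query within the given range.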
3746 pub fn runnable_ranges(
3747 &self,
3748 offset_range: Range<usize>,
3749 ) -> impl Iterator<Item = RunnableRange> + '_ {
3750 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3751 grammar.runnable_config.as_ref().map(|config| &config.query)
3752 });
3753
3754 let test_configs = syntax_matches
3755 .grammars()
3756 .iter()
3757 .map(|grammar| grammar.runnable_config.as_ref())
3758 .collect::<Vec<_>>();
3759
3760 iter::from_fn(move || loop {
3761 let mat = syntax_matches.peek()?;
3762
3763 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
3764 let mut run_range = None;
3765 let full_range = mat.captures.iter().fold(
3766 Range {
3767 start: usize::MAX,
3768 end: 0,
3769 },
3770 |mut acc, next| {
3771 let byte_range = next.node.byte_range();
3772 if acc.start > byte_range.start {
3773 acc.start = byte_range.start;
3774 }
3775 if acc.end < byte_range.end {
3776 acc.end = byte_range.end;
3777 }
3778 acc
3779 },
3780 );
3781 if full_range.start > full_range.end {
3782 // We did not find a full spanning range of this match.
3783 return None;
3784 }
3785 let extra_captures: SmallVec<[_; 1]> =
3786 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
3787 test_configs
3788 .extra_captures
3789 .get(capture.index as usize)
3790 .cloned()
3791 .and_then(|tag_name| match tag_name {
3792 RunnableCapture::Named(name) => {
3793 Some((capture.node.byte_range(), name))
3794 }
3795 RunnableCapture::Run => {
3796 let _ = run_range.insert(capture.node.byte_range());
3797 None
3798 }
3799 })
3800 }));
3801 let run_range = run_range?;
3802 let tags = test_configs
3803 .query
3804 .property_settings(mat.pattern_index)
3805 .iter()
3806 .filter_map(|property| {
3807 if *property.key == *"tag" {
3808 property
3809 .value
3810 .as_ref()
3811 .map(|value| RunnableTag(value.to_string().into()))
3812 } else {
3813 None
3814 }
3815 })
3816 .collect();
3817 let extra_captures = extra_captures
3818 .into_iter()
3819 .map(|(range, name)| {
3820 (
3821 name.to_string(),
3822 self.text_for_range(range.clone()).collect::<String>(),
3823 )
3824 })
3825 .collect();
3826 // All tags should have the same range.
3827 Some(RunnableRange {
3828 run_range,
3829 full_range,
3830 runnable: Runnable {
3831 tags,
3832 language: mat.language,
3833 buffer: self.remote_id(),
3834 },
3835 extra_captures,
3836 buffer_id: self.remote_id(),
3837 })
3838 });
3839
3840 syntax_matches.advance();
3841 if test_range.is_some() {
3842 // It's fine to short-circuit when .peek()? returns None. But if a match did not
3843 // contain a run marker, we don't end the iterator here; we just loop around to the next match.
3844 return test_range;
3845 }
3846 })
3847 }
3848
3849 /// Returns selections for remote peers intersecting the given range.
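///
/// A rough usage sketch, assuming `snapshot` is a [`BufferSnapshot`] and that the
/// whole buffer is of interest:
///
/// ```ignore
/// for (replica_id, line_mode, cursor_shape, selections) in
///     snapshot.selections_in_range(Anchor::MIN..Anchor::MAX, false)
/// {
///     for selection in selections {
///         // Render the remote peer's cursor and selection.
///     }
/// }
/// ```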
3850 #[allow(clippy::type_complexity)]
3851 pub fn selections_in_range(
3852 &self,
3853 range: Range<Anchor>,
3854 include_local: bool,
3855 ) -> impl Iterator<
3856 Item = (
3857 ReplicaId,
3858 bool,
3859 CursorShape,
3860 impl Iterator<Item = &Selection<Anchor>> + '_,
3861 ),
3862 > + '_ {
3863 self.remote_selections
3864 .iter()
3865 .filter(move |(replica_id, set)| {
3866 (include_local || **replica_id != self.text.replica_id())
3867 && !set.selections.is_empty()
3868 })
3869 .map(move |(replica_id, set)| {
3870 let start_ix = match set.selections.binary_search_by(|probe| {
3871 probe.end.cmp(&range.start, self).then(Ordering::Greater)
3872 }) {
3873 Ok(ix) | Err(ix) => ix,
3874 };
3875 let end_ix = match set.selections.binary_search_by(|probe| {
3876 probe.start.cmp(&range.end, self).then(Ordering::Less)
3877 }) {
3878 Ok(ix) | Err(ix) => ix,
3879 };
3880
3881 (
3882 *replica_id,
3883 set.line_mode,
3884 set.cursor_shape,
3885 set.selections[start_ix..end_ix].iter(),
3886 )
3887 })
3888 }
3889
3890 /// Returns whether the buffer contains any diagnostics.
3891 pub fn has_diagnostics(&self) -> bool {
3892 !self.diagnostics.is_empty()
3893 }
3894
3895 /// Returns all the diagnostics intersecting the given range.
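///
/// A rough usage sketch, assuming `snapshot` is a [`BufferSnapshot`]; byte offsets
/// are used here, but any [`FromAnchor`] coordinate type works:
///
/// ```ignore
/// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
///     println!("{:?} {:?}: {}", entry.diagnostic.severity, entry.range, entry.diagnostic.message);
/// }
/// ```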
3896 pub fn diagnostics_in_range<'a, T, O>(
3897 &'a self,
3898 search_range: Range<T>,
3899 reversed: bool,
3900 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3901 where
3902 T: 'a + Clone + ToOffset,
3903 O: 'a + FromAnchor,
3904 {
3905 let mut iterators: Vec<_> = self
3906 .diagnostics
3907 .iter()
3908 .map(|(_, collection)| {
3909 collection
3910 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
3911 .peekable()
3912 })
3913 .collect();
3914
3915 std::iter::from_fn(move || {
3916 let (next_ix, _) = iterators
3917 .iter_mut()
3918 .enumerate()
3919 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
3920 .min_by(|(_, a), (_, b)| {
3921 let cmp = a
3922 .range
3923 .start
3924 .cmp(&b.range.start, self)
3925 // when range is equal, sort by diagnostic severity
3926 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
3927 // and stabilize order with group_id
3928 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
3929 if reversed {
3930 cmp.reverse()
3931 } else {
3932 cmp
3933 }
3934 })?;
3935 iterators[next_ix]
3936 .next()
3937 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
3938 diagnostic,
3939 range: FromAnchor::from_anchor(&range.start, self)
3940 ..FromAnchor::from_anchor(&range.end, self),
3941 })
3942 })
3943 }
3944
3945 /// Returns all the diagnostic groups associated with the given
3946 /// language server ID. If no language server ID is provided,
3947 /// all diagnostic groups are returned.
3948 pub fn diagnostic_groups(
3949 &self,
3950 language_server_id: Option<LanguageServerId>,
3951 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
3952 let mut groups = Vec::new();
3953
3954 if let Some(language_server_id) = language_server_id {
3955 if let Ok(ix) = self
3956 .diagnostics
3957 .binary_search_by_key(&language_server_id, |e| e.0)
3958 {
3959 self.diagnostics[ix]
3960 .1
3961 .groups(language_server_id, &mut groups, self);
3962 }
3963 } else {
3964 for (language_server_id, diagnostics) in self.diagnostics.iter() {
3965 diagnostics.groups(*language_server_id, &mut groups, self);
3966 }
3967 }
3968
3969 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
3970 let a_start = &group_a.entries[group_a.primary_ix].range.start;
3971 let b_start = &group_b.entries[group_b.primary_ix].range.start;
3972 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
3973 });
3974
3975 groups
3976 }
3977
3978 /// Returns an iterator over the diagnostics for the given group.
3979 pub fn diagnostic_group<O>(
3980 &self,
3981 group_id: usize,
3982 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
3983 where
3984 O: FromAnchor + 'static,
3985 {
3986 self.diagnostics
3987 .iter()
3988 .flat_map(move |(_, set)| set.group(group_id, self))
3989 }
3990
3991 /// An integer version number that accounts for all updates besides
3992 /// the buffer's text itself (which is versioned via a version vector).
3993 pub fn non_text_state_update_count(&self) -> usize {
3994 self.non_text_state_update_count
3995 }
3996
3997 /// Returns a snapshot of the underlying file.
3998 pub fn file(&self) -> Option<&Arc<dyn File>> {
3999 self.file.as_ref()
4000 }
4001
4002 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4003 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
4004 if let Some(file) = self.file() {
4005 if file.path().file_name().is_none() || include_root {
4006 Some(file.full_path(cx))
4007 } else {
4008 Some(file.path().to_path_buf())
4009 }
4010 } else {
4011 None
4012 }
4013 }
4014}
4015
4016fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4017 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4018}
4019
4020fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4021 let mut result = IndentSize::spaces(0);
4022 for c in text {
4023 let kind = match c {
4024 ' ' => IndentKind::Space,
4025 '\t' => IndentKind::Tab,
4026 _ => break,
4027 };
4028 if result.len == 0 {
4029 result.kind = kind;
4030 }
4031 result.len += 1;
4032 }
4033 result
4034}
4035
4036impl Clone for BufferSnapshot {
4037 fn clone(&self) -> Self {
4038 Self {
4039 text: self.text.clone(),
4040 syntax: self.syntax.clone(),
4041 file: self.file.clone(),
4042 remote_selections: self.remote_selections.clone(),
4043 diagnostics: self.diagnostics.clone(),
4044 language: self.language.clone(),
4045 non_text_state_update_count: self.non_text_state_update_count,
4046 }
4047 }
4048}
4049
4050impl Deref for BufferSnapshot {
4051 type Target = text::BufferSnapshot;
4052
4053 fn deref(&self) -> &Self::Target {
4054 &self.text
4055 }
4056}
4057
4058unsafe impl<'a> Send for BufferChunks<'a> {}
4059
4060impl<'a> BufferChunks<'a> {
4061 pub(crate) fn new(
4062 text: &'a Rope,
4063 range: Range<usize>,
4064 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4065 diagnostics: bool,
4066 buffer_snapshot: Option<&'a BufferSnapshot>,
4067 ) -> Self {
4068 let mut highlights = None;
4069 if let Some((captures, highlight_maps)) = syntax {
4070 highlights = Some(BufferChunkHighlights {
4071 captures,
4072 next_capture: None,
4073 stack: Default::default(),
4074 highlight_maps,
4075 })
4076 }
4077
4078 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4079 let chunks = text.chunks_in_range(range.clone());
4080
4081 let mut this = BufferChunks {
4082 range,
4083 buffer_snapshot,
4084 chunks,
4085 diagnostic_endpoints,
4086 error_depth: 0,
4087 warning_depth: 0,
4088 information_depth: 0,
4089 hint_depth: 0,
4090 unnecessary_depth: 0,
4091 highlights,
4092 };
4093 this.initialize_diagnostic_endpoints();
4094 this
4095 }
4096
4097 /// Seeks to the given byte range in the buffer.
4098 pub fn seek(&mut self, range: Range<usize>) {
4099 let old_range = std::mem::replace(&mut self.range, range.clone());
4100 self.chunks.set_range(self.range.clone());
4101 if let Some(highlights) = self.highlights.as_mut() {
4102 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4103 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4104 highlights
4105 .stack
4106 .retain(|(end_offset, _)| *end_offset > range.start);
4107 if let Some(capture) = &highlights.next_capture {
4108 if range.start >= capture.node.start_byte() {
4109 let next_capture_end = capture.node.end_byte();
4110 if range.start < next_capture_end {
4111 highlights.stack.push((
4112 next_capture_end,
4113 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4114 ));
4115 }
4116 highlights.next_capture.take();
4117 }
4118 }
4119 } else if let Some(snapshot) = self.buffer_snapshot {
4120 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4121 *highlights = BufferChunkHighlights {
4122 captures,
4123 next_capture: None,
4124 stack: Default::default(),
4125 highlight_maps,
4126 };
4127 } else {
4128 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4129 // Seeking such BufferChunks is not supported.
4130 debug_assert!(false, "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot");
4131 }
4132
4133 highlights.captures.set_byte_range(self.range.clone());
4134 self.initialize_diagnostic_endpoints();
4135 }
4136 }
4137
4138 fn initialize_diagnostic_endpoints(&mut self) {
4139 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4140 if let Some(buffer) = self.buffer_snapshot {
4141 let mut diagnostic_endpoints = Vec::new();
4142 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4143 diagnostic_endpoints.push(DiagnosticEndpoint {
4144 offset: entry.range.start,
4145 is_start: true,
4146 severity: entry.diagnostic.severity,
4147 is_unnecessary: entry.diagnostic.is_unnecessary,
4148 });
4149 diagnostic_endpoints.push(DiagnosticEndpoint {
4150 offset: entry.range.end,
4151 is_start: false,
4152 severity: entry.diagnostic.severity,
4153 is_unnecessary: entry.diagnostic.is_unnecessary,
4154 });
4155 }
4156 diagnostic_endpoints
4157 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4158 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4159 self.hint_depth = 0;
4160 self.error_depth = 0;
4161 self.warning_depth = 0;
4162 self.information_depth = 0;
4163 }
4164 }
4165 }
4166
4167 /// The current byte offset in the buffer.
4168 pub fn offset(&self) -> usize {
4169 self.range.start
4170 }
4171
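/// The byte range this iterator is traversing; its start advances past chunks
/// that have already been yielded.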
4172 pub fn range(&self) -> Range<usize> {
4173 self.range.clone()
4174 }
4175
4176 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4177 let depth = match endpoint.severity {
4178 DiagnosticSeverity::ERROR => &mut self.error_depth,
4179 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4180 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4181 DiagnosticSeverity::HINT => &mut self.hint_depth,
4182 _ => return,
4183 };
4184 if endpoint.is_start {
4185 *depth += 1;
4186 } else {
4187 *depth -= 1;
4188 }
4189
4190 if endpoint.is_unnecessary {
4191 if endpoint.is_start {
4192 self.unnecessary_depth += 1;
4193 } else {
4194 self.unnecessary_depth -= 1;
4195 }
4196 }
4197 }
4198
4199 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4200 if self.error_depth > 0 {
4201 Some(DiagnosticSeverity::ERROR)
4202 } else if self.warning_depth > 0 {
4203 Some(DiagnosticSeverity::WARNING)
4204 } else if self.information_depth > 0 {
4205 Some(DiagnosticSeverity::INFORMATION)
4206 } else if self.hint_depth > 0 {
4207 Some(DiagnosticSeverity::HINT)
4208 } else {
4209 None
4210 }
4211 }
4212
4213 fn current_code_is_unnecessary(&self) -> bool {
4214 self.unnecessary_depth > 0
4215 }
4216}
4217
4218impl<'a> Iterator for BufferChunks<'a> {
4219 type Item = Chunk<'a>;
4220
4221 fn next(&mut self) -> Option<Self::Item> {
4222 let mut next_capture_start = usize::MAX;
4223 let mut next_diagnostic_endpoint = usize::MAX;
4224
4225 if let Some(highlights) = self.highlights.as_mut() {
4226 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4227 if *parent_capture_end <= self.range.start {
4228 highlights.stack.pop();
4229 } else {
4230 break;
4231 }
4232 }
4233
4234 if highlights.next_capture.is_none() {
4235 highlights.next_capture = highlights.captures.next();
4236 }
4237
4238 while let Some(capture) = highlights.next_capture.as_ref() {
4239 if self.range.start < capture.node.start_byte() {
4240 next_capture_start = capture.node.start_byte();
4241 break;
4242 } else {
4243 let highlight_id =
4244 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4245 highlights
4246 .stack
4247 .push((capture.node.end_byte(), highlight_id));
4248 highlights.next_capture = highlights.captures.next();
4249 }
4250 }
4251 }
4252
4253 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4254 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4255 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4256 if endpoint.offset <= self.range.start {
4257 self.update_diagnostic_depths(endpoint);
4258 diagnostic_endpoints.next();
4259 } else {
4260 next_diagnostic_endpoint = endpoint.offset;
4261 break;
4262 }
4263 }
4264 }
4265 self.diagnostic_endpoints = diagnostic_endpoints;
4266
4267 if let Some(chunk) = self.chunks.peek() {
4268 let chunk_start = self.range.start;
4269 let mut chunk_end = (self.chunks.offset() + chunk.len())
4270 .min(next_capture_start)
4271 .min(next_diagnostic_endpoint);
4272 let mut highlight_id = None;
4273 if let Some(highlights) = self.highlights.as_ref() {
4274 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4275 chunk_end = chunk_end.min(*parent_capture_end);
4276 highlight_id = Some(*parent_highlight_id);
4277 }
4278 }
4279
4280 let slice =
4281 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4282 self.range.start = chunk_end;
4283 if self.range.start == self.chunks.offset() + chunk.len() {
4284 self.chunks.next().unwrap();
4285 }
4286
4287 Some(Chunk {
4288 text: slice,
4289 syntax_highlight_id: highlight_id,
4290 diagnostic_severity: self.current_diagnostic_severity(),
4291 is_unnecessary: self.current_code_is_unnecessary(),
4292 ..Default::default()
4293 })
4294 } else {
4295 None
4296 }
4297 }
4298}
4299
4300impl operation_queue::Operation for Operation {
4301 fn lamport_timestamp(&self) -> clock::Lamport {
4302 match self {
4303 Operation::Buffer(_) => {
4304 unreachable!("buffer operations should never be deferred at this layer")
4305 }
4306 Operation::UpdateDiagnostics {
4307 lamport_timestamp, ..
4308 }
4309 | Operation::UpdateSelections {
4310 lamport_timestamp, ..
4311 }
4312 | Operation::UpdateCompletionTriggers {
4313 lamport_timestamp, ..
4314 } => *lamport_timestamp,
4315 }
4316 }
4317}
4318
4319impl Default for Diagnostic {
4320 fn default() -> Self {
4321 Self {
4322 source: Default::default(),
4323 code: None,
4324 severity: DiagnosticSeverity::ERROR,
4325 message: Default::default(),
4326 group_id: 0,
4327 is_primary: false,
4328 is_disk_based: false,
4329 is_unnecessary: false,
4330 data: None,
4331 }
4332 }
4333}
4334
4335impl IndentSize {
4336 /// Returns an [`IndentSize`] representing the given number of spaces.
4337 pub fn spaces(len: u32) -> Self {
4338 Self {
4339 len,
4340 kind: IndentKind::Space,
4341 }
4342 }
4343
4344 /// Returns an [`IndentSize`] representing a tab.
4345 pub fn tab() -> Self {
4346 Self {
4347 len: 1,
4348 kind: IndentKind::Tab,
4349 }
4350 }
4351
4352 /// An iterator over the characters represented by this [`IndentSize`].
4353 pub fn chars(&self) -> impl Iterator<Item = char> {
4354 iter::repeat(self.char()).take(self.len as usize)
4355 }
4356
4357 /// The character representation of this [`IndentSize`].
4358 pub fn char(&self) -> char {
4359 match self.kind {
4360 IndentKind::Space => ' ',
4361 IndentKind::Tab => '\t',
4362 }
4363 }
4364
4365 /// Consumes this [`IndentSize`] and returns a new one that has
4366 /// been shrunk or enlarged by the given size in the given direction.
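///
/// A rough sketch of the intended behavior:
///
/// ```ignore
/// assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4)).len, 8);
/// assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(2)).len, 2);
/// ```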
4367 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4368 match direction {
4369 Ordering::Less => {
4370 if self.kind == size.kind && self.len >= size.len {
4371 self.len -= size.len;
4372 }
4373 }
4374 Ordering::Equal => {}
4375 Ordering::Greater => {
4376 if self.len == 0 {
4377 self = size;
4378 } else if self.kind == size.kind {
4379 self.len += size.len;
4380 }
4381 }
4382 }
4383 self
4384 }
4385
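/// The number of columns this indent occupies, counting each tab as `tab_size` columns.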
4386 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4387 match self.kind {
4388 IndentKind::Space => self.len as usize,
4389 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4390 }
4391 }
4392}
4393
4394#[cfg(any(test, feature = "test-support"))]
4395pub struct TestFile {
4396 pub path: Arc<Path>,
4397 pub root_name: String,
4398}
4399
4400#[cfg(any(test, feature = "test-support"))]
4401impl File for TestFile {
4402 fn path(&self) -> &Arc<Path> {
4403 &self.path
4404 }
4405
4406 fn full_path(&self, _: &gpui::AppContext) -> PathBuf {
4407 PathBuf::from(&self.root_name).join(self.path.as_ref())
4408 }
4409
4410 fn as_local(&self) -> Option<&dyn LocalFile> {
4411 None
4412 }
4413
4414 fn disk_state(&self) -> DiskState {
4415 unimplemented!()
4416 }
4417
4418 fn file_name<'a>(&'a self, _: &'a gpui::AppContext) -> &'a std::ffi::OsStr {
4419 self.path().file_name().unwrap_or(self.root_name.as_ref())
4420 }
4421
4422 fn worktree_id(&self, _: &AppContext) -> WorktreeId {
4423 WorktreeId::from_usize(0)
4424 }
4425
4426 fn as_any(&self) -> &dyn std::any::Any {
4427 unimplemented!()
4428 }
4429
4430 fn to_proto(&self, _: &AppContext) -> rpc::proto::File {
4431 unimplemented!()
4432 }
4433
4434 fn is_private(&self) -> bool {
4435 false
4436 }
4437}
4438
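/// Groups an ascending sequence of values (e.g. row numbers) into contiguous ranges,
/// starting a new range whenever a value is not adjacent to the previous one or a
/// range would exceed `max_len`.
///
/// For example, the values `[1, 2, 3, 5, 6]` with a `max_len` of 2 yield `1..3`,
/// `3..4`, and `5..7`.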
4439pub(crate) fn contiguous_ranges(
4440 values: impl Iterator<Item = u32>,
4441 max_len: usize,
4442) -> impl Iterator<Item = Range<u32>> {
4443 let mut values = values;
4444 let mut current_range: Option<Range<u32>> = None;
4445 std::iter::from_fn(move || loop {
4446 if let Some(value) = values.next() {
4447 if let Some(range) = &mut current_range {
4448 if value == range.end && range.len() < max_len {
4449 range.end += 1;
4450 continue;
4451 }
4452 }
4453
4454 let prev_range = current_range.clone();
4455 current_range = Some(value..(value + 1));
4456 if prev_range.is_some() {
4457 return prev_range;
4458 }
4459 } else {
4460 return current_range.take();
4461 }
4462 })
4463}
4464
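/// Classifies characters as word, whitespace, or punctuation, optionally taking
/// a language scope's extra word characters into account.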
4465#[derive(Default, Debug)]
4466pub struct CharClassifier {
4467 scope: Option<LanguageScope>,
4468 for_completion: bool,
4469 ignore_punctuation: bool,
4470}
4471
4472impl CharClassifier {
4473 pub fn new(scope: Option<LanguageScope>) -> Self {
4474 Self {
4475 scope,
4476 for_completion: false,
4477 ignore_punctuation: false,
4478 }
4479 }
4480
4481 pub fn for_completion(self, for_completion: bool) -> Self {
4482 Self {
4483 for_completion,
4484 ..self
4485 }
4486 }
4487
4488 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4489 Self {
4490 ignore_punctuation,
4491 ..self
4492 }
4493 }
4494
4495 pub fn is_whitespace(&self, c: char) -> bool {
4496 self.kind(c) == CharKind::Whitespace
4497 }
4498
4499 pub fn is_word(&self, c: char) -> bool {
4500 self.kind(c) == CharKind::Word
4501 }
4502
4503 pub fn is_punctuation(&self, c: char) -> bool {
4504 self.kind(c) == CharKind::Punctuation
4505 }
4506
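/// Classifies a character, with an explicit override for whether punctuation
/// should be treated as part of a word.
///
/// A rough sketch of the default behavior (no language scope):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert_eq!(classifier.kind_with('-', false), CharKind::Punctuation);
/// assert_eq!(classifier.kind_with('-', true), CharKind::Word);
/// ```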
4507 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4508 if c.is_whitespace() {
4509 return CharKind::Whitespace;
4510 } else if c.is_alphanumeric() || c == '_' {
4511 return CharKind::Word;
4512 }
4513
4514 if let Some(scope) = &self.scope {
4515 if let Some(characters) = scope.word_characters() {
4516 if characters.contains(&c) {
4517 if c == '-' && !self.for_completion && !ignore_punctuation {
4518 return CharKind::Punctuation;
4519 }
4520 return CharKind::Word;
4521 }
4522 }
4523 }
4524
4525 if ignore_punctuation {
4526 CharKind::Word
4527 } else {
4528 CharKind::Punctuation
4529 }
4530 }
4531
4532 pub fn kind(&self, c: char) -> CharKind {
4533 self.kind_with(c, self.ignore_punctuation)
4534 }
4535}
4536
4537/// Find all of the ranges of whitespace that occur at the ends of lines
4538/// in the given rope.
4539///
4540/// This could also be done with a regex search, but this implementation
4541/// avoids copying text.
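///
/// A rough sketch of the expected output, assuming a `Rope` can be built from a `&str`:
///
/// ```ignore
/// let rope = Rope::from("fn main() {  \n}\t\n");
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..13, 15..16]);
/// ```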
4542pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4543 let mut ranges = Vec::new();
4544
4545 let mut offset = 0;
4546 let mut prev_chunk_trailing_whitespace_range = 0..0;
4547 for chunk in rope.chunks() {
4548 let mut prev_line_trailing_whitespace_range = 0..0;
4549 for (i, line) in chunk.split('\n').enumerate() {
4550 let line_end_offset = offset + line.len();
4551 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4552 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4553
4554 if i == 0 && trimmed_line_len == 0 {
4555 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4556 }
4557 if !prev_line_trailing_whitespace_range.is_empty() {
4558 ranges.push(prev_line_trailing_whitespace_range);
4559 }
4560
4561 offset = line_end_offset + 1;
4562 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4563 }
4564
4565 offset -= 1;
4566 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4567 }
4568
4569 if !prev_chunk_trailing_whitespace_range.is_empty() {
4570 ranges.push(prev_chunk_trailing_whitespace_range);
4571 }
4572
4573 ranges
4574}