1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 markdown::ParsedMarkdown,
5 proto, Grammar, Language, LanguageRegistry,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 language_settings::{language_settings, IndentGuideSettings, LanguageSettings},
10 markdown::parse_markdown,
11 outline::OutlineItem,
12 syntax_map::{
13 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
14 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
15 },
16 task_context::RunnableRange,
17 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, TextObject,
18 TreeSitterOptions,
19};
20use anyhow::{anyhow, Context, Result};
21use async_watch as watch;
22use clock::Lamport;
23pub use clock::ReplicaId;
24use collections::HashMap;
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 AnyElement, AppContext, Context as _, EventEmitter, HighlightStyle, Model, ModelContext,
29 Pixels, Task, TaskLabel, WindowContext,
30};
31use lsp::LanguageServerId;
32use parking_lot::Mutex;
33use schemars::JsonSchema;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use similar::{ChangeTag, TextDiff};
38use smallvec::SmallVec;
39use smol::future::yield_now;
40use std::{
41 any::Any,
42 borrow::Cow,
43 cell::Cell,
44 cmp::{self, Ordering, Reverse},
45 collections::{BTreeMap, BTreeSet},
46 ffi::OsStr,
47 fmt,
48 future::Future,
49 iter::{self, Iterator, Peekable},
50 mem,
51 num::NonZeroU32,
52 ops::{Deref, DerefMut, Range},
53 path::{Path, PathBuf},
54 str,
55 sync::{Arc, LazyLock},
56 time::{Duration, Instant},
57 vec,
58};
59use sum_tree::TreeMap;
60use text::operation_queue::OperationQueue;
61use text::*;
62pub use text::{
63 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
64 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
65 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
66 Transaction, TransactionId, Unclipped,
67};
68use theme::SyntaxTheme;
69#[cfg(any(test, feature = "test-support"))]
70use util::RandomCharIter;
71use util::{debug_panic, RangeExt};
72
73#[cfg(any(test, feature = "test-support"))]
74pub use {tree_sitter_rust, tree_sitter_typescript};
75
76pub use lsp::DiagnosticSeverity;
77
78/// A label for the background task spawned by the buffer to compute
79/// a diff against the contents of its file.
80pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
81
/// Indicates whether a [`Buffer`] can be edited.
83#[derive(PartialEq, Clone, Copy, Debug)]
84pub enum Capability {
85 /// The buffer is a mutable replica.
86 ReadWrite,
87 /// The buffer is a read-only replica.
88 ReadOnly,
89}
90
91pub type BufferRow = u32;
92
93/// An in-memory representation of a source code file, including its text,
94/// syntax trees, git status, and diagnostics.
95pub struct Buffer {
96 text: TextBuffer,
97 branch_state: Option<BufferBranchState>,
98 /// Filesystem state, `None` when there is no path.
99 file: Option<Arc<dyn File>>,
100 /// The mtime of the file when this buffer was last loaded from
101 /// or saved to disk.
102 saved_mtime: Option<MTime>,
103 /// The version vector when this buffer was last loaded from
104 /// or saved to disk.
105 saved_version: clock::Global,
106 preview_version: clock::Global,
107 transaction_depth: usize,
108 was_dirty_before_starting_transaction: Option<bool>,
109 reload_task: Option<Task<Result<()>>>,
110 language: Option<Arc<Language>>,
111 autoindent_requests: Vec<Arc<AutoindentRequest>>,
112 pending_autoindent: Option<Task<()>>,
113 sync_parse_timeout: Duration,
114 syntax_map: Mutex<SyntaxMap>,
115 parsing_in_background: bool,
116 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
117 non_text_state_update_count: usize,
118 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
119 remote_selections: TreeMap<ReplicaId, SelectionSet>,
120 diagnostics_timestamp: clock::Lamport,
121 completion_triggers: BTreeSet<String>,
122 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
123 completion_triggers_timestamp: clock::Lamport,
124 deferred_ops: OperationQueue<Operation>,
125 capability: Capability,
126 has_conflict: bool,
    /// Memoizes calls to has_changes_since(saved_version).
    /// The contents of the cell are (self.version, has_changes) as of the last call.
129 has_unsaved_edits: Cell<(clock::Global, bool)>,
130 _subscriptions: Vec<gpui::Subscription>,
131}
132
133#[derive(Copy, Clone, Debug, PartialEq, Eq)]
134pub enum ParseStatus {
135 Idle,
136 Parsing,
137}
138
139struct BufferBranchState {
140 base_buffer: Model<Buffer>,
141 merged_operations: Vec<Lamport>,
142}
143
144/// An immutable, cheaply cloneable representation of a fixed
145/// state of a buffer.
146pub struct BufferSnapshot {
147 text: text::BufferSnapshot,
148 pub(crate) syntax: SyntaxSnapshot,
149 file: Option<Arc<dyn File>>,
150 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
151 remote_selections: TreeMap<ReplicaId, SelectionSet>,
152 language: Option<Arc<Language>>,
153 non_text_state_update_count: usize,
154}
155
156/// The kind and amount of indentation in a particular line. For now,
157/// assumes that indentation is all the same character.
158#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
159pub struct IndentSize {
160 /// The number of bytes that comprise the indentation.
161 pub len: u32,
162 /// The kind of whitespace used for indentation.
163 pub kind: IndentKind,
164}
165
166/// A whitespace character that's used for indentation.
167#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
168pub enum IndentKind {
169 /// An ASCII space character.
170 #[default]
171 Space,
172 /// An ASCII tab character.
173 Tab,
174}
175
176/// The shape of a selection cursor.
177#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
178#[serde(rename_all = "snake_case")]
179pub enum CursorShape {
180 /// A vertical bar
181 #[default]
182 Bar,
183 /// A block that surrounds the following character
184 Block,
185 /// An underline that runs along the following character
186 Underline,
187 /// A box drawn around the following character
188 Hollow,
189}
190
191#[derive(Clone, Debug)]
192struct SelectionSet {
193 line_mode: bool,
194 cursor_shape: CursorShape,
195 selections: Arc<[Selection<Anchor>]>,
196 lamport_timestamp: clock::Lamport,
197}
198
199/// A diagnostic associated with a certain range of a buffer.
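///
/// # Examples
///
/// A sketch of constructing a primary diagnostic by hand; the field values are
/// illustrative, since language servers normally produce these:
///
/// ```ignore
/// let diagnostic = Diagnostic {
///     source: Some("rustc".to_string()),
///     code: Some("E0308".to_string()),
///     severity: DiagnosticSeverity::ERROR,
///     message: "mismatched types".to_string(),
///     // All diagnostics in the same group share this id; only the
///     // top-level one has `is_primary: true`.
///     group_id: 0,
///     is_primary: true,
///     is_disk_based: false,
///     is_unnecessary: false,
///     data: None,
/// };
/// ```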
200#[derive(Clone, Debug, PartialEq, Eq)]
201pub struct Diagnostic {
202 /// The name of the service that produced this diagnostic.
203 pub source: Option<String>,
204 /// A machine-readable code that identifies this diagnostic.
205 pub code: Option<String>,
206 /// Whether this diagnostic is a hint, warning, or error.
207 pub severity: DiagnosticSeverity,
208 /// The human-readable message associated with this diagnostic.
209 pub message: String,
210 /// An id that identifies the group to which this diagnostic belongs.
211 ///
212 /// When a language server produces a diagnostic with
213 /// one or more associated diagnostics, those diagnostics are all
214 /// assigned a single group ID.
215 pub group_id: usize,
216 /// Whether this diagnostic is the primary diagnostic for its group.
217 ///
218 /// In a given group, the primary diagnostic is the top-level diagnostic
219 /// returned by the language server. The non-primary diagnostics are the
220 /// associated diagnostics.
221 pub is_primary: bool,
222 /// Whether this diagnostic is considered to originate from an analysis of
223 /// files on disk, as opposed to any unsaved buffer contents. This is a
224 /// property of a given diagnostic source, and is configured for a given
225 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
226 /// for the language server.
227 pub is_disk_based: bool,
228 /// Whether this diagnostic marks unnecessary code.
229 pub is_unnecessary: bool,
    /// Data from the language server that produced this diagnostic. Passed back to the
    /// language server when we request code actions for this diagnostic.
231 pub data: Option<Value>,
232}
233
234/// TODO - move this into the `project` crate and make it private.
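///
/// # Examples
///
/// A sketch of how an LSP documentation payload maps onto [`Documentation`]
/// (assumes an async context and an existing `language_registry`):
///
/// ```ignore
/// let docs = prepare_completion_documentation(
///     &lsp::Documentation::String("Returns the length.".into()),
///     &language_registry,
///     None, // no language available for highlighting fenced code blocks
/// )
/// .await;
/// assert!(matches!(docs, Documentation::SingleLine(_)));
/// ```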
235pub async fn prepare_completion_documentation(
236 documentation: &lsp::Documentation,
237 language_registry: &Arc<LanguageRegistry>,
238 language: Option<Arc<Language>>,
239) -> Documentation {
240 match documentation {
241 lsp::Documentation::String(text) => {
242 if text.lines().count() <= 1 {
243 Documentation::SingleLine(text.clone())
244 } else {
245 Documentation::MultiLinePlainText(text.clone())
246 }
247 }
248
249 lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
250 lsp::MarkupKind::PlainText => {
251 if value.lines().count() <= 1 {
252 Documentation::SingleLine(value.clone())
253 } else {
254 Documentation::MultiLinePlainText(value.clone())
255 }
256 }
257
258 lsp::MarkupKind::Markdown => {
259 let parsed = parse_markdown(value, language_registry, language).await;
260 Documentation::MultiLineMarkdown(parsed)
261 }
262 },
263 }
264}
265
266/// Documentation associated with a [`Completion`].
267#[derive(Clone, Debug)]
268pub enum Documentation {
269 /// There is no documentation for this completion.
270 Undocumented,
271 /// A single line of documentation.
272 SingleLine(String),
273 /// Multiple lines of plain text documentation.
274 MultiLinePlainText(String),
275 /// Markdown documentation.
276 MultiLineMarkdown(ParsedMarkdown),
277}
278
279/// An operation used to synchronize this buffer with its other replicas.
280#[derive(Clone, Debug, PartialEq)]
281pub enum Operation {
282 /// A text operation.
283 Buffer(text::Operation),
284
285 /// An update to the buffer's diagnostics.
286 UpdateDiagnostics {
287 /// The id of the language server that produced the new diagnostics.
288 server_id: LanguageServerId,
289 /// The diagnostics.
290 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
291 /// The buffer's lamport timestamp.
292 lamport_timestamp: clock::Lamport,
293 },
294
295 /// An update to the most recent selections in this buffer.
296 UpdateSelections {
297 /// The selections.
298 selections: Arc<[Selection<Anchor>]>,
299 /// The buffer's lamport timestamp.
300 lamport_timestamp: clock::Lamport,
301 /// Whether the selections are in 'line mode'.
302 line_mode: bool,
303 /// The [`CursorShape`] associated with these selections.
304 cursor_shape: CursorShape,
305 },
306
307 /// An update to the characters that should trigger autocompletion
308 /// for this buffer.
309 UpdateCompletionTriggers {
310 /// The characters that trigger autocompletion.
311 triggers: Vec<String>,
312 /// The buffer's lamport timestamp.
313 lamport_timestamp: clock::Lamport,
314 /// The language server ID.
315 server_id: LanguageServerId,
316 },
317}
318
319/// An event that occurs in a buffer.
320#[derive(Clone, Debug, PartialEq)]
321pub enum BufferEvent {
322 /// The buffer was changed in a way that must be
323 /// propagated to its other replicas.
324 Operation {
325 operation: Operation,
326 is_local: bool,
327 },
328 /// The buffer was edited.
329 Edited,
330 /// The buffer's `dirty` bit changed.
331 DirtyChanged,
332 /// The buffer was saved.
333 Saved,
334 /// The buffer's file was changed on disk.
335 FileHandleChanged,
336 /// The buffer was reloaded.
337 Reloaded,
    /// The buffer needs to be reloaded.
339 ReloadNeeded,
340 /// The buffer's language was changed.
341 LanguageChanged,
342 /// The buffer's syntax trees were updated.
343 Reparsed,
344 /// The buffer's diagnostics were updated.
345 DiagnosticsUpdated,
346 /// The buffer gained or lost editing capabilities.
347 CapabilityChanged,
348 /// The buffer was explicitly requested to close.
349 Closed,
350 /// The buffer was discarded when closing.
351 Discarded,
352}
353
354/// The file associated with a buffer.
355pub trait File: Send + Sync {
356 /// Returns the [`LocalFile`] associated with this file, if the
357 /// file is local.
358 fn as_local(&self) -> Option<&dyn LocalFile>;
359
360 /// Returns whether this file is local.
361 fn is_local(&self) -> bool {
362 self.as_local().is_some()
363 }
364
365 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
366 /// only available in some states, such as modification time.
367 fn disk_state(&self) -> DiskState;
368
369 /// Returns the path of this file relative to the worktree's root directory.
370 fn path(&self) -> &Arc<Path>;
371
372 /// Returns the path of this file relative to the worktree's parent directory (this means it
373 /// includes the name of the worktree's root folder).
374 fn full_path(&self, cx: &AppContext) -> PathBuf;
375
376 /// Returns the last component of this handle's absolute path. If this handle refers to the root
377 /// of its worktree, then this method will return the name of the worktree itself.
378 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
379
380 /// Returns the id of the worktree to which this file belongs.
381 ///
382 /// This is needed for looking up project-specific settings.
383 fn worktree_id(&self, cx: &AppContext) -> WorktreeId;
384
385 /// Converts this file into an [`Any`] trait object.
386 fn as_any(&self) -> &dyn Any;
387
388 /// Converts this file into a protobuf message.
389 fn to_proto(&self, cx: &AppContext) -> rpc::proto::File;
390
    /// Returns whether Zed considers this to be a private file.
392 fn is_private(&self) -> bool;
393}
394
/// The file's storage status: whether it is stored (`Present`) and, if so, when it was last
/// modified. A file that is not stored is either `New` or `Deleted`. The UI distinguishes these
/// two states; for example, the buffer tab does not display a deletion indicator for new files.
399#[derive(Copy, Clone, Debug, PartialEq)]
400pub enum DiskState {
401 /// File created in Zed that has not been saved.
402 New,
403 /// File present on the filesystem.
404 Present { mtime: MTime },
405 /// Deleted file that was previously present.
406 Deleted,
407}
408
409impl DiskState {
410 /// Returns the file's last known modification time on disk.
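    ///
    /// # Examples
    ///
    /// A small sketch of the mapping (not compiled as a doctest):
    ///
    /// ```ignore
    /// assert_eq!(DiskState::New.mtime(), None);
    /// assert_eq!(DiskState::Deleted.mtime(), None);
    /// // DiskState::Present { mtime } yields Some(mtime).
    /// ```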
411 pub fn mtime(self) -> Option<MTime> {
412 match self {
413 DiskState::New => None,
414 DiskState::Present { mtime } => Some(mtime),
415 DiskState::Deleted => None,
416 }
417 }
418}
419
420/// The file associated with a buffer, in the case where the file is on the local disk.
421pub trait LocalFile: File {
    /// Returns the absolute path of this file.
423 fn abs_path(&self, cx: &AppContext) -> PathBuf;
424
425 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
426 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
427
428 /// Loads the file's contents from disk.
429 fn load_bytes(&self, cx: &AppContext) -> Task<Result<Vec<u8>>>;
430
431 /// Returns true if the file should not be shared with collaborators.
432 fn is_private(&self, _: &AppContext) -> bool {
433 false
434 }
435}
436
437/// The auto-indent behavior associated with an editing operation.
438/// For some editing operations, each affected line of text has its
439/// indentation recomputed. For other operations, the entire block
440/// of edited text is adjusted uniformly.
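///
/// # Examples
///
/// A sketch of requesting block auto-indent when pasting copied text. This
/// assumes `Buffer::edit` accepts an `Option<AutoindentMode>`, as the `None`
/// arguments elsewhere in this file suggest, plus a model context `cx`:
///
/// ```ignore
/// buffer.edit(
///     [(insert_offset..insert_offset, copied_text)],
///     Some(AutoindentMode::Block {
///         // The indentation the first line had where it was copied from,
///         // so the whole block can be shifted uniformly.
///         original_indent_columns: vec![4],
///     }),
///     cx,
/// );
/// ```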
441#[derive(Clone, Debug)]
442pub enum AutoindentMode {
443 /// Indent each line of inserted text.
444 EachLine,
445 /// Apply the same indentation adjustment to all of the lines
446 /// in a given insertion.
447 Block {
448 /// The original indentation level of the first line of each
449 /// insertion, if it has been copied.
450 original_indent_columns: Vec<u32>,
451 },
452}
453
454#[derive(Clone)]
455struct AutoindentRequest {
456 before_edit: BufferSnapshot,
457 entries: Vec<AutoindentRequestEntry>,
458 is_block_mode: bool,
459 ignore_empty_lines: bool,
460}
461
462#[derive(Debug, Clone)]
463struct AutoindentRequestEntry {
464 /// A range of the buffer whose indentation should be adjusted.
465 range: Range<Anchor>,
466 /// Whether or not these lines should be considered brand new, for the
467 /// purpose of auto-indent. When text is not new, its indentation will
468 /// only be adjusted if the suggested indentation level has *changed*
469 /// since the edit was made.
470 first_line_is_new: bool,
471 indent_size: IndentSize,
472 original_indent_column: Option<u32>,
473}
474
475#[derive(Debug)]
476struct IndentSuggestion {
477 basis_row: u32,
478 delta: Ordering,
479 within_error: bool,
480}
481
482struct BufferChunkHighlights<'a> {
483 captures: SyntaxMapCaptures<'a>,
484 next_capture: Option<SyntaxMapCapture<'a>>,
485 stack: Vec<(usize, HighlightId)>,
486 highlight_maps: Vec<HighlightMap>,
487}
488
489/// An iterator that yields chunks of a buffer's text, along with their
490/// syntax highlights and diagnostic status.
491pub struct BufferChunks<'a> {
492 buffer_snapshot: Option<&'a BufferSnapshot>,
493 range: Range<usize>,
494 chunks: text::Chunks<'a>,
495 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
496 error_depth: usize,
497 warning_depth: usize,
498 information_depth: usize,
499 hint_depth: usize,
500 unnecessary_depth: usize,
501 highlights: Option<BufferChunkHighlights<'a>>,
502}
503
504/// A chunk of a buffer's text, along with its syntax highlight and
505/// diagnostic status.
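///
/// # Examples
///
/// A sketch of consuming chunks from a snapshot. This assumes a
/// `BufferSnapshot::chunks(range, language_aware)` method, defined later in
/// this file, and an existing `snapshot`:
///
/// ```ignore
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     if let Some(severity) = chunk.diagnostic_severity {
///         eprintln!("{:?}: {}", severity, chunk.text);
///     }
/// }
/// ```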
506#[derive(Clone, Debug, Default)]
507pub struct Chunk<'a> {
508 /// The text of the chunk.
509 pub text: &'a str,
510 /// The syntax highlighting style of the chunk.
511 pub syntax_highlight_id: Option<HighlightId>,
512 /// The highlight style that has been applied to this chunk in
513 /// the editor.
514 pub highlight_style: Option<HighlightStyle>,
515 /// The severity of diagnostic associated with this chunk, if any.
516 pub diagnostic_severity: Option<DiagnosticSeverity>,
517 /// Whether this chunk of text is marked as unnecessary.
518 pub is_unnecessary: bool,
519 /// Whether this chunk of text was originally a tab character.
520 pub is_tab: bool,
521 /// An optional recipe for how the chunk should be presented.
522 pub renderer: Option<ChunkRenderer>,
523}
524
525/// A recipe for how the chunk should be presented.
526#[derive(Clone)]
527pub struct ChunkRenderer {
    /// Creates a custom element to represent this chunk.
529 pub render: Arc<dyn Send + Sync + Fn(&mut ChunkRendererContext) -> AnyElement>,
530 /// If true, the element is constrained to the shaped width of the text.
531 pub constrain_width: bool,
532}
533
534pub struct ChunkRendererContext<'a, 'b> {
535 pub context: &'a mut WindowContext<'b>,
536 pub max_width: Pixels,
537}
538
539impl fmt::Debug for ChunkRenderer {
540 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
541 f.debug_struct("ChunkRenderer")
542 .field("constrain_width", &self.constrain_width)
543 .finish()
544 }
545}
546
547impl<'a, 'b> Deref for ChunkRendererContext<'a, 'b> {
548 type Target = WindowContext<'b>;
549
550 fn deref(&self) -> &Self::Target {
551 self.context
552 }
553}
554
555impl<'a, 'b> DerefMut for ChunkRendererContext<'a, 'b> {
556 fn deref_mut(&mut self) -> &mut Self::Target {
557 self.context
558 }
559}
560
561/// A set of edits to a given version of a buffer, computed asynchronously.
562#[derive(Debug)]
563pub struct Diff {
564 pub(crate) base_version: clock::Global,
565 line_ending: LineEnding,
566 pub edits: Vec<(Range<usize>, Arc<str>)>,
567}
568
569#[derive(Clone, Copy)]
570pub(crate) struct DiagnosticEndpoint {
571 offset: usize,
572 is_start: bool,
573 severity: DiagnosticSeverity,
574 is_unnecessary: bool,
575}
576
577/// A class of characters, used for characterizing a run of text.
578#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
579pub enum CharKind {
580 /// Whitespace.
581 Whitespace,
582 /// Punctuation.
583 Punctuation,
584 /// Word.
585 Word,
586}
587
/// A runnable is a set of data about a buffer region that can be resolved into a task.
589pub struct Runnable {
590 pub tags: SmallVec<[RunnableTag; 1]>,
591 pub language: Arc<Language>,
592 pub buffer: BufferId,
593}
594
595#[derive(Clone, Debug, PartialEq)]
596pub struct IndentGuide {
597 pub buffer_id: BufferId,
598 pub start_row: BufferRow,
599 pub end_row: BufferRow,
600 pub depth: u32,
601 pub tab_size: u32,
602 pub settings: IndentGuideSettings,
603}
604
605impl IndentGuide {
606 pub fn indent_level(&self) -> u32 {
607 self.depth * self.tab_size
608 }
609}
610
611impl Buffer {
612 /// Create a new buffer with the given base text.
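    ///
    /// # Examples
    ///
    /// A sketch of creating a local buffer in a gpui context (assumes a test
    /// `AppContext` named `cx`):
    ///
    /// ```ignore
    /// let buffer = cx.new_model(|cx| Buffer::local("Hello, world!", cx));
    /// assert_eq!(buffer.read(cx).text(), "Hello, world!");
    /// ```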
613 pub fn local<T: Into<String>>(base_text: T, cx: &ModelContext<Self>) -> Self {
614 Self::build(
615 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
616 None,
617 Capability::ReadWrite,
618 )
619 }
620
621 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
622 pub fn local_normalized(
623 base_text_normalized: Rope,
624 line_ending: LineEnding,
625 cx: &ModelContext<Self>,
626 ) -> Self {
627 Self::build(
628 TextBuffer::new_normalized(
629 0,
630 cx.entity_id().as_non_zero_u64().into(),
631 line_ending,
632 base_text_normalized,
633 ),
634 None,
635 Capability::ReadWrite,
636 )
637 }
638
639 /// Create a new buffer that is a replica of a remote buffer.
640 pub fn remote(
641 remote_id: BufferId,
642 replica_id: ReplicaId,
643 capability: Capability,
644 base_text: impl Into<String>,
645 ) -> Self {
646 Self::build(
647 TextBuffer::new(replica_id, remote_id, base_text.into()),
648 None,
649 capability,
650 )
651 }
652
653 /// Create a new buffer that is a replica of a remote buffer, populating its
654 /// state from the given protobuf message.
655 pub fn from_proto(
656 replica_id: ReplicaId,
657 capability: Capability,
658 message: proto::BufferState,
659 file: Option<Arc<dyn File>>,
660 ) -> Result<Self> {
661 let buffer_id = BufferId::new(message.id)
662 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
663 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
664 let mut this = Self::build(buffer, file, capability);
665 this.text.set_line_ending(proto::deserialize_line_ending(
666 rpc::proto::LineEnding::from_i32(message.line_ending)
667 .ok_or_else(|| anyhow!("missing line_ending"))?,
668 ));
669 this.saved_version = proto::deserialize_version(&message.saved_version);
670 this.saved_mtime = message.saved_mtime.map(|time| time.into());
671 Ok(this)
672 }
673
674 /// Serialize the buffer's state to a protobuf message.
675 pub fn to_proto(&self, cx: &AppContext) -> proto::BufferState {
676 proto::BufferState {
677 id: self.remote_id().into(),
678 file: self.file.as_ref().map(|f| f.to_proto(cx)),
679 base_text: self.base_text().to_string(),
680 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
681 saved_version: proto::serialize_version(&self.saved_version),
682 saved_mtime: self.saved_mtime.map(|time| time.into()),
683 }
684 }
685
686 /// Serialize as protobufs all of the changes to the buffer since the given version.
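    ///
    /// # Examples
    ///
    /// A sketch of catching up a remote replica. `send_to_replica` is a
    /// hypothetical transport function; the remote side is assumed to
    /// deserialize and apply the operations:
    ///
    /// ```ignore
    /// let ops = buffer.read(cx).serialize_ops(Some(remote_version), cx);
    /// cx.background_executor()
    ///     .spawn(async move {
    ///         for op in ops.await {
    ///             send_to_replica(op); // hypothetical transport
    ///         }
    ///     })
    ///     .detach();
    /// ```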
687 pub fn serialize_ops(
688 &self,
689 since: Option<clock::Global>,
690 cx: &AppContext,
691 ) -> Task<Vec<proto::Operation>> {
692 let mut operations = Vec::new();
693 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
694
695 operations.extend(self.remote_selections.iter().map(|(_, set)| {
696 proto::serialize_operation(&Operation::UpdateSelections {
697 selections: set.selections.clone(),
698 lamport_timestamp: set.lamport_timestamp,
699 line_mode: set.line_mode,
700 cursor_shape: set.cursor_shape,
701 })
702 }));
703
704 for (server_id, diagnostics) in &self.diagnostics {
705 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
706 lamport_timestamp: self.diagnostics_timestamp,
707 server_id: *server_id,
708 diagnostics: diagnostics.iter().cloned().collect(),
709 }));
710 }
711
712 for (server_id, completions) in &self.completion_triggers_per_language_server {
713 operations.push(proto::serialize_operation(
714 &Operation::UpdateCompletionTriggers {
715 triggers: completions.iter().cloned().collect(),
716 lamport_timestamp: self.completion_triggers_timestamp,
717 server_id: *server_id,
718 },
719 ));
720 }
721
722 let text_operations = self.text.operations().clone();
723 cx.background_executor().spawn(async move {
724 let since = since.unwrap_or_default();
725 operations.extend(
726 text_operations
727 .iter()
728 .filter(|(_, op)| !since.observed(op.timestamp()))
729 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
730 );
731 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
732 operations
733 })
734 }
735
736 /// Assign a language to the buffer, returning the buffer.
737 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
738 self.set_language(Some(language), cx);
739 self
740 }
741
742 /// Returns the [`Capability`] of this buffer.
743 pub fn capability(&self) -> Capability {
744 self.capability
745 }
746
747 /// Whether this buffer can only be read.
748 pub fn read_only(&self) -> bool {
749 self.capability == Capability::ReadOnly
750 }
751
752 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
753 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
754 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
755 let snapshot = buffer.snapshot();
756 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
757 Self {
758 saved_mtime,
759 saved_version: buffer.version(),
760 preview_version: buffer.version(),
761 reload_task: None,
762 transaction_depth: 0,
763 was_dirty_before_starting_transaction: None,
764 has_unsaved_edits: Cell::new((buffer.version(), false)),
765 text: buffer,
766 branch_state: None,
767 file,
768 capability,
769 syntax_map,
770 parsing_in_background: false,
771 non_text_state_update_count: 0,
772 sync_parse_timeout: Duration::from_millis(1),
773 parse_status: async_watch::channel(ParseStatus::Idle),
774 autoindent_requests: Default::default(),
775 pending_autoindent: Default::default(),
776 language: None,
777 remote_selections: Default::default(),
778 diagnostics: Default::default(),
779 diagnostics_timestamp: Default::default(),
780 completion_triggers: Default::default(),
781 completion_triggers_per_language_server: Default::default(),
782 completion_triggers_timestamp: Default::default(),
783 deferred_ops: OperationQueue::new(),
784 has_conflict: false,
785 _subscriptions: Vec::new(),
786 }
787 }
788
789 /// Retrieve a snapshot of the buffer's current state. This is computationally
790 /// cheap, and allows reading from the buffer on a background thread.
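    ///
    /// # Examples
    ///
    /// A sketch of reading buffer state on a background thread. This assumes a
    /// gpui `cx`, and that `BufferSnapshot` dereferences to the underlying text
    /// snapshot, as it does elsewhere in this crate:
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_executor()
    ///     .spawn(async move {
    ///         // The snapshot is immutable, so this read races with nothing.
    ///         let line_count = snapshot.max_point().row + 1;
    ///         dbg!(line_count);
    ///     })
    ///     .detach();
    /// ```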
791 pub fn snapshot(&self) -> BufferSnapshot {
792 let text = self.text.snapshot();
793 let mut syntax_map = self.syntax_map.lock();
794 syntax_map.interpolate(&text);
795 let syntax = syntax_map.snapshot();
796
797 BufferSnapshot {
798 text,
799 syntax,
800 file: self.file.clone(),
801 remote_selections: self.remote_selections.clone(),
802 diagnostics: self.diagnostics.clone(),
803 language: self.language.clone(),
804 non_text_state_update_count: self.non_text_state_update_count,
805 }
806 }
807
808 pub fn branch(&mut self, cx: &mut ModelContext<Self>) -> Model<Self> {
809 let this = cx.handle();
810 cx.new_model(|cx| {
811 let mut branch = Self {
812 branch_state: Some(BufferBranchState {
813 base_buffer: this.clone(),
814 merged_operations: Default::default(),
815 }),
816 language: self.language.clone(),
817 has_conflict: self.has_conflict,
818 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
819 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
820 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
821 };
822 if let Some(language_registry) = self.language_registry() {
823 branch.set_language_registry(language_registry);
824 }
825
826 // Reparse the branch buffer so that we get syntax highlighting immediately.
827 branch.reparse(cx);
828
829 branch
830 })
831 }
832
833 /// Applies all of the changes in this buffer that intersect any of the
834 /// given `ranges` to its base buffer.
835 ///
836 /// If `ranges` is empty, then all changes will be applied. This buffer must
837 /// be a branch buffer to call this method.
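    ///
    /// # Examples
    ///
    /// A sketch of editing a branch and merging a single change back into the
    /// base buffer (assumes a gpui `cx`):
    ///
    /// ```ignore
    /// let branch = base.update(cx, |base, cx| base.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// header\n")], None, cx);
    ///     // Apply only the changes intersecting the given ranges; an empty
    ///     // `Vec` would apply every change in the branch.
    ///     branch.merge_into_base(vec![0..10], cx);
    /// });
    /// ```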
838 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut ModelContext<Self>) {
839 let Some(base_buffer) = self.base_buffer() else {
840 debug_panic!("not a branch buffer");
841 return;
842 };
843
844 let mut ranges = if ranges.is_empty() {
845 &[0..usize::MAX]
846 } else {
847 ranges.as_slice()
848 }
849 .into_iter()
850 .peekable();
851
852 let mut edits = Vec::new();
853 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
854 let mut is_included = false;
855 while let Some(range) = ranges.peek() {
856 if range.end < edit.new.start {
857 ranges.next().unwrap();
858 } else {
859 if range.start <= edit.new.end {
860 is_included = true;
861 }
862 break;
863 }
864 }
865
866 if is_included {
867 edits.push((
868 edit.old.clone(),
869 self.text_for_range(edit.new.clone()).collect::<String>(),
870 ));
871 }
872 }
873
874 let operation = base_buffer.update(cx, |base_buffer, cx| {
875 // cx.emit(BufferEvent::DiffBaseChanged);
876 base_buffer.edit(edits, None, cx)
877 });
878
879 if let Some(operation) = operation {
880 if let Some(BufferBranchState {
881 merged_operations, ..
882 }) = &mut self.branch_state
883 {
884 merged_operations.push(operation);
885 }
886 }
887 }
888
889 fn on_base_buffer_event(
890 &mut self,
891 _: Model<Buffer>,
892 event: &BufferEvent,
893 cx: &mut ModelContext<Self>,
894 ) {
895 let BufferEvent::Operation { operation, .. } = event else {
896 return;
897 };
898 let Some(BufferBranchState {
899 merged_operations, ..
900 }) = &mut self.branch_state
901 else {
902 return;
903 };
904
905 let mut operation_to_undo = None;
906 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
907 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
908 merged_operations.remove(ix);
909 operation_to_undo = Some(operation.timestamp);
910 }
911 }
912
913 self.apply_ops([operation.clone()], cx);
914
915 if let Some(timestamp) = operation_to_undo {
916 let counts = [(timestamp, u32::MAX)].into_iter().collect();
917 self.undo_operations(counts, cx);
918 }
919 }
920
921 #[cfg(test)]
922 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
923 &self.text
924 }
925
926 /// Retrieve a snapshot of the buffer's raw text, without any
927 /// language-related state like the syntax tree or diagnostics.
928 pub fn text_snapshot(&self) -> text::BufferSnapshot {
929 self.text.snapshot()
930 }
931
932 /// The file associated with the buffer, if any.
933 pub fn file(&self) -> Option<&Arc<dyn File>> {
934 self.file.as_ref()
935 }
936
937 /// The version of the buffer that was last saved or reloaded from disk.
938 pub fn saved_version(&self) -> &clock::Global {
939 &self.saved_version
940 }
941
942 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
943 pub fn saved_mtime(&self) -> Option<MTime> {
944 self.saved_mtime
945 }
946
947 /// Assign a language to the buffer.
948 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
949 self.non_text_state_update_count += 1;
950 self.syntax_map.lock().clear(&self.text);
951 self.language = language;
952 self.reparse(cx);
953 cx.emit(BufferEvent::LanguageChanged);
954 }
955
956 /// Assign a language registry to the buffer. This allows the buffer to retrieve
957 /// other languages if parts of the buffer are written in different languages.
958 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
959 self.syntax_map
960 .lock()
961 .set_language_registry(language_registry);
962 }
963
964 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
965 self.syntax_map.lock().language_registry()
966 }
967
968 /// Assign the buffer a new [`Capability`].
969 pub fn set_capability(&mut self, capability: Capability, cx: &mut ModelContext<Self>) {
970 self.capability = capability;
971 cx.emit(BufferEvent::CapabilityChanged)
972 }
973
974 /// This method is called to signal that the buffer has been saved.
975 pub fn did_save(
976 &mut self,
977 version: clock::Global,
978 mtime: Option<MTime>,
979 cx: &mut ModelContext<Self>,
980 ) {
981 self.saved_version = version;
982 self.has_unsaved_edits
983 .set((self.saved_version().clone(), false));
984 self.has_conflict = false;
985 self.saved_mtime = mtime;
986 cx.emit(BufferEvent::Saved);
987 cx.notify();
988 }
989
990 /// This method is called to signal that the buffer has been discarded.
991 pub fn discarded(&self, cx: &mut ModelContext<Self>) {
992 cx.emit(BufferEvent::Discarded);
993 cx.notify();
994 }
995
996 /// Reloads the contents of the buffer from disk.
997 pub fn reload(&mut self, cx: &ModelContext<Self>) -> oneshot::Receiver<Option<Transaction>> {
998 let (tx, rx) = futures::channel::oneshot::channel();
999 let prev_version = self.text.version();
1000 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
1001 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
1002 let file = this.file.as_ref()?.as_local()?;
1003 Some((file.disk_state().mtime(), file.load(cx)))
1004 })?
1005 else {
1006 return Ok(());
1007 };
1008
1009 let new_text = new_text.await?;
1010 let diff = this
1011 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
1012 .await;
1013 this.update(&mut cx, |this, cx| {
1014 if this.version() == diff.base_version {
1015 this.finalize_last_transaction();
1016 this.apply_diff(diff, cx);
1017 tx.send(this.finalize_last_transaction().cloned()).ok();
1018 this.has_conflict = false;
1019 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1020 } else {
1021 if !diff.edits.is_empty()
1022 || this
1023 .edits_since::<usize>(&diff.base_version)
1024 .next()
1025 .is_some()
1026 {
1027 this.has_conflict = true;
1028 }
1029
1030 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1031 }
1032
1033 this.reload_task.take();
1034 })
1035 }));
1036 rx
1037 }
1038
1039 /// This method is called to signal that the buffer has been reloaded.
1040 pub fn did_reload(
1041 &mut self,
1042 version: clock::Global,
1043 line_ending: LineEnding,
1044 mtime: Option<MTime>,
1045 cx: &mut ModelContext<Self>,
1046 ) {
1047 self.saved_version = version;
1048 self.has_unsaved_edits
1049 .set((self.saved_version.clone(), false));
1050 self.text.set_line_ending(line_ending);
1051 self.saved_mtime = mtime;
1052 cx.emit(BufferEvent::Reloaded);
1053 cx.notify();
1054 }
1055
1056 /// Updates the [`File`] backing this buffer. This should be called when
1057 /// the file has changed or has been deleted.
1058 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut ModelContext<Self>) {
1059 let was_dirty = self.is_dirty();
1060 let mut file_changed = false;
1061
1062 if let Some(old_file) = self.file.as_ref() {
1063 if new_file.path() != old_file.path() {
1064 file_changed = true;
1065 }
1066
1067 let old_state = old_file.disk_state();
1068 let new_state = new_file.disk_state();
1069 if old_state != new_state {
1070 file_changed = true;
1071 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1072 cx.emit(BufferEvent::ReloadNeeded)
1073 }
1074 }
1075 } else {
1076 file_changed = true;
1077 };
1078
1079 self.file = Some(new_file);
1080 if file_changed {
1081 self.non_text_state_update_count += 1;
1082 if was_dirty != self.is_dirty() {
1083 cx.emit(BufferEvent::DirtyChanged);
1084 }
1085 cx.emit(BufferEvent::FileHandleChanged);
1086 cx.notify();
1087 }
1088 }
1089
1090 pub fn base_buffer(&self) -> Option<Model<Self>> {
1091 Some(self.branch_state.as_ref()?.base_buffer.clone())
1092 }
1093
1094 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1095 pub fn language(&self) -> Option<&Arc<Language>> {
1096 self.language.as_ref()
1097 }
1098
1099 /// Returns the [`Language`] at the given location.
1100 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1101 let offset = position.to_offset(self);
1102 self.syntax_map
1103 .lock()
1104 .layers_for_range(offset..offset, &self.text, false)
1105 .last()
1106 .map(|info| info.language.clone())
1107 .or_else(|| self.language.clone())
1108 }
1109
1110 /// An integer version number that accounts for all updates besides
1111 /// the buffer's text itself (which is versioned via a version vector).
1112 pub fn non_text_state_update_count(&self) -> usize {
1113 self.non_text_state_update_count
1114 }
1115
1116 /// Whether the buffer is being parsed in the background.
1117 #[cfg(any(test, feature = "test-support"))]
1118 pub fn is_parsing(&self) -> bool {
1119 self.parsing_in_background
1120 }
1121
1122 /// Indicates whether the buffer contains any regions that may be
1123 /// written in a language that hasn't been loaded yet.
1124 pub fn contains_unknown_injections(&self) -> bool {
1125 self.syntax_map.lock().contains_unknown_injections()
1126 }
1127
1128 #[cfg(test)]
1129 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1130 self.sync_parse_timeout = timeout;
1131 }
1132
1133 /// Called after an edit to synchronize the buffer's main parse tree with
1134 /// the buffer's new underlying state.
1135 ///
1136 /// Locks the syntax map and interpolates the edits since the last reparse
1137 /// into the foreground syntax tree.
1138 ///
1139 /// Then takes a stable snapshot of the syntax map before unlocking it.
1140 /// The snapshot with the interpolated edits is sent to a background thread,
1141 /// where we ask Tree-sitter to perform an incremental parse.
1142 ///
1143 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1144 /// waiting on the parse to complete. As soon as it completes, we proceed
1145 /// synchronously, unless a 1ms timeout elapses.
1146 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and return with the interpolated tree still
    /// in the foreground. When the background parse completes, we call back into
    /// the main thread and assign its result to the foreground parse state.
1151 ///
1152 /// If the buffer or grammar changed since the start of the background parse,
1153 /// initiate an additional reparse recursively. To avoid concurrent parses
1154 /// for the same buffer, we only initiate a new parse if we are not already
1155 /// parsing in the background.
1156 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1157 if self.parsing_in_background {
1158 return;
1159 }
1160 let language = if let Some(language) = self.language.clone() {
1161 language
1162 } else {
1163 return;
1164 };
1165
1166 let text = self.text_snapshot();
1167 let parsed_version = self.version();
1168
1169 let mut syntax_map = self.syntax_map.lock();
1170 syntax_map.interpolate(&text);
1171 let language_registry = syntax_map.language_registry();
1172 let mut syntax_snapshot = syntax_map.snapshot();
1173 drop(syntax_map);
1174
1175 let parse_task = cx.background_executor().spawn({
1176 let language = language.clone();
1177 let language_registry = language_registry.clone();
1178 async move {
1179 syntax_snapshot.reparse(&text, language_registry, language);
1180 syntax_snapshot
1181 }
1182 });
1183
1184 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1185 match cx
1186 .background_executor()
1187 .block_with_timeout(self.sync_parse_timeout, parse_task)
1188 {
1189 Ok(new_syntax_snapshot) => {
1190 self.did_finish_parsing(new_syntax_snapshot, cx);
1191 }
1192 Err(parse_task) => {
1193 self.parsing_in_background = true;
1194 cx.spawn(move |this, mut cx| async move {
1195 let new_syntax_map = parse_task.await;
1196 this.update(&mut cx, move |this, cx| {
1197 let grammar_changed =
1198 this.language.as_ref().map_or(true, |current_language| {
1199 !Arc::ptr_eq(&language, current_language)
1200 });
1201 let language_registry_changed = new_syntax_map
1202 .contains_unknown_injections()
1203 && language_registry.map_or(false, |registry| {
1204 registry.version() != new_syntax_map.language_registry_version()
1205 });
1206 let parse_again = language_registry_changed
1207 || grammar_changed
1208 || this.version.changed_since(&parsed_version);
1209 this.did_finish_parsing(new_syntax_map, cx);
1210 this.parsing_in_background = false;
1211 if parse_again {
1212 this.reparse(cx);
1213 }
1214 })
1215 .ok();
1216 })
1217 .detach();
1218 }
1219 }
1220 }
1221
1222 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
1223 self.non_text_state_update_count += 1;
1224 self.syntax_map.lock().did_parse(syntax_snapshot);
1225 self.request_autoindent(cx);
1226 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1227 cx.emit(BufferEvent::Reparsed);
1228 cx.notify();
1229 }
1230
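    /// Returns a receiver that can be used to observe changes to the buffer's
    /// parse status.
    ///
    /// # Examples
    ///
    /// A sketch of waiting for a reparse to finish. This assumes the
    /// `async-watch` receiver API of `borrow()` and `changed()`, and an async
    /// context:
    ///
    /// ```ignore
    /// let mut parse_status = buffer.read(cx).parse_status();
    /// while *parse_status.borrow() != ParseStatus::Idle {
    ///     parse_status.changed().await?;
    /// }
    /// ```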
1231 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1232 self.parse_status.1.clone()
1233 }
1234
1235 /// Assign to the buffer a set of diagnostics created by a given language server.
1236 pub fn update_diagnostics(
1237 &mut self,
1238 server_id: LanguageServerId,
1239 diagnostics: DiagnosticSet,
1240 cx: &mut ModelContext<Self>,
1241 ) {
1242 let lamport_timestamp = self.text.lamport_clock.tick();
1243 let op = Operation::UpdateDiagnostics {
1244 server_id,
1245 diagnostics: diagnostics.iter().cloned().collect(),
1246 lamport_timestamp,
1247 };
1248 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1249 self.send_operation(op, true, cx);
1250 }
1251
1252 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
1253 if let Some(indent_sizes) = self.compute_autoindents() {
1254 let indent_sizes = cx.background_executor().spawn(indent_sizes);
1255 match cx
1256 .background_executor()
1257 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1258 {
1259 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1260 Err(indent_sizes) => {
1261 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1262 let indent_sizes = indent_sizes.await;
1263 this.update(&mut cx, |this, cx| {
1264 this.apply_autoindents(indent_sizes, cx);
1265 })
1266 .ok();
1267 }));
1268 }
1269 }
1270 } else {
1271 self.autoindent_requests.clear();
1272 }
1273 }
1274
1275 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
1276 let max_rows_between_yields = 100;
1277 let snapshot = self.snapshot();
1278 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1279 return None;
1280 }
1281
1282 let autoindent_requests = self.autoindent_requests.clone();
1283 Some(async move {
1284 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1285 for request in autoindent_requests {
1286 // Resolve each edited range to its row in the current buffer and in the
1287 // buffer before this batch of edits.
1288 let mut row_ranges = Vec::new();
1289 let mut old_to_new_rows = BTreeMap::new();
1290 let mut language_indent_sizes_by_new_row = Vec::new();
1291 for entry in &request.entries {
1292 let position = entry.range.start;
1293 let new_row = position.to_point(&snapshot).row;
1294 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1295 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1296
1297 if !entry.first_line_is_new {
1298 let old_row = position.to_point(&request.before_edit).row;
1299 old_to_new_rows.insert(old_row, new_row);
1300 }
1301 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1302 }
1303
1304 // Build a map containing the suggested indentation for each of the edited lines
1305 // with respect to the state of the buffer before these edits. This map is keyed
1306 // by the rows for these lines in the current state of the buffer.
1307 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1308 let old_edited_ranges =
1309 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1310 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1311 let mut language_indent_size = IndentSize::default();
1312 for old_edited_range in old_edited_ranges {
1313 let suggestions = request
1314 .before_edit
1315 .suggest_autoindents(old_edited_range.clone())
1316 .into_iter()
1317 .flatten();
1318 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1319 if let Some(suggestion) = suggestion {
1320 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1321
1322 // Find the indent size based on the language for this row.
1323 while let Some((row, size)) = language_indent_sizes.peek() {
1324 if *row > new_row {
1325 break;
1326 }
1327 language_indent_size = *size;
1328 language_indent_sizes.next();
1329 }
1330
1331 let suggested_indent = old_to_new_rows
1332 .get(&suggestion.basis_row)
1333 .and_then(|from_row| {
1334 Some(old_suggestions.get(from_row).copied()?.0)
1335 })
1336 .unwrap_or_else(|| {
1337 request
1338 .before_edit
1339 .indent_size_for_line(suggestion.basis_row)
1340 })
1341 .with_delta(suggestion.delta, language_indent_size);
1342 old_suggestions
1343 .insert(new_row, (suggested_indent, suggestion.within_error));
1344 }
1345 }
1346 yield_now().await;
1347 }
1348
1349 // Compute new suggestions for each line, but only include them in the result
1350 // if they differ from the old suggestion for that line.
1351 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1352 let mut language_indent_size = IndentSize::default();
1353 for (row_range, original_indent_column) in row_ranges {
1354 let new_edited_row_range = if request.is_block_mode {
1355 row_range.start..row_range.start + 1
1356 } else {
1357 row_range.clone()
1358 };
1359
1360 let suggestions = snapshot
1361 .suggest_autoindents(new_edited_row_range.clone())
1362 .into_iter()
1363 .flatten();
1364 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1365 if let Some(suggestion) = suggestion {
1366 // Find the indent size based on the language for this row.
1367 while let Some((row, size)) = language_indent_sizes.peek() {
1368 if *row > new_row {
1369 break;
1370 }
1371 language_indent_size = *size;
1372 language_indent_sizes.next();
1373 }
1374
1375 let suggested_indent = indent_sizes
1376 .get(&suggestion.basis_row)
1377 .copied()
1378 .map(|e| e.0)
1379 .unwrap_or_else(|| {
1380 snapshot.indent_size_for_line(suggestion.basis_row)
1381 })
1382 .with_delta(suggestion.delta, language_indent_size);
1383
1384 if old_suggestions.get(&new_row).map_or(
1385 true,
1386 |(old_indentation, was_within_error)| {
1387 suggested_indent != *old_indentation
1388 && (!suggestion.within_error || *was_within_error)
1389 },
1390 ) {
1391 indent_sizes.insert(
1392 new_row,
1393 (suggested_indent, request.ignore_empty_lines),
1394 );
1395 }
1396 }
1397 }
1398
1399 if let (true, Some(original_indent_column)) =
1400 (request.is_block_mode, original_indent_column)
1401 {
1402 let new_indent =
1403 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1404 *indent
1405 } else {
1406 snapshot.indent_size_for_line(row_range.start)
1407 };
1408 let delta = new_indent.len as i64 - original_indent_column as i64;
1409 if delta != 0 {
1410 for row in row_range.skip(1) {
1411 indent_sizes.entry(row).or_insert_with(|| {
1412 let mut size = snapshot.indent_size_for_line(row);
1413 if size.kind == new_indent.kind {
1414 match delta.cmp(&0) {
1415 Ordering::Greater => size.len += delta as u32,
1416 Ordering::Less => {
1417 size.len = size.len.saturating_sub(-delta as u32)
1418 }
1419 Ordering::Equal => {}
1420 }
1421 }
1422 (size, request.ignore_empty_lines)
1423 });
1424 }
1425 }
1426 }
1427
1428 yield_now().await;
1429 }
1430 }
1431
1432 indent_sizes
1433 .into_iter()
1434 .filter_map(|(row, (indent, ignore_empty_lines))| {
1435 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1436 None
1437 } else {
1438 Some((row, indent))
1439 }
1440 })
1441 .collect()
1442 })
1443 }
1444
1445 fn apply_autoindents(
1446 &mut self,
1447 indent_sizes: BTreeMap<u32, IndentSize>,
1448 cx: &mut ModelContext<Self>,
1449 ) {
1450 self.autoindent_requests.clear();
1451
1452 let edits: Vec<_> = indent_sizes
1453 .into_iter()
1454 .filter_map(|(row, indent_size)| {
1455 let current_size = indent_size_for_line(self, row);
1456 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1457 })
1458 .collect();
1459
1460 let preserve_preview = self.preserve_preview();
1461 self.edit(edits, None, cx);
1462 if preserve_preview {
1463 self.refresh_preview();
1464 }
1465 }
1466
1467 /// Create a minimal edit that will cause the given row to be indented
1468 /// with the given size. After applying this edit, the length of the line
1469 /// will always be at least `new_size.len`.
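    ///
    /// # Examples
    ///
    /// A sketch of growing a two-space indent to four spaces on row 0:
    ///
    /// ```ignore
    /// let current = IndentSize { len: 2, kind: IndentKind::Space };
    /// let target = IndentSize { len: 4, kind: IndentKind::Space };
    /// let edit = Buffer::edit_for_indent_size_adjustment(0, current, target);
    /// // Two more spaces are inserted at the start of the row.
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(0, 0)..Point::new(0, 0), "  ".to_string()))
    /// );
    /// ```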
1470 pub fn edit_for_indent_size_adjustment(
1471 row: u32,
1472 current_size: IndentSize,
1473 new_size: IndentSize,
1474 ) -> Option<(Range<Point>, String)> {
1475 if new_size.kind == current_size.kind {
        match new_size.len.cmp(&current_size.len) {
1477 Ordering::Greater => {
1478 let point = Point::new(row, 0);
1479 Some((
1480 point..point,
1481 iter::repeat(new_size.char())
1482 .take((new_size.len - current_size.len) as usize)
1483 .collect::<String>(),
1484 ))
1485 }
1486
1487 Ordering::Less => Some((
1488 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1489 String::new(),
1490 )),
1491
1492 Ordering::Equal => None,
1493 }
1494 } else {
1495 Some((
1496 Point::new(row, 0)..Point::new(row, current_size.len),
1497 iter::repeat(new_size.char())
1498 .take(new_size.len as usize)
1499 .collect::<String>(),
1500 ))
1501 }
1502 }
1503
1504 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1505 /// and the given new text.
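    ///
    /// # Examples
    ///
    /// A sketch of replacing the buffer's contents with new text by diffing and
    /// then applying the diff, which disturbs anchors and undo history less than
    /// wholesale replacement (assumes a gpui async context):
    ///
    /// ```ignore
    /// let diff_task = buffer.read(cx).diff(new_text, cx);
    /// // Later, in an async context:
    /// let diff = diff_task.await;
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.apply_diff(diff, cx);
    /// });
    /// ```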
1506 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1507 let old_text = self.as_rope().clone();
1508 let base_version = self.version();
1509 cx.background_executor()
1510 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1511 let old_text = old_text.to_string();
1512 let line_ending = LineEnding::detect(&new_text);
1513 LineEnding::normalize(&mut new_text);
1514
1515 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1516 let empty: Arc<str> = Arc::default();
1517
1518 let mut edits = Vec::new();
1519 let mut old_offset = 0;
1520 let mut new_offset = 0;
1521 let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
1522 for change in diff.iter_all_changes().map(Some).chain([None]) {
1523 if let Some(change) = &change {
1524 let len = change.value().len();
1525 match change.tag() {
1526 ChangeTag::Equal => {
1527 old_offset += len;
1528 new_offset += len;
1529 }
1530 ChangeTag::Delete => {
1531 let old_end_offset = old_offset + len;
1532 if let Some((last_old_range, _)) = &mut last_edit {
1533 last_old_range.end = old_end_offset;
1534 } else {
1535 last_edit =
1536 Some((old_offset..old_end_offset, new_offset..new_offset));
1537 }
1538 old_offset = old_end_offset;
1539 }
1540 ChangeTag::Insert => {
1541 let new_end_offset = new_offset + len;
1542 if let Some((_, last_new_range)) = &mut last_edit {
1543 last_new_range.end = new_end_offset;
1544 } else {
1545 last_edit =
1546 Some((old_offset..old_offset, new_offset..new_end_offset));
1547 }
1548 new_offset = new_end_offset;
1549 }
1550 }
1551 }
1552
1553 if let Some((old_range, new_range)) = &last_edit {
1554 if old_offset > old_range.end
1555 || new_offset > new_range.end
1556 || change.is_none()
1557 {
1558 let text = if new_range.is_empty() {
1559 empty.clone()
1560 } else {
1561 new_text[new_range.clone()].into()
1562 };
1563 edits.push((old_range.clone(), text));
1564 last_edit.take();
1565 }
1566 }
1567 }
1568
1569 Diff {
1570 base_version,
1571 line_ending,
1572 edits,
1573 }
1574 })
1575 }
1576
1577 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1579 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1580 let old_text = self.as_rope().clone();
1581 let line_ending = self.line_ending();
1582 let base_version = self.version();
1583 cx.background_executor().spawn(async move {
1584 let ranges = trailing_whitespace_ranges(&old_text);
1585 let empty = Arc::<str>::from("");
1586 Diff {
1587 base_version,
1588 line_ending,
1589 edits: ranges
1590 .into_iter()
1591 .map(|range| (range, empty.clone()))
1592 .collect(),
1593 }
1594 })
1595 }
1596
1597 /// Ensures that the buffer ends with a single newline character, and
1598 /// no other whitespace.
1599 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1600 let len = self.len();
1601 let mut offset = len;
1602 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1603 let non_whitespace_len = chunk
1604 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1605 .len();
1606 offset -= chunk.len();
1607 offset += non_whitespace_len;
1608 if non_whitespace_len != 0 {
1609 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1610 return;
1611 }
1612 break;
1613 }
1614 }
1615 self.edit([(offset..len, "\n")], None, cx);
1616 }
1617
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, the diff is adjusted to account for those changes, and any parts of the
    /// diff that conflict with those changes are discarded.
1621 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1622 // Check for any edits to the buffer that have occurred since this diff
1623 // was computed.
1624 let snapshot = self.snapshot();
1625 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1626 let mut delta = 0;
1627 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1628 while let Some(edit_since) = edits_since.peek() {
1629 // If the edit occurs after a diff hunk, then it does not
1630 // affect that hunk.
1631 if edit_since.old.start > range.end {
1632 break;
1633 }
1634 // If the edit precedes the diff hunk, then adjust the hunk
1635 // to reflect the edit.
1636 else if edit_since.old.end < range.start {
1637 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1638 edits_since.next();
1639 }
1640 // If the edit intersects a diff hunk, then discard that hunk.
1641 else {
1642 return None;
1643 }
1644 }
1645
1646 let start = (range.start as i64 + delta) as usize;
1647 let end = (range.end as i64 + delta) as usize;
1648 Some((start..end, new_text))
1649 });
1650
1651 self.start_transaction();
1652 self.text.set_line_ending(diff.line_ending);
1653 self.edit(adjusted_edits, None, cx);
1654 self.end_transaction(cx)
1655 }
1656
1657 fn has_unsaved_edits(&self) -> bool {
1658 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1659
1660 if last_version == self.version {
1661 self.has_unsaved_edits
1662 .set((last_version, has_unsaved_edits));
1663 return has_unsaved_edits;
1664 }
1665
1666 let has_edits = self.has_edits_since(&self.saved_version);
1667 self.has_unsaved_edits
1668 .set((self.version.clone(), has_edits));
1669 has_edits
1670 }
1671
1672 /// Checks if the buffer has unsaved changes.
1673 pub fn is_dirty(&self) -> bool {
1674 self.capability != Capability::ReadOnly
1675 && (self.has_conflict
1676 || self.file.as_ref().map_or(false, |file| {
1677 matches!(file.disk_state(), DiskState::New | DiskState::Deleted)
1678 })
1679 || self.has_unsaved_edits())
1680 }
1681
1682 /// Checks if the buffer and its file have both changed since the buffer
1683 /// was last saved or reloaded.
1684 pub fn has_conflict(&self) -> bool {
1685 if self.has_conflict {
1686 return true;
1687 }
1688 let Some(file) = self.file.as_ref() else {
1689 return false;
1690 };
1691 match file.disk_state() {
1692 DiskState::New => false,
1693 DiskState::Present { mtime } => match self.saved_mtime {
1694 Some(saved_mtime) => {
1695 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1696 }
1697 None => true,
1698 },
1699 DiskState::Deleted => true,
1700 }
1701 }
1702
1703 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1704 pub fn subscribe(&mut self) -> Subscription {
1705 self.text.subscribe()
1706 }
1707
1708 /// Starts a transaction, if one is not already in-progress. When undoing or
1709 /// redoing edits, all of the edits performed within a transaction are undone
1710 /// or redone together.
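    ///
    /// # Examples
    ///
    /// A sketch of grouping two edits so they undo together. This assumes
    /// `Buffer::edit` takes an edit iterator, an autoindent mode, and the model
    /// context, as its uses in this file suggest:
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(12..12, "}\n")], None, cx);
    /// // A single undo now reverts both edits.
    /// buffer.end_transaction(cx);
    /// ```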
1711 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1712 self.start_transaction_at(Instant::now())
1713 }
1714
1715 /// Starts a transaction, providing the current time. Subsequent transactions
1716 /// that occur within a short period of time will be grouped together. This
1717 /// is controlled by the buffer's undo grouping duration.
1718 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1719 self.transaction_depth += 1;
1720 if self.was_dirty_before_starting_transaction.is_none() {
1721 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1722 }
1723 self.text.start_transaction_at(now)
1724 }
1725
1726 /// Terminates the current transaction, if this is the outermost transaction.
1727 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1728 self.end_transaction_at(Instant::now(), cx)
1729 }
1730
1731 /// Terminates the current transaction, providing the current time. Subsequent transactions
1732 /// that occur within a short period of time will be grouped together. This
1733 /// is controlled by the buffer's undo grouping duration.
1734 pub fn end_transaction_at(
1735 &mut self,
1736 now: Instant,
1737 cx: &mut ModelContext<Self>,
1738 ) -> Option<TransactionId> {
1739 assert!(self.transaction_depth > 0);
1740 self.transaction_depth -= 1;
1741 let was_dirty = if self.transaction_depth == 0 {
1742 self.was_dirty_before_starting_transaction.take().unwrap()
1743 } else {
1744 false
1745 };
1746 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1747 self.did_edit(&start_version, was_dirty, cx);
1748 Some(transaction_id)
1749 } else {
1750 None
1751 }
1752 }
1753
1754 /// Manually add a transaction to the buffer's undo history.
1755 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1756 self.text.push_transaction(transaction, now);
1757 }
1758
1759 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
1761 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1762 self.text.finalize_last_transaction()
1763 }
1764
1765 /// Manually group all changes since a given transaction.
1766 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1767 self.text.group_until_transaction(transaction_id);
1768 }
1769
    /// Manually remove a transaction from the buffer's undo history.
1771 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1772 self.text.forget_transaction(transaction_id);
1773 }
1774
1775 /// Manually merge two adjacent transactions in the buffer's undo history.
1776 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1777 self.text.merge_transactions(transaction, destination);
1778 }
1779
1780 /// Waits for the buffer to receive operations with the given timestamps.
1781 pub fn wait_for_edits(
1782 &mut self,
1783 edit_ids: impl IntoIterator<Item = clock::Lamport>,
1784 ) -> impl Future<Output = Result<()>> {
1785 self.text.wait_for_edits(edit_ids)
1786 }
1787
1788 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
1789 pub fn wait_for_anchors(
1790 &mut self,
1791 anchors: impl IntoIterator<Item = Anchor>,
1792 ) -> impl 'static + Future<Output = Result<()>> {
1793 self.text.wait_for_anchors(anchors)
1794 }
1795
1796 /// Waits for the buffer to receive operations up to the given version.
1797 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1798 self.text.wait_for_version(version)
1799 }
1800
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
    /// or [`Buffer::wait_for_version`] to resolve with an error.
1803 pub fn give_up_waiting(&mut self) {
1804 self.text.give_up_waiting();
1805 }
1806
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
1808 pub fn set_active_selections(
1809 &mut self,
1810 selections: Arc<[Selection<Anchor>]>,
1811 line_mode: bool,
1812 cursor_shape: CursorShape,
1813 cx: &mut ModelContext<Self>,
1814 ) {
1815 let lamport_timestamp = self.text.lamport_clock.tick();
1816 self.remote_selections.insert(
1817 self.text.replica_id(),
1818 SelectionSet {
1819 selections: selections.clone(),
1820 lamport_timestamp,
1821 line_mode,
1822 cursor_shape,
1823 },
1824 );
1825 self.send_operation(
1826 Operation::UpdateSelections {
1827 selections,
1828 line_mode,
1829 lamport_timestamp,
1830 cursor_shape,
1831 },
1832 true,
1833 cx,
1834 );
1835 self.non_text_state_update_count += 1;
1836 cx.notify();
1837 }
1838
1839 /// Clears the selections, so that other replicas of the buffer do not see any selections for
1840 /// this replica.
1841 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1842 if self
1843 .remote_selections
1844 .get(&self.text.replica_id())
1845 .map_or(true, |set| !set.selections.is_empty())
1846 {
1847 self.set_active_selections(Arc::default(), false, Default::default(), cx);
1848 }
1849 }
1850
1851 /// Replaces the buffer's entire text.
1852 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Lamport>
1853 where
1854 T: Into<Arc<str>>,
1855 {
1856 self.autoindent_requests.clear();
1857 self.edit([(0..self.len(), text)], None, cx)
1858 }
1859
1860 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
1861 /// delete, and a string of text to insert at that location.
1862 ///
1863 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
1864 /// request for the edited ranges, which will be processed when the buffer finishes
1865 /// parsing.
1866 ///
    /// Parsing takes place at the end of a transaction, and may run synchronously
    /// or asynchronously, depending on the changes.
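    ///
    /// A usage sketch (marked `ignore` since it needs a live `ModelContext`); `buffer` and `cx`
    /// are assumed:
    ///
    /// ```ignore
    /// // Replace the first three bytes and insert a new line at offset 10,
    /// // auto-indenting every newly inserted line.
    /// buffer.edit(
    ///     [(0..3, "let"), (10..10, "\n    value")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```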
1869 pub fn edit<I, S, T>(
1870 &mut self,
1871 edits_iter: I,
1872 autoindent_mode: Option<AutoindentMode>,
1873 cx: &mut ModelContext<Self>,
1874 ) -> Option<clock::Lamport>
1875 where
1876 I: IntoIterator<Item = (Range<S>, T)>,
1877 S: ToOffset,
1878 T: Into<Arc<str>>,
1879 {
1880 // Skip invalid edits and coalesce contiguous ones.
1881 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1882 for (range, new_text) in edits_iter {
1883 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1884 if range.start > range.end {
1885 mem::swap(&mut range.start, &mut range.end);
1886 }
1887 let new_text = new_text.into();
1888 if !new_text.is_empty() || !range.is_empty() {
1889 if let Some((prev_range, prev_text)) = edits.last_mut() {
1890 if prev_range.end >= range.start {
1891 prev_range.end = cmp::max(prev_range.end, range.end);
1892 *prev_text = format!("{prev_text}{new_text}").into();
1893 } else {
1894 edits.push((range, new_text));
1895 }
1896 } else {
1897 edits.push((range, new_text));
1898 }
1899 }
1900 }
1901 if edits.is_empty() {
1902 return None;
1903 }
1904
1905 self.start_transaction();
1906 self.pending_autoindent.take();
1907 let autoindent_request = autoindent_mode
1908 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1909
1910 let edit_operation = self.text.edit(edits.iter().cloned());
1911 let edit_id = edit_operation.timestamp();
1912
1913 if let Some((before_edit, mode)) = autoindent_request {
1914 let mut delta = 0isize;
1915 let entries = edits
1916 .into_iter()
1917 .enumerate()
1918 .zip(&edit_operation.as_edit().unwrap().new_text)
1919 .map(|((ix, (range, _)), new_text)| {
1920 let new_text_length = new_text.len();
1921 let old_start = range.start.to_point(&before_edit);
1922 let new_start = (delta + range.start as isize) as usize;
1923 let range_len = range.end - range.start;
1924 delta += new_text_length as isize - range_len as isize;
1925
1926 // Decide what range of the insertion to auto-indent, and whether
1927 // the first line of the insertion should be considered a newly-inserted line
1928 // or an edit to an existing line.
1929 let mut range_of_insertion_to_indent = 0..new_text_length;
1930 let mut first_line_is_new = true;
1931
1932 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
1933 let old_line_end = before_edit.line_len(old_start.row);
1934
1935 if old_start.column > old_line_start {
1936 first_line_is_new = false;
1937 }
1938
1939 if !new_text.contains('\n')
1940 && (old_start.column + (range_len as u32) < old_line_end
1941 || old_line_end == old_line_start)
1942 {
1943 first_line_is_new = false;
1944 }
1945
1946 // When inserting text starting with a newline, avoid auto-indenting the
1947 // previous line.
1948 if new_text.starts_with('\n') {
1949 range_of_insertion_to_indent.start += 1;
1950 first_line_is_new = true;
1951 }
1952
1953 let mut original_indent_column = None;
1954 if let AutoindentMode::Block {
1955 original_indent_columns,
1956 } = &mode
1957 {
1958 original_indent_column =
1959 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
1960 indent_size_for_text(
1961 new_text[range_of_insertion_to_indent.clone()].chars(),
1962 )
1963 .len
1964 }));
1965
1966 // Avoid auto-indenting the line after the edit.
1967 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
1968 range_of_insertion_to_indent.end -= 1;
1969 }
1970 }
1971
1972 AutoindentRequestEntry {
1973 first_line_is_new,
1974 original_indent_column,
1975 indent_size: before_edit.language_indent_size_at(range.start, cx),
1976 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
1977 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
1978 }
1979 })
1980 .collect();
1981
1982 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1983 before_edit,
1984 entries,
1985 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
1986 ignore_empty_lines: false,
1987 }));
1988 }
1989
1990 self.end_transaction(cx);
1991 self.send_operation(Operation::Buffer(edit_operation), true, cx);
1992 Some(edit_id)
1993 }
1994
1995 fn did_edit(
1996 &mut self,
1997 old_version: &clock::Global,
1998 was_dirty: bool,
1999 cx: &mut ModelContext<Self>,
2000 ) {
2001 if self.edits_since::<usize>(old_version).next().is_none() {
2002 return;
2003 }
2004
2005 self.reparse(cx);
2006
2007 cx.emit(BufferEvent::Edited);
2008 if was_dirty != self.is_dirty() {
2009 cx.emit(BufferEvent::DirtyChanged);
2010 }
2011 cx.notify();
2012 }
2013
2014 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut ModelContext<Self>)
2015 where
2016 I: IntoIterator<Item = Range<T>>,
2017 T: ToOffset + Copy,
2018 {
2019 let before_edit = self.snapshot();
2020 let entries = ranges
2021 .into_iter()
2022 .map(|range| AutoindentRequestEntry {
2023 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2024 first_line_is_new: true,
2025 indent_size: before_edit.language_indent_size_at(range.start, cx),
2026 original_indent_column: None,
2027 })
2028 .collect();
2029 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2030 before_edit,
2031 entries,
2032 is_block_mode: false,
2033 ignore_empty_lines: true,
2034 }));
2035 self.request_autoindent(cx);
2036 }
2037
    /// Inserts newlines at the given position to create an empty line, returning the start of
    /// the new line. You can also request the insertion of empty lines above and below the line
    /// starting at the returned point.
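    ///
    /// A usage sketch (marked `ignore` since it needs a live `ModelContext`); `buffer` and `cx`
    /// are assumed:
    ///
    /// ```ignore
    /// // Create an empty line at row 4, column 2, padded with a blank line above and below.
    /// let new_line_start = buffer.insert_empty_line(Point::new(4, 2), true, true, cx);
    /// ```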
2040 pub fn insert_empty_line(
2041 &mut self,
2042 position: impl ToPoint,
2043 space_above: bool,
2044 space_below: bool,
2045 cx: &mut ModelContext<Self>,
2046 ) -> Point {
2047 let mut position = position.to_point(self);
2048
2049 self.start_transaction();
2050
2051 self.edit(
2052 [(position..position, "\n")],
2053 Some(AutoindentMode::EachLine),
2054 cx,
2055 );
2056
2057 if position.column > 0 {
2058 position += Point::new(1, 0);
2059 }
2060
2061 if !self.is_line_blank(position.row) {
2062 self.edit(
2063 [(position..position, "\n")],
2064 Some(AutoindentMode::EachLine),
2065 cx,
2066 );
2067 }
2068
2069 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2070 self.edit(
2071 [(position..position, "\n")],
2072 Some(AutoindentMode::EachLine),
2073 cx,
2074 );
2075 position.row += 1;
2076 }
2077
2078 if space_below
2079 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2080 {
2081 self.edit(
2082 [(position..position, "\n")],
2083 Some(AutoindentMode::EachLine),
2084 cx,
2085 );
2086 }
2087
2088 self.end_transaction(cx);
2089
2090 position
2091 }
2092
2093 /// Applies the given remote operations to the buffer.
2094 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
2095 &mut self,
2096 ops: I,
2097 cx: &mut ModelContext<Self>,
2098 ) {
2099 self.pending_autoindent.take();
2100 let was_dirty = self.is_dirty();
2101 let old_version = self.version.clone();
2102 let mut deferred_ops = Vec::new();
2103 let buffer_ops = ops
2104 .into_iter()
2105 .filter_map(|op| match op {
2106 Operation::Buffer(op) => Some(op),
2107 _ => {
2108 if self.can_apply_op(&op) {
2109 self.apply_op(op, cx);
2110 } else {
2111 deferred_ops.push(op);
2112 }
2113 None
2114 }
2115 })
2116 .collect::<Vec<_>>();
2117 for operation in buffer_ops.iter() {
2118 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2119 }
2120 self.text.apply_ops(buffer_ops);
2121 self.deferred_ops.insert(deferred_ops);
2122 self.flush_deferred_ops(cx);
2123 self.did_edit(&old_version, was_dirty, cx);
2124 // Notify independently of whether the buffer was edited as the operations could include a
2125 // selection update.
2126 cx.notify();
2127 }
2128
2129 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
2130 let mut deferred_ops = Vec::new();
2131 for op in self.deferred_ops.drain().iter().cloned() {
2132 if self.can_apply_op(&op) {
2133 self.apply_op(op, cx);
2134 } else {
2135 deferred_ops.push(op);
2136 }
2137 }
2138 self.deferred_ops.insert(deferred_ops);
2139 }
2140
2141 pub fn has_deferred_ops(&self) -> bool {
2142 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2143 }
2144
2145 fn can_apply_op(&self, operation: &Operation) -> bool {
2146 match operation {
2147 Operation::Buffer(_) => {
2148 unreachable!("buffer operations should never be applied at this layer")
2149 }
2150 Operation::UpdateDiagnostics {
2151 diagnostics: diagnostic_set,
2152 ..
2153 } => diagnostic_set.iter().all(|diagnostic| {
2154 self.text.can_resolve(&diagnostic.range.start)
2155 && self.text.can_resolve(&diagnostic.range.end)
2156 }),
2157 Operation::UpdateSelections { selections, .. } => selections
2158 .iter()
2159 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2160 Operation::UpdateCompletionTriggers { .. } => true,
2161 }
2162 }
2163
2164 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
2165 match operation {
2166 Operation::Buffer(_) => {
2167 unreachable!("buffer operations should never be applied at this layer")
2168 }
2169 Operation::UpdateDiagnostics {
2170 server_id,
2171 diagnostics: diagnostic_set,
2172 lamport_timestamp,
2173 } => {
2174 let snapshot = self.snapshot();
2175 self.apply_diagnostic_update(
2176 server_id,
2177 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2178 lamport_timestamp,
2179 cx,
2180 );
2181 }
2182 Operation::UpdateSelections {
2183 selections,
2184 lamport_timestamp,
2185 line_mode,
2186 cursor_shape,
2187 } => {
2188 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2189 if set.lamport_timestamp > lamport_timestamp {
2190 return;
2191 }
2192 }
2193
2194 self.remote_selections.insert(
2195 lamport_timestamp.replica_id,
2196 SelectionSet {
2197 selections,
2198 lamport_timestamp,
2199 line_mode,
2200 cursor_shape,
2201 },
2202 );
2203 self.text.lamport_clock.observe(lamport_timestamp);
2204 self.non_text_state_update_count += 1;
2205 }
2206 Operation::UpdateCompletionTriggers {
2207 triggers,
2208 lamport_timestamp,
2209 server_id,
2210 } => {
2211 if triggers.is_empty() {
2212 self.completion_triggers_per_language_server
2213 .remove(&server_id);
2214 self.completion_triggers = self
2215 .completion_triggers_per_language_server
2216 .values()
2217 .flat_map(|triggers| triggers.into_iter().cloned())
2218 .collect();
2219 } else {
2220 self.completion_triggers_per_language_server
2221 .insert(server_id, triggers.iter().cloned().collect());
2222 self.completion_triggers.extend(triggers);
2223 }
2224 self.text.lamport_clock.observe(lamport_timestamp);
2225 }
2226 }
2227 }
2228
2229 fn apply_diagnostic_update(
2230 &mut self,
2231 server_id: LanguageServerId,
2232 diagnostics: DiagnosticSet,
2233 lamport_timestamp: clock::Lamport,
2234 cx: &mut ModelContext<Self>,
2235 ) {
2236 if lamport_timestamp > self.diagnostics_timestamp {
2237 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2238 if diagnostics.is_empty() {
2239 if let Ok(ix) = ix {
2240 self.diagnostics.remove(ix);
2241 }
2242 } else {
2243 match ix {
2244 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2245 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2246 };
2247 }
2248 self.diagnostics_timestamp = lamport_timestamp;
2249 self.non_text_state_update_count += 1;
2250 self.text.lamport_clock.observe(lamport_timestamp);
2251 cx.notify();
2252 cx.emit(BufferEvent::DiagnosticsUpdated);
2253 }
2254 }
2255
2256 fn send_operation(&self, operation: Operation, is_local: bool, cx: &mut ModelContext<Self>) {
2257 cx.emit(BufferEvent::Operation {
2258 operation,
2259 is_local,
2260 });
2261 }
2262
2263 /// Removes the selections for a given peer.
2264 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
2265 self.remote_selections.remove(&replica_id);
2266 cx.notify();
2267 }
2268
2269 /// Undoes the most recent transaction.
2270 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2271 let was_dirty = self.is_dirty();
2272 let old_version = self.version.clone();
2273
2274 if let Some((transaction_id, operation)) = self.text.undo() {
2275 self.send_operation(Operation::Buffer(operation), true, cx);
2276 self.did_edit(&old_version, was_dirty, cx);
2277 Some(transaction_id)
2278 } else {
2279 None
2280 }
2281 }
2282
2283 /// Manually undoes a specific transaction in the buffer's undo history.
2284 pub fn undo_transaction(
2285 &mut self,
2286 transaction_id: TransactionId,
2287 cx: &mut ModelContext<Self>,
2288 ) -> bool {
2289 let was_dirty = self.is_dirty();
2290 let old_version = self.version.clone();
2291 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2292 self.send_operation(Operation::Buffer(operation), true, cx);
2293 self.did_edit(&old_version, was_dirty, cx);
2294 true
2295 } else {
2296 false
2297 }
2298 }
2299
2300 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2301 pub fn undo_to_transaction(
2302 &mut self,
2303 transaction_id: TransactionId,
2304 cx: &mut ModelContext<Self>,
2305 ) -> bool {
2306 let was_dirty = self.is_dirty();
2307 let old_version = self.version.clone();
2308
2309 let operations = self.text.undo_to_transaction(transaction_id);
2310 let undone = !operations.is_empty();
2311 for operation in operations {
2312 self.send_operation(Operation::Buffer(operation), true, cx);
2313 }
2314 if undone {
2315 self.did_edit(&old_version, was_dirty, cx)
2316 }
2317 undone
2318 }
2319
2320 pub fn undo_operations(
2321 &mut self,
2322 counts: HashMap<Lamport, u32>,
2323 cx: &mut ModelContext<Buffer>,
2324 ) {
2325 let was_dirty = self.is_dirty();
2326 let operation = self.text.undo_operations(counts);
2327 let old_version = self.version.clone();
2328 self.send_operation(Operation::Buffer(operation), true, cx);
2329 self.did_edit(&old_version, was_dirty, cx);
2330 }
2331
    /// Redoes the most recently undone transaction.
2333 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
2334 let was_dirty = self.is_dirty();
2335 let old_version = self.version.clone();
2336
2337 if let Some((transaction_id, operation)) = self.text.redo() {
2338 self.send_operation(Operation::Buffer(operation), true, cx);
2339 self.did_edit(&old_version, was_dirty, cx);
2340 Some(transaction_id)
2341 } else {
2342 None
2343 }
2344 }
2345
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2347 pub fn redo_to_transaction(
2348 &mut self,
2349 transaction_id: TransactionId,
2350 cx: &mut ModelContext<Self>,
2351 ) -> bool {
2352 let was_dirty = self.is_dirty();
2353 let old_version = self.version.clone();
2354
2355 let operations = self.text.redo_to_transaction(transaction_id);
2356 let redone = !operations.is_empty();
2357 for operation in operations {
2358 self.send_operation(Operation::Buffer(operation), true, cx);
2359 }
2360 if redone {
2361 self.did_edit(&old_version, was_dirty, cx)
2362 }
2363 redone
2364 }
2365
2366 /// Override current completion triggers with the user-provided completion triggers.
2367 pub fn set_completion_triggers(
2368 &mut self,
2369 server_id: LanguageServerId,
2370 triggers: BTreeSet<String>,
2371 cx: &mut ModelContext<Self>,
2372 ) {
2373 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2374 if triggers.is_empty() {
2375 self.completion_triggers_per_language_server
2376 .remove(&server_id);
2377 self.completion_triggers = self
2378 .completion_triggers_per_language_server
2379 .values()
2380 .flat_map(|triggers| triggers.into_iter().cloned())
2381 .collect();
2382 } else {
2383 self.completion_triggers_per_language_server
2384 .insert(server_id, triggers.clone());
2385 self.completion_triggers.extend(triggers.iter().cloned());
2386 }
2387 self.send_operation(
2388 Operation::UpdateCompletionTriggers {
2389 triggers: triggers.iter().cloned().collect(),
2390 lamport_timestamp: self.completion_triggers_timestamp,
2391 server_id,
2392 },
2393 true,
2394 cx,
2395 );
2396 cx.notify();
2397 }
2398
2399 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2401 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2402 &self.completion_triggers
2403 }
2404
2405 /// Call this directly after performing edits to prevent the preview tab
2406 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2407 /// to return false until there are additional edits.
2408 pub fn refresh_preview(&mut self) {
2409 self.preview_version = self.version.clone();
2410 }
2411
2412 /// Whether we should preserve the preview status of a tab containing this buffer.
2413 pub fn preserve_preview(&self) -> bool {
2414 !self.has_edits_since(&self.preview_version)
2415 }
2416}
2417
2418#[doc(hidden)]
2419#[cfg(any(test, feature = "test-support"))]
2420impl Buffer {
2421 pub fn edit_via_marked_text(
2422 &mut self,
2423 marked_string: &str,
2424 autoindent_mode: Option<AutoindentMode>,
2425 cx: &mut ModelContext<Self>,
2426 ) {
2427 let edits = self.edits_for_marked_text(marked_string);
2428 self.edit(edits, autoindent_mode, cx);
2429 }
2430
2431 pub fn set_group_interval(&mut self, group_interval: Duration) {
2432 self.text.set_group_interval(group_interval);
2433 }
2434
2435 pub fn randomly_edit<T>(
2436 &mut self,
2437 rng: &mut T,
2438 old_range_count: usize,
2439 cx: &mut ModelContext<Self>,
2440 ) where
2441 T: rand::Rng,
2442 {
2443 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2444 let mut last_end = None;
2445 for _ in 0..old_range_count {
2446 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2447 break;
2448 }
2449
2450 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2451 let mut range = self.random_byte_range(new_start, rng);
2452 if rng.gen_bool(0.2) {
2453 mem::swap(&mut range.start, &mut range.end);
2454 }
2455 last_end = Some(range.end);
2456
2457 let new_text_len = rng.gen_range(0..10);
2458 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2459
2460 edits.push((range, new_text));
2461 }
2462 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2463 self.edit(edits, None, cx);
2464 }
2465
2466 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
2467 let was_dirty = self.is_dirty();
2468 let old_version = self.version.clone();
2469
2470 let ops = self.text.randomly_undo_redo(rng);
2471 if !ops.is_empty() {
2472 for op in ops {
2473 self.send_operation(Operation::Buffer(op), true, cx);
2474 self.did_edit(&old_version, was_dirty, cx);
2475 }
2476 }
2477 }
2478}
2479
2480impl EventEmitter<BufferEvent> for Buffer {}
2481
2482impl Deref for Buffer {
2483 type Target = TextBuffer;
2484
2485 fn deref(&self) -> &Self::Target {
2486 &self.text
2487 }
2488}
2489
2490impl BufferSnapshot {
    /// Returns [`IndentSize`] for a given line that respects user settings and
    /// language preferences.
2492 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2493 indent_size_for_line(self, row)
2494 }
2495 /// Returns [`IndentSize`] for a given position that respects user settings
2496 /// and language preferences.
2497 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
2498 let settings = language_settings(
2499 self.language_at(position).map(|l| l.name()),
2500 self.file(),
2501 cx,
2502 );
2503 if settings.hard_tabs {
2504 IndentSize::tab()
2505 } else {
2506 IndentSize::spaces(settings.tab_size.get())
2507 }
2508 }
2509
2510 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2511 /// is passed in as `single_indent_size`.
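    ///
    /// A usage sketch, assuming a `snapshot` of this buffer and a 4-space indent unit (marked
    /// `ignore` since it needs a populated snapshot):
    ///
    /// ```ignore
    /// let suggestions = snapshot.suggested_indents(2..5, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: {} column(s) of indentation", indent.len);
    /// }
    /// ```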
2512 pub fn suggested_indents(
2513 &self,
2514 rows: impl Iterator<Item = u32>,
2515 single_indent_size: IndentSize,
2516 ) -> BTreeMap<u32, IndentSize> {
2517 let mut result = BTreeMap::new();
2518
2519 for row_range in contiguous_ranges(rows, 10) {
2520 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2521 Some(suggestions) => suggestions,
2522 _ => break,
2523 };
2524
2525 for (row, suggestion) in row_range.zip(suggestions) {
2526 let indent_size = if let Some(suggestion) = suggestion {
2527 result
2528 .get(&suggestion.basis_row)
2529 .copied()
2530 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2531 .with_delta(suggestion.delta, single_indent_size)
2532 } else {
2533 self.indent_size_for_line(row)
2534 };
2535
2536 result.insert(row, indent_size);
2537 }
2538 }
2539
2540 result
2541 }
2542
2543 fn suggest_autoindents(
2544 &self,
2545 row_range: Range<u32>,
2546 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2547 let config = &self.language.as_ref()?.config;
2548 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2549
2550 // Find the suggested indentation ranges based on the syntax tree.
2551 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2552 let end = Point::new(row_range.end, 0);
2553 let range = (start..end).to_offset(&self.text);
2554 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2555 Some(&grammar.indents_config.as_ref()?.query)
2556 });
2557 let indent_configs = matches
2558 .grammars()
2559 .iter()
2560 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2561 .collect::<Vec<_>>();
2562
2563 let mut indent_ranges = Vec::<Range<Point>>::new();
2564 let mut outdent_positions = Vec::<Point>::new();
2565 while let Some(mat) = matches.peek() {
2566 let mut start: Option<Point> = None;
2567 let mut end: Option<Point> = None;
2568
2569 let config = &indent_configs[mat.grammar_index];
2570 for capture in mat.captures {
2571 if capture.index == config.indent_capture_ix {
2572 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2573 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2574 } else if Some(capture.index) == config.start_capture_ix {
2575 start = Some(Point::from_ts_point(capture.node.end_position()));
2576 } else if Some(capture.index) == config.end_capture_ix {
2577 end = Some(Point::from_ts_point(capture.node.start_position()));
2578 } else if Some(capture.index) == config.outdent_capture_ix {
2579 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2580 }
2581 }
2582
2583 matches.advance();
2584 if let Some((start, end)) = start.zip(end) {
2585 if start.row == end.row {
2586 continue;
2587 }
2588
2589 let range = start..end;
2590 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2591 Err(ix) => indent_ranges.insert(ix, range),
2592 Ok(ix) => {
2593 let prev_range = &mut indent_ranges[ix];
2594 prev_range.end = prev_range.end.max(range.end);
2595 }
2596 }
2597 }
2598 }
2599
2600 let mut error_ranges = Vec::<Range<Point>>::new();
2601 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2602 Some(&grammar.error_query)
2603 });
2604 while let Some(mat) = matches.peek() {
2605 let node = mat.captures[0].node;
2606 let start = Point::from_ts_point(node.start_position());
2607 let end = Point::from_ts_point(node.end_position());
2608 let range = start..end;
2609 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2610 Ok(ix) | Err(ix) => ix,
2611 };
2612 let mut end_ix = ix;
2613 while let Some(existing_range) = error_ranges.get(end_ix) {
2614 if existing_range.end < end {
2615 end_ix += 1;
2616 } else {
2617 break;
2618 }
2619 }
2620 error_ranges.splice(ix..end_ix, [range]);
2621 matches.advance();
2622 }
2623
2624 outdent_positions.sort();
2625 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and set its end to the outdent position.
2628 if let Some(range_to_truncate) = indent_ranges
2629 .iter_mut()
2630 .filter(|indent_range| indent_range.contains(&outdent_position))
2631 .last()
2632 {
2633 range_to_truncate.end = outdent_position;
2634 }
2635 }
2636
        // Find the suggested indentation increases and decreases based on regexes.
2638 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2639 self.for_each_line(
2640 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2641 ..Point::new(row_range.end, 0),
2642 |row, line| {
2643 if config
2644 .decrease_indent_pattern
2645 .as_ref()
2646 .map_or(false, |regex| regex.is_match(line))
2647 {
2648 indent_change_rows.push((row, Ordering::Less));
2649 }
2650 if config
2651 .increase_indent_pattern
2652 .as_ref()
2653 .map_or(false, |regex| regex.is_match(line))
2654 {
2655 indent_change_rows.push((row + 1, Ordering::Greater));
2656 }
2657 },
2658 );
2659
2660 let mut indent_changes = indent_change_rows.into_iter().peekable();
2661 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2662 prev_non_blank_row.unwrap_or(0)
2663 } else {
2664 row_range.start.saturating_sub(1)
2665 };
2666 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2667 Some(row_range.map(move |row| {
2668 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2669
2670 let mut indent_from_prev_row = false;
2671 let mut outdent_from_prev_row = false;
2672 let mut outdent_to_row = u32::MAX;
2673
2674 while let Some((indent_row, delta)) = indent_changes.peek() {
2675 match indent_row.cmp(&row) {
2676 Ordering::Equal => match delta {
2677 Ordering::Less => outdent_from_prev_row = true,
2678 Ordering::Greater => indent_from_prev_row = true,
2679 _ => {}
2680 },
2681
2682 Ordering::Greater => break,
2683 Ordering::Less => {}
2684 }
2685
2686 indent_changes.next();
2687 }
2688
2689 for range in &indent_ranges {
2690 if range.start.row >= row {
2691 break;
2692 }
2693 if range.start.row == prev_row && range.end > row_start {
2694 indent_from_prev_row = true;
2695 }
2696 if range.end > prev_row_start && range.end <= row_start {
2697 outdent_to_row = outdent_to_row.min(range.start.row);
2698 }
2699 }
2700
2701 let within_error = error_ranges
2702 .iter()
2703 .any(|e| e.start.row < row && e.end > row_start);
2704
2705 let suggestion = if outdent_to_row == prev_row
2706 || (outdent_from_prev_row && indent_from_prev_row)
2707 {
2708 Some(IndentSuggestion {
2709 basis_row: prev_row,
2710 delta: Ordering::Equal,
2711 within_error,
2712 })
2713 } else if indent_from_prev_row {
2714 Some(IndentSuggestion {
2715 basis_row: prev_row,
2716 delta: Ordering::Greater,
2717 within_error,
2718 })
2719 } else if outdent_to_row < prev_row {
2720 Some(IndentSuggestion {
2721 basis_row: outdent_to_row,
2722 delta: Ordering::Equal,
2723 within_error,
2724 })
2725 } else if outdent_from_prev_row {
2726 Some(IndentSuggestion {
2727 basis_row: prev_row,
2728 delta: Ordering::Less,
2729 within_error,
2730 })
2731 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2732 {
2733 Some(IndentSuggestion {
2734 basis_row: prev_row,
2735 delta: Ordering::Equal,
2736 within_error,
2737 })
2738 } else {
2739 None
2740 };
2741
2742 prev_row = row;
2743 prev_row_start = row_start;
2744 suggestion
2745 }))
2746 }
2747
2748 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2749 while row > 0 {
2750 row -= 1;
2751 if !self.is_line_blank(row) {
2752 return Some(row);
2753 }
2754 }
2755 None
2756 }
2757
2758 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
2759 let captures = self.syntax.captures(range, &self.text, |grammar| {
2760 grammar.highlights_query.as_ref()
2761 });
2762 let highlight_maps = captures
2763 .grammars()
2764 .iter()
2765 .map(|grammar| grammar.highlight_map())
2766 .collect();
2767 (captures, highlight_maps)
2768 }
2769
2770 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
2771 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
2772 /// returned in chunks where each chunk has a single syntax highlighting style and
2773 /// diagnostic status.
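    ///
    /// A sketch of collecting the text of a range along with its highlights, assuming a
    /// `snapshot` of this buffer (marked `ignore` since it needs a populated snapshot):
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // `chunk.syntax_highlight_id` identifies the highlight style for this chunk.
    ///     text.push_str(chunk.text);
    /// }
    /// ```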
2774 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2775 let range = range.start.to_offset(self)..range.end.to_offset(self);
2776
2777 let mut syntax = None;
2778 if language_aware {
2779 syntax = Some(self.get_highlights(range.clone()));
2780 }
2781 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
2782 let diagnostics = language_aware;
2783 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
2784 }
2785
2786 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
2788 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2789 let mut line = String::new();
2790 let mut row = range.start.row;
2791 for chunk in self
2792 .as_rope()
2793 .chunks_in_range(range.to_offset(self))
2794 .chain(["\n"])
2795 {
2796 for (newline_ix, text) in chunk.split('\n').enumerate() {
2797 if newline_ix > 0 {
2798 callback(row, &line);
2799 row += 1;
2800 line.clear();
2801 }
2802 line.push_str(text);
2803 }
2804 }
2805 }
2806
2807 /// Iterates over every [`SyntaxLayer`] in the buffer.
2808 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
2809 self.syntax
2810 .layers_for_range(0..self.len(), &self.text, true)
2811 }
2812
2813 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
2814 let offset = position.to_offset(self);
2815 self.syntax
2816 .layers_for_range(offset..offset, &self.text, false)
2817 .filter(|l| l.node().end_byte() > offset)
2818 .last()
2819 }
2820
2821 /// Returns the main [`Language`].
2822 pub fn language(&self) -> Option<&Arc<Language>> {
2823 self.language.as_ref()
2824 }
2825
2826 /// Returns the [`Language`] at the given location.
2827 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2828 self.syntax_layer_at(position)
2829 .map(|info| info.language)
2830 .or(self.language.as_ref())
2831 }
2832
2833 /// Returns the settings for the language at the given location.
2834 pub fn settings_at<'a, D: ToOffset>(
2835 &'a self,
2836 position: D,
2837 cx: &'a AppContext,
2838 ) -> Cow<'a, LanguageSettings> {
2839 language_settings(
2840 self.language_at(position).map(|l| l.name()),
2841 self.file.as_ref(),
2842 cx,
2843 )
2844 }
2845
2846 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
2847 CharClassifier::new(self.language_scope_at(point))
2848 }
2849
2850 /// Returns the [`LanguageScope`] at the given location.
2851 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2852 let offset = position.to_offset(self);
2853 let mut scope = None;
2854 let mut smallest_range: Option<Range<usize>> = None;
2855
2856 // Use the layer that has the smallest node intersecting the given point.
2857 for layer in self
2858 .syntax
2859 .layers_for_range(offset..offset, &self.text, false)
2860 {
2861 let mut cursor = layer.node().walk();
2862
2863 let mut range = None;
2864 loop {
2865 let child_range = cursor.node().byte_range();
2866 if !child_range.to_inclusive().contains(&offset) {
2867 break;
2868 }
2869
2870 range = Some(child_range);
2871 if cursor.goto_first_child_for_byte(offset).is_none() {
2872 break;
2873 }
2874 }
2875
2876 if let Some(range) = range {
2877 if smallest_range
2878 .as_ref()
2879 .map_or(true, |smallest_range| range.len() < smallest_range.len())
2880 {
2881 smallest_range = Some(range);
2882 scope = Some(LanguageScope {
2883 language: layer.language.clone(),
2884 override_id: layer.override_id(offset, &self.text),
2885 });
2886 }
2887 }
2888 }
2889
2890 scope.or_else(|| {
2891 self.language.clone().map(|language| LanguageScope {
2892 language,
2893 override_id: None,
2894 })
2895 })
2896 }
2897
2898 /// Returns a tuple of the range and character kind of the word
2899 /// surrounding the given position.
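    ///
    /// A usage sketch, assuming a `snapshot` of this buffer (marked `ignore` since it needs a
    /// populated snapshot):
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(10);
    /// let word: String = snapshot.text_for_range(range).collect();
    /// ```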
2900 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2901 let mut start = start.to_offset(self);
2902 let mut end = start;
2903 let mut next_chars = self.chars_at(start).peekable();
2904 let mut prev_chars = self.reversed_chars_at(start).peekable();
2905
2906 let classifier = self.char_classifier_at(start);
2907 let word_kind = cmp::max(
2908 prev_chars.peek().copied().map(|c| classifier.kind(c)),
2909 next_chars.peek().copied().map(|c| classifier.kind(c)),
2910 );
2911
2912 for ch in prev_chars {
2913 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
2914 start -= ch.len_utf8();
2915 } else {
2916 break;
2917 }
2918 }
2919
2920 for ch in next_chars {
2921 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
2922 end += ch.len_utf8();
2923 } else {
2924 break;
2925 }
2926 }
2927
2928 (start..end, word_kind)
2929 }
2930
    /// Returns the range for the closest syntax node enclosing the given range.
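    ///
    /// A sketch of growing a selection to the enclosing syntax node, assuming a `snapshot` of
    /// this buffer (marked `ignore` since it needs a populated snapshot):
    ///
    /// ```ignore
    /// let mut selection = 42..42;
    /// if let Some(ancestor) = snapshot.range_for_syntax_ancestor(selection.clone()) {
    ///     selection = ancestor;
    /// }
    /// ```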
2932 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2933 let range = range.start.to_offset(self)..range.end.to_offset(self);
2934 let mut result: Option<Range<usize>> = None;
2935 'outer: for layer in self
2936 .syntax
2937 .layers_for_range(range.clone(), &self.text, true)
2938 {
2939 let mut cursor = layer.node().walk();
2940
2941 // Descend to the first leaf that touches the start of the range,
2942 // and if the range is non-empty, extends beyond the start.
2943 while cursor.goto_first_child_for_byte(range.start).is_some() {
2944 if !range.is_empty() && cursor.node().end_byte() == range.start {
2945 cursor.goto_next_sibling();
2946 }
2947 }
2948
2949 // Ascend to the smallest ancestor that strictly contains the range.
2950 loop {
2951 let node_range = cursor.node().byte_range();
2952 if node_range.start <= range.start
2953 && node_range.end >= range.end
2954 && node_range.len() > range.len()
2955 {
2956 break;
2957 }
2958 if !cursor.goto_parent() {
2959 continue 'outer;
2960 }
2961 }
2962
2963 let left_node = cursor.node();
2964 let mut layer_result = left_node.byte_range();
2965
2966 // For an empty range, try to find another node immediately to the right of the range.
2967 if left_node.end_byte() == range.start {
2968 let mut right_node = None;
2969 while !cursor.goto_next_sibling() {
2970 if !cursor.goto_parent() {
2971 break;
2972 }
2973 }
2974
2975 while cursor.node().start_byte() == range.start {
2976 right_node = Some(cursor.node());
2977 if !cursor.goto_first_child() {
2978 break;
2979 }
2980 }
2981
2982 // If there is a candidate node on both sides of the (empty) range, then
2983 // decide between the two by favoring a named node over an anonymous token.
2984 // If both nodes are the same in that regard, favor the right one.
2985 if let Some(right_node) = right_node {
2986 if right_node.is_named() || !left_node.is_named() {
2987 layer_result = right_node.byte_range();
2988 }
2989 }
2990 }
2991
2992 if let Some(previous_result) = &result {
2993 if previous_result.len() < layer_result.len() {
2994 continue;
2995 }
2996 }
2997 result = Some(layer_result);
2998 }
2999
3000 result
3001 }
3002
3003 /// Returns the outline for the buffer.
3004 ///
3005 /// This method allows passing an optional [`SyntaxTheme`] to
3006 /// syntax-highlight the returned symbols.
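    ///
    /// A sketch of printing an indented outline via the underlying
    /// [`outline_items_containing`](Self::outline_items_containing), assuming a `snapshot` of
    /// this buffer (marked `ignore` since it needs a populated snapshot):
    ///
    /// ```ignore
    /// if let Some(items) = snapshot.outline_items_containing(0..snapshot.len(), true, None) {
    ///     for item in &items {
    ///         println!("{}{}", "  ".repeat(item.depth), item.text);
    ///     }
    /// }
    /// ```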
3007 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3008 self.outline_items_containing(0..self.len(), true, theme)
3009 .map(Outline::new)
3010 }
3011
3012 /// Returns all the symbols that contain the given position.
3013 ///
3014 /// This method allows passing an optional [`SyntaxTheme`] to
3015 /// syntax-highlight the returned symbols.
3016 pub fn symbols_containing<T: ToOffset>(
3017 &self,
3018 position: T,
3019 theme: Option<&SyntaxTheme>,
3020 ) -> Option<Vec<OutlineItem<Anchor>>> {
3021 let position = position.to_offset(self);
3022 let mut items = self.outline_items_containing(
3023 position.saturating_sub(1)..self.len().min(position + 1),
3024 false,
3025 theme,
3026 )?;
3027 let mut prev_depth = None;
3028 items.retain(|item| {
3029 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3030 prev_depth = Some(item.depth);
3031 result
3032 });
3033 Some(items)
3034 }
3035
3036 pub fn outline_items_containing<T: ToOffset>(
3037 &self,
3038 range: Range<T>,
3039 include_extra_context: bool,
3040 theme: Option<&SyntaxTheme>,
3041 ) -> Option<Vec<OutlineItem<Anchor>>> {
3042 let range = range.to_offset(self);
3043 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3044 grammar.outline_config.as_ref().map(|c| &c.query)
3045 });
3046 let configs = matches
3047 .grammars()
3048 .iter()
3049 .map(|g| g.outline_config.as_ref().unwrap())
3050 .collect::<Vec<_>>();
3051
3052 let mut items = Vec::new();
3053 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3054 while let Some(mat) = matches.peek() {
3055 let config = &configs[mat.grammar_index];
3056 if let Some(item) =
3057 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3058 {
3059 items.push(item);
3060 } else if let Some(capture) = mat
3061 .captures
3062 .iter()
3063 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3064 {
3065 let capture_range = capture.node.start_position()..capture.node.end_position();
3066 let mut capture_row_range =
3067 capture_range.start.row as u32..capture_range.end.row as u32;
3068 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3069 {
3070 capture_row_range.end -= 1;
3071 }
3072 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3073 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3074 last_row_range.end = capture_row_range.end;
3075 } else {
3076 annotation_row_ranges.push(capture_row_range);
3077 }
3078 } else {
3079 annotation_row_ranges.push(capture_row_range);
3080 }
3081 }
3082 matches.advance();
3083 }
3084
3085 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3086
3087 // Assign depths based on containment relationships and convert to anchors.
3088 let mut item_ends_stack = Vec::<Point>::new();
3089 let mut anchor_items = Vec::new();
3090 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3091 for item in items {
3092 while let Some(last_end) = item_ends_stack.last().copied() {
3093 if last_end < item.range.end {
3094 item_ends_stack.pop();
3095 } else {
3096 break;
3097 }
3098 }
3099
3100 let mut annotation_row_range = None;
3101 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3102 let row_preceding_item = item.range.start.row.saturating_sub(1);
3103 if next_annotation_row_range.end < row_preceding_item {
3104 annotation_row_ranges.next();
3105 } else {
3106 if next_annotation_row_range.end == row_preceding_item {
3107 annotation_row_range = Some(next_annotation_row_range.clone());
3108 annotation_row_ranges.next();
3109 }
3110 break;
3111 }
3112 }
3113
3114 anchor_items.push(OutlineItem {
3115 depth: item_ends_stack.len(),
3116 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3117 text: item.text,
3118 highlight_ranges: item.highlight_ranges,
3119 name_ranges: item.name_ranges,
3120 body_range: item.body_range.map(|body_range| {
3121 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3122 }),
3123 annotation_range: annotation_row_range.map(|annotation_range| {
3124 self.anchor_after(Point::new(annotation_range.start, 0))
3125 ..self.anchor_before(Point::new(
3126 annotation_range.end,
3127 self.line_len(annotation_range.end),
3128 ))
3129 }),
3130 });
3131 item_ends_stack.push(item.range.end);
3132 }
3133
3134 Some(anchor_items)
3135 }
3136
3137 fn next_outline_item(
3138 &self,
3139 config: &OutlineConfig,
3140 mat: &SyntaxMapMatch,
3141 range: &Range<usize>,
3142 include_extra_context: bool,
3143 theme: Option<&SyntaxTheme>,
3144 ) -> Option<OutlineItem<Point>> {
3145 let item_node = mat.captures.iter().find_map(|cap| {
3146 if cap.index == config.item_capture_ix {
3147 Some(cap.node)
3148 } else {
3149 None
3150 }
3151 })?;
3152
3153 let item_byte_range = item_node.byte_range();
3154 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3155 return None;
3156 }
3157 let item_point_range = Point::from_ts_point(item_node.start_position())
3158 ..Point::from_ts_point(item_node.end_position());
3159
3160 let mut open_point = None;
3161 let mut close_point = None;
3162 let mut buffer_ranges = Vec::new();
3163 for capture in mat.captures {
3164 let node_is_name;
3165 if capture.index == config.name_capture_ix {
3166 node_is_name = true;
3167 } else if Some(capture.index) == config.context_capture_ix
3168 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3169 {
3170 node_is_name = false;
3171 } else {
3172 if Some(capture.index) == config.open_capture_ix {
3173 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3174 } else if Some(capture.index) == config.close_capture_ix {
3175 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3176 }
3177
3178 continue;
3179 }
3180
3181 let mut range = capture.node.start_byte()..capture.node.end_byte();
3182 let start = capture.node.start_position();
3183 if capture.node.end_position().row > start.row {
3184 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3185 }
3186
3187 if !range.is_empty() {
3188 buffer_ranges.push((range, node_is_name));
3189 }
3190 }
3191 if buffer_ranges.is_empty() {
3192 return None;
3193 }
3194 let mut text = String::new();
3195 let mut highlight_ranges = Vec::new();
3196 let mut name_ranges = Vec::new();
3197 let mut chunks = self.chunks(
3198 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3199 true,
3200 );
3201 let mut last_buffer_range_end = 0;
3202 for (buffer_range, is_name) in buffer_ranges {
3203 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
3204 text.push(' ');
3205 }
3206 last_buffer_range_end = buffer_range.end;
3207 if is_name {
3208 let mut start = text.len();
3209 let end = start + buffer_range.len();
3210
                // When multiple names are captured, the matchable text
                // includes the whitespace in between the names.
3213 if !name_ranges.is_empty() {
3214 start -= 1;
3215 }
3216
3217 name_ranges.push(start..end);
3218 }
3219
3220 let mut offset = buffer_range.start;
3221 chunks.seek(buffer_range.clone());
3222 for mut chunk in chunks.by_ref() {
3223 if chunk.text.len() > buffer_range.end - offset {
3224 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3225 offset = buffer_range.end;
3226 } else {
3227 offset += chunk.text.len();
3228 }
3229 let style = chunk
3230 .syntax_highlight_id
3231 .zip(theme)
3232 .and_then(|(highlight, theme)| highlight.style(theme));
3233 if let Some(style) = style {
3234 let start = text.len();
3235 let end = start + chunk.text.len();
3236 highlight_ranges.push((start..end, style));
3237 }
3238 text.push_str(chunk.text);
3239 if offset >= buffer_range.end {
3240 break;
3241 }
3242 }
3243 }
3244
3245 Some(OutlineItem {
3246 depth: 0, // We'll calculate the depth later
3247 range: item_point_range,
3248 text,
3249 highlight_ranges,
3250 name_ranges,
3251 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3252 annotation_range: None,
3253 })
3254 }
3255
3256 pub fn function_body_fold_ranges<T: ToOffset>(
3257 &self,
3258 within: Range<T>,
3259 ) -> impl Iterator<Item = Range<usize>> + '_ {
3260 self.text_object_ranges(within, TreeSitterOptions::default())
3261 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3262 }
3263
3264 /// For each grammar in the language, runs the provided
3265 /// [`tree_sitter::Query`] against the given range.
3266 pub fn matches(
3267 &self,
3268 range: Range<usize>,
3269 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3270 ) -> SyntaxMapMatches {
3271 self.syntax.matches(range, self, query)
3272 }
3273
    /// Returns bracket range pairs overlapping or adjacent to `range`.
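    ///
    /// A usage sketch, assuming a `snapshot` of this buffer (marked `ignore` since it needs a
    /// populated snapshot):
    ///
    /// ```ignore
    /// for (open, close) in snapshot.bracket_ranges(0..snapshot.len()) {
    ///     let open_text: String = snapshot.text_for_range(open).collect();
    ///     let close_text: String = snapshot.text_for_range(close).collect();
    ///     // e.g. ("{", "}") or ("(", ")")
    /// }
    /// ```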
3275 pub fn bracket_ranges<T: ToOffset>(
3276 &self,
3277 range: Range<T>,
3278 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
3279 // Find bracket pairs that *inclusively* contain the given range.
3280 let range = range.start.to_offset(self).saturating_sub(1)
3281 ..self.len().min(range.end.to_offset(self) + 1);
3282
3283 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3284 grammar.brackets_config.as_ref().map(|c| &c.query)
3285 });
3286 let configs = matches
3287 .grammars()
3288 .iter()
3289 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3290 .collect::<Vec<_>>();
3291
3292 iter::from_fn(move || {
3293 while let Some(mat) = matches.peek() {
3294 let mut open = None;
3295 let mut close = None;
3296 let config = &configs[mat.grammar_index];
3297 for capture in mat.captures {
3298 if capture.index == config.open_capture_ix {
3299 open = Some(capture.node.byte_range());
3300 } else if capture.index == config.close_capture_ix {
3301 close = Some(capture.node.byte_range());
3302 }
3303 }
3304
3305 matches.advance();
3306
3307 let Some((open, close)) = open.zip(close) else {
3308 continue;
3309 };
3310
3311 let bracket_range = open.start..=close.end;
3312 if !bracket_range.overlaps(&range) {
3313 continue;
3314 }
3315
3316 return Some((open, close));
3317 }
3318 None
3319 })
3320 }
3321
3322 pub fn text_object_ranges<T: ToOffset>(
3323 &self,
3324 range: Range<T>,
3325 options: TreeSitterOptions,
3326 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3327 let range = range.start.to_offset(self).saturating_sub(1)
3328 ..self.len().min(range.end.to_offset(self) + 1);
3329
3330 let mut matches =
3331 self.syntax
3332 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3333 grammar.text_object_config.as_ref().map(|c| &c.query)
3334 });
3335
3336 let configs = matches
3337 .grammars()
3338 .iter()
3339 .map(|grammar| grammar.text_object_config.as_ref())
3340 .collect::<Vec<_>>();
3341
3342 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3343
3344 iter::from_fn(move || loop {
3345 while let Some(capture) = captures.pop() {
3346 if capture.0.overlaps(&range) {
3347 return Some(capture);
3348 }
3349 }
3350
3351 let mat = matches.peek()?;
3352
3353 let Some(config) = configs[mat.grammar_index].as_ref() else {
3354 matches.advance();
3355 continue;
3356 };
3357
3358 for capture in mat.captures {
3359 let Some(ix) = config
3360 .text_objects_by_capture_ix
3361 .binary_search_by_key(&capture.index, |e| e.0)
3362 .ok()
3363 else {
3364 continue;
3365 };
3366 let text_object = config.text_objects_by_capture_ix[ix].1;
3367 let byte_range = capture.node.byte_range();
3368
3369 let mut found = false;
3370 for (range, existing) in captures.iter_mut() {
3371 if existing == &text_object {
3372 range.start = range.start.min(byte_range.start);
3373 range.end = range.end.max(byte_range.end);
3374 found = true;
3375 break;
3376 }
3377 }
3378
3379 if !found {
3380 captures.push((byte_range, text_object));
3381 }
3382 }
3383
3384 matches.advance();
3385 })
3386 }
3387
    /// Returns enclosing bracket ranges containing the given range.
3389 pub fn enclosing_bracket_ranges<T: ToOffset>(
3390 &self,
3391 range: Range<T>,
3392 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
3393 let range = range.start.to_offset(self)..range.end.to_offset(self);
3394
3395 self.bracket_ranges(range.clone())
3396 .filter(move |(open, close)| open.start <= range.start && close.end >= range.end)
3397 }
3398
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
3402 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3403 &self,
3404 range: Range<T>,
3405 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3406 ) -> Option<(Range<usize>, Range<usize>)> {
3407 let range = range.start.to_offset(self)..range.end.to_offset(self);
3408
3409 // Get the ranges of the innermost pair of brackets.
3410 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3411
3412 for (open, close) in self.enclosing_bracket_ranges(range.clone()) {
3413 if let Some(range_filter) = range_filter {
3414 if !range_filter(open.clone(), close.clone()) {
3415 continue;
3416 }
3417 }
3418
3419 let len = close.end - open.start;
3420
3421 if let Some((existing_open, existing_close)) = &result {
3422 let existing_len = existing_close.end - existing_open.start;
3423 if len > existing_len {
3424 continue;
3425 }
3426 }
3427
3428 result = Some((open, close));
3429 }
3430
3431 result
3432 }
3433
    /// Returns offset ranges for any matches of the redaction query.
3435 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3436 /// will be run on the relevant section of the buffer.
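    ///
    /// A usage sketch, assuming a `snapshot` of this buffer (marked `ignore` since it needs a
    /// populated snapshot):
    ///
    /// ```ignore
    /// let redacted: Vec<_> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```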
3437 pub fn redacted_ranges<T: ToOffset>(
3438 &self,
3439 range: Range<T>,
3440 ) -> impl Iterator<Item = Range<usize>> + '_ {
3441 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3442 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3443 grammar
3444 .redactions_config
3445 .as_ref()
3446 .map(|config| &config.query)
3447 });
3448
3449 let configs = syntax_matches
3450 .grammars()
3451 .iter()
3452 .map(|grammar| grammar.redactions_config.as_ref())
3453 .collect::<Vec<_>>();
3454
3455 iter::from_fn(move || {
3456 let redacted_range = syntax_matches
3457 .peek()
3458 .and_then(|mat| {
3459 configs[mat.grammar_index].and_then(|config| {
3460 mat.captures
3461 .iter()
3462 .find(|capture| capture.index == config.redaction_capture_ix)
3463 })
3464 })
3465 .map(|mat| mat.node.byte_range());
3466 syntax_matches.advance();
3467 redacted_range
3468 })
3469 }
3470
3471 pub fn injections_intersecting_range<T: ToOffset>(
3472 &self,
3473 range: Range<T>,
3474 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3475 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3476
3477 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3478 grammar
3479 .injection_config
3480 .as_ref()
3481 .map(|config| &config.query)
3482 });
3483
3484 let configs = syntax_matches
3485 .grammars()
3486 .iter()
3487 .map(|grammar| grammar.injection_config.as_ref())
3488 .collect::<Vec<_>>();
3489
3490 iter::from_fn(move || {
3491 let ranges = syntax_matches.peek().and_then(|mat| {
3492 let config = &configs[mat.grammar_index]?;
3493 let content_capture_range = mat.captures.iter().find_map(|capture| {
3494 if capture.index == config.content_capture_ix {
3495 Some(capture.node.byte_range())
3496 } else {
3497 None
3498 }
3499 })?;
3500 let language = self.language_at(content_capture_range.start)?;
3501 Some((content_capture_range, language))
3502 });
3503 syntax_matches.advance();
3504 ranges
3505 })
3506 }
3507
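/// Returns the runnable ranges that intersect the given range, as captured by each
/// grammar's runnable query, along with their tags and extra captures.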
3508 pub fn runnable_ranges(
3509 &self,
3510 range: Range<Anchor>,
3511 ) -> impl Iterator<Item = RunnableRange> + '_ {
3512 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3513
3514 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3515 grammar.runnable_config.as_ref().map(|config| &config.query)
3516 });
3517
3518 let test_configs = syntax_matches
3519 .grammars()
3520 .iter()
3521 .map(|grammar| grammar.runnable_config.as_ref())
3522 .collect::<Vec<_>>();
3523
3524 iter::from_fn(move || loop {
3525 let mat = syntax_matches.peek()?;
3526
3527 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
3528 let mut run_range = None;
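// Compute the smallest byte range that spans every capture of this match. The
// usize::MAX..0 sentinel lets the check below detect a match with no captures.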
3529 let full_range = mat.captures.iter().fold(
3530 Range {
3531 start: usize::MAX,
3532 end: 0,
3533 },
3534 |mut acc, next| {
3535 let byte_range = next.node.byte_range();
3536 if acc.start > byte_range.start {
3537 acc.start = byte_range.start;
3538 }
3539 if acc.end < byte_range.end {
3540 acc.end = byte_range.end;
3541 }
3542 acc
3543 },
3544 );
3545 if full_range.start > full_range.end {
3546 // We did not find a full spanning range of this match.
3547 return None;
3548 }
3549 let extra_captures: SmallVec<[_; 1]> =
3550 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
3551 test_configs
3552 .extra_captures
3553 .get(capture.index as usize)
3554 .cloned()
3555 .and_then(|tag_name| match tag_name {
3556 RunnableCapture::Named(name) => {
3557 Some((capture.node.byte_range(), name))
3558 }
3559 RunnableCapture::Run => {
3560 let _ = run_range.insert(capture.node.byte_range());
3561 None
3562 }
3563 })
3564 }));
3565 let run_range = run_range?;
3566 let tags = test_configs
3567 .query
3568 .property_settings(mat.pattern_index)
3569 .iter()
3570 .filter_map(|property| {
3571 if *property.key == *"tag" {
3572 property
3573 .value
3574 .as_ref()
3575 .map(|value| RunnableTag(value.to_string().into()))
3576 } else {
3577 None
3578 }
3579 })
3580 .collect();
3581 let extra_captures = extra_captures
3582 .into_iter()
3583 .map(|(range, name)| {
3584 (
3585 name.to_string(),
3586 self.text_for_range(range.clone()).collect::<String>(),
3587 )
3588 })
3589 .collect();
3590 // All tags should have the same range.
3591 Some(RunnableRange {
3592 run_range,
3593 full_range,
3594 runnable: Runnable {
3595 tags,
3596 language: mat.language,
3597 buffer: self.remote_id(),
3598 },
3599 extra_captures,
3600 buffer_id: self.remote_id(),
3601 })
3602 });
3603
3604 syntax_matches.advance();
3605 if test_range.is_some() {
3606 // It's fine for us to short-circuit when .peek()? returns None. However, we don't want to return None from this
3607 // iterator just because a match lacked a run marker, so in that case we loop around to the next match.
3608 return test_range;
3609 }
3610 })
3611 }
3612
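/// Computes the indent guides for the rows intersecting the given range, based on the
/// buffer's indent guide settings and tab size.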
3613 pub fn indent_guides_in_range(
3614 &self,
3615 range: Range<Anchor>,
3616 ignore_disabled_for_language: bool,
3617 cx: &AppContext,
3618 ) -> Vec<IndentGuide> {
3619 let language_settings =
3620 language_settings(self.language().map(|l| l.name()), self.file.as_ref(), cx);
3621 let settings = language_settings.indent_guides;
3622 if !ignore_disabled_for_language && !settings.enabled {
3623 return Vec::new();
3624 }
3625 let tab_size = language_settings.tab_size.get() as u32;
3626
3627 let start_row = range.start.to_point(self).row;
3628 let end_row = range.end.to_point(self).row;
3629 let row_range = start_row..end_row + 1;
3630
3631 let mut row_indents = self.line_indents_in_row_range(row_range.clone());
3632
3633 let mut result_vec = Vec::new();
3634 let mut indent_stack = SmallVec::<[IndentGuide; 8]>::new();
3635
3636 while let Some((first_row, mut line_indent)) = row_indents.next() {
3637 let current_depth = indent_stack.len() as u32;
3638
3639 // When encountering an empty line, continue until a useful line indent is found,
3640 // then add to the indent stack with the depth found.
3641 let mut found_indent = false;
3642 let mut last_row = first_row;
3643 if line_indent.is_line_empty() {
3644 let mut trailing_row = end_row;
3645 while !found_indent {
3646 let (target_row, new_line_indent) =
3647 if let Some(display_row) = row_indents.next() {
3648 display_row
3649 } else {
3650 // This means we reached the end of the given range and found empty lines at the end.
3651 // We need to traverse further until we find a non-empty line to know if we need to add
3652 // an indent guide for the last visible indent.
3653 trailing_row += 1;
3654
3655 const TRAILING_ROW_SEARCH_LIMIT: u32 = 25;
3656 if trailing_row > self.max_point().row
3657 || trailing_row > end_row + TRAILING_ROW_SEARCH_LIMIT
3658 {
3659 break;
3660 }
3661 let new_line_indent = self.line_indent_for_row(trailing_row);
3662 (trailing_row, new_line_indent)
3663 };
3664
3665 if new_line_indent.is_line_empty() {
3666 continue;
3667 }
3668 last_row = target_row.min(end_row);
3669 line_indent = new_line_indent;
3670 found_indent = true;
3671 break;
3672 }
3673 } else {
3674 found_indent = true
3675 }
3676
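// Convert the line's indentation width into a guide depth, rounding up so that a
// partial indent level still produces a guide.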
3677 let depth = if found_indent {
3678 line_indent.len(tab_size) / tab_size
3679 + ((line_indent.len(tab_size) % tab_size) > 0) as u32
3680 } else {
3681 current_depth
3682 };
3683
3684 match depth.cmp(&current_depth) {
3685 Ordering::Less => {
3686 for _ in 0..(current_depth - depth) {
3687 let mut indent = indent_stack.pop().unwrap();
3688 if last_row != first_row {
3689 // In this case, we landed on an empty row, had to seek forward,
3690 // and discovered that the indent we were on is ending.
3691 // This means that the last display row must
3692 // be on the line that ends this indent range, so we
3693 // should display the range up to the first non-empty line.
3694 indent.end_row = first_row.saturating_sub(1);
3695 }
3696
3697 result_vec.push(indent)
3698 }
3699 }
3700 Ordering::Greater => {
3701 for next_depth in current_depth..depth {
3702 indent_stack.push(IndentGuide {
3703 buffer_id: self.remote_id(),
3704 start_row: first_row,
3705 end_row: last_row,
3706 depth: next_depth,
3707 tab_size,
3708 settings,
3709 });
3710 }
3711 }
3712 _ => {}
3713 }
3714
3715 for indent in indent_stack.iter_mut() {
3716 indent.end_row = last_row;
3717 }
3718 }
3719
3720 result_vec.extend(indent_stack);
3721
3722 result_vec
3723 }
3724
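/// Returns the row range and indentation of the indented block enclosing the given row,
/// searching a bounded number of rows in each direction and periodically yielding to the
/// executor so long scans don't block.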
3725 pub async fn enclosing_indent(
3726 &self,
3727 mut buffer_row: BufferRow,
3728 ) -> Option<(Range<BufferRow>, LineIndent)> {
3729 let max_row = self.max_point().row;
3730 if buffer_row >= max_row {
3731 return None;
3732 }
3733
3734 let mut target_indent = self.line_indent_for_row(buffer_row);
3735
3736 // If the current row is at the start of an indented block, we want to return this
3737 // block as the enclosing indent.
3738 if !target_indent.is_line_empty() && buffer_row < max_row {
3739 let next_line_indent = self.line_indent_for_row(buffer_row + 1);
3740 if !next_line_indent.is_line_empty()
3741 && target_indent.raw_len() < next_line_indent.raw_len()
3742 {
3743 target_indent = next_line_indent;
3744 buffer_row += 1;
3745 }
3746 }
3747
3748 const SEARCH_ROW_LIMIT: u32 = 25000;
3749 const SEARCH_WHITESPACE_ROW_LIMIT: u32 = 2500;
3750 const YIELD_INTERVAL: u32 = 100;
3751
3752 let mut accessed_row_counter = 0;
3753
3754 // If the current row is blank, search up and down for the nearest non-empty lines
3755 if target_indent.is_line_empty() {
3756 let start = buffer_row.saturating_sub(SEARCH_WHITESPACE_ROW_LIMIT);
3757 let end = (max_row + 1).min(buffer_row + SEARCH_WHITESPACE_ROW_LIMIT);
3758
3759 let mut non_empty_line_above = None;
3760 for (row, indent) in self
3761 .text
3762 .reversed_line_indents_in_row_range(start..buffer_row)
3763 {
3764 accessed_row_counter += 1;
3765 if accessed_row_counter == YIELD_INTERVAL {
3766 accessed_row_counter = 0;
3767 yield_now().await;
3768 }
3769 if !indent.is_line_empty() {
3770 non_empty_line_above = Some((row, indent));
3771 break;
3772 }
3773 }
3774
3775 let mut non_empty_line_below = None;
3776 for (row, indent) in self.text.line_indents_in_row_range((buffer_row + 1)..end) {
3777 accessed_row_counter += 1;
3778 if accessed_row_counter == YIELD_INTERVAL {
3779 accessed_row_counter = 0;
3780 yield_now().await;
3781 }
3782 if !indent.is_line_empty() {
3783 non_empty_line_below = Some((row, indent));
3784 break;
3785 }
3786 }
3787
3788 let (row, indent) = match (non_empty_line_above, non_empty_line_below) {
3789 (Some((above_row, above_indent)), Some((below_row, below_indent))) => {
3790 if above_indent.raw_len() >= below_indent.raw_len() {
3791 (above_row, above_indent)
3792 } else {
3793 (below_row, below_indent)
3794 }
3795 }
3796 (Some(above), None) => above,
3797 (None, Some(below)) => below,
3798 _ => return None,
3799 };
3800
3801 target_indent = indent;
3802 buffer_row = row;
3803 }
3804
3805 let start = buffer_row.saturating_sub(SEARCH_ROW_LIMIT);
3806 let end = (max_row + 1).min(buffer_row + SEARCH_ROW_LIMIT);
3807
3808 let mut start_indent = None;
3809 for (row, indent) in self
3810 .text
3811 .reversed_line_indents_in_row_range(start..buffer_row)
3812 {
3813 accessed_row_counter += 1;
3814 if accessed_row_counter == YIELD_INTERVAL {
3815 accessed_row_counter = 0;
3816 yield_now().await;
3817 }
3818 if !indent.is_line_empty() && indent.raw_len() < target_indent.raw_len() {
3819 start_indent = Some((row, indent));
3820 break;
3821 }
3822 }
3823 let (start_row, start_indent_size) = start_indent?;
3824
3825 let mut end_indent = (end, None);
3826 for (row, indent) in self.text.line_indents_in_row_range((buffer_row + 1)..end) {
3827 accessed_row_counter += 1;
3828 if accessed_row_counter == YIELD_INTERVAL {
3829 accessed_row_counter = 0;
3830 yield_now().await;
3831 }
3832 if !indent.is_line_empty() && indent.raw_len() < target_indent.raw_len() {
3833 end_indent = (row.saturating_sub(1), Some(indent));
3834 break;
3835 }
3836 }
3837 let (end_row, end_indent_size) = end_indent;
3838
3839 let indent = if let Some(end_indent_size) = end_indent_size {
3840 if start_indent_size.raw_len() > end_indent_size.raw_len() {
3841 start_indent_size
3842 } else {
3843 end_indent_size
3844 }
3845 } else {
3846 start_indent_size
3847 };
3848
3849 Some((start_row..end_row, indent))
3850 }
3851
3852 /// Returns selections for remote peers intersecting the given range.
3853 #[allow(clippy::type_complexity)]
3854 pub fn selections_in_range(
3855 &self,
3856 range: Range<Anchor>,
3857 include_local: bool,
3858 ) -> impl Iterator<
3859 Item = (
3860 ReplicaId,
3861 bool,
3862 CursorShape,
3863 impl Iterator<Item = &Selection<Anchor>> + '_,
3864 ),
3865 > + '_ {
3866 self.remote_selections
3867 .iter()
3868 .filter(move |(replica_id, set)| {
3869 (include_local || **replica_id != self.text.replica_id())
3870 && !set.selections.is_empty()
3871 })
3872 .map(move |(replica_id, set)| {
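// Binary-search for the sub-slice of selections intersecting `range`: `start_ix` is the
// first selection whose end is not before `range.start`, and `end_ix` is one past the
// last selection whose start is not after `range.end`. Forcing `Greater`/`Less` on ties
// makes both searches resolve to the desired boundary index.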
3873 let start_ix = match set.selections.binary_search_by(|probe| {
3874 probe.end.cmp(&range.start, self).then(Ordering::Greater)
3875 }) {
3876 Ok(ix) | Err(ix) => ix,
3877 };
3878 let end_ix = match set.selections.binary_search_by(|probe| {
3879 probe.start.cmp(&range.end, self).then(Ordering::Less)
3880 }) {
3881 Ok(ix) | Err(ix) => ix,
3882 };
3883
3884 (
3885 *replica_id,
3886 set.line_mode,
3887 set.cursor_shape,
3888 set.selections[start_ix..end_ix].iter(),
3889 )
3890 })
3891 }
3892
3893 /// Returns whether the buffer contains any diagnostics.
3894 pub fn has_diagnostics(&self) -> bool {
3895 !self.diagnostics.is_empty()
3896 }
3897
3898 /// Returns all the diagnostics intersecting the given range.
3899 pub fn diagnostics_in_range<'a, T, O>(
3900 &'a self,
3901 search_range: Range<T>,
3902 reversed: bool,
3903 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3904 where
3905 T: 'a + Clone + ToOffset,
3906 O: 'a + FromAnchor + Ord,
3907 {
3908 let mut iterators: Vec<_> = self
3909 .diagnostics
3910 .iter()
3911 .map(|(_, collection)| {
3912 collection
3913 .range::<T, O>(search_range.clone(), self, true, reversed)
3914 .peekable()
3915 })
3916 .collect();
3917
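// Merge the per-server iterators by repeatedly yielding whichever peeked entry sorts
// first (by range start, then severity, then group id), so the combined output stays
// globally ordered.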
3918 std::iter::from_fn(move || {
3919 let (next_ix, _) = iterators
3920 .iter_mut()
3921 .enumerate()
3922 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
3923 .min_by(|(_, a), (_, b)| {
3924 let cmp = a
3925 .range
3926 .start
3927 .cmp(&b.range.start)
3928 // when range is equal, sort by diagnostic severity
3929 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
3930 // and stabilize order with group_id
3931 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
3932 if reversed {
3933 cmp.reverse()
3934 } else {
3935 cmp
3936 }
3937 })?;
3938 iterators[next_ix].next()
3939 })
3940 }
3941
3942 /// Returns all the diagnostic groups associated with the given
3943 /// language server ID. If no language server ID is provided,
3944 /// all diagnostic groups are returned.
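///
/// A hedged sketch (not compiled as a doctest; `snapshot` and `server_id` are assumed to
/// already exist):
///
/// ```ignore
/// for (server, group) in snapshot.diagnostic_groups(Some(server_id)) {
///     // The primary entry of each group, anchored in this buffer.
///     let primary = &group.entries[group.primary_ix];
/// }
/// ```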
3945 pub fn diagnostic_groups(
3946 &self,
3947 language_server_id: Option<LanguageServerId>,
3948 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
3949 let mut groups = Vec::new();
3950
3951 if let Some(language_server_id) = language_server_id {
3952 if let Ok(ix) = self
3953 .diagnostics
3954 .binary_search_by_key(&language_server_id, |e| e.0)
3955 {
3956 self.diagnostics[ix]
3957 .1
3958 .groups(language_server_id, &mut groups, self);
3959 }
3960 } else {
3961 for (language_server_id, diagnostics) in self.diagnostics.iter() {
3962 diagnostics.groups(*language_server_id, &mut groups, self);
3963 }
3964 }
3965
3966 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
3967 let a_start = &group_a.entries[group_a.primary_ix].range.start;
3968 let b_start = &group_b.entries[group_b.primary_ix].range.start;
3969 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
3970 });
3971
3972 groups
3973 }
3974
3975 /// Returns an iterator over the diagnostics for the given group.
3976 pub fn diagnostic_group<'a, O>(
3977 &'a self,
3978 group_id: usize,
3979 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3980 where
3981 O: 'a + FromAnchor,
3982 {
3983 self.diagnostics
3984 .iter()
3985 .flat_map(move |(_, set)| set.group(group_id, self))
3986 }
3987
3988 /// An integer version number that accounts for all updates besides
3989 /// the buffer's text itself (which is versioned via a version vector).
3990 pub fn non_text_state_update_count(&self) -> usize {
3991 self.non_text_state_update_count
3992 }
3993
3994 /// Returns a snapshot of the underlying file.
3995 pub fn file(&self) -> Option<&Arc<dyn File>> {
3996 self.file.as_ref()
3997 }
3998
3999 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4000 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
4001 if let Some(file) = self.file() {
4002 if file.path().file_name().is_none() || include_root {
4003 Some(file.full_path(cx))
4004 } else {
4005 Some(file.path().to_path_buf())
4006 }
4007 } else {
4008 None
4009 }
4010 }
4011}
4012
4013fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4014 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4015}
4016
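/// Measures the leading indentation of `text`: consecutive spaces or tabs are counted,
/// and the indent kind is taken from the first indent character.
///
/// A hedged sketch of the behavior (illustrative, not compiled as a doctest):
///
/// ```ignore
/// let indent = indent_size_for_text("\t\tlet x = 1;".chars());
/// // indent.len == 2, indent.kind == IndentKind::Tab
/// ```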
4017fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4018 let mut result = IndentSize::spaces(0);
4019 for c in text {
4020 let kind = match c {
4021 ' ' => IndentKind::Space,
4022 '\t' => IndentKind::Tab,
4023 _ => break,
4024 };
4025 if result.len == 0 {
4026 result.kind = kind;
4027 }
4028 result.len += 1;
4029 }
4030 result
4031}
4032
4033impl Clone for BufferSnapshot {
4034 fn clone(&self) -> Self {
4035 Self {
4036 text: self.text.clone(),
4037 syntax: self.syntax.clone(),
4038 file: self.file.clone(),
4039 remote_selections: self.remote_selections.clone(),
4040 diagnostics: self.diagnostics.clone(),
4041 language: self.language.clone(),
4042 non_text_state_update_count: self.non_text_state_update_count,
4043 }
4044 }
4045}
4046
4047impl Deref for BufferSnapshot {
4048 type Target = text::BufferSnapshot;
4049
4050 fn deref(&self) -> &Self::Target {
4051 &self.text
4052 }
4053}
4054
4055unsafe impl<'a> Send for BufferChunks<'a> {}
4056
4057impl<'a> BufferChunks<'a> {
4058 pub(crate) fn new(
4059 text: &'a Rope,
4060 range: Range<usize>,
4061 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4062 diagnostics: bool,
4063 buffer_snapshot: Option<&'a BufferSnapshot>,
4064 ) -> Self {
4065 let mut highlights = None;
4066 if let Some((captures, highlight_maps)) = syntax {
4067 highlights = Some(BufferChunkHighlights {
4068 captures,
4069 next_capture: None,
4070 stack: Default::default(),
4071 highlight_maps,
4072 })
4073 }
4074
4075 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4076 let chunks = text.chunks_in_range(range.clone());
4077
4078 let mut this = BufferChunks {
4079 range,
4080 buffer_snapshot,
4081 chunks,
4082 diagnostic_endpoints,
4083 error_depth: 0,
4084 warning_depth: 0,
4085 information_depth: 0,
4086 hint_depth: 0,
4087 unnecessary_depth: 0,
4088 highlights,
4089 };
4090 this.initialize_diagnostic_endpoints();
4091 this
4092 }
4093
4094 /// Repositions the iterator to cover the given byte range of the buffer.
4095 pub fn seek(&mut self, range: Range<usize>) {
4096 let old_range = std::mem::replace(&mut self.range, range.clone());
4097 self.chunks.set_range(self.range.clone());
4098 if let Some(highlights) = self.highlights.as_mut() {
4099 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4100 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4101 highlights
4102 .stack
4103 .retain(|(end_offset, _)| *end_offset > range.start);
4104 if let Some(capture) = &highlights.next_capture {
4105 if range.start >= capture.node.start_byte() {
4106 let next_capture_end = capture.node.end_byte();
4107 if range.start < next_capture_end {
4108 highlights.stack.push((
4109 next_capture_end,
4110 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4111 ));
4112 }
4113 highlights.next_capture.take();
4114 }
4115 }
4116 } else if let Some(snapshot) = self.buffer_snapshot {
4117 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4118 *highlights = BufferChunkHighlights {
4119 captures,
4120 next_capture: None,
4121 stack: Default::default(),
4122 highlight_maps,
4123 };
4124 } else {
4125 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4126 // Seeking such BufferChunks is not supported.
4127 debug_assert!(false, "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot");
4128 }
4129
4130 highlights.captures.set_byte_range(self.range.clone());
4131 self.initialize_diagnostic_endpoints();
4132 }
4133 }
4134
4135 fn initialize_diagnostic_endpoints(&mut self) {
4136 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4137 if let Some(buffer) = self.buffer_snapshot {
4138 let mut diagnostic_endpoints = Vec::new();
4139 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4140 diagnostic_endpoints.push(DiagnosticEndpoint {
4141 offset: entry.range.start,
4142 is_start: true,
4143 severity: entry.diagnostic.severity,
4144 is_unnecessary: entry.diagnostic.is_unnecessary,
4145 });
4146 diagnostic_endpoints.push(DiagnosticEndpoint {
4147 offset: entry.range.end,
4148 is_start: false,
4149 severity: entry.diagnostic.severity,
4150 is_unnecessary: entry.diagnostic.is_unnecessary,
4151 });
4152 }
4153 diagnostic_endpoints
4154 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4155 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4156 self.hint_depth = 0;
4157 self.error_depth = 0;
4158 self.warning_depth = 0;
4159 self.information_depth = 0;
4160 }
4161 }
4162 }
4163
4164 /// The current byte offset in the buffer.
4165 pub fn offset(&self) -> usize {
4166 self.range.start
4167 }
4168
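/// Tracks how many diagnostics of each severity currently cover the iterator's position:
/// entering a diagnostic's range increments the matching depth and leaving it decrements
/// it, so `current_diagnostic_severity` can report the most severe overlapping diagnostic.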
4169 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4170 let depth = match endpoint.severity {
4171 DiagnosticSeverity::ERROR => &mut self.error_depth,
4172 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4173 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4174 DiagnosticSeverity::HINT => &mut self.hint_depth,
4175 _ => return,
4176 };
4177 if endpoint.is_start {
4178 *depth += 1;
4179 } else {
4180 *depth -= 1;
4181 }
4182
4183 if endpoint.is_unnecessary {
4184 if endpoint.is_start {
4185 self.unnecessary_depth += 1;
4186 } else {
4187 self.unnecessary_depth -= 1;
4188 }
4189 }
4190 }
4191
4192 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4193 if self.error_depth > 0 {
4194 Some(DiagnosticSeverity::ERROR)
4195 } else if self.warning_depth > 0 {
4196 Some(DiagnosticSeverity::WARNING)
4197 } else if self.information_depth > 0 {
4198 Some(DiagnosticSeverity::INFORMATION)
4199 } else if self.hint_depth > 0 {
4200 Some(DiagnosticSeverity::HINT)
4201 } else {
4202 None
4203 }
4204 }
4205
4206 fn current_code_is_unnecessary(&self) -> bool {
4207 self.unnecessary_depth > 0
4208 }
4209}
4210
4211impl<'a> Iterator for BufferChunks<'a> {
4212 type Item = Chunk<'a>;
4213
4214 fn next(&mut self) -> Option<Self::Item> {
4215 let mut next_capture_start = usize::MAX;
4216 let mut next_diagnostic_endpoint = usize::MAX;
4217
4218 if let Some(highlights) = self.highlights.as_mut() {
4219 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4220 if *parent_capture_end <= self.range.start {
4221 highlights.stack.pop();
4222 } else {
4223 break;
4224 }
4225 }
4226
4227 if highlights.next_capture.is_none() {
4228 highlights.next_capture = highlights.captures.next();
4229 }
4230
4231 while let Some(capture) = highlights.next_capture.as_ref() {
4232 if self.range.start < capture.node.start_byte() {
4233 next_capture_start = capture.node.start_byte();
4234 break;
4235 } else {
4236 let highlight_id =
4237 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4238 highlights
4239 .stack
4240 .push((capture.node.end_byte(), highlight_id));
4241 highlights.next_capture = highlights.captures.next();
4242 }
4243 }
4244 }
4245
4246 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4247 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4248 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4249 if endpoint.offset <= self.range.start {
4250 self.update_diagnostic_depths(endpoint);
4251 diagnostic_endpoints.next();
4252 } else {
4253 next_diagnostic_endpoint = endpoint.offset;
4254 break;
4255 }
4256 }
4257 }
4258 self.diagnostic_endpoints = diagnostic_endpoints;
4259
4260 if let Some(chunk) = self.chunks.peek() {
4261 let chunk_start = self.range.start;
4262 let mut chunk_end = (self.chunks.offset() + chunk.len())
4263 .min(next_capture_start)
4264 .min(next_diagnostic_endpoint);
4265 let mut highlight_id = None;
4266 if let Some(highlights) = self.highlights.as_ref() {
4267 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4268 chunk_end = chunk_end.min(*parent_capture_end);
4269 highlight_id = Some(*parent_highlight_id);
4270 }
4271 }
4272
4273 let slice =
4274 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4275 self.range.start = chunk_end;
4276 if self.range.start == self.chunks.offset() + chunk.len() {
4277 self.chunks.next().unwrap();
4278 }
4279
4280 Some(Chunk {
4281 text: slice,
4282 syntax_highlight_id: highlight_id,
4283 diagnostic_severity: self.current_diagnostic_severity(),
4284 is_unnecessary: self.current_code_is_unnecessary(),
4285 ..Default::default()
4286 })
4287 } else {
4288 None
4289 }
4290 }
4291}
4292
4293impl operation_queue::Operation for Operation {
4294 fn lamport_timestamp(&self) -> clock::Lamport {
4295 match self {
4296 Operation::Buffer(_) => {
4297 unreachable!("buffer operations should never be deferred at this layer")
4298 }
4299 Operation::UpdateDiagnostics {
4300 lamport_timestamp, ..
4301 }
4302 | Operation::UpdateSelections {
4303 lamport_timestamp, ..
4304 }
4305 | Operation::UpdateCompletionTriggers {
4306 lamport_timestamp, ..
4307 } => *lamport_timestamp,
4308 }
4309 }
4310}
4311
4312impl Default for Diagnostic {
4313 fn default() -> Self {
4314 Self {
4315 source: Default::default(),
4316 code: None,
4317 severity: DiagnosticSeverity::ERROR,
4318 message: Default::default(),
4319 group_id: 0,
4320 is_primary: false,
4321 is_disk_based: false,
4322 is_unnecessary: false,
4323 data: None,
4324 }
4325 }
4326}
4327
4328impl IndentSize {
4329 /// Returns an [`IndentSize`] representing the given spaces.
4330 pub fn spaces(len: u32) -> Self {
4331 Self {
4332 len,
4333 kind: IndentKind::Space,
4334 }
4335 }
4336
4337 /// Returns an [`IndentSize`] representing a tab.
4338 pub fn tab() -> Self {
4339 Self {
4340 len: 1,
4341 kind: IndentKind::Tab,
4342 }
4343 }
4344
4345 /// An iterator over the characters represented by this [`IndentSize`].
4346 pub fn chars(&self) -> impl Iterator<Item = char> {
4347 iter::repeat(self.char()).take(self.len as usize)
4348 }
4349
4350 /// The character representation of this [`IndentSize`].
4351 pub fn char(&self) -> char {
4352 match self.kind {
4353 IndentKind::Space => ' ',
4354 IndentKind::Tab => '\t',
4355 }
4356 }
4357
4358 /// Consumes the current [`IndentSize`] and returns a new one that has
4359 /// been shrunk or enlarged by the given size along the given direction.
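///
/// A hedged sketch (illustrative, not compiled as a doctest):
///
/// ```ignore
/// let shrunk = IndentSize::spaces(8).with_delta(Ordering::Less, IndentSize::spaces(4));
/// // shrunk.len == 4; shrinking only applies when the kinds match and there is room.
/// let grown = IndentSize::tab().with_delta(Ordering::Greater, IndentSize::tab());
/// // grown.len == 2
/// ```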
4360 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4361 match direction {
4362 Ordering::Less => {
4363 if self.kind == size.kind && self.len >= size.len {
4364 self.len -= size.len;
4365 }
4366 }
4367 Ordering::Equal => {}
4368 Ordering::Greater => {
4369 if self.len == 0 {
4370 self = size;
4371 } else if self.kind == size.kind {
4372 self.len += size.len;
4373 }
4374 }
4375 }
4376 self
4377 }
4378
4379 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4380 match self.kind {
4381 IndentKind::Space => self.len as usize,
4382 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4383 }
4384 }
4385}
4386
4387#[cfg(any(test, feature = "test-support"))]
4388pub struct TestFile {
4389 pub path: Arc<Path>,
4390 pub root_name: String,
4391}
4392
4393#[cfg(any(test, feature = "test-support"))]
4394impl File for TestFile {
4395 fn path(&self) -> &Arc<Path> {
4396 &self.path
4397 }
4398
4399 fn full_path(&self, _: &gpui::AppContext) -> PathBuf {
4400 PathBuf::from(&self.root_name).join(self.path.as_ref())
4401 }
4402
4403 fn as_local(&self) -> Option<&dyn LocalFile> {
4404 None
4405 }
4406
4407 fn disk_state(&self) -> DiskState {
4408 unimplemented!()
4409 }
4410
4411 fn file_name<'a>(&'a self, _: &'a gpui::AppContext) -> &'a std::ffi::OsStr {
4412 self.path().file_name().unwrap_or(self.root_name.as_ref())
4413 }
4414
4415 fn worktree_id(&self, _: &AppContext) -> WorktreeId {
4416 WorktreeId::from_usize(0)
4417 }
4418
4419 fn as_any(&self) -> &dyn std::any::Any {
4420 unimplemented!()
4421 }
4422
4423 fn to_proto(&self, _: &AppContext) -> rpc::proto::File {
4424 unimplemented!()
4425 }
4426
4427 fn is_private(&self) -> bool {
4428 false
4429 }
4430}
4431
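/// Coalesces an ascending sequence of row numbers into contiguous ranges, starting a new
/// range whenever the sequence skips a value or the current range reaches `max_len`.
///
/// A hedged sketch (illustrative, not compiled as a doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1u32, 2, 3, 7, 8].into_iter(), 100).collect();
/// // ranges == [1..4, 7..9]
/// ```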
4432pub(crate) fn contiguous_ranges(
4433 values: impl Iterator<Item = u32>,
4434 max_len: usize,
4435) -> impl Iterator<Item = Range<u32>> {
4436 let mut values = values;
4437 let mut current_range: Option<Range<u32>> = None;
4438 std::iter::from_fn(move || loop {
4439 if let Some(value) = values.next() {
4440 if let Some(range) = &mut current_range {
4441 if value == range.end && range.len() < max_len {
4442 range.end += 1;
4443 continue;
4444 }
4445 }
4446
4447 let prev_range = current_range.clone();
4448 current_range = Some(value..(value + 1));
4449 if prev_range.is_some() {
4450 return prev_range;
4451 }
4452 } else {
4453 return current_range.take();
4454 }
4455 })
4456}
4457
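/// Classifies characters as word, whitespace, or punctuation characters, optionally taking
/// the word characters configured for a language scope into account.
///
/// A hedged usage sketch (illustrative, not compiled as a doctest):
///
/// ```ignore
/// let classifier = CharClassifier::new(None).ignore_punctuation(false);
/// // classifier.kind('a') == CharKind::Word
/// // classifier.kind('-') == CharKind::Punctuation
/// // classifier.kind(' ') == CharKind::Whitespace
/// ```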
4458#[derive(Default, Debug)]
4459pub struct CharClassifier {
4460 scope: Option<LanguageScope>,
4461 for_completion: bool,
4462 ignore_punctuation: bool,
4463}
4464
4465impl CharClassifier {
4466 pub fn new(scope: Option<LanguageScope>) -> Self {
4467 Self {
4468 scope,
4469 for_completion: false,
4470 ignore_punctuation: false,
4471 }
4472 }
4473
4474 pub fn for_completion(self, for_completion: bool) -> Self {
4475 Self {
4476 for_completion,
4477 ..self
4478 }
4479 }
4480
4481 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4482 Self {
4483 ignore_punctuation,
4484 ..self
4485 }
4486 }
4487
4488 pub fn is_whitespace(&self, c: char) -> bool {
4489 self.kind(c) == CharKind::Whitespace
4490 }
4491
4492 pub fn is_word(&self, c: char) -> bool {
4493 self.kind(c) == CharKind::Word
4494 }
4495
4496 pub fn is_punctuation(&self, c: char) -> bool {
4497 self.kind(c) == CharKind::Punctuation
4498 }
4499
4500 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4501 if c.is_whitespace() {
4502 return CharKind::Whitespace;
4503 } else if c.is_alphanumeric() || c == '_' {
4504 return CharKind::Word;
4505 }
4506
4507 if let Some(scope) = &self.scope {
4508 if let Some(characters) = scope.word_characters() {
4509 if characters.contains(&c) {
4510 if c == '-' && !self.for_completion && !ignore_punctuation {
4511 return CharKind::Punctuation;
4512 }
4513 return CharKind::Word;
4514 }
4515 }
4516 }
4517
4518 if ignore_punctuation {
4519 CharKind::Word
4520 } else {
4521 CharKind::Punctuation
4522 }
4523 }
4524
4525 pub fn kind(&self, c: char) -> CharKind {
4526 self.kind_with(c, self.ignore_punctuation)
4527 }
4528}
4529
4530/// Find all of the ranges of whitespace that occur at the ends of lines
4531/// in the given rope.
4532///
4533/// This could also be done with a regex search, but this implementation
4534/// avoids copying text.
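///
/// A hedged sketch (illustrative, not compiled as a doctest; assumes `Rope` can be built
/// from a `&str`):
///
/// ```ignore
/// let rope = Rope::from("let x = 1;  \nlet y = 2;\t\n");
/// // Yields the byte ranges of the trailing spaces on line one and the trailing tab on line two.
/// let ranges = trailing_whitespace_ranges(&rope);
/// ```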
4535pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4536 let mut ranges = Vec::new();
4537
4538 let mut offset = 0;
4539 let mut prev_chunk_trailing_whitespace_range = 0..0;
4540 for chunk in rope.chunks() {
4541 let mut prev_line_trailing_whitespace_range = 0..0;
4542 for (i, line) in chunk.split('\n').enumerate() {
4543 let line_end_offset = offset + line.len();
4544 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4545 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4546
4547 if i == 0 && trimmed_line_len == 0 {
4548 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4549 }
4550 if !prev_line_trailing_whitespace_range.is_empty() {
4551 ranges.push(prev_line_trailing_whitespace_range);
4552 }
4553
4554 offset = line_end_offset + 1;
4555 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4556 }
4557
4558 offset -= 1;
4559 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4560 }
4561
4562 if !prev_chunk_trailing_whitespace_range.is_empty() {
4563 ranges.push(prev_chunk_trailing_whitespace_range);
4564 }
4565
4566 ranges
4567}