1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 markdown::ParsedMarkdown,
5 proto, Grammar, Language, LanguageRegistry,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 language_settings::{language_settings, LanguageSettings},
10 markdown::parse_markdown,
11 outline::OutlineItem,
12 syntax_map::{
13 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
14 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
15 },
16 task_context::RunnableRange,
17 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, TextObject,
18 TreeSitterOptions,
19};
20use anyhow::{anyhow, Context as _, Result};
21use async_watch as watch;
22use clock::Lamport;
23pub use clock::ReplicaId;
24use collections::HashMap;
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 AnyElement, App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, Pixels, Task,
29 TaskLabel, Window,
30};
31use lsp::LanguageServerId;
32use parking_lot::Mutex;
33use schemars::JsonSchema;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use similar::{ChangeTag, TextDiff};
38use smallvec::SmallVec;
39use smol::future::yield_now;
40use std::{
41 any::Any,
42 borrow::Cow,
43 cell::Cell,
44 cmp::{self, Ordering, Reverse},
45 collections::{BTreeMap, BTreeSet},
46 ffi::OsStr,
47 fmt,
48 future::Future,
49 iter::{self, Iterator, Peekable},
50 mem,
51 num::NonZeroU32,
52 ops::{Deref, DerefMut, Range},
53 path::{Path, PathBuf},
54 str,
55 sync::{Arc, LazyLock},
56 time::{Duration, Instant},
57 vec,
58};
59use sum_tree::TreeMap;
60use text::operation_queue::OperationQueue;
61use text::*;
62pub use text::{
63 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
64 OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection, SelectionGoal,
65 Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint, ToPointUtf16,
66 Transaction, TransactionId, Unclipped,
67};
68use theme::SyntaxTheme;
69#[cfg(any(test, feature = "test-support"))]
70use util::RandomCharIter;
71use util::{debug_panic, maybe, RangeExt};
72
73#[cfg(any(test, feature = "test-support"))]
74pub use {tree_sitter_rust, tree_sitter_typescript};
75
76pub use lsp::DiagnosticSeverity;
77
78/// A label for the background task spawned by the buffer to compute
79/// a diff against the contents of its file.
80pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
81
82/// Indicate whether a [`Buffer`] has permissions to edit.
83#[derive(PartialEq, Clone, Copy, Debug)]
84pub enum Capability {
85 /// The buffer is a mutable replica.
86 ReadWrite,
87 /// The buffer is a read-only replica.
88 ReadOnly,
89}
90
91pub type BufferRow = u32;
92
93/// An in-memory representation of a source code file, including its text,
94/// syntax trees, git status, and diagnostics.
95pub struct Buffer {
96 text: TextBuffer,
97 branch_state: Option<BufferBranchState>,
98 /// Filesystem state, `None` when there is no path.
99 file: Option<Arc<dyn File>>,
100 /// The mtime of the file when this buffer was last loaded from
101 /// or saved to disk.
102 saved_mtime: Option<MTime>,
103 /// The version vector when this buffer was last loaded from
104 /// or saved to disk.
105 saved_version: clock::Global,
106 preview_version: clock::Global,
107 transaction_depth: usize,
108 was_dirty_before_starting_transaction: Option<bool>,
109 reload_task: Option<Task<Result<()>>>,
110 language: Option<Arc<Language>>,
111 autoindent_requests: Vec<Arc<AutoindentRequest>>,
112 pending_autoindent: Option<Task<()>>,
113 sync_parse_timeout: Duration,
114 syntax_map: Mutex<SyntaxMap>,
115 parsing_in_background: bool,
116 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
117 non_text_state_update_count: usize,
118 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
119 remote_selections: TreeMap<ReplicaId, SelectionSet>,
120 diagnostics_timestamp: clock::Lamport,
121 completion_triggers: BTreeSet<String>,
122 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
123 completion_triggers_timestamp: clock::Lamport,
124 deferred_ops: OperationQueue<Operation>,
125 capability: Capability,
126 has_conflict: bool,
127 /// Memoize calls to has_changes_since(saved_version).
128 /// The contents of a cell are (self.version, has_changes) at the time of a last call.
129 has_unsaved_edits: Cell<(clock::Global, bool)>,
130 _subscriptions: Vec<gpui::Subscription>,
131}
132
133#[derive(Copy, Clone, Debug, PartialEq, Eq)]
134pub enum ParseStatus {
135 Idle,
136 Parsing,
137}
138
139struct BufferBranchState {
140 base_buffer: Entity<Buffer>,
141 merged_operations: Vec<Lamport>,
142}
143
144/// An immutable, cheaply cloneable representation of a fixed
145/// state of a buffer.
146pub struct BufferSnapshot {
147 pub text: text::BufferSnapshot,
148 pub(crate) syntax: SyntaxSnapshot,
149 file: Option<Arc<dyn File>>,
150 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
151 remote_selections: TreeMap<ReplicaId, SelectionSet>,
152 language: Option<Arc<Language>>,
153 non_text_state_update_count: usize,
154}
155
156/// The kind and amount of indentation in a particular line. For now,
157/// assumes that indentation is all the same character.
158#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
159pub struct IndentSize {
160 /// The number of bytes that comprise the indentation.
161 pub len: u32,
162 /// The kind of whitespace used for indentation.
163 pub kind: IndentKind,
164}
165
166/// A whitespace character that's used for indentation.
167#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
168pub enum IndentKind {
169 /// An ASCII space character.
170 #[default]
171 Space,
172 /// An ASCII tab character.
173 Tab,
174}
175
176/// The shape of a selection cursor.
177#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
178#[serde(rename_all = "snake_case")]
179pub enum CursorShape {
180 /// A vertical bar
181 #[default]
182 Bar,
183 /// A block that surrounds the following character
184 Block,
185 /// An underline that runs along the following character
186 Underline,
187 /// A box drawn around the following character
188 Hollow,
189}
190
191#[derive(Clone, Debug)]
192struct SelectionSet {
193 line_mode: bool,
194 cursor_shape: CursorShape,
195 selections: Arc<[Selection<Anchor>]>,
196 lamport_timestamp: clock::Lamport,
197}
198
199/// A diagnostic associated with a certain range of a buffer.
200#[derive(Clone, Debug, PartialEq, Eq)]
201pub struct Diagnostic {
202 /// The name of the service that produced this diagnostic.
203 pub source: Option<String>,
204 /// A machine-readable code that identifies this diagnostic.
205 pub code: Option<String>,
206 /// Whether this diagnostic is a hint, warning, or error.
207 pub severity: DiagnosticSeverity,
208 /// The human-readable message associated with this diagnostic.
209 pub message: String,
210 /// An id that identifies the group to which this diagnostic belongs.
211 ///
212 /// When a language server produces a diagnostic with
213 /// one or more associated diagnostics, those diagnostics are all
214 /// assigned a single group ID.
215 pub group_id: usize,
216 /// Whether this diagnostic is the primary diagnostic for its group.
217 ///
218 /// In a given group, the primary diagnostic is the top-level diagnostic
219 /// returned by the language server. The non-primary diagnostics are the
220 /// associated diagnostics.
221 pub is_primary: bool,
222 /// Whether this diagnostic is considered to originate from an analysis of
223 /// files on disk, as opposed to any unsaved buffer contents. This is a
224 /// property of a given diagnostic source, and is configured for a given
225 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
226 /// for the language server.
227 pub is_disk_based: bool,
228 /// Whether this diagnostic marks unnecessary code.
229 pub is_unnecessary: bool,
230 /// Data from language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
231 pub data: Option<Value>,
232}
233
234/// TODO - move this into the `project` crate and make it private.
235pub async fn prepare_completion_documentation(
236 documentation: &lsp::Documentation,
237 language_registry: &Arc<LanguageRegistry>,
238 language: Option<Arc<Language>>,
239) -> Documentation {
240 match documentation {
241 lsp::Documentation::String(text) => {
242 if text.lines().count() <= 1 {
243 Documentation::SingleLine(text.clone())
244 } else {
245 Documentation::MultiLinePlainText(text.clone())
246 }
247 }
248
249 lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
250 lsp::MarkupKind::PlainText => {
251 if value.lines().count() <= 1 {
252 Documentation::SingleLine(value.clone())
253 } else {
254 Documentation::MultiLinePlainText(value.clone())
255 }
256 }
257
258 lsp::MarkupKind::Markdown => {
259 let parsed = parse_markdown(value, Some(language_registry), language).await;
260 Documentation::MultiLineMarkdown(parsed)
261 }
262 },
263 }
264}
265
266/// Documentation associated with a [`Completion`].
267#[derive(Clone, Debug)]
268pub enum Documentation {
269 /// There is no documentation for this completion.
270 Undocumented,
271 /// A single line of documentation.
272 SingleLine(String),
273 /// Multiple lines of plain text documentation.
274 MultiLinePlainText(String),
275 /// Markdown documentation.
276 MultiLineMarkdown(ParsedMarkdown),
277}
278
279/// An operation used to synchronize this buffer with its other replicas.
280#[derive(Clone, Debug, PartialEq)]
281pub enum Operation {
282 /// A text operation.
283 Buffer(text::Operation),
284
285 /// An update to the buffer's diagnostics.
286 UpdateDiagnostics {
287 /// The id of the language server that produced the new diagnostics.
288 server_id: LanguageServerId,
289 /// The diagnostics.
290 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
291 /// The buffer's lamport timestamp.
292 lamport_timestamp: clock::Lamport,
293 },
294
295 /// An update to the most recent selections in this buffer.
296 UpdateSelections {
297 /// The selections.
298 selections: Arc<[Selection<Anchor>]>,
299 /// The buffer's lamport timestamp.
300 lamport_timestamp: clock::Lamport,
301 /// Whether the selections are in 'line mode'.
302 line_mode: bool,
303 /// The [`CursorShape`] associated with these selections.
304 cursor_shape: CursorShape,
305 },
306
307 /// An update to the characters that should trigger autocompletion
308 /// for this buffer.
309 UpdateCompletionTriggers {
310 /// The characters that trigger autocompletion.
311 triggers: Vec<String>,
312 /// The buffer's lamport timestamp.
313 lamport_timestamp: clock::Lamport,
314 /// The language server ID.
315 server_id: LanguageServerId,
316 },
317}
318
319/// An event that occurs in a buffer.
320#[derive(Clone, Debug, PartialEq)]
321pub enum BufferEvent {
322 /// The buffer was changed in a way that must be
323 /// propagated to its other replicas.
324 Operation {
325 operation: Operation,
326 is_local: bool,
327 },
328 /// The buffer was edited.
329 Edited,
330 /// The buffer's `dirty` bit changed.
331 DirtyChanged,
332 /// The buffer was saved.
333 Saved,
334 /// The buffer's file was changed on disk.
335 FileHandleChanged,
336 /// The buffer was reloaded.
337 Reloaded,
338 /// The buffer is in need of a reload
339 ReloadNeeded,
340 /// The buffer's language was changed.
341 LanguageChanged,
342 /// The buffer's syntax trees were updated.
343 Reparsed,
344 /// The buffer's diagnostics were updated.
345 DiagnosticsUpdated,
346 /// The buffer gained or lost editing capabilities.
347 CapabilityChanged,
348 /// The buffer was explicitly requested to close.
349 Closed,
350 /// The buffer was discarded when closing.
351 Discarded,
352}
353
354/// The file associated with a buffer.
355pub trait File: Send + Sync {
356 /// Returns the [`LocalFile`] associated with this file, if the
357 /// file is local.
358 fn as_local(&self) -> Option<&dyn LocalFile>;
359
360 /// Returns whether this file is local.
361 fn is_local(&self) -> bool {
362 self.as_local().is_some()
363 }
364
365 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
366 /// only available in some states, such as modification time.
367 fn disk_state(&self) -> DiskState;
368
369 /// Returns the path of this file relative to the worktree's root directory.
370 fn path(&self) -> &Arc<Path>;
371
372 /// Returns the path of this file relative to the worktree's parent directory (this means it
373 /// includes the name of the worktree's root folder).
374 fn full_path(&self, cx: &App) -> PathBuf;
375
376 /// Returns the last component of this handle's absolute path. If this handle refers to the root
377 /// of its worktree, then this method will return the name of the worktree itself.
378 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
379
380 /// Returns the id of the worktree to which this file belongs.
381 ///
382 /// This is needed for looking up project-specific settings.
383 fn worktree_id(&self, cx: &App) -> WorktreeId;
384
385 /// Converts this file into an [`Any`] trait object.
386 fn as_any(&self) -> &dyn Any;
387
388 /// Converts this file into a protobuf message.
389 fn to_proto(&self, cx: &App) -> rpc::proto::File;
390
391 /// Return whether Zed considers this to be a private file.
392 fn is_private(&self) -> bool;
393}
394
395/// The file's storage status - whether it's stored (`Present`), and if so when it was last
396/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
397/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
398/// indicator for new files.
399#[derive(Copy, Clone, Debug, PartialEq)]
400pub enum DiskState {
401 /// File created in Zed that has not been saved.
402 New,
403 /// File present on the filesystem.
404 Present { mtime: MTime },
405 /// Deleted file that was previously present.
406 Deleted,
407}
408
409impl DiskState {
410 /// Returns the file's last known modification time on disk.
411 pub fn mtime(self) -> Option<MTime> {
412 match self {
413 DiskState::New => None,
414 DiskState::Present { mtime } => Some(mtime),
415 DiskState::Deleted => None,
416 }
417 }
418}
419
420/// The file associated with a buffer, in the case where the file is on the local disk.
421pub trait LocalFile: File {
422 /// Returns the absolute path of this file
423 fn abs_path(&self, cx: &App) -> PathBuf;
424
425 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
426 fn load(&self, cx: &App) -> Task<Result<String>>;
427
428 /// Loads the file's contents from disk.
429 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
430}
431
432/// The auto-indent behavior associated with an editing operation.
433/// For some editing operations, each affected line of text has its
434/// indentation recomputed. For other operations, the entire block
435/// of edited text is adjusted uniformly.
436#[derive(Clone, Debug)]
437pub enum AutoindentMode {
438 /// Indent each line of inserted text.
439 EachLine,
440 /// Apply the same indentation adjustment to all of the lines
441 /// in a given insertion.
442 Block {
443 /// The original indentation level of the first line of each
444 /// insertion, if it has been copied.
445 original_indent_columns: Vec<u32>,
446 },
447}
448
449#[derive(Clone)]
450struct AutoindentRequest {
451 before_edit: BufferSnapshot,
452 entries: Vec<AutoindentRequestEntry>,
453 is_block_mode: bool,
454 ignore_empty_lines: bool,
455}
456
457#[derive(Debug, Clone)]
458struct AutoindentRequestEntry {
459 /// A range of the buffer whose indentation should be adjusted.
460 range: Range<Anchor>,
461 /// Whether or not these lines should be considered brand new, for the
462 /// purpose of auto-indent. When text is not new, its indentation will
463 /// only be adjusted if the suggested indentation level has *changed*
464 /// since the edit was made.
465 first_line_is_new: bool,
466 indent_size: IndentSize,
467 original_indent_column: Option<u32>,
468}
469
470#[derive(Debug)]
471struct IndentSuggestion {
472 basis_row: u32,
473 delta: Ordering,
474 within_error: bool,
475}
476
477struct BufferChunkHighlights<'a> {
478 captures: SyntaxMapCaptures<'a>,
479 next_capture: Option<SyntaxMapCapture<'a>>,
480 stack: Vec<(usize, HighlightId)>,
481 highlight_maps: Vec<HighlightMap>,
482}
483
484/// An iterator that yields chunks of a buffer's text, along with their
485/// syntax highlights and diagnostic status.
486pub struct BufferChunks<'a> {
487 buffer_snapshot: Option<&'a BufferSnapshot>,
488 range: Range<usize>,
489 chunks: text::Chunks<'a>,
490 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
491 error_depth: usize,
492 warning_depth: usize,
493 information_depth: usize,
494 hint_depth: usize,
495 unnecessary_depth: usize,
496 highlights: Option<BufferChunkHighlights<'a>>,
497}
498
499/// A chunk of a buffer's text, along with its syntax highlight and
500/// diagnostic status.
501#[derive(Clone, Debug, Default)]
502pub struct Chunk<'a> {
503 /// The text of the chunk.
504 pub text: &'a str,
505 /// The syntax highlighting style of the chunk.
506 pub syntax_highlight_id: Option<HighlightId>,
507 /// The highlight style that has been applied to this chunk in
508 /// the editor.
509 pub highlight_style: Option<HighlightStyle>,
510 /// The severity of diagnostic associated with this chunk, if any.
511 pub diagnostic_severity: Option<DiagnosticSeverity>,
512 /// Whether this chunk of text is marked as unnecessary.
513 pub is_unnecessary: bool,
514 /// Whether this chunk of text was originally a tab character.
515 pub is_tab: bool,
516 /// An optional recipe for how the chunk should be presented.
517 pub renderer: Option<ChunkRenderer>,
518}
519
520/// A recipe for how the chunk should be presented.
521#[derive(Clone)]
522pub struct ChunkRenderer {
523 /// creates a custom element to represent this chunk.
524 pub render: Arc<dyn Send + Sync + Fn(&mut ChunkRendererContext) -> AnyElement>,
525 /// If true, the element is constrained to the shaped width of the text.
526 pub constrain_width: bool,
527}
528
529pub struct ChunkRendererContext<'a, 'b> {
530 pub window: &'a mut Window,
531 pub context: &'b mut App,
532 pub max_width: Pixels,
533}
534
535impl fmt::Debug for ChunkRenderer {
536 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
537 f.debug_struct("ChunkRenderer")
538 .field("constrain_width", &self.constrain_width)
539 .finish()
540 }
541}
542
543impl<'a, 'b> Deref for ChunkRendererContext<'a, 'b> {
544 type Target = App;
545
546 fn deref(&self) -> &Self::Target {
547 self.context
548 }
549}
550
551impl<'a, 'b> DerefMut for ChunkRendererContext<'a, 'b> {
552 fn deref_mut(&mut self) -> &mut Self::Target {
553 self.context
554 }
555}
556
557/// A set of edits to a given version of a buffer, computed asynchronously.
558#[derive(Debug)]
559pub struct Diff {
560 pub(crate) base_version: clock::Global,
561 line_ending: LineEnding,
562 pub edits: Vec<(Range<usize>, Arc<str>)>,
563}
564
565#[derive(Clone, Copy)]
566pub(crate) struct DiagnosticEndpoint {
567 offset: usize,
568 is_start: bool,
569 severity: DiagnosticSeverity,
570 is_unnecessary: bool,
571}
572
573/// A class of characters, used for characterizing a run of text.
574#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
575pub enum CharKind {
576 /// Whitespace.
577 Whitespace,
578 /// Punctuation.
579 Punctuation,
580 /// Word.
581 Word,
582}
583
584/// A runnable is a set of data about a region that could be resolved into a task
585pub struct Runnable {
586 pub tags: SmallVec<[RunnableTag; 1]>,
587 pub language: Arc<Language>,
588 pub buffer: BufferId,
589}
590
591impl Buffer {
592 /// Create a new buffer with the given base text.
593 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
594 Self::build(
595 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
596 None,
597 Capability::ReadWrite,
598 )
599 }
600
601 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
602 pub fn local_normalized(
603 base_text_normalized: Rope,
604 line_ending: LineEnding,
605 cx: &Context<Self>,
606 ) -> Self {
607 Self::build(
608 TextBuffer::new_normalized(
609 0,
610 cx.entity_id().as_non_zero_u64().into(),
611 line_ending,
612 base_text_normalized,
613 ),
614 None,
615 Capability::ReadWrite,
616 )
617 }
618
619 /// Create a new buffer that is a replica of a remote buffer.
620 pub fn remote(
621 remote_id: BufferId,
622 replica_id: ReplicaId,
623 capability: Capability,
624 base_text: impl Into<String>,
625 ) -> Self {
626 Self::build(
627 TextBuffer::new(replica_id, remote_id, base_text.into()),
628 None,
629 capability,
630 )
631 }
632
633 /// Create a new buffer that is a replica of a remote buffer, populating its
634 /// state from the given protobuf message.
635 pub fn from_proto(
636 replica_id: ReplicaId,
637 capability: Capability,
638 message: proto::BufferState,
639 file: Option<Arc<dyn File>>,
640 ) -> Result<Self> {
641 let buffer_id = BufferId::new(message.id)
642 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
643 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
644 let mut this = Self::build(buffer, file, capability);
645 this.text.set_line_ending(proto::deserialize_line_ending(
646 rpc::proto::LineEnding::from_i32(message.line_ending)
647 .ok_or_else(|| anyhow!("missing line_ending"))?,
648 ));
649 this.saved_version = proto::deserialize_version(&message.saved_version);
650 this.saved_mtime = message.saved_mtime.map(|time| time.into());
651 Ok(this)
652 }
653
654 /// Serialize the buffer's state to a protobuf message.
655 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
656 proto::BufferState {
657 id: self.remote_id().into(),
658 file: self.file.as_ref().map(|f| f.to_proto(cx)),
659 base_text: self.base_text().to_string(),
660 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
661 saved_version: proto::serialize_version(&self.saved_version),
662 saved_mtime: self.saved_mtime.map(|time| time.into()),
663 }
664 }
665
666 /// Serialize as protobufs all of the changes to the buffer since the given version.
667 pub fn serialize_ops(
668 &self,
669 since: Option<clock::Global>,
670 cx: &App,
671 ) -> Task<Vec<proto::Operation>> {
672 let mut operations = Vec::new();
673 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
674
675 operations.extend(self.remote_selections.iter().map(|(_, set)| {
676 proto::serialize_operation(&Operation::UpdateSelections {
677 selections: set.selections.clone(),
678 lamport_timestamp: set.lamport_timestamp,
679 line_mode: set.line_mode,
680 cursor_shape: set.cursor_shape,
681 })
682 }));
683
684 for (server_id, diagnostics) in &self.diagnostics {
685 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
686 lamport_timestamp: self.diagnostics_timestamp,
687 server_id: *server_id,
688 diagnostics: diagnostics.iter().cloned().collect(),
689 }));
690 }
691
692 for (server_id, completions) in &self.completion_triggers_per_language_server {
693 operations.push(proto::serialize_operation(
694 &Operation::UpdateCompletionTriggers {
695 triggers: completions.iter().cloned().collect(),
696 lamport_timestamp: self.completion_triggers_timestamp,
697 server_id: *server_id,
698 },
699 ));
700 }
701
702 let text_operations = self.text.operations().clone();
703 cx.background_executor().spawn(async move {
704 let since = since.unwrap_or_default();
705 operations.extend(
706 text_operations
707 .iter()
708 .filter(|(_, op)| !since.observed(op.timestamp()))
709 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
710 );
711 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
712 operations
713 })
714 }
715
716 /// Assign a language to the buffer, returning the buffer.
717 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
718 self.set_language(Some(language), cx);
719 self
720 }
721
722 /// Returns the [`Capability`] of this buffer.
723 pub fn capability(&self) -> Capability {
724 self.capability
725 }
726
727 /// Whether this buffer can only be read.
728 pub fn read_only(&self) -> bool {
729 self.capability == Capability::ReadOnly
730 }
731
732 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
733 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
734 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
735 let snapshot = buffer.snapshot();
736 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
737 Self {
738 saved_mtime,
739 saved_version: buffer.version(),
740 preview_version: buffer.version(),
741 reload_task: None,
742 transaction_depth: 0,
743 was_dirty_before_starting_transaction: None,
744 has_unsaved_edits: Cell::new((buffer.version(), false)),
745 text: buffer,
746 branch_state: None,
747 file,
748 capability,
749 syntax_map,
750 parsing_in_background: false,
751 non_text_state_update_count: 0,
752 sync_parse_timeout: Duration::from_millis(1),
753 parse_status: async_watch::channel(ParseStatus::Idle),
754 autoindent_requests: Default::default(),
755 pending_autoindent: Default::default(),
756 language: None,
757 remote_selections: Default::default(),
758 diagnostics: Default::default(),
759 diagnostics_timestamp: Default::default(),
760 completion_triggers: Default::default(),
761 completion_triggers_per_language_server: Default::default(),
762 completion_triggers_timestamp: Default::default(),
763 deferred_ops: OperationQueue::new(),
764 has_conflict: false,
765 _subscriptions: Vec::new(),
766 }
767 }
768
769 pub fn build_snapshot(
770 text: Rope,
771 language: Option<Arc<Language>>,
772 language_registry: Option<Arc<LanguageRegistry>>,
773 cx: &mut App,
774 ) -> impl Future<Output = BufferSnapshot> {
775 let entity_id = cx.reserve_entity::<Self>().entity_id();
776 let buffer_id = entity_id.as_non_zero_u64().into();
777 async move {
778 let text =
779 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
780 let mut syntax = SyntaxMap::new(&text).snapshot();
781 if let Some(language) = language.clone() {
782 let text = text.clone();
783 let language = language.clone();
784 let language_registry = language_registry.clone();
785 syntax.reparse(&text, language_registry, language);
786 }
787 BufferSnapshot {
788 text,
789 syntax,
790 file: None,
791 diagnostics: Default::default(),
792 remote_selections: Default::default(),
793 language,
794 non_text_state_update_count: 0,
795 }
796 }
797 }
798
799 /// Retrieve a snapshot of the buffer's current state. This is computationally
800 /// cheap, and allows reading from the buffer on a background thread.
801 pub fn snapshot(&self) -> BufferSnapshot {
802 let text = self.text.snapshot();
803 let mut syntax_map = self.syntax_map.lock();
804 syntax_map.interpolate(&text);
805 let syntax = syntax_map.snapshot();
806
807 BufferSnapshot {
808 text,
809 syntax,
810 file: self.file.clone(),
811 remote_selections: self.remote_selections.clone(),
812 diagnostics: self.diagnostics.clone(),
813 language: self.language.clone(),
814 non_text_state_update_count: self.non_text_state_update_count,
815 }
816 }
817
818 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
819 let this = cx.entity();
820 cx.new(|cx| {
821 let mut branch = Self {
822 branch_state: Some(BufferBranchState {
823 base_buffer: this.clone(),
824 merged_operations: Default::default(),
825 }),
826 language: self.language.clone(),
827 has_conflict: self.has_conflict,
828 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
829 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
830 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
831 };
832 if let Some(language_registry) = self.language_registry() {
833 branch.set_language_registry(language_registry);
834 }
835
836 // Reparse the branch buffer so that we get syntax highlighting immediately.
837 branch.reparse(cx);
838
839 branch
840 })
841 }
842
843 /// Applies all of the changes in this buffer that intersect any of the
844 /// given `ranges` to its base buffer.
845 ///
846 /// If `ranges` is empty, then all changes will be applied. This buffer must
847 /// be a branch buffer to call this method.
848 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
849 let Some(base_buffer) = self.base_buffer() else {
850 debug_panic!("not a branch buffer");
851 return;
852 };
853
854 let mut ranges = if ranges.is_empty() {
855 &[0..usize::MAX]
856 } else {
857 ranges.as_slice()
858 }
859 .into_iter()
860 .peekable();
861
862 let mut edits = Vec::new();
863 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
864 let mut is_included = false;
865 while let Some(range) = ranges.peek() {
866 if range.end < edit.new.start {
867 ranges.next().unwrap();
868 } else {
869 if range.start <= edit.new.end {
870 is_included = true;
871 }
872 break;
873 }
874 }
875
876 if is_included {
877 edits.push((
878 edit.old.clone(),
879 self.text_for_range(edit.new.clone()).collect::<String>(),
880 ));
881 }
882 }
883
884 let operation = base_buffer.update(cx, |base_buffer, cx| {
885 // cx.emit(BufferEvent::DiffBaseChanged);
886 base_buffer.edit(edits, None, cx)
887 });
888
889 if let Some(operation) = operation {
890 if let Some(BufferBranchState {
891 merged_operations, ..
892 }) = &mut self.branch_state
893 {
894 merged_operations.push(operation);
895 }
896 }
897 }
898
899 fn on_base_buffer_event(
900 &mut self,
901 _: Entity<Buffer>,
902 event: &BufferEvent,
903 cx: &mut Context<Self>,
904 ) {
905 let BufferEvent::Operation { operation, .. } = event else {
906 return;
907 };
908 let Some(BufferBranchState {
909 merged_operations, ..
910 }) = &mut self.branch_state
911 else {
912 return;
913 };
914
915 let mut operation_to_undo = None;
916 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
917 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
918 merged_operations.remove(ix);
919 operation_to_undo = Some(operation.timestamp);
920 }
921 }
922
923 self.apply_ops([operation.clone()], cx);
924
925 if let Some(timestamp) = operation_to_undo {
926 let counts = [(timestamp, u32::MAX)].into_iter().collect();
927 self.undo_operations(counts, cx);
928 }
929 }
930
931 #[cfg(test)]
932 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
933 &self.text
934 }
935
936 /// Retrieve a snapshot of the buffer's raw text, without any
937 /// language-related state like the syntax tree or diagnostics.
938 pub fn text_snapshot(&self) -> text::BufferSnapshot {
939 self.text.snapshot()
940 }
941
942 /// The file associated with the buffer, if any.
943 pub fn file(&self) -> Option<&Arc<dyn File>> {
944 self.file.as_ref()
945 }
946
947 /// The version of the buffer that was last saved or reloaded from disk.
948 pub fn saved_version(&self) -> &clock::Global {
949 &self.saved_version
950 }
951
952 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
953 pub fn saved_mtime(&self) -> Option<MTime> {
954 self.saved_mtime
955 }
956
957 /// Assign a language to the buffer.
958 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
959 self.non_text_state_update_count += 1;
960 self.syntax_map.lock().clear(&self.text);
961 self.language = language;
962 self.reparse(cx);
963 cx.emit(BufferEvent::LanguageChanged);
964 }
965
966 /// Assign a language registry to the buffer. This allows the buffer to retrieve
967 /// other languages if parts of the buffer are written in different languages.
968 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
969 self.syntax_map
970 .lock()
971 .set_language_registry(language_registry);
972 }
973
974 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
975 self.syntax_map.lock().language_registry()
976 }
977
978 /// Assign the buffer a new [`Capability`].
979 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
980 self.capability = capability;
981 cx.emit(BufferEvent::CapabilityChanged)
982 }
983
984 /// This method is called to signal that the buffer has been saved.
985 pub fn did_save(
986 &mut self,
987 version: clock::Global,
988 mtime: Option<MTime>,
989 cx: &mut Context<Self>,
990 ) {
991 self.saved_version = version;
992 self.has_unsaved_edits
993 .set((self.saved_version().clone(), false));
994 self.has_conflict = false;
995 self.saved_mtime = mtime;
996 cx.emit(BufferEvent::Saved);
997 cx.notify();
998 }
999
1000 /// This method is called to signal that the buffer has been discarded.
1001 pub fn discarded(&self, cx: &mut Context<Self>) {
1002 cx.emit(BufferEvent::Discarded);
1003 cx.notify();
1004 }
1005
1006 /// Reloads the contents of the buffer from disk.
1007 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1008 let (tx, rx) = futures::channel::oneshot::channel();
1009 let prev_version = self.text.version();
1010 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
1011 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
1012 let file = this.file.as_ref()?.as_local()?;
1013 Some((file.disk_state().mtime(), file.load(cx)))
1014 })?
1015 else {
1016 return Ok(());
1017 };
1018
1019 let new_text = new_text.await?;
1020 let diff = this
1021 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
1022 .await;
1023 this.update(&mut cx, |this, cx| {
1024 if this.version() == diff.base_version {
1025 this.finalize_last_transaction();
1026 this.apply_diff(diff, cx);
1027 tx.send(this.finalize_last_transaction().cloned()).ok();
1028 this.has_conflict = false;
1029 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1030 } else {
1031 if !diff.edits.is_empty()
1032 || this
1033 .edits_since::<usize>(&diff.base_version)
1034 .next()
1035 .is_some()
1036 {
1037 this.has_conflict = true;
1038 }
1039
1040 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1041 }
1042
1043 this.reload_task.take();
1044 })
1045 }));
1046 rx
1047 }
1048
1049 /// This method is called to signal that the buffer has been reloaded.
1050 pub fn did_reload(
1051 &mut self,
1052 version: clock::Global,
1053 line_ending: LineEnding,
1054 mtime: Option<MTime>,
1055 cx: &mut Context<Self>,
1056 ) {
1057 self.saved_version = version;
1058 self.has_unsaved_edits
1059 .set((self.saved_version.clone(), false));
1060 self.text.set_line_ending(line_ending);
1061 self.saved_mtime = mtime;
1062 cx.emit(BufferEvent::Reloaded);
1063 cx.notify();
1064 }
1065
1066 /// Updates the [`File`] backing this buffer. This should be called when
1067 /// the file has changed or has been deleted.
1068 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1069 let was_dirty = self.is_dirty();
1070 let mut file_changed = false;
1071
1072 if let Some(old_file) = self.file.as_ref() {
1073 if new_file.path() != old_file.path() {
1074 file_changed = true;
1075 }
1076
1077 let old_state = old_file.disk_state();
1078 let new_state = new_file.disk_state();
1079 if old_state != new_state {
1080 file_changed = true;
1081 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1082 cx.emit(BufferEvent::ReloadNeeded)
1083 }
1084 }
1085 } else {
1086 file_changed = true;
1087 };
1088
1089 self.file = Some(new_file);
1090 if file_changed {
1091 self.non_text_state_update_count += 1;
1092 if was_dirty != self.is_dirty() {
1093 cx.emit(BufferEvent::DirtyChanged);
1094 }
1095 cx.emit(BufferEvent::FileHandleChanged);
1096 cx.notify();
1097 }
1098 }
1099
1100 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1101 Some(self.branch_state.as_ref()?.base_buffer.clone())
1102 }
1103
1104 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1105 pub fn language(&self) -> Option<&Arc<Language>> {
1106 self.language.as_ref()
1107 }
1108
1109 /// Returns the [`Language`] at the given location.
1110 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1111 let offset = position.to_offset(self);
1112 self.syntax_map
1113 .lock()
1114 .layers_for_range(offset..offset, &self.text, false)
1115 .last()
1116 .map(|info| info.language.clone())
1117 .or_else(|| self.language.clone())
1118 }
1119
1120 /// An integer version number that accounts for all updates besides
1121 /// the buffer's text itself (which is versioned via a version vector).
1122 pub fn non_text_state_update_count(&self) -> usize {
1123 self.non_text_state_update_count
1124 }
1125
1126 /// Whether the buffer is being parsed in the background.
1127 #[cfg(any(test, feature = "test-support"))]
1128 pub fn is_parsing(&self) -> bool {
1129 self.parsing_in_background
1130 }
1131
1132 /// Indicates whether the buffer contains any regions that may be
1133 /// written in a language that hasn't been loaded yet.
1134 pub fn contains_unknown_injections(&self) -> bool {
1135 self.syntax_map.lock().contains_unknown_injections()
1136 }
1137
1138 #[cfg(test)]
1139 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1140 self.sync_parse_timeout = timeout;
1141 }
1142
1143 /// Called after an edit to synchronize the buffer's main parse tree with
1144 /// the buffer's new underlying state.
1145 ///
1146 /// Locks the syntax map and interpolates the edits since the last reparse
1147 /// into the foreground syntax tree.
1148 ///
1149 /// Then takes a stable snapshot of the syntax map before unlocking it.
1150 /// The snapshot with the interpolated edits is sent to a background thread,
1151 /// where we ask Tree-sitter to perform an incremental parse.
1152 ///
1153 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1154 /// waiting on the parse to complete. As soon as it completes, we proceed
1155 /// synchronously, unless a 1ms timeout elapses.
1156 ///
1157 /// If we time out waiting on the parse, we spawn a second task waiting
1158 /// until the parse does complete and return with the interpolated tree still
1159 /// in the foreground. When the background parse completes, call back into
1160 /// the main thread and assign the foreground parse state.
1161 ///
1162 /// If the buffer or grammar changed since the start of the background parse,
1163 /// initiate an additional reparse recursively. To avoid concurrent parses
1164 /// for the same buffer, we only initiate a new parse if we are not already
1165 /// parsing in the background.
1166 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1167 if self.parsing_in_background {
1168 return;
1169 }
1170 let language = if let Some(language) = self.language.clone() {
1171 language
1172 } else {
1173 return;
1174 };
1175
1176 let text = self.text_snapshot();
1177 let parsed_version = self.version();
1178
1179 let mut syntax_map = self.syntax_map.lock();
1180 syntax_map.interpolate(&text);
1181 let language_registry = syntax_map.language_registry();
1182 let mut syntax_snapshot = syntax_map.snapshot();
1183 drop(syntax_map);
1184
1185 let parse_task = cx.background_executor().spawn({
1186 let language = language.clone();
1187 let language_registry = language_registry.clone();
1188 async move {
1189 syntax_snapshot.reparse(&text, language_registry, language);
1190 syntax_snapshot
1191 }
1192 });
1193
1194 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1195 match cx
1196 .background_executor()
1197 .block_with_timeout(self.sync_parse_timeout, parse_task)
1198 {
1199 Ok(new_syntax_snapshot) => {
1200 self.did_finish_parsing(new_syntax_snapshot, cx);
1201 }
1202 Err(parse_task) => {
1203 self.parsing_in_background = true;
1204 cx.spawn(move |this, mut cx| async move {
1205 let new_syntax_map = parse_task.await;
1206 this.update(&mut cx, move |this, cx| {
1207 let grammar_changed =
1208 this.language.as_ref().map_or(true, |current_language| {
1209 !Arc::ptr_eq(&language, current_language)
1210 });
1211 let language_registry_changed = new_syntax_map
1212 .contains_unknown_injections()
1213 && language_registry.map_or(false, |registry| {
1214 registry.version() != new_syntax_map.language_registry_version()
1215 });
1216 let parse_again = language_registry_changed
1217 || grammar_changed
1218 || this.version.changed_since(&parsed_version);
1219 this.did_finish_parsing(new_syntax_map, cx);
1220 this.parsing_in_background = false;
1221 if parse_again {
1222 this.reparse(cx);
1223 }
1224 })
1225 .ok();
1226 })
1227 .detach();
1228 }
1229 }
1230 }
1231
1232 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1233 self.non_text_state_update_count += 1;
1234 self.syntax_map.lock().did_parse(syntax_snapshot);
1235 self.request_autoindent(cx);
1236 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1237 cx.emit(BufferEvent::Reparsed);
1238 cx.notify();
1239 }
1240
1241 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1242 self.parse_status.1.clone()
1243 }
1244
1245 /// Assign to the buffer a set of diagnostics created by a given language server.
1246 pub fn update_diagnostics(
1247 &mut self,
1248 server_id: LanguageServerId,
1249 diagnostics: DiagnosticSet,
1250 cx: &mut Context<Self>,
1251 ) {
1252 let lamport_timestamp = self.text.lamport_clock.tick();
1253 let op = Operation::UpdateDiagnostics {
1254 server_id,
1255 diagnostics: diagnostics.iter().cloned().collect(),
1256 lamport_timestamp,
1257 };
1258 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1259 self.send_operation(op, true, cx);
1260 }
1261
1262 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1263 if let Some(indent_sizes) = self.compute_autoindents() {
1264 let indent_sizes = cx.background_executor().spawn(indent_sizes);
1265 match cx
1266 .background_executor()
1267 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1268 {
1269 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1270 Err(indent_sizes) => {
1271 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1272 let indent_sizes = indent_sizes.await;
1273 this.update(&mut cx, |this, cx| {
1274 this.apply_autoindents(indent_sizes, cx);
1275 })
1276 .ok();
1277 }));
1278 }
1279 }
1280 } else {
1281 self.autoindent_requests.clear();
1282 }
1283 }
1284
1285 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
1286 let max_rows_between_yields = 100;
1287 let snapshot = self.snapshot();
1288 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1289 return None;
1290 }
1291
1292 let autoindent_requests = self.autoindent_requests.clone();
1293 Some(async move {
1294 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1295 for request in autoindent_requests {
1296 // Resolve each edited range to its row in the current buffer and in the
1297 // buffer before this batch of edits.
1298 let mut row_ranges = Vec::new();
1299 let mut old_to_new_rows = BTreeMap::new();
1300 let mut language_indent_sizes_by_new_row = Vec::new();
1301 for entry in &request.entries {
1302 let position = entry.range.start;
1303 let new_row = position.to_point(&snapshot).row;
1304 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1305 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1306
1307 if !entry.first_line_is_new {
1308 let old_row = position.to_point(&request.before_edit).row;
1309 old_to_new_rows.insert(old_row, new_row);
1310 }
1311 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1312 }
1313
1314 // Build a map containing the suggested indentation for each of the edited lines
1315 // with respect to the state of the buffer before these edits. This map is keyed
1316 // by the rows for these lines in the current state of the buffer.
1317 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1318 let old_edited_ranges =
1319 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1320 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1321 let mut language_indent_size = IndentSize::default();
1322 for old_edited_range in old_edited_ranges {
1323 let suggestions = request
1324 .before_edit
1325 .suggest_autoindents(old_edited_range.clone())
1326 .into_iter()
1327 .flatten();
1328 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1329 if let Some(suggestion) = suggestion {
1330 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1331
1332 // Find the indent size based on the language for this row.
1333 while let Some((row, size)) = language_indent_sizes.peek() {
1334 if *row > new_row {
1335 break;
1336 }
1337 language_indent_size = *size;
1338 language_indent_sizes.next();
1339 }
1340
1341 let suggested_indent = old_to_new_rows
1342 .get(&suggestion.basis_row)
1343 .and_then(|from_row| {
1344 Some(old_suggestions.get(from_row).copied()?.0)
1345 })
1346 .unwrap_or_else(|| {
1347 request
1348 .before_edit
1349 .indent_size_for_line(suggestion.basis_row)
1350 })
1351 .with_delta(suggestion.delta, language_indent_size);
1352 old_suggestions
1353 .insert(new_row, (suggested_indent, suggestion.within_error));
1354 }
1355 }
1356 yield_now().await;
1357 }
1358
1359 // Compute new suggestions for each line, but only include them in the result
1360 // if they differ from the old suggestion for that line.
1361 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1362 let mut language_indent_size = IndentSize::default();
1363 for (row_range, original_indent_column) in row_ranges {
1364 let new_edited_row_range = if request.is_block_mode {
1365 row_range.start..row_range.start + 1
1366 } else {
1367 row_range.clone()
1368 };
1369
1370 let suggestions = snapshot
1371 .suggest_autoindents(new_edited_row_range.clone())
1372 .into_iter()
1373 .flatten();
1374 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1375 if let Some(suggestion) = suggestion {
1376 // Find the indent size based on the language for this row.
1377 while let Some((row, size)) = language_indent_sizes.peek() {
1378 if *row > new_row {
1379 break;
1380 }
1381 language_indent_size = *size;
1382 language_indent_sizes.next();
1383 }
1384
1385 let suggested_indent = indent_sizes
1386 .get(&suggestion.basis_row)
1387 .copied()
1388 .map(|e| e.0)
1389 .unwrap_or_else(|| {
1390 snapshot.indent_size_for_line(suggestion.basis_row)
1391 })
1392 .with_delta(suggestion.delta, language_indent_size);
1393
1394 if old_suggestions.get(&new_row).map_or(
1395 true,
1396 |(old_indentation, was_within_error)| {
1397 suggested_indent != *old_indentation
1398 && (!suggestion.within_error || *was_within_error)
1399 },
1400 ) {
1401 indent_sizes.insert(
1402 new_row,
1403 (suggested_indent, request.ignore_empty_lines),
1404 );
1405 }
1406 }
1407 }
1408
1409 if let (true, Some(original_indent_column)) =
1410 (request.is_block_mode, original_indent_column)
1411 {
1412 let new_indent =
1413 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1414 *indent
1415 } else {
1416 snapshot.indent_size_for_line(row_range.start)
1417 };
1418 let delta = new_indent.len as i64 - original_indent_column as i64;
1419 if delta != 0 {
1420 for row in row_range.skip(1) {
1421 indent_sizes.entry(row).or_insert_with(|| {
1422 let mut size = snapshot.indent_size_for_line(row);
1423 if size.kind == new_indent.kind {
1424 match delta.cmp(&0) {
1425 Ordering::Greater => size.len += delta as u32,
1426 Ordering::Less => {
1427 size.len = size.len.saturating_sub(-delta as u32)
1428 }
1429 Ordering::Equal => {}
1430 }
1431 }
1432 (size, request.ignore_empty_lines)
1433 });
1434 }
1435 }
1436 }
1437
1438 yield_now().await;
1439 }
1440 }
1441
1442 indent_sizes
1443 .into_iter()
1444 .filter_map(|(row, (indent, ignore_empty_lines))| {
1445 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1446 None
1447 } else {
1448 Some((row, indent))
1449 }
1450 })
1451 .collect()
1452 })
1453 }
1454
1455 fn apply_autoindents(
1456 &mut self,
1457 indent_sizes: BTreeMap<u32, IndentSize>,
1458 cx: &mut Context<Self>,
1459 ) {
1460 self.autoindent_requests.clear();
1461
1462 let edits: Vec<_> = indent_sizes
1463 .into_iter()
1464 .filter_map(|(row, indent_size)| {
1465 let current_size = indent_size_for_line(self, row);
1466 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1467 })
1468 .collect();
1469
1470 let preserve_preview = self.preserve_preview();
1471 self.edit(edits, None, cx);
1472 if preserve_preview {
1473 self.refresh_preview();
1474 }
1475 }
1476
1477 /// Create a minimal edit that will cause the given row to be indented
1478 /// with the given size. After applying this edit, the length of the line
1479 /// will always be at least `new_size.len`.
1480 pub fn edit_for_indent_size_adjustment(
1481 row: u32,
1482 current_size: IndentSize,
1483 new_size: IndentSize,
1484 ) -> Option<(Range<Point>, String)> {
1485 if new_size.kind == current_size.kind {
1486 match new_size.len.cmp(¤t_size.len) {
1487 Ordering::Greater => {
1488 let point = Point::new(row, 0);
1489 Some((
1490 point..point,
1491 iter::repeat(new_size.char())
1492 .take((new_size.len - current_size.len) as usize)
1493 .collect::<String>(),
1494 ))
1495 }
1496
1497 Ordering::Less => Some((
1498 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1499 String::new(),
1500 )),
1501
1502 Ordering::Equal => None,
1503 }
1504 } else {
1505 Some((
1506 Point::new(row, 0)..Point::new(row, current_size.len),
1507 iter::repeat(new_size.char())
1508 .take(new_size.len as usize)
1509 .collect::<String>(),
1510 ))
1511 }
1512 }
1513
1514 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1515 /// and the given new text.
1516 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1517 let old_text = self.as_rope().clone();
1518 let base_version = self.version();
1519 cx.background_executor()
1520 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1521 let old_text = old_text.to_string();
1522 let line_ending = LineEnding::detect(&new_text);
1523 LineEnding::normalize(&mut new_text);
1524
1525 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1526 let empty: Arc<str> = Arc::default();
1527
1528 let mut edits = Vec::new();
1529 let mut old_offset = 0;
1530 let mut new_offset = 0;
1531 let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
1532 for change in diff.iter_all_changes().map(Some).chain([None]) {
1533 if let Some(change) = &change {
1534 let len = change.value().len();
1535 match change.tag() {
1536 ChangeTag::Equal => {
1537 old_offset += len;
1538 new_offset += len;
1539 }
1540 ChangeTag::Delete => {
1541 let old_end_offset = old_offset + len;
1542 if let Some((last_old_range, _)) = &mut last_edit {
1543 last_old_range.end = old_end_offset;
1544 } else {
1545 last_edit =
1546 Some((old_offset..old_end_offset, new_offset..new_offset));
1547 }
1548 old_offset = old_end_offset;
1549 }
1550 ChangeTag::Insert => {
1551 let new_end_offset = new_offset + len;
1552 if let Some((_, last_new_range)) = &mut last_edit {
1553 last_new_range.end = new_end_offset;
1554 } else {
1555 last_edit =
1556 Some((old_offset..old_offset, new_offset..new_end_offset));
1557 }
1558 new_offset = new_end_offset;
1559 }
1560 }
1561 }
1562
1563 if let Some((old_range, new_range)) = &last_edit {
1564 if old_offset > old_range.end
1565 || new_offset > new_range.end
1566 || change.is_none()
1567 {
1568 let text = if new_range.is_empty() {
1569 empty.clone()
1570 } else {
1571 new_text[new_range.clone()].into()
1572 };
1573 edits.push((old_range.clone(), text));
1574 last_edit.take();
1575 }
1576 }
1577 }
1578
1579 Diff {
1580 base_version,
1581 line_ending,
1582 edits,
1583 }
1584 })
1585 }
1586
1587 /// Spawns a background task that searches the buffer for any whitespace
1588 /// at the ends of a lines, and returns a `Diff` that removes that whitespace.
1589 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1590 let old_text = self.as_rope().clone();
1591 let line_ending = self.line_ending();
1592 let base_version = self.version();
1593 cx.background_executor().spawn(async move {
1594 let ranges = trailing_whitespace_ranges(&old_text);
1595 let empty = Arc::<str>::from("");
1596 Diff {
1597 base_version,
1598 line_ending,
1599 edits: ranges
1600 .into_iter()
1601 .map(|range| (range, empty.clone()))
1602 .collect(),
1603 }
1604 })
1605 }
1606
1607 /// Ensures that the buffer ends with a single newline character, and
1608 /// no other whitespace.
1609 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1610 let len = self.len();
1611 let mut offset = len;
1612 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1613 let non_whitespace_len = chunk
1614 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1615 .len();
1616 offset -= chunk.len();
1617 offset += non_whitespace_len;
1618 if non_whitespace_len != 0 {
1619 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1620 return;
1621 }
1622 break;
1623 }
1624 }
1625 self.edit([(offset..len, "\n")], None, cx);
1626 }
1627
1628 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1629 /// calculated, then adjust the diff to account for those changes, and discard any
1630 /// parts of the diff that conflict with those changes.
1631 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1632 // Check for any edits to the buffer that have occurred since this diff
1633 // was computed.
1634 let snapshot = self.snapshot();
1635 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1636 let mut delta = 0;
1637 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1638 while let Some(edit_since) = edits_since.peek() {
1639 // If the edit occurs after a diff hunk, then it does not
1640 // affect that hunk.
1641 if edit_since.old.start > range.end {
1642 break;
1643 }
1644 // If the edit precedes the diff hunk, then adjust the hunk
1645 // to reflect the edit.
1646 else if edit_since.old.end < range.start {
1647 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1648 edits_since.next();
1649 }
1650 // If the edit intersects a diff hunk, then discard that hunk.
1651 else {
1652 return None;
1653 }
1654 }
1655
1656 let start = (range.start as i64 + delta) as usize;
1657 let end = (range.end as i64 + delta) as usize;
1658 Some((start..end, new_text))
1659 });
1660
1661 self.start_transaction();
1662 self.text.set_line_ending(diff.line_ending);
1663 self.edit(adjusted_edits, None, cx);
1664 self.end_transaction(cx)
1665 }
1666
1667 fn has_unsaved_edits(&self) -> bool {
1668 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1669
1670 if last_version == self.version {
1671 self.has_unsaved_edits
1672 .set((last_version, has_unsaved_edits));
1673 return has_unsaved_edits;
1674 }
1675
1676 let has_edits = self.has_edits_since(&self.saved_version);
1677 self.has_unsaved_edits
1678 .set((self.version.clone(), has_edits));
1679 has_edits
1680 }
1681
1682 /// Checks if the buffer has unsaved changes.
1683 pub fn is_dirty(&self) -> bool {
1684 self.capability != Capability::ReadOnly
1685 && (self.has_conflict
1686 || self.file.as_ref().map_or(false, |file| {
1687 matches!(file.disk_state(), DiskState::New | DiskState::Deleted)
1688 })
1689 || self.has_unsaved_edits())
1690 }
1691
1692 /// Checks if the buffer and its file have both changed since the buffer
1693 /// was last saved or reloaded.
1694 pub fn has_conflict(&self) -> bool {
1695 if self.has_conflict {
1696 return true;
1697 }
1698 let Some(file) = self.file.as_ref() else {
1699 return false;
1700 };
1701 match file.disk_state() {
1702 DiskState::New => false,
1703 DiskState::Present { mtime } => match self.saved_mtime {
1704 Some(saved_mtime) => {
1705 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1706 }
1707 None => true,
1708 },
1709 DiskState::Deleted => true,
1710 }
1711 }
1712
1713 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1714 pub fn subscribe(&mut self) -> Subscription {
1715 self.text.subscribe()
1716 }
1717
1718 /// Starts a transaction, if one is not already in-progress. When undoing or
1719 /// redoing edits, all of the edits performed within a transaction are undone
1720 /// or redone together.
1721 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1722 self.start_transaction_at(Instant::now())
1723 }
1724
1725 /// Starts a transaction, providing the current time. Subsequent transactions
1726 /// that occur within a short period of time will be grouped together. This
1727 /// is controlled by the buffer's undo grouping duration.
1728 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1729 self.transaction_depth += 1;
1730 if self.was_dirty_before_starting_transaction.is_none() {
1731 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1732 }
1733 self.text.start_transaction_at(now)
1734 }
1735
1736 /// Terminates the current transaction, if this is the outermost transaction.
1737 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
1738 self.end_transaction_at(Instant::now(), cx)
1739 }
1740
1741 /// Terminates the current transaction, providing the current time. Subsequent transactions
1742 /// that occur within a short period of time will be grouped together. This
1743 /// is controlled by the buffer's undo grouping duration.
1744 pub fn end_transaction_at(
1745 &mut self,
1746 now: Instant,
1747 cx: &mut Context<Self>,
1748 ) -> Option<TransactionId> {
1749 assert!(self.transaction_depth > 0);
1750 self.transaction_depth -= 1;
1751 let was_dirty = if self.transaction_depth == 0 {
1752 self.was_dirty_before_starting_transaction.take().unwrap()
1753 } else {
1754 false
1755 };
1756 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1757 self.did_edit(&start_version, was_dirty, cx);
1758 Some(transaction_id)
1759 } else {
1760 None
1761 }
1762 }
1763
1764 /// Manually add a transaction to the buffer's undo history.
1765 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1766 self.text.push_transaction(transaction, now);
1767 }
1768
1769 /// Prevent the last transaction from being grouped with any subsequent transactions,
1770 /// even if they occur with the buffer's undo grouping duration.
1771 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1772 self.text.finalize_last_transaction()
1773 }
1774
1775 /// Manually group all changes since a given transaction.
1776 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1777 self.text.group_until_transaction(transaction_id);
1778 }
1779
1780 /// Manually remove a transaction from the buffer's undo history
1781 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1782 self.text.forget_transaction(transaction_id);
1783 }
1784
1785 /// Manually merge two adjacent transactions in the buffer's undo history.
1786 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1787 self.text.merge_transactions(transaction, destination);
1788 }
1789
1790 /// Waits for the buffer to receive operations with the given timestamps.
1791 pub fn wait_for_edits(
1792 &mut self,
1793 edit_ids: impl IntoIterator<Item = clock::Lamport>,
1794 ) -> impl Future<Output = Result<()>> {
1795 self.text.wait_for_edits(edit_ids)
1796 }
1797
1798 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
1799 pub fn wait_for_anchors(
1800 &mut self,
1801 anchors: impl IntoIterator<Item = Anchor>,
1802 ) -> impl 'static + Future<Output = Result<()>> {
1803 self.text.wait_for_anchors(anchors)
1804 }
1805
1806 /// Waits for the buffer to receive operations up to the given version.
1807 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1808 self.text.wait_for_version(version)
1809 }
1810
1811 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
1812 /// [`Buffer::wait_for_version`] to resolve with an error.
1813 pub fn give_up_waiting(&mut self) {
1814 self.text.give_up_waiting();
1815 }
1816
1817 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
1818 pub fn set_active_selections(
1819 &mut self,
1820 selections: Arc<[Selection<Anchor>]>,
1821 line_mode: bool,
1822 cursor_shape: CursorShape,
1823 cx: &mut Context<Self>,
1824 ) {
1825 let lamport_timestamp = self.text.lamport_clock.tick();
1826 self.remote_selections.insert(
1827 self.text.replica_id(),
1828 SelectionSet {
1829 selections: selections.clone(),
1830 lamport_timestamp,
1831 line_mode,
1832 cursor_shape,
1833 },
1834 );
1835 self.send_operation(
1836 Operation::UpdateSelections {
1837 selections,
1838 line_mode,
1839 lamport_timestamp,
1840 cursor_shape,
1841 },
1842 true,
1843 cx,
1844 );
1845 self.non_text_state_update_count += 1;
1846 cx.notify();
1847 }
1848
1849 /// Clears the selections, so that other replicas of the buffer do not see any selections for
1850 /// this replica.
1851 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
1852 if self
1853 .remote_selections
1854 .get(&self.text.replica_id())
1855 .map_or(true, |set| !set.selections.is_empty())
1856 {
1857 self.set_active_selections(Arc::default(), false, Default::default(), cx);
1858 }
1859 }
1860
1861 /// Replaces the buffer's entire text.
1862 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
1863 where
1864 T: Into<Arc<str>>,
1865 {
1866 self.autoindent_requests.clear();
1867 self.edit([(0..self.len(), text)], None, cx)
1868 }
1869
1870 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
1871 /// delete, and a string of text to insert at that location.
1872 ///
1873 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
1874 /// request for the edited ranges, which will be processed when the buffer finishes
1875 /// parsing.
1876 ///
1877 /// Parsing takes place at the end of a transaction, and may compute synchronously
1878 /// or asynchronously, depending on the changes.
1879 pub fn edit<I, S, T>(
1880 &mut self,
1881 edits_iter: I,
1882 autoindent_mode: Option<AutoindentMode>,
1883 cx: &mut Context<Self>,
1884 ) -> Option<clock::Lamport>
1885 where
1886 I: IntoIterator<Item = (Range<S>, T)>,
1887 S: ToOffset,
1888 T: Into<Arc<str>>,
1889 {
1890 // Skip invalid edits and coalesce contiguous ones.
1891 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1892 for (range, new_text) in edits_iter {
1893 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1894 if range.start > range.end {
1895 mem::swap(&mut range.start, &mut range.end);
1896 }
1897 let new_text = new_text.into();
1898 if !new_text.is_empty() || !range.is_empty() {
1899 if let Some((prev_range, prev_text)) = edits.last_mut() {
1900 if prev_range.end >= range.start {
1901 prev_range.end = cmp::max(prev_range.end, range.end);
1902 *prev_text = format!("{prev_text}{new_text}").into();
1903 } else {
1904 edits.push((range, new_text));
1905 }
1906 } else {
1907 edits.push((range, new_text));
1908 }
1909 }
1910 }
1911 if edits.is_empty() {
1912 return None;
1913 }
1914
1915 self.start_transaction();
1916 self.pending_autoindent.take();
1917 let autoindent_request = autoindent_mode
1918 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1919
1920 let edit_operation = self.text.edit(edits.iter().cloned());
1921 let edit_id = edit_operation.timestamp();
1922
1923 if let Some((before_edit, mode)) = autoindent_request {
1924 let mut delta = 0isize;
1925 let entries = edits
1926 .into_iter()
1927 .enumerate()
1928 .zip(&edit_operation.as_edit().unwrap().new_text)
1929 .map(|((ix, (range, _)), new_text)| {
1930 let new_text_length = new_text.len();
1931 let old_start = range.start.to_point(&before_edit);
1932 let new_start = (delta + range.start as isize) as usize;
1933 let range_len = range.end - range.start;
1934 delta += new_text_length as isize - range_len as isize;
1935
1936 // Decide what range of the insertion to auto-indent, and whether
1937 // the first line of the insertion should be considered a newly-inserted line
1938 // or an edit to an existing line.
1939 let mut range_of_insertion_to_indent = 0..new_text_length;
1940 let mut first_line_is_new = true;
1941
1942 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
1943 let old_line_end = before_edit.line_len(old_start.row);
1944
1945 if old_start.column > old_line_start {
1946 first_line_is_new = false;
1947 }
1948
1949 if !new_text.contains('\n')
1950 && (old_start.column + (range_len as u32) < old_line_end
1951 || old_line_end == old_line_start)
1952 {
1953 first_line_is_new = false;
1954 }
1955
1956 // When inserting text starting with a newline, avoid auto-indenting the
1957 // previous line.
1958 if new_text.starts_with('\n') {
1959 range_of_insertion_to_indent.start += 1;
1960 first_line_is_new = true;
1961 }
1962
1963 let mut original_indent_column = None;
1964 if let AutoindentMode::Block {
1965 original_indent_columns,
1966 } = &mode
1967 {
1968 original_indent_column =
1969 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
1970 indent_size_for_text(
1971 new_text[range_of_insertion_to_indent.clone()].chars(),
1972 )
1973 .len
1974 }));
1975
1976 // Avoid auto-indenting the line after the edit.
1977 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
1978 range_of_insertion_to_indent.end -= 1;
1979 }
1980 }
1981
1982 AutoindentRequestEntry {
1983 first_line_is_new,
1984 original_indent_column,
1985 indent_size: before_edit.language_indent_size_at(range.start, cx),
1986 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
1987 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
1988 }
1989 })
1990 .collect();
1991
1992 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1993 before_edit,
1994 entries,
1995 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
1996 ignore_empty_lines: false,
1997 }));
1998 }
1999
2000 self.end_transaction(cx);
2001 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2002 Some(edit_id)
2003 }
2004
2005 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2006 if self.edits_since::<usize>(old_version).next().is_none() {
2007 return;
2008 }
2009
2010 self.reparse(cx);
2011
2012 cx.emit(BufferEvent::Edited);
2013 if was_dirty != self.is_dirty() {
2014 cx.emit(BufferEvent::DirtyChanged);
2015 }
2016 cx.notify();
2017 }
2018
2019 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2020 where
2021 I: IntoIterator<Item = Range<T>>,
2022 T: ToOffset + Copy,
2023 {
2024 let before_edit = self.snapshot();
2025 let entries = ranges
2026 .into_iter()
2027 .map(|range| AutoindentRequestEntry {
2028 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2029 first_line_is_new: true,
2030 indent_size: before_edit.language_indent_size_at(range.start, cx),
2031 original_indent_column: None,
2032 })
2033 .collect();
2034 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2035 before_edit,
2036 entries,
2037 is_block_mode: false,
2038 ignore_empty_lines: true,
2039 }));
2040 self.request_autoindent(cx);
2041 }
2042
2043 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2044 // You can also request the insertion of empty lines above and below the line starting at the returned point.
2045 pub fn insert_empty_line(
2046 &mut self,
2047 position: impl ToPoint,
2048 space_above: bool,
2049 space_below: bool,
2050 cx: &mut Context<Self>,
2051 ) -> Point {
2052 let mut position = position.to_point(self);
2053
2054 self.start_transaction();
2055
2056 self.edit(
2057 [(position..position, "\n")],
2058 Some(AutoindentMode::EachLine),
2059 cx,
2060 );
2061
2062 if position.column > 0 {
2063 position += Point::new(1, 0);
2064 }
2065
2066 if !self.is_line_blank(position.row) {
2067 self.edit(
2068 [(position..position, "\n")],
2069 Some(AutoindentMode::EachLine),
2070 cx,
2071 );
2072 }
2073
2074 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2075 self.edit(
2076 [(position..position, "\n")],
2077 Some(AutoindentMode::EachLine),
2078 cx,
2079 );
2080 position.row += 1;
2081 }
2082
2083 if space_below
2084 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2085 {
2086 self.edit(
2087 [(position..position, "\n")],
2088 Some(AutoindentMode::EachLine),
2089 cx,
2090 );
2091 }
2092
2093 self.end_transaction(cx);
2094
2095 position
2096 }
2097
2098 /// Applies the given remote operations to the buffer.
2099 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2100 self.pending_autoindent.take();
2101 let was_dirty = self.is_dirty();
2102 let old_version = self.version.clone();
2103 let mut deferred_ops = Vec::new();
2104 let buffer_ops = ops
2105 .into_iter()
2106 .filter_map(|op| match op {
2107 Operation::Buffer(op) => Some(op),
2108 _ => {
2109 if self.can_apply_op(&op) {
2110 self.apply_op(op, cx);
2111 } else {
2112 deferred_ops.push(op);
2113 }
2114 None
2115 }
2116 })
2117 .collect::<Vec<_>>();
2118 for operation in buffer_ops.iter() {
2119 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2120 }
2121 self.text.apply_ops(buffer_ops);
2122 self.deferred_ops.insert(deferred_ops);
2123 self.flush_deferred_ops(cx);
2124 self.did_edit(&old_version, was_dirty, cx);
2125 // Notify independently of whether the buffer was edited as the operations could include a
2126 // selection update.
2127 cx.notify();
2128 }
2129
2130 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2131 let mut deferred_ops = Vec::new();
2132 for op in self.deferred_ops.drain().iter().cloned() {
2133 if self.can_apply_op(&op) {
2134 self.apply_op(op, cx);
2135 } else {
2136 deferred_ops.push(op);
2137 }
2138 }
2139 self.deferred_ops.insert(deferred_ops);
2140 }
2141
2142 pub fn has_deferred_ops(&self) -> bool {
2143 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2144 }
2145
2146 fn can_apply_op(&self, operation: &Operation) -> bool {
2147 match operation {
2148 Operation::Buffer(_) => {
2149 unreachable!("buffer operations should never be applied at this layer")
2150 }
2151 Operation::UpdateDiagnostics {
2152 diagnostics: diagnostic_set,
2153 ..
2154 } => diagnostic_set.iter().all(|diagnostic| {
2155 self.text.can_resolve(&diagnostic.range.start)
2156 && self.text.can_resolve(&diagnostic.range.end)
2157 }),
2158 Operation::UpdateSelections { selections, .. } => selections
2159 .iter()
2160 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2161 Operation::UpdateCompletionTriggers { .. } => true,
2162 }
2163 }
2164
2165 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2166 match operation {
2167 Operation::Buffer(_) => {
2168 unreachable!("buffer operations should never be applied at this layer")
2169 }
2170 Operation::UpdateDiagnostics {
2171 server_id,
2172 diagnostics: diagnostic_set,
2173 lamport_timestamp,
2174 } => {
2175 let snapshot = self.snapshot();
2176 self.apply_diagnostic_update(
2177 server_id,
2178 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2179 lamport_timestamp,
2180 cx,
2181 );
2182 }
2183 Operation::UpdateSelections {
2184 selections,
2185 lamport_timestamp,
2186 line_mode,
2187 cursor_shape,
2188 } => {
2189 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2190 if set.lamport_timestamp > lamport_timestamp {
2191 return;
2192 }
2193 }
2194
2195 self.remote_selections.insert(
2196 lamport_timestamp.replica_id,
2197 SelectionSet {
2198 selections,
2199 lamport_timestamp,
2200 line_mode,
2201 cursor_shape,
2202 },
2203 );
2204 self.text.lamport_clock.observe(lamport_timestamp);
2205 self.non_text_state_update_count += 1;
2206 }
2207 Operation::UpdateCompletionTriggers {
2208 triggers,
2209 lamport_timestamp,
2210 server_id,
2211 } => {
2212 if triggers.is_empty() {
2213 self.completion_triggers_per_language_server
2214 .remove(&server_id);
2215 self.completion_triggers = self
2216 .completion_triggers_per_language_server
2217 .values()
2218 .flat_map(|triggers| triggers.into_iter().cloned())
2219 .collect();
2220 } else {
2221 self.completion_triggers_per_language_server
2222 .insert(server_id, triggers.iter().cloned().collect());
2223 self.completion_triggers.extend(triggers);
2224 }
2225 self.text.lamport_clock.observe(lamport_timestamp);
2226 }
2227 }
2228 }
2229
2230 fn apply_diagnostic_update(
2231 &mut self,
2232 server_id: LanguageServerId,
2233 diagnostics: DiagnosticSet,
2234 lamport_timestamp: clock::Lamport,
2235 cx: &mut Context<Self>,
2236 ) {
2237 if lamport_timestamp > self.diagnostics_timestamp {
2238 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2239 if diagnostics.is_empty() {
2240 if let Ok(ix) = ix {
2241 self.diagnostics.remove(ix);
2242 }
2243 } else {
2244 match ix {
2245 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2246 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2247 };
2248 }
2249 self.diagnostics_timestamp = lamport_timestamp;
2250 self.non_text_state_update_count += 1;
2251 self.text.lamport_clock.observe(lamport_timestamp);
2252 cx.notify();
2253 cx.emit(BufferEvent::DiagnosticsUpdated);
2254 }
2255 }
2256
2257 fn send_operation(&self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2258 cx.emit(BufferEvent::Operation {
2259 operation,
2260 is_local,
2261 });
2262 }
2263
2264 /// Removes the selections for a given peer.
2265 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2266 self.remote_selections.remove(&replica_id);
2267 cx.notify();
2268 }
2269
2270 /// Undoes the most recent transaction.
2271 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2272 let was_dirty = self.is_dirty();
2273 let old_version = self.version.clone();
2274
2275 if let Some((transaction_id, operation)) = self.text.undo() {
2276 self.send_operation(Operation::Buffer(operation), true, cx);
2277 self.did_edit(&old_version, was_dirty, cx);
2278 Some(transaction_id)
2279 } else {
2280 None
2281 }
2282 }
2283
2284 /// Manually undoes a specific transaction in the buffer's undo history.
2285 pub fn undo_transaction(
2286 &mut self,
2287 transaction_id: TransactionId,
2288 cx: &mut Context<Self>,
2289 ) -> bool {
2290 let was_dirty = self.is_dirty();
2291 let old_version = self.version.clone();
2292 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2293 self.send_operation(Operation::Buffer(operation), true, cx);
2294 self.did_edit(&old_version, was_dirty, cx);
2295 true
2296 } else {
2297 false
2298 }
2299 }
2300
2301 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2302 pub fn undo_to_transaction(
2303 &mut self,
2304 transaction_id: TransactionId,
2305 cx: &mut Context<Self>,
2306 ) -> bool {
2307 let was_dirty = self.is_dirty();
2308 let old_version = self.version.clone();
2309
2310 let operations = self.text.undo_to_transaction(transaction_id);
2311 let undone = !operations.is_empty();
2312 for operation in operations {
2313 self.send_operation(Operation::Buffer(operation), true, cx);
2314 }
2315 if undone {
2316 self.did_edit(&old_version, was_dirty, cx)
2317 }
2318 undone
2319 }
2320
2321 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2322 let was_dirty = self.is_dirty();
2323 let operation = self.text.undo_operations(counts);
2324 let old_version = self.version.clone();
2325 self.send_operation(Operation::Buffer(operation), true, cx);
2326 self.did_edit(&old_version, was_dirty, cx);
2327 }
2328
2329 /// Manually redoes a specific transaction in the buffer's redo history.
2330 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2331 let was_dirty = self.is_dirty();
2332 let old_version = self.version.clone();
2333
2334 if let Some((transaction_id, operation)) = self.text.redo() {
2335 self.send_operation(Operation::Buffer(operation), true, cx);
2336 self.did_edit(&old_version, was_dirty, cx);
2337 Some(transaction_id)
2338 } else {
2339 None
2340 }
2341 }
2342
2343 /// Manually undoes all changes until a given transaction in the buffer's redo history.
2344 pub fn redo_to_transaction(
2345 &mut self,
2346 transaction_id: TransactionId,
2347 cx: &mut Context<Self>,
2348 ) -> bool {
2349 let was_dirty = self.is_dirty();
2350 let old_version = self.version.clone();
2351
2352 let operations = self.text.redo_to_transaction(transaction_id);
2353 let redone = !operations.is_empty();
2354 for operation in operations {
2355 self.send_operation(Operation::Buffer(operation), true, cx);
2356 }
2357 if redone {
2358 self.did_edit(&old_version, was_dirty, cx)
2359 }
2360 redone
2361 }
2362
2363 /// Override current completion triggers with the user-provided completion triggers.
2364 pub fn set_completion_triggers(
2365 &mut self,
2366 server_id: LanguageServerId,
2367 triggers: BTreeSet<String>,
2368 cx: &mut Context<Self>,
2369 ) {
2370 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2371 if triggers.is_empty() {
2372 self.completion_triggers_per_language_server
2373 .remove(&server_id);
2374 self.completion_triggers = self
2375 .completion_triggers_per_language_server
2376 .values()
2377 .flat_map(|triggers| triggers.into_iter().cloned())
2378 .collect();
2379 } else {
2380 self.completion_triggers_per_language_server
2381 .insert(server_id, triggers.clone());
2382 self.completion_triggers.extend(triggers.iter().cloned());
2383 }
2384 self.send_operation(
2385 Operation::UpdateCompletionTriggers {
2386 triggers: triggers.iter().cloned().collect(),
2387 lamport_timestamp: self.completion_triggers_timestamp,
2388 server_id,
2389 },
2390 true,
2391 cx,
2392 );
2393 cx.notify();
2394 }
2395
2396 /// Returns a list of strings which trigger a completion menu for this language.
2397 /// Usually this is driven by LSP server which returns a list of trigger characters for completions.
2398 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2399 &self.completion_triggers
2400 }
2401
2402 /// Call this directly after performing edits to prevent the preview tab
2403 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2404 /// to return false until there are additional edits.
2405 pub fn refresh_preview(&mut self) {
2406 self.preview_version = self.version.clone();
2407 }
2408
2409 /// Whether we should preserve the preview status of a tab containing this buffer.
2410 pub fn preserve_preview(&self) -> bool {
2411 !self.has_edits_since(&self.preview_version)
2412 }
2413}
2414
2415#[doc(hidden)]
2416#[cfg(any(test, feature = "test-support"))]
2417impl Buffer {
2418 pub fn edit_via_marked_text(
2419 &mut self,
2420 marked_string: &str,
2421 autoindent_mode: Option<AutoindentMode>,
2422 cx: &mut Context<Self>,
2423 ) {
2424 let edits = self.edits_for_marked_text(marked_string);
2425 self.edit(edits, autoindent_mode, cx);
2426 }
2427
2428 pub fn set_group_interval(&mut self, group_interval: Duration) {
2429 self.text.set_group_interval(group_interval);
2430 }
2431
2432 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2433 where
2434 T: rand::Rng,
2435 {
2436 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2437 let mut last_end = None;
2438 for _ in 0..old_range_count {
2439 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2440 break;
2441 }
2442
2443 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2444 let mut range = self.random_byte_range(new_start, rng);
2445 if rng.gen_bool(0.2) {
2446 mem::swap(&mut range.start, &mut range.end);
2447 }
2448 last_end = Some(range.end);
2449
2450 let new_text_len = rng.gen_range(0..10);
2451 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2452 new_text = new_text.to_uppercase();
2453
2454 edits.push((range, new_text));
2455 }
2456 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2457 self.edit(edits, None, cx);
2458 }
2459
2460 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2461 let was_dirty = self.is_dirty();
2462 let old_version = self.version.clone();
2463
2464 let ops = self.text.randomly_undo_redo(rng);
2465 if !ops.is_empty() {
2466 for op in ops {
2467 self.send_operation(Operation::Buffer(op), true, cx);
2468 self.did_edit(&old_version, was_dirty, cx);
2469 }
2470 }
2471 }
2472}
2473
2474impl EventEmitter<BufferEvent> for Buffer {}
2475
2476impl Deref for Buffer {
2477 type Target = TextBuffer;
2478
2479 fn deref(&self) -> &Self::Target {
2480 &self.text
2481 }
2482}
2483
2484impl BufferSnapshot {
2485 /// Returns [`IndentSize`] for a given line that respects user settings and /// language preferences.
2486 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2487 indent_size_for_line(self, row)
2488 }
2489 /// Returns [`IndentSize`] for a given position that respects user settings
2490 /// and language preferences.
2491 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2492 let settings = language_settings(
2493 self.language_at(position).map(|l| l.name()),
2494 self.file(),
2495 cx,
2496 );
2497 if settings.hard_tabs {
2498 IndentSize::tab()
2499 } else {
2500 IndentSize::spaces(settings.tab_size.get())
2501 }
2502 }
2503
2504 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2505 /// is passed in as `single_indent_size`.
2506 pub fn suggested_indents(
2507 &self,
2508 rows: impl Iterator<Item = u32>,
2509 single_indent_size: IndentSize,
2510 ) -> BTreeMap<u32, IndentSize> {
2511 let mut result = BTreeMap::new();
2512
2513 for row_range in contiguous_ranges(rows, 10) {
2514 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2515 Some(suggestions) => suggestions,
2516 _ => break,
2517 };
2518
2519 for (row, suggestion) in row_range.zip(suggestions) {
2520 let indent_size = if let Some(suggestion) = suggestion {
2521 result
2522 .get(&suggestion.basis_row)
2523 .copied()
2524 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2525 .with_delta(suggestion.delta, single_indent_size)
2526 } else {
2527 self.indent_size_for_line(row)
2528 };
2529
2530 result.insert(row, indent_size);
2531 }
2532 }
2533
2534 result
2535 }
2536
2537 fn suggest_autoindents(
2538 &self,
2539 row_range: Range<u32>,
2540 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2541 let config = &self.language.as_ref()?.config;
2542 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2543
2544 // Find the suggested indentation ranges based on the syntax tree.
2545 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2546 let end = Point::new(row_range.end, 0);
2547 let range = (start..end).to_offset(&self.text);
2548 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2549 Some(&grammar.indents_config.as_ref()?.query)
2550 });
2551 let indent_configs = matches
2552 .grammars()
2553 .iter()
2554 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2555 .collect::<Vec<_>>();
2556
2557 let mut indent_ranges = Vec::<Range<Point>>::new();
2558 let mut outdent_positions = Vec::<Point>::new();
2559 while let Some(mat) = matches.peek() {
2560 let mut start: Option<Point> = None;
2561 let mut end: Option<Point> = None;
2562
2563 let config = &indent_configs[mat.grammar_index];
2564 for capture in mat.captures {
2565 if capture.index == config.indent_capture_ix {
2566 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2567 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2568 } else if Some(capture.index) == config.start_capture_ix {
2569 start = Some(Point::from_ts_point(capture.node.end_position()));
2570 } else if Some(capture.index) == config.end_capture_ix {
2571 end = Some(Point::from_ts_point(capture.node.start_position()));
2572 } else if Some(capture.index) == config.outdent_capture_ix {
2573 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2574 }
2575 }
2576
2577 matches.advance();
2578 if let Some((start, end)) = start.zip(end) {
2579 if start.row == end.row {
2580 continue;
2581 }
2582
2583 let range = start..end;
2584 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2585 Err(ix) => indent_ranges.insert(ix, range),
2586 Ok(ix) => {
2587 let prev_range = &mut indent_ranges[ix];
2588 prev_range.end = prev_range.end.max(range.end);
2589 }
2590 }
2591 }
2592 }
2593
2594 let mut error_ranges = Vec::<Range<Point>>::new();
2595 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2596 Some(&grammar.error_query)
2597 });
2598 while let Some(mat) = matches.peek() {
2599 let node = mat.captures[0].node;
2600 let start = Point::from_ts_point(node.start_position());
2601 let end = Point::from_ts_point(node.end_position());
2602 let range = start..end;
2603 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2604 Ok(ix) | Err(ix) => ix,
2605 };
2606 let mut end_ix = ix;
2607 while let Some(existing_range) = error_ranges.get(end_ix) {
2608 if existing_range.end < end {
2609 end_ix += 1;
2610 } else {
2611 break;
2612 }
2613 }
2614 error_ranges.splice(ix..end_ix, [range]);
2615 matches.advance();
2616 }
2617
2618 outdent_positions.sort();
2619 for outdent_position in outdent_positions {
2620 // find the innermost indent range containing this outdent_position
2621 // set its end to the outdent position
2622 if let Some(range_to_truncate) = indent_ranges
2623 .iter_mut()
2624 .filter(|indent_range| indent_range.contains(&outdent_position))
2625 .last()
2626 {
2627 range_to_truncate.end = outdent_position;
2628 }
2629 }
2630
2631 // Find the suggested indentation increases and decreased based on regexes.
2632 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2633 self.for_each_line(
2634 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2635 ..Point::new(row_range.end, 0),
2636 |row, line| {
2637 if config
2638 .decrease_indent_pattern
2639 .as_ref()
2640 .map_or(false, |regex| regex.is_match(line))
2641 {
2642 indent_change_rows.push((row, Ordering::Less));
2643 }
2644 if config
2645 .increase_indent_pattern
2646 .as_ref()
2647 .map_or(false, |regex| regex.is_match(line))
2648 {
2649 indent_change_rows.push((row + 1, Ordering::Greater));
2650 }
2651 },
2652 );
2653
2654 let mut indent_changes = indent_change_rows.into_iter().peekable();
2655 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2656 prev_non_blank_row.unwrap_or(0)
2657 } else {
2658 row_range.start.saturating_sub(1)
2659 };
2660 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2661 Some(row_range.map(move |row| {
2662 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2663
2664 let mut indent_from_prev_row = false;
2665 let mut outdent_from_prev_row = false;
2666 let mut outdent_to_row = u32::MAX;
2667
2668 while let Some((indent_row, delta)) = indent_changes.peek() {
2669 match indent_row.cmp(&row) {
2670 Ordering::Equal => match delta {
2671 Ordering::Less => outdent_from_prev_row = true,
2672 Ordering::Greater => indent_from_prev_row = true,
2673 _ => {}
2674 },
2675
2676 Ordering::Greater => break,
2677 Ordering::Less => {}
2678 }
2679
2680 indent_changes.next();
2681 }
2682
2683 for range in &indent_ranges {
2684 if range.start.row >= row {
2685 break;
2686 }
2687 if range.start.row == prev_row && range.end > row_start {
2688 indent_from_prev_row = true;
2689 }
2690 if range.end > prev_row_start && range.end <= row_start {
2691 outdent_to_row = outdent_to_row.min(range.start.row);
2692 }
2693 }
2694
2695 let within_error = error_ranges
2696 .iter()
2697 .any(|e| e.start.row < row && e.end > row_start);
2698
2699 let suggestion = if outdent_to_row == prev_row
2700 || (outdent_from_prev_row && indent_from_prev_row)
2701 {
2702 Some(IndentSuggestion {
2703 basis_row: prev_row,
2704 delta: Ordering::Equal,
2705 within_error,
2706 })
2707 } else if indent_from_prev_row {
2708 Some(IndentSuggestion {
2709 basis_row: prev_row,
2710 delta: Ordering::Greater,
2711 within_error,
2712 })
2713 } else if outdent_to_row < prev_row {
2714 Some(IndentSuggestion {
2715 basis_row: outdent_to_row,
2716 delta: Ordering::Equal,
2717 within_error,
2718 })
2719 } else if outdent_from_prev_row {
2720 Some(IndentSuggestion {
2721 basis_row: prev_row,
2722 delta: Ordering::Less,
2723 within_error,
2724 })
2725 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2726 {
2727 Some(IndentSuggestion {
2728 basis_row: prev_row,
2729 delta: Ordering::Equal,
2730 within_error,
2731 })
2732 } else {
2733 None
2734 };
2735
2736 prev_row = row;
2737 prev_row_start = row_start;
2738 suggestion
2739 }))
2740 }
2741
2742 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2743 while row > 0 {
2744 row -= 1;
2745 if !self.is_line_blank(row) {
2746 return Some(row);
2747 }
2748 }
2749 None
2750 }
2751
2752 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
2753 let captures = self.syntax.captures(range, &self.text, |grammar| {
2754 grammar.highlights_query.as_ref()
2755 });
2756 let highlight_maps = captures
2757 .grammars()
2758 .iter()
2759 .map(|grammar| grammar.highlight_map())
2760 .collect();
2761 (captures, highlight_maps)
2762 }
2763
2764 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
2765 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
2766 /// returned in chunks where each chunk has a single syntax highlighting style and
2767 /// diagnostic status.
2768 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2769 let range = range.start.to_offset(self)..range.end.to_offset(self);
2770
2771 let mut syntax = None;
2772 if language_aware {
2773 syntax = Some(self.get_highlights(range.clone()));
2774 }
2775 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
2776 let diagnostics = language_aware;
2777 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
2778 }
2779
2780 /// Invokes the given callback for each line of text in the given range of the buffer.
2781 /// Uses callback to avoid allocating a string for each line.
2782 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2783 let mut line = String::new();
2784 let mut row = range.start.row;
2785 for chunk in self
2786 .as_rope()
2787 .chunks_in_range(range.to_offset(self))
2788 .chain(["\n"])
2789 {
2790 for (newline_ix, text) in chunk.split('\n').enumerate() {
2791 if newline_ix > 0 {
2792 callback(row, &line);
2793 row += 1;
2794 line.clear();
2795 }
2796 line.push_str(text);
2797 }
2798 }
2799 }
2800
2801 /// Iterates over every [`SyntaxLayer`] in the buffer.
2802 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
2803 self.syntax
2804 .layers_for_range(0..self.len(), &self.text, true)
2805 }
2806
2807 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
2808 let offset = position.to_offset(self);
2809 self.syntax
2810 .layers_for_range(offset..offset, &self.text, false)
2811 .filter(|l| l.node().end_byte() > offset)
2812 .last()
2813 }
2814
2815 /// Returns the main [`Language`].
2816 pub fn language(&self) -> Option<&Arc<Language>> {
2817 self.language.as_ref()
2818 }
2819
2820 /// Returns the [`Language`] at the given location.
2821 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2822 self.syntax_layer_at(position)
2823 .map(|info| info.language)
2824 .or(self.language.as_ref())
2825 }
2826
2827 /// Returns the settings for the language at the given location.
2828 pub fn settings_at<'a, D: ToOffset>(
2829 &'a self,
2830 position: D,
2831 cx: &'a App,
2832 ) -> Cow<'a, LanguageSettings> {
2833 language_settings(
2834 self.language_at(position).map(|l| l.name()),
2835 self.file.as_ref(),
2836 cx,
2837 )
2838 }
2839
2840 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
2841 CharClassifier::new(self.language_scope_at(point))
2842 }
2843
2844 /// Returns the [`LanguageScope`] at the given location.
2845 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2846 let offset = position.to_offset(self);
2847 let mut scope = None;
2848 let mut smallest_range: Option<Range<usize>> = None;
2849
2850 // Use the layer that has the smallest node intersecting the given point.
2851 for layer in self
2852 .syntax
2853 .layers_for_range(offset..offset, &self.text, false)
2854 {
2855 let mut cursor = layer.node().walk();
2856
2857 let mut range = None;
2858 loop {
2859 let child_range = cursor.node().byte_range();
2860 if !child_range.to_inclusive().contains(&offset) {
2861 break;
2862 }
2863
2864 range = Some(child_range);
2865 if cursor.goto_first_child_for_byte(offset).is_none() {
2866 break;
2867 }
2868 }
2869
2870 if let Some(range) = range {
2871 if smallest_range
2872 .as_ref()
2873 .map_or(true, |smallest_range| range.len() < smallest_range.len())
2874 {
2875 smallest_range = Some(range);
2876 scope = Some(LanguageScope {
2877 language: layer.language.clone(),
2878 override_id: layer.override_id(offset, &self.text),
2879 });
2880 }
2881 }
2882 }
2883
2884 scope.or_else(|| {
2885 self.language.clone().map(|language| LanguageScope {
2886 language,
2887 override_id: None,
2888 })
2889 })
2890 }
2891
2892 /// Returns a tuple of the range and character kind of the word
2893 /// surrounding the given position.
2894 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2895 let mut start = start.to_offset(self);
2896 let mut end = start;
2897 let mut next_chars = self.chars_at(start).peekable();
2898 let mut prev_chars = self.reversed_chars_at(start).peekable();
2899
2900 let classifier = self.char_classifier_at(start);
2901 let word_kind = cmp::max(
2902 prev_chars.peek().copied().map(|c| classifier.kind(c)),
2903 next_chars.peek().copied().map(|c| classifier.kind(c)),
2904 );
2905
2906 for ch in prev_chars {
2907 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
2908 start -= ch.len_utf8();
2909 } else {
2910 break;
2911 }
2912 }
2913
2914 for ch in next_chars {
2915 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
2916 end += ch.len_utf8();
2917 } else {
2918 break;
2919 }
2920 }
2921
2922 (start..end, word_kind)
2923 }
2924
2925 /// Returns the closest syntax node enclosing the given range.
2926 pub fn syntax_ancestor<'a, T: ToOffset>(
2927 &'a self,
2928 range: Range<T>,
2929 ) -> Option<tree_sitter::Node<'a>> {
2930 let range = range.start.to_offset(self)..range.end.to_offset(self);
2931 let mut result: Option<tree_sitter::Node<'a>> = None;
2932 'outer: for layer in self
2933 .syntax
2934 .layers_for_range(range.clone(), &self.text, true)
2935 {
2936 let mut cursor = layer.node().walk();
2937
2938 // Descend to the first leaf that touches the start of the range,
2939 // and if the range is non-empty, extends beyond the start.
2940 while cursor.goto_first_child_for_byte(range.start).is_some() {
2941 if !range.is_empty() && cursor.node().end_byte() == range.start {
2942 cursor.goto_next_sibling();
2943 }
2944 }
2945
2946 // Ascend to the smallest ancestor that strictly contains the range.
2947 loop {
2948 let node_range = cursor.node().byte_range();
2949 if node_range.start <= range.start
2950 && node_range.end >= range.end
2951 && node_range.len() > range.len()
2952 {
2953 break;
2954 }
2955 if !cursor.goto_parent() {
2956 continue 'outer;
2957 }
2958 }
2959
2960 let left_node = cursor.node();
2961 let mut layer_result = left_node;
2962
2963 // For an empty range, try to find another node immediately to the right of the range.
2964 if left_node.end_byte() == range.start {
2965 let mut right_node = None;
2966 while !cursor.goto_next_sibling() {
2967 if !cursor.goto_parent() {
2968 break;
2969 }
2970 }
2971
2972 while cursor.node().start_byte() == range.start {
2973 right_node = Some(cursor.node());
2974 if !cursor.goto_first_child() {
2975 break;
2976 }
2977 }
2978
2979 // If there is a candidate node on both sides of the (empty) range, then
2980 // decide between the two by favoring a named node over an anonymous token.
2981 // If both nodes are the same in that regard, favor the right one.
2982 if let Some(right_node) = right_node {
2983 if right_node.is_named() || !left_node.is_named() {
2984 layer_result = right_node;
2985 }
2986 }
2987 }
2988
2989 if let Some(previous_result) = &result {
2990 if previous_result.byte_range().len() < layer_result.byte_range().len() {
2991 continue;
2992 }
2993 }
2994 result = Some(layer_result);
2995 }
2996
2997 result
2998 }
2999
3000 /// Returns the outline for the buffer.
3001 ///
3002 /// This method allows passing an optional [`SyntaxTheme`] to
3003 /// syntax-highlight the returned symbols.
3004 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3005 self.outline_items_containing(0..self.len(), true, theme)
3006 .map(Outline::new)
3007 }
3008
3009 /// Returns all the symbols that contain the given position.
3010 ///
3011 /// This method allows passing an optional [`SyntaxTheme`] to
3012 /// syntax-highlight the returned symbols.
3013 pub fn symbols_containing<T: ToOffset>(
3014 &self,
3015 position: T,
3016 theme: Option<&SyntaxTheme>,
3017 ) -> Option<Vec<OutlineItem<Anchor>>> {
3018 let position = position.to_offset(self);
3019 let mut items = self.outline_items_containing(
3020 position.saturating_sub(1)..self.len().min(position + 1),
3021 false,
3022 theme,
3023 )?;
3024 let mut prev_depth = None;
3025 items.retain(|item| {
3026 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3027 prev_depth = Some(item.depth);
3028 result
3029 });
3030 Some(items)
3031 }
3032
3033 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3034 let range = range.to_offset(self);
3035 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3036 grammar.outline_config.as_ref().map(|c| &c.query)
3037 });
3038 let configs = matches
3039 .grammars()
3040 .iter()
3041 .map(|g| g.outline_config.as_ref().unwrap())
3042 .collect::<Vec<_>>();
3043
3044 while let Some(mat) = matches.peek() {
3045 let config = &configs[mat.grammar_index];
3046 let containing_item_node = maybe!({
3047 let item_node = mat.captures.iter().find_map(|cap| {
3048 if cap.index == config.item_capture_ix {
3049 Some(cap.node)
3050 } else {
3051 None
3052 }
3053 })?;
3054
3055 let item_byte_range = item_node.byte_range();
3056 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3057 None
3058 } else {
3059 Some(item_node)
3060 }
3061 });
3062
3063 if let Some(item_node) = containing_item_node {
3064 return Some(
3065 Point::from_ts_point(item_node.start_position())
3066 ..Point::from_ts_point(item_node.end_position()),
3067 );
3068 }
3069
3070 matches.advance();
3071 }
3072 None
3073 }
3074
3075 pub fn outline_items_containing<T: ToOffset>(
3076 &self,
3077 range: Range<T>,
3078 include_extra_context: bool,
3079 theme: Option<&SyntaxTheme>,
3080 ) -> Option<Vec<OutlineItem<Anchor>>> {
3081 let range = range.to_offset(self);
3082 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3083 grammar.outline_config.as_ref().map(|c| &c.query)
3084 });
3085 let configs = matches
3086 .grammars()
3087 .iter()
3088 .map(|g| g.outline_config.as_ref().unwrap())
3089 .collect::<Vec<_>>();
3090
3091 let mut items = Vec::new();
3092 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3093 while let Some(mat) = matches.peek() {
3094 let config = &configs[mat.grammar_index];
3095 if let Some(item) =
3096 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3097 {
3098 items.push(item);
3099 } else if let Some(capture) = mat
3100 .captures
3101 .iter()
3102 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3103 {
3104 let capture_range = capture.node.start_position()..capture.node.end_position();
3105 let mut capture_row_range =
3106 capture_range.start.row as u32..capture_range.end.row as u32;
3107 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3108 {
3109 capture_row_range.end -= 1;
3110 }
3111 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3112 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3113 last_row_range.end = capture_row_range.end;
3114 } else {
3115 annotation_row_ranges.push(capture_row_range);
3116 }
3117 } else {
3118 annotation_row_ranges.push(capture_row_range);
3119 }
3120 }
3121 matches.advance();
3122 }
3123
3124 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3125
3126 // Assign depths based on containment relationships and convert to anchors.
3127 let mut item_ends_stack = Vec::<Point>::new();
3128 let mut anchor_items = Vec::new();
3129 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3130 for item in items {
3131 while let Some(last_end) = item_ends_stack.last().copied() {
3132 if last_end < item.range.end {
3133 item_ends_stack.pop();
3134 } else {
3135 break;
3136 }
3137 }
3138
3139 let mut annotation_row_range = None;
3140 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3141 let row_preceding_item = item.range.start.row.saturating_sub(1);
3142 if next_annotation_row_range.end < row_preceding_item {
3143 annotation_row_ranges.next();
3144 } else {
3145 if next_annotation_row_range.end == row_preceding_item {
3146 annotation_row_range = Some(next_annotation_row_range.clone());
3147 annotation_row_ranges.next();
3148 }
3149 break;
3150 }
3151 }
3152
3153 anchor_items.push(OutlineItem {
3154 depth: item_ends_stack.len(),
3155 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3156 text: item.text,
3157 highlight_ranges: item.highlight_ranges,
3158 name_ranges: item.name_ranges,
3159 body_range: item.body_range.map(|body_range| {
3160 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3161 }),
3162 annotation_range: annotation_row_range.map(|annotation_range| {
3163 self.anchor_after(Point::new(annotation_range.start, 0))
3164 ..self.anchor_before(Point::new(
3165 annotation_range.end,
3166 self.line_len(annotation_range.end),
3167 ))
3168 }),
3169 });
3170 item_ends_stack.push(item.range.end);
3171 }
3172
3173 Some(anchor_items)
3174 }
3175
3176 fn next_outline_item(
3177 &self,
3178 config: &OutlineConfig,
3179 mat: &SyntaxMapMatch,
3180 range: &Range<usize>,
3181 include_extra_context: bool,
3182 theme: Option<&SyntaxTheme>,
3183 ) -> Option<OutlineItem<Point>> {
3184 let item_node = mat.captures.iter().find_map(|cap| {
3185 if cap.index == config.item_capture_ix {
3186 Some(cap.node)
3187 } else {
3188 None
3189 }
3190 })?;
3191
3192 let item_byte_range = item_node.byte_range();
3193 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3194 return None;
3195 }
3196 let item_point_range = Point::from_ts_point(item_node.start_position())
3197 ..Point::from_ts_point(item_node.end_position());
3198
3199 let mut open_point = None;
3200 let mut close_point = None;
3201 let mut buffer_ranges = Vec::new();
3202 for capture in mat.captures {
3203 let node_is_name;
3204 if capture.index == config.name_capture_ix {
3205 node_is_name = true;
3206 } else if Some(capture.index) == config.context_capture_ix
3207 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3208 {
3209 node_is_name = false;
3210 } else {
3211 if Some(capture.index) == config.open_capture_ix {
3212 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3213 } else if Some(capture.index) == config.close_capture_ix {
3214 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3215 }
3216
3217 continue;
3218 }
3219
3220 let mut range = capture.node.start_byte()..capture.node.end_byte();
3221 let start = capture.node.start_position();
3222 if capture.node.end_position().row > start.row {
3223 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3224 }
3225
3226 if !range.is_empty() {
3227 buffer_ranges.push((range, node_is_name));
3228 }
3229 }
3230 if buffer_ranges.is_empty() {
3231 return None;
3232 }
3233 let mut text = String::new();
3234 let mut highlight_ranges = Vec::new();
3235 let mut name_ranges = Vec::new();
3236 let mut chunks = self.chunks(
3237 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3238 true,
3239 );
3240 let mut last_buffer_range_end = 0;
3241 for (buffer_range, is_name) in buffer_ranges {
3242 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
3243 text.push(' ');
3244 }
3245 last_buffer_range_end = buffer_range.end;
3246 if is_name {
3247 let mut start = text.len();
3248 let end = start + buffer_range.len();
3249
3250 // When multiple names are captured, then the matchable text
3251 // includes the whitespace in between the names.
3252 if !name_ranges.is_empty() {
3253 start -= 1;
3254 }
3255
3256 name_ranges.push(start..end);
3257 }
3258
3259 let mut offset = buffer_range.start;
3260 chunks.seek(buffer_range.clone());
3261 for mut chunk in chunks.by_ref() {
3262 if chunk.text.len() > buffer_range.end - offset {
3263 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3264 offset = buffer_range.end;
3265 } else {
3266 offset += chunk.text.len();
3267 }
3268 let style = chunk
3269 .syntax_highlight_id
3270 .zip(theme)
3271 .and_then(|(highlight, theme)| highlight.style(theme));
3272 if let Some(style) = style {
3273 let start = text.len();
3274 let end = start + chunk.text.len();
3275 highlight_ranges.push((start..end, style));
3276 }
3277 text.push_str(chunk.text);
3278 if offset >= buffer_range.end {
3279 break;
3280 }
3281 }
3282 }
3283
3284 Some(OutlineItem {
3285 depth: 0, // We'll calculate the depth later
3286 range: item_point_range,
3287 text,
3288 highlight_ranges,
3289 name_ranges,
3290 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3291 annotation_range: None,
3292 })
3293 }
3294
3295 pub fn function_body_fold_ranges<T: ToOffset>(
3296 &self,
3297 within: Range<T>,
3298 ) -> impl Iterator<Item = Range<usize>> + '_ {
3299 self.text_object_ranges(within, TreeSitterOptions::default())
3300 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3301 }
3302
3303 /// For each grammar in the language, runs the provided
3304 /// [`tree_sitter::Query`] against the given range.
3305 pub fn matches(
3306 &self,
3307 range: Range<usize>,
3308 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3309 ) -> SyntaxMapMatches {
3310 self.syntax.matches(range, self, query)
3311 }
3312
3313 /// Returns bracket range pairs overlapping or adjacent to `range`
3314 pub fn bracket_ranges<T: ToOffset>(
3315 &self,
3316 range: Range<T>,
3317 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
3318 // Find bracket pairs that *inclusively* contain the given range.
3319 let range = range.start.to_offset(self).saturating_sub(1)
3320 ..self.len().min(range.end.to_offset(self) + 1);
3321
3322 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3323 grammar.brackets_config.as_ref().map(|c| &c.query)
3324 });
3325 let configs = matches
3326 .grammars()
3327 .iter()
3328 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3329 .collect::<Vec<_>>();
3330
3331 iter::from_fn(move || {
3332 while let Some(mat) = matches.peek() {
3333 let mut open = None;
3334 let mut close = None;
3335 let config = &configs[mat.grammar_index];
3336 for capture in mat.captures {
3337 if capture.index == config.open_capture_ix {
3338 open = Some(capture.node.byte_range());
3339 } else if capture.index == config.close_capture_ix {
3340 close = Some(capture.node.byte_range());
3341 }
3342 }
3343
3344 matches.advance();
3345
3346 let Some((open, close)) = open.zip(close) else {
3347 continue;
3348 };
3349
3350 let bracket_range = open.start..=close.end;
3351 if !bracket_range.overlaps(&range) {
3352 continue;
3353 }
3354
3355 return Some((open, close));
3356 }
3357 None
3358 })
3359 }
3360
3361 pub fn text_object_ranges<T: ToOffset>(
3362 &self,
3363 range: Range<T>,
3364 options: TreeSitterOptions,
3365 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3366 let range = range.start.to_offset(self).saturating_sub(1)
3367 ..self.len().min(range.end.to_offset(self) + 1);
3368
3369 let mut matches =
3370 self.syntax
3371 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3372 grammar.text_object_config.as_ref().map(|c| &c.query)
3373 });
3374
3375 let configs = matches
3376 .grammars()
3377 .iter()
3378 .map(|grammar| grammar.text_object_config.as_ref())
3379 .collect::<Vec<_>>();
3380
3381 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3382
3383 iter::from_fn(move || loop {
3384 while let Some(capture) = captures.pop() {
3385 if capture.0.overlaps(&range) {
3386 return Some(capture);
3387 }
3388 }
3389
3390 let mat = matches.peek()?;
3391
3392 let Some(config) = configs[mat.grammar_index].as_ref() else {
3393 matches.advance();
3394 continue;
3395 };
3396
3397 for capture in mat.captures {
3398 let Some(ix) = config
3399 .text_objects_by_capture_ix
3400 .binary_search_by_key(&capture.index, |e| e.0)
3401 .ok()
3402 else {
3403 continue;
3404 };
3405 let text_object = config.text_objects_by_capture_ix[ix].1;
3406 let byte_range = capture.node.byte_range();
3407
3408 let mut found = false;
3409 for (range, existing) in captures.iter_mut() {
3410 if existing == &text_object {
3411 range.start = range.start.min(byte_range.start);
3412 range.end = range.end.max(byte_range.end);
3413 found = true;
3414 break;
3415 }
3416 }
3417
3418 if !found {
3419 captures.push((byte_range, text_object));
3420 }
3421 }
3422
3423 matches.advance();
3424 })
3425 }
3426
3427 /// Returns enclosing bracket ranges containing the given range
3428 pub fn enclosing_bracket_ranges<T: ToOffset>(
3429 &self,
3430 range: Range<T>,
3431 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + '_ {
3432 let range = range.start.to_offset(self)..range.end.to_offset(self);
3433
3434 self.bracket_ranges(range.clone())
3435 .filter(move |(open, close)| open.start <= range.start && close.end >= range.end)
3436 }
3437
3438 /// Returns the smallest enclosing bracket ranges containing the given range or None if no brackets contain range
3439 ///
3440 /// Can optionally pass a range_filter to filter the ranges of brackets to consider
3441 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3442 &self,
3443 range: Range<T>,
3444 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3445 ) -> Option<(Range<usize>, Range<usize>)> {
3446 let range = range.start.to_offset(self)..range.end.to_offset(self);
3447
3448 // Get the ranges of the innermost pair of brackets.
3449 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3450
3451 for (open, close) in self.enclosing_bracket_ranges(range.clone()) {
3452 if let Some(range_filter) = range_filter {
3453 if !range_filter(open.clone(), close.clone()) {
3454 continue;
3455 }
3456 }
3457
3458 let len = close.end - open.start;
3459
3460 if let Some((existing_open, existing_close)) = &result {
3461 let existing_len = existing_close.end - existing_open.start;
3462 if len > existing_len {
3463 continue;
3464 }
3465 }
3466
3467 result = Some((open, close));
3468 }
3469
3470 result
3471 }
3472
3473 /// Returns anchor ranges for any matches of the redaction query.
3474 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3475 /// will be run on the relevant section of the buffer.
3476 pub fn redacted_ranges<T: ToOffset>(
3477 &self,
3478 range: Range<T>,
3479 ) -> impl Iterator<Item = Range<usize>> + '_ {
3480 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3481 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3482 grammar
3483 .redactions_config
3484 .as_ref()
3485 .map(|config| &config.query)
3486 });
3487
3488 let configs = syntax_matches
3489 .grammars()
3490 .iter()
3491 .map(|grammar| grammar.redactions_config.as_ref())
3492 .collect::<Vec<_>>();
3493
3494 iter::from_fn(move || {
3495 let redacted_range = syntax_matches
3496 .peek()
3497 .and_then(|mat| {
3498 configs[mat.grammar_index].and_then(|config| {
3499 mat.captures
3500 .iter()
3501 .find(|capture| capture.index == config.redaction_capture_ix)
3502 })
3503 })
3504 .map(|mat| mat.node.byte_range());
3505 syntax_matches.advance();
3506 redacted_range
3507 })
3508 }
3509
3510 pub fn injections_intersecting_range<T: ToOffset>(
3511 &self,
3512 range: Range<T>,
3513 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3514 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3515
3516 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3517 grammar
3518 .injection_config
3519 .as_ref()
3520 .map(|config| &config.query)
3521 });
3522
3523 let configs = syntax_matches
3524 .grammars()
3525 .iter()
3526 .map(|grammar| grammar.injection_config.as_ref())
3527 .collect::<Vec<_>>();
3528
3529 iter::from_fn(move || {
3530 let ranges = syntax_matches.peek().and_then(|mat| {
3531 let config = &configs[mat.grammar_index]?;
3532 let content_capture_range = mat.captures.iter().find_map(|capture| {
3533 if capture.index == config.content_capture_ix {
3534 Some(capture.node.byte_range())
3535 } else {
3536 None
3537 }
3538 })?;
3539 let language = self.language_at(content_capture_range.start)?;
3540 Some((content_capture_range, language))
3541 });
3542 syntax_matches.advance();
3543 ranges
3544 })
3545 }
3546
3547 pub fn runnable_ranges(
3548 &self,
3549 offset_range: Range<usize>,
3550 ) -> impl Iterator<Item = RunnableRange> + '_ {
3551 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3552 grammar.runnable_config.as_ref().map(|config| &config.query)
3553 });
3554
3555 let test_configs = syntax_matches
3556 .grammars()
3557 .iter()
3558 .map(|grammar| grammar.runnable_config.as_ref())
3559 .collect::<Vec<_>>();
3560
3561 iter::from_fn(move || loop {
3562 let mat = syntax_matches.peek()?;
3563
3564 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
3565 let mut run_range = None;
3566 let full_range = mat.captures.iter().fold(
3567 Range {
3568 start: usize::MAX,
3569 end: 0,
3570 },
3571 |mut acc, next| {
3572 let byte_range = next.node.byte_range();
3573 if acc.start > byte_range.start {
3574 acc.start = byte_range.start;
3575 }
3576 if acc.end < byte_range.end {
3577 acc.end = byte_range.end;
3578 }
3579 acc
3580 },
3581 );
3582 if full_range.start > full_range.end {
3583 // We did not find a full spanning range of this match.
3584 return None;
3585 }
3586 let extra_captures: SmallVec<[_; 1]> =
3587 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
3588 test_configs
3589 .extra_captures
3590 .get(capture.index as usize)
3591 .cloned()
3592 .and_then(|tag_name| match tag_name {
3593 RunnableCapture::Named(name) => {
3594 Some((capture.node.byte_range(), name))
3595 }
3596 RunnableCapture::Run => {
3597 let _ = run_range.insert(capture.node.byte_range());
3598 None
3599 }
3600 })
3601 }));
3602 let run_range = run_range?;
3603 let tags = test_configs
3604 .query
3605 .property_settings(mat.pattern_index)
3606 .iter()
3607 .filter_map(|property| {
3608 if *property.key == *"tag" {
3609 property
3610 .value
3611 .as_ref()
3612 .map(|value| RunnableTag(value.to_string().into()))
3613 } else {
3614 None
3615 }
3616 })
3617 .collect();
3618 let extra_captures = extra_captures
3619 .into_iter()
3620 .map(|(range, name)| {
3621 (
3622 name.to_string(),
3623 self.text_for_range(range.clone()).collect::<String>(),
3624 )
3625 })
3626 .collect();
3627 // All tags should have the same range.
3628 Some(RunnableRange {
3629 run_range,
3630 full_range,
3631 runnable: Runnable {
3632 tags,
3633 language: mat.language,
3634 buffer: self.remote_id(),
3635 },
3636 extra_captures,
3637 buffer_id: self.remote_id(),
3638 })
3639 });
3640
3641 syntax_matches.advance();
3642 if test_range.is_some() {
3643 // It's fine for us to short-circuit on .peek()? returning None. We don't want to return None from this iter if we
3644 // had a capture that did not contain a run marker, hence we'll just loop around for the next capture.
3645 return test_range;
3646 }
3647 })
3648 }
3649
3650 /// Returns selections for remote peers intersecting the given range.
3651 #[allow(clippy::type_complexity)]
3652 pub fn selections_in_range(
3653 &self,
3654 range: Range<Anchor>,
3655 include_local: bool,
3656 ) -> impl Iterator<
3657 Item = (
3658 ReplicaId,
3659 bool,
3660 CursorShape,
3661 impl Iterator<Item = &Selection<Anchor>> + '_,
3662 ),
3663 > + '_ {
3664 self.remote_selections
3665 .iter()
3666 .filter(move |(replica_id, set)| {
3667 (include_local || **replica_id != self.text.replica_id())
3668 && !set.selections.is_empty()
3669 })
3670 .map(move |(replica_id, set)| {
3671 let start_ix = match set.selections.binary_search_by(|probe| {
3672 probe.end.cmp(&range.start, self).then(Ordering::Greater)
3673 }) {
3674 Ok(ix) | Err(ix) => ix,
3675 };
3676 let end_ix = match set.selections.binary_search_by(|probe| {
3677 probe.start.cmp(&range.end, self).then(Ordering::Less)
3678 }) {
3679 Ok(ix) | Err(ix) => ix,
3680 };
3681
3682 (
3683 *replica_id,
3684 set.line_mode,
3685 set.cursor_shape,
3686 set.selections[start_ix..end_ix].iter(),
3687 )
3688 })
3689 }
3690
3691 /// Returns if the buffer contains any diagnostics.
3692 pub fn has_diagnostics(&self) -> bool {
3693 !self.diagnostics.is_empty()
3694 }
3695
3696 /// Returns all the diagnostics intersecting the given range.
3697 pub fn diagnostics_in_range<'a, T, O>(
3698 &'a self,
3699 search_range: Range<T>,
3700 reversed: bool,
3701 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
3702 where
3703 T: 'a + Clone + ToOffset,
3704 O: 'a + FromAnchor,
3705 {
3706 let mut iterators: Vec<_> = self
3707 .diagnostics
3708 .iter()
3709 .map(|(_, collection)| {
3710 collection
3711 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
3712 .peekable()
3713 })
3714 .collect();
3715
3716 std::iter::from_fn(move || {
3717 let (next_ix, _) = iterators
3718 .iter_mut()
3719 .enumerate()
3720 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
3721 .min_by(|(_, a), (_, b)| {
3722 let cmp = a
3723 .range
3724 .start
3725 .cmp(&b.range.start, self)
3726 // when range is equal, sort by diagnostic severity
3727 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
3728 // and stabilize order with group_id
3729 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
3730 if reversed {
3731 cmp.reverse()
3732 } else {
3733 cmp
3734 }
3735 })?;
3736 iterators[next_ix]
3737 .next()
3738 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
3739 diagnostic,
3740 range: FromAnchor::from_anchor(&range.start, self)
3741 ..FromAnchor::from_anchor(&range.end, self),
3742 })
3743 })
3744 }
3745
3746 /// Returns all the diagnostic groups associated with the given
3747 /// language server ID. If no language server ID is provided,
3748 /// all diagnostics groups are returned.
3749 pub fn diagnostic_groups(
3750 &self,
3751 language_server_id: Option<LanguageServerId>,
3752 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
3753 let mut groups = Vec::new();
3754
3755 if let Some(language_server_id) = language_server_id {
3756 if let Ok(ix) = self
3757 .diagnostics
3758 .binary_search_by_key(&language_server_id, |e| e.0)
3759 {
3760 self.diagnostics[ix]
3761 .1
3762 .groups(language_server_id, &mut groups, self);
3763 }
3764 } else {
3765 for (language_server_id, diagnostics) in self.diagnostics.iter() {
3766 diagnostics.groups(*language_server_id, &mut groups, self);
3767 }
3768 }
3769
3770 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
3771 let a_start = &group_a.entries[group_a.primary_ix].range.start;
3772 let b_start = &group_b.entries[group_b.primary_ix].range.start;
3773 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
3774 });
3775
3776 groups
3777 }
3778
3779 /// Returns an iterator over the diagnostics for the given group.
3780 pub fn diagnostic_group<O>(
3781 &self,
3782 group_id: usize,
3783 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
3784 where
3785 O: FromAnchor + 'static,
3786 {
3787 self.diagnostics
3788 .iter()
3789 .flat_map(move |(_, set)| set.group(group_id, self))
3790 }
3791
3792 /// An integer version number that accounts for all updates besides
3793 /// the buffer's text itself (which is versioned via a version vector).
3794 pub fn non_text_state_update_count(&self) -> usize {
3795 self.non_text_state_update_count
3796 }
3797
3798 /// Returns a snapshot of underlying file.
3799 pub fn file(&self) -> Option<&Arc<dyn File>> {
3800 self.file.as_ref()
3801 }
3802
3803 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
3804 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
3805 if let Some(file) = self.file() {
3806 if file.path().file_name().is_none() || include_root {
3807 Some(file.full_path(cx))
3808 } else {
3809 Some(file.path().to_path_buf())
3810 }
3811 } else {
3812 None
3813 }
3814 }
3815}
3816
3817fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
3818 indent_size_for_text(text.chars_at(Point::new(row, 0)))
3819}
3820
3821fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
3822 let mut result = IndentSize::spaces(0);
3823 for c in text {
3824 let kind = match c {
3825 ' ' => IndentKind::Space,
3826 '\t' => IndentKind::Tab,
3827 _ => break,
3828 };
3829 if result.len == 0 {
3830 result.kind = kind;
3831 }
3832 result.len += 1;
3833 }
3834 result
3835}
3836
3837impl Clone for BufferSnapshot {
3838 fn clone(&self) -> Self {
3839 Self {
3840 text: self.text.clone(),
3841 syntax: self.syntax.clone(),
3842 file: self.file.clone(),
3843 remote_selections: self.remote_selections.clone(),
3844 diagnostics: self.diagnostics.clone(),
3845 language: self.language.clone(),
3846 non_text_state_update_count: self.non_text_state_update_count,
3847 }
3848 }
3849}
3850
3851impl Deref for BufferSnapshot {
3852 type Target = text::BufferSnapshot;
3853
3854 fn deref(&self) -> &Self::Target {
3855 &self.text
3856 }
3857}
3858
3859unsafe impl<'a> Send for BufferChunks<'a> {}
3860
3861impl<'a> BufferChunks<'a> {
3862 pub(crate) fn new(
3863 text: &'a Rope,
3864 range: Range<usize>,
3865 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
3866 diagnostics: bool,
3867 buffer_snapshot: Option<&'a BufferSnapshot>,
3868 ) -> Self {
3869 let mut highlights = None;
3870 if let Some((captures, highlight_maps)) = syntax {
3871 highlights = Some(BufferChunkHighlights {
3872 captures,
3873 next_capture: None,
3874 stack: Default::default(),
3875 highlight_maps,
3876 })
3877 }
3878
3879 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
3880 let chunks = text.chunks_in_range(range.clone());
3881
3882 let mut this = BufferChunks {
3883 range,
3884 buffer_snapshot,
3885 chunks,
3886 diagnostic_endpoints,
3887 error_depth: 0,
3888 warning_depth: 0,
3889 information_depth: 0,
3890 hint_depth: 0,
3891 unnecessary_depth: 0,
3892 highlights,
3893 };
3894 this.initialize_diagnostic_endpoints();
3895 this
3896 }
3897
3898 /// Seeks to the given byte offset in the buffer.
3899 pub fn seek(&mut self, range: Range<usize>) {
3900 let old_range = std::mem::replace(&mut self.range, range.clone());
3901 self.chunks.set_range(self.range.clone());
3902 if let Some(highlights) = self.highlights.as_mut() {
3903 if old_range.start <= self.range.start && old_range.end >= self.range.end {
3904 // Reuse existing highlights stack, as the new range is a subrange of the old one.
3905 highlights
3906 .stack
3907 .retain(|(end_offset, _)| *end_offset > range.start);
3908 if let Some(capture) = &highlights.next_capture {
3909 if range.start >= capture.node.start_byte() {
3910 let next_capture_end = capture.node.end_byte();
3911 if range.start < next_capture_end {
3912 highlights.stack.push((
3913 next_capture_end,
3914 highlights.highlight_maps[capture.grammar_index].get(capture.index),
3915 ));
3916 }
3917 highlights.next_capture.take();
3918 }
3919 }
3920 } else if let Some(snapshot) = self.buffer_snapshot {
3921 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
3922 *highlights = BufferChunkHighlights {
3923 captures,
3924 next_capture: None,
3925 stack: Default::default(),
3926 highlight_maps,
3927 };
3928 } else {
3929 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
3930 // Seeking such BufferChunks is not supported.
3931 debug_assert!(false, "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot");
3932 }
3933
3934 highlights.captures.set_byte_range(self.range.clone());
3935 self.initialize_diagnostic_endpoints();
3936 }
3937 }
3938
3939 fn initialize_diagnostic_endpoints(&mut self) {
3940 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
3941 if let Some(buffer) = self.buffer_snapshot {
3942 let mut diagnostic_endpoints = Vec::new();
3943 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
3944 diagnostic_endpoints.push(DiagnosticEndpoint {
3945 offset: entry.range.start,
3946 is_start: true,
3947 severity: entry.diagnostic.severity,
3948 is_unnecessary: entry.diagnostic.is_unnecessary,
3949 });
3950 diagnostic_endpoints.push(DiagnosticEndpoint {
3951 offset: entry.range.end,
3952 is_start: false,
3953 severity: entry.diagnostic.severity,
3954 is_unnecessary: entry.diagnostic.is_unnecessary,
3955 });
3956 }
3957 diagnostic_endpoints
3958 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
3959 *diagnostics = diagnostic_endpoints.into_iter().peekable();
3960 self.hint_depth = 0;
3961 self.error_depth = 0;
3962 self.warning_depth = 0;
3963 self.information_depth = 0;
3964 }
3965 }
3966 }
3967
3968 /// The current byte offset in the buffer.
3969 pub fn offset(&self) -> usize {
3970 self.range.start
3971 }
3972
3973 pub fn range(&self) -> Range<usize> {
3974 self.range.clone()
3975 }
3976
3977 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
3978 let depth = match endpoint.severity {
3979 DiagnosticSeverity::ERROR => &mut self.error_depth,
3980 DiagnosticSeverity::WARNING => &mut self.warning_depth,
3981 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
3982 DiagnosticSeverity::HINT => &mut self.hint_depth,
3983 _ => return,
3984 };
3985 if endpoint.is_start {
3986 *depth += 1;
3987 } else {
3988 *depth -= 1;
3989 }
3990
3991 if endpoint.is_unnecessary {
3992 if endpoint.is_start {
3993 self.unnecessary_depth += 1;
3994 } else {
3995 self.unnecessary_depth -= 1;
3996 }
3997 }
3998 }
3999
4000 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4001 if self.error_depth > 0 {
4002 Some(DiagnosticSeverity::ERROR)
4003 } else if self.warning_depth > 0 {
4004 Some(DiagnosticSeverity::WARNING)
4005 } else if self.information_depth > 0 {
4006 Some(DiagnosticSeverity::INFORMATION)
4007 } else if self.hint_depth > 0 {
4008 Some(DiagnosticSeverity::HINT)
4009 } else {
4010 None
4011 }
4012 }
4013
4014 fn current_code_is_unnecessary(&self) -> bool {
4015 self.unnecessary_depth > 0
4016 }
4017}
4018
4019impl<'a> Iterator for BufferChunks<'a> {
4020 type Item = Chunk<'a>;
4021
4022 fn next(&mut self) -> Option<Self::Item> {
4023 let mut next_capture_start = usize::MAX;
4024 let mut next_diagnostic_endpoint = usize::MAX;
4025
4026 if let Some(highlights) = self.highlights.as_mut() {
4027 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4028 if *parent_capture_end <= self.range.start {
4029 highlights.stack.pop();
4030 } else {
4031 break;
4032 }
4033 }
4034
4035 if highlights.next_capture.is_none() {
4036 highlights.next_capture = highlights.captures.next();
4037 }
4038
4039 while let Some(capture) = highlights.next_capture.as_ref() {
4040 if self.range.start < capture.node.start_byte() {
4041 next_capture_start = capture.node.start_byte();
4042 break;
4043 } else {
4044 let highlight_id =
4045 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4046 highlights
4047 .stack
4048 .push((capture.node.end_byte(), highlight_id));
4049 highlights.next_capture = highlights.captures.next();
4050 }
4051 }
4052 }
4053
4054 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4055 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4056 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4057 if endpoint.offset <= self.range.start {
4058 self.update_diagnostic_depths(endpoint);
4059 diagnostic_endpoints.next();
4060 } else {
4061 next_diagnostic_endpoint = endpoint.offset;
4062 break;
4063 }
4064 }
4065 }
4066 self.diagnostic_endpoints = diagnostic_endpoints;
4067
4068 if let Some(chunk) = self.chunks.peek() {
4069 let chunk_start = self.range.start;
4070 let mut chunk_end = (self.chunks.offset() + chunk.len())
4071 .min(next_capture_start)
4072 .min(next_diagnostic_endpoint);
4073 let mut highlight_id = None;
4074 if let Some(highlights) = self.highlights.as_ref() {
4075 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4076 chunk_end = chunk_end.min(*parent_capture_end);
4077 highlight_id = Some(*parent_highlight_id);
4078 }
4079 }
4080
4081 let slice =
4082 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4083 self.range.start = chunk_end;
4084 if self.range.start == self.chunks.offset() + chunk.len() {
4085 self.chunks.next().unwrap();
4086 }
4087
4088 Some(Chunk {
4089 text: slice,
4090 syntax_highlight_id: highlight_id,
4091 diagnostic_severity: self.current_diagnostic_severity(),
4092 is_unnecessary: self.current_code_is_unnecessary(),
4093 ..Default::default()
4094 })
4095 } else {
4096 None
4097 }
4098 }
4099}
4100
4101impl operation_queue::Operation for Operation {
4102 fn lamport_timestamp(&self) -> clock::Lamport {
4103 match self {
4104 Operation::Buffer(_) => {
4105 unreachable!("buffer operations should never be deferred at this layer")
4106 }
4107 Operation::UpdateDiagnostics {
4108 lamport_timestamp, ..
4109 }
4110 | Operation::UpdateSelections {
4111 lamport_timestamp, ..
4112 }
4113 | Operation::UpdateCompletionTriggers {
4114 lamport_timestamp, ..
4115 } => *lamport_timestamp,
4116 }
4117 }
4118}
4119
4120impl Default for Diagnostic {
4121 fn default() -> Self {
4122 Self {
4123 source: Default::default(),
4124 code: None,
4125 severity: DiagnosticSeverity::ERROR,
4126 message: Default::default(),
4127 group_id: 0,
4128 is_primary: false,
4129 is_disk_based: false,
4130 is_unnecessary: false,
4131 data: None,
4132 }
4133 }
4134}
4135
4136impl IndentSize {
4137 /// Returns an [`IndentSize`] representing the given spaces.
4138 pub fn spaces(len: u32) -> Self {
4139 Self {
4140 len,
4141 kind: IndentKind::Space,
4142 }
4143 }
4144
4145 /// Returns an [`IndentSize`] representing a tab.
4146 pub fn tab() -> Self {
4147 Self {
4148 len: 1,
4149 kind: IndentKind::Tab,
4150 }
4151 }
4152
4153 /// An iterator over the characters represented by this [`IndentSize`].
4154 pub fn chars(&self) -> impl Iterator<Item = char> {
4155 iter::repeat(self.char()).take(self.len as usize)
4156 }
4157
4158 /// The character representation of this [`IndentSize`].
4159 pub fn char(&self) -> char {
4160 match self.kind {
4161 IndentKind::Space => ' ',
4162 IndentKind::Tab => '\t',
4163 }
4164 }
4165
4166 /// Consumes the current [`IndentSize`] and returns a new one that has
4167 /// been shrunk or enlarged by the given size along the given direction.
4168 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4169 match direction {
4170 Ordering::Less => {
4171 if self.kind == size.kind && self.len >= size.len {
4172 self.len -= size.len;
4173 }
4174 }
4175 Ordering::Equal => {}
4176 Ordering::Greater => {
4177 if self.len == 0 {
4178 self = size;
4179 } else if self.kind == size.kind {
4180 self.len += size.len;
4181 }
4182 }
4183 }
4184 self
4185 }
4186
4187 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4188 match self.kind {
4189 IndentKind::Space => self.len as usize,
4190 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4191 }
4192 }
4193}
4194
4195#[cfg(any(test, feature = "test-support"))]
4196pub struct TestFile {
4197 pub path: Arc<Path>,
4198 pub root_name: String,
4199}
4200
4201#[cfg(any(test, feature = "test-support"))]
4202impl File for TestFile {
4203 fn path(&self) -> &Arc<Path> {
4204 &self.path
4205 }
4206
4207 fn full_path(&self, _: &gpui::App) -> PathBuf {
4208 PathBuf::from(&self.root_name).join(self.path.as_ref())
4209 }
4210
4211 fn as_local(&self) -> Option<&dyn LocalFile> {
4212 None
4213 }
4214
4215 fn disk_state(&self) -> DiskState {
4216 unimplemented!()
4217 }
4218
4219 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4220 self.path().file_name().unwrap_or(self.root_name.as_ref())
4221 }
4222
4223 fn worktree_id(&self, _: &App) -> WorktreeId {
4224 WorktreeId::from_usize(0)
4225 }
4226
4227 fn as_any(&self) -> &dyn std::any::Any {
4228 unimplemented!()
4229 }
4230
4231 fn to_proto(&self, _: &App) -> rpc::proto::File {
4232 unimplemented!()
4233 }
4234
4235 fn is_private(&self) -> bool {
4236 false
4237 }
4238}
4239
4240pub(crate) fn contiguous_ranges(
4241 values: impl Iterator<Item = u32>,
4242 max_len: usize,
4243) -> impl Iterator<Item = Range<u32>> {
4244 let mut values = values;
4245 let mut current_range: Option<Range<u32>> = None;
4246 std::iter::from_fn(move || loop {
4247 if let Some(value) = values.next() {
4248 if let Some(range) = &mut current_range {
4249 if value == range.end && range.len() < max_len {
4250 range.end += 1;
4251 continue;
4252 }
4253 }
4254
4255 let prev_range = current_range.clone();
4256 current_range = Some(value..(value + 1));
4257 if prev_range.is_some() {
4258 return prev_range;
4259 }
4260 } else {
4261 return current_range.take();
4262 }
4263 })
4264}
4265
4266#[derive(Default, Debug)]
4267pub struct CharClassifier {
4268 scope: Option<LanguageScope>,
4269 for_completion: bool,
4270 ignore_punctuation: bool,
4271}
4272
4273impl CharClassifier {
4274 pub fn new(scope: Option<LanguageScope>) -> Self {
4275 Self {
4276 scope,
4277 for_completion: false,
4278 ignore_punctuation: false,
4279 }
4280 }
4281
4282 pub fn for_completion(self, for_completion: bool) -> Self {
4283 Self {
4284 for_completion,
4285 ..self
4286 }
4287 }
4288
4289 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4290 Self {
4291 ignore_punctuation,
4292 ..self
4293 }
4294 }
4295
4296 pub fn is_whitespace(&self, c: char) -> bool {
4297 self.kind(c) == CharKind::Whitespace
4298 }
4299
4300 pub fn is_word(&self, c: char) -> bool {
4301 self.kind(c) == CharKind::Word
4302 }
4303
4304 pub fn is_punctuation(&self, c: char) -> bool {
4305 self.kind(c) == CharKind::Punctuation
4306 }
4307
4308 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4309 if c.is_whitespace() {
4310 return CharKind::Whitespace;
4311 } else if c.is_alphanumeric() || c == '_' {
4312 return CharKind::Word;
4313 }
4314
4315 if let Some(scope) = &self.scope {
4316 if let Some(characters) = scope.word_characters() {
4317 if characters.contains(&c) {
4318 if c == '-' && !self.for_completion && !ignore_punctuation {
4319 return CharKind::Punctuation;
4320 }
4321 return CharKind::Word;
4322 }
4323 }
4324 }
4325
4326 if ignore_punctuation {
4327 CharKind::Word
4328 } else {
4329 CharKind::Punctuation
4330 }
4331 }
4332
4333 pub fn kind(&self, c: char) -> CharKind {
4334 self.kind_with(c, self.ignore_punctuation)
4335 }
4336}
4337
4338/// Find all of the ranges of whitespace that occur at the ends of lines
4339/// in the given rope.
4340///
4341/// This could also be done with a regex search, but this implementation
4342/// avoids copying text.
4343pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4344 let mut ranges = Vec::new();
4345
4346 let mut offset = 0;
4347 let mut prev_chunk_trailing_whitespace_range = 0..0;
4348 for chunk in rope.chunks() {
4349 let mut prev_line_trailing_whitespace_range = 0..0;
4350 for (i, line) in chunk.split('\n').enumerate() {
4351 let line_end_offset = offset + line.len();
4352 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4353 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4354
4355 if i == 0 && trimmed_line_len == 0 {
4356 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4357 }
4358 if !prev_line_trailing_whitespace_range.is_empty() {
4359 ranges.push(prev_line_trailing_whitespace_range);
4360 }
4361
4362 offset = line_end_offset + 1;
4363 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4364 }
4365
4366 offset -= 1;
4367 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4368 }
4369
4370 if !prev_chunk_trailing_whitespace_range.is_empty() {
4371 ranges.push(prev_chunk_trailing_whitespace_range);
4372 }
4373
4374 ranges
4375}