buffer.rs

   1pub mod row_chunk;
   2
   3use crate::{
   4    DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
   5    RunnableTag, TextObject, TreeSitterOptions,
   6    diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
   7    language_settings::{LanguageSettings, language_settings},
   8    outline::OutlineItem,
   9    row_chunk::RowChunks,
  10    syntax_map::{
  11        MAX_BYTES_TO_QUERY, SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures,
  12        SyntaxMapMatch, SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
  13    },
  14    task_context::RunnableRange,
  15    text_diff::text_diff,
  16    unified_diff_with_offsets,
  17};
  18pub use crate::{
  19    Grammar, Language, LanguageRegistry,
  20    diagnostic_set::DiagnosticSet,
  21    highlight_map::{HighlightId, HighlightMap},
  22    proto,
  23};
  24use anyhow::{Context as _, Result};
  25use clock::Lamport;
  26pub use clock::ReplicaId;
  27use collections::{HashMap, HashSet};
  28use encoding_rs::Encoding;
  29use fs::MTime;
  30use futures::channel::oneshot;
  31use gpui::{
  32    App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
  33    Task, TextStyle,
  34};
  35
  36use lsp::{LanguageServerId, NumberOrString};
  37use parking_lot::Mutex;
  38use serde::{Deserialize, Serialize};
  39use serde_json::Value;
  40use settings::WorktreeId;
  41use smallvec::SmallVec;
  42use smol::future::yield_now;
  43use std::{
  44    any::Any,
  45    borrow::Cow,
  46    cell::Cell,
  47    cmp::{self, Ordering, Reverse},
  48    collections::{BTreeMap, BTreeSet},
  49    future::Future,
  50    iter::{self, Iterator, Peekable},
  51    mem,
  52    num::NonZeroU32,
  53    ops::{Deref, Range},
  54    path::PathBuf,
  55    rc,
  56    sync::Arc,
  57    time::{Duration, Instant},
  58    vec,
  59};
  60use sum_tree::TreeMap;
  61use text::operation_queue::OperationQueue;
  62use text::*;
  63pub use text::{
  64    Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
  65    LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
  66    SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
  67    ToPointUtf16, Transaction, TransactionId, Unclipped,
  68};
  69use theme::{ActiveTheme as _, SyntaxTheme};
  70#[cfg(any(test, feature = "test-support"))]
  71use util::RandomCharIter;
  72use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
  73
  74#[cfg(any(test, feature = "test-support"))]
  75pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
  76
  77pub use lsp::DiagnosticSeverity;
  78
/// Indicates whether a [`Buffer`] can be edited.
  80#[derive(PartialEq, Clone, Copy, Debug)]
  81pub enum Capability {
  82    /// The buffer is a mutable replica.
  83    ReadWrite,
    /// The buffer is a mutable replica that has been toggled to be read-only.
  85    Read,
  86    /// The buffer is a read-only replica.
  87    ReadOnly,
  88}
  89
  90impl Capability {
  91    /// Returns `true` if the capability is `ReadWrite`.
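    ///
    /// A quick sketch of the expected behavior:
    /// ```ignore
    /// assert!(Capability::ReadWrite.editable());
    /// assert!(!Capability::Read.editable());
    /// assert!(!Capability::ReadOnly.editable());
    /// ```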
  92    pub fn editable(self) -> bool {
  93        matches!(self, Capability::ReadWrite)
  94    }
  95}
  96
  97pub type BufferRow = u32;
  98
/// An in-memory representation of a source code file, including its text,
/// syntax trees, and diagnostics.
 101pub struct Buffer {
 102    text: TextBuffer,
 103    branch_state: Option<BufferBranchState>,
 104    /// Filesystem state, `None` when there is no path.
 105    file: Option<Arc<dyn File>>,
 106    /// The mtime of the file when this buffer was last loaded from
 107    /// or saved to disk.
 108    saved_mtime: Option<MTime>,
 109    /// The version vector when this buffer was last loaded from
 110    /// or saved to disk.
 111    saved_version: clock::Global,
 112    preview_version: clock::Global,
 113    transaction_depth: usize,
 114    was_dirty_before_starting_transaction: Option<bool>,
 115    reload_task: Option<Task<Result<()>>>,
 116    language: Option<Arc<Language>>,
 117    autoindent_requests: Vec<Arc<AutoindentRequest>>,
 118    wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
 119    pending_autoindent: Option<Task<()>>,
 120    sync_parse_timeout: Option<Duration>,
 121    syntax_map: Mutex<SyntaxMap>,
 122    reparse: Option<Task<()>>,
 123    parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
 124    non_text_state_update_count: usize,
 125    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
 126    remote_selections: TreeMap<ReplicaId, SelectionSet>,
 127    diagnostics_timestamp: clock::Lamport,
 128    completion_triggers: BTreeSet<String>,
 129    completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
 130    completion_triggers_timestamp: clock::Lamport,
 131    deferred_ops: OperationQueue<Operation>,
 132    capability: Capability,
 133    has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
 136    has_unsaved_edits: Cell<(clock::Global, bool)>,
 137    change_bits: Vec<rc::Weak<Cell<bool>>>,
 138    _subscriptions: Vec<gpui::Subscription>,
 139    tree_sitter_data: Arc<TreeSitterData>,
 140    encoding: &'static Encoding,
 141    has_bom: bool,
 142    reload_with_encoding_txns: HashMap<TransactionId, (&'static Encoding, bool)>,
 143}
 144
 145#[derive(Debug)]
 146pub struct TreeSitterData {
 147    chunks: RowChunks,
 148    brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
 149}
 150
 151const MAX_ROWS_IN_A_CHUNK: u32 = 50;
 152
 153impl TreeSitterData {
 154    fn clear(&mut self, snapshot: text::BufferSnapshot) {
 155        self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
 156        self.brackets_by_chunks.get_mut().clear();
 157        self.brackets_by_chunks
 158            .get_mut()
 159            .resize(self.chunks.len(), None);
 160    }
 161
 162    fn new(snapshot: text::BufferSnapshot) -> Self {
 163        let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
 164        Self {
 165            brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
 166            chunks,
 167        }
 168    }
 169
 170    fn version(&self) -> &clock::Global {
 171        self.chunks.version()
 172    }
 173}
 174
 175#[derive(Copy, Clone, Debug, PartialEq, Eq)]
 176pub enum ParseStatus {
 177    Idle,
 178    Parsing,
 179}
 180
 181struct BufferBranchState {
 182    base_buffer: Entity<Buffer>,
 183    merged_operations: Vec<Lamport>,
 184}
 185
 186/// An immutable, cheaply cloneable representation of a fixed
 187/// state of a buffer.
 188pub struct BufferSnapshot {
 189    pub text: text::BufferSnapshot,
 190    pub syntax: SyntaxSnapshot,
 191    file: Option<Arc<dyn File>>,
 192    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
 193    remote_selections: TreeMap<ReplicaId, SelectionSet>,
 194    language: Option<Arc<Language>>,
 195    non_text_state_update_count: usize,
 196    tree_sitter_data: Arc<TreeSitterData>,
 197    pub capability: Capability,
 198}
 199
/// The kind and amount of indentation in a particular line. For now, this
/// assumes that the indentation consists entirely of one kind of character.
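///
/// A minimal sketch of constructing one (both fields are public):
/// ```ignore
/// let indent = IndentSize { len: 4, kind: IndentKind::Space };
/// ```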
 202#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
 203pub struct IndentSize {
 204    /// The number of bytes that comprise the indentation.
 205    pub len: u32,
 206    /// The kind of whitespace used for indentation.
 207    pub kind: IndentKind,
 208}
 209
 210/// A whitespace character that's used for indentation.
 211#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
 212pub enum IndentKind {
 213    /// An ASCII space character.
 214    #[default]
 215    Space,
 216    /// An ASCII tab character.
 217    Tab,
 218}
 219
 220/// The shape of a selection cursor.
 221#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
 222pub enum CursorShape {
 223    /// A vertical bar
 224    #[default]
 225    Bar,
 226    /// A block that surrounds the following character
 227    Block,
 228    /// An underline that runs along the following character
 229    Underline,
 230    /// A box drawn around the following character
 231    Hollow,
 232}
 233
 234impl From<settings::CursorShape> for CursorShape {
 235    fn from(shape: settings::CursorShape) -> Self {
 236        match shape {
 237            settings::CursorShape::Bar => CursorShape::Bar,
 238            settings::CursorShape::Block => CursorShape::Block,
 239            settings::CursorShape::Underline => CursorShape::Underline,
 240            settings::CursorShape::Hollow => CursorShape::Hollow,
 241        }
 242    }
 243}
 244
 245#[derive(Clone, Debug)]
 246struct SelectionSet {
 247    line_mode: bool,
 248    cursor_shape: CursorShape,
 249    selections: Arc<[Selection<Anchor>]>,
 250    lamport_timestamp: clock::Lamport,
 251}
 252
 253/// A diagnostic associated with a certain range of a buffer.
 254#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
 255pub struct Diagnostic {
 256    /// The name of the service that produced this diagnostic.
 257    pub source: Option<String>,
 258    /// The ID provided by the dynamic registration that produced this diagnostic.
 259    pub registration_id: Option<SharedString>,
 260    /// A machine-readable code that identifies this diagnostic.
 261    pub code: Option<NumberOrString>,
 262    pub code_description: Option<lsp::Uri>,
 263    /// Whether this diagnostic is a hint, warning, or error.
 264    pub severity: DiagnosticSeverity,
 265    /// The human-readable message associated with this diagnostic.
 266    pub message: String,
    /// The human-readable message, in Markdown format, if available.
 268    pub markdown: Option<String>,
 269    /// An id that identifies the group to which this diagnostic belongs.
 270    ///
 271    /// When a language server produces a diagnostic with
 272    /// one or more associated diagnostics, those diagnostics are all
 273    /// assigned a single group ID.
 274    pub group_id: usize,
 275    /// Whether this diagnostic is the primary diagnostic for its group.
 276    ///
 277    /// In a given group, the primary diagnostic is the top-level diagnostic
 278    /// returned by the language server. The non-primary diagnostics are the
 279    /// associated diagnostics.
 280    pub is_primary: bool,
 281    /// Whether this diagnostic is considered to originate from an analysis of
 282    /// files on disk, as opposed to any unsaved buffer contents. This is a
 283    /// property of a given diagnostic source, and is configured for a given
 284    /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
 285    /// for the language server.
 286    pub is_disk_based: bool,
 287    /// Whether this diagnostic marks unnecessary code.
 288    pub is_unnecessary: bool,
    /// Allows quickly separating diagnostic groups by their source.
 290    pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic, passed back to the server when code actions are requested for this diagnostic.
 292    pub data: Option<Value>,
 293    /// Whether to underline the corresponding text range in the editor.
 294    pub underline: bool,
 295}
 296
 297#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
 298pub enum DiagnosticSourceKind {
 299    Pulled,
 300    Pushed,
 301    Other,
 302}
 303
 304/// An operation used to synchronize this buffer with its other replicas.
 305#[derive(Clone, Debug, PartialEq)]
 306pub enum Operation {
 307    /// A text operation.
 308    Buffer(text::Operation),
 309
 310    /// An update to the buffer's diagnostics.
 311    UpdateDiagnostics {
 312        /// The id of the language server that produced the new diagnostics.
 313        server_id: LanguageServerId,
 314        /// The diagnostics.
 315        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
 316        /// The buffer's lamport timestamp.
 317        lamport_timestamp: clock::Lamport,
 318    },
 319
 320    /// An update to the most recent selections in this buffer.
 321    UpdateSelections {
 322        /// The selections.
 323        selections: Arc<[Selection<Anchor>]>,
 324        /// The buffer's lamport timestamp.
 325        lamport_timestamp: clock::Lamport,
 326        /// Whether the selections are in 'line mode'.
 327        line_mode: bool,
 328        /// The [`CursorShape`] associated with these selections.
 329        cursor_shape: CursorShape,
 330    },
 331
 332    /// An update to the characters that should trigger autocompletion
 333    /// for this buffer.
 334    UpdateCompletionTriggers {
 335        /// The characters that trigger autocompletion.
 336        triggers: Vec<String>,
 337        /// The buffer's lamport timestamp.
 338        lamport_timestamp: clock::Lamport,
 339        /// The language server ID.
 340        server_id: LanguageServerId,
 341    },
 342
 343    /// An update to the line ending type of this buffer.
 344    UpdateLineEnding {
 345        /// The line ending type.
 346        line_ending: LineEnding,
 347        /// The buffer's lamport timestamp.
 348        lamport_timestamp: clock::Lamport,
 349    },
 350}
 351
 352/// An event that occurs in a buffer.
 353#[derive(Clone, Debug, PartialEq)]
 354pub enum BufferEvent {
 355    /// The buffer was changed in a way that must be
 356    /// propagated to its other replicas.
 357    Operation {
 358        operation: Operation,
 359        is_local: bool,
 360    },
 361    /// The buffer was edited.
 362    Edited,
 363    /// The buffer's `dirty` bit changed.
 364    DirtyChanged,
 365    /// The buffer was saved.
 366    Saved,
 367    /// The buffer's file was changed on disk.
 368    FileHandleChanged,
 369    /// The buffer was reloaded.
 370    Reloaded,
    /// The buffer needs to be reloaded.
 372    ReloadNeeded,
 373    /// The buffer's language was changed.
 374    /// The boolean indicates whether this buffer did not have a language before, but does now.
 375    LanguageChanged(bool),
 376    /// The buffer's syntax trees were updated.
 377    Reparsed,
 378    /// The buffer's diagnostics were updated.
 379    DiagnosticsUpdated,
 380    /// The buffer gained or lost editing capabilities.
 381    CapabilityChanged,
 382}
 383
 384/// The file associated with a buffer.
 385pub trait File: Send + Sync + Any {
 386    /// Returns the [`LocalFile`] associated with this file, if the
 387    /// file is local.
 388    fn as_local(&self) -> Option<&dyn LocalFile>;
 389
 390    /// Returns whether this file is local.
 391    fn is_local(&self) -> bool {
 392        self.as_local().is_some()
 393    }
 394
 395    /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
 396    /// only available in some states, such as modification time.
 397    fn disk_state(&self) -> DiskState;
 398
 399    /// Returns the path of this file relative to the worktree's root directory.
 400    fn path(&self) -> &Arc<RelPath>;
 401
 402    /// Returns the path of this file relative to the worktree's parent directory (this means it
 403    /// includes the name of the worktree's root folder).
 404    fn full_path(&self, cx: &App) -> PathBuf;
 405
 406    /// Returns the path style of this file.
 407    fn path_style(&self, cx: &App) -> PathStyle;
 408
 409    /// Returns the last component of this handle's absolute path. If this handle refers to the root
 410    /// of its worktree, then this method will return the name of the worktree itself.
 411    fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
 412
 413    /// Returns the id of the worktree to which this file belongs.
 414    ///
 415    /// This is needed for looking up project-specific settings.
 416    fn worktree_id(&self, cx: &App) -> WorktreeId;
 417
 418    /// Converts this file into a protobuf message.
 419    fn to_proto(&self, cx: &App) -> rpc::proto::File;
 420
    /// Returns whether Zed considers this to be a private file.
 422    fn is_private(&self) -> bool;
 423
    /// Returns whether this file can be opened. The default implementation
    /// returns `true` only for non-local files.
    fn can_open(&self) -> bool {
 425        !self.is_local()
 426    }
 427}
 428
 429/// The file's storage status - whether it's stored (`Present`), and if so when it was last
 430/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
 431/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
 432/// indicator for new files.
 433#[derive(Copy, Clone, Debug, PartialEq)]
 434pub enum DiskState {
 435    /// File created in Zed that has not been saved.
 436    New,
 437    /// File present on the filesystem.
 438    Present { mtime: MTime },
 439    /// Deleted file that was previously present.
 440    Deleted,
    /// An old version of a file that was previously present, usually obtained
    /// from a version control system (e.g. a Git blob).
 443    Historic { was_deleted: bool },
 444}
 445
 446impl DiskState {
 447    /// Returns the file's last known modification time on disk.
 448    pub fn mtime(self) -> Option<MTime> {
 449        match self {
 450            DiskState::New => None,
 451            DiskState::Present { mtime } => Some(mtime),
 452            DiskState::Deleted => None,
 453            DiskState::Historic { .. } => None,
 454        }
 455    }
 456
    /// Returns whether the file is currently present on disk.
    pub fn exists(&self) -> bool {
 458        match self {
 459            DiskState::New => false,
 460            DiskState::Present { .. } => true,
 461            DiskState::Deleted => false,
 462            DiskState::Historic { .. } => false,
 463        }
 464    }
 465
 466    /// Returns true if this state represents a deleted file.
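    ///
    /// For example:
    /// ```ignore
    /// assert!(DiskState::Deleted.is_deleted());
    /// assert!(DiskState::Historic { was_deleted: true }.is_deleted());
    /// assert!(!DiskState::New.is_deleted());
    /// ```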
 467    pub fn is_deleted(&self) -> bool {
 468        match self {
 469            DiskState::Deleted => true,
 470            DiskState::Historic { was_deleted } => *was_deleted,
 471            _ => false,
 472        }
 473    }
 474}
 475
 476/// The file associated with a buffer, in the case where the file is on the local disk.
 477pub trait LocalFile: File {
    /// Returns the absolute path of this file.
 479    fn abs_path(&self, cx: &App) -> PathBuf;
 480
 481    /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
 482    fn load(&self, cx: &App) -> Task<Result<String>>;
 483
 484    /// Loads the file's contents from disk.
 485    fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
 486}
 487
 488/// The auto-indent behavior associated with an editing operation.
 489/// For some editing operations, each affected line of text has its
 490/// indentation recomputed. For other operations, the entire block
 491/// of edited text is adjusted uniformly.
 492#[derive(Clone, Debug)]
 493pub enum AutoindentMode {
 494    /// Indent each line of inserted text.
 495    EachLine,
 496    /// Apply the same indentation adjustment to all of the lines
 497    /// in a given insertion.
 498    Block {
 499        /// The original indentation column of the first line of each
 500        /// insertion, if it has been copied.
 501        ///
 502        /// Knowing this makes it possible to preserve the relative indentation
 503        /// of every line in the insertion from when it was copied.
 504        ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion will have its indentation adjusted by `b - a` columns.
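        ///
        /// As a worked example (a sketch of the intended behavior): if a block
        /// was copied from indent column `a = 4` and its first line is then
        /// auto-indented to column `b = 8`, every line of the insertion is
        /// shifted right by `b - a = 4` columns, preserving the block's
        /// internal relative indentation.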
 508        original_indent_columns: Vec<Option<u32>>,
 509    },
 510}
 511
 512#[derive(Clone)]
 513struct AutoindentRequest {
 514    before_edit: BufferSnapshot,
 515    entries: Vec<AutoindentRequestEntry>,
 516    is_block_mode: bool,
 517    ignore_empty_lines: bool,
 518}
 519
 520#[derive(Debug, Clone)]
 521struct AutoindentRequestEntry {
 522    /// A range of the buffer whose indentation should be adjusted.
 523    range: Range<Anchor>,
    /// The row at which the edit started, in the buffer as it was before the edit.
    /// This is stored here because the anchors in `range` are created after the
    /// edit, so they cannot be resolved against the `before_edit` snapshot.
 527    old_row: Option<u32>,
 528    indent_size: IndentSize,
 529    original_indent_column: Option<u32>,
 530}
 531
 532#[derive(Debug)]
 533struct IndentSuggestion {
 534    basis_row: u32,
 535    delta: Ordering,
 536    within_error: bool,
 537}
 538
 539struct BufferChunkHighlights<'a> {
 540    captures: SyntaxMapCaptures<'a>,
 541    next_capture: Option<SyntaxMapCapture<'a>>,
 542    stack: Vec<(usize, HighlightId)>,
 543    highlight_maps: Vec<HighlightMap>,
 544}
 545
 546/// An iterator that yields chunks of a buffer's text, along with their
 547/// syntax highlights and diagnostic status.
 548pub struct BufferChunks<'a> {
 549    buffer_snapshot: Option<&'a BufferSnapshot>,
 550    range: Range<usize>,
 551    chunks: text::Chunks<'a>,
 552    diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
 553    error_depth: usize,
 554    warning_depth: usize,
 555    information_depth: usize,
 556    hint_depth: usize,
 557    unnecessary_depth: usize,
 558    underline: bool,
 559    highlights: Option<BufferChunkHighlights<'a>>,
 560}
 561
 562/// A chunk of a buffer's text, along with its syntax highlight and
 563/// diagnostic status.
 564#[derive(Clone, Debug, Default)]
 565pub struct Chunk<'a> {
 566    /// The text of the chunk.
 567    pub text: &'a str,
 568    /// The syntax highlighting style of the chunk.
 569    pub syntax_highlight_id: Option<HighlightId>,
 570    /// The highlight style that has been applied to this chunk in
 571    /// the editor.
 572    pub highlight_style: Option<HighlightStyle>,
 573    /// The severity of diagnostic associated with this chunk, if any.
 574    pub diagnostic_severity: Option<DiagnosticSeverity>,
 575    /// A bitset of which characters are tabs in this string.
 576    pub tabs: u128,
    /// A bitmap of the character indices in this chunk.
 578    pub chars: u128,
 579    /// Whether this chunk of text is marked as unnecessary.
 580    pub is_unnecessary: bool,
 581    /// Whether this chunk of text was originally a tab character.
 582    pub is_tab: bool,
 583    /// Whether this chunk of text was originally an inlay.
 584    pub is_inlay: bool,
 585    /// Whether to underline the corresponding text range in the editor.
 586    pub underline: bool,
 587}
 588
 589/// A set of edits to a given version of a buffer, computed asynchronously.
 590#[derive(Debug, Clone)]
 591pub struct Diff {
 592    pub base_version: clock::Global,
 593    pub line_ending: LineEnding,
 594    pub edits: Vec<(Range<usize>, Arc<str>)>,
 595}
 596
 597#[derive(Debug, Clone, Copy)]
 598pub(crate) struct DiagnosticEndpoint {
 599    offset: usize,
 600    is_start: bool,
 601    underline: bool,
 602    severity: DiagnosticSeverity,
 603    is_unnecessary: bool,
 604}
 605
 606/// A class of characters, used for characterizing a run of text.
 607#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
 608pub enum CharKind {
 609    /// Whitespace.
 610    Whitespace,
 611    /// Punctuation.
 612    Punctuation,
 613    /// Word.
 614    Word,
 615}
 616
 617/// Context for character classification within a specific scope.
 618#[derive(Copy, Clone, Eq, PartialEq, Debug)]
 619pub enum CharScopeContext {
 620    /// Character classification for completion queries.
 621    ///
 622    /// This context treats certain characters as word constituents that would
 623    /// normally be considered punctuation, such as '-' in Tailwind classes
 624    /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
 625    Completion,
 626    /// Character classification for linked edits.
 627    ///
 628    /// This context handles characters that should be treated as part of
 629    /// identifiers during linked editing operations, such as '.' in JSX
 630    /// component names like `<Animated.View>`.
 631    LinkedEdit,
 632}
 633
/// A runnable is a set of data about a buffer region that can be resolved into a task.
 635pub struct Runnable {
 636    pub tags: SmallVec<[RunnableTag; 1]>,
 637    pub language: Arc<Language>,
 638    pub buffer: BufferId,
 639}
 640
 641#[derive(Default, Clone, Debug)]
 642pub struct HighlightedText {
 643    pub text: SharedString,
 644    pub highlights: Vec<(Range<usize>, HighlightStyle)>,
 645}
 646
 647#[derive(Default, Debug)]
 648struct HighlightedTextBuilder {
 649    pub text: String,
 650    highlights: Vec<(Range<usize>, HighlightStyle)>,
 651}
 652
 653impl HighlightedText {
 654    pub fn from_buffer_range<T: ToOffset>(
 655        range: Range<T>,
 656        snapshot: &text::BufferSnapshot,
 657        syntax_snapshot: &SyntaxSnapshot,
 658        override_style: Option<HighlightStyle>,
 659        syntax_theme: &SyntaxTheme,
 660    ) -> Self {
 661        let mut highlighted_text = HighlightedTextBuilder::default();
 662        highlighted_text.add_text_from_buffer_range(
 663            range,
 664            snapshot,
 665            syntax_snapshot,
 666            override_style,
 667            syntax_theme,
 668        );
 669        highlighted_text.build()
 670    }
 671
 672    pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
 673        gpui::StyledText::new(self.text.clone())
 674            .with_default_highlights(default_style, self.highlights.iter().cloned())
 675    }
 676
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// starts within that whitespace, along with a boolean indicating whether more
    /// lines follow.
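    ///
    /// A minimal usage sketch (assuming a `HighlightedText` value named `highlighted_text`):
    /// ```ignore
    /// let (preview, has_more_lines) = highlighted_text.first_line_preview();
    /// ```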
 679    pub fn first_line_preview(self) -> (Self, bool) {
 680        let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
 681        let first_line = &self.text[..newline_ix];
 682
 683        // Trim leading whitespace, unless an edit starts prior to it.
 684        let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
 685        if let Some((first_highlight_range, _)) = self.highlights.first() {
 686            preview_start_ix = preview_start_ix.min(first_highlight_range.start);
 687        }
 688
 689        let preview_text = &first_line[preview_start_ix..];
 690        let preview_highlights = self
 691            .highlights
 692            .into_iter()
 693            .skip_while(|(range, _)| range.end <= preview_start_ix)
 694            .take_while(|(range, _)| range.start < newline_ix)
 695            .filter_map(|(mut range, highlight)| {
 696                range.start = range.start.saturating_sub(preview_start_ix);
 697                range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
 698                if range.is_empty() {
 699                    None
 700                } else {
 701                    Some((range, highlight))
 702                }
 703            });
 704
 705        let preview = Self {
 706            text: SharedString::new(preview_text),
 707            highlights: preview_highlights.collect(),
 708        };
 709
 710        (preview, self.text.len() > newline_ix)
 711    }
 712}
 713
 714impl HighlightedTextBuilder {
 715    pub fn build(self) -> HighlightedText {
 716        HighlightedText {
 717            text: self.text.into(),
 718            highlights: self.highlights,
 719        }
 720    }
 721
 722    pub fn add_text_from_buffer_range<T: ToOffset>(
 723        &mut self,
 724        range: Range<T>,
 725        snapshot: &text::BufferSnapshot,
 726        syntax_snapshot: &SyntaxSnapshot,
 727        override_style: Option<HighlightStyle>,
 728        syntax_theme: &SyntaxTheme,
 729    ) {
 730        let range = range.to_offset(snapshot);
 731        for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
 732            let start = self.text.len();
 733            self.text.push_str(chunk.text);
 734            let end = self.text.len();
 735
 736            if let Some(highlight_style) = chunk
 737                .syntax_highlight_id
 738                .and_then(|id| id.style(syntax_theme))
 739            {
 740                let highlight_style = override_style.map_or(highlight_style, |override_style| {
 741                    highlight_style.highlight(override_style)
 742                });
 743                self.highlights.push((start..end, highlight_style));
 744            } else if let Some(override_style) = override_style {
 745                self.highlights.push((start..end, override_style));
 746            }
 747        }
 748    }
 749
 750    fn highlighted_chunks<'a>(
 751        range: Range<usize>,
 752        snapshot: &'a text::BufferSnapshot,
 753        syntax_snapshot: &'a SyntaxSnapshot,
 754    ) -> BufferChunks<'a> {
 755        let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
 756            grammar
 757                .highlights_config
 758                .as_ref()
 759                .map(|config| &config.query)
 760        });
 761
 762        let highlight_maps = captures
 763            .grammars()
 764            .iter()
 765            .map(|grammar| grammar.highlight_map())
 766            .collect();
 767
 768        BufferChunks::new(
 769            snapshot.as_rope(),
 770            range,
 771            Some((captures, highlight_maps)),
 772            false,
 773            None,
 774        )
 775    }
 776}
 777
 778#[derive(Clone)]
 779pub struct EditPreview {
 780    old_snapshot: text::BufferSnapshot,
 781    applied_edits_snapshot: text::BufferSnapshot,
 782    syntax_snapshot: SyntaxSnapshot,
 783}
 784
 785impl EditPreview {
 786    pub fn as_unified_diff(
 787        &self,
 788        file: Option<&Arc<dyn File>>,
 789        edits: &[(Range<Anchor>, impl AsRef<str>)],
 790    ) -> Option<String> {
 791        let (first, _) = edits.first()?;
 792        let (last, _) = edits.last()?;
 793
 794        let start = first.start.to_point(&self.old_snapshot);
 795        let old_end = last.end.to_point(&self.old_snapshot);
 796        let new_end = last
 797            .end
 798            .bias_right(&self.old_snapshot)
 799            .to_point(&self.applied_edits_snapshot);
 800
 801        let start = Point::new(start.row.saturating_sub(3), 0);
 802        let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
 803        let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
 804
 805        let diff_body = unified_diff_with_offsets(
 806            &self
 807                .old_snapshot
 808                .text_for_range(start..old_end)
 809                .collect::<String>(),
 810            &self
 811                .applied_edits_snapshot
 812                .text_for_range(start..new_end)
 813                .collect::<String>(),
 814            start.row,
 815            start.row,
 816        );
 817
 818        let path = file.map(|f| f.path().as_unix_str());
 819        let header = match path {
 820            Some(p) => format!("--- a/{}\n+++ b/{}\n", p, p),
 821            None => String::new(),
 822        };
 823
 824        Some(format!("{}{}", header, diff_body))
 825    }
 826
 827    pub fn highlight_edits(
 828        &self,
 829        current_snapshot: &BufferSnapshot,
 830        edits: &[(Range<Anchor>, impl AsRef<str>)],
 831        include_deletions: bool,
 832        cx: &App,
 833    ) -> HighlightedText {
 834        let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
 835            return HighlightedText::default();
 836        };
 837
 838        let mut highlighted_text = HighlightedTextBuilder::default();
 839
 840        let visible_range_in_preview_snapshot =
 841            visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
 842        let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
 843
 844        let insertion_highlight_style = HighlightStyle {
 845            background_color: Some(cx.theme().status().created_background),
 846            ..Default::default()
 847        };
 848        let deletion_highlight_style = HighlightStyle {
 849            background_color: Some(cx.theme().status().deleted_background),
 850            ..Default::default()
 851        };
 852        let syntax_theme = cx.theme().syntax();
 853
 854        for (range, edit_text) in edits {
 855            let edit_new_end_in_preview_snapshot = range
 856                .end
 857                .bias_right(&self.old_snapshot)
 858                .to_offset(&self.applied_edits_snapshot);
 859            let edit_start_in_preview_snapshot =
 860                edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
 861
 862            let unchanged_range_in_preview_snapshot =
 863                offset_in_preview_snapshot..edit_start_in_preview_snapshot;
 864            if !unchanged_range_in_preview_snapshot.is_empty() {
 865                highlighted_text.add_text_from_buffer_range(
 866                    unchanged_range_in_preview_snapshot,
 867                    &self.applied_edits_snapshot,
 868                    &self.syntax_snapshot,
 869                    None,
 870                    syntax_theme,
 871                );
 872            }
 873
 874            let range_in_current_snapshot = range.to_offset(current_snapshot);
 875            if include_deletions && !range_in_current_snapshot.is_empty() {
 876                highlighted_text.add_text_from_buffer_range(
 877                    range_in_current_snapshot,
 878                    &current_snapshot.text,
 879                    &current_snapshot.syntax,
 880                    Some(deletion_highlight_style),
 881                    syntax_theme,
 882                );
 883            }
 884
 885            if !edit_text.as_ref().is_empty() {
 886                highlighted_text.add_text_from_buffer_range(
 887                    edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
 888                    &self.applied_edits_snapshot,
 889                    &self.syntax_snapshot,
 890                    Some(insertion_highlight_style),
 891                    syntax_theme,
 892                );
 893            }
 894
 895            offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
 896        }
 897
 898        highlighted_text.add_text_from_buffer_range(
 899            offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
 900            &self.applied_edits_snapshot,
 901            &self.syntax_snapshot,
 902            None,
 903            syntax_theme,
 904        );
 905
 906        highlighted_text.build()
 907    }
 908
 909    pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
 910        cx.new(|cx| {
 911            let mut buffer = Buffer::local_normalized(
 912                self.applied_edits_snapshot.as_rope().clone(),
 913                self.applied_edits_snapshot.line_ending(),
 914                cx,
 915            );
 916            buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
 917            buffer
 918        })
 919    }
 920
 921    pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
 922        let (first, _) = edits.first()?;
 923        let (last, _) = edits.last()?;
 924
 925        let start = first
 926            .start
 927            .bias_left(&self.old_snapshot)
 928            .to_point(&self.applied_edits_snapshot);
 929        let end = last
 930            .end
 931            .bias_right(&self.old_snapshot)
 932            .to_point(&self.applied_edits_snapshot);
 933
 934        // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
 935        let range = Point::new(start.row, 0)
 936            ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
 937
 938        Some(range)
 939    }
 940}
 941
 942#[derive(Clone, Debug, PartialEq, Eq)]
 943pub struct BracketMatch<T> {
 944    pub open_range: Range<T>,
 945    pub close_range: Range<T>,
 946    pub newline_only: bool,
 947    pub syntax_layer_depth: usize,
 948    pub color_index: Option<usize>,
 949}
 950
 951impl<T> BracketMatch<T> {
 952    pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
 953        (self.open_range, self.close_range)
 954    }
 955}
 956
 957impl Buffer {
 958    /// Create a new buffer with the given base text.
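    ///
    /// A minimal sketch, assuming a gpui entity context is available:
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```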
 959    pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
 960        Self::build(
 961            TextBuffer::new(
 962                ReplicaId::LOCAL,
 963                cx.entity_id().as_non_zero_u64().into(),
 964                base_text.into(),
 965            ),
 966            None,
 967            Capability::ReadWrite,
 968        )
 969    }
 970
 971    /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
 972    pub fn local_normalized(
 973        base_text_normalized: Rope,
 974        line_ending: LineEnding,
 975        cx: &Context<Self>,
 976    ) -> Self {
 977        Self::build(
 978            TextBuffer::new_normalized(
 979                ReplicaId::LOCAL,
 980                cx.entity_id().as_non_zero_u64().into(),
 981                line_ending,
 982                base_text_normalized,
 983            ),
 984            None,
 985            Capability::ReadWrite,
 986        )
 987    }
 988
 989    /// Create a new buffer that is a replica of a remote buffer.
 990    pub fn remote(
 991        remote_id: BufferId,
 992        replica_id: ReplicaId,
 993        capability: Capability,
 994        base_text: impl Into<String>,
 995    ) -> Self {
 996        Self::build(
 997            TextBuffer::new(replica_id, remote_id, base_text.into()),
 998            None,
 999            capability,
1000        )
1001    }
1002
1003    /// Create a new buffer that is a replica of a remote buffer, populating its
1004    /// state from the given protobuf message.
1005    pub fn from_proto(
1006        replica_id: ReplicaId,
1007        capability: Capability,
1008        message: proto::BufferState,
1009        file: Option<Arc<dyn File>>,
1010    ) -> Result<Self> {
1011        let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
1012        let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
1013        let mut this = Self::build(buffer, file, capability);
1014        this.text.set_line_ending(proto::deserialize_line_ending(
1015            rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
1016        ));
1017        this.saved_version = proto::deserialize_version(&message.saved_version);
1018        this.saved_mtime = message.saved_mtime.map(|time| time.into());
1019        Ok(this)
1020    }
1021
1022    /// Serialize the buffer's state to a protobuf message.
1023    pub fn to_proto(&self, cx: &App) -> proto::BufferState {
1024        proto::BufferState {
1025            id: self.remote_id().into(),
1026            file: self.file.as_ref().map(|f| f.to_proto(cx)),
1027            base_text: self.base_text().to_string(),
1028            line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
1029            saved_version: proto::serialize_version(&self.saved_version),
1030            saved_mtime: self.saved_mtime.map(|time| time.into()),
1031        }
1032    }
1033
    /// Serializes all of the changes to the buffer since the given version as protobuf operations.
1035    pub fn serialize_ops(
1036        &self,
1037        since: Option<clock::Global>,
1038        cx: &App,
1039    ) -> Task<Vec<proto::Operation>> {
1040        let mut operations = Vec::new();
1041        operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
1042
1043        operations.extend(self.remote_selections.iter().map(|(_, set)| {
1044            proto::serialize_operation(&Operation::UpdateSelections {
1045                selections: set.selections.clone(),
1046                lamport_timestamp: set.lamport_timestamp,
1047                line_mode: set.line_mode,
1048                cursor_shape: set.cursor_shape,
1049            })
1050        }));
1051
1052        for (server_id, diagnostics) in &self.diagnostics {
1053            operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1054                lamport_timestamp: self.diagnostics_timestamp,
1055                server_id: *server_id,
1056                diagnostics: diagnostics.iter().cloned().collect(),
1057            }));
1058        }
1059
1060        for (server_id, completions) in &self.completion_triggers_per_language_server {
1061            operations.push(proto::serialize_operation(
1062                &Operation::UpdateCompletionTriggers {
1063                    triggers: completions.iter().cloned().collect(),
1064                    lamport_timestamp: self.completion_triggers_timestamp,
1065                    server_id: *server_id,
1066                },
1067            ));
1068        }
1069
1070        let text_operations = self.text.operations().clone();
1071        cx.background_spawn(async move {
1072            let since = since.unwrap_or_default();
1073            operations.extend(
1074                text_operations
1075                    .iter()
1076                    .filter(|(_, op)| !since.observed(op.timestamp()))
1077                    .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1078            );
1079            operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1080            operations
1081        })
1082    }
1083
1084    /// Assign a language to the buffer, returning the buffer.
1085    pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1086        self.set_language_async(Some(language), cx);
1087        self
1088    }
1089
1090    /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
1091    #[ztracing::instrument(skip_all, fields(lang = language.config.name.0.as_str()))]
1092    pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1093        self.set_language(Some(language), cx);
1094        self
1095    }
1096
1097    /// Returns the [`Capability`] of this buffer.
1098    pub fn capability(&self) -> Capability {
1099        self.capability
1100    }
1101
1102    /// Whether this buffer can only be read.
1103    pub fn read_only(&self) -> bool {
1104        !self.capability.editable()
1105    }
1106
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1108    pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1109        let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1110        let snapshot = buffer.snapshot();
1111        let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1112        let tree_sitter_data = TreeSitterData::new(snapshot);
1113        Self {
1114            saved_mtime,
1115            tree_sitter_data: Arc::new(tree_sitter_data),
1116            saved_version: buffer.version(),
1117            preview_version: buffer.version(),
1118            reload_task: None,
1119            transaction_depth: 0,
1120            was_dirty_before_starting_transaction: None,
1121            has_unsaved_edits: Cell::new((buffer.version(), false)),
1122            text: buffer,
1123            branch_state: None,
1124            file,
1125            capability,
1126            syntax_map,
1127            reparse: None,
1128            non_text_state_update_count: 0,
1129            sync_parse_timeout: if cfg!(any(test, feature = "test-support")) {
1130                Some(Duration::from_millis(10))
1131            } else {
1132                Some(Duration::from_millis(1))
1133            },
1134            parse_status: watch::channel(ParseStatus::Idle),
1135            autoindent_requests: Default::default(),
1136            wait_for_autoindent_txs: Default::default(),
1137            pending_autoindent: Default::default(),
1138            language: None,
1139            remote_selections: Default::default(),
1140            diagnostics: Default::default(),
1141            diagnostics_timestamp: Lamport::MIN,
1142            completion_triggers: Default::default(),
1143            completion_triggers_per_language_server: Default::default(),
1144            completion_triggers_timestamp: Lamport::MIN,
1145            deferred_ops: OperationQueue::new(),
1146            has_conflict: false,
1147            change_bits: Default::default(),
1148            _subscriptions: Vec::new(),
1149            encoding: encoding_rs::UTF_8,
1150            has_bom: false,
1151            reload_with_encoding_txns: HashMap::default(),
1152        }
1153    }
1154
1155    pub fn build_snapshot(
1156        text: Rope,
1157        language: Option<Arc<Language>>,
1158        language_registry: Option<Arc<LanguageRegistry>>,
1159        cx: &mut App,
1160    ) -> impl Future<Output = BufferSnapshot> + use<> {
1161        let entity_id = cx.reserve_entity::<Self>().entity_id();
1162        let buffer_id = entity_id.as_non_zero_u64().into();
1163        async move {
1164            let text =
1165                TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1166                    .snapshot();
1167            let mut syntax = SyntaxMap::new(&text).snapshot();
1168            if let Some(language) = language.clone() {
1169                let language_registry = language_registry.clone();
1170                syntax.reparse(&text, language_registry, language);
1171            }
1172            let tree_sitter_data = TreeSitterData::new(text.clone());
1173            BufferSnapshot {
1174                text,
1175                syntax,
1176                file: None,
1177                diagnostics: Default::default(),
1178                remote_selections: Default::default(),
1179                tree_sitter_data: Arc::new(tree_sitter_data),
1180                language,
1181                non_text_state_update_count: 0,
1182                capability: Capability::ReadOnly,
1183            }
1184        }
1185    }
1186
1187    pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1188        let entity_id = cx.reserve_entity::<Self>().entity_id();
1189        let buffer_id = entity_id.as_non_zero_u64().into();
1190        let text = TextBuffer::new_normalized(
1191            ReplicaId::LOCAL,
1192            buffer_id,
1193            Default::default(),
1194            Rope::new(),
1195        )
1196        .snapshot();
1197        let syntax = SyntaxMap::new(&text).snapshot();
1198        let tree_sitter_data = TreeSitterData::new(text.clone());
1199        BufferSnapshot {
1200            text,
1201            syntax,
1202            tree_sitter_data: Arc::new(tree_sitter_data),
1203            file: None,
1204            diagnostics: Default::default(),
1205            remote_selections: Default::default(),
1206            language: None,
1207            non_text_state_update_count: 0,
1208            capability: Capability::ReadOnly,
1209        }
1210    }
1211
1212    #[cfg(any(test, feature = "test-support"))]
1213    pub fn build_snapshot_sync(
1214        text: Rope,
1215        language: Option<Arc<Language>>,
1216        language_registry: Option<Arc<LanguageRegistry>>,
1217        cx: &mut App,
1218    ) -> BufferSnapshot {
1219        let entity_id = cx.reserve_entity::<Self>().entity_id();
1220        let buffer_id = entity_id.as_non_zero_u64().into();
1221        let text =
1222            TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1223                .snapshot();
1224        let mut syntax = SyntaxMap::new(&text).snapshot();
1225        if let Some(language) = language.clone() {
1226            syntax.reparse(&text, language_registry, language);
1227        }
1228        let tree_sitter_data = TreeSitterData::new(text.clone());
1229        BufferSnapshot {
1230            text,
1231            syntax,
1232            tree_sitter_data: Arc::new(tree_sitter_data),
1233            file: None,
1234            diagnostics: Default::default(),
1235            remote_selections: Default::default(),
1236            language,
1237            non_text_state_update_count: 0,
1238            capability: Capability::ReadOnly,
1239        }
1240    }
1241
1242    /// Retrieve a snapshot of the buffer's current state. This is computationally
1243    /// cheap, and allows reading from the buffer on a background thread.
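    ///
    /// A minimal sketch of reading from a snapshot off the main thread, assuming
    /// a `Buffer` reference named `buffer` and a gpui `App` context named `cx`:
    /// ```ignore
    /// let snapshot = buffer.snapshot();
    /// let task = cx.background_spawn(async move {
    ///     // The snapshot can be read freely without touching the buffer entity.
    ///     snapshot.text_for_range(0..snapshot.len()).collect::<String>()
    /// });
    /// ```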
1244    pub fn snapshot(&self) -> BufferSnapshot {
1245        let text = self.text.snapshot();
1246        let mut syntax_map = self.syntax_map.lock();
1247        syntax_map.interpolate(&text);
1248        let syntax = syntax_map.snapshot();
1249
1250        let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1251            Arc::new(TreeSitterData::new(text.clone()))
1252        } else {
1253            self.tree_sitter_data.clone()
1254        };
1255
1256        BufferSnapshot {
1257            text,
1258            syntax,
1259            tree_sitter_data,
1260            file: self.file.clone(),
1261            remote_selections: self.remote_selections.clone(),
1262            diagnostics: self.diagnostics.clone(),
1263            language: self.language.clone(),
1264            non_text_state_update_count: self.non_text_state_update_count,
1265            capability: self.capability,
1266        }
1267    }
1268
1269    pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1270        let this = cx.entity();
1271        cx.new(|cx| {
1272            let mut branch = Self {
1273                branch_state: Some(BufferBranchState {
1274                    base_buffer: this.clone(),
1275                    merged_operations: Default::default(),
1276                }),
1277                language: self.language.clone(),
1278                has_conflict: self.has_conflict,
1279                has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1280                _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1281                ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1282            };
1283            if let Some(language_registry) = self.language_registry() {
1284                branch.set_language_registry(language_registry);
1285            }
1286
1287            // Reparse the branch buffer so that we get syntax highlighting immediately.
1288            branch.reparse(cx, true);
1289
1290            branch
1291        })
1292    }
1293
1294    pub fn preview_edits(
1295        &self,
1296        edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1297        cx: &App,
1298    ) -> Task<EditPreview> {
1299        let registry = self.language_registry();
1300        let language = self.language().cloned();
1301        let old_snapshot = self.text.snapshot();
1302        let mut branch_buffer = self.text.branch();
1303        let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1304        cx.background_spawn(async move {
1305            if !edits.is_empty() {
1306                if let Some(language) = language.clone() {
1307                    syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1308                }
1309
1310                branch_buffer.edit(edits.iter().cloned());
1311                let snapshot = branch_buffer.snapshot();
1312                syntax_snapshot.interpolate(&snapshot);
1313
1314                if let Some(language) = language {
1315                    syntax_snapshot.reparse(&snapshot, registry, language);
1316                }
1317            }
1318            EditPreview {
1319                old_snapshot,
1320                applied_edits_snapshot: branch_buffer.snapshot(),
1321                syntax_snapshot,
1322            }
1323        })
1324    }
1325
1326    /// Applies all of the changes in this buffer that intersect any of the
1327    /// given `ranges` to its base buffer.
1328    ///
1329    /// If `ranges` is empty, then all changes will be applied. This buffer must
1330    /// be a branch buffer to call this method.
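    ///
    /// A minimal sketch, assuming `branch` was created via [`Buffer::branch`]:
    /// ```ignore
    /// branch.update(cx, |branch, cx| {
    ///     // An empty range list applies every change back to the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```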
1331    pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1332        let Some(base_buffer) = self.base_buffer() else {
1333            debug_panic!("not a branch buffer");
1334            return;
1335        };
1336
1337        let mut ranges = if ranges.is_empty() {
1338            &[0..usize::MAX]
1339        } else {
1340            ranges.as_slice()
1341        }
1342        .iter()
1343        .peekable();
1344
1345        let mut edits = Vec::new();
1346        for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1347            let mut is_included = false;
1348            while let Some(range) = ranges.peek() {
1349                if range.end < edit.new.start {
1350                    ranges.next().unwrap();
1351                } else {
1352                    if range.start <= edit.new.end {
1353                        is_included = true;
1354                    }
1355                    break;
1356                }
1357            }
1358
1359            if is_included {
1360                edits.push((
1361                    edit.old.clone(),
1362                    self.text_for_range(edit.new.clone()).collect::<String>(),
1363                ));
1364            }
1365        }
1366
1367        let operation = base_buffer.update(cx, |base_buffer, cx| {
1368            // cx.emit(BufferEvent::DiffBaseChanged);
1369            base_buffer.edit(edits, None, cx)
1370        });
1371
1372        if let Some(operation) = operation
1373            && let Some(BufferBranchState {
1374                merged_operations, ..
1375            }) = &mut self.branch_state
1376        {
1377            merged_operations.push(operation);
1378        }
1379    }
1380
1381    fn on_base_buffer_event(
1382        &mut self,
1383        _: Entity<Buffer>,
1384        event: &BufferEvent,
1385        cx: &mut Context<Self>,
1386    ) {
1387        let BufferEvent::Operation { operation, .. } = event else {
1388            return;
1389        };
1390        let Some(BufferBranchState {
1391            merged_operations, ..
1392        }) = &mut self.branch_state
1393        else {
1394            return;
1395        };
1396
1397        let mut operation_to_undo = None;
1398        if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1399            && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1400        {
1401            merged_operations.remove(ix);
1402            operation_to_undo = Some(operation.timestamp);
1403        }
1404
1405        self.apply_ops([operation.clone()], cx);
1406
1407        if let Some(timestamp) = operation_to_undo {
1408            let counts = [(timestamp, u32::MAX)].into_iter().collect();
1409            self.undo_operations(counts, cx);
1410        }
1411    }
1412
1413    #[cfg(test)]
1414    pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1415        &self.text
1416    }
1417
1418    /// Retrieve a snapshot of the buffer's raw text, without any
1419    /// language-related state like the syntax tree or diagnostics.
1420    #[ztracing::instrument(skip_all)]
1421    pub fn text_snapshot(&self) -> text::BufferSnapshot {
1422        self.text.snapshot()
1423    }
1424
1425    /// The file associated with the buffer, if any.
1426    pub fn file(&self) -> Option<&Arc<dyn File>> {
1427        self.file.as_ref()
1428    }
1429
1430    /// The version of the buffer that was last saved or reloaded from disk.
1431    pub fn saved_version(&self) -> &clock::Global {
1432        &self.saved_version
1433    }
1434
1435    /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1436    pub fn saved_mtime(&self) -> Option<MTime> {
1437        self.saved_mtime
1438    }
1439
1440    /// Returns the character encoding of the buffer's file.
1441    pub fn encoding(&self) -> &'static Encoding {
1442        self.encoding
1443    }
1444
1445    /// Sets the character encoding of the buffer.
1446    pub fn set_encoding(&mut self, encoding: &'static Encoding) {
1447        self.encoding = encoding;
1448    }
1449
1450    /// Returns whether the buffer has a Byte Order Mark.
1451    pub fn has_bom(&self) -> bool {
1452        self.has_bom
1453    }
1454
1455    /// Sets whether the buffer has a Byte Order Mark.
1456    pub fn set_has_bom(&mut self, has_bom: bool) {
1457        self.has_bom = has_bom;
1458    }
1459
1460    /// Assign a language to the buffer, reparsing it asynchronously (blocking only in tests).
1461    pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1462        self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1463    }
1464
1465    /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1466    pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1467        self.set_language_(language, true, cx);
1468    }
1469
1470    #[ztracing::instrument(skip_all)]
1471    fn set_language_(
1472        &mut self,
1473        language: Option<Arc<Language>>,
1474        may_block: bool,
1475        cx: &mut Context<Self>,
1476    ) {
1477        self.non_text_state_update_count += 1;
1478        self.syntax_map.lock().clear(&self.text);
1479        let old_language = std::mem::replace(&mut self.language, language);
1480        self.was_changed();
1481        self.reparse(cx, may_block);
1482        let has_fresh_language =
1483            self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1484        cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1485    }
1486
1487    /// Assign a language registry to the buffer. This allows the buffer to retrieve
1488    /// other languages if parts of the buffer are written in different languages.
1489    pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1490        self.syntax_map
1491            .lock()
1492            .set_language_registry(language_registry);
1493    }
1494
1495    pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1496        self.syntax_map.lock().language_registry()
1497    }
1498
1499    /// Assign the line ending type to the buffer.
1500    pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1501        self.text.set_line_ending(line_ending);
1502
1503        let lamport_timestamp = self.text.lamport_clock.tick();
1504        self.send_operation(
1505            Operation::UpdateLineEnding {
1506                line_ending,
1507                lamport_timestamp,
1508            },
1509            true,
1510            cx,
1511        );
1512    }
1513
1514    /// Assign the buffer a new [`Capability`].
1515    pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1516        if self.capability != capability {
1517            self.capability = capability;
1518            cx.emit(BufferEvent::CapabilityChanged)
1519        }
1520    }
1521
1522    /// This method is called to signal that the buffer has been saved.
1523    pub fn did_save(
1524        &mut self,
1525        version: clock::Global,
1526        mtime: Option<MTime>,
1527        cx: &mut Context<Self>,
1528    ) {
1529        self.saved_version = version.clone();
1530        self.has_unsaved_edits.set((version, false));
1531        self.has_conflict = false;
1532        self.saved_mtime = mtime;
1533        self.was_changed();
1534        cx.emit(BufferEvent::Saved);
1535        cx.notify();
1536    }
1537
1538    /// Reloads the contents of the buffer from disk.
1539    pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1540        self.reload_impl(None, cx)
1541    }
1542
1543    /// Reloads the contents of the buffer from disk using the specified encoding.
1544    ///
1545    /// This bypasses automatic encoding detection heuristics (like BOM checks) for non-Unicode encodings,
1546    /// allowing users to force a specific interpretation of the bytes.
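    ///
    /// A hedged usage sketch, assuming a `buffer: Entity<Buffer>` and an async
    /// `cx` that are not shown here:
    ///
    /// ```ignore
    /// // Re-read the file as Windows-1252 instead of relying on detection.
    /// let reload = buffer.update(cx, |buffer, cx| {
    ///     buffer.reload_with_encoding(encoding_rs::WINDOWS_1252, cx)
    /// });
    /// let _transaction = reload.await;
    /// ```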
1547    pub fn reload_with_encoding(
1548        &mut self,
1549        encoding: &'static Encoding,
1550        cx: &Context<Self>,
1551    ) -> oneshot::Receiver<Option<Transaction>> {
1552        self.reload_impl(Some(encoding), cx)
1553    }
1554
1555    fn reload_impl(
1556        &mut self,
1557        force_encoding: Option<&'static Encoding>,
1558        cx: &Context<Self>,
1559    ) -> oneshot::Receiver<Option<Transaction>> {
1560        let (tx, rx) = futures::channel::oneshot::channel();
1561        let prev_version = self.text.version();
1562
1563        self.reload_task = Some(cx.spawn(async move |this, cx| {
1564            let Some((new_mtime, load_bytes_task, current_encoding)) =
1565                this.update(cx, |this, cx| {
1566                    let file = this.file.as_ref()?.as_local()?;
1567                    Some((
1568                        file.disk_state().mtime(),
1569                        file.load_bytes(cx),
1570                        this.encoding,
1571                    ))
1572                })?
1573            else {
1574                return Ok(());
1575            };
1576
1577            let target_encoding = force_encoding.unwrap_or(current_encoding);
1578
1579            let is_unicode = target_encoding == encoding_rs::UTF_8
1580                || target_encoding == encoding_rs::UTF_16LE
1581                || target_encoding == encoding_rs::UTF_16BE;
1582
1583            let (new_text, has_bom, encoding_used) = if force_encoding.is_some() && !is_unicode {
1584                let bytes = load_bytes_task.await?;
1585                let (cow, _had_errors) = target_encoding.decode_without_bom_handling(&bytes);
1586                (cow.into_owned(), false, target_encoding)
1587            } else {
1588                let bytes = load_bytes_task.await?;
1589                let (cow, used_enc, _had_errors) = target_encoding.decode(&bytes);
1590
1591                let actual_has_bom = if used_enc == encoding_rs::UTF_8 {
1592                    bytes.starts_with(&[0xEF, 0xBB, 0xBF])
1593                } else if used_enc == encoding_rs::UTF_16LE {
1594                    bytes.starts_with(&[0xFF, 0xFE])
1595                } else if used_enc == encoding_rs::UTF_16BE {
1596                    bytes.starts_with(&[0xFE, 0xFF])
1597                } else {
1598                    false
1599                };
1600                (cow.into_owned(), actual_has_bom, used_enc)
1601            };
1602
1603            let diff = this.update(cx, |this, cx| this.diff(new_text, cx))?.await;
1604            this.update(cx, |this, cx| {
1605                if this.version() == diff.base_version {
1606                    this.finalize_last_transaction();
1607                    let old_encoding = this.encoding;
1608                    let old_has_bom = this.has_bom;
1609                    this.apply_diff(diff, cx);
1610                    this.encoding = encoding_used;
1611                    this.has_bom = has_bom;
1612                    let transaction = this.finalize_last_transaction().cloned();
1613                    if let Some(ref txn) = transaction {
1614                        if old_encoding != encoding_used || old_has_bom != has_bom {
1615                            this.reload_with_encoding_txns
1616                                .insert(txn.id, (old_encoding, old_has_bom));
1617                        }
1618                    }
1619                    tx.send(transaction).ok();
1620                    this.has_conflict = false;
1621                    this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1622                } else {
1623                    if !diff.edits.is_empty()
1624                        || this
1625                            .edits_since::<usize>(&diff.base_version)
1626                            .next()
1627                            .is_some()
1628                    {
1629                        this.has_conflict = true;
1630                    }
1631
1632                    this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1633                }
1634
1635                this.reload_task.take();
1636            })
1637        }));
1638        rx
1639    }
1640
1641    /// This method is called to signal that the buffer has been reloaded.
1642    pub fn did_reload(
1643        &mut self,
1644        version: clock::Global,
1645        line_ending: LineEnding,
1646        mtime: Option<MTime>,
1647        cx: &mut Context<Self>,
1648    ) {
1649        self.saved_version = version;
1650        self.has_unsaved_edits
1651            .set((self.saved_version.clone(), false));
1652        self.text.set_line_ending(line_ending);
1653        self.saved_mtime = mtime;
1654        cx.emit(BufferEvent::Reloaded);
1655        cx.notify();
1656    }
1657
1658    /// Updates the [`File`] backing this buffer. This should be called when
1659    /// the file has changed or has been deleted.
1660    pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1661        let was_dirty = self.is_dirty();
1662        let mut file_changed = false;
1663
1664        if let Some(old_file) = self.file.as_ref() {
1665            if new_file.path() != old_file.path() {
1666                file_changed = true;
1667            }
1668
1669            let old_state = old_file.disk_state();
1670            let new_state = new_file.disk_state();
1671            if old_state != new_state {
1672                file_changed = true;
1673                if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1674                    cx.emit(BufferEvent::ReloadNeeded)
1675                }
1676            }
1677        } else {
1678            file_changed = true;
1679        };
1680
1681        self.file = Some(new_file);
1682        if file_changed {
1683            self.was_changed();
1684            self.non_text_state_update_count += 1;
1685            if was_dirty != self.is_dirty() {
1686                cx.emit(BufferEvent::DirtyChanged);
1687            }
1688            cx.emit(BufferEvent::FileHandleChanged);
1689            cx.notify();
1690        }
1691    }
1692
1693    pub fn base_buffer(&self) -> Option<Entity<Self>> {
1694        Some(self.branch_state.as_ref()?.base_buffer.clone())
1695    }
1696
1697    /// Returns the primary [`Language`] assigned to this [`Buffer`].
1698    pub fn language(&self) -> Option<&Arc<Language>> {
1699        self.language.as_ref()
1700    }
1701
1702    /// Returns the [`Language`] at the given location.
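    ///
    /// With language injections, this returns the innermost syntax layer whose
    /// included ranges cover the position, falling back to the buffer's primary
    /// language. A hedged sketch with a hypothetical offset:
    ///
    /// ```ignore
    /// // In a Markdown buffer containing a fenced Rust block, a position inside
    /// // the fence resolves to the injected Rust language; positions outside it
    /// // resolve to Markdown.
    /// let language = buffer.language_at(offset_inside_code_block);
    /// ```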
1703    pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1704        let offset = position.to_offset(self);
1705        let mut is_first = true;
1706        let start_anchor = self.anchor_before(offset);
1707        let end_anchor = self.anchor_after(offset);
1708        self.syntax_map
1709            .lock()
1710            .layers_for_range(offset..offset, &self.text, false)
1711            .filter(|layer| {
1712                if is_first {
1713                    is_first = false;
1714                    return true;
1715                }
1716
1717                layer
1718                    .included_sub_ranges
1719                    .map(|sub_ranges| {
1720                        sub_ranges.iter().any(|sub_range| {
1721                            let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1722                            let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1723                            !is_before_start && !is_after_end
1724                        })
1725                    })
1726                    .unwrap_or(true)
1727            })
1728            .last()
1729            .map(|info| info.language.clone())
1730            .or_else(|| self.language.clone())
1731    }
1732
1733    /// Returns each [`Language`] for the active syntax layers at the given location.
1734    pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1735        let offset = position.to_offset(self);
1736        let mut languages: Vec<Arc<Language>> = self
1737            .syntax_map
1738            .lock()
1739            .layers_for_range(offset..offset, &self.text, false)
1740            .map(|info| info.language.clone())
1741            .collect();
1742
1743        if languages.is_empty()
1744            && let Some(buffer_language) = self.language()
1745        {
1746            languages.push(buffer_language.clone());
1747        }
1748
1749        languages
1750    }
1751
1752    /// An integer version number that accounts for all updates besides
1753    /// the buffer's text itself (which is versioned via a version vector).
1754    pub fn non_text_state_update_count(&self) -> usize {
1755        self.non_text_state_update_count
1756    }
1757
1758    /// Whether the buffer is being parsed in the background.
1759    #[cfg(any(test, feature = "test-support"))]
1760    pub fn is_parsing(&self) -> bool {
1761        self.reparse.is_some()
1762    }
1763
1764    /// Indicates whether the buffer contains any regions that may be
1765    /// written in a language that hasn't been loaded yet.
1766    pub fn contains_unknown_injections(&self) -> bool {
1767        self.syntax_map.lock().contains_unknown_injections()
1768    }
1769
1770    #[cfg(any(test, feature = "test-support"))]
1771    pub fn set_sync_parse_timeout(&mut self, timeout: Option<Duration>) {
1772        self.sync_parse_timeout = timeout;
1773    }
1774
1775    fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1776        match Arc::get_mut(&mut self.tree_sitter_data) {
1777            Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1778            None => {
1779                let tree_sitter_data = TreeSitterData::new(snapshot);
1780                self.tree_sitter_data = Arc::new(tree_sitter_data)
1781            }
1782        }
1783    }
1784
1785    /// Called after an edit to synchronize the buffer's main parse tree with
1786    /// the buffer's new underlying state.
1787    ///
1788    /// Locks the syntax map and interpolates the edits since the last reparse
1789    /// into the foreground syntax tree.
1790    ///
1791    /// Then takes a stable snapshot of the syntax map before unlocking it.
1792    /// The snapshot with the interpolated edits is sent to a background thread,
1793    /// where we ask Tree-sitter to perform an incremental parse.
1794    ///
1795    /// Meanwhile, in the foreground, if `may_block` is true we block the main
1796    /// thread for up to 1ms waiting for the parse to complete, and proceed
1797    /// synchronously as soon as it does.
1798    ///
1799    /// If the timeout elapses first, we spawn a second task that waits for the
1800    /// parse to complete, and return with the interpolated tree still in the
1801    /// foreground. When the background parse completes, it calls back into the
1802    /// main thread and assigns the new parse state to the foreground.
1803    ///
1804    /// If the buffer or grammar changed since the start of the background parse,
1805    /// we initiate an additional reparse recursively. To avoid concurrent parses
1806    /// for the same buffer, we only initiate a new parse if we are not already
1807    /// parsing in the background.
1808    #[ztracing::instrument(skip_all)]
1809    pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1810        if self.text.version() != *self.tree_sitter_data.version() {
1811            self.invalidate_tree_sitter_data(self.text.snapshot());
1812        }
1813        if self.reparse.is_some() {
1814            return;
1815        }
1816        let language = if let Some(language) = self.language.clone() {
1817            language
1818        } else {
1819            return;
1820        };
1821
1822        let text = self.text_snapshot();
1823        let parsed_version = self.version();
1824
1825        let mut syntax_map = self.syntax_map.lock();
1826        syntax_map.interpolate(&text);
1827        let language_registry = syntax_map.language_registry();
1828        let mut syntax_snapshot = syntax_map.snapshot();
1829        drop(syntax_map);
1830
1831        self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1832        if may_block && let Some(sync_parse_timeout) = self.sync_parse_timeout {
1833            if let Ok(()) = syntax_snapshot.reparse_with_timeout(
1834                &text,
1835                language_registry.clone(),
1836                language.clone(),
1837                sync_parse_timeout,
1838            ) {
1839                self.did_finish_parsing(syntax_snapshot, Duration::from_millis(300), cx);
1840                self.reparse = None;
1841                return;
1842            }
1843        }
1844
1845        let parse_task = cx.background_spawn({
1846            let language = language.clone();
1847            let language_registry = language_registry.clone();
1848            async move {
1849                syntax_snapshot.reparse(&text, language_registry, language);
1850                syntax_snapshot
1851            }
1852        });
1853
1854        self.reparse = Some(cx.spawn(async move |this, cx| {
1855            let new_syntax_map = parse_task.await;
1856            this.update(cx, move |this, cx| {
1857                let grammar_changed = || {
1858                    this.language
1859                        .as_ref()
1860                        .is_none_or(|current_language| !Arc::ptr_eq(&language, current_language))
1861                };
1862                let language_registry_changed = || {
1863                    new_syntax_map.contains_unknown_injections()
1864                        && language_registry.is_some_and(|registry| {
1865                            registry.version() != new_syntax_map.language_registry_version()
1866                        })
1867                };
1868                let parse_again = this.version.changed_since(&parsed_version)
1869                    || language_registry_changed()
1870                    || grammar_changed();
1871                this.did_finish_parsing(new_syntax_map, Duration::ZERO, cx);
1872                this.reparse = None;
1873                if parse_again {
1874                    this.reparse(cx, false);
1875                }
1876            })
1877            .ok();
1878        }));
1879    }
1880
1881    fn did_finish_parsing(
1882        &mut self,
1883        syntax_snapshot: SyntaxSnapshot,
1884        block_budget: Duration,
1885        cx: &mut Context<Self>,
1886    ) {
1887        self.non_text_state_update_count += 1;
1888        self.syntax_map.lock().did_parse(syntax_snapshot);
1889        self.was_changed();
1890        self.request_autoindent(cx, block_budget);
1891        self.parse_status.0.send(ParseStatus::Idle).unwrap();
1892        self.invalidate_tree_sitter_data(self.text.snapshot());
1893        cx.emit(BufferEvent::Reparsed);
1894        cx.notify();
1895    }
1896
1897    pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1898        self.parse_status.1.clone()
1899    }
1900
1901    /// Wait until the buffer is no longer parsing.
1902    pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1903        let mut parse_status = self.parse_status();
1904        async move {
1905            while *parse_status.borrow() != ParseStatus::Idle {
1906                if parse_status.changed().await.is_err() {
1907                    break;
1908                }
1909            }
1910        }
1911    }
1912
1913    /// Assign to the buffer a set of diagnostics created by a given language server.
1914    pub fn update_diagnostics(
1915        &mut self,
1916        server_id: LanguageServerId,
1917        diagnostics: DiagnosticSet,
1918        cx: &mut Context<Self>,
1919    ) {
1920        let lamport_timestamp = self.text.lamport_clock.tick();
1921        let op = Operation::UpdateDiagnostics {
1922            server_id,
1923            diagnostics: diagnostics.iter().cloned().collect(),
1924            lamport_timestamp,
1925        };
1926
1927        self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1928        self.send_operation(op, true, cx);
1929    }
1930
1931    pub fn buffer_diagnostics(
1932        &self,
1933        for_server: Option<LanguageServerId>,
1934    ) -> Vec<&DiagnosticEntry<Anchor>> {
1935        match for_server {
1936            Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1937                Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1938                Err(_) => Vec::new(),
1939            },
1940            None => self
1941                .diagnostics
1942                .iter()
1943                .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1944                .collect(),
1945        }
1946    }
1947
1948    fn request_autoindent(&mut self, cx: &mut Context<Self>, block_budget: Duration) {
1949        if let Some(indent_sizes) = self.compute_autoindents() {
1950            let indent_sizes = cx.background_spawn(indent_sizes);
1951            match cx
1952                .foreground_executor()
1953                .block_with_timeout(block_budget, indent_sizes)
1954            {
1955                Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1956                Err(indent_sizes) => {
1957                    self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1958                        let indent_sizes = indent_sizes.await;
1959                        this.update(cx, |this, cx| {
1960                            this.apply_autoindents(indent_sizes, cx);
1961                        })
1962                        .ok();
1963                    }));
1964                }
1965            }
1966        } else {
1967            self.autoindent_requests.clear();
1968            for tx in self.wait_for_autoindent_txs.drain(..) {
1969                tx.send(()).ok();
1970            }
1971        }
1972    }
1973
1974    fn compute_autoindents(
1975        &self,
1976    ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1977        let max_rows_between_yields = 100;
1978        let snapshot = self.snapshot();
1979        if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1980            return None;
1981        }
1982
1983        let autoindent_requests = self.autoindent_requests.clone();
1984        Some(async move {
1985            let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1986            for request in autoindent_requests {
1987                // Resolve each edited range to its row in the current buffer and in the
1988                // buffer before this batch of edits.
1989                let mut row_ranges = Vec::new();
1990                let mut old_to_new_rows = BTreeMap::new();
1991                let mut language_indent_sizes_by_new_row = Vec::new();
1992                for entry in &request.entries {
1993                    let position = entry.range.start;
1994                    let new_row = position.to_point(&snapshot).row;
1995                    let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1996                    language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1997
1998                    if let Some(old_row) = entry.old_row {
1999                        old_to_new_rows.insert(old_row, new_row);
2000                    }
2001                    row_ranges.push((new_row..new_end_row, entry.original_indent_column));
2002                }
2003
2004                // Build a map containing the suggested indentation for each of the edited lines
2005                // with respect to the state of the buffer before these edits. This map is keyed
2006                // by the rows for these lines in the current state of the buffer.
2007                let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
2008                let old_edited_ranges =
2009                    contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
2010                let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
2011                let mut language_indent_size = IndentSize::default();
2012                for old_edited_range in old_edited_ranges {
2013                    let suggestions = request
2014                        .before_edit
2015                        .suggest_autoindents(old_edited_range.clone())
2016                        .into_iter()
2017                        .flatten();
2018                    for (old_row, suggestion) in old_edited_range.zip(suggestions) {
2019                        if let Some(suggestion) = suggestion {
2020                            let new_row = *old_to_new_rows.get(&old_row).unwrap();
2021
2022                            // Find the indent size based on the language for this row.
2023                            while let Some((row, size)) = language_indent_sizes.peek() {
2024                                if *row > new_row {
2025                                    break;
2026                                }
2027                                language_indent_size = *size;
2028                                language_indent_sizes.next();
2029                            }
2030
2031                            let suggested_indent = old_to_new_rows
2032                                .get(&suggestion.basis_row)
2033                                .and_then(|from_row| {
2034                                    Some(old_suggestions.get(from_row).copied()?.0)
2035                                })
2036                                .unwrap_or_else(|| {
2037                                    request
2038                                        .before_edit
2039                                        .indent_size_for_line(suggestion.basis_row)
2040                                })
2041                                .with_delta(suggestion.delta, language_indent_size);
2042                            old_suggestions
2043                                .insert(new_row, (suggested_indent, suggestion.within_error));
2044                        }
2045                    }
2046                    yield_now().await;
2047                }
2048
2049                // Compute new suggestions for each line, but only include them in the result
2050                // if they differ from the old suggestion for that line.
2051                let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
2052                let mut language_indent_size = IndentSize::default();
2053                for (row_range, original_indent_column) in row_ranges {
2054                    let new_edited_row_range = if request.is_block_mode {
2055                        row_range.start..row_range.start + 1
2056                    } else {
2057                        row_range.clone()
2058                    };
2059
2060                    let suggestions = snapshot
2061                        .suggest_autoindents(new_edited_row_range.clone())
2062                        .into_iter()
2063                        .flatten();
2064                    for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
2065                        if let Some(suggestion) = suggestion {
2066                            // Find the indent size based on the language for this row.
2067                            while let Some((row, size)) = language_indent_sizes.peek() {
2068                                if *row > new_row {
2069                                    break;
2070                                }
2071                                language_indent_size = *size;
2072                                language_indent_sizes.next();
2073                            }
2074
2075                            let suggested_indent = indent_sizes
2076                                .get(&suggestion.basis_row)
2077                                .copied()
2078                                .map(|e| e.0)
2079                                .unwrap_or_else(|| {
2080                                    snapshot.indent_size_for_line(suggestion.basis_row)
2081                                })
2082                                .with_delta(suggestion.delta, language_indent_size);
2083
2084                            if old_suggestions.get(&new_row).is_none_or(
2085                                |(old_indentation, was_within_error)| {
2086                                    suggested_indent != *old_indentation
2087                                        && (!suggestion.within_error || *was_within_error)
2088                                },
2089                            ) {
2090                                indent_sizes.insert(
2091                                    new_row,
2092                                    (suggested_indent, request.ignore_empty_lines),
2093                                );
2094                            }
2095                        }
2096                    }
2097
2098                    if let (true, Some(original_indent_column)) =
2099                        (request.is_block_mode, original_indent_column)
2100                    {
2101                        let new_indent =
2102                            if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
2103                                *indent
2104                            } else {
2105                                snapshot.indent_size_for_line(row_range.start)
2106                            };
2107                        let delta = new_indent.len as i64 - original_indent_column as i64;
2108                        if delta != 0 {
2109                            for row in row_range.skip(1) {
2110                                indent_sizes.entry(row).or_insert_with(|| {
2111                                    let mut size = snapshot.indent_size_for_line(row);
2112                                    if size.kind == new_indent.kind {
2113                                        match delta.cmp(&0) {
2114                                            Ordering::Greater => size.len += delta as u32,
2115                                            Ordering::Less => {
2116                                                size.len = size.len.saturating_sub(-delta as u32)
2117                                            }
2118                                            Ordering::Equal => {}
2119                                        }
2120                                    }
2121                                    (size, request.ignore_empty_lines)
2122                                });
2123                            }
2124                        }
2125                    }
2126
2127                    yield_now().await;
2128                }
2129            }
2130
2131            indent_sizes
2132                .into_iter()
2133                .filter_map(|(row, (indent, ignore_empty_lines))| {
2134                    if ignore_empty_lines && snapshot.line_len(row) == 0 {
2135                        None
2136                    } else {
2137                        Some((row, indent))
2138                    }
2139                })
2140                .collect()
2141        })
2142    }
2143
2144    fn apply_autoindents(
2145        &mut self,
2146        indent_sizes: BTreeMap<u32, IndentSize>,
2147        cx: &mut Context<Self>,
2148    ) {
2149        self.autoindent_requests.clear();
2150        for tx in self.wait_for_autoindent_txs.drain(..) {
2151            tx.send(()).ok();
2152        }
2153
2154        let edits: Vec<_> = indent_sizes
2155            .into_iter()
2156            .filter_map(|(row, indent_size)| {
2157                let current_size = indent_size_for_line(self, row);
2158                Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2159            })
2160            .collect();
2161
2162        let preserve_preview = self.preserve_preview();
2163        self.edit(edits, None, cx);
2164        if preserve_preview {
2165            self.refresh_preview();
2166        }
2167    }
2168
2169    /// Create a minimal edit that will cause the given row to be indented
2170    /// with the given size. After applying this edit, the length of the line
2171    /// will always be at least `new_size.len`.
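    ///
    /// A hedged sketch, assuming `IndentSize`'s `len`/`kind` fields and a
    /// `snapshot`/`buffer` pair that are not shown here: widening a line's
    /// indent by two columns inserts exactly two indent characters at column 0.
    ///
    /// ```ignore
    /// let current = snapshot.indent_size_for_line(3);
    /// let desired = IndentSize { len: current.len + 2, kind: current.kind };
    /// if let Some((range, text)) = Buffer::edit_for_indent_size_adjustment(3, current, desired) {
    ///     buffer.edit([(range, text)], None, cx);
    /// }
    /// ```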
2172    pub fn edit_for_indent_size_adjustment(
2173        row: u32,
2174        current_size: IndentSize,
2175        new_size: IndentSize,
2176    ) -> Option<(Range<Point>, String)> {
2177        if new_size.kind == current_size.kind {
2178            match new_size.len.cmp(&current_size.len) {
2179                Ordering::Greater => {
2180                    let point = Point::new(row, 0);
2181                    Some((
2182                        point..point,
2183                        iter::repeat(new_size.char())
2184                            .take((new_size.len - current_size.len) as usize)
2185                            .collect::<String>(),
2186                    ))
2187                }
2188
2189                Ordering::Less => Some((
2190                    Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2191                    String::new(),
2192                )),
2193
2194                Ordering::Equal => None,
2195            }
2196        } else {
2197            Some((
2198                Point::new(row, 0)..Point::new(row, current_size.len),
2199                iter::repeat(new_size.char())
2200                    .take(new_size.len as usize)
2201                    .collect::<String>(),
2202            ))
2203        }
2204    }
2205
2206    /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2207    /// and the given new text.
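    ///
    /// A hedged sketch of the compute-then-apply flow (the same pattern used by
    /// reloads in this file), assuming a `buffer: Entity<Buffer>` and an async
    /// `cx`:
    ///
    /// ```ignore
    /// // Computing the diff off the main thread and applying it preserves
    /// // anchors and undo history, unlike replacing the text wholesale.
    /// let diff = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx))?.await;
    /// buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx))?;
    /// ```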
2208    pub fn diff<T>(&self, new_text: T, cx: &App) -> Task<Diff>
2209    where
2210        T: AsRef<str> + Send + 'static,
2211    {
2212        let old_text = self.as_rope().clone();
2213        let base_version = self.version();
2214        cx.background_spawn(async move {
2215            let old_text = old_text.to_string();
2216            let mut new_text = new_text.as_ref().to_owned();
2217            let line_ending = LineEnding::detect(&new_text);
2218            LineEnding::normalize(&mut new_text);
2219            let edits = text_diff(&old_text, &new_text);
2220            Diff {
2221                base_version,
2222                line_ending,
2223                edits,
2224            }
2225        })
2226    }
2227
2228    /// Spawns a background task that searches the buffer for any whitespace
2229    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2230    pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2231        let old_text = self.as_rope().clone();
2232        let line_ending = self.line_ending();
2233        let base_version = self.version();
2234        cx.background_spawn(async move {
2235            let ranges = trailing_whitespace_ranges(&old_text);
2236            let empty = Arc::<str>::from("");
2237            Diff {
2238                base_version,
2239                line_ending,
2240                edits: ranges
2241                    .into_iter()
2242                    .map(|range| (range, empty.clone()))
2243                    .collect(),
2244            }
2245        })
2246    }
2247
2248    /// Ensures that the buffer ends with a single newline character, and
2249    /// no other whitespace. Skips if the buffer is empty.
2250    pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2251        let len = self.len();
2252        if len == 0 {
2253            return;
2254        }
2255        let mut offset = len;
2256        for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2257            let non_whitespace_len = chunk
2258                .trim_end_matches(|c: char| c.is_ascii_whitespace())
2259                .len();
2260            offset -= chunk.len();
2261            offset += non_whitespace_len;
2262            if non_whitespace_len != 0 {
2263                if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2264                    return;
2265                }
2266                break;
2267            }
2268        }
2269        self.edit([(offset..len, "\n")], None, cx);
2270    }
2271
2272    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2273    /// calculated, then adjust the diff to account for those changes, and discard any
2274    /// parts of the diff that conflict with those changes.
2275    pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2276        let snapshot = self.snapshot();
2277        let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2278        let mut delta = 0;
2279        let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2280            while let Some(edit_since) = edits_since.peek() {
2281                // If the edit occurs after a diff hunk, then it does not
2282                // affect that hunk.
2283                if edit_since.old.start > range.end {
2284                    break;
2285                }
2286                // If the edit precedes the diff hunk, then adjust the hunk
2287                // to reflect the edit.
2288                else if edit_since.old.end < range.start {
2289                    delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2290                    edits_since.next();
2291                }
2292                // If the edit intersects a diff hunk, then discard that hunk.
2293                else {
2294                    return None;
2295                }
2296            }
2297
2298            let start = (range.start as i64 + delta) as usize;
2299            let end = (range.end as i64 + delta) as usize;
2300            Some((start..end, new_text))
2301        });
2302
2303        self.start_transaction();
2304        self.text.set_line_ending(diff.line_ending);
2305        self.edit(adjusted_edits, None, cx);
2306        self.end_transaction(cx)
2307    }
2308
2309    pub fn has_unsaved_edits(&self) -> bool {
2310        let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2311
2312        if last_version == self.version {
2313            self.has_unsaved_edits
2314                .set((last_version, has_unsaved_edits));
2315            return has_unsaved_edits;
2316        }
2317
2318        let has_edits = self.has_edits_since(&self.saved_version);
2319        self.has_unsaved_edits
2320            .set((self.version.clone(), has_edits));
2321        has_edits
2322    }
2323
2324    /// Checks if the buffer has unsaved changes.
2325    pub fn is_dirty(&self) -> bool {
2326        if self.capability == Capability::ReadOnly {
2327            return false;
2328        }
2329        if self.has_conflict {
2330            return true;
2331        }
2332        match self.file.as_ref().map(|f| f.disk_state()) {
2333            Some(DiskState::New) | Some(DiskState::Deleted) => {
2334                !self.is_empty() && self.has_unsaved_edits()
2335            }
2336            _ => self.has_unsaved_edits(),
2337        }
2338    }
2339
2340    /// Marks the buffer as having a conflict regardless of current buffer state.
2341    pub fn set_conflict(&mut self) {
2342        self.has_conflict = true;
2343    }
2344
2345    /// Checks if the buffer and its file have both changed since the buffer
2346    /// was last saved or reloaded.
2347    pub fn has_conflict(&self) -> bool {
2348        if self.has_conflict {
2349            return true;
2350        }
2351        let Some(file) = self.file.as_ref() else {
2352            return false;
2353        };
2354        match file.disk_state() {
2355            DiskState::New => false,
2356            DiskState::Present { mtime } => match self.saved_mtime {
2357                Some(saved_mtime) => {
2358                    mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2359                }
2360                None => true,
2361            },
2362            DiskState::Deleted => false,
2363            DiskState::Historic { .. } => false,
2364        }
2365    }
2366
2367    /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2368    pub fn subscribe(&mut self) -> Subscription<usize> {
2369        self.text.subscribe()
2370    }
2371
2372    /// Adds a bit to the list of bits that are set when the buffer's text changes.
2373    ///
2374    /// This allows downstream code to check if the buffer's text has changed without
2375    /// waiting for an effect cycle, which would be required if using events.
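    ///
    /// A minimal sketch: the buffer only holds a weak reference, so the caller
    /// keeps the `Rc` alive for as long as it cares about change notifications.
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // ...after some edits may have happened...
    /// if changed.take() {
    ///     // React to the change without waiting for a `BufferEvent`.
    /// }
    /// ```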
2376    pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2377        if let Err(ix) = self
2378            .change_bits
2379            .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2380        {
2381            self.change_bits.insert(ix, bit);
2382        }
2383    }
2384
2385    /// Set the change bit for all "listeners".
2386    fn was_changed(&mut self) {
2387        self.change_bits.retain(|change_bit| {
2388            change_bit
2389                .upgrade()
2390                .inspect(|bit| {
2391                    _ = bit.replace(true);
2392                })
2393                .is_some()
2394        });
2395    }
2396
2397    /// Starts a transaction, if one is not already in-progress. When undoing or
2398    /// redoing edits, all of the edits performed within a transaction are undone
2399    /// or redone together.
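    ///
    /// A hedged sketch of grouping two edits so that a single undo reverts both
    /// (the offsets and text are hypothetical):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(buffer.len()..buffer.len(), "}\n")], None, cx);
    /// buffer.end_transaction(cx);
    /// ```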
2400    pub fn start_transaction(&mut self) -> Option<TransactionId> {
2401        self.start_transaction_at(Instant::now())
2402    }
2403
2404    /// Starts a transaction, providing the current time. Subsequent transactions
2405    /// that occur within a short period of time will be grouped together. This
2406    /// is controlled by the buffer's undo grouping duration.
2407    pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2408        self.transaction_depth += 1;
2409        if self.was_dirty_before_starting_transaction.is_none() {
2410            self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2411        }
2412        self.text.start_transaction_at(now)
2413    }
2414
2415    /// Terminates the current transaction, if this is the outermost transaction.
2416    pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2417        self.end_transaction_at(Instant::now(), cx)
2418    }
2419
2420    /// Terminates the current transaction, providing the current time. Subsequent transactions
2421    /// that occur within a short period of time will be grouped together. This
2422    /// is controlled by the buffer's undo grouping duration.
2423    pub fn end_transaction_at(
2424        &mut self,
2425        now: Instant,
2426        cx: &mut Context<Self>,
2427    ) -> Option<TransactionId> {
2428        assert!(self.transaction_depth > 0);
2429        self.transaction_depth -= 1;
2430        let was_dirty = if self.transaction_depth == 0 {
2431            self.was_dirty_before_starting_transaction.take().unwrap()
2432        } else {
2433            false
2434        };
2435        if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2436            self.did_edit(&start_version, was_dirty, cx);
2437            Some(transaction_id)
2438        } else {
2439            None
2440        }
2441    }
2442
2443    /// Manually add a transaction to the buffer's undo history.
2444    pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2445        self.text.push_transaction(transaction, now);
2446    }
2447
2448    /// Differs from `push_transaction` in that it does not clear the redo
2449    /// stack. Intended to be used to create a parent transaction to merge
2450    /// potential child transactions into.
2451    ///
2452    /// The caller is responsible for removing it from the undo history using
2453    /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2454    /// are merged into this transaction, the caller is responsible for ensuring
2455    /// the redo stack is cleared. The easiest way to ensure the redo stack is
2456    /// cleared is to create transactions with the usual `start_transaction` and
2457    /// `end_transaction` methods and merge the resulting transactions into
2458    /// the transaction created by this method.
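    ///
    /// A hedged sketch of the pattern described above, with a hypothetical edit:
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "hello")], None, cx);
    /// match buffer.end_transaction(cx) {
    ///     Some(child) => buffer.merge_transactions(child, parent),
    ///     None => {
    ///         // Nothing was merged, so drop the placeholder from the undo history.
    ///         buffer.forget_transaction(parent);
    ///     }
    /// }
    /// ```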
2459    pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2460        self.text.push_empty_transaction(now)
2461    }
2462
2463    /// Prevent the last transaction from being grouped with any subsequent transactions,
2464    /// even if they occur within the buffer's undo grouping duration.
2465    pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2466        self.text.finalize_last_transaction()
2467    }
2468
2469    /// Manually group all changes since a given transaction.
2470    pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2471        self.text.group_until_transaction(transaction_id);
2472    }
2473
2474    /// Manually remove a transaction from the buffer's undo history.
2475    pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2476        self.text.forget_transaction(transaction_id)
2477    }
2478
2479    /// Retrieve a transaction from the buffer's undo history.
2480    pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2481        self.text.get_transaction(transaction_id)
2482    }
2483
2484    /// Manually merge two transactions in the buffer's undo history.
2485    pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2486        self.text.merge_transactions(transaction, destination);
2487    }
2488
2489    /// Waits for the buffer to receive operations with the given timestamps.
2490    pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2491        &mut self,
2492        edit_ids: It,
2493    ) -> impl Future<Output = Result<()>> + use<It> {
2494        self.text.wait_for_edits(edit_ids)
2495    }
2496
2497    /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2498    pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2499        &mut self,
2500        anchors: It,
2501    ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2502        self.text.wait_for_anchors(anchors)
2503    }
2504
2505    /// Waits for the buffer to receive operations up to the given version.
2506    pub fn wait_for_version(
2507        &mut self,
2508        version: clock::Global,
2509    ) -> impl Future<Output = Result<()>> + use<> {
2510        self.text.wait_for_version(version)
2511    }
2512
2513    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2514    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2515    pub fn give_up_waiting(&mut self) {
2516        self.text.give_up_waiting();
2517    }
2518
2519    pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2520        let mut rx = None;
2521        if !self.autoindent_requests.is_empty() {
2522            let channel = oneshot::channel();
2523            self.wait_for_autoindent_txs.push(channel.0);
2524            rx = Some(channel.1);
2525        }
2526        rx
2527    }
2528
2529    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2530    pub fn set_active_selections(
2531        &mut self,
2532        selections: Arc<[Selection<Anchor>]>,
2533        line_mode: bool,
2534        cursor_shape: CursorShape,
2535        cx: &mut Context<Self>,
2536    ) {
2537        let lamport_timestamp = self.text.lamport_clock.tick();
2538        self.remote_selections.insert(
2539            self.text.replica_id(),
2540            SelectionSet {
2541                selections: selections.clone(),
2542                lamport_timestamp,
2543                line_mode,
2544                cursor_shape,
2545            },
2546        );
2547        self.send_operation(
2548            Operation::UpdateSelections {
2549                selections,
2550                line_mode,
2551                lamport_timestamp,
2552                cursor_shape,
2553            },
2554            true,
2555            cx,
2556        );
2557        self.non_text_state_update_count += 1;
2558        cx.notify();
2559    }
2560
2561    /// Clears the selections, so that other replicas of the buffer do not see any selections for
2562    /// this replica.
2563    pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2564        if self
2565            .remote_selections
2566            .get(&self.text.replica_id())
2567            .is_none_or(|set| !set.selections.is_empty())
2568        {
2569            self.set_active_selections(Arc::default(), false, Default::default(), cx);
2570        }
2571    }
2572
2573    pub fn set_agent_selections(
2574        &mut self,
2575        selections: Arc<[Selection<Anchor>]>,
2576        line_mode: bool,
2577        cursor_shape: CursorShape,
2578        cx: &mut Context<Self>,
2579    ) {
2580        let lamport_timestamp = self.text.lamport_clock.tick();
2581        self.remote_selections.insert(
2582            ReplicaId::AGENT,
2583            SelectionSet {
2584                selections,
2585                lamport_timestamp,
2586                line_mode,
2587                cursor_shape,
2588            },
2589        );
2590        self.non_text_state_update_count += 1;
2591        cx.notify();
2592    }
2593
2594    pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2595        self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2596    }
2597
2598    /// Replaces the buffer's entire text.
2599    pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2600    where
2601        T: Into<Arc<str>>,
2602    {
2603        self.autoindent_requests.clear();
2604        self.edit([(0..self.len(), text)], None, cx)
2605    }
2606
2607    /// Appends the given text to the end of the buffer.
2608    pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2609    where
2610        T: Into<Arc<str>>,
2611    {
2612        self.edit([(self.len()..self.len(), text)], None, cx)
2613    }
2614
2615    /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2616    /// delete, and a string of text to insert at that location. Adjacent edits are coalesced.
2617    ///
2618    /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2619    /// request for the edited ranges, which will be processed when the buffer finishes
2620    /// parsing.
2621    ///
2622    /// Parsing takes place at the end of a transaction, and may compute synchronously
2623    /// or asynchronously, depending on the changes.
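    ///
    /// A minimal sketch with hypothetical offsets and text; any `ToOffset` range
    /// type (`usize`, `Point`, `Anchor`, ...) works, as long as a single call
    /// uses one range type:
    ///
    /// ```ignore
    /// // Replace one range and append at the end of the buffer in a single call.
    /// buffer.edit(
    ///     [(4..9, "world"), (buffer.len()..buffer.len(), "\n")],
    ///     None,
    ///     cx,
    /// );
    /// ```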
2624    pub fn edit<I, S, T>(
2625        &mut self,
2626        edits_iter: I,
2627        autoindent_mode: Option<AutoindentMode>,
2628        cx: &mut Context<Self>,
2629    ) -> Option<clock::Lamport>
2630    where
2631        I: IntoIterator<Item = (Range<S>, T)>,
2632        S: ToOffset,
2633        T: Into<Arc<str>>,
2634    {
2635        self.edit_internal(edits_iter, autoindent_mode, true, cx)
2636    }
2637
2638    /// Like [`edit`](Self::edit), but does not coalesce adjacent edits.
2639    pub fn edit_non_coalesce<I, S, T>(
2640        &mut self,
2641        edits_iter: I,
2642        autoindent_mode: Option<AutoindentMode>,
2643        cx: &mut Context<Self>,
2644    ) -> Option<clock::Lamport>
2645    where
2646        I: IntoIterator<Item = (Range<S>, T)>,
2647        S: ToOffset,
2648        T: Into<Arc<str>>,
2649    {
2650        self.edit_internal(edits_iter, autoindent_mode, false, cx)
2651    }
2652
2653    fn edit_internal<I, S, T>(
2654        &mut self,
2655        edits_iter: I,
2656        autoindent_mode: Option<AutoindentMode>,
2657        coalesce_adjacent: bool,
2658        cx: &mut Context<Self>,
2659    ) -> Option<clock::Lamport>
2660    where
2661        I: IntoIterator<Item = (Range<S>, T)>,
2662        S: ToOffset,
2663        T: Into<Arc<str>>,
2664    {
2665        // Skip invalid edits and coalesce contiguous ones.
2666        let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2667
2668        for (range, new_text) in edits_iter {
2669            let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2670
2671            if range.start > range.end {
2672                mem::swap(&mut range.start, &mut range.end);
2673            }
2674            let new_text = new_text.into();
2675            if !new_text.is_empty() || !range.is_empty() {
2676                let prev_edit = edits.last_mut();
2677                let should_coalesce = prev_edit.as_ref().is_some_and(|(prev_range, _)| {
2678                    if coalesce_adjacent {
2679                        prev_range.end >= range.start
2680                    } else {
2681                        prev_range.end > range.start
2682                    }
2683                });
2684
2685                if let Some((prev_range, prev_text)) = prev_edit
2686                    && should_coalesce
2687                {
2688                    prev_range.end = cmp::max(prev_range.end, range.end);
2689                    *prev_text = format!("{prev_text}{new_text}").into();
2690                } else {
2691                    edits.push((range, new_text));
2692                }
2693            }
2694        }
2695        if edits.is_empty() {
2696            return None;
2697        }
2698
2699        self.start_transaction();
2700        self.pending_autoindent.take();
2701        let autoindent_request = autoindent_mode
2702            .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2703
2704        let edit_operation = self.text.edit(edits.iter().cloned());
2705        let edit_id = edit_operation.timestamp();
2706
2707        if let Some((before_edit, mode)) = autoindent_request {
2708            let mut delta = 0isize;
2709            let mut previous_setting = None;
2710            let entries: Vec<_> = edits
2711                .into_iter()
2712                .enumerate()
2713                .zip(&edit_operation.as_edit().unwrap().new_text)
2714                .filter(|((_, (range, _)), _)| {
2715                    let language = before_edit.language_at(range.start);
2716                    let language_id = language.map(|l| l.id());
2717                    if let Some((cached_language_id, auto_indent)) = previous_setting
2718                        && cached_language_id == language_id
2719                    {
2720                        auto_indent
2721                    } else {
2722                        // The auto-indent setting is not present in editorconfigs, hence
2723                        // we can avoid passing the file here.
2724                        let auto_indent =
2725                            language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2726                        previous_setting = Some((language_id, auto_indent));
2727                        auto_indent
2728                    }
2729                })
2730                .map(|((ix, (range, _)), new_text)| {
2731                    let new_text_length = new_text.len();
2732                    let old_start = range.start.to_point(&before_edit);
2733                    let new_start = (delta + range.start as isize) as usize;
2734                    let range_len = range.end - range.start;
2735                    delta += new_text_length as isize - range_len as isize;
2736
2737                    // Decide what range of the insertion to auto-indent, and whether
2738                    // the first line of the insertion should be considered a newly-inserted line
2739                    // or an edit to an existing line.
2740                    let mut range_of_insertion_to_indent = 0..new_text_length;
2741                    let mut first_line_is_new = true;
2742
2743                    let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2744                    let old_line_end = before_edit.line_len(old_start.row);
2745
2746                    if old_start.column > old_line_start {
2747                        first_line_is_new = false;
2748                    }
2749
2750                    if !new_text.contains('\n')
2751                        && (old_start.column + (range_len as u32) < old_line_end
2752                            || old_line_end == old_line_start)
2753                    {
2754                        first_line_is_new = false;
2755                    }
2756
2757                    // When inserting text starting with a newline, avoid auto-indenting the
2758                    // previous line.
2759                    if new_text.starts_with('\n') {
2760                        range_of_insertion_to_indent.start += 1;
2761                        first_line_is_new = true;
2762                    }
2763
2764                    let mut original_indent_column = None;
2765                    if let AutoindentMode::Block {
2766                        original_indent_columns,
2767                    } = &mode
2768                    {
2769                        original_indent_column = Some(if new_text.starts_with('\n') {
2770                            indent_size_for_text(
2771                                new_text[range_of_insertion_to_indent.clone()].chars(),
2772                            )
2773                            .len
2774                        } else {
2775                            original_indent_columns
2776                                .get(ix)
2777                                .copied()
2778                                .flatten()
2779                                .unwrap_or_else(|| {
2780                                    indent_size_for_text(
2781                                        new_text[range_of_insertion_to_indent.clone()].chars(),
2782                                    )
2783                                    .len
2784                                })
2785                        });
2786
2787                        // Avoid auto-indenting the line after the edit.
2788                        if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2789                            range_of_insertion_to_indent.end -= 1;
2790                        }
2791                    }
2792
2793                    AutoindentRequestEntry {
2794                        original_indent_column,
2795                        old_row: if first_line_is_new {
2796                            None
2797                        } else {
2798                            Some(old_start.row)
2799                        },
2800                        indent_size: before_edit.language_indent_size_at(range.start, cx),
2801                        range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2802                            ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2803                    }
2804                })
2805                .collect();
2806
2807            if !entries.is_empty() {
2808                self.autoindent_requests.push(Arc::new(AutoindentRequest {
2809                    before_edit,
2810                    entries,
2811                    is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2812                    ignore_empty_lines: false,
2813                }));
2814            }
2815        }
2816
2817        self.end_transaction(cx);
2818        self.send_operation(Operation::Buffer(edit_operation), true, cx);
2819        Some(edit_id)
2820    }
2821
2822    fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2823        self.was_changed();
2824
2825        if self.edits_since::<usize>(old_version).next().is_none() {
2826            return;
2827        }
2828
2829        self.reparse(cx, true);
2830        cx.emit(BufferEvent::Edited);
2831        if was_dirty != self.is_dirty() {
2832            cx.emit(BufferEvent::DirtyChanged);
2833        }
2834        cx.notify();
2835    }
2836
2837    pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2838    where
2839        I: IntoIterator<Item = Range<T>>,
2840        T: ToOffset + Copy,
2841    {
2842        let before_edit = self.snapshot();
2843        let entries = ranges
2844            .into_iter()
2845            .map(|range| AutoindentRequestEntry {
2846                range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2847                old_row: None,
2848                indent_size: before_edit.language_indent_size_at(range.start, cx),
2849                original_indent_column: None,
2850            })
2851            .collect();
2852        self.autoindent_requests.push(Arc::new(AutoindentRequest {
2853            before_edit,
2854            entries,
2855            is_block_mode: false,
2856            ignore_empty_lines: true,
2857        }));
2858        self.request_autoindent(cx, Duration::from_micros(300));
2859    }
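
    // A minimal sketch of a call site (assumed, not taken from this file): inside an
    // entity update closure with `buffer: &mut Buffer` and `cx: &mut Context<Buffer>`,
    // a caller could request re-indentation of the first 100 bytes like so:
    //
    //     buffer.autoindent_ranges([0..100_usize], cx);
    //
    // The indentation itself is computed later by the task scheduled in
    // `request_autoindent` above.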
2860
2861    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2862    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2863    pub fn insert_empty_line(
2864        &mut self,
2865        position: impl ToPoint,
2866        space_above: bool,
2867        space_below: bool,
2868        cx: &mut Context<Self>,
2869    ) -> Point {
2870        let mut position = position.to_point(self);
2871
2872        self.start_transaction();
2873
2874        self.edit(
2875            [(position..position, "\n")],
2876            Some(AutoindentMode::EachLine),
2877            cx,
2878        );
2879
2880        if position.column > 0 {
2881            position += Point::new(1, 0);
2882        }
2883
2884        if !self.is_line_blank(position.row) {
2885            self.edit(
2886                [(position..position, "\n")],
2887                Some(AutoindentMode::EachLine),
2888                cx,
2889            );
2890        }
2891
2892        if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2893            self.edit(
2894                [(position..position, "\n")],
2895                Some(AutoindentMode::EachLine),
2896                cx,
2897            );
2898            position.row += 1;
2899        }
2900
2901        if space_below
2902            && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2903        {
2904            self.edit(
2905                [(position..position, "\n")],
2906                Some(AutoindentMode::EachLine),
2907                cx,
2908            );
2909        }
2910
2911        self.end_transaction(cx);
2912
2913        position
2914    }
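
    // A minimal sketch of a call site (assumed, not taken from this file): insert an
    // empty, auto-indented line at the cursor, padding it with blank lines above and
    // below when the neighboring lines are non-blank.
    //
    //     let cursor = Point::new(10, 4);
    //     let new_line_start = buffer.insert_empty_line(cursor, true, true, cx);
    //     // `new_line_start` is the Point at which the freshly created blank line begins.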
2915
2916    /// Applies the given remote operations to the buffer.
2917    pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2918        self.pending_autoindent.take();
2919        let was_dirty = self.is_dirty();
2920        let old_version = self.version.clone();
2921        let mut deferred_ops = Vec::new();
2922        let buffer_ops = ops
2923            .into_iter()
2924            .filter_map(|op| match op {
2925                Operation::Buffer(op) => Some(op),
2926                _ => {
2927                    if self.can_apply_op(&op) {
2928                        self.apply_op(op, cx);
2929                    } else {
2930                        deferred_ops.push(op);
2931                    }
2932                    None
2933                }
2934            })
2935            .collect::<Vec<_>>();
2936        for operation in buffer_ops.iter() {
2937            self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2938        }
2939        self.text.apply_ops(buffer_ops);
2940        self.deferred_ops.insert(deferred_ops);
2941        self.flush_deferred_ops(cx);
2942        self.did_edit(&old_version, was_dirty, cx);
2943        // Notify independently of whether the buffer was edited as the operations could include a
2944        // selection update.
2945        cx.notify();
2946    }
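
    // A minimal sketch of the remote path (assumed call site; `receive_from_peer` is a
    // hypothetical helper standing in for whatever deserializes operations off the wire):
    // buffer operations are applied immediately, while selection, diagnostic, and other
    // non-text updates may be deferred until the anchors they reference can be resolved.
    //
    //     let incoming: Vec<Operation> = receive_from_peer();
    //     buffer.apply_ops(incoming, cx);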
2947
2948    fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2949        let mut deferred_ops = Vec::new();
2950        for op in self.deferred_ops.drain().iter().cloned() {
2951            if self.can_apply_op(&op) {
2952                self.apply_op(op, cx);
2953            } else {
2954                deferred_ops.push(op);
2955            }
2956        }
2957        self.deferred_ops.insert(deferred_ops);
2958    }
2959
2960    pub fn has_deferred_ops(&self) -> bool {
2961        !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2962    }
2963
2964    fn can_apply_op(&self, operation: &Operation) -> bool {
2965        match operation {
2966            Operation::Buffer(_) => {
2967                unreachable!("buffer operations should never be applied at this layer")
2968            }
2969            Operation::UpdateDiagnostics {
2970                diagnostics: diagnostic_set,
2971                ..
2972            } => diagnostic_set.iter().all(|diagnostic| {
2973                self.text.can_resolve(&diagnostic.range.start)
2974                    && self.text.can_resolve(&diagnostic.range.end)
2975            }),
2976            Operation::UpdateSelections { selections, .. } => selections
2977                .iter()
2978                .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2979            Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2980        }
2981    }
2982
2983    fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2984        match operation {
2985            Operation::Buffer(_) => {
2986                unreachable!("buffer operations should never be applied at this layer")
2987            }
2988            Operation::UpdateDiagnostics {
2989                server_id,
2990                diagnostics: diagnostic_set,
2991                lamport_timestamp,
2992            } => {
2993                let snapshot = self.snapshot();
2994                self.apply_diagnostic_update(
2995                    server_id,
2996                    DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2997                    lamport_timestamp,
2998                    cx,
2999                );
3000            }
3001            Operation::UpdateSelections {
3002                selections,
3003                lamport_timestamp,
3004                line_mode,
3005                cursor_shape,
3006            } => {
3007                if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
3008                    && set.lamport_timestamp > lamport_timestamp
3009                {
3010                    return;
3011                }
3012
3013                self.remote_selections.insert(
3014                    lamport_timestamp.replica_id,
3015                    SelectionSet {
3016                        selections,
3017                        lamport_timestamp,
3018                        line_mode,
3019                        cursor_shape,
3020                    },
3021                );
3022                self.text.lamport_clock.observe(lamport_timestamp);
3023                self.non_text_state_update_count += 1;
3024            }
3025            Operation::UpdateCompletionTriggers {
3026                triggers,
3027                lamport_timestamp,
3028                server_id,
3029            } => {
3030                if triggers.is_empty() {
3031                    self.completion_triggers_per_language_server
3032                        .remove(&server_id);
3033                    self.completion_triggers = self
3034                        .completion_triggers_per_language_server
3035                        .values()
3036                        .flat_map(|triggers| triggers.iter().cloned())
3037                        .collect();
3038                } else {
3039                    self.completion_triggers_per_language_server
3040                        .insert(server_id, triggers.iter().cloned().collect());
3041                    self.completion_triggers.extend(triggers);
3042                }
3043                self.text.lamport_clock.observe(lamport_timestamp);
3044            }
3045            Operation::UpdateLineEnding {
3046                line_ending,
3047                lamport_timestamp,
3048            } => {
3049                self.text.set_line_ending(line_ending);
3050                self.text.lamport_clock.observe(lamport_timestamp);
3051            }
3052        }
3053    }
3054
3055    fn apply_diagnostic_update(
3056        &mut self,
3057        server_id: LanguageServerId,
3058        diagnostics: DiagnosticSet,
3059        lamport_timestamp: clock::Lamport,
3060        cx: &mut Context<Self>,
3061    ) {
3062        if lamport_timestamp > self.diagnostics_timestamp {
3063            let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
3064            if diagnostics.is_empty() {
3065                if let Ok(ix) = ix {
3066                    self.diagnostics.remove(ix);
3067                }
3068            } else {
3069                match ix {
3070                    Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
3071                    Ok(ix) => self.diagnostics[ix].1 = diagnostics,
3072                };
3073            }
3074            self.diagnostics_timestamp = lamport_timestamp;
3075            self.non_text_state_update_count += 1;
3076            self.text.lamport_clock.observe(lamport_timestamp);
3077            cx.notify();
3078            cx.emit(BufferEvent::DiagnosticsUpdated);
3079        }
3080    }
3081
3082    fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
3083        self.was_changed();
3084        cx.emit(BufferEvent::Operation {
3085            operation,
3086            is_local,
3087        });
3088    }
3089
3090    /// Removes the selections for a given peer.
3091    pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
3092        self.remote_selections.remove(&replica_id);
3093        cx.notify();
3094    }
3095
3096    /// Undoes the most recent transaction.
3097    pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3098        let was_dirty = self.is_dirty();
3099        let old_version = self.version.clone();
3100
3101        if let Some((transaction_id, operation)) = self.text.undo() {
3102            self.send_operation(Operation::Buffer(operation), true, cx);
3103            self.did_edit(&old_version, was_dirty, cx);
3104            self.restore_encoding_for_transaction(transaction_id, was_dirty);
3105            Some(transaction_id)
3106        } else {
3107            None
3108        }
3109    }
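
    // A minimal sketch of a call site (assumed, not taken from this file): pairing an
    // edit with `undo`. The returned `TransactionId`, if any, names the transaction
    // that was rolled back.
    //
    //     buffer.edit([(Point::new(0, 0)..Point::new(0, 0), "hello ")], None, cx);
    //     if let Some(txn_id) = buffer.undo(cx) {
    //         // the "hello " insertion has been reverted; `txn_id` identifies it
    //     }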
3110
3111    /// Manually undoes a specific transaction in the buffer's undo history.
3112    pub fn undo_transaction(
3113        &mut self,
3114        transaction_id: TransactionId,
3115        cx: &mut Context<Self>,
3116    ) -> bool {
3117        let was_dirty = self.is_dirty();
3118        let old_version = self.version.clone();
3119        if let Some(operation) = self.text.undo_transaction(transaction_id) {
3120            self.send_operation(Operation::Buffer(operation), true, cx);
3121            self.did_edit(&old_version, was_dirty, cx);
3122            true
3123        } else {
3124            false
3125        }
3126    }
3127
3128    /// Manually undoes all changes after a given transaction in the buffer's undo history.
3129    pub fn undo_to_transaction(
3130        &mut self,
3131        transaction_id: TransactionId,
3132        cx: &mut Context<Self>,
3133    ) -> bool {
3134        let was_dirty = self.is_dirty();
3135        let old_version = self.version.clone();
3136
3137        let operations = self.text.undo_to_transaction(transaction_id);
3138        let undone = !operations.is_empty();
3139        for operation in operations {
3140            self.send_operation(Operation::Buffer(operation), true, cx);
3141        }
3142        if undone {
3143            self.did_edit(&old_version, was_dirty, cx)
3144        }
3145        undone
3146    }
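
    // A minimal sketch of a call site (assumed; `saved_checkpoint` is a hypothetical,
    // previously recorded transaction id): rolling the buffer back past everything that
    // happened after that transaction.
    //
    //     if let Some(checkpoint_txn) = saved_checkpoint {
    //         let rolled_back = buffer.undo_to_transaction(checkpoint_txn, cx);
    //         // `rolled_back` is false when there was nothing to undo.
    //     }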
3147
3148    pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
3149        let was_dirty = self.is_dirty();
3150        let operation = self.text.undo_operations(counts);
3151        let old_version = self.version.clone();
3152        self.send_operation(Operation::Buffer(operation), true, cx);
3153        self.did_edit(&old_version, was_dirty, cx);
3154    }
3155
3156    /// Redoes the most recent transaction.
3157    pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3158        let was_dirty = self.is_dirty();
3159        let old_version = self.version.clone();
3160
3161        if let Some((transaction_id, operation)) = self.text.redo() {
3162            self.send_operation(Operation::Buffer(operation), true, cx);
3163            self.did_edit(&old_version, was_dirty, cx);
3164            self.restore_encoding_for_transaction(transaction_id, was_dirty);
3165            Some(transaction_id)
3166        } else {
3167            None
3168        }
3169    }
3170
3171    fn restore_encoding_for_transaction(&mut self, transaction_id: TransactionId, was_dirty: bool) {
3172        if let Some((old_encoding, old_has_bom)) =
3173            self.reload_with_encoding_txns.get(&transaction_id)
3174        {
3175            let current_encoding = self.encoding;
3176            let current_has_bom = self.has_bom;
3177            self.encoding = *old_encoding;
3178            self.has_bom = *old_has_bom;
3179            if !was_dirty {
3180                self.saved_version = self.version.clone();
3181                self.has_unsaved_edits
3182                    .set((self.saved_version.clone(), false));
3183            }
3184            self.reload_with_encoding_txns
3185                .insert(transaction_id, (current_encoding, current_has_bom));
3186        }
3187    }
3188
3189    /// Manually redoes all changes until a given transaction in the buffer's redo history.
3190    pub fn redo_to_transaction(
3191        &mut self,
3192        transaction_id: TransactionId,
3193        cx: &mut Context<Self>,
3194    ) -> bool {
3195        let was_dirty = self.is_dirty();
3196        let old_version = self.version.clone();
3197
3198        let operations = self.text.redo_to_transaction(transaction_id);
3199        let redone = !operations.is_empty();
3200        for operation in operations {
3201            self.send_operation(Operation::Buffer(operation), true, cx);
3202        }
3203        if redone {
3204            self.did_edit(&old_version, was_dirty, cx)
3205        }
3206        redone
3207    }
3208
3209    /// Overrides the current completion triggers with the user-provided completion triggers.
3210    pub fn set_completion_triggers(
3211        &mut self,
3212        server_id: LanguageServerId,
3213        triggers: BTreeSet<String>,
3214        cx: &mut Context<Self>,
3215    ) {
3216        self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3217        if triggers.is_empty() {
3218            self.completion_triggers_per_language_server
3219                .remove(&server_id);
3220            self.completion_triggers = self
3221                .completion_triggers_per_language_server
3222                .values()
3223                .flat_map(|triggers| triggers.iter().cloned())
3224                .collect();
3225        } else {
3226            self.completion_triggers_per_language_server
3227                .insert(server_id, triggers.clone());
3228            self.completion_triggers.extend(triggers.iter().cloned());
3229        }
3230        self.send_operation(
3231            Operation::UpdateCompletionTriggers {
3232                triggers: triggers.into_iter().collect(),
3233                lamport_timestamp: self.completion_triggers_timestamp,
3234                server_id,
3235            },
3236            true,
3237            cx,
3238        );
3239        cx.notify();
3240    }
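
    // A minimal sketch of a call site (assumed; the server id and trigger character are
    // made up): recording the trigger characters reported by a language server, and
    // clearing them again later by passing an empty set.
    //
    //     let server_id = LanguageServerId(0);
    //     buffer.set_completion_triggers(server_id, BTreeSet::from([".".to_string()]), cx);
    //     assert!(buffer.completion_triggers().contains("."));
    //     buffer.set_completion_triggers(server_id, BTreeSet::new(), cx);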
3241
3242    /// Returns the set of strings that trigger a completion menu for this language.
3243    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
3244    pub fn completion_triggers(&self) -> &BTreeSet<String> {
3245        &self.completion_triggers
3246    }
3247
3248    /// Call this directly after performing edits to prevent the preview tab
3249    /// from being dismissed by those edits. It causes `should_dismiss_preview`
3250    /// to return false until there are additional edits.
3251    pub fn refresh_preview(&mut self) {
3252        self.preview_version = self.version.clone();
3253    }
3254
3255    /// Whether we should preserve the preview status of a tab containing this buffer.
3256    pub fn preserve_preview(&self) -> bool {
3257        !self.has_edits_since(&self.preview_version)
3258    }
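
    // A minimal sketch of a call site (assumed, not taken from this file): programmatic
    // edits that should not dismiss a preview tab can be followed by `refresh_preview`,
    // after which `preserve_preview` keeps returning true until further edits arrive.
    //
    //     buffer.edit([(Point::new(0, 0)..Point::new(0, 0), "// generated\n")], None, cx);
    //     buffer.refresh_preview();
    //     assert!(buffer.preserve_preview());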
3259}
3260
3261#[doc(hidden)]
3262#[cfg(any(test, feature = "test-support"))]
3263impl Buffer {
3264    pub fn edit_via_marked_text(
3265        &mut self,
3266        marked_string: &str,
3267        autoindent_mode: Option<AutoindentMode>,
3268        cx: &mut Context<Self>,
3269    ) {
3270        let edits = self.edits_for_marked_text(marked_string);
3271        self.edit(edits, autoindent_mode, cx);
3272    }
3273
3274    pub fn set_group_interval(&mut self, group_interval: Duration) {
3275        self.text.set_group_interval(group_interval);
3276    }
3277
3278    pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3279    where
3280        T: rand::Rng,
3281    {
3282        let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3283        let mut last_end = None;
3284        for _ in 0..old_range_count {
3285            if last_end.is_some_and(|last_end| last_end >= self.len()) {
3286                break;
3287            }
3288
3289            let new_start = last_end.map_or(0, |last_end| last_end + 1);
3290            let mut range = self.random_byte_range(new_start, rng);
3291            if rng.random_bool(0.2) {
3292                mem::swap(&mut range.start, &mut range.end);
3293            }
3294            last_end = Some(range.end);
3295
3296            let new_text_len = rng.random_range(0..10);
3297            let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3298            new_text = new_text.to_uppercase();
3299
3300            edits.push((range, new_text));
3301        }
3302        log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3303        self.edit(edits, None, cx);
3304    }
3305
3306    pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3307        let was_dirty = self.is_dirty();
3308        let old_version = self.version.clone();
3309
3310        let ops = self.text.randomly_undo_redo(rng);
3311        if !ops.is_empty() {
3312            for op in ops {
3313                self.send_operation(Operation::Buffer(op), true, cx);
3314                self.did_edit(&old_version, was_dirty, cx);
3315            }
3316        }
3317    }
3318}
3319
3320impl EventEmitter<BufferEvent> for Buffer {}
3321
3322impl Deref for Buffer {
3323    type Target = TextBuffer;
3324
3325    fn deref(&self) -> &Self::Target {
3326        &self.text
3327    }
3328}
3329
3330impl BufferSnapshot {
3331    /// Returns the [`IndentSize`] of the given line, as determined by its existing
3332    /// leading whitespace.
3333    pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3334        indent_size_for_line(self, row)
3335    }
3336
3337    /// Returns [`IndentSize`] for a given position that respects user settings
3338    /// and language preferences.
3339    pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3340        let settings = language_settings(
3341            self.language_at(position).map(|l| l.name()),
3342            self.file(),
3343            cx,
3344        );
3345        if settings.hard_tabs {
3346            IndentSize::tab()
3347        } else {
3348            IndentSize::spaces(settings.tab_size.get())
3349        }
3350    }
3351
3352    /// Retrieves the suggested indent size for each of the given rows. The unit of indentation
3353    /// is passed in as `single_indent_size`.
3354    pub fn suggested_indents(
3355        &self,
3356        rows: impl Iterator<Item = u32>,
3357        single_indent_size: IndentSize,
3358    ) -> BTreeMap<u32, IndentSize> {
3359        let mut result = BTreeMap::new();
3360
3361        for row_range in contiguous_ranges(rows, 10) {
3362            let suggestions = match self.suggest_autoindents(row_range.clone()) {
3363                Some(suggestions) => suggestions,
3364                _ => break,
3365            };
3366
3367            for (row, suggestion) in row_range.zip(suggestions) {
3368                let indent_size = if let Some(suggestion) = suggestion {
3369                    result
3370                        .get(&suggestion.basis_row)
3371                        .copied()
3372                        .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3373                        .with_delta(suggestion.delta, single_indent_size)
3374                } else {
3375                    self.indent_size_for_line(row)
3376                };
3377
3378                result.insert(row, indent_size);
3379            }
3380        }
3381
3382        result
3383    }
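
    // A minimal sketch of a call site (assumed; `snapshot` is a `BufferSnapshot`):
    // asking for suggested indents on rows 5 through 7, using four spaces as one unit
    // of indentation. The result maps each row to its suggested [`IndentSize`].
    //
    //     let suggestions = snapshot.suggested_indents(5..8, IndentSize::spaces(4));
    //     for (row, indent) in &suggestions {
    //         // `indent.len` is the suggested indentation width, in columns, for `row`
    //     }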
3384
3385    fn suggest_autoindents(
3386        &self,
3387        row_range: Range<u32>,
3388    ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3389        let config = &self.language.as_ref()?.config;
3390        let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3391
3392        #[derive(Debug, Clone)]
3393        struct StartPosition {
3394            start: Point,
3395            suffix: SharedString,
3396            language: Arc<Language>,
3397        }
3398
3399        // Find the suggested indentation ranges based on the syntax tree.
3400        let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3401        let end = Point::new(row_range.end, 0);
3402        let range = (start..end).to_offset(&self.text);
3403        let mut matches = self.syntax.matches_with_options(
3404            range.clone(),
3405            &self.text,
3406            TreeSitterOptions {
3407                max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
3408                max_start_depth: None,
3409            },
3410            |grammar| Some(&grammar.indents_config.as_ref()?.query),
3411        );
3412        let indent_configs = matches
3413            .grammars()
3414            .iter()
3415            .map(|grammar| grammar.indents_config.as_ref().unwrap())
3416            .collect::<Vec<_>>();
3417
3418        let mut indent_ranges = Vec::<Range<Point>>::new();
3419        let mut start_positions = Vec::<StartPosition>::new();
3420        let mut outdent_positions = Vec::<Point>::new();
3421        while let Some(mat) = matches.peek() {
3422            let mut start: Option<Point> = None;
3423            let mut end: Option<Point> = None;
3424
3425            let config = indent_configs[mat.grammar_index];
3426            for capture in mat.captures {
3427                if capture.index == config.indent_capture_ix {
3428                    start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3429                    end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3430                } else if Some(capture.index) == config.start_capture_ix {
3431                    start = Some(Point::from_ts_point(capture.node.end_position()));
3432                } else if Some(capture.index) == config.end_capture_ix {
3433                    end = Some(Point::from_ts_point(capture.node.start_position()));
3434                } else if Some(capture.index) == config.outdent_capture_ix {
3435                    outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3436                } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3437                    start_positions.push(StartPosition {
3438                        start: Point::from_ts_point(capture.node.start_position()),
3439                        suffix: suffix.clone(),
3440                        language: mat.language.clone(),
3441                    });
3442                }
3443            }
3444
3445            matches.advance();
3446            if let Some((start, end)) = start.zip(end) {
3447                if start.row == end.row {
3448                    continue;
3449                }
3450                let range = start..end;
3451                match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3452                    Err(ix) => indent_ranges.insert(ix, range),
3453                    Ok(ix) => {
3454                        let prev_range = &mut indent_ranges[ix];
3455                        prev_range.end = prev_range.end.max(range.end);
3456                    }
3457                }
3458            }
3459        }
3460
3461        let mut error_ranges = Vec::<Range<Point>>::new();
3462        let mut matches = self
3463            .syntax
3464            .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3465        while let Some(mat) = matches.peek() {
3466            let node = mat.captures[0].node;
3467            let start = Point::from_ts_point(node.start_position());
3468            let end = Point::from_ts_point(node.end_position());
3469            let range = start..end;
3470            let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3471                Ok(ix) | Err(ix) => ix,
3472            };
3473            let mut end_ix = ix;
3474            while let Some(existing_range) = error_ranges.get(end_ix) {
3475                if existing_range.end < end {
3476                    end_ix += 1;
3477                } else {
3478                    break;
3479                }
3480            }
3481            error_ranges.splice(ix..end_ix, [range]);
3482            matches.advance();
3483        }
3484
3485        outdent_positions.sort();
3486        for outdent_position in outdent_positions {
3487            // Find the innermost indent range containing this outdent position
3488            // and set its end to the outdent position.
3489            if let Some(range_to_truncate) = indent_ranges
3490                .iter_mut()
3491                .rfind(|indent_range| indent_range.contains(&outdent_position))
3492            {
3493                range_to_truncate.end = outdent_position;
3494            }
3495        }
3496
3497        start_positions.sort_by_key(|b| b.start);
3498
3499        // Find the suggested indentation increases and decreases based on regexes.
3500        let mut regex_outdent_map = HashMap::default();
3501        let mut last_seen_suffix: HashMap<String, Vec<StartPosition>> = HashMap::default();
3502        let mut start_positions_iter = start_positions.iter().peekable();
3503
3504        let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3505        self.for_each_line(
3506            Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3507                ..Point::new(row_range.end, 0),
3508            |row, line| {
3509                let indent_len = self.indent_size_for_line(row).len;
3510                let row_language = self.language_at(Point::new(row, indent_len)).cloned();
3511                let row_language_config = row_language
3512                    .as_ref()
3513                    .map(|lang| lang.config())
3514                    .unwrap_or(config);
3515
3516                if row_language_config
3517                    .decrease_indent_pattern
3518                    .as_ref()
3519                    .is_some_and(|regex| regex.is_match(line))
3520                {
3521                    indent_change_rows.push((row, Ordering::Less));
3522                }
3523                if row_language_config
3524                    .increase_indent_pattern
3525                    .as_ref()
3526                    .is_some_and(|regex| regex.is_match(line))
3527                {
3528                    indent_change_rows.push((row + 1, Ordering::Greater));
3529                }
3530                while let Some(pos) = start_positions_iter.peek() {
3531                    if pos.start.row < row {
3532                        let pos = start_positions_iter.next().unwrap().clone();
3533                        last_seen_suffix
3534                            .entry(pos.suffix.to_string())
3535                            .or_default()
3536                            .push(pos);
3537                    } else {
3538                        break;
3539                    }
3540                }
3541                for rule in &row_language_config.decrease_indent_patterns {
3542                    if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3543                        let row_start_column = self.indent_size_for_line(row).len;
3544                        let basis_row = rule
3545                            .valid_after
3546                            .iter()
3547                            .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3548                            .flatten()
3549                            .filter(|pos| {
3550                                row_language
3551                                    .as_ref()
3552                                    .or(self.language.as_ref())
3553                                    .is_some_and(|lang| Arc::ptr_eq(lang, &pos.language))
3554                            })
3555                            .filter(|pos| pos.start.column <= row_start_column)
3556                            .max_by_key(|pos| pos.start.row);
3557                        if let Some(outdent_to) = basis_row {
3558                            regex_outdent_map.insert(row, outdent_to.start.row);
3559                        }
3560                        break;
3561                    }
3562                }
3563            },
3564        );
3565
3566        let mut indent_changes = indent_change_rows.into_iter().peekable();
3567        let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3568            prev_non_blank_row.unwrap_or(0)
3569        } else {
3570            row_range.start.saturating_sub(1)
3571        };
3572
3573        let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3574        Some(row_range.map(move |row| {
3575            let row_start = Point::new(row, self.indent_size_for_line(row).len);
3576
3577            let mut indent_from_prev_row = false;
3578            let mut outdent_from_prev_row = false;
3579            let mut outdent_to_row = u32::MAX;
3580            let mut from_regex = false;
3581
3582            while let Some((indent_row, delta)) = indent_changes.peek() {
3583                match indent_row.cmp(&row) {
3584                    Ordering::Equal => match delta {
3585                        Ordering::Less => {
3586                            from_regex = true;
3587                            outdent_from_prev_row = true
3588                        }
3589                        Ordering::Greater => {
3590                            indent_from_prev_row = true;
3591                            from_regex = true
3592                        }
3593                        _ => {}
3594                    },
3595
3596                    Ordering::Greater => break,
3597                    Ordering::Less => {}
3598                }
3599
3600                indent_changes.next();
3601            }
3602
3603            for range in &indent_ranges {
3604                if range.start.row >= row {
3605                    break;
3606                }
3607                if range.start.row == prev_row && range.end > row_start {
3608                    indent_from_prev_row = true;
3609                }
3610                if range.end > prev_row_start && range.end <= row_start {
3611                    outdent_to_row = outdent_to_row.min(range.start.row);
3612                }
3613            }
3614
3615            if let Some(basis_row) = regex_outdent_map.get(&row) {
3616                indent_from_prev_row = false;
3617                outdent_to_row = *basis_row;
3618                from_regex = true;
3619            }
3620
3621            let within_error = error_ranges
3622                .iter()
3623                .any(|e| e.start.row < row && e.end > row_start);
3624
3625            let suggestion = if outdent_to_row == prev_row
3626                || (outdent_from_prev_row && indent_from_prev_row)
3627            {
3628                Some(IndentSuggestion {
3629                    basis_row: prev_row,
3630                    delta: Ordering::Equal,
3631                    within_error: within_error && !from_regex,
3632                })
3633            } else if indent_from_prev_row {
3634                Some(IndentSuggestion {
3635                    basis_row: prev_row,
3636                    delta: Ordering::Greater,
3637                    within_error: within_error && !from_regex,
3638                })
3639            } else if outdent_to_row < prev_row {
3640                Some(IndentSuggestion {
3641                    basis_row: outdent_to_row,
3642                    delta: Ordering::Equal,
3643                    within_error: within_error && !from_regex,
3644                })
3645            } else if outdent_from_prev_row {
3646                Some(IndentSuggestion {
3647                    basis_row: prev_row,
3648                    delta: Ordering::Less,
3649                    within_error: within_error && !from_regex,
3650                })
3651            } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3652            {
3653                Some(IndentSuggestion {
3654                    basis_row: prev_row,
3655                    delta: Ordering::Equal,
3656                    within_error: within_error && !from_regex,
3657                })
3658            } else {
3659                None
3660            };
3661
3662            prev_row = row;
3663            prev_row_start = row_start;
3664            suggestion
3665        }))
3666    }
3667
3668    fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3669        while row > 0 {
3670            row -= 1;
3671            if !self.is_line_blank(row) {
3672                return Some(row);
3673            }
3674        }
3675        None
3676    }
3677
3678    #[ztracing::instrument(skip_all)]
3679    fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3680        let captures = self.syntax.captures(range, &self.text, |grammar| {
3681            grammar
3682                .highlights_config
3683                .as_ref()
3684                .map(|config| &config.query)
3685        });
3686        let highlight_maps = captures
3687            .grammars()
3688            .iter()
3689            .map(|grammar| grammar.highlight_map())
3690            .collect();
3691        (captures, highlight_maps)
3692    }
3693
3694    /// Iterates over chunks of text in the given range of the buffer. Chunk boundaries
3695    /// are arbitrary, since the text is stored in a [`Rope`](text::Rope), but each
3696    /// returned chunk has a single syntax highlighting style and a single
3697    /// diagnostic status.
3698    #[ztracing::instrument(skip_all)]
3699    pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3700        let range = range.start.to_offset(self)..range.end.to_offset(self);
3701
3702        let mut syntax = None;
3703        if language_aware {
3704            syntax = Some(self.get_highlights(range.clone()));
3705        }
3706        // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3707        let diagnostics = language_aware;
3708        BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3709    }
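
    // A minimal sketch of a call site (assumed; `snapshot` is a `BufferSnapshot`, and the
    // fields on the yielded chunk are assumptions about the `BufferChunks` item type):
    //
    //     for chunk in snapshot.chunks(0..snapshot.len(), true) {
    //         // each chunk covers a run of text with one highlight style and one
    //         // diagnostic status; its text would be available as a &str on the item
    //     }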
3710
3711    pub fn highlighted_text_for_range<T: ToOffset>(
3712        &self,
3713        range: Range<T>,
3714        override_style: Option<HighlightStyle>,
3715        syntax_theme: &SyntaxTheme,
3716    ) -> HighlightedText {
3717        HighlightedText::from_buffer_range(
3718            range,
3719            &self.text,
3720            &self.syntax,
3721            override_style,
3722            syntax_theme,
3723        )
3724    }
3725
3726    /// Invokes the given callback for each line of text in the given range of the buffer.
3727    /// Uses a callback to avoid allocating a string for each line.
3728    fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3729        let mut line = String::new();
3730        let mut row = range.start.row;
3731        for chunk in self
3732            .as_rope()
3733            .chunks_in_range(range.to_offset(self))
3734            .chain(["\n"])
3735        {
3736            for (newline_ix, text) in chunk.split('\n').enumerate() {
3737                if newline_ix > 0 {
3738                    callback(row, &line);
3739                    row += 1;
3740                    line.clear();
3741                }
3742                line.push_str(text);
3743            }
3744        }
3745    }
3746
3747    /// Iterates over every [`SyntaxLayer`] in the buffer.
3748    pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3749        self.syntax_layers_for_range(0..self.len(), true)
3750    }
3751
3752    pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3753        let offset = position.to_offset(self);
3754        self.syntax_layers_for_range(offset..offset, false)
3755            .filter(|l| {
3756                if let Some(ranges) = l.included_sub_ranges {
3757                    ranges.iter().any(|range| {
3758                        let start = range.start.to_offset(self);
3759                        start <= offset && {
3760                            let end = range.end.to_offset(self);
3761                            offset < end
3762                        }
3763                    })
3764                } else {
3765                    l.node().start_byte() <= offset && l.node().end_byte() > offset
3766                }
3767            })
3768            .last()
3769    }
3770
3771    pub fn syntax_layers_for_range<D: ToOffset>(
3772        &self,
3773        range: Range<D>,
3774        include_hidden: bool,
3775    ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3776        self.syntax
3777            .layers_for_range(range, &self.text, include_hidden)
3778    }
3779
3780    pub fn smallest_syntax_layer_containing<D: ToOffset>(
3781        &self,
3782        range: Range<D>,
3783    ) -> Option<SyntaxLayer<'_>> {
3784        let range = range.to_offset(self);
3785        self.syntax
3786            .layers_for_range(range, &self.text, false)
3787            .max_by(|a, b| {
3788                if a.depth != b.depth {
3789                    a.depth.cmp(&b.depth)
3790                } else if a.offset.0 != b.offset.0 {
3791                    a.offset.0.cmp(&b.offset.0)
3792                } else {
3793                    a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3794                }
3795            })
3796    }
3797
3798    /// Returns the main [`Language`].
3799    pub fn language(&self) -> Option<&Arc<Language>> {
3800        self.language.as_ref()
3801    }
3802
3803    /// Returns the [`Language`] at the given location.
3804    pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3805        self.syntax_layer_at(position)
3806            .map(|info| info.language)
3807            .or(self.language.as_ref())
3808    }
3809
3810    /// Returns the settings for the language at the given location.
3811    pub fn settings_at<'a, D: ToOffset>(
3812        &'a self,
3813        position: D,
3814        cx: &'a App,
3815    ) -> Cow<'a, LanguageSettings> {
3816        language_settings(
3817            self.language_at(position).map(|l| l.name()),
3818            self.file.as_ref(),
3819            cx,
3820        )
3821    }
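
    // A minimal sketch of a call site (assumed; `cursor_offset` is hypothetical): reading
    // position-dependent settings, e.g. to size indentation inside an embedded language
    // region of the buffer.
    //
    //     let settings = snapshot.settings_at(cursor_offset, cx);
    //     let tab_size = settings.tab_size.get(); // NonZeroU32 -> u32
    //     let use_hard_tabs = settings.hard_tabs;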
3822
3823    pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3824        CharClassifier::new(self.language_scope_at(point))
3825    }
3826
3827    /// Returns the [`LanguageScope`] at the given location.
3828    pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3829        let offset = position.to_offset(self);
3830        let mut scope = None;
3831        let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3832
3833        // Use the layer that has the smallest node intersecting the given point.
3834        for layer in self
3835            .syntax
3836            .layers_for_range(offset..offset, &self.text, false)
3837        {
3838            let mut cursor = layer.node().walk();
3839
3840            let mut range = None;
3841            loop {
3842                let child_range = cursor.node().byte_range();
3843                if !child_range.contains(&offset) {
3844                    break;
3845                }
3846
3847                range = Some(child_range);
3848                if cursor.goto_first_child_for_byte(offset).is_none() {
3849                    break;
3850                }
3851            }
3852
3853            if let Some(range) = range
3854                && smallest_range_and_depth.as_ref().is_none_or(
3855                    |(smallest_range, smallest_range_depth)| {
3856                        if layer.depth > *smallest_range_depth {
3857                            true
3858                        } else if layer.depth == *smallest_range_depth {
3859                            range.len() < smallest_range.len()
3860                        } else {
3861                            false
3862                        }
3863                    },
3864                )
3865            {
3866                smallest_range_and_depth = Some((range, layer.depth));
3867                scope = Some(LanguageScope {
3868                    language: layer.language.clone(),
3869                    override_id: layer.override_id(offset, &self.text),
3870                });
3871            }
3872        }
3873
3874        scope.or_else(|| {
3875            self.language.clone().map(|language| LanguageScope {
3876                language,
3877                override_id: None,
3878            })
3879        })
3880    }
3881
3882    /// Returns a tuple of the range and character kind of the word
3883    /// surrounding the given position.
3884    pub fn surrounding_word<T: ToOffset>(
3885        &self,
3886        start: T,
3887        scope_context: Option<CharScopeContext>,
3888    ) -> (Range<usize>, Option<CharKind>) {
3889        let mut start = start.to_offset(self);
3890        let mut end = start;
3891        let mut next_chars = self.chars_at(start).take(128).peekable();
3892        let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3893
3894        let classifier = self.char_classifier_at(start).scope_context(scope_context);
3895        let word_kind = cmp::max(
3896            prev_chars.peek().copied().map(|c| classifier.kind(c)),
3897            next_chars.peek().copied().map(|c| classifier.kind(c)),
3898        );
3899
3900        for ch in prev_chars {
3901            if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3902                start -= ch.len_utf8();
3903            } else {
3904                break;
3905            }
3906        }
3907
3908        for ch in next_chars {
3909            if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3910                end += ch.len_utf8();
3911            } else {
3912                break;
3913            }
3914        }
3915
3916        (start..end, word_kind)
3917    }
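
    // A minimal sketch of a call site (assumed; `cursor_offset` is hypothetical): finding
    // the word under the cursor. Passing `None` for the scope context uses the default
    // character classification for the language at that position.
    //
    //     let (word_range, kind) = snapshot.surrounding_word(cursor_offset, None);
    //     // `word_range` is a byte range; `kind` classifies the surrounding characters, if any.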
3918
3919    /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3920    /// range. When `require_larger` is true, the node found must be larger than the query range.
3921    ///
3922    /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3923    /// be moved to the root of the tree.
3924    fn goto_node_enclosing_range(
3925        cursor: &mut tree_sitter::TreeCursor,
3926        query_range: &Range<usize>,
3927        require_larger: bool,
3928    ) -> bool {
3929        let mut ascending = false;
3930        loop {
3931            let mut range = cursor.node().byte_range();
3932            if query_range.is_empty() {
3933                // When the query range is empty and the current node starts after it, move to the
3934                // previous sibling to find the containing node.
3935                if range.start > query_range.start {
3936                    cursor.goto_previous_sibling();
3937                    range = cursor.node().byte_range();
3938                }
3939            } else {
3940                // When the query range is non-empty and the current node ends exactly at the start,
3941                // move to the next sibling to find a node that extends beyond the start.
3942                if range.end == query_range.start {
3943                    cursor.goto_next_sibling();
3944                    range = cursor.node().byte_range();
3945                }
3946            }
3947
3948            let encloses = range.contains_inclusive(query_range)
3949                && (!require_larger || range.len() > query_range.len());
3950            if !encloses {
3951                ascending = true;
3952                if !cursor.goto_parent() {
3953                    return false;
3954                }
3955                continue;
3956            } else if ascending {
3957                return true;
3958            }
3959
3960            // Descend into the current node.
3961            if cursor
3962                .goto_first_child_for_byte(query_range.start)
3963                .is_none()
3964            {
3965                return true;
3966            }
3967        }
3968    }
3969
3970    pub fn syntax_ancestor<'a, T: ToOffset>(
3971        &'a self,
3972        range: Range<T>,
3973    ) -> Option<tree_sitter::Node<'a>> {
3974        let range = range.start.to_offset(self)..range.end.to_offset(self);
3975        let mut result: Option<tree_sitter::Node<'a>> = None;
3976        for layer in self
3977            .syntax
3978            .layers_for_range(range.clone(), &self.text, true)
3979        {
3980            let mut cursor = layer.node().walk();
3981
3982            // Find the node that both contains the range and is larger than it.
3983            if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3984                continue;
3985            }
3986
3987            let left_node = cursor.node();
3988            let mut layer_result = left_node;
3989
3990            // For an empty range, try to find another node immediately to the right of the range.
3991            if left_node.end_byte() == range.start {
3992                let mut right_node = None;
3993                while !cursor.goto_next_sibling() {
3994                    if !cursor.goto_parent() {
3995                        break;
3996                    }
3997                }
3998
3999                while cursor.node().start_byte() == range.start {
4000                    right_node = Some(cursor.node());
4001                    if !cursor.goto_first_child() {
4002                        break;
4003                    }
4004                }
4005
4006                // If there is a candidate node on both sides of the (empty) range, then
4007                // decide between the two by favoring a named node over an anonymous token.
4008                // If both nodes are the same in that regard, favor the right one.
4009                if let Some(right_node) = right_node
4010                    && (right_node.is_named() || !left_node.is_named())
4011                {
4012                    layer_result = right_node;
4013                }
4014            }
4015
4016            if let Some(previous_result) = &result
4017                && previous_result.byte_range().len() < layer_result.byte_range().len()
4018            {
4019                continue;
4020            }
4021            result = Some(layer_result);
4022        }
4023
4024        result
4025    }
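
    // A minimal sketch of a call site (assumed; `selection` is a hypothetical byte range):
    // growing a selection to the smallest enclosing syntax node that is strictly larger
    // than it, the building block of an "expand selection" style command.
    //
    //     if let Some(node) = snapshot.syntax_ancestor(selection.clone()) {
    //         let expanded = node.byte_range(); // a larger range containing `selection`
    //     }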
4026
4027    /// Find the previous sibling syntax node at the given range.
4028    ///
4029    /// This function locates the syntax node that precedes the node containing
4030    /// the given range. It searches hierarchically by:
4031    /// 1. Finding the node that contains the given range
4032    /// 2. Looking for the previous sibling at the same tree level
4033    /// 3. If no sibling is found, moving up to parent levels and searching for siblings
4034    ///
4035    /// Returns `None` if there is no previous sibling at any ancestor level.
4036    pub fn syntax_prev_sibling<'a, T: ToOffset>(
4037        &'a self,
4038        range: Range<T>,
4039    ) -> Option<tree_sitter::Node<'a>> {
4040        let range = range.start.to_offset(self)..range.end.to_offset(self);
4041        let mut result: Option<tree_sitter::Node<'a>> = None;
4042
4043        for layer in self
4044            .syntax
4045            .layers_for_range(range.clone(), &self.text, true)
4046        {
4047            let mut cursor = layer.node().walk();
4048
4049            // Find the node that contains the range
4050            if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
4051                continue;
4052            }
4053
4054            // Look for the previous sibling, moving up ancestor levels if needed
4055            loop {
4056                if cursor.goto_previous_sibling() {
4057                    let layer_result = cursor.node();
4058
4059                    if let Some(previous_result) = &result {
4060                        if previous_result.byte_range().end < layer_result.byte_range().end {
4061                            continue;
4062                        }
4063                    }
4064                    result = Some(layer_result);
4065                    break;
4066                }
4067
4068                // No sibling found at this level, try moving up to parent
4069                if !cursor.goto_parent() {
4070                    break;
4071                }
4072            }
4073        }
4074
4075        result
4076    }
4077
4078    /// Find the next sibling syntax node at the given range.
4079    ///
4080    /// This function locates the syntax node that follows the node containing
4081    /// the given range. It searches hierarchically by:
4082    /// 1. Finding the node that contains the given range
4083    /// 2. Looking for the next sibling at the same tree level
4084    /// 3. If no sibling is found, moving up to parent levels and searching for siblings
4085    ///
4086    /// Returns `None` if there is no next sibling at any ancestor level.
4087    pub fn syntax_next_sibling<'a, T: ToOffset>(
4088        &'a self,
4089        range: Range<T>,
4090    ) -> Option<tree_sitter::Node<'a>> {
4091        let range = range.start.to_offset(self)..range.end.to_offset(self);
4092        let mut result: Option<tree_sitter::Node<'a>> = None;
4093
4094        for layer in self
4095            .syntax
4096            .layers_for_range(range.clone(), &self.text, true)
4097        {
4098            let mut cursor = layer.node().walk();
4099
4100            // Find the node that contains the range
4101            if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
4102                continue;
4103            }
4104
4105            // Look for the next sibling, moving up ancestor levels if needed
4106            loop {
4107                if cursor.goto_next_sibling() {
4108                    let layer_result = cursor.node();
4109
4110                    if let Some(previous_result) = &result {
4111                        if previous_result.byte_range().start > layer_result.byte_range().start {
4112                            continue;
4113                        }
4114                    }
4115                    result = Some(layer_result);
4116                    break;
4117                }
4118
4119                // No sibling found at this level, try moving up to parent
4120                if !cursor.goto_parent() {
4121                    break;
4122                }
4123            }
4124        }
4125
4126        result
4127    }
4128
4129    /// Returns the root syntax node within the row containing the given position.
4130    pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
4131        let start_offset = position.to_offset(self);
4132
4133        let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
4134
4135        let layer = self
4136            .syntax
4137            .layers_for_range(start_offset..start_offset, &self.text, true)
4138            .next()?;
4139
4140        let mut cursor = layer.node().walk();
4141
4142        // Descend to the leaf at the given offset, preferring a node that starts there over one that ends there.
4143        while cursor.goto_first_child_for_byte(start_offset).is_some() {
4144            if cursor.node().end_byte() == start_offset {
4145                cursor.goto_next_sibling();
4146            }
4147        }
4148
4149        // Ascend to the root node within the same row.
4150        while cursor.goto_parent() {
4151            if cursor.node().start_position().row != row {
4152                break;
4153            }
4154        }
4155
4156        Some(cursor.node())
4157    }
4158
4159    /// Returns the outline for the buffer.
4160    ///
4161    /// This method allows passing an optional [`SyntaxTheme`] to
4162    /// syntax-highlight the returned symbols.
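    ///
    /// A minimal usage sketch (not a doctest; assumes an existing `snapshot: BufferSnapshot`
    /// and, for the highlighted variant, a `syntax_theme: &SyntaxTheme` in scope):
    ///
    /// ```ignore
    /// // Without a theme, the outline items carry no highlight ranges.
    /// let plain = snapshot.outline(None);
    /// // With a theme, each item's `highlight_ranges` is populated for rendering.
    /// let highlighted = snapshot.outline(Some(syntax_theme));
    /// ```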
4163    pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
4164        Outline::new(self.outline_items_containing(0..self.len(), true, theme))
4165    }
4166
4167    /// Returns all the symbols that contain the given position.
4168    ///
4169    /// This method allows passing an optional [`SyntaxTheme`] to
4170    /// syntax-highlight the returned symbols.
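    ///
    /// A minimal usage sketch (not a doctest; assumes an existing
    /// `snapshot: BufferSnapshot` and a byte offset `cursor` within it):
    ///
    /// ```ignore
    /// // Print the enclosing symbols from outermost to innermost.
    /// for symbol in snapshot.symbols_containing(cursor, None) {
    ///     println!("{}{}", "  ".repeat(symbol.depth), symbol.text);
    /// }
    /// ```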
4171    pub fn symbols_containing<T: ToOffset>(
4172        &self,
4173        position: T,
4174        theme: Option<&SyntaxTheme>,
4175    ) -> Vec<OutlineItem<Anchor>> {
4176        let position = position.to_offset(self);
4177        let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
4178        let end = self.clip_offset(position + 1, Bias::Right);
4179        let mut items = self.outline_items_containing(start..end, false, theme);
4180        let mut prev_depth = None;
4181        items.retain(|item| {
4182            let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
4183            prev_depth = Some(item.depth);
4184            result
4185        });
4186        items
4187    }
4188
4189    pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
4190        let range = range.to_offset(self);
4191        let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4192            grammar.outline_config.as_ref().map(|c| &c.query)
4193        });
4194        let configs = matches
4195            .grammars()
4196            .iter()
4197            .map(|g| g.outline_config.as_ref().unwrap())
4198            .collect::<Vec<_>>();
4199
4200        while let Some(mat) = matches.peek() {
4201            let config = &configs[mat.grammar_index];
4202            let containing_item_node = maybe!({
4203                let item_node = mat.captures.iter().find_map(|cap| {
4204                    if cap.index == config.item_capture_ix {
4205                        Some(cap.node)
4206                    } else {
4207                        None
4208                    }
4209                })?;
4210
4211                let item_byte_range = item_node.byte_range();
4212                if item_byte_range.end < range.start || item_byte_range.start > range.end {
4213                    None
4214                } else {
4215                    Some(item_node)
4216                }
4217            });
4218
4219            if let Some(item_node) = containing_item_node {
4220                return Some(
4221                    Point::from_ts_point(item_node.start_position())
4222                        ..Point::from_ts_point(item_node.end_position()),
4223                );
4224            }
4225
4226            matches.advance();
4227        }
4228        None
4229    }
4230
4231    pub fn outline_items_containing<T: ToOffset>(
4232        &self,
4233        range: Range<T>,
4234        include_extra_context: bool,
4235        theme: Option<&SyntaxTheme>,
4236    ) -> Vec<OutlineItem<Anchor>> {
4237        self.outline_items_containing_internal(
4238            range,
4239            include_extra_context,
4240            theme,
4241            |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4242        )
4243    }
4244
4245    pub fn outline_items_as_points_containing<T: ToOffset>(
4246        &self,
4247        range: Range<T>,
4248        include_extra_context: bool,
4249        theme: Option<&SyntaxTheme>,
4250    ) -> Vec<OutlineItem<Point>> {
4251        self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4252            range
4253        })
4254    }
4255
4256    pub fn outline_items_as_offsets_containing<T: ToOffset>(
4257        &self,
4258        range: Range<T>,
4259        include_extra_context: bool,
4260        theme: Option<&SyntaxTheme>,
4261    ) -> Vec<OutlineItem<usize>> {
4262        self.outline_items_containing_internal(
4263            range,
4264            include_extra_context,
4265            theme,
4266            |buffer, range| range.to_offset(buffer),
4267        )
4268    }
4269
4270    fn outline_items_containing_internal<T: ToOffset, U>(
4271        &self,
4272        range: Range<T>,
4273        include_extra_context: bool,
4274        theme: Option<&SyntaxTheme>,
4275        range_callback: fn(&Self, Range<Point>) -> Range<U>,
4276    ) -> Vec<OutlineItem<U>> {
4277        let range = range.to_offset(self);
4278        let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4279            grammar.outline_config.as_ref().map(|c| &c.query)
4280        });
4281
4282        let mut items = Vec::new();
4283        let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4284        while let Some(mat) = matches.peek() {
4285            let config = matches.grammars()[mat.grammar_index]
4286                .outline_config
4287                .as_ref()
4288                .unwrap();
4289            if let Some(item) =
4290                self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4291            {
4292                items.push(item);
4293            } else if let Some(capture) = mat
4294                .captures
4295                .iter()
4296                .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4297            {
4298                let capture_range = capture.node.start_position()..capture.node.end_position();
4299                let mut capture_row_range =
4300                    capture_range.start.row as u32..capture_range.end.row as u32;
4301                if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4302                {
4303                    capture_row_range.end -= 1;
4304                }
4305                if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4306                    if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4307                        last_row_range.end = capture_row_range.end;
4308                    } else {
4309                        annotation_row_ranges.push(capture_row_range);
4310                    }
4311                } else {
4312                    annotation_row_ranges.push(capture_row_range);
4313                }
4314            }
4315            matches.advance();
4316        }
4317
4318        items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4319
4320        // Assign depths based on containment relationships and convert to anchors.
4321        let mut item_ends_stack = Vec::<Point>::new();
4322        let mut anchor_items = Vec::new();
4323        let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4324        for item in items {
4325            while let Some(last_end) = item_ends_stack.last().copied() {
4326                if last_end < item.range.end {
4327                    item_ends_stack.pop();
4328                } else {
4329                    break;
4330                }
4331            }
4332
4333            let mut annotation_row_range = None;
4334            while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4335                let row_preceding_item = item.range.start.row.saturating_sub(1);
4336                if next_annotation_row_range.end < row_preceding_item {
4337                    annotation_row_ranges.next();
4338                } else {
4339                    if next_annotation_row_range.end == row_preceding_item {
4340                        annotation_row_range = Some(next_annotation_row_range.clone());
4341                        annotation_row_ranges.next();
4342                    }
4343                    break;
4344                }
4345            }
4346
4347            anchor_items.push(OutlineItem {
4348                depth: item_ends_stack.len(),
4349                range: range_callback(self, item.range.clone()),
4350                source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4351                text: item.text,
4352                highlight_ranges: item.highlight_ranges,
4353                name_ranges: item.name_ranges,
4354                body_range: item.body_range.map(|r| range_callback(self, r)),
4355                annotation_range: annotation_row_range.map(|annotation_range| {
4356                    let point_range = Point::new(annotation_range.start, 0)
4357                        ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4358                    range_callback(self, point_range)
4359                }),
4360            });
4361            item_ends_stack.push(item.range.end);
4362        }
4363
4364        anchor_items
4365    }
4366
4367    fn next_outline_item(
4368        &self,
4369        config: &OutlineConfig,
4370        mat: &SyntaxMapMatch,
4371        range: &Range<usize>,
4372        include_extra_context: bool,
4373        theme: Option<&SyntaxTheme>,
4374    ) -> Option<OutlineItem<Point>> {
4375        let item_node = mat.captures.iter().find_map(|cap| {
4376            if cap.index == config.item_capture_ix {
4377                Some(cap.node)
4378            } else {
4379                None
4380            }
4381        })?;
4382
4383        let item_byte_range = item_node.byte_range();
4384        if item_byte_range.end < range.start || item_byte_range.start > range.end {
4385            return None;
4386        }
4387        let item_point_range = Point::from_ts_point(item_node.start_position())
4388            ..Point::from_ts_point(item_node.end_position());
4389
4390        let mut open_point = None;
4391        let mut close_point = None;
4392
4393        let mut buffer_ranges = Vec::new();
4394        let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4395            let mut range = node.start_byte()..node.end_byte();
4396            let start = node.start_position();
4397            if node.end_position().row > start.row {
4398                range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4399            }
4400
4401            if !range.is_empty() {
4402                buffer_ranges.push((range, node_is_name));
4403            }
4404        };
4405
4406        for capture in mat.captures {
4407            if capture.index == config.name_capture_ix {
4408                add_to_buffer_ranges(capture.node, true);
4409            } else if Some(capture.index) == config.context_capture_ix
4410                || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4411            {
4412                add_to_buffer_ranges(capture.node, false);
4413            } else {
4414                if Some(capture.index) == config.open_capture_ix {
4415                    open_point = Some(Point::from_ts_point(capture.node.end_position()));
4416                } else if Some(capture.index) == config.close_capture_ix {
4417                    close_point = Some(Point::from_ts_point(capture.node.start_position()));
4418                }
4419            }
4420        }
4421
4422        if buffer_ranges.is_empty() {
4423            return None;
4424        }
4425        let source_range_for_text =
4426            buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4427
4428        let mut text = String::new();
4429        let mut highlight_ranges = Vec::new();
4430        let mut name_ranges = Vec::new();
4431        let mut chunks = self.chunks(source_range_for_text.clone(), true);
4432        let mut last_buffer_range_end = 0;
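        // Stitch the captured ranges together into a single display string, recording
        // highlight ranges and name ranges relative to that string as we go.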
4433        for (buffer_range, is_name) in buffer_ranges {
4434            let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4435            if space_added {
4436                text.push(' ');
4437            }
4438            let before_append_len = text.len();
4439            let mut offset = buffer_range.start;
4440            chunks.seek(buffer_range.clone());
4441            for mut chunk in chunks.by_ref() {
4442                if chunk.text.len() > buffer_range.end - offset {
4443                    chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4444                    offset = buffer_range.end;
4445                } else {
4446                    offset += chunk.text.len();
4447                }
4448                let style = chunk
4449                    .syntax_highlight_id
4450                    .zip(theme)
4451                    .and_then(|(highlight, theme)| highlight.style(theme));
4452                if let Some(style) = style {
4453                    let start = text.len();
4454                    let end = start + chunk.text.len();
4455                    highlight_ranges.push((start..end, style));
4456                }
4457                text.push_str(chunk.text);
4458                if offset >= buffer_range.end {
4459                    break;
4460                }
4461            }
4462            if is_name {
4463                let after_append_len = text.len();
4464                let start = if space_added && !name_ranges.is_empty() {
4465                    before_append_len - 1
4466                } else {
4467                    before_append_len
4468                };
4469                name_ranges.push(start..after_append_len);
4470            }
4471            last_buffer_range_end = buffer_range.end;
4472        }
4473
4474        Some(OutlineItem {
4475            depth: 0, // We'll calculate the depth later
4476            range: item_point_range,
4477            source_range_for_text: source_range_for_text.to_point(self),
4478            text,
4479            highlight_ranges,
4480            name_ranges,
4481            body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4482            annotation_range: None,
4483        })
4484    }
4485
4486    pub fn function_body_fold_ranges<T: ToOffset>(
4487        &self,
4488        within: Range<T>,
4489    ) -> impl Iterator<Item = Range<usize>> + '_ {
4490        self.text_object_ranges(within, TreeSitterOptions::default())
4491            .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4492    }
4493
4494    /// For each grammar in the language, runs the provided
4495    /// [`tree_sitter::Query`] against the given range.
4496    pub fn matches(
4497        &self,
4498        range: Range<usize>,
4499        query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4500    ) -> SyntaxMapMatches<'_> {
4501        self.syntax.matches(range, self, query)
4502    }
4503
4504    /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks.
4505    /// Because matching is performed per chunk, this may return more bracket pairs than the range itself contains.
4506    ///
4507    /// Chunks already present in `known_chunks` are skipped.
4508    /// The resulting bracket match collections are not ordered.
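    ///
    /// A minimal usage sketch (not a doctest; assumes an existing
    /// `snapshot: BufferSnapshot` and that the caller has no cached chunks yet):
    ///
    /// ```ignore
    /// let brackets_by_chunk = snapshot.fetch_bracket_ranges(0..snapshot.len(), None);
    /// for (row_range, brackets) in &brackets_by_chunk {
    ///     println!("rows {:?}: {} bracket pair(s)", row_range, brackets.len());
    /// }
    /// ```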
4509    pub fn fetch_bracket_ranges(
4510        &self,
4511        range: Range<usize>,
4512        known_chunks: Option<&HashSet<Range<BufferRow>>>,
4513    ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4514        let mut all_bracket_matches = HashMap::default();
4515
4516        for chunk in self
4517            .tree_sitter_data
4518            .chunks
4519            .applicable_chunks(&[range.to_point(self)])
4520        {
4521            if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4522                continue;
4523            }
4524            let chunk_range = chunk.anchor_range();
4525            let chunk_range = chunk_range.to_offset(&self);
4526
4527            if let Some(cached_brackets) =
4528                &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4529            {
4530                all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4531                continue;
4532            }
4533
4534            let mut all_brackets: Vec<(BracketMatch<usize>, bool)> = Vec::new();
4535            let mut opens = Vec::new();
4536            let mut color_pairs = Vec::new();
4537
4538            let mut matches = self.syntax.matches_with_options(
4539                chunk_range.clone(),
4540                &self.text,
4541                TreeSitterOptions {
4542                    max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
4543                    max_start_depth: None,
4544                },
4545                |grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
4546            );
4547            let configs = matches
4548                .grammars()
4549                .iter()
4550                .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4551                .collect::<Vec<_>>();
4552
4553            // Group matches by open range so we can either trust grammar output
4554            // or repair it by picking a single closest close per open.
4555            let mut open_to_close_ranges = BTreeMap::new();
4556            while let Some(mat) = matches.peek() {
4557                let mut open = None;
4558                let mut close = None;
4559                let syntax_layer_depth = mat.depth;
4560                let config = configs[mat.grammar_index];
4561                let pattern = &config.patterns[mat.pattern_index];
4562                for capture in mat.captures {
4563                    if capture.index == config.open_capture_ix {
4564                        open = Some(capture.node.byte_range());
4565                    } else if capture.index == config.close_capture_ix {
4566                        close = Some(capture.node.byte_range());
4567                    }
4568                }
4569
4570                matches.advance();
4571
4572                let Some((open_range, close_range)) = open.zip(close) else {
4573                    continue;
4574                };
4575
4576                let bracket_range = open_range.start..=close_range.end;
4577                if !bracket_range.overlaps(&chunk_range) {
4578                    continue;
4579                }
4580
4581                open_to_close_ranges
4582                    .entry((open_range.start, open_range.end))
4583                    .or_insert_with(BTreeMap::new)
4584                    .insert(
4585                        (close_range.start, close_range.end),
4586                        BracketMatch {
4587                            open_range: open_range.clone(),
4588                            close_range: close_range.clone(),
4589                            syntax_layer_depth,
4590                            newline_only: pattern.newline_only,
4591                            color_index: None,
4592                        },
4593                    );
4594
4595                all_brackets.push((
4596                    BracketMatch {
4597                        open_range,
4598                        close_range,
4599                        syntax_layer_depth,
4600                        newline_only: pattern.newline_only,
4601                        color_index: None,
4602                    },
4603                    pattern.rainbow_exclude,
4604                ));
4605            }
4606
4607            let has_bogus_matches = open_to_close_ranges
4608                .iter()
4609                .any(|(_, end_ranges)| end_ranges.len() > 1);
4610            if has_bogus_matches {
4611                // The grammar is producing bogus matches in which one open is paired with multiple
4612                // closes. Repair this by walking through positions in order and building a valid stack.
4613                // For each close, tree-sitter's matches tell us the expected open_len.
4614
4615                // Map each close to its expected open length (for inferring opens)
4616                let close_to_open_len: HashMap<(usize, usize), usize> = all_brackets
4617                    .iter()
4618                    .map(|(m, _)| ((m.close_range.start, m.close_range.end), m.open_range.len()))
4619                    .collect();
4620
4621                // Collect unique opens and closes within this chunk
4622                let mut unique_opens: HashSet<(usize, usize)> = all_brackets
4623                    .iter()
4624                    .map(|(m, _)| (m.open_range.start, m.open_range.end))
4625                    .filter(|(start, _)| chunk_range.contains(start))
4626                    .collect();
4627
4628                let mut unique_closes: Vec<(usize, usize)> = all_brackets
4629                    .iter()
4630                    .map(|(m, _)| (m.close_range.start, m.close_range.end))
4631                    .filter(|(start, _)| chunk_range.contains(start))
4632                    .collect();
4633                unique_closes.sort();
4634                unique_closes.dedup();
4635
4636                // Build valid pairs by walking through closes in order
4637                let mut unique_opens_vec: Vec<_> = unique_opens.iter().copied().collect();
4638                unique_opens_vec.sort();
4639
4640                let mut valid_pairs: HashSet<((usize, usize), (usize, usize))> = HashSet::default();
4641                let mut open_stack: Vec<(usize, usize)> = Vec::new();
4642                let mut open_idx = 0;
4643
4644                for close in &unique_closes {
4645                    // Push all opens before this close onto stack
4646                    while open_idx < unique_opens_vec.len()
4647                        && unique_opens_vec[open_idx].0 < close.0
4648                    {
4649                        open_stack.push(unique_opens_vec[open_idx]);
4650                        open_idx += 1;
4651                    }
4652
4653                    // Try to match with most recent open
4654                    if let Some(open) = open_stack.pop() {
4655                        valid_pairs.insert((open, *close));
4656                    } else if let Some(&open_len) = close_to_open_len.get(close) {
4657                        // No open on stack - infer one based on expected open_len
4658                        if close.0 >= open_len {
4659                            let inferred = (close.0 - open_len, close.0);
4660                            unique_opens.insert(inferred);
4661                            valid_pairs.insert((inferred, *close));
4662                            all_brackets.push((
4663                                BracketMatch {
4664                                    open_range: inferred.0..inferred.1,
4665                                    close_range: close.0..close.1,
4666                                    newline_only: false,
4667                                    syntax_layer_depth: 0,
4668                                    color_index: None,
4669                                },
4670                                false,
4671                            ));
4672                        }
4673                    }
4674                }
4675
4676                all_brackets.retain(|(m, _)| {
4677                    let open = (m.open_range.start, m.open_range.end);
4678                    let close = (m.close_range.start, m.close_range.end);
4679                    valid_pairs.contains(&(open, close))
4680                });
4681            }
4682
4683            let mut all_brackets = all_brackets
4684                .into_iter()
4685                .enumerate()
4686                .map(|(index, (bracket_match, rainbow_exclude))| {
4687                    // Certain languages have "brackets" that are not really brackets, e.g. tags, and such a
4688                    // bracket will match the entire tag with all of the text inside it.
4689                    // For now, avoid highlighting any pair that has more than a single char in each bracket.
4690                    // We need to colorize `<Element/>` bracket pairs, so we cannot make this check stricter.
4691                    let should_color = !rainbow_exclude
4692                        && (bracket_match.open_range.len() == 1
4693                            || bracket_match.close_range.len() == 1);
4694                    if should_color {
4695                        opens.push(bracket_match.open_range.clone());
4696                        color_pairs.push((
4697                            bracket_match.open_range.clone(),
4698                            bracket_match.close_range.clone(),
4699                            index,
4700                        ));
4701                    }
4702                    bracket_match
4703                })
4704                .collect::<Vec<_>>();
4705
4706            opens.sort_by_key(|r| (r.start, r.end));
4707            opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4708            color_pairs.sort_by_key(|(_, close, _)| close.end);
4709
4710            let mut open_stack = Vec::new();
4711            let mut open_index = 0;
4712            for (open, close, index) in color_pairs {
4713                while open_index < opens.len() && opens[open_index].start < close.start {
4714                    open_stack.push(opens[open_index].clone());
4715                    open_index += 1;
4716                }
4717
4718                if open_stack.last() == Some(&open) {
4719                    let depth_index = open_stack.len() - 1;
4720                    all_brackets[index].color_index = Some(depth_index);
4721                    open_stack.pop();
4722                }
4723            }
4724
4725            all_brackets.sort_by_key(|bracket_match| {
4726                (bracket_match.open_range.start, bracket_match.open_range.end)
4727            });
4728
4729            if let empty_slot @ None =
4730                &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4731            {
4732                *empty_slot = Some(all_brackets.clone());
4733            }
4734            all_bracket_matches.insert(chunk.row_range(), all_brackets);
4735        }
4736
4737        all_bracket_matches
4738    }
4739
4740    pub fn all_bracket_ranges(
4741        &self,
4742        range: Range<usize>,
4743    ) -> impl Iterator<Item = BracketMatch<usize>> {
4744        self.fetch_bracket_ranges(range.clone(), None)
4745            .into_values()
4746            .flatten()
4747            .filter(move |bracket_match| {
4748                let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4749                bracket_range.overlaps(&range)
4750            })
4751    }
4752
4753    /// Returns bracket range pairs overlapping or adjacent to `range`.
4754    pub fn bracket_ranges<T: ToOffset>(
4755        &self,
4756        range: Range<T>,
4757    ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4758        // Find bracket pairs that *inclusively* contain the given range.
4759        let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4760        self.all_bracket_ranges(range)
4761            .filter(|pair| !pair.newline_only)
4762    }
4763
4764    pub fn debug_variables_query<T: ToOffset>(
4765        &self,
4766        range: Range<T>,
4767    ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4768        let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4769
4770        let mut matches = self.syntax.matches_with_options(
4771            range.clone(),
4772            &self.text,
4773            TreeSitterOptions::default(),
4774            |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4775        );
4776
4777        let configs = matches
4778            .grammars()
4779            .iter()
4780            .map(|grammar| grammar.debug_variables_config.as_ref())
4781            .collect::<Vec<_>>();
4782
4783        let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4784
4785        iter::from_fn(move || {
4786            loop {
4787                while let Some(capture) = captures.pop() {
4788                    if capture.0.overlaps(&range) {
4789                        return Some(capture);
4790                    }
4791                }
4792
4793                let mat = matches.peek()?;
4794
4795                let Some(config) = configs[mat.grammar_index].as_ref() else {
4796                    matches.advance();
4797                    continue;
4798                };
4799
4800                for capture in mat.captures {
4801                    let Some(ix) = config
4802                        .objects_by_capture_ix
4803                        .binary_search_by_key(&capture.index, |e| e.0)
4804                        .ok()
4805                    else {
4806                        continue;
4807                    };
4808                    let text_object = config.objects_by_capture_ix[ix].1;
4809                    let byte_range = capture.node.byte_range();
4810
4811                    let mut found = false;
4812                    for (range, existing) in captures.iter_mut() {
4813                        if existing == &text_object {
4814                            range.start = range.start.min(byte_range.start);
4815                            range.end = range.end.max(byte_range.end);
4816                            found = true;
4817                            break;
4818                        }
4819                    }
4820
4821                    if !found {
4822                        captures.push((byte_range, text_object));
4823                    }
4824                }
4825
4826                matches.advance();
4827            }
4828        })
4829    }
4830
4831    pub fn text_object_ranges<T: ToOffset>(
4832        &self,
4833        range: Range<T>,
4834        options: TreeSitterOptions,
4835    ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4836        let range =
4837            range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4838
4839        let mut matches =
4840            self.syntax
4841                .matches_with_options(range.clone(), &self.text, options, |grammar| {
4842                    grammar.text_object_config.as_ref().map(|c| &c.query)
4843                });
4844
4845        let configs = matches
4846            .grammars()
4847            .iter()
4848            .map(|grammar| grammar.text_object_config.as_ref())
4849            .collect::<Vec<_>>();
4850
4851        let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4852
4853        iter::from_fn(move || {
4854            loop {
4855                while let Some(capture) = captures.pop() {
4856                    if capture.0.overlaps(&range) {
4857                        return Some(capture);
4858                    }
4859                }
4860
4861                let mat = matches.peek()?;
4862
4863                let Some(config) = configs[mat.grammar_index].as_ref() else {
4864                    matches.advance();
4865                    continue;
4866                };
4867
4868                for capture in mat.captures {
4869                    let Some(ix) = config
4870                        .text_objects_by_capture_ix
4871                        .binary_search_by_key(&capture.index, |e| e.0)
4872                        .ok()
4873                    else {
4874                        continue;
4875                    };
4876                    let text_object = config.text_objects_by_capture_ix[ix].1;
4877                    let byte_range = capture.node.byte_range();
4878
4879                    let mut found = false;
4880                    for (range, existing) in captures.iter_mut() {
4881                        if existing == &text_object {
4882                            range.start = range.start.min(byte_range.start);
4883                            range.end = range.end.max(byte_range.end);
4884                            found = true;
4885                            break;
4886                        }
4887                    }
4888
4889                    if !found {
4890                        captures.push((byte_range, text_object));
4891                    }
4892                }
4893
4894                matches.advance();
4895            }
4896        })
4897    }
4898
4899    /// Returns the enclosing bracket ranges containing the given range.
4900    pub fn enclosing_bracket_ranges<T: ToOffset>(
4901        &self,
4902        range: Range<T>,
4903    ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4904        let range = range.start.to_offset(self)..range.end.to_offset(self);
4905
4906        let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4907        let max_depth = result
4908            .iter()
4909            .map(|mat| mat.syntax_layer_depth)
4910            .max()
4911            .unwrap_or(0);
4912        result.into_iter().filter(move |pair| {
4913            pair.open_range.start <= range.start
4914                && pair.close_range.end >= range.end
4915                && pair.syntax_layer_depth == max_depth
4916        })
4917    }
4918
4919    /// Returns the smallest enclosing pair of bracket ranges containing the given range, or `None` if no brackets contain it.
4920    ///
4921    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
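    ///
    /// A minimal usage sketch (not a doctest; assumes an existing
    /// `snapshot: BufferSnapshot` and a byte range `selection` within it):
    ///
    /// ```ignore
    /// // Only consider pairs whose open and close brackets are single characters.
    /// let filter: &dyn Fn(Range<usize>, Range<usize>) -> bool =
    ///     &|open, close| open.len() == 1 && close.len() == 1;
    /// if let Some((open, close)) =
    ///     snapshot.innermost_enclosing_bracket_ranges(selection, Some(filter))
    /// {
    ///     println!("enclosed by {:?} and {:?}", open, close);
    /// }
    /// ```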
4922    pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4923        &self,
4924        range: Range<T>,
4925        range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4926    ) -> Option<(Range<usize>, Range<usize>)> {
4927        let range = range.start.to_offset(self)..range.end.to_offset(self);
4928
4929        // Get the ranges of the innermost pair of brackets.
4930        let mut result: Option<(Range<usize>, Range<usize>)> = None;
4931
4932        for pair in self.enclosing_bracket_ranges(range) {
4933            if let Some(range_filter) = range_filter
4934                && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4935            {
4936                continue;
4937            }
4938
4939            let len = pair.close_range.end - pair.open_range.start;
4940
4941            if let Some((existing_open, existing_close)) = &result {
4942                let existing_len = existing_close.end - existing_open.start;
4943                if len > existing_len {
4944                    continue;
4945                }
4946            }
4947
4948            result = Some((pair.open_range, pair.close_range));
4949        }
4950
4951        result
4952    }
4953
4954    /// Returns the offset ranges of any matches of the redaction query.
4955    /// The buffer can be associated with multiple languages, and the redaction query associated with each
4956    /// will be run on the relevant section of the buffer.
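    ///
    /// A minimal usage sketch (not a doctest; assumes an existing `snapshot: BufferSnapshot`):
    ///
    /// ```ignore
    /// // Collect the byte ranges that the language's redaction query marks as sensitive.
    /// let redactions: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```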
4957    pub fn redacted_ranges<T: ToOffset>(
4958        &self,
4959        range: Range<T>,
4960    ) -> impl Iterator<Item = Range<usize>> + '_ {
4961        let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4962        let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4963            grammar
4964                .redactions_config
4965                .as_ref()
4966                .map(|config| &config.query)
4967        });
4968
4969        let configs = syntax_matches
4970            .grammars()
4971            .iter()
4972            .map(|grammar| grammar.redactions_config.as_ref())
4973            .collect::<Vec<_>>();
4974
4975        iter::from_fn(move || {
4976            let redacted_range = syntax_matches
4977                .peek()
4978                .and_then(|mat| {
4979                    configs[mat.grammar_index].and_then(|config| {
4980                        mat.captures
4981                            .iter()
4982                            .find(|capture| capture.index == config.redaction_capture_ix)
4983                    })
4984                })
4985                .map(|mat| mat.node.byte_range());
4986            syntax_matches.advance();
4987            redacted_range
4988        })
4989    }
4990
4991    pub fn injections_intersecting_range<T: ToOffset>(
4992        &self,
4993        range: Range<T>,
4994    ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4995        let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4996
4997        let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4998            grammar
4999                .injection_config
5000                .as_ref()
5001                .map(|config| &config.query)
5002        });
5003
5004        let configs = syntax_matches
5005            .grammars()
5006            .iter()
5007            .map(|grammar| grammar.injection_config.as_ref())
5008            .collect::<Vec<_>>();
5009
5010        iter::from_fn(move || {
5011            let ranges = syntax_matches.peek().and_then(|mat| {
5012                let config = &configs[mat.grammar_index]?;
5013                let content_capture_range = mat.captures.iter().find_map(|capture| {
5014                    if capture.index == config.content_capture_ix {
5015                        Some(capture.node.byte_range())
5016                    } else {
5017                        None
5018                    }
5019                })?;
5020                let language = self.language_at(content_capture_range.start)?;
5021                Some((content_capture_range, language))
5022            });
5023            syntax_matches.advance();
5024            ranges
5025        })
5026    }
5027
5028    pub fn runnable_ranges(
5029        &self,
5030        offset_range: Range<usize>,
5031    ) -> impl Iterator<Item = RunnableRange> + '_ {
5032        let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
5033            grammar.runnable_config.as_ref().map(|config| &config.query)
5034        });
5035
5036        let test_configs = syntax_matches
5037            .grammars()
5038            .iter()
5039            .map(|grammar| grammar.runnable_config.as_ref())
5040            .collect::<Vec<_>>();
5041
5042        iter::from_fn(move || {
5043            loop {
5044                let mat = syntax_matches.peek()?;
5045
5046                let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
5047                    let mut run_range = None;
5048                    let full_range = mat.captures.iter().fold(
5049                        Range {
5050                            start: usize::MAX,
5051                            end: 0,
5052                        },
5053                        |mut acc, next| {
5054                            let byte_range = next.node.byte_range();
5055                            if acc.start > byte_range.start {
5056                                acc.start = byte_range.start;
5057                            }
5058                            if acc.end < byte_range.end {
5059                                acc.end = byte_range.end;
5060                            }
5061                            acc
5062                        },
5063                    );
5064                    if full_range.start > full_range.end {
5065                        // We did not find a full spanning range of this match.
5066                        return None;
5067                    }
5068                    let extra_captures: SmallVec<[_; 1]> =
5069                        SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
5070                            test_configs
5071                                .extra_captures
5072                                .get(capture.index as usize)
5073                                .cloned()
5074                                .and_then(|tag_name| match tag_name {
5075                                    RunnableCapture::Named(name) => {
5076                                        Some((capture.node.byte_range(), name))
5077                                    }
5078                                    RunnableCapture::Run => {
5079                                        let _ = run_range.insert(capture.node.byte_range());
5080                                        None
5081                                    }
5082                                })
5083                        }));
5084                    let run_range = run_range?;
5085                    let tags = test_configs
5086                        .query
5087                        .property_settings(mat.pattern_index)
5088                        .iter()
5089                        .filter_map(|property| {
5090                            if *property.key == *"tag" {
5091                                property
5092                                    .value
5093                                    .as_ref()
5094                                    .map(|value| RunnableTag(value.to_string().into()))
5095                            } else {
5096                                None
5097                            }
5098                        })
5099                        .collect();
5100                    let extra_captures = extra_captures
5101                        .into_iter()
5102                        .map(|(range, name)| {
5103                            (
5104                                name.to_string(),
5105                                self.text_for_range(range).collect::<String>(),
5106                            )
5107                        })
5108                        .collect();
5109                    // All tags should have the same range.
5110                    Some(RunnableRange {
5111                        run_range,
5112                        full_range,
5113                        runnable: Runnable {
5114                            tags,
5115                            language: mat.language,
5116                            buffer: self.remote_id(),
5117                        },
5118                        extra_captures,
5119                        buffer_id: self.remote_id(),
5120                    })
5121                });
5122
5123                syntax_matches.advance();
5124                if test_range.is_some() {
5125                    // It's fine to short-circuit when `.peek()?` returns `None`. However, a match whose
5126                    // captures lack a run marker should not end this iterator, so in that case we loop around to the next match.
5127                    return test_range;
5128                }
5129            }
5130        })
5131    }
5132
5133    /// Returns selections for remote peers intersecting the given range, optionally including the local replica's selections.
5134    #[allow(clippy::type_complexity)]
5135    pub fn selections_in_range(
5136        &self,
5137        range: Range<Anchor>,
5138        include_local: bool,
5139    ) -> impl Iterator<
5140        Item = (
5141            ReplicaId,
5142            bool,
5143            CursorShape,
5144            impl Iterator<Item = &Selection<Anchor>> + '_,
5145        ),
5146    > + '_ {
5147        self.remote_selections
5148            .iter()
5149            .filter(move |(replica_id, set)| {
5150                (include_local || **replica_id != self.text.replica_id())
5151                    && !set.selections.is_empty()
5152            })
5153            .map(move |(replica_id, set)| {
5154                let start_ix = match set.selections.binary_search_by(|probe| {
5155                    probe.end.cmp(&range.start, self).then(Ordering::Greater)
5156                }) {
5157                    Ok(ix) | Err(ix) => ix,
5158                };
5159                let end_ix = match set.selections.binary_search_by(|probe| {
5160                    probe.start.cmp(&range.end, self).then(Ordering::Less)
5161                }) {
5162                    Ok(ix) | Err(ix) => ix,
5163                };
5164
5165                (
5166                    *replica_id,
5167                    set.line_mode,
5168                    set.cursor_shape,
5169                    set.selections[start_ix..end_ix].iter(),
5170                )
5171            })
5172    }
5173
5174    /// Returns whether the buffer contains any diagnostics.
5175    pub fn has_diagnostics(&self) -> bool {
5176        !self.diagnostics.is_empty()
5177    }
5178
5179    /// Returns all the diagnostics intersecting the given range.
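    ///
    /// A minimal usage sketch (not a doctest; assumes an existing `snapshot: BufferSnapshot`):
    ///
    /// ```ignore
    /// // Walk every diagnostic in the buffer in order, with ranges resolved to byte offsets.
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     println!("{:?}: {:?}", entry.diagnostic.severity, entry.range);
    /// }
    /// ```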
5180    pub fn diagnostics_in_range<'a, T, O>(
5181        &'a self,
5182        search_range: Range<T>,
5183        reversed: bool,
5184    ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
5185    where
5186        T: 'a + Clone + ToOffset,
5187        O: 'a + FromAnchor,
5188    {
5189        let mut iterators: Vec<_> = self
5190            .diagnostics
5191            .iter()
5192            .map(|(_, collection)| {
5193                collection
5194                    .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
5195                    .peekable()
5196            })
5197            .collect();
5198
5199        std::iter::from_fn(move || {
5200            let (next_ix, _) = iterators
5201                .iter_mut()
5202                .enumerate()
5203                .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
5204                .min_by(|(_, a), (_, b)| {
5205                    let cmp = a
5206                        .range
5207                        .start
5208                        .cmp(&b.range.start, self)
5209                        // when range is equal, sort by diagnostic severity
5210                        .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
5211                        // and stabilize order with group_id
5212                        .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
5213                    if reversed { cmp.reverse() } else { cmp }
5214                })?;
5215            iterators[next_ix]
5216                .next()
5217                .map(
5218                    |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
5219                        diagnostic,
5220                        range: FromAnchor::from_anchor(&range.start, self)
5221                            ..FromAnchor::from_anchor(&range.end, self),
5222                    },
5223                )
5224        })
5225    }
5226
5227    /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
5228    /// should be used instead.
5229    pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
5230        &self.diagnostics
5231    }
5232
5233    /// Returns all the diagnostic groups associated with the given
5234    /// language server ID. If no language server ID is provided,
5235    /// all diagnostic groups are returned.
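    ///
    /// A minimal usage sketch (not a doctest; assumes an existing `snapshot: BufferSnapshot`):
    ///
    /// ```ignore
    /// // Inspect every group, regardless of which language server produced it.
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     let primary = &group.entries[group.primary_ix];
    ///     println!("{:?}: {:?}", server_id, primary.range);
    /// }
    /// ```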
5236    pub fn diagnostic_groups(
5237        &self,
5238        language_server_id: Option<LanguageServerId>,
5239    ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
5240        let mut groups = Vec::new();
5241
5242        if let Some(language_server_id) = language_server_id {
5243            if let Ok(ix) = self
5244                .diagnostics
5245                .binary_search_by_key(&language_server_id, |e| e.0)
5246            {
5247                self.diagnostics[ix]
5248                    .1
5249                    .groups(language_server_id, &mut groups, self);
5250            }
5251        } else {
5252            for (language_server_id, diagnostics) in self.diagnostics.iter() {
5253                diagnostics.groups(*language_server_id, &mut groups, self);
5254            }
5255        }
5256
5257        groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
5258            let a_start = &group_a.entries[group_a.primary_ix].range.start;
5259            let b_start = &group_b.entries[group_b.primary_ix].range.start;
5260            a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
5261        });
5262
5263        groups
5264    }
5265
5266    /// Returns an iterator over the diagnostics for the given group.
5267    pub fn diagnostic_group<O>(
5268        &self,
5269        group_id: usize,
5270    ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
5271    where
5272        O: FromAnchor + 'static,
5273    {
5274        self.diagnostics
5275            .iter()
5276            .flat_map(move |(_, set)| set.group(group_id, self))
5277    }
5278
5279    /// An integer version number that accounts for all updates besides
5280    /// the buffer's text itself (which is versioned via a version vector).
5281    pub fn non_text_state_update_count(&self) -> usize {
5282        self.non_text_state_update_count
5283    }
5284
5285    /// An integer version that changes when the buffer's syntax changes.
5286    pub fn syntax_update_count(&self) -> usize {
5287        self.syntax.update_count()
5288    }
5289
5290    /// Returns a snapshot of the underlying file.
5291    pub fn file(&self) -> Option<&Arc<dyn File>> {
5292        self.file.as_ref()
5293    }
5294
5295    pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
5296        if let Some(file) = self.file() {
5297            if file.path().file_name().is_none() || include_root {
5298                Some(file.full_path(cx).to_string_lossy().into_owned())
5299            } else {
5300                Some(file.path().display(file.path_style(cx)).to_string())
5301            }
5302        } else {
5303            None
5304        }
5305    }
5306
5307    pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5308        let query_str = query.fuzzy_contents;
5309        if query_str.is_some_and(|query| query.is_empty()) {
5310            return BTreeMap::default();
5311        }
5312
5313        let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5314            language,
5315            override_id: None,
5316        }));
5317
5318        let mut query_ix = 0;
5319        let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5320        let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5321
5322        let mut words = BTreeMap::default();
5323        let mut current_word_start_ix = None;
5324        let mut chunk_ix = query.range.start;
5325        for chunk in self.chunks(query.range, false) {
5326            for (i, c) in chunk.text.char_indices() {
5327                let ix = chunk_ix + i;
5328                if classifier.is_word(c) {
5329                    if current_word_start_ix.is_none() {
5330                        current_word_start_ix = Some(ix);
5331                    }
5332
5333                    if let Some(query_chars) = &query_chars
5334                        && query_ix < query_len
5335                        && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5336                    {
5337                        query_ix += 1;
5338                    }
5339                    continue;
5340                } else if let Some(word_start) = current_word_start_ix.take()
5341                    && query_ix == query_len
5342                {
5343                    let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5344                    let mut word_text = self.text_for_range(word_start..ix).peekable();
5345                    let first_char = word_text
5346                        .peek()
5347                        .and_then(|first_chunk| first_chunk.chars().next());
5348                    // As a heuristic to reduce useless completions, skip empty words and "words" that start with a digit.
5349                    if !query.skip_digits
5350                        || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5351                    {
5352                        words.insert(word_text.collect(), word_range);
5353                    }
5354                }
5355                query_ix = 0;
5356            }
5357            chunk_ix += chunk.text.len();
5358        }
5359
5360        words
5361    }
5362}
5363
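/// A query describing which words to collect from a buffer via [`BufferSnapshot::words_in_range`].
///
/// A minimal usage sketch (not a doctest; assumes an existing `snapshot: BufferSnapshot`):
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("cfg"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// // `words` maps each matching word to its anchor range in the buffer.
/// ```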
5364pub struct WordsQuery<'a> {
5365    /// Only return words that contain, in order, all of the characters from this fuzzy string.
5366    pub fuzzy_contents: Option<&'a str>,
5367    /// Skips words that start with a digit.
5368    pub skip_digits: bool,
5369    /// The buffer offset range in which to look for words.
5370    pub range: Range<usize>,
5371}
5372
5373fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5374    indent_size_for_text(text.chars_at(Point::new(row, 0)))
5375}
5376
5377fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5378    let mut result = IndentSize::spaces(0);
5379    for c in text {
5380        let kind = match c {
5381            ' ' => IndentKind::Space,
5382            '\t' => IndentKind::Tab,
5383            _ => break,
5384        };
5385        if result.len == 0 {
5386            result.kind = kind;
5387        }
5388        result.len += 1;
5389    }
5390    result
5391}
5392
5393impl Clone for BufferSnapshot {
5394    fn clone(&self) -> Self {
5395        Self {
5396            text: self.text.clone(),
5397            syntax: self.syntax.clone(),
5398            file: self.file.clone(),
5399            remote_selections: self.remote_selections.clone(),
5400            diagnostics: self.diagnostics.clone(),
5401            language: self.language.clone(),
5402            tree_sitter_data: self.tree_sitter_data.clone(),
5403            non_text_state_update_count: self.non_text_state_update_count,
5404            capability: self.capability,
5405        }
5406    }
5407}
5408
5409impl Deref for BufferSnapshot {
5410    type Target = text::BufferSnapshot;
5411
5412    fn deref(&self) -> &Self::Target {
5413        &self.text
5414    }
5415}
5416
5417unsafe impl Send for BufferChunks<'_> {}
5418
5419impl<'a> BufferChunks<'a> {
5420    pub(crate) fn new(
5421        text: &'a Rope,
5422        range: Range<usize>,
5423        syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5424        diagnostics: bool,
5425        buffer_snapshot: Option<&'a BufferSnapshot>,
5426    ) -> Self {
5427        let mut highlights = None;
5428        if let Some((captures, highlight_maps)) = syntax {
5429            highlights = Some(BufferChunkHighlights {
5430                captures,
5431                next_capture: None,
5432                stack: Default::default(),
5433                highlight_maps,
5434            })
5435        }
5436
5437        let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5438        let chunks = text.chunks_in_range(range.clone());
5439
5440        let mut this = BufferChunks {
5441            range,
5442            buffer_snapshot,
5443            chunks,
5444            diagnostic_endpoints,
5445            error_depth: 0,
5446            warning_depth: 0,
5447            information_depth: 0,
5448            hint_depth: 0,
5449            unnecessary_depth: 0,
5450            underline: true,
5451            highlights,
5452        };
5453        this.initialize_diagnostic_endpoints();
5454        this
5455    }
5456
5457    /// Seeks to the given byte range in the buffer.
5458    pub fn seek(&mut self, range: Range<usize>) {
5459        let old_range = std::mem::replace(&mut self.range, range.clone());
5460        self.chunks.set_range(self.range.clone());
5461        if let Some(highlights) = self.highlights.as_mut() {
5462            if old_range.start <= self.range.start && old_range.end >= self.range.end {
5463                // Reuse existing highlights stack, as the new range is a subrange of the old one.
5464                highlights
5465                    .stack
5466                    .retain(|(end_offset, _)| *end_offset > range.start);
5467                if let Some(capture) = &highlights.next_capture
5468                    && range.start >= capture.node.start_byte()
5469                {
5470                    let next_capture_end = capture.node.end_byte();
5471                    if range.start < next_capture_end {
5472                        highlights.stack.push((
5473                            next_capture_end,
5474                            highlights.highlight_maps[capture.grammar_index].get(capture.index),
5475                        ));
5476                    }
5477                    highlights.next_capture.take();
5478                }
5479            } else if let Some(snapshot) = self.buffer_snapshot {
5480                let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5481                *highlights = BufferChunkHighlights {
5482                    captures,
5483                    next_capture: None,
5484                    stack: Default::default(),
5485                    highlight_maps,
5486                };
5487            } else {
5488                // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5489                // Seeking such BufferChunks is not supported.
5490                debug_assert!(
5491                    false,
5492                    "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5493                );
5494            }
5495
5496            highlights.captures.set_byte_range(self.range.clone());
5497            self.initialize_diagnostic_endpoints();
5498        }
5499    }
5500
5501    fn initialize_diagnostic_endpoints(&mut self) {
5502        if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5503            && let Some(buffer) = self.buffer_snapshot
5504        {
5505            let mut diagnostic_endpoints = Vec::new();
5506            for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5507                diagnostic_endpoints.push(DiagnosticEndpoint {
5508                    offset: entry.range.start,
5509                    is_start: true,
5510                    severity: entry.diagnostic.severity,
5511                    is_unnecessary: entry.diagnostic.is_unnecessary,
5512                    underline: entry.diagnostic.underline,
5513                });
5514                diagnostic_endpoints.push(DiagnosticEndpoint {
5515                    offset: entry.range.end,
5516                    is_start: false,
5517                    severity: entry.diagnostic.severity,
5518                    is_unnecessary: entry.diagnostic.is_unnecessary,
5519                    underline: entry.diagnostic.underline,
5520                });
5521            }
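            // Process range starts before range ends at equal offsets so that a
            // diagnostic's depth is incremented before it is decremented.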
5522            diagnostic_endpoints
5523                .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5524            *diagnostics = diagnostic_endpoints.into_iter().peekable();
5525            self.hint_depth = 0;
5526            self.error_depth = 0;
5527            self.warning_depth = 0;
5528            self.information_depth = 0;
5529        }
5530    }
5531
5532    /// The current byte offset in the buffer.
5533    pub fn offset(&self) -> usize {
5534        self.range.start
5535    }
5536
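    /// The byte range of the buffer that remains to be yielded by this iterator.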
5537    pub fn range(&self) -> Range<usize> {
5538        self.range.clone()
5539    }
5540
5541    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5542        let depth = match endpoint.severity {
5543            DiagnosticSeverity::ERROR => &mut self.error_depth,
5544            DiagnosticSeverity::WARNING => &mut self.warning_depth,
5545            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5546            DiagnosticSeverity::HINT => &mut self.hint_depth,
5547            _ => return,
5548        };
5549        if endpoint.is_start {
5550            *depth += 1;
5551        } else {
5552            *depth -= 1;
5553        }
5554
5555        if endpoint.is_unnecessary {
5556            if endpoint.is_start {
5557                self.unnecessary_depth += 1;
5558            } else {
5559                self.unnecessary_depth -= 1;
5560            }
5561        }
5562    }
5563
5564    fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5565        if self.error_depth > 0 {
5566            Some(DiagnosticSeverity::ERROR)
5567        } else if self.warning_depth > 0 {
5568            Some(DiagnosticSeverity::WARNING)
5569        } else if self.information_depth > 0 {
5570            Some(DiagnosticSeverity::INFORMATION)
5571        } else if self.hint_depth > 0 {
5572            Some(DiagnosticSeverity::HINT)
5573        } else {
5574            None
5575        }
5576    }
5577
5578    fn current_code_is_unnecessary(&self) -> bool {
5579        self.unnecessary_depth > 0
5580    }
5581}
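// Illustrative note on the iterator above: a language-aware `BufferChunks` is normally
// obtained from a `BufferSnapshot` (e.g. via its `chunks` method) and can then be
// re-seeked. When the new range lies within the current one, `seek` reuses the highlight
// stack; otherwise it re-queries the syntax map through the associated snapshot. A
// hypothetical shape of that usage:
//
//     let mut chunks = snapshot.chunks(0..snapshot.len(), true);
//     let first = chunks.next();
//     chunks.seek(sub_range); // a `sub_range` within the current range reuses highlight state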
5582
5583impl<'a> Iterator for BufferChunks<'a> {
5584    type Item = Chunk<'a>;
5585
5586    fn next(&mut self) -> Option<Self::Item> {
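        // Three phases per call: drop highlight captures that ended at or before
        // `range.start` and pull the next pending capture; consume diagnostic
        // endpoints up to the current position, updating severity depths; then emit
        // the next text chunk, clipped to the nearest capture or diagnostic boundary.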
5587        let mut next_capture_start = usize::MAX;
5588        let mut next_diagnostic_endpoint = usize::MAX;
5589
5590        if let Some(highlights) = self.highlights.as_mut() {
5591            while let Some((parent_capture_end, _)) = highlights.stack.last() {
5592                if *parent_capture_end <= self.range.start {
5593                    highlights.stack.pop();
5594                } else {
5595                    break;
5596                }
5597            }
5598
5599            if highlights.next_capture.is_none() {
5600                highlights.next_capture = highlights.captures.next();
5601            }
5602
5603            while let Some(capture) = highlights.next_capture.as_ref() {
5604                if self.range.start < capture.node.start_byte() {
5605                    next_capture_start = capture.node.start_byte();
5606                    break;
5607                } else {
5608                    let highlight_id =
5609                        highlights.highlight_maps[capture.grammar_index].get(capture.index);
5610                    highlights
5611                        .stack
5612                        .push((capture.node.end_byte(), highlight_id));
5613                    highlights.next_capture = highlights.captures.next();
5614                }
5615            }
5616        }
5617
5618        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5619        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5620            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5621                if endpoint.offset <= self.range.start {
5622                    self.update_diagnostic_depths(endpoint);
5623                    diagnostic_endpoints.next();
5624                    self.underline = endpoint.underline;
5625                } else {
5626                    next_diagnostic_endpoint = endpoint.offset;
5627                    break;
5628                }
5629            }
5630        }
5631        self.diagnostic_endpoints = diagnostic_endpoints;
5632
5633        if let Some(ChunkBitmaps {
5634            text: chunk,
5635            chars: chars_map,
5636            tabs,
5637        }) = self.chunks.peek_with_bitmaps()
5638        {
5639            let chunk_start = self.range.start;
5640            let mut chunk_end = (self.chunks.offset() + chunk.len())
5641                .min(next_capture_start)
5642                .min(next_diagnostic_endpoint);
5643            let mut highlight_id = None;
5644            if let Some(highlights) = self.highlights.as_ref()
5645                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5646            {
5647                chunk_end = chunk_end.min(*parent_capture_end);
5648                highlight_id = Some(*parent_highlight_id);
5649            }
5650            let bit_start = chunk_start - self.chunks.offset();
5651            let bit_end = chunk_end - self.chunks.offset();
5652
5653            let slice = &chunk[bit_start..bit_end];
5654
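            // `mask` keeps the low `bit_end` bits; shifting by `bit_start` re-bases the
            // per-byte `tabs`/`chars` bitmaps so that bit 0 corresponds to `slice[0]`.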
5655            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5656            let tabs = (tabs >> bit_start) & mask;
5657            let chars = (chars_map >> bit_start) & mask;
5658
5659            self.range.start = chunk_end;
5660            if self.range.start == self.chunks.offset() + chunk.len() {
5661                self.chunks.next().unwrap();
5662            }
5663
5664            Some(Chunk {
5665                text: slice,
5666                syntax_highlight_id: highlight_id,
5667                underline: self.underline,
5668                diagnostic_severity: self.current_diagnostic_severity(),
5669                is_unnecessary: self.current_code_is_unnecessary(),
5670                tabs,
5671                chars,
5672                ..Chunk::default()
5673            })
5674        } else {
5675            None
5676        }
5677    }
5678}
5679
5680impl operation_queue::Operation for Operation {
5681    fn lamport_timestamp(&self) -> clock::Lamport {
5682        match self {
5683            Operation::Buffer(_) => {
5684                unreachable!("buffer operations should never be deferred at this layer")
5685            }
5686            Operation::UpdateDiagnostics {
5687                lamport_timestamp, ..
5688            }
5689            | Operation::UpdateSelections {
5690                lamport_timestamp, ..
5691            }
5692            | Operation::UpdateCompletionTriggers {
5693                lamport_timestamp, ..
5694            }
5695            | Operation::UpdateLineEnding {
5696                lamport_timestamp, ..
5697            } => *lamport_timestamp,
5698        }
5699    }
5700}
5701
5702impl Default for Diagnostic {
5703    fn default() -> Self {
5704        Self {
5705            source: Default::default(),
5706            source_kind: DiagnosticSourceKind::Other,
5707            code: None,
5708            code_description: None,
5709            severity: DiagnosticSeverity::ERROR,
5710            message: Default::default(),
5711            markdown: None,
5712            group_id: 0,
5713            is_primary: false,
5714            is_disk_based: false,
5715            is_unnecessary: false,
5716            underline: true,
5717            data: None,
5718            registration_id: None,
5719        }
5720    }
5721}
5722
5723impl IndentSize {
5724    /// Returns an [`IndentSize`] representing the given spaces.
5725    pub fn spaces(len: u32) -> Self {
5726        Self {
5727            len,
5728            kind: IndentKind::Space,
5729        }
5730    }
5731
5732    /// Returns an [`IndentSize`] representing a tab.
5733    pub fn tab() -> Self {
5734        Self {
5735            len: 1,
5736            kind: IndentKind::Tab,
5737        }
5738    }
5739
5740    /// An iterator over the characters represented by this [`IndentSize`].
5741    pub fn chars(&self) -> impl Iterator<Item = char> {
5742        iter::repeat(self.char()).take(self.len as usize)
5743    }
5744
5745    /// The character representation of this [`IndentSize`].
5746    pub fn char(&self) -> char {
5747        match self.kind {
5748            IndentKind::Space => ' ',
5749            IndentKind::Tab => '\t',
5750        }
5751    }
5752
5753    /// Consumes the current [`IndentSize`] and returns a new one that has
5754    /// been shrunk or enlarged by the given size in the given direction.
5755    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5756        match direction {
5757            Ordering::Less => {
5758                if self.kind == size.kind && self.len >= size.len {
5759                    self.len -= size.len;
5760                }
5761            }
5762            Ordering::Equal => {}
5763            Ordering::Greater => {
5764                if self.len == 0 {
5765                    self = size;
5766                } else if self.kind == size.kind {
5767                    self.len += size.len;
5768                }
5769            }
5770        }
5771        self
5772    }
5773
5774    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5775        match self.kind {
5776            IndentKind::Space => self.len as usize,
5777            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5778        }
5779    }
5780}
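// A minimal sketch of how `IndentSize::with_delta` composes indents: an empty indent
// adopts whatever is added, a mismatched kind leaves a non-empty indent unchanged, and
// shrinking requires a matching kind.
#[cfg(test)]
mod indent_size_delta_examples {
    use super::*;
    use std::cmp::Ordering;

    #[test]
    fn with_delta_and_expanded_tabs() {
        // An empty indent takes on whatever is added to it.
        let indent = IndentSize::spaces(0).with_delta(Ordering::Greater, IndentSize::tab());
        assert!(indent.kind == IndentKind::Tab && indent.len == 1);

        // Growing by a mismatched kind leaves a non-empty indent unchanged.
        let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::tab());
        assert!(indent.kind == IndentKind::Space && indent.len == 4);

        // Shrinking requires a matching kind and sufficient length.
        let indent = IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(2));
        assert!(indent.len == 2);

        // A single tab expands to the configured tab size.
        let tab = IndentSize::tab();
        assert!(tab.len_with_expanded_tabs(std::num::NonZeroU32::new(4).unwrap()) == 4);
    }
}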
5781
5782#[cfg(any(test, feature = "test-support"))]
5783pub struct TestFile {
5784    pub path: Arc<RelPath>,
5785    pub root_name: String,
5786    pub local_root: Option<PathBuf>,
5787}
5788
5789#[cfg(any(test, feature = "test-support"))]
5790impl File for TestFile {
5791    fn path(&self) -> &Arc<RelPath> {
5792        &self.path
5793    }
5794
5795    fn full_path(&self, _: &gpui::App) -> PathBuf {
5796        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5797    }
5798
5799    fn as_local(&self) -> Option<&dyn LocalFile> {
5800        if self.local_root.is_some() {
5801            Some(self)
5802        } else {
5803            None
5804        }
5805    }
5806
5807    fn disk_state(&self) -> DiskState {
5808        unimplemented!()
5809    }
5810
5811    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5812        self.path().file_name().unwrap_or(self.root_name.as_ref())
5813    }
5814
5815    fn worktree_id(&self, _: &App) -> WorktreeId {
5816        WorktreeId::from_usize(0)
5817    }
5818
5819    fn to_proto(&self, _: &App) -> rpc::proto::File {
5820        unimplemented!()
5821    }
5822
5823    fn is_private(&self) -> bool {
5824        false
5825    }
5826
5827    fn path_style(&self, _cx: &App) -> PathStyle {
5828        PathStyle::local()
5829    }
5830}
5831
5832#[cfg(any(test, feature = "test-support"))]
5833impl LocalFile for TestFile {
5834    fn abs_path(&self, _cx: &App) -> PathBuf {
5835        PathBuf::from(self.local_root.as_ref().unwrap())
5836            .join(&self.root_name)
5837            .join(self.path.as_std_path())
5838    }
5839
5840    fn load(&self, _cx: &App) -> Task<Result<String>> {
5841        unimplemented!()
5842    }
5843
5844    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5845        unimplemented!()
5846    }
5847}
5848
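/// Groups an ascending sequence of values into contiguous half-open ranges, starting a
/// new range whenever a value does not extend the current run or the run reaches `max_len`.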
5849pub(crate) fn contiguous_ranges(
5850    mut values: impl Iterator<Item = u32>,
5851    max_len: usize,
5852) -> impl Iterator<Item = Range<u32>> {
5854    let mut current_range: Option<Range<u32>> = None;
5855    std::iter::from_fn(move || {
5856        loop {
5857            if let Some(value) = values.next() {
5858                if let Some(range) = &mut current_range
5859                    && value == range.end
5860                    && range.len() < max_len
5861                {
5862                    range.end += 1;
5863                    continue;
5864                }
5865
5866                let prev_range = current_range.clone();
5867                current_range = Some(value..(value + 1));
5868                if prev_range.is_some() {
5869                    return prev_range;
5870                }
5871            } else {
5872                return current_range.take();
5873            }
5874        }
5875    })
5876}
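// A minimal sketch of the grouping behavior above: consecutive values coalesce into one
// range, while gaps and the `max_len` cap start new ranges.
#[cfg(test)]
mod contiguous_ranges_examples {
    use super::*;

    #[test]
    fn groups_consecutive_values() {
        let rows = [1, 2, 3, 5, 6, 7, 8, 9];
        let ranges: Vec<_> = contiguous_ranges(rows.iter().copied(), 100).collect();
        assert_eq!(ranges, vec![1..4, 5..10]);

        // With a cap of 3, runs are split once they reach that length.
        let ranges: Vec<_> = contiguous_ranges(rows.iter().copied(), 3).collect();
        assert_eq!(ranges, vec![1..4, 5..8, 8..10]);
    }
}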
5877
5878#[derive(Default, Debug)]
5879pub struct CharClassifier {
5880    scope: Option<LanguageScope>,
5881    scope_context: Option<CharScopeContext>,
5882    ignore_punctuation: bool,
5883}
5884
5885impl CharClassifier {
5886    pub fn new(scope: Option<LanguageScope>) -> Self {
5887        Self {
5888            scope,
5889            scope_context: None,
5890            ignore_punctuation: false,
5891        }
5892    }
5893
5894    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5895        Self {
5896            scope_context,
5897            ..self
5898        }
5899    }
5900
5901    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5902        Self {
5903            ignore_punctuation,
5904            ..self
5905        }
5906    }
5907
5908    pub fn is_whitespace(&self, c: char) -> bool {
5909        self.kind(c) == CharKind::Whitespace
5910    }
5911
5912    pub fn is_word(&self, c: char) -> bool {
5913        self.kind(c) == CharKind::Word
5914    }
5915
5916    pub fn is_punctuation(&self, c: char) -> bool {
5917        self.kind(c) == CharKind::Punctuation
5918    }
5919
5920    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5921        if c.is_alphanumeric() || c == '_' {
5922            return CharKind::Word;
5923        }
5924
5925        if let Some(scope) = &self.scope {
5926            let characters = match self.scope_context {
5927                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5928                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5929                None => scope.word_characters(),
5930            };
5931            if let Some(characters) = characters
5932                && characters.contains(&c)
5933            {
5934                return CharKind::Word;
5935            }
5936        }
5937
5938        if c.is_whitespace() {
5939            return CharKind::Whitespace;
5940        }
5941
5942        if ignore_punctuation {
5943            CharKind::Word
5944        } else {
5945            CharKind::Punctuation
5946        }
5947    }
5948
5949    pub fn kind(&self, c: char) -> CharKind {
5950        self.kind_with(c, self.ignore_punctuation)
5951    }
5952}
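// A minimal usage sketch: without a language scope, classification falls back to
// alphanumerics and underscore as word characters, whitespace, and everything else as
// punctuation, which `ignore_punctuation` can fold into the word class.
#[cfg(test)]
mod char_classifier_examples {
    use super::*;

    #[test]
    fn classifies_chars_without_a_scope() {
        let classifier = CharClassifier::new(None);
        assert!(classifier.is_word('a') && classifier.is_word('_'));
        assert!(classifier.is_whitespace(' '));
        assert!(classifier.is_punctuation('-'));

        // Treating punctuation as part of words, e.g. for "whole word" motions.
        let lenient = CharClassifier::new(None).ignore_punctuation(true);
        assert!(lenient.is_word('-'));
    }
}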
5953
5954/// Find all of the ranges of whitespace that occur at the ends of lines
5955/// in the given rope.
5956///
5957/// This could also be done with a regex search, but this implementation
5958/// avoids copying text.
5959pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5960    let mut ranges = Vec::new();
5961
5962    let mut offset = 0;
5963    let mut prev_chunk_trailing_whitespace_range = 0..0;
5964    for chunk in rope.chunks() {
5965        let mut prev_line_trailing_whitespace_range = 0..0;
5966        for (i, line) in chunk.split('\n').enumerate() {
5967            let line_end_offset = offset + line.len();
5968            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5969            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5970
5971            if i == 0 && trimmed_line_len == 0 {
5972                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5973            }
5974            if !prev_line_trailing_whitespace_range.is_empty() {
5975                ranges.push(prev_line_trailing_whitespace_range);
5976            }
5977
5978            offset = line_end_offset + 1;
5979            prev_line_trailing_whitespace_range = trailing_whitespace_range;
5980        }
5981
5982        offset -= 1;
5983        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5984    }
5985
5986    if !prev_chunk_trailing_whitespace_range.is_empty() {
5987        ranges.push(prev_chunk_trailing_whitespace_range);
5988    }
5989
5990    ranges
5991}
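
// A minimal sketch of the scan above, assuming a `Rope` can be constructed directly
// from a `&str`; if that conversion is not available, build the rope however the text
// crate provides.
#[cfg(test)]
mod trailing_whitespace_examples {
    use super::*;

    #[test]
    fn finds_whitespace_at_line_ends() {
        // "a " has a trailing space, "b\t" a trailing tab, and the final line two spaces.
        let rope = Rope::from("a \nb\t\nc  ");
        assert_eq!(trailing_whitespace_ranges(&rope), vec![1..2, 4..5, 7..9]);
    }
}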