Detailed changes
@@ -4160,6 +4160,60 @@ dependencies = [
"util",
]
+[[package]]
+name = "language2"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "async-broadcast",
+ "async-trait",
+ "client2",
+ "clock",
+ "collections",
+ "ctor",
+ "env_logger 0.9.3",
+ "fs",
+ "futures 0.3.28",
+ "fuzzy",
+ "git",
+ "globset",
+ "gpui2",
+ "indoc",
+ "lazy_static",
+ "log",
+ "lsp",
+ "parking_lot 0.11.2",
+ "postage",
+ "rand 0.8.5",
+ "regex",
+ "rpc",
+ "schemars",
+ "serde",
+ "serde_derive",
+ "serde_json",
+ "settings",
+ "settings2",
+ "similar",
+ "smallvec",
+ "smol",
+ "sum_tree",
+ "text",
+ "tree-sitter",
+ "tree-sitter-elixir",
+ "tree-sitter-embedded-template",
+ "tree-sitter-heex",
+ "tree-sitter-html",
+ "tree-sitter-json 0.20.0",
+ "tree-sitter-markdown",
+ "tree-sitter-python",
+ "tree-sitter-ruby",
+ "tree-sitter-rust",
+ "tree-sitter-typescript",
+ "unicase",
+ "unindent",
+ "util",
+]
+
[[package]]
name = "language_selector"
version = "0.1.0"
@@ -43,6 +43,7 @@ members = [
"crates/install_cli",
"crates/journal",
"crates/language",
+ "crates/language2",
"crates/language_selector",
"crates/language_tools",
"crates/live_kit_client",
@@ -22,6 +22,8 @@ fn generate_dispatch_bindings() {
.allowlist_var("DISPATCH_QUEUE_PRIORITY_DEFAULT")
.allowlist_function("dispatch_get_global_queue")
.allowlist_function("dispatch_async_f")
+ .allowlist_function("dispatch_after_f")
+ .allowlist_function("dispatch_time")
.parse_callbacks(Box::new(bindgen::CargoCallbacks))
.layout_tests(false)
.generate()
@@ -2,9 +2,13 @@ use crate::{
AppContext, AsyncAppContext, Context, Effect, EntityId, EventEmitter, Handle, Reference,
Subscription, Task, WeakHandle,
};
+use derive_more::{Deref, DerefMut};
use std::{future::Future, marker::PhantomData};
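+/// Derefs to the wrapped `AppContext` (via the `#[deref]` field below), so
+/// `AppContext` methods can be called directly on a `ModelContext`.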
+#[derive(Deref, DerefMut)]
pub struct ModelContext<'a, T> {
+ #[deref]
+ #[deref_mut]
app: Reference<'a, AppContext>,
entity_type: PhantomData<T>,
entity_id: EntityId,
@@ -152,6 +152,32 @@ impl Executor {
}
}
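+ /// Blocks the current thread on `future` for at most `duration`. Returns
+ /// `Ok` with the future's output if it completes in time; otherwise returns
+ /// `Err` containing the still-pending future so the caller can continue
+ /// awaiting it asynchronously.
+ ///
+ /// A sketch of the intended use (hypothetical caller):
+ ///
+ /// ```ignore
+ /// match executor.block_with_timeout(Duration::from_millis(1), task) {
+ ///     Ok(output) => { /* completed within the timeout */ }
+ ///     Err(pending) => { /* spawn `pending` to finish in the background */ }
+ /// }
+ /// ```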
+ pub fn block_with_timeout<F, R>(
+ &self,
+ duration: Duration,
+ future: F,
+ ) -> Result<R, impl Future<Output = R>>
+ where
+ F: Future<Output = R> + Send + Sync + 'static,
+ {
+ let mut future = Box::pin(future);
+ let timeout = {
+ let future = &mut future;
+ async {
+ let timer = async {
+ self.timer(duration).await;
+ Err(())
+ };
+ let future = async move { Ok(future.await) };
+ timer.race(future).await
+ }
+ };
+ match self.block(timeout) {
+ Ok(value) => Ok(value),
+ Err(_) => Err(future),
+ }
+ }
+
pub async fn scoped<'scope, F>(&self, scheduler: F)
where
F: FnOnce(&mut Scope<'scope>),
@@ -167,9 +193,13 @@ impl Executor {
}
}
- pub fn timer(&self, duration: Duration) -> smol::Timer {
- // todo!("integrate with deterministic dispatcher")
- smol::Timer::after(duration)
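+ /// Returns a task that completes after `duration` has elapsed, routing the
+ /// wakeup through the platform dispatcher's `dispatch_after` rather than a
+ /// wall-clock `smol::Timer`, so a deterministic test dispatcher can control
+ /// when it fires.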
+ pub fn timer(&self, duration: Duration) -> Task<()> {
+ let (runnable, task) = async_task::spawn(async move {}, {
+ let dispatcher = self.dispatcher.clone();
+ move |runnable| dispatcher.dispatch_after(duration, runnable)
+ });
+ runnable.schedule();
+ Task::Spawned(task)
}
pub fn is_main_thread(&self) -> bool {
@@ -16,6 +16,7 @@ use seahash::SeaHasher;
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
use std::hash::{Hash, Hasher};
+use std::time::Duration;
use std::{
any::Any,
fmt::{self, Debug, Display},
@@ -157,8 +158,9 @@ pub(crate) trait PlatformWindow {
pub trait PlatformDispatcher: Send + Sync {
fn is_main_thread(&self) -> bool;
- fn dispatch(&self, task: Runnable);
- fn dispatch_on_main_thread(&self, task: Runnable);
+ fn dispatch(&self, runnable: Runnable);
+ fn dispatch_on_main_thread(&self, runnable: Runnable);
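+ /// Schedules `runnable` to run after `duration` has elapsed.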
+ fn dispatch_after(&self, duration: Duration, runnable: Runnable);
}
pub trait PlatformTextSystem: Send + Sync {
@@ -9,7 +9,10 @@ use objc::{
runtime::{BOOL, YES},
sel, sel_impl,
};
-use std::ffi::c_void;
+use std::{ffi::c_void, time::Duration};
include!(concat!(env!("OUT_DIR"), "/dispatch_sys.rs"));
@@ -44,6 +47,26 @@ impl PlatformDispatcher for MacDispatcher {
);
}
}
+
+ fn dispatch_after(&self, duration: Duration, runnable: Runnable) {
+ unsafe {
+ let queue =
+ dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT.try_into().unwrap(), 0);
+ // `dispatch_time` treats its second argument as a delta in nanoseconds
+ // from the given base time (0 is DISPATCH_TIME_NOW), so pass the
+ // relative duration rather than an absolute wall-clock timestamp.
+ let when = dispatch_time(0, duration.as_nanos() as i64);
+ dispatch_after_f(
+ when,
+ queue,
+ runnable.into_raw() as *mut c_void,
+ Some(trampoline),
+ );
+ }
+ }
}
extern "C" fn trampoline(runnable: *mut c_void) {
@@ -182,7 +182,7 @@ impl TextStyle {
}
}
-#[derive(Clone, Debug, Default, PartialEq)]
+#[derive(Copy, Clone, Debug, Default, PartialEq)]
pub struct HighlightStyle {
pub color: Option<Hsla>,
pub font_weight: Option<FontWeight>,
@@ -324,7 +324,7 @@ impl Default for Style {
}
}
-#[derive(Refineable, Clone, Default, Debug, PartialEq, Eq)]
+#[derive(Refineable, Copy, Clone, Default, Debug, PartialEq, Eq)]
#[refineable(debug)]
pub struct UnderlineStyle {
pub thickness: Pixels,
@@ -0,0 +1,86 @@
+[package]
+name = "language2"
+version = "0.1.0"
+edition = "2021"
+publish = false
+
+[lib]
+path = "src/language2.rs"
+doctest = false
+
+[features]
+test-support = [
+ "rand",
+ "client2/test-support",
+ "collections/test-support",
+ "lsp/test-support",
+ "text/test-support",
+ "tree-sitter-rust",
+ "tree-sitter-typescript",
+ "settings/test-support",
+ "util/test-support",
+]
+
+[dependencies]
+clock = { path = "../clock" }
+collections = { path = "../collections" }
+fuzzy = { path = "../fuzzy" }
+fs = { path = "../fs" }
+git = { path = "../git" }
+gpui2 = { path = "../gpui2" }
+lsp = { path = "../lsp" }
+rpc = { path = "../rpc" }
+settings2 = { path = "../settings2" }
+sum_tree = { path = "../sum_tree" }
+text = { path = "../text" }
+# theme = { path = "../theme" }
+util = { path = "../util" }
+
+anyhow.workspace = true
+async-broadcast = "0.4"
+async-trait.workspace = true
+futures.workspace = true
+globset.workspace = true
+lazy_static.workspace = true
+log.workspace = true
+parking_lot.workspace = true
+postage.workspace = true
+regex.workspace = true
+schemars.workspace = true
+serde.workspace = true
+serde_derive.workspace = true
+serde_json.workspace = true
+similar = "1.3"
+smallvec.workspace = true
+smol.workspace = true
+tree-sitter.workspace = true
+unicase = "2.6"
+
+rand = { workspace = true, optional = true }
+tree-sitter-rust = { workspace = true, optional = true }
+tree-sitter-typescript = { workspace = true, optional = true }
+
+[dev-dependencies]
+client2 = { path = "../client2", features = ["test-support"] }
+collections = { path = "../collections", features = ["test-support"] }
+gpui2 = { path = "../gpui2", features = ["test-support"] }
+lsp = { path = "../lsp", features = ["test-support"] }
+text = { path = "../text", features = ["test-support"] }
+settings = { path = "../settings", features = ["test-support"] }
+util = { path = "../util", features = ["test-support"] }
+ctor.workspace = true
+env_logger.workspace = true
+indoc.workspace = true
+rand.workspace = true
+unindent.workspace = true
+
+tree-sitter-embedded-template.workspace = true
+tree-sitter-html.workspace = true
+tree-sitter-json.workspace = true
+tree-sitter-markdown.workspace = true
+tree-sitter-rust.workspace = true
+tree-sitter-python.workspace = true
+tree-sitter-typescript.workspace = true
+tree-sitter-ruby.workspace = true
+tree-sitter-elixir.workspace = true
+tree-sitter-heex.workspace = true
@@ -0,0 +1,5 @@
+fn main() {
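+ // Forward ZED_BUNDLE from the build environment into rustc's env so the
+ // crate can read it at compile time (e.g. with `option_env!`).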
+ if let Ok(bundled) = std::env::var("ZED_BUNDLE") {
+ println!("cargo:rustc-env=ZED_BUNDLE={}", bundled);
+ }
+}
@@ -0,0 +1,3095 @@
+pub use crate::{
+ diagnostic_set::DiagnosticSet,
+ highlight_map::{HighlightId, HighlightMap},
+ proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, PLAIN_TEXT,
+};
+use crate::{
+ diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
+ language_settings::{language_settings, LanguageSettings},
+ outline::OutlineItem,
+ syntax_map::{
+ SyntaxLayerInfo, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatches,
+ SyntaxSnapshot, ToTreeSitterPoint,
+ },
+ CodeLabel, LanguageScope, Outline,
+};
+use anyhow::{anyhow, Result};
+pub use clock::ReplicaId;
+use futures::FutureExt as _;
+use gpui2::{AppContext, EventEmitter, HighlightStyle, ModelContext, Task};
+use lsp::LanguageServerId;
+use parking_lot::Mutex;
+use similar::{ChangeTag, TextDiff};
+use smallvec::SmallVec;
+use smol::future::yield_now;
+use std::{
+ any::Any,
+ cmp::{self, Ordering},
+ collections::BTreeMap,
+ ffi::OsStr,
+ future::Future,
+ iter::{self, Iterator, Peekable},
+ mem,
+ ops::{Deref, Range},
+ path::{Path, PathBuf},
+ str,
+ sync::Arc,
+ time::{Duration, Instant, SystemTime, UNIX_EPOCH},
+ vec,
+};
+use sum_tree::TreeMap;
+use text::operation_queue::OperationQueue;
+pub use text::{Buffer as TextBuffer, BufferSnapshot as TextBufferSnapshot, *};
+#[cfg(any(test, feature = "test-support"))]
+use util::RandomCharIter;
+use util::{RangeExt, TryFutureExt as _};
+
+#[cfg(any(test, feature = "test-support"))]
+pub use {tree_sitter_rust, tree_sitter_typescript};
+
+pub use lsp::DiagnosticSeverity;
+
+pub struct Buffer {
+ text: TextBuffer,
+ diff_base: Option<String>,
+ git_diff: git::diff::BufferDiff,
+ file: Option<Arc<dyn File>>,
+ saved_version: clock::Global,
+ saved_version_fingerprint: RopeFingerprint,
+ saved_mtime: SystemTime,
+ transaction_depth: usize,
+ was_dirty_before_starting_transaction: Option<bool>,
+ language: Option<Arc<Language>>,
+ autoindent_requests: Vec<Arc<AutoindentRequest>>,
+ pending_autoindent: Option<Task<()>>,
+ sync_parse_timeout: Duration,
+ syntax_map: Mutex<SyntaxMap>,
+ parsing_in_background: bool,
+ parse_count: usize,
+ diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
+ remote_selections: TreeMap<ReplicaId, SelectionSet>,
+ selections_update_count: usize,
+ diagnostics_update_count: usize,
+ diagnostics_timestamp: clock::Lamport,
+ file_update_count: usize,
+ git_diff_update_count: usize,
+ completion_triggers: Vec<String>,
+ completion_triggers_timestamp: clock::Lamport,
+ deferred_ops: OperationQueue<Operation>,
+}
+
+pub struct BufferSnapshot {
+ text: text::BufferSnapshot,
+ pub git_diff: git::diff::BufferDiff,
+ pub(crate) syntax: SyntaxSnapshot,
+ file: Option<Arc<dyn File>>,
+ diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
+ diagnostics_update_count: usize,
+ file_update_count: usize,
+ git_diff_update_count: usize,
+ remote_selections: TreeMap<ReplicaId, SelectionSet>,
+ selections_update_count: usize,
+ language: Option<Arc<Language>>,
+ parse_count: usize,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
+pub struct IndentSize {
+ pub len: u32,
+ pub kind: IndentKind,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
+pub enum IndentKind {
+ #[default]
+ Space,
+ Tab,
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Debug, Default)]
+pub enum CursorShape {
+ #[default]
+ Bar,
+ Block,
+ Underscore,
+ Hollow,
+}
+
+#[derive(Clone, Debug)]
+struct SelectionSet {
+ line_mode: bool,
+ cursor_shape: CursorShape,
+ selections: Arc<[Selection<Anchor>]>,
+ lamport_timestamp: clock::Lamport,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct GroupId {
+ source: Arc<str>,
+ id: usize,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct Diagnostic {
+ pub source: Option<String>,
+ pub code: Option<String>,
+ pub severity: DiagnosticSeverity,
+ pub message: String,
+ pub group_id: usize,
+ pub is_valid: bool,
+ pub is_primary: bool,
+ pub is_disk_based: bool,
+ pub is_unnecessary: bool,
+}
+
+#[derive(Clone, Debug)]
+pub struct Completion {
+ pub old_range: Range<Anchor>,
+ pub new_text: String,
+ pub label: CodeLabel,
+ pub server_id: LanguageServerId,
+ pub lsp_completion: lsp::CompletionItem,
+}
+
+#[derive(Clone, Debug)]
+pub struct CodeAction {
+ pub server_id: LanguageServerId,
+ pub range: Range<Anchor>,
+ pub lsp_action: lsp::CodeAction,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum Operation {
+ Buffer(text::Operation),
+
+ UpdateDiagnostics {
+ server_id: LanguageServerId,
+ diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
+ lamport_timestamp: clock::Lamport,
+ },
+
+ UpdateSelections {
+ selections: Arc<[Selection<Anchor>]>,
+ lamport_timestamp: clock::Lamport,
+ line_mode: bool,
+ cursor_shape: CursorShape,
+ },
+
+ UpdateCompletionTriggers {
+ triggers: Vec<String>,
+ lamport_timestamp: clock::Lamport,
+ },
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum Event {
+ Operation(Operation),
+ Edited,
+ DirtyChanged,
+ Saved,
+ FileHandleChanged,
+ Reloaded,
+ DiffBaseChanged,
+ LanguageChanged,
+ Reparsed,
+ DiagnosticsUpdated,
+ Closed,
+}
+
+pub trait File: Send + Sync {
+ fn as_local(&self) -> Option<&dyn LocalFile>;
+
+ fn is_local(&self) -> bool {
+ self.as_local().is_some()
+ }
+
+ fn mtime(&self) -> SystemTime;
+
+ /// Returns the path of this file relative to the worktree's root directory.
+ fn path(&self) -> &Arc<Path>;
+
+ /// Returns the path of this file relative to the worktree's parent directory (this means it
+ /// includes the name of the worktree's root folder).
+ fn full_path(&self, cx: &AppContext) -> PathBuf;
+
+ /// Returns the last component of this handle's absolute path. If this handle refers to the root
+ /// of its worktree, then this method will return the name of the worktree itself.
+ fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
+
+ /// Returns the id of the worktree to which this file belongs.
+ ///
+ /// This is needed for looking up project-specific settings.
+ fn worktree_id(&self) -> usize;
+
+ fn is_deleted(&self) -> bool;
+
+ fn as_any(&self) -> &dyn Any;
+
+ fn to_proto(&self) -> rpc::proto::File;
+}
+
+pub trait LocalFile: File {
+ /// Returns the absolute path of this file.
+ fn abs_path(&self, cx: &AppContext) -> PathBuf;
+
+ fn load(&self, cx: &AppContext) -> Task<Result<String>>;
+
+ fn buffer_reloaded(
+ &self,
+ buffer_id: u64,
+ version: &clock::Global,
+ fingerprint: RopeFingerprint,
+ line_ending: LineEnding,
+ mtime: SystemTime,
+ cx: &mut AppContext,
+ );
+}
+
+#[derive(Clone, Debug)]
+pub enum AutoindentMode {
+ /// Indent each line of inserted text.
+ EachLine,
+ /// Apply the same indentation adjustment to all of the lines
+ /// in a given insertion.
+ Block {
+ /// The original indentation level of the first line of each
+ /// insertion, if it has been copied.
+ original_indent_columns: Vec<u32>,
+ },
+}
+
+#[derive(Clone)]
+struct AutoindentRequest {
+ before_edit: BufferSnapshot,
+ entries: Vec<AutoindentRequestEntry>,
+ is_block_mode: bool,
+}
+
+#[derive(Clone)]
+struct AutoindentRequestEntry {
+ /// A range of the buffer whose indentation should be adjusted.
+ range: Range<Anchor>,
+ /// Whether or not these lines should be considered brand new, for the
+ /// purpose of auto-indent. When text is not new, its indentation will
+ /// only be adjusted if the suggested indentation level has *changed*
+ /// since the edit was made.
+ first_line_is_new: bool,
+ indent_size: IndentSize,
+ original_indent_column: Option<u32>,
+}
+
+#[derive(Debug)]
+struct IndentSuggestion {
+ basis_row: u32,
+ delta: Ordering,
+ within_error: bool,
+}
+
+struct BufferChunkHighlights<'a> {
+ captures: SyntaxMapCaptures<'a>,
+ next_capture: Option<SyntaxMapCapture<'a>>,
+ stack: Vec<(usize, HighlightId)>,
+ highlight_maps: Vec<HighlightMap>,
+}
+
+pub struct BufferChunks<'a> {
+ range: Range<usize>,
+ chunks: text::Chunks<'a>,
+ diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
+ error_depth: usize,
+ warning_depth: usize,
+ information_depth: usize,
+ hint_depth: usize,
+ unnecessary_depth: usize,
+ highlights: Option<BufferChunkHighlights<'a>>,
+}
+
+#[derive(Clone, Copy, Debug, Default)]
+pub struct Chunk<'a> {
+ pub text: &'a str,
+ pub syntax_highlight_id: Option<HighlightId>,
+ pub highlight_style: Option<HighlightStyle>,
+ pub diagnostic_severity: Option<DiagnosticSeverity>,
+ pub is_unnecessary: bool,
+ pub is_tab: bool,
+}
+
+pub struct Diff {
+ pub(crate) base_version: clock::Global,
+ line_ending: LineEnding,
+ edits: Vec<(Range<usize>, Arc<str>)>,
+}
+
+#[derive(Clone, Copy)]
+pub(crate) struct DiagnosticEndpoint {
+ offset: usize,
+ is_start: bool,
+ severity: DiagnosticSeverity,
+ is_unnecessary: bool,
+}
+
+#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
+pub enum CharKind {
+ Punctuation,
+ Whitespace,
+ Word,
+}
+
+impl CharKind {
+ pub fn coerce_punctuation(self, treat_punctuation_as_word: bool) -> Self {
+ if treat_punctuation_as_word && self == CharKind::Punctuation {
+ CharKind::Word
+ } else {
+ self
+ }
+ }
+}
+
+impl Buffer {
+ pub fn new<T: Into<String>>(replica_id: ReplicaId, id: u64, base_text: T) -> Self {
+ Self::build(
+ TextBuffer::new(replica_id, id, base_text.into()),
+ None,
+ None,
+ )
+ }
+
+ pub fn remote(remote_id: u64, replica_id: ReplicaId, base_text: String) -> Self {
+ Self::build(
+ TextBuffer::new(replica_id, remote_id, base_text),
+ None,
+ None,
+ )
+ }
+
+ pub fn from_proto(
+ replica_id: ReplicaId,
+ message: proto::BufferState,
+ file: Option<Arc<dyn File>>,
+ ) -> Result<Self> {
+ let buffer = TextBuffer::new(replica_id, message.id, message.base_text);
+ let mut this = Self::build(
+ buffer,
+ message.diff_base.map(|text| text.into_boxed_str().into()),
+ file,
+ );
+ this.text.set_line_ending(proto::deserialize_line_ending(
+ rpc::proto::LineEnding::from_i32(message.line_ending)
+ .ok_or_else(|| anyhow!("missing line_ending"))?,
+ ));
+ this.saved_version = proto::deserialize_version(&message.saved_version);
+ this.saved_version_fingerprint =
+ proto::deserialize_fingerprint(&message.saved_version_fingerprint)?;
+ this.saved_mtime = message
+ .saved_mtime
+ .ok_or_else(|| anyhow!("invalid saved_mtime"))?
+ .into();
+ Ok(this)
+ }
+
+ pub fn to_proto(&self) -> proto::BufferState {
+ proto::BufferState {
+ id: self.remote_id(),
+ file: self.file.as_ref().map(|f| f.to_proto()),
+ base_text: self.base_text().to_string(),
+ diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
+ line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
+ saved_version: proto::serialize_version(&self.saved_version),
+ saved_version_fingerprint: proto::serialize_fingerprint(self.saved_version_fingerprint),
+ saved_mtime: Some(self.saved_mtime.into()),
+ }
+ }
+
+ pub fn serialize_ops(
+ &self,
+ since: Option<clock::Global>,
+ cx: &AppContext,
+ ) -> Task<Vec<proto::Operation>> {
+ let mut operations = Vec::new();
+ operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
+
+ operations.extend(self.remote_selections.iter().map(|(_, set)| {
+ proto::serialize_operation(&Operation::UpdateSelections {
+ selections: set.selections.clone(),
+ lamport_timestamp: set.lamport_timestamp,
+ line_mode: set.line_mode,
+ cursor_shape: set.cursor_shape,
+ })
+ }));
+
+ for (server_id, diagnostics) in &self.diagnostics {
+ operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
+ lamport_timestamp: self.diagnostics_timestamp,
+ server_id: *server_id,
+ diagnostics: diagnostics.iter().cloned().collect(),
+ }));
+ }
+
+ operations.push(proto::serialize_operation(
+ &Operation::UpdateCompletionTriggers {
+ triggers: self.completion_triggers.clone(),
+ lamport_timestamp: self.completion_triggers_timestamp,
+ },
+ ));
+
+ let text_operations = self.text.operations().clone();
+ cx.spawn(|_| async move {
+ let since = since.unwrap_or_default();
+ operations.extend(
+ text_operations
+ .iter()
+ .filter(|(_, op)| !since.observed(op.timestamp()))
+ .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
+ );
+ operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
+ operations
+ })
+ }
+
+ pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
+ self.set_language(Some(language), cx);
+ self
+ }
+
+ pub fn build(
+ buffer: TextBuffer,
+ diff_base: Option<String>,
+ file: Option<Arc<dyn File>>,
+ ) -> Self {
+ let saved_mtime = if let Some(file) = file.as_ref() {
+ file.mtime()
+ } else {
+ UNIX_EPOCH
+ };
+
+ Self {
+ saved_mtime,
+ saved_version: buffer.version(),
+ saved_version_fingerprint: buffer.as_rope().fingerprint(),
+ transaction_depth: 0,
+ was_dirty_before_starting_transaction: None,
+ text: buffer,
+ diff_base,
+ git_diff: git::diff::BufferDiff::new(),
+ file,
+ syntax_map: Mutex::new(SyntaxMap::new()),
+ parsing_in_background: false,
+ parse_count: 0,
+ sync_parse_timeout: Duration::from_millis(1),
+ autoindent_requests: Default::default(),
+ pending_autoindent: Default::default(),
+ language: None,
+ remote_selections: Default::default(),
+ selections_update_count: 0,
+ diagnostics: Default::default(),
+ diagnostics_update_count: 0,
+ diagnostics_timestamp: Default::default(),
+ file_update_count: 0,
+ git_diff_update_count: 0,
+ completion_triggers: Default::default(),
+ completion_triggers_timestamp: Default::default(),
+ deferred_ops: OperationQueue::new(),
+ }
+ }
+
+ pub fn snapshot(&self) -> BufferSnapshot {
+ let text = self.text.snapshot();
+ let mut syntax_map = self.syntax_map.lock();
+ syntax_map.interpolate(&text);
+ let syntax = syntax_map.snapshot();
+
+ BufferSnapshot {
+ text,
+ syntax,
+ git_diff: self.git_diff.clone(),
+ file: self.file.clone(),
+ remote_selections: self.remote_selections.clone(),
+ diagnostics: self.diagnostics.clone(),
+ diagnostics_update_count: self.diagnostics_update_count,
+ file_update_count: self.file_update_count,
+ git_diff_update_count: self.git_diff_update_count,
+ language: self.language.clone(),
+ parse_count: self.parse_count,
+ selections_update_count: self.selections_update_count,
+ }
+ }
+
+ pub fn as_text_snapshot(&self) -> &text::BufferSnapshot {
+ &self.text
+ }
+
+ pub fn text_snapshot(&self) -> text::BufferSnapshot {
+ self.text.snapshot()
+ }
+
+ pub fn file(&self) -> Option<&Arc<dyn File>> {
+ self.file.as_ref()
+ }
+
+ pub fn saved_version(&self) -> &clock::Global {
+ &self.saved_version
+ }
+
+ pub fn saved_version_fingerprint(&self) -> RopeFingerprint {
+ self.saved_version_fingerprint
+ }
+
+ pub fn saved_mtime(&self) -> SystemTime {
+ self.saved_mtime
+ }
+
+ pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
+ self.syntax_map.lock().clear();
+ self.language = language;
+ self.reparse(cx);
+ cx.emit(Event::LanguageChanged);
+ }
+
+ pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
+ self.syntax_map
+ .lock()
+ .set_language_registry(language_registry);
+ }
+
+ pub fn did_save(
+ &mut self,
+ version: clock::Global,
+ fingerprint: RopeFingerprint,
+ mtime: SystemTime,
+ cx: &mut ModelContext<Self>,
+ ) {
+ self.saved_version = version;
+ self.saved_version_fingerprint = fingerprint;
+ self.saved_mtime = mtime;
+ cx.emit(Event::Saved);
+ cx.notify();
+ }
+
+ pub fn reload(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<Option<Transaction>>> {
+ cx.spawn(|this, mut cx| async move {
+ if let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
+ let file = this.file.as_ref()?.as_local()?;
+ Some((file.mtime(), file.load(cx)))
+ })? {
+ let new_text = new_text.await?;
+ let diff = this
+ .update(&mut cx, |this, cx| this.diff(new_text, cx))?
+ .await;
+ this.update(&mut cx, |this, cx| {
+ if this.version() == diff.base_version {
+ this.finalize_last_transaction();
+ this.apply_diff(diff, cx);
+ if let Some(transaction) = this.finalize_last_transaction().cloned() {
+ this.did_reload(
+ this.version(),
+ this.as_rope().fingerprint(),
+ this.line_ending(),
+ new_mtime,
+ cx,
+ );
+ return Some(transaction);
+ }
+ }
+ None
+ })
+ } else {
+ Ok(None)
+ }
+ })
+ }
+
+ pub fn did_reload(
+ &mut self,
+ version: clock::Global,
+ fingerprint: RopeFingerprint,
+ line_ending: LineEnding,
+ mtime: SystemTime,
+ cx: &mut ModelContext<Self>,
+ ) {
+ self.saved_version = version;
+ self.saved_version_fingerprint = fingerprint;
+ self.text.set_line_ending(line_ending);
+ self.saved_mtime = mtime;
+ if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
+ file.buffer_reloaded(
+ self.remote_id(),
+ &self.saved_version,
+ self.saved_version_fingerprint,
+ self.line_ending(),
+ self.saved_mtime,
+ cx,
+ );
+ }
+ cx.emit(Event::Reloaded);
+ cx.notify();
+ }
+
+ pub fn file_updated(
+ &mut self,
+ new_file: Arc<dyn File>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<()> {
+ let mut file_changed = false;
+ let mut task = Task::ready(());
+
+ if let Some(old_file) = self.file.as_ref() {
+ if new_file.path() != old_file.path() {
+ file_changed = true;
+ }
+
+ if new_file.is_deleted() {
+ if !old_file.is_deleted() {
+ file_changed = true;
+ if !self.is_dirty() {
+ cx.emit(Event::DirtyChanged);
+ }
+ }
+ } else {
+ let new_mtime = new_file.mtime();
+ if new_mtime != old_file.mtime() {
+ file_changed = true;
+
+ if !self.is_dirty() {
+ let reload = self.reload(cx).log_err().map(drop);
+ task = cx.executor().spawn(reload);
+ }
+ }
+ }
+ } else {
+ file_changed = true;
+ };
+
+ self.file = Some(new_file);
+ if file_changed {
+ self.file_update_count += 1;
+ cx.emit(Event::FileHandleChanged);
+ cx.notify();
+ }
+ task
+ }
+
+ pub fn diff_base(&self) -> Option<&str> {
+ self.diff_base.as_deref()
+ }
+
+ pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &mut ModelContext<Self>) {
+ self.diff_base = diff_base;
+ self.git_diff_recalc(cx);
+ cx.emit(Event::DiffBaseChanged);
+ }
+
+ pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) -> Option<Task<()>> {
+ let diff_base = self.diff_base.clone()?; // TODO: Make this an Arc
+ let snapshot = self.snapshot();
+
+ let mut diff = self.git_diff.clone();
+ let diff = cx.executor().spawn(async move {
+ diff.update(&diff_base, &snapshot).await;
+ diff
+ });
+
+ Some(cx.spawn(|this, mut cx| async move {
+ let buffer_diff = diff.await;
+ this.update(&mut cx, |this, _| {
+ this.git_diff = buffer_diff;
+ this.git_diff_update_count += 1;
+ })
+ .ok();
+ }))
+ }
+
+ pub fn close(&mut self, cx: &mut ModelContext<Self>) {
+ cx.emit(Event::Closed);
+ }
+
+ pub fn language(&self) -> Option<&Arc<Language>> {
+ self.language.as_ref()
+ }
+
+ pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
+ let offset = position.to_offset(self);
+ self.syntax_map
+ .lock()
+ .layers_for_range(offset..offset, &self.text)
+ .last()
+ .map(|info| info.language.clone())
+ .or_else(|| self.language.clone())
+ }
+
+ pub fn parse_count(&self) -> usize {
+ self.parse_count
+ }
+
+ pub fn selections_update_count(&self) -> usize {
+ self.selections_update_count
+ }
+
+ pub fn diagnostics_update_count(&self) -> usize {
+ self.diagnostics_update_count
+ }
+
+ pub fn file_update_count(&self) -> usize {
+ self.file_update_count
+ }
+
+ pub fn git_diff_update_count(&self) -> usize {
+ self.git_diff_update_count
+ }
+
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn is_parsing(&self) -> bool {
+ self.parsing_in_background
+ }
+
+ pub fn contains_unknown_injections(&self) -> bool {
+ self.syntax_map.lock().contains_unknown_injections()
+ }
+
+ #[cfg(test)]
+ pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
+ self.sync_parse_timeout = timeout;
+ }
+
+ /// Called after an edit to synchronize the buffer's main parse tree with
+ /// the buffer's new underlying state.
+ ///
+ /// Locks the syntax map and interpolates the edits since the last reparse
+ /// into the foreground syntax tree.
+ ///
+ /// Then takes a stable snapshot of the syntax map before unlocking it.
+ /// The snapshot with the interpolated edits is sent to a background thread,
+ /// where we ask Tree-sitter to perform an incremental parse.
+ ///
+ /// Meanwhile, in the foreground, we block the main thread for up to 1ms
+ /// (the sync parse timeout) waiting for the parse to complete. If it
+ /// finishes within that window, we proceed synchronously.
+ ///
+ /// If we time out waiting for the parse, we return with the interpolated
+ /// tree still in the foreground and spawn a second task that waits for the
+ /// background parse to finish. When it does, we call back into the main
+ /// thread and assign the result to the foreground parse state.
+ ///
+ /// If the buffer or grammar changed since the start of the background parse,
+ /// we recursively initiate an additional reparse. To avoid concurrent parses
+ /// for the same buffer, we only start a new parse if we are not already
+ /// parsing in the background.
+ pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
+ if self.parsing_in_background {
+ return;
+ }
+ let language = if let Some(language) = self.language.clone() {
+ language
+ } else {
+ return;
+ };
+
+ let text = self.text_snapshot();
+ let parsed_version = self.version();
+
+ let mut syntax_map = self.syntax_map.lock();
+ syntax_map.interpolate(&text);
+ let language_registry = syntax_map.language_registry();
+ let mut syntax_snapshot = syntax_map.snapshot();
+ drop(syntax_map);
+
+ let parse_task = cx.executor().spawn({
+ let language = language.clone();
+ let language_registry = language_registry.clone();
+ async move {
+ syntax_snapshot.reparse(&text, language_registry, language);
+ syntax_snapshot
+ }
+ });
+
+ match cx
+ .executor()
+ .block_with_timeout(self.sync_parse_timeout, parse_task)
+ {
+ Ok(new_syntax_snapshot) => {
+ self.did_finish_parsing(new_syntax_snapshot, cx);
+ return;
+ }
+ Err(parse_task) => {
+ self.parsing_in_background = true;
+ cx.spawn(move |this, mut cx| async move {
+ let new_syntax_map = parse_task.await;
+ this.update(&mut cx, move |this, cx| {
+ let grammar_changed =
+ this.language.as_ref().map_or(true, |current_language| {
+ !Arc::ptr_eq(&language, current_language)
+ });
+ let language_registry_changed = new_syntax_map
+ .contains_unknown_injections()
+ && language_registry.map_or(false, |registry| {
+ registry.version() != new_syntax_map.language_registry_version()
+ });
+ let parse_again = language_registry_changed
+ || grammar_changed
+ || this.version.changed_since(&parsed_version);
+ this.did_finish_parsing(new_syntax_map, cx);
+ this.parsing_in_background = false;
+ if parse_again {
+ this.reparse(cx);
+ }
+ });
+ })
+ .detach();
+ }
+ }
+ }
+
+ fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
+ self.parse_count += 1;
+ self.syntax_map.lock().did_parse(syntax_snapshot);
+ self.request_autoindent(cx);
+ cx.emit(Event::Reparsed);
+ cx.notify();
+ }
+
+ pub fn update_diagnostics(
+ &mut self,
+ server_id: LanguageServerId,
+ diagnostics: DiagnosticSet,
+ cx: &mut ModelContext<Self>,
+ ) {
+ let lamport_timestamp = self.text.lamport_clock.tick();
+ let op = Operation::UpdateDiagnostics {
+ server_id,
+ diagnostics: diagnostics.iter().cloned().collect(),
+ lamport_timestamp,
+ };
+ self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
+ self.send_operation(op, cx);
+ }
+
+ fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
+ if let Some(indent_sizes) = self.compute_autoindents() {
+ let indent_sizes = cx.executor().spawn(indent_sizes);
+ match cx
+ .executor()
+ .block_with_timeout(Duration::from_micros(500), indent_sizes)
+ {
+ Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
+ Err(indent_sizes) => {
+ self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
+ let indent_sizes = indent_sizes.await;
+ this.update(&mut cx, |this, cx| {
+ this.apply_autoindents(indent_sizes, cx);
+ });
+ }));
+ }
+ }
+ } else {
+ self.autoindent_requests.clear();
+ }
+ }
+
+ fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
+ let max_rows_between_yields = 100;
+ let snapshot = self.snapshot();
+ if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
+ return None;
+ }
+
+ let autoindent_requests = self.autoindent_requests.clone();
+ Some(async move {
+ let mut indent_sizes = BTreeMap::new();
+ for request in autoindent_requests {
+ // Resolve each edited range to its row in the current buffer and in the
+ // buffer before this batch of edits.
+ let mut row_ranges = Vec::new();
+ let mut old_to_new_rows = BTreeMap::new();
+ let mut language_indent_sizes_by_new_row = Vec::new();
+ for entry in &request.entries {
+ let position = entry.range.start;
+ let new_row = position.to_point(&snapshot).row;
+ let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
+ language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
+
+ if !entry.first_line_is_new {
+ let old_row = position.to_point(&request.before_edit).row;
+ old_to_new_rows.insert(old_row, new_row);
+ }
+ row_ranges.push((new_row..new_end_row, entry.original_indent_column));
+ }
+
+ // Build a map containing the suggested indentation for each of the edited lines
+ // with respect to the state of the buffer before these edits. This map is keyed
+ // by the rows for these lines in the current state of the buffer.
+ let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
+ let old_edited_ranges =
+ contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
+ let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
+ let mut language_indent_size = IndentSize::default();
+ for old_edited_range in old_edited_ranges {
+ let suggestions = request
+ .before_edit
+ .suggest_autoindents(old_edited_range.clone())
+ .into_iter()
+ .flatten();
+ for (old_row, suggestion) in old_edited_range.zip(suggestions) {
+ if let Some(suggestion) = suggestion {
+ let new_row = *old_to_new_rows.get(&old_row).unwrap();
+
+ // Find the indent size based on the language for this row.
+ while let Some((row, size)) = language_indent_sizes.peek() {
+ if *row > new_row {
+ break;
+ }
+ language_indent_size = *size;
+ language_indent_sizes.next();
+ }
+
+ let suggested_indent = old_to_new_rows
+ .get(&suggestion.basis_row)
+ .and_then(|from_row| {
+ Some(old_suggestions.get(from_row).copied()?.0)
+ })
+ .unwrap_or_else(|| {
+ request
+ .before_edit
+ .indent_size_for_line(suggestion.basis_row)
+ })
+ .with_delta(suggestion.delta, language_indent_size);
+ old_suggestions
+ .insert(new_row, (suggested_indent, suggestion.within_error));
+ }
+ }
+ yield_now().await;
+ }
+
+ // In block mode, only compute indentation suggestions for the first line
+ // of each insertion. Otherwise, compute suggestions for every inserted line.
+ let new_edited_row_ranges = contiguous_ranges(
+ row_ranges.iter().flat_map(|(range, _)| {
+ if request.is_block_mode {
+ range.start..range.start + 1
+ } else {
+ range.clone()
+ }
+ }),
+ max_rows_between_yields,
+ );
+
+ // Compute new suggestions for each line, but only include them in the result
+ // if they differ from the old suggestion for that line.
+ let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
+ let mut language_indent_size = IndentSize::default();
+ for new_edited_row_range in new_edited_row_ranges {
+ let suggestions = snapshot
+ .suggest_autoindents(new_edited_row_range.clone())
+ .into_iter()
+ .flatten();
+ for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
+ if let Some(suggestion) = suggestion {
+ // Find the indent size based on the language for this row.
+ while let Some((row, size)) = language_indent_sizes.peek() {
+ if *row > new_row {
+ break;
+ }
+ language_indent_size = *size;
+ language_indent_sizes.next();
+ }
+
+ let suggested_indent = indent_sizes
+ .get(&suggestion.basis_row)
+ .copied()
+ .unwrap_or_else(|| {
+ snapshot.indent_size_for_line(suggestion.basis_row)
+ })
+ .with_delta(suggestion.delta, language_indent_size);
+ if old_suggestions.get(&new_row).map_or(
+ true,
+ |(old_indentation, was_within_error)| {
+ suggested_indent != *old_indentation
+ && (!suggestion.within_error || *was_within_error)
+ },
+ ) {
+ indent_sizes.insert(new_row, suggested_indent);
+ }
+ }
+ }
+ yield_now().await;
+ }
+
+ // For each block of inserted text, adjust the indentation of the remaining
+ // lines of the block by the same amount as the first line was adjusted.
+ if request.is_block_mode {
+ for (row_range, original_indent_column) in
+ row_ranges
+ .into_iter()
+ .filter_map(|(range, original_indent_column)| {
+ if range.len() > 1 {
+ Some((range, original_indent_column?))
+ } else {
+ None
+ }
+ })
+ {
+ let new_indent = indent_sizes
+ .get(&row_range.start)
+ .copied()
+ .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
+ let delta = new_indent.len as i64 - original_indent_column as i64;
+ if delta != 0 {
+ for row in row_range.skip(1) {
+ indent_sizes.entry(row).or_insert_with(|| {
+ let mut size = snapshot.indent_size_for_line(row);
+ if size.kind == new_indent.kind {
+ match delta.cmp(&0) {
+ Ordering::Greater => size.len += delta as u32,
+ Ordering::Less => {
+ size.len = size.len.saturating_sub(-delta as u32)
+ }
+ Ordering::Equal => {}
+ }
+ }
+ size
+ });
+ }
+ }
+ }
+ }
+ }
+
+ indent_sizes
+ })
+ }
+
+ fn apply_autoindents(
+ &mut self,
+ indent_sizes: BTreeMap<u32, IndentSize>,
+ cx: &mut ModelContext<Self>,
+ ) {
+ self.autoindent_requests.clear();
+
+ let edits: Vec<_> = indent_sizes
+ .into_iter()
+ .filter_map(|(row, indent_size)| {
+ let current_size = indent_size_for_line(self, row);
+ Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
+ })
+ .collect();
+
+ self.edit(edits, None, cx);
+ }
+
+ // Create a minimal edit that will cause the given row to be indented
+ // with the given size. After applying this edit, the length of the line
+ // will always be at least `new_size.len`.
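+ // For example, going from a 2-space to a 4-space indent inserts two spaces
+ // at column 0, while going from a 4-space indent to a tab replaces the four
+ // leading spaces with a tab character.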
+ pub fn edit_for_indent_size_adjustment(
+ row: u32,
+ current_size: IndentSize,
+ new_size: IndentSize,
+ ) -> Option<(Range<Point>, String)> {
+ if new_size.kind != current_size.kind {
+ Some((
+ Point::new(row, 0)..Point::new(row, current_size.len),
+ iter::repeat(new_size.char())
+ .take(new_size.len as usize)
+ .collect::<String>(),
+ ))
+ } else {
+ match new_size.len.cmp(&current_size.len) {
+ Ordering::Greater => {
+ let point = Point::new(row, 0);
+ Some((
+ point..point,
+ iter::repeat(new_size.char())
+ .take((new_size.len - current_size.len) as usize)
+ .collect::<String>(),
+ ))
+ }
+
+ Ordering::Less => Some((
+ Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
+ String::new(),
+ )),
+
+ Ordering::Equal => None,
+ }
+ }
+ }
+
+ pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
+ let old_text = self.as_rope().clone();
+ let base_version = self.version();
+ cx.executor().spawn(async move {
+ let old_text = old_text.to_string();
+ let line_ending = LineEnding::detect(&new_text);
+ LineEnding::normalize(&mut new_text);
+ let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
+ let mut edits = Vec::new();
+ let mut offset = 0;
+ let empty: Arc<str> = "".into();
+ for change in diff.iter_all_changes() {
+ let value = change.value();
+ let end_offset = offset + value.len();
+ match change.tag() {
+ ChangeTag::Equal => {
+ offset = end_offset;
+ }
+ ChangeTag::Delete => {
+ edits.push((offset..end_offset, empty.clone()));
+ offset = end_offset;
+ }
+ ChangeTag::Insert => {
+ edits.push((offset..offset, value.into()));
+ }
+ }
+ }
+ Diff {
+ base_version,
+ line_ending,
+ edits,
+ }
+ })
+ }
+
+ /// Spawn a background task that searches the buffer for any whitespace
+ /// at the ends of lines, and returns a `Diff` that removes that whitespace.
+ pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
+ let old_text = self.as_rope().clone();
+ let line_ending = self.line_ending();
+ let base_version = self.version();
+ cx.executor().spawn(async move {
+ let ranges = trailing_whitespace_ranges(&old_text);
+ let empty = Arc::<str>::from("");
+ Diff {
+ base_version,
+ line_ending,
+ edits: ranges
+ .into_iter()
+ .map(|range| (range, empty.clone()))
+ .collect(),
+ }
+ })
+ }
+
+ /// Ensure that the buffer ends with a single newline character, and
+ /// no other whitespace.
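+ ///
+ /// For example, a buffer ending in "}  \n\n" is edited to end in "}\n".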
+ pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
+ let len = self.len();
+ let mut offset = len;
+ for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
+ let non_whitespace_len = chunk
+ .trim_end_matches(|c: char| c.is_ascii_whitespace())
+ .len();
+ offset -= chunk.len();
+ offset += non_whitespace_len;
+ if non_whitespace_len != 0 {
+ if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
+ return;
+ }
+ break;
+ }
+ }
+ self.edit([(offset..len, "\n")], None, cx);
+ }
+
+ /// Apply a diff to the buffer. If the buffer has changed since the given diff was
+ /// calculated, then adjust the diff to account for those changes, and discard any
+ /// parts of the diff that conflict with those changes.
+ pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
+ // Check for any edits to the buffer that have occurred since this diff
+ // was computed.
+ let snapshot = self.snapshot();
+ let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
+ let mut delta = 0;
+ let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
+ while let Some(edit_since) = edits_since.peek() {
+ // If the edit occurs after a diff hunk, then it does not
+ // affect that hunk.
+ if edit_since.old.start > range.end {
+ break;
+ }
+ // If the edit precedes the diff hunk, then adjust the hunk
+ // to reflect the edit.
+ else if edit_since.old.end < range.start {
+ delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
+ edits_since.next();
+ }
+ // If the edit intersects a diff hunk, then discard that hunk.
+ else {
+ return None;
+ }
+ }
+
+ let start = (range.start as i64 + delta) as usize;
+ let end = (range.end as i64 + delta) as usize;
+ Some((start..end, new_text))
+ });
+
+ self.start_transaction();
+ self.text.set_line_ending(diff.line_ending);
+ self.edit(adjusted_edits, None, cx);
+ self.end_transaction(cx)
+ }
+
+ pub fn is_dirty(&self) -> bool {
+ self.saved_version_fingerprint != self.as_rope().fingerprint()
+ || self.file.as_ref().map_or(false, |file| file.is_deleted())
+ }
+
+ pub fn has_conflict(&self) -> bool {
+ self.saved_version_fingerprint != self.as_rope().fingerprint()
+ && self
+ .file
+ .as_ref()
+ .map_or(false, |file| file.mtime() > self.saved_mtime)
+ }
+
+ pub fn subscribe(&mut self) -> Subscription {
+ self.text.subscribe()
+ }
+
+ pub fn start_transaction(&mut self) -> Option<TransactionId> {
+ self.start_transaction_at(Instant::now())
+ }
+
+ pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
+ self.transaction_depth += 1;
+ if self.was_dirty_before_starting_transaction.is_none() {
+ self.was_dirty_before_starting_transaction = Some(self.is_dirty());
+ }
+ self.text.start_transaction_at(now)
+ }
+
+ pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
+ self.end_transaction_at(Instant::now(), cx)
+ }
+
+ pub fn end_transaction_at(
+ &mut self,
+ now: Instant,
+ cx: &mut ModelContext<Self>,
+ ) -> Option<TransactionId> {
+ assert!(self.transaction_depth > 0);
+ self.transaction_depth -= 1;
+ let was_dirty = if self.transaction_depth == 0 {
+ self.was_dirty_before_starting_transaction.take().unwrap()
+ } else {
+ false
+ };
+ if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
+ self.did_edit(&start_version, was_dirty, cx);
+ Some(transaction_id)
+ } else {
+ None
+ }
+ }
+
+ pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
+ self.text.push_transaction(transaction, now);
+ }
+
+ pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
+ self.text.finalize_last_transaction()
+ }
+
+ pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
+ self.text.group_until_transaction(transaction_id);
+ }
+
+ pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
+ self.text.forget_transaction(transaction_id);
+ }
+
+ pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
+ self.text.merge_transactions(transaction, destination);
+ }
+
+ pub fn wait_for_edits(
+ &mut self,
+ edit_ids: impl IntoIterator<Item = clock::Lamport>,
+ ) -> impl Future<Output = Result<()>> {
+ self.text.wait_for_edits(edit_ids)
+ }
+
+ pub fn wait_for_anchors(
+ &mut self,
+ anchors: impl IntoIterator<Item = Anchor>,
+ ) -> impl 'static + Future<Output = Result<()>> {
+ self.text.wait_for_anchors(anchors)
+ }
+
+ pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
+ self.text.wait_for_version(version)
+ }
+
+ pub fn give_up_waiting(&mut self) {
+ self.text.give_up_waiting();
+ }
+
+ pub fn set_active_selections(
+ &mut self,
+ selections: Arc<[Selection<Anchor>]>,
+ line_mode: bool,
+ cursor_shape: CursorShape,
+ cx: &mut ModelContext<Self>,
+ ) {
+ let lamport_timestamp = self.text.lamport_clock.tick();
+ self.remote_selections.insert(
+ self.text.replica_id(),
+ SelectionSet {
+ selections: selections.clone(),
+ lamport_timestamp,
+ line_mode,
+ cursor_shape,
+ },
+ );
+ self.send_operation(
+ Operation::UpdateSelections {
+ selections,
+ line_mode,
+ lamport_timestamp,
+ cursor_shape,
+ },
+ cx,
+ );
+ }
+
+ pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
+ if self
+ .remote_selections
+ .get(&self.text.replica_id())
+ .map_or(true, |set| !set.selections.is_empty())
+ {
+ self.set_active_selections(Arc::from([]), false, Default::default(), cx);
+ }
+ }
+
+ pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Lamport>
+ where
+ T: Into<Arc<str>>,
+ {
+ self.autoindent_requests.clear();
+ self.edit([(0..self.len(), text)], None, cx)
+ }
+
+ pub fn edit<I, S, T>(
+ &mut self,
+ edits_iter: I,
+ autoindent_mode: Option<AutoindentMode>,
+ cx: &mut ModelContext<Self>,
+ ) -> Option<clock::Lamport>
+ where
+ I: IntoIterator<Item = (Range<S>, T)>,
+ S: ToOffset,
+ T: Into<Arc<str>>,
+ {
+ // Skip invalid edits and coalesce contiguous ones.
+ let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
+ for (range, new_text) in edits_iter {
+ let mut range = range.start.to_offset(self)..range.end.to_offset(self);
+ if range.start > range.end {
+ mem::swap(&mut range.start, &mut range.end);
+ }
+ let new_text = new_text.into();
+ if !new_text.is_empty() || !range.is_empty() {
+ if let Some((prev_range, prev_text)) = edits.last_mut() {
+ if prev_range.end >= range.start {
+ prev_range.end = cmp::max(prev_range.end, range.end);
+ *prev_text = format!("{prev_text}{new_text}").into();
+ } else {
+ edits.push((range, new_text));
+ }
+ } else {
+ edits.push((range, new_text));
+ }
+ }
+ }
+ if edits.is_empty() {
+ return None;
+ }
+
+ self.start_transaction();
+ self.pending_autoindent.take();
+ let autoindent_request = autoindent_mode
+ .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
+
+ let edit_operation = self.text.edit(edits.iter().cloned());
+ let edit_id = edit_operation.timestamp();
+
+ if let Some((before_edit, mode)) = autoindent_request {
+ let mut delta = 0isize;
+ let entries = edits
+ .into_iter()
+ .enumerate()
+ .zip(&edit_operation.as_edit().unwrap().new_text)
+ .map(|((ix, (range, _)), new_text)| {
+ let new_text_length = new_text.len();
+ let old_start = range.start.to_point(&before_edit);
+ let new_start = (delta + range.start as isize) as usize;
+ delta += new_text_length as isize - (range.end as isize - range.start as isize);
+
+ let mut range_of_insertion_to_indent = 0..new_text_length;
+ let mut first_line_is_new = false;
+ let mut original_indent_column = None;
+
+ // When inserting an entire line at the beginning of an existing line,
+ // treat the insertion as new.
+ if new_text.contains('\n')
+ && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
+ {
+ first_line_is_new = true;
+ }
+
+ // When inserting text starting with a newline, avoid auto-indenting the
+ // previous line.
+ if new_text.starts_with('\n') {
+ range_of_insertion_to_indent.start += 1;
+ first_line_is_new = true;
+ }
+
+ // Avoid auto-indenting after the insertion.
+ if let AutoindentMode::Block {
+ original_indent_columns,
+ } = &mode
+ {
+ original_indent_column =
+ Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
+ indent_size_for_text(
+ new_text[range_of_insertion_to_indent.clone()].chars(),
+ )
+ .len
+ }));
+ if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
+ range_of_insertion_to_indent.end -= 1;
+ }
+ }
+
+ AutoindentRequestEntry {
+ first_line_is_new,
+ original_indent_column,
+ indent_size: before_edit.language_indent_size_at(range.start, cx),
+ range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
+ ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
+ }
+ })
+ .collect();
+
+ self.autoindent_requests.push(Arc::new(AutoindentRequest {
+ before_edit,
+ entries,
+ is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
+ }));
+ }
+
+ self.end_transaction(cx);
+ self.send_operation(Operation::Buffer(edit_operation), cx);
+ Some(edit_id)
+ }
+
+ fn did_edit(
+ &mut self,
+ old_version: &clock::Global,
+ was_dirty: bool,
+ cx: &mut ModelContext<Self>,
+ ) {
+ if self.edits_since::<usize>(old_version).next().is_none() {
+ return;
+ }
+
+ self.reparse(cx);
+
+ cx.emit(Event::Edited);
+ if was_dirty != self.is_dirty() {
+ cx.emit(Event::DirtyChanged);
+ }
+ cx.notify();
+ }
+
+ pub fn apply_ops<I: IntoIterator<Item = Operation>>(
+ &mut self,
+ ops: I,
+ cx: &mut ModelContext<Self>,
+ ) -> Result<()> {
+ self.pending_autoindent.take();
+ let was_dirty = self.is_dirty();
+ let old_version = self.version.clone();
+ let mut deferred_ops = Vec::new();
+ let buffer_ops = ops
+ .into_iter()
+ .filter_map(|op| match op {
+ Operation::Buffer(op) => Some(op),
+ _ => {
+ if self.can_apply_op(&op) {
+ self.apply_op(op, cx);
+ } else {
+ deferred_ops.push(op);
+ }
+ None
+ }
+ })
+ .collect::<Vec<_>>();
+ self.text.apply_ops(buffer_ops)?;
+ self.deferred_ops.insert(deferred_ops);
+ self.flush_deferred_ops(cx);
+ self.did_edit(&old_version, was_dirty, cx);
+ // Notify independently of whether the buffer was edited as the operations could include a
+ // selection update.
+ cx.notify();
+ Ok(())
+ }
+
+ fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
+ let mut deferred_ops = Vec::new();
+ for op in self.deferred_ops.drain().iter().cloned() {
+ if self.can_apply_op(&op) {
+ self.apply_op(op, cx);
+ } else {
+ deferred_ops.push(op);
+ }
+ }
+ self.deferred_ops.insert(deferred_ops);
+ }
+
+ fn can_apply_op(&self, operation: &Operation) -> bool {
+ match operation {
+ Operation::Buffer(_) => {
+ unreachable!("buffer operations should never be applied at this layer")
+ }
+ Operation::UpdateDiagnostics {
+ diagnostics: diagnostic_set,
+ ..
+ } => diagnostic_set.iter().all(|diagnostic| {
+ self.text.can_resolve(&diagnostic.range.start)
+ && self.text.can_resolve(&diagnostic.range.end)
+ }),
+ Operation::UpdateSelections { selections, .. } => selections
+ .iter()
+ .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
+ Operation::UpdateCompletionTriggers { .. } => true,
+ }
+ }
+
+ fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
+ match operation {
+ Operation::Buffer(_) => {
+ unreachable!("buffer operations should never be applied at this layer")
+ }
+ Operation::UpdateDiagnostics {
+ server_id,
+ diagnostics: diagnostic_set,
+ lamport_timestamp,
+ } => {
+ let snapshot = self.snapshot();
+ self.apply_diagnostic_update(
+ server_id,
+ DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
+ lamport_timestamp,
+ cx,
+ );
+ }
+ Operation::UpdateSelections {
+ selections,
+ lamport_timestamp,
+ line_mode,
+ cursor_shape,
+ } => {
+ if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
+ if set.lamport_timestamp > lamport_timestamp {
+ return;
+ }
+ }
+
+ self.remote_selections.insert(
+ lamport_timestamp.replica_id,
+ SelectionSet {
+ selections,
+ lamport_timestamp,
+ line_mode,
+ cursor_shape,
+ },
+ );
+ self.text.lamport_clock.observe(lamport_timestamp);
+ self.selections_update_count += 1;
+ }
+ Operation::UpdateCompletionTriggers {
+ triggers,
+ lamport_timestamp,
+ } => {
+ self.completion_triggers = triggers;
+ self.text.lamport_clock.observe(lamport_timestamp);
+ }
+ }
+ }
+
+ fn apply_diagnostic_update(
+ &mut self,
+ server_id: LanguageServerId,
+ diagnostics: DiagnosticSet,
+ lamport_timestamp: clock::Lamport,
+ cx: &mut ModelContext<Self>,
+ ) {
+ if lamport_timestamp > self.diagnostics_timestamp {
+ let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
+ if diagnostics.len() == 0 {
+ if let Ok(ix) = ix {
+ self.diagnostics.remove(ix);
+ }
+ } else {
+ match ix {
+ Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
+ Ok(ix) => self.diagnostics[ix].1 = diagnostics,
+ };
+ }
+ self.diagnostics_timestamp = lamport_timestamp;
+ self.diagnostics_update_count += 1;
+ self.text.lamport_clock.observe(lamport_timestamp);
+ cx.notify();
+ cx.emit(Event::DiagnosticsUpdated);
+ }
+ }
+
+ fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
+ cx.emit(Event::Operation(operation));
+ }
+
+ pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
+ self.remote_selections.remove(&replica_id);
+ cx.notify();
+ }
+
+ pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
+ let was_dirty = self.is_dirty();
+ let old_version = self.version.clone();
+
+ if let Some((transaction_id, operation)) = self.text.undo() {
+ self.send_operation(Operation::Buffer(operation), cx);
+ self.did_edit(&old_version, was_dirty, cx);
+ Some(transaction_id)
+ } else {
+ None
+ }
+ }
+
+ pub fn undo_transaction(
+ &mut self,
+ transaction_id: TransactionId,
+ cx: &mut ModelContext<Self>,
+ ) -> bool {
+ let was_dirty = self.is_dirty();
+ let old_version = self.version.clone();
+ if let Some(operation) = self.text.undo_transaction(transaction_id) {
+ self.send_operation(Operation::Buffer(operation), cx);
+ self.did_edit(&old_version, was_dirty, cx);
+ true
+ } else {
+ false
+ }
+ }
+
+ pub fn undo_to_transaction(
+ &mut self,
+ transaction_id: TransactionId,
+ cx: &mut ModelContext<Self>,
+ ) -> bool {
+ let was_dirty = self.is_dirty();
+ let old_version = self.version.clone();
+
+ let operations = self.text.undo_to_transaction(transaction_id);
+ let undone = !operations.is_empty();
+ for operation in operations {
+ self.send_operation(Operation::Buffer(operation), cx);
+ }
+ if undone {
+ self.did_edit(&old_version, was_dirty, cx)
+ }
+ undone
+ }
+
+ pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
+ let was_dirty = self.is_dirty();
+ let old_version = self.version.clone();
+
+ if let Some((transaction_id, operation)) = self.text.redo() {
+ self.send_operation(Operation::Buffer(operation), cx);
+ self.did_edit(&old_version, was_dirty, cx);
+ Some(transaction_id)
+ } else {
+ None
+ }
+ }
+
+ pub fn redo_to_transaction(
+ &mut self,
+ transaction_id: TransactionId,
+ cx: &mut ModelContext<Self>,
+ ) -> bool {
+ let was_dirty = self.is_dirty();
+ let old_version = self.version.clone();
+
+ let operations = self.text.redo_to_transaction(transaction_id);
+ let redone = !operations.is_empty();
+ for operation in operations {
+ self.send_operation(Operation::Buffer(operation), cx);
+ }
+ if redone {
+ self.did_edit(&old_version, was_dirty, cx)
+ }
+ redone
+ }
+
+ pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
+ self.completion_triggers = triggers.clone();
+ self.completion_triggers_timestamp = self.text.lamport_clock.tick();
+ self.send_operation(
+ Operation::UpdateCompletionTriggers {
+ triggers,
+ lamport_timestamp: self.completion_triggers_timestamp,
+ },
+ cx,
+ );
+ cx.notify();
+ }
+
+ pub fn completion_triggers(&self) -> &[String] {
+ &self.completion_triggers
+ }
+}
+
+#[cfg(any(test, feature = "test-support"))]
+impl Buffer {
+ pub fn edit_via_marked_text(
+ &mut self,
+ marked_string: &str,
+ autoindent_mode: Option<AutoindentMode>,
+ cx: &mut ModelContext<Self>,
+ ) {
+ let edits = self.edits_for_marked_text(marked_string);
+ self.edit(edits, autoindent_mode, cx);
+ }
+
+ pub fn set_group_interval(&mut self, group_interval: Duration) {
+ self.text.set_group_interval(group_interval);
+ }
+
+ pub fn randomly_edit<T>(
+ &mut self,
+ rng: &mut T,
+ old_range_count: usize,
+ cx: &mut ModelContext<Self>,
+ ) where
+ T: rand::Rng,
+ {
+ let mut edits: Vec<(Range<usize>, String)> = Vec::new();
+ let mut last_end = None;
+ for _ in 0..old_range_count {
+ if last_end.map_or(false, |last_end| last_end >= self.len()) {
+ break;
+ }
+
+ let new_start = last_end.map_or(0, |last_end| last_end + 1);
+ let mut range = self.random_byte_range(new_start, rng);
+ if rng.gen_bool(0.2) {
+ mem::swap(&mut range.start, &mut range.end);
+ }
+ last_end = Some(range.end);
+
+ let new_text_len = rng.gen_range(0..10);
+ let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
+
+ edits.push((range, new_text));
+ }
+ log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
+ self.edit(edits, None, cx);
+ }
+
+ pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
+ let was_dirty = self.is_dirty();
+ let old_version = self.version.clone();
+
+ let ops = self.text.randomly_undo_redo(rng);
+ if !ops.is_empty() {
+ for op in ops {
+ self.send_operation(Operation::Buffer(op), cx);
+ self.did_edit(&old_version, was_dirty, cx);
+ }
+ }
+ }
+}
+
+impl EventEmitter for Buffer {
+ type Event = Event;
+}
+
+impl Deref for Buffer {
+ type Target = TextBuffer;
+
+ fn deref(&self) -> &Self::Target {
+ &self.text
+ }
+}
+
+impl BufferSnapshot {
+ pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
+ indent_size_for_line(self, row)
+ }
+
+ pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
+ let settings = language_settings(self.language_at(position), self.file(), cx);
+ if settings.hard_tabs {
+ IndentSize::tab()
+ } else {
+ IndentSize::spaces(settings.tab_size.get())
+ }
+ }
+
+ pub fn suggested_indents(
+ &self,
+ rows: impl Iterator<Item = u32>,
+ single_indent_size: IndentSize,
+ ) -> BTreeMap<u32, IndentSize> {
+ let mut result = BTreeMap::new();
+
+ for row_range in contiguous_ranges(rows, 10) {
+ let suggestions = match self.suggest_autoindents(row_range.clone()) {
+ Some(suggestions) => suggestions,
+ _ => break,
+ };
+
+ for (row, suggestion) in row_range.zip(suggestions) {
+ let indent_size = if let Some(suggestion) = suggestion {
+ result
+ .get(&suggestion.basis_row)
+ .copied()
+ .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
+ .with_delta(suggestion.delta, single_indent_size)
+ } else {
+ self.indent_size_for_line(row)
+ };
+
+ result.insert(row, indent_size);
+ }
+ }
+
+ result
+ }
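+
+ // Hedged usage sketch for `suggested_indents` above: given an iterator of
+ // rows and a single indent unit, it returns a BTreeMap from row to the
+ // suggested IndentSize (assuming a `snapshot` whose language supports
+ // autoindent):
+ //
+ //     let suggestions = snapshot.suggested_indents(1..4, IndentSize::spaces(4));
+ //     let row_two_indent = suggestions.get(&2).copied();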
+
+ fn suggest_autoindents(
+ &self,
+ row_range: Range<u32>,
+ ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
+ let config = &self.language.as_ref()?.config;
+ let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
+
+ // Find the suggested indentation ranges based on the syntax tree.
+ let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
+ let end = Point::new(row_range.end, 0);
+ let range = (start..end).to_offset(&self.text);
+ let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
+ Some(&grammar.indents_config.as_ref()?.query)
+ });
+ let indent_configs = matches
+ .grammars()
+ .iter()
+ .map(|grammar| grammar.indents_config.as_ref().unwrap())
+ .collect::<Vec<_>>();
+
+ let mut indent_ranges = Vec::<Range<Point>>::new();
+ let mut outdent_positions = Vec::<Point>::new();
+ while let Some(mat) = matches.peek() {
+ let mut start: Option<Point> = None;
+ let mut end: Option<Point> = None;
+
+ let config = &indent_configs[mat.grammar_index];
+ for capture in mat.captures {
+ if capture.index == config.indent_capture_ix {
+ start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
+ end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
+ } else if Some(capture.index) == config.start_capture_ix {
+ start = Some(Point::from_ts_point(capture.node.end_position()));
+ } else if Some(capture.index) == config.end_capture_ix {
+ end = Some(Point::from_ts_point(capture.node.start_position()));
+ } else if Some(capture.index) == config.outdent_capture_ix {
+ outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
+ }
+ }
+
+ matches.advance();
+ if let Some((start, end)) = start.zip(end) {
+ if start.row == end.row {
+ continue;
+ }
+
+ let range = start..end;
+ match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
+ Err(ix) => indent_ranges.insert(ix, range),
+ Ok(ix) => {
+ let prev_range = &mut indent_ranges[ix];
+ prev_range.end = prev_range.end.max(range.end);
+ }
+ }
+ }
+ }
+
+ let mut error_ranges = Vec::<Range<Point>>::new();
+ let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
+ Some(&grammar.error_query)
+ });
+ while let Some(mat) = matches.peek() {
+ let node = mat.captures[0].node;
+ let start = Point::from_ts_point(node.start_position());
+ let end = Point::from_ts_point(node.end_position());
+ let range = start..end;
+ let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
+ Ok(ix) | Err(ix) => ix,
+ };
+ let mut end_ix = ix;
+ while let Some(existing_range) = error_ranges.get(end_ix) {
+ if existing_range.end < end {
+ end_ix += 1;
+ } else {
+ break;
+ }
+ }
+ error_ranges.splice(ix..end_ix, [range]);
+ matches.advance();
+ }
+
+ outdent_positions.sort();
+ for outdent_position in outdent_positions {
+ // Find the innermost indent range containing this outdent position,
+ // and set that range's end to the outdent position.
+ if let Some(range_to_truncate) = indent_ranges
+ .iter_mut()
+ .filter(|indent_range| indent_range.contains(&outdent_position))
+ .last()
+ {
+ range_to_truncate.end = outdent_position;
+ }
+ }
+
+ // Find the suggested indentation increases and decreases based on regexes.
+ let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
+ self.for_each_line(
+ Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
+ ..Point::new(row_range.end, 0),
+ |row, line| {
+ if config
+ .decrease_indent_pattern
+ .as_ref()
+ .map_or(false, |regex| regex.is_match(line))
+ {
+ indent_change_rows.push((row, Ordering::Less));
+ }
+ if config
+ .increase_indent_pattern
+ .as_ref()
+ .map_or(false, |regex| regex.is_match(line))
+ {
+ indent_change_rows.push((row + 1, Ordering::Greater));
+ }
+ },
+ );
+
+ let mut indent_changes = indent_change_rows.into_iter().peekable();
+ let mut prev_row = if config.auto_indent_using_last_non_empty_line {
+ prev_non_blank_row.unwrap_or(0)
+ } else {
+ row_range.start.saturating_sub(1)
+ };
+ let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
+ Some(row_range.map(move |row| {
+ let row_start = Point::new(row, self.indent_size_for_line(row).len);
+
+ let mut indent_from_prev_row = false;
+ let mut outdent_from_prev_row = false;
+ let mut outdent_to_row = u32::MAX;
+
+ while let Some((indent_row, delta)) = indent_changes.peek() {
+ match indent_row.cmp(&row) {
+ Ordering::Equal => match delta {
+ Ordering::Less => outdent_from_prev_row = true,
+ Ordering::Greater => indent_from_prev_row = true,
+ _ => {}
+ },
+
+ Ordering::Greater => break,
+ Ordering::Less => {}
+ }
+
+ indent_changes.next();
+ }
+
+ for range in &indent_ranges {
+ if range.start.row >= row {
+ break;
+ }
+ if range.start.row == prev_row && range.end > row_start {
+ indent_from_prev_row = true;
+ }
+ if range.end > prev_row_start && range.end <= row_start {
+ outdent_to_row = outdent_to_row.min(range.start.row);
+ }
+ }
+
+ let within_error = error_ranges
+ .iter()
+ .any(|e| e.start.row < row && e.end > row_start);
+
+ let suggestion = if outdent_to_row == prev_row
+ || (outdent_from_prev_row && indent_from_prev_row)
+ {
+ Some(IndentSuggestion {
+ basis_row: prev_row,
+ delta: Ordering::Equal,
+ within_error,
+ })
+ } else if indent_from_prev_row {
+ Some(IndentSuggestion {
+ basis_row: prev_row,
+ delta: Ordering::Greater,
+ within_error,
+ })
+ } else if outdent_to_row < prev_row {
+ Some(IndentSuggestion {
+ basis_row: outdent_to_row,
+ delta: Ordering::Equal,
+ within_error,
+ })
+ } else if outdent_from_prev_row {
+ Some(IndentSuggestion {
+ basis_row: prev_row,
+ delta: Ordering::Less,
+ within_error,
+ })
+ } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
+ {
+ Some(IndentSuggestion {
+ basis_row: prev_row,
+ delta: Ordering::Equal,
+ within_error,
+ })
+ } else {
+ None
+ };
+
+ prev_row = row;
+ prev_row_start = row_start;
+ suggestion
+ }))
+ }
+
+ fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
+ while row > 0 {
+ row -= 1;
+ if !self.is_line_blank(row) {
+ return Some(row);
+ }
+ }
+ None
+ }
+
+ pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
+ let range = range.start.to_offset(self)..range.end.to_offset(self);
+
+ let mut syntax = None;
+ let mut diagnostic_endpoints = Vec::new();
+ if language_aware {
+ let captures = self.syntax.captures(range.clone(), &self.text, |grammar| {
+ grammar.highlights_query.as_ref()
+ });
+ let highlight_maps = captures
+ .grammars()
+ .into_iter()
+ .map(|grammar| grammar.highlight_map())
+ .collect();
+ syntax = Some((captures, highlight_maps));
+ for entry in self.diagnostics_in_range::<_, usize>(range.clone(), false) {
+ diagnostic_endpoints.push(DiagnosticEndpoint {
+ offset: entry.range.start,
+ is_start: true,
+ severity: entry.diagnostic.severity,
+ is_unnecessary: entry.diagnostic.is_unnecessary,
+ });
+ diagnostic_endpoints.push(DiagnosticEndpoint {
+ offset: entry.range.end,
+ is_start: false,
+ severity: entry.diagnostic.severity,
+ is_unnecessary: entry.diagnostic.is_unnecessary,
+ });
+ }
+ diagnostic_endpoints
+ .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
+ }
+
+ BufferChunks::new(self.text.as_rope(), range, syntax, diagnostic_endpoints)
+ }
+
+ pub fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
+ let mut line = String::new();
+ let mut row = range.start.row;
+ for chunk in self
+ .as_rope()
+ .chunks_in_range(range.to_offset(self))
+ .chain(["\n"])
+ {
+ for (newline_ix, text) in chunk.split('\n').enumerate() {
+ if newline_ix > 0 {
+ callback(row, &line);
+ row += 1;
+ line.clear();
+ }
+ line.push_str(text);
+ }
+ }
+ }
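+
+ // Illustrative sketch: `for_each_line` invokes the callback once per row
+ // in the range with that row's full text (hedged; a multi-line `snapshot`
+ // is assumed):
+ //
+ //     snapshot.for_each_line(Point::new(0, 0)..Point::new(3, 0), |row, line| {
+ //         log::debug!("row {}: {:?}", row, line);
+ //     });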
+
+ pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayerInfo> + '_ {
+ self.syntax.layers_for_range(0..self.len(), &self.text)
+ }
+
+ pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayerInfo> {
+ let offset = position.to_offset(self);
+ self.syntax
+ .layers_for_range(offset..offset, &self.text)
+ .filter(|l| l.node().end_byte() > offset)
+ .last()
+ }
+
+ pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
+ self.syntax_layer_at(position)
+ .map(|info| info.language)
+ .or(self.language.as_ref())
+ }
+
+ pub fn settings_at<'a, D: ToOffset>(
+ &self,
+ position: D,
+ cx: &'a AppContext,
+ ) -> &'a LanguageSettings {
+ language_settings(self.language_at(position), self.file.as_ref(), cx)
+ }
+
+ pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
+ let offset = position.to_offset(self);
+ let mut scope = None;
+ let mut smallest_range: Option<Range<usize>> = None;
+
+ // Use the layer that has the smallest node intersecting the given point.
+ for layer in self.syntax.layers_for_range(offset..offset, &self.text) {
+ let mut cursor = layer.node().walk();
+
+ let mut range = None;
+ loop {
+ let child_range = cursor.node().byte_range();
+ if !child_range.to_inclusive().contains(&offset) {
+ break;
+ }
+
+ range = Some(child_range);
+ if cursor.goto_first_child_for_byte(offset).is_none() {
+ break;
+ }
+ }
+
+ if let Some(range) = range {
+ if smallest_range
+ .as_ref()
+ .map_or(true, |smallest_range| range.len() < smallest_range.len())
+ {
+ smallest_range = Some(range);
+ scope = Some(LanguageScope {
+ language: layer.language.clone(),
+ override_id: layer.override_id(offset, &self.text),
+ });
+ }
+ }
+ }
+
+ scope.or_else(|| {
+ self.language.clone().map(|language| LanguageScope {
+ language,
+ override_id: None,
+ })
+ })
+ }
+
+ pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
+ let mut start = start.to_offset(self);
+ let mut end = start;
+ let mut next_chars = self.chars_at(start).peekable();
+ let mut prev_chars = self.reversed_chars_at(start).peekable();
+
+ let scope = self.language_scope_at(start);
+ let kind = |c| char_kind(&scope, c);
+ let word_kind = cmp::max(
+ prev_chars.peek().copied().map(kind),
+ next_chars.peek().copied().map(kind),
+ );
+
+ for ch in prev_chars {
+ if Some(kind(ch)) == word_kind && ch != '\n' {
+ start -= ch.len_utf8();
+ } else {
+ break;
+ }
+ }
+
+ for ch in next_chars {
+ if Some(kind(ch)) == word_kind && ch != '\n' {
+ end += ch.len_utf8();
+ } else {
+ break;
+ }
+ }
+
+ (start..end, word_kind)
+ }
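+
+ // A hedged worked example for `surrounding_word`: in a snapshot containing
+ // "fn main()", an offset between the `a` and `i` of "main" expands in both
+ // directions over characters of the same kind:
+ //
+ //     let (range, kind) = snapshot.surrounding_word(5);
+ //     assert_eq!(range, 3..7); // the bytes spelling "main"
+ //     assert_eq!(kind, Some(CharKind::Word));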
+
+ pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
+ let range = range.start.to_offset(self)..range.end.to_offset(self);
+ let mut result: Option<Range<usize>> = None;
+ 'outer: for layer in self.syntax.layers_for_range(range.clone(), &self.text) {
+ let mut cursor = layer.node().walk();
+
+ // Descend to the first leaf that touches the start of the range and,
+ // if the range is non-empty, extends beyond the start.
+ while cursor.goto_first_child_for_byte(range.start).is_some() {
+ if !range.is_empty() && cursor.node().end_byte() == range.start {
+ cursor.goto_next_sibling();
+ }
+ }
+
+ // Ascend to the smallest ancestor that strictly contains the range.
+ loop {
+ let node_range = cursor.node().byte_range();
+ if node_range.start <= range.start
+ && node_range.end >= range.end
+ && node_range.len() > range.len()
+ {
+ break;
+ }
+ if !cursor.goto_parent() {
+ continue 'outer;
+ }
+ }
+
+ let left_node = cursor.node();
+ let mut layer_result = left_node.byte_range();
+
+ // For an empty range, try to find another node immediately to the right of the range.
+ if left_node.end_byte() == range.start {
+ let mut right_node = None;
+ while !cursor.goto_next_sibling() {
+ if !cursor.goto_parent() {
+ break;
+ }
+ }
+
+ while cursor.node().start_byte() == range.start {
+ right_node = Some(cursor.node());
+ if !cursor.goto_first_child() {
+ break;
+ }
+ }
+
+ // If there is a candidate node on both sides of the (empty) range, then
+ // decide between the two by favoring a named node over an anonymous token.
+ // If both nodes are the same in that regard, favor the right one.
+ if let Some(right_node) = right_node {
+ if right_node.is_named() || !left_node.is_named() {
+ layer_result = right_node.byte_range();
+ }
+ }
+ }
+
+ if let Some(previous_result) = &result {
+ if previous_result.len() < layer_result.len() {
+ continue;
+ }
+ }
+ result = Some(layer_result);
+ }
+
+ result
+ }
+
+ pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
+ self.outline_items_containing(0..self.len(), true, theme)
+ .map(Outline::new)
+ }
+
+ pub fn symbols_containing<T: ToOffset>(
+ &self,
+ position: T,
+ theme: Option<&SyntaxTheme>,
+ ) -> Option<Vec<OutlineItem<Anchor>>> {
+ let position = position.to_offset(self);
+ let mut items = self.outline_items_containing(
+ position.saturating_sub(1)..self.len().min(position + 1),
+ false,
+ theme,
+ )?;
+ let mut prev_depth = None;
+ items.retain(|item| {
+ let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
+ prev_depth = Some(item.depth);
+ result
+ });
+ Some(items)
+ }
+
+ fn outline_items_containing(
+ &self,
+ range: Range<usize>,
+ include_extra_context: bool,
+ theme: Option<&SyntaxTheme>,
+ ) -> Option<Vec<OutlineItem<Anchor>>> {
+ let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
+ grammar.outline_config.as_ref().map(|c| &c.query)
+ });
+ let configs = matches
+ .grammars()
+ .iter()
+ .map(|g| g.outline_config.as_ref().unwrap())
+ .collect::<Vec<_>>();
+
+ let mut stack = Vec::<Range<usize>>::new();
+ let mut items = Vec::new();
+ while let Some(mat) = matches.peek() {
+ let config = &configs[mat.grammar_index];
+ let item_node = mat.captures.iter().find_map(|cap| {
+ if cap.index == config.item_capture_ix {
+ Some(cap.node)
+ } else {
+ None
+ }
+ })?;
+
+ let item_range = item_node.byte_range();
+ if item_range.end < range.start || item_range.start > range.end {
+ matches.advance();
+ continue;
+ }
+
+ let mut buffer_ranges = Vec::new();
+ for capture in mat.captures {
+ let node_is_name;
+ if capture.index == config.name_capture_ix {
+ node_is_name = true;
+ } else if Some(capture.index) == config.context_capture_ix
+ || (Some(capture.index) == config.extra_context_capture_ix
+ && include_extra_context)
+ {
+ node_is_name = false;
+ } else {
+ continue;
+ }
+
+ let mut range = capture.node.start_byte()..capture.node.end_byte();
+ let start = capture.node.start_position();
+ if capture.node.end_position().row > start.row {
+ range.end =
+ range.start + self.line_len(start.row as u32) as usize - start.column;
+ }
+
+ buffer_ranges.push((range, node_is_name));
+ }
+
+ if buffer_ranges.is_empty() {
+ // Advance before continuing, so that the same match isn't peeked again
+ // on the next iteration, which would loop forever.
+ matches.advance();
+ continue;
+ }
+
+ let mut text = String::new();
+ let mut highlight_ranges = Vec::new();
+ let mut name_ranges = Vec::new();
+ let mut chunks = self.chunks(
+ buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
+ true,
+ );
+ let mut last_buffer_range_end = 0;
+ for (buffer_range, is_name) in buffer_ranges {
+ if !text.is_empty() && buffer_range.start > last_buffer_range_end {
+ text.push(' ');
+ }
+ last_buffer_range_end = buffer_range.end;
+ if is_name {
+ let mut start = text.len();
+ let end = start + buffer_range.len();
+
+ // When multiple names are captured, the matchable text
+ // includes the whitespace in between the names.
+ if !name_ranges.is_empty() {
+ start -= 1;
+ }
+
+ name_ranges.push(start..end);
+ }
+
+ let mut offset = buffer_range.start;
+ chunks.seek(offset);
+ for mut chunk in chunks.by_ref() {
+ if chunk.text.len() > buffer_range.end - offset {
+ chunk.text = &chunk.text[0..(buffer_range.end - offset)];
+ offset = buffer_range.end;
+ } else {
+ offset += chunk.text.len();
+ }
+ let style = chunk
+ .syntax_highlight_id
+ .zip(theme)
+ .and_then(|(highlight, theme)| highlight.style(theme));
+ if let Some(style) = style {
+ let start = text.len();
+ let end = start + chunk.text.len();
+ highlight_ranges.push((start..end, style));
+ }
+ text.push_str(chunk.text);
+ if offset >= buffer_range.end {
+ break;
+ }
+ }
+ }
+
+ matches.advance();
+ while stack.last().map_or(false, |prev_range| {
+ prev_range.start > item_range.start || prev_range.end < item_range.end
+ }) {
+ stack.pop();
+ }
+ stack.push(item_range.clone());
+
+ items.push(OutlineItem {
+ depth: stack.len() - 1,
+ range: self.anchor_after(item_range.start)..self.anchor_before(item_range.end),
+ text,
+ highlight_ranges,
+ name_ranges,
+ })
+ }
+ Some(items)
+ }
+
+ pub fn matches(
+ &self,
+ range: Range<usize>,
+ query: fn(&Grammar) -> Option<&tree_sitter::Query>,
+ ) -> SyntaxMapMatches {
+ self.syntax.matches(range, self, query)
+ }
+
+ /// Returns bracket range pairs overlapping or adjacent to `range`.
+ pub fn bracket_ranges<'a, T: ToOffset>(
+ &'a self,
+ range: Range<T>,
+ ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + 'a {
+ // Find bracket pairs that *inclusively* contain the given range.
+ let range = range.start.to_offset(self).saturating_sub(1)
+ ..self.len().min(range.end.to_offset(self) + 1);
+
+ let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
+ grammar.brackets_config.as_ref().map(|c| &c.query)
+ });
+ let configs = matches
+ .grammars()
+ .iter()
+ .map(|grammar| grammar.brackets_config.as_ref().unwrap())
+ .collect::<Vec<_>>();
+
+ iter::from_fn(move || {
+ while let Some(mat) = matches.peek() {
+ let mut open = None;
+ let mut close = None;
+ let config = &configs[mat.grammar_index];
+ for capture in mat.captures {
+ if capture.index == config.open_capture_ix {
+ open = Some(capture.node.byte_range());
+ } else if capture.index == config.close_capture_ix {
+ close = Some(capture.node.byte_range());
+ }
+ }
+
+ matches.advance();
+
+ let Some((open, close)) = open.zip(close) else {
+ continue;
+ };
+
+ let bracket_range = open.start..=close.end;
+ if !bracket_range.overlaps(&range) {
+ continue;
+ }
+
+ return Some((open, close));
+ }
+ None
+ })
+ }
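+
+ // Hedged usage sketch: iterate the open/close byte-range pairs that
+ // overlap a selection (assuming the grammar defines a brackets query):
+ //
+ //     for (open, close) in snapshot.bracket_ranges(selection.clone()) {
+ //         log::debug!("open {:?} pairs with close {:?}", open, close);
+ //     }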
+
+ #[allow(clippy::type_complexity)]
+ pub fn remote_selections_in_range(
+ &self,
+ range: Range<Anchor>,
+ ) -> impl Iterator<
+ Item = (
+ ReplicaId,
+ bool,
+ CursorShape,
+ impl Iterator<Item = &Selection<Anchor>> + '_,
+ ),
+ > + '_ {
+ self.remote_selections
+ .iter()
+ .filter(|(replica_id, set)| {
+ **replica_id != self.text.replica_id() && !set.selections.is_empty()
+ })
+ .map(move |(replica_id, set)| {
+ let start_ix = match set.selections.binary_search_by(|probe| {
+ probe.end.cmp(&range.start, self).then(Ordering::Greater)
+ }) {
+ Ok(ix) | Err(ix) => ix,
+ };
+ let end_ix = match set.selections.binary_search_by(|probe| {
+ probe.start.cmp(&range.end, self).then(Ordering::Less)
+ }) {
+ Ok(ix) | Err(ix) => ix,
+ };
+
+ (
+ *replica_id,
+ set.line_mode,
+ set.cursor_shape,
+ set.selections[start_ix..end_ix].iter(),
+ )
+ })
+ }
+
+ pub fn git_diff_hunks_in_row_range<'a>(
+ &'a self,
+ range: Range<u32>,
+ ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
+ self.git_diff.hunks_in_row_range(range, self)
+ }
+
+ pub fn git_diff_hunks_intersecting_range<'a>(
+ &'a self,
+ range: Range<Anchor>,
+ ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
+ self.git_diff.hunks_intersecting_range(range, self)
+ }
+
+ pub fn git_diff_hunks_intersecting_range_rev<'a>(
+ &'a self,
+ range: Range<Anchor>,
+ ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
+ self.git_diff.hunks_intersecting_range_rev(range, self)
+ }
+
+ pub fn diagnostics_in_range<'a, T, O>(
+ &'a self,
+ search_range: Range<T>,
+ reversed: bool,
+ ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
+ where
+ T: 'a + Clone + ToOffset,
+ O: 'a + FromAnchor + Ord,
+ {
+ let mut iterators: Vec<_> = self
+ .diagnostics
+ .iter()
+ .map(|(_, collection)| {
+ collection
+ .range::<T, O>(search_range.clone(), self, true, reversed)
+ .peekable()
+ })
+ .collect();
+
+ std::iter::from_fn(move || {
+ let (next_ix, _) = iterators
+ .iter_mut()
+ .enumerate()
+ .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
+ .min_by(|(_, a), (_, b)| a.range.start.cmp(&b.range.start))?;
+ iterators[next_ix].next()
+ })
+ }
+
+ pub fn diagnostic_groups(
+ &self,
+ language_server_id: Option<LanguageServerId>,
+ ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
+ let mut groups = Vec::new();
+
+ if let Some(language_server_id) = language_server_id {
+ if let Ok(ix) = self
+ .diagnostics
+ .binary_search_by_key(&language_server_id, |e| e.0)
+ {
+ self.diagnostics[ix]
+ .1
+ .groups(language_server_id, &mut groups, self);
+ }
+ } else {
+ for (language_server_id, diagnostics) in self.diagnostics.iter() {
+ diagnostics.groups(*language_server_id, &mut groups, self);
+ }
+ }
+
+ groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
+ let a_start = &group_a.entries[group_a.primary_ix].range.start;
+ let b_start = &group_b.entries[group_b.primary_ix].range.start;
+ a_start.cmp(b_start, self).then_with(|| id_a.cmp(&id_b))
+ });
+
+ groups
+ }
+
+ pub fn diagnostic_group<'a, O>(
+ &'a self,
+ group_id: usize,
+ ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
+ where
+ O: 'a + FromAnchor,
+ {
+ self.diagnostics
+ .iter()
+ .flat_map(move |(_, set)| set.group(group_id, self))
+ }
+
+ pub fn diagnostics_update_count(&self) -> usize {
+ self.diagnostics_update_count
+ }
+
+ pub fn parse_count(&self) -> usize {
+ self.parse_count
+ }
+
+ pub fn selections_update_count(&self) -> usize {
+ self.selections_update_count
+ }
+
+ pub fn file(&self) -> Option<&Arc<dyn File>> {
+ self.file.as_ref()
+ }
+
+ pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
+ if let Some(file) = self.file() {
+ if file.path().file_name().is_none() || include_root {
+ Some(file.full_path(cx))
+ } else {
+ Some(file.path().to_path_buf())
+ }
+ } else {
+ None
+ }
+ }
+
+ pub fn file_update_count(&self) -> usize {
+ self.file_update_count
+ }
+
+ pub fn git_diff_update_count(&self) -> usize {
+ self.git_diff_update_count
+ }
+}
+
+fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
+ indent_size_for_text(text.chars_at(Point::new(row, 0)))
+}
+
+pub fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
+ let mut result = IndentSize::spaces(0);
+ for c in text {
+ let kind = match c {
+ ' ' => IndentKind::Space,
+ '\t' => IndentKind::Tab,
+ _ => break,
+ };
+ if result.len == 0 {
+ result.kind = kind;
+ }
+ result.len += 1;
+ }
+ result
+}
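+
+// Hedged worked examples for `indent_size_for_text` (the values follow from
+// the loop above): the kind comes from the first whitespace character, and
+// the length counts every leading space or tab, even when they are mixed.
+//
+//     assert_eq!(indent_size_for_text("    x".chars()).len, 4);
+//     assert_eq!(indent_size_for_text("\t\tx".chars()).kind, IndentKind::Tab);
+//     assert_eq!(indent_size_for_text("  \tx".chars()).len, 3); // mixed; kind stays Space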
+
+impl Clone for BufferSnapshot {
+ fn clone(&self) -> Self {
+ Self {
+ text: self.text.clone(),
+ git_diff: self.git_diff.clone(),
+ syntax: self.syntax.clone(),
+ file: self.file.clone(),
+ remote_selections: self.remote_selections.clone(),
+ diagnostics: self.diagnostics.clone(),
+ selections_update_count: self.selections_update_count,
+ diagnostics_update_count: self.diagnostics_update_count,
+ file_update_count: self.file_update_count,
+ git_diff_update_count: self.git_diff_update_count,
+ language: self.language.clone(),
+ parse_count: self.parse_count,
+ }
+ }
+}
+
+impl Deref for BufferSnapshot {
+ type Target = text::BufferSnapshot;
+
+ fn deref(&self) -> &Self::Target {
+ &self.text
+ }
+}
+
+unsafe impl<'a> Send for BufferChunks<'a> {}
+
+impl<'a> BufferChunks<'a> {
+ pub(crate) fn new(
+ text: &'a Rope,
+ range: Range<usize>,
+ syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
+ diagnostic_endpoints: Vec<DiagnosticEndpoint>,
+ ) -> Self {
+ let mut highlights = None;
+ if let Some((captures, highlight_maps)) = syntax {
+ highlights = Some(BufferChunkHighlights {
+ captures,
+ next_capture: None,
+ stack: Default::default(),
+ highlight_maps,
+ })
+ }
+
+ let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
+ let chunks = text.chunks_in_range(range.clone());
+
+ BufferChunks {
+ range,
+ chunks,
+ diagnostic_endpoints,
+ error_depth: 0,
+ warning_depth: 0,
+ information_depth: 0,
+ hint_depth: 0,
+ unnecessary_depth: 0,
+ highlights,
+ }
+ }
+
+ pub fn seek(&mut self, offset: usize) {
+ self.range.start = offset;
+ self.chunks.seek(self.range.start);
+ if let Some(highlights) = self.highlights.as_mut() {
+ highlights
+ .stack
+ .retain(|(end_offset, _)| *end_offset > offset);
+ if let Some(capture) = &highlights.next_capture {
+ if offset >= capture.node.start_byte() {
+ let next_capture_end = capture.node.end_byte();
+ if offset < next_capture_end {
+ highlights.stack.push((
+ next_capture_end,
+ highlights.highlight_maps[capture.grammar_index].get(capture.index),
+ ));
+ }
+ highlights.next_capture.take();
+ }
+ }
+ highlights.captures.set_byte_range(self.range.clone());
+ }
+ }
+
+ pub fn offset(&self) -> usize {
+ self.range.start
+ }
+
+ fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
+ let depth = match endpoint.severity {
+ DiagnosticSeverity::ERROR => &mut self.error_depth,
+ DiagnosticSeverity::WARNING => &mut self.warning_depth,
+ DiagnosticSeverity::INFORMATION => &mut self.information_depth,
+ DiagnosticSeverity::HINT => &mut self.hint_depth,
+ _ => return,
+ };
+ if endpoint.is_start {
+ *depth += 1;
+ } else {
+ *depth -= 1;
+ }
+
+ if endpoint.is_unnecessary {
+ if endpoint.is_start {
+ self.unnecessary_depth += 1;
+ } else {
+ self.unnecessary_depth -= 1;
+ }
+ }
+ }
+
+ fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
+ if self.error_depth > 0 {
+ Some(DiagnosticSeverity::ERROR)
+ } else if self.warning_depth > 0 {
+ Some(DiagnosticSeverity::WARNING)
+ } else if self.information_depth > 0 {
+ Some(DiagnosticSeverity::INFORMATION)
+ } else if self.hint_depth > 0 {
+ Some(DiagnosticSeverity::HINT)
+ } else {
+ None
+ }
+ }
+
+ fn current_code_is_unnecessary(&self) -> bool {
+ self.unnecessary_depth > 0
+ }
+}
+
+impl<'a> Iterator for BufferChunks<'a> {
+ type Item = Chunk<'a>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let mut next_capture_start = usize::MAX;
+ let mut next_diagnostic_endpoint = usize::MAX;
+
+ if let Some(highlights) = self.highlights.as_mut() {
+ while let Some((parent_capture_end, _)) = highlights.stack.last() {
+ if *parent_capture_end <= self.range.start {
+ highlights.stack.pop();
+ } else {
+ break;
+ }
+ }
+
+ if highlights.next_capture.is_none() {
+ highlights.next_capture = highlights.captures.next();
+ }
+
+ while let Some(capture) = highlights.next_capture.as_ref() {
+ if self.range.start < capture.node.start_byte() {
+ next_capture_start = capture.node.start_byte();
+ break;
+ } else {
+ let highlight_id =
+ highlights.highlight_maps[capture.grammar_index].get(capture.index);
+ highlights
+ .stack
+ .push((capture.node.end_byte(), highlight_id));
+ highlights.next_capture = highlights.captures.next();
+ }
+ }
+ }
+
+ while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
+ if endpoint.offset <= self.range.start {
+ self.update_diagnostic_depths(endpoint);
+ self.diagnostic_endpoints.next();
+ } else {
+ next_diagnostic_endpoint = endpoint.offset;
+ break;
+ }
+ }
+
+ if let Some(chunk) = self.chunks.peek() {
+ let chunk_start = self.range.start;
+ let mut chunk_end = (self.chunks.offset() + chunk.len())
+ .min(next_capture_start)
+ .min(next_diagnostic_endpoint);
+ let mut highlight_id = None;
+ if let Some(highlights) = self.highlights.as_ref() {
+ if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
+ chunk_end = chunk_end.min(*parent_capture_end);
+ highlight_id = Some(*parent_highlight_id);
+ }
+ }
+
+ let slice =
+ &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
+ self.range.start = chunk_end;
+ if self.range.start == self.chunks.offset() + chunk.len() {
+ self.chunks.next().unwrap();
+ }
+
+ Some(Chunk {
+ text: slice,
+ syntax_highlight_id: highlight_id,
+ diagnostic_severity: self.current_diagnostic_severity(),
+ is_unnecessary: self.current_code_is_unnecessary(),
+ ..Default::default()
+ })
+ } else {
+ None
+ }
+ }
+}
+
+impl operation_queue::Operation for Operation {
+ fn lamport_timestamp(&self) -> clock::Lamport {
+ match self {
+ Operation::Buffer(_) => {
+ unreachable!("buffer operations should never be deferred at this layer")
+ }
+ Operation::UpdateDiagnostics {
+ lamport_timestamp, ..
+ }
+ | Operation::UpdateSelections {
+ lamport_timestamp, ..
+ }
+ | Operation::UpdateCompletionTriggers {
+ lamport_timestamp, ..
+ } => *lamport_timestamp,
+ }
+ }
+}
+
+impl Default for Diagnostic {
+ fn default() -> Self {
+ Self {
+ source: Default::default(),
+ code: None,
+ severity: DiagnosticSeverity::ERROR,
+ message: Default::default(),
+ group_id: 0,
+ is_primary: false,
+ is_valid: true,
+ is_disk_based: false,
+ is_unnecessary: false,
+ }
+ }
+}
+
+impl IndentSize {
+ pub fn spaces(len: u32) -> Self {
+ Self {
+ len,
+ kind: IndentKind::Space,
+ }
+ }
+
+ pub fn tab() -> Self {
+ Self {
+ len: 1,
+ kind: IndentKind::Tab,
+ }
+ }
+
+ pub fn chars(&self) -> impl Iterator<Item = char> {
+ iter::repeat(self.char()).take(self.len as usize)
+ }
+
+ pub fn char(&self) -> char {
+ match self.kind {
+ IndentKind::Space => ' ',
+ IndentKind::Tab => '\t',
+ }
+ }
+
+ pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
+ match direction {
+ Ordering::Less => {
+ if self.kind == size.kind && self.len >= size.len {
+ self.len -= size.len;
+ }
+ }
+ Ordering::Equal => {}
+ Ordering::Greater => {
+ if self.len == 0 {
+ self = size;
+ } else if self.kind == size.kind {
+ self.len += size.len;
+ }
+ }
+ }
+ self
+ }
+}
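+
+// Hedged worked examples for `with_delta` above: `Greater` grows the indent
+// by one unit (or adopts the unit when the indent is empty), `Less` shrinks
+// it when the kinds agree, and `Equal` leaves it unchanged.
+//
+//     let unit = IndentSize::spaces(4);
+//     assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Greater, unit).len, 8);
+//     assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Less, unit).len, 0);
+//     assert_eq!(IndentSize::spaces(0).with_delta(Ordering::Greater, unit).len, 4);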
+
+impl Completion {
+ pub fn sort_key(&self) -> (usize, &str) {
+ let kind_key = match self.lsp_completion.kind {
+ Some(lsp::CompletionItemKind::VARIABLE) => 0,
+ _ => 1,
+ };
+ (kind_key, &self.label.text[self.label.filter_range.clone()])
+ }
+
+ pub fn is_snippet(&self) -> bool {
+ self.lsp_completion.insert_text_format == Some(lsp::InsertTextFormat::SNIPPET)
+ }
+}
+
+pub fn contiguous_ranges(
+ values: impl Iterator<Item = u32>,
+ max_len: usize,
+) -> impl Iterator<Item = Range<u32>> {
+ let mut values = values;
+ let mut current_range: Option<Range<u32>> = None;
+ std::iter::from_fn(move || loop {
+ if let Some(value) = values.next() {
+ if let Some(range) = &mut current_range {
+ if value == range.end && range.len() < max_len {
+ range.end += 1;
+ continue;
+ }
+ }
+
+ let prev_range = current_range.clone();
+ current_range = Some(value..(value + 1));
+ if prev_range.is_some() {
+ return prev_range;
+ }
+ } else {
+ return current_range.take();
+ }
+ })
+}
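+
+// A hedged worked example of `contiguous_ranges`: consecutive values are
+// coalesced into half-open ranges, and `max_len` caps each range's length.
+//
+//     let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 100).collect();
+//     assert_eq!(ranges, [1..4, 5..7]);
+//
+//     let capped: Vec<_> = contiguous_ranges([1, 2, 3].into_iter(), 2).collect();
+//     assert_eq!(capped, [1..3, 3..4]);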
+
+pub fn char_kind(scope: &Option<LanguageScope>, c: char) -> CharKind {
+ if c.is_whitespace() {
+ return CharKind::Whitespace;
+ } else if c.is_alphanumeric() || c == '_' {
+ return CharKind::Word;
+ }
+
+ if let Some(scope) = scope {
+ if let Some(characters) = scope.word_characters() {
+ if characters.contains(&c) {
+ return CharKind::Word;
+ }
+ }
+ }
+
+ CharKind::Punctuation
+}
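+
+// Hedged usage note for `char_kind`: with no scope, only alphanumerics and
+// `_` count as word characters; a language scope can extend that set via its
+// configured word characters (for example, `-` in a CSS-like language):
+//
+//     assert_eq!(char_kind(&None, 'x'), CharKind::Word);
+//     assert_eq!(char_kind(&None, ' '), CharKind::Whitespace);
+//     assert_eq!(char_kind(&None, '-'), CharKind::Punctuation);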
+
+/// Find all of the ranges of whitespace that occur at the ends of lines
+/// in the given rope.
+///
+/// This could also be done with a regex search, but this implementation
+/// avoids copying text.
+pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
+ let mut ranges = Vec::new();
+
+ let mut offset = 0;
+ let mut prev_chunk_trailing_whitespace_range = 0..0;
+ for chunk in rope.chunks() {
+ let mut prev_line_trailing_whitespace_range = 0..0;
+ for (i, line) in chunk.split('\n').enumerate() {
+ let line_end_offset = offset + line.len();
+ let trimmed_line_len = line.trim_end_matches(|c| matches!(c, ' ' | '\t')).len();
+ let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
+
+ if i == 0 && trimmed_line_len == 0 {
+ trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
+ }
+ if !prev_line_trailing_whitespace_range.is_empty() {
+ ranges.push(prev_line_trailing_whitespace_range);
+ }
+
+ offset = line_end_offset + 1;
+ prev_line_trailing_whitespace_range = trailing_whitespace_range;
+ }
+
+ offset -= 1;
+ prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
+ }
+
+ if !prev_chunk_trailing_whitespace_range.is_empty() {
+ ranges.push(prev_chunk_trailing_whitespace_range);
+ }
+
+ ranges
+}
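+
+// A hedged worked example of `trailing_whitespace_ranges`, computed by hand
+// from the logic above: in "a \nb\t\nc" the trailing space on the first line
+// and the trailing tab on the second are reported; the last line has none.
+//
+//     let rope = Rope::from("a \nb\t\nc");
+//     assert_eq!(trailing_whitespace_ranges(&rope), [1..2, 4..5]);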
@@ -0,0 +1,2442 @@
+use crate::language_settings::{
+ AllLanguageSettings, AllLanguageSettingsContent, LanguageSettingsContent,
+};
+
+use super::*;
+use clock::ReplicaId;
+use collections::BTreeMap;
+use gpui::{AppContext, ModelHandle};
+use indoc::indoc;
+use proto::deserialize_operation;
+use rand::prelude::*;
+use regex::RegexBuilder;
+use settings::SettingsStore;
+use std::{
+ cell::RefCell,
+ env,
+ ops::Range,
+ rc::Rc,
+ time::{Duration, Instant},
+};
+use text::network::Network;
+use text::LineEnding;
+use unindent::Unindent as _;
+use util::{assert_set_eq, post_inc, test::marked_text_ranges, RandomCharIter};
+
+lazy_static! {
+ static ref TRAILING_WHITESPACE_REGEX: Regex = RegexBuilder::new("[ \t]+$")
+ .multi_line(true)
+ .build()
+ .unwrap();
+}
+
+#[cfg(test)]
+#[ctor::ctor]
+fn init_logger() {
+ if std::env::var("RUST_LOG").is_ok() {
+ env_logger::init();
+ }
+}
+
+#[gpui::test]
+fn test_line_endings(cx: &mut gpui::AppContext) {
+ init_settings(cx, |_| {});
+
+ cx.add_model(|cx| {
+ let mut buffer = Buffer::new(0, cx.model_id() as u64, "one\r\ntwo\rthree")
+ .with_language(Arc::new(rust_lang()), cx);
+ assert_eq!(buffer.text(), "one\ntwo\nthree");
+ assert_eq!(buffer.line_ending(), LineEnding::Windows);
+
+ buffer.check_invariants();
+ buffer.edit(
+ [(buffer.len()..buffer.len(), "\r\nfour")],
+ Some(AutoindentMode::EachLine),
+ cx,
+ );
+ buffer.edit([(0..0, "zero\r\n")], None, cx);
+ assert_eq!(buffer.text(), "zero\none\ntwo\nthree\nfour");
+ assert_eq!(buffer.line_ending(), LineEnding::Windows);
+ buffer.check_invariants();
+
+ buffer
+ });
+}
+
+#[gpui::test]
+fn test_select_language() {
+ let registry = Arc::new(LanguageRegistry::test());
+ registry.add(Arc::new(Language::new(
+ LanguageConfig {
+ name: "Rust".into(),
+ path_suffixes: vec!["rs".to_string()],
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::language()),
+ )));
+ registry.add(Arc::new(Language::new(
+ LanguageConfig {
+ name: "Make".into(),
+ path_suffixes: vec!["Makefile".to_string(), "mk".to_string()],
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::language()),
+ )));
+
+ // matching file extension
+ assert_eq!(
+ registry
+ .language_for_file("zed/lib.rs", None)
+ .now_or_never()
+ .and_then(|l| Some(l.ok()?.name())),
+ Some("Rust".into())
+ );
+ assert_eq!(
+ registry
+ .language_for_file("zed/lib.mk", None)
+ .now_or_never()
+ .and_then(|l| Some(l.ok()?.name())),
+ Some("Make".into())
+ );
+
+ // matching filename
+ assert_eq!(
+ registry
+ .language_for_file("zed/Makefile", None)
+ .now_or_never()
+ .and_then(|l| Some(l.ok()?.name())),
+ Some("Make".into())
+ );
+
+ // matching suffix that is not the full file extension or filename
+ assert_eq!(
+ registry
+ .language_for_file("zed/cars", None)
+ .now_or_never()
+ .and_then(|l| Some(l.ok()?.name())),
+ None
+ );
+ assert_eq!(
+ registry
+ .language_for_file("zed/a.cars", None)
+ .now_or_never()
+ .and_then(|l| Some(l.ok()?.name())),
+ None
+ );
+ assert_eq!(
+ registry
+ .language_for_file("zed/sumk", None)
+ .now_or_never()
+ .and_then(|l| Some(l.ok()?.name())),
+ None
+ );
+}
+
+#[gpui::test]
+fn test_edit_events(cx: &mut gpui::AppContext) {
+ let mut now = Instant::now();
+ let buffer_1_events = Rc::new(RefCell::new(Vec::new()));
+ let buffer_2_events = Rc::new(RefCell::new(Vec::new()));
+
+ let buffer1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "abcdef"));
+ let buffer2 = cx.add_model(|cx| Buffer::new(1, cx.model_id() as u64, "abcdef"));
+ let buffer1_ops = Rc::new(RefCell::new(Vec::new()));
+ buffer1.update(cx, {
+ let buffer1_ops = buffer1_ops.clone();
+ |buffer, cx| {
+ let buffer_1_events = buffer_1_events.clone();
+ cx.subscribe(&buffer1, move |_, _, event, _| match event.clone() {
+ Event::Operation(op) => buffer1_ops.borrow_mut().push(op),
+ event => buffer_1_events.borrow_mut().push(event),
+ })
+ .detach();
+ let buffer_2_events = buffer_2_events.clone();
+ cx.subscribe(&buffer2, move |_, _, event, _| {
+ buffer_2_events.borrow_mut().push(event.clone())
+ })
+ .detach();
+
+ // An edit emits an edited event, followed by a dirty changed event,
+ // since the buffer was previously in a clean state.
+ buffer.edit([(2..4, "XYZ")], None, cx);
+
+ // An empty transaction does not emit any events.
+ buffer.start_transaction();
+ buffer.end_transaction(cx);
+
+ // A transaction containing two edits emits one edited event.
+ now += Duration::from_secs(1);
+ buffer.start_transaction_at(now);
+ buffer.edit([(5..5, "u")], None, cx);
+ buffer.edit([(6..6, "w")], None, cx);
+ buffer.end_transaction_at(now, cx);
+
+ // Undoing a transaction emits one edited event.
+ buffer.undo(cx);
+ }
+ });
+
+ // Incorporating a set of remote ops emits a single edited event,
+ // followed by a dirty changed event.
+ buffer2.update(cx, |buffer, cx| {
+ buffer
+ .apply_ops(buffer1_ops.borrow_mut().drain(..), cx)
+ .unwrap();
+ });
+ assert_eq!(
+ mem::take(&mut *buffer_1_events.borrow_mut()),
+ vec![
+ Event::Edited,
+ Event::DirtyChanged,
+ Event::Edited,
+ Event::Edited,
+ ]
+ );
+ assert_eq!(
+ mem::take(&mut *buffer_2_events.borrow_mut()),
+ vec![Event::Edited, Event::DirtyChanged]
+ );
+
+ buffer1.update(cx, |buffer, cx| {
+ // Undoing the first transaction emits an edited event, followed by a
+ // dirty changed event, since the buffer is again in a clean state.
+ buffer.undo(cx);
+ });
+ // Incorporating the remote ops again emits a single edited event,
+ // followed by a dirty changed event.
+ buffer2.update(cx, |buffer, cx| {
+ buffer
+ .apply_ops(buffer1_ops.borrow_mut().drain(..), cx)
+ .unwrap();
+ });
+ assert_eq!(
+ mem::take(&mut *buffer_1_events.borrow_mut()),
+ vec![Event::Edited, Event::DirtyChanged,]
+ );
+ assert_eq!(
+ mem::take(&mut *buffer_2_events.borrow_mut()),
+ vec![Event::Edited, Event::DirtyChanged]
+ );
+}
+
+#[gpui::test]
+async fn test_apply_diff(cx: &mut gpui::TestAppContext) {
+ let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
+ let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text));
+ let anchor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(3, 3)));
+
+ let text = "a\nccc\ndddd\nffffff\n";
+ let diff = buffer.read_with(cx, |b, cx| b.diff(text.into(), cx)).await;
+ buffer.update(cx, |buffer, cx| {
+ buffer.apply_diff(diff, cx).unwrap();
+ assert_eq!(buffer.text(), text);
+ assert_eq!(anchor.to_point(buffer), Point::new(2, 3));
+ });
+
+ let text = "a\n1\n\nccc\ndd2dd\nffffff\n";
+ let diff = buffer.read_with(cx, |b, cx| b.diff(text.into(), cx)).await;
+ buffer.update(cx, |buffer, cx| {
+ buffer.apply_diff(diff, cx).unwrap();
+ assert_eq!(buffer.text(), text);
+ assert_eq!(anchor.to_point(buffer), Point::new(4, 4));
+ });
+}
+
+#[gpui::test(iterations = 10)]
+async fn test_normalize_whitespace(cx: &mut gpui::TestAppContext) {
+ let text = [
+ "zero", //
+ "one ", // 2 trailing spaces
+ "two", //
+ "three ", // 3 trailing spaces
+ "four", //
+ "five ", // 4 trailing spaces
+ ]
+ .join("\n");
+
+ let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text));
+
+ // Spawn a task to normalize the buffer's trailing whitespace.
+ // Pause so that the normalization task starts running.
+ let format = buffer.read_with(cx, |buffer, cx| buffer.remove_trailing_whitespace(cx));
+ smol::future::yield_now().await;
+
+ // Edit the buffer while the normalization task is running.
+ let version_before_edit = buffer.read_with(cx, |buffer, _| buffer.version());
+ buffer.update(cx, |buffer, cx| {
+ buffer.edit(
+ [
+ (Point::new(0, 1)..Point::new(0, 1), "EE"),
+ (Point::new(3, 5)..Point::new(3, 5), "EEE"),
+ ],
+ None,
+ cx,
+ );
+ });
+
+ let format_diff = format.await;
+ buffer.update(cx, |buffer, cx| {
+ let version_before_format = format_diff.base_version.clone();
+ buffer.apply_diff(format_diff, cx);
+
+ // The outcome depends on the order of the concurrent tasks.
+ //
+ // If the edit occurred while searching for trailing whitespace ranges,
+ // then the trailing whitespace region touched by the edit is left intact.
+ if version_before_format == version_before_edit {
+ assert_eq!(
+ buffer.text(),
+ [
+ "zEEero", //
+ "one", //
+ "two", //
+ "threeEEE ", //
+ "four", //
+ "five", //
+ ]
+ .join("\n")
+ );
+ }
+ // Otherwise, all trailing whitespace is removed.
+ else {
+ assert_eq!(
+ buffer.text(),
+ [
+ "zEEero", //
+ "one", //
+ "two", //
+ "threeEEE", //
+ "four", //
+ "five", //
+ ]
+ .join("\n")
+ );
+ }
+ });
+}
+
+#[gpui::test]
+async fn test_reparse(cx: &mut gpui::TestAppContext) {
+ let text = "fn a() {}";
+ let buffer = cx.add_model(|cx| {
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
+ });
+
+ // Wait for the initial text to parse
+ buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
+ assert_eq!(
+ get_tree_sexp(&buffer, cx),
+ concat!(
+ "(source_file (function_item name: (identifier) ",
+ "parameters: (parameters) ",
+ "body: (block)))"
+ )
+ );
+
+ buffer.update(cx, |buffer, _| {
+ buffer.set_sync_parse_timeout(Duration::ZERO)
+ });
+
+ // Perform some edits (add parameter and variable reference)
+ // Parsing doesn't begin until the transaction is complete
+ buffer.update(cx, |buf, cx| {
+ buf.start_transaction();
+
+ let offset = buf.text().find(')').unwrap();
+ buf.edit([(offset..offset, "b: C")], None, cx);
+ assert!(!buf.is_parsing());
+
+ let offset = buf.text().find('}').unwrap();
+ buf.edit([(offset..offset, " d; ")], None, cx);
+ assert!(!buf.is_parsing());
+
+ buf.end_transaction(cx);
+ assert_eq!(buf.text(), "fn a(b: C) { d; }");
+ assert!(buf.is_parsing());
+ });
+ buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
+ assert_eq!(
+ get_tree_sexp(&buffer, cx),
+ concat!(
+ "(source_file (function_item name: (identifier) ",
+ "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
+ "body: (block (expression_statement (identifier)))))"
+ )
+ );
+
+ // Perform a series of edits without waiting for the current parse to complete:
+ // * turn identifier into a field expression
+ // * turn field expression into a method call
+ // * add a turbofish to the method call
+ buffer.update(cx, |buf, cx| {
+ let offset = buf.text().find(';').unwrap();
+ buf.edit([(offset..offset, ".e")], None, cx);
+ assert_eq!(buf.text(), "fn a(b: C) { d.e; }");
+ assert!(buf.is_parsing());
+ });
+ buffer.update(cx, |buf, cx| {
+ let offset = buf.text().find(';').unwrap();
+ buf.edit([(offset..offset, "(f)")], None, cx);
+ assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }");
+ assert!(buf.is_parsing());
+ });
+ buffer.update(cx, |buf, cx| {
+ let offset = buf.text().find("(f)").unwrap();
+ buf.edit([(offset..offset, "::<G>")], None, cx);
+ assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
+ assert!(buf.is_parsing());
+ });
+ buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
+ assert_eq!(
+ get_tree_sexp(&buffer, cx),
+ concat!(
+ "(source_file (function_item name: (identifier) ",
+ "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
+ "body: (block (expression_statement (call_expression ",
+ "function: (generic_function ",
+ "function: (field_expression value: (identifier) field: (field_identifier)) ",
+ "type_arguments: (type_arguments (type_identifier))) ",
+ "arguments: (arguments (identifier)))))))",
+ )
+ );
+
+ buffer.update(cx, |buf, cx| {
+ buf.undo(cx);
+ buf.undo(cx);
+ buf.undo(cx);
+ buf.undo(cx);
+ assert_eq!(buf.text(), "fn a() {}");
+ assert!(buf.is_parsing());
+ });
+ buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
+ assert_eq!(
+ get_tree_sexp(&buffer, cx),
+ concat!(
+ "(source_file (function_item name: (identifier) ",
+ "parameters: (parameters) ",
+ "body: (block)))"
+ )
+ );
+
+ buffer.update(cx, |buf, cx| {
+ buf.redo(cx);
+ buf.redo(cx);
+ buf.redo(cx);
+ buf.redo(cx);
+ assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
+ assert!(buf.is_parsing());
+ });
+ buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
+ assert_eq!(
+ get_tree_sexp(&buffer, cx),
+ concat!(
+ "(source_file (function_item name: (identifier) ",
+ "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
+ "body: (block (expression_statement (call_expression ",
+ "function: (generic_function ",
+ "function: (field_expression value: (identifier) field: (field_identifier)) ",
+ "type_arguments: (type_arguments (type_identifier))) ",
+ "arguments: (arguments (identifier)))))))",
+ )
+ );
+}
+
+#[gpui::test]
+async fn test_resetting_language(cx: &mut gpui::TestAppContext) {
+ let buffer = cx.add_model(|cx| {
+ let mut buffer =
+ Buffer::new(0, cx.model_id() as u64, "{}").with_language(Arc::new(rust_lang()), cx);
+ buffer.set_sync_parse_timeout(Duration::ZERO);
+ buffer
+ });
+
+ // Wait for the initial text to parse
+ buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
+ assert_eq!(
+ get_tree_sexp(&buffer, cx),
+ "(source_file (expression_statement (block)))"
+ );
+
+ buffer.update(cx, |buffer, cx| {
+ buffer.set_language(Some(Arc::new(json_lang())), cx)
+ });
+ buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
+ assert_eq!(get_tree_sexp(&buffer, cx), "(document (object))");
+}
+
+#[gpui::test]
+async fn test_outline(cx: &mut gpui::TestAppContext) {
+ let text = r#"
+ struct Person {
+ name: String,
+ age: usize,
+ }
+
+ mod module {
+ enum LoginState {
+ LoggedOut,
+ LoggingOn,
+ LoggedIn {
+ person: Person,
+ time: Instant,
+ }
+ }
+ }
+
+ impl Eq for Person {}
+
+ impl Drop for Person {
+ fn drop(&mut self) {
+ println!("bye");
+ }
+ }
+ "#
+ .unindent();
+
+ let buffer = cx.add_model(|cx| {
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
+ });
+ let outline = buffer
+ .read_with(cx, |buffer, _| buffer.snapshot().outline(None))
+ .unwrap();
+
+ assert_eq!(
+ outline
+ .items
+ .iter()
+ .map(|item| (item.text.as_str(), item.depth))
+ .collect::<Vec<_>>(),
+ &[
+ ("struct Person", 0),
+ ("name", 1),
+ ("age", 1),
+ ("mod module", 0),
+ ("enum LoginState", 1),
+ ("LoggedOut", 2),
+ ("LoggingOn", 2),
+ ("LoggedIn", 2),
+ ("person", 3),
+ ("time", 3),
+ ("impl Eq for Person", 0),
+ ("impl Drop for Person", 0),
+ ("fn drop", 1),
+ ]
+ );
+
+ // Without a space in the query, we only match on names.
+ assert_eq!(
+ search(&outline, "oon", cx).await,
+ &[
+ ("mod module", vec![]), // included as the parent of a match
+ ("enum LoginState", vec![]), // included as the parent of a match
+ ("LoggingOn", vec![1, 7, 8]), // matches
+ ("impl Drop for Person", vec![7, 18, 19]), // matches in two disjoint names
+ ]
+ );
+
+ assert_eq!(
+ search(&outline, "dp p", cx).await,
+ &[
+ ("impl Drop for Person", vec![5, 8, 9, 14]),
+ ("fn drop", vec![]),
+ ]
+ );
+ assert_eq!(
+ search(&outline, "dpn", cx).await,
+ &[("impl Drop for Person", vec![5, 14, 19])]
+ );
+ assert_eq!(
+ search(&outline, "impl ", cx).await,
+ &[
+ ("impl Eq for Person", vec![0, 1, 2, 3, 4]),
+ ("impl Drop for Person", vec![0, 1, 2, 3, 4]),
+ ("fn drop", vec![]),
+ ]
+ );
+
+ async fn search<'a>(
+ outline: &'a Outline<Anchor>,
+ query: &'a str,
+ cx: &'a gpui::TestAppContext,
+ ) -> Vec<(&'a str, Vec<usize>)> {
+ let matches = cx
+ .read(|cx| outline.search(query, cx.background().clone()))
+ .await;
+ matches
+ .into_iter()
+ .map(|mat| (outline.items[mat.candidate_id].text.as_str(), mat.positions))
+ .collect::<Vec<_>>()
+ }
+}
+
+#[gpui::test]
+async fn test_outline_nodes_with_newlines(cx: &mut gpui::TestAppContext) {
+ let text = r#"
+ impl A for B<
+ C
+ > {
+ };
+ "#
+ .unindent();
+
+ let buffer = cx.add_model(|cx| {
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
+ });
+ let outline = buffer
+ .read_with(cx, |buffer, _| buffer.snapshot().outline(None))
+ .unwrap();
+
+ assert_eq!(
+ outline
+ .items
+ .iter()
+ .map(|item| (item.text.as_str(), item.depth))
+ .collect::<Vec<_>>(),
+ &[("impl A for B<", 0)]
+ );
+}
+
+#[gpui::test]
+async fn test_outline_with_extra_context(cx: &mut gpui::TestAppContext) {
+ let language = javascript_lang()
+ .with_outline_query(
+ r#"
+ (function_declaration
+ "function" @context
+ name: (_) @name
+ parameters: (formal_parameters
+ "(" @context.extra
+ ")" @context.extra)) @item
+ "#,
+ )
+ .unwrap();
+
+ let text = r#"
+ function a() {}
+ function b(c) {}
+ "#
+ .unindent();
+
+ let buffer = cx.add_model(|cx| {
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(language), cx)
+ });
+ let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
+
+ // extra context nodes are included in the outline.
+ let outline = snapshot.outline(None).unwrap();
+ assert_eq!(
+ outline
+ .items
+ .iter()
+ .map(|item| (item.text.as_str(), item.depth))
+ .collect::<Vec<_>>(),
+ &[("function a()", 0), ("function b( )", 0),]
+ );
+
+ // extra context nodes do not appear in breadcrumbs.
+ let symbols = snapshot.symbols_containing(3, None).unwrap();
+ assert_eq!(
+ symbols
+ .iter()
+ .map(|item| (item.text.as_str(), item.depth))
+ .collect::<Vec<_>>(),
+ &[("function a", 0)]
+ );
+}
+
+#[gpui::test]
+async fn test_symbols_containing(cx: &mut gpui::TestAppContext) {
+ let text = r#"
+ impl Person {
+ fn one() {
+ 1
+ }
+
+ fn two() {
+ 2
+ }fn three() {
+ 3
+ }
+ }
+ "#
+ .unindent();
+
+ let buffer = cx.add_model(|cx| {
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
+ });
+ let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
+
+ // point is at the start of an item
+ assert_eq!(
+ symbols_containing(Point::new(1, 4), &snapshot),
+ vec![
+ (
+ "impl Person".to_string(),
+ Point::new(0, 0)..Point::new(10, 1)
+ ),
+ ("fn one".to_string(), Point::new(1, 4)..Point::new(3, 5))
+ ]
+ );
+
+ // point is in the middle of an item
+ assert_eq!(
+ symbols_containing(Point::new(2, 8), &snapshot),
+ vec![
+ (
+ "impl Person".to_string(),
+ Point::new(0, 0)..Point::new(10, 1)
+ ),
+ ("fn one".to_string(), Point::new(1, 4)..Point::new(3, 5))
+ ]
+ );
+
+ // point is at the end of an item
+ assert_eq!(
+ symbols_containing(Point::new(3, 5), &snapshot),
+ vec![
+ (
+ "impl Person".to_string(),
+ Point::new(0, 0)..Point::new(10, 1)
+ ),
+ ("fn one".to_string(), Point::new(1, 4)..Point::new(3, 5))
+ ]
+ );
+
+ // point is in between two adjacent items
+ assert_eq!(
+ symbols_containing(Point::new(7, 5), &snapshot),
+ vec![
+ (
+ "impl Person".to_string(),
+ Point::new(0, 0)..Point::new(10, 1)
+ ),
+ ("fn two".to_string(), Point::new(5, 4)..Point::new(7, 5))
+ ]
+ );
+
+ fn symbols_containing(
+ position: Point,
+ snapshot: &BufferSnapshot,
+ ) -> Vec<(String, Range<Point>)> {
+ snapshot
+ .symbols_containing(position, None)
+ .unwrap()
+ .into_iter()
+ .map(|item| {
+ (
+ item.text,
+ item.range.start.to_point(snapshot)..item.range.end.to_point(snapshot),
+ )
+ })
+ .collect()
+ }
+}
+
+#[gpui::test]
+fn test_enclosing_bracket_ranges(cx: &mut AppContext) {
+ let mut assert = |selection_text, range_markers| {
+ assert_bracket_pairs(selection_text, range_markers, rust_lang(), cx)
+ };
+
+ assert(
+ indoc! {"
+ mod x {
+ moˇd y {
+
+ }
+ }
+ let foo = 1;"},
+ vec![indoc! {"
+ mod x «{»
+ mod y {
+
+ }
+ «}»
+ let foo = 1;"}],
+ );
+
+ assert(
+ indoc! {"
+ mod x {
+ mod y ˇ{
+
+ }
+ }
+ let foo = 1;"},
+ vec![
+ indoc! {"
+ mod x «{»
+ mod y {
+
+ }
+ «}»
+ let foo = 1;"},
+ indoc! {"
+ mod x {
+ mod y «{»
+
+ «}»
+ }
+ let foo = 1;"},
+ ],
+ );
+
+ assert(
+ indoc! {"
+ mod x {
+ mod y {
+
+ }ˇ
+ }
+ let foo = 1;"},
+ vec![
+ indoc! {"
+ mod x «{»
+ mod y {
+
+ }
+ «}»
+ let foo = 1;"},
+ indoc! {"
+ mod x {
+ mod y «{»
+
+ «}»
+ }
+ let foo = 1;"},
+ ],
+ );
+
+ assert(
+ indoc! {"
+ mod x {
+ mod y {
+
+ }
+ ˇ}
+ let foo = 1;"},
+ vec![indoc! {"
+ mod x «{»
+ mod y {
+
+ }
+ «}»
+ let foo = 1;"}],
+ );
+
+ assert(
+ indoc! {"
+ mod x {
+ mod y {
+
+ }
+ }
+ let fˇoo = 1;"},
+ vec![],
+ );
+
+ // Regression test: avoid crash when querying at the end of the buffer.
+ assert(
+ indoc! {"
+ mod x {
+ mod y {
+
+ }
+ }
+ let foo = 1;ˇ"},
+ vec![],
+ );
+}
+
+#[gpui::test]
+fn test_enclosing_bracket_ranges_where_brackets_are_not_outermost_children(cx: &mut AppContext) {
+ let mut assert = |selection_text, bracket_pair_texts| {
+ assert_bracket_pairs(selection_text, bracket_pair_texts, javascript_lang(), cx)
+ };
+
+ assert(
+ indoc! {"
+ for (const a in b)ˇ {
+ // a comment that's longer than the for-loop header
+ }"},
+ vec![indoc! {"
+ for «(»const a in b«)» {
+ // a comment that's longer than the for-loop header
+ }"}],
+ );
+
+ // Regression test: even though the parent node of the parentheses (the for loop) does
+ // intersect the given range, the parentheses themselves do not contain the range, so
+ // they should not be returned. Only the curly braces contain the range.
+ assert(
+ indoc! {"
+ for (const a in b) {ˇ
+ // a comment that's longer than the for-loop header
+ }"},
+ vec![indoc! {"
+ for (const a in b) «{»
+ // a comment that's longer than the for-loop header
+ «}»"}],
+ );
+}
+
+#[gpui::test]
+fn test_range_for_syntax_ancestor(cx: &mut AppContext) {
+ cx.add_model(|cx| {
+ let text = "fn a() { b(|c| {}) }";
+ let buffer =
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
+ let snapshot = buffer.snapshot();
+
+ assert_eq!(
+ snapshot.range_for_syntax_ancestor(empty_range_at(text, "|")),
+ Some(range_of(text, "|"))
+ );
+ assert_eq!(
+ snapshot.range_for_syntax_ancestor(range_of(text, "|")),
+ Some(range_of(text, "|c|"))
+ );
+ assert_eq!(
+ snapshot.range_for_syntax_ancestor(range_of(text, "|c|")),
+ Some(range_of(text, "|c| {}"))
+ );
+ assert_eq!(
+ snapshot.range_for_syntax_ancestor(range_of(text, "|c| {}")),
+ Some(range_of(text, "(|c| {})"))
+ );
+
+ buffer
+ });
+
+ fn empty_range_at(text: &str, part: &str) -> Range<usize> {
+ let start = text.find(part).unwrap();
+ start..start
+ }
+
+ fn range_of(text: &str, part: &str) -> Range<usize> {
+ let start = text.find(part).unwrap();
+ start..start + part.len()
+ }
+}
+
+#[gpui::test]
+fn test_autoindent_with_soft_tabs(cx: &mut AppContext) {
+ init_settings(cx, |_| {});
+
+ cx.add_model(|cx| {
+ let text = "fn a() {}";
+ let mut buffer =
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
+
+ buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx);
+ assert_eq!(buffer.text(), "fn a() {\n \n}");
+
+ buffer.edit(
+ [(Point::new(1, 4)..Point::new(1, 4), "b()\n")],
+ Some(AutoindentMode::EachLine),
+ cx,
+ );
+ assert_eq!(buffer.text(), "fn a() {\n b()\n \n}");
+
+ // Create a field expression on a new line, causing that line
+ // to be indented.
+ buffer.edit(
+ [(Point::new(2, 4)..Point::new(2, 4), ".c")],
+ Some(AutoindentMode::EachLine),
+ cx,
+ );
+ assert_eq!(buffer.text(), "fn a() {\n b()\n .c\n}");
+
+ // Remove the dot so that the line is no longer a field expression,
+ // causing the line to be outdented.
+ buffer.edit(
+ [(Point::new(2, 8)..Point::new(2, 9), "")],
+ Some(AutoindentMode::EachLine),
+ cx,
+ );
+ assert_eq!(buffer.text(), "fn a() {\n b()\n c\n}");
+
+ buffer
+ });
+}
+
+#[gpui::test]
+fn test_autoindent_with_hard_tabs(cx: &mut AppContext) {
+ init_settings(cx, |settings| {
+ settings.defaults.hard_tabs = Some(true);
+ });
+
+ cx.add_model(|cx| {
+ let text = "fn a() {}";
+ let mut buffer =
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
+
+ buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx);
+ assert_eq!(buffer.text(), "fn a() {\n\t\n}");
+
+ buffer.edit(
+ [(Point::new(1, 1)..Point::new(1, 1), "b()\n")],
+ Some(AutoindentMode::EachLine),
+ cx,
+ );
+ assert_eq!(buffer.text(), "fn a() {\n\tb()\n\t\n}");
+
+ // Create a field expression on a new line, causing that line
+ // to be indented.
+ buffer.edit(
+ [(Point::new(2, 1)..Point::new(2, 1), ".c")],
+ Some(AutoindentMode::EachLine),
+ cx,
+ );
+ assert_eq!(buffer.text(), "fn a() {\n\tb()\n\t\t.c\n}");
+
+ // Remove the dot so that the line is no longer a field expression,
+ // causing the line to be outdented.
+ buffer.edit(
+ [(Point::new(2, 2)..Point::new(2, 3), "")],
+ Some(AutoindentMode::EachLine),
+ cx,
+ );
+ assert_eq!(buffer.text(), "fn a() {\n\tb()\n\tc\n}");
+
+ buffer
+ });
+}
+
+#[gpui::test]
+fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppContext) {
+ init_settings(cx, |_| {});
+
+ cx.add_model(|cx| {
+ let mut buffer = Buffer::new(
+ 0,
+ cx.model_id() as u64,
+ "
+ fn a() {
+ c;
+ d;
+ }
+ "
+ .unindent(),
+ )
+ .with_language(Arc::new(rust_lang()), cx);
+
+ // Lines 2 and 3 don't match the indentation suggestion. When editing these lines,
+ // their indentation is not adjusted.
+ buffer.edit_via_marked_text(
+ &"
+ fn a() {
+ c«()»;
+ d«()»;
+ }
+ "
+ .unindent(),
+ Some(AutoindentMode::EachLine),
+ cx,
+ );
+ assert_eq!(
+ buffer.text(),
+ "
+ fn a() {
+ c();
+ d();
+ }
+ "
+ .unindent()
+ );
+
+ // When appending new content after these lines, the indentation is based on the
+ // preceding lines' actual indentation.
+ buffer.edit_via_marked_text(
+ &"
+ fn a() {
+ c«
+ .f
+ .g()»;
+ d«
+ .f
+ .g()»;
+ }
+ "
+ .unindent(),
+ Some(AutoindentMode::EachLine),
+ cx,
+ );
+
+ assert_eq!(
+ buffer.text(),
+ "
+ fn a() {
+ c
+ .f
+ .g();
+ d
+ .f
+ .g();
+ }
+ "
+ .unindent()
+ );
+ buffer
+ });
+
+ cx.add_model(|cx| {
+ let mut buffer = Buffer::new(
+ 0,
+ cx.model_id() as u64,
+ "
+ fn a() {
+ b();
+ |
+ "
+ .replace("|", "") // marker to preserve trailing whitespace
+ .unindent(),
+ )
+ .with_language(Arc::new(rust_lang()), cx);
+
+ // Insert a closing brace. It is outdented.
+ buffer.edit_via_marked_text(
+ &"
+ fn a() {
+ b();
+ «}»
+ "
+ .unindent(),
+ Some(AutoindentMode::EachLine),
+ cx,
+ );
+ assert_eq!(
+ buffer.text(),
+ "
+ fn a() {
+ b();
+ }
+ "
+ .unindent()
+ );
+
+ // Manually edit the leading whitespace. The edit is preserved.
+ buffer.edit_via_marked_text(
+ &"
+ fn a() {
+ b();
+ « »}
+ "
+ .unindent(),
+ Some(AutoindentMode::EachLine),
+ cx,
+ );
+ assert_eq!(
+ buffer.text(),
+ "
+ fn a() {
+ b();
+ }
+ "
+ .unindent()
+ );
+ buffer
+ });
+}
+
+#[gpui::test]
+fn test_autoindent_does_not_adjust_lines_within_newly_created_errors(cx: &mut AppContext) {
+ init_settings(cx, |_| {});
+
+ cx.add_model(|cx| {
+ let mut buffer = Buffer::new(
+ 0,
+ cx.model_id() as u64,
+ "
+ fn a() {
+ i
+ }
+ "
+ .unindent(),
+ )
+ .with_language(Arc::new(rust_lang()), cx);
+
+ // Regression test: line does not get outdented due to syntax error
+ buffer.edit_via_marked_text(
+ &"
+ fn a() {
+ i«f let Some(x) = y»
+ }
+ "
+ .unindent(),
+ Some(AutoindentMode::EachLine),
+ cx,
+ );
+ assert_eq!(
+ buffer.text(),
+ "
+ fn a() {
+ if let Some(x) = y
+ }
+ "
+ .unindent()
+ );
+
+ buffer.edit_via_marked_text(
+ &"
+ fn a() {
+ if let Some(x) = y« {»
+ }
+ "
+ .unindent(),
+ Some(AutoindentMode::EachLine),
+ cx,
+ );
+ assert_eq!(
+ buffer.text(),
+ "
+ fn a() {
+ if let Some(x) = y {
+ }
+ "
+ .unindent()
+ );
+
+ buffer
+ });
+}
+
+#[gpui::test]
+fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut AppContext) {
+ init_settings(cx, |_| {});
+
+ cx.add_model(|cx| {
+ let mut buffer = Buffer::new(
+ 0,
+ cx.model_id() as u64,
+ "
+ fn a() {}
+ "
+ .unindent(),
+ )
+ .with_language(Arc::new(rust_lang()), cx);
+
+ buffer.edit_via_marked_text(
+ &"
+ fn a(«
+ b») {}
+ "
+ .unindent(),
+ Some(AutoindentMode::EachLine),
+ cx,
+ );
+ assert_eq!(
+ buffer.text(),
+ "
+ fn a(
+ b) {}
+ "
+ .unindent()
+ );
+
+ // The indentation suggestion changed because `@end` node (a close paren)
+ // is now at the beginning of the line.
+ buffer.edit_via_marked_text(
+ &"
+ fn a(
+ ˇ) {}
+ "
+ .unindent(),
+ Some(AutoindentMode::EachLine),
+ cx,
+ );
+ assert_eq!(
+ buffer.text(),
+ "
+ fn a(
+ ) {}
+ "
+ .unindent()
+ );
+
+ buffer
+ });
+}
+
+#[gpui::test]
+fn test_autoindent_with_edit_at_end_of_buffer(cx: &mut AppContext) {
+ init_settings(cx, |_| {});
+
+ cx.add_model(|cx| {
+ let text = "a\nb";
+ let mut buffer =
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
+ buffer.edit(
+ [(0..1, "\n"), (2..3, "\n")],
+ Some(AutoindentMode::EachLine),
+ cx,
+ );
+ assert_eq!(buffer.text(), "\n\n\n");
+ buffer
+ });
+}
+
+#[gpui::test]
+fn test_autoindent_multi_line_insertion(cx: &mut AppContext) {
+ init_settings(cx, |_| {});
+
+ cx.add_model(|cx| {
+ let text = "
+ const a: usize = 1;
+ fn b() {
+ if c {
+ let d = 2;
+ }
+ }
+ "
+ .unindent();
+
+ let mut buffer =
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
+ buffer.edit(
+ [(Point::new(3, 0)..Point::new(3, 0), "e(\n f()\n);\n")],
+ Some(AutoindentMode::EachLine),
+ cx,
+ );
+ assert_eq!(
+ buffer.text(),
+ "
+ const a: usize = 1;
+ fn b() {
+ if c {
+ e(
+ f()
+ );
+ let d = 2;
+ }
+ }
+ "
+ .unindent()
+ );
+
+ buffer
+ });
+}
+
+#[gpui::test]
+fn test_autoindent_block_mode(cx: &mut AppContext) {
+ init_settings(cx, |_| {});
+
+ cx.add_model(|cx| {
+ let text = r#"
+ fn a() {
+ b();
+ }
+ "#
+ .unindent();
+ let mut buffer =
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
+
+ // When this text was copied, both of the quotation marks were at the same
+ // indent level, but the indentation of the first line was not included in
+ // the copied text. This information is retained in the
+        // `original_indent_columns` vector.
+ let original_indent_columns = vec![4];
+ let inserted_text = r#"
+ "
+ c
+ d
+ e
+ "
+ "#
+ .unindent();
+
+ // Insert the block at column zero. The entire block is indented
+ // so that the first line matches the previous line's indentation.
+ buffer.edit(
+ [(Point::new(2, 0)..Point::new(2, 0), inserted_text.clone())],
+ Some(AutoindentMode::Block {
+ original_indent_columns: original_indent_columns.clone(),
+ }),
+ cx,
+ );
+ assert_eq!(
+ buffer.text(),
+ r#"
+ fn a() {
+ b();
+ "
+ c
+ d
+ e
+ "
+ }
+ "#
+ .unindent()
+ );
+
+ // Grouping is disabled in tests, so we need 2 undos
+ buffer.undo(cx); // Undo the auto-indent
+ buffer.undo(cx); // Undo the original edit
+
+ // Insert the block at a deeper indent level. The entire block is outdented.
+ buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
+ buffer.edit(
+ [(Point::new(2, 8)..Point::new(2, 8), inserted_text)],
+ Some(AutoindentMode::Block {
+ original_indent_columns: original_indent_columns.clone(),
+ }),
+ cx,
+ );
+ assert_eq!(
+ buffer.text(),
+ r#"
+ fn a() {
+ b();
+ "
+ c
+ d
+ e
+ "
+ }
+ "#
+ .unindent()
+ );
+
+ buffer
+ });
+}
+
+#[gpui::test]
+fn test_autoindent_block_mode_without_original_indent_columns(cx: &mut AppContext) {
+ init_settings(cx, |_| {});
+
+ cx.add_model(|cx| {
+ let text = r#"
+ fn a() {
+ if b() {
+
+ }
+ }
+ "#
+ .unindent();
+ let mut buffer =
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
+
+ // The original indent columns are not known, so this text is
+ // auto-indented in a block as if the first line was copied in
+ // its entirety.
+ let original_indent_columns = Vec::new();
+ let inserted_text = " c\n .d()\n .e();";
+
+ // Insert the block at column zero. The entire block is indented
+ // so that the first line matches the previous line's indentation.
+ buffer.edit(
+ [(Point::new(2, 0)..Point::new(2, 0), inserted_text)],
+ Some(AutoindentMode::Block {
+ original_indent_columns: original_indent_columns.clone(),
+ }),
+ cx,
+ );
+ assert_eq!(
+ buffer.text(),
+ r#"
+ fn a() {
+ if b() {
+ c
+ .d()
+ .e();
+ }
+ }
+ "#
+ .unindent()
+ );
+
+ // Grouping is disabled in tests, so we need 2 undos
+ buffer.undo(cx); // Undo the auto-indent
+ buffer.undo(cx); // Undo the original edit
+
+ // Insert the block at a deeper indent level. The entire block is outdented.
+ buffer.edit(
+ [(Point::new(2, 0)..Point::new(2, 0), " ".repeat(12))],
+ None,
+ cx,
+ );
+ buffer.edit(
+ [(Point::new(2, 12)..Point::new(2, 12), inserted_text)],
+ Some(AutoindentMode::Block {
+ original_indent_columns: Vec::new(),
+ }),
+ cx,
+ );
+ assert_eq!(
+ buffer.text(),
+ r#"
+ fn a() {
+ if b() {
+ c
+ .d()
+ .e();
+ }
+ }
+ "#
+ .unindent()
+ );
+
+ buffer
+ });
+}
+
+#[gpui::test]
+fn test_autoindent_language_without_indents_query(cx: &mut AppContext) {
+ init_settings(cx, |_| {});
+
+ cx.add_model(|cx| {
+ let text = "
+ * one
+ - a
+ - b
+ * two
+ "
+ .unindent();
+
+ let mut buffer = Buffer::new(0, cx.model_id() as u64, text).with_language(
+ Arc::new(Language::new(
+ LanguageConfig {
+ name: "Markdown".into(),
+ auto_indent_using_last_non_empty_line: false,
+ ..Default::default()
+ },
+ Some(tree_sitter_json::language()),
+ )),
+ cx,
+ );
+ buffer.edit(
+ [(Point::new(3, 0)..Point::new(3, 0), "\n")],
+ Some(AutoindentMode::EachLine),
+ cx,
+ );
+ assert_eq!(
+ buffer.text(),
+ "
+ * one
+ - a
+ - b
+
+ * two
+ "
+ .unindent()
+ );
+ buffer
+ });
+}
+
+#[gpui::test]
+fn test_autoindent_with_injected_languages(cx: &mut AppContext) {
+ init_settings(cx, |settings| {
+ settings.languages.extend([
+ (
+ "HTML".into(),
+ LanguageSettingsContent {
+ tab_size: Some(2.try_into().unwrap()),
+ ..Default::default()
+ },
+ ),
+ (
+ "JavaScript".into(),
+ LanguageSettingsContent {
+ tab_size: Some(8.try_into().unwrap()),
+ ..Default::default()
+ },
+ ),
+ ])
+ });
+
+ let html_language = Arc::new(html_lang());
+
+ let javascript_language = Arc::new(javascript_lang());
+
+ let language_registry = Arc::new(LanguageRegistry::test());
+ language_registry.add(html_language.clone());
+ language_registry.add(javascript_language.clone());
+
+ cx.add_model(|cx| {
+ let (text, ranges) = marked_text_ranges(
+ &"
+ <div>ˇ
+ </div>
+ <script>
+ init({ˇ
+ })
+ </script>
+ <span>ˇ
+ </span>
+ "
+ .unindent(),
+ false,
+ );
+
+ let mut buffer = Buffer::new(0, cx.model_id() as u64, text);
+ buffer.set_language_registry(language_registry);
+ buffer.set_language(Some(html_language), cx);
+ buffer.edit(
+ ranges.into_iter().map(|range| (range, "\na")),
+ Some(AutoindentMode::EachLine),
+ cx,
+ );
+ assert_eq!(
+ buffer.text(),
+ "
+ <div>
+ a
+ </div>
+ <script>
+ init({
+ a
+ })
+ </script>
+ <span>
+ a
+ </span>
+ "
+ .unindent()
+ );
+ buffer
+ });
+}
+
+#[gpui::test]
+fn test_autoindent_query_with_outdent_captures(cx: &mut AppContext) {
+ init_settings(cx, |settings| {
+ settings.defaults.tab_size = Some(2.try_into().unwrap());
+ });
+
+ cx.add_model(|cx| {
+ let mut buffer =
+ Buffer::new(0, cx.model_id() as u64, "").with_language(Arc::new(ruby_lang()), cx);
+
+ let text = r#"
+ class C
+ def a(b, c)
+ puts b
+ puts c
+ rescue
+ puts "errored"
+ exit 1
+ end
+ end
+ "#
+ .unindent();
+
+ buffer.edit([(0..0, text)], Some(AutoindentMode::EachLine), cx);
+
+ assert_eq!(
+ buffer.text(),
+ r#"
+ class C
+ def a(b, c)
+ puts b
+ puts c
+ rescue
+ puts "errored"
+ exit 1
+ end
+ end
+ "#
+ .unindent()
+ );
+
+ buffer
+ });
+}
+
+#[gpui::test]
+fn test_language_scope_at_with_javascript(cx: &mut AppContext) {
+ init_settings(cx, |_| {});
+
+ cx.add_model(|cx| {
+ let language = Language::new(
+ LanguageConfig {
+ name: "JavaScript".into(),
+ line_comment: Some("// ".into()),
+ brackets: BracketPairConfig {
+ pairs: vec![
+ BracketPair {
+ start: "{".into(),
+ end: "}".into(),
+ close: true,
+ newline: false,
+ },
+ BracketPair {
+ start: "'".into(),
+ end: "'".into(),
+ close: true,
+ newline: false,
+ },
+ ],
+ disabled_scopes_by_bracket_ix: vec![
+ Vec::new(), //
+ vec!["string".into()],
+ ],
+ },
+ overrides: [(
+ "element".into(),
+ LanguageConfigOverride {
+ line_comment: Override::Remove { remove: true },
+ block_comment: Override::Set(("{/*".into(), "*/}".into())),
+ ..Default::default()
+ },
+ )]
+ .into_iter()
+ .collect(),
+ ..Default::default()
+ },
+ Some(tree_sitter_typescript::language_tsx()),
+ )
+ .with_override_query(
+ r#"
+ (jsx_element) @element
+ (string) @string
+ "#,
+ )
+ .unwrap();
+
+ let text = r#"a["b"] = <C d="e"></C>;"#;
+
+ let buffer =
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(language), cx);
+ let snapshot = buffer.snapshot();
+
+ let config = snapshot.language_scope_at(0).unwrap();
+ assert_eq!(config.line_comment_prefix().unwrap().as_ref(), "// ");
+ // Both bracket pairs are enabled
+ assert_eq!(
+ config.brackets().map(|e| e.1).collect::<Vec<_>>(),
+ &[true, true]
+ );
+
+ let string_config = snapshot.language_scope_at(3).unwrap();
+ assert_eq!(string_config.line_comment_prefix().unwrap().as_ref(), "// ");
+ // Second bracket pair is disabled
+ assert_eq!(
+ string_config.brackets().map(|e| e.1).collect::<Vec<_>>(),
+ &[true, false]
+ );
+
+ let element_config = snapshot.language_scope_at(10).unwrap();
+ assert_eq!(element_config.line_comment_prefix(), None);
+ assert_eq!(
+ element_config.block_comment_delimiters(),
+ Some((&"{/*".into(), &"*/}".into()))
+ );
+ // Both bracket pairs are enabled
+ assert_eq!(
+ element_config.brackets().map(|e| e.1).collect::<Vec<_>>(),
+ &[true, true]
+ );
+
+ buffer
+ });
+}
+
+#[gpui::test]
+fn test_language_scope_at_with_rust(cx: &mut AppContext) {
+ init_settings(cx, |_| {});
+
+ cx.add_model(|cx| {
+ let language = Language::new(
+ LanguageConfig {
+ name: "Rust".into(),
+ brackets: BracketPairConfig {
+ pairs: vec![
+ BracketPair {
+ start: "{".into(),
+ end: "}".into(),
+ close: true,
+ newline: false,
+ },
+ BracketPair {
+ start: "'".into(),
+ end: "'".into(),
+ close: true,
+ newline: false,
+ },
+ ],
+ disabled_scopes_by_bracket_ix: vec![
+ Vec::new(), //
+ vec!["string".into()],
+ ],
+ },
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::language()),
+ )
+ .with_override_query(
+ r#"
+ (string_literal) @string
+ "#,
+ )
+ .unwrap();
+
+ let text = r#"
+ const S: &'static str = "hello";
+ "#
+ .unindent();
+
+ let buffer = Buffer::new(0, cx.model_id() as u64, text.clone())
+ .with_language(Arc::new(language), cx);
+ let snapshot = buffer.snapshot();
+
+ // By default, all brackets are enabled
+ let config = snapshot.language_scope_at(0).unwrap();
+ assert_eq!(
+ config.brackets().map(|e| e.1).collect::<Vec<_>>(),
+ &[true, true]
+ );
+
+ // Within a string, the quotation brackets are disabled.
+ let string_config = snapshot
+ .language_scope_at(text.find("ello").unwrap())
+ .unwrap();
+ assert_eq!(
+ string_config.brackets().map(|e| e.1).collect::<Vec<_>>(),
+ &[true, false]
+ );
+
+ buffer
+ });
+}
+
+#[gpui::test]
+fn test_language_scope_at_with_combined_injections(cx: &mut AppContext) {
+ init_settings(cx, |_| {});
+
+ cx.add_model(|cx| {
+ let text = r#"
+ <ol>
+ <% people.each do |person| %>
+ <li>
+ <%= person.name %>
+ </li>
+ <% end %>
+ </ol>
+ "#
+ .unindent();
+
+ let language_registry = Arc::new(LanguageRegistry::test());
+ language_registry.add(Arc::new(ruby_lang()));
+ language_registry.add(Arc::new(html_lang()));
+ language_registry.add(Arc::new(erb_lang()));
+
+ let mut buffer = Buffer::new(0, cx.model_id() as u64, text);
+ buffer.set_language_registry(language_registry.clone());
+ buffer.set_language(
+ language_registry
+ .language_for_name("ERB")
+ .now_or_never()
+ .unwrap()
+ .ok(),
+ cx,
+ );
+
+ let snapshot = buffer.snapshot();
+ let html_config = snapshot.language_scope_at(Point::new(2, 4)).unwrap();
+ assert_eq!(html_config.line_comment_prefix(), None);
+ assert_eq!(
+ html_config.block_comment_delimiters(),
+ Some((&"<!--".into(), &"-->".into()))
+ );
+
+ let ruby_config = snapshot.language_scope_at(Point::new(3, 12)).unwrap();
+ assert_eq!(ruby_config.line_comment_prefix().unwrap().as_ref(), "# ");
+ assert_eq!(ruby_config.block_comment_delimiters(), None);
+
+ buffer
+ });
+}
+
+#[gpui::test]
+fn test_serialization(cx: &mut gpui::AppContext) {
+ let mut now = Instant::now();
+
+ let buffer1 = cx.add_model(|cx| {
+ let mut buffer = Buffer::new(0, cx.model_id() as u64, "abc");
+ buffer.edit([(3..3, "D")], None, cx);
+
+ now += Duration::from_secs(1);
+ buffer.start_transaction_at(now);
+ buffer.edit([(4..4, "E")], None, cx);
+ buffer.end_transaction_at(now, cx);
+ assert_eq!(buffer.text(), "abcDE");
+
+ buffer.undo(cx);
+ assert_eq!(buffer.text(), "abcD");
+
+ buffer.edit([(4..4, "F")], None, cx);
+ assert_eq!(buffer.text(), "abcDF");
+ buffer
+ });
+ assert_eq!(buffer1.read(cx).text(), "abcDF");
+
+ let state = buffer1.read(cx).to_proto();
+ let ops = cx
+ .background()
+ .block(buffer1.read(cx).serialize_ops(None, cx));
+ let buffer2 = cx.add_model(|cx| {
+ let mut buffer = Buffer::from_proto(1, state, None).unwrap();
+ buffer
+ .apply_ops(
+ ops.into_iter()
+ .map(|op| proto::deserialize_operation(op).unwrap()),
+ cx,
+ )
+ .unwrap();
+ buffer
+ });
+ assert_eq!(buffer2.read(cx).text(), "abcDF");
+}
+
+#[gpui::test(iterations = 100)]
+fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
+ let min_peers = env::var("MIN_PEERS")
+ .map(|i| i.parse().expect("invalid `MIN_PEERS` variable"))
+ .unwrap_or(1);
+ let max_peers = env::var("MAX_PEERS")
+ .map(|i| i.parse().expect("invalid `MAX_PEERS` variable"))
+ .unwrap_or(5);
+ let operations = env::var("OPERATIONS")
+ .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
+ .unwrap_or(10);
+
+ let base_text_len = rng.gen_range(0..10);
+ let base_text = RandomCharIter::new(&mut rng)
+ .take(base_text_len)
+ .collect::<String>();
+ let mut replica_ids = Vec::new();
+ let mut buffers = Vec::new();
+ let network = Rc::new(RefCell::new(Network::new(rng.clone())));
+ let base_buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, base_text.as_str()));
+
+ for i in 0..rng.gen_range(min_peers..=max_peers) {
+ let buffer = cx.add_model(|cx| {
+ let state = base_buffer.read(cx).to_proto();
+ let ops = cx
+ .background()
+ .block(base_buffer.read(cx).serialize_ops(None, cx));
+ let mut buffer = Buffer::from_proto(i as ReplicaId, state, None).unwrap();
+ buffer
+ .apply_ops(
+ ops.into_iter()
+ .map(|op| proto::deserialize_operation(op).unwrap()),
+ cx,
+ )
+ .unwrap();
+ buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
+ let network = network.clone();
+ cx.subscribe(&cx.handle(), move |buffer, _, event, _| {
+ if let Event::Operation(op) = event {
+ network
+ .borrow_mut()
+ .broadcast(buffer.replica_id(), vec![proto::serialize_operation(op)]);
+ }
+ })
+ .detach();
+ buffer
+ });
+ buffers.push(buffer);
+ replica_ids.push(i as ReplicaId);
+ network.borrow_mut().add_peer(i as ReplicaId);
+ log::info!("Adding initial peer with replica id {}", i);
+ }
+
+ log::info!("initial text: {:?}", base_text);
+
+ let mut now = Instant::now();
+ let mut mutation_count = operations;
+ let mut next_diagnostic_id = 0;
+ let mut active_selections = BTreeMap::default();
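+    // Repeatedly pick a random replica and either mutate it (edits, selections,
+    // diagnostics, undo/redo), spawn a new replica from its serialized state, or
+    // deliver pending operations from the network, until all mutations are spent
+    // and the network is idle.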
+ loop {
+ let replica_index = rng.gen_range(0..replica_ids.len());
+ let replica_id = replica_ids[replica_index];
+ let buffer = &mut buffers[replica_index];
+ let mut new_buffer = None;
+ match rng.gen_range(0..100) {
+ 0..=29 if mutation_count != 0 => {
+ buffer.update(cx, |buffer, cx| {
+ buffer.start_transaction_at(now);
+ buffer.randomly_edit(&mut rng, 5, cx);
+ buffer.end_transaction_at(now, cx);
+ log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
+ });
+ mutation_count -= 1;
+ }
+ 30..=39 if mutation_count != 0 => {
+ buffer.update(cx, |buffer, cx| {
+ if rng.gen_bool(0.2) {
+ log::info!("peer {} clearing active selections", replica_id);
+ active_selections.remove(&replica_id);
+ buffer.remove_active_selections(cx);
+ } else {
+ let mut selections = Vec::new();
+ for id in 0..rng.gen_range(1..=5) {
+ let range = buffer.random_byte_range(0, &mut rng);
+ selections.push(Selection {
+ id,
+ start: buffer.anchor_before(range.start),
+ end: buffer.anchor_before(range.end),
+ reversed: false,
+ goal: SelectionGoal::None,
+ });
+ }
+ let selections: Arc<[Selection<Anchor>]> = selections.into();
+ log::info!(
+ "peer {} setting active selections: {:?}",
+ replica_id,
+ selections
+ );
+ active_selections.insert(replica_id, selections.clone());
+ buffer.set_active_selections(selections, false, Default::default(), cx);
+ }
+ });
+ mutation_count -= 1;
+ }
+ 40..=49 if mutation_count != 0 && replica_id == 0 => {
+ let entry_count = rng.gen_range(1..=5);
+ buffer.update(cx, |buffer, cx| {
+ let diagnostics = DiagnosticSet::new(
+ (0..entry_count).map(|_| {
+ let range = buffer.random_byte_range(0, &mut rng);
+ let range = range.to_point_utf16(buffer);
+ let range = range.start..range.end;
+ DiagnosticEntry {
+ range,
+ diagnostic: Diagnostic {
+ message: post_inc(&mut next_diagnostic_id).to_string(),
+ ..Default::default()
+ },
+ }
+ }),
+ buffer,
+ );
+ log::info!("peer {} setting diagnostics: {:?}", replica_id, diagnostics);
+ buffer.update_diagnostics(LanguageServerId(0), diagnostics, cx);
+ });
+ mutation_count -= 1;
+ }
+ 50..=59 if replica_ids.len() < max_peers => {
+ let old_buffer_state = buffer.read(cx).to_proto();
+ let old_buffer_ops = cx
+ .background()
+ .block(buffer.read(cx).serialize_ops(None, cx));
+ let new_replica_id = (0..=replica_ids.len() as ReplicaId)
+ .filter(|replica_id| *replica_id != buffer.read(cx).replica_id())
+ .choose(&mut rng)
+ .unwrap();
+ log::info!(
+ "Adding new replica {} (replicating from {})",
+ new_replica_id,
+ replica_id
+ );
+ new_buffer = Some(cx.add_model(|cx| {
+ let mut new_buffer =
+ Buffer::from_proto(new_replica_id, old_buffer_state, None).unwrap();
+ new_buffer
+ .apply_ops(
+ old_buffer_ops
+ .into_iter()
+ .map(|op| deserialize_operation(op).unwrap()),
+ cx,
+ )
+ .unwrap();
+ log::info!(
+ "New replica {} text: {:?}",
+ new_buffer.replica_id(),
+ new_buffer.text()
+ );
+ new_buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
+ let network = network.clone();
+ cx.subscribe(&cx.handle(), move |buffer, _, event, _| {
+ if let Event::Operation(op) = event {
+ network.borrow_mut().broadcast(
+ buffer.replica_id(),
+ vec![proto::serialize_operation(op)],
+ );
+ }
+ })
+ .detach();
+ new_buffer
+ }));
+ network.borrow_mut().replicate(replica_id, new_replica_id);
+
+ if new_replica_id as usize == replica_ids.len() {
+ replica_ids.push(new_replica_id);
+ } else {
+ let new_buffer = new_buffer.take().unwrap();
+ while network.borrow().has_unreceived(new_replica_id) {
+ let ops = network
+ .borrow_mut()
+ .receive(new_replica_id)
+ .into_iter()
+ .map(|op| proto::deserialize_operation(op).unwrap());
+ if ops.len() > 0 {
+ log::info!(
+ "peer {} (version: {:?}) applying {} ops from the network. {:?}",
+ new_replica_id,
+ buffer.read(cx).version(),
+ ops.len(),
+ ops
+ );
+ new_buffer.update(cx, |new_buffer, cx| {
+ new_buffer.apply_ops(ops, cx).unwrap();
+ });
+ }
+ }
+ buffers[new_replica_id as usize] = new_buffer;
+ }
+ }
+ 60..=69 if mutation_count != 0 => {
+ buffer.update(cx, |buffer, cx| {
+ buffer.randomly_undo_redo(&mut rng, cx);
+ log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
+ });
+ mutation_count -= 1;
+ }
+ _ if network.borrow().has_unreceived(replica_id) => {
+ let ops = network
+ .borrow_mut()
+ .receive(replica_id)
+ .into_iter()
+ .map(|op| proto::deserialize_operation(op).unwrap());
+ if ops.len() > 0 {
+ log::info!(
+ "peer {} (version: {:?}) applying {} ops from the network. {:?}",
+ replica_id,
+ buffer.read(cx).version(),
+ ops.len(),
+ ops
+ );
+ buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx).unwrap());
+ }
+ }
+ _ => {}
+ }
+
+ now += Duration::from_millis(rng.gen_range(0..=200));
+ buffers.extend(new_buffer);
+
+ for buffer in &buffers {
+ buffer.read(cx).check_invariants();
+ }
+
+ if mutation_count == 0 && network.borrow().is_idle() {
+ break;
+ }
+ }
+
+ let first_buffer = buffers[0].read(cx).snapshot();
+ for buffer in &buffers[1..] {
+ let buffer = buffer.read(cx).snapshot();
+ assert_eq!(
+ buffer.version(),
+ first_buffer.version(),
+ "Replica {} version != Replica 0 version",
+ buffer.replica_id()
+ );
+ assert_eq!(
+ buffer.text(),
+ first_buffer.text(),
+ "Replica {} text != Replica 0 text",
+ buffer.replica_id()
+ );
+ assert_eq!(
+ buffer
+ .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
+ .collect::<Vec<_>>(),
+ first_buffer
+ .diagnostics_in_range::<_, usize>(0..first_buffer.len(), false)
+ .collect::<Vec<_>>(),
+ "Replica {} diagnostics != Replica 0 diagnostics",
+ buffer.replica_id()
+ );
+ }
+
+ for buffer in &buffers {
+ let buffer = buffer.read(cx).snapshot();
+ let actual_remote_selections = buffer
+ .remote_selections_in_range(Anchor::MIN..Anchor::MAX)
+ .map(|(replica_id, _, _, selections)| (replica_id, selections.collect::<Vec<_>>()))
+ .collect::<Vec<_>>();
+ let expected_remote_selections = active_selections
+ .iter()
+ .filter(|(replica_id, _)| **replica_id != buffer.replica_id())
+ .map(|(replica_id, selections)| (*replica_id, selections.iter().collect::<Vec<_>>()))
+ .collect::<Vec<_>>();
+ assert_eq!(
+ actual_remote_selections,
+ expected_remote_selections,
+ "Replica {} remote selections != expected selections",
+ buffer.replica_id()
+ );
+ }
+}
+
+#[test]
+fn test_contiguous_ranges() {
+ assert_eq!(
+ contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter(), 100).collect::<Vec<_>>(),
+ &[1..4, 5..7, 9..13]
+ );
+
+ // Respects the `max_len` parameter
+ assert_eq!(
+ contiguous_ranges(
+ [2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31].into_iter(),
+ 3
+ )
+ .collect::<Vec<_>>(),
+ &[2..5, 5..8, 8..10, 23..26, 26..27, 30..32],
+ );
+}
+
+#[gpui::test(iterations = 500)]
+fn test_trailing_whitespace_ranges(mut rng: StdRng) {
+ // Generate a random multi-line string containing
+ // some lines with trailing whitespace.
+ let mut text = String::new();
+ for _ in 0..rng.gen_range(0..16) {
+ for _ in 0..rng.gen_range(0..36) {
+ text.push(match rng.gen_range(0..10) {
+ 0..=1 => ' ',
+ 3 => '\t',
+ _ => rng.gen_range('a'..'z'),
+ });
+ }
+ text.push('\n');
+ }
+
+ match rng.gen_range(0..10) {
+ // sometimes remove the last newline
+ 0..=1 => drop(text.pop()), //
+
+ // sometimes add extra newlines
+ 2..=3 => text.push_str(&"\n".repeat(rng.gen_range(1..5))),
+ _ => {}
+ }
+
+ let rope = Rope::from(text.as_str());
+ let actual_ranges = trailing_whitespace_ranges(&rope);
+ let expected_ranges = TRAILING_WHITESPACE_REGEX
+ .find_iter(&text)
+ .map(|m| m.range())
+ .collect::<Vec<_>>();
+ assert_eq!(
+ actual_ranges,
+ expected_ranges,
+ "wrong ranges for text lines:\n{:?}",
+ text.split("\n").collect::<Vec<_>>()
+ );
+}
+
+fn ruby_lang() -> Language {
+ Language::new(
+ LanguageConfig {
+ name: "Ruby".into(),
+ path_suffixes: vec!["rb".to_string()],
+ line_comment: Some("# ".into()),
+ ..Default::default()
+ },
+ Some(tree_sitter_ruby::language()),
+ )
+ .with_indents_query(
+ r#"
+ (class "end" @end) @indent
+ (method "end" @end) @indent
+ (rescue) @outdent
+ (then) @indent
+ "#,
+ )
+ .unwrap()
+}
+
+fn html_lang() -> Language {
+ Language::new(
+ LanguageConfig {
+ name: "HTML".into(),
+ block_comment: Some(("<!--".into(), "-->".into())),
+ ..Default::default()
+ },
+ Some(tree_sitter_html::language()),
+ )
+ .with_indents_query(
+ "
+ (element
+ (start_tag) @start
+ (end_tag)? @end) @indent
+ ",
+ )
+ .unwrap()
+ .with_injection_query(
+ r#"
+ (script_element
+ (raw_text) @content
+ (#set! "language" "javascript"))
+ "#,
+ )
+ .unwrap()
+}
+
+fn erb_lang() -> Language {
+ Language::new(
+ LanguageConfig {
+ name: "ERB".into(),
+ path_suffixes: vec!["erb".to_string()],
+ block_comment: Some(("<%#".into(), "%>".into())),
+ ..Default::default()
+ },
+ Some(tree_sitter_embedded_template::language()),
+ )
+ .with_injection_query(
+ r#"
+ (
+ (code) @content
+ (#set! "language" "ruby")
+ (#set! "combined")
+ )
+
+ (
+ (content) @content
+ (#set! "language" "html")
+ (#set! "combined")
+ )
+ "#,
+ )
+ .unwrap()
+}
+
+fn rust_lang() -> Language {
+ Language::new(
+ LanguageConfig {
+ name: "Rust".into(),
+ path_suffixes: vec!["rs".to_string()],
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::language()),
+ )
+ .with_indents_query(
+ r#"
+ (call_expression) @indent
+ (field_expression) @indent
+ (_ "(" ")" @end) @indent
+ (_ "{" "}" @end) @indent
+ "#,
+ )
+ .unwrap()
+ .with_brackets_query(
+ r#"
+ ("{" @open "}" @close)
+ "#,
+ )
+ .unwrap()
+ .with_outline_query(
+ r#"
+ (struct_item
+ "struct" @context
+ name: (_) @name) @item
+ (enum_item
+ "enum" @context
+ name: (_) @name) @item
+ (enum_variant
+ name: (_) @name) @item
+ (field_declaration
+ name: (_) @name) @item
+ (impl_item
+ "impl" @context
+ trait: (_)? @name
+ "for"? @context
+ type: (_) @name) @item
+ (function_item
+ "fn" @context
+ name: (_) @name) @item
+ (mod_item
+ "mod" @context
+ name: (_) @name) @item
+ "#,
+ )
+ .unwrap()
+}
+
+fn json_lang() -> Language {
+ Language::new(
+ LanguageConfig {
+ name: "Json".into(),
+ path_suffixes: vec!["js".to_string()],
+ ..Default::default()
+ },
+ Some(tree_sitter_json::language()),
+ )
+}
+
+fn javascript_lang() -> Language {
+ Language::new(
+ LanguageConfig {
+ name: "JavaScript".into(),
+ ..Default::default()
+ },
+ Some(tree_sitter_typescript::language_tsx()),
+ )
+ .with_brackets_query(
+ r#"
+ ("{" @open "}" @close)
+ ("(" @open ")" @close)
+ "#,
+ )
+ .unwrap()
+ .with_indents_query(
+ r#"
+ (object "}" @end) @indent
+ "#,
+ )
+ .unwrap()
+}
+
+fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> String {
+ buffer.read_with(cx, |buffer, _| {
+ let snapshot = buffer.snapshot();
+ let layers = snapshot.syntax.layers(buffer.as_text_snapshot());
+ layers[0].node().to_sexp()
+ })
+}
+
+// Assert that the enclosing bracket ranges around the selection match the pairs indicated by the marked text in `range_markers`
+fn assert_bracket_pairs(
+ selection_text: &'static str,
+ bracket_pair_texts: Vec<&'static str>,
+ language: Language,
+ cx: &mut AppContext,
+) {
+ let (expected_text, selection_ranges) = marked_text_ranges(selection_text, false);
+ let buffer = cx.add_model(|cx| {
+ Buffer::new(0, cx.model_id() as u64, expected_text.clone())
+ .with_language(Arc::new(language), cx)
+ });
+ let buffer = buffer.update(cx, |buffer, _cx| buffer.snapshot());
+
+ let selection_range = selection_ranges[0].clone();
+
+ let bracket_pairs = bracket_pair_texts
+ .into_iter()
+ .map(|pair_text| {
+ let (bracket_text, ranges) = marked_text_ranges(pair_text, false);
+ assert_eq!(bracket_text, expected_text);
+ (ranges[0].clone(), ranges[1].clone())
+ })
+ .collect::<Vec<_>>();
+
+ assert_set_eq!(
+ buffer.bracket_ranges(selection_range).collect::<Vec<_>>(),
+ bracket_pairs
+ );
+}
+
+fn init_settings(cx: &mut AppContext, f: fn(&mut AllLanguageSettingsContent)) {
+ cx.set_global(SettingsStore::test(cx));
+ crate::init(cx);
+ cx.update_global::<SettingsStore, _, _>(|settings, cx| {
+ settings.update_user_settings::<AllLanguageSettings>(cx, f);
+ });
+}
@@ -0,0 +1,236 @@
+use crate::Diagnostic;
+use collections::HashMap;
+use lsp::LanguageServerId;
+use std::{
+ cmp::{Ordering, Reverse},
+ iter,
+ ops::Range,
+};
+use sum_tree::{self, Bias, SumTree};
+use text::{Anchor, FromAnchor, PointUtf16, ToOffset};
+
+#[derive(Clone, Debug, Default)]
+pub struct DiagnosticSet {
+ diagnostics: SumTree<DiagnosticEntry<Anchor>>,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct DiagnosticEntry<T> {
+ pub range: Range<T>,
+ pub diagnostic: Diagnostic,
+}
+
+#[derive(Debug)]
+pub struct DiagnosticGroup<T> {
+ pub entries: Vec<DiagnosticEntry<T>>,
+ pub primary_ix: usize,
+}
+
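+// Aggregated data for a subtree of diagnostic entries. `start` and `end`
+// mirror the range of the rightmost entry in the subtree, while `min_start`
+// and `max_end` track the extreme anchors across the whole subtree, letting
+// `DiagnosticSet::range` skip subtrees that cannot intersect a query range.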
+#[derive(Clone, Debug)]
+pub struct Summary {
+ start: Anchor,
+ end: Anchor,
+ min_start: Anchor,
+ max_end: Anchor,
+ count: usize,
+}
+
+impl<T> DiagnosticEntry<T> {
+    // Used to provide diagnostic context to the LSP `textDocument/codeAction` request.
+ pub fn to_lsp_diagnostic_stub(&self) -> lsp::Diagnostic {
+ let code = self
+ .diagnostic
+ .code
+ .clone()
+ .map(lsp::NumberOrString::String);
+
+ lsp::Diagnostic {
+ code,
+ severity: Some(self.diagnostic.severity),
+ ..Default::default()
+ }
+ }
+}
+
+impl DiagnosticSet {
+ pub fn from_sorted_entries<I>(iter: I, buffer: &text::BufferSnapshot) -> Self
+ where
+ I: IntoIterator<Item = DiagnosticEntry<Anchor>>,
+ {
+ Self {
+ diagnostics: SumTree::from_iter(iter, buffer),
+ }
+ }
+
+ pub fn new<I>(iter: I, buffer: &text::BufferSnapshot) -> Self
+ where
+ I: IntoIterator<Item = DiagnosticEntry<PointUtf16>>,
+ {
+ let mut entries = iter.into_iter().collect::<Vec<_>>();
+ entries.sort_unstable_by_key(|entry| (entry.range.start, Reverse(entry.range.end)));
+ Self {
+ diagnostics: SumTree::from_iter(
+ entries.into_iter().map(|entry| DiagnosticEntry {
+ range: buffer.anchor_before(entry.range.start)
+ ..buffer.anchor_before(entry.range.end),
+ diagnostic: entry.diagnostic,
+ }),
+ buffer,
+ ),
+ }
+ }
+
+ pub fn len(&self) -> usize {
+ self.diagnostics.summary().count
+ }
+
+ pub fn iter(&self) -> impl Iterator<Item = &DiagnosticEntry<Anchor>> {
+ self.diagnostics.iter()
+ }
+
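+    /// Returns the entries whose ranges intersect `range`, resolved to type
+    /// `O`. When `inclusive` is true, entries that merely touch the range's
+    /// endpoints are also returned; when `reversed` is true, entries are
+    /// yielded from last to first.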
+ pub fn range<'a, T, O>(
+ &'a self,
+ range: Range<T>,
+ buffer: &'a text::BufferSnapshot,
+ inclusive: bool,
+ reversed: bool,
+ ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
+ where
+ T: 'a + ToOffset,
+ O: FromAnchor,
+ {
+ let end_bias = if inclusive { Bias::Right } else { Bias::Left };
+ let range = buffer.anchor_before(range.start)..buffer.anchor_at(range.end, end_bias);
+ let mut cursor = self.diagnostics.filter::<_, ()>({
+ move |summary: &Summary| {
+ let start_cmp = range.start.cmp(&summary.max_end, buffer);
+ let end_cmp = range.end.cmp(&summary.min_start, buffer);
+ if inclusive {
+ start_cmp <= Ordering::Equal && end_cmp >= Ordering::Equal
+ } else {
+ start_cmp == Ordering::Less && end_cmp == Ordering::Greater
+ }
+ }
+ });
+
+ if reversed {
+ cursor.prev(buffer);
+ } else {
+ cursor.next(buffer);
+ }
+ iter::from_fn({
+ move || {
+ if let Some(diagnostic) = cursor.item() {
+ if reversed {
+ cursor.prev(buffer);
+ } else {
+ cursor.next(buffer);
+ }
+ Some(diagnostic.resolve(buffer))
+ } else {
+ None
+ }
+ }
+ })
+ }
+
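+    /// Gathers the diagnostics into groups keyed by `group_id`, appending one
+    /// `DiagnosticGroup` per group that contains a primary entry. The appended
+    /// groups are sorted by the position of their primary diagnostic, with
+    /// ties broken by language server id.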
+ pub fn groups(
+ &self,
+ language_server_id: LanguageServerId,
+ output: &mut Vec<(LanguageServerId, DiagnosticGroup<Anchor>)>,
+ buffer: &text::BufferSnapshot,
+ ) {
+ let mut groups = HashMap::default();
+ for entry in self.diagnostics.iter() {
+ groups
+ .entry(entry.diagnostic.group_id)
+ .or_insert(Vec::new())
+ .push(entry.clone());
+ }
+
+ let start_ix = output.len();
+ output.extend(groups.into_values().filter_map(|mut entries| {
+ entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start, buffer));
+ entries
+ .iter()
+ .position(|entry| entry.diagnostic.is_primary)
+ .map(|primary_ix| {
+ (
+ language_server_id,
+ DiagnosticGroup {
+ entries,
+ primary_ix,
+ },
+ )
+ })
+ }));
+ output[start_ix..].sort_unstable_by(|(id_a, group_a), (id_b, group_b)| {
+ group_a.entries[group_a.primary_ix]
+ .range
+ .start
+ .cmp(&group_b.entries[group_b.primary_ix].range.start, buffer)
+ .then_with(|| id_a.cmp(&id_b))
+ });
+ }
+
+ pub fn group<'a, O: FromAnchor>(
+ &'a self,
+ group_id: usize,
+ buffer: &'a text::BufferSnapshot,
+ ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>> {
+ self.iter()
+ .filter(move |entry| entry.diagnostic.group_id == group_id)
+ .map(|entry| entry.resolve(buffer))
+ }
+}
+
+impl sum_tree::Item for DiagnosticEntry<Anchor> {
+ type Summary = Summary;
+
+ fn summary(&self) -> Self::Summary {
+ Summary {
+ start: self.range.start,
+ end: self.range.end,
+ min_start: self.range.start,
+ max_end: self.range.end,
+ count: 1,
+ }
+ }
+}
+
+impl DiagnosticEntry<Anchor> {
+ pub fn resolve<O: FromAnchor>(&self, buffer: &text::BufferSnapshot) -> DiagnosticEntry<O> {
+ DiagnosticEntry {
+ range: O::from_anchor(&self.range.start, buffer)
+ ..O::from_anchor(&self.range.end, buffer),
+ diagnostic: self.diagnostic.clone(),
+ }
+ }
+}
+
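+// The default summary acts as the identity element for `add_summary`:
+// `min_start` begins at the maximum anchor and `max_end` at the minimum, so
+// the first summary merged in always replaces them.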
+impl Default for Summary {
+ fn default() -> Self {
+ Self {
+ start: Anchor::MIN,
+ end: Anchor::MAX,
+ min_start: Anchor::MAX,
+ max_end: Anchor::MIN,
+ count: 0,
+ }
+ }
+}
+
+impl sum_tree::Summary for Summary {
+ type Context = text::BufferSnapshot;
+
+ fn add_summary(&mut self, other: &Self, buffer: &Self::Context) {
+ if other.min_start.cmp(&self.min_start, buffer).is_lt() {
+ self.min_start = other.min_start;
+ }
+ if other.max_end.cmp(&self.max_end, buffer).is_gt() {
+ self.max_end = other.max_end;
+ }
+ self.start = other.start;
+ self.end = other.end;
+ self.count += other.count;
+ }
+}
@@ -0,0 +1,111 @@
+use gpui2::HighlightStyle;
+use std::sync::Arc;
+use theme::SyntaxTheme;
+
+#[derive(Clone, Debug)]
+pub struct HighlightMap(Arc<[HighlightId]>);
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub struct HighlightId(pub u32);
+
+const DEFAULT_SYNTAX_HIGHLIGHT_ID: HighlightId = HighlightId(u32::MAX);
+
+impl HighlightMap {
+ pub fn new(capture_names: &[String], theme: &SyntaxTheme) -> Self {
+        // For each capture name in the highlight query, find the theme's
+        // syntax style with the longest dot-separated key whose components
+        // all appear among the capture name's components.
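+        // For example, the capture name "function.async.rust" matches the
+        // theme key "function.async" (two components) in preference to
+        // "function" (one component).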
+ HighlightMap(
+ capture_names
+ .iter()
+ .map(|capture_name| {
+ theme
+ .highlights
+ .iter()
+ .enumerate()
+ .filter_map(|(i, (key, _))| {
+ let mut len = 0;
+ let capture_parts = capture_name.split('.');
+ for key_part in key.split('.') {
+ if capture_parts.clone().any(|part| part == key_part) {
+ len += 1;
+ } else {
+ return None;
+ }
+ }
+ Some((i, len))
+ })
+ .max_by_key(|(_, len)| *len)
+ .map_or(DEFAULT_SYNTAX_HIGHLIGHT_ID, |(i, _)| HighlightId(i as u32))
+ })
+ .collect(),
+ )
+ }
+
+ pub fn get(&self, capture_id: u32) -> HighlightId {
+ self.0
+ .get(capture_id as usize)
+ .copied()
+ .unwrap_or(DEFAULT_SYNTAX_HIGHLIGHT_ID)
+ }
+}
+
+impl HighlightId {
+ pub fn is_default(&self) -> bool {
+ *self == DEFAULT_SYNTAX_HIGHLIGHT_ID
+ }
+
+ pub fn style(&self, theme: &SyntaxTheme) -> Option<HighlightStyle> {
+ theme.highlights.get(self.0 as usize).map(|entry| entry.1)
+ }
+
+ pub fn name<'a>(&self, theme: &'a SyntaxTheme) -> Option<&'a str> {
+ theme.highlights.get(self.0 as usize).map(|e| e.0.as_str())
+ }
+}
+
+impl Default for HighlightMap {
+ fn default() -> Self {
+ Self(Arc::new([]))
+ }
+}
+
+impl Default for HighlightId {
+ fn default() -> Self {
+ DEFAULT_SYNTAX_HIGHLIGHT_ID
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use gpui::color::Color;
+
+ #[test]
+ fn test_highlight_map() {
+ let theme = SyntaxTheme::new(
+ [
+ ("function", Color::from_u32(0x100000ff)),
+ ("function.method", Color::from_u32(0x200000ff)),
+ ("function.async", Color::from_u32(0x300000ff)),
+ ("variable.builtin.self.rust", Color::from_u32(0x400000ff)),
+ ("variable.builtin", Color::from_u32(0x500000ff)),
+ ("variable", Color::from_u32(0x600000ff)),
+ ]
+ .iter()
+ .map(|(name, color)| (name.to_string(), (*color).into()))
+ .collect(),
+ );
+
+ let capture_names = &[
+ "function.special".to_string(),
+ "function.async.rust".to_string(),
+ "variable.builtin.self".to_string(),
+ ];
+
+ let map = HighlightMap::new(capture_names, &theme);
+ assert_eq!(map.get(0).name(&theme), Some("function"));
+ assert_eq!(map.get(1).name(&theme), Some("function.async"));
+ assert_eq!(map.get(2).name(&theme), Some("variable.builtin"));
+ }
+}
@@ -0,0 +1,1969 @@
+mod buffer;
+mod diagnostic_set;
+mod highlight_map;
+pub mod language_settings;
+mod outline;
+pub mod proto;
+mod syntax_map;
+
+#[cfg(test)]
+mod buffer_tests;
+
+use anyhow::{anyhow, Context, Result};
+use async_trait::async_trait;
+use collections::{HashMap, HashSet};
+use futures::{
+ channel::{mpsc, oneshot},
+ future::{BoxFuture, Shared},
+ FutureExt, TryFutureExt as _,
+};
+use gpui2::{AppContext, AsyncAppContext, Executor, Task};
+pub use highlight_map::HighlightMap;
+use lazy_static::lazy_static;
+use lsp::{CodeActionKind, LanguageServerBinary};
+use parking_lot::{Mutex, RwLock};
+use postage::watch;
+use regex::Regex;
+use serde::{de, Deserialize, Deserializer};
+use serde_json::Value;
+use std::{
+ any::Any,
+ borrow::Cow,
+ cell::RefCell,
+ fmt::Debug,
+ hash::Hash,
+ mem,
+ ops::{Not, Range},
+ path::{Path, PathBuf},
+ str,
+ sync::{
+ atomic::{AtomicUsize, Ordering::SeqCst},
+ Arc,
+ },
+};
+use syntax_map::SyntaxSnapshot;
+use theme::{SyntaxTheme, Theme};
+use tree_sitter::{self, Query};
+use unicase::UniCase;
+use util::{http::HttpClient, paths::PathExt};
+use util::{post_inc, ResultExt, TryFutureExt as _, UnwrapFuture};
+
+pub use buffer::Operation;
+pub use buffer::*;
+pub use diagnostic_set::DiagnosticEntry;
+pub use lsp::LanguageServerId;
+pub use outline::{Outline, OutlineItem};
+pub use syntax_map::{OwnedSyntaxLayerInfo, SyntaxLayerInfo};
+pub use text::LineEnding;
+pub use tree_sitter::{Parser, Tree};
+
+pub fn init(cx: &mut AppContext) {
+ language_settings::init(cx);
+}
+
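+// Fans language-server binary status updates out to all subscribers, pruning
+// any senders whose receivers have been dropped.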
+#[derive(Clone, Default)]
+struct LspBinaryStatusSender {
+ txs: Arc<Mutex<Vec<mpsc::UnboundedSender<(Arc<Language>, LanguageServerBinaryStatus)>>>>,
+}
+
+impl LspBinaryStatusSender {
+ fn subscribe(&self) -> mpsc::UnboundedReceiver<(Arc<Language>, LanguageServerBinaryStatus)> {
+ let (tx, rx) = mpsc::unbounded();
+ self.txs.lock().push(tx);
+ rx
+ }
+
+ fn send(&self, language: Arc<Language>, status: LanguageServerBinaryStatus) {
+ let mut txs = self.txs.lock();
+ txs.retain(|tx| {
+ tx.unbounded_send((language.clone(), status.clone()))
+ .is_ok()
+ });
+ }
+}
+
+thread_local! {
+ static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
+}
+
+lazy_static! {
+ pub static ref NEXT_GRAMMAR_ID: AtomicUsize = Default::default();
+ pub static ref PLAIN_TEXT: Arc<Language> = Arc::new(Language::new(
+ LanguageConfig {
+ name: "Plain Text".into(),
+ ..Default::default()
+ },
+ None,
+ ));
+}
+
+pub trait ToLspPosition {
+ fn to_lsp_position(self) -> lsp::Position;
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub struct LanguageServerName(pub Arc<str>);
+
+/// Represents a language server, with certain properties cached synchronously.
+/// Wraps an [`LspAdapter`], but calls all of its 'static' methods once at
+/// startup and caches the results.
+pub struct CachedLspAdapter {
+ pub name: LanguageServerName,
+ pub short_name: &'static str,
+ pub initialization_options: Option<Value>,
+ pub disk_based_diagnostic_sources: Vec<String>,
+ pub disk_based_diagnostics_progress_token: Option<String>,
+ pub language_ids: HashMap<String, String>,
+ pub adapter: Arc<dyn LspAdapter>,
+}
+
+impl CachedLspAdapter {
+ pub async fn new(adapter: Arc<dyn LspAdapter>) -> Arc<Self> {
+ let name = adapter.name().await;
+ let short_name = adapter.short_name();
+ let initialization_options = adapter.initialization_options().await;
+ let disk_based_diagnostic_sources = adapter.disk_based_diagnostic_sources().await;
+ let disk_based_diagnostics_progress_token =
+ adapter.disk_based_diagnostics_progress_token().await;
+ let language_ids = adapter.language_ids().await;
+
+ Arc::new(CachedLspAdapter {
+ name,
+ short_name,
+ initialization_options,
+ disk_based_diagnostic_sources,
+ disk_based_diagnostics_progress_token,
+ language_ids,
+ adapter,
+ })
+ }
+
+ pub async fn fetch_latest_server_version(
+ &self,
+ delegate: &dyn LspAdapterDelegate,
+ ) -> Result<Box<dyn 'static + Send + Any>> {
+ self.adapter.fetch_latest_server_version(delegate).await
+ }
+
+ pub fn will_fetch_server(
+ &self,
+ delegate: &Arc<dyn LspAdapterDelegate>,
+ cx: &mut AsyncAppContext,
+ ) -> Option<Task<Result<()>>> {
+ self.adapter.will_fetch_server(delegate, cx)
+ }
+
+ pub fn will_start_server(
+ &self,
+ delegate: &Arc<dyn LspAdapterDelegate>,
+ cx: &mut AsyncAppContext,
+ ) -> Option<Task<Result<()>>> {
+ self.adapter.will_start_server(delegate, cx)
+ }
+
+ pub async fn fetch_server_binary(
+ &self,
+ version: Box<dyn 'static + Send + Any>,
+ container_dir: PathBuf,
+ delegate: &dyn LspAdapterDelegate,
+ ) -> Result<LanguageServerBinary> {
+ self.adapter
+ .fetch_server_binary(version, container_dir, delegate)
+ .await
+ }
+
+ pub async fn cached_server_binary(
+ &self,
+ container_dir: PathBuf,
+ delegate: &dyn LspAdapterDelegate,
+ ) -> Option<LanguageServerBinary> {
+ self.adapter
+ .cached_server_binary(container_dir, delegate)
+ .await
+ }
+
+ pub fn can_be_reinstalled(&self) -> bool {
+ self.adapter.can_be_reinstalled()
+ }
+
+ pub async fn installation_test_binary(
+ &self,
+ container_dir: PathBuf,
+ ) -> Option<LanguageServerBinary> {
+ self.adapter.installation_test_binary(container_dir).await
+ }
+
+ pub fn code_action_kinds(&self) -> Option<Vec<CodeActionKind>> {
+ self.adapter.code_action_kinds()
+ }
+
+ pub fn workspace_configuration(&self, cx: &mut AppContext) -> BoxFuture<'static, Value> {
+ self.adapter.workspace_configuration(cx)
+ }
+
+ pub fn process_diagnostics(&self, params: &mut lsp::PublishDiagnosticsParams) {
+ self.adapter.process_diagnostics(params)
+ }
+
+ pub async fn process_completion(&self, completion_item: &mut lsp::CompletionItem) {
+ self.adapter.process_completion(completion_item).await
+ }
+
+ pub async fn label_for_completion(
+ &self,
+ completion_item: &lsp::CompletionItem,
+ language: &Arc<Language>,
+ ) -> Option<CodeLabel> {
+ self.adapter
+ .label_for_completion(completion_item, language)
+ .await
+ }
+
+ pub async fn label_for_symbol(
+ &self,
+ name: &str,
+ kind: lsp::SymbolKind,
+ language: &Arc<Language>,
+ ) -> Option<CodeLabel> {
+ self.adapter.label_for_symbol(name, kind, language).await
+ }
+
+ pub fn enabled_formatters(&self) -> Vec<BundledFormatter> {
+ self.adapter.enabled_formatters()
+ }
+}
+
+pub trait LspAdapterDelegate: Send + Sync {
+ fn show_notification(&self, message: &str, cx: &mut AppContext);
+ fn http_client(&self) -> Arc<dyn HttpClient>;
+}
+
+#[async_trait]
+pub trait LspAdapter: 'static + Send + Sync {
+ async fn name(&self) -> LanguageServerName;
+
+ fn short_name(&self) -> &'static str;
+
+ async fn fetch_latest_server_version(
+ &self,
+ delegate: &dyn LspAdapterDelegate,
+ ) -> Result<Box<dyn 'static + Send + Any>>;
+
+ fn will_fetch_server(
+ &self,
+ _: &Arc<dyn LspAdapterDelegate>,
+ _: &mut AsyncAppContext,
+ ) -> Option<Task<Result<()>>> {
+ None
+ }
+
+ fn will_start_server(
+ &self,
+ _: &Arc<dyn LspAdapterDelegate>,
+ _: &mut AsyncAppContext,
+ ) -> Option<Task<Result<()>>> {
+ None
+ }
+
+ async fn fetch_server_binary(
+ &self,
+ version: Box<dyn 'static + Send + Any>,
+ container_dir: PathBuf,
+ delegate: &dyn LspAdapterDelegate,
+ ) -> Result<LanguageServerBinary>;
+
+ async fn cached_server_binary(
+ &self,
+ container_dir: PathBuf,
+ delegate: &dyn LspAdapterDelegate,
+ ) -> Option<LanguageServerBinary>;
+
+ fn can_be_reinstalled(&self) -> bool {
+ true
+ }
+
+ async fn installation_test_binary(
+ &self,
+ container_dir: PathBuf,
+ ) -> Option<LanguageServerBinary>;
+
+ fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
+
+ async fn process_completion(&self, _: &mut lsp::CompletionItem) {}
+
+ async fn label_for_completion(
+ &self,
+ _: &lsp::CompletionItem,
+ _: &Arc<Language>,
+ ) -> Option<CodeLabel> {
+ None
+ }
+
+ async fn label_for_symbol(
+ &self,
+ _: &str,
+ _: lsp::SymbolKind,
+ _: &Arc<Language>,
+ ) -> Option<CodeLabel> {
+ None
+ }
+
+ async fn initialization_options(&self) -> Option<Value> {
+ None
+ }
+
+ fn workspace_configuration(&self, _: &mut AppContext) -> BoxFuture<'static, Value> {
+ futures::future::ready(serde_json::json!({})).boxed()
+ }
+
+ fn code_action_kinds(&self) -> Option<Vec<CodeActionKind>> {
+ Some(vec![
+ CodeActionKind::EMPTY,
+ CodeActionKind::QUICKFIX,
+ CodeActionKind::REFACTOR,
+ CodeActionKind::REFACTOR_EXTRACT,
+ CodeActionKind::SOURCE,
+ ])
+ }
+
+ async fn disk_based_diagnostic_sources(&self) -> Vec<String> {
+ Default::default()
+ }
+
+ async fn disk_based_diagnostics_progress_token(&self) -> Option<String> {
+ None
+ }
+
+ async fn language_ids(&self) -> HashMap<String, String> {
+ Default::default()
+ }
+
+ fn enabled_formatters(&self) -> Vec<BundledFormatter> {
+ Vec::new()
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum BundledFormatter {
+ Prettier {
+ // See https://prettier.io/docs/en/options.html#parser for a list of valid values.
+        // Usually, every language has a single parser (standard or plugin-provided), hence `Some("parser_name")` can be used.
+        // There cannot be multiple parsers for a single language; in case of a conflict, we would attempt to select the one with the most plugins.
+        //
+        // But exceptions like Tailwind CSS exist: it uses the standard parsers for CSS/JS/HTML/etc. but requires an extra plugin to be installed.
+        // For those cases, `None` will install the plugin but apply the regular parser defined for the language, and this would not be a conflict.
+ parser_name: Option<&'static str>,
+ plugin_names: Vec<&'static str>,
+ },
+}
+
+impl BundledFormatter {
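+    /// Convenience constructor for the common case of a single standard
+    /// parser and no extra plugins, e.g. `BundledFormatter::prettier("json")`.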
+ pub fn prettier(parser_name: &'static str) -> Self {
+ Self::Prettier {
+ parser_name: Some(parser_name),
+ plugin_names: Vec::new(),
+ }
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct CodeLabel {
+ pub text: String,
+ pub runs: Vec<(Range<usize>, HighlightId)>,
+ pub filter_range: Range<usize>,
+}
+
+#[derive(Clone, Deserialize)]
+pub struct LanguageConfig {
+ pub name: Arc<str>,
+ pub path_suffixes: Vec<String>,
+ pub brackets: BracketPairConfig,
+ #[serde(default, deserialize_with = "deserialize_regex")]
+ pub first_line_pattern: Option<Regex>,
+ #[serde(default = "auto_indent_using_last_non_empty_line_default")]
+ pub auto_indent_using_last_non_empty_line: bool,
+ #[serde(default, deserialize_with = "deserialize_regex")]
+ pub increase_indent_pattern: Option<Regex>,
+ #[serde(default, deserialize_with = "deserialize_regex")]
+ pub decrease_indent_pattern: Option<Regex>,
+ #[serde(default)]
+ pub autoclose_before: String,
+ #[serde(default)]
+ pub line_comment: Option<Arc<str>>,
+ #[serde(default)]
+ pub collapsed_placeholder: String,
+ #[serde(default)]
+ pub block_comment: Option<(Arc<str>, Arc<str>)>,
+ #[serde(default)]
+ pub scope_opt_in_language_servers: Vec<String>,
+ #[serde(default)]
+ pub overrides: HashMap<String, LanguageConfigOverride>,
+ #[serde(default)]
+ pub word_characters: HashSet<char>,
+}
+
+#[derive(Debug, Default)]
+pub struct LanguageQueries {
+ pub highlights: Option<Cow<'static, str>>,
+ pub brackets: Option<Cow<'static, str>>,
+ pub indents: Option<Cow<'static, str>>,
+ pub outline: Option<Cow<'static, str>>,
+ pub embedding: Option<Cow<'static, str>>,
+ pub injections: Option<Cow<'static, str>>,
+ pub overrides: Option<Cow<'static, str>>,
+}
+
+#[derive(Clone, Debug)]
+pub struct LanguageScope {
+ language: Arc<Language>,
+ override_id: Option<u32>,
+}
+
+#[derive(Clone, Deserialize, Default, Debug)]
+pub struct LanguageConfigOverride {
+ #[serde(default)]
+ pub line_comment: Override<Arc<str>>,
+ #[serde(default)]
+ pub block_comment: Override<(Arc<str>, Arc<str>)>,
+ #[serde(skip_deserializing)]
+ pub disabled_bracket_ixs: Vec<u16>,
+ #[serde(default)]
+ pub word_characters: Override<HashSet<char>>,
+ #[serde(default)]
+ pub opt_into_language_servers: Vec<String>,
+}
+
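+// An override value in a language config: `{ "remove": true }` clears the
+// inherited setting, any other value replaces it, and the default
+// (`remove: false`) keeps the base language's value; see `as_option` below.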
+#[derive(Clone, Deserialize, Debug)]
+#[serde(untagged)]
+pub enum Override<T> {
+ Remove { remove: bool },
+ Set(T),
+}
+
+impl<T> Default for Override<T> {
+ fn default() -> Self {
+ Override::Remove { remove: false }
+ }
+}
+
+impl<T> Override<T> {
+ fn as_option<'a>(this: Option<&'a Self>, original: Option<&'a T>) -> Option<&'a T> {
+ match this {
+ Some(Self::Set(value)) => Some(value),
+ Some(Self::Remove { remove: true }) => None,
+ Some(Self::Remove { remove: false }) | None => original,
+ }
+ }
+}
+
+impl Default for LanguageConfig {
+ fn default() -> Self {
+ Self {
+ name: "".into(),
+ path_suffixes: Default::default(),
+ brackets: Default::default(),
+ auto_indent_using_last_non_empty_line: auto_indent_using_last_non_empty_line_default(),
+ first_line_pattern: Default::default(),
+ increase_indent_pattern: Default::default(),
+ decrease_indent_pattern: Default::default(),
+ autoclose_before: Default::default(),
+ line_comment: Default::default(),
+ block_comment: Default::default(),
+ scope_opt_in_language_servers: Default::default(),
+ overrides: Default::default(),
+ collapsed_placeholder: Default::default(),
+ word_characters: Default::default(),
+ }
+ }
+}
+
+fn auto_indent_using_last_non_empty_line_default() -> bool {
+ true
+}
+
+fn deserialize_regex<'de, D: Deserializer<'de>>(d: D) -> Result<Option<Regex>, D::Error> {
+ let source = Option::<String>::deserialize(d)?;
+ if let Some(source) = source {
+ Ok(Some(regex::Regex::new(&source).map_err(de::Error::custom)?))
+ } else {
+ Ok(None)
+ }
+}
+
+#[cfg(any(test, feature = "test-support"))]
+pub struct FakeLspAdapter {
+ pub name: &'static str,
+ pub initialization_options: Option<Value>,
+ pub capabilities: lsp::ServerCapabilities,
+ pub initializer: Option<Box<dyn 'static + Send + Sync + Fn(&mut lsp::FakeLanguageServer)>>,
+ pub disk_based_diagnostics_progress_token: Option<String>,
+ pub disk_based_diagnostics_sources: Vec<String>,
+ pub enabled_formatters: Vec<BundledFormatter>,
+}
+
+#[derive(Clone, Debug, Default)]
+pub struct BracketPairConfig {
+ pub pairs: Vec<BracketPair>,
+ pub disabled_scopes_by_bracket_ix: Vec<Vec<String>>,
+}
+
+impl<'de> Deserialize<'de> for BracketPairConfig {
+ fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
+ where
+ D: Deserializer<'de>,
+ {
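+        // In a language's config, each bracket pair may carry an optional
+        // `not_in` list naming the scopes (e.g. "string") in which the pair
+        // is disabled. Those lists are split out into
+        // `disabled_scopes_by_bracket_ix`, indexed in parallel with `pairs`.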
+ #[derive(Deserialize)]
+ pub struct Entry {
+ #[serde(flatten)]
+ pub bracket_pair: BracketPair,
+ #[serde(default)]
+ pub not_in: Vec<String>,
+ }
+
+ let result = Vec::<Entry>::deserialize(deserializer)?;
+ let mut brackets = Vec::with_capacity(result.len());
+ let mut disabled_scopes_by_bracket_ix = Vec::with_capacity(result.len());
+ for entry in result {
+ brackets.push(entry.bracket_pair);
+ disabled_scopes_by_bracket_ix.push(entry.not_in);
+ }
+
+ Ok(BracketPairConfig {
+ pairs: brackets,
+ disabled_scopes_by_bracket_ix,
+ })
+ }
+}
+
+#[derive(Clone, Debug, Default, Deserialize, PartialEq)]
+pub struct BracketPair {
+ pub start: String,
+ pub end: String,
+ pub close: bool,
+ pub newline: bool,
+}
+
+pub struct Language {
+ pub(crate) config: LanguageConfig,
+ pub(crate) grammar: Option<Arc<Grammar>>,
+ pub(crate) adapters: Vec<Arc<CachedLspAdapter>>,
+
+ #[cfg(any(test, feature = "test-support"))]
+ fake_adapter: Option<(
+ mpsc::UnboundedSender<lsp::FakeLanguageServer>,
+ Arc<FakeLspAdapter>,
+ )>,
+}
+
+pub struct Grammar {
+ id: usize,
+ pub ts_language: tree_sitter::Language,
+ pub(crate) error_query: Query,
+ pub(crate) highlights_query: Option<Query>,
+ pub(crate) brackets_config: Option<BracketConfig>,
+ pub(crate) indents_config: Option<IndentConfig>,
+ pub outline_config: Option<OutlineConfig>,
+ pub embedding_config: Option<EmbeddingConfig>,
+ pub(crate) injection_config: Option<InjectionConfig>,
+ pub(crate) override_config: Option<OverrideConfig>,
+ pub(crate) highlight_map: Mutex<HighlightMap>,
+}
+
+struct IndentConfig {
+ query: Query,
+ indent_capture_ix: u32,
+ start_capture_ix: Option<u32>,
+ end_capture_ix: Option<u32>,
+ outdent_capture_ix: Option<u32>,
+}
+
+pub struct OutlineConfig {
+ pub query: Query,
+ pub item_capture_ix: u32,
+ pub name_capture_ix: u32,
+ pub context_capture_ix: Option<u32>,
+ pub extra_context_capture_ix: Option<u32>,
+}
+
+#[derive(Debug)]
+pub struct EmbeddingConfig {
+ pub query: Query,
+ pub item_capture_ix: u32,
+ pub name_capture_ix: Option<u32>,
+ pub context_capture_ix: Option<u32>,
+ pub collapse_capture_ix: Option<u32>,
+ pub keep_capture_ix: Option<u32>,
+}
+
+struct InjectionConfig {
+ query: Query,
+ content_capture_ix: u32,
+ language_capture_ix: Option<u32>,
+ patterns: Vec<InjectionPatternConfig>,
+}
+
+struct OverrideConfig {
+ query: Query,
+ values: HashMap<u32, (String, LanguageConfigOverride)>,
+}
+
+#[derive(Default, Clone)]
+struct InjectionPatternConfig {
+ language: Option<Box<str>>,
+ combined: bool,
+}
+
+struct BracketConfig {
+ query: Query,
+ open_capture_ix: u32,
+ close_capture_ix: u32,
+}
+
+#[derive(Clone)]
+pub enum LanguageServerBinaryStatus {
+ CheckingForUpdate,
+ Downloading,
+ Downloaded,
+ Cached,
+ Failed { error: String },
+}
+
+type AvailableLanguageId = usize;
+
+#[derive(Clone)]
+struct AvailableLanguage {
+ id: AvailableLanguageId,
+ path: &'static str,
+ config: LanguageConfig,
+ grammar: tree_sitter::Language,
+ lsp_adapters: Vec<Arc<dyn LspAdapter>>,
+ get_queries: fn(&str) -> LanguageQueries,
+ loaded: bool,
+}
+
+pub struct LanguageRegistry {
+ state: RwLock<LanguageRegistryState>,
+ language_server_download_dir: Option<Arc<Path>>,
+ login_shell_env_loaded: Shared<Task<()>>,
+ #[allow(clippy::type_complexity)]
+ lsp_binary_paths: Mutex<
+ HashMap<LanguageServerName, Shared<Task<Result<LanguageServerBinary, Arc<anyhow::Error>>>>>,
+ >,
+ executor: Option<Executor>,
+ lsp_binary_status_tx: LspBinaryStatusSender,
+}
+
+struct LanguageRegistryState {
+ next_language_server_id: usize,
+ languages: Vec<Arc<Language>>,
+ available_languages: Vec<AvailableLanguage>,
+ next_available_language_id: AvailableLanguageId,
+ loading_languages: HashMap<AvailableLanguageId, Vec<oneshot::Sender<Result<Arc<Language>>>>>,
+ subscription: (watch::Sender<()>, watch::Receiver<()>),
+ theme: Option<Arc<Theme>>,
+ version: usize,
+ reload_count: usize,
+}
+
+pub struct PendingLanguageServer {
+ pub server_id: LanguageServerId,
+ pub task: Task<Result<Option<lsp::LanguageServer>>>,
+ pub container_dir: Option<Arc<Path>>,
+}
+
+impl LanguageRegistry {
+ pub fn new(login_shell_env_loaded: Task<()>) -> Self {
+ Self {
+ state: RwLock::new(LanguageRegistryState {
+ next_language_server_id: 0,
+ languages: vec![PLAIN_TEXT.clone()],
+ available_languages: Default::default(),
+ next_available_language_id: 0,
+ loading_languages: Default::default(),
+ subscription: watch::channel(),
+ theme: Default::default(),
+ version: 0,
+ reload_count: 0,
+ }),
+ language_server_download_dir: None,
+ login_shell_env_loaded: login_shell_env_loaded.shared(),
+ lsp_binary_paths: Default::default(),
+ executor: None,
+ lsp_binary_status_tx: Default::default(),
+ }
+ }
+
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn test() -> Self {
+ Self::new(Task::ready(()))
+ }
+
+ pub fn set_executor(&mut self, executor: Executor) {
+ self.executor = Some(executor);
+ }
+
+ /// Clear out all of the loaded languages and reload them from scratch.
+ ///
+ /// This is useful in development, when queries have changed.
+ #[cfg(debug_assertions)]
+ pub fn reload(&self) {
+ self.state.write().reload();
+ }
+
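+ /// Registers an available language without loading it. The language is
+ /// loaded lazily the first time it is requested, at which point
+ /// `get_queries` is called with `path` to fetch its tree-sitter queries.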
+ pub fn register(
+ &self,
+ path: &'static str,
+ config: LanguageConfig,
+ grammar: tree_sitter::Language,
+ lsp_adapters: Vec<Arc<dyn LspAdapter>>,
+ get_queries: fn(&str) -> LanguageQueries,
+ ) {
+ let state = &mut *self.state.write();
+ state.available_languages.push(AvailableLanguage {
+ id: post_inc(&mut state.next_available_language_id),
+ path,
+ config,
+ grammar,
+ lsp_adapters,
+ get_queries,
+ loaded: false,
+ });
+ }
+
+ pub fn language_names(&self) -> Vec<String> {
+ let state = self.state.read();
+ let mut result = state
+ .available_languages
+ .iter()
+ .filter_map(|l| l.loaded.not().then_some(l.config.name.to_string()))
+ .chain(state.languages.iter().map(|l| l.config.name.to_string()))
+ .collect::<Vec<_>>();
+ result.sort_unstable_by_key(|language_name| language_name.to_lowercase());
+ result
+ }
+
+ pub fn add(&self, language: Arc<Language>) {
+ self.state.write().add(language);
+ }
+
+ pub fn subscribe(&self) -> watch::Receiver<()> {
+ self.state.read().subscription.1.clone()
+ }
+
+ /// The number of times that the registry has changed, whether by
+ /// adding languages or by reloading.
+ pub fn version(&self) -> usize {
+ self.state.read().version
+ }
+
+ /// The number of times that the registry has been reloaded.
+ pub fn reload_count(&self) -> usize {
+ self.state.read().reload_count
+ }
+
+ pub fn set_theme(&self, theme: Arc<Theme>) {
+ let mut state = self.state.write();
+ state.theme = Some(theme.clone());
+ for language in &state.languages {
+ language.set_theme(&theme.editor.syntax);
+ }
+ }
+
+ pub fn set_language_server_download_dir(&mut self, path: impl Into<Arc<Path>>) {
+ self.language_server_download_dir = Some(path.into());
+ }
+
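+ /// Looks up a language by name, case-insensitively, loading it on demand.
+ /// A minimal usage sketch (assuming an executor has been assigned):
+ ///
+ /// ```ignore
+ /// let rust = registry.language_for_name("Rust").await?;
+ /// ```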
+ pub fn language_for_name(
+ self: &Arc<Self>,
+ name: &str,
+ ) -> UnwrapFuture<oneshot::Receiver<Result<Arc<Language>>>> {
+ let name = UniCase::new(name);
+ self.get_or_load_language(|config| UniCase::new(config.name.as_ref()) == name)
+ }
+
+ pub fn language_for_name_or_extension(
+ self: &Arc<Self>,
+ string: &str,
+ ) -> UnwrapFuture<oneshot::Receiver<Result<Arc<Language>>>> {
+ let string = UniCase::new(string);
+ self.get_or_load_language(|config| {
+ UniCase::new(config.name.as_ref()) == string
+ || config
+ .path_suffixes
+ .iter()
+ .any(|suffix| UniCase::new(suffix) == string)
+ })
+ }
+
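+ /// Selects a language for the given path by matching the file's extension
+ /// or name against `path_suffixes`, or, when `content` is provided, by
+ /// matching the first line (clipped to 256 columns) against
+ /// `first_line_pattern`; a `#!/usr/bin/env node` shebang, for example, can
+ /// select JavaScript.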
+ pub fn language_for_file(
+ self: &Arc<Self>,
+ path: impl AsRef<Path>,
+ content: Option<&Rope>,
+ ) -> UnwrapFuture<oneshot::Receiver<Result<Arc<Language>>>> {
+ let path = path.as_ref();
+ let filename = path.file_name().and_then(|name| name.to_str());
+ let extension = path.extension_or_hidden_file_name();
+ let path_suffixes = [extension, filename];
+ self.get_or_load_language(|config| {
+ let path_matches = config
+ .path_suffixes
+ .iter()
+ .any(|suffix| path_suffixes.contains(&Some(suffix.as_str())));
+ let content_matches = content.zip(config.first_line_pattern.as_ref()).map_or(
+ false,
+ |(content, pattern)| {
+ let end = content.clip_point(Point::new(0, 256), Bias::Left);
+ let end = content.point_to_offset(end);
+ let text = content.chunks_in_range(0..end).collect::<String>();
+ pattern.is_match(&text)
+ },
+ );
+ path_matches || content_matches
+ })
+ }
+
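+ // If a matching language is already loaded, resolve immediately. Otherwise
+ // the first caller spawns the load on the executor, and subsequent callers
+ // queue their senders in `loading_languages` until that load completes.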
+ fn get_or_load_language(
+ self: &Arc<Self>,
+ callback: impl Fn(&LanguageConfig) -> bool,
+ ) -> UnwrapFuture<oneshot::Receiver<Result<Arc<Language>>>> {
+ let (tx, rx) = oneshot::channel();
+
+ let mut state = self.state.write();
+ if let Some(language) = state
+ .languages
+ .iter()
+ .find(|language| callback(&language.config))
+ {
+ let _ = tx.send(Ok(language.clone()));
+ } else if let Some(executor) = self.executor.clone() {
+ if let Some(language) = state
+ .available_languages
+ .iter()
+ .find(|l| !l.loaded && callback(&l.config))
+ .cloned()
+ {
+ let txs = state
+ .loading_languages
+ .entry(language.id)
+ .or_insert_with(|| {
+ let this = self.clone();
+ executor
+ .spawn(async move {
+ let id = language.id;
+ let queries = (language.get_queries)(&language.path);
+ let language =
+ Language::new(language.config, Some(language.grammar))
+ .with_lsp_adapters(language.lsp_adapters)
+ .await;
+ let name = language.name();
+ match language.with_queries(queries) {
+ Ok(language) => {
+ let language = Arc::new(language);
+ let mut state = this.state.write();
+
+ state.add(language.clone());
+ state.mark_language_loaded(id);
+ if let Some(mut txs) = state.loading_languages.remove(&id) {
+ for tx in txs.drain(..) {
+ let _ = tx.send(Ok(language.clone()));
+ }
+ }
+ }
+ Err(e) => {
+ log::error!("failed to load language {name}:\n{:?}", e);
+ let mut state = this.state.write();
+ state.mark_language_loaded(id);
+ if let Some(mut txs) = state.loading_languages.remove(&id) {
+ for tx in txs.drain(..) {
+ let _ = tx.send(Err(anyhow!(
+ "failed to load language {}: {}",
+ name,
+ e
+ )));
+ }
+ }
+ }
+ };
+ })
+ .detach();
+
+ Vec::new()
+ });
+ txs.push(tx);
+ } else {
+ let _ = tx.send(Err(anyhow!("language not found")));
+ }
+ } else {
+ let _ = tx.send(Err(anyhow!("executor does not exist")));
+ }
+
+ rx.unwrap()
+ }
+
+ pub fn to_vec(&self) -> Vec<Arc<Language>> {
+ self.state.read().languages.iter().cloned().collect()
+ }
+
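+ /// Allocates a server id and spawns a task that resolves the server binary
+ /// and launches it. In tests, a language with a fake adapter short-circuits
+ /// to an in-process `lsp::LanguageServer::fake` instead.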
+ pub fn create_pending_language_server(
+ self: &Arc<Self>,
+ language: Arc<Language>,
+ adapter: Arc<CachedLspAdapter>,
+ root_path: Arc<Path>,
+ delegate: Arc<dyn LspAdapterDelegate>,
+ cx: &mut AppContext,
+ ) -> Option<PendingLanguageServer> {
+ let server_id = self.state.write().next_language_server_id();
+ log::info!(
+ "starting language server {:?}, path: {root_path:?}, id: {server_id}",
+ adapter.name.0
+ );
+
+ #[cfg(any(test, feature = "test-support"))]
+ if language.fake_adapter.is_some() {
+ let task = cx.spawn(|cx| async move {
+ let (servers_tx, fake_adapter) = language.fake_adapter.as_ref().unwrap();
+ let (server, mut fake_server) = lsp::LanguageServer::fake(
+ fake_adapter.name.to_string(),
+ fake_adapter.capabilities.clone(),
+ cx.clone(),
+ );
+
+ if let Some(initializer) = &fake_adapter.initializer {
+ initializer(&mut fake_server);
+ }
+
+ let servers_tx = servers_tx.clone();
+ cx.background()
+ .spawn(async move {
+ if fake_server
+ .try_receive_notification::<lsp::notification::Initialized>()
+ .await
+ .is_some()
+ {
+ servers_tx.unbounded_send(fake_server).ok();
+ }
+ })
+ .detach();
+
+ Ok(Some(server))
+ });
+
+ return Some(PendingLanguageServer {
+ server_id,
+ task,
+ container_dir: None,
+ });
+ }
+
+ let download_dir = self
+ .language_server_download_dir
+ .clone()
+ .ok_or_else(|| anyhow!("language server download directory has not been assigned before starting server"))
+ .log_err()?;
+ let this = self.clone();
+ let language = language.clone();
+ let container_dir: Arc<Path> = Arc::from(download_dir.join(adapter.name.0.as_ref()));
+ let root_path = root_path.clone();
+ let adapter = adapter.clone();
+ let login_shell_env_loaded = self.login_shell_env_loaded.clone();
+ let lsp_binary_statuses = self.lsp_binary_status_tx.clone();
+
+ let task = {
+ let container_dir = container_dir.clone();
+ cx.spawn(|mut cx| async move {
+ login_shell_env_loaded.await;
+
+ let mut lock = this.lsp_binary_paths.lock();
+ let entry = lock
+ .entry(adapter.name.clone())
+ .or_insert_with(|| {
+ cx.spawn(|cx| {
+ get_binary(
+ adapter.clone(),
+ language.clone(),
+ delegate.clone(),
+ container_dir,
+ lsp_binary_statuses,
+ cx,
+ )
+ .map_err(Arc::new)
+ })
+ .shared()
+ })
+ .clone();
+ drop(lock);
+
+ let binary = match entry.clone().await.log_err() {
+ Some(binary) => binary,
+ None => return Ok(None),
+ };
+
+ if let Some(task) = adapter.will_start_server(&delegate, &mut cx) {
+ if task.await.log_err().is_none() {
+ return Ok(None);
+ }
+ }
+
+ Ok(Some(lsp::LanguageServer::new(
+ server_id,
+ binary,
+ &root_path,
+ adapter.code_action_kinds(),
+ cx,
+ )?))
+ })
+ };
+
+ Some(PendingLanguageServer {
+ server_id,
+ task,
+ container_dir: Some(container_dir),
+ })
+ }
+
+ pub fn language_server_binary_statuses(
+ &self,
+ ) -> mpsc::UnboundedReceiver<(Arc<Language>, LanguageServerBinaryStatus)> {
+ self.lsp_binary_status_tx.subscribe()
+ }
+
+ pub fn delete_server_container(
+ &self,
+ adapter: Arc<CachedLspAdapter>,
+ cx: &mut AppContext,
+ ) -> Task<()> {
+ log::info!("deleting server container");
+
+ let mut lock = self.lsp_binary_paths.lock();
+ lock.remove(&adapter.name);
+
+ let download_dir = self
+ .language_server_download_dir
+ .clone()
+ .expect("language server download directory has not been assigned before deleting server container");
+
+ cx.spawn(|_| async move {
+ let container_dir = download_dir.join(adapter.name.0.as_ref());
+ smol::fs::remove_dir_all(container_dir)
+ .await
+ .context("server container removal")
+ .log_err();
+ })
+ }
+
+ pub fn next_language_server_id(&self) -> LanguageServerId {
+ self.state.write().next_language_server_id()
+ }
+}
+
+impl LanguageRegistryState {
+ fn next_language_server_id(&mut self) -> LanguageServerId {
+ LanguageServerId(post_inc(&mut self.next_language_server_id))
+ }
+
+ fn add(&mut self, language: Arc<Language>) {
+ if let Some(theme) = self.theme.as_ref() {
+ language.set_theme(&theme.editor.syntax);
+ }
+ self.languages.push(language);
+ self.version += 1;
+ *self.subscription.0.borrow_mut() = ();
+ }
+
+ #[cfg(debug_assertions)]
+ fn reload(&mut self) {
+ self.languages.clear();
+ self.version += 1;
+ self.reload_count += 1;
+ for language in &mut self.available_languages {
+ language.loaded = false;
+ }
+ *self.subscription.0.borrow_mut() = ();
+ }
+
+ /// Mark the given language as having been loaded, so that the
+ /// language registry won't try to load it again.
+ fn mark_language_loaded(&mut self, id: AvailableLanguageId) {
+ for language in &mut self.available_languages {
+ if language.id == id {
+ language.loaded = true;
+ break;
+ }
+ }
+ }
+}
+
+#[cfg(any(test, feature = "test-support"))]
+impl Default for LanguageRegistry {
+ fn default() -> Self {
+ Self::test()
+ }
+}
+
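+// Fetches the newest server binary; if that fails, falls back to any cached
+// binary in the container directory, reporting each status transition through
+// `statuses` along the way.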
+async fn get_binary(
+ adapter: Arc<CachedLspAdapter>,
+ language: Arc<Language>,
+ delegate: Arc<dyn LspAdapterDelegate>,
+ container_dir: Arc<Path>,
+ statuses: LspBinaryStatusSender,
+ mut cx: AsyncAppContext,
+) -> Result<LanguageServerBinary> {
+ if !container_dir.exists() {
+ smol::fs::create_dir_all(&container_dir)
+ .await
+ .context("failed to create container directory")?;
+ }
+
+ if let Some(task) = adapter.will_fetch_server(&delegate, &mut cx) {
+ task.await?;
+ }
+
+ let binary = fetch_latest_binary(
+ adapter.clone(),
+ language.clone(),
+ delegate.as_ref(),
+ &container_dir,
+ statuses.clone(),
+ )
+ .await;
+
+ if let Err(error) = binary.as_ref() {
+ if let Some(binary) = adapter
+ .cached_server_binary(container_dir.to_path_buf(), delegate.as_ref())
+ .await
+ {
+ statuses.send(language.clone(), LanguageServerBinaryStatus::Cached);
+ return Ok(binary);
+ } else {
+ statuses.send(
+ language.clone(),
+ LanguageServerBinaryStatus::Failed {
+ error: format!("{:?}", error),
+ },
+ );
+ }
+ }
+
+ binary
+}
+
+async fn fetch_latest_binary(
+ adapter: Arc<CachedLspAdapter>,
+ language: Arc<Language>,
+ delegate: &dyn LspAdapterDelegate,
+ container_dir: &Path,
+ lsp_binary_statuses_tx: LspBinaryStatusSender,
+) -> Result<LanguageServerBinary> {
+ let container_dir: Arc<Path> = container_dir.into();
+ lsp_binary_statuses_tx.send(
+ language.clone(),
+ LanguageServerBinaryStatus::CheckingForUpdate,
+ );
+
+ let version_info = adapter.fetch_latest_server_version(delegate).await?;
+ lsp_binary_statuses_tx.send(language.clone(), LanguageServerBinaryStatus::Downloading);
+
+ let binary = adapter
+ .fetch_server_binary(version_info, container_dir.to_path_buf(), delegate)
+ .await?;
+ lsp_binary_statuses_tx.send(language.clone(), LanguageServerBinaryStatus::Downloaded);
+
+ Ok(binary)
+}
+
+impl Language {
+ pub fn new(config: LanguageConfig, ts_language: Option<tree_sitter::Language>) -> Self {
+ Self {
+ config,
+ grammar: ts_language.map(|ts_language| {
+ Arc::new(Grammar {
+ id: NEXT_GRAMMAR_ID.fetch_add(1, SeqCst),
+ highlights_query: None,
+ brackets_config: None,
+ outline_config: None,
+ embedding_config: None,
+ indents_config: None,
+ injection_config: None,
+ override_config: None,
+ error_query: Query::new(ts_language, "(ERROR) @error").unwrap(),
+ ts_language,
+ highlight_map: Default::default(),
+ })
+ }),
+ adapters: Vec::new(),
+
+ #[cfg(any(test, feature = "test-support"))]
+ fake_adapter: None,
+ }
+ }
+
+ pub fn lsp_adapters(&self) -> &[Arc<CachedLspAdapter>] {
+ &self.adapters
+ }
+
+ pub fn id(&self) -> Option<usize> {
+ self.grammar.as_ref().map(|g| g.id)
+ }
+
+ pub fn with_queries(mut self, queries: LanguageQueries) -> Result<Self> {
+ if let Some(query) = queries.highlights {
+ self = self
+ .with_highlights_query(query.as_ref())
+ .context("Error loading highlights query")?;
+ }
+ if let Some(query) = queries.brackets {
+ self = self
+ .with_brackets_query(query.as_ref())
+ .context("Error loading brackets query")?;
+ }
+ if let Some(query) = queries.indents {
+ self = self
+ .with_indents_query(query.as_ref())
+ .context("Error loading indents query")?;
+ }
+ if let Some(query) = queries.outline {
+ self = self
+ .with_outline_query(query.as_ref())
+ .context("Error loading outline query")?;
+ }
+ if let Some(query) = queries.embedding {
+ self = self
+ .with_embedding_query(query.as_ref())
+ .context("Error loading embedding query")?;
+ }
+ if let Some(query) = queries.injections {
+ self = self
+ .with_injection_query(query.as_ref())
+ .context("Error loading injection query")?;
+ }
+ if let Some(query) = queries.overrides {
+ self = self
+ .with_override_query(query.as_ref())
+ .context("Error loading override query")?;
+ }
+ Ok(self)
+ }
+
+ pub fn with_highlights_query(mut self, source: &str) -> Result<Self> {
+ let grammar = self.grammar_mut();
+ grammar.highlights_query = Some(Query::new(grammar.ts_language, source)?);
+ Ok(self)
+ }
+
+ pub fn with_outline_query(mut self, source: &str) -> Result<Self> {
+ let grammar = self.grammar_mut();
+ let query = Query::new(grammar.ts_language, source)?;
+ let mut item_capture_ix = None;
+ let mut name_capture_ix = None;
+ let mut context_capture_ix = None;
+ let mut extra_context_capture_ix = None;
+ get_capture_indices(
+ &query,
+ &mut [
+ ("item", &mut item_capture_ix),
+ ("name", &mut name_capture_ix),
+ ("context", &mut context_capture_ix),
+ ("context.extra", &mut extra_context_capture_ix),
+ ],
+ );
+ if let Some((item_capture_ix, name_capture_ix)) = item_capture_ix.zip(name_capture_ix) {
+ grammar.outline_config = Some(OutlineConfig {
+ query,
+ item_capture_ix,
+ name_capture_ix,
+ context_capture_ix,
+ extra_context_capture_ix,
+ });
+ }
+ Ok(self)
+ }
+
+ pub fn with_embedding_query(mut self, source: &str) -> Result<Self> {
+ let grammar = self.grammar_mut();
+ let query = Query::new(grammar.ts_language, source)?;
+ let mut item_capture_ix = None;
+ let mut name_capture_ix = None;
+ let mut context_capture_ix = None;
+ let mut collapse_capture_ix = None;
+ let mut keep_capture_ix = None;
+ get_capture_indices(
+ &query,
+ &mut [
+ ("item", &mut item_capture_ix),
+ ("name", &mut name_capture_ix),
+ ("context", &mut context_capture_ix),
+ ("keep", &mut keep_capture_ix),
+ ("collapse", &mut collapse_capture_ix),
+ ],
+ );
+ if let Some(item_capture_ix) = item_capture_ix {
+ grammar.embedding_config = Some(EmbeddingConfig {
+ query,
+ item_capture_ix,
+ name_capture_ix,
+ context_capture_ix,
+ collapse_capture_ix,
+ keep_capture_ix,
+ });
+ }
+ Ok(self)
+ }
+
+ pub fn with_brackets_query(mut self, source: &str) -> Result<Self> {
+ let grammar = self.grammar_mut();
+ let query = Query::new(grammar.ts_language, source)?;
+ let mut open_capture_ix = None;
+ let mut close_capture_ix = None;
+ get_capture_indices(
+ &query,
+ &mut [
+ ("open", &mut open_capture_ix),
+ ("close", &mut close_capture_ix),
+ ],
+ );
+ if let Some((open_capture_ix, close_capture_ix)) = open_capture_ix.zip(close_capture_ix) {
+ grammar.brackets_config = Some(BracketConfig {
+ query,
+ open_capture_ix,
+ close_capture_ix,
+ });
+ }
+ Ok(self)
+ }
+
+ pub fn with_indents_query(mut self, source: &str) -> Result<Self> {
+ let grammar = self.grammar_mut();
+ let query = Query::new(grammar.ts_language, source)?;
+ let mut indent_capture_ix = None;
+ let mut start_capture_ix = None;
+ let mut end_capture_ix = None;
+ let mut outdent_capture_ix = None;
+ get_capture_indices(
+ &query,
+ &mut [
+ ("indent", &mut indent_capture_ix),
+ ("start", &mut start_capture_ix),
+ ("end", &mut end_capture_ix),
+ ("outdent", &mut outdent_capture_ix),
+ ],
+ );
+ if let Some(indent_capture_ix) = indent_capture_ix {
+ grammar.indents_config = Some(IndentConfig {
+ query,
+ indent_capture_ix,
+ start_capture_ix,
+ end_capture_ix,
+ outdent_capture_ix,
+ });
+ }
+ Ok(self)
+ }
+
+ pub fn with_injection_query(mut self, source: &str) -> Result<Self> {
+ let grammar = self.grammar_mut();
+ let query = Query::new(grammar.ts_language, source)?;
+ let mut language_capture_ix = None;
+ let mut content_capture_ix = None;
+ get_capture_indices(
+ &query,
+ &mut [
+ ("language", &mut language_capture_ix),
+ ("content", &mut content_capture_ix),
+ ],
+ );
+ let patterns = (0..query.pattern_count())
+ .map(|ix| {
+ let mut config = InjectionPatternConfig::default();
+ for setting in query.property_settings(ix) {
+ match setting.key.as_ref() {
+ "language" => {
+ config.language = setting.value.clone();
+ }
+ "combined" => {
+ config.combined = true;
+ }
+ _ => {}
+ }
+ }
+ config
+ })
+ .collect();
+ if let Some(content_capture_ix) = content_capture_ix {
+ grammar.injection_config = Some(InjectionConfig {
+ query,
+ language_capture_ix,
+ content_capture_ix,
+ patterns,
+ });
+ }
+ Ok(self)
+ }
+
+ pub fn with_override_query(mut self, source: &str) -> anyhow::Result<Self> {
+ let query = Query::new(self.grammar_mut().ts_language, source)?;
+
+ let mut override_configs_by_id = HashMap::default();
+ for (ix, name) in query.capture_names().iter().enumerate() {
+ if !name.starts_with('_') {
+ let value = self.config.overrides.remove(name).unwrap_or_default();
+ for server_name in &value.opt_into_language_servers {
+ if !self
+ .config
+ .scope_opt_in_language_servers
+ .contains(server_name)
+ {
+ util::debug_panic!("Server {server_name:?} has been opted-in by scope {name:?} but has not been marked as an opt-in server");
+ }
+ }
+
+ override_configs_by_id.insert(ix as u32, (name.clone(), value));
+ }
+ }
+
+ if !self.config.overrides.is_empty() {
+ let keys = self.config.overrides.keys().collect::<Vec<_>>();
+ Err(anyhow!(
+ "language {:?} has overrides in config not in query: {keys:?}",
+ self.config.name
+ ))?;
+ }
+
+ for disabled_scope_name in self
+ .config
+ .brackets
+ .disabled_scopes_by_bracket_ix
+ .iter()
+ .flatten()
+ {
+ if !override_configs_by_id
+ .values()
+ .any(|(scope_name, _)| scope_name == disabled_scope_name)
+ {
+ Err(anyhow!(
+ "language {:?} disables brackets in scope {disabled_scope_name:?}, which is not in the query",
+ self.config.name
+ ))?;
+ }
+ }
+
+ for (name, override_config) in override_configs_by_id.values_mut() {
+ override_config.disabled_bracket_ixs = self
+ .config
+ .brackets
+ .disabled_scopes_by_bracket_ix
+ .iter()
+ .enumerate()
+ .filter_map(|(ix, disabled_scope_names)| {
+ if disabled_scope_names.contains(name) {
+ Some(ix as u16)
+ } else {
+ None
+ }
+ })
+ .collect();
+ }
+
+ self.config.brackets.disabled_scopes_by_bracket_ix.clear();
+ self.grammar_mut().override_config = Some(OverrideConfig {
+ query,
+ values: override_configs_by_id,
+ });
+ Ok(self)
+ }
+
+ fn grammar_mut(&mut self) -> &mut Grammar {
+ Arc::get_mut(self.grammar.as_mut().unwrap()).unwrap()
+ }
+
+ pub async fn with_lsp_adapters(mut self, lsp_adapters: Vec<Arc<dyn LspAdapter>>) -> Self {
+ for adapter in lsp_adapters {
+ self.adapters.push(CachedLspAdapter::new(adapter).await);
+ }
+ self
+ }
+
+ #[cfg(any(test, feature = "test-support"))]
+ pub async fn set_fake_lsp_adapter(
+ &mut self,
+ fake_lsp_adapter: Arc<FakeLspAdapter>,
+ ) -> mpsc::UnboundedReceiver<lsp::FakeLanguageServer> {
+ let (servers_tx, servers_rx) = mpsc::unbounded();
+ self.fake_adapter = Some((servers_tx, fake_lsp_adapter.clone()));
+ let adapter = CachedLspAdapter::new(Arc::new(fake_lsp_adapter)).await;
+ self.adapters = vec![adapter];
+ servers_rx
+ }
+
+ pub fn name(&self) -> Arc<str> {
+ self.config.name.clone()
+ }
+
+ pub async fn disk_based_diagnostic_sources(&self) -> &[String] {
+ match self.adapters.first().as_ref() {
+ Some(adapter) => &adapter.disk_based_diagnostic_sources,
+ None => &[],
+ }
+ }
+
+ pub async fn disk_based_diagnostics_progress_token(&self) -> Option<&str> {
+ for adapter in &self.adapters {
+ let token = adapter.disk_based_diagnostics_progress_token.as_deref();
+ if token.is_some() {
+ return token;
+ }
+ }
+
+ None
+ }
+
+ pub async fn process_completion(self: &Arc<Self>, completion: &mut lsp::CompletionItem) {
+ for adapter in &self.adapters {
+ adapter.process_completion(completion).await;
+ }
+ }
+
+ pub async fn label_for_completion(
+ self: &Arc<Self>,
+ completion: &lsp::CompletionItem,
+ ) -> Option<CodeLabel> {
+ self.adapters
+ .first()
+ .as_ref()?
+ .label_for_completion(completion, self)
+ .await
+ }
+
+ pub async fn label_for_symbol(
+ self: &Arc<Self>,
+ name: &str,
+ kind: lsp::SymbolKind,
+ ) -> Option<CodeLabel> {
+ self.adapters
+ .first()
+ .as_ref()?
+ .label_for_symbol(name, kind, self)
+ .await
+ }
+
+ pub fn highlight_text<'a>(
+ self: &'a Arc<Self>,
+ text: &'a Rope,
+ range: Range<usize>,
+ ) -> Vec<(Range<usize>, HighlightId)> {
+ let mut result = Vec::new();
+ if let Some(grammar) = &self.grammar {
+ let tree = grammar.parse_text(text, None);
+ let captures =
+ SyntaxSnapshot::single_tree_captures(range.clone(), text, &tree, self, |grammar| {
+ grammar.highlights_query.as_ref()
+ });
+ let highlight_maps = vec![grammar.highlight_map()];
+ let mut offset = 0;
+ for chunk in BufferChunks::new(text, range, Some((captures, highlight_maps)), vec![]) {
+ let end_offset = offset + chunk.text.len();
+ if let Some(highlight_id) = chunk.syntax_highlight_id {
+ if !highlight_id.is_default() {
+ result.push((offset..end_offset, highlight_id));
+ }
+ }
+ offset = end_offset;
+ }
+ }
+ result
+ }
+
+ pub fn path_suffixes(&self) -> &[String] {
+ &self.config.path_suffixes
+ }
+
+ pub fn should_autoclose_before(&self, c: char) -> bool {
+ c.is_whitespace() || self.config.autoclose_before.contains(c)
+ }
+
+ pub fn set_theme(&self, theme: &SyntaxTheme) {
+ if let Some(grammar) = self.grammar.as_ref() {
+ if let Some(highlights_query) = &grammar.highlights_query {
+ *grammar.highlight_map.lock() =
+ HighlightMap::new(highlights_query.capture_names(), theme);
+ }
+ }
+ }
+
+ pub fn grammar(&self) -> Option<&Arc<Grammar>> {
+ self.grammar.as_ref()
+ }
+
+ pub fn default_scope(self: &Arc<Self>) -> LanguageScope {
+ LanguageScope {
+ language: self.clone(),
+ override_id: None,
+ }
+ }
+}
+
+impl LanguageScope {
+ pub fn collapsed_placeholder(&self) -> &str {
+ self.language.config.collapsed_placeholder.as_ref()
+ }
+
+ pub fn line_comment_prefix(&self) -> Option<&Arc<str>> {
+ Override::as_option(
+ self.config_override().map(|o| &o.line_comment),
+ self.language.config.line_comment.as_ref(),
+ )
+ }
+
+ pub fn block_comment_delimiters(&self) -> Option<(&Arc<str>, &Arc<str>)> {
+ Override::as_option(
+ self.config_override().map(|o| &o.block_comment),
+ self.language.config.block_comment.as_ref(),
+ )
+ .map(|e| (&e.0, &e.1))
+ }
+
+ pub fn word_characters(&self) -> Option<&HashSet<char>> {
+ Override::as_option(
+ self.config_override().map(|o| &o.word_characters),
+ Some(&self.language.config.word_characters),
+ )
+ }
+
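+ /// Yields each bracket pair together with whether it is enabled in this
+ /// scope. Relies on `disabled_bracket_ixs` being sorted ascending, which
+ /// `with_override_query` guarantees by building it in index order.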
+ pub fn brackets(&self) -> impl Iterator<Item = (&BracketPair, bool)> {
+ let mut disabled_ids = self
+ .config_override()
+ .map_or(&[] as _, |o| o.disabled_bracket_ixs.as_slice());
+ self.language
+ .config
+ .brackets
+ .pairs
+ .iter()
+ .enumerate()
+ .map(move |(ix, bracket)| {
+ let mut is_enabled = true;
+ if let Some(next_disabled_ix) = disabled_ids.first() {
+ if ix == *next_disabled_ix as usize {
+ disabled_ids = &disabled_ids[1..];
+ is_enabled = false;
+ }
+ }
+ (bracket, is_enabled)
+ })
+ }
+
+ pub fn should_autoclose_before(&self, c: char) -> bool {
+ c.is_whitespace() || self.language.config.autoclose_before.contains(c)
+ }
+
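+ /// Returns whether the given language server may run in this scope: servers
+ /// listed in `scope_opt_in_language_servers` are disabled unless the active
+ /// override config opts back into them.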
+ pub fn language_allowed(&self, name: &LanguageServerName) -> bool {
+ let config = &self.language.config;
+ let opt_in_servers = &config.scope_opt_in_language_servers;
+ if opt_in_servers.iter().any(|o| *o == *name.0) {
+ if let Some(over) = self.config_override() {
+ over.opt_into_language_servers.iter().any(|o| *o == *name.0)
+ } else {
+ false
+ }
+ } else {
+ true
+ }
+ }
+
+ fn config_override(&self) -> Option<&LanguageConfigOverride> {
+ let id = self.override_id?;
+ let grammar = self.language.grammar.as_ref()?;
+ let override_config = grammar.override_config.as_ref()?;
+ override_config.values.get(&id).map(|e| &e.1)
+ }
+}
+
+impl Hash for Language {
+ fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+ self.id().hash(state)
+ }
+}
+
+impl PartialEq for Language {
+ fn eq(&self, other: &Self) -> bool {
+ self.id().eq(&other.id())
+ }
+}
+
+impl Eq for Language {}
+
+impl Debug for Language {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("Language")
+ .field("name", &self.config.name)
+ .finish()
+ }
+}
+
+impl Grammar {
+ pub fn id(&self) -> usize {
+ self.id
+ }
+
+ fn parse_text(&self, text: &Rope, old_tree: Option<Tree>) -> Tree {
+ PARSER.with(|parser| {
+ let mut parser = parser.borrow_mut();
+ parser
+ .set_language(self.ts_language)
+ .expect("incompatible grammar");
+ let mut chunks = text.chunks_in_range(0..text.len());
+ parser
+ .parse_with(
+ &mut move |offset, _| {
+ chunks.seek(offset);
+ chunks.next().unwrap_or("").as_bytes()
+ },
+ old_tree.as_ref(),
+ )
+ .unwrap()
+ })
+ }
+
+ pub fn highlight_map(&self) -> HighlightMap {
+ self.highlight_map.lock().clone()
+ }
+
+ pub fn highlight_id_for_name(&self, name: &str) -> Option<HighlightId> {
+ let capture_id = self
+ .highlights_query
+ .as_ref()?
+ .capture_index_for_name(name)?;
+ Some(self.highlight_map.lock().get(capture_id))
+ }
+}
+
+impl CodeLabel {
+ pub fn plain(text: String, filter_text: Option<&str>) -> Self {
+ let mut result = Self {
+ runs: Vec::new(),
+ filter_range: 0..text.len(),
+ text,
+ };
+ if let Some(filter_text) = filter_text {
+ if let Some(ix) = result.text.find(filter_text) {
+ result.filter_range = ix..ix + filter_text.len();
+ }
+ }
+ result
+ }
+}
+
+#[cfg(any(test, feature = "test-support"))]
+impl Default for FakeLspAdapter {
+ fn default() -> Self {
+ Self {
+ name: "the-fake-language-server",
+ capabilities: lsp::LanguageServer::full_capabilities(),
+ initializer: None,
+ disk_based_diagnostics_progress_token: None,
+ initialization_options: None,
+ disk_based_diagnostics_sources: Vec::new(),
+ enabled_formatters: Vec::new(),
+ }
+ }
+}
+
+#[cfg(any(test, feature = "test-support"))]
+#[async_trait]
+impl LspAdapter for Arc<FakeLspAdapter> {
+ async fn name(&self) -> LanguageServerName {
+ LanguageServerName(self.name.into())
+ }
+
+ fn short_name(&self) -> &'static str {
+ "FakeLspAdapter"
+ }
+
+ async fn fetch_latest_server_version(
+ &self,
+ _: &dyn LspAdapterDelegate,
+ ) -> Result<Box<dyn 'static + Send + Any>> {
+ unreachable!();
+ }
+
+ async fn fetch_server_binary(
+ &self,
+ _: Box<dyn 'static + Send + Any>,
+ _: PathBuf,
+ _: &dyn LspAdapterDelegate,
+ ) -> Result<LanguageServerBinary> {
+ unreachable!();
+ }
+
+ async fn cached_server_binary(
+ &self,
+ _: PathBuf,
+ _: &dyn LspAdapterDelegate,
+ ) -> Option<LanguageServerBinary> {
+ unreachable!();
+ }
+
+ async fn installation_test_binary(&self, _: PathBuf) -> Option<LanguageServerBinary> {
+ unreachable!();
+ }
+
+ fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
+
+ async fn disk_based_diagnostic_sources(&self) -> Vec<String> {
+ self.disk_based_diagnostics_sources.clone()
+ }
+
+ async fn disk_based_diagnostics_progress_token(&self) -> Option<String> {
+ self.disk_based_diagnostics_progress_token.clone()
+ }
+
+ async fn initialization_options(&self) -> Option<Value> {
+ self.initialization_options.clone()
+ }
+
+ fn enabled_formatters(&self) -> Vec<BundledFormatter> {
+ self.enabled_formatters.clone()
+ }
+}
+
+fn get_capture_indices(query: &Query, captures: &mut [(&str, &mut Option<u32>)]) {
+ for (ix, name) in query.capture_names().iter().enumerate() {
+ for (capture_name, index) in captures.iter_mut() {
+ if capture_name == name {
+ **index = Some(ix as u32);
+ break;
+ }
+ }
+ }
+}
+
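+// Conversions between Zed's UTF-16 coordinates and LSP positions. Note that
+// `range_from_lsp` normalizes ranges whose start follows their end, which
+// some language servers may produce.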
+pub fn point_to_lsp(point: PointUtf16) -> lsp::Position {
+ lsp::Position::new(point.row, point.column)
+}
+
+pub fn point_from_lsp(point: lsp::Position) -> Unclipped<PointUtf16> {
+ Unclipped(PointUtf16::new(point.line, point.character))
+}
+
+pub fn range_to_lsp(range: Range<PointUtf16>) -> lsp::Range {
+ lsp::Range {
+ start: point_to_lsp(range.start),
+ end: point_to_lsp(range.end),
+ }
+}
+
+pub fn range_from_lsp(range: lsp::Range) -> Range<Unclipped<PointUtf16>> {
+ let mut start = point_from_lsp(range.start);
+ let mut end = point_from_lsp(range.end);
+ if start > end {
+ mem::swap(&mut start, &mut end);
+ }
+ start..end
+}
+
+// #[cfg(test)]
+// mod tests {
+// use super::*;
+// use gpui::TestAppContext;
+
+// #[gpui::test(iterations = 10)]
+// async fn test_first_line_pattern(cx: &mut TestAppContext) {
+// let mut languages = LanguageRegistry::test();
+// languages.set_executor(cx.background());
+// let languages = Arc::new(languages);
+// languages.register(
+// "/javascript",
+// LanguageConfig {
+// name: "JavaScript".into(),
+// path_suffixes: vec!["js".into()],
+// first_line_pattern: Some(Regex::new(r"\bnode\b").unwrap()),
+// ..Default::default()
+// },
+// tree_sitter_typescript::language_tsx(),
+// vec![],
+// |_| Default::default(),
+// );
+
+// languages
+// .language_for_file("the/script", None)
+// .await
+// .unwrap_err();
+// languages
+// .language_for_file("the/script", Some(&"nothing".into()))
+// .await
+// .unwrap_err();
+// assert_eq!(
+// languages
+// .language_for_file("the/script", Some(&"#!/bin/env node".into()))
+// .await
+// .unwrap()
+// .name()
+// .as_ref(),
+// "JavaScript"
+// );
+// }
+
+// #[gpui::test(iterations = 10)]
+// async fn test_language_loading(cx: &mut TestAppContext) {
+// let mut languages = LanguageRegistry::test();
+// languages.set_executor(cx.background());
+// let languages = Arc::new(languages);
+// languages.register(
+// "/JSON",
+// LanguageConfig {
+// name: "JSON".into(),
+// path_suffixes: vec!["json".into()],
+// ..Default::default()
+// },
+// tree_sitter_json::language(),
+// vec![],
+// |_| Default::default(),
+// );
+// languages.register(
+// "/rust",
+// LanguageConfig {
+// name: "Rust".into(),
+// path_suffixes: vec!["rs".into()],
+// ..Default::default()
+// },
+// tree_sitter_rust::language(),
+// vec![],
+// |_| Default::default(),
+// );
+// assert_eq!(
+// languages.language_names(),
+// &[
+// "JSON".to_string(),
+// "Plain Text".to_string(),
+// "Rust".to_string(),
+// ]
+// );
+
+// let rust1 = languages.language_for_name("Rust");
+// let rust2 = languages.language_for_name("Rust");
+
+// // Ensure language is still listed even if it's being loaded.
+// assert_eq!(
+// languages.language_names(),
+// &[
+// "JSON".to_string(),
+// "Plain Text".to_string(),
+// "Rust".to_string(),
+// ]
+// );
+
+// let (rust1, rust2) = futures::join!(rust1, rust2);
+// assert!(Arc::ptr_eq(&rust1.unwrap(), &rust2.unwrap()));
+
+// // Ensure language is still listed even after loading it.
+// assert_eq!(
+// languages.language_names(),
+// &[
+// "JSON".to_string(),
+// "Plain Text".to_string(),
+// "Rust".to_string(),
+// ]
+// );
+
+// // Loading an unknown language returns an error.
+// assert!(languages.language_for_name("Unknown").await.is_err());
+// }
+// }
@@ -0,0 +1,430 @@
+use crate::{File, Language};
+use anyhow::Result;
+use collections::{HashMap, HashSet};
+use globset::GlobMatcher;
+use gpui2::AppContext;
+use schemars::{
+ schema::{InstanceType, ObjectValidation, Schema, SchemaObject},
+ JsonSchema,
+};
+use serde::{Deserialize, Serialize};
+use std::{num::NonZeroU32, path::Path, sync::Arc};
+
+pub fn init(cx: &mut AppContext) {
+ settings2::register::<AllLanguageSettings>(cx);
+}
+
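+/// Returns the settings for the given language, falling back to the global
+/// defaults when the language has no overrides. The file, when provided,
+/// scopes the lookup to that file's worktree location.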
+pub fn language_settings<'a>(
+ language: Option<&Arc<Language>>,
+ file: Option<&Arc<dyn File>>,
+ cx: &'a AppContext,
+) -> &'a LanguageSettings {
+ let language_name = language.map(|l| l.name());
+ all_language_settings(file, cx).language(language_name.as_deref())
+}
+
+pub fn all_language_settings<'a>(
+ file: Option<&Arc<dyn File>>,
+ cx: &'a AppContext,
+) -> &'a AllLanguageSettings {
+ let location = file.map(|f| (f.worktree_id(), f.path().as_ref()));
+ settings2::get_local(location, cx)
+}
+
+#[derive(Debug, Clone)]
+pub struct AllLanguageSettings {
+ pub copilot: CopilotSettings,
+ defaults: LanguageSettings,
+ languages: HashMap<Arc<str>, LanguageSettings>,
+}
+
+#[derive(Debug, Clone, Deserialize)]
+pub struct LanguageSettings {
+ pub tab_size: NonZeroU32,
+ pub hard_tabs: bool,
+ pub soft_wrap: SoftWrap,
+ pub preferred_line_length: u32,
+ pub show_wrap_guides: bool,
+ pub wrap_guides: Vec<usize>,
+ pub format_on_save: FormatOnSave,
+ pub remove_trailing_whitespace_on_save: bool,
+ pub ensure_final_newline_on_save: bool,
+ pub formatter: Formatter,
+ pub prettier: HashMap<String, serde_json::Value>,
+ pub enable_language_server: bool,
+ pub show_copilot_suggestions: bool,
+ pub show_whitespaces: ShowWhitespaceSetting,
+ pub extend_comment_on_newline: bool,
+ pub inlay_hints: InlayHintSettings,
+}
+
+#[derive(Clone, Debug, Default)]
+pub struct CopilotSettings {
+ pub feature_enabled: bool,
+ pub disabled_globs: Vec<GlobMatcher>,
+}
+
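+/// The raw settings-file representation. A minimal illustrative sketch of the
+/// JSON this accepts (keys come from the fields below; values are examples):
+///
+/// ```json
+/// {
+/// "tab_size": 4,
+/// "features": { "copilot": true },
+/// "copilot": { "disabled_globs": ["**/*.md"] },
+/// "languages": { "Rust": { "hard_tabs": false } }
+/// }
+/// ```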
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
+pub struct AllLanguageSettingsContent {
+ #[serde(default)]
+ pub features: Option<FeaturesContent>,
+ #[serde(default)]
+ pub copilot: Option<CopilotSettingsContent>,
+ #[serde(flatten)]
+ pub defaults: LanguageSettingsContent,
+ #[serde(default, alias = "language_overrides")]
+ pub languages: HashMap<Arc<str>, LanguageSettingsContent>,
+}
+
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
+pub struct LanguageSettingsContent {
+ #[serde(default)]
+ pub tab_size: Option<NonZeroU32>,
+ #[serde(default)]
+ pub hard_tabs: Option<bool>,
+ #[serde(default)]
+ pub soft_wrap: Option<SoftWrap>,
+ #[serde(default)]
+ pub preferred_line_length: Option<u32>,
+ #[serde(default)]
+ pub show_wrap_guides: Option<bool>,
+ #[serde(default)]
+ pub wrap_guides: Option<Vec<usize>>,
+ #[serde(default)]
+ pub format_on_save: Option<FormatOnSave>,
+ #[serde(default)]
+ pub remove_trailing_whitespace_on_save: Option<bool>,
+ #[serde(default)]
+ pub ensure_final_newline_on_save: Option<bool>,
+ #[serde(default)]
+ pub formatter: Option<Formatter>,
+ #[serde(default)]
+ pub prettier: Option<HashMap<String, serde_json::Value>>,
+ #[serde(default)]
+ pub enable_language_server: Option<bool>,
+ #[serde(default)]
+ pub show_copilot_suggestions: Option<bool>,
+ #[serde(default)]
+ pub show_whitespaces: Option<ShowWhitespaceSetting>,
+ #[serde(default)]
+ pub extend_comment_on_newline: Option<bool>,
+ #[serde(default)]
+ pub inlay_hints: Option<InlayHintSettings>,
+}
+
+#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
+pub struct CopilotSettingsContent {
+ #[serde(default)]
+ pub disabled_globs: Option<Vec<String>>,
+}
+
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "snake_case")]
+pub struct FeaturesContent {
+ pub copilot: Option<bool>,
+}
+
+#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[serde(rename_all = "snake_case")]
+pub enum SoftWrap {
+ None,
+ EditorWidth,
+ PreferredLineLength,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[serde(rename_all = "snake_case")]
+pub enum FormatOnSave {
+ On,
+ Off,
+ LanguageServer,
+ External {
+ command: Arc<str>,
+ arguments: Arc<[String]>,
+ },
+}
+
+#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[serde(rename_all = "snake_case")]
+pub enum ShowWhitespaceSetting {
+ Selection,
+ None,
+ All,
+}
+
+#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[serde(rename_all = "snake_case")]
+pub enum Formatter {
+ #[default]
+ Auto,
+ LanguageServer,
+ Prettier,
+ External {
+ command: Arc<str>,
+ arguments: Arc<[String]>,
+ },
+}
+
+#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
+pub struct InlayHintSettings {
+ #[serde(default)]
+ pub enabled: bool,
+ #[serde(default = "default_true")]
+ pub show_type_hints: bool,
+ #[serde(default = "default_true")]
+ pub show_parameter_hints: bool,
+ #[serde(default = "default_true")]
+ pub show_other_hints: bool,
+}
+
+fn default_true() -> bool {
+ true
+}
+
+impl InlayHintSettings {
+ pub fn enabled_inlay_hint_kinds(&self) -> HashSet<Option<InlayHintKind>> {
+ let mut kinds = HashSet::default();
+ if self.show_type_hints {
+ kinds.insert(Some(InlayHintKind::Type));
+ }
+ if self.show_parameter_hints {
+ kinds.insert(Some(InlayHintKind::Parameter));
+ }
+ if self.show_other_hints {
+ kinds.insert(None);
+ }
+ kinds
+ }
+}
+
+impl AllLanguageSettings {
+ pub fn language<'a>(&'a self, language_name: Option<&str>) -> &'a LanguageSettings {
+ if let Some(name) = language_name {
+ if let Some(overrides) = self.languages.get(name) {
+ return overrides;
+ }
+ }
+ &self.defaults
+ }
+
+ pub fn copilot_enabled_for_path(&self, path: &Path) -> bool {
+ !self
+ .copilot
+ .disabled_globs
+ .iter()
+ .any(|glob| glob.is_match(path))
+ }
+
+ pub fn copilot_enabled(&self, language: Option<&Arc<Language>>, path: Option<&Path>) -> bool {
+ if !self.copilot.feature_enabled {
+ return false;
+ }
+
+ if let Some(path) = path {
+ if !self.copilot_enabled_for_path(path) {
+ return false;
+ }
+ }
+
+ self.language(language.map(|l| l.name()).as_deref())
+ .show_copilot_suggestions
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum InlayHintKind {
+ Type,
+ Parameter,
+}
+
+impl InlayHintKind {
+ pub fn from_name(name: &str) -> Option<Self> {
+ match name {
+ "type" => Some(InlayHintKind::Type),
+ "parameter" => Some(InlayHintKind::Parameter),
+ _ => None,
+ }
+ }
+
+ pub fn name(&self) -> &'static str {
+ match self {
+ InlayHintKind::Type => "type",
+ InlayHintKind::Parameter => "parameter",
+ }
+ }
+}
+
+impl settings2::Setting for AllLanguageSettings {
+ const KEY: Option<&'static str> = None;
+
+ type FileContent = AllLanguageSettingsContent;
+
+ fn load(
+ default_value: &Self::FileContent,
+ user_settings: &[&Self::FileContent],
+ _: &AppContext,
+ ) -> Result<Self> {
+ // A default is provided for all settings.
+ let mut defaults: LanguageSettings =
+ serde_json::from_value(serde_json::to_value(&default_value.defaults)?)?;
+
+ let mut languages = HashMap::default();
+ for (language_name, settings) in &default_value.languages {
+ let mut language_settings = defaults.clone();
+ merge_settings(&mut language_settings, &settings);
+ languages.insert(language_name.clone(), language_settings);
+ }
+
+ let mut copilot_enabled = default_value
+ .features
+ .as_ref()
+ .and_then(|f| f.copilot)
+ .ok_or_else(Self::missing_default)?;
+ let mut copilot_globs = default_value
+ .copilot
+ .as_ref()
+ .and_then(|c| c.disabled_globs.as_ref())
+ .ok_or_else(Self::missing_default)?;
+
+ for user_settings in user_settings {
+ if let Some(copilot) = user_settings.features.as_ref().and_then(|f| f.copilot) {
+ copilot_enabled = copilot;
+ }
+ if let Some(globs) = user_settings
+ .copilot
+ .as_ref()
+ .and_then(|f| f.disabled_globs.as_ref())
+ {
+ copilot_globs = globs;
+ }
+
+ // A user's global settings override the default global settings and
+ // all default language-specific settings.
+ merge_settings(&mut defaults, &user_settings.defaults);
+ for language_settings in languages.values_mut() {
+ merge_settings(language_settings, &user_settings.defaults);
+ }
+
+ // A user's language-specific settings override default language-specific settings.
+ for (language_name, user_language_settings) in &user_settings.languages {
+ merge_settings(
+ languages
+ .entry(language_name.clone())
+ .or_insert_with(|| defaults.clone()),
+ &user_language_settings,
+ );
+ }
+ }
+
+ Ok(Self {
+ copilot: CopilotSettings {
+ feature_enabled: copilot_enabled,
+ disabled_globs: copilot_globs
+ .iter()
+ .filter_map(|g| Some(globset::Glob::new(g).ok()?.compile_matcher()))
+ .collect(),
+ },
+ defaults,
+ languages,
+ })
+ }
+
+ fn json_schema(
+ generator: &mut schemars::gen::SchemaGenerator,
+ params: &settings2::SettingsJsonSchemaParams,
+ _: &AppContext,
+ ) -> schemars::schema::RootSchema {
+ let mut root_schema = generator.root_schema_for::<Self::FileContent>();
+
+ // Create a schema for a 'languages overrides' object, associating editor
+ // settings with specific languages.
+ assert!(root_schema
+ .definitions
+ .contains_key("LanguageSettingsContent"));
+
+ let languages_object_schema = SchemaObject {
+ instance_type: Some(InstanceType::Object.into()),
+ object: Some(Box::new(ObjectValidation {
+ properties: params
+ .language_names
+ .iter()
+ .map(|name| {
+ (
+ name.clone(),
+ Schema::new_ref("#/definitions/LanguageSettingsContent".into()),
+ )
+ })
+ .collect(),
+ ..Default::default()
+ })),
+ ..Default::default()
+ };
+
+ root_schema
+ .definitions
+ .extend([("Languages".into(), languages_object_schema.into())]);
+
+ root_schema
+ .schema
+ .object
+ .as_mut()
+ .unwrap()
+ .properties
+ .extend([
+ (
+ "languages".to_owned(),
+ Schema::new_ref("#/definitions/Languages".into()),
+ ),
+ // For backward compatibility
+ (
+ "language_overrides".to_owned(),
+ Schema::new_ref("#/definitions/Languages".into()),
+ ),
+ ]);
+
+ root_schema
+ }
+}
+
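+// Applies any fields set in `src` on top of `settings`, leaving unset fields
+// untouched; this is what gives user and per-language settings their
+// precedence over the defaults.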
+fn merge_settings(settings: &mut LanguageSettings, src: &LanguageSettingsContent) {
+ merge(&mut settings.tab_size, src.tab_size);
+ merge(&mut settings.hard_tabs, src.hard_tabs);
+ merge(&mut settings.soft_wrap, src.soft_wrap);
+ merge(&mut settings.show_wrap_guides, src.show_wrap_guides);
+ merge(&mut settings.wrap_guides, src.wrap_guides.clone());
+
+ merge(
+ &mut settings.preferred_line_length,
+ src.preferred_line_length,
+ );
+ merge(&mut settings.formatter, src.formatter.clone());
+ merge(&mut settings.prettier, src.prettier.clone());
+ merge(&mut settings.format_on_save, src.format_on_save.clone());
+ merge(
+ &mut settings.remove_trailing_whitespace_on_save,
+ src.remove_trailing_whitespace_on_save,
+ );
+ merge(
+ &mut settings.ensure_final_newline_on_save,
+ src.ensure_final_newline_on_save,
+ );
+ merge(
+ &mut settings.enable_language_server,
+ src.enable_language_server,
+ );
+ merge(
+ &mut settings.show_copilot_suggestions,
+ src.show_copilot_suggestions,
+ );
+ merge(&mut settings.show_whitespaces, src.show_whitespaces);
+ merge(
+ &mut settings.extend_comment_on_newline,
+ src.extend_comment_on_newline,
+ );
+ merge(&mut settings.inlay_hints, src.inlay_hints);
+ fn merge<T>(target: &mut T, value: Option<T>) {
+ if let Some(value) = value {
+ *target = value;
+ }
+ }
+}
@@ -0,0 +1,138 @@
+use fuzzy::{StringMatch, StringMatchCandidate};
+use gpui2::{Executor, HighlightStyle};
+use std::{ops::Range, sync::Arc};
+
+#[derive(Debug)]
+pub struct Outline<T> {
+ pub items: Vec<OutlineItem<T>>,
+ candidates: Vec<StringMatchCandidate>,
+ path_candidates: Vec<StringMatchCandidate>,
+ path_candidate_prefixes: Vec<usize>,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct OutlineItem<T> {
+ pub depth: usize,
+ pub range: Range<T>,
+ pub text: String,
+ pub highlight_ranges: Vec<(Range<usize>, HighlightStyle)>,
+ pub name_ranges: Vec<Range<usize>>,
+}
+
+impl<T> Outline<T> {
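+ /// Builds fuzzy-match candidates from the outline items: one candidate per
+ /// item covering just its name ranges, plus a "path" candidate that
+ /// concatenates the names of the item's ancestors, used for queries that
+ /// contain whitespace.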
+ pub fn new(items: Vec<OutlineItem<T>>) -> Self {
+ let mut candidates = Vec::new();
+ let mut path_candidates = Vec::new();
+ let mut path_candidate_prefixes = Vec::new();
+ let mut path_text = String::new();
+ let mut path_stack = Vec::new();
+
+ for (id, item) in items.iter().enumerate() {
+ if item.depth < path_stack.len() {
+ path_stack.truncate(item.depth);
+ path_text.truncate(path_stack.last().copied().unwrap_or(0));
+ }
+ if !path_text.is_empty() {
+ path_text.push(' ');
+ }
+ path_candidate_prefixes.push(path_text.len());
+ path_text.push_str(&item.text);
+ path_stack.push(path_text.len());
+
+ let candidate_text = item
+ .name_ranges
+ .iter()
+ .map(|range| &item.text[range.start..range.end])
+ .collect::<String>();
+
+ path_candidates.push(StringMatchCandidate::new(id, path_text.clone()));
+ candidates.push(StringMatchCandidate::new(id, candidate_text));
+ }
+
+ Self {
+ candidates,
+ path_candidates,
+ path_candidate_prefixes,
+ items,
+ }
+ }
+
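+ /// Fuzzy-searches the outline. A query containing a space is treated as a
+ /// path query and matched against ancestor paths; match positions are then
+ /// remapped into each item's own text, and ancestor items are spliced in
+ /// ahead of their matching descendants so the results read as a tree.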
+ pub async fn search(&self, query: &str, executor: Executor) -> Vec<StringMatch> {
+ let query = query.trim_start();
+ let is_path_query = query.contains(' ');
+ let smart_case = query.chars().any(|c| c.is_uppercase());
+ let mut matches = fuzzy::match_strings(
+ if is_path_query {
+ &self.path_candidates
+ } else {
+ &self.candidates
+ },
+ query,
+ smart_case,
+ 100,
+ &Default::default(),
+ executor.clone(),
+ )
+ .await;
+ matches.sort_unstable_by_key(|m| m.candidate_id);
+
+ let mut tree_matches = Vec::new();
+
+ let mut prev_item_ix = 0;
+ for mut string_match in matches {
+ let outline_match = &self.items[string_match.candidate_id];
+
+ if is_path_query {
+ let prefix_len = self.path_candidate_prefixes[string_match.candidate_id];
+ string_match
+ .positions
+ .retain(|position| *position >= prefix_len);
+ for position in &mut string_match.positions {
+ *position -= prefix_len;
+ }
+ } else {
+ let mut name_ranges = outline_match.name_ranges.iter();
+ let mut name_range = name_ranges.next().unwrap();
+ let mut preceding_ranges_len = 0;
+ for position in &mut string_match.positions {
+ while *position >= preceding_ranges_len + name_range.len() {
+ preceding_ranges_len += name_range.len();
+ name_range = name_ranges.next().unwrap();
+ }
+ *position = name_range.start + (*position - preceding_ranges_len);
+ }
+ }
+
+ let insertion_ix = tree_matches.len();
+ let mut cur_depth = outline_match.depth;
+ for (ix, item) in self.items[prev_item_ix..string_match.candidate_id]
+ .iter()
+ .enumerate()
+ .rev()
+ {
+ if cur_depth == 0 {
+ break;
+ }
+
+ let candidate_index = ix + prev_item_ix;
+ if item.depth == cur_depth - 1 {
+ tree_matches.insert(
+ insertion_ix,
+ StringMatch {
+ candidate_id: candidate_index,
+ score: Default::default(),
+ positions: Default::default(),
+ string: Default::default(),
+ },
+ );
+ cur_depth -= 1;
+ }
+ }
+
+ prev_item_ix = string_match.candidate_id + 1;
+ tree_matches.push(string_match);
+ }
+
+ tree_matches
+ }
+}
@@ -0,0 +1,589 @@
+use crate::{
+ diagnostic_set::DiagnosticEntry, CodeAction, CodeLabel, Completion, CursorShape, Diagnostic,
+ Language,
+};
+use anyhow::{anyhow, Result};
+use clock::ReplicaId;
+use lsp::{DiagnosticSeverity, LanguageServerId};
+use rpc::proto;
+use std::{ops::Range, sync::Arc};
+use text::*;
+
+pub use proto::{BufferState, Operation};
+
+pub fn serialize_fingerprint(fingerprint: RopeFingerprint) -> String {
+ fingerprint.to_hex()
+}
+
+pub fn deserialize_fingerprint(fingerprint: &str) -> Result<RopeFingerprint> {
+ RopeFingerprint::from_hex(fingerprint)
+ .map_err(|error| anyhow!("invalid fingerprint: {}", error))
+}
+
+pub fn deserialize_line_ending(message: proto::LineEnding) -> text::LineEnding {
+ match message {
+ proto::LineEnding::Unix => text::LineEnding::Unix,
+ proto::LineEnding::Windows => text::LineEnding::Windows,
+ }
+}
+
+pub fn serialize_line_ending(message: text::LineEnding) -> proto::LineEnding {
+ match message {
+ text::LineEnding::Unix => proto::LineEnding::Unix,
+ text::LineEnding::Windows => proto::LineEnding::Windows,
+ }
+}
+
+pub fn serialize_operation(operation: &crate::Operation) -> proto::Operation {
+ proto::Operation {
+ variant: Some(match operation {
+ crate::Operation::Buffer(text::Operation::Edit(edit)) => {
+ proto::operation::Variant::Edit(serialize_edit_operation(edit))
+ }
+
+ crate::Operation::Buffer(text::Operation::Undo(undo)) => {
+ proto::operation::Variant::Undo(proto::operation::Undo {
+ replica_id: undo.timestamp.replica_id as u32,
+ lamport_timestamp: undo.timestamp.value,
+ version: serialize_version(&undo.version),
+ counts: undo
+ .counts
+ .iter()
+ .map(|(edit_id, count)| proto::UndoCount {
+ replica_id: edit_id.replica_id as u32,
+ lamport_timestamp: edit_id.value,
+ count: *count,
+ })
+ .collect(),
+ })
+ }
+
+ crate::Operation::UpdateSelections {
+ selections,
+ line_mode,
+ lamport_timestamp,
+ cursor_shape,
+ } => proto::operation::Variant::UpdateSelections(proto::operation::UpdateSelections {
+ replica_id: lamport_timestamp.replica_id as u32,
+ lamport_timestamp: lamport_timestamp.value,
+ selections: serialize_selections(selections),
+ line_mode: *line_mode,
+ cursor_shape: serialize_cursor_shape(cursor_shape) as i32,
+ }),
+
+ crate::Operation::UpdateDiagnostics {
+ lamport_timestamp,
+ server_id,
+ diagnostics,
+ } => proto::operation::Variant::UpdateDiagnostics(proto::UpdateDiagnostics {
+ replica_id: lamport_timestamp.replica_id as u32,
+ lamport_timestamp: lamport_timestamp.value,
+ server_id: server_id.0 as u64,
+ diagnostics: serialize_diagnostics(diagnostics.iter()),
+ }),
+
+ crate::Operation::UpdateCompletionTriggers {
+ triggers,
+ lamport_timestamp,
+ } => proto::operation::Variant::UpdateCompletionTriggers(
+ proto::operation::UpdateCompletionTriggers {
+ replica_id: lamport_timestamp.replica_id as u32,
+ lamport_timestamp: lamport_timestamp.value,
+ triggers: triggers.clone(),
+ },
+ ),
+ }),
+ }
+}
+
+pub fn serialize_edit_operation(operation: &EditOperation) -> proto::operation::Edit {
+ proto::operation::Edit {
+ replica_id: operation.timestamp.replica_id as u32,
+ lamport_timestamp: operation.timestamp.value,
+ version: serialize_version(&operation.version),
+ ranges: operation.ranges.iter().map(serialize_range).collect(),
+ new_text: operation
+ .new_text
+ .iter()
+ .map(|text| text.to_string())
+ .collect(),
+ }
+}
+
+pub fn serialize_undo_map_entry(
+ (edit_id, counts): (&clock::Lamport, &[(clock::Lamport, u32)]),
+) -> proto::UndoMapEntry {
+ proto::UndoMapEntry {
+ replica_id: edit_id.replica_id as u32,
+ local_timestamp: edit_id.value,
+ counts: counts
+ .iter()
+ .map(|(undo_id, count)| proto::UndoCount {
+ replica_id: undo_id.replica_id as u32,
+ lamport_timestamp: undo_id.value,
+ count: *count,
+ })
+ .collect(),
+ }
+}
+
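+// Splits operations into fixed-size chunks for transmission (5 in tests, 100
+// otherwise). The iterator always ends by yielding one final empty chunk
+// before returning `None`.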
+pub fn split_operations(
+ mut operations: Vec<proto::Operation>,
+) -> impl Iterator<Item = Vec<proto::Operation>> {
+ #[cfg(any(test, feature = "test-support"))]
+ const CHUNK_SIZE: usize = 5;
+
+ #[cfg(not(any(test, feature = "test-support")))]
+ const CHUNK_SIZE: usize = 100;
+
+ let mut done = false;
+ std::iter::from_fn(move || {
+ if done {
+ return None;
+ }
+
+ let operations = operations
+ .drain(..std::cmp::min(CHUNK_SIZE, operations.len()))
+ .collect::<Vec<_>>();
+ if operations.is_empty() {
+ done = true;
+ }
+ Some(operations)
+ })
+}
+
+pub fn serialize_selections(selections: &Arc<[Selection<Anchor>]>) -> Vec<proto::Selection> {
+ selections.iter().map(serialize_selection).collect()
+}
+
+pub fn serialize_selection(selection: &Selection<Anchor>) -> proto::Selection {
+ proto::Selection {
+ id: selection.id as u64,
+ start: Some(proto::EditorAnchor {
+ anchor: Some(serialize_anchor(&selection.start)),
+ excerpt_id: 0,
+ }),
+ end: Some(proto::EditorAnchor {
+ anchor: Some(serialize_anchor(&selection.end)),
+ excerpt_id: 0,
+ }),
+ reversed: selection.reversed,
+ }
+}
+
+pub fn serialize_cursor_shape(cursor_shape: &CursorShape) -> proto::CursorShape {
+ match cursor_shape {
+ CursorShape::Bar => proto::CursorShape::CursorBar,
+ CursorShape::Block => proto::CursorShape::CursorBlock,
+ CursorShape::Underscore => proto::CursorShape::CursorUnderscore,
+ CursorShape::Hollow => proto::CursorShape::CursorHollow,
+ }
+}
+
+pub fn deserialize_cursor_shape(cursor_shape: proto::CursorShape) -> CursorShape {
+ match cursor_shape {
+ proto::CursorShape::CursorBar => CursorShape::Bar,
+ proto::CursorShape::CursorBlock => CursorShape::Block,
+ proto::CursorShape::CursorUnderscore => CursorShape::Underscore,
+ proto::CursorShape::CursorHollow => CursorShape::Hollow,
+ }
+}
+
+pub fn serialize_diagnostics<'a>(
+ diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<Anchor>>,
+) -> Vec<proto::Diagnostic> {
+ diagnostics
+ .into_iter()
+ .map(|entry| proto::Diagnostic {
+ source: entry.diagnostic.source.clone(),
+ start: Some(serialize_anchor(&entry.range.start)),
+ end: Some(serialize_anchor(&entry.range.end)),
+ message: entry.diagnostic.message.clone(),
+ severity: match entry.diagnostic.severity {
+ DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error,
+ DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning,
+ DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information,
+ DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint,
+ _ => proto::diagnostic::Severity::None,
+ } as i32,
+ group_id: entry.diagnostic.group_id as u64,
+ is_primary: entry.diagnostic.is_primary,
+ is_valid: entry.diagnostic.is_valid,
+ code: entry.diagnostic.code.clone(),
+ is_disk_based: entry.diagnostic.is_disk_based,
+ is_unnecessary: entry.diagnostic.is_unnecessary,
+ })
+ .collect()
+}
+
+pub fn serialize_anchor(anchor: &Anchor) -> proto::Anchor {
+ proto::Anchor {
+ replica_id: anchor.timestamp.replica_id as u32,
+ timestamp: anchor.timestamp.value,
+ offset: anchor.offset as u64,
+ bias: match anchor.bias {
+ Bias::Left => proto::Bias::Left as i32,
+ Bias::Right => proto::Bias::Right as i32,
+ },
+ buffer_id: anchor.buffer_id,
+ }
+}
+
+// This behavior is currently duplicated in the collab database for snapshotting channel notes.
+pub fn deserialize_operation(message: proto::Operation) -> Result<crate::Operation> {
+ Ok(
+ match message
+ .variant
+ .ok_or_else(|| anyhow!("missing operation variant"))?
+ {
+ proto::operation::Variant::Edit(edit) => {
+ crate::Operation::Buffer(text::Operation::Edit(deserialize_edit_operation(edit)))
+ }
+ proto::operation::Variant::Undo(undo) => {
+ crate::Operation::Buffer(text::Operation::Undo(UndoOperation {
+ timestamp: clock::Lamport {
+ replica_id: undo.replica_id as ReplicaId,
+ value: undo.lamport_timestamp,
+ },
+ version: deserialize_version(&undo.version),
+ counts: undo
+ .counts
+ .into_iter()
+ .map(|c| {
+ (
+ clock::Lamport {
+ replica_id: c.replica_id as ReplicaId,
+ value: c.lamport_timestamp,
+ },
+ c.count,
+ )
+ })
+ .collect(),
+ }))
+ }
+ proto::operation::Variant::UpdateSelections(message) => {
+ let selections = message
+ .selections
+ .into_iter()
+ .filter_map(|selection| {
+ Some(Selection {
+ id: selection.id as usize,
+ start: deserialize_anchor(selection.start?.anchor?)?,
+ end: deserialize_anchor(selection.end?.anchor?)?,
+ reversed: selection.reversed,
+ goal: SelectionGoal::None,
+ })
+ })
+ .collect::<Vec<_>>();
+
+ crate::Operation::UpdateSelections {
+ lamport_timestamp: clock::Lamport {
+ replica_id: message.replica_id as ReplicaId,
+ value: message.lamport_timestamp,
+ },
+ selections: Arc::from(selections),
+ line_mode: message.line_mode,
+ cursor_shape: deserialize_cursor_shape(
+ proto::CursorShape::from_i32(message.cursor_shape)
+ .ok_or_else(|| anyhow!("Missing cursor shape"))?,
+ ),
+ }
+ }
+ proto::operation::Variant::UpdateDiagnostics(message) => {
+ crate::Operation::UpdateDiagnostics {
+ lamport_timestamp: clock::Lamport {
+ replica_id: message.replica_id as ReplicaId,
+ value: message.lamport_timestamp,
+ },
+ server_id: LanguageServerId(message.server_id as usize),
+ diagnostics: deserialize_diagnostics(message.diagnostics),
+ }
+ }
+ proto::operation::Variant::UpdateCompletionTriggers(message) => {
+ crate::Operation::UpdateCompletionTriggers {
+ triggers: message.triggers,
+ lamport_timestamp: clock::Lamport {
+ replica_id: message.replica_id as ReplicaId,
+ value: message.lamport_timestamp,
+ },
+ }
+ }
+ },
+ )
+}
+
+pub fn deserialize_edit_operation(edit: proto::operation::Edit) -> EditOperation {
+ EditOperation {
+ timestamp: clock::Lamport {
+ replica_id: edit.replica_id as ReplicaId,
+ value: edit.lamport_timestamp,
+ },
+ version: deserialize_version(&edit.version),
+ ranges: edit.ranges.into_iter().map(deserialize_range).collect(),
+ new_text: edit.new_text.into_iter().map(Arc::from).collect(),
+ }
+}
+
+pub fn deserialize_undo_map_entry(
+ entry: proto::UndoMapEntry,
+) -> (clock::Lamport, Vec<(clock::Lamport, u32)>) {
+ (
+ clock::Lamport {
+ replica_id: entry.replica_id as u16,
+ value: entry.local_timestamp,
+ },
+ entry
+ .counts
+ .into_iter()
+ .map(|undo_count| {
+ (
+ clock::Lamport {
+ replica_id: undo_count.replica_id as u16,
+ value: undo_count.lamport_timestamp,
+ },
+ undo_count.count,
+ )
+ })
+ .collect(),
+ )
+}
+
+pub fn deserialize_selections(selections: Vec<proto::Selection>) -> Arc<[Selection<Anchor>]> {
+ Arc::from(
+ selections
+ .into_iter()
+ .filter_map(deserialize_selection)
+ .collect::<Vec<_>>(),
+ )
+}
+
+pub fn deserialize_selection(selection: proto::Selection) -> Option<Selection<Anchor>> {
+ Some(Selection {
+ id: selection.id as usize,
+ start: deserialize_anchor(selection.start?.anchor?)?,
+ end: deserialize_anchor(selection.end?.anchor?)?,
+ reversed: selection.reversed,
+ goal: SelectionGoal::None,
+ })
+}
+
+pub fn deserialize_diagnostics(
+ diagnostics: Vec<proto::Diagnostic>,
+) -> Arc<[DiagnosticEntry<Anchor>]> {
+ diagnostics
+ .into_iter()
+ .filter_map(|diagnostic| {
+ Some(DiagnosticEntry {
+ range: deserialize_anchor(diagnostic.start?)?..deserialize_anchor(diagnostic.end?)?,
+ diagnostic: Diagnostic {
+ source: diagnostic.source,
+ severity: match proto::diagnostic::Severity::from_i32(diagnostic.severity)? {
+ proto::diagnostic::Severity::Error => DiagnosticSeverity::ERROR,
+ proto::diagnostic::Severity::Warning => DiagnosticSeverity::WARNING,
+ proto::diagnostic::Severity::Information => DiagnosticSeverity::INFORMATION,
+ proto::diagnostic::Severity::Hint => DiagnosticSeverity::HINT,
+ proto::diagnostic::Severity::None => return None,
+ },
+ message: diagnostic.message,
+ group_id: diagnostic.group_id as usize,
+ code: diagnostic.code,
+ is_valid: diagnostic.is_valid,
+ is_primary: diagnostic.is_primary,
+ is_disk_based: diagnostic.is_disk_based,
+ is_unnecessary: diagnostic.is_unnecessary,
+ },
+ })
+ })
+ .collect()
+}
+
+pub fn deserialize_anchor(anchor: proto::Anchor) -> Option<Anchor> {
+ Some(Anchor {
+ timestamp: clock::Lamport {
+ replica_id: anchor.replica_id as ReplicaId,
+ value: anchor.timestamp,
+ },
+ offset: anchor.offset as usize,
+ bias: match proto::Bias::from_i32(anchor.bias)? {
+ proto::Bias::Left => Bias::Left,
+ proto::Bias::Right => Bias::Right,
+ },
+ buffer_id: anchor.buffer_id,
+ })
+}
+
+pub fn lamport_timestamp_for_operation(operation: &proto::Operation) -> Option<clock::Lamport> {
+ let replica_id;
+ let value;
+ match operation.variant.as_ref()? {
+ proto::operation::Variant::Edit(op) => {
+ replica_id = op.replica_id;
+ value = op.lamport_timestamp;
+ }
+ proto::operation::Variant::Undo(op) => {
+ replica_id = op.replica_id;
+ value = op.lamport_timestamp;
+ }
+ proto::operation::Variant::UpdateDiagnostics(op) => {
+ replica_id = op.replica_id;
+ value = op.lamport_timestamp;
+ }
+ proto::operation::Variant::UpdateSelections(op) => {
+ replica_id = op.replica_id;
+ value = op.lamport_timestamp;
+ }
+ proto::operation::Variant::UpdateCompletionTriggers(op) => {
+ replica_id = op.replica_id;
+ value = op.lamport_timestamp;
+ }
+ }
+
+ Some(clock::Lamport {
+ replica_id: replica_id as ReplicaId,
+ value,
+ })
+}
+
+pub fn serialize_completion(completion: &Completion) -> proto::Completion {
+ proto::Completion {
+ old_start: Some(serialize_anchor(&completion.old_range.start)),
+ old_end: Some(serialize_anchor(&completion.old_range.end)),
+ new_text: completion.new_text.clone(),
+ server_id: completion.server_id.0 as u64,
+ lsp_completion: serde_json::to_vec(&completion.lsp_completion).unwrap(),
+ }
+}
+
+pub async fn deserialize_completion(
+ completion: proto::Completion,
+ language: Option<Arc<Language>>,
+) -> Result<Completion> {
+ let old_start = completion
+ .old_start
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("invalid old start"))?;
+ let old_end = completion
+ .old_end
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("invalid old end"))?;
+ let lsp_completion = serde_json::from_slice(&completion.lsp_completion)?;
+
+ let mut label = None;
+ if let Some(language) = language {
+ label = language.label_for_completion(&lsp_completion).await;
+ }
+
+ Ok(Completion {
+ old_range: old_start..old_end,
+ new_text: completion.new_text,
+ label: label.unwrap_or_else(|| {
+ CodeLabel::plain(
+ lsp_completion.label.clone(),
+ lsp_completion.filter_text.as_deref(),
+ )
+ }),
+ server_id: LanguageServerId(completion.server_id as usize),
+ lsp_completion,
+ })
+}
+
+pub fn serialize_code_action(action: &CodeAction) -> proto::CodeAction {
+ proto::CodeAction {
+ server_id: action.server_id.0 as u64,
+ start: Some(serialize_anchor(&action.range.start)),
+ end: Some(serialize_anchor(&action.range.end)),
+ lsp_action: serde_json::to_vec(&action.lsp_action).unwrap(),
+ }
+}
+
+pub fn deserialize_code_action(action: proto::CodeAction) -> Result<CodeAction> {
+ let start = action
+ .start
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("invalid start"))?;
+ let end = action
+ .end
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("invalid end"))?;
+ let lsp_action = serde_json::from_slice(&action.lsp_action)?;
+ Ok(CodeAction {
+ server_id: LanguageServerId(action.server_id as usize),
+ range: start..end,
+ lsp_action,
+ })
+}
+
+pub fn serialize_transaction(transaction: &Transaction) -> proto::Transaction {
+ proto::Transaction {
+ id: Some(serialize_timestamp(transaction.id)),
+ edit_ids: transaction
+ .edit_ids
+ .iter()
+ .copied()
+ .map(serialize_timestamp)
+ .collect(),
+ start: serialize_version(&transaction.start),
+ }
+}
+
+pub fn deserialize_transaction(transaction: proto::Transaction) -> Result<Transaction> {
+ Ok(Transaction {
+ id: deserialize_timestamp(
+ transaction
+ .id
+ .ok_or_else(|| anyhow!("missing transaction id"))?,
+ ),
+ edit_ids: transaction
+ .edit_ids
+ .into_iter()
+ .map(deserialize_timestamp)
+ .collect(),
+ start: deserialize_version(&transaction.start),
+ })
+}
+
+pub fn serialize_timestamp(timestamp: clock::Lamport) -> proto::LamportTimestamp {
+ proto::LamportTimestamp {
+ replica_id: timestamp.replica_id as u32,
+ value: timestamp.value,
+ }
+}
+
+pub fn deserialize_timestamp(timestamp: proto::LamportTimestamp) -> clock::Lamport {
+ clock::Lamport {
+ replica_id: timestamp.replica_id as ReplicaId,
+ value: timestamp.value,
+ }
+}
+
+pub fn serialize_range(range: &Range<FullOffset>) -> proto::Range {
+ proto::Range {
+ start: range.start.0 as u64,
+ end: range.end.0 as u64,
+ }
+}
+
+pub fn deserialize_range(range: proto::Range) -> Range<FullOffset> {
+ FullOffset(range.start as usize)..FullOffset(range.end as usize)
+}
+
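+/// Rebuild a `clock::Global` from its wire representation. This is the
+/// inverse of `serialize_version` below: observing each
+/// `(replica_id, timestamp)` entry reconstructs an equivalent version vector.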
+pub fn deserialize_version(message: &[proto::VectorClockEntry]) -> clock::Global {
+ let mut version = clock::Global::new();
+ for entry in message {
+ version.observe(clock::Lamport {
+ replica_id: entry.replica_id as ReplicaId,
+ value: entry.timestamp,
+ });
+ }
+ version
+}
+
+pub fn serialize_version(version: &clock::Global) -> Vec<proto::VectorClockEntry> {
+ version
+ .iter()
+ .map(|entry| proto::VectorClockEntry {
+ replica_id: entry.replica_id as u32,
+ timestamp: entry.value,
+ })
+ .collect()
+}
@@ -0,0 +1,1811 @@
+#[cfg(test)]
+mod syntax_map_tests;
+
+use crate::{Grammar, InjectionConfig, Language, LanguageRegistry};
+use collections::HashMap;
+use futures::FutureExt;
+use parking_lot::Mutex;
+use std::{
+ borrow::Cow,
+ cell::RefCell,
+ cmp::{self, Ordering, Reverse},
+ collections::BinaryHeap,
+ fmt, iter,
+ ops::{Deref, DerefMut, Range},
+ sync::Arc,
+};
+use sum_tree::{Bias, SeekTarget, SumTree};
+use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, Rope, ToOffset, ToPoint};
+use tree_sitter::{
+ Node, Parser, Query, QueryCapture, QueryCaptures, QueryCursor, QueryMatches, Tree,
+};
+
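+// Parsing state is reused rather than reallocated: each thread keeps one
+// `Parser`, and `QueryCursor`s are pooled globally via `QueryCursorHandle`,
+// which pops a cursor from `QUERY_CURSORS` and returns it, reset, on drop.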
+thread_local! {
+ static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
+}
+
+static QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Mutex::new(vec![]);
+
+#[derive(Default)]
+pub struct SyntaxMap {
+ snapshot: SyntaxSnapshot,
+ language_registry: Option<Arc<LanguageRegistry>>,
+}
+
+#[derive(Clone, Default)]
+pub struct SyntaxSnapshot {
+ layers: SumTree<SyntaxLayer>,
+ parsed_version: clock::Global,
+ interpolated_version: clock::Global,
+ language_registry_version: usize,
+}
+
+#[derive(Default)]
+pub struct SyntaxMapCaptures<'a> {
+ layers: Vec<SyntaxMapCapturesLayer<'a>>,
+ active_layer_count: usize,
+ grammars: Vec<&'a Grammar>,
+}
+
+#[derive(Default)]
+pub struct SyntaxMapMatches<'a> {
+ layers: Vec<SyntaxMapMatchesLayer<'a>>,
+ active_layer_count: usize,
+ grammars: Vec<&'a Grammar>,
+}
+
+#[derive(Debug)]
+pub struct SyntaxMapCapture<'a> {
+ pub depth: usize,
+ pub node: Node<'a>,
+ pub index: u32,
+ pub grammar_index: usize,
+}
+
+#[derive(Debug)]
+pub struct SyntaxMapMatch<'a> {
+ pub depth: usize,
+ pub pattern_index: usize,
+ pub captures: &'a [QueryCapture<'a>],
+ pub grammar_index: usize,
+}
+
+struct SyntaxMapCapturesLayer<'a> {
+ depth: usize,
+ captures: QueryCaptures<'a, 'a, TextProvider<'a>, &'a [u8]>,
+ next_capture: Option<QueryCapture<'a>>,
+ grammar_index: usize,
+ _query_cursor: QueryCursorHandle,
+}
+
+struct SyntaxMapMatchesLayer<'a> {
+ depth: usize,
+ next_pattern_index: usize,
+ next_captures: Vec<QueryCapture<'a>>,
+ has_next: bool,
+ matches: QueryMatches<'a, 'a, TextProvider<'a>, &'a [u8]>,
+ grammar_index: usize,
+ _query_cursor: QueryCursorHandle,
+}
+
+#[derive(Clone)]
+struct SyntaxLayer {
+ depth: usize,
+ range: Range<Anchor>,
+ content: SyntaxLayerContent,
+}
+
+#[derive(Clone)]
+enum SyntaxLayerContent {
+ Parsed {
+ tree: tree_sitter::Tree,
+ language: Arc<Language>,
+ },
+ Pending {
+ language_name: Arc<str>,
+ },
+}
+
+impl SyntaxLayerContent {
+ fn language_id(&self) -> Option<usize> {
+ match self {
+ SyntaxLayerContent::Parsed { language, .. } => language.id(),
+ SyntaxLayerContent::Pending { .. } => None,
+ }
+ }
+
+ fn tree(&self) -> Option<&Tree> {
+ match self {
+ SyntaxLayerContent::Parsed { tree, .. } => Some(tree),
+ SyntaxLayerContent::Pending { .. } => None,
+ }
+ }
+}
+
+#[derive(Debug)]
+pub struct SyntaxLayerInfo<'a> {
+ pub depth: usize,
+ pub language: &'a Arc<Language>,
+ tree: &'a Tree,
+ offset: (usize, tree_sitter::Point),
+}
+
+#[derive(Clone)]
+pub struct OwnedSyntaxLayerInfo {
+ pub depth: usize,
+ pub language: Arc<Language>,
+ tree: tree_sitter::Tree,
+ offset: (usize, tree_sitter::Point),
+}
+
+#[derive(Debug, Clone)]
+struct SyntaxLayerSummary {
+ min_depth: usize,
+ max_depth: usize,
+ range: Range<Anchor>,
+ last_layer_range: Range<Anchor>,
+ last_layer_language: Option<usize>,
+ contains_unknown_injections: bool,
+}
+
+#[derive(Clone, Debug)]
+struct SyntaxLayerPosition {
+ depth: usize,
+ range: Range<Anchor>,
+ language: Option<usize>,
+}
+
+#[derive(Clone, Debug)]
+struct ChangeStartPosition {
+ depth: usize,
+ position: Anchor,
+}
+
+#[derive(Clone, Debug)]
+struct SyntaxLayerPositionBeforeChange {
+ position: SyntaxLayerPosition,
+ change: ChangeStartPosition,
+}
+
+struct ParseStep {
+ depth: usize,
+ language: ParseStepLanguage,
+ range: Range<Anchor>,
+ included_ranges: Vec<tree_sitter::Range>,
+ mode: ParseMode,
+}
+
+#[derive(Debug)]
+enum ParseStepLanguage {
+ Loaded { language: Arc<Language> },
+ Pending { name: Arc<str> },
+}
+
+impl ParseStepLanguage {
+ fn id(&self) -> Option<usize> {
+ match self {
+ ParseStepLanguage::Loaded { language } => language.id(),
+ ParseStepLanguage::Pending { .. } => None,
+ }
+ }
+}
+
+enum ParseMode {
+ Single,
+ Combined {
+ parent_layer_range: Range<usize>,
+ parent_layer_changed_ranges: Vec<Range<usize>>,
+ },
+}
+
+#[derive(Debug, PartialEq, Eq)]
+struct ChangedRegion {
+ depth: usize,
+ range: Range<Anchor>,
+}
+
+#[derive(Default)]
+struct ChangeRegionSet(Vec<ChangedRegion>);
+
+struct TextProvider<'a>(&'a Rope);
+
+struct ByteChunks<'a>(text::Chunks<'a>);
+
+struct QueryCursorHandle(Option<QueryCursor>);
+
+impl SyntaxMap {
+ pub fn new() -> Self {
+ Self::default()
+ }
+
+ pub fn set_language_registry(&mut self, registry: Arc<LanguageRegistry>) {
+ self.language_registry = Some(registry);
+ }
+
+ pub fn snapshot(&self) -> SyntaxSnapshot {
+ self.snapshot.clone()
+ }
+
+ pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
+ self.language_registry.clone()
+ }
+
+ pub fn interpolate(&mut self, text: &BufferSnapshot) {
+ self.snapshot.interpolate(text);
+ }
+
+ #[cfg(test)]
+ pub fn reparse(&mut self, language: Arc<Language>, text: &BufferSnapshot) {
+ self.snapshot
+ .reparse(text, self.language_registry.clone(), language);
+ }
+
+ pub fn did_parse(&mut self, snapshot: SyntaxSnapshot) {
+ self.snapshot = snapshot;
+ }
+
+ pub fn clear(&mut self) {
+ self.snapshot = SyntaxSnapshot::default();
+ }
+}
+
+impl SyntaxSnapshot {
+ pub fn is_empty(&self) -> bool {
+ self.layers.is_empty()
+ }
+
+ fn interpolate(&mut self, text: &BufferSnapshot) {
+ let edits = text
+ .anchored_edits_since::<(usize, Point)>(&self.interpolated_version)
+ .collect::<Vec<_>>();
+ self.interpolated_version = text.version().clone();
+
+ if edits.is_empty() {
+ return;
+ }
+
+ let mut layers = SumTree::new();
+ let mut first_edit_ix_for_depth = 0;
+ let mut prev_depth = 0;
+ let mut cursor = self.layers.cursor::<SyntaxLayerSummary>();
+ cursor.next(text);
+
+ 'outer: loop {
+ let depth = cursor.end(text).max_depth;
+ if depth > prev_depth {
+ first_edit_ix_for_depth = 0;
+ prev_depth = depth;
+ }
+
+ // Preserve any layers at this depth that precede the first edit.
+ if let Some((_, edit_range)) = edits.get(first_edit_ix_for_depth) {
+ let target = ChangeStartPosition {
+ depth,
+ position: edit_range.start,
+ };
+ if target.cmp(&cursor.start(), text).is_gt() {
+ let slice = cursor.slice(&target, Bias::Left, text);
+ layers.append(slice, text);
+ }
+ }
+ // If this layer follows all of the edits, then preserve it and any
+ // subsequent layers at this same depth.
+ else if cursor.item().is_some() {
+ let slice = cursor.slice(
+ &SyntaxLayerPosition {
+ depth: depth + 1,
+ range: Anchor::MIN..Anchor::MAX,
+ language: None,
+ },
+ Bias::Left,
+ text,
+ );
+ layers.append(slice, text);
+ continue;
+ };
+
+ let Some(layer) = cursor.item() else { break };
+ let (start_byte, start_point) = layer.range.start.summary::<(usize, Point)>(text);
+
+ // Ignore edits that end before the start of this layer, and don't consider them
+ // for any subsequent layers at this same depth.
+ loop {
+ let Some((_, edit_range)) = edits.get(first_edit_ix_for_depth) else {
+ continue 'outer;
+ };
+ if edit_range.end.cmp(&layer.range.start, text).is_le() {
+ first_edit_ix_for_depth += 1;
+ } else {
+ break;
+ }
+ }
+
+ let mut layer = layer.clone();
+ if let SyntaxLayerContent::Parsed { tree, .. } = &mut layer.content {
+ for (edit, edit_range) in &edits[first_edit_ix_for_depth..] {
+ // Ignore any edits that follow this layer.
+ if edit_range.start.cmp(&layer.range.end, text).is_ge() {
+ break;
+ }
+
+ // Apply any edits that intersect this layer to the layer's syntax tree.
+ let tree_edit = if edit_range.start.cmp(&layer.range.start, text).is_ge() {
+ tree_sitter::InputEdit {
+ start_byte: edit.new.start.0 - start_byte,
+ old_end_byte: edit.new.start.0 - start_byte
+ + (edit.old.end.0 - edit.old.start.0),
+ new_end_byte: edit.new.end.0 - start_byte,
+ start_position: (edit.new.start.1 - start_point).to_ts_point(),
+ old_end_position: (edit.new.start.1 - start_point
+ + (edit.old.end.1 - edit.old.start.1))
+ .to_ts_point(),
+ new_end_position: (edit.new.end.1 - start_point).to_ts_point(),
+ }
+ } else {
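+                    // This edit starts before the layer itself: mark the
+                    // entire tree as edited by replacing its full extent
+                    // with an empty range.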
+ let node = tree.root_node();
+ tree_sitter::InputEdit {
+ start_byte: 0,
+ old_end_byte: node.end_byte(),
+ new_end_byte: 0,
+ start_position: Default::default(),
+ old_end_position: node.end_position(),
+ new_end_position: Default::default(),
+ }
+ };
+
+ tree.edit(&tree_edit);
+ }
+
+ debug_assert!(
+ tree.root_node().end_byte() <= text.len(),
+                "tree's size {} is larger than text size {}",
+ tree.root_node().end_byte(),
+ text.len(),
+ );
+ }
+
+ layers.push(layer, text);
+ cursor.next(text);
+ }
+
+        layers.append(cursor.suffix(text), text);
+ drop(cursor);
+ self.layers = layers;
+ }
+
+ pub fn reparse(
+ &mut self,
+ text: &BufferSnapshot,
+ registry: Option<Arc<LanguageRegistry>>,
+ root_language: Arc<Language>,
+ ) {
+ let edit_ranges = text
+ .edits_since::<usize>(&self.parsed_version)
+ .map(|edit| edit.new)
+ .collect::<Vec<_>>();
+ self.reparse_with_ranges(text, root_language.clone(), edit_ranges, registry.as_ref());
+
+ if let Some(registry) = registry {
+ if registry.version() != self.language_registry_version {
+ let mut resolved_injection_ranges = Vec::new();
+ let mut cursor = self
+ .layers
+ .filter::<_, ()>(|summary| summary.contains_unknown_injections);
+ cursor.next(text);
+ while let Some(layer) = cursor.item() {
+ let SyntaxLayerContent::Pending { language_name } = &layer.content else {
+ unreachable!()
+ };
+ if registry
+ .language_for_name_or_extension(language_name)
+ .now_or_never()
+ .and_then(|language| language.ok())
+ .is_some()
+ {
+ resolved_injection_ranges.push(layer.range.to_offset(text));
+ }
+
+ cursor.next(text);
+ }
+ drop(cursor);
+
+ if !resolved_injection_ranges.is_empty() {
+ self.reparse_with_ranges(
+ text,
+ root_language,
+ resolved_injection_ranges,
+ Some(®istry),
+ );
+ }
+ self.language_registry_version = registry.version();
+ }
+ }
+ }
+
+ fn reparse_with_ranges(
+ &mut self,
+ text: &BufferSnapshot,
+ root_language: Arc<Language>,
+ invalidated_ranges: Vec<Range<usize>>,
+ registry: Option<&Arc<LanguageRegistry>>,
+ ) {
+ log::trace!("reparse. invalidated ranges:{:?}", invalidated_ranges);
+
+ let max_depth = self.layers.summary().max_depth;
+ let mut cursor = self.layers.cursor::<SyntaxLayerSummary>();
+ cursor.next(&text);
+ let mut layers = SumTree::new();
+
+ let mut changed_regions = ChangeRegionSet::default();
+ let mut queue = BinaryHeap::new();
+ let mut combined_injection_ranges = HashMap::default();
+ queue.push(ParseStep {
+ depth: 0,
+ language: ParseStepLanguage::Loaded {
+ language: root_language,
+ },
+ included_ranges: vec![tree_sitter::Range {
+ start_byte: 0,
+ end_byte: text.len(),
+ start_point: Point::zero().to_ts_point(),
+ end_point: text.max_point().to_ts_point(),
+ }],
+ range: Anchor::MIN..Anchor::MAX,
+ mode: ParseMode::Single,
+ });
+
+ loop {
+ let step = queue.pop();
+ let position = if let Some(step) = &step {
+ SyntaxLayerPosition {
+ depth: step.depth,
+ range: step.range.clone(),
+ language: step.language.id(),
+ }
+ } else {
+ SyntaxLayerPosition {
+ depth: max_depth + 1,
+ range: Anchor::MAX..Anchor::MAX,
+ language: None,
+ }
+ };
+
+ let mut done = cursor.item().is_none();
+ while !done && position.cmp(&cursor.end(text), &text).is_gt() {
+ done = true;
+
+ let bounded_position = SyntaxLayerPositionBeforeChange {
+ position: position.clone(),
+ change: changed_regions.start_position(),
+ };
+ if bounded_position.cmp(&cursor.start(), &text).is_gt() {
+ let slice = cursor.slice(&bounded_position, Bias::Left, text);
+ if !slice.is_empty() {
+ layers.append(slice, &text);
+ if changed_regions.prune(cursor.end(text), text) {
+ done = false;
+ }
+ }
+ }
+
+ while position.cmp(&cursor.end(text), text).is_gt() {
+ let Some(layer) = cursor.item() else { break };
+
+ if changed_regions.intersects(&layer, text) {
+ if let SyntaxLayerContent::Parsed { language, .. } = &layer.content {
+ log::trace!(
+ "discard layer. language:{}, range:{:?}. changed_regions:{:?}",
+ language.name(),
+ LogAnchorRange(&layer.range, text),
+ LogChangedRegions(&changed_regions, text),
+ );
+ }
+
+ changed_regions.insert(
+ ChangedRegion {
+ depth: layer.depth + 1,
+ range: layer.range.clone(),
+ },
+ text,
+ );
+ } else {
+ layers.push(layer.clone(), text);
+ }
+
+ cursor.next(text);
+ if changed_regions.prune(cursor.end(text), text) {
+ done = false;
+ }
+ }
+ }
+
+ let Some(step) = step else { break };
+ let (step_start_byte, step_start_point) =
+ step.range.start.summary::<(usize, Point)>(text);
+ let step_end_byte = step.range.end.to_offset(text);
+
+ let mut old_layer = cursor.item();
+ if let Some(layer) = old_layer {
+ if layer.range.to_offset(text) == (step_start_byte..step_end_byte)
+ && layer.content.language_id() == step.language.id()
+ {
+ cursor.next(&text);
+ } else {
+ old_layer = None;
+ }
+ }
+
+ let content = match step.language {
+ ParseStepLanguage::Loaded { language } => {
+ let Some(grammar) = language.grammar() else {
+ continue;
+ };
+ let tree;
+ let changed_ranges;
+
+ let mut included_ranges = step.included_ranges;
+ for range in &mut included_ranges {
+ range.start_byte -= step_start_byte;
+ range.end_byte -= step_start_byte;
+ range.start_point = (Point::from_ts_point(range.start_point)
+ - step_start_point)
+ .to_ts_point();
+ range.end_point = (Point::from_ts_point(range.end_point)
+ - step_start_point)
+ .to_ts_point();
+ }
+
+ if let Some((SyntaxLayerContent::Parsed { tree: old_tree, .. }, layer_start)) =
+ old_layer.map(|layer| (&layer.content, layer.range.start))
+ {
+ log::trace!(
+ "existing layer. language:{}, start:{:?}, ranges:{:?}",
+ language.name(),
+ LogPoint(layer_start.to_point(&text)),
+ LogIncludedRanges(&old_tree.included_ranges())
+ );
+
+ if let ParseMode::Combined {
+ mut parent_layer_changed_ranges,
+ ..
+ } = step.mode
+ {
+ for range in &mut parent_layer_changed_ranges {
+ range.start = range.start.saturating_sub(step_start_byte);
+ range.end = range.end.saturating_sub(step_start_byte);
+ }
+
+ let changed_indices;
+ (included_ranges, changed_indices) = splice_included_ranges(
+ old_tree.included_ranges(),
+ &parent_layer_changed_ranges,
+ &included_ranges,
+ );
+ insert_newlines_between_ranges(
+ changed_indices,
+ &mut included_ranges,
+ &text,
+ step_start_byte,
+ step_start_point,
+ );
+ }
+
+ if included_ranges.is_empty() {
+ included_ranges.push(tree_sitter::Range {
+ start_byte: 0,
+ end_byte: 0,
+ start_point: Default::default(),
+ end_point: Default::default(),
+ });
+ }
+
+ log::trace!(
+ "update layer. language:{}, start:{:?}, included_ranges:{:?}",
+ language.name(),
+ LogAnchorRange(&step.range, text),
+ LogIncludedRanges(&included_ranges),
+ );
+
+ tree = parse_text(
+ grammar,
+ text.as_rope(),
+ step_start_byte,
+ included_ranges,
+ Some(old_tree.clone()),
+ );
+ changed_ranges = join_ranges(
+ invalidated_ranges.iter().cloned().filter(|range| {
+ range.start <= step_end_byte && range.end >= step_start_byte
+ }),
+ old_tree.changed_ranges(&tree).map(|r| {
+ step_start_byte + r.start_byte..step_start_byte + r.end_byte
+ }),
+ );
+ } else {
+ if matches!(step.mode, ParseMode::Combined { .. }) {
+ insert_newlines_between_ranges(
+ 0..included_ranges.len(),
+ &mut included_ranges,
+ text,
+ step_start_byte,
+ step_start_point,
+ );
+ }
+
+ if included_ranges.is_empty() {
+ included_ranges.push(tree_sitter::Range {
+ start_byte: 0,
+ end_byte: 0,
+ start_point: Default::default(),
+ end_point: Default::default(),
+ });
+ }
+
+ log::trace!(
+ "create layer. language:{}, range:{:?}, included_ranges:{:?}",
+ language.name(),
+ LogAnchorRange(&step.range, text),
+ LogIncludedRanges(&included_ranges),
+ );
+
+ tree = parse_text(
+ grammar,
+ text.as_rope(),
+ step_start_byte,
+ included_ranges,
+ None,
+ );
+ changed_ranges = vec![step_start_byte..step_end_byte];
+ }
+
+ if let (Some((config, registry)), false) = (
+ grammar.injection_config.as_ref().zip(registry.as_ref()),
+ changed_ranges.is_empty(),
+ ) {
+ for range in &changed_ranges {
+ changed_regions.insert(
+ ChangedRegion {
+ depth: step.depth + 1,
+ range: text.anchor_before(range.start)
+ ..text.anchor_after(range.end),
+ },
+ text,
+ );
+ }
+ get_injections(
+ config,
+ text,
+ step.range.clone(),
+ tree.root_node_with_offset(
+ step_start_byte,
+ step_start_point.to_ts_point(),
+ ),
+ registry,
+ step.depth + 1,
+ &changed_ranges,
+ &mut combined_injection_ranges,
+ &mut queue,
+ );
+ }
+
+ SyntaxLayerContent::Parsed { tree, language }
+ }
+ ParseStepLanguage::Pending { name } => SyntaxLayerContent::Pending {
+ language_name: name,
+ },
+ };
+
+ layers.push(
+ SyntaxLayer {
+ depth: step.depth,
+ range: step.range,
+ content,
+ },
+ &text,
+ );
+ }
+
+ drop(cursor);
+ self.layers = layers;
+ self.interpolated_version = text.version.clone();
+ self.parsed_version = text.version.clone();
+ #[cfg(debug_assertions)]
+ self.check_invariants(text);
+ }
+
+ #[cfg(debug_assertions)]
+ fn check_invariants(&self, text: &BufferSnapshot) {
+ let mut max_depth = 0;
+ let mut prev_range: Option<Range<Anchor>> = None;
+ for layer in self.layers.iter() {
+ if layer.depth == max_depth {
+ if let Some(prev_range) = prev_range {
+ match layer.range.start.cmp(&prev_range.start, text) {
+ Ordering::Less => panic!("layers out of order"),
+ Ordering::Equal => {
+ assert!(layer.range.end.cmp(&prev_range.end, text).is_ge())
+ }
+ Ordering::Greater => {}
+ }
+ }
+ } else if layer.depth < max_depth {
+ panic!("layers out of order")
+ }
+ max_depth = layer.depth;
+ prev_range = Some(layer.range.clone());
+ }
+ }
+
+ pub fn single_tree_captures<'a>(
+ range: Range<usize>,
+ text: &'a Rope,
+ tree: &'a Tree,
+ language: &'a Arc<Language>,
+ query: fn(&Grammar) -> Option<&Query>,
+ ) -> SyntaxMapCaptures<'a> {
+ SyntaxMapCaptures::new(
+ range.clone(),
+ text,
+ [SyntaxLayerInfo {
+ language,
+ tree,
+ depth: 0,
+ offset: (0, tree_sitter::Point::new(0, 0)),
+ }]
+ .into_iter(),
+ query,
+ )
+ }
+
+ pub fn captures<'a>(
+ &'a self,
+ range: Range<usize>,
+ buffer: &'a BufferSnapshot,
+ query: fn(&Grammar) -> Option<&Query>,
+ ) -> SyntaxMapCaptures {
+ SyntaxMapCaptures::new(
+ range.clone(),
+ buffer.as_rope(),
+ self.layers_for_range(range, buffer).into_iter(),
+ query,
+ )
+ }
+
+ pub fn matches<'a>(
+ &'a self,
+ range: Range<usize>,
+ buffer: &'a BufferSnapshot,
+ query: fn(&Grammar) -> Option<&Query>,
+ ) -> SyntaxMapMatches {
+ SyntaxMapMatches::new(
+ range.clone(),
+ buffer.as_rope(),
+ self.layers_for_range(range, buffer).into_iter(),
+ query,
+ )
+ }
+
+ #[cfg(test)]
+ pub fn layers<'a>(&'a self, buffer: &'a BufferSnapshot) -> Vec<SyntaxLayerInfo> {
+ self.layers_for_range(0..buffer.len(), buffer).collect()
+ }
+
+ pub fn layers_for_range<'a, T: ToOffset>(
+ &'a self,
+ range: Range<T>,
+ buffer: &'a BufferSnapshot,
+ ) -> impl 'a + Iterator<Item = SyntaxLayerInfo> {
+ let start_offset = range.start.to_offset(buffer);
+ let end_offset = range.end.to_offset(buffer);
+ let start = buffer.anchor_before(start_offset);
+ let end = buffer.anchor_after(end_offset);
+
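+        // Visit only layers whose range intersects the target. Summaries that
+        // span multiple depths are always descended into, since their `range`
+        // only describes the deepest layers they contain.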
+ let mut cursor = self.layers.filter::<_, ()>(move |summary| {
+ if summary.max_depth > summary.min_depth {
+ true
+ } else {
+ let is_before_start = summary.range.end.cmp(&start, buffer).is_lt();
+ let is_after_end = summary.range.start.cmp(&end, buffer).is_gt();
+ !is_before_start && !is_after_end
+ }
+ });
+
+ cursor.next(buffer);
+ iter::from_fn(move || {
+ while let Some(layer) = cursor.item() {
+ let mut info = None;
+ if let SyntaxLayerContent::Parsed { tree, language } = &layer.content {
+ let layer_start_offset = layer.range.start.to_offset(buffer);
+ let layer_start_point = layer.range.start.to_point(buffer).to_ts_point();
+
+ info = Some(SyntaxLayerInfo {
+ tree,
+ language,
+ depth: layer.depth,
+ offset: (layer_start_offset, layer_start_point),
+ });
+ }
+ cursor.next(buffer);
+ if info.is_some() {
+ return info;
+ }
+ }
+ None
+ })
+ }
+
+ pub fn contains_unknown_injections(&self) -> bool {
+ self.layers.summary().contains_unknown_injections
+ }
+
+ pub fn language_registry_version(&self) -> usize {
+ self.language_registry_version
+ }
+}
+
+impl<'a> SyntaxMapCaptures<'a> {
+ fn new(
+ range: Range<usize>,
+ text: &'a Rope,
+ layers: impl Iterator<Item = SyntaxLayerInfo<'a>>,
+ query: fn(&Grammar) -> Option<&Query>,
+ ) -> Self {
+ let mut result = Self {
+ layers: Vec::new(),
+ grammars: Vec::new(),
+ active_layer_count: 0,
+ };
+ for layer in layers {
+ let grammar = match &layer.language.grammar {
+ Some(grammar) => grammar,
+ None => continue,
+ };
+ let query = match query(&grammar) {
+ Some(query) => query,
+ None => continue,
+ };
+
+ let mut query_cursor = QueryCursorHandle::new();
+
+ // TODO - add a Tree-sitter API to remove the need for this.
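+            // SAFETY: the cursor is stored in this layer's `_query_cursor`
+            // field, alongside the `captures` iterator that borrows it, and is
+            // dropped after it, so the forged `'static` lifetime never
+            // outlives the cursor.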
+ let cursor = unsafe {
+ std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
+ };
+
+ cursor.set_byte_range(range.clone());
+ let captures = cursor.captures(query, layer.node(), TextProvider(text));
+ let grammar_index = result
+ .grammars
+ .iter()
+ .position(|g| g.id == grammar.id())
+ .unwrap_or_else(|| {
+ result.grammars.push(grammar);
+ result.grammars.len() - 1
+ });
+ let mut layer = SyntaxMapCapturesLayer {
+ depth: layer.depth,
+ grammar_index,
+ next_capture: None,
+ captures,
+ _query_cursor: query_cursor,
+ };
+
+ layer.advance();
+ if layer.next_capture.is_some() {
+ let key = layer.sort_key();
+ let ix = match result.layers[..result.active_layer_count]
+ .binary_search_by_key(&key, |layer| layer.sort_key())
+ {
+ Ok(ix) | Err(ix) => ix,
+ };
+ result.layers.insert(ix, layer);
+ result.active_layer_count += 1;
+ } else {
+ result.layers.push(layer);
+ }
+ }
+
+ result
+ }
+
+ pub fn grammars(&self) -> &[&'a Grammar] {
+ &self.grammars
+ }
+
+ pub fn peek(&self) -> Option<SyntaxMapCapture<'a>> {
+ let layer = self.layers[..self.active_layer_count].first()?;
+ let capture = layer.next_capture?;
+ Some(SyntaxMapCapture {
+ depth: layer.depth,
+ grammar_index: layer.grammar_index,
+ index: capture.index,
+ node: capture.node,
+ })
+ }
+
+ pub fn advance(&mut self) -> bool {
+ let layer = if let Some(layer) = self.layers[..self.active_layer_count].first_mut() {
+ layer
+ } else {
+ return false;
+ };
+
+ layer.advance();
+ if layer.next_capture.is_some() {
+ let key = layer.sort_key();
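+            // Rotate the first layer into its new sorted position among the
+            // active layers, just before the first layer with a greater key.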
+ let i = 1 + self.layers[1..self.active_layer_count]
+ .iter()
+ .position(|later_layer| key < later_layer.sort_key())
+ .unwrap_or(self.active_layer_count - 1);
+ self.layers[0..i].rotate_left(1);
+ } else {
+ self.layers[0..self.active_layer_count].rotate_left(1);
+ self.active_layer_count -= 1;
+ }
+
+ true
+ }
+
+ pub fn set_byte_range(&mut self, range: Range<usize>) {
+ for layer in &mut self.layers {
+ layer.captures.set_byte_range(range.clone());
+ if let Some(capture) = &layer.next_capture {
+ if capture.node.end_byte() > range.start {
+ continue;
+ }
+ }
+ layer.advance();
+ }
+ self.layers.sort_unstable_by_key(|layer| layer.sort_key());
+ self.active_layer_count = self
+ .layers
+ .iter()
+ .position(|layer| layer.next_capture.is_none())
+ .unwrap_or(self.layers.len());
+ }
+}
+
+impl<'a> SyntaxMapMatches<'a> {
+ fn new(
+ range: Range<usize>,
+ text: &'a Rope,
+ layers: impl Iterator<Item = SyntaxLayerInfo<'a>>,
+ query: fn(&Grammar) -> Option<&Query>,
+ ) -> Self {
+ let mut result = Self::default();
+ for layer in layers {
+ let grammar = match &layer.language.grammar {
+ Some(grammar) => grammar,
+ None => continue,
+ };
+ let query = match query(&grammar) {
+ Some(query) => query,
+ None => continue,
+ };
+
+ let mut query_cursor = QueryCursorHandle::new();
+
+ // TODO - add a Tree-sitter API to remove the need for this.
+ let cursor = unsafe {
+ std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
+ };
+
+ cursor.set_byte_range(range.clone());
+ let matches = cursor.matches(query, layer.node(), TextProvider(text));
+ let grammar_index = result
+ .grammars
+ .iter()
+ .position(|g| g.id == grammar.id())
+ .unwrap_or_else(|| {
+ result.grammars.push(grammar);
+ result.grammars.len() - 1
+ });
+ let mut layer = SyntaxMapMatchesLayer {
+ depth: layer.depth,
+ grammar_index,
+ matches,
+ next_pattern_index: 0,
+ next_captures: Vec::new(),
+ has_next: false,
+ _query_cursor: query_cursor,
+ };
+
+ layer.advance();
+ if layer.has_next {
+ let key = layer.sort_key();
+ let ix = match result.layers[..result.active_layer_count]
+ .binary_search_by_key(&key, |layer| layer.sort_key())
+ {
+ Ok(ix) | Err(ix) => ix,
+ };
+ result.layers.insert(ix, layer);
+ result.active_layer_count += 1;
+ } else {
+ result.layers.push(layer);
+ }
+ }
+ result
+ }
+
+ pub fn grammars(&self) -> &[&'a Grammar] {
+ &self.grammars
+ }
+
+ pub fn peek(&self) -> Option<SyntaxMapMatch> {
+ let layer = self.layers.first()?;
+ if !layer.has_next {
+ return None;
+ }
+ Some(SyntaxMapMatch {
+ depth: layer.depth,
+ grammar_index: layer.grammar_index,
+ pattern_index: layer.next_pattern_index,
+ captures: &layer.next_captures,
+ })
+ }
+
+ pub fn advance(&mut self) -> bool {
+ let layer = if let Some(layer) = self.layers.first_mut() {
+ layer
+ } else {
+ return false;
+ };
+
+ layer.advance();
+ if layer.has_next {
+ let key = layer.sort_key();
+ let i = 1 + self.layers[1..self.active_layer_count]
+ .iter()
+ .position(|later_layer| key < later_layer.sort_key())
+ .unwrap_or(self.active_layer_count - 1);
+ self.layers[0..i].rotate_left(1);
+ } else {
+ self.layers[0..self.active_layer_count].rotate_left(1);
+ self.active_layer_count -= 1;
+ }
+
+ true
+ }
+}
+
+impl<'a> SyntaxMapCapturesLayer<'a> {
+ fn advance(&mut self) {
+ self.next_capture = self.captures.next().map(|(mat, ix)| mat.captures[ix]);
+ }
+
+ fn sort_key(&self) -> (usize, Reverse<usize>, usize) {
+ if let Some(capture) = &self.next_capture {
+ let range = capture.node.byte_range();
+ (range.start, Reverse(range.end), self.depth)
+ } else {
+ (usize::MAX, Reverse(0), usize::MAX)
+ }
+ }
+}
+
+impl<'a> SyntaxMapMatchesLayer<'a> {
+ fn advance(&mut self) {
+ if let Some(mat) = self.matches.next() {
+ self.next_captures.clear();
+ self.next_captures.extend_from_slice(&mat.captures);
+ self.next_pattern_index = mat.pattern_index;
+ self.has_next = true;
+ } else {
+ self.has_next = false;
+ }
+ }
+
+ fn sort_key(&self) -> (usize, Reverse<usize>, usize) {
+ if self.has_next {
+ let captures = &self.next_captures;
+ if let Some((first, last)) = captures.first().zip(captures.last()) {
+ return (
+ first.node.start_byte(),
+ Reverse(last.node.end_byte()),
+ self.depth,
+ );
+ }
+ }
+ (usize::MAX, Reverse(0), usize::MAX)
+ }
+}
+
+impl<'a> Iterator for SyntaxMapCaptures<'a> {
+ type Item = SyntaxMapCapture<'a>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let result = self.peek();
+ self.advance();
+ result
+ }
+}
+
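+/// Merge two sorted iterators of ranges into a single sorted list, coalescing
+/// any ranges that overlap or touch. For example, joining `[0..3, 5..9]` with
+/// `[2..6, 12..14]` yields `[0..9, 12..14]`.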
+fn join_ranges(
+ a: impl Iterator<Item = Range<usize>>,
+ b: impl Iterator<Item = Range<usize>>,
+) -> Vec<Range<usize>> {
+ let mut result = Vec::<Range<usize>>::new();
+ let mut a = a.peekable();
+ let mut b = b.peekable();
+ loop {
+ let range = match (a.peek(), b.peek()) {
+ (Some(range_a), Some(range_b)) => {
+ if range_a.start < range_b.start {
+ a.next().unwrap()
+ } else {
+ b.next().unwrap()
+ }
+ }
+ (None, Some(_)) => b.next().unwrap(),
+ (Some(_), None) => a.next().unwrap(),
+ (None, None) => break,
+ };
+
+ if let Some(last) = result.last_mut() {
+ if range.start <= last.end {
+ last.end = last.end.max(range.end);
+ continue;
+ }
+ }
+ result.push(range);
+ }
+ result
+}
+
+fn parse_text(
+ grammar: &Grammar,
+ text: &Rope,
+ start_byte: usize,
+ ranges: Vec<tree_sitter::Range>,
+ old_tree: Option<Tree>,
+) -> Tree {
+ PARSER.with(|parser| {
+ let mut parser = parser.borrow_mut();
+ let mut chunks = text.chunks_in_range(start_byte..text.len());
+ parser
+ .set_included_ranges(&ranges)
+ .expect("overlapping ranges");
+ parser
+ .set_language(grammar.ts_language)
+ .expect("incompatible grammar");
+ parser
+ .parse_with(
+ &mut move |offset, _| {
+ chunks.seek(start_byte + offset);
+ chunks.next().unwrap_or("").as_bytes()
+ },
+ old_tree.as_ref(),
+ )
+ .expect("invalid language")
+ })
+}
+
+fn get_injections(
+ config: &InjectionConfig,
+ text: &BufferSnapshot,
+ outer_range: Range<Anchor>,
+ node: Node,
+ language_registry: &Arc<LanguageRegistry>,
+ depth: usize,
+ changed_ranges: &[Range<usize>],
+ combined_injection_ranges: &mut HashMap<Arc<Language>, Vec<tree_sitter::Range>>,
+ queue: &mut BinaryHeap<ParseStep>,
+) {
+ let mut query_cursor = QueryCursorHandle::new();
+ let mut prev_match = None;
+
+    // Ensure that a `ParseStep` is created for every combined injection
+    // language, even if there are currently no matches for that injection.
+ combined_injection_ranges.clear();
+ for pattern in &config.patterns {
+ if let (Some(language_name), true) = (pattern.language.as_ref(), pattern.combined) {
+ if let Some(language) = language_registry
+ .language_for_name_or_extension(language_name)
+ .now_or_never()
+ .and_then(|language| language.ok())
+ {
+ combined_injection_ranges.insert(language, Vec::new());
+ }
+ }
+ }
+
+ for query_range in changed_ranges {
+ query_cursor.set_byte_range(query_range.start.saturating_sub(1)..query_range.end + 1);
+ for mat in query_cursor.matches(&config.query, node, TextProvider(text.as_rope())) {
+ let content_ranges = mat
+ .nodes_for_capture_index(config.content_capture_ix)
+ .map(|node| node.range())
+ .collect::<Vec<_>>();
+ if content_ranges.is_empty() {
+ continue;
+ }
+
+ let content_range =
+ content_ranges.first().unwrap().start_byte..content_ranges.last().unwrap().end_byte;
+
+ // Avoid duplicate matches if two changed ranges intersect the same injection.
+ if let Some((prev_pattern_ix, prev_range)) = &prev_match {
+ if mat.pattern_index == *prev_pattern_ix && content_range == *prev_range {
+ continue;
+ }
+ }
+
+ prev_match = Some((mat.pattern_index, content_range.clone()));
+ let combined = config.patterns[mat.pattern_index].combined;
+
+ let mut language_name = None;
+ let mut step_range = content_range.clone();
+ if let Some(name) = config.patterns[mat.pattern_index].language.as_ref() {
+ language_name = Some(Cow::Borrowed(name.as_ref()))
+ } else if let Some(language_node) = config
+ .language_capture_ix
+ .and_then(|ix| mat.nodes_for_capture_index(ix).next())
+ {
+ step_range.start = cmp::min(content_range.start, language_node.start_byte());
+ step_range.end = cmp::max(content_range.end, language_node.end_byte());
+ language_name = Some(Cow::Owned(
+ text.text_for_range(language_node.byte_range()).collect(),
+ ))
+ };
+
+ if let Some(language_name) = language_name {
+ let language = language_registry
+ .language_for_name_or_extension(&language_name)
+ .now_or_never()
+ .and_then(|language| language.ok());
+ let range = text.anchor_before(step_range.start)..text.anchor_after(step_range.end);
+ if let Some(language) = language {
+ if combined {
+ combined_injection_ranges
+ .entry(language.clone())
+ .or_default()
+ .extend(content_ranges);
+ } else {
+ queue.push(ParseStep {
+ depth,
+ language: ParseStepLanguage::Loaded { language },
+ included_ranges: content_ranges,
+ range,
+ mode: ParseMode::Single,
+ });
+ }
+ } else {
+ queue.push(ParseStep {
+ depth,
+ language: ParseStepLanguage::Pending {
+ name: language_name.into(),
+ },
+ included_ranges: content_ranges,
+ range,
+ mode: ParseMode::Single,
+ });
+ }
+ }
+ }
+ }
+
+ for (language, mut included_ranges) in combined_injection_ranges.drain() {
+ included_ranges.sort_unstable_by(|a, b| {
+ Ord::cmp(&a.start_byte, &b.start_byte).then_with(|| Ord::cmp(&a.end_byte, &b.end_byte))
+ });
+ queue.push(ParseStep {
+ depth,
+ language: ParseStepLanguage::Loaded { language },
+ range: outer_range.clone(),
+ included_ranges,
+ mode: ParseMode::Combined {
+ parent_layer_range: node.start_byte()..node.end_byte(),
+ parent_layer_changed_ranges: changed_ranges.to_vec(),
+ },
+ })
+ }
+}
+
+/// Update the given list of included `ranges`, removing any ranges that intersect
+/// `removed_ranges`, and inserting the given `new_ranges`.
+///
+/// Returns the new vector of ranges, along with the range of indices within
+/// that vector that changed relative to the previous `ranges` vector.
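+///
+/// For example (mirroring `test_splice_included_ranges`): splicing removed
+/// ranges `[54..56, 58..68]` and new ranges `[50..54, 59..67]` into
+/// `[20..30, 50..60, 80..90]` yields `[20..30, 50..54, 59..67, 80..90]`,
+/// with a changed portion of `1..3`.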
+pub(crate) fn splice_included_ranges(
+ mut ranges: Vec<tree_sitter::Range>,
+ removed_ranges: &[Range<usize>],
+ new_ranges: &[tree_sitter::Range],
+) -> (Vec<tree_sitter::Range>, Range<usize>) {
+ let mut removed_ranges = removed_ranges.iter().cloned().peekable();
+ let mut new_ranges = new_ranges.into_iter().cloned().peekable();
+ let mut ranges_ix = 0;
+ let mut changed_portion = usize::MAX..0;
+ loop {
+ let next_new_range = new_ranges.peek();
+ let next_removed_range = removed_ranges.peek();
+
+ let (remove, insert) = match (next_removed_range, next_new_range) {
+ (None, None) => break,
+ (Some(_), None) => (removed_ranges.next().unwrap(), None),
+ (Some(next_removed_range), Some(next_new_range)) => {
+ if next_removed_range.end < next_new_range.start_byte {
+ (removed_ranges.next().unwrap(), None)
+ } else {
+ let mut start = next_new_range.start_byte;
+ let mut end = next_new_range.end_byte;
+
+ while let Some(next_removed_range) = removed_ranges.peek() {
+ if next_removed_range.start > next_new_range.end_byte {
+ break;
+ }
+ let next_removed_range = removed_ranges.next().unwrap();
+ start = cmp::min(start, next_removed_range.start);
+ end = cmp::max(end, next_removed_range.end);
+ }
+
+ (start..end, Some(new_ranges.next().unwrap()))
+ }
+ }
+ (None, Some(next_new_range)) => (
+ next_new_range.start_byte..next_new_range.end_byte,
+ Some(new_ranges.next().unwrap()),
+ ),
+ };
+
+ let mut start_ix = ranges_ix
+ + match ranges[ranges_ix..].binary_search_by_key(&remove.start, |r| r.end_byte) {
+ Ok(ix) => ix,
+ Err(ix) => ix,
+ };
+ let mut end_ix = ranges_ix
+ + match ranges[ranges_ix..].binary_search_by_key(&remove.end, |r| r.start_byte) {
+ Ok(ix) => ix + 1,
+ Err(ix) => ix,
+ };
+
+ // If there are empty ranges, then there may be multiple ranges with the same
+ // start or end. Expand the splice to include any adjacent ranges that touch
+ // the changed range.
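+        // (For instance, an empty range `10..10` that abuts a removed span
+        // starting at byte 10 is swept into the same splice.)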
+ while start_ix > 0 {
+ if ranges[start_ix - 1].end_byte == remove.start {
+ start_ix -= 1;
+ } else {
+ break;
+ }
+ }
+ while let Some(range) = ranges.get(end_ix) {
+ if range.start_byte == remove.end {
+ end_ix += 1;
+ } else {
+ break;
+ }
+ }
+
+ changed_portion.start = changed_portion.start.min(start_ix);
+ changed_portion.end = changed_portion.end.max(if insert.is_some() {
+ start_ix + 1
+ } else {
+ start_ix
+ });
+
+ ranges.splice(start_ix..end_ix, insert);
+ ranges_ix = start_ix;
+ }
+
+ if changed_portion.end < changed_portion.start {
+ changed_portion = 0..0;
+ }
+
+ (ranges, changed_portion)
+}
+
+/// Ensure there are newline ranges between content ranges that appear on
+/// different lines. For performance, only the given range of indices is
+/// examined. All of the ranges in the array are relative to the given start
+/// byte and point.
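+///
+/// Without these separators, a combined-injection parser could see content
+/// fragments from different lines as immediately adjacent, losing the line
+/// breaks between them.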
+fn insert_newlines_between_ranges(
+ indices: Range<usize>,
+ ranges: &mut Vec<tree_sitter::Range>,
+ text: &text::BufferSnapshot,
+ start_byte: usize,
+ start_point: Point,
+) {
+ let mut ix = indices.end + 1;
+ while ix > indices.start {
+ ix -= 1;
+        if ix == 0 || ix == ranges.len() {
+ continue;
+ }
+
+ let range_b = ranges[ix].clone();
+ let range_a = &mut ranges[ix - 1];
+ if range_a.end_point.column == 0 {
+ continue;
+ }
+
+ if range_a.end_point.row < range_b.start_point.row {
+ let end_point = start_point + Point::from_ts_point(range_a.end_point);
+ let line_end = Point::new(end_point.row, text.line_len(end_point.row));
+            if end_point.column >= line_end.column {
+ range_a.end_byte += 1;
+ range_a.end_point.row += 1;
+ range_a.end_point.column = 0;
+ } else {
+ let newline_offset = text.point_to_offset(line_end);
+ ranges.insert(
+ ix,
+ tree_sitter::Range {
+ start_byte: newline_offset - start_byte,
+ end_byte: newline_offset - start_byte + 1,
+ start_point: (line_end - start_point).to_ts_point(),
+ end_point: ((line_end - start_point) + Point::new(1, 0)).to_ts_point(),
+ },
+ )
+ }
+ }
+ }
+}
+
+impl OwnedSyntaxLayerInfo {
+ pub fn node(&self) -> Node {
+ self.tree
+ .root_node_with_offset(self.offset.0, self.offset.1)
+ }
+}
+
+impl<'a> SyntaxLayerInfo<'a> {
+ pub fn to_owned(&self) -> OwnedSyntaxLayerInfo {
+ OwnedSyntaxLayerInfo {
+ tree: self.tree.clone(),
+ offset: self.offset,
+ depth: self.depth,
+ language: self.language.clone(),
+ }
+ }
+
+ pub fn node(&self) -> Node<'a> {
+ self.tree
+ .root_node_with_offset(self.offset.0, self.offset.1)
+ }
+
+ pub(crate) fn override_id(&self, offset: usize, text: &text::BufferSnapshot) -> Option<u32> {
+ let text = TextProvider(text.as_rope());
+ let config = self.language.grammar.as_ref()?.override_config.as_ref()?;
+
+ let mut query_cursor = QueryCursorHandle::new();
+ query_cursor.set_byte_range(offset..offset);
+
+ let mut smallest_match: Option<(u32, Range<usize>)> = None;
+ for mat in query_cursor.matches(&config.query, self.node(), text) {
+ for capture in mat.captures {
+ if !config.values.contains_key(&capture.index) {
+ continue;
+ }
+
+ let range = capture.node.byte_range();
+ if offset <= range.start || offset >= range.end {
+ continue;
+ }
+
+ if let Some((_, smallest_range)) = &smallest_match {
+ if range.len() < smallest_range.len() {
+ smallest_match = Some((capture.index, range))
+ }
+ continue;
+ }
+
+ smallest_match = Some((capture.index, range));
+ }
+ }
+
+ smallest_match.map(|(index, _)| index)
+ }
+}
+
+impl std::ops::Deref for SyntaxMap {
+ type Target = SyntaxSnapshot;
+
+ fn deref(&self) -> &Self::Target {
+ &self.snapshot
+ }
+}
+
+impl PartialEq for ParseStep {
+ fn eq(&self, _: &Self) -> bool {
+ false
+ }
+}
+
+impl Eq for ParseStep {}
+
+impl PartialOrd for ParseStep {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+        Some(self.cmp(other))
+ }
+}
+
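+// Order steps so that the `BinaryHeap` in `reparse_with_ranges` pops shallower
+// layers first and, within a depth, earlier ranges before later ones.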
+impl Ord for ParseStep {
+ fn cmp(&self, other: &Self) -> Ordering {
+ let range_a = self.range();
+ let range_b = other.range();
+ Ord::cmp(&other.depth, &self.depth)
+ .then_with(|| Ord::cmp(&range_b.start, &range_a.start))
+ .then_with(|| Ord::cmp(&range_a.end, &range_b.end))
+ .then_with(|| self.language.id().cmp(&other.language.id()))
+ }
+}
+
+impl ParseStep {
+ fn range(&self) -> Range<usize> {
+ if let ParseMode::Combined {
+ parent_layer_range, ..
+ } = &self.mode
+ {
+ parent_layer_range.clone()
+ } else {
+ let start = self.included_ranges.first().map_or(0, |r| r.start_byte);
+ let end = self.included_ranges.last().map_or(0, |r| r.end_byte);
+ start..end
+ }
+ }
+}
+
+impl ChangedRegion {
+ fn cmp(&self, other: &Self, buffer: &BufferSnapshot) -> Ordering {
+ let range_a = &self.range;
+ let range_b = &other.range;
+ Ord::cmp(&self.depth, &other.depth)
+ .then_with(|| range_a.start.cmp(&range_b.start, buffer))
+ .then_with(|| range_b.end.cmp(&range_a.end, buffer))
+ }
+}
+
+impl ChangeRegionSet {
+ fn start_position(&self) -> ChangeStartPosition {
+ self.0.first().map_or(
+ ChangeStartPosition {
+ depth: usize::MAX,
+ position: Anchor::MAX,
+ },
+ |region| ChangeStartPosition {
+ depth: region.depth,
+ position: region.range.start,
+ },
+ )
+ }
+
+ fn intersects(&self, layer: &SyntaxLayer, text: &BufferSnapshot) -> bool {
+ for region in &self.0 {
+ if region.depth < layer.depth {
+ continue;
+ }
+ if region.depth > layer.depth {
+ break;
+ }
+ if region.range.end.cmp(&layer.range.start, text).is_le() {
+ continue;
+ }
+ if region.range.start.cmp(&layer.range.end, text).is_ge() {
+ break;
+ }
+ return true;
+ }
+ false
+ }
+
+ fn insert(&mut self, region: ChangedRegion, text: &BufferSnapshot) {
+ if let Err(ix) = self.0.binary_search_by(|probe| probe.cmp(®ion, text)) {
+ self.0.insert(ix, region);
+ }
+ }
+
+ fn prune(&mut self, summary: SyntaxLayerSummary, text: &BufferSnapshot) -> bool {
+ let prev_len = self.0.len();
+ self.0.retain(|region| {
+ region.depth > summary.max_depth
+ || (region.depth == summary.max_depth
+ && region
+ .range
+ .end
+ .cmp(&summary.last_layer_range.start, text)
+ .is_gt())
+ });
+ self.0.len() < prev_len
+ }
+}
+
+impl Default for SyntaxLayerSummary {
+ fn default() -> Self {
+ Self {
+ max_depth: 0,
+ min_depth: 0,
+ range: Anchor::MAX..Anchor::MIN,
+ last_layer_range: Anchor::MIN..Anchor::MAX,
+ last_layer_language: None,
+ contains_unknown_injections: false,
+ }
+ }
+}
+
+impl sum_tree::Summary for SyntaxLayerSummary {
+ type Context = BufferSnapshot;
+
+ fn add_summary(&mut self, other: &Self, buffer: &Self::Context) {
+ if other.max_depth > self.max_depth {
+ self.max_depth = other.max_depth;
+ self.range = other.range.clone();
+ } else {
+ if self.range == (Anchor::MAX..Anchor::MAX) {
+ self.range.start = other.range.start;
+ }
+ if other.range.end.cmp(&self.range.end, buffer).is_gt() {
+ self.range.end = other.range.end;
+ }
+ }
+ self.last_layer_range = other.last_layer_range.clone();
+ self.last_layer_language = other.last_layer_language;
+ self.contains_unknown_injections |= other.contains_unknown_injections;
+ }
+}
+
+impl<'a> SeekTarget<'a, SyntaxLayerSummary, SyntaxLayerSummary> for SyntaxLayerPosition {
+ fn cmp(&self, cursor_location: &SyntaxLayerSummary, buffer: &BufferSnapshot) -> Ordering {
+ Ord::cmp(&self.depth, &cursor_location.max_depth)
+ .then_with(|| {
+ self.range
+ .start
+ .cmp(&cursor_location.last_layer_range.start, buffer)
+ })
+ .then_with(|| {
+ cursor_location
+ .last_layer_range
+ .end
+ .cmp(&self.range.end, buffer)
+ })
+ .then_with(|| self.language.cmp(&cursor_location.last_layer_language))
+ }
+}
+
+impl<'a> SeekTarget<'a, SyntaxLayerSummary, SyntaxLayerSummary> for ChangeStartPosition {
+ fn cmp(&self, cursor_location: &SyntaxLayerSummary, text: &BufferSnapshot) -> Ordering {
+ Ord::cmp(&self.depth, &cursor_location.max_depth)
+ .then_with(|| self.position.cmp(&cursor_location.range.end, text))
+ }
+}
+
+impl<'a> SeekTarget<'a, SyntaxLayerSummary, SyntaxLayerSummary>
+ for SyntaxLayerPositionBeforeChange
+{
+ fn cmp(&self, cursor_location: &SyntaxLayerSummary, buffer: &BufferSnapshot) -> Ordering {
+        if self.change.cmp(cursor_location, buffer).is_le() {
+            Ordering::Less
+        } else {
+            self.position.cmp(cursor_location, buffer)
+        }
+ }
+}
+
+impl sum_tree::Item for SyntaxLayer {
+ type Summary = SyntaxLayerSummary;
+
+ fn summary(&self) -> Self::Summary {
+ SyntaxLayerSummary {
+ min_depth: self.depth,
+ max_depth: self.depth,
+ range: self.range.clone(),
+ last_layer_range: self.range.clone(),
+ last_layer_language: self.content.language_id(),
+ contains_unknown_injections: matches!(self.content, SyntaxLayerContent::Pending { .. }),
+ }
+ }
+}
+
+impl std::fmt::Debug for SyntaxLayer {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("SyntaxLayer")
+ .field("depth", &self.depth)
+ .field("range", &self.range)
+ .field("tree", &self.content.tree())
+ .finish()
+ }
+}
+
+impl<'a> tree_sitter::TextProvider<&'a [u8]> for TextProvider<'a> {
+ type I = ByteChunks<'a>;
+
+ fn text(&mut self, node: tree_sitter::Node) -> Self::I {
+ ByteChunks(self.0.chunks_in_range(node.byte_range()))
+ }
+}
+
+impl<'a> Iterator for ByteChunks<'a> {
+ type Item = &'a [u8];
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.0.next().map(str::as_bytes)
+ }
+}
+
+impl QueryCursorHandle {
+ pub(crate) fn new() -> Self {
+ let mut cursor = QUERY_CURSORS.lock().pop().unwrap_or_else(QueryCursor::new);
+ cursor.set_match_limit(64);
+ QueryCursorHandle(Some(cursor))
+ }
+}
+
+impl Deref for QueryCursorHandle {
+ type Target = QueryCursor;
+
+ fn deref(&self) -> &Self::Target {
+ self.0.as_ref().unwrap()
+ }
+}
+
+impl DerefMut for QueryCursorHandle {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ self.0.as_mut().unwrap()
+ }
+}
+
+impl Drop for QueryCursorHandle {
+ fn drop(&mut self) {
+ let mut cursor = self.0.take().unwrap();
+ cursor.set_byte_range(0..usize::MAX);
+ cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point());
+ QUERY_CURSORS.lock().push(cursor)
+ }
+}
+
+pub(crate) trait ToTreeSitterPoint {
+ fn to_ts_point(self) -> tree_sitter::Point;
+ fn from_ts_point(point: tree_sitter::Point) -> Self;
+}
+
+impl ToTreeSitterPoint for Point {
+ fn to_ts_point(self) -> tree_sitter::Point {
+ tree_sitter::Point::new(self.row as usize, self.column as usize)
+ }
+
+ fn from_ts_point(point: tree_sitter::Point) -> Self {
+ Point::new(point.row as u32, point.column as u32)
+ }
+}
+
+struct LogIncludedRanges<'a>(&'a [tree_sitter::Range]);
+struct LogPoint(Point);
+struct LogAnchorRange<'a>(&'a Range<Anchor>, &'a text::BufferSnapshot);
+struct LogChangedRegions<'a>(&'a ChangeRegionSet, &'a text::BufferSnapshot);
+
+impl<'a> fmt::Debug for LogIncludedRanges<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list()
+ .entries(self.0.iter().map(|range| {
+ let start = range.start_point;
+ let end = range.end_point;
+ (start.row, start.column)..(end.row, end.column)
+ }))
+ .finish()
+ }
+}
+
+impl<'a> fmt::Debug for LogAnchorRange<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let range = self.0.to_point(self.1);
+ (LogPoint(range.start)..LogPoint(range.end)).fmt(f)
+ }
+}
+
+impl<'a> fmt::Debug for LogChangedRegions<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list()
+ .entries(
+ self.0
+ .0
+ .iter()
+ .map(|region| LogAnchorRange(®ion.range, self.1)),
+ )
+ .finish()
+ }
+}
+
+impl fmt::Debug for LogPoint {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ (self.0.row, self.0.column).fmt(f)
+ }
+}
@@ -0,0 +1,1323 @@
+// use super::*;
+// use crate::LanguageConfig;
+// use rand::rngs::StdRng;
+// use std::{env, ops::Range, sync::Arc};
+// use text::Buffer;
+// use tree_sitter::Node;
+// use unindent::Unindent as _;
+// use util::test::marked_text_ranges;
+
+// #[test]
+// fn test_splice_included_ranges() {
+// let ranges = vec![ts_range(20..30), ts_range(50..60), ts_range(80..90)];
+
+// let (new_ranges, change) = splice_included_ranges(
+// ranges.clone(),
+// &[54..56, 58..68],
+// &[ts_range(50..54), ts_range(59..67)],
+// );
+// assert_eq!(
+// new_ranges,
+// &[
+// ts_range(20..30),
+// ts_range(50..54),
+// ts_range(59..67),
+// ts_range(80..90),
+// ]
+// );
+// assert_eq!(change, 1..3);
+
+// let (new_ranges, change) = splice_included_ranges(ranges.clone(), &[70..71, 91..100], &[]);
+// assert_eq!(
+// new_ranges,
+// &[ts_range(20..30), ts_range(50..60), ts_range(80..90)]
+// );
+// assert_eq!(change, 2..3);
+
+// let (new_ranges, change) =
+// splice_included_ranges(ranges.clone(), &[], &[ts_range(0..2), ts_range(70..75)]);
+// assert_eq!(
+// new_ranges,
+// &[
+// ts_range(0..2),
+// ts_range(20..30),
+// ts_range(50..60),
+// ts_range(70..75),
+// ts_range(80..90)
+// ]
+// );
+// assert_eq!(change, 0..4);
+
+// let (new_ranges, change) =
+// splice_included_ranges(ranges.clone(), &[30..50], &[ts_range(25..55)]);
+// assert_eq!(new_ranges, &[ts_range(25..55), ts_range(80..90)]);
+// assert_eq!(change, 0..1);
+
+// // does not create overlapping ranges
+// let (new_ranges, change) =
+// splice_included_ranges(ranges.clone(), &[0..18], &[ts_range(20..32)]);
+// assert_eq!(
+// new_ranges,
+// &[ts_range(20..32), ts_range(50..60), ts_range(80..90)]
+// );
+// assert_eq!(change, 0..1);
+
+// fn ts_range(range: Range<usize>) -> tree_sitter::Range {
+// tree_sitter::Range {
+// start_byte: range.start,
+// start_point: tree_sitter::Point {
+// row: 0,
+// column: range.start,
+// },
+// end_byte: range.end,
+// end_point: tree_sitter::Point {
+// row: 0,
+// column: range.end,
+// },
+// }
+// }
+// }
+
+// #[gpui::test]
+// fn test_syntax_map_layers_for_range() {
+// let registry = Arc::new(LanguageRegistry::test());
+// let language = Arc::new(rust_lang());
+// registry.add(language.clone());
+
+// let mut buffer = Buffer::new(
+// 0,
+// 0,
+// r#"
+// fn a() {
+// assert_eq!(
+// b(vec![C {}]),
+// vec![d.e],
+// );
+// println!("{}", f(|_| true));
+// }
+// "#
+// .unindent(),
+// );
+
+// let mut syntax_map = SyntaxMap::new();
+// syntax_map.set_language_registry(registry.clone());
+// syntax_map.reparse(language.clone(), &buffer);
+
+// assert_layers_for_range(
+// &syntax_map,
+// &buffer,
+// Point::new(2, 0)..Point::new(2, 0),
+// &[
+// "...(function_item ... (block (expression_statement (macro_invocation...",
+// "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
+// ],
+// );
+// assert_layers_for_range(
+// &syntax_map,
+// &buffer,
+// Point::new(2, 14)..Point::new(2, 16),
+// &[
+// "...(function_item ...",
+// "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
+// "...(array_expression (struct_expression ...",
+// ],
+// );
+// assert_layers_for_range(
+// &syntax_map,
+// &buffer,
+// Point::new(3, 14)..Point::new(3, 16),
+// &[
+// "...(function_item ...",
+// "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
+// "...(array_expression (field_expression ...",
+// ],
+// );
+// assert_layers_for_range(
+// &syntax_map,
+// &buffer,
+// Point::new(5, 12)..Point::new(5, 16),
+// &[
+// "...(function_item ...",
+// "...(call_expression ... (arguments (closure_expression ...",
+// ],
+// );
+
+// // Replace a vec! macro invocation with a plain slice, removing a syntactic layer.
+// let macro_name_range = range_for_text(&buffer, "vec!");
+// buffer.edit([(macro_name_range, "&")]);
+// syntax_map.interpolate(&buffer);
+// syntax_map.reparse(language.clone(), &buffer);
+
+// assert_layers_for_range(
+// &syntax_map,
+// &buffer,
+// Point::new(2, 14)..Point::new(2, 16),
+// &[
+// "...(function_item ...",
+// "...(tuple_expression (call_expression ... arguments: (arguments (reference_expression value: (array_expression...",
+// ],
+// );
+
+// // Put the vec! macro back, adding back the syntactic layer.
+// buffer.undo();
+// syntax_map.interpolate(&buffer);
+// syntax_map.reparse(language.clone(), &buffer);
+
+// assert_layers_for_range(
+// &syntax_map,
+// &buffer,
+// Point::new(2, 14)..Point::new(2, 16),
+// &[
+// "...(function_item ...",
+// "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
+// "...(array_expression (struct_expression ...",
+// ],
+// );
+// }
+
+// #[gpui::test]
+// fn test_dynamic_language_injection() {
+// let registry = Arc::new(LanguageRegistry::test());
+// let markdown = Arc::new(markdown_lang());
+// registry.add(markdown.clone());
+// registry.add(Arc::new(rust_lang()));
+// registry.add(Arc::new(ruby_lang()));
+
+// let mut buffer = Buffer::new(
+// 0,
+// 0,
+// r#"
+// This is a code block:
+
+// ```rs
+// fn foo() {}
+// ```
+// "#
+// .unindent(),
+// );
+
+// let mut syntax_map = SyntaxMap::new();
+// syntax_map.set_language_registry(registry.clone());
+// syntax_map.reparse(markdown.clone(), &buffer);
+// assert_layers_for_range(
+// &syntax_map,
+// &buffer,
+// Point::new(3, 0)..Point::new(3, 0),
+// &[
+// "...(fenced_code_block (fenced_code_block_delimiter) (info_string (language)) (code_fence_content) (fenced_code_block_delimiter...",
+// "...(function_item name: (identifier) parameters: (parameters) body: (block)...",
+// ],
+// );
+
+// // Replace Rust with Ruby in code block.
+// let macro_name_range = range_for_text(&buffer, "rs");
+// buffer.edit([(macro_name_range, "ruby")]);
+// syntax_map.interpolate(&buffer);
+// syntax_map.reparse(markdown.clone(), &buffer);
+// assert_layers_for_range(
+// &syntax_map,
+// &buffer,
+// Point::new(3, 0)..Point::new(3, 0),
+// &[
+// "...(fenced_code_block (fenced_code_block_delimiter) (info_string (language)) (code_fence_content) (fenced_code_block_delimiter...",
+// "...(call method: (identifier) arguments: (argument_list (call method: (identifier) arguments: (argument_list) block: (block)...",
+// ],
+// );
+
+// // Replace Ruby with a language that hasn't been loaded yet.
+// let macro_name_range = range_for_text(&buffer, "ruby");
+// buffer.edit([(macro_name_range, "html")]);
+// syntax_map.interpolate(&buffer);
+// syntax_map.reparse(markdown.clone(), &buffer);
+// assert_layers_for_range(
+// &syntax_map,
+// &buffer,
+// Point::new(3, 0)..Point::new(3, 0),
+// &[
+// "...(fenced_code_block (fenced_code_block_delimiter) (info_string (language)) (code_fence_content) (fenced_code_block_delimiter..."
+// ],
+// );
+// assert!(syntax_map.contains_unknown_injections());
+
+// registry.add(Arc::new(html_lang()));
+// syntax_map.reparse(markdown.clone(), &buffer);
+// assert_layers_for_range(
+// &syntax_map,
+// &buffer,
+// Point::new(3, 0)..Point::new(3, 0),
+// &[
+// "...(fenced_code_block (fenced_code_block_delimiter) (info_string (language)) (code_fence_content) (fenced_code_block_delimiter...",
+// "(fragment (text))",
+// ],
+// );
+// assert!(!syntax_map.contains_unknown_injections());
+// }
+
+// #[gpui::test]
+// fn test_typing_multiple_new_injections() {
+// let (buffer, syntax_map) = test_edit_sequence(
+// "Rust",
+// &[
+// "fn a() { dbg }",
+// "fn a() { dbg«!» }",
+// "fn a() { dbg!«()» }",
+// "fn a() { dbg!(«b») }",
+// "fn a() { dbg!(b«.») }",
+// "fn a() { dbg!(b.«c») }",
+// "fn a() { dbg!(b.c«()») }",
+// "fn a() { dbg!(b.c(«vec»)) }",
+// "fn a() { dbg!(b.c(vec«!»)) }",
+// "fn a() { dbg!(b.c(vec!«[]»)) }",
+// "fn a() { dbg!(b.c(vec![«d»])) }",
+// "fn a() { dbg!(b.c(vec![d«.»])) }",
+// "fn a() { dbg!(b.c(vec![d.«e»])) }",
+// ],
+// );
+
+// assert_capture_ranges(
+// &syntax_map,
+// &buffer,
+// &["field"],
+// "fn a() { dbg!(b.«c»(vec![d.«e»])) }",
+// );
+// }
+
+// #[gpui::test]
+// fn test_pasting_new_injection_line_between_others() {
+// let (buffer, syntax_map) = test_edit_sequence(
+// "Rust",
+// &[
+// "
+// fn a() {
+// b!(B {});
+// c!(C {});
+// d!(D {});
+// e!(E {});
+// f!(F {});
+// g!(G {});
+// }
+// ",
+// "
+// fn a() {
+// b!(B {});
+// c!(C {});
+// d!(D {});
+// « h!(H {});
+// » e!(E {});
+// f!(F {});
+// g!(G {});
+// }
+// ",
+// ],
+// );
+
+// assert_capture_ranges(
+// &syntax_map,
+// &buffer,
+// &["struct"],
+// "
+// fn a() {
+// b!(«B {}»);
+// c!(«C {}»);
+// d!(«D {}»);
+// h!(«H {}»);
+// e!(«E {}»);
+// f!(«F {}»);
+// g!(«G {}»);
+// }
+// ",
+// );
+// }
+
+// #[gpui::test]
+// fn test_joining_injections_with_child_injections() {
+// let (buffer, syntax_map) = test_edit_sequence(
+// "Rust",
+// &[
+// "
+// fn a() {
+// b!(
+// c![one.two.three],
+// d![four.five.six],
+// );
+// e!(
+// f![seven.eight],
+// );
+// }
+// ",
+// "
+// fn a() {
+// b!(
+// c![one.two.three],
+// d![four.five.six],
+// ˇ f![seven.eight],
+// );
+// }
+// ",
+// ],
+// );
+
+// assert_capture_ranges(
+// &syntax_map,
+// &buffer,
+// &["field"],
+// "
+// fn a() {
+// b!(
+// c![one.«two».«three»],
+// d![four.«five».«six»],
+// f![seven.«eight»],
+// );
+// }
+// ",
+// );
+// }
+
+// #[gpui::test]
+// fn test_editing_edges_of_injection() {
+// test_edit_sequence(
+// "Rust",
+// &[
+// "
+// fn a() {
+// b!(c!())
+// }
+// ",
+// "
+// fn a() {
+// «d»!(c!())
+// }
+// ",
+// "
+// fn a() {
+// «e»d!(c!())
+// }
+// ",
+// "
+// fn a() {
+// ed!«[»c!()«]»
+// }
+// ",
+// ],
+// );
+// }
+
+// #[gpui::test]
+// fn test_edits_preceding_and_intersecting_injection() {
+// test_edit_sequence(
+// "Rust",
+// &[
+// //
+// "const aaaaaaaaaaaa: B = c!(d(e.f));",
+// "const aˇa: B = c!(d(eˇ));",
+// ],
+// );
+// }
+
+// #[gpui::test]
+// fn test_non_local_changes_create_injections() {
+// test_edit_sequence(
+// "Rust",
+// &[
+// "
+// // a! {
+// static B: C = d;
+// // }
+// ",
+// "
+// ˇa! {
+// static B: C = d;
+// ˇ}
+// ",
+// ],
+// );
+// }
+
+// #[gpui::test]
+// fn test_creating_many_injections_in_one_edit() {
+// test_edit_sequence(
+// "Rust",
+// &[
+// "
+// fn a() {
+// one(Two::three(3));
+// four(Five::six(6));
+// seven(Eight::nine(9));
+// }
+// ",
+// "
+// fn a() {
+// one«!»(Two::three(3));
+// four«!»(Five::six(6));
+// seven«!»(Eight::nine(9));
+// }
+// ",
+// "
+// fn a() {
+// one!(Two::three«!»(3));
+// four!(Five::six«!»(6));
+// seven!(Eight::nine«!»(9));
+// }
+// ",
+// ],
+// );
+// }
+
+// #[gpui::test]
+// fn test_editing_across_injection_boundary() {
+// test_edit_sequence(
+// "Rust",
+// &[
+// "
+// fn one() {
+// two();
+// three!(
+// three.four,
+// five.six,
+// );
+// }
+// ",
+// "
+// fn one() {
+// two();
+// th«irty_five![»
+// three.four,
+// five.six,
+// « seven.eight,
+// ];»
+// }
+// ",
+// ],
+// );
+// }
+
+// #[gpui::test]
+// fn test_removing_injection_by_replacing_across_boundary() {
+// test_edit_sequence(
+// "Rust",
+// &[
+// "
+// fn one() {
+// two!(
+// three.four,
+// );
+// }
+// ",
+// "
+// fn one() {
+// t«en
+// .eleven(
+// twelve,
+// »
+// three.four,
+// );
+// }
+// ",
+// ],
+// );
+// }
+
+// #[gpui::test]
+// fn test_combined_injections_simple() {
+// let (buffer, syntax_map) = test_edit_sequence(
+// "ERB",
+// &[
+// "
+// <body>
+// <% if @one %>
+// <div class=one>
+// <% else %>
+// <div class=two>
+// <% end %>
+// </div>
+// </body>
+// ",
+// "
+// <body>
+// <% if @one %>
+// <div class=one>
+// ˇ else ˇ
+// <div class=two>
+// <% end %>
+// </div>
+// </body>
+// ",
+// "
+// <body>
+// <% if @one «;» end %>
+// </div>
+// </body>
+// ",
+// ],
+// );
+
+// assert_capture_ranges(
+// &syntax_map,
+// &buffer,
+// &["tag", "ivar"],
+// "
+// <«body»>
+// <% if «@one» ; end %>
+// </«div»>
+// </«body»>
+// ",
+// );
+// }
+
+// #[gpui::test]
+// fn test_combined_injections_empty_ranges() {
+// test_edit_sequence(
+// "ERB",
+// &[
+// "
+// <% if @one %>
+// <% else %>
+// <% end %>
+// ",
+// "
+// <% if @one %>
+// ˇ<% end %>
+// ",
+// ],
+// );
+// }
+
+// #[gpui::test]
+// fn test_combined_injections_edit_edges_of_ranges() {
+// let (buffer, syntax_map) = test_edit_sequence(
+// "ERB",
+// &[
+// "
+// <%= one @two %>
+// <%= three @four %>
+// ",
+// "
+// <%= one @two %ˇ
+// <%= three @four %>
+// ",
+// "
+// <%= one @two %«>»
+// <%= three @four %>
+// ",
+// ],
+// );
+
+// assert_capture_ranges(
+// &syntax_map,
+// &buffer,
+// &["tag", "ivar"],
+// "
+// <%= one «@two» %>
+// <%= three «@four» %>
+// ",
+// );
+// }
+
+// #[gpui::test]
+// fn test_combined_injections_splitting_some_injections() {
+// let (_buffer, _syntax_map) = test_edit_sequence(
+// "ERB",
+// &[
+// r#"
+// <%A if b(:c) %>
+// d
+// <% end %>
+// eee
+// <% f %>
+// "#,
+// r#"
+// <%« AAAAAAA %>
+// hhhhhhh
+// <%=» if b(:c) %>
+// d
+// <% end %>
+// eee
+// <% f %>
+// "#,
+// ],
+// );
+// }
+
+// #[gpui::test]
+// fn test_combined_injections_editing_after_last_injection() {
+// test_edit_sequence(
+// "ERB",
+// &[
+// r#"
+// <% foo %>
+// <div></div>
+// <% bar %>
+// "#,
+// r#"
+// <% foo %>
+// <div></div>
+// <% bar %>«
+// more text»
+// "#,
+// ],
+// );
+// }
+
+// #[gpui::test]
+// fn test_combined_injections_inside_injections() {
+// let (buffer, syntax_map) = test_edit_sequence(
+// "Markdown",
+// &[
+// r#"
+// here is
+// some
+// ERB code:
+
+// ```erb
+// <ul>
+// <% people.each do |person| %>
+// <li><%= person.name %></li>
+// <li><%= person.age %></li>
+// <% end %>
+// </ul>
+// ```
+// "#,
+// r#"
+// here is
+// some
+// ERB code:
+
+// ```erb
+// <ul>
+// <% people«2».each do |person| %>
+// <li><%= person.name %></li>
+// <li><%= person.age %></li>
+// <% end %>
+// </ul>
+// ```
+// "#,
+// // Inserting a comment character inside one code directive
+// // does not cause the other code directive to become a comment,
+// // because newlines are included in between each injection range.
+// r#"
+// here is
+// some
+// ERB code:
+
+// ```erb
+// <ul>
+// <% people2.each do |person| %>
+// <li><%= «# »person.name %></li>
+// <li><%= person.age %></li>
+// <% end %>
+// </ul>
+// ```
+// "#,
+// ],
+// );
+
+// // Check that the code directive below the ruby comment is
+// // not parsed as a comment.
+// assert_capture_ranges(
+// &syntax_map,
+// &buffer,
+// &["method"],
+// "
+// here is
+// some
+// ERB code:
+
+// ```erb
+// <ul>
+// <% people2.«each» do |person| %>
+// <li><%= # person.name %></li>
+// <li><%= person.«age» %></li>
+// <% end %>
+// </ul>
+// ```
+// ",
+// );
+// }
+
+// #[gpui::test]
+// fn test_empty_combined_injections_inside_injections() {
+// let (buffer, syntax_map) = test_edit_sequence(
+// "Markdown",
+// &[r#"
+// ```erb
+// hello
+// ```
+
+// goodbye
+// "#],
+// );
+
+// assert_layers_for_range(
+// &syntax_map,
+// &buffer,
+// Point::new(0, 0)..Point::new(5, 0),
+// &[
+// "...(paragraph)...",
+// "(template...",
+// "(fragment...",
+// // The ruby syntax tree should be empty, since there are
+// // no interpolations in the ERB template.
+// "(program)",
+// ],
+// );
+// }
+
+// #[gpui::test(iterations = 50)]
+// fn test_random_syntax_map_edits_rust_macros(rng: StdRng) {
+// let text = r#"
+// fn test_something() {
+// let vec = vec![5, 1, 3, 8];
+// assert_eq!(
+// vec
+// .into_iter()
+// .map(|i| i * 2)
+// .collect::<Vec<usize>>(),
+// vec![
+// 5 * 2, 1 * 2, 3 * 2, 8 * 2
+// ],
+// );
+// }
+// "#
+// .unindent()
+// .repeat(2);
+
+// let registry = Arc::new(LanguageRegistry::test());
+// let language = Arc::new(rust_lang());
+// registry.add(language.clone());
+
+// test_random_edits(text, registry, language, rng);
+// }
+
+// #[gpui::test(iterations = 50)]
+// fn test_random_syntax_map_edits_with_erb(rng: StdRng) {
+// let text = r#"
+// <div id="main">
+// <% if one?(:two) %>
+// <p class="three" four>
+// <%= yield :five %>
+// </p>
+// <% elsif Six.seven(8) %>
+// <p id="three" four>
+// <%= yield :five %>
+// </p>
+// <% else %>
+// <span>Ok</span>
+// <% end %>
+// </div>
+// "#
+// .unindent()
+// .repeat(5);
+
+// let registry = Arc::new(LanguageRegistry::test());
+// let language = Arc::new(erb_lang());
+// registry.add(language.clone());
+// registry.add(Arc::new(ruby_lang()));
+// registry.add(Arc::new(html_lang()));
+
+// test_random_edits(text, registry, language, rng);
+// }
+
+// #[gpui::test(iterations = 50)]
+// fn test_random_syntax_map_edits_with_heex(rng: StdRng) {
+// let text = r#"
+// defmodule TheModule do
+// def the_method(assigns) do
+// ~H"""
+// <%= if @empty do %>
+// <div class="h-4"></div>
+// <% else %>
+// <div class="max-w-2xl w-full animate-pulse">
+// <div class="flex-1 space-y-4">
+// <div class={[@bg_class, "h-4 rounded-lg w-3/4"]}></div>
+// <div class={[@bg_class, "h-4 rounded-lg"]}></div>
+// <div class={[@bg_class, "h-4 rounded-lg w-5/6"]}></div>
+// </div>
+// </div>
+// <% end %>
+// """
+// end
+// end
+// "#
+// .unindent()
+// .repeat(3);
+
+// let registry = Arc::new(LanguageRegistry::test());
+// let language = Arc::new(elixir_lang());
+// registry.add(language.clone());
+// registry.add(Arc::new(heex_lang()));
+// registry.add(Arc::new(html_lang()));
+
+// test_random_edits(text, registry, language, rng);
+// }
+
+// fn test_random_edits(
+// text: String,
+// registry: Arc<LanguageRegistry>,
+// language: Arc<Language>,
+// mut rng: StdRng,
+// ) {
+// let operations = env::var("OPERATIONS")
+// .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
+// .unwrap_or(10);
+
+// let mut buffer = Buffer::new(0, 0, text);
+
+// let mut syntax_map = SyntaxMap::new();
+// syntax_map.set_language_registry(registry.clone());
+// syntax_map.reparse(language.clone(), &buffer);
+
+// let mut reference_syntax_map = SyntaxMap::new();
+// reference_syntax_map.set_language_registry(registry.clone());
+
+// log::info!("initial text:\n{}", buffer.text());
+
+// for _ in 0..operations {
+// let prev_buffer = buffer.snapshot();
+// let prev_syntax_map = syntax_map.snapshot();
+
+// buffer.randomly_edit(&mut rng, 3);
+// log::info!("text:\n{}", buffer.text());
+
+// syntax_map.interpolate(&buffer);
+// check_interpolation(&prev_syntax_map, &syntax_map, &prev_buffer, &buffer);
+
+// syntax_map.reparse(language.clone(), &buffer);
+
+// reference_syntax_map.clear();
+// reference_syntax_map.reparse(language.clone(), &buffer);
+// }
+
+// for i in 0..operations {
+// let i = operations - i - 1;
+// buffer.undo();
+// log::info!("undoing operation {}", i);
+// log::info!("text:\n{}", buffer.text());
+
+// syntax_map.interpolate(&buffer);
+// syntax_map.reparse(language.clone(), &buffer);
+
+// reference_syntax_map.clear();
+// reference_syntax_map.reparse(language.clone(), &buffer);
+// assert_eq!(
+// syntax_map.layers(&buffer).len(),
+// reference_syntax_map.layers(&buffer).len(),
+// "wrong number of layers after undoing edit {i}"
+// );
+// }
+
+// let layers = syntax_map.layers(&buffer);
+// let reference_layers = reference_syntax_map.layers(&buffer);
+// for (edited_layer, reference_layer) in layers.into_iter().zip(reference_layers.into_iter()) {
+// assert_eq!(
+// edited_layer.node().to_sexp(),
+// reference_layer.node().to_sexp()
+// );
+// assert_eq!(edited_layer.node().range(), reference_layer.node().range());
+// }
+// }
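+
+// // The loop above is a reference-oracle test: after every random edit (and
+// // every undo) the incrementally-maintained map is compared against a map
+// // rebuilt from scratch, so any divergence points at a bug in interpolation
+// // or incremental reparsing rather than in tree-sitter itself.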
+
+// fn check_interpolation(
+// old_syntax_map: &SyntaxSnapshot,
+// new_syntax_map: &SyntaxSnapshot,
+// old_buffer: &BufferSnapshot,
+// new_buffer: &BufferSnapshot,
+// ) {
+// let edits = new_buffer
+// .edits_since::<usize>(&old_buffer.version())
+// .collect::<Vec<_>>();
+
+// for (old_layer, new_layer) in old_syntax_map
+// .layers
+// .iter()
+// .zip(new_syntax_map.layers.iter())
+// {
+// assert_eq!(old_layer.range, new_layer.range);
+// let Some(old_tree) = old_layer.content.tree() else {
+// continue;
+// };
+// let Some(new_tree) = new_layer.content.tree() else {
+// continue;
+// };
+// let old_start_byte = old_layer.range.start.to_offset(old_buffer);
+// let new_start_byte = new_layer.range.start.to_offset(new_buffer);
+// let old_start_point = old_layer.range.start.to_point(old_buffer).to_ts_point();
+// let new_start_point = new_layer.range.start.to_point(new_buffer).to_ts_point();
+// let old_node = old_tree.root_node_with_offset(old_start_byte, old_start_point);
+// let new_node = new_tree.root_node_with_offset(new_start_byte, new_start_point);
+// check_node_edits(
+// old_layer.depth,
+// &old_layer.range,
+// old_node,
+// new_node,
+// old_buffer,
+// new_buffer,
+// &edits,
+// );
+// }
+
+// fn check_node_edits(
+// depth: usize,
+// range: &Range<Anchor>,
+// old_node: Node,
+// new_node: Node,
+// old_buffer: &BufferSnapshot,
+// new_buffer: &BufferSnapshot,
+// edits: &[text::Edit<usize>],
+// ) {
+// assert_eq!(old_node.kind(), new_node.kind());
+
+// let old_range = old_node.byte_range();
+// let new_range = new_node.byte_range();
+
+// let is_edited = edits
+// .iter()
+// .any(|edit| edit.new.start < new_range.end && edit.new.end > new_range.start);
+// if is_edited {
+// assert!(
+// new_node.has_changes(),
+// concat!(
+// "failed to mark node as edited.\n",
+// "layer depth: {}, old layer range: {:?}, new layer range: {:?},\n",
+// "node kind: {}, old node range: {:?}, new node range: {:?}",
+// ),
+// depth,
+// range.to_offset(old_buffer),
+// range.to_offset(new_buffer),
+// new_node.kind(),
+// old_range,
+// new_range,
+// );
+// }
+
+// if !new_node.has_changes() {
+// assert_eq!(
+// old_buffer
+// .text_for_range(old_range.clone())
+// .collect::<String>(),
+// new_buffer
+// .text_for_range(new_range.clone())
+// .collect::<String>(),
+// concat!(
+// "mismatched text for node\n",
+// "layer depth: {}, old layer range: {:?}, new layer range: {:?},\n",
+// "node kind: {}, old node range:{:?}, new node range:{:?}",
+// ),
+// depth,
+// range.to_offset(old_buffer),
+// range.to_offset(new_buffer),
+// new_node.kind(),
+// old_range,
+// new_range,
+// );
+// }
+
+// for i in 0..new_node.child_count() {
+// check_node_edits(
+// depth,
+// range,
+// old_node.child(i).unwrap(),
+// new_node.child(i).unwrap(),
+// old_buffer,
+// new_buffer,
+// edits,
+// )
+// }
+// }
+// }
+
+// fn test_edit_sequence(language_name: &str, steps: &[&str]) -> (Buffer, SyntaxMap) {
+// let registry = Arc::new(LanguageRegistry::test());
+// registry.add(Arc::new(elixir_lang()));
+// registry.add(Arc::new(heex_lang()));
+// registry.add(Arc::new(rust_lang()));
+// registry.add(Arc::new(ruby_lang()));
+// registry.add(Arc::new(html_lang()));
+// registry.add(Arc::new(erb_lang()));
+// registry.add(Arc::new(markdown_lang()));
+
+// let language = registry
+// .language_for_name(language_name)
+// .now_or_never()
+// .unwrap()
+// .unwrap();
+// let mut buffer = Buffer::new(0, 0, Default::default());
+
+// let mut mutated_syntax_map = SyntaxMap::new();
+// mutated_syntax_map.set_language_registry(registry.clone());
+// mutated_syntax_map.reparse(language.clone(), &buffer);
+
+// for (i, marked_string) in steps.into_iter().enumerate() {
+// let marked_string = marked_string.unindent();
+// log::info!("incremental parse {i}: {marked_string:?}");
+// buffer.edit_via_marked_text(&marked_string);
+
+// // Reparse the syntax map
+// mutated_syntax_map.interpolate(&buffer);
+// mutated_syntax_map.reparse(language.clone(), &buffer);
+
+// // Create a second syntax map from scratch
+// log::info!("fresh parse {i}: {marked_string:?}");
+// let mut reference_syntax_map = SyntaxMap::new();
+// reference_syntax_map.set_language_registry(registry.clone());
+// reference_syntax_map.reparse(language.clone(), &buffer);
+
+// // Compare the mutated syntax map to the new syntax map
+// let mutated_layers = mutated_syntax_map.layers(&buffer);
+// let reference_layers = reference_syntax_map.layers(&buffer);
+// assert_eq!(
+// mutated_layers.len(),
+// reference_layers.len(),
+// "wrong number of layers at step {i}"
+// );
+// for (edited_layer, reference_layer) in
+// mutated_layers.into_iter().zip(reference_layers.into_iter())
+// {
+// assert_eq!(
+// edited_layer.node().to_sexp(),
+// reference_layer.node().to_sexp(),
+// "different layer at step {i}"
+// );
+// assert_eq!(
+// edited_layer.node().range(),
+// reference_layer.node().range(),
+// "different layer at step {i}"
+// );
+// }
+// }
+
+// (buffer, mutated_syntax_map)
+// }
+
+// fn html_lang() -> Language {
+// Language::new(
+// LanguageConfig {
+// name: "HTML".into(),
+// path_suffixes: vec!["html".to_string()],
+// ..Default::default()
+// },
+// Some(tree_sitter_html::language()),
+// )
+// .with_highlights_query(
+// r#"
+// (tag_name) @tag
+// (erroneous_end_tag_name) @tag
+// (attribute_name) @property
+// "#,
+// )
+// .unwrap()
+// }
+
+// fn ruby_lang() -> Language {
+// Language::new(
+// LanguageConfig {
+// name: "Ruby".into(),
+// path_suffixes: vec!["rb".to_string()],
+// ..Default::default()
+// },
+// Some(tree_sitter_ruby::language()),
+// )
+// .with_highlights_query(
+// r#"
+// ["if" "do" "else" "end"] @keyword
+// (instance_variable) @ivar
+// (call method: (identifier) @method)
+// "#,
+// )
+// .unwrap()
+// }
+
+// fn erb_lang() -> Language {
+// Language::new(
+// LanguageConfig {
+// name: "ERB".into(),
+// path_suffixes: vec!["erb".to_string()],
+// ..Default::default()
+// },
+// Some(tree_sitter_embedded_template::language()),
+// )
+// .with_highlights_query(
+// r#"
+// ["<%" "%>"] @keyword
+// "#,
+// )
+// .unwrap()
+// .with_injection_query(
+// r#"
+// (
+// (code) @content
+// (#set! "language" "ruby")
+// (#set! "combined")
+// )
+
+// (
+// (content) @content
+// (#set! "language" "html")
+// (#set! "combined")
+// )
+// "#,
+// )
+// .unwrap()
+// }
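+
+// // Note the `"combined"` property above: all ranges captured for a language
+// // are parsed together as one document, which is what the
+// // `test_combined_injections_*` cases exercise.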
+
+// fn rust_lang() -> Language {
+// Language::new(
+// LanguageConfig {
+// name: "Rust".into(),
+// path_suffixes: vec!["rs".to_string()],
+// ..Default::default()
+// },
+// Some(tree_sitter_rust::language()),
+// )
+// .with_highlights_query(
+// r#"
+// (field_identifier) @field
+// (struct_expression) @struct
+// "#,
+// )
+// .unwrap()
+// .with_injection_query(
+// r#"
+// (macro_invocation
+// (token_tree) @content
+// (#set! "language" "rust"))
+// "#,
+// )
+// .unwrap()
+// }
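+
+// // The injection query above is what produces the nested layers asserted in
+// // these tests: each macro invocation's token tree is re-parsed as Rust,
+// // recursively (e.g. `vec!` inside `dbg!`).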
+
+// fn markdown_lang() -> Language {
+// Language::new(
+// LanguageConfig {
+// name: "Markdown".into(),
+// path_suffixes: vec!["md".into()],
+// ..Default::default()
+// },
+// Some(tree_sitter_markdown::language()),
+// )
+// .with_injection_query(
+// r#"
+// (fenced_code_block
+// (info_string
+// (language) @language)
+// (code_fence_content) @content)
+// "#,
+// )
+// .unwrap()
+// }
+
+// fn elixir_lang() -> Language {
+// Language::new(
+// LanguageConfig {
+// name: "Elixir".into(),
+// path_suffixes: vec!["ex".into()],
+// ..Default::default()
+// },
+// Some(tree_sitter_elixir::language()),
+// )
+// .with_highlights_query(
+// r#"
+
+// "#,
+// )
+// .unwrap()
+// }
+
+// fn heex_lang() -> Language {
+// Language::new(
+// LanguageConfig {
+// name: "HEEx".into(),
+// path_suffixes: vec!["heex".into()],
+// ..Default::default()
+// },
+// Some(tree_sitter_heex::language()),
+// )
+// .with_injection_query(
+// r#"
+// (
+// (directive
+// [
+// (partial_expression_value)
+// (expression_value)
+// (ending_expression_value)
+// ] @content)
+// (#set! language "elixir")
+// (#set! combined)
+// )
+
+// ((expression (expression_value) @content)
+// (#set! language "elixir"))
+// "#,
+// )
+// .unwrap()
+// }
+
+// fn range_for_text(buffer: &Buffer, text: &str) -> Range<usize> {
+// let start = buffer.as_rope().to_string().find(text).unwrap();
+// start..start + text.len()
+// }
+
+// #[track_caller]
+// fn assert_layers_for_range(
+// syntax_map: &SyntaxMap,
+// buffer: &BufferSnapshot,
+// range: Range<Point>,
+// expected_layers: &[&str],
+// ) {
+// let layers = syntax_map
+// .layers_for_range(range, &buffer)
+// .collect::<Vec<_>>();
+// assert_eq!(
+// layers.len(),
+// expected_layers.len(),
+// "wrong number of layers"
+// );
+// for (i, (layer, expected_s_exp)) in layers.iter().zip(expected_layers.iter()).enumerate() {
+// let actual_s_exp = layer.node().to_sexp();
+// assert!(
+// string_contains_sequence(
+// &actual_s_exp,
+// &expected_s_exp.split("...").collect::<Vec<_>>()
+// ),
+// "layer {i}:\n\nexpected: {expected_s_exp}\nactual: {actual_s_exp}",
+// );
+// }
+// }
+
+// fn assert_capture_ranges(
+// syntax_map: &SyntaxMap,
+// buffer: &BufferSnapshot,
+// highlight_query_capture_names: &[&str],
+// marked_string: &str,
+// ) {
+// let mut actual_ranges = Vec::<Range<usize>>::new();
+// let captures = syntax_map.captures(0..buffer.len(), buffer, |grammar| {
+// grammar.highlights_query.as_ref()
+// });
+// let queries = captures
+// .grammars()
+// .iter()
+// .map(|grammar| grammar.highlights_query.as_ref().unwrap())
+// .collect::<Vec<_>>();
+// for capture in captures {
+// let name = &queries[capture.grammar_index].capture_names()[capture.index as usize];
+// if highlight_query_capture_names.contains(&name.as_str()) {
+// actual_ranges.push(capture.node.byte_range());
+// }
+// }
+
+// let (text, expected_ranges) = marked_text_ranges(&marked_string.unindent(), false);
+// assert_eq!(text, buffer.text());
+// assert_eq!(actual_ranges, expected_ranges);
+// }
+
+// pub fn string_contains_sequence(text: &str, parts: &[&str]) -> bool {
+// let mut last_part_end = 0;
+// for part in parts {
+// if let Some(start_ix) = text[last_part_end..].find(part) {
+// last_part_end += start_ix + part.len();
+// } else {
+// return false;
+// }
+// }
+// true
+// }
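+
+// // A quick worked example for the helper above (hypothetical values): parts
+// // must appear in order, and each search resumes after the previous match, so
+// // string_contains_sequence("(a (b) (c))", &["(a", "(b", "(c"]) is true,
+// // while string_contains_sequence("(a (b) (c))", &["(b", "(a"]) is false.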
@@ -1,6 +1,6 @@
use gpui2::{
- elements::div, interactive::Interactive, platform::MouseButton, style::StyleHelpers, ArcCow,
- Element, EventContext, IntoElement, ParentElement, ViewContext,
+ div, ArcCow, Element, EventContext, Interactive, IntoElement, MouseButton, ParentElement,
+ StyleHelpers, ViewContext,
};
use std::{marker::PhantomData, rc::Rc};