Detailed changes
@@ -11,7 +11,7 @@ dependencies = [
"editor",
"futures 0.3.28",
"gpui2",
- "language2",
+ "language",
"project",
"settings2",
"smallvec",
@@ -84,7 +84,7 @@ dependencies = [
"futures 0.3.28",
"gpui2",
"isahc",
- "language2",
+ "language",
"lazy_static",
"log",
"matrixmultiply",
@@ -315,7 +315,7 @@ dependencies = [
"gpui2",
"indoc",
"isahc",
- "language2",
+ "language",
"log",
"menu2",
"multi_buffer",
@@ -1025,7 +1025,7 @@ dependencies = [
"editor",
"gpui2",
"itertools 0.10.5",
- "language2",
+ "language",
"outline",
"project",
"search",
@@ -1152,7 +1152,7 @@ dependencies = [
"futures 0.3.28",
"gpui2",
"image",
- "language2",
+ "language",
"live_kit_client2",
"log",
"media",
@@ -1278,7 +1278,7 @@ dependencies = [
"futures 0.3.28",
"gpui2",
"image",
- "language2",
+ "language",
"lazy_static",
"log",
"parking_lot 0.11.2",
@@ -1659,13 +1659,13 @@ dependencies = [
"gpui2",
"hyper",
"indoc",
- "language2",
+ "language",
"lazy_static",
"lipsum",
"live_kit_client2",
"live_kit_server",
"log",
- "lsp2",
+ "lsp",
"nanoid",
"node_runtime",
"notifications2",
@@ -1719,9 +1719,9 @@ dependencies = [
"feature_flags",
"feedback",
"futures 0.3.28",
- "fuzzy2",
+ "fuzzy",
"gpui2",
- "language2",
+ "language",
"lazy_static",
"log",
"menu2",
@@ -1787,10 +1787,10 @@ dependencies = [
"ctor",
"editor",
"env_logger",
- "fuzzy2",
+ "fuzzy",
"go_to_line",
"gpui2",
- "language2",
+ "language",
"menu2",
"picker",
"project",
@@ -1889,9 +1889,9 @@ dependencies = [
"fs",
"futures 0.3.28",
"gpui2",
- "language2",
+ "language",
"log",
- "lsp2",
+ "lsp",
"node_runtime",
"parking_lot 0.11.2",
"rpc2",
@@ -1914,7 +1914,7 @@ dependencies = [
"fs2",
"futures 0.3.28",
"gpui2",
- "language2",
+ "language",
"settings2",
"smol",
"theme2",
@@ -2438,9 +2438,9 @@ dependencies = [
"editor",
"futures 0.3.28",
"gpui2",
- "language2",
+ "language",
"log",
- "lsp2",
+ "lsp",
"postage",
"project",
"schemars",
@@ -2603,15 +2603,15 @@ dependencies = [
"db2",
"env_logger",
"futures 0.3.28",
- "fuzzy2",
+ "fuzzy",
"git3",
"gpui2",
"indoc",
"itertools 0.10.5",
- "language2",
+ "language",
"lazy_static",
"log",
- "lsp2",
+ "lsp",
"multi_buffer",
"ordered-float 2.10.0",
"parking_lot 0.11.2",
@@ -2830,7 +2830,7 @@ dependencies = [
"gpui2",
"human_bytes",
"isahc",
- "language2",
+ "language",
"lazy_static",
"log",
"menu2",
@@ -2860,9 +2860,9 @@ dependencies = [
"ctor",
"editor",
"env_logger",
- "fuzzy2",
+ "fuzzy",
"gpui2",
- "language2",
+ "language",
"menu2",
"picker",
"postage",
@@ -3250,7 +3250,7 @@ dependencies = [
name = "fuzzy"
version = "0.1.0"
dependencies = [
- "gpui",
+ "gpui2",
"util",
]
@@ -3263,14 +3263,6 @@ dependencies = [
"thread_local",
]
-[[package]]
-name = "fuzzy2"
-version = "0.1.0"
-dependencies = [
- "gpui2",
- "util",
-]
-
[[package]]
name = "generic-array"
version = "0.14.7"
@@ -4191,61 +4183,6 @@ dependencies = [
[[package]]
name = "language"
version = "0.1.0"
-dependencies = [
- "anyhow",
- "async-broadcast",
- "async-trait",
- "client",
- "clock",
- "collections",
- "ctor",
- "env_logger",
- "fs",
- "futures 0.3.28",
- "fuzzy",
- "git",
- "globset",
- "gpui",
- "indoc",
- "lazy_static",
- "log",
- "lsp",
- "parking_lot 0.11.2",
- "postage",
- "pulldown-cmark",
- "rand 0.8.5",
- "regex",
- "rpc",
- "schemars",
- "serde",
- "serde_derive",
- "serde_json",
- "settings",
- "similar",
- "smallvec",
- "smol",
- "sum_tree",
- "text",
- "theme",
- "tree-sitter",
- "tree-sitter-elixir",
- "tree-sitter-embedded-template",
- "tree-sitter-heex",
- "tree-sitter-html",
- "tree-sitter-json 0.20.0",
- "tree-sitter-markdown",
- "tree-sitter-python",
- "tree-sitter-ruby",
- "tree-sitter-rust",
- "tree-sitter-typescript",
- "unicase",
- "unindent",
- "util",
-]
-
-[[package]]
-name = "language2"
-version = "0.1.0"
dependencies = [
"anyhow",
"async-broadcast",
@@ -4256,14 +4193,14 @@ dependencies = [
"ctor",
"env_logger",
"futures 0.3.28",
- "fuzzy2",
+ "fuzzy",
"git3",
"globset",
"gpui2",
"indoc",
"lazy_static",
"log",
- "lsp2",
+ "lsp",
"parking_lot 0.11.2",
"postage",
"pulldown-cmark",
@@ -4303,9 +4240,9 @@ version = "0.1.0"
dependencies = [
"anyhow",
"editor",
- "fuzzy2",
+ "fuzzy",
"gpui2",
- "language2",
+ "language",
"picker",
"project",
"settings2",
@@ -4326,8 +4263,8 @@ dependencies = [
"env_logger",
"futures 0.3.28",
"gpui2",
- "language2",
- "lsp2",
+ "language",
+ "lsp",
"project",
"serde",
"settings2",
@@ -4599,7 +4536,7 @@ dependencies = [
"ctor",
"env_logger",
"futures 0.3.28",
- "gpui",
+ "gpui2",
"log",
"lsp-types",
"parking_lot 0.11.2",
@@ -4624,29 +4561,6 @@ dependencies = [
"url",
]
-[[package]]
-name = "lsp2"
-version = "0.1.0"
-dependencies = [
- "anyhow",
- "async-pipe",
- "collections",
- "ctor",
- "env_logger",
- "futures 0.3.28",
- "gpui2",
- "log",
- "lsp-types",
- "parking_lot 0.11.2",
- "postage",
- "serde",
- "serde_derive",
- "serde_json",
- "smol",
- "unindent",
- "util",
-]
-
[[package]]
name = "mach"
version = "0.3.2"
@@ -4954,10 +4868,10 @@ dependencies = [
"gpui2",
"indoc",
"itertools 0.10.5",
- "language2",
+ "language",
"lazy_static",
"log",
- "lsp2",
+ "lsp",
"ordered-float 2.10.0",
"parking_lot 0.11.2",
"postage",
@@ -5563,9 +5477,9 @@ name = "outline"
version = "0.1.0"
dependencies = [
"editor",
- "fuzzy2",
+ "fuzzy",
"gpui2",
- "language2",
+ "language",
"ordered-float 2.10.0",
"picker",
"postage",
@@ -5994,9 +5908,9 @@ dependencies = [
"fs2",
"futures 0.3.28",
"gpui2",
- "language2",
+ "language",
"log",
- "lsp2",
+ "lsp",
"node_runtime",
"parking_lot 0.11.2",
"serde",
@@ -6106,17 +6020,17 @@ dependencies = [
"fs2",
"fsevent",
"futures 0.3.28",
- "fuzzy2",
+ "fuzzy",
"git2",
"git3",
"globset",
"gpui2",
"ignore",
"itertools 0.10.5",
- "language2",
+ "language",
"lazy_static",
"log",
- "lsp2",
+ "lsp",
"node_runtime",
"parking_lot 0.11.2",
"postage",
@@ -6154,7 +6068,7 @@ dependencies = [
"editor",
"futures 0.3.28",
"gpui2",
- "language2",
+ "language",
"menu2",
"postage",
"pretty_assertions",
@@ -6180,10 +6094,10 @@ dependencies = [
"anyhow",
"editor",
"futures 0.3.28",
- "fuzzy2",
+ "fuzzy",
"gpui2",
- "language2",
- "lsp2",
+ "language",
+ "lsp",
"ordered-float 2.10.0",
"picker",
"postage",
@@ -6531,9 +6445,9 @@ version = "0.1.0"
dependencies = [
"editor",
"futures 0.3.28",
- "fuzzy2",
+ "fuzzy",
"gpui2",
- "language2",
+ "language",
"ordered-float 2.10.0",
"picker",
"postage",
@@ -6730,7 +6644,7 @@ dependencies = [
"collections",
"futures 0.3.28",
"gpui2",
- "language2",
+ "language",
"lazy_static",
"pulldown-cmark",
"smallvec",
@@ -7356,7 +7270,7 @@ dependencies = [
"editor",
"futures 0.3.28",
"gpui2",
- "language2",
+ "language",
"log",
"menu2",
"postage",
@@ -7466,7 +7380,7 @@ dependencies = [
"futures 0.3.28",
"globset",
"gpui2",
- "language2",
+ "language",
"lazy_static",
"log",
"ndarray",
@@ -8280,11 +8194,11 @@ dependencies = [
"clap 4.4.4",
"dialoguer",
"editor",
- "fuzzy2",
+ "fuzzy",
"gpui2",
"indoc",
"itertools 0.11.0",
- "language2",
+ "language",
"log",
"menu2",
"picker",
@@ -8646,7 +8560,7 @@ dependencies = [
"futures 0.3.28",
"gpui2",
"itertools 0.10.5",
- "language2",
+ "language",
"lazy_static",
"libc",
"mio-extras",
@@ -8791,7 +8705,7 @@ dependencies = [
"editor",
"feature_flags",
"fs2",
- "fuzzy2",
+ "fuzzy",
"gpui2",
"log",
"parking_lot 0.11.2",
@@ -9861,7 +9775,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"fs2",
- "fuzzy2",
+ "fuzzy",
"gpui2",
"picker",
"ui2",
@@ -9890,9 +9804,9 @@ dependencies = [
"gpui2",
"indoc",
"itertools 0.10.5",
- "language2",
+ "language",
"log",
- "lsp2",
+ "lsp",
"nvim-rs",
"parking_lot 0.11.2",
"project",
@@ -10305,7 +10219,7 @@ dependencies = [
"db2",
"editor",
"fs2",
- "fuzzy2",
+ "fuzzy",
"gpui2",
"install_cli",
"log",
@@ -10579,7 +10493,7 @@ dependencies = [
"indoc",
"install_cli",
"itertools 0.10.5",
- "language2",
+ "language",
"lazy_static",
"log",
"node_runtime",
@@ -10722,13 +10636,13 @@ dependencies = [
"install_cli",
"isahc",
"journal",
- "language2",
+ "language",
"language_selector",
"language_tools",
"lazy_static",
"libc",
"log",
- "lsp2",
+ "lsp",
"menu2",
"node_runtime",
"notifications2",
@@ -38,7 +38,6 @@ members = [
"crates/fs2",
"crates/fsevent",
"crates/fuzzy",
- "crates/fuzzy2",
"crates/git",
"crates/go_to_line",
"crates/gpui",
@@ -49,13 +48,11 @@ members = [
"crates/journal",
"crates/journal",
"crates/language",
- "crates/language2",
"crates/language_selector",
"crates/language_tools",
"crates/live_kit_client",
"crates/live_kit_server",
"crates/lsp",
- "crates/lsp2",
"crates/media",
"crates/menu",
"crates/menu2",
@@ -11,7 +11,7 @@ doctest = false
[dependencies]
auto_update = { path = "../auto_update" }
editor = { path = "../editor" }
-language = { path = "../language2", package = "language2" }
+language = { path = "../language" }
gpui = { path = "../gpui2", package = "gpui2" }
project = { path = "../project" }
settings = { path = "../settings2", package = "settings2" }
@@ -14,7 +14,7 @@ test-support = []
[dependencies]
gpui = { package = "gpui2", path = "../gpui2" }
util = { path = "../util" }
-language = { package = "language2", path = "../language2" }
+language = { path = "../language" }
async-trait.workspace = true
anyhow.workspace = true
futures.workspace = true
@@ -15,7 +15,7 @@ collections = { path = "../collections"}
editor = { path = "../editor" }
fs = { package = "fs2", path = "../fs2" }
gpui = { package = "gpui2", path = "../gpui2" }
-language = { package = "language2", path = "../language2" }
+language = { path = "../language" }
menu = { package = "menu2", path = "../menu2" }
multi_buffer = { path = "../multi_buffer" }
project = { path = "../project" }
@@ -13,7 +13,7 @@ collections = { path = "../collections" }
editor = { path = "../editor" }
gpui = { package = "gpui2", path = "../gpui2" }
ui = { package = "ui2", path = "../ui2" }
-language = { package = "language2", path = "../language2" }
+language = { path = "../language" }
project = { path = "../project" }
search = { path = "../search" }
settings = { package = "settings2", path = "../settings2" }
@@ -26,7 +26,7 @@ gpui = { package = "gpui2", path = "../gpui2" }
log.workspace = true
live_kit_client = { package = "live_kit_client2", path = "../live_kit_client2" }
fs = { package = "fs2", path = "../fs2" }
-language = { package = "language2", path = "../language2" }
+language = { path = "../language" }
media = { path = "../media" }
project = { path = "../project" }
settings = { package = "settings2", path = "../settings2" }
@@ -46,7 +46,7 @@ smallvec.workspace = true
[dev-dependencies]
client = { package = "client2", path = "../client2", features = ["test-support"] }
fs = { package = "fs2", path = "../fs2", features = ["test-support"] }
-language = { package = "language2", path = "../language2", features = ["test-support"] }
+language = { path = "../language", features = ["test-support"] }
collections = { path = "../collections", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
live_kit_client = { package = "live_kit_client2", path = "../live_kit_client2", features = ["test-support"] }
@@ -19,7 +19,7 @@ gpui = { package = "gpui2", path = "../gpui2" }
util = { path = "../util" }
rpc = { package = "rpc2", path = "../rpc2" }
text = { package = "text2", path = "../text2" }
-language = { package = "language2", path = "../language2" }
+language = { path = "../language" }
settings = { package = "settings2", path = "../settings2" }
feature_flags = { path = "../feature_flags" }
sum_tree = { path = "../sum_tree" }
@@ -67,11 +67,11 @@ call = { package = "call2", path = "../call2", features = ["test-support"] }
client = { package = "client2", path = "../client2", features = ["test-support"] }
channel = { package = "channel2", path = "../channel2" }
editor = { path = "../editor", features = ["test-support"] }
-language = { package = "language2", path = "../language2", features = ["test-support"] }
+language = { path = "../language", features = ["test-support"] }
fs = { package = "fs2", path = "../fs2", features = ["test-support"] }
git = { package = "git3", path = "../git3", features = ["test-support"] }
live_kit_client = { package = "live_kit_client2", path = "../live_kit_client2", features = ["test-support"] }
-lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
+lsp = { path = "../lsp", features = ["test-support"] }
node_runtime = { path = "../node_runtime" }
notifications = { package = "notifications2", path = "../notifications2", features = ["test-support"] }
@@ -33,9 +33,9 @@ collections = { path = "../collections" }
# drag_and_drop = { path = "../drag_and_drop" }
editor = { path = "../editor" }
feedback = { path = "../feedback" }
-fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
+fuzzy = { path = "../fuzzy" }
gpui = { package = "gpui2", path = "../gpui2" }
-language = { package = "language2", path = "../language2" }
+language = { path = "../language" }
menu = { package = "menu2", path = "../menu2" }
notifications = { package = "notifications2", path = "../notifications2" }
rich_text = { path = "../rich_text" }
@@ -11,7 +11,7 @@ doctest = false
[dependencies]
collections = { path = "../collections" }
editor = { path = "../editor" }
-fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
+fuzzy = { path = "../fuzzy" }
gpui = { package = "gpui2", path = "../gpui2" }
picker = { path = "../picker" }
project = { path = "../project" }
@@ -27,7 +27,7 @@ serde.workspace = true
[dev-dependencies]
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
editor = { path = "../editor", features = ["test-support"] }
-language = { package="language2", path = "../language2", features = ["test-support"] }
+language = { path = "../language", features = ["test-support"] }
project = { path = "../project", features = ["test-support"] }
menu = { package = "menu2", path = "../menu2" }
go_to_line = { path = "../go_to_line" }
@@ -22,10 +22,10 @@ test-support = [
collections = { path = "../collections" }
# context_menu = { path = "../context_menu" }
gpui = { package = "gpui2", path = "../gpui2" }
-language = { package = "language2", path = "../language2" }
+language = { path = "../language" }
settings = { package = "settings2", path = "../settings2" }
theme = { package = "theme2", path = "../theme2" }
-lsp = { package = "lsp2", path = "../lsp2" }
+lsp = { path = "../lsp" }
node_runtime = { path = "../node_runtime"}
util = { path = "../util" }
ui = { package = "ui2", path = "../ui2" }
@@ -44,8 +44,8 @@ clock = { path = "../clock" }
collections = { path = "../collections", features = ["test-support"] }
fs = { path = "../fs", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
-language = { package = "language2", path = "../language2", features = ["test-support"] }
-lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
+language = { path = "../language", features = ["test-support"] }
+lsp = { path = "../lsp", features = ["test-support"] }
rpc = { package = "rpc2", path = "../rpc2", features = ["test-support"] }
settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
@@ -14,7 +14,7 @@ editor = { path = "../editor" }
fs = { package = "fs2", path = "../fs2" }
zed_actions = { path = "../zed_actions"}
gpui = { package = "gpui2", path = "../gpui2" }
-language = { package = "language2", path = "../language2" }
+language = { path = "../language" }
settings = { package = "settings2", path = "../settings2" }
theme = { package = "theme2", path = "../theme2" }
util = { path = "../util" }
@@ -13,8 +13,8 @@ collections = { path = "../collections" }
editor = { path = "../editor" }
gpui = { package = "gpui2", path = "../gpui2" }
ui = { package = "ui2", path = "../ui2" }
-language = { package = "language2", path = "../language2" }
-lsp = { package = "lsp2", path = "../lsp2" }
+language = { path = "../language" }
+lsp = { path = "../lsp" }
project = { path = "../project" }
settings = { package = "settings2", path = "../settings2" }
theme = { package = "theme2", path = "../theme2" }
@@ -33,8 +33,8 @@ postage.workspace = true
[dev-dependencies]
client = { package = "client2", path = "../client2", features = ["test-support"] }
editor = { path = "../editor", features = ["test-support"] }
-language = { package = "language2", path = "../language2", features = ["test-support"] }
-lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
+language = { path = "../language", features = ["test-support"] }
+lsp = { path = "../lsp", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
workspace = {path = "../workspace", features = ["test-support"] }
theme = { package = "theme2", path = "../theme2", features = ["test-support"] }
@@ -29,11 +29,11 @@ copilot = { path = "../copilot" }
db = { package="db2", path = "../db2" }
collections = { path = "../collections" }
# context_menu = { path = "../context_menu" }
-fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
+fuzzy = { path = "../fuzzy" }
git = { package = "git3", path = "../git3" }
gpui = { package = "gpui2", path = "../gpui2" }
-language = { package = "language2", path = "../language2" }
-lsp = { package = "lsp2", path = "../lsp2" }
+language = { path = "../language" }
+lsp = { path = "../lsp" }
multi_buffer = { path = "../multi_buffer" }
project = { path = "../project" }
rpc = { package = "rpc2", path = "../rpc2" }
@@ -74,8 +74,8 @@ tree-sitter-typescript = { workspace = true, optional = true }
[dev-dependencies]
copilot = { path = "../copilot", features = ["test-support"] }
text = { package="text2", path = "../text2", features = ["test-support"] }
-language = { package="language2", path = "../language2", features = ["test-support"] }
-lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
+language = { path = "../language", features = ["test-support"] }
+lsp = { path = "../lsp", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
project = { path = "../project", features = ["test-support"] }
@@ -15,7 +15,7 @@ client = { package = "client2", path = "../client2" }
db = { package = "db2", path = "../db2" }
editor = { path = "../editor" }
gpui = { package = "gpui2", path = "../gpui2" }
-language = { package = "language2", path = "../language2" }
+language = { path = "../language" }
menu = { package = "menu2", path = "../menu2" }
project = { path = "../project" }
search = { path = "../search" }
@@ -11,7 +11,7 @@ doctest = false
[dependencies]
editor = { path = "../editor" }
collections = { path = "../collections" }
-fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
+fuzzy = { path = "../fuzzy" }
gpui = { package = "gpui2", path = "../gpui2" }
menu = { package = "menu2", path = "../menu2" }
picker = { path = "../picker" }
@@ -28,7 +28,7 @@ serde.workspace = true
[dev-dependencies]
editor = { path = "../editor", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
-language = { package = "language2", path = "../language2", features = ["test-support"] }
+language = { path = "../language", features = ["test-support"] }
workspace = { path = "../workspace", features = ["test-support"] }
theme = { package = "theme2", path = "../theme2", features = ["test-support"] }
@@ -9,5 +9,5 @@ path = "src/fuzzy.rs"
doctest = false
[dependencies]
-gpui = { path = "../gpui" }
+gpui = { package = "gpui2", path = "../gpui2" }
util = { path = "../util" }
@@ -1,3 +1,4 @@
+use gpui::BackgroundExecutor;
use std::{
borrow::Cow,
cmp::{self, Ordering},
@@ -5,8 +6,6 @@ use std::{
sync::{atomic::AtomicBool, Arc},
};
-use gpui::executor;
-
use crate::{
matcher::{Match, MatchCandidate, Matcher},
CharBag,
@@ -135,7 +134,7 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>(
smart_case: bool,
max_results: usize,
cancel_flag: &AtomicBool,
- background: Arc<executor::Background>,
+ executor: BackgroundExecutor,
) -> Vec<PathMatch> {
let path_count: usize = candidate_sets.iter().map(|s| s.len()).sum();
if path_count == 0 {
@@ -149,13 +148,13 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>(
let query = &query;
let query_char_bag = CharBag::from(&lowercase_query[..]);
- let num_cpus = background.num_cpus().min(path_count);
+ let num_cpus = executor.num_cpus().min(path_count);
let segment_size = (path_count + num_cpus - 1) / num_cpus;
let mut segment_results = (0..num_cpus)
.map(|_| Vec::with_capacity(max_results))
.collect::<Vec<_>>();
- background
+ executor
.scoped(|scope| {
for (segment_idx, results) in segment_results.iter_mut().enumerate() {
let relative_to = relative_to.clone();
@@ -1,15 +1,15 @@
-use std::{
- borrow::Cow,
- cmp::{self, Ordering},
- sync::{atomic::AtomicBool, Arc},
-};
-
-use gpui::executor;
-
use crate::{
matcher::{Match, MatchCandidate, Matcher},
CharBag,
};
+use gpui::BackgroundExecutor;
+use std::{
+ borrow::Cow,
+ cmp::{self, Ordering},
+ iter,
+ ops::Range,
+ sync::atomic::AtomicBool,
+};
#[derive(Clone, Debug)]
pub struct StringMatchCandidate {
@@ -56,6 +56,32 @@ pub struct StringMatch {
pub string: String,
}
+impl StringMatch {
+ pub fn ranges<'a>(&'a self) -> impl 'a + Iterator<Item = Range<usize>> {
+ let mut positions = self.positions.iter().peekable();
+ iter::from_fn(move || {
+ while let Some(start) = positions.next().copied() {
+ let mut end = start + self.char_len_at_index(start);
+ while let Some(next_start) = positions.peek() {
+ if end == **next_start {
+ end += self.char_len_at_index(end);
+ positions.next();
+ } else {
+ break;
+ }
+ }
+
+ return Some(start..end);
+ }
+ None
+ })
+ }
+
+ fn char_len_at_index(&self, ix: usize) -> usize {
+ self.string[ix..].chars().next().unwrap().len_utf8()
+ }
+}
+
impl PartialEq for StringMatch {
fn eq(&self, other: &Self) -> bool {
self.cmp(other).is_eq()
@@ -85,7 +111,7 @@ pub async fn match_strings(
smart_case: bool,
max_results: usize,
cancel_flag: &AtomicBool,
- background: Arc<executor::Background>,
+ executor: BackgroundExecutor,
) -> Vec<StringMatch> {
if candidates.is_empty() || max_results == 0 {
return Default::default();
@@ -110,13 +136,13 @@ pub async fn match_strings(
let query = &query;
let query_char_bag = CharBag::from(&lowercase_query[..]);
- let num_cpus = background.num_cpus().min(candidates.len());
+ let num_cpus = executor.num_cpus().min(candidates.len());
let segment_size = (candidates.len() + num_cpus - 1) / num_cpus;
let mut segment_results = (0..num_cpus)
.map(|_| Vec::with_capacity(max_results.min(candidates.len())))
.collect::<Vec<_>>();
- background
+ executor
.scoped(|scope| {
for (segment_idx, results) in segment_results.iter_mut().enumerate() {
let cancel_flag = &cancel_flag;
@@ -1,13 +0,0 @@
-[package]
-name = "fuzzy2"
-version = "0.1.0"
-edition = "2021"
-publish = false
-
-[lib]
-path = "src/fuzzy2.rs"
-doctest = false
-
-[dependencies]
-gpui = { package = "gpui2", path = "../gpui2" }
-util = { path = "../util" }
@@ -1,63 +0,0 @@
-use std::iter::FromIterator;
-
-#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
-pub struct CharBag(u64);
-
-impl CharBag {
- pub fn is_superset(self, other: CharBag) -> bool {
- self.0 & other.0 == other.0
- }
-
- fn insert(&mut self, c: char) {
- let c = c.to_ascii_lowercase();
- if ('a'..='z').contains(&c) {
- let mut count = self.0;
- let idx = c as u8 - b'a';
- count >>= idx * 2;
- count = ((count << 1) | 1) & 3;
- count <<= idx * 2;
- self.0 |= count;
- } else if ('0'..='9').contains(&c) {
- let idx = c as u8 - b'0';
- self.0 |= 1 << (idx + 52);
- } else if c == '-' {
- self.0 |= 1 << 62;
- }
- }
-}
-
-impl Extend<char> for CharBag {
- fn extend<T: IntoIterator<Item = char>>(&mut self, iter: T) {
- for c in iter {
- self.insert(c);
- }
- }
-}
-
-impl FromIterator<char> for CharBag {
- fn from_iter<T: IntoIterator<Item = char>>(iter: T) -> Self {
- let mut result = Self::default();
- result.extend(iter);
- result
- }
-}
-
-impl From<&str> for CharBag {
- fn from(s: &str) -> Self {
- let mut bag = Self(0);
- for c in s.chars() {
- bag.insert(c);
- }
- bag
- }
-}
-
-impl From<&[char]> for CharBag {
- fn from(chars: &[char]) -> Self {
- let mut bag = Self(0);
- for c in chars {
- bag.insert(*c);
- }
- bag
- }
-}
@@ -1,10 +0,0 @@
-mod char_bag;
-mod matcher;
-mod paths;
-mod strings;
-
-pub use char_bag::CharBag;
-pub use paths::{
- match_fixed_path_set, match_path_sets, PathMatch, PathMatchCandidate, PathMatchCandidateSet,
-};
-pub use strings::{match_strings, StringMatch, StringMatchCandidate};
@@ -1,464 +0,0 @@
-use std::{
- borrow::Cow,
- sync::atomic::{self, AtomicBool},
-};
-
-use crate::CharBag;
-
-const BASE_DISTANCE_PENALTY: f64 = 0.6;
-const ADDITIONAL_DISTANCE_PENALTY: f64 = 0.05;
-const MIN_DISTANCE_PENALTY: f64 = 0.2;
-
-pub struct Matcher<'a> {
- query: &'a [char],
- lowercase_query: &'a [char],
- query_char_bag: CharBag,
- smart_case: bool,
- max_results: usize,
- min_score: f64,
- match_positions: Vec<usize>,
- last_positions: Vec<usize>,
- score_matrix: Vec<Option<f64>>,
- best_position_matrix: Vec<usize>,
-}
-
-pub trait Match: Ord {
- fn score(&self) -> f64;
- fn set_positions(&mut self, positions: Vec<usize>);
-}
-
-pub trait MatchCandidate {
- fn has_chars(&self, bag: CharBag) -> bool;
- fn to_string(&self) -> Cow<'_, str>;
-}
-
-impl<'a> Matcher<'a> {
- pub fn new(
- query: &'a [char],
- lowercase_query: &'a [char],
- query_char_bag: CharBag,
- smart_case: bool,
- max_results: usize,
- ) -> Self {
- Self {
- query,
- lowercase_query,
- query_char_bag,
- min_score: 0.0,
- last_positions: vec![0; query.len()],
- match_positions: vec![0; query.len()],
- score_matrix: Vec::new(),
- best_position_matrix: Vec::new(),
- smart_case,
- max_results,
- }
- }
-
- pub fn match_candidates<C: MatchCandidate, R, F>(
- &mut self,
- prefix: &[char],
- lowercase_prefix: &[char],
- candidates: impl Iterator<Item = C>,
- results: &mut Vec<R>,
- cancel_flag: &AtomicBool,
- build_match: F,
- ) where
- R: Match,
- F: Fn(&C, f64) -> R,
- {
- let mut candidate_chars = Vec::new();
- let mut lowercase_candidate_chars = Vec::new();
-
- for candidate in candidates {
- if !candidate.has_chars(self.query_char_bag) {
- continue;
- }
-
- if cancel_flag.load(atomic::Ordering::Relaxed) {
- break;
- }
-
- candidate_chars.clear();
- lowercase_candidate_chars.clear();
- for c in candidate.to_string().chars() {
- candidate_chars.push(c);
- lowercase_candidate_chars.push(c.to_ascii_lowercase());
- }
-
- if !self.find_last_positions(lowercase_prefix, &lowercase_candidate_chars) {
- continue;
- }
-
- let matrix_len = self.query.len() * (prefix.len() + candidate_chars.len());
- self.score_matrix.clear();
- self.score_matrix.resize(matrix_len, None);
- self.best_position_matrix.clear();
- self.best_position_matrix.resize(matrix_len, 0);
-
- let score = self.score_match(
- &candidate_chars,
- &lowercase_candidate_chars,
- prefix,
- lowercase_prefix,
- );
-
- if score > 0.0 {
- let mut mat = build_match(&candidate, score);
- if let Err(i) = results.binary_search_by(|m| mat.cmp(m)) {
- if results.len() < self.max_results {
- mat.set_positions(self.match_positions.clone());
- results.insert(i, mat);
- } else if i < results.len() {
- results.pop();
- mat.set_positions(self.match_positions.clone());
- results.insert(i, mat);
- }
- if results.len() == self.max_results {
- self.min_score = results.last().unwrap().score();
- }
- }
- }
- }
- }
-
- fn find_last_positions(
- &mut self,
- lowercase_prefix: &[char],
- lowercase_candidate: &[char],
- ) -> bool {
- let mut lowercase_prefix = lowercase_prefix.iter();
- let mut lowercase_candidate = lowercase_candidate.iter();
- for (i, char) in self.lowercase_query.iter().enumerate().rev() {
- if let Some(j) = lowercase_candidate.rposition(|c| c == char) {
- self.last_positions[i] = j + lowercase_prefix.len();
- } else if let Some(j) = lowercase_prefix.rposition(|c| c == char) {
- self.last_positions[i] = j;
- } else {
- return false;
- }
- }
- true
- }
-
- fn score_match(
- &mut self,
- path: &[char],
- path_cased: &[char],
- prefix: &[char],
- lowercase_prefix: &[char],
- ) -> f64 {
- let score = self.recursive_score_match(
- path,
- path_cased,
- prefix,
- lowercase_prefix,
- 0,
- 0,
- self.query.len() as f64,
- ) * self.query.len() as f64;
-
- if score <= 0.0 {
- return 0.0;
- }
-
- let path_len = prefix.len() + path.len();
- let mut cur_start = 0;
- let mut byte_ix = 0;
- let mut char_ix = 0;
- for i in 0..self.query.len() {
- let match_char_ix = self.best_position_matrix[i * path_len + cur_start];
- while char_ix < match_char_ix {
- let ch = prefix
- .get(char_ix)
- .or_else(|| path.get(char_ix - prefix.len()))
- .unwrap();
- byte_ix += ch.len_utf8();
- char_ix += 1;
- }
- cur_start = match_char_ix + 1;
- self.match_positions[i] = byte_ix;
- }
-
- score
- }
-
- #[allow(clippy::too_many_arguments)]
- fn recursive_score_match(
- &mut self,
- path: &[char],
- path_cased: &[char],
- prefix: &[char],
- lowercase_prefix: &[char],
- query_idx: usize,
- path_idx: usize,
- cur_score: f64,
- ) -> f64 {
- if query_idx == self.query.len() {
- return 1.0;
- }
-
- let path_len = prefix.len() + path.len();
-
- if let Some(memoized) = self.score_matrix[query_idx * path_len + path_idx] {
- return memoized;
- }
-
- let mut score = 0.0;
- let mut best_position = 0;
-
- let query_char = self.lowercase_query[query_idx];
- let limit = self.last_positions[query_idx];
-
- let mut last_slash = 0;
- for j in path_idx..=limit {
- let path_char = if j < prefix.len() {
- lowercase_prefix[j]
- } else {
- path_cased[j - prefix.len()]
- };
- let is_path_sep = path_char == '/' || path_char == '\\';
-
- if query_idx == 0 && is_path_sep {
- last_slash = j;
- }
-
- if query_char == path_char || (is_path_sep && query_char == '_' || query_char == '\\') {
- let curr = if j < prefix.len() {
- prefix[j]
- } else {
- path[j - prefix.len()]
- };
-
- let mut char_score = 1.0;
- if j > path_idx {
- let last = if j - 1 < prefix.len() {
- prefix[j - 1]
- } else {
- path[j - 1 - prefix.len()]
- };
-
- if last == '/' {
- char_score = 0.9;
- } else if (last == '-' || last == '_' || last == ' ' || last.is_numeric())
- || (last.is_lowercase() && curr.is_uppercase())
- {
- char_score = 0.8;
- } else if last == '.' {
- char_score = 0.7;
- } else if query_idx == 0 {
- char_score = BASE_DISTANCE_PENALTY;
- } else {
- char_score = MIN_DISTANCE_PENALTY.max(
- BASE_DISTANCE_PENALTY
- - (j - path_idx - 1) as f64 * ADDITIONAL_DISTANCE_PENALTY,
- );
- }
- }
-
- // Apply a severe penalty if the case doesn't match.
- // This will make the exact matches have higher score than the case-insensitive and the
- // path insensitive matches.
- if (self.smart_case || curr == '/') && self.query[query_idx] != curr {
- char_score *= 0.001;
- }
-
- let mut multiplier = char_score;
-
- // Scale the score based on how deep within the path we found the match.
- if query_idx == 0 {
- multiplier /= ((prefix.len() + path.len()) - last_slash) as f64;
- }
-
- let mut next_score = 1.0;
- if self.min_score > 0.0 {
- next_score = cur_score * multiplier;
- // Scores only decrease. If we can't pass the previous best, bail
- if next_score < self.min_score {
- // Ensure that score is non-zero so we use it in the memo table.
- if score == 0.0 {
- score = 1e-18;
- }
- continue;
- }
- }
-
- let new_score = self.recursive_score_match(
- path,
- path_cased,
- prefix,
- lowercase_prefix,
- query_idx + 1,
- j + 1,
- next_score,
- ) * multiplier;
-
- if new_score > score {
- score = new_score;
- best_position = j;
- // Optimization: can't score better than 1.
- if new_score == 1.0 {
- break;
- }
- }
- }
- }
-
- if best_position != 0 {
- self.best_position_matrix[query_idx * path_len + path_idx] = best_position;
- }
-
- self.score_matrix[query_idx * path_len + path_idx] = Some(score);
- score
- }
-}
-
-#[cfg(test)]
-mod tests {
- use crate::{PathMatch, PathMatchCandidate};
-
- use super::*;
- use std::{
- path::{Path, PathBuf},
- sync::Arc,
- };
-
- #[test]
- fn test_get_last_positions() {
- let mut query: &[char] = &['d', 'c'];
- let mut matcher = Matcher::new(query, query, query.into(), false, 10);
- let result = matcher.find_last_positions(&['a', 'b', 'c'], &['b', 'd', 'e', 'f']);
- assert!(!result);
-
- query = &['c', 'd'];
- let mut matcher = Matcher::new(query, query, query.into(), false, 10);
- let result = matcher.find_last_positions(&['a', 'b', 'c'], &['b', 'd', 'e', 'f']);
- assert!(result);
- assert_eq!(matcher.last_positions, vec![2, 4]);
-
- query = &['z', '/', 'z', 'f'];
- let mut matcher = Matcher::new(query, query, query.into(), false, 10);
- let result = matcher.find_last_positions(&['z', 'e', 'd', '/'], &['z', 'e', 'd', '/', 'f']);
- assert!(result);
- assert_eq!(matcher.last_positions, vec![0, 3, 4, 8]);
- }
-
- #[test]
- fn test_match_path_entries() {
- let paths = vec![
- "",
- "a",
- "ab",
- "abC",
- "abcd",
- "alphabravocharlie",
- "AlphaBravoCharlie",
- "thisisatestdir",
- "/////ThisIsATestDir",
- "/this/is/a/test/dir",
- "/test/tiatd",
- ];
-
- assert_eq!(
- match_single_path_query("abc", false, &paths),
- vec![
- ("abC", vec![0, 1, 2]),
- ("abcd", vec![0, 1, 2]),
- ("AlphaBravoCharlie", vec![0, 5, 10]),
- ("alphabravocharlie", vec![4, 5, 10]),
- ]
- );
- assert_eq!(
- match_single_path_query("t/i/a/t/d", false, &paths),
- vec![("/this/is/a/test/dir", vec![1, 5, 6, 8, 9, 10, 11, 15, 16]),]
- );
-
- assert_eq!(
- match_single_path_query("tiatd", false, &paths),
- vec![
- ("/test/tiatd", vec![6, 7, 8, 9, 10]),
- ("/this/is/a/test/dir", vec![1, 6, 9, 11, 16]),
- ("/////ThisIsATestDir", vec![5, 9, 11, 12, 16]),
- ("thisisatestdir", vec![0, 2, 6, 7, 11]),
- ]
- );
- }
-
- #[test]
- fn test_match_multibyte_path_entries() {
- let paths = vec!["aαbβ/cγdδ", "αβγδ/bcde", "c1️⃣2️⃣3️⃣/d4️⃣5️⃣6️⃣/e7️⃣8️⃣9️⃣/f", "/d/🆒/h"];
- assert_eq!("1️⃣".len(), 7);
- assert_eq!(
- match_single_path_query("bcd", false, &paths),
- vec![
- ("αβγδ/bcde", vec![9, 10, 11]),
- ("aαbβ/cγdδ", vec![3, 7, 10]),
- ]
- );
- assert_eq!(
- match_single_path_query("cde", false, &paths),
- vec![
- ("αβγδ/bcde", vec![10, 11, 12]),
- ("c1️⃣2️⃣3️⃣/d4️⃣5️⃣6️⃣/e7️⃣8️⃣9️⃣/f", vec![0, 23, 46]),
- ]
- );
- }
-
- fn match_single_path_query<'a>(
- query: &str,
- smart_case: bool,
- paths: &[&'a str],
- ) -> Vec<(&'a str, Vec<usize>)> {
- let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
- let query = query.chars().collect::<Vec<_>>();
- let query_chars = CharBag::from(&lowercase_query[..]);
-
- let path_arcs: Vec<Arc<Path>> = paths
- .iter()
- .map(|path| Arc::from(PathBuf::from(path)))
- .collect::<Vec<_>>();
- let mut path_entries = Vec::new();
- for (i, path) in paths.iter().enumerate() {
- let lowercase_path = path.to_lowercase().chars().collect::<Vec<_>>();
- let char_bag = CharBag::from(lowercase_path.as_slice());
- path_entries.push(PathMatchCandidate {
- char_bag,
- path: &path_arcs[i],
- });
- }
-
- let mut matcher = Matcher::new(&query, &lowercase_query, query_chars, smart_case, 100);
-
- let cancel_flag = AtomicBool::new(false);
- let mut results = Vec::new();
-
- matcher.match_candidates(
- &[],
- &[],
- path_entries.into_iter(),
- &mut results,
- &cancel_flag,
- |candidate, score| PathMatch {
- score,
- worktree_id: 0,
- positions: Vec::new(),
- path: Arc::from(candidate.path),
- path_prefix: "".into(),
- distance_to_relative_ancestor: usize::MAX,
- },
- );
-
- results
- .into_iter()
- .map(|result| {
- (
- paths
- .iter()
- .copied()
- .find(|p| result.path.as_ref() == Path::new(p))
- .unwrap(),
- result.positions,
- )
- })
- .collect()
- }
-}
@@ -1,257 +0,0 @@
-use gpui::BackgroundExecutor;
-use std::{
- borrow::Cow,
- cmp::{self, Ordering},
- path::Path,
- sync::{atomic::AtomicBool, Arc},
-};
-
-use crate::{
- matcher::{Match, MatchCandidate, Matcher},
- CharBag,
-};
-
-#[derive(Clone, Debug)]
-pub struct PathMatchCandidate<'a> {
- pub path: &'a Path,
- pub char_bag: CharBag,
-}
-
-#[derive(Clone, Debug)]
-pub struct PathMatch {
- pub score: f64,
- pub positions: Vec<usize>,
- pub worktree_id: usize,
- pub path: Arc<Path>,
- pub path_prefix: Arc<str>,
- /// Number of steps removed from a shared parent with the relative path
- /// Used to order closer paths first in the search list
- pub distance_to_relative_ancestor: usize,
-}
-
-pub trait PathMatchCandidateSet<'a>: Send + Sync {
- type Candidates: Iterator<Item = PathMatchCandidate<'a>>;
- fn id(&self) -> usize;
- fn len(&self) -> usize;
- fn is_empty(&self) -> bool {
- self.len() == 0
- }
- fn prefix(&self) -> Arc<str>;
- fn candidates(&'a self, start: usize) -> Self::Candidates;
-}
-
-impl Match for PathMatch {
- fn score(&self) -> f64 {
- self.score
- }
-
- fn set_positions(&mut self, positions: Vec<usize>) {
- self.positions = positions;
- }
-}
-
-impl<'a> MatchCandidate for PathMatchCandidate<'a> {
- fn has_chars(&self, bag: CharBag) -> bool {
- self.char_bag.is_superset(bag)
- }
-
- fn to_string(&self) -> Cow<'a, str> {
- self.path.to_string_lossy()
- }
-}
-
-impl PartialEq for PathMatch {
- fn eq(&self, other: &Self) -> bool {
- self.cmp(other).is_eq()
- }
-}
-
-impl Eq for PathMatch {}
-
-impl PartialOrd for PathMatch {
- fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
- Some(self.cmp(other))
- }
-}
-
-impl Ord for PathMatch {
- fn cmp(&self, other: &Self) -> Ordering {
- self.score
- .partial_cmp(&other.score)
- .unwrap_or(Ordering::Equal)
- .then_with(|| self.worktree_id.cmp(&other.worktree_id))
- .then_with(|| {
- other
- .distance_to_relative_ancestor
- .cmp(&self.distance_to_relative_ancestor)
- })
- .then_with(|| self.path.cmp(&other.path))
- }
-}
-
-pub fn match_fixed_path_set(
- candidates: Vec<PathMatchCandidate>,
- worktree_id: usize,
- query: &str,
- smart_case: bool,
- max_results: usize,
-) -> Vec<PathMatch> {
- let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
- let query = query.chars().collect::<Vec<_>>();
- let query_char_bag = CharBag::from(&lowercase_query[..]);
-
- let mut matcher = Matcher::new(
- &query,
- &lowercase_query,
- query_char_bag,
- smart_case,
- max_results,
- );
-
- let mut results = Vec::new();
- matcher.match_candidates(
- &[],
- &[],
- candidates.into_iter(),
- &mut results,
- &AtomicBool::new(false),
- |candidate, score| PathMatch {
- score,
- worktree_id,
- positions: Vec::new(),
- path: Arc::from(candidate.path),
- path_prefix: Arc::from(""),
- distance_to_relative_ancestor: usize::MAX,
- },
- );
- results
-}
-
-pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>(
- candidate_sets: &'a [Set],
- query: &str,
- relative_to: Option<Arc<Path>>,
- smart_case: bool,
- max_results: usize,
- cancel_flag: &AtomicBool,
- executor: BackgroundExecutor,
-) -> Vec<PathMatch> {
- let path_count: usize = candidate_sets.iter().map(|s| s.len()).sum();
- if path_count == 0 {
- return Vec::new();
- }
-
- let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
- let query = query.chars().collect::<Vec<_>>();
-
- let lowercase_query = &lowercase_query;
- let query = &query;
- let query_char_bag = CharBag::from(&lowercase_query[..]);
-
- let num_cpus = executor.num_cpus().min(path_count);
- let segment_size = (path_count + num_cpus - 1) / num_cpus;
- let mut segment_results = (0..num_cpus)
- .map(|_| Vec::with_capacity(max_results))
- .collect::<Vec<_>>();
-
- executor
- .scoped(|scope| {
- for (segment_idx, results) in segment_results.iter_mut().enumerate() {
- let relative_to = relative_to.clone();
- scope.spawn(async move {
- let segment_start = segment_idx * segment_size;
- let segment_end = segment_start + segment_size;
- let mut matcher = Matcher::new(
- query,
- lowercase_query,
- query_char_bag,
- smart_case,
- max_results,
- );
-
- let mut tree_start = 0;
- for candidate_set in candidate_sets {
- let tree_end = tree_start + candidate_set.len();
-
- if tree_start < segment_end && segment_start < tree_end {
- let start = cmp::max(tree_start, segment_start) - tree_start;
- let end = cmp::min(tree_end, segment_end) - tree_start;
- let candidates = candidate_set.candidates(start).take(end - start);
-
- let worktree_id = candidate_set.id();
- let prefix = candidate_set.prefix().chars().collect::<Vec<_>>();
- let lowercase_prefix = prefix
- .iter()
- .map(|c| c.to_ascii_lowercase())
- .collect::<Vec<_>>();
- matcher.match_candidates(
- &prefix,
- &lowercase_prefix,
- candidates,
- results,
- cancel_flag,
- |candidate, score| PathMatch {
- score,
- worktree_id,
- positions: Vec::new(),
- path: Arc::from(candidate.path),
- path_prefix: candidate_set.prefix(),
- distance_to_relative_ancestor: relative_to.as_ref().map_or(
- usize::MAX,
- |relative_to| {
- distance_between_paths(
- candidate.path.as_ref(),
- relative_to.as_ref(),
- )
- },
- ),
- },
- );
- }
- if tree_end >= segment_end {
- break;
- }
- tree_start = tree_end;
- }
- })
- }
- })
- .await;
-
- let mut results = Vec::new();
- for segment_result in segment_results {
- if results.is_empty() {
- results = segment_result;
- } else {
- util::extend_sorted(&mut results, segment_result, max_results, |a, b| b.cmp(a));
- }
- }
- results
-}
-
-/// Compute the distance from a given path to some other path
-/// If there is no shared path, returns usize::MAX
-fn distance_between_paths(path: &Path, relative_to: &Path) -> usize {
- let mut path_components = path.components();
- let mut relative_components = relative_to.components();
-
- while path_components
- .next()
- .zip(relative_components.next())
- .map(|(path_component, relative_component)| path_component == relative_component)
- .unwrap_or_default()
- {}
- path_components.count() + relative_components.count() + 1
-}
-
-#[cfg(test)]
-mod tests {
- use std::path::Path;
-
- use super::distance_between_paths;
-
- #[test]
- fn test_distance_between_paths_empty() {
- distance_between_paths(Path::new(""), Path::new(""));
- }
-}
@@ -1,187 +0,0 @@
-use crate::{
- matcher::{Match, MatchCandidate, Matcher},
- CharBag,
-};
-use gpui::BackgroundExecutor;
-use std::{
- borrow::Cow,
- cmp::{self, Ordering},
- iter,
- ops::Range,
- sync::atomic::AtomicBool,
-};
-
-#[derive(Clone, Debug)]
-pub struct StringMatchCandidate {
- pub id: usize,
- pub string: String,
- pub char_bag: CharBag,
-}
-
-impl Match for StringMatch {
- fn score(&self) -> f64 {
- self.score
- }
-
- fn set_positions(&mut self, positions: Vec<usize>) {
- self.positions = positions;
- }
-}
-
-impl StringMatchCandidate {
- pub fn new(id: usize, string: String) -> Self {
- Self {
- id,
- char_bag: CharBag::from(string.as_str()),
- string,
- }
- }
-}
-
-impl<'a> MatchCandidate for &'a StringMatchCandidate {
- fn has_chars(&self, bag: CharBag) -> bool {
- self.char_bag.is_superset(bag)
- }
-
- fn to_string(&self) -> Cow<'a, str> {
- self.string.as_str().into()
- }
-}
-
-#[derive(Clone, Debug)]
-pub struct StringMatch {
- pub candidate_id: usize,
- pub score: f64,
- pub positions: Vec<usize>,
- pub string: String,
-}
-
-impl StringMatch {
- pub fn ranges<'a>(&'a self) -> impl 'a + Iterator<Item = Range<usize>> {
- let mut positions = self.positions.iter().peekable();
- iter::from_fn(move || {
- while let Some(start) = positions.next().copied() {
- let mut end = start + self.char_len_at_index(start);
- while let Some(next_start) = positions.peek() {
- if end == **next_start {
- end += self.char_len_at_index(end);
- positions.next();
- } else {
- break;
- }
- }
-
- return Some(start..end);
- }
- None
- })
- }
-
- fn char_len_at_index(&self, ix: usize) -> usize {
- self.string[ix..].chars().next().unwrap().len_utf8()
- }
-}
-
-impl PartialEq for StringMatch {
- fn eq(&self, other: &Self) -> bool {
- self.cmp(other).is_eq()
- }
-}
-
-impl Eq for StringMatch {}
-
-impl PartialOrd for StringMatch {
- fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
- Some(self.cmp(other))
- }
-}
-
-impl Ord for StringMatch {
- fn cmp(&self, other: &Self) -> Ordering {
- self.score
- .partial_cmp(&other.score)
- .unwrap_or(Ordering::Equal)
- .then_with(|| self.candidate_id.cmp(&other.candidate_id))
- }
-}
-
-pub async fn match_strings(
- candidates: &[StringMatchCandidate],
- query: &str,
- smart_case: bool,
- max_results: usize,
- cancel_flag: &AtomicBool,
- executor: BackgroundExecutor,
-) -> Vec<StringMatch> {
- if candidates.is_empty() || max_results == 0 {
- return Default::default();
- }
-
- if query.is_empty() {
- return candidates
- .iter()
- .map(|candidate| StringMatch {
- candidate_id: candidate.id,
- score: 0.,
- positions: Default::default(),
- string: candidate.string.clone(),
- })
- .collect();
- }
-
- let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
- let query = query.chars().collect::<Vec<_>>();
-
- let lowercase_query = &lowercase_query;
- let query = &query;
- let query_char_bag = CharBag::from(&lowercase_query[..]);
-
- let num_cpus = executor.num_cpus().min(candidates.len());
- let segment_size = (candidates.len() + num_cpus - 1) / num_cpus;
- let mut segment_results = (0..num_cpus)
- .map(|_| Vec::with_capacity(max_results.min(candidates.len())))
- .collect::<Vec<_>>();
-
- executor
- .scoped(|scope| {
- for (segment_idx, results) in segment_results.iter_mut().enumerate() {
- let cancel_flag = &cancel_flag;
- scope.spawn(async move {
- let segment_start = cmp::min(segment_idx * segment_size, candidates.len());
- let segment_end = cmp::min(segment_start + segment_size, candidates.len());
- let mut matcher = Matcher::new(
- query,
- lowercase_query,
- query_char_bag,
- smart_case,
- max_results,
- );
-
- matcher.match_candidates(
- &[],
- &[],
- candidates[segment_start..segment_end].iter(),
- results,
- cancel_flag,
- |candidate, score| StringMatch {
- candidate_id: candidate.id,
- score,
- positions: Vec::new(),
- string: candidate.string.to_string(),
- },
- );
- });
- }
- })
- .await;
-
- let mut results = Vec::new();
- for segment_result in segment_results {
- if results.is_empty() {
- results = segment_result;
- } else {
- util::extend_sorted(&mut results, segment_result, max_results, |a, b| b.cmp(a));
- }
- }
- results
-}
@@ -24,16 +24,15 @@ test-support = [
[dependencies]
clock = { path = "../clock" }
collections = { path = "../collections" }
-fuzzy = { path = "../fuzzy" }
-fs = { path = "../fs" }
-git = { path = "../git" }
-gpui = { path = "../gpui" }
+fuzzy = { path = "../fuzzy" }
+git = { package = "git3", path = "../git3" }
+gpui = { package = "gpui2", path = "../gpui2" }
lsp = { path = "../lsp" }
-rpc = { path = "../rpc" }
-settings = { path = "../settings" }
+rpc = { package = "rpc2", path = "../rpc2" }
+settings = { package = "settings2", path = "../settings2" }
sum_tree = { path = "../sum_tree" }
-text = { path = "../text" }
-theme = { path = "../theme" }
+text = { package = "text2", path = "../text2" }
+theme = { package = "theme2", path = "../theme2" }
util = { path = "../util" }
anyhow.workspace = true
@@ -45,7 +44,6 @@ lazy_static.workspace = true
log.workspace = true
parking_lot.workspace = true
postage.workspace = true
-pulldown-cmark = { version = "0.9.2", default-features = false }
regex.workspace = true
schemars.workspace = true
serde.workspace = true
@@ -60,14 +58,15 @@ unicase = "2.6"
rand = { workspace = true, optional = true }
tree-sitter-rust = { workspace = true, optional = true }
tree-sitter-typescript = { workspace = true, optional = true }
+pulldown-cmark = { version = "0.9.2", default-features = false }
[dev-dependencies]
-client = { path = "../client", features = ["test-support"] }
+client = { package = "client2", path = "../client2", features = ["test-support"] }
collections = { path = "../collections", features = ["test-support"] }
-gpui = { path = "../gpui", features = ["test-support"] }
+gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
lsp = { path = "../lsp", features = ["test-support"] }
-text = { path = "../text", features = ["test-support"] }
-settings = { path = "../settings", features = ["test-support"] }
+text = { package = "text2", path = "../text2", features = ["test-support"] }
+settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
ctor.workspace = true
env_logger.workspace = true
@@ -18,7 +18,8 @@ use crate::{
use anyhow::{anyhow, Result};
pub use clock::ReplicaId;
use futures::channel::oneshot;
-use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, Task};
+use gpui::{AppContext, EventEmitter, HighlightStyle, ModelContext, Task, TaskLabel};
+use lazy_static::lazy_static;
use lsp::LanguageServerId;
use parking_lot::Mutex;
use similar::{ChangeTag, TextDiff};
@@ -52,14 +53,23 @@ pub use {tree_sitter_rust, tree_sitter_typescript};
pub use lsp::DiagnosticSeverity;
+lazy_static! {
+ pub static ref BUFFER_DIFF_TASK: TaskLabel = TaskLabel::new();
+}
+
pub struct Buffer {
text: TextBuffer,
diff_base: Option<String>,
git_diff: git::diff::BufferDiff,
file: Option<Arc<dyn File>>,
- saved_version: clock::Global,
- saved_version_fingerprint: RopeFingerprint,
+ /// The mtime of the file when this buffer was last loaded from
+ /// or saved to disk.
saved_mtime: SystemTime,
+ /// The version vector when this buffer was last loaded from
+ /// or saved to disk.
+ saved_version: clock::Global,
+ /// A hash of the current contents of the buffer's file.
+ file_fingerprint: RopeFingerprint,
transaction_depth: usize,
was_dirty_before_starting_transaction: Option<bool>,
reload_task: Option<Task<Result<()>>>,
@@ -190,8 +200,8 @@ pub struct Completion {
pub old_range: Range<Anchor>,
pub new_text: String,
pub label: CodeLabel,
- pub documentation: Option<Documentation>,
pub server_id: LanguageServerId,
+ pub documentation: Option<Documentation>,
pub lsp_completion: lsp::CompletionItem,
}
@@ -422,8 +432,7 @@ impl Buffer {
.ok_or_else(|| anyhow!("missing line_ending"))?,
));
this.saved_version = proto::deserialize_version(&message.saved_version);
- this.saved_version_fingerprint =
- proto::deserialize_fingerprint(&message.saved_version_fingerprint)?;
+ this.file_fingerprint = proto::deserialize_fingerprint(&message.saved_version_fingerprint)?;
this.saved_mtime = message
.saved_mtime
.ok_or_else(|| anyhow!("invalid saved_mtime"))?
@@ -439,7 +448,7 @@ impl Buffer {
diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
saved_version: proto::serialize_version(&self.saved_version),
- saved_version_fingerprint: proto::serialize_fingerprint(self.saved_version_fingerprint),
+ saved_version_fingerprint: proto::serialize_fingerprint(self.file_fingerprint),
saved_mtime: Some(self.saved_mtime.into()),
}
}
@@ -477,7 +486,7 @@ impl Buffer {
));
let text_operations = self.text.operations().clone();
- cx.background().spawn(async move {
+ cx.background_executor().spawn(async move {
let since = since.unwrap_or_default();
operations.extend(
text_operations
@@ -509,7 +518,7 @@ impl Buffer {
Self {
saved_mtime,
saved_version: buffer.version(),
- saved_version_fingerprint: buffer.as_rope().fingerprint(),
+ file_fingerprint: buffer.as_rope().fingerprint(),
reload_task: None,
transaction_depth: 0,
was_dirty_before_starting_transaction: None,
@@ -576,7 +585,7 @@ impl Buffer {
}
pub fn saved_version_fingerprint(&self) -> RopeFingerprint {
- self.saved_version_fingerprint
+ self.file_fingerprint
}
pub fn saved_mtime(&self) -> SystemTime {
@@ -604,7 +613,7 @@ impl Buffer {
cx: &mut ModelContext<Self>,
) {
self.saved_version = version;
- self.saved_version_fingerprint = fingerprint;
+ self.file_fingerprint = fingerprint;
self.saved_mtime = mtime;
cx.emit(Event::Saved);
cx.notify();
@@ -620,13 +629,14 @@ impl Buffer {
let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
let file = this.file.as_ref()?.as_local()?;
Some((file.mtime(), file.load(cx)))
- }) else {
+ })?
+ else {
return Ok(());
};
let new_text = new_text.await?;
let diff = this
- .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))
+ .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
.await;
this.update(&mut cx, |this, cx| {
if this.version() == diff.base_version {
@@ -652,8 +662,7 @@ impl Buffer {
}
this.reload_task.take();
- });
- Ok(())
+ })
}));
rx
}
@@ -667,14 +676,14 @@ impl Buffer {
cx: &mut ModelContext<Self>,
) {
self.saved_version = version;
- self.saved_version_fingerprint = fingerprint;
+ self.file_fingerprint = fingerprint;
self.text.set_line_ending(line_ending);
self.saved_mtime = mtime;
if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
file.buffer_reloaded(
self.remote_id(),
&self.saved_version,
- self.saved_version_fingerprint,
+ self.file_fingerprint,
self.line_ending(),
self.saved_mtime,
cx,
@@ -736,20 +745,18 @@ impl Buffer {
let snapshot = self.snapshot();
let mut diff = self.git_diff.clone();
- let diff = cx.background().spawn(async move {
+ let diff = cx.background_executor().spawn(async move {
diff.update(&diff_base, &snapshot).await;
diff
});
- let handle = cx.weak_handle();
- Some(cx.spawn_weak(|_, mut cx| async move {
+ Some(cx.spawn(|this, mut cx| async move {
let buffer_diff = diff.await;
- if let Some(this) = handle.upgrade(&mut cx) {
- this.update(&mut cx, |this, _| {
- this.git_diff = buffer_diff;
- this.git_diff_update_count += 1;
- })
- }
+ this.update(&mut cx, |this, _| {
+ this.git_diff = buffer_diff;
+ this.git_diff_update_count += 1;
+ })
+ .ok();
}))
}
@@ -847,7 +854,7 @@ impl Buffer {
let mut syntax_snapshot = syntax_map.snapshot();
drop(syntax_map);
- let parse_task = cx.background().spawn({
+ let parse_task = cx.background_executor().spawn({
let language = language.clone();
let language_registry = language_registry.clone();
async move {
@@ -857,7 +864,7 @@ impl Buffer {
});
match cx
- .background()
+ .background_executor()
.block_with_timeout(self.sync_parse_timeout, parse_task)
{
Ok(new_syntax_snapshot) => {
@@ -886,7 +893,8 @@ impl Buffer {
if parse_again {
this.reparse(cx);
}
- });
+ })
+ .ok();
})
.detach();
}
@@ -919,9 +927,9 @@ impl Buffer {
fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
if let Some(indent_sizes) = self.compute_autoindents() {
- let indent_sizes = cx.background().spawn(indent_sizes);
+ let indent_sizes = cx.background_executor().spawn(indent_sizes);
match cx
- .background()
+ .background_executor()
.block_with_timeout(Duration::from_micros(500), indent_sizes)
{
Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
@@ -930,7 +938,8 @@ impl Buffer {
let indent_sizes = indent_sizes.await;
this.update(&mut cx, |this, cx| {
this.apply_autoindents(indent_sizes, cx);
- });
+ })
+ .ok();
}));
}
}
@@ -1169,36 +1178,72 @@ impl Buffer {
pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
let old_text = self.as_rope().clone();
let base_version = self.version();
- cx.background().spawn(async move {
- let old_text = old_text.to_string();
- let line_ending = LineEnding::detect(&new_text);
- LineEnding::normalize(&mut new_text);
- let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
- let mut edits = Vec::new();
- let mut offset = 0;
- let empty: Arc<str> = "".into();
- for change in diff.iter_all_changes() {
- let value = change.value();
- let end_offset = offset + value.len();
- match change.tag() {
- ChangeTag::Equal => {
- offset = end_offset;
- }
- ChangeTag::Delete => {
- edits.push((offset..end_offset, empty.clone()));
- offset = end_offset;
+ cx.background_executor()
+ .spawn_labeled(*BUFFER_DIFF_TASK, async move {
+ let old_text = old_text.to_string();
+ let line_ending = LineEnding::detect(&new_text);
+ LineEnding::normalize(&mut new_text);
+
+ let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
+ let empty: Arc<str> = "".into();
+
+ let mut edits = Vec::new();
+ let mut old_offset = 0;
+ let mut new_offset = 0;
+ let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
+ for change in diff.iter_all_changes().map(Some).chain([None]) {
+ if let Some(change) = &change {
+ let len = change.value().len();
+ match change.tag() {
+ ChangeTag::Equal => {
+ old_offset += len;
+ new_offset += len;
+ }
+ ChangeTag::Delete => {
+ let old_end_offset = old_offset + len;
+ if let Some((last_old_range, _)) = &mut last_edit {
+ last_old_range.end = old_end_offset;
+ } else {
+ last_edit =
+ Some((old_offset..old_end_offset, new_offset..new_offset));
+ }
+ old_offset = old_end_offset;
+ }
+ ChangeTag::Insert => {
+ let new_end_offset = new_offset + len;
+ if let Some((_, last_new_range)) = &mut last_edit {
+ last_new_range.end = new_end_offset;
+ } else {
+ last_edit =
+ Some((old_offset..old_offset, new_offset..new_end_offset));
+ }
+ new_offset = new_end_offset;
+ }
+ }
}
- ChangeTag::Insert => {
- edits.push((offset..offset, value.into()));
+
+ if let Some((old_range, new_range)) = &last_edit {
+ if old_offset > old_range.end
+ || new_offset > new_range.end
+ || change.is_none()
+ {
+ let text = if new_range.is_empty() {
+ empty.clone()
+ } else {
+ new_text[new_range.clone()].into()
+ };
+ edits.push((old_range.clone(), text));
+ last_edit.take();
+ }
}
}
- }
- Diff {
- base_version,
- line_ending,
- edits,
- }
- })
+
+ Diff {
+ base_version,
+ line_ending,
+ edits,
+ }
+ })
}
/// Spawn a background task that searches the buffer for any whitespace
@@ -1207,7 +1252,7 @@ impl Buffer {
let old_text = self.as_rope().clone();
let line_ending = self.line_ending();
let base_version = self.version();
- cx.background().spawn(async move {
+ cx.background_executor().spawn(async move {
let ranges = trailing_whitespace_ranges(&old_text);
let empty = Arc::<str>::from("");
Diff {
@@ -1282,12 +1327,12 @@ impl Buffer {
}
pub fn is_dirty(&self) -> bool {
- self.saved_version_fingerprint != self.as_rope().fingerprint()
+ self.file_fingerprint != self.as_rope().fingerprint()
|| self.file.as_ref().map_or(false, |file| file.is_deleted())
}
pub fn has_conflict(&self) -> bool {
- self.saved_version_fingerprint != self.as_rope().fingerprint()
+ self.file_fingerprint != self.as_rope().fingerprint()
&& self
.file
.as_ref()
@@ -1458,95 +1503,82 @@ impl Buffer {
return None;
}
- // Non-generic part hoisted out to reduce LLVM IR size.
- fn tail(
- this: &mut Buffer,
- edits: Vec<(Range<usize>, Arc<str>)>,
- autoindent_mode: Option<AutoindentMode>,
- cx: &mut ModelContext<Buffer>,
- ) -> Option<clock::Lamport> {
- this.start_transaction();
- this.pending_autoindent.take();
- let autoindent_request = autoindent_mode
- .and_then(|mode| this.language.as_ref().map(|_| (this.snapshot(), mode)));
-
- let edit_operation = this.text.edit(edits.iter().cloned());
- let edit_id = edit_operation.timestamp();
-
- if let Some((before_edit, mode)) = autoindent_request {
- let mut delta = 0isize;
- let entries = edits
- .into_iter()
- .enumerate()
- .zip(&edit_operation.as_edit().unwrap().new_text)
- .map(|((ix, (range, _)), new_text)| {
- let new_text_length = new_text.len();
- let old_start = range.start.to_point(&before_edit);
- let new_start = (delta + range.start as isize) as usize;
- delta +=
- new_text_length as isize - (range.end as isize - range.start as isize);
-
- let mut range_of_insertion_to_indent = 0..new_text_length;
- let mut first_line_is_new = false;
- let mut original_indent_column = None;
-
- // When inserting an entire line at the beginning of an existing line,
- // treat the insertion as new.
- if new_text.contains('\n')
- && old_start.column
- <= before_edit.indent_size_for_line(old_start.row).len
- {
- first_line_is_new = true;
- }
+ self.start_transaction();
+ self.pending_autoindent.take();
+ let autoindent_request = autoindent_mode
+ .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
- // When inserting text starting with a newline, avoid auto-indenting the
- // previous line.
- if new_text.starts_with('\n') {
- range_of_insertion_to_indent.start += 1;
- first_line_is_new = true;
- }
+ let edit_operation = self.text.edit(edits.iter().cloned());
+ let edit_id = edit_operation.timestamp();
- // Avoid auto-indenting after the insertion.
- if let AutoindentMode::Block {
- original_indent_columns,
- } = &mode
- {
- original_indent_column = Some(
- original_indent_columns.get(ix).copied().unwrap_or_else(|| {
- indent_size_for_text(
- new_text[range_of_insertion_to_indent.clone()].chars(),
- )
- .len
- }),
- );
- if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
- range_of_insertion_to_indent.end -= 1;
- }
- }
+ if let Some((before_edit, mode)) = autoindent_request {
+ let mut delta = 0isize;
+ let entries = edits
+ .into_iter()
+ .enumerate()
+ .zip(&edit_operation.as_edit().unwrap().new_text)
+ .map(|((ix, (range, _)), new_text)| {
+ let new_text_length = new_text.len();
+ let old_start = range.start.to_point(&before_edit);
+ let new_start = (delta + range.start as isize) as usize;
+ delta += new_text_length as isize - (range.end as isize - range.start as isize);
+
+ let mut range_of_insertion_to_indent = 0..new_text_length;
+ let mut first_line_is_new = false;
+ let mut original_indent_column = None;
+
+ // When inserting an entire line at the beginning of an existing line,
+ // treat the insertion as new.
+ if new_text.contains('\n')
+ && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
+ {
+ first_line_is_new = true;
+ }
+
+ // When inserting text starting with a newline, avoid auto-indenting the
+ // previous line.
+ if new_text.starts_with('\n') {
+ range_of_insertion_to_indent.start += 1;
+ first_line_is_new = true;
+ }
- AutoindentRequestEntry {
- first_line_is_new,
- original_indent_column,
- indent_size: before_edit.language_indent_size_at(range.start, cx),
- range: this
- .anchor_before(new_start + range_of_insertion_to_indent.start)
- ..this.anchor_after(new_start + range_of_insertion_to_indent.end),
+ // Avoid auto-indenting after the insertion.
+ if let AutoindentMode::Block {
+ original_indent_columns,
+ } = &mode
+ {
+ original_indent_column =
+ Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
+ indent_size_for_text(
+ new_text[range_of_insertion_to_indent.clone()].chars(),
+ )
+ .len
+ }));
+ if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
+ range_of_insertion_to_indent.end -= 1;
}
- })
- .collect();
+ }
- this.autoindent_requests.push(Arc::new(AutoindentRequest {
- before_edit,
- entries,
- is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
- }));
- }
+ AutoindentRequestEntry {
+ first_line_is_new,
+ original_indent_column,
+ indent_size: before_edit.language_indent_size_at(range.start, cx),
+ range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
+ ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
+ }
+ })
+ .collect();
- this.end_transaction(cx);
- this.send_operation(Operation::Buffer(edit_operation), cx);
- Some(edit_id)
+ self.autoindent_requests.push(Arc::new(AutoindentRequest {
+ before_edit,
+ entries,
+ is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
+ }));
}
- tail(self, edits, autoindent_mode, cx)
+
+ self.end_transaction(cx);
+ self.send_operation(Operation::Buffer(edit_operation), cx);
+ Some(edit_id)
}
fn did_edit(
@@ -1879,9 +1911,7 @@ impl Buffer {
}
}
-impl Entity for Buffer {
- type Event = Event;
-}
+impl EventEmitter<Event> for Buffer {}
impl Deref for Buffer {
type Target = TextBuffer;
@@ -1,25 +1,25 @@
+use super::*;
use crate::language_settings::{
AllLanguageSettings, AllLanguageSettingsContent, LanguageSettingsContent,
};
-
-use super::*;
+use crate::Buffer;
use clock::ReplicaId;
use collections::BTreeMap;
-use gpui::{AppContext, ModelHandle};
+use gpui::{AppContext, Model};
+use gpui::{Context, TestAppContext};
use indoc::indoc;
use proto::deserialize_operation;
use rand::prelude::*;
use regex::RegexBuilder;
use settings::SettingsStore;
use std::{
- cell::RefCell,
env,
ops::Range,
- rc::Rc,
time::{Duration, Instant},
};
use text::network::Network;
use text::LineEnding;
+use text::{Point, ToPoint};
use unindent::Unindent as _;
use util::{assert_set_eq, post_inc, test::marked_text_ranges, RandomCharIter};
@@ -42,8 +42,8 @@ fn init_logger() {
fn test_line_endings(cx: &mut gpui::AppContext) {
init_settings(cx, |_| {});
- cx.add_model(|cx| {
- let mut buffer = Buffer::new(0, cx.model_id() as u64, "one\r\ntwo\rthree")
+ cx.new_model(|cx| {
+ let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), "one\r\ntwo\rthree")
.with_language(Arc::new(rust_lang()), cx);
assert_eq!(buffer.text(), "one\ntwo\nthree");
assert_eq!(buffer.line_ending(), LineEnding::Windows);
@@ -135,24 +135,24 @@ fn test_select_language() {
#[gpui::test]
fn test_edit_events(cx: &mut gpui::AppContext) {
let mut now = Instant::now();
- let buffer_1_events = Rc::new(RefCell::new(Vec::new()));
- let buffer_2_events = Rc::new(RefCell::new(Vec::new()));
+ let buffer_1_events = Arc::new(Mutex::new(Vec::new()));
+ let buffer_2_events = Arc::new(Mutex::new(Vec::new()));
- let buffer1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "abcdef"));
- let buffer2 = cx.add_model(|cx| Buffer::new(1, cx.model_id() as u64, "abcdef"));
- let buffer1_ops = Rc::new(RefCell::new(Vec::new()));
+ let buffer1 = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "abcdef"));
+ let buffer2 = cx.new_model(|cx| Buffer::new(1, cx.entity_id().as_u64(), "abcdef"));
+ let buffer1_ops = Arc::new(Mutex::new(Vec::new()));
buffer1.update(cx, {
let buffer1_ops = buffer1_ops.clone();
|buffer, cx| {
let buffer_1_events = buffer_1_events.clone();
cx.subscribe(&buffer1, move |_, _, event, _| match event.clone() {
- Event::Operation(op) => buffer1_ops.borrow_mut().push(op),
- event => buffer_1_events.borrow_mut().push(event),
+ Event::Operation(op) => buffer1_ops.lock().push(op),
+ event => buffer_1_events.lock().push(event),
})
.detach();
let buffer_2_events = buffer_2_events.clone();
cx.subscribe(&buffer2, move |_, _, event, _| {
- buffer_2_events.borrow_mut().push(event.clone())
+ buffer_2_events.lock().push(event.clone())
})
.detach();
@@ -179,12 +179,10 @@ fn test_edit_events(cx: &mut gpui::AppContext) {
// Incorporating a set of remote ops emits a single edited event,
// followed by a dirty changed event.
buffer2.update(cx, |buffer, cx| {
- buffer
- .apply_ops(buffer1_ops.borrow_mut().drain(..), cx)
- .unwrap();
+ buffer.apply_ops(buffer1_ops.lock().drain(..), cx).unwrap();
});
assert_eq!(
- mem::take(&mut *buffer_1_events.borrow_mut()),
+ mem::take(&mut *buffer_1_events.lock()),
vec![
Event::Edited,
Event::DirtyChanged,
@@ -193,7 +191,7 @@ fn test_edit_events(cx: &mut gpui::AppContext) {
]
);
assert_eq!(
- mem::take(&mut *buffer_2_events.borrow_mut()),
+ mem::take(&mut *buffer_2_events.lock()),
vec![Event::Edited, Event::DirtyChanged]
);
@@ -205,28 +203,26 @@ fn test_edit_events(cx: &mut gpui::AppContext) {
// Incorporating the remote ops again emits a single edited event,
// followed by a dirty changed event.
buffer2.update(cx, |buffer, cx| {
- buffer
- .apply_ops(buffer1_ops.borrow_mut().drain(..), cx)
- .unwrap();
+ buffer.apply_ops(buffer1_ops.lock().drain(..), cx).unwrap();
});
assert_eq!(
- mem::take(&mut *buffer_1_events.borrow_mut()),
+ mem::take(&mut *buffer_1_events.lock()),
vec![Event::Edited, Event::DirtyChanged,]
);
assert_eq!(
- mem::take(&mut *buffer_2_events.borrow_mut()),
+ mem::take(&mut *buffer_2_events.lock()),
vec![Event::Edited, Event::DirtyChanged]
);
}
#[gpui::test]
-async fn test_apply_diff(cx: &mut gpui::TestAppContext) {
+async fn test_apply_diff(cx: &mut TestAppContext) {
let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
- let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text));
- let anchor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(3, 3)));
+ let buffer = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), text));
+ let anchor = buffer.update(cx, |buffer, _| buffer.anchor_before(Point::new(3, 3)));
let text = "a\nccc\ndddd\nffffff\n";
- let diff = buffer.read_with(cx, |b, cx| b.diff(text.into(), cx)).await;
+ let diff = buffer.update(cx, |b, cx| b.diff(text.into(), cx)).await;
buffer.update(cx, |buffer, cx| {
buffer.apply_diff(diff, cx).unwrap();
assert_eq!(buffer.text(), text);
@@ -234,7 +230,7 @@ async fn test_apply_diff(cx: &mut gpui::TestAppContext) {
});
let text = "a\n1\n\nccc\ndd2dd\nffffff\n";
- let diff = buffer.read_with(cx, |b, cx| b.diff(text.into(), cx)).await;
+ let diff = buffer.update(cx, |b, cx| b.diff(text.into(), cx)).await;
buffer.update(cx, |buffer, cx| {
buffer.apply_diff(diff, cx).unwrap();
assert_eq!(buffer.text(), text);
@@ -254,15 +250,15 @@ async fn test_normalize_whitespace(cx: &mut gpui::TestAppContext) {
]
.join("\n");
- let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text));
+ let buffer = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), text));
// Spawn a task to format the buffer's whitespace.
// Pause so that the foratting task starts running.
- let format = buffer.read_with(cx, |buffer, cx| buffer.remove_trailing_whitespace(cx));
+ let format = buffer.update(cx, |buffer, cx| buffer.remove_trailing_whitespace(cx));
smol::future::yield_now().await;
// Edit the buffer while the normalization task is running.
- let version_before_edit = buffer.read_with(cx, |buffer, _| buffer.version());
+ let version_before_edit = buffer.update(cx, |buffer, _| buffer.version());
buffer.update(cx, |buffer, cx| {
buffer.edit(
[
@@ -318,12 +314,13 @@ async fn test_normalize_whitespace(cx: &mut gpui::TestAppContext) {
#[gpui::test]
async fn test_reparse(cx: &mut gpui::TestAppContext) {
let text = "fn a() {}";
- let buffer = cx.add_model(|cx| {
- Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
+ let buffer = cx.new_model(|cx| {
+ Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx)
});
// Wait for the initial text to parse
- buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
+ cx.executor().run_until_parked();
+ assert!(!buffer.update(cx, |buffer, _| buffer.is_parsing()));
assert_eq!(
get_tree_sexp(&buffer, cx),
concat!(
@@ -354,7 +351,8 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
assert_eq!(buf.text(), "fn a(b: C) { d; }");
assert!(buf.is_parsing());
});
- buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
+ cx.executor().run_until_parked();
+ assert!(!buffer.update(cx, |buffer, _| buffer.is_parsing()));
assert_eq!(
get_tree_sexp(&buffer, cx),
concat!(
@@ -386,7 +384,7 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
assert!(buf.is_parsing());
});
- buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
+ cx.executor().run_until_parked();
assert_eq!(
get_tree_sexp(&buffer, cx),
concat!(
@@ -408,7 +406,8 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
assert_eq!(buf.text(), "fn a() {}");
assert!(buf.is_parsing());
});
- buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
+
+ cx.executor().run_until_parked();
assert_eq!(
get_tree_sexp(&buffer, cx),
concat!(
@@ -426,7 +425,7 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
assert!(buf.is_parsing());
});
- buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
+ cx.executor().run_until_parked();
assert_eq!(
get_tree_sexp(&buffer, cx),
concat!(
@@ -443,15 +442,15 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
#[gpui::test]
async fn test_resetting_language(cx: &mut gpui::TestAppContext) {
- let buffer = cx.add_model(|cx| {
+ let buffer = cx.new_model(|cx| {
let mut buffer =
- Buffer::new(0, cx.model_id() as u64, "{}").with_language(Arc::new(rust_lang()), cx);
+ Buffer::new(0, cx.entity_id().as_u64(), "{}").with_language(Arc::new(rust_lang()), cx);
buffer.set_sync_parse_timeout(Duration::ZERO);
buffer
});
// Wait for the initial text to parse
- buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
+ cx.executor().run_until_parked();
assert_eq!(
get_tree_sexp(&buffer, cx),
"(source_file (expression_statement (block)))"
@@ -460,7 +459,7 @@ async fn test_resetting_language(cx: &mut gpui::TestAppContext) {
buffer.update(cx, |buffer, cx| {
buffer.set_language(Some(Arc::new(json_lang())), cx)
});
- buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
+ cx.executor().run_until_parked();
assert_eq!(get_tree_sexp(&buffer, cx), "(document (object))");
}
@@ -493,11 +492,11 @@ async fn test_outline(cx: &mut gpui::TestAppContext) {
"#
.unindent();
- let buffer = cx.add_model(|cx| {
- Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
+ let buffer = cx.new_model(|cx| {
+ Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx)
});
let outline = buffer
- .read_with(cx, |buffer, _| buffer.snapshot().outline(None))
+ .update(cx, |buffer, _| buffer.snapshot().outline(None))
.unwrap();
assert_eq!(
@@ -560,7 +559,7 @@ async fn test_outline(cx: &mut gpui::TestAppContext) {
cx: &'a gpui::TestAppContext,
) -> Vec<(&'a str, Vec<usize>)> {
let matches = cx
- .read(|cx| outline.search(query, cx.background().clone()))
+ .update(|cx| outline.search(query, cx.background_executor().clone()))
.await;
matches
.into_iter()
@@ -579,11 +578,11 @@ async fn test_outline_nodes_with_newlines(cx: &mut gpui::TestAppContext) {
"#
.unindent();
- let buffer = cx.add_model(|cx| {
- Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
+ let buffer = cx.new_model(|cx| {
+ Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx)
});
let outline = buffer
- .read_with(cx, |buffer, _| buffer.snapshot().outline(None))
+ .update(cx, |buffer, _| buffer.snapshot().outline(None))
.unwrap();
assert_eq!(
@@ -617,10 +616,10 @@ async fn test_outline_with_extra_context(cx: &mut gpui::TestAppContext) {
"#
.unindent();
- let buffer = cx.add_model(|cx| {
- Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(language), cx)
+ let buffer = cx.new_model(|cx| {
+ Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(language), cx)
});
- let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
+ let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
// extra context nodes are included in the outline.
let outline = snapshot.outline(None).unwrap();
@@ -661,10 +660,10 @@ async fn test_symbols_containing(cx: &mut gpui::TestAppContext) {
"#
.unindent();
- let buffer = cx.add_model(|cx| {
- Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
+ let buffer = cx.new_model(|cx| {
+ Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx)
});
- let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
+ let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
// point is at the start of an item
assert_eq!(
@@ -882,10 +881,10 @@ fn test_enclosing_bracket_ranges_where_brackets_are_not_outermost_children(cx: &
#[gpui::test]
fn test_range_for_syntax_ancestor(cx: &mut AppContext) {
- cx.add_model(|cx| {
+ cx.new_model(|cx| {
let text = "fn a() { b(|c| {}) }";
let buffer =
- Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
+ Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
let snapshot = buffer.snapshot();
assert_eq!(
@@ -923,10 +922,10 @@ fn test_range_for_syntax_ancestor(cx: &mut AppContext) {
fn test_autoindent_with_soft_tabs(cx: &mut AppContext) {
init_settings(cx, |_| {});
- cx.add_model(|cx| {
+ cx.new_model(|cx| {
let text = "fn a() {}";
let mut buffer =
- Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
+ Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx);
assert_eq!(buffer.text(), "fn a() {\n \n}");
@@ -966,10 +965,10 @@ fn test_autoindent_with_hard_tabs(cx: &mut AppContext) {
settings.defaults.hard_tabs = Some(true);
});
- cx.add_model(|cx| {
+ cx.new_model(|cx| {
let text = "fn a() {}";
let mut buffer =
- Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
+ Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx);
assert_eq!(buffer.text(), "fn a() {\n\t\n}");
@@ -1007,10 +1006,11 @@ fn test_autoindent_with_hard_tabs(cx: &mut AppContext) {
fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppContext) {
init_settings(cx, |_| {});
- cx.add_model(|cx| {
+ cx.new_model(|cx| {
+ let entity_id = cx.entity_id();
let mut buffer = Buffer::new(
0,
- cx.model_id() as u64,
+ entity_id.as_u64(),
"
fn a() {
c;
@@ -1080,10 +1080,12 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
buffer
});
- cx.add_model(|cx| {
+ cx.new_model(|cx| {
+ eprintln!("second buffer: {:?}", cx.entity_id());
+
let mut buffer = Buffer::new(
0,
- cx.model_id() as u64,
+ cx.entity_id().as_u64(),
"
fn a() {
b();
@@ -1137,16 +1139,18 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
);
buffer
});
+
+ eprintln!("DONE");
}
#[gpui::test]
fn test_autoindent_does_not_adjust_lines_within_newly_created_errors(cx: &mut AppContext) {
init_settings(cx, |_| {});
- cx.add_model(|cx| {
+ cx.new_model(|cx| {
let mut buffer = Buffer::new(
0,
- cx.model_id() as u64,
+ cx.entity_id().as_u64(),
"
fn a() {
i
@@ -1205,10 +1209,10 @@ fn test_autoindent_does_not_adjust_lines_within_newly_created_errors(cx: &mut Ap
fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut AppContext) {
init_settings(cx, |_| {});
- cx.add_model(|cx| {
+ cx.new_model(|cx| {
let mut buffer = Buffer::new(
0,
- cx.model_id() as u64,
+ cx.entity_id().as_u64(),
"
fn a() {}
"
@@ -1262,10 +1266,10 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut AppContext) {
fn test_autoindent_with_edit_at_end_of_buffer(cx: &mut AppContext) {
init_settings(cx, |_| {});
- cx.add_model(|cx| {
+ cx.new_model(|cx| {
let text = "a\nb";
let mut buffer =
- Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
+ Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
buffer.edit(
[(0..1, "\n"), (2..3, "\n")],
Some(AutoindentMode::EachLine),
@@ -1280,7 +1284,7 @@ fn test_autoindent_with_edit_at_end_of_buffer(cx: &mut AppContext) {
fn test_autoindent_multi_line_insertion(cx: &mut AppContext) {
init_settings(cx, |_| {});
- cx.add_model(|cx| {
+ cx.new_model(|cx| {
let text = "
const a: usize = 1;
fn b() {
@@ -1292,7 +1296,7 @@ fn test_autoindent_multi_line_insertion(cx: &mut AppContext) {
.unindent();
let mut buffer =
- Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
+ Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
buffer.edit(
[(Point::new(3, 0)..Point::new(3, 0), "e(\n f()\n);\n")],
Some(AutoindentMode::EachLine),
@@ -1322,7 +1326,7 @@ fn test_autoindent_multi_line_insertion(cx: &mut AppContext) {
fn test_autoindent_block_mode(cx: &mut AppContext) {
init_settings(cx, |_| {});
- cx.add_model(|cx| {
+ cx.new_model(|cx| {
let text = r#"
fn a() {
b();
@@ -1330,7 +1334,7 @@ fn test_autoindent_block_mode(cx: &mut AppContext) {
"#
.unindent();
let mut buffer =
- Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
+ Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
// When this text was copied, both of the quotation marks were at the same
// indent level, but the indentation of the first line was not included in
@@ -1406,7 +1410,7 @@ fn test_autoindent_block_mode(cx: &mut AppContext) {
fn test_autoindent_block_mode_without_original_indent_columns(cx: &mut AppContext) {
init_settings(cx, |_| {});
- cx.add_model(|cx| {
+ cx.new_model(|cx| {
let text = r#"
fn a() {
if b() {
@@ -1416,7 +1420,7 @@ fn test_autoindent_block_mode_without_original_indent_columns(cx: &mut AppContex
"#
.unindent();
let mut buffer =
- Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
+ Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
// The original indent columns are not known, so this text is
// auto-indented in a block as if the first line was copied in
@@ -1486,7 +1490,7 @@ fn test_autoindent_block_mode_without_original_indent_columns(cx: &mut AppContex
fn test_autoindent_language_without_indents_query(cx: &mut AppContext) {
init_settings(cx, |_| {});
- cx.add_model(|cx| {
+ cx.new_model(|cx| {
let text = "
* one
- a
@@ -1495,7 +1499,7 @@ fn test_autoindent_language_without_indents_query(cx: &mut AppContext) {
"
.unindent();
- let mut buffer = Buffer::new(0, cx.model_id() as u64, text).with_language(
+ let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), text).with_language(
Arc::new(Language::new(
LanguageConfig {
name: "Markdown".into(),
@@ -1555,7 +1559,7 @@ fn test_autoindent_with_injected_languages(cx: &mut AppContext) {
language_registry.add(html_language.clone());
language_registry.add(javascript_language.clone());
- cx.add_model(|cx| {
+ cx.new_model(|cx| {
let (text, ranges) = marked_text_ranges(
&"
<div>ˇ
@@ -1571,7 +1575,7 @@ fn test_autoindent_with_injected_languages(cx: &mut AppContext) {
false,
);
- let mut buffer = Buffer::new(0, cx.model_id() as u64, text);
+ let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), text);
buffer.set_language_registry(language_registry);
buffer.set_language(Some(html_language), cx);
buffer.edit(
@@ -1606,9 +1610,9 @@ fn test_autoindent_query_with_outdent_captures(cx: &mut AppContext) {
settings.defaults.tab_size = Some(2.try_into().unwrap());
});
- cx.add_model(|cx| {
+ cx.new_model(|cx| {
let mut buffer =
- Buffer::new(0, cx.model_id() as u64, "").with_language(Arc::new(ruby_lang()), cx);
+ Buffer::new(0, cx.entity_id().as_u64(), "").with_language(Arc::new(ruby_lang()), cx);
let text = r#"
class C
@@ -1649,7 +1653,7 @@ fn test_autoindent_query_with_outdent_captures(cx: &mut AppContext) {
fn test_language_scope_at_with_javascript(cx: &mut AppContext) {
init_settings(cx, |_| {});
- cx.add_model(|cx| {
+ cx.new_model(|cx| {
let language = Language::new(
LanguageConfig {
name: "JavaScript".into(),
@@ -1710,7 +1714,7 @@ fn test_language_scope_at_with_javascript(cx: &mut AppContext) {
.unindent();
let buffer =
- Buffer::new(0, cx.model_id() as u64, &text).with_language(Arc::new(language), cx);
+ Buffer::new(0, cx.entity_id().as_u64(), &text).with_language(Arc::new(language), cx);
let snapshot = buffer.snapshot();
let config = snapshot.language_scope_at(0).unwrap();
@@ -1782,7 +1786,7 @@ fn test_language_scope_at_with_javascript(cx: &mut AppContext) {
fn test_language_scope_at_with_rust(cx: &mut AppContext) {
init_settings(cx, |_| {});
- cx.add_model(|cx| {
+ cx.new_model(|cx| {
let language = Language::new(
LanguageConfig {
name: "Rust".into(),
@@ -1822,7 +1826,7 @@ fn test_language_scope_at_with_rust(cx: &mut AppContext) {
"#
.unindent();
- let buffer = Buffer::new(0, cx.model_id() as u64, text.clone())
+ let buffer = Buffer::new(0, cx.entity_id().as_u64(), text.clone())
.with_language(Arc::new(language), cx);
let snapshot = buffer.snapshot();
@@ -1850,7 +1854,7 @@ fn test_language_scope_at_with_rust(cx: &mut AppContext) {
fn test_language_scope_at_with_combined_injections(cx: &mut AppContext) {
init_settings(cx, |_| {});
- cx.add_model(|cx| {
+ cx.new_model(|cx| {
let text = r#"
<ol>
<% people.each do |person| %>
@@ -1867,7 +1871,7 @@ fn test_language_scope_at_with_combined_injections(cx: &mut AppContext) {
language_registry.add(Arc::new(html_lang()));
language_registry.add(Arc::new(erb_lang()));
- let mut buffer = Buffer::new(0, cx.model_id() as u64, text);
+ let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), text);
buffer.set_language_registry(language_registry.clone());
buffer.set_language(
language_registry
@@ -1898,8 +1902,8 @@ fn test_language_scope_at_with_combined_injections(cx: &mut AppContext) {
fn test_serialization(cx: &mut gpui::AppContext) {
let mut now = Instant::now();
- let buffer1 = cx.add_model(|cx| {
- let mut buffer = Buffer::new(0, cx.model_id() as u64, "abc");
+ let buffer1 = cx.new_model(|cx| {
+ let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), "abc");
buffer.edit([(3..3, "D")], None, cx);
now += Duration::from_secs(1);
@@ -1919,9 +1923,9 @@ fn test_serialization(cx: &mut gpui::AppContext) {
let state = buffer1.read(cx).to_proto();
let ops = cx
- .background()
+ .background_executor()
.block(buffer1.read(cx).serialize_ops(None, cx));
- let buffer2 = cx.add_model(|cx| {
+ let buffer2 = cx.new_model(|cx| {
let mut buffer = Buffer::from_proto(1, state, None).unwrap();
buffer
.apply_ops(
@@ -1953,14 +1957,15 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
.collect::<String>();
let mut replica_ids = Vec::new();
let mut buffers = Vec::new();
- let network = Rc::new(RefCell::new(Network::new(rng.clone())));
- let base_buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, base_text.as_str()));
+ let network = Arc::new(Mutex::new(Network::new(rng.clone())));
+ let base_buffer =
+ cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), base_text.as_str()));
for i in 0..rng.gen_range(min_peers..=max_peers) {
- let buffer = cx.add_model(|cx| {
+ let buffer = cx.new_model(|cx| {
let state = base_buffer.read(cx).to_proto();
let ops = cx
- .background()
+ .background_executor()
.block(base_buffer.read(cx).serialize_ops(None, cx));
let mut buffer = Buffer::from_proto(i as ReplicaId, state, None).unwrap();
buffer
@@ -1975,16 +1980,17 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
cx.subscribe(&cx.handle(), move |buffer, _, event, _| {
if let Event::Operation(op) = event {
network
- .borrow_mut()
+ .lock()
.broadcast(buffer.replica_id(), vec![proto::serialize_operation(op)]);
}
})
.detach();
buffer
});
+
buffers.push(buffer);
replica_ids.push(i as ReplicaId);
- network.borrow_mut().add_peer(i as ReplicaId);
+ network.lock().add_peer(i as ReplicaId);
log::info!("Adding initial peer with replica id {}", i);
}
@@ -2065,7 +2071,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
50..=59 if replica_ids.len() < max_peers => {
let old_buffer_state = buffer.read(cx).to_proto();
let old_buffer_ops = cx
- .background()
+ .background_executor()
.block(buffer.read(cx).serialize_ops(None, cx));
let new_replica_id = (0..=replica_ids.len() as ReplicaId)
.filter(|replica_id| *replica_id != buffer.read(cx).replica_id())
@@ -2076,7 +2082,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
new_replica_id,
replica_id
);
- new_buffer = Some(cx.add_model(|cx| {
+ new_buffer = Some(cx.new_model(|cx| {
let mut new_buffer =
Buffer::from_proto(new_replica_id, old_buffer_state, None).unwrap();
new_buffer
@@ -2096,7 +2102,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
let network = network.clone();
cx.subscribe(&cx.handle(), move |buffer, _, event, _| {
if let Event::Operation(op) = event {
- network.borrow_mut().broadcast(
+ network.lock().broadcast(
buffer.replica_id(),
vec![proto::serialize_operation(op)],
);
@@ -2105,15 +2111,15 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
.detach();
new_buffer
}));
- network.borrow_mut().replicate(replica_id, new_replica_id);
+ network.lock().replicate(replica_id, new_replica_id);
if new_replica_id as usize == replica_ids.len() {
replica_ids.push(new_replica_id);
} else {
let new_buffer = new_buffer.take().unwrap();
- while network.borrow().has_unreceived(new_replica_id) {
+ while network.lock().has_unreceived(new_replica_id) {
let ops = network
- .borrow_mut()
+ .lock()
.receive(new_replica_id)
.into_iter()
.map(|op| proto::deserialize_operation(op).unwrap());
@@ -2140,9 +2146,9 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
});
mutation_count -= 1;
}
- _ if network.borrow().has_unreceived(replica_id) => {
+ _ if network.lock().has_unreceived(replica_id) => {
let ops = network
- .borrow_mut()
+ .lock()
.receive(replica_id)
.into_iter()
.map(|op| proto::deserialize_operation(op).unwrap());
@@ -2167,7 +2173,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
buffer.read(cx).check_invariants();
}
- if mutation_count == 0 && network.borrow().is_idle() {
+ if mutation_count == 0 && network.lock().is_idle() {
break;
}
}
@@ -2438,8 +2444,8 @@ fn javascript_lang() -> Language {
.unwrap()
}
-fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> String {
- buffer.read_with(cx, |buffer, _| {
+fn get_tree_sexp(buffer: &Model<Buffer>, cx: &mut gpui::TestAppContext) -> String {
+ buffer.update(cx, |buffer, _| {
let snapshot = buffer.snapshot();
let layers = snapshot.syntax.layers(buffer.as_text_snapshot());
layers[0].node().to_sexp()
@@ -2454,8 +2460,8 @@ fn assert_bracket_pairs(
cx: &mut AppContext,
) {
let (expected_text, selection_ranges) = marked_text_ranges(selection_text, false);
- let buffer = cx.add_model(|cx| {
- Buffer::new(0, cx.model_id() as u64, expected_text.clone())
+ let buffer = cx.new_model(|cx| {
+ Buffer::new(0, cx.entity_id().as_u64(), expected_text.clone())
.with_language(Arc::new(language), cx)
});
let buffer = buffer.update(cx, |buffer, _cx| buffer.snapshot());
@@ -2478,9 +2484,10 @@ fn assert_bracket_pairs(
}
fn init_settings(cx: &mut AppContext, f: fn(&mut AllLanguageSettingsContent)) {
- cx.set_global(SettingsStore::test(cx));
+ let settings_store = SettingsStore::test(cx);
+ cx.set_global(settings_store);
crate::init(cx);
- cx.update_global::<SettingsStore, _, _>(|settings, cx| {
+ cx.update_global::<SettingsStore, _>(|settings, cx| {
settings.update_user_settings::<AllLanguageSettings>(cx, f);
});
}
@@ -1,4 +1,4 @@
-use gpui::fonts::HighlightStyle;
+use gpui::HighlightStyle;
use std::sync::Arc;
use theme::SyntaxTheme;
@@ -79,23 +79,23 @@ impl Default for HighlightId {
#[cfg(test)]
mod tests {
use super::*;
- use gpui::color::Color;
+ use gpui::rgba;
#[test]
fn test_highlight_map() {
- let theme = SyntaxTheme::new(
- [
- ("function", Color::from_u32(0x100000ff)),
- ("function.method", Color::from_u32(0x200000ff)),
- ("function.async", Color::from_u32(0x300000ff)),
- ("variable.builtin.self.rust", Color::from_u32(0x400000ff)),
- ("variable.builtin", Color::from_u32(0x500000ff)),
- ("variable", Color::from_u32(0x600000ff)),
+ let theme = SyntaxTheme {
+ highlights: [
+ ("function", rgba(0x100000ff)),
+ ("function.method", rgba(0x200000ff)),
+ ("function.async", rgba(0x300000ff)),
+ ("variable.builtin.self.rust", rgba(0x400000ff)),
+ ("variable.builtin", rgba(0x500000ff)),
+ ("variable", rgba(0x600000ff)),
]
.iter()
.map(|(name, color)| (name.to_string(), (*color).into()))
.collect(),
- );
+ };
let capture_names = &[
"function.special",
@@ -2,13 +2,13 @@ mod buffer;
mod diagnostic_set;
mod highlight_map;
pub mod language_settings;
-pub mod markdown;
mod outline;
pub mod proto;
mod syntax_map;
#[cfg(test)]
mod buffer_tests;
+pub mod markdown;
use anyhow::{anyhow, Context, Result};
use async_trait::async_trait;
@@ -18,7 +18,7 @@ use futures::{
future::{BoxFuture, Shared},
FutureExt, TryFutureExt as _,
};
-use gpui::{executor::Background, AppContext, AsyncAppContext, Task};
+use gpui::{AppContext, AsyncAppContext, BackgroundExecutor, Task};
pub use highlight_map::HighlightMap;
use lazy_static::lazy_static;
use lsp::{CodeActionKind, LanguageServerBinary};
@@ -44,7 +44,7 @@ use std::{
};
use syntax_map::SyntaxSnapshot;
use theme::{SyntaxTheme, Theme};
-use tree_sitter::{self, Query};
+use tree_sitter::{self, wasmtime, Query, WasmStore};
use unicase::UniCase;
use util::{http::HttpClient, paths::PathExt};
use util::{post_inc, ResultExt, TryFutureExt as _, UnwrapFuture};
@@ -84,10 +84,15 @@ impl LspBinaryStatusSender {
}
thread_local! {
- static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
+ static PARSER: RefCell<Parser> = {
+ let mut parser = Parser::new();
+ parser.set_wasm_store(WasmStore::new(WASM_ENGINE.clone()).unwrap()).unwrap();
+ RefCell::new(parser)
+ };
}
lazy_static! {
+ pub static ref WASM_ENGINE: wasmtime::Engine = wasmtime::Engine::default();
pub static ref NEXT_GRAMMAR_ID: AtomicUsize = Default::default();
pub static ref PLAIN_TEXT: Arc<Language> = Arc::new(Language::new(
LanguageConfig {
@@ -111,6 +116,7 @@ pub struct LanguageServerName(pub Arc<str>);
pub struct CachedLspAdapter {
pub name: LanguageServerName,
pub short_name: &'static str,
+ pub initialization_options: Option<Value>,
pub disk_based_diagnostic_sources: Vec<String>,
pub disk_based_diagnostics_progress_token: Option<String>,
pub language_ids: HashMap<String, String>,
@@ -122,6 +128,7 @@ impl CachedLspAdapter {
pub async fn new(adapter: Arc<dyn LspAdapter>) -> Arc<Self> {
let name = adapter.name().await;
let short_name = adapter.short_name();
+ let initialization_options = adapter.initialization_options().await;
let disk_based_diagnostic_sources = adapter.disk_based_diagnostic_sources().await;
let disk_based_diagnostics_progress_token =
adapter.disk_based_diagnostics_progress_token().await;
@@ -130,6 +137,7 @@ impl CachedLspAdapter {
Arc::new(CachedLspAdapter {
name,
short_name,
+ initialization_options,
disk_based_diagnostic_sources,
disk_based_diagnostics_progress_token,
language_ids,
@@ -357,6 +365,7 @@ pub struct CodeLabel {
#[derive(Clone, Deserialize)]
pub struct LanguageConfig {
pub name: Arc<str>,
+ pub grammar_name: Option<Arc<str>>,
pub path_suffixes: Vec<String>,
pub brackets: BracketPairConfig,
#[serde(default, deserialize_with = "deserialize_regex")]
@@ -443,6 +452,7 @@ impl Default for LanguageConfig {
fn default() -> Self {
Self {
name: "".into(),
+ grammar_name: None,
path_suffixes: Default::default(),
brackets: Default::default(),
auto_indent_using_last_non_empty_line: auto_indent_using_last_non_empty_line_default(),
@@ -617,14 +627,25 @@ type AvailableLanguageId = usize;
#[derive(Clone)]
struct AvailableLanguage {
id: AvailableLanguageId,
- path: &'static str,
config: LanguageConfig,
- grammar: tree_sitter::Language,
+ grammar: AvailableGrammar,
lsp_adapters: Vec<Arc<dyn LspAdapter>>,
- get_queries: fn(&str) -> LanguageQueries,
loaded: bool,
}
+#[derive(Clone)]
+enum AvailableGrammar {
+ Native {
+ grammar: tree_sitter::Language,
+ asset_dir: &'static str,
+ get_queries: fn(&str) -> LanguageQueries,
+ },
+ Wasm {
+ grammar_name: Arc<str>,
+ path: Arc<Path>,
+ },
+}
+
pub struct LanguageRegistry {
state: RwLock<LanguageRegistryState>,
language_server_download_dir: Option<Arc<Path>>,
@@ -633,7 +654,7 @@ pub struct LanguageRegistry {
lsp_binary_paths: Mutex<
HashMap<LanguageServerName, Shared<Task<Result<LanguageServerBinary, Arc<anyhow::Error>>>>>,
>,
- executor: Option<Arc<Background>>,
+ executor: Option<BackgroundExecutor>,
lsp_binary_status_tx: LspBinaryStatusSender,
}
@@ -682,7 +703,7 @@ impl LanguageRegistry {
Self::new(Task::ready(()))
}
- pub fn set_executor(&mut self, executor: Arc<Background>) {
+ pub fn set_executor(&mut self, executor: BackgroundExecutor) {
self.executor = Some(executor);
}
@@ -696,7 +717,7 @@ impl LanguageRegistry {
pub fn register(
&self,
- path: &'static str,
+ asset_dir: &'static str,
config: LanguageConfig,
grammar: tree_sitter::Language,
lsp_adapters: Vec<Arc<dyn LspAdapter>>,
@@ -705,11 +726,24 @@ impl LanguageRegistry {
let state = &mut *self.state.write();
state.available_languages.push(AvailableLanguage {
id: post_inc(&mut state.next_available_language_id),
- path,
config,
- grammar,
+ grammar: AvailableGrammar::Native {
+ grammar,
+ get_queries,
+ asset_dir,
+ },
lsp_adapters,
- get_queries,
+ loaded: false,
+ });
+ }
+
+ pub fn register_wasm(&self, path: Arc<Path>, grammar_name: Arc<str>, config: LanguageConfig) {
+ let state = &mut *self.state.write();
+ state.available_languages.push(AvailableLanguage {
+ id: post_inc(&mut state.next_available_language_id),
+ config,
+ grammar: AvailableGrammar::Wasm { grammar_name, path },
+ lsp_adapters: Vec::new(),
loaded: false,
});
}
@@ -749,7 +783,7 @@ impl LanguageRegistry {
let mut state = self.state.write();
state.theme = Some(theme.clone());
for language in &state.languages {
- language.set_theme(&theme.editor.syntax);
+ language.set_theme(&theme.syntax());
}
}
@@ -834,13 +868,43 @@ impl LanguageRegistry {
executor
.spawn(async move {
let id = language.id;
- let queries = (language.get_queries)(&language.path);
- let language =
- Language::new(language.config, Some(language.grammar))
+ let name = language.config.name.clone();
+ let language = async {
+ let (grammar, queries) = match language.grammar {
+ AvailableGrammar::Native {
+ grammar,
+ asset_dir,
+ get_queries,
+ } => (grammar, (get_queries)(asset_dir)),
+ AvailableGrammar::Wasm { grammar_name, path } => {
+ let mut wasm_path = path.join(grammar_name.as_ref());
+ wasm_path.set_extension("wasm");
+ let wasm_bytes = std::fs::read(&wasm_path)?;
+ let grammar = PARSER.with(|parser| {
+ let mut parser = parser.borrow_mut();
+ let mut store = parser.take_wasm_store().unwrap();
+ let grammar =
+ store.load_language(&grammar_name, &wasm_bytes);
+ parser.set_wasm_store(store).unwrap();
+ grammar
+ })?;
+ let mut queries = LanguageQueries::default();
+ if let Ok(contents) = std::fs::read_to_string(
+ &path.join("highlights.scm"),
+ ) {
+ queries.highlights = Some(contents.into());
+ }
+ (grammar, queries)
+ }
+ };
+ Language::new(language.config, Some(grammar))
.with_lsp_adapters(language.lsp_adapters)
- .await;
- let name = language.name();
- match language.with_queries(queries) {
+ .await
+ .with_queries(queries)
+ }
+ .await;
+
+ match language {
Ok(language) => {
let language = Arc::new(language);
let mut state = this.state.write();
@@ -918,7 +982,7 @@ impl LanguageRegistry {
}
let servers_tx = servers_tx.clone();
- cx.background()
+ cx.background_executor()
.spawn(async move {
if fake_server
.try_receive_notification::<lsp::notification::Initialized>()
@@ -955,18 +1019,22 @@ impl LanguageRegistry {
let task = {
let container_dir = container_dir.clone();
- cx.spawn(|mut cx| async move {
+ cx.spawn(move |mut cx| async move {
login_shell_env_loaded.await;
- let mut lock = this.lsp_binary_paths.lock();
- let entry = lock
+ let entry = this
+ .lsp_binary_paths
+ .lock()
.entry(adapter.name.clone())
.or_insert_with(|| {
+ let adapter = adapter.clone();
+ let language = language.clone();
+ let delegate = delegate.clone();
cx.spawn(|cx| {
get_binary(
- adapter.clone(),
- language.clone(),
- delegate.clone(),
+ adapter,
+ language,
+ delegate,
container_dir,
lsp_binary_statuses,
cx,
@@ -976,9 +1044,8 @@ impl LanguageRegistry {
.shared()
})
.clone();
- drop(lock);
- let binary = match entry.clone().await {
+ let binary = match entry.await {
Ok(binary) => binary,
Err(err) => anyhow::bail!("{err}"),
};
@@ -1047,7 +1114,7 @@ impl LanguageRegistryState {
fn add(&mut self, language: Arc<Language>) {
if let Some(theme) = self.theme.as_ref() {
- language.set_theme(&theme.editor.syntax);
+ language.set_theme(&theme.syntax());
}
self.languages.push(language);
self.version += 1;
@@ -1387,9 +1454,9 @@ impl Language {
let query = Query::new(&self.grammar_mut().ts_language, source)?;
let mut override_configs_by_id = HashMap::default();
- for (ix, name) in query.capture_names().iter().copied().enumerate() {
+ for (ix, name) in query.capture_names().iter().enumerate() {
if !name.starts_with('_') {
- let value = self.config.overrides.remove(name).unwrap_or_default();
+ let value = self.config.overrides.remove(*name).unwrap_or_default();
for server_name in &value.opt_into_language_servers {
if !self
.config
@@ -1400,7 +1467,7 @@ impl Language {
}
}
- override_configs_by_id.insert(ix as u32, (name.into(), value));
+ override_configs_by_id.insert(ix as u32, (name.to_string(), value));
}
}
@@ -1855,7 +1922,8 @@ mod tests {
#[gpui::test(iterations = 10)]
async fn test_first_line_pattern(cx: &mut TestAppContext) {
let mut languages = LanguageRegistry::test();
- languages.set_executor(cx.background());
+
+ languages.set_executor(cx.executor());
let languages = Arc::new(languages);
languages.register(
"/javascript",
@@ -1892,7 +1960,7 @@ mod tests {
#[gpui::test(iterations = 10)]
async fn test_language_loading(cx: &mut TestAppContext) {
let mut languages = LanguageRegistry::test();
- languages.set_executor(cx.background());
+ languages.set_executor(cx.executor());
let languages = Arc::new(languages);
languages.register(
"/JSON",
@@ -8,10 +8,11 @@ use schemars::{
JsonSchema,
};
use serde::{Deserialize, Serialize};
+use settings::Settings;
use std::{num::NonZeroU32, path::Path, sync::Arc};
pub fn init(cx: &mut AppContext) {
- settings::register::<AllLanguageSettings>(cx);
+ AllLanguageSettings::register(cx);
}
pub fn language_settings<'a>(
@@ -28,7 +29,7 @@ pub fn all_language_settings<'a>(
cx: &'a AppContext,
) -> &'a AllLanguageSettings {
let location = file.map(|f| (f.worktree_id(), f.path().as_ref()));
- settings::get_local(location, cx)
+ AllLanguageSettings::get(location, cx)
}
#[derive(Debug, Clone)]
@@ -254,7 +255,7 @@ impl InlayHintKind {
}
}
-impl settings::Setting for AllLanguageSettings {
+impl settings::Settings for AllLanguageSettings {
const KEY: Option<&'static str> = None;
type FileContent = AllLanguageSettingsContent;
@@ -262,7 +263,7 @@ impl settings::Setting for AllLanguageSettings {
fn load(
default_value: &Self::FileContent,
user_settings: &[&Self::FileContent],
- _: &AppContext,
+ _: &mut AppContext,
) -> Result<Self> {
// A default is provided for all settings.
let mut defaults: LanguageSettings =
@@ -2,7 +2,7 @@ use std::sync::Arc;
use std::{ops::Range, path::PathBuf};
use crate::{HighlightId, Language, LanguageRegistry};
-use gpui::fonts::{self, HighlightStyle, Weight};
+use gpui::{px, FontStyle, FontWeight, HighlightStyle, UnderlineStyle};
use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag};
#[derive(Debug, Clone)]
@@ -26,18 +26,18 @@ impl MarkdownHighlight {
let mut highlight = HighlightStyle::default();
if style.italic {
- highlight.italic = Some(true);
+ highlight.font_style = Some(FontStyle::Italic);
}
if style.underline {
- highlight.underline = Some(fonts::Underline {
- thickness: 1.0.into(),
+ highlight.underline = Some(UnderlineStyle {
+ thickness: px(1.),
..Default::default()
});
}
- if style.weight != fonts::Weight::default() {
- highlight.weight = Some(style.weight);
+ if style.weight != FontWeight::default() {
+ highlight.font_weight = Some(style.weight);
}
Some(highlight)
@@ -52,7 +52,7 @@ impl MarkdownHighlight {
pub struct MarkdownHighlightStyle {
pub italic: bool,
pub underline: bool,
- pub weight: Weight,
+ pub weight: FontWeight,
}
#[derive(Debug, Clone)]
@@ -138,7 +138,7 @@ pub async fn parse_markdown_block(
let mut style = MarkdownHighlightStyle::default();
if bold_depth > 0 {
- style.weight = Weight::BOLD;
+ style.weight = FontWeight::BOLD;
}
if italic_depth > 0 {
@@ -1,6 +1,6 @@
use fuzzy::{StringMatch, StringMatchCandidate};
-use gpui::{executor::Background, fonts::HighlightStyle};
-use std::{ops::Range, sync::Arc};
+use gpui::{BackgroundExecutor, HighlightStyle};
+use std::ops::Range;
#[derive(Debug)]
pub struct Outline<T> {
@@ -57,7 +57,7 @@ impl<T> Outline<T> {
}
}
- pub async fn search(&self, query: &str, executor: Arc<Background>) -> Vec<StringMatch> {
+ pub async fn search(&self, query: &str, executor: BackgroundExecutor) -> Vec<StringMatch> {
let query = query.trim_start();
let is_path_query = query.contains(' ');
let smart_case = query.chars().any(|c| c.is_uppercase());
@@ -81,6 +81,7 @@ impl<T> Outline<T> {
let mut prev_item_ix = 0;
for mut string_match in matches {
let outline_match = &self.items[string_match.candidate_id];
+ string_match.string = outline_match.text.clone();
if is_path_query {
let prefix_len = self.path_candidate_prefixes[string_match.candidate_id];
@@ -7,7 +7,6 @@ use futures::FutureExt;
use parking_lot::Mutex;
use std::{
borrow::Cow,
- cell::RefCell,
cmp::{self, Ordering, Reverse},
collections::BinaryHeap,
fmt, iter,
@@ -16,13 +15,9 @@ use std::{
};
use sum_tree::{Bias, SeekTarget, SumTree};
use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, Rope, ToOffset, ToPoint};
-use tree_sitter::{
- Node, Parser, Query, QueryCapture, QueryCaptures, QueryCursor, QueryMatches, Tree,
-};
+use tree_sitter::{Node, Query, QueryCapture, QueryCaptures, QueryCursor, QueryMatches, Tree};
-thread_local! {
- static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
-}
+use super::PARSER;
static QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Mutex::new(vec![]);
@@ -1,86 +0,0 @@
-[package]
-name = "language2"
-version = "0.1.0"
-edition = "2021"
-publish = false
-
-[lib]
-path = "src/language2.rs"
-doctest = false
-
-[features]
-test-support = [
- "rand",
- "client/test-support",
- "collections/test-support",
- "lsp/test-support",
- "text/test-support",
- "tree-sitter-rust",
- "tree-sitter-typescript",
- "settings/test-support",
- "util/test-support",
-]
-
-[dependencies]
-clock = { path = "../clock" }
-collections = { path = "../collections" }
-fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
-git = { package = "git3", path = "../git3" }
-gpui = { package = "gpui2", path = "../gpui2" }
-lsp = { package = "lsp2", path = "../lsp2" }
-rpc = { package = "rpc2", path = "../rpc2" }
-settings = { package = "settings2", path = "../settings2" }
-sum_tree = { path = "../sum_tree" }
-text = { package = "text2", path = "../text2" }
-theme = { package = "theme2", path = "../theme2" }
-util = { path = "../util" }
-
-anyhow.workspace = true
-async-broadcast = "0.4"
-async-trait.workspace = true
-futures.workspace = true
-globset.workspace = true
-lazy_static.workspace = true
-log.workspace = true
-parking_lot.workspace = true
-postage.workspace = true
-regex.workspace = true
-schemars.workspace = true
-serde.workspace = true
-serde_derive.workspace = true
-serde_json.workspace = true
-similar = "1.3"
-smallvec.workspace = true
-smol.workspace = true
-tree-sitter.workspace = true
-unicase = "2.6"
-
-rand = { workspace = true, optional = true }
-tree-sitter-rust = { workspace = true, optional = true }
-tree-sitter-typescript = { workspace = true, optional = true }
-pulldown-cmark = { version = "0.9.2", default-features = false }
-
-[dev-dependencies]
-client = { package = "client2", path = "../client2", features = ["test-support"] }
-collections = { path = "../collections", features = ["test-support"] }
-gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
-lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
-text = { package = "text2", path = "../text2", features = ["test-support"] }
-settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
-util = { path = "../util", features = ["test-support"] }
-ctor.workspace = true
-env_logger.workspace = true
-indoc.workspace = true
-rand.workspace = true
-unindent.workspace = true
-
-tree-sitter-embedded-template.workspace = true
-tree-sitter-html.workspace = true
-tree-sitter-json.workspace = true
-tree-sitter-markdown.workspace = true
-tree-sitter-rust.workspace = true
-tree-sitter-python.workspace = true
-tree-sitter-typescript.workspace = true
-tree-sitter-ruby.workspace = true
-tree-sitter-elixir.workspace = true
-tree-sitter-heex.workspace = true
@@ -1,5 +0,0 @@
-fn main() {
- if let Ok(bundled) = std::env::var("ZED_BUNDLE") {
- println!("cargo:rustc-env=ZED_BUNDLE={}", bundled);
- }
-}
@@ -1,3193 +0,0 @@
-pub use crate::{
- diagnostic_set::DiagnosticSet,
- highlight_map::{HighlightId, HighlightMap},
- markdown::ParsedMarkdown,
- proto, Grammar, Language, LanguageRegistry,
-};
-use crate::{
- diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
- language_settings::{language_settings, LanguageSettings},
- markdown::parse_markdown,
- outline::OutlineItem,
- syntax_map::{
- SyntaxLayerInfo, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatches,
- SyntaxSnapshot, ToTreeSitterPoint,
- },
- CodeLabel, LanguageScope, Outline,
-};
-use anyhow::{anyhow, Result};
-pub use clock::ReplicaId;
-use futures::channel::oneshot;
-use gpui::{AppContext, EventEmitter, HighlightStyle, ModelContext, Task, TaskLabel};
-use lazy_static::lazy_static;
-use lsp::LanguageServerId;
-use parking_lot::Mutex;
-use similar::{ChangeTag, TextDiff};
-use smallvec::SmallVec;
-use smol::future::yield_now;
-use std::{
- any::Any,
- cmp::{self, Ordering},
- collections::BTreeMap,
- ffi::OsStr,
- future::Future,
- iter::{self, Iterator, Peekable},
- mem,
- ops::{Deref, Range},
- path::{Path, PathBuf},
- str,
- sync::Arc,
- time::{Duration, Instant, SystemTime, UNIX_EPOCH},
- vec,
-};
-use sum_tree::TreeMap;
-use text::operation_queue::OperationQueue;
-pub use text::{Buffer as TextBuffer, BufferSnapshot as TextBufferSnapshot, *};
-use theme::SyntaxTheme;
-#[cfg(any(test, feature = "test-support"))]
-use util::RandomCharIter;
-use util::RangeExt;
-
-#[cfg(any(test, feature = "test-support"))]
-pub use {tree_sitter_rust, tree_sitter_typescript};
-
-pub use lsp::DiagnosticSeverity;
-
-lazy_static! {
- pub static ref BUFFER_DIFF_TASK: TaskLabel = TaskLabel::new();
-}
-
-pub struct Buffer {
- text: TextBuffer,
- diff_base: Option<String>,
- git_diff: git::diff::BufferDiff,
- file: Option<Arc<dyn File>>,
- /// The mtime of the file when this buffer was last loaded from
- /// or saved to disk.
- saved_mtime: SystemTime,
- /// The version vector when this buffer was last loaded from
- /// or saved to disk.
- saved_version: clock::Global,
- /// A hash of the current contents of the buffer's file.
- file_fingerprint: RopeFingerprint,
- transaction_depth: usize,
- was_dirty_before_starting_transaction: Option<bool>,
- reload_task: Option<Task<Result<()>>>,
- language: Option<Arc<Language>>,
- autoindent_requests: Vec<Arc<AutoindentRequest>>,
- pending_autoindent: Option<Task<()>>,
- sync_parse_timeout: Duration,
- syntax_map: Mutex<SyntaxMap>,
- parsing_in_background: bool,
- parse_count: usize,
- diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
- remote_selections: TreeMap<ReplicaId, SelectionSet>,
- selections_update_count: usize,
- diagnostics_update_count: usize,
- diagnostics_timestamp: clock::Lamport,
- file_update_count: usize,
- git_diff_update_count: usize,
- completion_triggers: Vec<String>,
- completion_triggers_timestamp: clock::Lamport,
- deferred_ops: OperationQueue<Operation>,
-}
-
-pub struct BufferSnapshot {
- text: text::BufferSnapshot,
- pub git_diff: git::diff::BufferDiff,
- pub(crate) syntax: SyntaxSnapshot,
- file: Option<Arc<dyn File>>,
- diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
- diagnostics_update_count: usize,
- file_update_count: usize,
- git_diff_update_count: usize,
- remote_selections: TreeMap<ReplicaId, SelectionSet>,
- selections_update_count: usize,
- language: Option<Arc<Language>>,
- parse_count: usize,
-}
-
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
-pub struct IndentSize {
- pub len: u32,
- pub kind: IndentKind,
-}
-
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
-pub enum IndentKind {
- #[default]
- Space,
- Tab,
-}
-
-#[derive(Copy, Clone, PartialEq, Eq, Debug, Default)]
-pub enum CursorShape {
- #[default]
- Bar,
- Block,
- Underscore,
- Hollow,
-}
-
-#[derive(Clone, Debug)]
-struct SelectionSet {
- line_mode: bool,
- cursor_shape: CursorShape,
- selections: Arc<[Selection<Anchor>]>,
- lamport_timestamp: clock::Lamport,
-}
-
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub struct GroupId {
- source: Arc<str>,
- id: usize,
-}
-
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub struct Diagnostic {
- pub source: Option<String>,
- pub code: Option<String>,
- pub severity: DiagnosticSeverity,
- pub message: String,
- pub group_id: usize,
- pub is_valid: bool,
- pub is_primary: bool,
- pub is_disk_based: bool,
- pub is_unnecessary: bool,
-}
-
-pub async fn prepare_completion_documentation(
- documentation: &lsp::Documentation,
- language_registry: &Arc<LanguageRegistry>,
- language: Option<Arc<Language>>,
-) -> Documentation {
- match documentation {
- lsp::Documentation::String(text) => {
- if text.lines().count() <= 1 {
- Documentation::SingleLine(text.clone())
- } else {
- Documentation::MultiLinePlainText(text.clone())
- }
- }
-
- lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
- lsp::MarkupKind::PlainText => {
- if value.lines().count() <= 1 {
- Documentation::SingleLine(value.clone())
- } else {
- Documentation::MultiLinePlainText(value.clone())
- }
- }
-
- lsp::MarkupKind::Markdown => {
- let parsed = parse_markdown(value, language_registry, language).await;
- Documentation::MultiLineMarkdown(parsed)
- }
- },
- }
-}
-
-#[derive(Clone, Debug)]
-pub enum Documentation {
- Undocumented,
- SingleLine(String),
- MultiLinePlainText(String),
- MultiLineMarkdown(ParsedMarkdown),
-}
-
-#[derive(Clone, Debug)]
-pub struct Completion {
- pub old_range: Range<Anchor>,
- pub new_text: String,
- pub label: CodeLabel,
- pub server_id: LanguageServerId,
- pub documentation: Option<Documentation>,
- pub lsp_completion: lsp::CompletionItem,
-}
-
-#[derive(Clone, Debug)]
-pub struct CodeAction {
- pub server_id: LanguageServerId,
- pub range: Range<Anchor>,
- pub lsp_action: lsp::CodeAction,
-}
-
-#[derive(Clone, Debug, PartialEq)]
-pub enum Operation {
- Buffer(text::Operation),
-
- UpdateDiagnostics {
- server_id: LanguageServerId,
- diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
- lamport_timestamp: clock::Lamport,
- },
-
- UpdateSelections {
- selections: Arc<[Selection<Anchor>]>,
- lamport_timestamp: clock::Lamport,
- line_mode: bool,
- cursor_shape: CursorShape,
- },
-
- UpdateCompletionTriggers {
- triggers: Vec<String>,
- lamport_timestamp: clock::Lamport,
- },
-}
-
-#[derive(Clone, Debug, PartialEq)]
-pub enum Event {
- Operation(Operation),
- Edited,
- DirtyChanged,
- Saved,
- FileHandleChanged,
- Reloaded,
- DiffBaseChanged,
- LanguageChanged,
- Reparsed,
- DiagnosticsUpdated,
- Closed,
-}
-
-pub trait File: Send + Sync {
- fn as_local(&self) -> Option<&dyn LocalFile>;
-
- fn is_local(&self) -> bool {
- self.as_local().is_some()
- }
-
- fn mtime(&self) -> SystemTime;
-
- /// Returns the path of this file relative to the worktree's root directory.
- fn path(&self) -> &Arc<Path>;
-
- /// Returns the path of this file relative to the worktree's parent directory (this means it
- /// includes the name of the worktree's root folder).
- fn full_path(&self, cx: &AppContext) -> PathBuf;
-
- /// Returns the last component of this handle's absolute path. If this handle refers to the root
- /// of its worktree, then this method will return the name of the worktree itself.
- fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
-
- /// Returns the id of the worktree to which this file belongs.
- ///
- /// This is needed for looking up project-specific settings.
- fn worktree_id(&self) -> usize;
-
- fn is_deleted(&self) -> bool;
-
- fn as_any(&self) -> &dyn Any;
-
- fn to_proto(&self) -> rpc::proto::File;
-}
-
-pub trait LocalFile: File {
- /// Returns the absolute path of this file.
- fn abs_path(&self, cx: &AppContext) -> PathBuf;
-
- fn load(&self, cx: &AppContext) -> Task<Result<String>>;
-
- fn buffer_reloaded(
- &self,
- buffer_id: u64,
- version: &clock::Global,
- fingerprint: RopeFingerprint,
- line_ending: LineEnding,
- mtime: SystemTime,
- cx: &mut AppContext,
- );
-}
-
-#[derive(Clone, Debug)]
-pub enum AutoindentMode {
- /// Indent each line of inserted text.
- EachLine,
- /// Apply the same indentation adjustment to all of the lines
- /// in a given insertion.
- Block {
- /// The original indentation level of the first line of each
- /// insertion, if it has been copied.
- original_indent_columns: Vec<u32>,
- },
-}
-
-#[derive(Clone)]
-struct AutoindentRequest {
- before_edit: BufferSnapshot,
- entries: Vec<AutoindentRequestEntry>,
- is_block_mode: bool,
-}
-
-#[derive(Clone)]
-struct AutoindentRequestEntry {
- /// A range of the buffer whose indentation should be adjusted.
- range: Range<Anchor>,
- /// Whether or not these lines should be considered brand new, for the
- /// purpose of auto-indent. When text is not new, its indentation will
- /// only be adjusted if the suggested indentation level has *changed*
- /// since the edit was made.
- first_line_is_new: bool,
- indent_size: IndentSize,
- original_indent_column: Option<u32>,
-}
-
-#[derive(Debug)]
-struct IndentSuggestion {
- basis_row: u32,
- delta: Ordering,
- within_error: bool,
-}
-
-struct BufferChunkHighlights<'a> {
- captures: SyntaxMapCaptures<'a>,
- next_capture: Option<SyntaxMapCapture<'a>>,
- stack: Vec<(usize, HighlightId)>,
- highlight_maps: Vec<HighlightMap>,
-}
-
-pub struct BufferChunks<'a> {
- range: Range<usize>,
- chunks: text::Chunks<'a>,
- diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
- error_depth: usize,
- warning_depth: usize,
- information_depth: usize,
- hint_depth: usize,
- unnecessary_depth: usize,
- highlights: Option<BufferChunkHighlights<'a>>,
-}
-
-#[derive(Clone, Copy, Debug, Default)]
-pub struct Chunk<'a> {
- pub text: &'a str,
- pub syntax_highlight_id: Option<HighlightId>,
- pub highlight_style: Option<HighlightStyle>,
- pub diagnostic_severity: Option<DiagnosticSeverity>,
- pub is_unnecessary: bool,
- pub is_tab: bool,
-}
-
-pub struct Diff {
- pub(crate) base_version: clock::Global,
- line_ending: LineEnding,
- edits: Vec<(Range<usize>, Arc<str>)>,
-}
-
-#[derive(Clone, Copy)]
-pub(crate) struct DiagnosticEndpoint {
- offset: usize,
- is_start: bool,
- severity: DiagnosticSeverity,
- is_unnecessary: bool,
-}
-
-#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
-pub enum CharKind {
- Whitespace,
- Punctuation,
- Word,
-}
-
-impl CharKind {
- pub fn coerce_punctuation(self, treat_punctuation_as_word: bool) -> Self {
- if treat_punctuation_as_word && self == CharKind::Punctuation {
- CharKind::Word
- } else {
- self
- }
- }
-}
-
-impl Buffer {
- pub fn new<T: Into<String>>(replica_id: ReplicaId, id: u64, base_text: T) -> Self {
- Self::build(
- TextBuffer::new(replica_id, id, base_text.into()),
- None,
- None,
- )
- }
-
- pub fn remote(remote_id: u64, replica_id: ReplicaId, base_text: String) -> Self {
- Self::build(
- TextBuffer::new(replica_id, remote_id, base_text),
- None,
- None,
- )
- }
-
- pub fn from_proto(
- replica_id: ReplicaId,
- message: proto::BufferState,
- file: Option<Arc<dyn File>>,
- ) -> Result<Self> {
- let buffer = TextBuffer::new(replica_id, message.id, message.base_text);
- let mut this = Self::build(
- buffer,
- message.diff_base.map(|text| text.into_boxed_str().into()),
- file,
- );
- this.text.set_line_ending(proto::deserialize_line_ending(
- rpc::proto::LineEnding::from_i32(message.line_ending)
- .ok_or_else(|| anyhow!("missing line_ending"))?,
- ));
- this.saved_version = proto::deserialize_version(&message.saved_version);
- this.file_fingerprint = proto::deserialize_fingerprint(&message.saved_version_fingerprint)?;
- this.saved_mtime = message
- .saved_mtime
- .ok_or_else(|| anyhow!("invalid saved_mtime"))?
- .into();
- Ok(this)
- }
-
- pub fn to_proto(&self) -> proto::BufferState {
- proto::BufferState {
- id: self.remote_id(),
- file: self.file.as_ref().map(|f| f.to_proto()),
- base_text: self.base_text().to_string(),
- diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
- line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
- saved_version: proto::serialize_version(&self.saved_version),
- saved_version_fingerprint: proto::serialize_fingerprint(self.file_fingerprint),
- saved_mtime: Some(self.saved_mtime.into()),
- }
- }
-
- pub fn serialize_ops(
- &self,
- since: Option<clock::Global>,
- cx: &AppContext,
- ) -> Task<Vec<proto::Operation>> {
- let mut operations = Vec::new();
- operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
-
- operations.extend(self.remote_selections.iter().map(|(_, set)| {
- proto::serialize_operation(&Operation::UpdateSelections {
- selections: set.selections.clone(),
- lamport_timestamp: set.lamport_timestamp,
- line_mode: set.line_mode,
- cursor_shape: set.cursor_shape,
- })
- }));
-
- for (server_id, diagnostics) in &self.diagnostics {
- operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
- lamport_timestamp: self.diagnostics_timestamp,
- server_id: *server_id,
- diagnostics: diagnostics.iter().cloned().collect(),
- }));
- }
-
- operations.push(proto::serialize_operation(
- &Operation::UpdateCompletionTriggers {
- triggers: self.completion_triggers.clone(),
- lamport_timestamp: self.completion_triggers_timestamp,
- },
- ));
-
- let text_operations = self.text.operations().clone();
- cx.background_executor().spawn(async move {
- let since = since.unwrap_or_default();
- operations.extend(
- text_operations
- .iter()
- .filter(|(_, op)| !since.observed(op.timestamp()))
- .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
- );
- operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
- operations
- })
- }
-
- pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
- self.set_language(Some(language), cx);
- self
- }
-
- pub fn build(
- buffer: TextBuffer,
- diff_base: Option<String>,
- file: Option<Arc<dyn File>>,
- ) -> Self {
- let saved_mtime = if let Some(file) = file.as_ref() {
- file.mtime()
- } else {
- UNIX_EPOCH
- };
-
- Self {
- saved_mtime,
- saved_version: buffer.version(),
- file_fingerprint: buffer.as_rope().fingerprint(),
- reload_task: None,
- transaction_depth: 0,
- was_dirty_before_starting_transaction: None,
- text: buffer,
- diff_base,
- git_diff: git::diff::BufferDiff::new(),
- file,
- syntax_map: Mutex::new(SyntaxMap::new()),
- parsing_in_background: false,
- parse_count: 0,
- sync_parse_timeout: Duration::from_millis(1),
- autoindent_requests: Default::default(),
- pending_autoindent: Default::default(),
- language: None,
- remote_selections: Default::default(),
- selections_update_count: 0,
- diagnostics: Default::default(),
- diagnostics_update_count: 0,
- diagnostics_timestamp: Default::default(),
- file_update_count: 0,
- git_diff_update_count: 0,
- completion_triggers: Default::default(),
- completion_triggers_timestamp: Default::default(),
- deferred_ops: OperationQueue::new(),
- }
- }
-
- pub fn snapshot(&self) -> BufferSnapshot {
- let text = self.text.snapshot();
- let mut syntax_map = self.syntax_map.lock();
- syntax_map.interpolate(&text);
- let syntax = syntax_map.snapshot();
-
- BufferSnapshot {
- text,
- syntax,
- git_diff: self.git_diff.clone(),
- file: self.file.clone(),
- remote_selections: self.remote_selections.clone(),
- diagnostics: self.diagnostics.clone(),
- diagnostics_update_count: self.diagnostics_update_count,
- file_update_count: self.file_update_count,
- git_diff_update_count: self.git_diff_update_count,
- language: self.language.clone(),
- parse_count: self.parse_count,
- selections_update_count: self.selections_update_count,
- }
- }
-
- pub fn as_text_snapshot(&self) -> &text::BufferSnapshot {
- &self.text
- }
-
- pub fn text_snapshot(&self) -> text::BufferSnapshot {
- self.text.snapshot()
- }
-
- pub fn file(&self) -> Option<&Arc<dyn File>> {
- self.file.as_ref()
- }
-
- pub fn saved_version(&self) -> &clock::Global {
- &self.saved_version
- }
-
- pub fn saved_version_fingerprint(&self) -> RopeFingerprint {
- self.file_fingerprint
- }
-
- pub fn saved_mtime(&self) -> SystemTime {
- self.saved_mtime
- }
-
- pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
- self.syntax_map.lock().clear();
- self.language = language;
- self.reparse(cx);
- cx.emit(Event::LanguageChanged);
- }
-
- pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
- self.syntax_map
- .lock()
- .set_language_registry(language_registry);
- }
-
- pub fn did_save(
- &mut self,
- version: clock::Global,
- fingerprint: RopeFingerprint,
- mtime: SystemTime,
- cx: &mut ModelContext<Self>,
- ) {
- self.saved_version = version;
- self.file_fingerprint = fingerprint;
- self.saved_mtime = mtime;
- cx.emit(Event::Saved);
- cx.notify();
- }
-
- pub fn reload(
- &mut self,
- cx: &mut ModelContext<Self>,
- ) -> oneshot::Receiver<Option<Transaction>> {
- let (tx, rx) = futures::channel::oneshot::channel();
- let prev_version = self.text.version();
- self.reload_task = Some(cx.spawn(|this, mut cx| async move {
- let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
- let file = this.file.as_ref()?.as_local()?;
- Some((file.mtime(), file.load(cx)))
- })?
- else {
- return Ok(());
- };
-
- let new_text = new_text.await?;
- let diff = this
- .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
- .await;
- this.update(&mut cx, |this, cx| {
- if this.version() == diff.base_version {
- this.finalize_last_transaction();
- this.apply_diff(diff, cx);
- tx.send(this.finalize_last_transaction().cloned()).ok();
-
- this.did_reload(
- this.version(),
- this.as_rope().fingerprint(),
- this.line_ending(),
- new_mtime,
- cx,
- );
- } else {
- this.did_reload(
- prev_version,
- Rope::text_fingerprint(&new_text),
- this.line_ending(),
- this.saved_mtime,
- cx,
- );
- }
-
- this.reload_task.take();
- })
- }));
- rx
- }
-
- pub fn did_reload(
- &mut self,
- version: clock::Global,
- fingerprint: RopeFingerprint,
- line_ending: LineEnding,
- mtime: SystemTime,
- cx: &mut ModelContext<Self>,
- ) {
- self.saved_version = version;
- self.file_fingerprint = fingerprint;
- self.text.set_line_ending(line_ending);
- self.saved_mtime = mtime;
- if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
- file.buffer_reloaded(
- self.remote_id(),
- &self.saved_version,
- self.file_fingerprint,
- self.line_ending(),
- self.saved_mtime,
- cx,
- );
- }
- cx.emit(Event::Reloaded);
- cx.notify();
- }
-
- pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut ModelContext<Self>) {
- let mut file_changed = false;
-
- if let Some(old_file) = self.file.as_ref() {
- if new_file.path() != old_file.path() {
- file_changed = true;
- }
-
- if new_file.is_deleted() {
- if !old_file.is_deleted() {
- file_changed = true;
- if !self.is_dirty() {
- cx.emit(Event::DirtyChanged);
- }
- }
- } else {
- let new_mtime = new_file.mtime();
- if new_mtime != old_file.mtime() {
- file_changed = true;
-
- if !self.is_dirty() {
- self.reload(cx).close();
- }
- }
- }
- } else {
- file_changed = true;
- };
-
- self.file = Some(new_file);
- if file_changed {
- self.file_update_count += 1;
- cx.emit(Event::FileHandleChanged);
- cx.notify();
- }
- }
-
- pub fn diff_base(&self) -> Option<&str> {
- self.diff_base.as_deref()
- }
-
- pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &mut ModelContext<Self>) {
- self.diff_base = diff_base;
- self.git_diff_recalc(cx);
- cx.emit(Event::DiffBaseChanged);
- }
-
- pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) -> Option<Task<()>> {
- let diff_base = self.diff_base.clone()?; // TODO: Make this an Arc
- let snapshot = self.snapshot();
-
- let mut diff = self.git_diff.clone();
- let diff = cx.background_executor().spawn(async move {
- diff.update(&diff_base, &snapshot).await;
- diff
- });
-
- Some(cx.spawn(|this, mut cx| async move {
- let buffer_diff = diff.await;
- this.update(&mut cx, |this, _| {
- this.git_diff = buffer_diff;
- this.git_diff_update_count += 1;
- })
- .ok();
- }))
- }
-
- pub fn close(&mut self, cx: &mut ModelContext<Self>) {
- cx.emit(Event::Closed);
- }
-
- pub fn language(&self) -> Option<&Arc<Language>> {
- self.language.as_ref()
- }
-
- pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
- let offset = position.to_offset(self);
- self.syntax_map
- .lock()
- .layers_for_range(offset..offset, &self.text)
- .last()
- .map(|info| info.language.clone())
- .or_else(|| self.language.clone())
- }
-
- pub fn parse_count(&self) -> usize {
- self.parse_count
- }
-
- pub fn selections_update_count(&self) -> usize {
- self.selections_update_count
- }
-
- pub fn diagnostics_update_count(&self) -> usize {
- self.diagnostics_update_count
- }
-
- pub fn file_update_count(&self) -> usize {
- self.file_update_count
- }
-
- pub fn git_diff_update_count(&self) -> usize {
- self.git_diff_update_count
- }
-
- #[cfg(any(test, feature = "test-support"))]
- pub fn is_parsing(&self) -> bool {
- self.parsing_in_background
- }
-
- pub fn contains_unknown_injections(&self) -> bool {
- self.syntax_map.lock().contains_unknown_injections()
- }
-
- #[cfg(test)]
- pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
- self.sync_parse_timeout = timeout;
- }
-
- /// Called after an edit to synchronize the buffer's main parse tree with
- /// the buffer's new underlying state.
- ///
- /// Locks the syntax map and interpolates the edits since the last reparse
- /// into the foreground syntax tree.
- ///
- /// Then takes a stable snapshot of the syntax map before unlocking it.
- /// The snapshot with the interpolated edits is sent to a background thread,
- /// where we ask Tree-sitter to perform an incremental parse.
- ///
- /// Meanwhile, in the foreground, we block the main thread for up to 1ms
- /// waiting on the parse to complete. As soon as it completes, we proceed
- /// synchronously, unless a 1ms timeout elapses.
- ///
- /// If we time out waiting on the parse, we spawn a second task waiting
- /// until the parse does complete and return with the interpolated tree still
- /// in the foreground. When the background parse completes, call back into
- /// the main thread and assign the foreground parse state.
- ///
- /// If the buffer or grammar changed since the start of the background parse,
- /// initiate an additional reparse recursively. To avoid concurrent parses
- /// for the same buffer, we only initiate a new parse if we are not already
- /// parsing in the background.
- pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
- if self.parsing_in_background {
- return;
- }
- let language = if let Some(language) = self.language.clone() {
- language
- } else {
- return;
- };
-
- let text = self.text_snapshot();
- let parsed_version = self.version();
-
- let mut syntax_map = self.syntax_map.lock();
- syntax_map.interpolate(&text);
- let language_registry = syntax_map.language_registry();
- let mut syntax_snapshot = syntax_map.snapshot();
- drop(syntax_map);
-
- let parse_task = cx.background_executor().spawn({
- let language = language.clone();
- let language_registry = language_registry.clone();
- async move {
- syntax_snapshot.reparse(&text, language_registry, language);
- syntax_snapshot
- }
- });
-
- match cx
- .background_executor()
- .block_with_timeout(self.sync_parse_timeout, parse_task)
- {
- Ok(new_syntax_snapshot) => {
- self.did_finish_parsing(new_syntax_snapshot, cx);
- return;
- }
- Err(parse_task) => {
- self.parsing_in_background = true;
- cx.spawn(move |this, mut cx| async move {
- let new_syntax_map = parse_task.await;
- this.update(&mut cx, move |this, cx| {
- let grammar_changed =
- this.language.as_ref().map_or(true, |current_language| {
- !Arc::ptr_eq(&language, current_language)
- });
- let language_registry_changed = new_syntax_map
- .contains_unknown_injections()
- && language_registry.map_or(false, |registry| {
- registry.version() != new_syntax_map.language_registry_version()
- });
- let parse_again = language_registry_changed
- || grammar_changed
- || this.version.changed_since(&parsed_version);
- this.did_finish_parsing(new_syntax_map, cx);
- this.parsing_in_background = false;
- if parse_again {
- this.reparse(cx);
- }
- })
- .ok();
- })
- .detach();
- }
- }
- }
-
- fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
- self.parse_count += 1;
- self.syntax_map.lock().did_parse(syntax_snapshot);
- self.request_autoindent(cx);
- cx.emit(Event::Reparsed);
- cx.notify();
- }
-
- pub fn update_diagnostics(
- &mut self,
- server_id: LanguageServerId,
- diagnostics: DiagnosticSet,
- cx: &mut ModelContext<Self>,
- ) {
- let lamport_timestamp = self.text.lamport_clock.tick();
- let op = Operation::UpdateDiagnostics {
- server_id,
- diagnostics: diagnostics.iter().cloned().collect(),
- lamport_timestamp,
- };
- self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
- self.send_operation(op, cx);
- }
-
- fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
- if let Some(indent_sizes) = self.compute_autoindents() {
- let indent_sizes = cx.background_executor().spawn(indent_sizes);
- match cx
- .background_executor()
- .block_with_timeout(Duration::from_micros(500), indent_sizes)
- {
- Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
- Err(indent_sizes) => {
- self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
- let indent_sizes = indent_sizes.await;
- this.update(&mut cx, |this, cx| {
- this.apply_autoindents(indent_sizes, cx);
- })
- .ok();
- }));
- }
- }
- } else {
- self.autoindent_requests.clear();
- }
- }
-
- fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
- let max_rows_between_yields = 100;
- let snapshot = self.snapshot();
- if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
- return None;
- }
-
- let autoindent_requests = self.autoindent_requests.clone();
- Some(async move {
- let mut indent_sizes = BTreeMap::new();
- for request in autoindent_requests {
- // Resolve each edited range to its row in the current buffer and in the
- // buffer before this batch of edits.
- let mut row_ranges = Vec::new();
- let mut old_to_new_rows = BTreeMap::new();
- let mut language_indent_sizes_by_new_row = Vec::new();
- for entry in &request.entries {
- let position = entry.range.start;
- let new_row = position.to_point(&snapshot).row;
- let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
- language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
-
- if !entry.first_line_is_new {
- let old_row = position.to_point(&request.before_edit).row;
- old_to_new_rows.insert(old_row, new_row);
- }
- row_ranges.push((new_row..new_end_row, entry.original_indent_column));
- }
-
- // Build a map containing the suggested indentation for each of the edited lines
- // with respect to the state of the buffer before these edits. This map is keyed
- // by the rows for these lines in the current state of the buffer.
- let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
- let old_edited_ranges =
- contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
- let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
- let mut language_indent_size = IndentSize::default();
- for old_edited_range in old_edited_ranges {
- let suggestions = request
- .before_edit
- .suggest_autoindents(old_edited_range.clone())
- .into_iter()
- .flatten();
- for (old_row, suggestion) in old_edited_range.zip(suggestions) {
- if let Some(suggestion) = suggestion {
- let new_row = *old_to_new_rows.get(&old_row).unwrap();
-
- // Find the indent size based on the language for this row.
- while let Some((row, size)) = language_indent_sizes.peek() {
- if *row > new_row {
- break;
- }
- language_indent_size = *size;
- language_indent_sizes.next();
- }
-
- let suggested_indent = old_to_new_rows
- .get(&suggestion.basis_row)
- .and_then(|from_row| {
- Some(old_suggestions.get(from_row).copied()?.0)
- })
- .unwrap_or_else(|| {
- request
- .before_edit
- .indent_size_for_line(suggestion.basis_row)
- })
- .with_delta(suggestion.delta, language_indent_size);
- old_suggestions
- .insert(new_row, (suggested_indent, suggestion.within_error));
- }
- }
- yield_now().await;
- }
-
- // In block mode, only compute indentation suggestions for the first line
- // of each insertion. Otherwise, compute suggestions for every inserted line.
- let new_edited_row_ranges = contiguous_ranges(
- row_ranges.iter().flat_map(|(range, _)| {
- if request.is_block_mode {
- range.start..range.start + 1
- } else {
- range.clone()
- }
- }),
- max_rows_between_yields,
- );
-
- // Compute new suggestions for each line, but only include them in the result
- // if they differ from the old suggestion for that line.
- let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
- let mut language_indent_size = IndentSize::default();
- for new_edited_row_range in new_edited_row_ranges {
- let suggestions = snapshot
- .suggest_autoindents(new_edited_row_range.clone())
- .into_iter()
- .flatten();
- for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
- if let Some(suggestion) = suggestion {
- // Find the indent size based on the language for this row.
- while let Some((row, size)) = language_indent_sizes.peek() {
- if *row > new_row {
- break;
- }
- language_indent_size = *size;
- language_indent_sizes.next();
- }
-
- let suggested_indent = indent_sizes
- .get(&suggestion.basis_row)
- .copied()
- .unwrap_or_else(|| {
- snapshot.indent_size_for_line(suggestion.basis_row)
- })
- .with_delta(suggestion.delta, language_indent_size);
- if old_suggestions.get(&new_row).map_or(
- true,
- |(old_indentation, was_within_error)| {
- suggested_indent != *old_indentation
- && (!suggestion.within_error || *was_within_error)
- },
- ) {
- indent_sizes.insert(new_row, suggested_indent);
- }
- }
- }
- yield_now().await;
- }
-
- // For each block of inserted text, adjust the indentation of the remaining
- // lines of the block by the same amount as the first line was adjusted.
- if request.is_block_mode {
- for (row_range, original_indent_column) in
- row_ranges
- .into_iter()
- .filter_map(|(range, original_indent_column)| {
- if range.len() > 1 {
- Some((range, original_indent_column?))
- } else {
- None
- }
- })
- {
- let new_indent = indent_sizes
- .get(&row_range.start)
- .copied()
- .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
- let delta = new_indent.len as i64 - original_indent_column as i64;
- if delta != 0 {
- for row in row_range.skip(1) {
- indent_sizes.entry(row).or_insert_with(|| {
- let mut size = snapshot.indent_size_for_line(row);
- if size.kind == new_indent.kind {
- match delta.cmp(&0) {
- Ordering::Greater => size.len += delta as u32,
- Ordering::Less => {
- size.len = size.len.saturating_sub(-delta as u32)
- }
- Ordering::Equal => {}
- }
- }
- size
- });
- }
- }
- }
- }
- }
-
- indent_sizes
- })
- }
-
- fn apply_autoindents(
- &mut self,
- indent_sizes: BTreeMap<u32, IndentSize>,
- cx: &mut ModelContext<Self>,
- ) {
- self.autoindent_requests.clear();
-
- let edits: Vec<_> = indent_sizes
- .into_iter()
- .filter_map(|(row, indent_size)| {
- let current_size = indent_size_for_line(self, row);
- Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
- })
- .collect();
-
- self.edit(edits, None, cx);
- }
-
- // Create a minimal edit that will cause the the given row to be indented
- // with the given size. After applying this edit, the length of the line
- // will always be at least `new_size.len`.
- pub fn edit_for_indent_size_adjustment(
- row: u32,
- current_size: IndentSize,
- new_size: IndentSize,
- ) -> Option<(Range<Point>, String)> {
- if new_size.kind != current_size.kind {
- Some((
- Point::new(row, 0)..Point::new(row, current_size.len),
- iter::repeat(new_size.char())
- .take(new_size.len as usize)
- .collect::<String>(),
- ))
- } else {
- match new_size.len.cmp(¤t_size.len) {
- Ordering::Greater => {
- let point = Point::new(row, 0);
- Some((
- point..point,
- iter::repeat(new_size.char())
- .take((new_size.len - current_size.len) as usize)
- .collect::<String>(),
- ))
- }
-
- Ordering::Less => Some((
- Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
- String::new(),
- )),
-
- Ordering::Equal => None,
- }
- }
- }
-
- pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
- let old_text = self.as_rope().clone();
- let base_version = self.version();
- cx.background_executor()
- .spawn_labeled(*BUFFER_DIFF_TASK, async move {
- let old_text = old_text.to_string();
- let line_ending = LineEnding::detect(&new_text);
- LineEnding::normalize(&mut new_text);
-
- let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
- let empty: Arc<str> = "".into();
-
- let mut edits = Vec::new();
- let mut old_offset = 0;
- let mut new_offset = 0;
- let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
- for change in diff.iter_all_changes().map(Some).chain([None]) {
- if let Some(change) = &change {
- let len = change.value().len();
- match change.tag() {
- ChangeTag::Equal => {
- old_offset += len;
- new_offset += len;
- }
- ChangeTag::Delete => {
- let old_end_offset = old_offset + len;
- if let Some((last_old_range, _)) = &mut last_edit {
- last_old_range.end = old_end_offset;
- } else {
- last_edit =
- Some((old_offset..old_end_offset, new_offset..new_offset));
- }
- old_offset = old_end_offset;
- }
- ChangeTag::Insert => {
- let new_end_offset = new_offset + len;
- if let Some((_, last_new_range)) = &mut last_edit {
- last_new_range.end = new_end_offset;
- } else {
- last_edit =
- Some((old_offset..old_offset, new_offset..new_end_offset));
- }
- new_offset = new_end_offset;
- }
- }
- }
-
- if let Some((old_range, new_range)) = &last_edit {
- if old_offset > old_range.end
- || new_offset > new_range.end
- || change.is_none()
- {
- let text = if new_range.is_empty() {
- empty.clone()
- } else {
- new_text[new_range.clone()].into()
- };
- edits.push((old_range.clone(), text));
- last_edit.take();
- }
- }
- }
-
- Diff {
- base_version,
- line_ending,
- edits,
- }
- })
- }
-
- /// Spawn a background task that searches the buffer for any whitespace
- /// at the ends of a lines, and returns a `Diff` that removes that whitespace.
- pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
- let old_text = self.as_rope().clone();
- let line_ending = self.line_ending();
- let base_version = self.version();
- cx.background_executor().spawn(async move {
- let ranges = trailing_whitespace_ranges(&old_text);
- let empty = Arc::<str>::from("");
- Diff {
- base_version,
- line_ending,
- edits: ranges
- .into_iter()
- .map(|range| (range, empty.clone()))
- .collect(),
- }
- })
- }
-
- /// Ensure that the buffer ends with a single newline character, and
- /// no other whitespace.
- pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
- let len = self.len();
- let mut offset = len;
- for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
- let non_whitespace_len = chunk
- .trim_end_matches(|c: char| c.is_ascii_whitespace())
- .len();
- offset -= chunk.len();
- offset += non_whitespace_len;
- if non_whitespace_len != 0 {
- if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
- return;
- }
- break;
- }
- }
- self.edit([(offset..len, "\n")], None, cx);
- }
-
- /// Apply a diff to the buffer. If the buffer has changed since the given diff was
- /// calculated, then adjust the diff to account for those changes, and discard any
- /// parts of the diff that conflict with those changes.
- pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
- // Check for any edits to the buffer that have occurred since this diff
- // was computed.
- let snapshot = self.snapshot();
- let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
- let mut delta = 0;
- let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
- while let Some(edit_since) = edits_since.peek() {
- // If the edit occurs after a diff hunk, then it does not
- // affect that hunk.
- if edit_since.old.start > range.end {
- break;
- }
- // If the edit precedes the diff hunk, then adjust the hunk
- // to reflect the edit.
- else if edit_since.old.end < range.start {
- delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
- edits_since.next();
- }
- // If the edit intersects a diff hunk, then discard that hunk.
- else {
- return None;
- }
- }
-
- let start = (range.start as i64 + delta) as usize;
- let end = (range.end as i64 + delta) as usize;
- Some((start..end, new_text))
- });
-
- self.start_transaction();
- self.text.set_line_ending(diff.line_ending);
- self.edit(adjusted_edits, None, cx);
- self.end_transaction(cx)
- }
-
- pub fn is_dirty(&self) -> bool {
- self.file_fingerprint != self.as_rope().fingerprint()
- || self.file.as_ref().map_or(false, |file| file.is_deleted())
- }
-
- pub fn has_conflict(&self) -> bool {
- self.file_fingerprint != self.as_rope().fingerprint()
- && self
- .file
- .as_ref()
- .map_or(false, |file| file.mtime() > self.saved_mtime)
- }
-
- pub fn subscribe(&mut self) -> Subscription {
- self.text.subscribe()
- }
-
- pub fn start_transaction(&mut self) -> Option<TransactionId> {
- self.start_transaction_at(Instant::now())
- }
-
- pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
- self.transaction_depth += 1;
- if self.was_dirty_before_starting_transaction.is_none() {
- self.was_dirty_before_starting_transaction = Some(self.is_dirty());
- }
- self.text.start_transaction_at(now)
- }
-
- pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
- self.end_transaction_at(Instant::now(), cx)
- }
-
- pub fn end_transaction_at(
- &mut self,
- now: Instant,
- cx: &mut ModelContext<Self>,
- ) -> Option<TransactionId> {
- assert!(self.transaction_depth > 0);
- self.transaction_depth -= 1;
- let was_dirty = if self.transaction_depth == 0 {
- self.was_dirty_before_starting_transaction.take().unwrap()
- } else {
- false
- };
- if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
- self.did_edit(&start_version, was_dirty, cx);
- Some(transaction_id)
- } else {
- None
- }
- }
-
- pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
- self.text.push_transaction(transaction, now);
- }
-
- pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
- self.text.finalize_last_transaction()
- }
-
- pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
- self.text.group_until_transaction(transaction_id);
- }
-
- pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
- self.text.forget_transaction(transaction_id);
- }
-
- pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
- self.text.merge_transactions(transaction, destination);
- }
-
- pub fn wait_for_edits(
- &mut self,
- edit_ids: impl IntoIterator<Item = clock::Lamport>,
- ) -> impl Future<Output = Result<()>> {
- self.text.wait_for_edits(edit_ids)
- }
-
- pub fn wait_for_anchors(
- &mut self,
- anchors: impl IntoIterator<Item = Anchor>,
- ) -> impl 'static + Future<Output = Result<()>> {
- self.text.wait_for_anchors(anchors)
- }
-
- pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
- self.text.wait_for_version(version)
- }
-
- pub fn give_up_waiting(&mut self) {
- self.text.give_up_waiting();
- }
-
- pub fn set_active_selections(
- &mut self,
- selections: Arc<[Selection<Anchor>]>,
- line_mode: bool,
- cursor_shape: CursorShape,
- cx: &mut ModelContext<Self>,
- ) {
- let lamport_timestamp = self.text.lamport_clock.tick();
- self.remote_selections.insert(
- self.text.replica_id(),
- SelectionSet {
- selections: selections.clone(),
- lamport_timestamp,
- line_mode,
- cursor_shape,
- },
- );
- self.send_operation(
- Operation::UpdateSelections {
- selections,
- line_mode,
- lamport_timestamp,
- cursor_shape,
- },
- cx,
- );
- }
-
- pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
- if self
- .remote_selections
- .get(&self.text.replica_id())
- .map_or(true, |set| !set.selections.is_empty())
- {
- self.set_active_selections(Arc::from([]), false, Default::default(), cx);
- }
- }
-
- pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Lamport>
- where
- T: Into<Arc<str>>,
- {
- self.autoindent_requests.clear();
- self.edit([(0..self.len(), text)], None, cx)
- }
-
- pub fn edit<I, S, T>(
- &mut self,
- edits_iter: I,
- autoindent_mode: Option<AutoindentMode>,
- cx: &mut ModelContext<Self>,
- ) -> Option<clock::Lamport>
- where
- I: IntoIterator<Item = (Range<S>, T)>,
- S: ToOffset,
- T: Into<Arc<str>>,
- {
- // Skip invalid edits and coalesce contiguous ones.
- let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
- for (range, new_text) in edits_iter {
- let mut range = range.start.to_offset(self)..range.end.to_offset(self);
- if range.start > range.end {
- mem::swap(&mut range.start, &mut range.end);
- }
- let new_text = new_text.into();
- if !new_text.is_empty() || !range.is_empty() {
- if let Some((prev_range, prev_text)) = edits.last_mut() {
- if prev_range.end >= range.start {
- prev_range.end = cmp::max(prev_range.end, range.end);
- *prev_text = format!("{prev_text}{new_text}").into();
- } else {
- edits.push((range, new_text));
- }
- } else {
- edits.push((range, new_text));
- }
- }
- }
- if edits.is_empty() {
- return None;
- }
-
- self.start_transaction();
- self.pending_autoindent.take();
- let autoindent_request = autoindent_mode
- .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
-
- let edit_operation = self.text.edit(edits.iter().cloned());
- let edit_id = edit_operation.timestamp();
-
- if let Some((before_edit, mode)) = autoindent_request {
- let mut delta = 0isize;
- let entries = edits
- .into_iter()
- .enumerate()
- .zip(&edit_operation.as_edit().unwrap().new_text)
- .map(|((ix, (range, _)), new_text)| {
- let new_text_length = new_text.len();
- let old_start = range.start.to_point(&before_edit);
- let new_start = (delta + range.start as isize) as usize;
- delta += new_text_length as isize - (range.end as isize - range.start as isize);
-
- let mut range_of_insertion_to_indent = 0..new_text_length;
- let mut first_line_is_new = false;
- let mut original_indent_column = None;
-
- // When inserting an entire line at the beginning of an existing line,
- // treat the insertion as new.
- if new_text.contains('\n')
- && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
- {
- first_line_is_new = true;
- }
-
- // When inserting text starting with a newline, avoid auto-indenting the
- // previous line.
- if new_text.starts_with('\n') {
- range_of_insertion_to_indent.start += 1;
- first_line_is_new = true;
- }
-
- // Avoid auto-indenting after the insertion.
- if let AutoindentMode::Block {
- original_indent_columns,
- } = &mode
- {
- original_indent_column =
- Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
- indent_size_for_text(
- new_text[range_of_insertion_to_indent.clone()].chars(),
- )
- .len
- }));
- if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
- range_of_insertion_to_indent.end -= 1;
- }
- }
-
- AutoindentRequestEntry {
- first_line_is_new,
- original_indent_column,
- indent_size: before_edit.language_indent_size_at(range.start, cx),
- range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
- ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
- }
- })
- .collect();
-
- self.autoindent_requests.push(Arc::new(AutoindentRequest {
- before_edit,
- entries,
- is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
- }));
- }
-
- self.end_transaction(cx);
- self.send_operation(Operation::Buffer(edit_operation), cx);
- Some(edit_id)
- }
-
- fn did_edit(
- &mut self,
- old_version: &clock::Global,
- was_dirty: bool,
- cx: &mut ModelContext<Self>,
- ) {
- if self.edits_since::<usize>(old_version).next().is_none() {
- return;
- }
-
- self.reparse(cx);
-
- cx.emit(Event::Edited);
- if was_dirty != self.is_dirty() {
- cx.emit(Event::DirtyChanged);
- }
- cx.notify();
- }
-
- pub fn apply_ops<I: IntoIterator<Item = Operation>>(
- &mut self,
- ops: I,
- cx: &mut ModelContext<Self>,
- ) -> Result<()> {
- self.pending_autoindent.take();
- let was_dirty = self.is_dirty();
- let old_version = self.version.clone();
- let mut deferred_ops = Vec::new();
- let buffer_ops = ops
- .into_iter()
- .filter_map(|op| match op {
- Operation::Buffer(op) => Some(op),
- _ => {
- if self.can_apply_op(&op) {
- self.apply_op(op, cx);
- } else {
- deferred_ops.push(op);
- }
- None
- }
- })
- .collect::<Vec<_>>();
- self.text.apply_ops(buffer_ops)?;
- self.deferred_ops.insert(deferred_ops);
- self.flush_deferred_ops(cx);
- self.did_edit(&old_version, was_dirty, cx);
- // Notify independently of whether the buffer was edited as the operations could include a
- // selection update.
- cx.notify();
- Ok(())
- }
-
- fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
- let mut deferred_ops = Vec::new();
- for op in self.deferred_ops.drain().iter().cloned() {
- if self.can_apply_op(&op) {
- self.apply_op(op, cx);
- } else {
- deferred_ops.push(op);
- }
- }
- self.deferred_ops.insert(deferred_ops);
- }
-
- fn can_apply_op(&self, operation: &Operation) -> bool {
- match operation {
- Operation::Buffer(_) => {
- unreachable!("buffer operations should never be applied at this layer")
- }
- Operation::UpdateDiagnostics {
- diagnostics: diagnostic_set,
- ..
- } => diagnostic_set.iter().all(|diagnostic| {
- self.text.can_resolve(&diagnostic.range.start)
- && self.text.can_resolve(&diagnostic.range.end)
- }),
- Operation::UpdateSelections { selections, .. } => selections
- .iter()
- .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
- Operation::UpdateCompletionTriggers { .. } => true,
- }
- }
-
- fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
- match operation {
- Operation::Buffer(_) => {
- unreachable!("buffer operations should never be applied at this layer")
- }
- Operation::UpdateDiagnostics {
- server_id,
- diagnostics: diagnostic_set,
- lamport_timestamp,
- } => {
- let snapshot = self.snapshot();
- self.apply_diagnostic_update(
- server_id,
- DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
- lamport_timestamp,
- cx,
- );
- }
- Operation::UpdateSelections {
- selections,
- lamport_timestamp,
- line_mode,
- cursor_shape,
- } => {
- if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
- if set.lamport_timestamp > lamport_timestamp {
- return;
- }
- }
-
- self.remote_selections.insert(
- lamport_timestamp.replica_id,
- SelectionSet {
- selections,
- lamport_timestamp,
- line_mode,
- cursor_shape,
- },
- );
- self.text.lamport_clock.observe(lamport_timestamp);
- self.selections_update_count += 1;
- }
- Operation::UpdateCompletionTriggers {
- triggers,
- lamport_timestamp,
- } => {
- self.completion_triggers = triggers;
- self.text.lamport_clock.observe(lamport_timestamp);
- }
- }
- }
-
- fn apply_diagnostic_update(
- &mut self,
- server_id: LanguageServerId,
- diagnostics: DiagnosticSet,
- lamport_timestamp: clock::Lamport,
- cx: &mut ModelContext<Self>,
- ) {
- if lamport_timestamp > self.diagnostics_timestamp {
- let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
- if diagnostics.len() == 0 {
- if let Ok(ix) = ix {
- self.diagnostics.remove(ix);
- }
- } else {
- match ix {
- Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
- Ok(ix) => self.diagnostics[ix].1 = diagnostics,
- };
- }
- self.diagnostics_timestamp = lamport_timestamp;
- self.diagnostics_update_count += 1;
- self.text.lamport_clock.observe(lamport_timestamp);
- cx.notify();
- cx.emit(Event::DiagnosticsUpdated);
- }
- }
-
- fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
- cx.emit(Event::Operation(operation));
- }
-
- pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
- self.remote_selections.remove(&replica_id);
- cx.notify();
- }
-
- pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
- let was_dirty = self.is_dirty();
- let old_version = self.version.clone();
-
- if let Some((transaction_id, operation)) = self.text.undo() {
- self.send_operation(Operation::Buffer(operation), cx);
- self.did_edit(&old_version, was_dirty, cx);
- Some(transaction_id)
- } else {
- None
- }
- }
-
- pub fn undo_transaction(
- &mut self,
- transaction_id: TransactionId,
- cx: &mut ModelContext<Self>,
- ) -> bool {
- let was_dirty = self.is_dirty();
- let old_version = self.version.clone();
- if let Some(operation) = self.text.undo_transaction(transaction_id) {
- self.send_operation(Operation::Buffer(operation), cx);
- self.did_edit(&old_version, was_dirty, cx);
- true
- } else {
- false
- }
- }
-
- pub fn undo_to_transaction(
- &mut self,
- transaction_id: TransactionId,
- cx: &mut ModelContext<Self>,
- ) -> bool {
- let was_dirty = self.is_dirty();
- let old_version = self.version.clone();
-
- let operations = self.text.undo_to_transaction(transaction_id);
- let undone = !operations.is_empty();
- for operation in operations {
- self.send_operation(Operation::Buffer(operation), cx);
- }
- if undone {
- self.did_edit(&old_version, was_dirty, cx)
- }
- undone
- }
-
- pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
- let was_dirty = self.is_dirty();
- let old_version = self.version.clone();
-
- if let Some((transaction_id, operation)) = self.text.redo() {
- self.send_operation(Operation::Buffer(operation), cx);
- self.did_edit(&old_version, was_dirty, cx);
- Some(transaction_id)
- } else {
- None
- }
- }
-
- pub fn redo_to_transaction(
- &mut self,
- transaction_id: TransactionId,
- cx: &mut ModelContext<Self>,
- ) -> bool {
- let was_dirty = self.is_dirty();
- let old_version = self.version.clone();
-
- let operations = self.text.redo_to_transaction(transaction_id);
- let redone = !operations.is_empty();
- for operation in operations {
- self.send_operation(Operation::Buffer(operation), cx);
- }
- if redone {
- self.did_edit(&old_version, was_dirty, cx)
- }
- redone
- }
-
- pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
- self.completion_triggers = triggers.clone();
- self.completion_triggers_timestamp = self.text.lamport_clock.tick();
- self.send_operation(
- Operation::UpdateCompletionTriggers {
- triggers,
- lamport_timestamp: self.completion_triggers_timestamp,
- },
- cx,
- );
- cx.notify();
- }
-
- pub fn completion_triggers(&self) -> &[String] {
- &self.completion_triggers
- }
-}
-
-#[cfg(any(test, feature = "test-support"))]
-impl Buffer {
- pub fn edit_via_marked_text(
- &mut self,
- marked_string: &str,
- autoindent_mode: Option<AutoindentMode>,
- cx: &mut ModelContext<Self>,
- ) {
- let edits = self.edits_for_marked_text(marked_string);
- self.edit(edits, autoindent_mode, cx);
- }
-
- pub fn set_group_interval(&mut self, group_interval: Duration) {
- self.text.set_group_interval(group_interval);
- }
-
- pub fn randomly_edit<T>(
- &mut self,
- rng: &mut T,
- old_range_count: usize,
- cx: &mut ModelContext<Self>,
- ) where
- T: rand::Rng,
- {
- let mut edits: Vec<(Range<usize>, String)> = Vec::new();
- let mut last_end = None;
- for _ in 0..old_range_count {
- if last_end.map_or(false, |last_end| last_end >= self.len()) {
- break;
- }
-
- let new_start = last_end.map_or(0, |last_end| last_end + 1);
- let mut range = self.random_byte_range(new_start, rng);
- if rng.gen_bool(0.2) {
- mem::swap(&mut range.start, &mut range.end);
- }
- last_end = Some(range.end);
-
- let new_text_len = rng.gen_range(0..10);
- let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
-
- edits.push((range, new_text));
- }
- log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
- self.edit(edits, None, cx);
- }
-
- pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
- let was_dirty = self.is_dirty();
- let old_version = self.version.clone();
-
- let ops = self.text.randomly_undo_redo(rng);
- if !ops.is_empty() {
- for op in ops {
- self.send_operation(Operation::Buffer(op), cx);
- self.did_edit(&old_version, was_dirty, cx);
- }
- }
- }
-}
-
-impl EventEmitter<Event> for Buffer {}
-
-impl Deref for Buffer {
- type Target = TextBuffer;
-
- fn deref(&self) -> &Self::Target {
- &self.text
- }
-}
-
-impl BufferSnapshot {
- pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
- indent_size_for_line(self, row)
- }
-
- pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
- let settings = language_settings(self.language_at(position), self.file(), cx);
- if settings.hard_tabs {
- IndentSize::tab()
- } else {
- IndentSize::spaces(settings.tab_size.get())
- }
- }
-
- pub fn suggested_indents(
- &self,
- rows: impl Iterator<Item = u32>,
- single_indent_size: IndentSize,
- ) -> BTreeMap<u32, IndentSize> {
- let mut result = BTreeMap::new();
-
- for row_range in contiguous_ranges(rows, 10) {
- let suggestions = match self.suggest_autoindents(row_range.clone()) {
- Some(suggestions) => suggestions,
- _ => break,
- };
-
- for (row, suggestion) in row_range.zip(suggestions) {
- let indent_size = if let Some(suggestion) = suggestion {
- result
- .get(&suggestion.basis_row)
- .copied()
- .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
- .with_delta(suggestion.delta, single_indent_size)
- } else {
- self.indent_size_for_line(row)
- };
-
- result.insert(row, indent_size);
- }
- }
-
- result
- }
-
- fn suggest_autoindents(
- &self,
- row_range: Range<u32>,
- ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
- let config = &self.language.as_ref()?.config;
- let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
-
- // Find the suggested indentation ranges based on the syntax tree.
- let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
- let end = Point::new(row_range.end, 0);
- let range = (start..end).to_offset(&self.text);
- let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
- Some(&grammar.indents_config.as_ref()?.query)
- });
- let indent_configs = matches
- .grammars()
- .iter()
- .map(|grammar| grammar.indents_config.as_ref().unwrap())
- .collect::<Vec<_>>();
-
- let mut indent_ranges = Vec::<Range<Point>>::new();
- let mut outdent_positions = Vec::<Point>::new();
- while let Some(mat) = matches.peek() {
- let mut start: Option<Point> = None;
- let mut end: Option<Point> = None;
-
- let config = &indent_configs[mat.grammar_index];
- for capture in mat.captures {
- if capture.index == config.indent_capture_ix {
- start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
- end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
- } else if Some(capture.index) == config.start_capture_ix {
- start = Some(Point::from_ts_point(capture.node.end_position()));
- } else if Some(capture.index) == config.end_capture_ix {
- end = Some(Point::from_ts_point(capture.node.start_position()));
- } else if Some(capture.index) == config.outdent_capture_ix {
- outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
- }
- }
-
- matches.advance();
- if let Some((start, end)) = start.zip(end) {
- if start.row == end.row {
- continue;
- }
-
- let range = start..end;
- match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
- Err(ix) => indent_ranges.insert(ix, range),
- Ok(ix) => {
- let prev_range = &mut indent_ranges[ix];
- prev_range.end = prev_range.end.max(range.end);
- }
- }
- }
- }
-
- let mut error_ranges = Vec::<Range<Point>>::new();
- let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
- Some(&grammar.error_query)
- });
- while let Some(mat) = matches.peek() {
- let node = mat.captures[0].node;
- let start = Point::from_ts_point(node.start_position());
- let end = Point::from_ts_point(node.end_position());
- let range = start..end;
- let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
- Ok(ix) | Err(ix) => ix,
- };
- let mut end_ix = ix;
- while let Some(existing_range) = error_ranges.get(end_ix) {
- if existing_range.end < end {
- end_ix += 1;
- } else {
- break;
- }
- }
- error_ranges.splice(ix..end_ix, [range]);
- matches.advance();
- }
-
- outdent_positions.sort();
- for outdent_position in outdent_positions {
- // find the innermost indent range containing this outdent_position
- // set its end to the outdent position
- if let Some(range_to_truncate) = indent_ranges
- .iter_mut()
- .filter(|indent_range| indent_range.contains(&outdent_position))
- .last()
- {
- range_to_truncate.end = outdent_position;
- }
- }
-
- // Find the suggested indentation increases and decreased based on regexes.
- let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
- self.for_each_line(
- Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
- ..Point::new(row_range.end, 0),
- |row, line| {
- if config
- .decrease_indent_pattern
- .as_ref()
- .map_or(false, |regex| regex.is_match(line))
- {
- indent_change_rows.push((row, Ordering::Less));
- }
- if config
- .increase_indent_pattern
- .as_ref()
- .map_or(false, |regex| regex.is_match(line))
- {
- indent_change_rows.push((row + 1, Ordering::Greater));
- }
- },
- );
-
- let mut indent_changes = indent_change_rows.into_iter().peekable();
- let mut prev_row = if config.auto_indent_using_last_non_empty_line {
- prev_non_blank_row.unwrap_or(0)
- } else {
- row_range.start.saturating_sub(1)
- };
- let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
- Some(row_range.map(move |row| {
- let row_start = Point::new(row, self.indent_size_for_line(row).len);
-
- let mut indent_from_prev_row = false;
- let mut outdent_from_prev_row = false;
- let mut outdent_to_row = u32::MAX;
-
- while let Some((indent_row, delta)) = indent_changes.peek() {
- match indent_row.cmp(&row) {
- Ordering::Equal => match delta {
- Ordering::Less => outdent_from_prev_row = true,
- Ordering::Greater => indent_from_prev_row = true,
- _ => {}
- },
-
- Ordering::Greater => break,
- Ordering::Less => {}
- }
-
- indent_changes.next();
- }
-
- for range in &indent_ranges {
- if range.start.row >= row {
- break;
- }
- if range.start.row == prev_row && range.end > row_start {
- indent_from_prev_row = true;
- }
- if range.end > prev_row_start && range.end <= row_start {
- outdent_to_row = outdent_to_row.min(range.start.row);
- }
- }
-
- let within_error = error_ranges
- .iter()
- .any(|e| e.start.row < row && e.end > row_start);
-
- let suggestion = if outdent_to_row == prev_row
- || (outdent_from_prev_row && indent_from_prev_row)
- {
- Some(IndentSuggestion {
- basis_row: prev_row,
- delta: Ordering::Equal,
- within_error,
- })
- } else if indent_from_prev_row {
- Some(IndentSuggestion {
- basis_row: prev_row,
- delta: Ordering::Greater,
- within_error,
- })
- } else if outdent_to_row < prev_row {
- Some(IndentSuggestion {
- basis_row: outdent_to_row,
- delta: Ordering::Equal,
- within_error,
- })
- } else if outdent_from_prev_row {
- Some(IndentSuggestion {
- basis_row: prev_row,
- delta: Ordering::Less,
- within_error,
- })
- } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
- {
- Some(IndentSuggestion {
- basis_row: prev_row,
- delta: Ordering::Equal,
- within_error,
- })
- } else {
- None
- };
-
- prev_row = row;
- prev_row_start = row_start;
- suggestion
- }))
- }
-
- fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
- while row > 0 {
- row -= 1;
- if !self.is_line_blank(row) {
- return Some(row);
- }
- }
- None
- }
-
- pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
- let range = range.start.to_offset(self)..range.end.to_offset(self);
-
- let mut syntax = None;
- let mut diagnostic_endpoints = Vec::new();
- if language_aware {
- let captures = self.syntax.captures(range.clone(), &self.text, |grammar| {
- grammar.highlights_query.as_ref()
- });
- let highlight_maps = captures
- .grammars()
- .into_iter()
- .map(|grammar| grammar.highlight_map())
- .collect();
- syntax = Some((captures, highlight_maps));
- for entry in self.diagnostics_in_range::<_, usize>(range.clone(), false) {
- diagnostic_endpoints.push(DiagnosticEndpoint {
- offset: entry.range.start,
- is_start: true,
- severity: entry.diagnostic.severity,
- is_unnecessary: entry.diagnostic.is_unnecessary,
- });
- diagnostic_endpoints.push(DiagnosticEndpoint {
- offset: entry.range.end,
- is_start: false,
- severity: entry.diagnostic.severity,
- is_unnecessary: entry.diagnostic.is_unnecessary,
- });
- }
- diagnostic_endpoints
- .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
- }
-
- BufferChunks::new(self.text.as_rope(), range, syntax, diagnostic_endpoints)
- }
-
- pub fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
- let mut line = String::new();
- let mut row = range.start.row;
- for chunk in self
- .as_rope()
- .chunks_in_range(range.to_offset(self))
- .chain(["\n"])
- {
- for (newline_ix, text) in chunk.split('\n').enumerate() {
- if newline_ix > 0 {
- callback(row, &line);
- row += 1;
- line.clear();
- }
- line.push_str(text);
- }
- }
- }
-
- pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayerInfo> + '_ {
- self.syntax.layers_for_range(0..self.len(), &self.text)
- }
-
- pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayerInfo> {
- let offset = position.to_offset(self);
- self.syntax
- .layers_for_range(offset..offset, &self.text)
- .filter(|l| l.node().end_byte() > offset)
- .last()
- }
-
- pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
- self.syntax_layer_at(position)
- .map(|info| info.language)
- .or(self.language.as_ref())
- }
-
- pub fn settings_at<'a, D: ToOffset>(
- &self,
- position: D,
- cx: &'a AppContext,
- ) -> &'a LanguageSettings {
- language_settings(self.language_at(position), self.file.as_ref(), cx)
- }
-
- pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
- let offset = position.to_offset(self);
- let mut scope = None;
- let mut smallest_range: Option<Range<usize>> = None;
-
- // Use the layer that has the smallest node intersecting the given point.
- for layer in self.syntax.layers_for_range(offset..offset, &self.text) {
- let mut cursor = layer.node().walk();
-
- let mut range = None;
- loop {
- let child_range = cursor.node().byte_range();
- if !child_range.to_inclusive().contains(&offset) {
- break;
- }
-
- range = Some(child_range);
- if cursor.goto_first_child_for_byte(offset).is_none() {
- break;
- }
- }
-
- if let Some(range) = range {
- if smallest_range
- .as_ref()
- .map_or(true, |smallest_range| range.len() < smallest_range.len())
- {
- smallest_range = Some(range);
- scope = Some(LanguageScope {
- language: layer.language.clone(),
- override_id: layer.override_id(offset, &self.text),
- });
- }
- }
- }
-
- scope.or_else(|| {
- self.language.clone().map(|language| LanguageScope {
- language,
- override_id: None,
- })
- })
- }
-
- pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
- let mut start = start.to_offset(self);
- let mut end = start;
- let mut next_chars = self.chars_at(start).peekable();
- let mut prev_chars = self.reversed_chars_at(start).peekable();
-
- let scope = self.language_scope_at(start);
- let kind = |c| char_kind(&scope, c);
- let word_kind = cmp::max(
- prev_chars.peek().copied().map(kind),
- next_chars.peek().copied().map(kind),
- );
-
- for ch in prev_chars {
- if Some(kind(ch)) == word_kind && ch != '\n' {
- start -= ch.len_utf8();
- } else {
- break;
- }
- }
-
- for ch in next_chars {
- if Some(kind(ch)) == word_kind && ch != '\n' {
- end += ch.len_utf8();
- } else {
- break;
- }
- }
-
- (start..end, word_kind)
- }
-
- pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
- let range = range.start.to_offset(self)..range.end.to_offset(self);
- let mut result: Option<Range<usize>> = None;
- 'outer: for layer in self.syntax.layers_for_range(range.clone(), &self.text) {
- let mut cursor = layer.node().walk();
-
- // Descend to the first leaf that touches the start of the range,
- // and if the range is non-empty, extends beyond the start.
- while cursor.goto_first_child_for_byte(range.start).is_some() {
- if !range.is_empty() && cursor.node().end_byte() == range.start {
- cursor.goto_next_sibling();
- }
- }
-
- // Ascend to the smallest ancestor that strictly contains the range.
- loop {
- let node_range = cursor.node().byte_range();
- if node_range.start <= range.start
- && node_range.end >= range.end
- && node_range.len() > range.len()
- {
- break;
- }
- if !cursor.goto_parent() {
- continue 'outer;
- }
- }
-
- let left_node = cursor.node();
- let mut layer_result = left_node.byte_range();
-
- // For an empty range, try to find another node immediately to the right of the range.
- if left_node.end_byte() == range.start {
- let mut right_node = None;
- while !cursor.goto_next_sibling() {
- if !cursor.goto_parent() {
- break;
- }
- }
-
- while cursor.node().start_byte() == range.start {
- right_node = Some(cursor.node());
- if !cursor.goto_first_child() {
- break;
- }
- }
-
- // If there is a candidate node on both sides of the (empty) range, then
- // decide between the two by favoring a named node over an anonymous token.
- // If both nodes are the same in that regard, favor the right one.
- if let Some(right_node) = right_node {
- if right_node.is_named() || !left_node.is_named() {
- layer_result = right_node.byte_range();
- }
- }
- }
-
- if let Some(previous_result) = &result {
- if previous_result.len() < layer_result.len() {
- continue;
- }
- }
- result = Some(layer_result);
- }
-
- result
- }
-
- pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
- self.outline_items_containing(0..self.len(), true, theme)
- .map(Outline::new)
- }
-
- pub fn symbols_containing<T: ToOffset>(
- &self,
- position: T,
- theme: Option<&SyntaxTheme>,
- ) -> Option<Vec<OutlineItem<Anchor>>> {
- let position = position.to_offset(self);
- let mut items = self.outline_items_containing(
- position.saturating_sub(1)..self.len().min(position + 1),
- false,
- theme,
- )?;
- let mut prev_depth = None;
- items.retain(|item| {
- let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
- prev_depth = Some(item.depth);
- result
- });
- Some(items)
- }
-
- fn outline_items_containing(
- &self,
- range: Range<usize>,
- include_extra_context: bool,
- theme: Option<&SyntaxTheme>,
- ) -> Option<Vec<OutlineItem<Anchor>>> {
- let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
- grammar.outline_config.as_ref().map(|c| &c.query)
- });
- let configs = matches
- .grammars()
- .iter()
- .map(|g| g.outline_config.as_ref().unwrap())
- .collect::<Vec<_>>();
-
- let mut stack = Vec::<Range<usize>>::new();
- let mut items = Vec::new();
- while let Some(mat) = matches.peek() {
- let config = &configs[mat.grammar_index];
- let item_node = mat.captures.iter().find_map(|cap| {
- if cap.index == config.item_capture_ix {
- Some(cap.node)
- } else {
- None
- }
- })?;
-
- let item_range = item_node.byte_range();
- if item_range.end < range.start || item_range.start > range.end {
- matches.advance();
- continue;
- }
-
- let mut buffer_ranges = Vec::new();
- for capture in mat.captures {
- let node_is_name;
- if capture.index == config.name_capture_ix {
- node_is_name = true;
- } else if Some(capture.index) == config.context_capture_ix
- || (Some(capture.index) == config.extra_context_capture_ix
- && include_extra_context)
- {
- node_is_name = false;
- } else {
- continue;
- }
-
- let mut range = capture.node.start_byte()..capture.node.end_byte();
- let start = capture.node.start_position();
- if capture.node.end_position().row > start.row {
- range.end =
- range.start + self.line_len(start.row as u32) as usize - start.column;
- }
-
- buffer_ranges.push((range, node_is_name));
- }
-
- if buffer_ranges.is_empty() {
- continue;
- }
-
- let mut text = String::new();
- let mut highlight_ranges = Vec::new();
- let mut name_ranges = Vec::new();
- let mut chunks = self.chunks(
- buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
- true,
- );
- let mut last_buffer_range_end = 0;
- for (buffer_range, is_name) in buffer_ranges {
- if !text.is_empty() && buffer_range.start > last_buffer_range_end {
- text.push(' ');
- }
- last_buffer_range_end = buffer_range.end;
- if is_name {
- let mut start = text.len();
- let end = start + buffer_range.len();
-
- // When multiple names are captured, then the matcheable text
- // includes the whitespace in between the names.
- if !name_ranges.is_empty() {
- start -= 1;
- }
-
- name_ranges.push(start..end);
- }
-
- let mut offset = buffer_range.start;
- chunks.seek(offset);
- for mut chunk in chunks.by_ref() {
- if chunk.text.len() > buffer_range.end - offset {
- chunk.text = &chunk.text[0..(buffer_range.end - offset)];
- offset = buffer_range.end;
- } else {
- offset += chunk.text.len();
- }
- let style = chunk
- .syntax_highlight_id
- .zip(theme)
- .and_then(|(highlight, theme)| highlight.style(theme));
- if let Some(style) = style {
- let start = text.len();
- let end = start + chunk.text.len();
- highlight_ranges.push((start..end, style));
- }
- text.push_str(chunk.text);
- if offset >= buffer_range.end {
- break;
- }
- }
- }
-
- matches.advance();
- while stack.last().map_or(false, |prev_range| {
- prev_range.start > item_range.start || prev_range.end < item_range.end
- }) {
- stack.pop();
- }
- stack.push(item_range.clone());
-
- items.push(OutlineItem {
- depth: stack.len() - 1,
- range: self.anchor_after(item_range.start)..self.anchor_before(item_range.end),
- text,
- highlight_ranges,
- name_ranges,
- })
- }
- Some(items)
- }
-
- pub fn matches(
- &self,
- range: Range<usize>,
- query: fn(&Grammar) -> Option<&tree_sitter::Query>,
- ) -> SyntaxMapMatches {
- self.syntax.matches(range, self, query)
- }
-
- /// Returns bracket range pairs overlapping or adjacent to `range`
- pub fn bracket_ranges<'a, T: ToOffset>(
- &'a self,
- range: Range<T>,
- ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + 'a {
- // Find bracket pairs that *inclusively* contain the given range.
- let range = range.start.to_offset(self).saturating_sub(1)
- ..self.len().min(range.end.to_offset(self) + 1);
-
- let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
- grammar.brackets_config.as_ref().map(|c| &c.query)
- });
- let configs = matches
- .grammars()
- .iter()
- .map(|grammar| grammar.brackets_config.as_ref().unwrap())
- .collect::<Vec<_>>();
-
- iter::from_fn(move || {
- while let Some(mat) = matches.peek() {
- let mut open = None;
- let mut close = None;
- let config = &configs[mat.grammar_index];
- for capture in mat.captures {
- if capture.index == config.open_capture_ix {
- open = Some(capture.node.byte_range());
- } else if capture.index == config.close_capture_ix {
- close = Some(capture.node.byte_range());
- }
- }
-
- matches.advance();
-
- let Some((open, close)) = open.zip(close) else {
- continue;
- };
-
- let bracket_range = open.start..=close.end;
- if !bracket_range.overlaps(&range) {
- continue;
- }
-
- return Some((open, close));
- }
- None
- })
- }
-
- #[allow(clippy::type_complexity)]
- pub fn remote_selections_in_range(
- &self,
- range: Range<Anchor>,
- ) -> impl Iterator<
- Item = (
- ReplicaId,
- bool,
- CursorShape,
- impl Iterator<Item = &Selection<Anchor>> + '_,
- ),
- > + '_ {
- self.remote_selections
- .iter()
- .filter(|(replica_id, set)| {
- **replica_id != self.text.replica_id() && !set.selections.is_empty()
- })
- .map(move |(replica_id, set)| {
- let start_ix = match set.selections.binary_search_by(|probe| {
- probe.end.cmp(&range.start, self).then(Ordering::Greater)
- }) {
- Ok(ix) | Err(ix) => ix,
- };
- let end_ix = match set.selections.binary_search_by(|probe| {
- probe.start.cmp(&range.end, self).then(Ordering::Less)
- }) {
- Ok(ix) | Err(ix) => ix,
- };
-
- (
- *replica_id,
- set.line_mode,
- set.cursor_shape,
- set.selections[start_ix..end_ix].iter(),
- )
- })
- }
-
- pub fn git_diff_hunks_in_row_range<'a>(
- &'a self,
- range: Range<u32>,
- ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
- self.git_diff.hunks_in_row_range(range, self)
- }
-
- pub fn git_diff_hunks_intersecting_range<'a>(
- &'a self,
- range: Range<Anchor>,
- ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
- self.git_diff.hunks_intersecting_range(range, self)
- }
-
- pub fn git_diff_hunks_intersecting_range_rev<'a>(
- &'a self,
- range: Range<Anchor>,
- ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
- self.git_diff.hunks_intersecting_range_rev(range, self)
- }
-
- pub fn diagnostics_in_range<'a, T, O>(
- &'a self,
- search_range: Range<T>,
- reversed: bool,
- ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
- where
- T: 'a + Clone + ToOffset,
- O: 'a + FromAnchor + Ord,
- {
- let mut iterators: Vec<_> = self
- .diagnostics
- .iter()
- .map(|(_, collection)| {
- collection
- .range::<T, O>(search_range.clone(), self, true, reversed)
- .peekable()
- })
- .collect();
-
- std::iter::from_fn(move || {
- let (next_ix, _) = iterators
- .iter_mut()
- .enumerate()
- .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
- .min_by(|(_, a), (_, b)| a.range.start.cmp(&b.range.start))?;
- iterators[next_ix].next()
- })
- }
-
- pub fn diagnostic_groups(
- &self,
- language_server_id: Option<LanguageServerId>,
- ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
- let mut groups = Vec::new();
-
- if let Some(language_server_id) = language_server_id {
- if let Ok(ix) = self
- .diagnostics
- .binary_search_by_key(&language_server_id, |e| e.0)
- {
- self.diagnostics[ix]
- .1
- .groups(language_server_id, &mut groups, self);
- }
- } else {
- for (language_server_id, diagnostics) in self.diagnostics.iter() {
- diagnostics.groups(*language_server_id, &mut groups, self);
- }
- }
-
- groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
- let a_start = &group_a.entries[group_a.primary_ix].range.start;
- let b_start = &group_b.entries[group_b.primary_ix].range.start;
- a_start.cmp(b_start, self).then_with(|| id_a.cmp(&id_b))
- });
-
- groups
- }
-
- pub fn diagnostic_group<'a, O>(
- &'a self,
- group_id: usize,
- ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
- where
- O: 'a + FromAnchor,
- {
- self.diagnostics
- .iter()
- .flat_map(move |(_, set)| set.group(group_id, self))
- }
-
- pub fn diagnostics_update_count(&self) -> usize {
- self.diagnostics_update_count
- }
-
- pub fn parse_count(&self) -> usize {
- self.parse_count
- }
-
- pub fn selections_update_count(&self) -> usize {
- self.selections_update_count
- }
-
- pub fn file(&self) -> Option<&Arc<dyn File>> {
- self.file.as_ref()
- }
-
- pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
- if let Some(file) = self.file() {
- if file.path().file_name().is_none() || include_root {
- Some(file.full_path(cx))
- } else {
- Some(file.path().to_path_buf())
- }
- } else {
- None
- }
- }
-
- pub fn file_update_count(&self) -> usize {
- self.file_update_count
- }
-
- pub fn git_diff_update_count(&self) -> usize {
- self.git_diff_update_count
- }
-}
-
-fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
- indent_size_for_text(text.chars_at(Point::new(row, 0)))
-}
-
-pub fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
- let mut result = IndentSize::spaces(0);
- for c in text {
- let kind = match c {
- ' ' => IndentKind::Space,
- '\t' => IndentKind::Tab,
- _ => break,
- };
- if result.len == 0 {
- result.kind = kind;
- }
- result.len += 1;
- }
- result
-}
-
-impl Clone for BufferSnapshot {
- fn clone(&self) -> Self {
- Self {
- text: self.text.clone(),
- git_diff: self.git_diff.clone(),
- syntax: self.syntax.clone(),
- file: self.file.clone(),
- remote_selections: self.remote_selections.clone(),
- diagnostics: self.diagnostics.clone(),
- selections_update_count: self.selections_update_count,
- diagnostics_update_count: self.diagnostics_update_count,
- file_update_count: self.file_update_count,
- git_diff_update_count: self.git_diff_update_count,
- language: self.language.clone(),
- parse_count: self.parse_count,
- }
- }
-}
-
-impl Deref for BufferSnapshot {
- type Target = text::BufferSnapshot;
-
- fn deref(&self) -> &Self::Target {
- &self.text
- }
-}
-
-unsafe impl<'a> Send for BufferChunks<'a> {}
-
-impl<'a> BufferChunks<'a> {
- pub(crate) fn new(
- text: &'a Rope,
- range: Range<usize>,
- syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
- diagnostic_endpoints: Vec<DiagnosticEndpoint>,
- ) -> Self {
- let mut highlights = None;
- if let Some((captures, highlight_maps)) = syntax {
- highlights = Some(BufferChunkHighlights {
- captures,
- next_capture: None,
- stack: Default::default(),
- highlight_maps,
- })
- }
-
- let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
- let chunks = text.chunks_in_range(range.clone());
-
- BufferChunks {
- range,
- chunks,
- diagnostic_endpoints,
- error_depth: 0,
- warning_depth: 0,
- information_depth: 0,
- hint_depth: 0,
- unnecessary_depth: 0,
- highlights,
- }
- }
-
- pub fn seek(&mut self, offset: usize) {
- self.range.start = offset;
- self.chunks.seek(self.range.start);
- if let Some(highlights) = self.highlights.as_mut() {
- highlights
- .stack
- .retain(|(end_offset, _)| *end_offset > offset);
- if let Some(capture) = &highlights.next_capture {
- if offset >= capture.node.start_byte() {
- let next_capture_end = capture.node.end_byte();
- if offset < next_capture_end {
- highlights.stack.push((
- next_capture_end,
- highlights.highlight_maps[capture.grammar_index].get(capture.index),
- ));
- }
- highlights.next_capture.take();
- }
- }
- highlights.captures.set_byte_range(self.range.clone());
- }
- }
-
- pub fn offset(&self) -> usize {
- self.range.start
- }
-
- fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
- let depth = match endpoint.severity {
- DiagnosticSeverity::ERROR => &mut self.error_depth,
- DiagnosticSeverity::WARNING => &mut self.warning_depth,
- DiagnosticSeverity::INFORMATION => &mut self.information_depth,
- DiagnosticSeverity::HINT => &mut self.hint_depth,
- _ => return,
- };
- if endpoint.is_start {
- *depth += 1;
- } else {
- *depth -= 1;
- }
-
- if endpoint.is_unnecessary {
- if endpoint.is_start {
- self.unnecessary_depth += 1;
- } else {
- self.unnecessary_depth -= 1;
- }
- }
- }
-
- fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
- if self.error_depth > 0 {
- Some(DiagnosticSeverity::ERROR)
- } else if self.warning_depth > 0 {
- Some(DiagnosticSeverity::WARNING)
- } else if self.information_depth > 0 {
- Some(DiagnosticSeverity::INFORMATION)
- } else if self.hint_depth > 0 {
- Some(DiagnosticSeverity::HINT)
- } else {
- None
- }
- }
-
- fn current_code_is_unnecessary(&self) -> bool {
- self.unnecessary_depth > 0
- }
-}
-
-impl<'a> Iterator for BufferChunks<'a> {
- type Item = Chunk<'a>;
-
- fn next(&mut self) -> Option<Self::Item> {
- let mut next_capture_start = usize::MAX;
- let mut next_diagnostic_endpoint = usize::MAX;
-
- if let Some(highlights) = self.highlights.as_mut() {
- while let Some((parent_capture_end, _)) = highlights.stack.last() {
- if *parent_capture_end <= self.range.start {
- highlights.stack.pop();
- } else {
- break;
- }
- }
-
- if highlights.next_capture.is_none() {
- highlights.next_capture = highlights.captures.next();
- }
-
- while let Some(capture) = highlights.next_capture.as_ref() {
- if self.range.start < capture.node.start_byte() {
- next_capture_start = capture.node.start_byte();
- break;
- } else {
- let highlight_id =
- highlights.highlight_maps[capture.grammar_index].get(capture.index);
- highlights
- .stack
- .push((capture.node.end_byte(), highlight_id));
- highlights.next_capture = highlights.captures.next();
- }
- }
- }
-
- while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
- if endpoint.offset <= self.range.start {
- self.update_diagnostic_depths(endpoint);
- self.diagnostic_endpoints.next();
- } else {
- next_diagnostic_endpoint = endpoint.offset;
- break;
- }
- }
-
- if let Some(chunk) = self.chunks.peek() {
- let chunk_start = self.range.start;
- let mut chunk_end = (self.chunks.offset() + chunk.len())
- .min(next_capture_start)
- .min(next_diagnostic_endpoint);
- let mut highlight_id = None;
- if let Some(highlights) = self.highlights.as_ref() {
- if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
- chunk_end = chunk_end.min(*parent_capture_end);
- highlight_id = Some(*parent_highlight_id);
- }
- }
-
- let slice =
- &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
- self.range.start = chunk_end;
- if self.range.start == self.chunks.offset() + chunk.len() {
- self.chunks.next().unwrap();
- }
-
- Some(Chunk {
- text: slice,
- syntax_highlight_id: highlight_id,
- diagnostic_severity: self.current_diagnostic_severity(),
- is_unnecessary: self.current_code_is_unnecessary(),
- ..Default::default()
- })
- } else {
- None
- }
- }
-}
-
-impl operation_queue::Operation for Operation {
- fn lamport_timestamp(&self) -> clock::Lamport {
- match self {
- Operation::Buffer(_) => {
- unreachable!("buffer operations should never be deferred at this layer")
- }
- Operation::UpdateDiagnostics {
- lamport_timestamp, ..
- }
- | Operation::UpdateSelections {
- lamport_timestamp, ..
- }
- | Operation::UpdateCompletionTriggers {
- lamport_timestamp, ..
- } => *lamport_timestamp,
- }
- }
-}
-
-impl Default for Diagnostic {
- fn default() -> Self {
- Self {
- source: Default::default(),
- code: None,
- severity: DiagnosticSeverity::ERROR,
- message: Default::default(),
- group_id: 0,
- is_primary: false,
- is_valid: true,
- is_disk_based: false,
- is_unnecessary: false,
- }
- }
-}
-
-impl IndentSize {
- pub fn spaces(len: u32) -> Self {
- Self {
- len,
- kind: IndentKind::Space,
- }
- }
-
- pub fn tab() -> Self {
- Self {
- len: 1,
- kind: IndentKind::Tab,
- }
- }
-
- pub fn chars(&self) -> impl Iterator<Item = char> {
- iter::repeat(self.char()).take(self.len as usize)
- }
-
- pub fn char(&self) -> char {
- match self.kind {
- IndentKind::Space => ' ',
- IndentKind::Tab => '\t',
- }
- }
-
- pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
- match direction {
- Ordering::Less => {
- if self.kind == size.kind && self.len >= size.len {
- self.len -= size.len;
- }
- }
- Ordering::Equal => {}
- Ordering::Greater => {
- if self.len == 0 {
- self = size;
- } else if self.kind == size.kind {
- self.len += size.len;
- }
- }
- }
- self
- }
-}
-
-impl Completion {
- pub fn sort_key(&self) -> (usize, &str) {
- let kind_key = match self.lsp_completion.kind {
- Some(lsp::CompletionItemKind::VARIABLE) => 0,
- _ => 1,
- };
- (kind_key, &self.label.text[self.label.filter_range.clone()])
- }
-
- pub fn is_snippet(&self) -> bool {
- self.lsp_completion.insert_text_format == Some(lsp::InsertTextFormat::SNIPPET)
- }
-}
-
-pub fn contiguous_ranges(
- values: impl Iterator<Item = u32>,
- max_len: usize,
-) -> impl Iterator<Item = Range<u32>> {
- let mut values = values;
- let mut current_range: Option<Range<u32>> = None;
- std::iter::from_fn(move || loop {
- if let Some(value) = values.next() {
- if let Some(range) = &mut current_range {
- if value == range.end && range.len() < max_len {
- range.end += 1;
- continue;
- }
- }
-
- let prev_range = current_range.clone();
- current_range = Some(value..(value + 1));
- if prev_range.is_some() {
- return prev_range;
- }
- } else {
- return current_range.take();
- }
- })
-}
-
-pub fn char_kind(scope: &Option<LanguageScope>, c: char) -> CharKind {
- if c.is_whitespace() {
- return CharKind::Whitespace;
- } else if c.is_alphanumeric() || c == '_' {
- return CharKind::Word;
- }
-
- if let Some(scope) = scope {
- if let Some(characters) = scope.word_characters() {
- if characters.contains(&c) {
- return CharKind::Word;
- }
- }
- }
-
- CharKind::Punctuation
-}
-
-/// Find all of the ranges of whitespace that occur at the ends of lines
-/// in the given rope.
-///
-/// This could also be done with a regex search, but this implementation
-/// avoids copying text.
-pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
- let mut ranges = Vec::new();
-
- let mut offset = 0;
- let mut prev_chunk_trailing_whitespace_range = 0..0;
- for chunk in rope.chunks() {
- let mut prev_line_trailing_whitespace_range = 0..0;
- for (i, line) in chunk.split('\n').enumerate() {
- let line_end_offset = offset + line.len();
- let trimmed_line_len = line.trim_end_matches(|c| matches!(c, ' ' | '\t')).len();
- let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
-
- if i == 0 && trimmed_line_len == 0 {
- trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
- }
- if !prev_line_trailing_whitespace_range.is_empty() {
- ranges.push(prev_line_trailing_whitespace_range);
- }
-
- offset = line_end_offset + 1;
- prev_line_trailing_whitespace_range = trailing_whitespace_range;
- }
-
- offset -= 1;
- prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
- }
-
- if !prev_chunk_trailing_whitespace_range.is_empty() {
- ranges.push(prev_chunk_trailing_whitespace_range);
- }
-
- ranges
-}
@@ -1,2493 +0,0 @@
-use super::*;
-use crate::language_settings::{
- AllLanguageSettings, AllLanguageSettingsContent, LanguageSettingsContent,
-};
-use crate::Buffer;
-use clock::ReplicaId;
-use collections::BTreeMap;
-use gpui::{AppContext, Model};
-use gpui::{Context, TestAppContext};
-use indoc::indoc;
-use proto::deserialize_operation;
-use rand::prelude::*;
-use regex::RegexBuilder;
-use settings::SettingsStore;
-use std::{
- env,
- ops::Range,
- time::{Duration, Instant},
-};
-use text::network::Network;
-use text::LineEnding;
-use text::{Point, ToPoint};
-use unindent::Unindent as _;
-use util::{assert_set_eq, post_inc, test::marked_text_ranges, RandomCharIter};
-
-lazy_static! {
- static ref TRAILING_WHITESPACE_REGEX: Regex = RegexBuilder::new("[ \t]+$")
- .multi_line(true)
- .build()
- .unwrap();
-}
-
-#[cfg(test)]
-#[ctor::ctor]
-fn init_logger() {
- if std::env::var("RUST_LOG").is_ok() {
- env_logger::init();
- }
-}
-
-#[gpui::test]
-fn test_line_endings(cx: &mut gpui::AppContext) {
- init_settings(cx, |_| {});
-
- cx.new_model(|cx| {
- let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), "one\r\ntwo\rthree")
- .with_language(Arc::new(rust_lang()), cx);
- assert_eq!(buffer.text(), "one\ntwo\nthree");
- assert_eq!(buffer.line_ending(), LineEnding::Windows);
-
- buffer.check_invariants();
- buffer.edit(
- [(buffer.len()..buffer.len(), "\r\nfour")],
- Some(AutoindentMode::EachLine),
- cx,
- );
- buffer.edit([(0..0, "zero\r\n")], None, cx);
- assert_eq!(buffer.text(), "zero\none\ntwo\nthree\nfour");
- assert_eq!(buffer.line_ending(), LineEnding::Windows);
- buffer.check_invariants();
-
- buffer
- });
-}
-
-#[gpui::test]
-fn test_select_language() {
- let registry = Arc::new(LanguageRegistry::test());
- registry.add(Arc::new(Language::new(
- LanguageConfig {
- name: "Rust".into(),
- path_suffixes: vec!["rs".to_string()],
- ..Default::default()
- },
- Some(tree_sitter_rust::language()),
- )));
- registry.add(Arc::new(Language::new(
- LanguageConfig {
- name: "Make".into(),
- path_suffixes: vec!["Makefile".to_string(), "mk".to_string()],
- ..Default::default()
- },
- Some(tree_sitter_rust::language()),
- )));
-
- // matching file extension
- assert_eq!(
- registry
- .language_for_file("zed/lib.rs", None)
- .now_or_never()
- .and_then(|l| Some(l.ok()?.name())),
- Some("Rust".into())
- );
- assert_eq!(
- registry
- .language_for_file("zed/lib.mk", None)
- .now_or_never()
- .and_then(|l| Some(l.ok()?.name())),
- Some("Make".into())
- );
-
- // matching filename
- assert_eq!(
- registry
- .language_for_file("zed/Makefile", None)
- .now_or_never()
- .and_then(|l| Some(l.ok()?.name())),
- Some("Make".into())
- );
-
- // matching suffix that is not the full file extension or filename
- assert_eq!(
- registry
- .language_for_file("zed/cars", None)
- .now_or_never()
- .and_then(|l| Some(l.ok()?.name())),
- None
- );
- assert_eq!(
- registry
- .language_for_file("zed/a.cars", None)
- .now_or_never()
- .and_then(|l| Some(l.ok()?.name())),
- None
- );
- assert_eq!(
- registry
- .language_for_file("zed/sumk", None)
- .now_or_never()
- .and_then(|l| Some(l.ok()?.name())),
- None
- );
-}
-
-#[gpui::test]
-fn test_edit_events(cx: &mut gpui::AppContext) {
- let mut now = Instant::now();
- let buffer_1_events = Arc::new(Mutex::new(Vec::new()));
- let buffer_2_events = Arc::new(Mutex::new(Vec::new()));
-
- let buffer1 = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "abcdef"));
- let buffer2 = cx.new_model(|cx| Buffer::new(1, cx.entity_id().as_u64(), "abcdef"));
- let buffer1_ops = Arc::new(Mutex::new(Vec::new()));
- buffer1.update(cx, {
- let buffer1_ops = buffer1_ops.clone();
- |buffer, cx| {
- let buffer_1_events = buffer_1_events.clone();
- cx.subscribe(&buffer1, move |_, _, event, _| match event.clone() {
- Event::Operation(op) => buffer1_ops.lock().push(op),
- event => buffer_1_events.lock().push(event),
- })
- .detach();
- let buffer_2_events = buffer_2_events.clone();
- cx.subscribe(&buffer2, move |_, _, event, _| {
- buffer_2_events.lock().push(event.clone())
- })
- .detach();
-
- // An edit emits an edited event, followed by a dirty changed event,
- // since the buffer was previously in a clean state.
- buffer.edit([(2..4, "XYZ")], None, cx);
-
- // An empty transaction does not emit any events.
- buffer.start_transaction();
- buffer.end_transaction(cx);
-
- // A transaction containing two edits emits one edited event.
- now += Duration::from_secs(1);
- buffer.start_transaction_at(now);
- buffer.edit([(5..5, "u")], None, cx);
- buffer.edit([(6..6, "w")], None, cx);
- buffer.end_transaction_at(now, cx);
-
- // Undoing a transaction emits one edited event.
- buffer.undo(cx);
- }
- });
-
- // Incorporating a set of remote ops emits a single edited event,
- // followed by a dirty changed event.
- buffer2.update(cx, |buffer, cx| {
- buffer.apply_ops(buffer1_ops.lock().drain(..), cx).unwrap();
- });
- assert_eq!(
- mem::take(&mut *buffer_1_events.lock()),
- vec![
- Event::Edited,
- Event::DirtyChanged,
- Event::Edited,
- Event::Edited,
- ]
- );
- assert_eq!(
- mem::take(&mut *buffer_2_events.lock()),
- vec![Event::Edited, Event::DirtyChanged]
- );
-
- buffer1.update(cx, |buffer, cx| {
- // Undoing the first transaction emits edited event, followed by a
- // dirty changed event, since the buffer is again in a clean state.
- buffer.undo(cx);
- });
- // Incorporating the remote ops again emits a single edited event,
- // followed by a dirty changed event.
- buffer2.update(cx, |buffer, cx| {
- buffer.apply_ops(buffer1_ops.lock().drain(..), cx).unwrap();
- });
- assert_eq!(
- mem::take(&mut *buffer_1_events.lock()),
- vec![Event::Edited, Event::DirtyChanged,]
- );
- assert_eq!(
- mem::take(&mut *buffer_2_events.lock()),
- vec![Event::Edited, Event::DirtyChanged]
- );
-}
-
-#[gpui::test]
-async fn test_apply_diff(cx: &mut TestAppContext) {
- let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
- let buffer = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), text));
- let anchor = buffer.update(cx, |buffer, _| buffer.anchor_before(Point::new(3, 3)));
-
- let text = "a\nccc\ndddd\nffffff\n";
- let diff = buffer.update(cx, |b, cx| b.diff(text.into(), cx)).await;
- buffer.update(cx, |buffer, cx| {
- buffer.apply_diff(diff, cx).unwrap();
- assert_eq!(buffer.text(), text);
- assert_eq!(anchor.to_point(buffer), Point::new(2, 3));
- });
-
- let text = "a\n1\n\nccc\ndd2dd\nffffff\n";
- let diff = buffer.update(cx, |b, cx| b.diff(text.into(), cx)).await;
- buffer.update(cx, |buffer, cx| {
- buffer.apply_diff(diff, cx).unwrap();
- assert_eq!(buffer.text(), text);
- assert_eq!(anchor.to_point(buffer), Point::new(4, 4));
- });
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_normalize_whitespace(cx: &mut gpui::TestAppContext) {
- let text = [
- "zero", //
- "one ", // 2 trailing spaces
- "two", //
- "three ", // 3 trailing spaces
- "four", //
- "five ", // 4 trailing spaces
- ]
- .join("\n");
-
- let buffer = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), text));
-
- // Spawn a task to format the buffer's whitespace.
- // Pause so that the foratting task starts running.
- let format = buffer.update(cx, |buffer, cx| buffer.remove_trailing_whitespace(cx));
- smol::future::yield_now().await;
-
- // Edit the buffer while the normalization task is running.
- let version_before_edit = buffer.update(cx, |buffer, _| buffer.version());
- buffer.update(cx, |buffer, cx| {
- buffer.edit(
- [
- (Point::new(0, 1)..Point::new(0, 1), "EE"),
- (Point::new(3, 5)..Point::new(3, 5), "EEE"),
- ],
- None,
- cx,
- );
- });
-
- let format_diff = format.await;
- buffer.update(cx, |buffer, cx| {
- let version_before_format = format_diff.base_version.clone();
- buffer.apply_diff(format_diff, cx);
-
- // The outcome depends on the order of concurrent taks.
- //
- // If the edit occurred while searching for trailing whitespace ranges,
- // then the trailing whitespace region touched by the edit is left intact.
- if version_before_format == version_before_edit {
- assert_eq!(
- buffer.text(),
- [
- "zEEero", //
- "one", //
- "two", //
- "threeEEE ", //
- "four", //
- "five", //
- ]
- .join("\n")
- );
- }
- // Otherwise, all trailing whitespace is removed.
- else {
- assert_eq!(
- buffer.text(),
- [
- "zEEero", //
- "one", //
- "two", //
- "threeEEE", //
- "four", //
- "five", //
- ]
- .join("\n")
- );
- }
- });
-}
-
-#[gpui::test]
-async fn test_reparse(cx: &mut gpui::TestAppContext) {
- let text = "fn a() {}";
- let buffer = cx.new_model(|cx| {
- Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx)
- });
-
- // Wait for the initial text to parse
- cx.executor().run_until_parked();
- assert!(!buffer.update(cx, |buffer, _| buffer.is_parsing()));
- assert_eq!(
- get_tree_sexp(&buffer, cx),
- concat!(
- "(source_file (function_item name: (identifier) ",
- "parameters: (parameters) ",
- "body: (block)))"
- )
- );
-
- buffer.update(cx, |buffer, _| {
- buffer.set_sync_parse_timeout(Duration::ZERO)
- });
-
- // Perform some edits (add parameter and variable reference)
- // Parsing doesn't begin until the transaction is complete
- buffer.update(cx, |buf, cx| {
- buf.start_transaction();
-
- let offset = buf.text().find(')').unwrap();
- buf.edit([(offset..offset, "b: C")], None, cx);
- assert!(!buf.is_parsing());
-
- let offset = buf.text().find('}').unwrap();
- buf.edit([(offset..offset, " d; ")], None, cx);
- assert!(!buf.is_parsing());
-
- buf.end_transaction(cx);
- assert_eq!(buf.text(), "fn a(b: C) { d; }");
- assert!(buf.is_parsing());
- });
- cx.executor().run_until_parked();
- assert!(!buffer.update(cx, |buffer, _| buffer.is_parsing()));
- assert_eq!(
- get_tree_sexp(&buffer, cx),
- concat!(
- "(source_file (function_item name: (identifier) ",
- "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
- "body: (block (expression_statement (identifier)))))"
- )
- );
-
- // Perform a series of edits without waiting for the current parse to complete:
- // * turn identifier into a field expression
- // * turn field expression into a method call
- // * add a turbofish to the method call
- buffer.update(cx, |buf, cx| {
- let offset = buf.text().find(';').unwrap();
- buf.edit([(offset..offset, ".e")], None, cx);
- assert_eq!(buf.text(), "fn a(b: C) { d.e; }");
- assert!(buf.is_parsing());
- });
- buffer.update(cx, |buf, cx| {
- let offset = buf.text().find(';').unwrap();
- buf.edit([(offset..offset, "(f)")], None, cx);
- assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }");
- assert!(buf.is_parsing());
- });
- buffer.update(cx, |buf, cx| {
- let offset = buf.text().find("(f)").unwrap();
- buf.edit([(offset..offset, "::<G>")], None, cx);
- assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
- assert!(buf.is_parsing());
- });
- cx.executor().run_until_parked();
- assert_eq!(
- get_tree_sexp(&buffer, cx),
- concat!(
- "(source_file (function_item name: (identifier) ",
- "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
- "body: (block (expression_statement (call_expression ",
- "function: (generic_function ",
- "function: (field_expression value: (identifier) field: (field_identifier)) ",
- "type_arguments: (type_arguments (type_identifier))) ",
- "arguments: (arguments (identifier)))))))",
- )
- );
-
- buffer.update(cx, |buf, cx| {
- buf.undo(cx);
- buf.undo(cx);
- buf.undo(cx);
- buf.undo(cx);
- assert_eq!(buf.text(), "fn a() {}");
- assert!(buf.is_parsing());
- });
-
- cx.executor().run_until_parked();
- assert_eq!(
- get_tree_sexp(&buffer, cx),
- concat!(
- "(source_file (function_item name: (identifier) ",
- "parameters: (parameters) ",
- "body: (block)))"
- )
- );
-
- buffer.update(cx, |buf, cx| {
- buf.redo(cx);
- buf.redo(cx);
- buf.redo(cx);
- buf.redo(cx);
- assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
- assert!(buf.is_parsing());
- });
- cx.executor().run_until_parked();
- assert_eq!(
- get_tree_sexp(&buffer, cx),
- concat!(
- "(source_file (function_item name: (identifier) ",
- "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
- "body: (block (expression_statement (call_expression ",
- "function: (generic_function ",
- "function: (field_expression value: (identifier) field: (field_identifier)) ",
- "type_arguments: (type_arguments (type_identifier))) ",
- "arguments: (arguments (identifier)))))))",
- )
- );
-}
-
-#[gpui::test]
-async fn test_resetting_language(cx: &mut gpui::TestAppContext) {
- let buffer = cx.new_model(|cx| {
- let mut buffer =
- Buffer::new(0, cx.entity_id().as_u64(), "{}").with_language(Arc::new(rust_lang()), cx);
- buffer.set_sync_parse_timeout(Duration::ZERO);
- buffer
- });
-
- // Wait for the initial text to parse
- cx.executor().run_until_parked();
- assert_eq!(
- get_tree_sexp(&buffer, cx),
- "(source_file (expression_statement (block)))"
- );
-
- buffer.update(cx, |buffer, cx| {
- buffer.set_language(Some(Arc::new(json_lang())), cx)
- });
- cx.executor().run_until_parked();
- assert_eq!(get_tree_sexp(&buffer, cx), "(document (object))");
-}
-
-#[gpui::test]
-async fn test_outline(cx: &mut gpui::TestAppContext) {
- let text = r#"
- struct Person {
- name: String,
- age: usize,
- }
-
- mod module {
- enum LoginState {
- LoggedOut,
- LoggingOn,
- LoggedIn {
- person: Person,
- time: Instant,
- }
- }
- }
-
- impl Eq for Person {}
-
- impl Drop for Person {
- fn drop(&mut self) {
- println!("bye");
- }
- }
- "#
- .unindent();
-
- let buffer = cx.new_model(|cx| {
- Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx)
- });
- let outline = buffer
- .update(cx, |buffer, _| buffer.snapshot().outline(None))
- .unwrap();
-
- assert_eq!(
- outline
- .items
- .iter()
- .map(|item| (item.text.as_str(), item.depth))
- .collect::<Vec<_>>(),
- &[
- ("struct Person", 0),
- ("name", 1),
- ("age", 1),
- ("mod module", 0),
- ("enum LoginState", 1),
- ("LoggedOut", 2),
- ("LoggingOn", 2),
- ("LoggedIn", 2),
- ("person", 3),
- ("time", 3),
- ("impl Eq for Person", 0),
- ("impl Drop for Person", 0),
- ("fn drop", 1),
- ]
- );
-
- // Without space, we only match on names
- assert_eq!(
- search(&outline, "oon", cx).await,
- &[
- ("mod module", vec![]), // included as the parent of a match
- ("enum LoginState", vec![]), // included as the parent of a match
- ("LoggingOn", vec![1, 7, 8]), // matches
- ("impl Drop for Person", vec![7, 18, 19]), // matches in two disjoint names
- ]
- );
-
- assert_eq!(
- search(&outline, "dp p", cx).await,
- &[
- ("impl Drop for Person", vec![5, 8, 9, 14]),
- ("fn drop", vec![]),
- ]
- );
- assert_eq!(
- search(&outline, "dpn", cx).await,
- &[("impl Drop for Person", vec![5, 14, 19])]
- );
- assert_eq!(
- search(&outline, "impl ", cx).await,
- &[
- ("impl Eq for Person", vec![0, 1, 2, 3, 4]),
- ("impl Drop for Person", vec![0, 1, 2, 3, 4]),
- ("fn drop", vec![]),
- ]
- );
-
- async fn search<'a>(
- outline: &'a Outline<Anchor>,
- query: &'a str,
- cx: &'a gpui::TestAppContext,
- ) -> Vec<(&'a str, Vec<usize>)> {
- let matches = cx
- .update(|cx| outline.search(query, cx.background_executor().clone()))
- .await;
- matches
- .into_iter()
- .map(|mat| (outline.items[mat.candidate_id].text.as_str(), mat.positions))
- .collect::<Vec<_>>()
- }
-}
-
-#[gpui::test]
-async fn test_outline_nodes_with_newlines(cx: &mut gpui::TestAppContext) {
- let text = r#"
- impl A for B<
- C
- > {
- };
- "#
- .unindent();
-
- let buffer = cx.new_model(|cx| {
- Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx)
- });
- let outline = buffer
- .update(cx, |buffer, _| buffer.snapshot().outline(None))
- .unwrap();
-
- assert_eq!(
- outline
- .items
- .iter()
- .map(|item| (item.text.as_str(), item.depth))
- .collect::<Vec<_>>(),
- &[("impl A for B<", 0)]
- );
-}
-
-#[gpui::test]
-async fn test_outline_with_extra_context(cx: &mut gpui::TestAppContext) {
- let language = javascript_lang()
- .with_outline_query(
- r#"
- (function_declaration
- "function" @context
- name: (_) @name
- parameters: (formal_parameters
- "(" @context.extra
- ")" @context.extra)) @item
- "#,
- )
- .unwrap();
-
- let text = r#"
- function a() {}
- function b(c) {}
- "#
- .unindent();
-
- let buffer = cx.new_model(|cx| {
- Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(language), cx)
- });
- let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
-
- // extra context nodes are included in the outline.
- let outline = snapshot.outline(None).unwrap();
- assert_eq!(
- outline
- .items
- .iter()
- .map(|item| (item.text.as_str(), item.depth))
- .collect::<Vec<_>>(),
- &[("function a()", 0), ("function b( )", 0),]
- );
-
- // extra context nodes do not appear in breadcrumbs.
- let symbols = snapshot.symbols_containing(3, None).unwrap();
- assert_eq!(
- symbols
- .iter()
- .map(|item| (item.text.as_str(), item.depth))
- .collect::<Vec<_>>(),
- &[("function a", 0)]
- );
-}
-
-#[gpui::test]
-async fn test_symbols_containing(cx: &mut gpui::TestAppContext) {
- let text = r#"
- impl Person {
- fn one() {
- 1
- }
-
- fn two() {
- 2
- }fn three() {
- 3
- }
- }
- "#
- .unindent();
-
- let buffer = cx.new_model(|cx| {
- Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx)
- });
- let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
-
- // point is at the start of an item
- assert_eq!(
- symbols_containing(Point::new(1, 4), &snapshot),
- vec![
- (
- "impl Person".to_string(),
- Point::new(0, 0)..Point::new(10, 1)
- ),
- ("fn one".to_string(), Point::new(1, 4)..Point::new(3, 5))
- ]
- );
-
- // point is in the middle of an item
- assert_eq!(
- symbols_containing(Point::new(2, 8), &snapshot),
- vec![
- (
- "impl Person".to_string(),
- Point::new(0, 0)..Point::new(10, 1)
- ),
- ("fn one".to_string(), Point::new(1, 4)..Point::new(3, 5))
- ]
- );
-
- // point is at the end of an item
- assert_eq!(
- symbols_containing(Point::new(3, 5), &snapshot),
- vec![
- (
- "impl Person".to_string(),
- Point::new(0, 0)..Point::new(10, 1)
- ),
- ("fn one".to_string(), Point::new(1, 4)..Point::new(3, 5))
- ]
- );
-
- // point is in between two adjacent items
- assert_eq!(
- symbols_containing(Point::new(7, 5), &snapshot),
- vec![
- (
- "impl Person".to_string(),
- Point::new(0, 0)..Point::new(10, 1)
- ),
- ("fn two".to_string(), Point::new(5, 4)..Point::new(7, 5))
- ]
- );
-
- fn symbols_containing(
- position: Point,
- snapshot: &BufferSnapshot,
- ) -> Vec<(String, Range<Point>)> {
- snapshot
- .symbols_containing(position, None)
- .unwrap()
- .into_iter()
- .map(|item| {
- (
- item.text,
- item.range.start.to_point(snapshot)..item.range.end.to_point(snapshot),
- )
- })
- .collect()
- }
-}
-
-#[gpui::test]
-fn test_enclosing_bracket_ranges(cx: &mut AppContext) {
- let mut assert = |selection_text, range_markers| {
- assert_bracket_pairs(selection_text, range_markers, rust_lang(), cx)
- };
-
- assert(
- indoc! {"
- mod x {
- moˇd y {
-
- }
- }
- let foo = 1;"},
- vec![indoc! {"
- mod x «{»
- mod y {
-
- }
- «}»
- let foo = 1;"}],
- );
-
- assert(
- indoc! {"
- mod x {
- mod y ˇ{
-
- }
- }
- let foo = 1;"},
- vec![
- indoc! {"
- mod x «{»
- mod y {
-
- }
- «}»
- let foo = 1;"},
- indoc! {"
- mod x {
- mod y «{»
-
- «}»
- }
- let foo = 1;"},
- ],
- );
-
- assert(
- indoc! {"
- mod x {
- mod y {
-
- }ˇ
- }
- let foo = 1;"},
- vec![
- indoc! {"
- mod x «{»
- mod y {
-
- }
- «}»
- let foo = 1;"},
- indoc! {"
- mod x {
- mod y «{»
-
- «}»
- }
- let foo = 1;"},
- ],
- );
-
- assert(
- indoc! {"
- mod x {
- mod y {
-
- }
- ˇ}
- let foo = 1;"},
- vec![indoc! {"
- mod x «{»
- mod y {
-
- }
- «}»
- let foo = 1;"}],
- );
-
- assert(
- indoc! {"
- mod x {
- mod y {
-
- }
- }
- let fˇoo = 1;"},
- vec![],
- );
-
- // Regression test: avoid crash when querying at the end of the buffer.
- assert(
- indoc! {"
- mod x {
- mod y {
-
- }
- }
- let foo = 1;ˇ"},
- vec![],
- );
-}
-
-#[gpui::test]
-fn test_enclosing_bracket_ranges_where_brackets_are_not_outermost_children(cx: &mut AppContext) {
- let mut assert = |selection_text, bracket_pair_texts| {
- assert_bracket_pairs(selection_text, bracket_pair_texts, javascript_lang(), cx)
- };
-
- assert(
- indoc! {"
- for (const a in b)ˇ {
- // a comment that's longer than the for-loop header
- }"},
- vec![indoc! {"
- for «(»const a in b«)» {
- // a comment that's longer than the for-loop header
- }"}],
- );
-
- // Regression test: even though the parent node of the parentheses (the for loop) does
- // intersect the given range, the parentheses themselves do not contain the range, so
- // they should not be returned. Only the curly braces contain the range.
- assert(
- indoc! {"
- for (const a in b) {ˇ
- // a comment that's longer than the for-loop header
- }"},
- vec![indoc! {"
- for (const a in b) «{»
- // a comment that's longer than the for-loop header
- «}»"}],
- );
-}
-
-#[gpui::test]
-fn test_range_for_syntax_ancestor(cx: &mut AppContext) {
- cx.new_model(|cx| {
- let text = "fn a() { b(|c| {}) }";
- let buffer =
- Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
- let snapshot = buffer.snapshot();
-
- assert_eq!(
- snapshot.range_for_syntax_ancestor(empty_range_at(text, "|")),
- Some(range_of(text, "|"))
- );
- assert_eq!(
- snapshot.range_for_syntax_ancestor(range_of(text, "|")),
- Some(range_of(text, "|c|"))
- );
- assert_eq!(
- snapshot.range_for_syntax_ancestor(range_of(text, "|c|")),
- Some(range_of(text, "|c| {}"))
- );
- assert_eq!(
- snapshot.range_for_syntax_ancestor(range_of(text, "|c| {}")),
- Some(range_of(text, "(|c| {})"))
- );
-
- buffer
- });
-
- fn empty_range_at(text: &str, part: &str) -> Range<usize> {
- let start = text.find(part).unwrap();
- start..start
- }
-
- fn range_of(text: &str, part: &str) -> Range<usize> {
- let start = text.find(part).unwrap();
- start..start + part.len()
- }
-}
-
-#[gpui::test]
-fn test_autoindent_with_soft_tabs(cx: &mut AppContext) {
- init_settings(cx, |_| {});
-
- cx.new_model(|cx| {
- let text = "fn a() {}";
- let mut buffer =
- Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
-
- buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx);
- assert_eq!(buffer.text(), "fn a() {\n \n}");
-
- buffer.edit(
- [(Point::new(1, 4)..Point::new(1, 4), "b()\n")],
- Some(AutoindentMode::EachLine),
- cx,
- );
- assert_eq!(buffer.text(), "fn a() {\n b()\n \n}");
-
- // Create a field expression on a new line, causing that line
- // to be indented.
- buffer.edit(
- [(Point::new(2, 4)..Point::new(2, 4), ".c")],
- Some(AutoindentMode::EachLine),
- cx,
- );
- assert_eq!(buffer.text(), "fn a() {\n b()\n .c\n}");
-
- // Remove the dot so that the line is no longer a field expression,
- // causing the line to be outdented.
- buffer.edit(
- [(Point::new(2, 8)..Point::new(2, 9), "")],
- Some(AutoindentMode::EachLine),
- cx,
- );
- assert_eq!(buffer.text(), "fn a() {\n b()\n c\n}");
-
- buffer
- });
-}
-
-#[gpui::test]
-fn test_autoindent_with_hard_tabs(cx: &mut AppContext) {
- init_settings(cx, |settings| {
- settings.defaults.hard_tabs = Some(true);
- });
-
- cx.new_model(|cx| {
- let text = "fn a() {}";
- let mut buffer =
- Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
-
- buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx);
- assert_eq!(buffer.text(), "fn a() {\n\t\n}");
-
- buffer.edit(
- [(Point::new(1, 1)..Point::new(1, 1), "b()\n")],
- Some(AutoindentMode::EachLine),
- cx,
- );
- assert_eq!(buffer.text(), "fn a() {\n\tb()\n\t\n}");
-
- // Create a field expression on a new line, causing that line
- // to be indented.
- buffer.edit(
- [(Point::new(2, 1)..Point::new(2, 1), ".c")],
- Some(AutoindentMode::EachLine),
- cx,
- );
- assert_eq!(buffer.text(), "fn a() {\n\tb()\n\t\t.c\n}");
-
- // Remove the dot so that the line is no longer a field expression,
- // causing the line to be outdented.
- buffer.edit(
- [(Point::new(2, 2)..Point::new(2, 3), "")],
- Some(AutoindentMode::EachLine),
- cx,
- );
- assert_eq!(buffer.text(), "fn a() {\n\tb()\n\tc\n}");
-
- buffer
- });
-}
-
-#[gpui::test]
-fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppContext) {
- init_settings(cx, |_| {});
-
- cx.new_model(|cx| {
- let entity_id = cx.entity_id();
- let mut buffer = Buffer::new(
- 0,
- entity_id.as_u64(),
- "
- fn a() {
- c;
- d;
- }
- "
- .unindent(),
- )
- .with_language(Arc::new(rust_lang()), cx);
-
- // Lines 2 and 3 don't match the indentation suggestion. When editing these lines,
- // their indentation is not adjusted.
- buffer.edit_via_marked_text(
- &"
- fn a() {
- c«()»;
- d«()»;
- }
- "
- .unindent(),
- Some(AutoindentMode::EachLine),
- cx,
- );
- assert_eq!(
- buffer.text(),
- "
- fn a() {
- c();
- d();
- }
- "
- .unindent()
- );
-
- // When appending new content after these lines, the indentation is based on the
- // preceding lines' actual indentation.
- buffer.edit_via_marked_text(
- &"
- fn a() {
- c«
- .f
- .g()»;
- d«
- .f
- .g()»;
- }
- "
- .unindent(),
- Some(AutoindentMode::EachLine),
- cx,
- );
-
- assert_eq!(
- buffer.text(),
- "
- fn a() {
- c
- .f
- .g();
- d
- .f
- .g();
- }
- "
- .unindent()
- );
- buffer
- });
-
- cx.new_model(|cx| {
- eprintln!("second buffer: {:?}", cx.entity_id());
-
- let mut buffer = Buffer::new(
- 0,
- cx.entity_id().as_u64(),
- "
- fn a() {
- b();
- |
- "
- .replace("|", "") // marker to preserve trailing whitespace
- .unindent(),
- )
- .with_language(Arc::new(rust_lang()), cx);
-
- // Insert a closing brace. It is outdented.
- buffer.edit_via_marked_text(
- &"
- fn a() {
- b();
- «}»
- "
- .unindent(),
- Some(AutoindentMode::EachLine),
- cx,
- );
- assert_eq!(
- buffer.text(),
- "
- fn a() {
- b();
- }
- "
- .unindent()
- );
-
- // Manually edit the leading whitespace. The edit is preserved.
- buffer.edit_via_marked_text(
- &"
- fn a() {
- b();
- « »}
- "
- .unindent(),
- Some(AutoindentMode::EachLine),
- cx,
- );
- assert_eq!(
- buffer.text(),
- "
- fn a() {
- b();
- }
- "
- .unindent()
- );
- buffer
- });
-
- eprintln!("DONE");
-}
-
-#[gpui::test]
-fn test_autoindent_does_not_adjust_lines_within_newly_created_errors(cx: &mut AppContext) {
- init_settings(cx, |_| {});
-
- cx.new_model(|cx| {
- let mut buffer = Buffer::new(
- 0,
- cx.entity_id().as_u64(),
- "
- fn a() {
- i
- }
- "
- .unindent(),
- )
- .with_language(Arc::new(rust_lang()), cx);
-
- // Regression test: line does not get outdented due to syntax error
- buffer.edit_via_marked_text(
- &"
- fn a() {
- i«f let Some(x) = y»
- }
- "
- .unindent(),
- Some(AutoindentMode::EachLine),
- cx,
- );
- assert_eq!(
- buffer.text(),
- "
- fn a() {
- if let Some(x) = y
- }
- "
- .unindent()
- );
-
- buffer.edit_via_marked_text(
- &"
- fn a() {
- if let Some(x) = y« {»
- }
- "
- .unindent(),
- Some(AutoindentMode::EachLine),
- cx,
- );
- assert_eq!(
- buffer.text(),
- "
- fn a() {
- if let Some(x) = y {
- }
- "
- .unindent()
- );
-
- buffer
- });
-}
-
-#[gpui::test]
-fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut AppContext) {
- init_settings(cx, |_| {});
-
- cx.new_model(|cx| {
- let mut buffer = Buffer::new(
- 0,
- cx.entity_id().as_u64(),
- "
- fn a() {}
- "
- .unindent(),
- )
- .with_language(Arc::new(rust_lang()), cx);
-
- buffer.edit_via_marked_text(
- &"
- fn a(«
- b») {}
- "
- .unindent(),
- Some(AutoindentMode::EachLine),
- cx,
- );
- assert_eq!(
- buffer.text(),
- "
- fn a(
- b) {}
- "
- .unindent()
- );
-
- // The indentation suggestion changed because `@end` node (a close paren)
- // is now at the beginning of the line.
- buffer.edit_via_marked_text(
- &"
- fn a(
- ˇ) {}
- "
- .unindent(),
- Some(AutoindentMode::EachLine),
- cx,
- );
- assert_eq!(
- buffer.text(),
- "
- fn a(
- ) {}
- "
- .unindent()
- );
-
- buffer
- });
-}
-
-#[gpui::test]
-fn test_autoindent_with_edit_at_end_of_buffer(cx: &mut AppContext) {
- init_settings(cx, |_| {});
-
- cx.new_model(|cx| {
- let text = "a\nb";
- let mut buffer =
- Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
- buffer.edit(
- [(0..1, "\n"), (2..3, "\n")],
- Some(AutoindentMode::EachLine),
- cx,
- );
- assert_eq!(buffer.text(), "\n\n\n");
- buffer
- });
-}
-
-#[gpui::test]
-fn test_autoindent_multi_line_insertion(cx: &mut AppContext) {
- init_settings(cx, |_| {});
-
- cx.new_model(|cx| {
- let text = "
- const a: usize = 1;
- fn b() {
- if c {
- let d = 2;
- }
- }
- "
- .unindent();
-
- let mut buffer =
- Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
- buffer.edit(
- [(Point::new(3, 0)..Point::new(3, 0), "e(\n f()\n);\n")],
- Some(AutoindentMode::EachLine),
- cx,
- );
- assert_eq!(
- buffer.text(),
- "
- const a: usize = 1;
- fn b() {
- if c {
- e(
- f()
- );
- let d = 2;
- }
- }
- "
- .unindent()
- );
-
- buffer
- });
-}
-
-#[gpui::test]
-fn test_autoindent_block_mode(cx: &mut AppContext) {
- init_settings(cx, |_| {});
-
- cx.new_model(|cx| {
- let text = r#"
- fn a() {
- b();
- }
- "#
- .unindent();
- let mut buffer =
- Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
-
- // When this text was copied, both of the quotation marks were at the same
- // indent level, but the indentation of the first line was not included in
- // the copied text. This information is retained in the
- // 'original_indent_columns' vector.
- let original_indent_columns = vec![4];
- let inserted_text = r#"
- "
- c
- d
- e
- "
- "#
- .unindent();
-
- // Insert the block at column zero. The entire block is indented
- // so that the first line matches the previous line's indentation.
- buffer.edit(
- [(Point::new(2, 0)..Point::new(2, 0), inserted_text.clone())],
- Some(AutoindentMode::Block {
- original_indent_columns: original_indent_columns.clone(),
- }),
- cx,
- );
- assert_eq!(
- buffer.text(),
- r#"
- fn a() {
- b();
- "
- c
- d
- e
- "
- }
- "#
- .unindent()
- );
-
- // Grouping is disabled in tests, so we need 2 undos
- buffer.undo(cx); // Undo the auto-indent
- buffer.undo(cx); // Undo the original edit
-
- // Insert the block at a deeper indent level. The entire block is outdented.
- buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
- buffer.edit(
- [(Point::new(2, 8)..Point::new(2, 8), inserted_text)],
- Some(AutoindentMode::Block {
- original_indent_columns: original_indent_columns.clone(),
- }),
- cx,
- );
- assert_eq!(
- buffer.text(),
- r#"
- fn a() {
- b();
- "
- c
- d
- e
- "
- }
- "#
- .unindent()
- );
-
- buffer
- });
-}
-
-#[gpui::test]
-fn test_autoindent_block_mode_without_original_indent_columns(cx: &mut AppContext) {
- init_settings(cx, |_| {});
-
- cx.new_model(|cx| {
- let text = r#"
- fn a() {
- if b() {
-
- }
- }
- "#
- .unindent();
- let mut buffer =
- Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
-
- // The original indent columns are not known, so this text is
- // auto-indented in a block as if the first line was copied in
- // its entirety.
- let original_indent_columns = Vec::new();
- let inserted_text = " c\n .d()\n .e();";
-
- // Insert the block at column zero. The entire block is indented
- // so that the first line matches the previous line's indentation.
- buffer.edit(
- [(Point::new(2, 0)..Point::new(2, 0), inserted_text)],
- Some(AutoindentMode::Block {
- original_indent_columns: original_indent_columns.clone(),
- }),
- cx,
- );
- assert_eq!(
- buffer.text(),
- r#"
- fn a() {
- if b() {
- c
- .d()
- .e();
- }
- }
- "#
- .unindent()
- );
-
- // Grouping is disabled in tests, so we need 2 undos
- buffer.undo(cx); // Undo the auto-indent
- buffer.undo(cx); // Undo the original edit
-
- // Insert the block at a deeper indent level. The entire block is outdented.
- buffer.edit(
- [(Point::new(2, 0)..Point::new(2, 0), " ".repeat(12))],
- None,
- cx,
- );
- buffer.edit(
- [(Point::new(2, 12)..Point::new(2, 12), inserted_text)],
- Some(AutoindentMode::Block {
- original_indent_columns: Vec::new(),
- }),
- cx,
- );
- assert_eq!(
- buffer.text(),
- r#"
- fn a() {
- if b() {
- c
- .d()
- .e();
- }
- }
- "#
- .unindent()
- );
-
- buffer
- });
-}
-
-#[gpui::test]
-fn test_autoindent_language_without_indents_query(cx: &mut AppContext) {
- init_settings(cx, |_| {});
-
- cx.new_model(|cx| {
- let text = "
- * one
- - a
- - b
- * two
- "
- .unindent();
-
- let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), text).with_language(
- Arc::new(Language::new(
- LanguageConfig {
- name: "Markdown".into(),
- auto_indent_using_last_non_empty_line: false,
- ..Default::default()
- },
- Some(tree_sitter_json::language()),
- )),
- cx,
- );
- buffer.edit(
- [(Point::new(3, 0)..Point::new(3, 0), "\n")],
- Some(AutoindentMode::EachLine),
- cx,
- );
- assert_eq!(
- buffer.text(),
- "
- * one
- - a
- - b
-
- * two
- "
- .unindent()
- );
- buffer
- });
-}
-
-#[gpui::test]
-fn test_autoindent_with_injected_languages(cx: &mut AppContext) {
- init_settings(cx, |settings| {
- settings.languages.extend([
- (
- "HTML".into(),
- LanguageSettingsContent {
- tab_size: Some(2.try_into().unwrap()),
- ..Default::default()
- },
- ),
- (
- "JavaScript".into(),
- LanguageSettingsContent {
- tab_size: Some(8.try_into().unwrap()),
- ..Default::default()
- },
- ),
- ])
- });
-
- let html_language = Arc::new(html_lang());
-
- let javascript_language = Arc::new(javascript_lang());
-
- let language_registry = Arc::new(LanguageRegistry::test());
- language_registry.add(html_language.clone());
- language_registry.add(javascript_language.clone());
-
- cx.new_model(|cx| {
- let (text, ranges) = marked_text_ranges(
- &"
- <div>ˇ
- </div>
- <script>
- init({ˇ
- })
- </script>
- <span>ˇ
- </span>
- "
- .unindent(),
- false,
- );
-
- let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), text);
- buffer.set_language_registry(language_registry);
- buffer.set_language(Some(html_language), cx);
- buffer.edit(
- ranges.into_iter().map(|range| (range, "\na")),
- Some(AutoindentMode::EachLine),
- cx,
- );
- assert_eq!(
- buffer.text(),
- "
- <div>
- a
- </div>
- <script>
- init({
- a
- })
- </script>
- <span>
- a
- </span>
- "
- .unindent()
- );
- buffer
- });
-}
-
-#[gpui::test]
-fn test_autoindent_query_with_outdent_captures(cx: &mut AppContext) {
- init_settings(cx, |settings| {
- settings.defaults.tab_size = Some(2.try_into().unwrap());
- });
-
- cx.new_model(|cx| {
- let mut buffer =
- Buffer::new(0, cx.entity_id().as_u64(), "").with_language(Arc::new(ruby_lang()), cx);
-
- let text = r#"
- class C
- def a(b, c)
- puts b
- puts c
- rescue
- puts "errored"
- exit 1
- end
- end
- "#
- .unindent();
-
- buffer.edit([(0..0, text)], Some(AutoindentMode::EachLine), cx);
-
- assert_eq!(
- buffer.text(),
- r#"
- class C
- def a(b, c)
- puts b
- puts c
- rescue
- puts "errored"
- exit 1
- end
- end
- "#
- .unindent()
- );
-
- buffer
- });
-}
-
-#[gpui::test]
-fn test_language_scope_at_with_javascript(cx: &mut AppContext) {
- init_settings(cx, |_| {});
-
- cx.new_model(|cx| {
- let language = Language::new(
- LanguageConfig {
- name: "JavaScript".into(),
- line_comment: Some("// ".into()),
- brackets: BracketPairConfig {
- pairs: vec![
- BracketPair {
- start: "{".into(),
- end: "}".into(),
- close: true,
- newline: false,
- },
- BracketPair {
- start: "'".into(),
- end: "'".into(),
- close: true,
- newline: false,
- },
- ],
- disabled_scopes_by_bracket_ix: vec![
- Vec::new(), //
- vec!["string".into()],
- ],
- },
- overrides: [(
- "element".into(),
- LanguageConfigOverride {
- line_comment: Override::Remove { remove: true },
- block_comment: Override::Set(("{/*".into(), "*/}".into())),
- ..Default::default()
- },
- )]
- .into_iter()
- .collect(),
- ..Default::default()
- },
- Some(tree_sitter_typescript::language_tsx()),
- )
- .with_override_query(
- r#"
- (jsx_element) @element
- (string) @string
- [
- (jsx_opening_element)
- (jsx_closing_element)
- (jsx_expression)
- ] @default
- "#,
- )
- .unwrap();
-
- let text = r#"
- a["b"] = <C d="e">
- <F></F>
- { g() }
- </C>;
- "#
- .unindent();
-
- let buffer =
- Buffer::new(0, cx.entity_id().as_u64(), &text).with_language(Arc::new(language), cx);
- let snapshot = buffer.snapshot();
-
- let config = snapshot.language_scope_at(0).unwrap();
- assert_eq!(config.line_comment_prefix().unwrap().as_ref(), "// ");
- // Both bracket pairs are enabled
- assert_eq!(
- config.brackets().map(|e| e.1).collect::<Vec<_>>(),
- &[true, true]
- );
-
- let string_config = snapshot
- .language_scope_at(text.find("b\"").unwrap())
- .unwrap();
- assert_eq!(string_config.line_comment_prefix().unwrap().as_ref(), "// ");
- // Second bracket pair is disabled
- assert_eq!(
- string_config.brackets().map(|e| e.1).collect::<Vec<_>>(),
- &[true, false]
- );
-
- // In between JSX tags: use the `element` override.
- let element_config = snapshot
- .language_scope_at(text.find("<F>").unwrap())
- .unwrap();
- assert_eq!(element_config.line_comment_prefix(), None);
- assert_eq!(
- element_config.block_comment_delimiters(),
- Some((&"{/*".into(), &"*/}".into()))
- );
- assert_eq!(
- element_config.brackets().map(|e| e.1).collect::<Vec<_>>(),
- &[true, true]
- );
-
- // Within a JSX tag: use the default config.
- let tag_config = snapshot
- .language_scope_at(text.find(" d=").unwrap() + 1)
- .unwrap();
- assert_eq!(tag_config.line_comment_prefix().unwrap().as_ref(), "// ");
- assert_eq!(
- tag_config.brackets().map(|e| e.1).collect::<Vec<_>>(),
- &[true, true]
- );
-
- // In a JSX expression: use the default config.
- let expression_in_element_config = snapshot
- .language_scope_at(text.find("{").unwrap() + 1)
- .unwrap();
- assert_eq!(
- expression_in_element_config
- .line_comment_prefix()
- .unwrap()
- .as_ref(),
- "// "
- );
- assert_eq!(
- expression_in_element_config
- .brackets()
- .map(|e| e.1)
- .collect::<Vec<_>>(),
- &[true, true]
- );
-
- buffer
- });
-}
-
-#[gpui::test]
-fn test_language_scope_at_with_rust(cx: &mut AppContext) {
- init_settings(cx, |_| {});
-
- cx.new_model(|cx| {
- let language = Language::new(
- LanguageConfig {
- name: "Rust".into(),
- brackets: BracketPairConfig {
- pairs: vec![
- BracketPair {
- start: "{".into(),
- end: "}".into(),
- close: true,
- newline: false,
- },
- BracketPair {
- start: "'".into(),
- end: "'".into(),
- close: true,
- newline: false,
- },
- ],
- disabled_scopes_by_bracket_ix: vec![
- Vec::new(), //
- vec!["string".into()],
- ],
- },
- ..Default::default()
- },
- Some(tree_sitter_rust::language()),
- )
- .with_override_query(
- r#"
- (string_literal) @string
- "#,
- )
- .unwrap();
-
- let text = r#"
- const S: &'static str = "hello";
- "#
- .unindent();
-
- let buffer = Buffer::new(0, cx.entity_id().as_u64(), text.clone())
- .with_language(Arc::new(language), cx);
- let snapshot = buffer.snapshot();
-
- // By default, all brackets are enabled
- let config = snapshot.language_scope_at(0).unwrap();
- assert_eq!(
- config.brackets().map(|e| e.1).collect::<Vec<_>>(),
- &[true, true]
- );
-
- // Within a string, the quotation brackets are disabled.
- let string_config = snapshot
- .language_scope_at(text.find("ello").unwrap())
- .unwrap();
- assert_eq!(
- string_config.brackets().map(|e| e.1).collect::<Vec<_>>(),
- &[true, false]
- );
-
- buffer
- });
-}
-
-#[gpui::test]
-fn test_language_scope_at_with_combined_injections(cx: &mut AppContext) {
- init_settings(cx, |_| {});
-
- cx.new_model(|cx| {
- let text = r#"
- <ol>
- <% people.each do |person| %>
- <li>
- <%= person.name %>
- </li>
- <% end %>
- </ol>
- "#
- .unindent();
-
- let language_registry = Arc::new(LanguageRegistry::test());
- language_registry.add(Arc::new(ruby_lang()));
- language_registry.add(Arc::new(html_lang()));
- language_registry.add(Arc::new(erb_lang()));
-
- let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), text);
- buffer.set_language_registry(language_registry.clone());
- buffer.set_language(
- language_registry
- .language_for_name("ERB")
- .now_or_never()
- .unwrap()
- .ok(),
- cx,
- );
-
- let snapshot = buffer.snapshot();
- let html_config = snapshot.language_scope_at(Point::new(2, 4)).unwrap();
- assert_eq!(html_config.line_comment_prefix(), None);
- assert_eq!(
- html_config.block_comment_delimiters(),
- Some((&"<!--".into(), &"-->".into()))
- );
-
- let ruby_config = snapshot.language_scope_at(Point::new(3, 12)).unwrap();
- assert_eq!(ruby_config.line_comment_prefix().unwrap().as_ref(), "# ");
- assert_eq!(ruby_config.block_comment_delimiters(), None);
-
- buffer
- });
-}
-
-#[gpui::test]
-fn test_serialization(cx: &mut gpui::AppContext) {
- let mut now = Instant::now();
-
- let buffer1 = cx.new_model(|cx| {
- let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), "abc");
- buffer.edit([(3..3, "D")], None, cx);
-
- now += Duration::from_secs(1);
- buffer.start_transaction_at(now);
- buffer.edit([(4..4, "E")], None, cx);
- buffer.end_transaction_at(now, cx);
- assert_eq!(buffer.text(), "abcDE");
-
- buffer.undo(cx);
- assert_eq!(buffer.text(), "abcD");
-
- buffer.edit([(4..4, "F")], None, cx);
- assert_eq!(buffer.text(), "abcDF");
- buffer
- });
- assert_eq!(buffer1.read(cx).text(), "abcDF");
-
- let state = buffer1.read(cx).to_proto();
- let ops = cx
- .background_executor()
- .block(buffer1.read(cx).serialize_ops(None, cx));
- let buffer2 = cx.new_model(|cx| {
- let mut buffer = Buffer::from_proto(1, state, None).unwrap();
- buffer
- .apply_ops(
- ops.into_iter()
- .map(|op| proto::deserialize_operation(op).unwrap()),
- cx,
- )
- .unwrap();
- buffer
- });
- assert_eq!(buffer2.read(cx).text(), "abcDF");
-}
-
-#[gpui::test(iterations = 100)]
-fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
- let min_peers = env::var("MIN_PEERS")
- .map(|i| i.parse().expect("invalid `MIN_PEERS` variable"))
- .unwrap_or(1);
- let max_peers = env::var("MAX_PEERS")
- .map(|i| i.parse().expect("invalid `MAX_PEERS` variable"))
- .unwrap_or(5);
- let operations = env::var("OPERATIONS")
- .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
- .unwrap_or(10);
-
- let base_text_len = rng.gen_range(0..10);
- let base_text = RandomCharIter::new(&mut rng)
- .take(base_text_len)
- .collect::<String>();
- let mut replica_ids = Vec::new();
- let mut buffers = Vec::new();
- let network = Arc::new(Mutex::new(Network::new(rng.clone())));
- let base_buffer =
- cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), base_text.as_str()));
-
- for i in 0..rng.gen_range(min_peers..=max_peers) {
- let buffer = cx.new_model(|cx| {
- let state = base_buffer.read(cx).to_proto();
- let ops = cx
- .background_executor()
- .block(base_buffer.read(cx).serialize_ops(None, cx));
- let mut buffer = Buffer::from_proto(i as ReplicaId, state, None).unwrap();
- buffer
- .apply_ops(
- ops.into_iter()
- .map(|op| proto::deserialize_operation(op).unwrap()),
- cx,
- )
- .unwrap();
- buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
- let network = network.clone();
- cx.subscribe(&cx.handle(), move |buffer, _, event, _| {
- if let Event::Operation(op) = event {
- network
- .lock()
- .broadcast(buffer.replica_id(), vec![proto::serialize_operation(op)]);
- }
- })
- .detach();
- buffer
- });
-
- buffers.push(buffer);
- replica_ids.push(i as ReplicaId);
- network.lock().add_peer(i as ReplicaId);
- log::info!("Adding initial peer with replica id {}", i);
- }
-
- log::info!("initial text: {:?}", base_text);
-
- let mut now = Instant::now();
- let mut mutation_count = operations;
- let mut next_diagnostic_id = 0;
- let mut active_selections = BTreeMap::default();
- loop {
- let replica_index = rng.gen_range(0..replica_ids.len());
- let replica_id = replica_ids[replica_index];
- let buffer = &mut buffers[replica_index];
- let mut new_buffer = None;
- match rng.gen_range(0..100) {
- 0..=29 if mutation_count != 0 => {
- buffer.update(cx, |buffer, cx| {
- buffer.start_transaction_at(now);
- buffer.randomly_edit(&mut rng, 5, cx);
- buffer.end_transaction_at(now, cx);
- log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
- });
- mutation_count -= 1;
- }
- 30..=39 if mutation_count != 0 => {
- buffer.update(cx, |buffer, cx| {
- if rng.gen_bool(0.2) {
- log::info!("peer {} clearing active selections", replica_id);
- active_selections.remove(&replica_id);
- buffer.remove_active_selections(cx);
- } else {
- let mut selections = Vec::new();
- for id in 0..rng.gen_range(1..=5) {
- let range = buffer.random_byte_range(0, &mut rng);
- selections.push(Selection {
- id,
- start: buffer.anchor_before(range.start),
- end: buffer.anchor_before(range.end),
- reversed: false,
- goal: SelectionGoal::None,
- });
- }
- let selections: Arc<[Selection<Anchor>]> = selections.into();
- log::info!(
- "peer {} setting active selections: {:?}",
- replica_id,
- selections
- );
- active_selections.insert(replica_id, selections.clone());
- buffer.set_active_selections(selections, false, Default::default(), cx);
- }
- });
- mutation_count -= 1;
- }
- 40..=49 if mutation_count != 0 && replica_id == 0 => {
- let entry_count = rng.gen_range(1..=5);
- buffer.update(cx, |buffer, cx| {
- let diagnostics = DiagnosticSet::new(
- (0..entry_count).map(|_| {
- let range = buffer.random_byte_range(0, &mut rng);
- let range = range.to_point_utf16(buffer);
- let range = range.start..range.end;
- DiagnosticEntry {
- range,
- diagnostic: Diagnostic {
- message: post_inc(&mut next_diagnostic_id).to_string(),
- ..Default::default()
- },
- }
- }),
- buffer,
- );
- log::info!("peer {} setting diagnostics: {:?}", replica_id, diagnostics);
- buffer.update_diagnostics(LanguageServerId(0), diagnostics, cx);
- });
- mutation_count -= 1;
- }
- 50..=59 if replica_ids.len() < max_peers => {
- let old_buffer_state = buffer.read(cx).to_proto();
- let old_buffer_ops = cx
- .background_executor()
- .block(buffer.read(cx).serialize_ops(None, cx));
- let new_replica_id = (0..=replica_ids.len() as ReplicaId)
- .filter(|replica_id| *replica_id != buffer.read(cx).replica_id())
- .choose(&mut rng)
- .unwrap();
- log::info!(
- "Adding new replica {} (replicating from {})",
- new_replica_id,
- replica_id
- );
- new_buffer = Some(cx.new_model(|cx| {
- let mut new_buffer =
- Buffer::from_proto(new_replica_id, old_buffer_state, None).unwrap();
- new_buffer
- .apply_ops(
- old_buffer_ops
- .into_iter()
- .map(|op| deserialize_operation(op).unwrap()),
- cx,
- )
- .unwrap();
- log::info!(
- "New replica {} text: {:?}",
- new_buffer.replica_id(),
- new_buffer.text()
- );
- new_buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
- let network = network.clone();
- cx.subscribe(&cx.handle(), move |buffer, _, event, _| {
- if let Event::Operation(op) = event {
- network.lock().broadcast(
- buffer.replica_id(),
- vec![proto::serialize_operation(op)],
- );
- }
- })
- .detach();
- new_buffer
- }));
- network.lock().replicate(replica_id, new_replica_id);
-
- if new_replica_id as usize == replica_ids.len() {
- replica_ids.push(new_replica_id);
- } else {
- let new_buffer = new_buffer.take().unwrap();
- while network.lock().has_unreceived(new_replica_id) {
- let ops = network
- .lock()
- .receive(new_replica_id)
- .into_iter()
- .map(|op| proto::deserialize_operation(op).unwrap());
- if ops.len() > 0 {
- log::info!(
- "peer {} (version: {:?}) applying {} ops from the network. {:?}",
- new_replica_id,
- buffer.read(cx).version(),
- ops.len(),
- ops
- );
- new_buffer.update(cx, |new_buffer, cx| {
- new_buffer.apply_ops(ops, cx).unwrap();
- });
- }
- }
- buffers[new_replica_id as usize] = new_buffer;
- }
- }
- 60..=69 if mutation_count != 0 => {
- buffer.update(cx, |buffer, cx| {
- buffer.randomly_undo_redo(&mut rng, cx);
- log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
- });
- mutation_count -= 1;
- }
- _ if network.lock().has_unreceived(replica_id) => {
- let ops = network
- .lock()
- .receive(replica_id)
- .into_iter()
- .map(|op| proto::deserialize_operation(op).unwrap());
- if ops.len() > 0 {
- log::info!(
- "peer {} (version: {:?}) applying {} ops from the network. {:?}",
- replica_id,
- buffer.read(cx).version(),
- ops.len(),
- ops
- );
- buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx).unwrap());
- }
- }
- _ => {}
- }
-
- now += Duration::from_millis(rng.gen_range(0..=200));
- buffers.extend(new_buffer);
-
- for buffer in &buffers {
- buffer.read(cx).check_invariants();
- }
-
- if mutation_count == 0 && network.lock().is_idle() {
- break;
- }
- }
-
- let first_buffer = buffers[0].read(cx).snapshot();
- for buffer in &buffers[1..] {
- let buffer = buffer.read(cx).snapshot();
- assert_eq!(
- buffer.version(),
- first_buffer.version(),
- "Replica {} version != Replica 0 version",
- buffer.replica_id()
- );
- assert_eq!(
- buffer.text(),
- first_buffer.text(),
- "Replica {} text != Replica 0 text",
- buffer.replica_id()
- );
- assert_eq!(
- buffer
- .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
- .collect::<Vec<_>>(),
- first_buffer
- .diagnostics_in_range::<_, usize>(0..first_buffer.len(), false)
- .collect::<Vec<_>>(),
- "Replica {} diagnostics != Replica 0 diagnostics",
- buffer.replica_id()
- );
- }
-
- for buffer in &buffers {
- let buffer = buffer.read(cx).snapshot();
- let actual_remote_selections = buffer
- .remote_selections_in_range(Anchor::MIN..Anchor::MAX)
- .map(|(replica_id, _, _, selections)| (replica_id, selections.collect::<Vec<_>>()))
- .collect::<Vec<_>>();
- let expected_remote_selections = active_selections
- .iter()
- .filter(|(replica_id, _)| **replica_id != buffer.replica_id())
- .map(|(replica_id, selections)| (*replica_id, selections.iter().collect::<Vec<_>>()))
- .collect::<Vec<_>>();
- assert_eq!(
- actual_remote_selections,
- expected_remote_selections,
- "Replica {} remote selections != expected selections",
- buffer.replica_id()
- );
- }
-}
-
-#[test]
-fn test_contiguous_ranges() {
- assert_eq!(
- contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter(), 100).collect::<Vec<_>>(),
- &[1..4, 5..7, 9..13]
- );
-
- // Respects the `max_len` parameter
- assert_eq!(
- contiguous_ranges(
- [2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31].into_iter(),
- 3
- )
- .collect::<Vec<_>>(),
- &[2..5, 5..8, 8..10, 23..26, 26..27, 30..32],
- );
-}
-
-#[gpui::test(iterations = 500)]
-fn test_trailing_whitespace_ranges(mut rng: StdRng) {
- // Generate a random multi-line string containing
- // some lines with trailing whitespace.
- let mut text = String::new();
- for _ in 0..rng.gen_range(0..16) {
- for _ in 0..rng.gen_range(0..36) {
- text.push(match rng.gen_range(0..10) {
- 0..=1 => ' ',
- 3 => '\t',
- _ => rng.gen_range('a'..'z'),
- });
- }
- text.push('\n');
- }
-
- match rng.gen_range(0..10) {
- // sometimes remove the last newline
- 0..=1 => drop(text.pop()), //
-
- // sometimes add extra newlines
- 2..=3 => text.push_str(&"\n".repeat(rng.gen_range(1..5))),
- _ => {}
- }
-
- let rope = Rope::from(text.as_str());
- let actual_ranges = trailing_whitespace_ranges(&rope);
- let expected_ranges = TRAILING_WHITESPACE_REGEX
- .find_iter(&text)
- .map(|m| m.range())
- .collect::<Vec<_>>();
- assert_eq!(
- actual_ranges,
- expected_ranges,
- "wrong ranges for text lines:\n{:?}",
- text.split("\n").collect::<Vec<_>>()
- );
-}
-
-fn ruby_lang() -> Language {
- Language::new(
- LanguageConfig {
- name: "Ruby".into(),
- path_suffixes: vec!["rb".to_string()],
- line_comment: Some("# ".into()),
- ..Default::default()
- },
- Some(tree_sitter_ruby::language()),
- )
- .with_indents_query(
- r#"
- (class "end" @end) @indent
- (method "end" @end) @indent
- (rescue) @outdent
- (then) @indent
- "#,
- )
- .unwrap()
-}
-
-fn html_lang() -> Language {
- Language::new(
- LanguageConfig {
- name: "HTML".into(),
- block_comment: Some(("<!--".into(), "-->".into())),
- ..Default::default()
- },
- Some(tree_sitter_html::language()),
- )
- .with_indents_query(
- "
- (element
- (start_tag) @start
- (end_tag)? @end) @indent
- ",
- )
- .unwrap()
- .with_injection_query(
- r#"
- (script_element
- (raw_text) @content
- (#set! "language" "javascript"))
- "#,
- )
- .unwrap()
-}
-
-fn erb_lang() -> Language {
- Language::new(
- LanguageConfig {
- name: "ERB".into(),
- path_suffixes: vec!["erb".to_string()],
- block_comment: Some(("<%#".into(), "%>".into())),
- ..Default::default()
- },
- Some(tree_sitter_embedded_template::language()),
- )
- .with_injection_query(
- r#"
- (
- (code) @content
- (#set! "language" "ruby")
- (#set! "combined")
- )
-
- (
- (content) @content
- (#set! "language" "html")
- (#set! "combined")
- )
- "#,
- )
- .unwrap()
-}
-
-fn rust_lang() -> Language {
- Language::new(
- LanguageConfig {
- name: "Rust".into(),
- path_suffixes: vec!["rs".to_string()],
- ..Default::default()
- },
- Some(tree_sitter_rust::language()),
- )
- .with_indents_query(
- r#"
- (call_expression) @indent
- (field_expression) @indent
- (_ "(" ")" @end) @indent
- (_ "{" "}" @end) @indent
- "#,
- )
- .unwrap()
- .with_brackets_query(
- r#"
- ("{" @open "}" @close)
- "#,
- )
- .unwrap()
- .with_outline_query(
- r#"
- (struct_item
- "struct" @context
- name: (_) @name) @item
- (enum_item
- "enum" @context
- name: (_) @name) @item
- (enum_variant
- name: (_) @name) @item
- (field_declaration
- name: (_) @name) @item
- (impl_item
- "impl" @context
- trait: (_)? @name
- "for"? @context
- type: (_) @name) @item
- (function_item
- "fn" @context
- name: (_) @name) @item
- (mod_item
- "mod" @context
- name: (_) @name) @item
- "#,
- )
- .unwrap()
-}
-
-fn json_lang() -> Language {
- Language::new(
- LanguageConfig {
- name: "Json".into(),
- path_suffixes: vec!["js".to_string()],
- ..Default::default()
- },
- Some(tree_sitter_json::language()),
- )
-}
-
-fn javascript_lang() -> Language {
- Language::new(
- LanguageConfig {
- name: "JavaScript".into(),
- ..Default::default()
- },
- Some(tree_sitter_typescript::language_tsx()),
- )
- .with_brackets_query(
- r#"
- ("{" @open "}" @close)
- ("(" @open ")" @close)
- "#,
- )
- .unwrap()
- .with_indents_query(
- r#"
- (object "}" @end) @indent
- "#,
- )
- .unwrap()
-}
-
-fn get_tree_sexp(buffer: &Model<Buffer>, cx: &mut gpui::TestAppContext) -> String {
- buffer.update(cx, |buffer, _| {
- let snapshot = buffer.snapshot();
- let layers = snapshot.syntax.layers(buffer.as_text_snapshot());
- layers[0].node().to_sexp()
- })
-}
-
-// Assert that the enclosing bracket ranges around the selection match the pairs indicated by the marked text in `range_markers`
-fn assert_bracket_pairs(
- selection_text: &'static str,
- bracket_pair_texts: Vec<&'static str>,
- language: Language,
- cx: &mut AppContext,
-) {
- let (expected_text, selection_ranges) = marked_text_ranges(selection_text, false);
- let buffer = cx.new_model(|cx| {
- Buffer::new(0, cx.entity_id().as_u64(), expected_text.clone())
- .with_language(Arc::new(language), cx)
- });
- let buffer = buffer.update(cx, |buffer, _cx| buffer.snapshot());
-
- let selection_range = selection_ranges[0].clone();
-
- let bracket_pairs = bracket_pair_texts
- .into_iter()
- .map(|pair_text| {
- let (bracket_text, ranges) = marked_text_ranges(pair_text, false);
- assert_eq!(bracket_text, expected_text);
- (ranges[0].clone(), ranges[1].clone())
- })
- .collect::<Vec<_>>();
-
- assert_set_eq!(
- buffer.bracket_ranges(selection_range).collect::<Vec<_>>(),
- bracket_pairs
- );
-}
-
-fn init_settings(cx: &mut AppContext, f: fn(&mut AllLanguageSettingsContent)) {
- let settings_store = SettingsStore::test(cx);
- cx.set_global(settings_store);
- crate::init(cx);
- cx.update_global::<SettingsStore, _>(|settings, cx| {
- settings.update_user_settings::<AllLanguageSettings>(cx, f);
- });
-}
@@ -1,236 +0,0 @@
-use crate::Diagnostic;
-use collections::HashMap;
-use lsp::LanguageServerId;
-use std::{
- cmp::{Ordering, Reverse},
- iter,
- ops::Range,
-};
-use sum_tree::{self, Bias, SumTree};
-use text::{Anchor, FromAnchor, PointUtf16, ToOffset};
-
-#[derive(Clone, Debug, Default)]
-pub struct DiagnosticSet {
- diagnostics: SumTree<DiagnosticEntry<Anchor>>,
-}
-
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub struct DiagnosticEntry<T> {
- pub range: Range<T>,
- pub diagnostic: Diagnostic,
-}
-
-#[derive(Debug)]
-pub struct DiagnosticGroup<T> {
- pub entries: Vec<DiagnosticEntry<T>>,
- pub primary_ix: usize,
-}
-
-#[derive(Clone, Debug)]
-pub struct Summary {
- start: Anchor,
- end: Anchor,
- min_start: Anchor,
- max_end: Anchor,
- count: usize,
-}
-
-impl<T> DiagnosticEntry<T> {
- // Used to provide diagnostic context to lsp codeAction request
- pub fn to_lsp_diagnostic_stub(&self) -> lsp::Diagnostic {
- let code = self
- .diagnostic
- .code
- .clone()
- .map(lsp::NumberOrString::String);
-
- lsp::Diagnostic {
- code,
- severity: Some(self.diagnostic.severity),
- ..Default::default()
- }
- }
-}
-
-impl DiagnosticSet {
- pub fn from_sorted_entries<I>(iter: I, buffer: &text::BufferSnapshot) -> Self
- where
- I: IntoIterator<Item = DiagnosticEntry<Anchor>>,
- {
- Self {
- diagnostics: SumTree::from_iter(iter, buffer),
- }
- }
-
- pub fn new<I>(iter: I, buffer: &text::BufferSnapshot) -> Self
- where
- I: IntoIterator<Item = DiagnosticEntry<PointUtf16>>,
- {
- let mut entries = iter.into_iter().collect::<Vec<_>>();
- entries.sort_unstable_by_key(|entry| (entry.range.start, Reverse(entry.range.end)));
- Self {
- diagnostics: SumTree::from_iter(
- entries.into_iter().map(|entry| DiagnosticEntry {
- range: buffer.anchor_before(entry.range.start)
- ..buffer.anchor_before(entry.range.end),
- diagnostic: entry.diagnostic,
- }),
- buffer,
- ),
- }
- }
-
- pub fn len(&self) -> usize {
- self.diagnostics.summary().count
- }
-
- pub fn iter(&self) -> impl Iterator<Item = &DiagnosticEntry<Anchor>> {
- self.diagnostics.iter()
- }
-
- pub fn range<'a, T, O>(
- &'a self,
- range: Range<T>,
- buffer: &'a text::BufferSnapshot,
- inclusive: bool,
- reversed: bool,
- ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
- where
- T: 'a + ToOffset,
- O: FromAnchor,
- {
- let end_bias = if inclusive { Bias::Right } else { Bias::Left };
- let range = buffer.anchor_before(range.start)..buffer.anchor_at(range.end, end_bias);
- let mut cursor = self.diagnostics.filter::<_, ()>({
- move |summary: &Summary| {
- let start_cmp = range.start.cmp(&summary.max_end, buffer);
- let end_cmp = range.end.cmp(&summary.min_start, buffer);
- if inclusive {
- start_cmp <= Ordering::Equal && end_cmp >= Ordering::Equal
- } else {
- start_cmp == Ordering::Less && end_cmp == Ordering::Greater
- }
- }
- });
-
- if reversed {
- cursor.prev(buffer);
- } else {
- cursor.next(buffer);
- }
- iter::from_fn({
- move || {
- if let Some(diagnostic) = cursor.item() {
- if reversed {
- cursor.prev(buffer);
- } else {
- cursor.next(buffer);
- }
- Some(diagnostic.resolve(buffer))
- } else {
- None
- }
- }
- })
- }
-
- pub fn groups(
- &self,
- language_server_id: LanguageServerId,
- output: &mut Vec<(LanguageServerId, DiagnosticGroup<Anchor>)>,
- buffer: &text::BufferSnapshot,
- ) {
- let mut groups = HashMap::default();
- for entry in self.diagnostics.iter() {
- groups
- .entry(entry.diagnostic.group_id)
- .or_insert(Vec::new())
- .push(entry.clone());
- }
-
- let start_ix = output.len();
- output.extend(groups.into_values().filter_map(|mut entries| {
- entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start, buffer));
- entries
- .iter()
- .position(|entry| entry.diagnostic.is_primary)
- .map(|primary_ix| {
- (
- language_server_id,
- DiagnosticGroup {
- entries,
- primary_ix,
- },
- )
- })
- }));
- output[start_ix..].sort_unstable_by(|(id_a, group_a), (id_b, group_b)| {
- group_a.entries[group_a.primary_ix]
- .range
- .start
- .cmp(&group_b.entries[group_b.primary_ix].range.start, buffer)
- .then_with(|| id_a.cmp(&id_b))
- });
- }
-
- pub fn group<'a, O: FromAnchor>(
- &'a self,
- group_id: usize,
- buffer: &'a text::BufferSnapshot,
- ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>> {
- self.iter()
- .filter(move |entry| entry.diagnostic.group_id == group_id)
- .map(|entry| entry.resolve(buffer))
- }
-}
-impl sum_tree::Item for DiagnosticEntry<Anchor> {
- type Summary = Summary;
-
- fn summary(&self) -> Self::Summary {
- Summary {
- start: self.range.start,
- end: self.range.end,
- min_start: self.range.start,
- max_end: self.range.end,
- count: 1,
- }
- }
-}
-
-impl DiagnosticEntry<Anchor> {
- pub fn resolve<O: FromAnchor>(&self, buffer: &text::BufferSnapshot) -> DiagnosticEntry<O> {
- DiagnosticEntry {
- range: O::from_anchor(&self.range.start, buffer)
- ..O::from_anchor(&self.range.end, buffer),
- diagnostic: self.diagnostic.clone(),
- }
- }
-}
-
-impl Default for Summary {
- fn default() -> Self {
- Self {
- start: Anchor::MIN,
- end: Anchor::MAX,
- min_start: Anchor::MAX,
- max_end: Anchor::MIN,
- count: 0,
- }
- }
-}
-
-impl sum_tree::Summary for Summary {
- type Context = text::BufferSnapshot;
-
- fn add_summary(&mut self, other: &Self, buffer: &Self::Context) {
- if other.min_start.cmp(&self.min_start, buffer).is_lt() {
- self.min_start = other.min_start;
- }
- if other.max_end.cmp(&self.max_end, buffer).is_gt() {
- self.max_end = other.max_end;
- }
- self.start = other.start;
- self.end = other.end;
- self.count += other.count;
- }
-}
@@ -1,111 +0,0 @@
-use gpui::HighlightStyle;
-use std::sync::Arc;
-use theme::SyntaxTheme;
-
-#[derive(Clone, Debug)]
-pub struct HighlightMap(Arc<[HighlightId]>);
-
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
-pub struct HighlightId(pub u32);
-
-const DEFAULT_SYNTAX_HIGHLIGHT_ID: HighlightId = HighlightId(u32::MAX);
-
-impl HighlightMap {
- pub fn new(capture_names: &[&str], theme: &SyntaxTheme) -> Self {
- // For each capture name in the highlight query, find the longest
- // key in the theme's syntax styles that matches all of the
- // dot-separated components of the capture name.
- HighlightMap(
- capture_names
- .iter()
- .map(|capture_name| {
- theme
- .highlights
- .iter()
- .enumerate()
- .filter_map(|(i, (key, _))| {
- let mut len = 0;
- let capture_parts = capture_name.split('.');
- for key_part in key.split('.') {
- if capture_parts.clone().any(|part| part == key_part) {
- len += 1;
- } else {
- return None;
- }
- }
- Some((i, len))
- })
- .max_by_key(|(_, len)| *len)
- .map_or(DEFAULT_SYNTAX_HIGHLIGHT_ID, |(i, _)| HighlightId(i as u32))
- })
- .collect(),
- )
- }
-
- pub fn get(&self, capture_id: u32) -> HighlightId {
- self.0
- .get(capture_id as usize)
- .copied()
- .unwrap_or(DEFAULT_SYNTAX_HIGHLIGHT_ID)
- }
-}
-
-impl HighlightId {
- pub fn is_default(&self) -> bool {
- *self == DEFAULT_SYNTAX_HIGHLIGHT_ID
- }
-
- pub fn style(&self, theme: &SyntaxTheme) -> Option<HighlightStyle> {
- theme.highlights.get(self.0 as usize).map(|entry| entry.1)
- }
-
- pub fn name<'a>(&self, theme: &'a SyntaxTheme) -> Option<&'a str> {
- theme.highlights.get(self.0 as usize).map(|e| e.0.as_str())
- }
-}
-
-impl Default for HighlightMap {
- fn default() -> Self {
- Self(Arc::new([]))
- }
-}
-
-impl Default for HighlightId {
- fn default() -> Self {
- DEFAULT_SYNTAX_HIGHLIGHT_ID
- }
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use gpui::rgba;
-
- #[test]
- fn test_highlight_map() {
- let theme = SyntaxTheme {
- highlights: [
- ("function", rgba(0x100000ff)),
- ("function.method", rgba(0x200000ff)),
- ("function.async", rgba(0x300000ff)),
- ("variable.builtin.self.rust", rgba(0x400000ff)),
- ("variable.builtin", rgba(0x500000ff)),
- ("variable", rgba(0x600000ff)),
- ]
- .iter()
- .map(|(name, color)| (name.to_string(), (*color).into()))
- .collect(),
- };
-
- let capture_names = &[
- "function.special",
- "function.async.rust",
- "variable.builtin.self",
- ];
-
- let map = HighlightMap::new(capture_names, &theme);
- assert_eq!(map.get(0).name(&theme), Some("function"));
- assert_eq!(map.get(1).name(&theme), Some("function.async"));
- assert_eq!(map.get(2).name(&theme), Some("variable.builtin"));
- }
-}
@@ -1,2025 +0,0 @@
-mod buffer;
-mod diagnostic_set;
-mod highlight_map;
-pub mod language_settings;
-mod outline;
-pub mod proto;
-mod syntax_map;
-
-#[cfg(test)]
-mod buffer_tests;
-pub mod markdown;
-
-use anyhow::{anyhow, Context, Result};
-use async_trait::async_trait;
-use collections::{HashMap, HashSet};
-use futures::{
- channel::{mpsc, oneshot},
- future::{BoxFuture, Shared},
- FutureExt, TryFutureExt as _,
-};
-use gpui::{AppContext, AsyncAppContext, BackgroundExecutor, Task};
-pub use highlight_map::HighlightMap;
-use lazy_static::lazy_static;
-use lsp::{CodeActionKind, LanguageServerBinary};
-use parking_lot::{Mutex, RwLock};
-use postage::watch;
-use regex::Regex;
-use serde::{de, Deserialize, Deserializer};
-use serde_json::Value;
-use std::{
- any::Any,
- borrow::Cow,
- cell::RefCell,
- fmt::Debug,
- hash::Hash,
- mem,
- ops::{Not, Range},
- path::{Path, PathBuf},
- str,
- sync::{
- atomic::{AtomicU64, AtomicUsize, Ordering::SeqCst},
- Arc,
- },
-};
-use syntax_map::SyntaxSnapshot;
-use theme::{SyntaxTheme, Theme};
-use tree_sitter::{self, wasmtime, Query, WasmStore};
-use unicase::UniCase;
-use util::{http::HttpClient, paths::PathExt};
-use util::{post_inc, ResultExt, TryFutureExt as _, UnwrapFuture};
-
-pub use buffer::Operation;
-pub use buffer::*;
-pub use diagnostic_set::DiagnosticEntry;
-pub use lsp::LanguageServerId;
-pub use outline::{Outline, OutlineItem};
-pub use syntax_map::{OwnedSyntaxLayerInfo, SyntaxLayerInfo};
-pub use text::LineEnding;
-pub use tree_sitter::{Parser, Tree};
-
-pub fn init(cx: &mut AppContext) {
- language_settings::init(cx);
-}
-
-#[derive(Clone, Default)]
-struct LspBinaryStatusSender {
- txs: Arc<Mutex<Vec<mpsc::UnboundedSender<(Arc<Language>, LanguageServerBinaryStatus)>>>>,
-}
-
-impl LspBinaryStatusSender {
- fn subscribe(&self) -> mpsc::UnboundedReceiver<(Arc<Language>, LanguageServerBinaryStatus)> {
- let (tx, rx) = mpsc::unbounded();
- self.txs.lock().push(tx);
- rx
- }
-
- fn send(&self, language: Arc<Language>, status: LanguageServerBinaryStatus) {
- let mut txs = self.txs.lock();
- txs.retain(|tx| {
- tx.unbounded_send((language.clone(), status.clone()))
- .is_ok()
- });
- }
-}
-
-thread_local! {
- static PARSER: RefCell<Parser> = {
- let mut parser = Parser::new();
- parser.set_wasm_store(WasmStore::new(WASM_ENGINE.clone()).unwrap()).unwrap();
- RefCell::new(parser)
- };
-}
-
-lazy_static! {
- pub static ref WASM_ENGINE: wasmtime::Engine = wasmtime::Engine::default();
- pub static ref NEXT_GRAMMAR_ID: AtomicUsize = Default::default();
- pub static ref PLAIN_TEXT: Arc<Language> = Arc::new(Language::new(
- LanguageConfig {
- name: "Plain Text".into(),
- ..Default::default()
- },
- None,
- ));
-}
-
-pub trait ToLspPosition {
- fn to_lsp_position(self) -> lsp::Position;
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, Hash)]
-pub struct LanguageServerName(pub Arc<str>);
-
-/// Represents a Language Server, with certain cached sync properties.
-/// Uses [`LspAdapter`] under the hood, but calls all 'static' methods
-/// once at startup, and caches the results.
-pub struct CachedLspAdapter {
- pub name: LanguageServerName,
- pub short_name: &'static str,
- pub initialization_options: Option<Value>,
- pub disk_based_diagnostic_sources: Vec<String>,
- pub disk_based_diagnostics_progress_token: Option<String>,
- pub language_ids: HashMap<String, String>,
- pub adapter: Arc<dyn LspAdapter>,
- pub reinstall_attempt_count: AtomicU64,
-}
-
-impl CachedLspAdapter {
- pub async fn new(adapter: Arc<dyn LspAdapter>) -> Arc<Self> {
- let name = adapter.name().await;
- let short_name = adapter.short_name();
- let initialization_options = adapter.initialization_options().await;
- let disk_based_diagnostic_sources = adapter.disk_based_diagnostic_sources().await;
- let disk_based_diagnostics_progress_token =
- adapter.disk_based_diagnostics_progress_token().await;
- let language_ids = adapter.language_ids().await;
-
- Arc::new(CachedLspAdapter {
- name,
- short_name,
- initialization_options,
- disk_based_diagnostic_sources,
- disk_based_diagnostics_progress_token,
- language_ids,
- adapter,
- reinstall_attempt_count: AtomicU64::new(0),
- })
- }
-
- pub async fn fetch_latest_server_version(
- &self,
- delegate: &dyn LspAdapterDelegate,
- ) -> Result<Box<dyn 'static + Send + Any>> {
- self.adapter.fetch_latest_server_version(delegate).await
- }
-
- pub fn will_fetch_server(
- &self,
- delegate: &Arc<dyn LspAdapterDelegate>,
- cx: &mut AsyncAppContext,
- ) -> Option<Task<Result<()>>> {
- self.adapter.will_fetch_server(delegate, cx)
- }
-
- pub fn will_start_server(
- &self,
- delegate: &Arc<dyn LspAdapterDelegate>,
- cx: &mut AsyncAppContext,
- ) -> Option<Task<Result<()>>> {
- self.adapter.will_start_server(delegate, cx)
- }
-
- pub async fn fetch_server_binary(
- &self,
- version: Box<dyn 'static + Send + Any>,
- container_dir: PathBuf,
- delegate: &dyn LspAdapterDelegate,
- ) -> Result<LanguageServerBinary> {
- self.adapter
- .fetch_server_binary(version, container_dir, delegate)
- .await
- }
-
- pub async fn cached_server_binary(
- &self,
- container_dir: PathBuf,
- delegate: &dyn LspAdapterDelegate,
- ) -> Option<LanguageServerBinary> {
- self.adapter
- .cached_server_binary(container_dir, delegate)
- .await
- }
-
- pub fn can_be_reinstalled(&self) -> bool {
- self.adapter.can_be_reinstalled()
- }
-
- pub async fn installation_test_binary(
- &self,
- container_dir: PathBuf,
- ) -> Option<LanguageServerBinary> {
- self.adapter.installation_test_binary(container_dir).await
- }
-
- pub fn code_action_kinds(&self) -> Option<Vec<CodeActionKind>> {
- self.adapter.code_action_kinds()
- }
-
- pub fn workspace_configuration(
- &self,
- workspace_root: &Path,
- cx: &mut AppContext,
- ) -> BoxFuture<'static, Value> {
- self.adapter.workspace_configuration(workspace_root, cx)
- }
-
- pub fn process_diagnostics(&self, params: &mut lsp::PublishDiagnosticsParams) {
- self.adapter.process_diagnostics(params)
- }
-
- pub async fn process_completion(&self, completion_item: &mut lsp::CompletionItem) {
- self.adapter.process_completion(completion_item).await
- }
-
- pub async fn label_for_completion(
- &self,
- completion_item: &lsp::CompletionItem,
- language: &Arc<Language>,
- ) -> Option<CodeLabel> {
- self.adapter
- .label_for_completion(completion_item, language)
- .await
- }
-
- pub async fn label_for_symbol(
- &self,
- name: &str,
- kind: lsp::SymbolKind,
- language: &Arc<Language>,
- ) -> Option<CodeLabel> {
- self.adapter.label_for_symbol(name, kind, language).await
- }
-
- pub fn prettier_plugins(&self) -> &[&'static str] {
- self.adapter.prettier_plugins()
- }
-}
-
-pub trait LspAdapterDelegate: Send + Sync {
- fn show_notification(&self, message: &str, cx: &mut AppContext);
- fn http_client(&self) -> Arc<dyn HttpClient>;
-}
-
-#[async_trait]
-pub trait LspAdapter: 'static + Send + Sync {
- async fn name(&self) -> LanguageServerName;
-
- fn short_name(&self) -> &'static str;
-
- async fn fetch_latest_server_version(
- &self,
- delegate: &dyn LspAdapterDelegate,
- ) -> Result<Box<dyn 'static + Send + Any>>;
-
- fn will_fetch_server(
- &self,
- _: &Arc<dyn LspAdapterDelegate>,
- _: &mut AsyncAppContext,
- ) -> Option<Task<Result<()>>> {
- None
- }
-
- fn will_start_server(
- &self,
- _: &Arc<dyn LspAdapterDelegate>,
- _: &mut AsyncAppContext,
- ) -> Option<Task<Result<()>>> {
- None
- }
-
- async fn fetch_server_binary(
- &self,
- version: Box<dyn 'static + Send + Any>,
- container_dir: PathBuf,
- delegate: &dyn LspAdapterDelegate,
- ) -> Result<LanguageServerBinary>;
-
- async fn cached_server_binary(
- &self,
- container_dir: PathBuf,
- delegate: &dyn LspAdapterDelegate,
- ) -> Option<LanguageServerBinary>;
-
- fn can_be_reinstalled(&self) -> bool {
- true
- }
-
- async fn installation_test_binary(
- &self,
- container_dir: PathBuf,
- ) -> Option<LanguageServerBinary>;
-
- fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
-
- async fn process_completion(&self, _: &mut lsp::CompletionItem) {}
-
- async fn label_for_completion(
- &self,
- _: &lsp::CompletionItem,
- _: &Arc<Language>,
- ) -> Option<CodeLabel> {
- None
- }
-
- async fn label_for_symbol(
- &self,
- _: &str,
- _: lsp::SymbolKind,
- _: &Arc<Language>,
- ) -> Option<CodeLabel> {
- None
- }
-
- async fn initialization_options(&self) -> Option<Value> {
- None
- }
-
- fn workspace_configuration(&self, _: &Path, _: &mut AppContext) -> BoxFuture<'static, Value> {
- futures::future::ready(serde_json::json!({})).boxed()
- }
-
- fn code_action_kinds(&self) -> Option<Vec<CodeActionKind>> {
- Some(vec![
- CodeActionKind::EMPTY,
- CodeActionKind::QUICKFIX,
- CodeActionKind::REFACTOR,
- CodeActionKind::REFACTOR_EXTRACT,
- CodeActionKind::SOURCE,
- ])
- }
-
- async fn disk_based_diagnostic_sources(&self) -> Vec<String> {
- Default::default()
- }
-
- async fn disk_based_diagnostics_progress_token(&self) -> Option<String> {
- None
- }
-
- async fn language_ids(&self) -> HashMap<String, String> {
- Default::default()
- }
-
- fn prettier_plugins(&self) -> &[&'static str] {
- &[]
- }
-}
-
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub struct CodeLabel {
- pub text: String,
- pub runs: Vec<(Range<usize>, HighlightId)>,
- pub filter_range: Range<usize>,
-}
-
-#[derive(Clone, Deserialize)]
-pub struct LanguageConfig {
- pub name: Arc<str>,
- pub grammar_name: Option<Arc<str>>,
- pub path_suffixes: Vec<String>,
- pub brackets: BracketPairConfig,
- #[serde(default, deserialize_with = "deserialize_regex")]
- pub first_line_pattern: Option<Regex>,
- #[serde(default = "auto_indent_using_last_non_empty_line_default")]
- pub auto_indent_using_last_non_empty_line: bool,
- #[serde(default, deserialize_with = "deserialize_regex")]
- pub increase_indent_pattern: Option<Regex>,
- #[serde(default, deserialize_with = "deserialize_regex")]
- pub decrease_indent_pattern: Option<Regex>,
- #[serde(default)]
- pub autoclose_before: String,
- #[serde(default)]
- pub line_comment: Option<Arc<str>>,
- #[serde(default)]
- pub collapsed_placeholder: String,
- #[serde(default)]
- pub block_comment: Option<(Arc<str>, Arc<str>)>,
- #[serde(default)]
- pub scope_opt_in_language_servers: Vec<String>,
- #[serde(default)]
- pub overrides: HashMap<String, LanguageConfigOverride>,
- #[serde(default)]
- pub word_characters: HashSet<char>,
- #[serde(default)]
- pub prettier_parser_name: Option<String>,
-}
-
-#[derive(Debug, Default)]
-pub struct LanguageQueries {
- pub highlights: Option<Cow<'static, str>>,
- pub brackets: Option<Cow<'static, str>>,
- pub indents: Option<Cow<'static, str>>,
- pub outline: Option<Cow<'static, str>>,
- pub embedding: Option<Cow<'static, str>>,
- pub injections: Option<Cow<'static, str>>,
- pub overrides: Option<Cow<'static, str>>,
-}
-
-#[derive(Clone, Debug)]
-pub struct LanguageScope {
- language: Arc<Language>,
- override_id: Option<u32>,
-}
-
-#[derive(Clone, Deserialize, Default, Debug)]
-pub struct LanguageConfigOverride {
- #[serde(default)]
- pub line_comment: Override<Arc<str>>,
- #[serde(default)]
- pub block_comment: Override<(Arc<str>, Arc<str>)>,
- #[serde(skip_deserializing)]
- pub disabled_bracket_ixs: Vec<u16>,
- #[serde(default)]
- pub word_characters: Override<HashSet<char>>,
- #[serde(default)]
- pub opt_into_language_servers: Vec<String>,
-}
-
-#[derive(Clone, Deserialize, Debug)]
-#[serde(untagged)]
-pub enum Override<T> {
- Remove { remove: bool },
- Set(T),
-}
-
-impl<T> Default for Override<T> {
- fn default() -> Self {
- Override::Remove { remove: false }
- }
-}
-
-impl<T> Override<T> {
- fn as_option<'a>(this: Option<&'a Self>, original: Option<&'a T>) -> Option<&'a T> {
- match this {
- Some(Self::Set(value)) => Some(value),
- Some(Self::Remove { remove: true }) => None,
- Some(Self::Remove { remove: false }) | None => original,
- }
- }
-}
-
-impl Default for LanguageConfig {
- fn default() -> Self {
- Self {
- name: "".into(),
- grammar_name: None,
- path_suffixes: Default::default(),
- brackets: Default::default(),
- auto_indent_using_last_non_empty_line: auto_indent_using_last_non_empty_line_default(),
- first_line_pattern: Default::default(),
- increase_indent_pattern: Default::default(),
- decrease_indent_pattern: Default::default(),
- autoclose_before: Default::default(),
- line_comment: Default::default(),
- block_comment: Default::default(),
- scope_opt_in_language_servers: Default::default(),
- overrides: Default::default(),
- collapsed_placeholder: Default::default(),
- word_characters: Default::default(),
- prettier_parser_name: None,
- }
- }
-}
-
-fn auto_indent_using_last_non_empty_line_default() -> bool {
- true
-}
-
-fn deserialize_regex<'de, D: Deserializer<'de>>(d: D) -> Result<Option<Regex>, D::Error> {
- let source = Option::<String>::deserialize(d)?;
- if let Some(source) = source {
- Ok(Some(regex::Regex::new(&source).map_err(de::Error::custom)?))
- } else {
- Ok(None)
- }
-}
-
-#[cfg(any(test, feature = "test-support"))]
-pub struct FakeLspAdapter {
- pub name: &'static str,
- pub initialization_options: Option<Value>,
- pub capabilities: lsp::ServerCapabilities,
- pub initializer: Option<Box<dyn 'static + Send + Sync + Fn(&mut lsp::FakeLanguageServer)>>,
- pub disk_based_diagnostics_progress_token: Option<String>,
- pub disk_based_diagnostics_sources: Vec<String>,
- pub prettier_plugins: Vec<&'static str>,
-}
-
-#[derive(Clone, Debug, Default)]
-pub struct BracketPairConfig {
- pub pairs: Vec<BracketPair>,
- pub disabled_scopes_by_bracket_ix: Vec<Vec<String>>,
-}
-
-impl<'de> Deserialize<'de> for BracketPairConfig {
- fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
- where
- D: Deserializer<'de>,
- {
- #[derive(Deserialize)]
- pub struct Entry {
- #[serde(flatten)]
- pub bracket_pair: BracketPair,
- #[serde(default)]
- pub not_in: Vec<String>,
- }
-
- let result = Vec::<Entry>::deserialize(deserializer)?;
- let mut brackets = Vec::with_capacity(result.len());
- let mut disabled_scopes_by_bracket_ix = Vec::with_capacity(result.len());
- for entry in result {
- brackets.push(entry.bracket_pair);
- disabled_scopes_by_bracket_ix.push(entry.not_in);
- }
-
- Ok(BracketPairConfig {
- pairs: brackets,
- disabled_scopes_by_bracket_ix,
- })
- }
-}
-
-#[derive(Clone, Debug, Default, Deserialize, PartialEq)]
-pub struct BracketPair {
- pub start: String,
- pub end: String,
- pub close: bool,
- pub newline: bool,
-}
-
-pub struct Language {
- pub(crate) config: LanguageConfig,
- pub(crate) grammar: Option<Arc<Grammar>>,
- pub(crate) adapters: Vec<Arc<CachedLspAdapter>>,
-
- #[cfg(any(test, feature = "test-support"))]
- fake_adapter: Option<(
- mpsc::UnboundedSender<lsp::FakeLanguageServer>,
- Arc<FakeLspAdapter>,
- )>,
-}
-
-pub struct Grammar {
- id: usize,
- pub ts_language: tree_sitter::Language,
- pub(crate) error_query: Query,
- pub(crate) highlights_query: Option<Query>,
- pub(crate) brackets_config: Option<BracketConfig>,
- pub(crate) indents_config: Option<IndentConfig>,
- pub outline_config: Option<OutlineConfig>,
- pub embedding_config: Option<EmbeddingConfig>,
- pub(crate) injection_config: Option<InjectionConfig>,
- pub(crate) override_config: Option<OverrideConfig>,
- pub(crate) highlight_map: Mutex<HighlightMap>,
-}
-
-struct IndentConfig {
- query: Query,
- indent_capture_ix: u32,
- start_capture_ix: Option<u32>,
- end_capture_ix: Option<u32>,
- outdent_capture_ix: Option<u32>,
-}
-
-pub struct OutlineConfig {
- pub query: Query,
- pub item_capture_ix: u32,
- pub name_capture_ix: u32,
- pub context_capture_ix: Option<u32>,
- pub extra_context_capture_ix: Option<u32>,
-}
-
-#[derive(Debug)]
-pub struct EmbeddingConfig {
- pub query: Query,
- pub item_capture_ix: u32,
- pub name_capture_ix: Option<u32>,
- pub context_capture_ix: Option<u32>,
- pub collapse_capture_ix: Option<u32>,
- pub keep_capture_ix: Option<u32>,
-}
-
-struct InjectionConfig {
- query: Query,
- content_capture_ix: u32,
- language_capture_ix: Option<u32>,
- patterns: Vec<InjectionPatternConfig>,
-}
-
-struct OverrideConfig {
- query: Query,
- values: HashMap<u32, (String, LanguageConfigOverride)>,
-}
-
-#[derive(Default, Clone)]
-struct InjectionPatternConfig {
- language: Option<Box<str>>,
- combined: bool,
-}
-
-struct BracketConfig {
- query: Query,
- open_capture_ix: u32,
- close_capture_ix: u32,
-}
-
-#[derive(Clone)]
-pub enum LanguageServerBinaryStatus {
- CheckingForUpdate,
- Downloading,
- Downloaded,
- Cached,
- Failed { error: String },
-}
-
-type AvailableLanguageId = usize;
-
-#[derive(Clone)]
-struct AvailableLanguage {
- id: AvailableLanguageId,
- config: LanguageConfig,
- grammar: AvailableGrammar,
- lsp_adapters: Vec<Arc<dyn LspAdapter>>,
- loaded: bool,
-}
-
-#[derive(Clone)]
-enum AvailableGrammar {
- Native {
- grammar: tree_sitter::Language,
- asset_dir: &'static str,
- get_queries: fn(&str) -> LanguageQueries,
- },
- Wasm {
- grammar_name: Arc<str>,
- path: Arc<Path>,
- },
-}
-
-pub struct LanguageRegistry {
- state: RwLock<LanguageRegistryState>,
- language_server_download_dir: Option<Arc<Path>>,
- login_shell_env_loaded: Shared<Task<()>>,
- #[allow(clippy::type_complexity)]
- lsp_binary_paths: Mutex<
- HashMap<LanguageServerName, Shared<Task<Result<LanguageServerBinary, Arc<anyhow::Error>>>>>,
- >,
- executor: Option<BackgroundExecutor>,
- lsp_binary_status_tx: LspBinaryStatusSender,
-}
-
-struct LanguageRegistryState {
- next_language_server_id: usize,
- languages: Vec<Arc<Language>>,
- available_languages: Vec<AvailableLanguage>,
- next_available_language_id: AvailableLanguageId,
- loading_languages: HashMap<AvailableLanguageId, Vec<oneshot::Sender<Result<Arc<Language>>>>>,
- subscription: (watch::Sender<()>, watch::Receiver<()>),
- theme: Option<Arc<Theme>>,
- version: usize,
- reload_count: usize,
-}
-
-pub struct PendingLanguageServer {
- pub server_id: LanguageServerId,
- pub task: Task<Result<lsp::LanguageServer>>,
- pub container_dir: Option<Arc<Path>>,
-}
-
-impl LanguageRegistry {
- pub fn new(login_shell_env_loaded: Task<()>) -> Self {
- Self {
- state: RwLock::new(LanguageRegistryState {
- next_language_server_id: 0,
- languages: vec![PLAIN_TEXT.clone()],
- available_languages: Default::default(),
- next_available_language_id: 0,
- loading_languages: Default::default(),
- subscription: watch::channel(),
- theme: Default::default(),
- version: 0,
- reload_count: 0,
- }),
- language_server_download_dir: None,
- login_shell_env_loaded: login_shell_env_loaded.shared(),
- lsp_binary_paths: Default::default(),
- executor: None,
- lsp_binary_status_tx: Default::default(),
- }
- }
-
- #[cfg(any(test, feature = "test-support"))]
- pub fn test() -> Self {
- Self::new(Task::ready(()))
- }
-
- pub fn set_executor(&mut self, executor: BackgroundExecutor) {
- self.executor = Some(executor);
- }
-
- /// Clear out all of the loaded languages and reload them from scratch.
- ///
- /// This is useful in development, when queries have changed.
- #[cfg(debug_assertions)]
- pub fn reload(&self) {
- self.state.write().reload();
- }
-
- pub fn register(
- &self,
- asset_dir: &'static str,
- config: LanguageConfig,
- grammar: tree_sitter::Language,
- lsp_adapters: Vec<Arc<dyn LspAdapter>>,
- get_queries: fn(&str) -> LanguageQueries,
- ) {
- let state = &mut *self.state.write();
- state.available_languages.push(AvailableLanguage {
- id: post_inc(&mut state.next_available_language_id),
- config,
- grammar: AvailableGrammar::Native {
- grammar,
- get_queries,
- asset_dir,
- },
- lsp_adapters,
- loaded: false,
- });
- }
-
- pub fn register_wasm(&self, path: Arc<Path>, grammar_name: Arc<str>, config: LanguageConfig) {
- let state = &mut *self.state.write();
- state.available_languages.push(AvailableLanguage {
- id: post_inc(&mut state.next_available_language_id),
- config,
- grammar: AvailableGrammar::Wasm { grammar_name, path },
- lsp_adapters: Vec::new(),
- loaded: false,
- });
- }
-
- pub fn language_names(&self) -> Vec<String> {
- let state = self.state.read();
- let mut result = state
- .available_languages
- .iter()
- .filter_map(|l| l.loaded.not().then_some(l.config.name.to_string()))
- .chain(state.languages.iter().map(|l| l.config.name.to_string()))
- .collect::<Vec<_>>();
- result.sort_unstable_by_key(|language_name| language_name.to_lowercase());
- result
- }
-
- pub fn add(&self, language: Arc<Language>) {
- self.state.write().add(language);
- }
-
- pub fn subscribe(&self) -> watch::Receiver<()> {
- self.state.read().subscription.1.clone()
- }
-
- /// The number of times that the registry has been changed,
- /// by adding languages or reloading.
- pub fn version(&self) -> usize {
- self.state.read().version
- }
-
- /// The number of times that the registry has been reloaded.
- pub fn reload_count(&self) -> usize {
- self.state.read().reload_count
- }
-
- pub fn set_theme(&self, theme: Arc<Theme>) {
- let mut state = self.state.write();
- state.theme = Some(theme.clone());
- for language in &state.languages {
- language.set_theme(&theme.syntax());
- }
- }
-
- pub fn set_language_server_download_dir(&mut self, path: impl Into<Arc<Path>>) {
- self.language_server_download_dir = Some(path.into());
- }
-
- pub fn language_for_name(
- self: &Arc<Self>,
- name: &str,
- ) -> UnwrapFuture<oneshot::Receiver<Result<Arc<Language>>>> {
- let name = UniCase::new(name);
- self.get_or_load_language(|config| UniCase::new(config.name.as_ref()) == name)
- }
-
- pub fn language_for_name_or_extension(
- self: &Arc<Self>,
- string: &str,
- ) -> UnwrapFuture<oneshot::Receiver<Result<Arc<Language>>>> {
- let string = UniCase::new(string);
- self.get_or_load_language(|config| {
- UniCase::new(config.name.as_ref()) == string
- || config
- .path_suffixes
- .iter()
- .any(|suffix| UniCase::new(suffix) == string)
- })
- }
-
- pub fn language_for_file(
- self: &Arc<Self>,
- path: impl AsRef<Path>,
- content: Option<&Rope>,
- ) -> UnwrapFuture<oneshot::Receiver<Result<Arc<Language>>>> {
- let path = path.as_ref();
- let filename = path.file_name().and_then(|name| name.to_str());
- let extension = path.extension_or_hidden_file_name();
- let path_suffixes = [extension, filename];
- self.get_or_load_language(|config| {
- let path_matches = config
- .path_suffixes
- .iter()
- .any(|suffix| path_suffixes.contains(&Some(suffix.as_str())));
- let content_matches = content.zip(config.first_line_pattern.as_ref()).map_or(
- false,
- |(content, pattern)| {
- let end = content.clip_point(Point::new(0, 256), Bias::Left);
- let end = content.point_to_offset(end);
- let text = content.chunks_in_range(0..end).collect::<String>();
- pattern.is_match(&text)
- },
- );
- path_matches || content_matches
- })
- }
-
- fn get_or_load_language(
- self: &Arc<Self>,
- callback: impl Fn(&LanguageConfig) -> bool,
- ) -> UnwrapFuture<oneshot::Receiver<Result<Arc<Language>>>> {
- let (tx, rx) = oneshot::channel();
-
- let mut state = self.state.write();
- if let Some(language) = state
- .languages
- .iter()
- .find(|language| callback(&language.config))
- {
- let _ = tx.send(Ok(language.clone()));
- } else if let Some(executor) = self.executor.clone() {
- if let Some(language) = state
- .available_languages
- .iter()
- .find(|l| !l.loaded && callback(&l.config))
- .cloned()
- {
- let txs = state
- .loading_languages
- .entry(language.id)
- .or_insert_with(|| {
- let this = self.clone();
- executor
- .spawn(async move {
- let id = language.id;
- let name = language.config.name.clone();
- let language = async {
- let (grammar, queries) = match language.grammar {
- AvailableGrammar::Native {
- grammar,
- asset_dir,
- get_queries,
- } => (grammar, (get_queries)(asset_dir)),
- AvailableGrammar::Wasm { grammar_name, path } => {
- let mut wasm_path = path.join(grammar_name.as_ref());
- wasm_path.set_extension("wasm");
- let wasm_bytes = std::fs::read(&wasm_path)?;
- let grammar = PARSER.with(|parser| {
- let mut parser = parser.borrow_mut();
- let mut store = parser.take_wasm_store().unwrap();
- let grammar =
- store.load_language(&grammar_name, &wasm_bytes);
- parser.set_wasm_store(store).unwrap();
- grammar
- })?;
- let mut queries = LanguageQueries::default();
- if let Ok(contents) = std::fs::read_to_string(
- &path.join("highlights.scm"),
- ) {
- queries.highlights = Some(contents.into());
- }
- (grammar, queries)
- }
- };
- Language::new(language.config, Some(grammar))
- .with_lsp_adapters(language.lsp_adapters)
- .await
- .with_queries(queries)
- }
- .await;
-
- match language {
- Ok(language) => {
- let language = Arc::new(language);
- let mut state = this.state.write();
-
- state.add(language.clone());
- state.mark_language_loaded(id);
- if let Some(mut txs) = state.loading_languages.remove(&id) {
- for tx in txs.drain(..) {
- let _ = tx.send(Ok(language.clone()));
- }
- }
- }
- Err(e) => {
- log::error!("failed to load language {name}:\n{:?}", e);
- let mut state = this.state.write();
- state.mark_language_loaded(id);
- if let Some(mut txs) = state.loading_languages.remove(&id) {
- for tx in txs.drain(..) {
- let _ = tx.send(Err(anyhow!(
- "failed to load language {}: {}",
- name,
- e
- )));
- }
- }
- }
- };
- })
- .detach();
-
- Vec::new()
- });
- txs.push(tx);
- } else {
- let _ = tx.send(Err(anyhow!("language not found")));
- }
- } else {
- let _ = tx.send(Err(anyhow!("executor does not exist")));
- }
-
- rx.unwrap()
- }
-
- pub fn to_vec(&self) -> Vec<Arc<Language>> {
- self.state.read().languages.iter().cloned().collect()
- }
-
- pub fn create_pending_language_server(
- self: &Arc<Self>,
- stderr_capture: Arc<Mutex<Option<String>>>,
- language: Arc<Language>,
- adapter: Arc<CachedLspAdapter>,
- root_path: Arc<Path>,
- delegate: Arc<dyn LspAdapterDelegate>,
- cx: &mut AppContext,
- ) -> Option<PendingLanguageServer> {
- let server_id = self.state.write().next_language_server_id();
- log::info!(
- "starting language server {:?}, path: {root_path:?}, id: {server_id}",
- adapter.name.0
- );
-
- #[cfg(any(test, feature = "test-support"))]
- if language.fake_adapter.is_some() {
- let task = cx.spawn(|cx| async move {
- let (servers_tx, fake_adapter) = language.fake_adapter.as_ref().unwrap();
- let (server, mut fake_server) = lsp::LanguageServer::fake(
- fake_adapter.name.to_string(),
- fake_adapter.capabilities.clone(),
- cx.clone(),
- );
-
- if let Some(initializer) = &fake_adapter.initializer {
- initializer(&mut fake_server);
- }
-
- let servers_tx = servers_tx.clone();
- cx.background_executor()
- .spawn(async move {
- if fake_server
- .try_receive_notification::<lsp::notification::Initialized>()
- .await
- .is_some()
- {
- servers_tx.unbounded_send(fake_server).ok();
- }
- })
- .detach();
-
- Ok(server)
- });
-
- return Some(PendingLanguageServer {
- server_id,
- task,
- container_dir: None,
- });
- }
-
- let download_dir = self
- .language_server_download_dir
- .clone()
- .ok_or_else(|| anyhow!("language server download directory has not been assigned before starting server"))
- .log_err()?;
- let this = self.clone();
- let language = language.clone();
- let container_dir: Arc<Path> = Arc::from(download_dir.join(adapter.name.0.as_ref()));
- let root_path = root_path.clone();
- let adapter = adapter.clone();
- let login_shell_env_loaded = self.login_shell_env_loaded.clone();
- let lsp_binary_statuses = self.lsp_binary_status_tx.clone();
-
- let task = {
- let container_dir = container_dir.clone();
- cx.spawn(move |mut cx| async move {
- login_shell_env_loaded.await;
-
- let entry = this
- .lsp_binary_paths
- .lock()
- .entry(adapter.name.clone())
- .or_insert_with(|| {
- let adapter = adapter.clone();
- let language = language.clone();
- let delegate = delegate.clone();
- cx.spawn(|cx| {
- get_binary(
- adapter,
- language,
- delegate,
- container_dir,
- lsp_binary_statuses,
- cx,
- )
- .map_err(Arc::new)
- })
- .shared()
- })
- .clone();
-
- let binary = match entry.await {
- Ok(binary) => binary,
- Err(err) => anyhow::bail!("{err}"),
- };
-
- if let Some(task) = adapter.will_start_server(&delegate, &mut cx) {
- task.await?;
- }
-
- lsp::LanguageServer::new(
- stderr_capture,
- server_id,
- binary,
- &root_path,
- adapter.code_action_kinds(),
- cx,
- )
- })
- };
-
- Some(PendingLanguageServer {
- server_id,
- task,
- container_dir: Some(container_dir),
- })
- }
-
- pub fn language_server_binary_statuses(
- &self,
- ) -> mpsc::UnboundedReceiver<(Arc<Language>, LanguageServerBinaryStatus)> {
- self.lsp_binary_status_tx.subscribe()
- }
-
- pub fn delete_server_container(
- &self,
- adapter: Arc<CachedLspAdapter>,
- cx: &mut AppContext,
- ) -> Task<()> {
- log::info!("deleting server container");
-
- let mut lock = self.lsp_binary_paths.lock();
- lock.remove(&adapter.name);
-
- let download_dir = self
- .language_server_download_dir
- .clone()
- .expect("language server download directory has not been assigned before deleting server container");
-
- cx.spawn(|_| async move {
- let container_dir = download_dir.join(adapter.name.0.as_ref());
- smol::fs::remove_dir_all(container_dir)
- .await
- .context("server container removal")
- .log_err();
- })
- }
-
- pub fn next_language_server_id(&self) -> LanguageServerId {
- self.state.write().next_language_server_id()
- }
-}
-
-impl LanguageRegistryState {
- fn next_language_server_id(&mut self) -> LanguageServerId {
- LanguageServerId(post_inc(&mut self.next_language_server_id))
- }
-
- fn add(&mut self, language: Arc<Language>) {
- if let Some(theme) = self.theme.as_ref() {
- language.set_theme(&theme.syntax());
- }
- self.languages.push(language);
- self.version += 1;
- *self.subscription.0.borrow_mut() = ();
- }
-
- #[cfg(debug_assertions)]
- fn reload(&mut self) {
- self.languages.clear();
- self.version += 1;
- self.reload_count += 1;
- for language in &mut self.available_languages {
- language.loaded = false;
- }
- *self.subscription.0.borrow_mut() = ();
- }
-
- /// Mark the given language a having been loaded, so that the
- /// language registry won't try to load it again.
- fn mark_language_loaded(&mut self, id: AvailableLanguageId) {
- for language in &mut self.available_languages {
- if language.id == id {
- language.loaded = true;
- break;
- }
- }
- }
-}
-
-#[cfg(any(test, feature = "test-support"))]
-impl Default for LanguageRegistry {
- fn default() -> Self {
- Self::test()
- }
-}
-
-async fn get_binary(
- adapter: Arc<CachedLspAdapter>,
- language: Arc<Language>,
- delegate: Arc<dyn LspAdapterDelegate>,
- container_dir: Arc<Path>,
- statuses: LspBinaryStatusSender,
- mut cx: AsyncAppContext,
-) -> Result<LanguageServerBinary> {
- if !container_dir.exists() {
- smol::fs::create_dir_all(&container_dir)
- .await
- .context("failed to create container directory")?;
- }
-
- if let Some(task) = adapter.will_fetch_server(&delegate, &mut cx) {
- task.await?;
- }
-
- let binary = fetch_latest_binary(
- adapter.clone(),
- language.clone(),
- delegate.as_ref(),
- &container_dir,
- statuses.clone(),
- )
- .await;
-
- if let Err(error) = binary.as_ref() {
- if let Some(binary) = adapter
- .cached_server_binary(container_dir.to_path_buf(), delegate.as_ref())
- .await
- {
- statuses.send(language.clone(), LanguageServerBinaryStatus::Cached);
- return Ok(binary);
- } else {
- statuses.send(
- language.clone(),
- LanguageServerBinaryStatus::Failed {
- error: format!("{:?}", error),
- },
- );
- }
- }
-
- binary
-}
-
-async fn fetch_latest_binary(
- adapter: Arc<CachedLspAdapter>,
- language: Arc<Language>,
- delegate: &dyn LspAdapterDelegate,
- container_dir: &Path,
- lsp_binary_statuses_tx: LspBinaryStatusSender,
-) -> Result<LanguageServerBinary> {
- let container_dir: Arc<Path> = container_dir.into();
- lsp_binary_statuses_tx.send(
- language.clone(),
- LanguageServerBinaryStatus::CheckingForUpdate,
- );
-
- let version_info = adapter.fetch_latest_server_version(delegate).await?;
- lsp_binary_statuses_tx.send(language.clone(), LanguageServerBinaryStatus::Downloading);
-
- let binary = adapter
- .fetch_server_binary(version_info, container_dir.to_path_buf(), delegate)
- .await?;
- lsp_binary_statuses_tx.send(language.clone(), LanguageServerBinaryStatus::Downloaded);
-
- Ok(binary)
-}
-
-impl Language {
- pub fn new(config: LanguageConfig, ts_language: Option<tree_sitter::Language>) -> Self {
- Self {
- config,
- grammar: ts_language.map(|ts_language| {
- Arc::new(Grammar {
- id: NEXT_GRAMMAR_ID.fetch_add(1, SeqCst),
- highlights_query: None,
- brackets_config: None,
- outline_config: None,
- embedding_config: None,
- indents_config: None,
- injection_config: None,
- override_config: None,
- error_query: Query::new(&ts_language, "(ERROR) @error").unwrap(),
- ts_language,
- highlight_map: Default::default(),
- })
- }),
- adapters: Vec::new(),
-
- #[cfg(any(test, feature = "test-support"))]
- fake_adapter: None,
- }
- }
-
- pub fn lsp_adapters(&self) -> &[Arc<CachedLspAdapter>] {
- &self.adapters
- }
-
- pub fn id(&self) -> Option<usize> {
- self.grammar.as_ref().map(|g| g.id)
- }
-
- pub fn with_queries(mut self, queries: LanguageQueries) -> Result<Self> {
- if let Some(query) = queries.highlights {
- self = self
- .with_highlights_query(query.as_ref())
- .context("Error loading highlights query")?;
- }
- if let Some(query) = queries.brackets {
- self = self
- .with_brackets_query(query.as_ref())
- .context("Error loading brackets query")?;
- }
- if let Some(query) = queries.indents {
- self = self
- .with_indents_query(query.as_ref())
- .context("Error loading indents query")?;
- }
- if let Some(query) = queries.outline {
- self = self
- .with_outline_query(query.as_ref())
- .context("Error loading outline query")?;
- }
- if let Some(query) = queries.embedding {
- self = self
- .with_embedding_query(query.as_ref())
- .context("Error loading embedding query")?;
- }
- if let Some(query) = queries.injections {
- self = self
- .with_injection_query(query.as_ref())
- .context("Error loading injection query")?;
- }
- if let Some(query) = queries.overrides {
- self = self
- .with_override_query(query.as_ref())
- .context("Error loading override query")?;
- }
- Ok(self)
- }
-
- pub fn with_highlights_query(mut self, source: &str) -> Result<Self> {
- let grammar = self.grammar_mut();
- grammar.highlights_query = Some(Query::new(&grammar.ts_language, source)?);
- Ok(self)
- }
-
- pub fn with_outline_query(mut self, source: &str) -> Result<Self> {
- let grammar = self.grammar_mut();
- let query = Query::new(&grammar.ts_language, source)?;
- let mut item_capture_ix = None;
- let mut name_capture_ix = None;
- let mut context_capture_ix = None;
- let mut extra_context_capture_ix = None;
- get_capture_indices(
- &query,
- &mut [
- ("item", &mut item_capture_ix),
- ("name", &mut name_capture_ix),
- ("context", &mut context_capture_ix),
- ("context.extra", &mut extra_context_capture_ix),
- ],
- );
- if let Some((item_capture_ix, name_capture_ix)) = item_capture_ix.zip(name_capture_ix) {
- grammar.outline_config = Some(OutlineConfig {
- query,
- item_capture_ix,
- name_capture_ix,
- context_capture_ix,
- extra_context_capture_ix,
- });
- }
- Ok(self)
- }
-
- pub fn with_embedding_query(mut self, source: &str) -> Result<Self> {
- let grammar = self.grammar_mut();
- let query = Query::new(&grammar.ts_language, source)?;
- let mut item_capture_ix = None;
- let mut name_capture_ix = None;
- let mut context_capture_ix = None;
- let mut collapse_capture_ix = None;
- let mut keep_capture_ix = None;
- get_capture_indices(
- &query,
- &mut [
- ("item", &mut item_capture_ix),
- ("name", &mut name_capture_ix),
- ("context", &mut context_capture_ix),
- ("keep", &mut keep_capture_ix),
- ("collapse", &mut collapse_capture_ix),
- ],
- );
- if let Some(item_capture_ix) = item_capture_ix {
- grammar.embedding_config = Some(EmbeddingConfig {
- query,
- item_capture_ix,
- name_capture_ix,
- context_capture_ix,
- collapse_capture_ix,
- keep_capture_ix,
- });
- }
- Ok(self)
- }
-
- pub fn with_brackets_query(mut self, source: &str) -> Result<Self> {
- let grammar = self.grammar_mut();
- let query = Query::new(&grammar.ts_language, source)?;
- let mut open_capture_ix = None;
- let mut close_capture_ix = None;
- get_capture_indices(
- &query,
- &mut [
- ("open", &mut open_capture_ix),
- ("close", &mut close_capture_ix),
- ],
- );
- if let Some((open_capture_ix, close_capture_ix)) = open_capture_ix.zip(close_capture_ix) {
- grammar.brackets_config = Some(BracketConfig {
- query,
- open_capture_ix,
- close_capture_ix,
- });
- }
- Ok(self)
- }
-
- pub fn with_indents_query(mut self, source: &str) -> Result<Self> {
- let grammar = self.grammar_mut();
- let query = Query::new(&grammar.ts_language, source)?;
- let mut indent_capture_ix = None;
- let mut start_capture_ix = None;
- let mut end_capture_ix = None;
- let mut outdent_capture_ix = None;
- get_capture_indices(
- &query,
- &mut [
- ("indent", &mut indent_capture_ix),
- ("start", &mut start_capture_ix),
- ("end", &mut end_capture_ix),
- ("outdent", &mut outdent_capture_ix),
- ],
- );
- if let Some(indent_capture_ix) = indent_capture_ix {
- grammar.indents_config = Some(IndentConfig {
- query,
- indent_capture_ix,
- start_capture_ix,
- end_capture_ix,
- outdent_capture_ix,
- });
- }
- Ok(self)
- }
-
- pub fn with_injection_query(mut self, source: &str) -> Result<Self> {
- let grammar = self.grammar_mut();
- let query = Query::new(&grammar.ts_language, source)?;
- let mut language_capture_ix = None;
- let mut content_capture_ix = None;
- get_capture_indices(
- &query,
- &mut [
- ("language", &mut language_capture_ix),
- ("content", &mut content_capture_ix),
- ],
- );
- let patterns = (0..query.pattern_count())
- .map(|ix| {
- let mut config = InjectionPatternConfig::default();
- for setting in query.property_settings(ix) {
- match setting.key.as_ref() {
- "language" => {
- config.language = setting.value.clone();
- }
- "combined" => {
- config.combined = true;
- }
- _ => {}
- }
- }
- config
- })
- .collect();
- if let Some(content_capture_ix) = content_capture_ix {
- grammar.injection_config = Some(InjectionConfig {
- query,
- language_capture_ix,
- content_capture_ix,
- patterns,
- });
- }
- Ok(self)
- }
-
- pub fn with_override_query(mut self, source: &str) -> anyhow::Result<Self> {
- let query = Query::new(&self.grammar_mut().ts_language, source)?;
-
- let mut override_configs_by_id = HashMap::default();
- for (ix, name) in query.capture_names().iter().enumerate() {
- if !name.starts_with('_') {
- let value = self.config.overrides.remove(*name).unwrap_or_default();
- for server_name in &value.opt_into_language_servers {
- if !self
- .config
- .scope_opt_in_language_servers
- .contains(server_name)
- {
- util::debug_panic!("Server {server_name:?} has been opted-in by scope {name:?} but has not been marked as an opt-in server");
- }
- }
-
- override_configs_by_id.insert(ix as u32, (name.to_string(), value));
- }
- }
-
- if !self.config.overrides.is_empty() {
- let keys = self.config.overrides.keys().collect::<Vec<_>>();
- Err(anyhow!(
- "language {:?} has overrides in config not in query: {keys:?}",
- self.config.name
- ))?;
- }
-
- for disabled_scope_name in self
- .config
- .brackets
- .disabled_scopes_by_bracket_ix
- .iter()
- .flatten()
- {
- if !override_configs_by_id
- .values()
- .any(|(scope_name, _)| scope_name == disabled_scope_name)
- {
- Err(anyhow!(
- "language {:?} has overrides in config not in query: {disabled_scope_name:?}",
- self.config.name
- ))?;
- }
- }
-
- for (name, override_config) in override_configs_by_id.values_mut() {
- override_config.disabled_bracket_ixs = self
- .config
- .brackets
- .disabled_scopes_by_bracket_ix
- .iter()
- .enumerate()
- .filter_map(|(ix, disabled_scope_names)| {
- if disabled_scope_names.contains(name) {
- Some(ix as u16)
- } else {
- None
- }
- })
- .collect();
- }
-
- self.config.brackets.disabled_scopes_by_bracket_ix.clear();
- self.grammar_mut().override_config = Some(OverrideConfig {
- query,
- values: override_configs_by_id,
- });
- Ok(self)
- }
-
- fn grammar_mut(&mut self) -> &mut Grammar {
- Arc::get_mut(self.grammar.as_mut().unwrap()).unwrap()
- }
-
- pub async fn with_lsp_adapters(mut self, lsp_adapters: Vec<Arc<dyn LspAdapter>>) -> Self {
- for adapter in lsp_adapters {
- self.adapters.push(CachedLspAdapter::new(adapter).await);
- }
- self
- }
-
- #[cfg(any(test, feature = "test-support"))]
- pub async fn set_fake_lsp_adapter(
- &mut self,
- fake_lsp_adapter: Arc<FakeLspAdapter>,
- ) -> mpsc::UnboundedReceiver<lsp::FakeLanguageServer> {
- let (servers_tx, servers_rx) = mpsc::unbounded();
- self.fake_adapter = Some((servers_tx, fake_lsp_adapter.clone()));
- let adapter = CachedLspAdapter::new(Arc::new(fake_lsp_adapter)).await;
- self.adapters = vec![adapter];
- servers_rx
- }
-
- pub fn name(&self) -> Arc<str> {
- self.config.name.clone()
- }
-
- pub async fn disk_based_diagnostic_sources(&self) -> &[String] {
- match self.adapters.first().as_ref() {
- Some(adapter) => &adapter.disk_based_diagnostic_sources,
- None => &[],
- }
- }
-
- pub async fn disk_based_diagnostics_progress_token(&self) -> Option<&str> {
- for adapter in &self.adapters {
- let token = adapter.disk_based_diagnostics_progress_token.as_deref();
- if token.is_some() {
- return token;
- }
- }
-
- None
- }
-
- pub async fn process_completion(self: &Arc<Self>, completion: &mut lsp::CompletionItem) {
- for adapter in &self.adapters {
- adapter.process_completion(completion).await;
- }
- }
-
- pub async fn label_for_completion(
- self: &Arc<Self>,
- completion: &lsp::CompletionItem,
- ) -> Option<CodeLabel> {
- self.adapters
- .first()
- .as_ref()?
- .label_for_completion(completion, self)
- .await
- }
-
- pub async fn label_for_symbol(
- self: &Arc<Self>,
- name: &str,
- kind: lsp::SymbolKind,
- ) -> Option<CodeLabel> {
- self.adapters
- .first()
- .as_ref()?
- .label_for_symbol(name, kind, self)
- .await
- }
-
- pub fn highlight_text<'a>(
- self: &'a Arc<Self>,
- text: &'a Rope,
- range: Range<usize>,
- ) -> Vec<(Range<usize>, HighlightId)> {
- let mut result = Vec::new();
- if let Some(grammar) = &self.grammar {
- let tree = grammar.parse_text(text, None);
- let captures =
- SyntaxSnapshot::single_tree_captures(range.clone(), text, &tree, self, |grammar| {
- grammar.highlights_query.as_ref()
- });
- let highlight_maps = vec![grammar.highlight_map()];
- let mut offset = 0;
- for chunk in BufferChunks::new(text, range, Some((captures, highlight_maps)), vec![]) {
- let end_offset = offset + chunk.text.len();
- if let Some(highlight_id) = chunk.syntax_highlight_id {
- if !highlight_id.is_default() {
- result.push((offset..end_offset, highlight_id));
- }
- }
- offset = end_offset;
- }
- }
- result
- }
-
- pub fn path_suffixes(&self) -> &[String] {
- &self.config.path_suffixes
- }
-
- pub fn should_autoclose_before(&self, c: char) -> bool {
- c.is_whitespace() || self.config.autoclose_before.contains(c)
- }
-
- pub fn set_theme(&self, theme: &SyntaxTheme) {
- if let Some(grammar) = self.grammar.as_ref() {
- if let Some(highlights_query) = &grammar.highlights_query {
- *grammar.highlight_map.lock() =
- HighlightMap::new(highlights_query.capture_names(), theme);
- }
- }
- }
-
- pub fn grammar(&self) -> Option<&Arc<Grammar>> {
- self.grammar.as_ref()
- }
-
- pub fn default_scope(self: &Arc<Self>) -> LanguageScope {
- LanguageScope {
- language: self.clone(),
- override_id: None,
- }
- }
-
- pub fn prettier_parser_name(&self) -> Option<&str> {
- self.config.prettier_parser_name.as_deref()
- }
-}
-
-impl LanguageScope {
- pub fn collapsed_placeholder(&self) -> &str {
- self.language.config.collapsed_placeholder.as_ref()
- }
-
- pub fn line_comment_prefix(&self) -> Option<&Arc<str>> {
- Override::as_option(
- self.config_override().map(|o| &o.line_comment),
- self.language.config.line_comment.as_ref(),
- )
- }
-
- pub fn block_comment_delimiters(&self) -> Option<(&Arc<str>, &Arc<str>)> {
- Override::as_option(
- self.config_override().map(|o| &o.block_comment),
- self.language.config.block_comment.as_ref(),
- )
- .map(|e| (&e.0, &e.1))
- }
-
- pub fn word_characters(&self) -> Option<&HashSet<char>> {
- Override::as_option(
- self.config_override().map(|o| &o.word_characters),
- Some(&self.language.config.word_characters),
- )
- }
-
- pub fn brackets(&self) -> impl Iterator<Item = (&BracketPair, bool)> {
- let mut disabled_ids = self
- .config_override()
- .map_or(&[] as _, |o| o.disabled_bracket_ixs.as_slice());
- self.language
- .config
- .brackets
- .pairs
- .iter()
- .enumerate()
- .map(move |(ix, bracket)| {
- let mut is_enabled = true;
- if let Some(next_disabled_ix) = disabled_ids.first() {
- if ix == *next_disabled_ix as usize {
- disabled_ids = &disabled_ids[1..];
- is_enabled = false;
- }
- }
- (bracket, is_enabled)
- })
- }
-
- pub fn should_autoclose_before(&self, c: char) -> bool {
- c.is_whitespace() || self.language.config.autoclose_before.contains(c)
- }
-
- pub fn language_allowed(&self, name: &LanguageServerName) -> bool {
- let config = &self.language.config;
- let opt_in_servers = &config.scope_opt_in_language_servers;
- if opt_in_servers.iter().any(|o| *o == *name.0) {
- if let Some(over) = self.config_override() {
- over.opt_into_language_servers.iter().any(|o| *o == *name.0)
- } else {
- false
- }
- } else {
- true
- }
- }
-
- fn config_override(&self) -> Option<&LanguageConfigOverride> {
- let id = self.override_id?;
- let grammar = self.language.grammar.as_ref()?;
- let override_config = grammar.override_config.as_ref()?;
- override_config.values.get(&id).map(|e| &e.1)
- }
-}
-
-impl Hash for Language {
- fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
- self.id().hash(state)
- }
-}
-
-impl PartialEq for Language {
- fn eq(&self, other: &Self) -> bool {
- self.id().eq(&other.id())
- }
-}
-
-impl Eq for Language {}
-
-impl Debug for Language {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- f.debug_struct("Language")
- .field("name", &self.config.name)
- .finish()
- }
-}
-
-impl Grammar {
- pub fn id(&self) -> usize {
- self.id
- }
-
- fn parse_text(&self, text: &Rope, old_tree: Option<Tree>) -> Tree {
- PARSER.with(|parser| {
- let mut parser = parser.borrow_mut();
- parser
- .set_language(&self.ts_language)
- .expect("incompatible grammar");
- let mut chunks = text.chunks_in_range(0..text.len());
- parser
- .parse_with(
- &mut move |offset, _| {
- chunks.seek(offset);
- chunks.next().unwrap_or("").as_bytes()
- },
- old_tree.as_ref(),
- )
- .unwrap()
- })
- }
-
- pub fn highlight_map(&self) -> HighlightMap {
- self.highlight_map.lock().clone()
- }
-
- pub fn highlight_id_for_name(&self, name: &str) -> Option<HighlightId> {
- let capture_id = self
- .highlights_query
- .as_ref()?
- .capture_index_for_name(name)?;
- Some(self.highlight_map.lock().get(capture_id))
- }
-}
-
-impl CodeLabel {
- pub fn plain(text: String, filter_text: Option<&str>) -> Self {
- let mut result = Self {
- runs: Vec::new(),
- filter_range: 0..text.len(),
- text,
- };
- if let Some(filter_text) = filter_text {
- if let Some(ix) = result.text.find(filter_text) {
- result.filter_range = ix..ix + filter_text.len();
- }
- }
- result
- }
-}
-
-#[cfg(any(test, feature = "test-support"))]
-impl Default for FakeLspAdapter {
- fn default() -> Self {
- Self {
- name: "the-fake-language-server",
- capabilities: lsp::LanguageServer::full_capabilities(),
- initializer: None,
- disk_based_diagnostics_progress_token: None,
- initialization_options: None,
- disk_based_diagnostics_sources: Vec::new(),
- prettier_plugins: Vec::new(),
- }
- }
-}
-
-#[cfg(any(test, feature = "test-support"))]
-#[async_trait]
-impl LspAdapter for Arc<FakeLspAdapter> {
- async fn name(&self) -> LanguageServerName {
- LanguageServerName(self.name.into())
- }
-
- fn short_name(&self) -> &'static str {
- "FakeLspAdapter"
- }
-
- async fn fetch_latest_server_version(
- &self,
- _: &dyn LspAdapterDelegate,
- ) -> Result<Box<dyn 'static + Send + Any>> {
- unreachable!();
- }
-
- async fn fetch_server_binary(
- &self,
- _: Box<dyn 'static + Send + Any>,
- _: PathBuf,
- _: &dyn LspAdapterDelegate,
- ) -> Result<LanguageServerBinary> {
- unreachable!();
- }
-
- async fn cached_server_binary(
- &self,
- _: PathBuf,
- _: &dyn LspAdapterDelegate,
- ) -> Option<LanguageServerBinary> {
- unreachable!();
- }
-
- async fn installation_test_binary(&self, _: PathBuf) -> Option<LanguageServerBinary> {
- unreachable!();
- }
-
- fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
-
- async fn disk_based_diagnostic_sources(&self) -> Vec<String> {
- self.disk_based_diagnostics_sources.clone()
- }
-
- async fn disk_based_diagnostics_progress_token(&self) -> Option<String> {
- self.disk_based_diagnostics_progress_token.clone()
- }
-
- async fn initialization_options(&self) -> Option<Value> {
- self.initialization_options.clone()
- }
-
- fn prettier_plugins(&self) -> &[&'static str] {
- &self.prettier_plugins
- }
-}
-
-fn get_capture_indices(query: &Query, captures: &mut [(&str, &mut Option<u32>)]) {
- for (ix, name) in query.capture_names().iter().enumerate() {
- for (capture_name, index) in captures.iter_mut() {
- if capture_name == name {
- **index = Some(ix as u32);
- break;
- }
- }
- }
-}
-
-pub fn point_to_lsp(point: PointUtf16) -> lsp::Position {
- lsp::Position::new(point.row, point.column)
-}
-
-pub fn point_from_lsp(point: lsp::Position) -> Unclipped<PointUtf16> {
- Unclipped(PointUtf16::new(point.line, point.character))
-}
-
-pub fn range_to_lsp(range: Range<PointUtf16>) -> lsp::Range {
- lsp::Range {
- start: point_to_lsp(range.start),
- end: point_to_lsp(range.end),
- }
-}
-
-pub fn range_from_lsp(range: lsp::Range) -> Range<Unclipped<PointUtf16>> {
- let mut start = point_from_lsp(range.start);
- let mut end = point_from_lsp(range.end);
- if start > end {
- mem::swap(&mut start, &mut end);
- }
- start..end
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use gpui::TestAppContext;
-
- #[gpui::test(iterations = 10)]
- async fn test_first_line_pattern(cx: &mut TestAppContext) {
- let mut languages = LanguageRegistry::test();
-
- languages.set_executor(cx.executor());
- let languages = Arc::new(languages);
- languages.register(
- "/javascript",
- LanguageConfig {
- name: "JavaScript".into(),
- path_suffixes: vec!["js".into()],
- first_line_pattern: Some(Regex::new(r"\bnode\b").unwrap()),
- ..Default::default()
- },
- tree_sitter_typescript::language_tsx(),
- vec![],
- |_| Default::default(),
- );
-
- languages
- .language_for_file("the/script", None)
- .await
- .unwrap_err();
- languages
- .language_for_file("the/script", Some(&"nothing".into()))
- .await
- .unwrap_err();
- assert_eq!(
- languages
- .language_for_file("the/script", Some(&"#!/bin/env node".into()))
- .await
- .unwrap()
- .name()
- .as_ref(),
- "JavaScript"
- );
- }
-
- #[gpui::test(iterations = 10)]
- async fn test_language_loading(cx: &mut TestAppContext) {
- let mut languages = LanguageRegistry::test();
- languages.set_executor(cx.executor());
- let languages = Arc::new(languages);
- languages.register(
- "/JSON",
- LanguageConfig {
- name: "JSON".into(),
- path_suffixes: vec!["json".into()],
- ..Default::default()
- },
- tree_sitter_json::language(),
- vec![],
- |_| Default::default(),
- );
- languages.register(
- "/rust",
- LanguageConfig {
- name: "Rust".into(),
- path_suffixes: vec!["rs".into()],
- ..Default::default()
- },
- tree_sitter_rust::language(),
- vec![],
- |_| Default::default(),
- );
- assert_eq!(
- languages.language_names(),
- &[
- "JSON".to_string(),
- "Plain Text".to_string(),
- "Rust".to_string(),
- ]
- );
-
- let rust1 = languages.language_for_name("Rust");
- let rust2 = languages.language_for_name("Rust");
-
- // Ensure language is still listed even if it's being loaded.
- assert_eq!(
- languages.language_names(),
- &[
- "JSON".to_string(),
- "Plain Text".to_string(),
- "Rust".to_string(),
- ]
- );
-
- let (rust1, rust2) = futures::join!(rust1, rust2);
- assert!(Arc::ptr_eq(&rust1.unwrap(), &rust2.unwrap()));
-
- // Ensure language is still listed even after loading it.
- assert_eq!(
- languages.language_names(),
- &[
- "JSON".to_string(),
- "Plain Text".to_string(),
- "Rust".to_string(),
- ]
- );
-
- // Loading an unknown language returns an error.
- assert!(languages.language_for_name("Unknown").await.is_err());
- }
-}
@@ -1,431 +0,0 @@
-use crate::{File, Language};
-use anyhow::Result;
-use collections::{HashMap, HashSet};
-use globset::GlobMatcher;
-use gpui::AppContext;
-use schemars::{
- schema::{InstanceType, ObjectValidation, Schema, SchemaObject},
- JsonSchema,
-};
-use serde::{Deserialize, Serialize};
-use settings::Settings;
-use std::{num::NonZeroU32, path::Path, sync::Arc};
-
-pub fn init(cx: &mut AppContext) {
- AllLanguageSettings::register(cx);
-}
-
-pub fn language_settings<'a>(
- language: Option<&Arc<Language>>,
- file: Option<&Arc<dyn File>>,
- cx: &'a AppContext,
-) -> &'a LanguageSettings {
- let language_name = language.map(|l| l.name());
- all_language_settings(file, cx).language(language_name.as_deref())
-}
-
-pub fn all_language_settings<'a>(
- file: Option<&Arc<dyn File>>,
- cx: &'a AppContext,
-) -> &'a AllLanguageSettings {
- let location = file.map(|f| (f.worktree_id(), f.path().as_ref()));
- AllLanguageSettings::get(location, cx)
-}
-
-#[derive(Debug, Clone)]
-pub struct AllLanguageSettings {
- pub copilot: CopilotSettings,
- defaults: LanguageSettings,
- languages: HashMap<Arc<str>, LanguageSettings>,
-}
-
-#[derive(Debug, Clone, Deserialize)]
-pub struct LanguageSettings {
- pub tab_size: NonZeroU32,
- pub hard_tabs: bool,
- pub soft_wrap: SoftWrap,
- pub preferred_line_length: u32,
- pub show_wrap_guides: bool,
- pub wrap_guides: Vec<usize>,
- pub format_on_save: FormatOnSave,
- pub remove_trailing_whitespace_on_save: bool,
- pub ensure_final_newline_on_save: bool,
- pub formatter: Formatter,
- pub prettier: HashMap<String, serde_json::Value>,
- pub enable_language_server: bool,
- pub show_copilot_suggestions: bool,
- pub show_whitespaces: ShowWhitespaceSetting,
- pub extend_comment_on_newline: bool,
- pub inlay_hints: InlayHintSettings,
-}
-
-#[derive(Clone, Debug, Default)]
-pub struct CopilotSettings {
- pub feature_enabled: bool,
- pub disabled_globs: Vec<GlobMatcher>,
-}
-
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
-pub struct AllLanguageSettingsContent {
- #[serde(default)]
- pub features: Option<FeaturesContent>,
- #[serde(default)]
- pub copilot: Option<CopilotSettingsContent>,
- #[serde(flatten)]
- pub defaults: LanguageSettingsContent,
- #[serde(default, alias = "language_overrides")]
- pub languages: HashMap<Arc<str>, LanguageSettingsContent>,
-}
-
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
-pub struct LanguageSettingsContent {
- #[serde(default)]
- pub tab_size: Option<NonZeroU32>,
- #[serde(default)]
- pub hard_tabs: Option<bool>,
- #[serde(default)]
- pub soft_wrap: Option<SoftWrap>,
- #[serde(default)]
- pub preferred_line_length: Option<u32>,
- #[serde(default)]
- pub show_wrap_guides: Option<bool>,
- #[serde(default)]
- pub wrap_guides: Option<Vec<usize>>,
- #[serde(default)]
- pub format_on_save: Option<FormatOnSave>,
- #[serde(default)]
- pub remove_trailing_whitespace_on_save: Option<bool>,
- #[serde(default)]
- pub ensure_final_newline_on_save: Option<bool>,
- #[serde(default)]
- pub formatter: Option<Formatter>,
- #[serde(default)]
- pub prettier: Option<HashMap<String, serde_json::Value>>,
- #[serde(default)]
- pub enable_language_server: Option<bool>,
- #[serde(default)]
- pub show_copilot_suggestions: Option<bool>,
- #[serde(default)]
- pub show_whitespaces: Option<ShowWhitespaceSetting>,
- #[serde(default)]
- pub extend_comment_on_newline: Option<bool>,
- #[serde(default)]
- pub inlay_hints: Option<InlayHintSettings>,
-}
-
-#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
-pub struct CopilotSettingsContent {
- #[serde(default)]
- pub disabled_globs: Option<Vec<String>>,
-}
-
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
-#[serde(rename_all = "snake_case")]
-pub struct FeaturesContent {
- pub copilot: Option<bool>,
-}
-
-#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
-#[serde(rename_all = "snake_case")]
-pub enum SoftWrap {
- None,
- EditorWidth,
- PreferredLineLength,
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
-#[serde(rename_all = "snake_case")]
-pub enum FormatOnSave {
- On,
- Off,
- LanguageServer,
- External {
- command: Arc<str>,
- arguments: Arc<[String]>,
- },
-}
-
-#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
-#[serde(rename_all = "snake_case")]
-pub enum ShowWhitespaceSetting {
- Selection,
- None,
- All,
-}
-
-#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
-#[serde(rename_all = "snake_case")]
-pub enum Formatter {
- #[default]
- Auto,
- LanguageServer,
- Prettier,
- External {
- command: Arc<str>,
- arguments: Arc<[String]>,
- },
-}
-
-#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
-pub struct InlayHintSettings {
- #[serde(default)]
- pub enabled: bool,
- #[serde(default = "default_true")]
- pub show_type_hints: bool,
- #[serde(default = "default_true")]
- pub show_parameter_hints: bool,
- #[serde(default = "default_true")]
- pub show_other_hints: bool,
-}
-
-fn default_true() -> bool {
- true
-}
-
-impl InlayHintSettings {
- pub fn enabled_inlay_hint_kinds(&self) -> HashSet<Option<InlayHintKind>> {
- let mut kinds = HashSet::default();
- if self.show_type_hints {
- kinds.insert(Some(InlayHintKind::Type));
- }
- if self.show_parameter_hints {
- kinds.insert(Some(InlayHintKind::Parameter));
- }
- if self.show_other_hints {
- kinds.insert(None);
- }
- kinds
- }
-}
-
-impl AllLanguageSettings {
- pub fn language<'a>(&'a self, language_name: Option<&str>) -> &'a LanguageSettings {
- if let Some(name) = language_name {
- if let Some(overrides) = self.languages.get(name) {
- return overrides;
- }
- }
- &self.defaults
- }
-
- pub fn copilot_enabled_for_path(&self, path: &Path) -> bool {
- !self
- .copilot
- .disabled_globs
- .iter()
- .any(|glob| glob.is_match(path))
- }
-
- pub fn copilot_enabled(&self, language: Option<&Arc<Language>>, path: Option<&Path>) -> bool {
- if !self.copilot.feature_enabled {
- return false;
- }
-
- if let Some(path) = path {
- if !self.copilot_enabled_for_path(path) {
- return false;
- }
- }
-
- self.language(language.map(|l| l.name()).as_deref())
- .show_copilot_suggestions
- }
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub enum InlayHintKind {
- Type,
- Parameter,
-}
-
-impl InlayHintKind {
- pub fn from_name(name: &str) -> Option<Self> {
- match name {
- "type" => Some(InlayHintKind::Type),
- "parameter" => Some(InlayHintKind::Parameter),
- _ => None,
- }
- }
-
- pub fn name(&self) -> &'static str {
- match self {
- InlayHintKind::Type => "type",
- InlayHintKind::Parameter => "parameter",
- }
- }
-}
-
-impl settings::Settings for AllLanguageSettings {
- const KEY: Option<&'static str> = None;
-
- type FileContent = AllLanguageSettingsContent;
-
- fn load(
- default_value: &Self::FileContent,
- user_settings: &[&Self::FileContent],
- _: &mut AppContext,
- ) -> Result<Self> {
- // A default is provided for all settings.
- let mut defaults: LanguageSettings =
- serde_json::from_value(serde_json::to_value(&default_value.defaults)?)?;
-
- let mut languages = HashMap::default();
- for (language_name, settings) in &default_value.languages {
- let mut language_settings = defaults.clone();
- merge_settings(&mut language_settings, &settings);
- languages.insert(language_name.clone(), language_settings);
- }
-
- let mut copilot_enabled = default_value
- .features
- .as_ref()
- .and_then(|f| f.copilot)
- .ok_or_else(Self::missing_default)?;
- let mut copilot_globs = default_value
- .copilot
- .as_ref()
- .and_then(|c| c.disabled_globs.as_ref())
- .ok_or_else(Self::missing_default)?;
-
- for user_settings in user_settings {
- if let Some(copilot) = user_settings.features.as_ref().and_then(|f| f.copilot) {
- copilot_enabled = copilot;
- }
- if let Some(globs) = user_settings
- .copilot
- .as_ref()
- .and_then(|f| f.disabled_globs.as_ref())
- {
- copilot_globs = globs;
- }
-
- // A user's global settings override the default global settings and
- // all default language-specific settings.
- merge_settings(&mut defaults, &user_settings.defaults);
- for language_settings in languages.values_mut() {
- merge_settings(language_settings, &user_settings.defaults);
- }
-
- // A user's language-specific settings override default language-specific settings.
- for (language_name, user_language_settings) in &user_settings.languages {
- merge_settings(
- languages
- .entry(language_name.clone())
- .or_insert_with(|| defaults.clone()),
- &user_language_settings,
- );
- }
- }
-
- Ok(Self {
- copilot: CopilotSettings {
- feature_enabled: copilot_enabled,
- disabled_globs: copilot_globs
- .iter()
- .filter_map(|g| Some(globset::Glob::new(g).ok()?.compile_matcher()))
- .collect(),
- },
- defaults,
- languages,
- })
- }
-
- fn json_schema(
- generator: &mut schemars::gen::SchemaGenerator,
- params: &settings::SettingsJsonSchemaParams,
- _: &AppContext,
- ) -> schemars::schema::RootSchema {
- let mut root_schema = generator.root_schema_for::<Self::FileContent>();
-
- // Create a schema for a 'languages overrides' object, associating editor
- // settings with specific languages.
- assert!(root_schema
- .definitions
- .contains_key("LanguageSettingsContent"));
-
- let languages_object_schema = SchemaObject {
- instance_type: Some(InstanceType::Object.into()),
- object: Some(Box::new(ObjectValidation {
- properties: params
- .language_names
- .iter()
- .map(|name| {
- (
- name.clone(),
- Schema::new_ref("#/definitions/LanguageSettingsContent".into()),
- )
- })
- .collect(),
- ..Default::default()
- })),
- ..Default::default()
- };
-
- root_schema
- .definitions
- .extend([("Languages".into(), languages_object_schema.into())]);
-
- root_schema
- .schema
- .object
- .as_mut()
- .unwrap()
- .properties
- .extend([
- (
- "languages".to_owned(),
- Schema::new_ref("#/definitions/Languages".into()),
- ),
- // For backward compatibility
- (
- "language_overrides".to_owned(),
- Schema::new_ref("#/definitions/Languages".into()),
- ),
- ]);
-
- root_schema
- }
-}
-
-fn merge_settings(settings: &mut LanguageSettings, src: &LanguageSettingsContent) {
- merge(&mut settings.tab_size, src.tab_size);
- merge(&mut settings.hard_tabs, src.hard_tabs);
- merge(&mut settings.soft_wrap, src.soft_wrap);
- merge(&mut settings.show_wrap_guides, src.show_wrap_guides);
- merge(&mut settings.wrap_guides, src.wrap_guides.clone());
-
- merge(
- &mut settings.preferred_line_length,
- src.preferred_line_length,
- );
- merge(&mut settings.formatter, src.formatter.clone());
- merge(&mut settings.prettier, src.prettier.clone());
- merge(&mut settings.format_on_save, src.format_on_save.clone());
- merge(
- &mut settings.remove_trailing_whitespace_on_save,
- src.remove_trailing_whitespace_on_save,
- );
- merge(
- &mut settings.ensure_final_newline_on_save,
- src.ensure_final_newline_on_save,
- );
- merge(
- &mut settings.enable_language_server,
- src.enable_language_server,
- );
- merge(
- &mut settings.show_copilot_suggestions,
- src.show_copilot_suggestions,
- );
- merge(&mut settings.show_whitespaces, src.show_whitespaces);
- merge(
- &mut settings.extend_comment_on_newline,
- src.extend_comment_on_newline,
- );
- merge(&mut settings.inlay_hints, src.inlay_hints);
- fn merge<T>(target: &mut T, value: Option<T>) {
- if let Some(value) = value {
- *target = value;
- }
- }
-}
@@ -1,301 +0,0 @@
-use std::sync::Arc;
-use std::{ops::Range, path::PathBuf};
-
-use crate::{HighlightId, Language, LanguageRegistry};
-use gpui::{px, FontStyle, FontWeight, HighlightStyle, UnderlineStyle};
-use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag};
-
-#[derive(Debug, Clone)]
-pub struct ParsedMarkdown {
- pub text: String,
- pub highlights: Vec<(Range<usize>, MarkdownHighlight)>,
- pub region_ranges: Vec<Range<usize>>,
- pub regions: Vec<ParsedRegion>,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub enum MarkdownHighlight {
- Style(MarkdownHighlightStyle),
- Code(HighlightId),
-}
-
-impl MarkdownHighlight {
- pub fn to_highlight_style(&self, theme: &theme::SyntaxTheme) -> Option<HighlightStyle> {
- match self {
- MarkdownHighlight::Style(style) => {
- let mut highlight = HighlightStyle::default();
-
- if style.italic {
- highlight.font_style = Some(FontStyle::Italic);
- }
-
- if style.underline {
- highlight.underline = Some(UnderlineStyle {
- thickness: px(1.),
- ..Default::default()
- });
- }
-
- if style.weight != FontWeight::default() {
- highlight.font_weight = Some(style.weight);
- }
-
- Some(highlight)
- }
-
- MarkdownHighlight::Code(id) => id.style(theme),
- }
- }
-}
-
-#[derive(Debug, Clone, Default, PartialEq, Eq)]
-pub struct MarkdownHighlightStyle {
- pub italic: bool,
- pub underline: bool,
- pub weight: FontWeight,
-}
-
-#[derive(Debug, Clone)]
-pub struct ParsedRegion {
- pub code: bool,
- pub link: Option<Link>,
-}
-
-#[derive(Debug, Clone)]
-pub enum Link {
- Web { url: String },
- Path { path: PathBuf },
-}
-
-impl Link {
- fn identify(text: String) -> Option<Link> {
- if text.starts_with("http") {
- return Some(Link::Web { url: text });
- }
-
- let path = PathBuf::from(text);
- if path.is_absolute() {
- return Some(Link::Path { path });
- }
-
- None
- }
-}
-
-pub async fn parse_markdown(
- markdown: &str,
- language_registry: &Arc<LanguageRegistry>,
- language: Option<Arc<Language>>,
-) -> ParsedMarkdown {
- let mut text = String::new();
- let mut highlights = Vec::new();
- let mut region_ranges = Vec::new();
- let mut regions = Vec::new();
-
- parse_markdown_block(
- markdown,
- language_registry,
- language,
- &mut text,
- &mut highlights,
- &mut region_ranges,
- &mut regions,
- )
- .await;
-
- ParsedMarkdown {
- text,
- highlights,
- region_ranges,
- regions,
- }
-}
-
-pub async fn parse_markdown_block(
- markdown: &str,
- language_registry: &Arc<LanguageRegistry>,
- language: Option<Arc<Language>>,
- text: &mut String,
- highlights: &mut Vec<(Range<usize>, MarkdownHighlight)>,
- region_ranges: &mut Vec<Range<usize>>,
- regions: &mut Vec<ParsedRegion>,
-) {
- let mut bold_depth = 0;
- let mut italic_depth = 0;
- let mut link_url = None;
- let mut current_language = None;
- let mut list_stack = Vec::new();
-
- for event in Parser::new_ext(&markdown, Options::all()) {
- let prev_len = text.len();
- match event {
- Event::Text(t) => {
-                if let Some(language) = &current_language {
- highlight_code(text, highlights, t.as_ref(), language);
- } else {
- text.push_str(t.as_ref());
-
- let mut style = MarkdownHighlightStyle::default();
-
- if bold_depth > 0 {
- style.weight = FontWeight::BOLD;
- }
-
- if italic_depth > 0 {
- style.italic = true;
- }
-
- if let Some(link) = link_url.clone().and_then(|u| Link::identify(u)) {
- region_ranges.push(prev_len..text.len());
- regions.push(ParsedRegion {
- code: false,
- link: Some(link),
- });
- style.underline = true;
- }
-
- if style != MarkdownHighlightStyle::default() {
- let mut new_highlight = true;
- if let Some((last_range, MarkdownHighlight::Style(last_style))) =
- highlights.last_mut()
- {
- if last_range.end == prev_len && last_style == &style {
- last_range.end = text.len();
- new_highlight = false;
- }
- }
- if new_highlight {
- let range = prev_len..text.len();
- highlights.push((range, MarkdownHighlight::Style(style)));
- }
- }
- }
- }
-
- Event::Code(t) => {
- text.push_str(t.as_ref());
- region_ranges.push(prev_len..text.len());
-
- let link = link_url.clone().and_then(|u| Link::identify(u));
- if link.is_some() {
- highlights.push((
- prev_len..text.len(),
- MarkdownHighlight::Style(MarkdownHighlightStyle {
- underline: true,
- ..Default::default()
- }),
- ));
- }
- regions.push(ParsedRegion { code: true, link });
- }
-
- Event::Start(tag) => match tag {
- Tag::Paragraph => new_paragraph(text, &mut list_stack),
-
- Tag::Heading(_, _, _) => {
- new_paragraph(text, &mut list_stack);
- bold_depth += 1;
- }
-
- Tag::CodeBlock(kind) => {
- new_paragraph(text, &mut list_stack);
- current_language = if let CodeBlockKind::Fenced(language) = kind {
- language_registry
- .language_for_name(language.as_ref())
- .await
- .ok()
- } else {
- language.clone()
- }
- }
-
- Tag::Emphasis => italic_depth += 1,
-
- Tag::Strong => bold_depth += 1,
-
- Tag::Link(_, url, _) => link_url = Some(url.to_string()),
-
- Tag::List(number) => {
- list_stack.push((number, false));
- }
-
- Tag::Item => {
- let len = list_stack.len();
- if let Some((list_number, has_content)) = list_stack.last_mut() {
- *has_content = false;
- if !text.is_empty() && !text.ends_with('\n') {
- text.push('\n');
- }
- for _ in 0..len - 1 {
- text.push_str(" ");
- }
- if let Some(number) = list_number {
- text.push_str(&format!("{}. ", number));
- *number += 1;
- *has_content = false;
- } else {
- text.push_str("- ");
- }
- }
- }
-
- _ => {}
- },
-
- Event::End(tag) => match tag {
- Tag::Heading(_, _, _) => bold_depth -= 1,
- Tag::CodeBlock(_) => current_language = None,
- Tag::Emphasis => italic_depth -= 1,
- Tag::Strong => bold_depth -= 1,
- Tag::Link(_, _, _) => link_url = None,
- Tag::List(_) => drop(list_stack.pop()),
- _ => {}
- },
-
- Event::HardBreak => text.push('\n'),
-
- Event::SoftBreak => text.push(' '),
-
- _ => {}
- }
- }
-}
-
-pub fn highlight_code(
- text: &mut String,
- highlights: &mut Vec<(Range<usize>, MarkdownHighlight)>,
- content: &str,
- language: &Arc<Language>,
-) {
- let prev_len = text.len();
- text.push_str(content);
- for (range, highlight_id) in language.highlight_text(&content.into(), 0..content.len()) {
- let highlight = MarkdownHighlight::Code(highlight_id);
- highlights.push((prev_len + range.start..prev_len + range.end, highlight));
- }
-}
-
-pub fn new_paragraph(text: &mut String, list_stack: &mut Vec<(Option<u64>, bool)>) {
- let mut is_subsequent_paragraph_of_list = false;
- if let Some((_, has_content)) = list_stack.last_mut() {
- if *has_content {
- is_subsequent_paragraph_of_list = true;
- } else {
- *has_content = true;
- return;
- }
- }
-
- if !text.is_empty() {
- if !text.ends_with('\n') {
- text.push('\n');
- }
- text.push('\n');
- }
- for _ in 0..list_stack.len().saturating_sub(1) {
- text.push_str(" ");
- }
- if is_subsequent_paragraph_of_list {
- text.push_str(" ");
- }
-}
@@ -1,139 +0,0 @@
-use fuzzy::{StringMatch, StringMatchCandidate};
-use gpui::{BackgroundExecutor, HighlightStyle};
-use std::ops::Range;
-
-#[derive(Debug)]
-pub struct Outline<T> {
- pub items: Vec<OutlineItem<T>>,
- candidates: Vec<StringMatchCandidate>,
- path_candidates: Vec<StringMatchCandidate>,
- path_candidate_prefixes: Vec<usize>,
-}
-
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub struct OutlineItem<T> {
- pub depth: usize,
- pub range: Range<T>,
- pub text: String,
- pub highlight_ranges: Vec<(Range<usize>, HighlightStyle)>,
- pub name_ranges: Vec<Range<usize>>,
-}
-
-impl<T> Outline<T> {
- pub fn new(items: Vec<OutlineItem<T>>) -> Self {
- let mut candidates = Vec::new();
- let mut path_candidates = Vec::new();
- let mut path_candidate_prefixes = Vec::new();
- let mut path_text = String::new();
- let mut path_stack = Vec::new();
-
- for (id, item) in items.iter().enumerate() {
- if item.depth < path_stack.len() {
- path_stack.truncate(item.depth);
- path_text.truncate(path_stack.last().copied().unwrap_or(0));
- }
- if !path_text.is_empty() {
- path_text.push(' ');
- }
- path_candidate_prefixes.push(path_text.len());
- path_text.push_str(&item.text);
- path_stack.push(path_text.len());
-
- let candidate_text = item
- .name_ranges
- .iter()
- .map(|range| &item.text[range.start as usize..range.end as usize])
- .collect::<String>();
-
- path_candidates.push(StringMatchCandidate::new(id, path_text.clone()));
- candidates.push(StringMatchCandidate::new(id, candidate_text));
- }
-
- Self {
- candidates,
- path_candidates,
- path_candidate_prefixes,
- items,
- }
- }
-
- pub async fn search(&self, query: &str, executor: BackgroundExecutor) -> Vec<StringMatch> {
- let query = query.trim_start();
- let is_path_query = query.contains(' ');
- let smart_case = query.chars().any(|c| c.is_uppercase());
- let mut matches = fuzzy::match_strings(
- if is_path_query {
- &self.path_candidates
- } else {
- &self.candidates
- },
- query,
- smart_case,
- 100,
- &Default::default(),
- executor.clone(),
- )
- .await;
- matches.sort_unstable_by_key(|m| m.candidate_id);
-
- let mut tree_matches = Vec::new();
-
- let mut prev_item_ix = 0;
- for mut string_match in matches {
- let outline_match = &self.items[string_match.candidate_id];
- string_match.string = outline_match.text.clone();
-
- if is_path_query {
- let prefix_len = self.path_candidate_prefixes[string_match.candidate_id];
- string_match
- .positions
- .retain(|position| *position >= prefix_len);
- for position in &mut string_match.positions {
- *position -= prefix_len;
- }
- } else {
- let mut name_ranges = outline_match.name_ranges.iter();
- let mut name_range = name_ranges.next().unwrap();
- let mut preceding_ranges_len = 0;
- for position in &mut string_match.positions {
- while *position >= preceding_ranges_len + name_range.len() as usize {
- preceding_ranges_len += name_range.len();
- name_range = name_ranges.next().unwrap();
- }
- *position = name_range.start as usize + (*position - preceding_ranges_len);
- }
- }
-
- let insertion_ix = tree_matches.len();
- let mut cur_depth = outline_match.depth;
- for (ix, item) in self.items[prev_item_ix..string_match.candidate_id]
- .iter()
- .enumerate()
- .rev()
- {
- if cur_depth == 0 {
- break;
- }
-
- let candidate_index = ix + prev_item_ix;
- if item.depth == cur_depth - 1 {
- tree_matches.insert(
- insertion_ix,
- StringMatch {
- candidate_id: candidate_index,
- score: Default::default(),
- positions: Default::default(),
- string: Default::default(),
- },
- );
- cur_depth -= 1;
- }
- }
-
- prev_item_ix = string_match.candidate_id + 1;
- tree_matches.push(string_match);
- }
-
- tree_matches
- }
-}
@@ -1,590 +0,0 @@
-use crate::{
- diagnostic_set::DiagnosticEntry, CodeAction, CodeLabel, Completion, CursorShape, Diagnostic,
- Language,
-};
-use anyhow::{anyhow, Result};
-use clock::ReplicaId;
-use lsp::{DiagnosticSeverity, LanguageServerId};
-use rpc::proto;
-use std::{ops::Range, sync::Arc};
-use text::*;
-
-pub use proto::{BufferState, Operation};
-
-pub fn serialize_fingerprint(fingerprint: RopeFingerprint) -> String {
- fingerprint.to_hex()
-}
-
-pub fn deserialize_fingerprint(fingerprint: &str) -> Result<RopeFingerprint> {
- RopeFingerprint::from_hex(fingerprint)
- .map_err(|error| anyhow!("invalid fingerprint: {}", error))
-}
-
-pub fn deserialize_line_ending(message: proto::LineEnding) -> text::LineEnding {
- match message {
- proto::LineEnding::Unix => text::LineEnding::Unix,
- proto::LineEnding::Windows => text::LineEnding::Windows,
- }
-}
-
-pub fn serialize_line_ending(message: text::LineEnding) -> proto::LineEnding {
- match message {
- text::LineEnding::Unix => proto::LineEnding::Unix,
- text::LineEnding::Windows => proto::LineEnding::Windows,
- }
-}
-
-pub fn serialize_operation(operation: &crate::Operation) -> proto::Operation {
- proto::Operation {
- variant: Some(match operation {
- crate::Operation::Buffer(text::Operation::Edit(edit)) => {
- proto::operation::Variant::Edit(serialize_edit_operation(edit))
- }
-
- crate::Operation::Buffer(text::Operation::Undo(undo)) => {
- proto::operation::Variant::Undo(proto::operation::Undo {
- replica_id: undo.timestamp.replica_id as u32,
- lamport_timestamp: undo.timestamp.value,
- version: serialize_version(&undo.version),
- counts: undo
- .counts
- .iter()
- .map(|(edit_id, count)| proto::UndoCount {
- replica_id: edit_id.replica_id as u32,
- lamport_timestamp: edit_id.value,
- count: *count,
- })
- .collect(),
- })
- }
-
- crate::Operation::UpdateSelections {
- selections,
- line_mode,
- lamport_timestamp,
- cursor_shape,
- } => proto::operation::Variant::UpdateSelections(proto::operation::UpdateSelections {
- replica_id: lamport_timestamp.replica_id as u32,
- lamport_timestamp: lamport_timestamp.value,
- selections: serialize_selections(selections),
- line_mode: *line_mode,
- cursor_shape: serialize_cursor_shape(cursor_shape) as i32,
- }),
-
- crate::Operation::UpdateDiagnostics {
- lamport_timestamp,
- server_id,
- diagnostics,
- } => proto::operation::Variant::UpdateDiagnostics(proto::UpdateDiagnostics {
- replica_id: lamport_timestamp.replica_id as u32,
- lamport_timestamp: lamport_timestamp.value,
- server_id: server_id.0 as u64,
- diagnostics: serialize_diagnostics(diagnostics.iter()),
- }),
-
- crate::Operation::UpdateCompletionTriggers {
- triggers,
- lamport_timestamp,
- } => proto::operation::Variant::UpdateCompletionTriggers(
- proto::operation::UpdateCompletionTriggers {
- replica_id: lamport_timestamp.replica_id as u32,
- lamport_timestamp: lamport_timestamp.value,
- triggers: triggers.clone(),
- },
- ),
- }),
- }
-}
-
-pub fn serialize_edit_operation(operation: &EditOperation) -> proto::operation::Edit {
- proto::operation::Edit {
- replica_id: operation.timestamp.replica_id as u32,
- lamport_timestamp: operation.timestamp.value,
- version: serialize_version(&operation.version),
- ranges: operation.ranges.iter().map(serialize_range).collect(),
- new_text: operation
- .new_text
- .iter()
- .map(|text| text.to_string())
- .collect(),
- }
-}
-
-pub fn serialize_undo_map_entry(
- (edit_id, counts): (&clock::Lamport, &[(clock::Lamport, u32)]),
-) -> proto::UndoMapEntry {
- proto::UndoMapEntry {
- replica_id: edit_id.replica_id as u32,
- local_timestamp: edit_id.value,
- counts: counts
- .iter()
- .map(|(undo_id, count)| proto::UndoCount {
- replica_id: undo_id.replica_id as u32,
- lamport_timestamp: undo_id.value,
- count: *count,
- })
- .collect(),
- }
-}
-
-pub fn split_operations(
- mut operations: Vec<proto::Operation>,
-) -> impl Iterator<Item = Vec<proto::Operation>> {
- #[cfg(any(test, feature = "test-support"))]
- const CHUNK_SIZE: usize = 5;
-
- #[cfg(not(any(test, feature = "test-support")))]
- const CHUNK_SIZE: usize = 100;
-
- let mut done = false;
- std::iter::from_fn(move || {
- if done {
- return None;
- }
-
- let operations = operations
- .drain(..std::cmp::min(CHUNK_SIZE, operations.len()))
- .collect::<Vec<_>>();
- if operations.is_empty() {
- done = true;
- }
- Some(operations)
- })
-}
-
-pub fn serialize_selections(selections: &Arc<[Selection<Anchor>]>) -> Vec<proto::Selection> {
- selections.iter().map(serialize_selection).collect()
-}
-
-pub fn serialize_selection(selection: &Selection<Anchor>) -> proto::Selection {
- proto::Selection {
- id: selection.id as u64,
- start: Some(proto::EditorAnchor {
- anchor: Some(serialize_anchor(&selection.start)),
- excerpt_id: 0,
- }),
- end: Some(proto::EditorAnchor {
- anchor: Some(serialize_anchor(&selection.end)),
- excerpt_id: 0,
- }),
- reversed: selection.reversed,
- }
-}
-
-pub fn serialize_cursor_shape(cursor_shape: &CursorShape) -> proto::CursorShape {
- match cursor_shape {
- CursorShape::Bar => proto::CursorShape::CursorBar,
- CursorShape::Block => proto::CursorShape::CursorBlock,
- CursorShape::Underscore => proto::CursorShape::CursorUnderscore,
- CursorShape::Hollow => proto::CursorShape::CursorHollow,
- }
-}
-
-pub fn deserialize_cursor_shape(cursor_shape: proto::CursorShape) -> CursorShape {
- match cursor_shape {
- proto::CursorShape::CursorBar => CursorShape::Bar,
- proto::CursorShape::CursorBlock => CursorShape::Block,
- proto::CursorShape::CursorUnderscore => CursorShape::Underscore,
- proto::CursorShape::CursorHollow => CursorShape::Hollow,
- }
-}
-
-pub fn serialize_diagnostics<'a>(
- diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<Anchor>>,
-) -> Vec<proto::Diagnostic> {
- diagnostics
- .into_iter()
- .map(|entry| proto::Diagnostic {
- source: entry.diagnostic.source.clone(),
- start: Some(serialize_anchor(&entry.range.start)),
- end: Some(serialize_anchor(&entry.range.end)),
- message: entry.diagnostic.message.clone(),
- severity: match entry.diagnostic.severity {
- DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error,
- DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning,
- DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information,
- DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint,
- _ => proto::diagnostic::Severity::None,
- } as i32,
- group_id: entry.diagnostic.group_id as u64,
- is_primary: entry.diagnostic.is_primary,
- is_valid: entry.diagnostic.is_valid,
- code: entry.diagnostic.code.clone(),
- is_disk_based: entry.diagnostic.is_disk_based,
- is_unnecessary: entry.diagnostic.is_unnecessary,
- })
- .collect()
-}
-
-pub fn serialize_anchor(anchor: &Anchor) -> proto::Anchor {
- proto::Anchor {
- replica_id: anchor.timestamp.replica_id as u32,
- timestamp: anchor.timestamp.value,
- offset: anchor.offset as u64,
- bias: match anchor.bias {
- Bias::Left => proto::Bias::Left as i32,
- Bias::Right => proto::Bias::Right as i32,
- },
- buffer_id: anchor.buffer_id,
- }
-}
-
-// This behavior is currently copied in the collab database, for snapshotting channel notes
-pub fn deserialize_operation(message: proto::Operation) -> Result<crate::Operation> {
- Ok(
- match message
- .variant
- .ok_or_else(|| anyhow!("missing operation variant"))?
- {
- proto::operation::Variant::Edit(edit) => {
- crate::Operation::Buffer(text::Operation::Edit(deserialize_edit_operation(edit)))
- }
- proto::operation::Variant::Undo(undo) => {
- crate::Operation::Buffer(text::Operation::Undo(UndoOperation {
- timestamp: clock::Lamport {
- replica_id: undo.replica_id as ReplicaId,
- value: undo.lamport_timestamp,
- },
- version: deserialize_version(&undo.version),
- counts: undo
- .counts
- .into_iter()
- .map(|c| {
- (
- clock::Lamport {
- replica_id: c.replica_id as ReplicaId,
- value: c.lamport_timestamp,
- },
- c.count,
- )
- })
- .collect(),
- }))
- }
- proto::operation::Variant::UpdateSelections(message) => {
- let selections = message
- .selections
- .into_iter()
- .filter_map(|selection| {
- Some(Selection {
- id: selection.id as usize,
- start: deserialize_anchor(selection.start?.anchor?)?,
- end: deserialize_anchor(selection.end?.anchor?)?,
- reversed: selection.reversed,
- goal: SelectionGoal::None,
- })
- })
- .collect::<Vec<_>>();
-
- crate::Operation::UpdateSelections {
- lamport_timestamp: clock::Lamport {
- replica_id: message.replica_id as ReplicaId,
- value: message.lamport_timestamp,
- },
- selections: Arc::from(selections),
- line_mode: message.line_mode,
- cursor_shape: deserialize_cursor_shape(
- proto::CursorShape::from_i32(message.cursor_shape)
- .ok_or_else(|| anyhow!("Missing cursor shape"))?,
- ),
- }
- }
- proto::operation::Variant::UpdateDiagnostics(message) => {
- crate::Operation::UpdateDiagnostics {
- lamport_timestamp: clock::Lamport {
- replica_id: message.replica_id as ReplicaId,
- value: message.lamport_timestamp,
- },
- server_id: LanguageServerId(message.server_id as usize),
- diagnostics: deserialize_diagnostics(message.diagnostics),
- }
- }
- proto::operation::Variant::UpdateCompletionTriggers(message) => {
- crate::Operation::UpdateCompletionTriggers {
- triggers: message.triggers,
- lamport_timestamp: clock::Lamport {
- replica_id: message.replica_id as ReplicaId,
- value: message.lamport_timestamp,
- },
- }
- }
- },
- )
-}
-
-pub fn deserialize_edit_operation(edit: proto::operation::Edit) -> EditOperation {
- EditOperation {
- timestamp: clock::Lamport {
- replica_id: edit.replica_id as ReplicaId,
- value: edit.lamport_timestamp,
- },
- version: deserialize_version(&edit.version),
- ranges: edit.ranges.into_iter().map(deserialize_range).collect(),
- new_text: edit.new_text.into_iter().map(Arc::from).collect(),
- }
-}
-
-pub fn deserialize_undo_map_entry(
- entry: proto::UndoMapEntry,
-) -> (clock::Lamport, Vec<(clock::Lamport, u32)>) {
- (
- clock::Lamport {
- replica_id: entry.replica_id as u16,
- value: entry.local_timestamp,
- },
- entry
- .counts
- .into_iter()
- .map(|undo_count| {
- (
- clock::Lamport {
- replica_id: undo_count.replica_id as u16,
- value: undo_count.lamport_timestamp,
- },
- undo_count.count,
- )
- })
- .collect(),
- )
-}
-
-pub fn deserialize_selections(selections: Vec<proto::Selection>) -> Arc<[Selection<Anchor>]> {
- Arc::from(
- selections
- .into_iter()
- .filter_map(deserialize_selection)
- .collect::<Vec<_>>(),
- )
-}
-
-pub fn deserialize_selection(selection: proto::Selection) -> Option<Selection<Anchor>> {
- Some(Selection {
- id: selection.id as usize,
- start: deserialize_anchor(selection.start?.anchor?)?,
- end: deserialize_anchor(selection.end?.anchor?)?,
- reversed: selection.reversed,
- goal: SelectionGoal::None,
- })
-}
-
-pub fn deserialize_diagnostics(
- diagnostics: Vec<proto::Diagnostic>,
-) -> Arc<[DiagnosticEntry<Anchor>]> {
- diagnostics
- .into_iter()
- .filter_map(|diagnostic| {
- Some(DiagnosticEntry {
- range: deserialize_anchor(diagnostic.start?)?..deserialize_anchor(diagnostic.end?)?,
- diagnostic: Diagnostic {
- source: diagnostic.source,
- severity: match proto::diagnostic::Severity::from_i32(diagnostic.severity)? {
- proto::diagnostic::Severity::Error => DiagnosticSeverity::ERROR,
- proto::diagnostic::Severity::Warning => DiagnosticSeverity::WARNING,
- proto::diagnostic::Severity::Information => DiagnosticSeverity::INFORMATION,
- proto::diagnostic::Severity::Hint => DiagnosticSeverity::HINT,
- proto::diagnostic::Severity::None => return None,
- },
- message: diagnostic.message,
- group_id: diagnostic.group_id as usize,
- code: diagnostic.code,
- is_valid: diagnostic.is_valid,
- is_primary: diagnostic.is_primary,
- is_disk_based: diagnostic.is_disk_based,
- is_unnecessary: diagnostic.is_unnecessary,
- },
- })
- })
- .collect()
-}
-
-pub fn deserialize_anchor(anchor: proto::Anchor) -> Option<Anchor> {
- Some(Anchor {
- timestamp: clock::Lamport {
- replica_id: anchor.replica_id as ReplicaId,
- value: anchor.timestamp,
- },
- offset: anchor.offset as usize,
- bias: match proto::Bias::from_i32(anchor.bias)? {
- proto::Bias::Left => Bias::Left,
- proto::Bias::Right => Bias::Right,
- },
- buffer_id: anchor.buffer_id,
- })
-}
-
-pub fn lamport_timestamp_for_operation(operation: &proto::Operation) -> Option<clock::Lamport> {
- let replica_id;
- let value;
- match operation.variant.as_ref()? {
- proto::operation::Variant::Edit(op) => {
- replica_id = op.replica_id;
- value = op.lamport_timestamp;
- }
- proto::operation::Variant::Undo(op) => {
- replica_id = op.replica_id;
- value = op.lamport_timestamp;
- }
- proto::operation::Variant::UpdateDiagnostics(op) => {
- replica_id = op.replica_id;
- value = op.lamport_timestamp;
- }
- proto::operation::Variant::UpdateSelections(op) => {
- replica_id = op.replica_id;
- value = op.lamport_timestamp;
- }
- proto::operation::Variant::UpdateCompletionTriggers(op) => {
- replica_id = op.replica_id;
- value = op.lamport_timestamp;
- }
- }
-
- Some(clock::Lamport {
- replica_id: replica_id as ReplicaId,
- value,
- })
-}
-
-pub fn serialize_completion(completion: &Completion) -> proto::Completion {
- proto::Completion {
- old_start: Some(serialize_anchor(&completion.old_range.start)),
- old_end: Some(serialize_anchor(&completion.old_range.end)),
- new_text: completion.new_text.clone(),
- server_id: completion.server_id.0 as u64,
- lsp_completion: serde_json::to_vec(&completion.lsp_completion).unwrap(),
- }
-}
-
-pub async fn deserialize_completion(
- completion: proto::Completion,
- language: Option<Arc<Language>>,
-) -> Result<Completion> {
- let old_start = completion
- .old_start
- .and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("invalid old start"))?;
- let old_end = completion
- .old_end
- .and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("invalid old end"))?;
- let lsp_completion = serde_json::from_slice(&completion.lsp_completion)?;
-
- let mut label = None;
- if let Some(language) = language {
- label = language.label_for_completion(&lsp_completion).await;
- }
-
- Ok(Completion {
- old_range: old_start..old_end,
- new_text: completion.new_text,
- label: label.unwrap_or_else(|| {
- CodeLabel::plain(
- lsp_completion.label.clone(),
- lsp_completion.filter_text.as_deref(),
- )
- }),
- documentation: None,
- server_id: LanguageServerId(completion.server_id as usize),
- lsp_completion,
- })
-}
-
-pub fn serialize_code_action(action: &CodeAction) -> proto::CodeAction {
- proto::CodeAction {
- server_id: action.server_id.0 as u64,
- start: Some(serialize_anchor(&action.range.start)),
- end: Some(serialize_anchor(&action.range.end)),
- lsp_action: serde_json::to_vec(&action.lsp_action).unwrap(),
- }
-}
-
-pub fn deserialize_code_action(action: proto::CodeAction) -> Result<CodeAction> {
- let start = action
- .start
- .and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("invalid start"))?;
- let end = action
- .end
- .and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("invalid end"))?;
- let lsp_action = serde_json::from_slice(&action.lsp_action)?;
- Ok(CodeAction {
- server_id: LanguageServerId(action.server_id as usize),
- range: start..end,
- lsp_action,
- })
-}
-
-pub fn serialize_transaction(transaction: &Transaction) -> proto::Transaction {
- proto::Transaction {
- id: Some(serialize_timestamp(transaction.id)),
- edit_ids: transaction
- .edit_ids
- .iter()
- .copied()
- .map(serialize_timestamp)
- .collect(),
- start: serialize_version(&transaction.start),
- }
-}
-
-pub fn deserialize_transaction(transaction: proto::Transaction) -> Result<Transaction> {
- Ok(Transaction {
- id: deserialize_timestamp(
- transaction
- .id
- .ok_or_else(|| anyhow!("missing transaction id"))?,
- ),
- edit_ids: transaction
- .edit_ids
- .into_iter()
- .map(deserialize_timestamp)
- .collect(),
- start: deserialize_version(&transaction.start),
- })
-}
-
-pub fn serialize_timestamp(timestamp: clock::Lamport) -> proto::LamportTimestamp {
- proto::LamportTimestamp {
- replica_id: timestamp.replica_id as u32,
- value: timestamp.value,
- }
-}
-
-pub fn deserialize_timestamp(timestamp: proto::LamportTimestamp) -> clock::Lamport {
- clock::Lamport {
- replica_id: timestamp.replica_id as ReplicaId,
- value: timestamp.value,
- }
-}
-
-pub fn serialize_range(range: &Range<FullOffset>) -> proto::Range {
- proto::Range {
- start: range.start.0 as u64,
- end: range.end.0 as u64,
- }
-}
-
-pub fn deserialize_range(range: proto::Range) -> Range<FullOffset> {
- FullOffset(range.start as usize)..FullOffset(range.end as usize)
-}
-
-pub fn deserialize_version(message: &[proto::VectorClockEntry]) -> clock::Global {
- let mut version = clock::Global::new();
- for entry in message {
- version.observe(clock::Lamport {
- replica_id: entry.replica_id as ReplicaId,
- value: entry.timestamp,
- });
- }
- version
-}
-
-pub fn serialize_version(version: &clock::Global) -> Vec<proto::VectorClockEntry> {
- version
- .iter()
- .map(|entry| proto::VectorClockEntry {
- replica_id: entry.replica_id as u32,
- timestamp: entry.value,
- })
- .collect()
-}
@@ -1,1806 +0,0 @@
-#[cfg(test)]
-mod syntax_map_tests;
-
-use crate::{Grammar, InjectionConfig, Language, LanguageRegistry};
-use collections::HashMap;
-use futures::FutureExt;
-use parking_lot::Mutex;
-use std::{
- borrow::Cow,
- cmp::{self, Ordering, Reverse},
- collections::BinaryHeap,
- fmt, iter,
- ops::{Deref, DerefMut, Range},
- sync::Arc,
-};
-use sum_tree::{Bias, SeekTarget, SumTree};
-use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, Rope, ToOffset, ToPoint};
-use tree_sitter::{Node, Query, QueryCapture, QueryCaptures, QueryCursor, QueryMatches, Tree};
-
-use super::PARSER;
-
-static QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Mutex::new(vec![]);
-
-#[derive(Default)]
-pub struct SyntaxMap {
- snapshot: SyntaxSnapshot,
- language_registry: Option<Arc<LanguageRegistry>>,
-}
-
-#[derive(Clone, Default)]
-pub struct SyntaxSnapshot {
- layers: SumTree<SyntaxLayer>,
- parsed_version: clock::Global,
- interpolated_version: clock::Global,
- language_registry_version: usize,
-}
-
-#[derive(Default)]
-pub struct SyntaxMapCaptures<'a> {
- layers: Vec<SyntaxMapCapturesLayer<'a>>,
- active_layer_count: usize,
- grammars: Vec<&'a Grammar>,
-}
-
-#[derive(Default)]
-pub struct SyntaxMapMatches<'a> {
- layers: Vec<SyntaxMapMatchesLayer<'a>>,
- active_layer_count: usize,
- grammars: Vec<&'a Grammar>,
-}
-
-#[derive(Debug)]
-pub struct SyntaxMapCapture<'a> {
- pub depth: usize,
- pub node: Node<'a>,
- pub index: u32,
- pub grammar_index: usize,
-}
-
-#[derive(Debug)]
-pub struct SyntaxMapMatch<'a> {
- pub depth: usize,
- pub pattern_index: usize,
- pub captures: &'a [QueryCapture<'a>],
- pub grammar_index: usize,
-}
-
-struct SyntaxMapCapturesLayer<'a> {
- depth: usize,
- captures: QueryCaptures<'a, 'a, TextProvider<'a>, &'a [u8]>,
- next_capture: Option<QueryCapture<'a>>,
- grammar_index: usize,
- _query_cursor: QueryCursorHandle,
-}
-
-struct SyntaxMapMatchesLayer<'a> {
- depth: usize,
- next_pattern_index: usize,
- next_captures: Vec<QueryCapture<'a>>,
- has_next: bool,
- matches: QueryMatches<'a, 'a, TextProvider<'a>, &'a [u8]>,
- grammar_index: usize,
- _query_cursor: QueryCursorHandle,
-}
-
-#[derive(Clone)]
-struct SyntaxLayer {
- depth: usize,
- range: Range<Anchor>,
- content: SyntaxLayerContent,
-}
-
-#[derive(Clone)]
-enum SyntaxLayerContent {
- Parsed {
- tree: tree_sitter::Tree,
- language: Arc<Language>,
- },
- Pending {
- language_name: Arc<str>,
- },
-}
-
-impl SyntaxLayerContent {
- fn language_id(&self) -> Option<usize> {
- match self {
- SyntaxLayerContent::Parsed { language, .. } => language.id(),
- SyntaxLayerContent::Pending { .. } => None,
- }
- }
-
- fn tree(&self) -> Option<&Tree> {
- match self {
- SyntaxLayerContent::Parsed { tree, .. } => Some(tree),
- SyntaxLayerContent::Pending { .. } => None,
- }
- }
-}
-
-#[derive(Debug)]
-pub struct SyntaxLayerInfo<'a> {
- pub depth: usize,
- pub language: &'a Arc<Language>,
- tree: &'a Tree,
- offset: (usize, tree_sitter::Point),
-}
-
-#[derive(Clone)]
-pub struct OwnedSyntaxLayerInfo {
- pub depth: usize,
- pub language: Arc<Language>,
- tree: tree_sitter::Tree,
- offset: (usize, tree_sitter::Point),
-}
-
-#[derive(Debug, Clone)]
-struct SyntaxLayerSummary {
- min_depth: usize,
- max_depth: usize,
- range: Range<Anchor>,
- last_layer_range: Range<Anchor>,
- last_layer_language: Option<usize>,
- contains_unknown_injections: bool,
-}
-
-#[derive(Clone, Debug)]
-struct SyntaxLayerPosition {
- depth: usize,
- range: Range<Anchor>,
- language: Option<usize>,
-}
-
-#[derive(Clone, Debug)]
-struct ChangeStartPosition {
- depth: usize,
- position: Anchor,
-}
-
-#[derive(Clone, Debug)]
-struct SyntaxLayerPositionBeforeChange {
- position: SyntaxLayerPosition,
- change: ChangeStartPosition,
-}
-
-struct ParseStep {
- depth: usize,
- language: ParseStepLanguage,
- range: Range<Anchor>,
- included_ranges: Vec<tree_sitter::Range>,
- mode: ParseMode,
-}
-
-#[derive(Debug)]
-enum ParseStepLanguage {
- Loaded { language: Arc<Language> },
- Pending { name: Arc<str> },
-}
-
-impl ParseStepLanguage {
- fn id(&self) -> Option<usize> {
- match self {
- ParseStepLanguage::Loaded { language } => language.id(),
- ParseStepLanguage::Pending { .. } => None,
- }
- }
-}
-
-enum ParseMode {
- Single,
- Combined {
- parent_layer_range: Range<usize>,
- parent_layer_changed_ranges: Vec<Range<usize>>,
- },
-}
-
-#[derive(Debug, PartialEq, Eq)]
-struct ChangedRegion {
- depth: usize,
- range: Range<Anchor>,
-}
-
-#[derive(Default)]
-struct ChangeRegionSet(Vec<ChangedRegion>);
-
-struct TextProvider<'a>(&'a Rope);
-
-struct ByteChunks<'a>(text::Chunks<'a>);
-
-struct QueryCursorHandle(Option<QueryCursor>);
-
-impl SyntaxMap {
- pub fn new() -> Self {
- Self::default()
- }
-
- pub fn set_language_registry(&mut self, registry: Arc<LanguageRegistry>) {
- self.language_registry = Some(registry);
- }
-
- pub fn snapshot(&self) -> SyntaxSnapshot {
- self.snapshot.clone()
- }
-
- pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
- self.language_registry.clone()
- }
-
- pub fn interpolate(&mut self, text: &BufferSnapshot) {
- self.snapshot.interpolate(text);
- }
-
- #[cfg(test)]
- pub fn reparse(&mut self, language: Arc<Language>, text: &BufferSnapshot) {
- self.snapshot
- .reparse(text, self.language_registry.clone(), language);
- }
-
- pub fn did_parse(&mut self, snapshot: SyntaxSnapshot) {
- self.snapshot = snapshot;
- }
-
- pub fn clear(&mut self) {
- self.snapshot = SyntaxSnapshot::default();
- }
-}
-
-impl SyntaxSnapshot {
- pub fn is_empty(&self) -> bool {
- self.layers.is_empty()
- }
-
- fn interpolate(&mut self, text: &BufferSnapshot) {
- let edits = text
- .anchored_edits_since::<(usize, Point)>(&self.interpolated_version)
- .collect::<Vec<_>>();
- self.interpolated_version = text.version().clone();
-
- if edits.is_empty() {
- return;
- }
-
- let mut layers = SumTree::new();
- let mut first_edit_ix_for_depth = 0;
- let mut prev_depth = 0;
- let mut cursor = self.layers.cursor::<SyntaxLayerSummary>();
- cursor.next(text);
-
- 'outer: loop {
- let depth = cursor.end(text).max_depth;
- if depth > prev_depth {
- first_edit_ix_for_depth = 0;
- prev_depth = depth;
- }
-
- // Preserve any layers at this depth that precede the first edit.
- if let Some((_, edit_range)) = edits.get(first_edit_ix_for_depth) {
- let target = ChangeStartPosition {
- depth,
- position: edit_range.start,
- };
- if target.cmp(&cursor.start(), text).is_gt() {
- let slice = cursor.slice(&target, Bias::Left, text);
- layers.append(slice, text);
- }
- }
- // If this layer follows all of the edits, then preserve it and any
- // subsequent layers at this same depth.
- else if cursor.item().is_some() {
- let slice = cursor.slice(
- &SyntaxLayerPosition {
- depth: depth + 1,
- range: Anchor::MIN..Anchor::MAX,
- language: None,
- },
- Bias::Left,
- text,
- );
- layers.append(slice, text);
- continue;
- };
-
- let Some(layer) = cursor.item() else { break };
- let (start_byte, start_point) = layer.range.start.summary::<(usize, Point)>(text);
-
- // Ignore edits that end before the start of this layer, and don't consider them
- // for any subsequent layers at this same depth.
- loop {
- let Some((_, edit_range)) = edits.get(first_edit_ix_for_depth) else {
- continue 'outer;
- };
- if edit_range.end.cmp(&layer.range.start, text).is_le() {
- first_edit_ix_for_depth += 1;
- } else {
- break;
- }
- }
-
- let mut layer = layer.clone();
- if let SyntaxLayerContent::Parsed { tree, .. } = &mut layer.content {
- for (edit, edit_range) in &edits[first_edit_ix_for_depth..] {
- // Ignore any edits that follow this layer.
- if edit_range.start.cmp(&layer.range.end, text).is_ge() {
- break;
- }
-
- // Apply any edits that intersect this layer to the layer's syntax tree.
- let tree_edit = if edit_range.start.cmp(&layer.range.start, text).is_ge() {
- tree_sitter::InputEdit {
- start_byte: edit.new.start.0 - start_byte,
- old_end_byte: edit.new.start.0 - start_byte
- + (edit.old.end.0 - edit.old.start.0),
- new_end_byte: edit.new.end.0 - start_byte,
- start_position: (edit.new.start.1 - start_point).to_ts_point(),
- old_end_position: (edit.new.start.1 - start_point
- + (edit.old.end.1 - edit.old.start.1))
- .to_ts_point(),
- new_end_position: (edit.new.end.1 - start_point).to_ts_point(),
- }
- } else {
- let node = tree.root_node();
- tree_sitter::InputEdit {
- start_byte: 0,
- old_end_byte: node.end_byte(),
- new_end_byte: 0,
- start_position: Default::default(),
- old_end_position: node.end_position(),
- new_end_position: Default::default(),
- }
- };
-
- tree.edit(&tree_edit);
- }
-
- debug_assert!(
- tree.root_node().end_byte() <= text.len(),
- "tree's size {}, is larger than text size {}",
- tree.root_node().end_byte(),
- text.len(),
- );
- }
-
- layers.push(layer, text);
- cursor.next(text);
- }
-
- layers.append(cursor.suffix(&text), &text);
- drop(cursor);
- self.layers = layers;
- }
-
- pub fn reparse(
- &mut self,
- text: &BufferSnapshot,
- registry: Option<Arc<LanguageRegistry>>,
- root_language: Arc<Language>,
- ) {
- let edit_ranges = text
- .edits_since::<usize>(&self.parsed_version)
- .map(|edit| edit.new)
- .collect::<Vec<_>>();
- self.reparse_with_ranges(text, root_language.clone(), edit_ranges, registry.as_ref());
-
- if let Some(registry) = registry {
- if registry.version() != self.language_registry_version {
- let mut resolved_injection_ranges = Vec::new();
- let mut cursor = self
- .layers
- .filter::<_, ()>(|summary| summary.contains_unknown_injections);
- cursor.next(text);
- while let Some(layer) = cursor.item() {
- let SyntaxLayerContent::Pending { language_name } = &layer.content else {
- unreachable!()
- };
- if registry
- .language_for_name_or_extension(language_name)
- .now_or_never()
- .and_then(|language| language.ok())
- .is_some()
- {
- resolved_injection_ranges.push(layer.range.to_offset(text));
- }
-
- cursor.next(text);
- }
- drop(cursor);
-
- if !resolved_injection_ranges.is_empty() {
- self.reparse_with_ranges(
- text,
- root_language,
- resolved_injection_ranges,
- Some(®istry),
- );
- }
- self.language_registry_version = registry.version();
- }
- }
- }
-
- fn reparse_with_ranges(
- &mut self,
- text: &BufferSnapshot,
- root_language: Arc<Language>,
- invalidated_ranges: Vec<Range<usize>>,
- registry: Option<&Arc<LanguageRegistry>>,
- ) {
- log::trace!("reparse. invalidated ranges:{:?}", invalidated_ranges);
-
- let max_depth = self.layers.summary().max_depth;
- let mut cursor = self.layers.cursor::<SyntaxLayerSummary>();
- cursor.next(&text);
- let mut layers = SumTree::new();
-
- let mut changed_regions = ChangeRegionSet::default();
- let mut queue = BinaryHeap::new();
- let mut combined_injection_ranges = HashMap::default();
- queue.push(ParseStep {
- depth: 0,
- language: ParseStepLanguage::Loaded {
- language: root_language,
- },
- included_ranges: vec![tree_sitter::Range {
- start_byte: 0,
- end_byte: text.len(),
- start_point: Point::zero().to_ts_point(),
- end_point: text.max_point().to_ts_point(),
- }],
- range: Anchor::MIN..Anchor::MAX,
- mode: ParseMode::Single,
- });
-
- loop {
- let step = queue.pop();
- let position = if let Some(step) = &step {
- SyntaxLayerPosition {
- depth: step.depth,
- range: step.range.clone(),
- language: step.language.id(),
- }
- } else {
- SyntaxLayerPosition {
- depth: max_depth + 1,
- range: Anchor::MAX..Anchor::MAX,
- language: None,
- }
- };
-
- let mut done = cursor.item().is_none();
- while !done && position.cmp(&cursor.end(text), &text).is_gt() {
- done = true;
-
- let bounded_position = SyntaxLayerPositionBeforeChange {
- position: position.clone(),
- change: changed_regions.start_position(),
- };
- if bounded_position.cmp(&cursor.start(), &text).is_gt() {
- let slice = cursor.slice(&bounded_position, Bias::Left, text);
- if !slice.is_empty() {
- layers.append(slice, &text);
- if changed_regions.prune(cursor.end(text), text) {
- done = false;
- }
- }
- }
-
- while position.cmp(&cursor.end(text), text).is_gt() {
- let Some(layer) = cursor.item() else { break };
-
- if changed_regions.intersects(&layer, text) {
- if let SyntaxLayerContent::Parsed { language, .. } = &layer.content {
- log::trace!(
- "discard layer. language:{}, range:{:?}. changed_regions:{:?}",
- language.name(),
- LogAnchorRange(&layer.range, text),
- LogChangedRegions(&changed_regions, text),
- );
- }
-
- changed_regions.insert(
- ChangedRegion {
- depth: layer.depth + 1,
- range: layer.range.clone(),
- },
- text,
- );
- } else {
- layers.push(layer.clone(), text);
- }
-
- cursor.next(text);
- if changed_regions.prune(cursor.end(text), text) {
- done = false;
- }
- }
- }
-
- let Some(step) = step else { break };
- let (step_start_byte, step_start_point) =
- step.range.start.summary::<(usize, Point)>(text);
- let step_end_byte = step.range.end.to_offset(text);
-
- let mut old_layer = cursor.item();
- if let Some(layer) = old_layer {
- if layer.range.to_offset(text) == (step_start_byte..step_end_byte)
- && layer.content.language_id() == step.language.id()
- {
- cursor.next(&text);
- } else {
- old_layer = None;
- }
- }
-
- let content = match step.language {
- ParseStepLanguage::Loaded { language } => {
- let Some(grammar) = language.grammar() else {
- continue;
- };
- let tree;
- let changed_ranges;
-
- let mut included_ranges = step.included_ranges;
- for range in &mut included_ranges {
- range.start_byte -= step_start_byte;
- range.end_byte -= step_start_byte;
- range.start_point = (Point::from_ts_point(range.start_point)
- - step_start_point)
- .to_ts_point();
- range.end_point = (Point::from_ts_point(range.end_point)
- - step_start_point)
- .to_ts_point();
- }
-
- if let Some((SyntaxLayerContent::Parsed { tree: old_tree, .. }, layer_start)) =
- old_layer.map(|layer| (&layer.content, layer.range.start))
- {
- log::trace!(
- "existing layer. language:{}, start:{:?}, ranges:{:?}",
- language.name(),
- LogPoint(layer_start.to_point(&text)),
- LogIncludedRanges(&old_tree.included_ranges())
- );
-
- if let ParseMode::Combined {
- mut parent_layer_changed_ranges,
- ..
- } = step.mode
- {
- for range in &mut parent_layer_changed_ranges {
- range.start = range.start.saturating_sub(step_start_byte);
- range.end = range.end.saturating_sub(step_start_byte);
- }
-
- let changed_indices;
- (included_ranges, changed_indices) = splice_included_ranges(
- old_tree.included_ranges(),
- &parent_layer_changed_ranges,
- &included_ranges,
- );
- insert_newlines_between_ranges(
- changed_indices,
- &mut included_ranges,
- &text,
- step_start_byte,
- step_start_point,
- );
- }
-
- if included_ranges.is_empty() {
- included_ranges.push(tree_sitter::Range {
- start_byte: 0,
- end_byte: 0,
- start_point: Default::default(),
- end_point: Default::default(),
- });
- }
-
- log::trace!(
- "update layer. language:{}, start:{:?}, included_ranges:{:?}",
- language.name(),
- LogAnchorRange(&step.range, text),
- LogIncludedRanges(&included_ranges),
- );
-
- tree = parse_text(
- grammar,
- text.as_rope(),
- step_start_byte,
- included_ranges,
- Some(old_tree.clone()),
- );
- changed_ranges = join_ranges(
- invalidated_ranges.iter().cloned().filter(|range| {
- range.start <= step_end_byte && range.end >= step_start_byte
- }),
- old_tree.changed_ranges(&tree).map(|r| {
- step_start_byte + r.start_byte..step_start_byte + r.end_byte
- }),
- );
- } else {
- if matches!(step.mode, ParseMode::Combined { .. }) {
- insert_newlines_between_ranges(
- 0..included_ranges.len(),
- &mut included_ranges,
- text,
- step_start_byte,
- step_start_point,
- );
- }
-
- if included_ranges.is_empty() {
- included_ranges.push(tree_sitter::Range {
- start_byte: 0,
- end_byte: 0,
- start_point: Default::default(),
- end_point: Default::default(),
- });
- }
-
- log::trace!(
- "create layer. language:{}, range:{:?}, included_ranges:{:?}",
- language.name(),
- LogAnchorRange(&step.range, text),
- LogIncludedRanges(&included_ranges),
- );
-
- tree = parse_text(
- grammar,
- text.as_rope(),
- step_start_byte,
- included_ranges,
- None,
- );
- changed_ranges = vec![step_start_byte..step_end_byte];
- }
-
- if let (Some((config, registry)), false) = (
- grammar.injection_config.as_ref().zip(registry.as_ref()),
- changed_ranges.is_empty(),
- ) {
- for range in &changed_ranges {
- changed_regions.insert(
- ChangedRegion {
- depth: step.depth + 1,
- range: text.anchor_before(range.start)
- ..text.anchor_after(range.end),
- },
- text,
- );
- }
- get_injections(
- config,
- text,
- step.range.clone(),
- tree.root_node_with_offset(
- step_start_byte,
- step_start_point.to_ts_point(),
- ),
- registry,
- step.depth + 1,
- &changed_ranges,
- &mut combined_injection_ranges,
- &mut queue,
- );
- }
-
- SyntaxLayerContent::Parsed { tree, language }
- }
- ParseStepLanguage::Pending { name } => SyntaxLayerContent::Pending {
- language_name: name,
- },
- };
-
- layers.push(
- SyntaxLayer {
- depth: step.depth,
- range: step.range,
- content,
- },
- &text,
- );
- }
-
- drop(cursor);
- self.layers = layers;
- self.interpolated_version = text.version.clone();
- self.parsed_version = text.version.clone();
- #[cfg(debug_assertions)]
- self.check_invariants(text);
- }
-
- #[cfg(debug_assertions)]
- fn check_invariants(&self, text: &BufferSnapshot) {
- let mut max_depth = 0;
- let mut prev_range: Option<Range<Anchor>> = None;
- for layer in self.layers.iter() {
- if layer.depth == max_depth {
- if let Some(prev_range) = prev_range {
- match layer.range.start.cmp(&prev_range.start, text) {
- Ordering::Less => panic!("layers out of order"),
- Ordering::Equal => {
- assert!(layer.range.end.cmp(&prev_range.end, text).is_ge())
- }
- Ordering::Greater => {}
- }
- }
- } else if layer.depth < max_depth {
- panic!("layers out of order")
- }
- max_depth = layer.depth;
- prev_range = Some(layer.range.clone());
- }
- }
-
- pub fn single_tree_captures<'a>(
- range: Range<usize>,
- text: &'a Rope,
- tree: &'a Tree,
- language: &'a Arc<Language>,
- query: fn(&Grammar) -> Option<&Query>,
- ) -> SyntaxMapCaptures<'a> {
- SyntaxMapCaptures::new(
- range.clone(),
- text,
- [SyntaxLayerInfo {
- language,
- tree,
- depth: 0,
- offset: (0, tree_sitter::Point::new(0, 0)),
- }]
- .into_iter(),
- query,
- )
- }
-
- pub fn captures<'a>(
- &'a self,
- range: Range<usize>,
- buffer: &'a BufferSnapshot,
- query: fn(&Grammar) -> Option<&Query>,
- ) -> SyntaxMapCaptures {
- SyntaxMapCaptures::new(
- range.clone(),
- buffer.as_rope(),
- self.layers_for_range(range, buffer).into_iter(),
- query,
- )
- }
-
- pub fn matches<'a>(
- &'a self,
- range: Range<usize>,
- buffer: &'a BufferSnapshot,
- query: fn(&Grammar) -> Option<&Query>,
- ) -> SyntaxMapMatches {
- SyntaxMapMatches::new(
- range.clone(),
- buffer.as_rope(),
- self.layers_for_range(range, buffer).into_iter(),
- query,
- )
- }
-
- #[cfg(test)]
- pub fn layers<'a>(&'a self, buffer: &'a BufferSnapshot) -> Vec<SyntaxLayerInfo> {
- self.layers_for_range(0..buffer.len(), buffer).collect()
- }
-
- pub fn layers_for_range<'a, T: ToOffset>(
- &'a self,
- range: Range<T>,
- buffer: &'a BufferSnapshot,
- ) -> impl 'a + Iterator<Item = SyntaxLayerInfo> {
- let start_offset = range.start.to_offset(buffer);
- let end_offset = range.end.to_offset(buffer);
- let start = buffer.anchor_before(start_offset);
- let end = buffer.anchor_after(end_offset);
-
- let mut cursor = self.layers.filter::<_, ()>(move |summary| {
- if summary.max_depth > summary.min_depth {
- true
- } else {
- let is_before_start = summary.range.end.cmp(&start, buffer).is_lt();
- let is_after_end = summary.range.start.cmp(&end, buffer).is_gt();
- !is_before_start && !is_after_end
- }
- });
-
- cursor.next(buffer);
- iter::from_fn(move || {
- while let Some(layer) = cursor.item() {
- let mut info = None;
- if let SyntaxLayerContent::Parsed { tree, language } = &layer.content {
- let layer_start_offset = layer.range.start.to_offset(buffer);
- let layer_start_point = layer.range.start.to_point(buffer).to_ts_point();
-
- info = Some(SyntaxLayerInfo {
- tree,
- language,
- depth: layer.depth,
- offset: (layer_start_offset, layer_start_point),
- });
- }
- cursor.next(buffer);
- if info.is_some() {
- return info;
- }
- }
- None
- })
- }
-
- pub fn contains_unknown_injections(&self) -> bool {
- self.layers.summary().contains_unknown_injections
- }
-
- pub fn language_registry_version(&self) -> usize {
- self.language_registry_version
- }
-}
-
-impl<'a> SyntaxMapCaptures<'a> {
- fn new(
- range: Range<usize>,
- text: &'a Rope,
- layers: impl Iterator<Item = SyntaxLayerInfo<'a>>,
- query: fn(&Grammar) -> Option<&Query>,
- ) -> Self {
- let mut result = Self {
- layers: Vec::new(),
- grammars: Vec::new(),
- active_layer_count: 0,
- };
- for layer in layers {
- let grammar = match &layer.language.grammar {
- Some(grammar) => grammar,
- None => continue,
- };
- let query = match query(&grammar) {
- Some(query) => query,
- None => continue,
- };
-
- let mut query_cursor = QueryCursorHandle::new();
-
- // TODO - add a Tree-sitter API to remove the need for this.
- let cursor = unsafe {
- std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
- };
-
- cursor.set_byte_range(range.clone());
- let captures = cursor.captures(query, layer.node(), TextProvider(text));
- let grammar_index = result
- .grammars
- .iter()
- .position(|g| g.id == grammar.id())
- .unwrap_or_else(|| {
- result.grammars.push(grammar);
- result.grammars.len() - 1
- });
- let mut layer = SyntaxMapCapturesLayer {
- depth: layer.depth,
- grammar_index,
- next_capture: None,
- captures,
- _query_cursor: query_cursor,
- };
-
- layer.advance();
- if layer.next_capture.is_some() {
- let key = layer.sort_key();
- let ix = match result.layers[..result.active_layer_count]
- .binary_search_by_key(&key, |layer| layer.sort_key())
- {
- Ok(ix) | Err(ix) => ix,
- };
- result.layers.insert(ix, layer);
- result.active_layer_count += 1;
- } else {
- result.layers.push(layer);
- }
- }
-
- result
- }
-
- pub fn grammars(&self) -> &[&'a Grammar] {
- &self.grammars
- }
-
- pub fn peek(&self) -> Option<SyntaxMapCapture<'a>> {
- let layer = self.layers[..self.active_layer_count].first()?;
- let capture = layer.next_capture?;
- Some(SyntaxMapCapture {
- depth: layer.depth,
- grammar_index: layer.grammar_index,
- index: capture.index,
- node: capture.node,
- })
- }
-
- pub fn advance(&mut self) -> bool {
- let layer = if let Some(layer) = self.layers[..self.active_layer_count].first_mut() {
- layer
- } else {
- return false;
- };
-
- layer.advance();
- if layer.next_capture.is_some() {
- let key = layer.sort_key();
- let i = 1 + self.layers[1..self.active_layer_count]
- .iter()
- .position(|later_layer| key < later_layer.sort_key())
- .unwrap_or(self.active_layer_count - 1);
- self.layers[0..i].rotate_left(1);
- } else {
- self.layers[0..self.active_layer_count].rotate_left(1);
- self.active_layer_count -= 1;
- }
-
- true
- }
-
- pub fn set_byte_range(&mut self, range: Range<usize>) {
- for layer in &mut self.layers {
- layer.captures.set_byte_range(range.clone());
- if let Some(capture) = &layer.next_capture {
- if capture.node.end_byte() > range.start {
- continue;
- }
- }
- layer.advance();
- }
- self.layers.sort_unstable_by_key(|layer| layer.sort_key());
- self.active_layer_count = self
- .layers
- .iter()
- .position(|layer| layer.next_capture.is_none())
- .unwrap_or(self.layers.len());
- }
-}
-
-impl<'a> SyntaxMapMatches<'a> {
- fn new(
- range: Range<usize>,
- text: &'a Rope,
- layers: impl Iterator<Item = SyntaxLayerInfo<'a>>,
- query: fn(&Grammar) -> Option<&Query>,
- ) -> Self {
- let mut result = Self::default();
- for layer in layers {
- let grammar = match &layer.language.grammar {
- Some(grammar) => grammar,
- None => continue,
- };
- let query = match query(&grammar) {
- Some(query) => query,
- None => continue,
- };
-
- let mut query_cursor = QueryCursorHandle::new();
-
- // TODO - add a Tree-sitter API to remove the need for this.
- let cursor = unsafe {
- std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
- };
-
- cursor.set_byte_range(range.clone());
- let matches = cursor.matches(query, layer.node(), TextProvider(text));
- let grammar_index = result
- .grammars
- .iter()
- .position(|g| g.id == grammar.id())
- .unwrap_or_else(|| {
- result.grammars.push(grammar);
- result.grammars.len() - 1
- });
- let mut layer = SyntaxMapMatchesLayer {
- depth: layer.depth,
- grammar_index,
- matches,
- next_pattern_index: 0,
- next_captures: Vec::new(),
- has_next: false,
- _query_cursor: query_cursor,
- };
-
- layer.advance();
- if layer.has_next {
- let key = layer.sort_key();
- let ix = match result.layers[..result.active_layer_count]
- .binary_search_by_key(&key, |layer| layer.sort_key())
- {
- Ok(ix) | Err(ix) => ix,
- };
- result.layers.insert(ix, layer);
- result.active_layer_count += 1;
- } else {
- result.layers.push(layer);
- }
- }
- result
- }
-
- pub fn grammars(&self) -> &[&'a Grammar] {
- &self.grammars
- }
-
- pub fn peek(&self) -> Option<SyntaxMapMatch> {
- let layer = self.layers.first()?;
- if !layer.has_next {
- return None;
- }
- Some(SyntaxMapMatch {
- depth: layer.depth,
- grammar_index: layer.grammar_index,
- pattern_index: layer.next_pattern_index,
- captures: &layer.next_captures,
- })
- }
-
- pub fn advance(&mut self) -> bool {
- let layer = if let Some(layer) = self.layers.first_mut() {
- layer
- } else {
- return false;
- };
-
- layer.advance();
- if layer.has_next {
- let key = layer.sort_key();
- let i = 1 + self.layers[1..self.active_layer_count]
- .iter()
- .position(|later_layer| key < later_layer.sort_key())
- .unwrap_or(self.active_layer_count - 1);
- self.layers[0..i].rotate_left(1);
- } else {
- self.layers[0..self.active_layer_count].rotate_left(1);
- self.active_layer_count -= 1;
- }
-
- true
- }
-}
-
-impl<'a> SyntaxMapCapturesLayer<'a> {
- fn advance(&mut self) {
- self.next_capture = self.captures.next().map(|(mat, ix)| mat.captures[ix]);
- }
-
- fn sort_key(&self) -> (usize, Reverse<usize>, usize) {
- if let Some(capture) = &self.next_capture {
- let range = capture.node.byte_range();
- (range.start, Reverse(range.end), self.depth)
- } else {
- (usize::MAX, Reverse(0), usize::MAX)
- }
- }
-}
-
-impl<'a> SyntaxMapMatchesLayer<'a> {
- fn advance(&mut self) {
- if let Some(mat) = self.matches.next() {
- self.next_captures.clear();
- self.next_captures.extend_from_slice(&mat.captures);
- self.next_pattern_index = mat.pattern_index;
- self.has_next = true;
- } else {
- self.has_next = false;
- }
- }
-
- fn sort_key(&self) -> (usize, Reverse<usize>, usize) {
- if self.has_next {
- let captures = &self.next_captures;
- if let Some((first, last)) = captures.first().zip(captures.last()) {
- return (
- first.node.start_byte(),
- Reverse(last.node.end_byte()),
- self.depth,
- );
- }
- }
- (usize::MAX, Reverse(0), usize::MAX)
- }
-}
-
-impl<'a> Iterator for SyntaxMapCaptures<'a> {
- type Item = SyntaxMapCapture<'a>;
-
- fn next(&mut self) -> Option<Self::Item> {
- let result = self.peek();
- self.advance();
- result
- }
-}
-
-fn join_ranges(
- a: impl Iterator<Item = Range<usize>>,
- b: impl Iterator<Item = Range<usize>>,
-) -> Vec<Range<usize>> {
- let mut result = Vec::<Range<usize>>::new();
- let mut a = a.peekable();
- let mut b = b.peekable();
- loop {
- let range = match (a.peek(), b.peek()) {
- (Some(range_a), Some(range_b)) => {
- if range_a.start < range_b.start {
- a.next().unwrap()
- } else {
- b.next().unwrap()
- }
- }
- (None, Some(_)) => b.next().unwrap(),
- (Some(_), None) => a.next().unwrap(),
- (None, None) => break,
- };
-
- if let Some(last) = result.last_mut() {
- if range.start <= last.end {
- last.end = last.end.max(range.end);
- continue;
- }
- }
- result.push(range);
- }
- result
-}
-
-fn parse_text(
- grammar: &Grammar,
- text: &Rope,
- start_byte: usize,
- ranges: Vec<tree_sitter::Range>,
- old_tree: Option<Tree>,
-) -> Tree {
- PARSER.with(|parser| {
- let mut parser = parser.borrow_mut();
- let mut chunks = text.chunks_in_range(start_byte..text.len());
- parser
- .set_included_ranges(&ranges)
- .expect("overlapping ranges");
- parser
- .set_language(&grammar.ts_language)
- .expect("incompatible grammar");
- parser
- .parse_with(
- &mut move |offset, _| {
- chunks.seek(start_byte + offset);
- chunks.next().unwrap_or("").as_bytes()
- },
- old_tree.as_ref(),
- )
- .expect("invalid language")
- })
-}
-
-fn get_injections(
- config: &InjectionConfig,
- text: &BufferSnapshot,
- outer_range: Range<Anchor>,
- node: Node,
- language_registry: &Arc<LanguageRegistry>,
- depth: usize,
- changed_ranges: &[Range<usize>],
- combined_injection_ranges: &mut HashMap<Arc<Language>, Vec<tree_sitter::Range>>,
- queue: &mut BinaryHeap<ParseStep>,
-) {
- let mut query_cursor = QueryCursorHandle::new();
- let mut prev_match = None;
-
- // Ensure that a `ParseStep` is created for every combined injection language, even
- // if there currently no matches for that injection.
- combined_injection_ranges.clear();
- for pattern in &config.patterns {
- if let (Some(language_name), true) = (pattern.language.as_ref(), pattern.combined) {
- if let Some(language) = language_registry
- .language_for_name_or_extension(language_name)
- .now_or_never()
- .and_then(|language| language.ok())
- {
- combined_injection_ranges.insert(language, Vec::new());
- }
- }
- }
-
- for query_range in changed_ranges {
- query_cursor.set_byte_range(query_range.start.saturating_sub(1)..query_range.end + 1);
- for mat in query_cursor.matches(&config.query, node, TextProvider(text.as_rope())) {
- let content_ranges = mat
- .nodes_for_capture_index(config.content_capture_ix)
- .map(|node| node.range())
- .collect::<Vec<_>>();
- if content_ranges.is_empty() {
- continue;
- }
-
- let content_range =
- content_ranges.first().unwrap().start_byte..content_ranges.last().unwrap().end_byte;
-
- // Avoid duplicate matches if two changed ranges intersect the same injection.
- if let Some((prev_pattern_ix, prev_range)) = &prev_match {
- if mat.pattern_index == *prev_pattern_ix && content_range == *prev_range {
- continue;
- }
- }
-
- prev_match = Some((mat.pattern_index, content_range.clone()));
- let combined = config.patterns[mat.pattern_index].combined;
-
- let mut language_name = None;
- let mut step_range = content_range.clone();
- if let Some(name) = config.patterns[mat.pattern_index].language.as_ref() {
- language_name = Some(Cow::Borrowed(name.as_ref()))
- } else if let Some(language_node) = config
- .language_capture_ix
- .and_then(|ix| mat.nodes_for_capture_index(ix).next())
- {
- step_range.start = cmp::min(content_range.start, language_node.start_byte());
- step_range.end = cmp::max(content_range.end, language_node.end_byte());
- language_name = Some(Cow::Owned(
- text.text_for_range(language_node.byte_range()).collect(),
- ))
- };
-
- if let Some(language_name) = language_name {
- let language = language_registry
- .language_for_name_or_extension(&language_name)
- .now_or_never()
- .and_then(|language| language.ok());
- let range = text.anchor_before(step_range.start)..text.anchor_after(step_range.end);
- if let Some(language) = language {
- if combined {
- combined_injection_ranges
- .entry(language.clone())
- .or_default()
- .extend(content_ranges);
- } else {
- queue.push(ParseStep {
- depth,
- language: ParseStepLanguage::Loaded { language },
- included_ranges: content_ranges,
- range,
- mode: ParseMode::Single,
- });
- }
- } else {
- queue.push(ParseStep {
- depth,
- language: ParseStepLanguage::Pending {
- name: language_name.into(),
- },
- included_ranges: content_ranges,
- range,
- mode: ParseMode::Single,
- });
- }
- }
- }
- }
-
- for (language, mut included_ranges) in combined_injection_ranges.drain() {
- included_ranges.sort_unstable_by(|a, b| {
- Ord::cmp(&a.start_byte, &b.start_byte).then_with(|| Ord::cmp(&a.end_byte, &b.end_byte))
- });
- queue.push(ParseStep {
- depth,
- language: ParseStepLanguage::Loaded { language },
- range: outer_range.clone(),
- included_ranges,
- mode: ParseMode::Combined {
- parent_layer_range: node.start_byte()..node.end_byte(),
- parent_layer_changed_ranges: changed_ranges.to_vec(),
- },
- })
- }
-}
-
-/// Update the given list of included `ranges`, removing any ranges that intersect
-/// `removed_ranges`, and inserting the given `new_ranges`.
-///
-/// Returns a new vector of ranges, and the range of the vector that was changed,
-/// from the previous `ranges` vector.
-pub(crate) fn splice_included_ranges(
- mut ranges: Vec<tree_sitter::Range>,
- removed_ranges: &[Range<usize>],
- new_ranges: &[tree_sitter::Range],
-) -> (Vec<tree_sitter::Range>, Range<usize>) {
- let mut removed_ranges = removed_ranges.iter().cloned().peekable();
- let mut new_ranges = new_ranges.into_iter().cloned().peekable();
- let mut ranges_ix = 0;
- let mut changed_portion = usize::MAX..0;
- loop {
- let next_new_range = new_ranges.peek();
- let next_removed_range = removed_ranges.peek();
-
- let (remove, insert) = match (next_removed_range, next_new_range) {
- (None, None) => break,
- (Some(_), None) => (removed_ranges.next().unwrap(), None),
- (Some(next_removed_range), Some(next_new_range)) => {
- if next_removed_range.end < next_new_range.start_byte {
- (removed_ranges.next().unwrap(), None)
- } else {
- let mut start = next_new_range.start_byte;
- let mut end = next_new_range.end_byte;
-
- while let Some(next_removed_range) = removed_ranges.peek() {
- if next_removed_range.start > next_new_range.end_byte {
- break;
- }
- let next_removed_range = removed_ranges.next().unwrap();
- start = cmp::min(start, next_removed_range.start);
- end = cmp::max(end, next_removed_range.end);
- }
-
- (start..end, Some(new_ranges.next().unwrap()))
- }
- }
- (None, Some(next_new_range)) => (
- next_new_range.start_byte..next_new_range.end_byte,
- Some(new_ranges.next().unwrap()),
- ),
- };
-
- let mut start_ix = ranges_ix
- + match ranges[ranges_ix..].binary_search_by_key(&remove.start, |r| r.end_byte) {
- Ok(ix) => ix,
- Err(ix) => ix,
- };
- let mut end_ix = ranges_ix
- + match ranges[ranges_ix..].binary_search_by_key(&remove.end, |r| r.start_byte) {
- Ok(ix) => ix + 1,
- Err(ix) => ix,
- };
-
- // If there are empty ranges, then there may be multiple ranges with the same
- // start or end. Expand the splice to include any adjacent ranges that touch
- // the changed range.
- while start_ix > 0 {
- if ranges[start_ix - 1].end_byte == remove.start {
- start_ix -= 1;
- } else {
- break;
- }
- }
- while let Some(range) = ranges.get(end_ix) {
- if range.start_byte == remove.end {
- end_ix += 1;
- } else {
- break;
- }
- }
-
- changed_portion.start = changed_portion.start.min(start_ix);
- changed_portion.end = changed_portion.end.max(if insert.is_some() {
- start_ix + 1
- } else {
- start_ix
- });
-
- ranges.splice(start_ix..end_ix, insert);
- ranges_ix = start_ix;
- }
-
- if changed_portion.end < changed_portion.start {
- changed_portion = 0..0;
- }
-
- (ranges, changed_portion)
-}
-
-/// Ensure there are newline ranges in between content range that appear on
-/// different lines. For performance, only iterate through the given range of
-/// indices. All of the ranges in the array are relative to a given start byte
-/// and point.
-fn insert_newlines_between_ranges(
- indices: Range<usize>,
- ranges: &mut Vec<tree_sitter::Range>,
- text: &text::BufferSnapshot,
- start_byte: usize,
- start_point: Point,
-) {
- let mut ix = indices.end + 1;
- while ix > indices.start {
- ix -= 1;
- if 0 == ix || ix == ranges.len() {
- continue;
- }
-
- let range_b = ranges[ix].clone();
- let range_a = &mut ranges[ix - 1];
- if range_a.end_point.column == 0 {
- continue;
- }
-
- if range_a.end_point.row < range_b.start_point.row {
- let end_point = start_point + Point::from_ts_point(range_a.end_point);
- let line_end = Point::new(end_point.row, text.line_len(end_point.row));
- if end_point.column as u32 >= line_end.column {
- range_a.end_byte += 1;
- range_a.end_point.row += 1;
- range_a.end_point.column = 0;
- } else {
- let newline_offset = text.point_to_offset(line_end);
- ranges.insert(
- ix,
- tree_sitter::Range {
- start_byte: newline_offset - start_byte,
- end_byte: newline_offset - start_byte + 1,
- start_point: (line_end - start_point).to_ts_point(),
- end_point: ((line_end - start_point) + Point::new(1, 0)).to_ts_point(),
- },
- )
- }
- }
- }
-}
-
-impl OwnedSyntaxLayerInfo {
- pub fn node(&self) -> Node {
- self.tree
- .root_node_with_offset(self.offset.0, self.offset.1)
- }
-}
-
-impl<'a> SyntaxLayerInfo<'a> {
- pub fn to_owned(&self) -> OwnedSyntaxLayerInfo {
- OwnedSyntaxLayerInfo {
- tree: self.tree.clone(),
- offset: self.offset,
- depth: self.depth,
- language: self.language.clone(),
- }
- }
-
- pub fn node(&self) -> Node<'a> {
- self.tree
- .root_node_with_offset(self.offset.0, self.offset.1)
- }
-
- pub(crate) fn override_id(&self, offset: usize, text: &text::BufferSnapshot) -> Option<u32> {
- let text = TextProvider(text.as_rope());
- let config = self.language.grammar.as_ref()?.override_config.as_ref()?;
-
- let mut query_cursor = QueryCursorHandle::new();
- query_cursor.set_byte_range(offset..offset);
-
- let mut smallest_match: Option<(u32, Range<usize>)> = None;
- for mat in query_cursor.matches(&config.query, self.node(), text) {
- for capture in mat.captures {
- if !config.values.contains_key(&capture.index) {
- continue;
- }
-
- let range = capture.node.byte_range();
- if offset <= range.start || offset >= range.end {
- continue;
- }
-
- if let Some((_, smallest_range)) = &smallest_match {
- if range.len() < smallest_range.len() {
- smallest_match = Some((capture.index, range))
- }
- continue;
- }
-
- smallest_match = Some((capture.index, range));
- }
- }
-
- smallest_match.map(|(index, _)| index)
- }
-}
-
-impl std::ops::Deref for SyntaxMap {
- type Target = SyntaxSnapshot;
-
- fn deref(&self) -> &Self::Target {
- &self.snapshot
- }
-}
-
-impl PartialEq for ParseStep {
- fn eq(&self, _: &Self) -> bool {
- false
- }
-}
-
-impl Eq for ParseStep {}
-
-impl PartialOrd for ParseStep {
- fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
- Some(self.cmp(&other))
- }
-}
-
-impl Ord for ParseStep {
- fn cmp(&self, other: &Self) -> Ordering {
- let range_a = self.range();
- let range_b = other.range();
- Ord::cmp(&other.depth, &self.depth)
- .then_with(|| Ord::cmp(&range_b.start, &range_a.start))
- .then_with(|| Ord::cmp(&range_a.end, &range_b.end))
- .then_with(|| self.language.id().cmp(&other.language.id()))
- }
-}
-
-impl ParseStep {
- fn range(&self) -> Range<usize> {
- if let ParseMode::Combined {
- parent_layer_range, ..
- } = &self.mode
- {
- parent_layer_range.clone()
- } else {
- let start = self.included_ranges.first().map_or(0, |r| r.start_byte);
- let end = self.included_ranges.last().map_or(0, |r| r.end_byte);
- start..end
- }
- }
-}
-
-impl ChangedRegion {
- fn cmp(&self, other: &Self, buffer: &BufferSnapshot) -> Ordering {
- let range_a = &self.range;
- let range_b = &other.range;
- Ord::cmp(&self.depth, &other.depth)
- .then_with(|| range_a.start.cmp(&range_b.start, buffer))
- .then_with(|| range_b.end.cmp(&range_a.end, buffer))
- }
-}
-
-impl ChangeRegionSet {
- fn start_position(&self) -> ChangeStartPosition {
- self.0.first().map_or(
- ChangeStartPosition {
- depth: usize::MAX,
- position: Anchor::MAX,
- },
- |region| ChangeStartPosition {
- depth: region.depth,
- position: region.range.start,
- },
- )
- }
-
- fn intersects(&self, layer: &SyntaxLayer, text: &BufferSnapshot) -> bool {
- for region in &self.0 {
- if region.depth < layer.depth {
- continue;
- }
- if region.depth > layer.depth {
- break;
- }
- if region.range.end.cmp(&layer.range.start, text).is_le() {
- continue;
- }
- if region.range.start.cmp(&layer.range.end, text).is_ge() {
- break;
- }
- return true;
- }
- false
- }
-
- fn insert(&mut self, region: ChangedRegion, text: &BufferSnapshot) {
- if let Err(ix) = self.0.binary_search_by(|probe| probe.cmp(®ion, text)) {
- self.0.insert(ix, region);
- }
- }
-
- fn prune(&mut self, summary: SyntaxLayerSummary, text: &BufferSnapshot) -> bool {
- let prev_len = self.0.len();
- self.0.retain(|region| {
- region.depth > summary.max_depth
- || (region.depth == summary.max_depth
- && region
- .range
- .end
- .cmp(&summary.last_layer_range.start, text)
- .is_gt())
- });
- self.0.len() < prev_len
- }
-}
-
-impl Default for SyntaxLayerSummary {
- fn default() -> Self {
- Self {
- max_depth: 0,
- min_depth: 0,
- range: Anchor::MAX..Anchor::MIN,
- last_layer_range: Anchor::MIN..Anchor::MAX,
- last_layer_language: None,
- contains_unknown_injections: false,
- }
- }
-}
-
-impl sum_tree::Summary for SyntaxLayerSummary {
- type Context = BufferSnapshot;
-
- fn add_summary(&mut self, other: &Self, buffer: &Self::Context) {
- if other.max_depth > self.max_depth {
- self.max_depth = other.max_depth;
- self.range = other.range.clone();
- } else {
- if self.range == (Anchor::MAX..Anchor::MAX) {
- self.range.start = other.range.start;
- }
- if other.range.end.cmp(&self.range.end, buffer).is_gt() {
- self.range.end = other.range.end;
- }
- }
- self.last_layer_range = other.last_layer_range.clone();
- self.last_layer_language = other.last_layer_language;
- self.contains_unknown_injections |= other.contains_unknown_injections;
- }
-}
-
-impl<'a> SeekTarget<'a, SyntaxLayerSummary, SyntaxLayerSummary> for SyntaxLayerPosition {
- fn cmp(&self, cursor_location: &SyntaxLayerSummary, buffer: &BufferSnapshot) -> Ordering {
- Ord::cmp(&self.depth, &cursor_location.max_depth)
- .then_with(|| {
- self.range
- .start
- .cmp(&cursor_location.last_layer_range.start, buffer)
- })
- .then_with(|| {
- cursor_location
- .last_layer_range
- .end
- .cmp(&self.range.end, buffer)
- })
- .then_with(|| self.language.cmp(&cursor_location.last_layer_language))
- }
-}
-
-impl<'a> SeekTarget<'a, SyntaxLayerSummary, SyntaxLayerSummary> for ChangeStartPosition {
- fn cmp(&self, cursor_location: &SyntaxLayerSummary, text: &BufferSnapshot) -> Ordering {
- Ord::cmp(&self.depth, &cursor_location.max_depth)
- .then_with(|| self.position.cmp(&cursor_location.range.end, text))
- }
-}
-
-impl<'a> SeekTarget<'a, SyntaxLayerSummary, SyntaxLayerSummary>
- for SyntaxLayerPositionBeforeChange
-{
- fn cmp(&self, cursor_location: &SyntaxLayerSummary, buffer: &BufferSnapshot) -> Ordering {
- if self.change.cmp(cursor_location, buffer).is_le() {
- return Ordering::Less;
- } else {
- self.position.cmp(cursor_location, buffer)
- }
- }
-}
-
-impl sum_tree::Item for SyntaxLayer {
- type Summary = SyntaxLayerSummary;
-
- fn summary(&self) -> Self::Summary {
- SyntaxLayerSummary {
- min_depth: self.depth,
- max_depth: self.depth,
- range: self.range.clone(),
- last_layer_range: self.range.clone(),
- last_layer_language: self.content.language_id(),
- contains_unknown_injections: matches!(self.content, SyntaxLayerContent::Pending { .. }),
- }
- }
-}
-
-impl std::fmt::Debug for SyntaxLayer {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- f.debug_struct("SyntaxLayer")
- .field("depth", &self.depth)
- .field("range", &self.range)
- .field("tree", &self.content.tree())
- .finish()
- }
-}
-
-impl<'a> tree_sitter::TextProvider<&'a [u8]> for TextProvider<'a> {
- type I = ByteChunks<'a>;
-
- fn text(&mut self, node: tree_sitter::Node) -> Self::I {
- ByteChunks(self.0.chunks_in_range(node.byte_range()))
- }
-}
-
-impl<'a> Iterator for ByteChunks<'a> {
- type Item = &'a [u8];
-
- fn next(&mut self) -> Option<Self::Item> {
- self.0.next().map(str::as_bytes)
- }
-}
-
-impl QueryCursorHandle {
- pub(crate) fn new() -> Self {
- let mut cursor = QUERY_CURSORS.lock().pop().unwrap_or_else(QueryCursor::new);
- cursor.set_match_limit(64);
- QueryCursorHandle(Some(cursor))
- }
-}
-
-impl Deref for QueryCursorHandle {
- type Target = QueryCursor;
-
- fn deref(&self) -> &Self::Target {
- self.0.as_ref().unwrap()
- }
-}
-
-impl DerefMut for QueryCursorHandle {
- fn deref_mut(&mut self) -> &mut Self::Target {
- self.0.as_mut().unwrap()
- }
-}
-
-impl Drop for QueryCursorHandle {
- fn drop(&mut self) {
- let mut cursor = self.0.take().unwrap();
- cursor.set_byte_range(0..usize::MAX);
- cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point());
- QUERY_CURSORS.lock().push(cursor)
- }
-}
-
-pub(crate) trait ToTreeSitterPoint {
- fn to_ts_point(self) -> tree_sitter::Point;
- fn from_ts_point(point: tree_sitter::Point) -> Self;
-}
-
-impl ToTreeSitterPoint for Point {
- fn to_ts_point(self) -> tree_sitter::Point {
- tree_sitter::Point::new(self.row as usize, self.column as usize)
- }
-
- fn from_ts_point(point: tree_sitter::Point) -> Self {
- Point::new(point.row as u32, point.column as u32)
- }
-}
-
-struct LogIncludedRanges<'a>(&'a [tree_sitter::Range]);
-struct LogPoint(Point);
-struct LogAnchorRange<'a>(&'a Range<Anchor>, &'a text::BufferSnapshot);
-struct LogChangedRegions<'a>(&'a ChangeRegionSet, &'a text::BufferSnapshot);
-
-impl<'a> fmt::Debug for LogIncludedRanges<'a> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_list()
- .entries(self.0.iter().map(|range| {
- let start = range.start_point;
- let end = range.end_point;
- (start.row, start.column)..(end.row, end.column)
- }))
- .finish()
- }
-}
-
-impl<'a> fmt::Debug for LogAnchorRange<'a> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let range = self.0.to_point(self.1);
- (LogPoint(range.start)..LogPoint(range.end)).fmt(f)
- }
-}
-
-impl<'a> fmt::Debug for LogChangedRegions<'a> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_list()
- .entries(
- self.0
- .0
- .iter()
- .map(|region| LogAnchorRange(®ion.range, self.1)),
- )
- .finish()
- }
-}
-
-impl fmt::Debug for LogPoint {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- (self.0.row, self.0.column).fmt(f)
- }
-}
@@ -1,1323 +0,0 @@
-use super::*;
-use crate::LanguageConfig;
-use rand::rngs::StdRng;
-use std::{env, ops::Range, sync::Arc};
-use text::Buffer;
-use tree_sitter::Node;
-use unindent::Unindent as _;
-use util::test::marked_text_ranges;
-
-#[test]
-fn test_splice_included_ranges() {
- let ranges = vec![ts_range(20..30), ts_range(50..60), ts_range(80..90)];
-
- let (new_ranges, change) = splice_included_ranges(
- ranges.clone(),
- &[54..56, 58..68],
- &[ts_range(50..54), ts_range(59..67)],
- );
- assert_eq!(
- new_ranges,
- &[
- ts_range(20..30),
- ts_range(50..54),
- ts_range(59..67),
- ts_range(80..90),
- ]
- );
- assert_eq!(change, 1..3);
-
- let (new_ranges, change) = splice_included_ranges(ranges.clone(), &[70..71, 91..100], &[]);
- assert_eq!(
- new_ranges,
- &[ts_range(20..30), ts_range(50..60), ts_range(80..90)]
- );
- assert_eq!(change, 2..3);
-
- let (new_ranges, change) =
- splice_included_ranges(ranges.clone(), &[], &[ts_range(0..2), ts_range(70..75)]);
- assert_eq!(
- new_ranges,
- &[
- ts_range(0..2),
- ts_range(20..30),
- ts_range(50..60),
- ts_range(70..75),
- ts_range(80..90)
- ]
- );
- assert_eq!(change, 0..4);
-
- let (new_ranges, change) =
- splice_included_ranges(ranges.clone(), &[30..50], &[ts_range(25..55)]);
- assert_eq!(new_ranges, &[ts_range(25..55), ts_range(80..90)]);
- assert_eq!(change, 0..1);
-
- // does not create overlapping ranges
- let (new_ranges, change) =
- splice_included_ranges(ranges.clone(), &[0..18], &[ts_range(20..32)]);
- assert_eq!(
- new_ranges,
- &[ts_range(20..32), ts_range(50..60), ts_range(80..90)]
- );
- assert_eq!(change, 0..1);
-
- fn ts_range(range: Range<usize>) -> tree_sitter::Range {
- tree_sitter::Range {
- start_byte: range.start,
- start_point: tree_sitter::Point {
- row: 0,
- column: range.start,
- },
- end_byte: range.end,
- end_point: tree_sitter::Point {
- row: 0,
- column: range.end,
- },
- }
- }
-}
-
-#[gpui::test]
-fn test_syntax_map_layers_for_range() {
- let registry = Arc::new(LanguageRegistry::test());
- let language = Arc::new(rust_lang());
- registry.add(language.clone());
-
- let mut buffer = Buffer::new(
- 0,
- 0,
- r#"
- fn a() {
- assert_eq!(
- b(vec![C {}]),
- vec![d.e],
- );
- println!("{}", f(|_| true));
- }
- "#
- .unindent(),
- );
-
- let mut syntax_map = SyntaxMap::new();
- syntax_map.set_language_registry(registry.clone());
- syntax_map.reparse(language.clone(), &buffer);
-
- assert_layers_for_range(
- &syntax_map,
- &buffer,
- Point::new(2, 0)..Point::new(2, 0),
- &[
- "...(function_item ... (block (expression_statement (macro_invocation...",
- "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
- ],
- );
- assert_layers_for_range(
- &syntax_map,
- &buffer,
- Point::new(2, 14)..Point::new(2, 16),
- &[
- "...(function_item ...",
- "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
- "...(array_expression (struct_expression ...",
- ],
- );
- assert_layers_for_range(
- &syntax_map,
- &buffer,
- Point::new(3, 14)..Point::new(3, 16),
- &[
- "...(function_item ...",
- "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
- "...(array_expression (field_expression ...",
- ],
- );
- assert_layers_for_range(
- &syntax_map,
- &buffer,
- Point::new(5, 12)..Point::new(5, 16),
- &[
- "...(function_item ...",
- "...(call_expression ... (arguments (closure_expression ...",
- ],
- );
-
- // Replace a vec! macro invocation with a plain slice, removing a syntactic layer.
- let macro_name_range = range_for_text(&buffer, "vec!");
- buffer.edit([(macro_name_range, "&")]);
- syntax_map.interpolate(&buffer);
- syntax_map.reparse(language.clone(), &buffer);
-
- assert_layers_for_range(
- &syntax_map,
- &buffer,
- Point::new(2, 14)..Point::new(2, 16),
- &[
- "...(function_item ...",
- "...(tuple_expression (call_expression ... arguments: (arguments (reference_expression value: (array_expression...",
- ],
- );
-
- // Put the vec! macro back, adding back the syntactic layer.
- buffer.undo();
- syntax_map.interpolate(&buffer);
- syntax_map.reparse(language.clone(), &buffer);
-
- assert_layers_for_range(
- &syntax_map,
- &buffer,
- Point::new(2, 14)..Point::new(2, 16),
- &[
- "...(function_item ...",
- "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
- "...(array_expression (struct_expression ...",
- ],
- );
-}
-
-#[gpui::test]
-fn test_dynamic_language_injection() {
- let registry = Arc::new(LanguageRegistry::test());
- let markdown = Arc::new(markdown_lang());
- registry.add(markdown.clone());
- registry.add(Arc::new(rust_lang()));
- registry.add(Arc::new(ruby_lang()));
-
- let mut buffer = Buffer::new(
- 0,
- 0,
- r#"
- This is a code block:
-
- ```rs
- fn foo() {}
- ```
- "#
- .unindent(),
- );
-
- let mut syntax_map = SyntaxMap::new();
- syntax_map.set_language_registry(registry.clone());
- syntax_map.reparse(markdown.clone(), &buffer);
- assert_layers_for_range(
- &syntax_map,
- &buffer,
- Point::new(3, 0)..Point::new(3, 0),
- &[
- "...(fenced_code_block (fenced_code_block_delimiter) (info_string (language)) (code_fence_content) (fenced_code_block_delimiter...",
- "...(function_item name: (identifier) parameters: (parameters) body: (block)...",
- ],
- );
-
- // Replace Rust with Ruby in code block.
- let macro_name_range = range_for_text(&buffer, "rs");
- buffer.edit([(macro_name_range, "ruby")]);
- syntax_map.interpolate(&buffer);
- syntax_map.reparse(markdown.clone(), &buffer);
- assert_layers_for_range(
- &syntax_map,
- &buffer,
- Point::new(3, 0)..Point::new(3, 0),
- &[
- "...(fenced_code_block (fenced_code_block_delimiter) (info_string (language)) (code_fence_content) (fenced_code_block_delimiter...",
- "...(call method: (identifier) arguments: (argument_list (call method: (identifier) arguments: (argument_list) block: (block)...",
- ],
- );
-
- // Replace Ruby with a language that hasn't been loaded yet.
- let macro_name_range = range_for_text(&buffer, "ruby");
- buffer.edit([(macro_name_range, "html")]);
- syntax_map.interpolate(&buffer);
- syntax_map.reparse(markdown.clone(), &buffer);
- assert_layers_for_range(
- &syntax_map,
- &buffer,
- Point::new(3, 0)..Point::new(3, 0),
- &[
- "...(fenced_code_block (fenced_code_block_delimiter) (info_string (language)) (code_fence_content) (fenced_code_block_delimiter..."
- ],
- );
- assert!(syntax_map.contains_unknown_injections());
-
- registry.add(Arc::new(html_lang()));
- syntax_map.reparse(markdown.clone(), &buffer);
- assert_layers_for_range(
- &syntax_map,
- &buffer,
- Point::new(3, 0)..Point::new(3, 0),
- &[
- "...(fenced_code_block (fenced_code_block_delimiter) (info_string (language)) (code_fence_content) (fenced_code_block_delimiter...",
- "(fragment (text))",
- ],
- );
- assert!(!syntax_map.contains_unknown_injections());
-}
-
-#[gpui::test]
-fn test_typing_multiple_new_injections() {
- let (buffer, syntax_map) = test_edit_sequence(
- "Rust",
- &[
- "fn a() { dbg }",
- "fn a() { dbg«!» }",
- "fn a() { dbg!«()» }",
- "fn a() { dbg!(«b») }",
- "fn a() { dbg!(b«.») }",
- "fn a() { dbg!(b.«c») }",
- "fn a() { dbg!(b.c«()») }",
- "fn a() { dbg!(b.c(«vec»)) }",
- "fn a() { dbg!(b.c(vec«!»)) }",
- "fn a() { dbg!(b.c(vec!«[]»)) }",
- "fn a() { dbg!(b.c(vec![«d»])) }",
- "fn a() { dbg!(b.c(vec![d«.»])) }",
- "fn a() { dbg!(b.c(vec![d.«e»])) }",
- ],
- );
-
- assert_capture_ranges(
- &syntax_map,
- &buffer,
- &["field"],
- "fn a() { dbg!(b.«c»(vec![d.«e»])) }",
- );
-}
-
-#[gpui::test]
-fn test_pasting_new_injection_line_between_others() {
- let (buffer, syntax_map) = test_edit_sequence(
- "Rust",
- &[
- "
- fn a() {
- b!(B {});
- c!(C {});
- d!(D {});
- e!(E {});
- f!(F {});
- g!(G {});
- }
- ",
- "
- fn a() {
- b!(B {});
- c!(C {});
- d!(D {});
- « h!(H {});
- » e!(E {});
- f!(F {});
- g!(G {});
- }
- ",
- ],
- );
-
- assert_capture_ranges(
- &syntax_map,
- &buffer,
- &["struct"],
- "
- fn a() {
- b!(«B {}»);
- c!(«C {}»);
- d!(«D {}»);
- h!(«H {}»);
- e!(«E {}»);
- f!(«F {}»);
- g!(«G {}»);
- }
- ",
- );
-}
-
-#[gpui::test]
-fn test_joining_injections_with_child_injections() {
- let (buffer, syntax_map) = test_edit_sequence(
- "Rust",
- &[
- "
- fn a() {
- b!(
- c![one.two.three],
- d![four.five.six],
- );
- e!(
- f![seven.eight],
- );
- }
- ",
- "
- fn a() {
- b!(
- c![one.two.three],
- d![four.five.six],
- ˇ f![seven.eight],
- );
- }
- ",
- ],
- );
-
- assert_capture_ranges(
- &syntax_map,
- &buffer,
- &["field"],
- "
- fn a() {
- b!(
- c![one.«two».«three»],
- d![four.«five».«six»],
- f![seven.«eight»],
- );
- }
- ",
- );
-}
-
-#[gpui::test]
-fn test_editing_edges_of_injection() {
- test_edit_sequence(
- "Rust",
- &[
- "
- fn a() {
- b!(c!())
- }
- ",
- "
- fn a() {
- «d»!(c!())
- }
- ",
- "
- fn a() {
- «e»d!(c!())
- }
- ",
- "
- fn a() {
- ed!«[»c!()«]»
- }
- ",
- ],
- );
-}
-
-#[gpui::test]
-fn test_edits_preceding_and_intersecting_injection() {
- test_edit_sequence(
- "Rust",
- &[
- //
- "const aaaaaaaaaaaa: B = c!(d(e.f));",
- "const aˇa: B = c!(d(eˇ));",
- ],
- );
-}
-
-#[gpui::test]
-fn test_non_local_changes_create_injections() {
- test_edit_sequence(
- "Rust",
- &[
- "
- // a! {
- static B: C = d;
- // }
- ",
- "
- ˇa! {
- static B: C = d;
- ˇ}
- ",
- ],
- );
-}
-
-#[gpui::test]
-fn test_creating_many_injections_in_one_edit() {
- test_edit_sequence(
- "Rust",
- &[
- "
- fn a() {
- one(Two::three(3));
- four(Five::six(6));
- seven(Eight::nine(9));
- }
- ",
- "
- fn a() {
- one«!»(Two::three(3));
- four«!»(Five::six(6));
- seven«!»(Eight::nine(9));
- }
- ",
- "
- fn a() {
- one!(Two::three«!»(3));
- four!(Five::six«!»(6));
- seven!(Eight::nine«!»(9));
- }
- ",
- ],
- );
-}
-
-#[gpui::test]
-fn test_editing_across_injection_boundary() {
- test_edit_sequence(
- "Rust",
- &[
- "
- fn one() {
- two();
- three!(
- three.four,
- five.six,
- );
- }
- ",
- "
- fn one() {
- two();
- th«irty_five![»
- three.four,
- five.six,
- « seven.eight,
- ];»
- }
- ",
- ],
- );
-}
-
-#[gpui::test]
-fn test_removing_injection_by_replacing_across_boundary() {
- test_edit_sequence(
- "Rust",
- &[
- "
- fn one() {
- two!(
- three.four,
- );
- }
- ",
- "
- fn one() {
- t«en
- .eleven(
- twelve,
- »
- three.four,
- );
- }
- ",
- ],
- );
-}
-
-#[gpui::test]
-fn test_combined_injections_simple() {
- let (buffer, syntax_map) = test_edit_sequence(
- "ERB",
- &[
- "
- <body>
- <% if @one %>
- <div class=one>
- <% else %>
- <div class=two>
- <% end %>
- </div>
- </body>
- ",
- "
- <body>
- <% if @one %>
- <div class=one>
- ˇ else ˇ
- <div class=two>
- <% end %>
- </div>
- </body>
- ",
- "
- <body>
- <% if @one «;» end %>
- </div>
- </body>
- ",
- ],
- );
-
- assert_capture_ranges(
- &syntax_map,
- &buffer,
- &["tag", "ivar"],
- "
- <«body»>
- <% if «@one» ; end %>
- </«div»>
- </«body»>
- ",
- );
-}
-
-#[gpui::test]
-fn test_combined_injections_empty_ranges() {
- test_edit_sequence(
- "ERB",
- &[
- "
- <% if @one %>
- <% else %>
- <% end %>
- ",
- "
- <% if @one %>
- ˇ<% end %>
- ",
- ],
- );
-}
-
-#[gpui::test]
-fn test_combined_injections_edit_edges_of_ranges() {
- let (buffer, syntax_map) = test_edit_sequence(
- "ERB",
- &[
- "
- <%= one @two %>
- <%= three @four %>
- ",
- "
- <%= one @two %ˇ
- <%= three @four %>
- ",
- "
- <%= one @two %«>»
- <%= three @four %>
- ",
- ],
- );
-
- assert_capture_ranges(
- &syntax_map,
- &buffer,
- &["tag", "ivar"],
- "
- <%= one «@two» %>
- <%= three «@four» %>
- ",
- );
-}
-
-#[gpui::test]
-fn test_combined_injections_splitting_some_injections() {
- let (_buffer, _syntax_map) = test_edit_sequence(
- "ERB",
- &[
- r#"
- <%A if b(:c) %>
- d
- <% end %>
- eee
- <% f %>
- "#,
- r#"
- <%« AAAAAAA %>
- hhhhhhh
- <%=» if b(:c) %>
- d
- <% end %>
- eee
- <% f %>
- "#,
- ],
- );
-}
-
-#[gpui::test]
-fn test_combined_injections_editing_after_last_injection() {
- test_edit_sequence(
- "ERB",
- &[
- r#"
- <% foo %>
- <div></div>
- <% bar %>
- "#,
- r#"
- <% foo %>
- <div></div>
- <% bar %>«
- more text»
- "#,
- ],
- );
-}
-
-#[gpui::test]
-fn test_combined_injections_inside_injections() {
- let (buffer, syntax_map) = test_edit_sequence(
- "Markdown",
- &[
- r#"
- here is
- some
- ERB code:
-
- ```erb
- <ul>
- <% people.each do |person| %>
- <li><%= person.name %></li>
- <li><%= person.age %></li>
- <% end %>
- </ul>
- ```
- "#,
- r#"
- here is
- some
- ERB code:
-
- ```erb
- <ul>
- <% people«2».each do |person| %>
- <li><%= person.name %></li>
- <li><%= person.age %></li>
- <% end %>
- </ul>
- ```
- "#,
- // Inserting a comment character inside one code directive
- // does not cause the other code directive to become a comment,
- // because newlines are included in between each injection range.
- r#"
- here is
- some
- ERB code:
-
- ```erb
- <ul>
- <% people2.each do |person| %>
- <li><%= «# »person.name %></li>
- <li><%= person.age %></li>
- <% end %>
- </ul>
- ```
- "#,
- ],
- );
-
- // Check that the code directive below the ruby comment is
- // not parsed as a comment.
- assert_capture_ranges(
- &syntax_map,
- &buffer,
- &["method"],
- "
- here is
- some
- ERB code:
-
- ```erb
- <ul>
- <% people2.«each» do |person| %>
- <li><%= # person.name %></li>
- <li><%= person.«age» %></li>
- <% end %>
- </ul>
- ```
- ",
- );
-}
-
-#[gpui::test]
-fn test_empty_combined_injections_inside_injections() {
- let (buffer, syntax_map) = test_edit_sequence(
- "Markdown",
- &[r#"
- ```erb
- hello
- ```
-
- goodbye
- "#],
- );
-
- assert_layers_for_range(
- &syntax_map,
- &buffer,
- Point::new(0, 0)..Point::new(5, 0),
- &[
- "...(paragraph)...",
- "(template...",
- "(fragment...",
- // The ruby syntax tree should be empty, since there are
- // no interpolations in the ERB template.
- "(program)",
- ],
- );
-}
-
-#[gpui::test(iterations = 50)]
-fn test_random_syntax_map_edits_rust_macros(rng: StdRng) {
- let text = r#"
- fn test_something() {
- let vec = vec![5, 1, 3, 8];
- assert_eq!(
- vec
- .into_iter()
- .map(|i| i * 2)
- .collect::<Vec<usize>>(),
- vec![
- 5 * 2, 1 * 2, 3 * 2, 8 * 2
- ],
- );
- }
- "#
- .unindent()
- .repeat(2);
-
- let registry = Arc::new(LanguageRegistry::test());
- let language = Arc::new(rust_lang());
- registry.add(language.clone());
-
- test_random_edits(text, registry, language, rng);
-}
-
-#[gpui::test(iterations = 50)]
-fn test_random_syntax_map_edits_with_erb(rng: StdRng) {
- let text = r#"
- <div id="main">
- <% if one?(:two) %>
- <p class="three" four>
- <%= yield :five %>
- </p>
- <% elsif Six.seven(8) %>
- <p id="three" four>
- <%= yield :five %>
- </p>
- <% else %>
- <span>Ok</span>
- <% end %>
- </div>
- "#
- .unindent()
- .repeat(5);
-
- let registry = Arc::new(LanguageRegistry::test());
- let language = Arc::new(erb_lang());
- registry.add(language.clone());
- registry.add(Arc::new(ruby_lang()));
- registry.add(Arc::new(html_lang()));
-
- test_random_edits(text, registry, language, rng);
-}
-
-#[gpui::test(iterations = 50)]
-fn test_random_syntax_map_edits_with_heex(rng: StdRng) {
- let text = r#"
- defmodule TheModule do
- def the_method(assigns) do
- ~H"""
- <%= if @empty do %>
- <div class="h-4"></div>
- <% else %>
- <div class="max-w-2xl w-full animate-pulse">
- <div class="flex-1 space-y-4">
- <div class={[@bg_class, "h-4 rounded-lg w-3/4"]}></div>
- <div class={[@bg_class, "h-4 rounded-lg"]}></div>
- <div class={[@bg_class, "h-4 rounded-lg w-5/6"]}></div>
- </div>
- </div>
- <% end %>
- """
- end
- end
- "#
- .unindent()
- .repeat(3);
-
- let registry = Arc::new(LanguageRegistry::test());
- let language = Arc::new(elixir_lang());
- registry.add(language.clone());
- registry.add(Arc::new(heex_lang()));
- registry.add(Arc::new(html_lang()));
-
- test_random_edits(text, registry, language, rng);
-}
-
-fn test_random_edits(
- text: String,
- registry: Arc<LanguageRegistry>,
- language: Arc<Language>,
- mut rng: StdRng,
-) {
- let operations = env::var("OPERATIONS")
- .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
- .unwrap_or(10);
-
- let mut buffer = Buffer::new(0, 0, text);
-
- let mut syntax_map = SyntaxMap::new();
- syntax_map.set_language_registry(registry.clone());
- syntax_map.reparse(language.clone(), &buffer);
-
- let mut reference_syntax_map = SyntaxMap::new();
- reference_syntax_map.set_language_registry(registry.clone());
-
- log::info!("initial text:\n{}", buffer.text());
-
- for _ in 0..operations {
- let prev_buffer = buffer.snapshot();
- let prev_syntax_map = syntax_map.snapshot();
-
- buffer.randomly_edit(&mut rng, 3);
- log::info!("text:\n{}", buffer.text());
-
- syntax_map.interpolate(&buffer);
- check_interpolation(&prev_syntax_map, &syntax_map, &prev_buffer, &buffer);
-
- syntax_map.reparse(language.clone(), &buffer);
-
- reference_syntax_map.clear();
- reference_syntax_map.reparse(language.clone(), &buffer);
- }
-
- for i in 0..operations {
- let i = operations - i - 1;
- buffer.undo();
- log::info!("undoing operation {}", i);
- log::info!("text:\n{}", buffer.text());
-
- syntax_map.interpolate(&buffer);
- syntax_map.reparse(language.clone(), &buffer);
-
- reference_syntax_map.clear();
- reference_syntax_map.reparse(language.clone(), &buffer);
- assert_eq!(
- syntax_map.layers(&buffer).len(),
- reference_syntax_map.layers(&buffer).len(),
- "wrong number of layers after undoing edit {i}"
- );
- }
-
- let layers = syntax_map.layers(&buffer);
- let reference_layers = reference_syntax_map.layers(&buffer);
- for (edited_layer, reference_layer) in layers.into_iter().zip(reference_layers.into_iter()) {
- assert_eq!(
- edited_layer.node().to_sexp(),
- reference_layer.node().to_sexp()
- );
- assert_eq!(edited_layer.node().range(), reference_layer.node().range());
- }
-}
-
-fn check_interpolation(
- old_syntax_map: &SyntaxSnapshot,
- new_syntax_map: &SyntaxSnapshot,
- old_buffer: &BufferSnapshot,
- new_buffer: &BufferSnapshot,
-) {
- let edits = new_buffer
- .edits_since::<usize>(&old_buffer.version())
- .collect::<Vec<_>>();
-
- for (old_layer, new_layer) in old_syntax_map
- .layers
- .iter()
- .zip(new_syntax_map.layers.iter())
- {
- assert_eq!(old_layer.range, new_layer.range);
- let Some(old_tree) = old_layer.content.tree() else {
- continue;
- };
- let Some(new_tree) = new_layer.content.tree() else {
- continue;
- };
- let old_start_byte = old_layer.range.start.to_offset(old_buffer);
- let new_start_byte = new_layer.range.start.to_offset(new_buffer);
- let old_start_point = old_layer.range.start.to_point(old_buffer).to_ts_point();
- let new_start_point = new_layer.range.start.to_point(new_buffer).to_ts_point();
- let old_node = old_tree.root_node_with_offset(old_start_byte, old_start_point);
- let new_node = new_tree.root_node_with_offset(new_start_byte, new_start_point);
- check_node_edits(
- old_layer.depth,
- &old_layer.range,
- old_node,
- new_node,
- old_buffer,
- new_buffer,
- &edits,
- );
- }
-
- fn check_node_edits(
- depth: usize,
- range: &Range<Anchor>,
- old_node: Node,
- new_node: Node,
- old_buffer: &BufferSnapshot,
- new_buffer: &BufferSnapshot,
- edits: &[text::Edit<usize>],
- ) {
- assert_eq!(old_node.kind(), new_node.kind());
-
- let old_range = old_node.byte_range();
- let new_range = new_node.byte_range();
-
- let is_edited = edits
- .iter()
- .any(|edit| edit.new.start < new_range.end && edit.new.end > new_range.start);
- if is_edited {
- assert!(
- new_node.has_changes(),
- concat!(
- "failed to mark node as edited.\n",
- "layer depth: {}, old layer range: {:?}, new layer range: {:?},\n",
- "node kind: {}, old node range: {:?}, new node range: {:?}",
- ),
- depth,
- range.to_offset(old_buffer),
- range.to_offset(new_buffer),
- new_node.kind(),
- old_range,
- new_range,
- );
- }
-
- if !new_node.has_changes() {
- assert_eq!(
- old_buffer
- .text_for_range(old_range.clone())
- .collect::<String>(),
- new_buffer
- .text_for_range(new_range.clone())
- .collect::<String>(),
- concat!(
- "mismatched text for node\n",
- "layer depth: {}, old layer range: {:?}, new layer range: {:?},\n",
- "node kind: {}, old node range:{:?}, new node range:{:?}",
- ),
- depth,
- range.to_offset(old_buffer),
- range.to_offset(new_buffer),
- new_node.kind(),
- old_range,
- new_range,
- );
- }
-
- for i in 0..new_node.child_count() {
- check_node_edits(
- depth,
- range,
- old_node.child(i).unwrap(),
- new_node.child(i).unwrap(),
- old_buffer,
- new_buffer,
- edits,
- )
- }
- }
-}
-
-fn test_edit_sequence(language_name: &str, steps: &[&str]) -> (Buffer, SyntaxMap) {
- let registry = Arc::new(LanguageRegistry::test());
- registry.add(Arc::new(elixir_lang()));
- registry.add(Arc::new(heex_lang()));
- registry.add(Arc::new(rust_lang()));
- registry.add(Arc::new(ruby_lang()));
- registry.add(Arc::new(html_lang()));
- registry.add(Arc::new(erb_lang()));
- registry.add(Arc::new(markdown_lang()));
-
- let language = registry
- .language_for_name(language_name)
- .now_or_never()
- .unwrap()
- .unwrap();
- let mut buffer = Buffer::new(0, 0, Default::default());
-
- let mut mutated_syntax_map = SyntaxMap::new();
- mutated_syntax_map.set_language_registry(registry.clone());
- mutated_syntax_map.reparse(language.clone(), &buffer);
-
- for (i, marked_string) in steps.into_iter().enumerate() {
- let marked_string = marked_string.unindent();
- log::info!("incremental parse {i}: {marked_string:?}");
- buffer.edit_via_marked_text(&marked_string);
-
- // Reparse the syntax map
- mutated_syntax_map.interpolate(&buffer);
- mutated_syntax_map.reparse(language.clone(), &buffer);
-
- // Create a second syntax map from scratch
- log::info!("fresh parse {i}: {marked_string:?}");
- let mut reference_syntax_map = SyntaxMap::new();
- reference_syntax_map.set_language_registry(registry.clone());
- reference_syntax_map.reparse(language.clone(), &buffer);
-
- // Compare the mutated syntax map to the new syntax map
- let mutated_layers = mutated_syntax_map.layers(&buffer);
- let reference_layers = reference_syntax_map.layers(&buffer);
- assert_eq!(
- mutated_layers.len(),
- reference_layers.len(),
- "wrong number of layers at step {i}"
- );
- for (edited_layer, reference_layer) in
- mutated_layers.into_iter().zip(reference_layers.into_iter())
- {
- assert_eq!(
- edited_layer.node().to_sexp(),
- reference_layer.node().to_sexp(),
- "different layer at step {i}"
- );
- assert_eq!(
- edited_layer.node().range(),
- reference_layer.node().range(),
- "different layer at step {i}"
- );
- }
- }
-
- (buffer, mutated_syntax_map)
-}
-
-fn html_lang() -> Language {
- Language::new(
- LanguageConfig {
- name: "HTML".into(),
- path_suffixes: vec!["html".to_string()],
- ..Default::default()
- },
- Some(tree_sitter_html::language()),
- )
- .with_highlights_query(
- r#"
- (tag_name) @tag
- (erroneous_end_tag_name) @tag
- (attribute_name) @property
- "#,
- )
- .unwrap()
-}
-
-fn ruby_lang() -> Language {
- Language::new(
- LanguageConfig {
- name: "Ruby".into(),
- path_suffixes: vec!["rb".to_string()],
- ..Default::default()
- },
- Some(tree_sitter_ruby::language()),
- )
- .with_highlights_query(
- r#"
- ["if" "do" "else" "end"] @keyword
- (instance_variable) @ivar
- (call method: (identifier) @method)
- "#,
- )
- .unwrap()
-}
-
-fn erb_lang() -> Language {
- Language::new(
- LanguageConfig {
- name: "ERB".into(),
- path_suffixes: vec!["erb".to_string()],
- ..Default::default()
- },
- Some(tree_sitter_embedded_template::language()),
- )
- .with_highlights_query(
- r#"
- ["<%" "%>"] @keyword
- "#,
- )
- .unwrap()
- .with_injection_query(
- r#"
- (
- (code) @content
- (#set! "language" "ruby")
- (#set! "combined")
- )
-
- (
- (content) @content
- (#set! "language" "html")
- (#set! "combined")
- )
- "#,
- )
- .unwrap()
-}
-
-fn rust_lang() -> Language {
- Language::new(
- LanguageConfig {
- name: "Rust".into(),
- path_suffixes: vec!["rs".to_string()],
- ..Default::default()
- },
- Some(tree_sitter_rust::language()),
- )
- .with_highlights_query(
- r#"
- (field_identifier) @field
- (struct_expression) @struct
- "#,
- )
- .unwrap()
- .with_injection_query(
- r#"
- (macro_invocation
- (token_tree) @content
- (#set! "language" "rust"))
- "#,
- )
- .unwrap()
-}
-
-fn markdown_lang() -> Language {
- Language::new(
- LanguageConfig {
- name: "Markdown".into(),
- path_suffixes: vec!["md".into()],
- ..Default::default()
- },
- Some(tree_sitter_markdown::language()),
- )
- .with_injection_query(
- r#"
- (fenced_code_block
- (info_string
- (language) @language)
- (code_fence_content) @content)
- "#,
- )
- .unwrap()
-}
-
-fn elixir_lang() -> Language {
- Language::new(
- LanguageConfig {
- name: "Elixir".into(),
- path_suffixes: vec!["ex".into()],
- ..Default::default()
- },
- Some(tree_sitter_elixir::language()),
- )
- .with_highlights_query(
- r#"
-
- "#,
- )
- .unwrap()
-}
-
-fn heex_lang() -> Language {
- Language::new(
- LanguageConfig {
- name: "HEEx".into(),
- path_suffixes: vec!["heex".into()],
- ..Default::default()
- },
- Some(tree_sitter_heex::language()),
- )
- .with_injection_query(
- r#"
- (
- (directive
- [
- (partial_expression_value)
- (expression_value)
- (ending_expression_value)
- ] @content)
- (#set! language "elixir")
- (#set! combined)
- )
-
- ((expression (expression_value) @content)
- (#set! language "elixir"))
- "#,
- )
- .unwrap()
-}
-
-fn range_for_text(buffer: &Buffer, text: &str) -> Range<usize> {
- let start = buffer.as_rope().to_string().find(text).unwrap();
- start..start + text.len()
-}
-
-#[track_caller]
-fn assert_layers_for_range(
- syntax_map: &SyntaxMap,
- buffer: &BufferSnapshot,
- range: Range<Point>,
- expected_layers: &[&str],
-) {
- let layers = syntax_map
- .layers_for_range(range, &buffer)
- .collect::<Vec<_>>();
- assert_eq!(
- layers.len(),
- expected_layers.len(),
- "wrong number of layers"
- );
- for (i, (layer, expected_s_exp)) in layers.iter().zip(expected_layers.iter()).enumerate() {
- let actual_s_exp = layer.node().to_sexp();
- assert!(
- string_contains_sequence(
- &actual_s_exp,
- &expected_s_exp.split("...").collect::<Vec<_>>()
- ),
- "layer {i}:\n\nexpected: {expected_s_exp}\nactual: {actual_s_exp}",
- );
- }
-}
-
-fn assert_capture_ranges(
- syntax_map: &SyntaxMap,
- buffer: &BufferSnapshot,
- highlight_query_capture_names: &[&str],
- marked_string: &str,
-) {
- let mut actual_ranges = Vec::<Range<usize>>::new();
- let captures = syntax_map.captures(0..buffer.len(), buffer, |grammar| {
- grammar.highlights_query.as_ref()
- });
- let queries = captures
- .grammars()
- .iter()
- .map(|grammar| grammar.highlights_query.as_ref().unwrap())
- .collect::<Vec<_>>();
- for capture in captures {
- let name = &queries[capture.grammar_index].capture_names()[capture.index as usize];
- if highlight_query_capture_names.contains(&name) {
- actual_ranges.push(capture.node.byte_range());
- }
- }
-
- let (text, expected_ranges) = marked_text_ranges(&marked_string.unindent(), false);
- assert_eq!(text, buffer.text());
- assert_eq!(actual_ranges, expected_ranges);
-}
-
-pub fn string_contains_sequence(text: &str, parts: &[&str]) -> bool {
- let mut last_part_end = 0;
- for part in parts {
- if let Some(start_ix) = text[last_part_end..].find(part) {
- last_part_end = start_ix + part.len();
- } else {
- return false;
- }
- }
- true
-}
@@ -10,8 +10,8 @@ doctest = false
[dependencies]
editor = { path = "../editor" }
-fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
-language = { package = "language2", path = "../language2" }
+fuzzy = { path = "../fuzzy" }
+language = { path = "../language" }
gpui = { package = "gpui2", path = "../gpui2" }
picker = { path = "../picker" }
project = { path = "../project" }
@@ -13,13 +13,13 @@ collections = { path = "../collections" }
editor = { path = "../editor" }
settings = { package = "settings2", path = "../settings2" }
theme = { package = "theme2", path = "../theme2" }
-language = { package = "language2", path = "../language2" }
+language = { path = "../language" }
project = { path = "../project" }
workspace = { path = "../workspace" }
gpui = { package = "gpui2", path = "../gpui2" }
ui = { package = "ui2", path = "../ui2" }
util = { path = "../util" }
-lsp = { package = "lsp2", path = "../lsp2" }
+lsp = { path = "../lsp" }
futures.workspace = true
serde.workspace = true
anyhow.workspace = true
@@ -13,7 +13,7 @@ test-support = ["async-pipe"]
[dependencies]
collections = { path = "../collections" }
-gpui = { path = "../gpui" }
+gpui = { package = "gpui2", path = "../gpui2" }
util = { path = "../util" }
anyhow.workspace = true
@@ -29,7 +29,7 @@ serde_json.workspace = true
smol.workspace = true
[dev-dependencies]
-gpui = { path = "../gpui", features = ["test-support"] }
+gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "82d00a04211cf4e1236029aa03e6b6ce2a74c553" }
@@ -5,7 +5,7 @@ pub use lsp_types::*;
use anyhow::{anyhow, Context, Result};
use collections::HashMap;
use futures::{channel::oneshot, io::BufWriter, AsyncRead, AsyncWrite, FutureExt};
-use gpui::{executor, AsyncAppContext, Task};
+use gpui::{AsyncAppContext, BackgroundExecutor, Task};
use parking_lot::Mutex;
use postage::{barrier, prelude::Stream};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
@@ -62,7 +62,7 @@ pub struct LanguageServer {
notification_handlers: Arc<Mutex<HashMap<&'static str, NotificationHandler>>>,
response_handlers: Arc<Mutex<Option<HashMap<usize, ResponseHandler>>>>,
io_handlers: Arc<Mutex<HashMap<usize, IoHandler>>>,
- executor: Arc<executor::Background>,
+ executor: BackgroundExecutor,
#[allow(clippy::type_complexity)]
io_tasks: Mutex<Option<(Task<Option<()>>, Task<Option<()>>)>>,
output_done_rx: Mutex<Option<barrier::Receiver>>,
@@ -210,7 +210,7 @@ impl LanguageServer {
Stdin: AsyncWrite + Unpin + Send + 'static,
Stdout: AsyncRead + Unpin + Send + 'static,
Stderr: AsyncRead + Unpin + Send + 'static,
- F: FnMut(AnyNotification) + 'static + Send + Clone,
+ F: FnMut(AnyNotification) + 'static + Send + Sync + Clone,
{
let (outbound_tx, outbound_rx) = channel::unbounded::<String>();
let (output_done_tx, output_done_rx) = barrier::channel();
@@ -220,30 +220,35 @@ impl LanguageServer {
Arc::new(Mutex::new(Some(HashMap::<_, ResponseHandler>::default())));
let io_handlers = Arc::new(Mutex::new(HashMap::default()));
- let stdout_input_task = cx.spawn(|cx| {
- Self::handle_input(
- stdout,
- on_unhandled_notification.clone(),
- notification_handlers.clone(),
- response_handlers.clone(),
- io_handlers.clone(),
- cx,
- )
- .log_err()
+ let stdout_input_task = cx.spawn({
+ let on_unhandled_notification = on_unhandled_notification.clone();
+ let notification_handlers = notification_handlers.clone();
+ let response_handlers = response_handlers.clone();
+ let io_handlers = io_handlers.clone();
+ move |cx| {
+ Self::handle_input(
+ stdout,
+ on_unhandled_notification,
+ notification_handlers,
+ response_handlers,
+ io_handlers,
+ cx,
+ )
+ .log_err()
+ }
});
let stderr_input_task = stderr
.map(|stderr| {
- cx.spawn(|_| {
- Self::handle_stderr(stderr, io_handlers.clone(), stderr_capture.clone())
- .log_err()
- })
+ let io_handlers = io_handlers.clone();
+ let stderr_captures = stderr_capture.clone();
+ cx.spawn(|_| Self::handle_stderr(stderr, io_handlers, stderr_captures).log_err())
})
.unwrap_or_else(|| Task::Ready(Some(None)));
let input_task = cx.spawn(|_| async move {
let (stdout, stderr) = futures::join!(stdout_input_task, stderr_input_task);
stdout.or(stderr)
});
- let output_task = cx.background().spawn({
+ let output_task = cx.background_executor().spawn({
Self::handle_output(
stdin,
outbound_rx,
@@ -264,7 +269,7 @@ impl LanguageServer {
code_action_kinds,
next_id: Default::default(),
outbound_tx,
- executor: cx.background(),
+ executor: cx.background_executor().clone(),
io_tasks: Mutex::new(Some((input_task, output_task))),
output_done_rx: Mutex::new(Some(output_done_rx)),
root_path: root_path.to_path_buf(),
@@ -481,10 +486,7 @@ impl LanguageServer {
completion_item: Some(CompletionItemCapability {
snippet_support: Some(true),
resolve_support: Some(CompletionItemCapabilityResolveSupport {
- properties: vec![
- "documentation".to_string(),
- "additionalTextEdits".to_string(),
- ],
+ properties: vec!["additionalTextEdits".to_string()],
}),
..Default::default()
}),
@@ -610,7 +612,7 @@ impl LanguageServer {
where
T: request::Request,
T::Params: 'static + Send,
- F: 'static + Send + FnMut(T::Params, AsyncAppContext) -> Fut,
+ F: 'static + FnMut(T::Params, AsyncAppContext) -> Fut + Send,
Fut: 'static + Future<Output = Result<T::Result>>,
{
self.on_custom_request(T::METHOD, f)
@@ -644,7 +646,7 @@ impl LanguageServer {
#[must_use]
pub fn on_custom_notification<Params, F>(&self, method: &'static str, mut f: F) -> Subscription
where
- F: 'static + Send + FnMut(Params, AsyncAppContext),
+ F: 'static + FnMut(Params, AsyncAppContext) + Send,
Params: DeserializeOwned,
{
let prev_handler = self.notification_handlers.lock().insert(
@@ -672,7 +674,7 @@ impl LanguageServer {
mut f: F,
) -> Subscription
where
- F: 'static + Send + FnMut(Params, AsyncAppContext) -> Fut,
+ F: 'static + FnMut(Params, AsyncAppContext) -> Fut + Send,
Fut: 'static + Future<Output = Result<Res>>,
Params: DeserializeOwned + Send + 'static,
Res: Serialize,
@@ -685,7 +687,7 @@ impl LanguageServer {
match serde_json::from_str(params) {
Ok(params) => {
let response = f(params, cx.clone());
- cx.foreground()
+ cx.foreground_executor()
.spawn({
let outbound_tx = outbound_tx.clone();
async move {
@@ -780,20 +782,11 @@ impl LanguageServer {
)
}
- // some child of string literal (be it "" or ``) which is the child of an attribute
-
- // <Foo className="bar" />
- // <Foo className={`bar`} />
- // <Foo className={something + "bar"} />
- // <Foo className={something + "bar"} />
- // const classes = "awesome ";
- // <Foo className={classes} />
-
fn request_internal<T: request::Request>(
next_id: &AtomicUsize,
response_handlers: &Mutex<Option<HashMap<usize, ResponseHandler>>>,
outbound_tx: &channel::Sender<String>,
- executor: &Arc<executor::Background>,
+ executor: &BackgroundExecutor,
params: T::Params,
) -> impl 'static + Future<Output = anyhow::Result<T::Result>>
where
@@ -1071,8 +1064,9 @@ impl FakeLanguageServer {
.on_request::<T, _, _>(move |params, cx| {
let result = handler(params, cx.clone());
let responded_tx = responded_tx.clone();
+ let executor = cx.background_executor().clone();
async move {
- cx.background().simulate_random_delay().await;
+ executor.simulate_random_delay().await;
let result = result.await;
responded_tx.unbounded_send(()).ok();
result
@@ -1,38 +0,0 @@
-[package]
-name = "lsp2"
-version = "0.1.0"
-edition = "2021"
-publish = false
-
-[lib]
-path = "src/lsp2.rs"
-doctest = false
-
-[features]
-test-support = ["async-pipe"]
-
-[dependencies]
-collections = { path = "../collections" }
-gpui = { package = "gpui2", path = "../gpui2" }
-util = { path = "../util" }
-
-anyhow.workspace = true
-async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "82d00a04211cf4e1236029aa03e6b6ce2a74c553", optional = true }
-futures.workspace = true
-log.workspace = true
-lsp-types = { git = "https://github.com/zed-industries/lsp-types", branch = "updated-completion-list-item-defaults" }
-parking_lot.workspace = true
-postage.workspace = true
-serde.workspace = true
-serde_derive.workspace = true
-serde_json.workspace = true
-smol.workspace = true
-
-[dev-dependencies]
-gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
-util = { path = "../util", features = ["test-support"] }
-
-async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "82d00a04211cf4e1236029aa03e6b6ce2a74c553" }
-ctor.workspace = true
-env_logger.workspace = true
-unindent.workspace = true
@@ -1,1197 +0,0 @@
-use log::warn;
-pub use lsp_types::request::*;
-pub use lsp_types::*;
-
-use anyhow::{anyhow, Context, Result};
-use collections::HashMap;
-use futures::{channel::oneshot, io::BufWriter, AsyncRead, AsyncWrite, FutureExt};
-use gpui::{AsyncAppContext, BackgroundExecutor, Task};
-use parking_lot::Mutex;
-use postage::{barrier, prelude::Stream};
-use serde::{de::DeserializeOwned, Deserialize, Serialize};
-use serde_json::{json, value::RawValue, Value};
-use smol::{
- channel,
- io::{AsyncBufReadExt, AsyncReadExt, AsyncWriteExt, BufReader},
- process::{self, Child},
-};
-use std::{
- ffi::OsString,
- fmt,
- future::Future,
- io::Write,
- path::PathBuf,
- str::{self, FromStr as _},
- sync::{
- atomic::{AtomicUsize, Ordering::SeqCst},
- Arc, Weak,
- },
- time::{Duration, Instant},
-};
-use std::{path::Path, process::Stdio};
-use util::{ResultExt, TryFutureExt};
-
-const JSON_RPC_VERSION: &str = "2.0";
-const CONTENT_LEN_HEADER: &str = "Content-Length: ";
-const LSP_REQUEST_TIMEOUT: Duration = Duration::from_secs(60 * 2);
-
-type NotificationHandler = Box<dyn Send + FnMut(Option<usize>, &str, AsyncAppContext)>;
-type ResponseHandler = Box<dyn Send + FnOnce(Result<String, Error>)>;
-type IoHandler = Box<dyn Send + FnMut(IoKind, &str)>;
-
-#[derive(Debug, Clone, Copy)]
-pub enum IoKind {
- StdOut,
- StdIn,
- StdErr,
-}
-
-#[derive(Debug, Clone, Deserialize)]
-pub struct LanguageServerBinary {
- pub path: PathBuf,
- pub arguments: Vec<OsString>,
-}
-
-pub struct LanguageServer {
- server_id: LanguageServerId,
- next_id: AtomicUsize,
- outbound_tx: channel::Sender<String>,
- name: String,
- capabilities: ServerCapabilities,
- code_action_kinds: Option<Vec<CodeActionKind>>,
- notification_handlers: Arc<Mutex<HashMap<&'static str, NotificationHandler>>>,
- response_handlers: Arc<Mutex<Option<HashMap<usize, ResponseHandler>>>>,
- io_handlers: Arc<Mutex<HashMap<usize, IoHandler>>>,
- executor: BackgroundExecutor,
- #[allow(clippy::type_complexity)]
- io_tasks: Mutex<Option<(Task<Option<()>>, Task<Option<()>>)>>,
- output_done_rx: Mutex<Option<barrier::Receiver>>,
- root_path: PathBuf,
- _server: Option<Mutex<Child>>,
-}
-
-#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
-#[repr(transparent)]
-pub struct LanguageServerId(pub usize);
-
-pub enum Subscription {
- Notification {
- method: &'static str,
- notification_handlers: Option<Arc<Mutex<HashMap<&'static str, NotificationHandler>>>>,
- },
- Io {
- id: usize,
- io_handlers: Option<Weak<Mutex<HashMap<usize, IoHandler>>>>,
- },
-}
-
-#[derive(Serialize, Deserialize)]
-pub struct Request<'a, T> {
- jsonrpc: &'static str,
- id: usize,
- method: &'a str,
- params: T,
-}
-
-#[derive(Serialize, Deserialize)]
-struct AnyResponse<'a> {
- jsonrpc: &'a str,
- id: usize,
- #[serde(default)]
- error: Option<Error>,
- #[serde(borrow)]
- result: Option<&'a RawValue>,
-}
-
-#[derive(Serialize)]
-struct Response<T> {
- jsonrpc: &'static str,
- id: usize,
- result: Option<T>,
- error: Option<Error>,
-}
-
-#[derive(Serialize, Deserialize)]
-struct Notification<'a, T> {
- jsonrpc: &'static str,
- #[serde(borrow)]
- method: &'a str,
- params: T,
-}
-
-#[derive(Debug, Clone, Deserialize)]
-struct AnyNotification<'a> {
- #[serde(default)]
- id: Option<usize>,
- #[serde(borrow)]
- method: &'a str,
- #[serde(borrow, default)]
- params: Option<&'a RawValue>,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-struct Error {
- message: String,
-}
-
-impl LanguageServer {
- pub fn new(
- stderr_capture: Arc<Mutex<Option<String>>>,
- server_id: LanguageServerId,
- binary: LanguageServerBinary,
- root_path: &Path,
- code_action_kinds: Option<Vec<CodeActionKind>>,
- cx: AsyncAppContext,
- ) -> Result<Self> {
- let working_dir = if root_path.is_dir() {
- root_path
- } else {
- root_path.parent().unwrap_or_else(|| Path::new("/"))
- };
-
- let mut server = process::Command::new(&binary.path)
- .current_dir(working_dir)
- .args(binary.arguments)
- .stdin(Stdio::piped())
- .stdout(Stdio::piped())
- .stderr(Stdio::piped())
- .kill_on_drop(true)
- .spawn()?;
-
- let stdin = server.stdin.take().unwrap();
- let stdout = server.stdout.take().unwrap();
- let stderr = server.stderr.take().unwrap();
- let mut server = Self::new_internal(
- server_id.clone(),
- stdin,
- stdout,
- Some(stderr),
- stderr_capture,
- Some(server),
- root_path,
- code_action_kinds,
- cx,
- move |notification| {
- log::info!(
- "{} unhandled notification {}:\n{}",
- server_id,
- notification.method,
- serde_json::to_string_pretty(
- ¬ification
- .params
- .and_then(|params| Value::from_str(params.get()).ok())
- .unwrap_or(Value::Null)
- )
- .unwrap(),
- );
- },
- );
-
- if let Some(name) = binary.path.file_name() {
- server.name = name.to_string_lossy().to_string();
- }
-
- Ok(server)
- }
-
- fn new_internal<Stdin, Stdout, Stderr, F>(
- server_id: LanguageServerId,
- stdin: Stdin,
- stdout: Stdout,
- stderr: Option<Stderr>,
- stderr_capture: Arc<Mutex<Option<String>>>,
- server: Option<Child>,
- root_path: &Path,
- code_action_kinds: Option<Vec<CodeActionKind>>,
- cx: AsyncAppContext,
- on_unhandled_notification: F,
- ) -> Self
- where
- Stdin: AsyncWrite + Unpin + Send + 'static,
- Stdout: AsyncRead + Unpin + Send + 'static,
- Stderr: AsyncRead + Unpin + Send + 'static,
- F: FnMut(AnyNotification) + 'static + Send + Sync + Clone,
- {
- let (outbound_tx, outbound_rx) = channel::unbounded::<String>();
- let (output_done_tx, output_done_rx) = barrier::channel();
- let notification_handlers =
- Arc::new(Mutex::new(HashMap::<_, NotificationHandler>::default()));
- let response_handlers =
- Arc::new(Mutex::new(Some(HashMap::<_, ResponseHandler>::default())));
- let io_handlers = Arc::new(Mutex::new(HashMap::default()));
-
- let stdout_input_task = cx.spawn({
- let on_unhandled_notification = on_unhandled_notification.clone();
- let notification_handlers = notification_handlers.clone();
- let response_handlers = response_handlers.clone();
- let io_handlers = io_handlers.clone();
- move |cx| {
- Self::handle_input(
- stdout,
- on_unhandled_notification,
- notification_handlers,
- response_handlers,
- io_handlers,
- cx,
- )
- .log_err()
- }
- });
- let stderr_input_task = stderr
- .map(|stderr| {
- let io_handlers = io_handlers.clone();
- let stderr_captures = stderr_capture.clone();
- cx.spawn(|_| Self::handle_stderr(stderr, io_handlers, stderr_captures).log_err())
- })
- .unwrap_or_else(|| Task::Ready(Some(None)));
- let input_task = cx.spawn(|_| async move {
- let (stdout, stderr) = futures::join!(stdout_input_task, stderr_input_task);
- stdout.or(stderr)
- });
- let output_task = cx.background_executor().spawn({
- Self::handle_output(
- stdin,
- outbound_rx,
- output_done_tx,
- response_handlers.clone(),
- io_handlers.clone(),
- )
- .log_err()
- });
-
- Self {
- server_id,
- notification_handlers,
- response_handlers,
- io_handlers,
- name: Default::default(),
- capabilities: Default::default(),
- code_action_kinds,
- next_id: Default::default(),
- outbound_tx,
- executor: cx.background_executor().clone(),
- io_tasks: Mutex::new(Some((input_task, output_task))),
- output_done_rx: Mutex::new(Some(output_done_rx)),
- root_path: root_path.to_path_buf(),
- _server: server.map(|server| Mutex::new(server)),
- }
- }
-
- pub fn code_action_kinds(&self) -> Option<Vec<CodeActionKind>> {
- self.code_action_kinds.clone()
- }
-
- async fn handle_input<Stdout, F>(
- stdout: Stdout,
- mut on_unhandled_notification: F,
- notification_handlers: Arc<Mutex<HashMap<&'static str, NotificationHandler>>>,
- response_handlers: Arc<Mutex<Option<HashMap<usize, ResponseHandler>>>>,
- io_handlers: Arc<Mutex<HashMap<usize, IoHandler>>>,
- cx: AsyncAppContext,
- ) -> anyhow::Result<()>
- where
- Stdout: AsyncRead + Unpin + Send + 'static,
- F: FnMut(AnyNotification) + 'static + Send,
- {
- let mut stdout = BufReader::new(stdout);
- let _clear_response_handlers = util::defer({
- let response_handlers = response_handlers.clone();
- move || {
- response_handlers.lock().take();
- }
- });
- let mut buffer = Vec::new();
- loop {
- buffer.clear();
- stdout.read_until(b'\n', &mut buffer).await?;
- stdout.read_until(b'\n', &mut buffer).await?;
- let header = std::str::from_utf8(&buffer)?;
- let message_len: usize = header
- .strip_prefix(CONTENT_LEN_HEADER)
- .ok_or_else(|| anyhow!("invalid LSP message header {header:?}"))?
- .trim_end()
- .parse()?;
-
- buffer.resize(message_len, 0);
- stdout.read_exact(&mut buffer).await?;
-
- if let Ok(message) = str::from_utf8(&buffer) {
- log::trace!("incoming message: {}", message);
- for handler in io_handlers.lock().values_mut() {
- handler(IoKind::StdOut, message);
- }
- }
-
- if let Ok(msg) = serde_json::from_slice::<AnyNotification>(&buffer) {
- if let Some(handler) = notification_handlers.lock().get_mut(msg.method) {
- handler(
- msg.id,
- &msg.params.map(|params| params.get()).unwrap_or("null"),
- cx.clone(),
- );
- } else {
- on_unhandled_notification(msg);
- }
- } else if let Ok(AnyResponse {
- id, error, result, ..
- }) = serde_json::from_slice(&buffer)
- {
- if let Some(handler) = response_handlers
- .lock()
- .as_mut()
- .and_then(|handlers| handlers.remove(&id))
- {
- if let Some(error) = error {
- handler(Err(error));
- } else if let Some(result) = result {
- handler(Ok(result.get().into()));
- } else {
- handler(Ok("null".into()));
- }
- }
- } else {
- warn!(
- "failed to deserialize LSP message:\n{}",
- std::str::from_utf8(&buffer)?
- );
- }
-
- // Don't starve the main thread when receiving lots of messages at once.
- smol::future::yield_now().await;
- }
- }
-
- async fn handle_stderr<Stderr>(
- stderr: Stderr,
- io_handlers: Arc<Mutex<HashMap<usize, IoHandler>>>,
- stderr_capture: Arc<Mutex<Option<String>>>,
- ) -> anyhow::Result<()>
- where
- Stderr: AsyncRead + Unpin + Send + 'static,
- {
- let mut stderr = BufReader::new(stderr);
- let mut buffer = Vec::new();
-
- loop {
- buffer.clear();
- stderr.read_until(b'\n', &mut buffer).await?;
- if let Ok(message) = str::from_utf8(&buffer) {
- log::trace!("incoming stderr message:{message}");
- for handler in io_handlers.lock().values_mut() {
- handler(IoKind::StdErr, message);
- }
-
- if let Some(stderr) = stderr_capture.lock().as_mut() {
- stderr.push_str(message);
- }
- }
-
- // Don't starve the main thread when receiving lots of messages at once.
- smol::future::yield_now().await;
- }
- }
-
- async fn handle_output<Stdin>(
- stdin: Stdin,
- outbound_rx: channel::Receiver<String>,
- output_done_tx: barrier::Sender,
- response_handlers: Arc<Mutex<Option<HashMap<usize, ResponseHandler>>>>,
- io_handlers: Arc<Mutex<HashMap<usize, IoHandler>>>,
- ) -> anyhow::Result<()>
- where
- Stdin: AsyncWrite + Unpin + Send + 'static,
- {
- let mut stdin = BufWriter::new(stdin);
- let _clear_response_handlers = util::defer({
- let response_handlers = response_handlers.clone();
- move || {
- response_handlers.lock().take();
- }
- });
- let mut content_len_buffer = Vec::new();
- while let Ok(message) = outbound_rx.recv().await {
- log::trace!("outgoing message:{}", message);
- for handler in io_handlers.lock().values_mut() {
- handler(IoKind::StdIn, &message);
- }
-
- content_len_buffer.clear();
- write!(content_len_buffer, "{}", message.len()).unwrap();
- stdin.write_all(CONTENT_LEN_HEADER.as_bytes()).await?;
- stdin.write_all(&content_len_buffer).await?;
- stdin.write_all("\r\n\r\n".as_bytes()).await?;
- stdin.write_all(message.as_bytes()).await?;
- stdin.flush().await?;
- }
- drop(output_done_tx);
- Ok(())
- }
-
- /// Initializes a language server.
- /// Note that `options` is used directly to construct [`InitializeParams`],
- /// which is why it is owned.
- pub async fn initialize(mut self, options: Option<Value>) -> Result<Arc<Self>> {
- let root_uri = Url::from_file_path(&self.root_path).unwrap();
- #[allow(deprecated)]
- let params = InitializeParams {
- process_id: None,
- root_path: None,
- root_uri: Some(root_uri.clone()),
- initialization_options: options,
- capabilities: ClientCapabilities {
- workspace: Some(WorkspaceClientCapabilities {
- configuration: Some(true),
- did_change_watched_files: Some(DidChangeWatchedFilesClientCapabilities {
- dynamic_registration: Some(true),
- relative_pattern_support: Some(true),
- }),
- did_change_configuration: Some(DynamicRegistrationClientCapabilities {
- dynamic_registration: Some(true),
- }),
- workspace_folders: Some(true),
- symbol: Some(WorkspaceSymbolClientCapabilities {
- resolve_support: None,
- ..WorkspaceSymbolClientCapabilities::default()
- }),
- inlay_hint: Some(InlayHintWorkspaceClientCapabilities {
- refresh_support: Some(true),
- }),
- diagnostic: Some(DiagnosticWorkspaceClientCapabilities {
- refresh_support: None,
- }),
- ..Default::default()
- }),
- text_document: Some(TextDocumentClientCapabilities {
- definition: Some(GotoCapability {
- link_support: Some(true),
- dynamic_registration: None,
- }),
- code_action: Some(CodeActionClientCapabilities {
- code_action_literal_support: Some(CodeActionLiteralSupport {
- code_action_kind: CodeActionKindLiteralSupport {
- value_set: vec![
- CodeActionKind::REFACTOR.as_str().into(),
- CodeActionKind::QUICKFIX.as_str().into(),
- CodeActionKind::SOURCE.as_str().into(),
- ],
- },
- }),
- data_support: Some(true),
- resolve_support: Some(CodeActionCapabilityResolveSupport {
- properties: vec!["edit".to_string(), "command".to_string()],
- }),
- ..Default::default()
- }),
- completion: Some(CompletionClientCapabilities {
- completion_item: Some(CompletionItemCapability {
- snippet_support: Some(true),
- resolve_support: Some(CompletionItemCapabilityResolveSupport {
- properties: vec!["additionalTextEdits".to_string()],
- }),
- ..Default::default()
- }),
- completion_list: Some(CompletionListCapability {
- item_defaults: Some(vec![
- "commitCharacters".to_owned(),
- "editRange".to_owned(),
- "insertTextMode".to_owned(),
- "data".to_owned(),
- ]),
- }),
- ..Default::default()
- }),
- rename: Some(RenameClientCapabilities {
- prepare_support: Some(true),
- ..Default::default()
- }),
- hover: Some(HoverClientCapabilities {
- content_format: Some(vec![MarkupKind::Markdown]),
- dynamic_registration: None,
- }),
- inlay_hint: Some(InlayHintClientCapabilities {
- resolve_support: Some(InlayHintResolveClientCapabilities {
- properties: vec![
- "textEdits".to_string(),
- "tooltip".to_string(),
- "label.tooltip".to_string(),
- "label.location".to_string(),
- "label.command".to_string(),
- ],
- }),
- dynamic_registration: Some(false),
- }),
- publish_diagnostics: Some(PublishDiagnosticsClientCapabilities {
- related_information: Some(true),
- ..Default::default()
- }),
- formatting: Some(DynamicRegistrationClientCapabilities {
- dynamic_registration: None,
- }),
- on_type_formatting: Some(DynamicRegistrationClientCapabilities {
- dynamic_registration: None,
- }),
- diagnostic: Some(DiagnosticClientCapabilities {
- related_document_support: Some(true),
- dynamic_registration: None,
- }),
- ..Default::default()
- }),
- experimental: Some(json!({
- "serverStatusNotification": true,
- })),
- window: Some(WindowClientCapabilities {
- work_done_progress: Some(true),
- ..Default::default()
- }),
- general: None,
- },
- trace: None,
- workspace_folders: Some(vec![WorkspaceFolder {
- uri: root_uri,
- name: Default::default(),
- }]),
- client_info: None,
- locale: None,
- };
-
- let response = self.request::<request::Initialize>(params).await?;
- if let Some(info) = response.server_info {
- self.name = info.name;
- }
- self.capabilities = response.capabilities;
-
- self.notify::<notification::Initialized>(InitializedParams {})?;
- Ok(Arc::new(self))
- }
-
- pub fn shutdown(&self) -> Option<impl 'static + Send + Future<Output = Option<()>>> {
- if let Some(tasks) = self.io_tasks.lock().take() {
- let response_handlers = self.response_handlers.clone();
- let next_id = AtomicUsize::new(self.next_id.load(SeqCst));
- let outbound_tx = self.outbound_tx.clone();
- let executor = self.executor.clone();
- let mut output_done = self.output_done_rx.lock().take().unwrap();
- let shutdown_request = Self::request_internal::<request::Shutdown>(
- &next_id,
- &response_handlers,
- &outbound_tx,
- &executor,
- (),
- );
- let exit = Self::notify_internal::<notification::Exit>(&outbound_tx, ());
- outbound_tx.close();
- Some(
- async move {
- log::debug!("language server shutdown started");
- shutdown_request.await?;
- response_handlers.lock().take();
- exit?;
- output_done.recv().await;
- log::debug!("language server shutdown finished");
- drop(tasks);
- anyhow::Ok(())
- }
- .log_err(),
- )
- } else {
- None
- }
- }
-
- #[must_use]
- pub fn on_notification<T, F>(&self, f: F) -> Subscription
- where
- T: notification::Notification,
- F: 'static + Send + FnMut(T::Params, AsyncAppContext),
- {
- self.on_custom_notification(T::METHOD, f)
- }
-
- #[must_use]
- pub fn on_request<T, F, Fut>(&self, f: F) -> Subscription
- where
- T: request::Request,
- T::Params: 'static + Send,
- F: 'static + FnMut(T::Params, AsyncAppContext) -> Fut + Send,
- Fut: 'static + Future<Output = Result<T::Result>>,
- {
- self.on_custom_request(T::METHOD, f)
- }
-
- #[must_use]
- pub fn on_io<F>(&self, f: F) -> Subscription
- where
- F: 'static + Send + FnMut(IoKind, &str),
- {
- let id = self.next_id.fetch_add(1, SeqCst);
- self.io_handlers.lock().insert(id, Box::new(f));
- Subscription::Io {
- id,
- io_handlers: Some(Arc::downgrade(&self.io_handlers)),
- }
- }
-
- pub fn remove_request_handler<T: request::Request>(&self) {
- self.notification_handlers.lock().remove(T::METHOD);
- }
-
- pub fn remove_notification_handler<T: notification::Notification>(&self) {
- self.notification_handlers.lock().remove(T::METHOD);
- }
-
- pub fn has_notification_handler<T: notification::Notification>(&self) -> bool {
- self.notification_handlers.lock().contains_key(T::METHOD)
- }
-
- #[must_use]
- pub fn on_custom_notification<Params, F>(&self, method: &'static str, mut f: F) -> Subscription
- where
- F: 'static + FnMut(Params, AsyncAppContext) + Send,
- Params: DeserializeOwned,
- {
- let prev_handler = self.notification_handlers.lock().insert(
- method,
- Box::new(move |_, params, cx| {
- if let Some(params) = serde_json::from_str(params).log_err() {
- f(params, cx);
- }
- }),
- );
- assert!(
- prev_handler.is_none(),
- "registered multiple handlers for the same LSP method"
- );
- Subscription::Notification {
- method,
- notification_handlers: Some(self.notification_handlers.clone()),
- }
- }
-
- #[must_use]
- pub fn on_custom_request<Params, Res, Fut, F>(
- &self,
- method: &'static str,
- mut f: F,
- ) -> Subscription
- where
- F: 'static + FnMut(Params, AsyncAppContext) -> Fut + Send,
- Fut: 'static + Future<Output = Result<Res>>,
- Params: DeserializeOwned + Send + 'static,
- Res: Serialize,
- {
- let outbound_tx = self.outbound_tx.clone();
- let prev_handler = self.notification_handlers.lock().insert(
- method,
- Box::new(move |id, params, cx| {
- if let Some(id) = id {
- match serde_json::from_str(params) {
- Ok(params) => {
- let response = f(params, cx.clone());
- cx.foreground_executor()
- .spawn({
- let outbound_tx = outbound_tx.clone();
- async move {
- let response = match response.await {
- Ok(result) => Response {
- jsonrpc: JSON_RPC_VERSION,
- id,
- result: Some(result),
- error: None,
- },
- Err(error) => Response {
- jsonrpc: JSON_RPC_VERSION,
- id,
- result: None,
- error: Some(Error {
- message: error.to_string(),
- }),
- },
- };
- if let Some(response) =
- serde_json::to_string(&response).log_err()
- {
- outbound_tx.try_send(response).ok();
- }
- }
- })
- .detach();
- }
-
- Err(error) => {
- log::error!(
- "error deserializing {} request: {:?}, message: {:?}",
- method,
- error,
- params
- );
- let response = AnyResponse {
- jsonrpc: JSON_RPC_VERSION,
- id,
- result: None,
- error: Some(Error {
- message: error.to_string(),
- }),
- };
- if let Some(response) = serde_json::to_string(&response).log_err() {
- outbound_tx.try_send(response).ok();
- }
- }
- }
- }
- }),
- );
- assert!(
- prev_handler.is_none(),
- "registered multiple handlers for the same LSP method"
- );
- Subscription::Notification {
- method,
- notification_handlers: Some(self.notification_handlers.clone()),
- }
- }
-
- pub fn name(&self) -> &str {
- &self.name
- }
-
- pub fn capabilities(&self) -> &ServerCapabilities {
- &self.capabilities
- }
-
- pub fn server_id(&self) -> LanguageServerId {
- self.server_id
- }
-
- pub fn root_path(&self) -> &PathBuf {
- &self.root_path
- }
-
- pub fn request<T: request::Request>(
- &self,
- params: T::Params,
- ) -> impl Future<Output = Result<T::Result>>
- where
- T::Result: 'static + Send,
- {
- Self::request_internal::<T>(
- &self.next_id,
- &self.response_handlers,
- &self.outbound_tx,
- &self.executor,
- params,
- )
- }
-
- fn request_internal<T: request::Request>(
- next_id: &AtomicUsize,
- response_handlers: &Mutex<Option<HashMap<usize, ResponseHandler>>>,
- outbound_tx: &channel::Sender<String>,
- executor: &BackgroundExecutor,
- params: T::Params,
- ) -> impl 'static + Future<Output = anyhow::Result<T::Result>>
- where
- T::Result: 'static + Send,
- {
- let id = next_id.fetch_add(1, SeqCst);
- let message = serde_json::to_string(&Request {
- jsonrpc: JSON_RPC_VERSION,
- id,
- method: T::METHOD,
- params,
- })
- .unwrap();
-
- let (tx, rx) = oneshot::channel();
- let handle_response = response_handlers
- .lock()
- .as_mut()
- .ok_or_else(|| anyhow!("server shut down"))
- .map(|handlers| {
- let executor = executor.clone();
- handlers.insert(
- id,
- Box::new(move |result| {
- executor
- .spawn(async move {
- let response = match result {
- Ok(response) => serde_json::from_str(&response)
- .context("failed to deserialize response"),
- Err(error) => Err(anyhow!("{}", error.message)),
- };
- _ = tx.send(response);
- })
- .detach();
- }),
- );
- });
-
- let send = outbound_tx
- .try_send(message)
- .context("failed to write to language server's stdin");
-
- let mut timeout = executor.timer(LSP_REQUEST_TIMEOUT).fuse();
- let started = Instant::now();
- async move {
- handle_response?;
- send?;
-
- let method = T::METHOD;
- futures::select! {
- response = rx.fuse() => {
- let elapsed = started.elapsed();
- log::trace!("Took {elapsed:?} to recieve response to {method:?} id {id}");
- response?
- }
-
- _ = timeout => {
- log::error!("Cancelled LSP request task for {method:?} id {id} which took over {LSP_REQUEST_TIMEOUT:?}");
- anyhow::bail!("LSP request timeout");
- }
- }
- }
- }
-
- pub fn notify<T: notification::Notification>(&self, params: T::Params) -> Result<()> {
- Self::notify_internal::<T>(&self.outbound_tx, params)
- }
-
- fn notify_internal<T: notification::Notification>(
- outbound_tx: &channel::Sender<String>,
- params: T::Params,
- ) -> Result<()> {
- let message = serde_json::to_string(&Notification {
- jsonrpc: JSON_RPC_VERSION,
- method: T::METHOD,
- params,
- })
- .unwrap();
- outbound_tx.try_send(message)?;
- Ok(())
- }
-}
-
-impl Drop for LanguageServer {
- fn drop(&mut self) {
- if let Some(shutdown) = self.shutdown() {
- self.executor.spawn(shutdown).detach();
- }
- }
-}
-
-impl Subscription {
- pub fn detach(&mut self) {
- match self {
- Subscription::Notification {
- notification_handlers,
- ..
- } => *notification_handlers = None,
- Subscription::Io { io_handlers, .. } => *io_handlers = None,
- }
- }
-}
-
-impl fmt::Display for LanguageServerId {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- self.0.fmt(f)
- }
-}
-
-impl fmt::Debug for LanguageServer {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("LanguageServer")
- .field("id", &self.server_id.0)
- .field("name", &self.name)
- .finish_non_exhaustive()
- }
-}
-
-impl Drop for Subscription {
- fn drop(&mut self) {
- match self {
- Subscription::Notification {
- method,
- notification_handlers,
- } => {
- if let Some(handlers) = notification_handlers {
- handlers.lock().remove(method);
- }
- }
- Subscription::Io { id, io_handlers } => {
- if let Some(io_handlers) = io_handlers.as_ref().and_then(|h| h.upgrade()) {
- io_handlers.lock().remove(id);
- }
- }
- }
- }
-}
-
-#[cfg(any(test, feature = "test-support"))]
-#[derive(Clone)]
-pub struct FakeLanguageServer {
- pub server: Arc<LanguageServer>,
- notifications_rx: channel::Receiver<(String, String)>,
-}
-
-#[cfg(any(test, feature = "test-support"))]
-impl LanguageServer {
- pub fn full_capabilities() -> ServerCapabilities {
- ServerCapabilities {
- document_highlight_provider: Some(OneOf::Left(true)),
- code_action_provider: Some(CodeActionProviderCapability::Simple(true)),
- document_formatting_provider: Some(OneOf::Left(true)),
- document_range_formatting_provider: Some(OneOf::Left(true)),
- definition_provider: Some(OneOf::Left(true)),
- type_definition_provider: Some(TypeDefinitionProviderCapability::Simple(true)),
- ..Default::default()
- }
- }
-
- pub fn fake(
- name: String,
- capabilities: ServerCapabilities,
- cx: AsyncAppContext,
- ) -> (Self, FakeLanguageServer) {
- let (stdin_writer, stdin_reader) = async_pipe::pipe();
- let (stdout_writer, stdout_reader) = async_pipe::pipe();
- let (notifications_tx, notifications_rx) = channel::unbounded();
-
- let server = Self::new_internal(
- LanguageServerId(0),
- stdin_writer,
- stdout_reader,
- None::<async_pipe::PipeReader>,
- Arc::new(Mutex::new(None)),
- None,
- Path::new("/"),
- None,
- cx.clone(),
- |_| {},
- );
- let fake = FakeLanguageServer {
- server: Arc::new(Self::new_internal(
- LanguageServerId(0),
- stdout_writer,
- stdin_reader,
- None::<async_pipe::PipeReader>,
- Arc::new(Mutex::new(None)),
- None,
- Path::new("/"),
- None,
- cx,
- move |msg| {
- notifications_tx
- .try_send((
- msg.method.to_string(),
- msg.params
- .map(|raw_value| raw_value.get())
- .unwrap_or("null")
- .to_string(),
- ))
- .ok();
- },
- )),
- notifications_rx,
- };
- fake.handle_request::<request::Initialize, _, _>({
- let capabilities = capabilities;
- move |_, _| {
- let capabilities = capabilities.clone();
- let name = name.clone();
- async move {
- Ok(InitializeResult {
- capabilities,
- server_info: Some(ServerInfo {
- name,
- ..Default::default()
- }),
- })
- }
- }
- });
-
- (server, fake)
- }
-}
-
-#[cfg(any(test, feature = "test-support"))]
-impl FakeLanguageServer {
- pub fn notify<T: notification::Notification>(&self, params: T::Params) {
- self.server.notify::<T>(params).ok();
- }
-
- pub async fn request<T>(&self, params: T::Params) -> Result<T::Result>
- where
- T: request::Request,
- T::Result: 'static + Send,
- {
- self.server.executor.start_waiting();
- self.server.request::<T>(params).await
- }
-
- pub async fn receive_notification<T: notification::Notification>(&mut self) -> T::Params {
- self.server.executor.start_waiting();
- self.try_receive_notification::<T>().await.unwrap()
- }
-
- pub async fn try_receive_notification<T: notification::Notification>(
- &mut self,
- ) -> Option<T::Params> {
- use futures::StreamExt as _;
-
- loop {
- let (method, params) = self.notifications_rx.next().await?;
- if method == T::METHOD {
- return Some(serde_json::from_str::<T::Params>(¶ms).unwrap());
- } else {
- log::info!("skipping message in fake language server {:?}", params);
- }
- }
- }
-
- pub fn handle_request<T, F, Fut>(
- &self,
- mut handler: F,
- ) -> futures::channel::mpsc::UnboundedReceiver<()>
- where
- T: 'static + request::Request,
- T::Params: 'static + Send,
- F: 'static + Send + FnMut(T::Params, gpui::AsyncAppContext) -> Fut,
- Fut: 'static + Send + Future<Output = Result<T::Result>>,
- {
- let (responded_tx, responded_rx) = futures::channel::mpsc::unbounded();
- self.server.remove_request_handler::<T>();
- self.server
- .on_request::<T, _, _>(move |params, cx| {
- let result = handler(params, cx.clone());
- let responded_tx = responded_tx.clone();
- let executor = cx.background_executor().clone();
- async move {
- executor.simulate_random_delay().await;
- let result = result.await;
- responded_tx.unbounded_send(()).ok();
- result
- }
- })
- .detach();
- responded_rx
- }
-
- pub fn handle_notification<T, F>(
- &self,
- mut handler: F,
- ) -> futures::channel::mpsc::UnboundedReceiver<()>
- where
- T: 'static + notification::Notification,
- T::Params: 'static + Send,
- F: 'static + Send + FnMut(T::Params, gpui::AsyncAppContext),
- {
- let (handled_tx, handled_rx) = futures::channel::mpsc::unbounded();
- self.server.remove_notification_handler::<T>();
- self.server
- .on_notification::<T, _>(move |params, cx| {
- handler(params, cx.clone());
- handled_tx.unbounded_send(()).ok();
- })
- .detach();
- handled_rx
- }
-
- pub fn remove_request_handler<T>(&mut self)
- where
- T: 'static + request::Request,
- {
- self.server.remove_request_handler::<T>();
- }
-
- pub async fn start_progress(&self, token: impl Into<String>) {
- let token = token.into();
- self.request::<request::WorkDoneProgressCreate>(WorkDoneProgressCreateParams {
- token: NumberOrString::String(token.clone()),
- })
- .await
- .unwrap();
- self.notify::<notification::Progress>(ProgressParams {
- token: NumberOrString::String(token),
- value: ProgressParamsValue::WorkDone(WorkDoneProgress::Begin(Default::default())),
- });
- }
-
- pub fn end_progress(&self, token: impl Into<String>) {
- self.notify::<notification::Progress>(ProgressParams {
- token: NumberOrString::String(token.into()),
- value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(Default::default())),
- });
- }
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use gpui::TestAppContext;
-
- #[ctor::ctor]
- fn init_logger() {
- if std::env::var("RUST_LOG").is_ok() {
- env_logger::init();
- }
- }
-
- #[gpui::test]
- async fn test_fake(cx: &mut TestAppContext) {
- let (server, mut fake) =
- LanguageServer::fake("the-lsp".to_string(), Default::default(), cx.to_async());
-
- let (message_tx, message_rx) = channel::unbounded();
- let (diagnostics_tx, diagnostics_rx) = channel::unbounded();
- server
- .on_notification::<notification::ShowMessage, _>(move |params, _| {
- message_tx.try_send(params).unwrap()
- })
- .detach();
- server
- .on_notification::<notification::PublishDiagnostics, _>(move |params, _| {
- diagnostics_tx.try_send(params).unwrap()
- })
- .detach();
-
- let server = server.initialize(None).await.unwrap();
- server
- .notify::<notification::DidOpenTextDocument>(DidOpenTextDocumentParams {
- text_document: TextDocumentItem::new(
- Url::from_str("file://a/b").unwrap(),
- "rust".to_string(),
- 0,
- "".to_string(),
- ),
- })
- .unwrap();
- assert_eq!(
- fake.receive_notification::<notification::DidOpenTextDocument>()
- .await
- .text_document
- .uri
- .as_str(),
- "file://a/b"
- );
-
- fake.notify::<notification::ShowMessage>(ShowMessageParams {
- typ: MessageType::ERROR,
- message: "ok".to_string(),
- });
- fake.notify::<notification::PublishDiagnostics>(PublishDiagnosticsParams {
- uri: Url::from_str("file://b/c").unwrap(),
- version: Some(5),
- diagnostics: vec![],
- });
- assert_eq!(message_rx.recv().await.unwrap().message, "ok");
- assert_eq!(
- diagnostics_rx.recv().await.unwrap().uri.as_str(),
- "file://b/c"
- );
-
- fake.handle_request::<request::Shutdown, _, _>(|_, _| async move { Ok(()) });
-
- drop(server);
- fake.receive_notification::<notification::Exit>().await;
- }
-}
@@ -25,8 +25,8 @@ clock = { path = "../clock" }
collections = { path = "../collections" }
git = { package = "git3", path = "../git3" }
gpui = { package = "gpui2", path = "../gpui2" }
-language = { package = "language2", path = "../language2" }
-lsp = { package = "lsp2", path = "../lsp2" }
+language = { path = "../language" }
+lsp = { path = "../lsp" }
rich_text = { path = "../rich_text" }
settings = { package = "settings2", path = "../settings2" }
snippet = { path = "../snippet" }
@@ -61,8 +61,8 @@ tree-sitter-typescript = { workspace = true, optional = true }
[dev-dependencies]
copilot = { path = "../copilot", features = ["test-support"] }
text = { package = "text2", path = "../text2", features = ["test-support"] }
-language = { package = "language2", path = "../language2", features = ["test-support"] }
-lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
+language = { path = "../language", features = ["test-support"] }
+lsp = { path = "../lsp", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
project = { path = "../project", features = ["test-support"] }
@@ -10,10 +10,10 @@ doctest = false
[dependencies]
editor = { path = "../editor" }
-fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
+fuzzy = { path = "../fuzzy" }
gpui = { package = "gpui2", path = "../gpui2" }
ui = { package = "ui2", path = "../ui2" }
-language = { package = "language2", path = "../language2" }
+language = { path = "../language" }
picker = { path = "../picker" }
settings = { package = "settings2", path = "../settings2" }
text = { package = "text2", path = "../text2" }
@@ -14,10 +14,10 @@ test-support = []
[dependencies]
client = { package = "client2", path = "../client2" }
collections = { path = "../collections"}
-language = { package = "language2", path = "../language2" }
+language = { path = "../language" }
gpui = { package = "gpui2", path = "../gpui2" }
fs = { package = "fs2", path = "../fs2" }
-lsp = { package = "lsp2", path = "../lsp2" }
+lsp = { path = "../lsp" }
node_runtime = { path = "../node_runtime"}
util = { path = "../util" }
@@ -30,6 +30,6 @@ futures.workspace = true
parking_lot.workspace = true
[dev-dependencies]
-language = { package = "language2", path = "../language2", features = ["test-support"] }
+language = { path = "../language", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
fs = { package = "fs2", path = "../fs2", features = ["test-support"] }
@@ -28,11 +28,11 @@ collections = { path = "../collections" }
db = { package = "db2", path = "../db2" }
fs = { package = "fs2", path = "../fs2" }
fsevent = { path = "../fsevent" }
-fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
+fuzzy = { path = "../fuzzy" }
git = { package = "git3", path = "../git3" }
gpui = { package = "gpui2", path = "../gpui2" }
-language = { package = "language2", path = "../language2" }
-lsp = { package = "lsp2", path = "../lsp2" }
+language = { path = "../language" }
+lsp = { path = "../lsp" }
node_runtime = { path = "../node_runtime" }
prettier = { package = "prettier2", path = "../prettier2" }
rpc = { package = "rpc2", path = "../rpc2" }
@@ -74,8 +74,8 @@ collections = { path = "../collections", features = ["test-support"] }
db = { package = "db2", path = "../db2", features = ["test-support"] }
fs = { package = "fs2", path = "../fs2", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
-language = { package = "language2", path = "../language2", features = ["test-support"] }
-lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
+language = { path = "../language", features = ["test-support"] }
+lsp = { path = "../lsp", features = ["test-support"] }
settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
prettier = { package = "prettier2", path = "../prettier2", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
@@ -34,7 +34,7 @@ unicase = "2.6"
[dev-dependencies]
client = { path = "../client2", package = "client2", features = ["test-support"] }
-language = { path = "../language2", package = "language2", features = ["test-support"] }
+language = { path = "../language", features = ["test-support"] }
editor = { path = "../editor", features = ["test-support"] }
gpui = { path = "../gpui2", package = "gpui2", features = ["test-support"] }
workspace = { path = "../workspace", features = ["test-support"] }
@@ -10,7 +10,7 @@ doctest = false
[dependencies]
editor = { path = "../editor" }
-fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
+fuzzy = { path = "../fuzzy" }
gpui = { package = "gpui2", path = "../gpui2" }
picker = { path = "../picker" }
project = { path = "../project" }
@@ -30,8 +30,8 @@ futures.workspace = true
editor = { path = "../editor", features = ["test-support"] }
settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
-language = { package = "language2", path = "../language2", features = ["test-support"] }
-lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
+language = { path = "../language", features = ["test-support"] }
+lsp = { path = "../lsp", features = ["test-support"] }
project = { path = "../project", features = ["test-support"] }
theme = { package = "theme2", path = "../theme2", features = ["test-support"] }
workspace = { path = "../workspace", features = ["test-support"] }
@@ -10,9 +10,9 @@ doctest = false
[dependencies]
editor = { path = "../editor" }
-fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
+fuzzy = { path = "../fuzzy" }
gpui = { package = "gpui2", path = "../gpui2" }
-language = { package = "language2", path = "../language2" }
+language = { path = "../language" }
picker = { path = "../picker" }
settings = { package = "settings2", path = "../settings2" }
text = { package = "text2", path = "../text2" }
@@ -19,7 +19,7 @@ collections = { path = "../collections" }
gpui = { package = "gpui2", path = "../gpui2" }
sum_tree = { path = "../sum_tree" }
theme = { package = "theme2", path = "../theme2" }
-language = { package = "language2", path = "../language2" }
+language = { path = "../language" }
util = { path = "../util" }
anyhow.workspace = true
futures.workspace = true
@@ -13,7 +13,7 @@ bitflags = "1"
collections = { path = "../collections" }
editor = { path = "../editor" }
gpui = { package = "gpui2", path = "../gpui2" }
-language = { package = "language2", path = "../language2" }
+language = { path = "../language" }
menu = { package = "menu2", path = "../menu2" }
project = { path = "../project" }
settings = { package = "settings2", path = "../settings2" }
@@ -12,7 +12,7 @@ doctest = false
ai = { path = "../ai" }
collections = { path = "../collections" }
gpui = { package = "gpui2", path = "../gpui2" }
-language = { package = "language2", path = "../language2" }
+language = { path = "../language" }
project = { path = "../project" }
workspace = { path = "../workspace" }
util = { path = "../util" }
@@ -42,7 +42,7 @@ ndarray = { version = "0.15.0" }
ai = { path = "../ai", features = ["test-support"] }
collections = { path = "../collections", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
-language = { package = "language2", path = "../language2", features = ["test-support"] }
+language = { path = "../language", features = ["test-support"] }
project = { path = "../project", features = ["test-support"] }
rpc = { package = "rpc2", path = "../rpc2", features = ["test-support"] }
workspace = { path = "../workspace", features = ["test-support"] }
@@ -16,11 +16,11 @@ chrono = "0.4"
clap = { version = "4.4", features = ["derive", "string"] }
dialoguer = { version = "0.11.0", features = ["fuzzy-select"] }
editor = { path = "../editor" }
-fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
+fuzzy = { path = "../fuzzy" }
gpui = { package = "gpui2", path = "../gpui2" }
indoc.workspace = true
itertools = "0.11.0"
-language = { package = "language2", path = "../language2" }
+language = { path = "../language" }
log.workspace = true
rust-embed.workspace = true
serde.workspace = true
@@ -10,7 +10,7 @@ doctest = false
[dependencies]
editor = { path = "../editor" }
-language = { package = "language2", path = "../language2" }
+language = { path = "../language" }
gpui = { package = "gpui2", path = "../gpui2" }
project = { path = "../project" }
# search = { path = "../search" }
@@ -13,7 +13,7 @@ client = { package = "client2", path = "../client2" }
editor = { path = "../editor" }
feature_flags = { path = "../feature_flags" }
fs = { package = "fs2", path = "../fs2" }
-fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
+fuzzy = { path = "../fuzzy" }
gpui = { package = "gpui2", path = "../gpui2" }
picker = { path = "../picker" }
settings = { package = "settings2", path = "../settings2" }
@@ -6,7 +6,7 @@ publish = false
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
-fuzzy = {package = "fuzzy2", path = "../fuzzy2"}
+fuzzy = { path = "../fuzzy"}
fs = {package = "fs2", path = "../fs2"}
gpui = {package = "gpui2", path = "../gpui2"}
picker = {path = "../picker"}
@@ -28,7 +28,7 @@ collections = { path = "../collections" }
command_palette = { path = "../command_palette" }
editor = { path = "../editor" }
gpui = { package = "gpui2", path = "../gpui2" }
-language = { package = "language2", path = "../language2" }
+language = { path = "../language" }
search = { path = "../search" }
settings = { package = "settings2", path = "../settings2" }
workspace = { path = "../workspace" }
@@ -44,10 +44,10 @@ futures.workspace = true
editor = { path = "../editor", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
-language = { package = "language2", path = "../language2", features = ["test-support"] }
+language = { path = "../language", features = ["test-support"] }
project = { path = "../project", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
settings = { package = "settings2", path = "../settings2" }
workspace = { path = "../workspace", features = ["test-support"] }
theme = { package = "theme2", path = "../theme2", features = ["test-support"] }
-lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
+lsp = { path = "../lsp", features = ["test-support"] }
@@ -14,7 +14,7 @@ test-support = []
client = { package = "client2", path = "../client2" }
editor = { path = "../editor" }
fs = { package = "fs2", path = "../fs2" }
-fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
+fuzzy = { path = "../fuzzy" }
gpui = { package = "gpui2", path = "../gpui2" }
ui = { package = "ui2", path = "../ui2" }
db = { package = "db2", path = "../db2" }
@@ -27,7 +27,7 @@ collections = { path = "../collections" }
fs = { path = "../fs2", package = "fs2" }
gpui = { package = "gpui2", path = "../gpui2" }
install_cli = { path = "../install_cli" }
-language = { path = "../language2", package = "language2" }
+language = { path = "../language" }
#menu = { path = "../menu" }
node_runtime = { path = "../node_runtime" }
project = { path = "../project" }
@@ -43,9 +43,9 @@ go_to_line = { path = "../go_to_line" }
gpui = { package = "gpui2", path = "../gpui2" }
install_cli = { path = "../install_cli" }
journal = { path = "../journal" }
-language = { package = "language2", path = "../language2" }
+language = { path = "../language" }
language_selector = { path = "../language_selector" }
-lsp = { package = "lsp2", path = "../lsp2" }
+lsp = { path = "../lsp" }
menu = { package = "menu2", path = "../menu2" }
language_tools = { path = "../language_tools" }
node_runtime = { path = "../node_runtime" }
@@ -149,7 +149,7 @@ call = { package = "call2", path = "../call2", features = ["test-support"] }
# editor = { path = "../editor", features = ["test-support"] }
# gpui = { path = "../gpui", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
-language = { package = "language2", path = "../language2", features = ["test-support"] }
+language = { path = "../language", features = ["test-support"] }
# lsp = { path = "../lsp", features = ["test-support"] }
project = { path = "../project", features = ["test-support"] }
# rpc = { path = "../rpc", features = ["test-support"] }
@@ -3,7 +3,7 @@ use async_trait::async_trait;
use collections::HashMap;
use futures::lock::Mutex;
use gpui::executor::Background;
-use language2::{LanguageServerName, LspAdapter, LspAdapterDelegate};
+use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp2::LanguageServerBinary;
use plugin_runtime::{Plugin, PluginBinary, PluginBuilder, WasiFn};
use std::{any::Any, path::PathBuf, sync::Arc};