diff --git a/Cargo.lock b/Cargo.lock index 21a08332c5e08e81c35c0d9d4db343a38983d0d6..c5645c6f25154581d84cbfe088fa4c3eff984242 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -347,7 +347,7 @@ dependencies = [ "tar", "target_build_utils", "term", - "toml", + "toml 0.4.10", "uuid", "walkdir", ] @@ -2703,15 +2703,33 @@ dependencies = [ ] [[package]] -name = "tree-sitter" -version = "0.17.1" +name = "toml" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d18dcb776d3affaba6db04d11d645946d34a69b3172e588af96ce9fecd20faac" +checksum = "a31142970826733df8241ef35dc040ef98c679ab14d7c3e54d827099b3acecaa" +dependencies = [ + "serde 1.0.125", +] + +[[package]] +name = "tree-sitter" +version = "0.19.5" +source = "git+https://github.com/tree-sitter/tree-sitter?rev=d72771a19f4143530b1cfd23808e344f1276e176#d72771a19f4143530b1cfd23808e344f1276e176" dependencies = [ "cc", "regex", ] +[[package]] +name = "tree-sitter-rust" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "784f7ef9cdbd4c895dc2d4bb785e95b4a5364a602eec803681db83d1927ddf15" +dependencies = [ + "cc", + "tree-sitter", +] + [[package]] name = "ttf-parser" version = "0.9.0" @@ -2987,5 +3005,8 @@ dependencies = [ "smallvec", "smol", "tempdir", + "toml 0.5.8", + "tree-sitter", + "tree-sitter-rust", "unindent", ] diff --git a/Cargo.toml b/Cargo.toml index 8109db121bbf0926b9348bec198a2cd009fa9cd2..02ae3d61a4ddabf5361e3d34c1b85e581de468b4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,13 +2,14 @@ members = ["zed", "gpui", "gpui_macros", "fsevent", "scoped_pool"] [patch.crates-io] -async-task = {git = "https://github.com/zed-industries/async-task", rev = "341b57d6de98cdfd7b418567b8de2022ca993a6e"} +async-task = { git = "https://github.com/zed-industries/async-task", rev = "341b57d6de98cdfd7b418567b8de2022ca993a6e" } +tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "d72771a19f4143530b1cfd23808e344f1276e176" } # TODO - Remove when a version is released with this PR: https://github.com/servo/core-foundation-rs/pull/457 -cocoa = {git = "https://github.com/servo/core-foundation-rs", rev = "025dcb3c0d1ef01530f57ef65f3b1deb948f5737"} -cocoa-foundation = {git = "https://github.com/servo/core-foundation-rs", rev = "025dcb3c0d1ef01530f57ef65f3b1deb948f5737"} -core-foundation = {git = "https://github.com/servo/core-foundation-rs", rev = "025dcb3c0d1ef01530f57ef65f3b1deb948f5737"} -core-graphics = {git = "https://github.com/servo/core-foundation-rs", rev = "025dcb3c0d1ef01530f57ef65f3b1deb948f5737"} +cocoa = { git = "https://github.com/servo/core-foundation-rs", rev = "025dcb3c0d1ef01530f57ef65f3b1deb948f5737" } +cocoa-foundation = { git = "https://github.com/servo/core-foundation-rs", rev = "025dcb3c0d1ef01530f57ef65f3b1deb948f5737" } +core-foundation = { git = "https://github.com/servo/core-foundation-rs", rev = "025dcb3c0d1ef01530f57ef65f3b1deb948f5737" } +core-graphics = { git = "https://github.com/servo/core-foundation-rs", rev = "025dcb3c0d1ef01530f57ef65f3b1deb948f5737" } [profile.dev] split-debuginfo = "unpacked" diff --git a/gpui/Cargo.toml b/gpui/Cargo.toml index 1eba8612bfa9c2474ca5a0e389bf0824cba6479f..7f436094c9e8ae892b0cf3cf99d4ccb9c6993b16 100644 --- a/gpui/Cargo.toml +++ b/gpui/Cargo.toml @@ -26,7 +26,7 @@ serde_json = "1.0.64" smallvec = {version = "1.6", features = ["union"]} smol = "1.2" tiny-skia = "0.5" -tree-sitter = "0.17" +tree-sitter = "0.19" usvg = "0.14" [build-dependencies] diff --git 
a/gpui/grammars/context-predicate/Cargo.toml b/gpui/grammars/context-predicate/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..84d18b218013a1f57102c8c7b1ef3e15203fdf0c --- /dev/null +++ b/gpui/grammars/context-predicate/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "tree-sitter-context-predicate" +description = "context-predicate grammar for the tree-sitter parsing library" +version = "0.0.1" +keywords = ["incremental", "parsing", "context-predicate"] +categories = ["parsing", "text-editors"] +repository = "https://github.com/tree-sitter/tree-sitter-javascript" +edition = "2018" +license = "MIT" + +build = "bindings/rust/build.rs" +include = [ + "bindings/rust/*", + "grammar.js", + "queries/*", + "src/*", +] + +[lib] +path = "bindings/rust/lib.rs" + +[dependencies] +tree-sitter = "0.19.3" + +[build-dependencies] +cc = "1.0" diff --git a/gpui/grammars/context-predicate/binding.gyp b/gpui/grammars/context-predicate/binding.gyp index 456116d62b63230de03a8ff80d3dcda00d68cc6e..16f3d1af27b9c9c711e3301e7f048eb33abe3aa8 100644 --- a/gpui/grammars/context-predicate/binding.gyp +++ b/gpui/grammars/context-predicate/binding.gyp @@ -8,7 +8,7 @@ ], "sources": [ "src/parser.c", - "src/binding.cc" + "bindings/node/binding.cc" ], "cflags_c": [ "-std=c99", diff --git a/gpui/grammars/context-predicate/src/binding.cc b/gpui/grammars/context-predicate/bindings/node/binding.cc similarity index 100% rename from gpui/grammars/context-predicate/src/binding.cc rename to gpui/grammars/context-predicate/bindings/node/binding.cc diff --git a/gpui/grammars/context-predicate/bindings/node/index.js b/gpui/grammars/context-predicate/bindings/node/index.js new file mode 100644 index 0000000000000000000000000000000000000000..3bad018a56ca5528443aac5afa9f67e0637f2ab1 --- /dev/null +++ b/gpui/grammars/context-predicate/bindings/node/index.js @@ -0,0 +1,19 @@ +try { + module.exports = require("../../build/Release/tree_sitter_context_predicate_binding"); +} catch (error1) { + if (error1.code !== 'MODULE_NOT_FOUND') { + throw error1; + } + try { + module.exports = require("../../build/Debug/tree_sitter_context_predicate_binding"); + } catch (error2) { + if (error2.code !== 'MODULE_NOT_FOUND') { + throw error2; + } + throw error1 + } +} + +try { + module.exports.nodeTypeInfo = require("../../src/node-types.json"); +} catch (_) {} diff --git a/gpui/grammars/context-predicate/bindings/rust/build.rs b/gpui/grammars/context-predicate/bindings/rust/build.rs new file mode 100644 index 0000000000000000000000000000000000000000..c6061f0995320f044faeac56bcac458a09747f1d --- /dev/null +++ b/gpui/grammars/context-predicate/bindings/rust/build.rs @@ -0,0 +1,40 @@ +fn main() { + let src_dir = std::path::Path::new("src"); + + let mut c_config = cc::Build::new(); + c_config.include(&src_dir); + c_config + .flag_if_supported("-Wno-unused-parameter") + .flag_if_supported("-Wno-unused-but-set-variable") + .flag_if_supported("-Wno-trigraphs"); + let parser_path = src_dir.join("parser.c"); + c_config.file(&parser_path); + + // If your language uses an external scanner written in C, + // then include this block of code: + + /* + let scanner_path = src_dir.join("scanner.c"); + c_config.file(&scanner_path); + println!("cargo:rerun-if-changed={}", scanner_path.to_str().unwrap()); + */ + + c_config.compile("parser"); + println!("cargo:rerun-if-changed={}", parser_path.to_str().unwrap()); + + // If your language uses an external scanner written in C++, + // then include this block of code: + + /* + let mut cpp_config = 
cc::Build::new(); + cpp_config.cpp(true); + cpp_config.include(&src_dir); + cpp_config + .flag_if_supported("-Wno-unused-parameter") + .flag_if_supported("-Wno-unused-but-set-variable"); + let scanner_path = src_dir.join("scanner.cc"); + cpp_config.file(&scanner_path); + cpp_config.compile("scanner"); + println!("cargo:rerun-if-changed={}", scanner_path.to_str().unwrap()); + */ +} diff --git a/gpui/grammars/context-predicate/bindings/rust/lib.rs b/gpui/grammars/context-predicate/bindings/rust/lib.rs new file mode 100644 index 0000000000000000000000000000000000000000..41962c960d333c12c2bbccedbf07fc13215a10b5 --- /dev/null +++ b/gpui/grammars/context-predicate/bindings/rust/lib.rs @@ -0,0 +1,52 @@ +//! This crate provides context_predicate language support for the [tree-sitter][] parsing library. +//! +//! Typically, you will use the [language][language func] function to add this language to a +//! tree-sitter [Parser][], and then use the parser to parse some code: +//! +//! ``` +//! let code = ""; +//! let mut parser = tree_sitter::Parser::new(); +//! parser.set_language(tree_sitter_context_predicate::language()).expect("Error loading context_predicate grammar"); +//! let tree = parser.parse(code, None).unwrap(); +//! ``` +//! +//! [Language]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Language.html +//! [language func]: fn.language.html +//! [Parser]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Parser.html +//! [tree-sitter]: https://tree-sitter.github.io/ + +use tree_sitter::Language; + +extern "C" { + fn tree_sitter_context_predicate() -> Language; +} + +/// Get the tree-sitter [Language][] for this grammar. +/// +/// [Language]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Language.html +pub fn language() -> Language { + unsafe { tree_sitter_context_predicate() } +} + +/// The content of the [`node-types.json`][] file for this grammar. 
+/// +/// [`node-types.json`]: https://tree-sitter.github.io/tree-sitter/using-parsers#static-node-types +pub const NODE_TYPES: &'static str = include_str!("../../src/node-types.json"); + +// Uncomment these to include any queries that this grammar contains + +// pub const HIGHLIGHTS_QUERY: &'static str = include_str!("../../queries/highlights.scm"); +// pub const INJECTIONS_QUERY: &'static str = include_str!("../../queries/injections.scm"); +// pub const LOCALS_QUERY: &'static str = include_str!("../../queries/locals.scm"); +// pub const TAGS_QUERY: &'static str = include_str!("../../queries/tags.scm"); + +#[cfg(test)] +mod tests { + #[test] + fn test_can_load_grammar() { + let mut parser = tree_sitter::Parser::new(); + parser + .set_language(super::language()) + .expect("Error loading context_predicate language"); + } +} diff --git a/gpui/grammars/context-predicate/index.js b/gpui/grammars/context-predicate/index.js deleted file mode 100644 index 1be86370cd475119ac63c379f5bb7015d66c98a6..0000000000000000000000000000000000000000 --- a/gpui/grammars/context-predicate/index.js +++ /dev/null @@ -1,13 +0,0 @@ -try { - module.exports = require("./build/Release/tree_sitter_context_predicate_binding"); -} catch (error) { - try { - module.exports = require("./build/Debug/tree_sitter_context_predicate_binding"); - } catch (_) { - throw error - } -} - -try { - module.exports.nodeTypeInfo = require("./src/node-types.json"); -} catch (_) { } diff --git a/gpui/grammars/context-predicate/package-lock.json b/gpui/grammars/context-predicate/package-lock.json index d068dc0228b0c0606ab40f1a384262281b4e6445..1da584a856dad08115cb6790c828956fc9e76169 100644 --- a/gpui/grammars/context-predicate/package-lock.json +++ b/gpui/grammars/context-predicate/package-lock.json @@ -9,7 +9,7 @@ "nan": "^2.14.0" }, "devDependencies": { - "tree-sitter-cli": "^0.18.3" + "tree-sitter-cli": "^0.19.5" } }, "node_modules/nan": { @@ -18,9 +18,9 @@ "integrity": "sha512-M2ufzIiINKCuDfBSAUr1vWQ+vuVcA9kqx8JJUsbQi6yf1uGRyb7HfpdfUr5qLXf3B/t8dPvcjhKMmlfnP47EzQ==" }, "node_modules/tree-sitter-cli": { - "version": "0.18.3", - "resolved": "https://registry.npmjs.org/tree-sitter-cli/-/tree-sitter-cli-0.18.3.tgz", - "integrity": "sha512-ntN8Siljy7dlazb4cSYtZCfibaNppIy6RIr/XGt44GW1hAy/yuTLtKVi4kqYlImB4/7H0AjktcSlQRmI8zLNng==", + "version": "0.19.5", + "resolved": "https://registry.npmjs.org/tree-sitter-cli/-/tree-sitter-cli-0.19.5.tgz", + "integrity": "sha512-kRzKrUAwpDN9AjA3b0tPBwT1hd8N2oQvvvHup2OEsX6mdsSMLmAvR+NSqK9fe05JrRbVvG8mbteNUQsxlMQohQ==", "dev": true, "hasInstallScript": true, "bin": { @@ -35,9 +35,9 @@ "integrity": "sha512-M2ufzIiINKCuDfBSAUr1vWQ+vuVcA9kqx8JJUsbQi6yf1uGRyb7HfpdfUr5qLXf3B/t8dPvcjhKMmlfnP47EzQ==" }, "tree-sitter-cli": { - "version": "0.18.3", - "resolved": "https://registry.npmjs.org/tree-sitter-cli/-/tree-sitter-cli-0.18.3.tgz", - "integrity": "sha512-ntN8Siljy7dlazb4cSYtZCfibaNppIy6RIr/XGt44GW1hAy/yuTLtKVi4kqYlImB4/7H0AjktcSlQRmI8zLNng==", + "version": "0.19.5", + "resolved": "https://registry.npmjs.org/tree-sitter-cli/-/tree-sitter-cli-0.19.5.tgz", + "integrity": "sha512-kRzKrUAwpDN9AjA3b0tPBwT1hd8N2oQvvvHup2OEsX6mdsSMLmAvR+NSqK9fe05JrRbVvG8mbteNUQsxlMQohQ==", "dev": true } } diff --git a/gpui/grammars/context-predicate/package.json b/gpui/grammars/context-predicate/package.json index a14e9d0145b7073eabeee5d83bacba2ce9a2563b..298e34a6b56d76ef55b4944f85226cc66c441cf5 100644 --- a/gpui/grammars/context-predicate/package.json +++ b/gpui/grammars/context-predicate/package.json @@ -1,8 +1,8 @@ { "name": 
"tree-sitter-context-predicate", - "main": "index.js", + "main": "bindings/node", "devDependencies": { - "tree-sitter-cli": "^0.18.3" + "tree-sitter-cli": "^0.19.5" }, "dependencies": { "nan": "^2.14.0" diff --git a/gpui/grammars/context-predicate/src/parser.c b/gpui/grammars/context-predicate/src/parser.c index 4bab2ea4dd37070317b1d9b638f5b196558a67be..e2af5e03caa032537a74a10e68de4536efdd8e59 100644 --- a/gpui/grammars/context-predicate/src/parser.c +++ b/gpui/grammars/context-predicate/src/parser.c @@ -5,7 +5,7 @@ #pragma GCC diagnostic ignored "-Wmissing-field-initializers" #endif -#define LANGUAGE_VERSION 12 +#define LANGUAGE_VERSION 13 #define STATE_COUNT 18 #define LARGE_STATE_COUNT 6 #define SYMBOL_COUNT 17 @@ -14,6 +14,7 @@ #define EXTERNAL_TOKEN_COUNT 0 #define FIELD_COUNT 3 #define MAX_ALIAS_SEQUENCE_LENGTH 3 +#define PRODUCTION_ID_COUNT 3 enum { sym_identifier = 1, @@ -34,7 +35,7 @@ enum { sym_parenthesized = 16, }; -static const char *ts_symbol_names[] = { +static const char * const ts_symbol_names[] = { [ts_builtin_sym_end] = "end", [sym_identifier] = "identifier", [anon_sym_BANG] = "!", @@ -54,7 +55,7 @@ static const char *ts_symbol_names[] = { [sym_parenthesized] = "parenthesized", }; -static TSSymbol ts_symbol_map[] = { +static const TSSymbol ts_symbol_map[] = { [ts_builtin_sym_end] = ts_builtin_sym_end, [sym_identifier] = sym_identifier, [anon_sym_BANG] = anon_sym_BANG, @@ -151,14 +152,14 @@ enum { field_right = 3, }; -static const char *ts_field_names[] = { +static const char * const ts_field_names[] = { [0] = NULL, [field_expression] = "expression", [field_left] = "left", [field_right] = "right", }; -static const TSFieldMapSlice ts_field_map_slices[3] = { +static const TSFieldMapSlice ts_field_map_slices[PRODUCTION_ID_COUNT] = { [1] = {.index = 0, .length = 1}, [2] = {.index = 1, .length = 2}, }; @@ -171,11 +172,11 @@ static const TSFieldMapEntry ts_field_map_entries[] = { {field_right, 2}, }; -static TSSymbol ts_alias_sequences[3][MAX_ALIAS_SEQUENCE_LENGTH] = { +static const TSSymbol ts_alias_sequences[PRODUCTION_ID_COUNT][MAX_ALIAS_SEQUENCE_LENGTH] = { [0] = {0}, }; -static uint16_t ts_non_terminal_alias_map[] = { +static const uint16_t ts_non_terminal_alias_map[] = { 0, }; @@ -279,7 +280,7 @@ static bool ts_lex(TSLexer *lexer, TSStateId state) { } } -static TSLexMode ts_lex_modes[STATE_COUNT] = { +static const TSLexMode ts_lex_modes[STATE_COUNT] = { [0] = {.lex_state = 0}, [1] = {.lex_state = 1}, [2] = {.lex_state = 1}, @@ -300,7 +301,7 @@ static TSLexMode ts_lex_modes[STATE_COUNT] = { [17] = {.lex_state = 0}, }; -static uint16_t ts_parse_table[LARGE_STATE_COUNT][SYMBOL_COUNT] = { +static const uint16_t ts_parse_table[LARGE_STATE_COUNT][SYMBOL_COUNT] = { [0] = { [ts_builtin_sym_end] = ACTIONS(1), [sym_identifier] = ACTIONS(1), @@ -375,7 +376,7 @@ static uint16_t ts_parse_table[LARGE_STATE_COUNT][SYMBOL_COUNT] = { }, }; -static uint16_t ts_small_parse_table[] = { +static const uint16_t ts_small_parse_table[] = { [0] = 3, ACTIONS(11), 1, anon_sym_EQ_EQ, @@ -448,7 +449,7 @@ static uint16_t ts_small_parse_table[] = { sym_identifier, }; -static uint32_t ts_small_parse_table_map[] = { +static const uint32_t ts_small_parse_table_map[] = { [SMALL_STATE(6)] = 0, [SMALL_STATE(7)] = 13, [SMALL_STATE(8)] = 20, @@ -463,7 +464,7 @@ static uint32_t ts_small_parse_table_map[] = { [SMALL_STATE(17)] = 85, }; -static TSParseActionEntry ts_parse_actions[] = { +static const TSParseActionEntry ts_parse_actions[] = { [0] = {.entry = {.count = 0, .reusable = false}}, [1] = {.entry = {.count 
= 1, .reusable = false}}, RECOVER(), [3] = {.entry = {.count = 1, .reusable = true}}, SHIFT(6), @@ -495,30 +496,31 @@ extern "C" { #endif extern const TSLanguage *tree_sitter_context_predicate(void) { - static TSLanguage language = { + static const TSLanguage language = { .version = LANGUAGE_VERSION, .symbol_count = SYMBOL_COUNT, .alias_count = ALIAS_COUNT, .token_count = TOKEN_COUNT, .external_token_count = EXTERNAL_TOKEN_COUNT, - .symbol_names = ts_symbol_names, - .symbol_metadata = ts_symbol_metadata, - .parse_table = (const uint16_t *)ts_parse_table, - .parse_actions = ts_parse_actions, - .lex_modes = ts_lex_modes, - .alias_sequences = (const TSSymbol *)ts_alias_sequences, - .max_alias_sequence_length = MAX_ALIAS_SEQUENCE_LENGTH, - .lex_fn = ts_lex, + .state_count = STATE_COUNT, + .large_state_count = LARGE_STATE_COUNT, + .production_id_count = PRODUCTION_ID_COUNT, .field_count = FIELD_COUNT, - .field_map_slices = (const TSFieldMapSlice *)ts_field_map_slices, - .field_map_entries = (const TSFieldMapEntry *)ts_field_map_entries, + .max_alias_sequence_length = MAX_ALIAS_SEQUENCE_LENGTH, + .parse_table = &ts_parse_table[0][0], + .small_parse_table = ts_small_parse_table, + .small_parse_table_map = ts_small_parse_table_map, + .parse_actions = ts_parse_actions, + .symbol_names = ts_symbol_names, .field_names = ts_field_names, - .large_state_count = LARGE_STATE_COUNT, - .small_parse_table = (const uint16_t *)ts_small_parse_table, - .small_parse_table_map = (const uint32_t *)ts_small_parse_table_map, + .field_map_slices = ts_field_map_slices, + .field_map_entries = ts_field_map_entries, + .symbol_metadata = ts_symbol_metadata, .public_symbol_map = ts_symbol_map, .alias_map = ts_non_terminal_alias_map, - .state_count = STATE_COUNT, + .alias_sequences = &ts_alias_sequences[0][0], + .lex_modes = ts_lex_modes, + .lex_fn = ts_lex, }; return &language; } diff --git a/gpui/grammars/context-predicate/src/tree_sitter/parser.h b/gpui/grammars/context-predicate/src/tree_sitter/parser.h index c5a788ff647dc103175b53a296f6f54232343204..cbbc7b4ee3c5d0d594d304c8f1c6b44377b3793e 100644 --- a/gpui/grammars/context-predicate/src/tree_sitter/parser.h +++ b/gpui/grammars/context-predicate/src/tree_sitter/parser.h @@ -13,6 +13,8 @@ extern "C" { #define ts_builtin_sym_end 0 #define TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024 +typedef uint16_t TSStateId; + #ifndef TREE_SITTER_API_H_ typedef uint16_t TSSymbol; typedef uint16_t TSFieldId; @@ -30,12 +32,10 @@ typedef struct { uint16_t length; } TSFieldMapSlice; -typedef uint16_t TSStateId; - typedef struct { - bool visible : 1; - bool named : 1; - bool supertype: 1; + bool visible; + bool named; + bool supertype; } TSSymbolMetadata; typedef struct TSLexer TSLexer; @@ -57,21 +57,21 @@ typedef enum { TSParseActionTypeRecover, } TSParseActionType; -typedef struct { - union { - struct { - TSStateId state; - bool extra : 1; - bool repetition : 1; - } shift; - struct { - TSSymbol symbol; - int16_t dynamic_precedence; - uint8_t child_count; - uint8_t production_id; - } reduce; - } params; - TSParseActionType type : 4; +typedef union { + struct { + uint8_t type; + TSStateId state; + bool extra; + bool repetition; + } shift; + struct { + uint8_t type; + uint8_t child_count; + TSSymbol symbol; + int16_t dynamic_precedence; + uint16_t production_id; + } reduce; + uint8_t type; } TSParseAction; typedef struct { @@ -83,7 +83,7 @@ typedef union { TSParseAction action; struct { uint8_t count; - bool reusable : 1; + bool reusable; } entry; } TSParseActionEntry; @@ -93,13 +93,24 @@ 
struct TSLanguage { uint32_t alias_count; uint32_t token_count; uint32_t external_token_count; - const char **symbol_names; - const TSSymbolMetadata *symbol_metadata; + uint32_t state_count; + uint32_t large_state_count; + uint32_t production_id_count; + uint32_t field_count; + uint16_t max_alias_sequence_length; const uint16_t *parse_table; + const uint16_t *small_parse_table; + const uint32_t *small_parse_table_map; const TSParseActionEntry *parse_actions; - const TSLexMode *lex_modes; + const char * const *symbol_names; + const char * const *field_names; + const TSFieldMapSlice *field_map_slices; + const TSFieldMapEntry *field_map_entries; + const TSSymbolMetadata *symbol_metadata; + const TSSymbol *public_symbol_map; + const uint16_t *alias_map; const TSSymbol *alias_sequences; - uint16_t max_alias_sequence_length; + const TSLexMode *lex_modes; bool (*lex_fn)(TSLexer *, TSStateId); bool (*keyword_lex_fn)(TSLexer *, TSStateId); TSSymbol keyword_capture_token; @@ -112,16 +123,6 @@ struct TSLanguage { unsigned (*serialize)(void *, char *); void (*deserialize)(void *, const char *, unsigned); } external_scanner; - uint32_t field_count; - const TSFieldMapSlice *field_map_slices; - const TSFieldMapEntry *field_map_entries; - const char **field_names; - uint32_t large_state_count; - const uint16_t *small_parse_table; - const uint32_t *small_parse_table_map; - const TSSymbol *public_symbol_map; - const uint16_t *alias_map; - uint32_t state_count; }; /* @@ -170,66 +171,50 @@ struct TSLanguage { #define ACTIONS(id) id -#define SHIFT(state_value) \ - { \ - { \ - .params = { \ - .shift = { \ - .state = state_value \ - } \ - }, \ - .type = TSParseActionTypeShift \ - } \ - } +#define SHIFT(state_value) \ + {{ \ + .shift = { \ + .type = TSParseActionTypeShift, \ + .state = state_value \ + } \ + }} #define SHIFT_REPEAT(state_value) \ - { \ - { \ - .params = { \ - .shift = { \ - .state = state_value, \ - .repetition = true \ - } \ - }, \ - .type = TSParseActionTypeShift \ + {{ \ + .shift = { \ + .type = TSParseActionTypeShift, \ + .state = state_value, \ + .repetition = true \ } \ - } - -#define RECOVER() \ - { \ - { .type = TSParseActionTypeRecover } \ - } + }} #define SHIFT_EXTRA() \ - { \ - { \ - .params = { \ - .shift = { \ - .extra = true \ - } \ - }, \ - .type = TSParseActionTypeShift \ + {{ \ + .shift = { \ + .type = TSParseActionTypeShift, \ + .extra = true \ } \ - } + }} #define REDUCE(symbol_val, child_count_val, ...) 
\ - { \ - { \ - .params = { \ - .reduce = { \ - .symbol = symbol_val, \ - .child_count = child_count_val, \ - __VA_ARGS__ \ - }, \ - }, \ - .type = TSParseActionTypeReduce \ - } \ - } - -#define ACCEPT_INPUT() \ - { \ - { .type = TSParseActionTypeAccept } \ - } + {{ \ + .reduce = { \ + .type = TSParseActionTypeReduce, \ + .symbol = symbol_val, \ + .child_count = child_count_val, \ + __VA_ARGS__ \ + }, \ + }} + +#define RECOVER() \ + {{ \ + .type = TSParseActionTypeRecover \ + }} + +#define ACCEPT_INPUT() \ + {{ \ + .type = TSParseActionTypeAccept \ + }} #ifdef __cplusplus } diff --git a/gpui/src/font_cache.rs b/gpui/src/font_cache.rs index 4e109ef24099a4c3cf74c6169d80b5fcf44bcfc3..75ee206b35e7e3c35400ed584216da3bc2e512ff 100644 --- a/gpui/src/font_cache.rs +++ b/gpui/src/font_cache.rs @@ -161,3 +161,29 @@ impl FontCache { metric * font_size / self.metric(font_id, |m| m.units_per_em as f32) } } + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ + fonts::{Style, Weight}, + platform::{test, Platform as _}, + }; + + #[test] + fn test_select_font() { + let platform = test::platform(); + let fonts = FontCache::new(platform.fonts()); + let arial = fonts.load_family(&["Arial"]).unwrap(); + let arial_regular = fonts.select_font(arial, &Properties::new()).unwrap(); + let arial_italic = fonts + .select_font(arial, &Properties::new().style(Style::Italic)) + .unwrap(); + let arial_bold = fonts + .select_font(arial, &Properties::new().weight(Weight::BOLD)) + .unwrap(); + assert_ne!(arial_regular, arial_italic); + assert_ne!(arial_regular, arial_bold); + assert_ne!(arial_italic, arial_bold); + } +} diff --git a/gpui/src/platform/mac/fonts.rs b/gpui/src/platform/mac/fonts.rs index c33234090fa4be51768cfd6078ccff3d85304ab2..ac455b93b997a3086c466d905cc229930e6de800 100644 --- a/gpui/src/platform/mac/fonts.rs +++ b/gpui/src/platform/mac/fonts.rs @@ -322,6 +322,9 @@ mod tests { let menlo_regular = fonts.select_font(&menlo, &Properties::new())?; let menlo_italic = fonts.select_font(&menlo, &Properties::new().style(Style::Italic))?; let menlo_bold = fonts.select_font(&menlo, &Properties::new().weight(Weight::BOLD))?; + assert_ne!(menlo_regular, menlo_italic); + assert_ne!(menlo_regular, menlo_bold); + assert_ne!(menlo_italic, menlo_bold); let line = fonts.layout_str( "hello world", @@ -371,32 +374,33 @@ mod tests { Ok(()) } - // #[test] - // fn test_rasterize_glyph() { - // use std::{fs::File, io::BufWriter, path::Path}; - - // let fonts = FontSystem::new(); - // let font_ids = fonts.load_family("Fira Code").unwrap(); - // let font_id = fonts.select_font(&font_ids, &Default::default()).unwrap(); - // let glyph_id = fonts.glyph_for_char(font_id, 'G').unwrap(); - - // const VARIANTS: usize = 1; - // for i in 0..VARIANTS { - // let variant = i as f32 / VARIANTS as f32; - // let (bounds, bytes) = fonts - // .rasterize_glyph(font_id, 16.0, glyph_id, vec2f(variant, variant), 2.) 
- // .unwrap(); - - // let name = format!("/Users/as-cii/Desktop/twog-{}.png", i); - // let path = Path::new(&name); - // let file = File::create(path).unwrap(); - // let ref mut w = BufWriter::new(file); - - // let mut encoder = png::Encoder::new(w, bounds.width() as u32, bounds.height() as u32); - // encoder.set_color(png::ColorType::Grayscale); - // encoder.set_depth(png::BitDepth::Eight); - // let mut writer = encoder.write_header().unwrap(); - // writer.write_image_data(&bytes).unwrap(); - // } - // } + #[test] + #[ignore] + fn test_rasterize_glyph() { + use std::{fs::File, io::BufWriter, path::Path}; + + let fonts = FontSystem::new(); + let font_ids = fonts.load_family("Fira Code").unwrap(); + let font_id = fonts.select_font(&font_ids, &Default::default()).unwrap(); + let glyph_id = fonts.glyph_for_char(font_id, 'G').unwrap(); + + const VARIANTS: usize = 1; + for i in 0..VARIANTS { + let variant = i as f32 / VARIANTS as f32; + let (bounds, bytes) = fonts + .rasterize_glyph(font_id, 16.0, glyph_id, vec2f(variant, variant), 2.) + .unwrap(); + + let name = format!("/Users/as-cii/Desktop/twog-{}.png", i); + let path = Path::new(&name); + let file = File::create(path).unwrap(); + let ref mut w = BufWriter::new(file); + + let mut encoder = png::Encoder::new(w, bounds.width() as u32, bounds.height() as u32); + encoder.set_color(png::ColorType::Grayscale); + encoder.set_depth(png::BitDepth::Eight); + let mut writer = encoder.write_header().unwrap(); + writer.write_image_data(&bytes).unwrap(); + } + } } diff --git a/zed/Cargo.toml b/zed/Cargo.toml index a1749f2474930dacb4ba2db0d52b8e7dc9ea4d13..89e780a5bf385a73ac2958bf154ea0c12503c9ab 100644 --- a/zed/Cargo.toml +++ b/zed/Cargo.toml @@ -38,6 +38,9 @@ similar = "1.3" simplelog = "0.9" smallvec = {version = "1.6", features = ["union"]} smol = "1.2.5" +toml = "0.5" +tree-sitter = "0.19.5" +tree-sitter-rust = "0.19.0" [dev-dependencies] cargo-bundle = "0.5.0" diff --git a/zed/assets/themes/light.toml b/zed/assets/themes/light.toml new file mode 100644 index 0000000000000000000000000000000000000000..1d706fd7241b9f83d3734c64eb6b7ffd670de69c --- /dev/null +++ b/zed/assets/themes/light.toml @@ -0,0 +1,13 @@ +[ui] +background = 0xffffff +line_numbers = 0x237791 +text = 0x0d0d0d + +[syntax] +keyword = 0xaf00db +function = 0x795e26 +string = 0xa31515 +type = 0x267599 +number = 0x0d885b +comment = 0x048204 +property = 0x001080 diff --git a/zed/languages/rust/brackets.scm b/zed/languages/rust/brackets.scm new file mode 100644 index 0000000000000000000000000000000000000000..0be534c48cc4b07c577d5f4d8181e1e2b6f1fcfe --- /dev/null +++ b/zed/languages/rust/brackets.scm @@ -0,0 +1,6 @@ +("(" @open ")" @close) +("[" @open "]" @close) +("{" @open "}" @close) +("<" @open ">" @close) +("\"" @open "\"" @close) +(closure_parameters "|" @open "|" @close) \ No newline at end of file diff --git a/zed/languages/rust/config.toml b/zed/languages/rust/config.toml new file mode 100644 index 0000000000000000000000000000000000000000..4cde06f6f80fb035ce02e0f6d7e88cb657e88336 --- /dev/null +++ b/zed/languages/rust/config.toml @@ -0,0 +1,8 @@ +name = "Rust" +path_suffixes = ["rs"] +bracket_pairs = [ + { start = "{", end = "}" }, + { start = "[", end = "]" }, + { start = "(", end = ")" }, + { start = "<", end = ">" }, +] diff --git a/zed/languages/rust/highlights.scm b/zed/languages/rust/highlights.scm new file mode 100644 index 0000000000000000000000000000000000000000..2b425faf5c0f9b44967e4afcc66b5f2306d70b15 --- /dev/null +++ b/zed/languages/rust/highlights.scm @@ -0,0 +1,63 @@ 
+(type_identifier) @type +(primitive_type) @type.builtin + +(field_identifier) @property + +(call_expression + function: [ + (identifier) @function + (scoped_identifier + name: (identifier) @function) + (field_expression + field: (field_identifier) @function.method) + ]) + +(function_item name: (identifier) @function.definition) +(function_signature_item name: (identifier) @function.definition) + +[ + "async" + "break" + "const" + "continue" + "default" + "dyn" + "else" + "enum" + "extern" + "for" + "fn" + "if" + "in" + "impl" + "let" + "loop" + "macro_rules!" + "match" + "mod" + "move" + "pub" + "return" + "static" + "struct" + "trait" + "type" + "use" + "where" + "while" + "union" + "unsafe" + (mutable_specifier) + (super) +] @keyword + +[ + (string_literal) + (raw_string_literal) + (char_literal) +] @string + +[ + (line_comment) + (block_comment) +] @comment diff --git a/zed/src/editor/buffer/mod.rs b/zed/src/editor/buffer/mod.rs index 1b458388bc1cf26495fd97a22ce82bed935528aa..65b8075a35d438ec42ab476a6e998307c977dfaa 100644 --- a/zed/src/editor/buffer/mod.rs +++ b/zed/src/editor/buffer/mod.rs @@ -4,15 +4,19 @@ pub mod rope; mod selection; pub use anchor::*; +use parking_lot::Mutex; pub use point::*; -pub use rope::{ChunksIter, Rope, TextSummary}; +pub use rope::{Chunks, Rope, TextSummary}; use seahash::SeaHasher; pub use selection::*; use similar::{ChangeTag, TextDiff}; +use tree_sitter::{InputEdit, Parser, QueryCursor}; use crate::{ editor::Bias, + language::{Language, Tree}, operation_queue::{self, OperationQueue}, + settings::{StyleId, ThemeMap}, sum_tree::{self, FilterCursor, SeekBias, SumTree}, time::{self, ReplicaId}, worktree::FileHandle, @@ -21,11 +25,12 @@ use anyhow::{anyhow, Result}; use gpui::{AppContext, Entity, ModelContext, Task}; use lazy_static::lazy_static; use std::{ + cell::RefCell, cmp, hash::BuildHasher, iter::{self, Iterator}, mem, - ops::Range, + ops::{Deref, DerefMut, Range}, str, sync::Arc, time::{Duration, Instant, SystemTime, UNIX_EPOCH}, @@ -56,6 +61,50 @@ type HashMap = std::collections::HashMap; #[cfg(not(test))] type HashSet = std::collections::HashSet; +thread_local! { + static PARSER: RefCell = RefCell::new(Parser::new()); +} + +lazy_static! 
{ + static ref QUERY_CURSORS: Mutex> = Default::default(); +} + +struct QueryCursorHandle(Option); + +impl QueryCursorHandle { + fn new() -> Self { + QueryCursorHandle(Some( + QUERY_CURSORS + .lock() + .pop() + .unwrap_or_else(|| QueryCursor::new()), + )) + } +} + +impl Deref for QueryCursorHandle { + type Target = QueryCursor; + + fn deref(&self) -> &Self::Target { + self.0.as_ref().unwrap() + } +} + +impl DerefMut for QueryCursorHandle { + fn deref_mut(&mut self) -> &mut Self::Target { + self.0.as_mut().unwrap() + } +} + +impl Drop for QueryCursorHandle { + fn drop(&mut self) { + let mut cursor = self.0.take().unwrap(); + cursor.set_byte_range(0..usize::MAX); + cursor.set_point_range(Point::zero().into()..Point::MAX.into()); + QUERY_CURSORS.lock().push(cursor) + } +} + pub struct Buffer { fragments: SumTree, visible_text: Rope, @@ -68,6 +117,9 @@ pub struct Buffer { undo_map: UndoMap, history: History, file: Option, + language: Option>, + syntax_tree: Mutex>, + is_parsing: bool, selections: HashMap>, pub selections_last_update: SelectionsVersion, deferred_ops: OperationQueue, @@ -77,6 +129,13 @@ pub struct Buffer { lamport_clock: time::Lamport, } +#[derive(Clone)] +struct SyntaxTree { + tree: Tree, + parsed: bool, + version: time::Global, +} + #[derive(Clone)] struct Transaction { start: time::Global, @@ -238,16 +297,18 @@ impl UndoMap { } struct Edits<'a, F: Fn(&FragmentSummary) -> bool> { - cursor: FilterCursor<'a, F, Fragment, usize>, + deleted_text: &'a Rope, + cursor: FilterCursor<'a, F, Fragment, FragmentTextSummary>, undos: &'a UndoMap, since: time::Global, delta: isize, } -#[derive(Clone, Debug, Eq, PartialEq)] +#[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct Edit { pub old_range: Range, pub new_range: Range, + pub old_lines: Point, } impl Edit { @@ -357,22 +418,24 @@ impl Buffer { base_text: T, ctx: &mut ModelContext, ) -> Self { - Self::build(replica_id, History::new(base_text.into()), None, ctx) + Self::build(replica_id, History::new(base_text.into()), None, None, ctx) } pub fn from_history( replica_id: ReplicaId, history: History, file: Option, + language: Option>, ctx: &mut ModelContext, ) -> Self { - Self::build(replica_id, history, file, ctx) + Self::build(replica_id, history, file, language, ctx) } fn build( replica_id: ReplicaId, history: History, file: Option, + language: Option>, ctx: &mut ModelContext, ) -> Self { let saved_mtime; @@ -461,7 +524,7 @@ impl Buffer { ); } - Self { + let mut result = Self { visible_text, deleted_text: Rope::new(), fragments, @@ -472,6 +535,9 @@ impl Buffer { undo_map: Default::default(), history, file, + syntax_tree: Mutex::new(None), + is_parsing: false, + language, saved_mtime, selections: HashMap::default(), selections_last_update: 0, @@ -480,11 +546,18 @@ impl Buffer { replica_id, local_clock: time::Local::new(replica_id), lamport_clock: time::Lamport::new(replica_id), - } + }; + result.reparse(ctx); + result } - pub fn snapshot(&self) -> Rope { - self.visible_text.clone() + pub fn snapshot(&self) -> Snapshot { + Snapshot { + text: self.visible_text.clone(), + tree: self.syntax_tree(), + language: self.language.clone(), + query_cursor: QueryCursorHandle::new(), + } } pub fn file(&self) -> Option<&FileHandle> { @@ -496,13 +569,13 @@ impl Buffer { new_file: Option, ctx: &mut ModelContext, ) -> Task> { - let snapshot = self.snapshot(); + let text = self.visible_text.clone(); let version = self.version.clone(); let file = self.file.clone(); ctx.spawn(|handle, mut ctx| async move { if let Some(file) = 
new_file.as_ref().or(file.as_ref()) { - let result = ctx.read(|ctx| file.save(snapshot, ctx.as_ref())).await; + let result = ctx.read(|ctx| file.save(text, ctx.as_ref())).await; if result.is_ok() { handle.update(&mut ctx, |me, ctx| me.did_save(version, new_file, ctx)); } @@ -529,6 +602,154 @@ impl Buffer { ctx.emit(Event::Saved); } + pub fn syntax_tree(&self) -> Option { + if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() { + let mut edited = false; + let mut delta = 0_isize; + for Edit { + old_range, + new_range, + old_lines, + } in self.edits_since(syntax_tree.version.clone()) + { + let start_offset = (old_range.start as isize + delta) as usize; + let start_point = self.visible_text.to_point(start_offset); + let old_bytes = old_range.end - old_range.start; + let new_bytes = new_range.end - new_range.start; + syntax_tree.tree.edit(&InputEdit { + start_byte: start_offset, + old_end_byte: start_offset + old_bytes, + new_end_byte: start_offset + new_bytes, + start_position: start_point.into(), + old_end_position: (start_point + old_lines).into(), + new_end_position: self.visible_text.to_point(start_offset + new_bytes).into(), + }); + delta += new_bytes as isize - old_bytes as isize; + edited = true; + } + syntax_tree.parsed &= !edited; + syntax_tree.version = self.version(); + Some(syntax_tree.tree.clone()) + } else { + None + } + } + + pub fn is_parsing(&self) -> bool { + self.is_parsing + } + + fn should_reparse(&self) -> bool { + if let Some(syntax_tree) = self.syntax_tree.lock().as_ref() { + !syntax_tree.parsed || syntax_tree.version != self.version + } else { + self.language.is_some() + } + } + + fn reparse(&mut self, ctx: &mut ModelContext) { + // Avoid spawning a new parsing task if the buffer is already being reparsed + // due to an earlier edit. + if self.is_parsing { + return; + } + + if let Some(language) = self.language.clone() { + self.is_parsing = true; + ctx.spawn(|handle, mut ctx| async move { + while handle.read_with(&ctx, |this, _| this.should_reparse()) { + // The parse tree is out of date, so grab the syntax tree to synchronously + // splice all the edits that have happened since the last parse. + let new_tree = handle.update(&mut ctx, |this, _| this.syntax_tree()); + let (new_text, new_version) = handle + .read_with(&ctx, |this, _| (this.visible_text.clone(), this.version())); + + // Parse the current text in a background thread. 
+ let new_tree = ctx + .background_executor() + .spawn({ + let language = language.clone(); + async move { Self::parse_text(&new_text, new_tree, &language) } + }) + .await; + + handle.update(&mut ctx, |this, ctx| { + *this.syntax_tree.lock() = Some(SyntaxTree { + tree: new_tree, + parsed: true, + version: new_version, + }); + ctx.emit(Event::Reparsed); + ctx.notify(); + }); + } + handle.update(&mut ctx, |this, _| this.is_parsing = false); + }) + .detach(); + } + } + + fn parse_text(text: &Rope, old_tree: Option, language: &Language) -> Tree { + PARSER.with(|parser| { + let mut parser = parser.borrow_mut(); + parser + .set_language(language.grammar) + .expect("incompatible grammar"); + let mut chunks = text.chunks_in_range(0..text.len()); + let tree = parser + .parse_with( + &mut move |offset, _| { + chunks.seek(offset); + chunks.next().unwrap_or("").as_bytes() + }, + old_tree.as_ref(), + ) + .unwrap(); + tree + }) + } + + pub fn range_for_syntax_ancestor(&self, range: Range) -> Option> { + if let Some(tree) = self.syntax_tree() { + let root = tree.root_node(); + let range = range.start.to_offset(self)..range.end.to_offset(self); + let mut node = root.descendant_for_byte_range(range.start, range.end); + while node.map_or(false, |n| n.byte_range() == range) { + node = node.unwrap().parent(); + } + node.map(|n| n.byte_range()) + } else { + None + } + } + + pub fn enclosing_bracket_ranges( + &self, + range: Range, + ) -> Option<(Range, Range)> { + let (lang, tree) = self.language.as_ref().zip(self.syntax_tree())?; + let open_capture_ix = lang.brackets_query.capture_index_for_name("open")?; + let close_capture_ix = lang.brackets_query.capture_index_for_name("close")?; + + // Find bracket pairs that *inclusively* contain the given range. + let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1; + let mut cursor = QueryCursorHandle::new(); + let matches = cursor.set_byte_range(range).matches( + &lang.brackets_query, + tree.root_node(), + TextProvider(&self.visible_text), + ); + + // Get the ranges of the innermost pair of brackets. + matches + .filter_map(|mat| { + let open = mat.nodes_for_capture_index(open_capture_ix).next()?; + let close = mat.nodes_for_capture_index(close_capture_ix).next()?; + Some((open.byte_range(), close.byte_range())) + }) + .min_by_key(|(open_range, close_range)| close_range.end - open_range.start) + } + fn diff(&self, new_text: Arc, ctx: &AppContext) -> Task { // TODO: it would be nice to not allocate here. 
let old_text = self.text(); @@ -620,17 +841,15 @@ impl Buffer { self.visible_text.max_point() } - pub fn line(&self, row: u32) -> String { - self.chars_at(Point::new(row, 0)) - .take_while(|c| *c != '\n') - .collect() + pub fn row_count(&self) -> u32 { + self.max_point().row + 1 } pub fn text(&self) -> String { self.text_for_range(0..self.len()).collect() } - pub fn text_for_range<'a, T: ToOffset>(&'a self, range: Range) -> ChunksIter<'a> { + pub fn text_for_range<'a, T: ToOffset>(&'a self, range: Range) -> Chunks<'a> { let start = range.start.to_offset(self); let end = range.end.to_offset(self); self.visible_text.chunks_in_range(start..end) @@ -656,6 +875,7 @@ impl Buffer { .filter(move |summary| summary.max_version.changed_since(&since_2)); Edits { + deleted_text: &self.deleted_text, cursor, undos: &self.undo_map, since, @@ -720,6 +940,7 @@ impl Buffer { if self.edits_since(since).next().is_some() { self.did_edit(was_dirty, ctx); + self.reparse(ctx); } } } @@ -741,17 +962,17 @@ impl Buffer { self.start_transaction_at(None, Instant::now())?; let new_text = new_text.into(); + let old_ranges = old_ranges + .into_iter() + .map(|range| range.start.to_offset(self)..range.end.to_offset(self)) + .collect::>>(); + let new_text = if new_text.len() > 0 { Some(new_text) } else { None }; - let old_ranges = old_ranges - .into_iter() - .map(|range| range.start.to_offset(self)..range.end.to_offset(self)) - .collect::>>(); - let has_new_text = new_text.is_some(); let ops = self.splice_fragments( old_ranges @@ -889,6 +1110,7 @@ impl Buffer { ctx.notify(); if self.edits_since(old_version).next().is_some() { self.did_edit(was_dirty, ctx); + self.reparse(ctx); } } @@ -1114,6 +1336,7 @@ impl Buffer { ctx.notify(); if self.edits_since(old_version).next().is_some() { self.did_edit(was_dirty, ctx); + self.reparse(ctx); } } @@ -1140,6 +1363,7 @@ impl Buffer { ctx.notify(); if self.edits_since(old_version).next().is_some() { self.did_edit(was_dirty, ctx); + self.reparse(ctx); } } @@ -1884,6 +2108,9 @@ impl Clone for Buffer { selections_last_update: self.selections_last_update.clone(), deferred_ops: self.deferred_ops.clone(), file: self.file.clone(), + language: self.language.clone(), + syntax_tree: Mutex::new(self.syntax_tree.lock().clone()), + is_parsing: false, deferred_replicas: self.deferred_replicas.clone(), replica_id: self.replica_id, local_clock: self.local_clock.clone(), @@ -1892,6 +2119,71 @@ impl Clone for Buffer { } } +pub struct Snapshot { + text: Rope, + tree: Option, + language: Option>, + query_cursor: QueryCursorHandle, +} + +impl Snapshot { + pub fn len(&self) -> usize { + self.text.len() + } + + pub fn text(&self) -> Rope { + self.text.clone() + } + + pub fn text_for_range(&self, range: Range) -> Chunks { + self.text.chunks_in_range(range) + } + + pub fn highlighted_text_for_range(&mut self, range: Range) -> HighlightedChunks { + let chunks = self.text.chunks_in_range(range.clone()); + if let Some((language, tree)) = self.language.as_ref().zip(self.tree.as_ref()) { + let captures = self.query_cursor.set_byte_range(range.clone()).captures( + &language.highlight_query, + tree.root_node(), + TextProvider(&self.text), + ); + + HighlightedChunks { + range, + chunks, + highlights: Some(Highlights { + captures, + next_capture: None, + stack: Default::default(), + theme_mapping: language.theme_mapping(), + }), + } + } else { + HighlightedChunks { + range, + chunks, + highlights: None, + } + } + } + + pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize { + self.text.clip_offset(offset, bias) + 
} + + pub fn clip_point(&self, point: Point, bias: Bias) -> Point { + self.text.clip_point(point, bias) + } + + pub fn to_offset(&self, point: Point) -> usize { + self.text.to_offset(point) + } + + pub fn to_point(&self, offset: usize) -> Point { + self.text.to_point(offset) + } +} + struct RopeBuilder<'a> { old_visible_cursor: rope::Cursor<'a>, old_deleted_cursor: rope::Cursor<'a>, @@ -1951,6 +2243,7 @@ pub enum Event { Saved, FileHandleChanged, Reloaded, + Reparsed, } impl Entity for Buffer { @@ -1964,7 +2257,7 @@ impl<'a, F: Fn(&FragmentSummary) -> bool> Iterator for Edits<'a, F> { let mut change: Option = None; while let Some(fragment) = self.cursor.item() { - let new_offset = *self.cursor.start(); + let new_offset = self.cursor.start().visible; let old_offset = (new_offset as isize - self.delta) as usize; if !fragment.was_visible(&self.since, &self.undos) && fragment.visible { @@ -1979,13 +2272,18 @@ impl<'a, F: Fn(&FragmentSummary) -> bool> Iterator for Edits<'a, F> { change = Some(Edit { old_range: old_offset..old_offset, new_range: new_offset..new_offset + fragment.len(), + old_lines: Point::zero(), }); self.delta += fragment.len() as isize; } } else if fragment.was_visible(&self.since, &self.undos) && !fragment.visible { + let deleted_start = self.cursor.start().deleted; + let old_lines = self.deleted_text.to_point(deleted_start + fragment.len()) + - self.deleted_text.to_point(deleted_start); if let Some(ref mut change) = change { if change.new_range.end == new_offset { change.old_range.end += fragment.len(); + change.old_lines += &old_lines; self.delta -= fragment.len() as isize; } else { break; @@ -1994,6 +2292,7 @@ impl<'a, F: Fn(&FragmentSummary) -> bool> Iterator for Edits<'a, F> { change = Some(Edit { old_range: old_offset..old_offset + fragment.len(), new_range: new_offset..new_offset, + old_lines, }); self.delta -= fragment.len() as isize; } @@ -2006,70 +2305,125 @@ impl<'a, F: Fn(&FragmentSummary) -> bool> Iterator for Edits<'a, F> { } } -// pub fn diff(a: &[u16], b: &[u16]) -> Vec { -// struct EditCollector<'a> { -// a: &'a [u16], -// b: &'a [u16], -// position: Point, -// changes: Vec, -// } -// -// impl<'a> diffs::Diff for EditCollector<'a> { -// type Error = (); -// -// fn equal(&mut self, old: usize, _: usize, len: usize) -> Result<(), ()> { -// self.position += &Text::extent(&self.a[old..old + len]); -// Ok(()) -// } -// -// fn delete(&mut self, old: usize, len: usize) -> Result<(), ()> { -// self.changes.push(Edit { -// range: self.position..self.position + &Text::extent(&self.a[old..old + len]), -// chars: Vec::new(), -// new_char_count: Point::zero(), -// }); -// Ok(()) -// } -// -// fn insert(&mut self, _: usize, new: usize, new_len: usize) -> Result<(), ()> { -// let new_char_count = Text::extent(&self.b[new..new + new_len]); -// self.changes.push(Edit { -// range: self.position..self.position, -// chars: Vec::from(&self.b[new..new + new_len]), -// new_char_count, -// }); -// self.position += &new_char_count; -// Ok(()) -// } -// -// fn replace( -// &mut self, -// old: usize, -// old_len: usize, -// new: usize, -// new_len: usize, -// ) -> Result<(), ()> { -// let old_extent = text::extent(&self.a[old..old + old_len]); -// let new_char_count = text::extent(&self.b[new..new + new_len]); -// self.changes.push(Edit { -// range: self.position..self.position + &old_extent, -// chars: Vec::from(&self.b[new..new + new_len]), -// new_char_count, -// }); -// self.position += &new_char_count; -// Ok(()) -// } -// } -// -// let mut collector = 
diffs::Replace::new(EditCollector { -// a, -// b, -// position: Point::zero(), -// changes: Vec::new(), -// }); -// diffs::myers::diff(&mut collector, a, 0, a.len(), b, 0, b.len()).unwrap(); -// collector.into_inner().changes -// } +struct ByteChunks<'a>(rope::Chunks<'a>); + +impl<'a> Iterator for ByteChunks<'a> { + type Item = &'a [u8]; + + fn next(&mut self) -> Option { + self.0.next().map(str::as_bytes) + } +} + +struct TextProvider<'a>(&'a Rope); + +impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> { + type I = ByteChunks<'a>; + + fn text(&mut self, node: tree_sitter::Node) -> Self::I { + ByteChunks(self.0.chunks_in_range(node.byte_range())) + } +} + +struct Highlights<'a> { + captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>, + next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>, + stack: Vec<(usize, StyleId)>, + theme_mapping: ThemeMap, +} + +pub struct HighlightedChunks<'a> { + range: Range, + chunks: Chunks<'a>, + highlights: Option>, +} + +impl<'a> HighlightedChunks<'a> { + pub fn seek(&mut self, offset: usize) { + self.range.start = offset; + self.chunks.seek(self.range.start); + if let Some(highlights) = self.highlights.as_mut() { + highlights + .stack + .retain(|(end_offset, _)| *end_offset > offset); + if let Some((mat, capture_ix)) = &highlights.next_capture { + let capture = mat.captures[*capture_ix as usize]; + if offset >= capture.node.start_byte() { + let next_capture_end = capture.node.end_byte(); + if offset < next_capture_end { + highlights.stack.push(( + next_capture_end, + highlights.theme_mapping.get(capture.index), + )); + } + highlights.next_capture.take(); + } + } + highlights.captures.set_byte_range(self.range.clone()); + } + } + + pub fn offset(&self) -> usize { + self.range.start + } +} + +impl<'a> Iterator for HighlightedChunks<'a> { + type Item = (&'a str, StyleId); + + fn next(&mut self) -> Option { + let mut next_capture_start = usize::MAX; + + if let Some(highlights) = self.highlights.as_mut() { + while let Some((parent_capture_end, _)) = highlights.stack.last() { + if *parent_capture_end <= self.range.start { + highlights.stack.pop(); + } else { + break; + } + } + + if highlights.next_capture.is_none() { + highlights.next_capture = highlights.captures.next(); + } + + while let Some((mat, capture_ix)) = highlights.next_capture.as_ref() { + let capture = mat.captures[*capture_ix as usize]; + if self.range.start < capture.node.start_byte() { + next_capture_start = capture.node.start_byte(); + break; + } else { + let style_id = highlights.theme_mapping.get(capture.index); + highlights.stack.push((capture.node.end_byte(), style_id)); + highlights.next_capture = highlights.captures.next(); + } + } + } + + if let Some(chunk) = self.chunks.peek() { + let chunk_start = self.range.start; + let mut chunk_end = (self.chunks.offset() + chunk.len()).min(next_capture_start); + let mut style_id = StyleId::default(); + if let Some((parent_capture_end, parent_style_id)) = + self.highlights.as_ref().and_then(|h| h.stack.last()) + { + chunk_end = chunk_end.min(*parent_capture_end); + style_id = *parent_style_id; + } + + let slice = + &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()]; + self.range.start = chunk_end; + if self.range.start == self.chunks.offset() + chunk.len() { + self.chunks.next().unwrap(); + } + + Some((slice, style_id)) + } else { + None + } + } +} #[derive(Ord, PartialOrd, Eq, PartialEq, Clone, Debug)] struct FragmentId(Arc<[u16]>); @@ -2342,11 +2696,11 @@ impl ToPoint for usize { mod tests { 
use super::*; use crate::{ - test::temp_tree, + test::{build_app_state, temp_tree}, util::RandomCharIter, worktree::{Worktree, WorktreeHandle}, }; - use gpui::App; + use gpui::{App, ModelHandle}; use rand::prelude::*; use serde_json::json; use std::{ @@ -2475,6 +2829,7 @@ mod tests { for Edit { old_range, new_range, + .. } in buffer.edits_since(old_buffer.version.clone()) { let old_len = old_range.end - old_range.start; @@ -2812,7 +3167,7 @@ mod tests { let file1 = app.update(|ctx| tree.file("file1", ctx)).await; let buffer1 = app.add_model(|ctx| { - Buffer::from_history(0, History::new("abc".into()), Some(file1), ctx) + Buffer::from_history(0, History::new("abc".into()), Some(file1), None, ctx) }); let events = Rc::new(RefCell::new(Vec::new())); @@ -2877,7 +3232,7 @@ mod tests { move |_, event, _| events.borrow_mut().push(event.clone()) }); - Buffer::from_history(0, History::new("abc".into()), Some(file2), ctx) + Buffer::from_history(0, History::new("abc".into()), Some(file2), None, ctx) }); fs::remove_file(dir.path().join("file2")).unwrap(); @@ -2896,7 +3251,7 @@ mod tests { move |_, event, _| events.borrow_mut().push(event.clone()) }); - Buffer::from_history(0, History::new("abc".into()), Some(file3), ctx) + Buffer::from_history(0, History::new("abc".into()), Some(file3), None, ctx) }); tree.flush_fs_events(&app).await; @@ -2923,7 +3278,13 @@ mod tests { let abs_path = dir.path().join("the-file"); let file = app.update(|ctx| tree.file("the-file", ctx)).await; let buffer = app.add_model(|ctx| { - Buffer::from_history(0, History::new(initial_contents.into()), Some(file), ctx) + Buffer::from_history( + 0, + History::new(initial_contents.into()), + Some(file), + None, + ctx, + ) }); // Add a cursor at the start of each row. @@ -3191,6 +3552,193 @@ mod tests { } } + #[gpui::test] + async fn test_reparse(mut ctx: gpui::TestAppContext) { + let app_state = ctx.read(build_app_state); + let rust_lang = app_state.language_registry.select_language("test.rs"); + assert!(rust_lang.is_some()); + + let buffer = ctx.add_model(|ctx| { + let text = "fn a() {}".into(); + let buffer = Buffer::from_history(0, History::new(text), None, rust_lang.cloned(), ctx); + assert!(buffer.is_parsing()); + assert!(buffer.syntax_tree().is_none()); + buffer + }); + + // Wait for the initial text to parse + buffer + .condition(&ctx, |buffer, _| !buffer.is_parsing()) + .await; + assert_eq!( + get_tree_sexp(&buffer, &ctx), + concat!( + "(source_file (function_item name: (identifier) ", + "parameters: (parameters) ", + "body: (block)))" + ) + ); + + // Perform some edits (add parameter and variable reference) + // Parsing doesn't begin until the transaction is complete + buffer.update(&mut ctx, |buf, ctx| { + buf.start_transaction(None).unwrap(); + + let offset = buf.text().find(")").unwrap(); + buf.edit(vec![offset..offset], "b: C", Some(ctx)).unwrap(); + assert!(!buf.is_parsing()); + + let offset = buf.text().find("}").unwrap(); + buf.edit(vec![offset..offset], " d; ", Some(ctx)).unwrap(); + assert!(!buf.is_parsing()); + + buf.end_transaction(None, Some(ctx)).unwrap(); + assert_eq!(buf.text(), "fn a(b: C) { d; }"); + assert!(buf.is_parsing()); + }); + buffer + .condition(&ctx, |buffer, _| !buffer.is_parsing()) + .await; + assert_eq!( + get_tree_sexp(&buffer, &ctx), + concat!( + "(source_file (function_item name: (identifier) ", + "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ", + "body: (block (identifier))))" + ) + ); + + // Perform a series of edits without waiting for the current 
parse to complete: + // * turn identifier into a field expression + // * turn field expression into a method call + // * add a turbofish to the method call + buffer.update(&mut ctx, |buf, ctx| { + let offset = buf.text().find(";").unwrap(); + buf.edit(vec![offset..offset], ".e", Some(ctx)).unwrap(); + assert_eq!(buf.text(), "fn a(b: C) { d.e; }"); + assert!(buf.is_parsing()); + }); + buffer.update(&mut ctx, |buf, ctx| { + let offset = buf.text().find(";").unwrap(); + buf.edit(vec![offset..offset], "(f)", Some(ctx)).unwrap(); + assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }"); + assert!(buf.is_parsing()); + }); + buffer.update(&mut ctx, |buf, ctx| { + let offset = buf.text().find("(f)").unwrap(); + buf.edit(vec![offset..offset], "::", Some(ctx)).unwrap(); + assert_eq!(buf.text(), "fn a(b: C) { d.e::(f); }"); + assert!(buf.is_parsing()); + }); + buffer + .condition(&ctx, |buffer, _| !buffer.is_parsing()) + .await; + assert_eq!( + get_tree_sexp(&buffer, &ctx), + concat!( + "(source_file (function_item name: (identifier) ", + "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ", + "body: (block (call_expression ", + "function: (generic_function ", + "function: (field_expression value: (identifier) field: (field_identifier)) ", + "type_arguments: (type_arguments (type_identifier))) ", + "arguments: (arguments (identifier))))))", + ) + ); + + buffer.update(&mut ctx, |buf, ctx| { + buf.undo(Some(ctx)); + assert_eq!(buf.text(), "fn a() {}"); + assert!(buf.is_parsing()); + }); + buffer + .condition(&ctx, |buffer, _| !buffer.is_parsing()) + .await; + assert_eq!( + get_tree_sexp(&buffer, &ctx), + concat!( + "(source_file (function_item name: (identifier) ", + "parameters: (parameters) ", + "body: (block)))" + ) + ); + + buffer.update(&mut ctx, |buf, ctx| { + buf.redo(Some(ctx)); + assert_eq!(buf.text(), "fn a(b: C) { d.e::(f); }"); + assert!(buf.is_parsing()); + }); + buffer + .condition(&ctx, |buffer, _| !buffer.is_parsing()) + .await; + assert_eq!( + get_tree_sexp(&buffer, &ctx), + concat!( + "(source_file (function_item name: (identifier) ", + "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ", + "body: (block (call_expression ", + "function: (generic_function ", + "function: (field_expression value: (identifier) field: (field_identifier)) ", + "type_arguments: (type_arguments (type_identifier))) ", + "arguments: (arguments (identifier))))))", + ) + ); + + fn get_tree_sexp(buffer: &ModelHandle, ctx: &gpui::TestAppContext) -> String { + buffer.read_with(ctx, |buffer, _| { + buffer.syntax_tree().unwrap().root_node().to_sexp() + }) + } + } + + #[gpui::test] + async fn test_enclosing_bracket_ranges(mut ctx: gpui::TestAppContext) { + use unindent::Unindent as _; + + let app_state = ctx.read(build_app_state); + let rust_lang = app_state.language_registry.select_language("test.rs"); + assert!(rust_lang.is_some()); + + let buffer = ctx.add_model(|ctx| { + let text = " + mod x { + mod y { + + } + } + " + .unindent() + .into(); + Buffer::from_history(0, History::new(text), None, rust_lang.cloned(), ctx) + }); + buffer + .condition(&ctx, |buffer, _| !buffer.is_parsing()) + .await; + buffer.read_with(&ctx, |buf, _| { + assert_eq!( + buf.enclosing_bracket_point_ranges(Point::new(1, 6)..Point::new(1, 6)), + Some(( + Point::new(0, 6)..Point::new(0, 7), + Point::new(4, 0)..Point::new(4, 1) + )) + ); + assert_eq!( + buf.enclosing_bracket_point_ranges(Point::new(1, 10)..Point::new(1, 10)), + Some(( + Point::new(1, 10)..Point::new(1, 11), + 
Point::new(3, 4)..Point::new(3, 5) + )) + ); + assert_eq!( + buf.enclosing_bracket_point_ranges(Point::new(3, 5)..Point::new(3, 5)), + Some(( + Point::new(1, 10)..Point::new(1, 11), + Point::new(3, 4)..Point::new(3, 5) + )) + ); + }); + } + impl Buffer { fn random_byte_range(&mut self, start_offset: usize, rng: &mut impl Rng) -> Range { let end = self.clip_offset(rng.gen_range(start_offset..=self.len()), Bias::Right); @@ -3337,6 +3885,17 @@ mod tests { .keys() .map(move |set_id| (*set_id, self.selection_ranges(*set_id).unwrap())) } + + pub fn enclosing_bracket_point_ranges( + &self, + range: Range, + ) -> Option<(Range, Range)> { + self.enclosing_bracket_ranges(range).map(|(start, end)| { + let point_start = start.start.to_point(self)..start.end.to_point(self); + let point_end = end.start.to_point(self)..end.end.to_point(self); + (point_start, point_end) + }) + } } impl Operation { diff --git a/zed/src/editor/buffer/point.rs b/zed/src/editor/buffer/point.rs index d4ecc69e0c67ec8942312f355de9f22e32e0d1e8..63deb052f61067b9a444b25dd724eb530a5b6a1c 100644 --- a/zed/src/editor/buffer/point.rs +++ b/zed/src/editor/buffer/point.rs @@ -10,6 +10,11 @@ pub struct Point { } impl Point { + pub const MAX: Self = Self { + row: u32::MAX, + column: u32::MAX, + }; + pub fn new(row: u32, column: u32) -> Self { Point { row, column } } @@ -98,3 +103,21 @@ impl Ord for Point { } } } + +impl Into for Point { + fn into(self) -> tree_sitter::Point { + tree_sitter::Point { + row: self.row as usize, + column: self.column as usize, + } + } +} + +impl From for Point { + fn from(point: tree_sitter::Point) -> Self { + Self { + row: point.row as u32, + column: point.column as u32, + } + } +} diff --git a/zed/src/editor/buffer/rope.rs b/zed/src/editor/buffer/rope.rs index 101620f6c269cde2600c2edddb1f2817c1356297..98b317c0ed29c8db0c143977faa513238083d28e 100644 --- a/zed/src/editor/buffer/rope.rs +++ b/zed/src/editor/buffer/rope.rs @@ -118,12 +118,12 @@ impl Rope { self.chunks_in_range(start..self.len()).flat_map(str::chars) } - pub fn chunks<'a>(&'a self) -> ChunksIter<'a> { + pub fn chunks<'a>(&'a self) -> Chunks<'a> { self.chunks_in_range(0..self.len()) } - pub fn chunks_in_range<'a>(&'a self, range: Range) -> ChunksIter<'a> { - ChunksIter::new(self, range) + pub fn chunks_in_range<'a>(&'a self, range: Range) -> Chunks<'a> { + Chunks::new(self, range) } pub fn to_point(&self, offset: usize) -> Point { @@ -268,12 +268,12 @@ impl<'a> Cursor<'a> { } } -pub struct ChunksIter<'a> { +pub struct Chunks<'a> { chunks: sum_tree::Cursor<'a, Chunk, usize, usize>, range: Range, } -impl<'a> ChunksIter<'a> { +impl<'a> Chunks<'a> { pub fn new(rope: &'a Rope, range: Range) -> Self { let mut chunks = rope.chunks.cursor(); chunks.seek(&range.start, SeekBias::Right, &()); @@ -284,11 +284,13 @@ impl<'a> ChunksIter<'a> { self.range.start.max(*self.chunks.start()) } - pub fn advance_to(&mut self, offset: usize) { + pub fn seek(&mut self, offset: usize) { if offset >= self.chunks.end() { self.chunks.seek_forward(&offset, SeekBias::Right, &()); - self.range.start = offset; + } else { + self.chunks.seek(&offset, SeekBias::Right, &()); } + self.range.start = offset; } pub fn peek(&self) -> Option<&'a str> { @@ -304,7 +306,7 @@ impl<'a> ChunksIter<'a> { } } -impl<'a> Iterator for ChunksIter<'a> { +impl<'a> Iterator for Chunks<'a> { type Item = &'a str; fn next(&mut self) -> Option { diff --git a/zed/src/editor/buffer/selection.rs b/zed/src/editor/buffer/selection.rs index 
e150ce0725966f49cdba352f008506eae9ce8913..2064b84cca4d9b57d18509fc431e65fb93933299 100644 --- a/zed/src/editor/buffer/selection.rs +++ b/zed/src/editor/buffer/selection.rs @@ -1,6 +1,6 @@ use crate::{ editor::{ - buffer::{Anchor, Buffer, Point, ToPoint}, + buffer::{Anchor, Buffer, Point, ToOffset as _, ToPoint as _}, display_map::DisplayMap, Bias, DisplayPoint, }, @@ -61,7 +61,7 @@ impl Selection { } } - pub fn range(&self, buffer: &Buffer) -> Range { + pub fn point_range(&self, buffer: &Buffer) -> Range { let start = self.start.to_point(buffer); let end = self.end.to_point(buffer); if self.reversed { @@ -71,6 +71,16 @@ impl Selection { } } + pub fn offset_range(&self, buffer: &Buffer) -> Range { + let start = self.start.to_offset(buffer); + let end = self.end.to_offset(buffer); + if self.reversed { + end..start + } else { + start..end + } + } + pub fn display_range(&self, map: &DisplayMap, app: &AppContext) -> Range { let start = self.start.to_display_point(map, app); let end = self.end.to_display_point(map, app); diff --git a/zed/src/editor/buffer_view.rs b/zed/src/editor/buffer_view.rs index 5f05defdf63cb112552189833055382b599c172c..2381a22ad87c3441f71bfc3ac1bb495e79b11273 100644 --- a/zed/src/editor/buffer_view.rs +++ b/zed/src/editor/buffer_view.rs @@ -2,7 +2,12 @@ use super::{ buffer, movement, Anchor, Bias, Buffer, BufferElement, DisplayMap, DisplayPoint, Point, Selection, SelectionGoal, SelectionSetId, ToOffset, ToPoint, }; -use crate::{settings::Settings, util::post_inc, workspace, worktree::FileHandle}; +use crate::{ + settings::{Settings, StyleId}, + util::post_inc, + workspace, + worktree::FileHandle, +}; use anyhow::Result; use gpui::{ color::ColorU, fonts::Properties as FontProperties, geometry::vector::Vector2F, @@ -161,6 +166,21 @@ pub fn init(app: &mut MutableAppContext) { "buffer:add_selection_below", Some("BufferView"), ), + Binding::new( + "alt-up", + "buffer:select_larger_syntax_node", + Some("BufferView"), + ), + Binding::new( + "alt-down", + "buffer:select_smaller_syntax_node", + Some("BufferView"), + ), + Binding::new( + "ctrl-m", + "buffer:move_to_enclosing_bracket", + Some("BufferView"), + ), Binding::new("pageup", "buffer:page_up", Some("BufferView")), Binding::new("pagedown", "buffer:page_down", Some("BufferView")), Binding::new("alt-cmd-[", "buffer:fold", Some("BufferView")), @@ -265,6 +285,18 @@ pub fn init(app: &mut MutableAppContext) { "buffer:add_selection_below", BufferView::add_selection_below, ); + app.add_action( + "buffer:select_larger_syntax_node", + BufferView::select_larger_syntax_node, + ); + app.add_action( + "buffer:select_smaller_syntax_node", + BufferView::select_smaller_syntax_node, + ); + app.add_action( + "buffer:move_to_enclosing_bracket", + BufferView::move_to_enclosing_bracket, + ); app.add_action("buffer:page_up", BufferView::page_up); app.add_action("buffer:page_down", BufferView::page_down); app.add_action("buffer:fold", BufferView::fold); @@ -295,6 +327,7 @@ pub struct BufferView { pending_selection: Option, next_selection_id: usize, add_selections_state: Option, + select_larger_syntax_node_stack: Vec>, scroll_position: Mutex, autoscroll_requested: Mutex, settings: watch::Receiver, @@ -354,6 +387,7 @@ impl BufferView { pending_selection: None, next_selection_id, add_selections_state: None, + select_larger_syntax_node_stack: Vec::new(), scroll_position: Mutex::new(Vector2F::zero()), autoscroll_requested: Mutex::new(false), settings, @@ -690,7 +724,7 @@ impl BufferView { { let buffer = self.buffer.read(ctx); for selection in &mut 
selections { - let range = selection.range(buffer); + let range = selection.point_range(buffer); if range.start == range.end { let head = selection .head() @@ -717,7 +751,7 @@ impl BufferView { { let buffer = self.buffer.read(ctx); for selection in &mut selections { - let range = selection.range(buffer); + let range = selection.point_range(buffer); if range.start == range.end { let head = selection .head() @@ -896,7 +930,7 @@ impl BufferView { let mut contiguous_selections = Vec::new(); while let Some(selection) = selections.next() { // Accumulate contiguous regions of rows that we want to move. - contiguous_selections.push(selection.range(buffer)); + contiguous_selections.push(selection.point_range(buffer)); let (mut buffer_rows, mut display_rows) = selection.buffer_rows_for_display_rows(false, &self.display_map, app); while let Some(next_selection) = selections.peek() { @@ -905,7 +939,7 @@ impl BufferView { if next_buffer_rows.start <= buffer_rows.end { buffer_rows.end = next_buffer_rows.end; display_rows.end = next_display_rows.end; - contiguous_selections.push(next_selection.range(buffer)); + contiguous_selections.push(next_selection.point_range(buffer)); selections.next().unwrap(); } else { break; @@ -980,7 +1014,7 @@ impl BufferView { let mut contiguous_selections = Vec::new(); while let Some(selection) = selections.next() { // Accumulate contiguous regions of rows that we want to move. - contiguous_selections.push(selection.range(buffer)); + contiguous_selections.push(selection.point_range(buffer)); let (mut buffer_rows, mut display_rows) = selection.buffer_rows_for_display_rows(false, &self.display_map, app); while let Some(next_selection) = selections.peek() { @@ -989,7 +1023,7 @@ impl BufferView { if next_buffer_rows.start <= buffer_rows.end { buffer_rows.end = next_buffer_rows.end; display_rows.end = next_display_rows.end; - contiguous_selections.push(next_selection.range(buffer)); + contiguous_selections.push(next_selection.point_range(buffer)); selections.next().unwrap(); } else { break; @@ -1613,7 +1647,7 @@ impl BufferView { let mut to_unfold = Vec::new(); let mut new_selections = Vec::new(); for selection in self.selections(app) { - let range = selection.range(buffer).sorted(); + let range = selection.point_range(buffer).sorted(); if range.start.row != range.end.row { new_selections.push(Selection { id: post_inc(&mut self.next_selection_id), @@ -1654,7 +1688,7 @@ impl BufferView { self.add_selection(false, ctx); } - pub fn add_selection(&mut self, above: bool, ctx: &mut ViewContext) { + fn add_selection(&mut self, above: bool, ctx: &mut ViewContext) { use super::RangeExt; let app = ctx.as_ref(); @@ -1746,6 +1780,77 @@ impl BufferView { } } + pub fn select_larger_syntax_node(&mut self, _: &(), ctx: &mut ViewContext) { + let app = ctx.as_ref(); + let buffer = self.buffer.read(app); + + let mut stack = mem::take(&mut self.select_larger_syntax_node_stack); + let mut selected_larger_node = false; + let old_selections = self.selections(app).to_vec(); + let mut new_selections = Vec::new(); + for selection in &old_selections { + let old_range = selection.start.to_offset(buffer)..selection.end.to_offset(buffer); + let mut new_range = old_range.clone(); + while let Some(containing_range) = buffer.range_for_syntax_ancestor(new_range.clone()) { + new_range = containing_range; + if !self.display_map.intersects_fold(new_range.start, app) + && !self.display_map.intersects_fold(new_range.end, app) + { + break; + } + } + + selected_larger_node |= new_range != old_range; + 
new_selections.push(Selection { + id: selection.id, + start: buffer.anchor_before(new_range.start), + end: buffer.anchor_before(new_range.end), + reversed: selection.reversed, + goal: SelectionGoal::None, + }); + } + + if selected_larger_node { + stack.push(old_selections); + self.update_selections(new_selections, true, ctx); + } + self.select_larger_syntax_node_stack = stack; + } + + pub fn select_smaller_syntax_node(&mut self, _: &(), ctx: &mut ViewContext) { + let mut stack = mem::take(&mut self.select_larger_syntax_node_stack); + if let Some(selections) = stack.pop() { + self.update_selections(selections, true, ctx); + } + self.select_larger_syntax_node_stack = stack; + } + + pub fn move_to_enclosing_bracket(&mut self, _: &(), ctx: &mut ViewContext) { + use super::RangeExt as _; + + let buffer = self.buffer.read(ctx.as_ref()); + let mut selections = self.selections(ctx.as_ref()).to_vec(); + for selection in &mut selections { + let selection_range = selection.offset_range(buffer); + if let Some((open_range, close_range)) = + buffer.enclosing_bracket_ranges(selection_range.clone()) + { + let close_range = close_range.to_inclusive(); + let destination = if close_range.contains(&selection_range.start) + && close_range.contains(&selection_range.end) + { + open_range.end + } else { + *close_range.start() + }; + selection.start = buffer.anchor_before(destination); + selection.end = selection.start.clone(); + } + } + + self.update_selections(selections, true, ctx); + } + fn build_columnar_selection( &mut self, row: u32, @@ -1860,6 +1965,7 @@ impl BufferView { } self.add_selections_state = None; + self.select_larger_syntax_node_stack.clear(); } fn start_transaction(&self, ctx: &mut ViewContext) { @@ -1987,7 +2093,7 @@ impl BufferView { let ranges = self .selections(ctx.as_ref()) .iter() - .map(|s| s.range(buffer).sorted()) + .map(|s| s.point_range(buffer).sorted()) .collect(); self.fold_ranges(ranges, ctx); } @@ -2069,10 +2175,7 @@ impl BufferView { let font_size = settings.buffer_font_size; let font_id = font_cache.select_font(settings.buffer_font_family, &FontProperties::new())?; - let digit_count = ((self.buffer.read(app).max_point().row + 1) as f32) - .log10() - .floor() as usize - + 1; + let digit_count = (self.buffer.read(app).row_count() as f32).log10().floor() as usize + 1; Ok(layout_cache .layout_str( @@ -2135,32 +2238,47 @@ impl BufferView { } let settings = self.settings.borrow(); - let font_id = - font_cache.select_font(settings.buffer_font_family, &FontProperties::new())?; let font_size = settings.buffer_font_size; + let font_family = settings.buffer_font_family; + let mut prev_font_properties = FontProperties::new(); + let mut prev_font_id = font_cache + .select_font(font_family, &prev_font_properties) + .unwrap(); let mut layouts = Vec::with_capacity(rows.len()); let mut line = String::new(); + let mut styles = Vec::new(); let mut row = rows.start; - let snapshot = self.display_map.snapshot(ctx); - let chunks = snapshot.chunks_at(DisplayPoint::new(rows.start, 0), ctx); - for (chunk_row, chunk_line) in chunks - .chain(Some("\n")) - .flat_map(|chunk| chunk.split("\n").enumerate()) - { - if chunk_row > 0 { - layouts.push(layout_cache.layout_str( - &line, - font_size, - &[(line.len(), font_id, ColorU::black())], - )); - line.clear(); - row += 1; - if row == rows.end { - break; + let mut snapshot = self.display_map.snapshot(ctx); + let chunks = snapshot.highlighted_chunks_for_rows(rows.clone()); + let theme = settings.theme.clone(); + + 'outer: for (chunk, style_ix) in 
chunks.chain(Some(("\n", StyleId::default()))) { + for (ix, line_chunk) in chunk.split('\n').enumerate() { + if ix > 0 { + layouts.push(layout_cache.layout_str(&line, font_size, &styles)); + line.clear(); + styles.clear(); + row += 1; + if row == rows.end { + break 'outer; + } + } + + if !line_chunk.is_empty() { + let (color, font_properties) = theme.syntax_style(style_ix); + // Avoid a lookup if the font properties match the previous ones. + let font_id = if font_properties == prev_font_properties { + prev_font_id + } else { + font_cache.select_font(font_family, &font_properties)? + }; + line.push_str(line_chunk); + styles.push((line_chunk.len(), font_id, color)); + prev_font_id = font_id; + prev_font_properties = font_properties; } } - line.push_str(chunk_line); } Ok(layouts) @@ -2246,6 +2364,7 @@ impl BufferView { buffer::Event::Saved => ctx.emit(Event::Saved), buffer::Event::FileHandleChanged => ctx.emit(Event::FileHandleChanged), buffer::Event::Reloaded => ctx.emit(Event::FileHandleChanged), + buffer::Event::Reparsed => {} } } } @@ -2359,7 +2478,12 @@ impl workspace::ItemView for BufferView { #[cfg(test)] mod tests { use super::*; - use crate::{editor::Point, settings, test::sample_text}; + use crate::{ + editor::Point, + settings, + test::{build_app_state, sample_text}, + }; + use buffer::History; use unindent::Unindent; #[gpui::test] @@ -3840,6 +3964,146 @@ mod tests { ); } + #[gpui::test] + async fn test_select_larger_smaller_syntax_node(mut app: gpui::TestAppContext) { + let app_state = app.read(build_app_state); + let lang = app_state.language_registry.select_language("z.rs"); + let text = r#" + use mod1::mod2::{mod3, mod4}; + + fn fn_1(param1: bool, param2: &str) { + let var1 = "text"; + } + "# + .unindent(); + let buffer = app.add_model(|ctx| { + let history = History::new(text.into()); + Buffer::from_history(0, history, None, lang.cloned(), ctx) + }); + let (_, view) = + app.add_window(|ctx| BufferView::for_buffer(buffer, app_state.settings, ctx)); + view.condition(&app, |view, ctx| !view.buffer.read(ctx).is_parsing()) + .await; + + view.update(&mut app, |view, ctx| { + view.select_display_ranges( + &[ + DisplayPoint::new(0, 25)..DisplayPoint::new(0, 25), + DisplayPoint::new(2, 24)..DisplayPoint::new(2, 12), + DisplayPoint::new(3, 18)..DisplayPoint::new(3, 18), + ], + ctx, + ) + .unwrap(); + view.select_larger_syntax_node(&(), ctx); + }); + assert_eq!( + view.read_with(&app, |view, ctx| view.selection_ranges(ctx)), + &[ + DisplayPoint::new(0, 23)..DisplayPoint::new(0, 27), + DisplayPoint::new(2, 35)..DisplayPoint::new(2, 7), + DisplayPoint::new(3, 15)..DisplayPoint::new(3, 21), + ] + ); + + view.update(&mut app, |view, ctx| { + view.select_larger_syntax_node(&(), ctx); + }); + assert_eq!( + view.read_with(&app, |view, ctx| view.selection_ranges(ctx)), + &[ + DisplayPoint::new(0, 16)..DisplayPoint::new(0, 28), + DisplayPoint::new(4, 1)..DisplayPoint::new(2, 0), + ] + ); + + view.update(&mut app, |view, ctx| { + view.select_larger_syntax_node(&(), ctx); + }); + assert_eq!( + view.read_with(&app, |view, ctx| view.selection_ranges(ctx)), + &[DisplayPoint::new(0, 0)..DisplayPoint::new(5, 0)] + ); + + // Trying to expand the selected syntax node one more time has no effect. 
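+                // (At this point the selection already covers the entire file, so there
+                // is no larger syntax node for `select_larger_syntax_node` to grow into.)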
+ view.update(&mut app, |view, ctx| { + view.select_larger_syntax_node(&(), ctx); + }); + assert_eq!( + view.read_with(&app, |view, ctx| view.selection_ranges(ctx)), + &[DisplayPoint::new(0, 0)..DisplayPoint::new(5, 0)] + ); + + view.update(&mut app, |view, ctx| { + view.select_smaller_syntax_node(&(), ctx); + }); + assert_eq!( + view.read_with(&app, |view, ctx| view.selection_ranges(ctx)), + &[ + DisplayPoint::new(0, 16)..DisplayPoint::new(0, 28), + DisplayPoint::new(4, 1)..DisplayPoint::new(2, 0), + ] + ); + + view.update(&mut app, |view, ctx| { + view.select_smaller_syntax_node(&(), ctx); + }); + assert_eq!( + view.read_with(&app, |view, ctx| view.selection_ranges(ctx)), + &[ + DisplayPoint::new(0, 23)..DisplayPoint::new(0, 27), + DisplayPoint::new(2, 35)..DisplayPoint::new(2, 7), + DisplayPoint::new(3, 15)..DisplayPoint::new(3, 21), + ] + ); + + view.update(&mut app, |view, ctx| { + view.select_smaller_syntax_node(&(), ctx); + }); + assert_eq!( + view.read_with(&app, |view, ctx| view.selection_ranges(ctx)), + &[ + DisplayPoint::new(0, 25)..DisplayPoint::new(0, 25), + DisplayPoint::new(2, 24)..DisplayPoint::new(2, 12), + DisplayPoint::new(3, 18)..DisplayPoint::new(3, 18), + ] + ); + + // Trying to shrink the selected syntax node one more time has no effect. + view.update(&mut app, |view, ctx| { + view.select_smaller_syntax_node(&(), ctx); + }); + assert_eq!( + view.read_with(&app, |view, ctx| view.selection_ranges(ctx)), + &[ + DisplayPoint::new(0, 25)..DisplayPoint::new(0, 25), + DisplayPoint::new(2, 24)..DisplayPoint::new(2, 12), + DisplayPoint::new(3, 18)..DisplayPoint::new(3, 18), + ] + ); + + // Ensure that we keep expanding the selection if the larger selection starts or ends within + // a fold. + view.update(&mut app, |view, ctx| { + view.fold_ranges( + vec![ + Point::new(0, 21)..Point::new(0, 24), + Point::new(3, 20)..Point::new(3, 22), + ], + ctx, + ); + view.select_larger_syntax_node(&(), ctx); + }); + assert_eq!( + view.read_with(&app, |view, ctx| view.selection_ranges(ctx)), + &[ + DisplayPoint::new(0, 16)..DisplayPoint::new(0, 28), + DisplayPoint::new(2, 35)..DisplayPoint::new(2, 7), + DisplayPoint::new(3, 4)..DisplayPoint::new(3, 23), + ] + ); + } + impl BufferView { fn selection_ranges(&self, app: &AppContext) -> Vec> { self.selections_in_range(DisplayPoint::zero()..self.max_point(app), app) diff --git a/zed/src/editor/display_map/fold_map.rs b/zed/src/editor/display_map/fold_map.rs index 4cfcd8933704892bf8e831df4d03bceea63b2d70..3f8c8be58045e2f98197921ddde4efcbc675511d 100644 --- a/zed/src/editor/display_map/fold_map.rs +++ b/zed/src/editor/display_map/fold_map.rs @@ -4,6 +4,7 @@ use super::{ }; use crate::{ editor::buffer, + settings::StyleId, sum_tree::{self, Cursor, FilterCursor, SeekBias, SumTree}, time, }; @@ -45,7 +46,7 @@ impl FoldMap { pub fn snapshot(&self, ctx: &AppContext) -> FoldMapSnapshot { FoldMapSnapshot { transforms: self.sync(ctx).clone(), - buffer: self.buffer.clone(), + buffer: self.buffer.read(ctx).snapshot(), } } @@ -100,6 +101,7 @@ impl FoldMap { edits.push(Edit { old_range: range.clone(), new_range: range.clone(), + ..Default::default() }); } } @@ -144,6 +146,7 @@ impl FoldMap { edits.push(Edit { old_range: offset_range.clone(), new_range: offset_range, + ..Default::default() }); fold_ixs_to_delete.push(*folds_cursor.start()); folds_cursor.next(); @@ -189,6 +192,18 @@ impl FoldMap { }) } + pub fn intersects_fold(&self, offset: T, ctx: &AppContext) -> bool + where + T: ToOffset, + { + let buffer = self.buffer.read(ctx); + let offset = 
offset.to_offset(buffer); + let transforms = self.sync(ctx); + let mut cursor = transforms.cursor::(); + cursor.seek(&offset, SeekBias::Right, &()); + cursor.item().map_or(false, |t| t.display_text.is_some()) + } + pub fn is_line_folded(&self, display_row: u32, ctx: &AppContext) -> bool { let transforms = self.sync(ctx); let mut cursor = transforms.cursor::(); @@ -207,11 +222,11 @@ impl FoldMap { } pub fn to_buffer_offset(&self, point: DisplayPoint, ctx: &AppContext) -> usize { - self.snapshot(ctx).to_buffer_offset(point, ctx) + self.snapshot(ctx).to_buffer_offset(point) } pub fn to_display_offset(&self, point: DisplayPoint, ctx: &AppContext) -> DisplayOffset { - self.snapshot(ctx).to_display_offset(point, ctx) + self.snapshot(ctx).to_display_offset(point) } pub fn to_buffer_point(&self, display_point: DisplayPoint, ctx: &AppContext) -> Point { @@ -391,7 +406,7 @@ impl FoldMap { pub struct FoldMapSnapshot { transforms: SumTree, - buffer: ModelHandle, + buffer: buffer::Snapshot, } impl FoldMapSnapshot { @@ -410,30 +425,49 @@ impl FoldMapSnapshot { } } - pub fn chunks_at<'a>(&'a self, offset: DisplayOffset, ctx: &'a AppContext) -> Chunks<'a> { + pub fn max_point(&self) -> DisplayPoint { + DisplayPoint(self.transforms.summary().display.lines) + } + + pub fn chunks_at(&self, offset: DisplayOffset) -> Chunks { let mut transform_cursor = self.transforms.cursor::(); transform_cursor.seek(&offset, SeekBias::Right, &()); let overshoot = offset.0 - transform_cursor.start().display.bytes; let buffer_offset = transform_cursor.start().buffer.bytes + overshoot; - let buffer = self.buffer.read(ctx); - let rope_cursor = buffer.text_for_range(buffer_offset..buffer.len()); Chunks { transform_cursor, buffer_offset, - buffer_chunks: rope_cursor, + buffer_chunks: self.buffer.text_for_range(buffer_offset..self.buffer.len()), } } - pub fn chars_at<'a>( - &'a self, - point: DisplayPoint, - ctx: &'a AppContext, - ) -> impl Iterator + 'a { - let offset = self.to_display_offset(point, ctx); - self.chunks_at(offset, ctx).flat_map(str::chars) + pub fn highlighted_chunks(&mut self, range: Range) -> HighlightedChunks { + let mut transform_cursor = self.transforms.cursor::(); + + transform_cursor.seek(&range.end, SeekBias::Right, &()); + let overshoot = range.end.0 - transform_cursor.start().display.bytes; + let buffer_end = transform_cursor.start().buffer.bytes + overshoot; + + transform_cursor.seek(&range.start, SeekBias::Right, &()); + let overshoot = range.start.0 - transform_cursor.start().display.bytes; + let buffer_start = transform_cursor.start().buffer.bytes + overshoot; + + HighlightedChunks { + transform_cursor, + buffer_offset: buffer_start, + buffer_chunks: self + .buffer + .highlighted_text_for_range(buffer_start..buffer_end), + buffer_chunk: None, + } } - pub fn to_display_offset(&self, point: DisplayPoint, ctx: &AppContext) -> DisplayOffset { + pub fn chars_at<'a>(&'a self, point: DisplayPoint) -> impl Iterator + 'a { + let offset = self.to_display_offset(point); + self.chunks_at(offset).flat_map(str::chars) + } + + pub fn to_display_offset(&self, point: DisplayPoint) -> DisplayOffset { let mut cursor = self.transforms.cursor::(); cursor.seek(&point, SeekBias::Right, &()); let overshoot = point.0 - cursor.start().display.lines; @@ -441,27 +475,24 @@ impl FoldMapSnapshot { if !overshoot.is_zero() { let transform = cursor.item().expect("display point out of range"); assert!(transform.display_text.is_none()); - let end_buffer_offset = - (cursor.start().buffer.lines + 
overshoot).to_offset(self.buffer.read(ctx)); + let end_buffer_offset = self + .buffer + .to_offset(cursor.start().buffer.lines + overshoot); offset += end_buffer_offset - cursor.start().buffer.bytes; } DisplayOffset(offset) } - pub fn to_buffer_offset(&self, point: DisplayPoint, ctx: &AppContext) -> usize { + pub fn to_buffer_offset(&self, point: DisplayPoint) -> usize { let mut cursor = self.transforms.cursor::(); cursor.seek(&point, SeekBias::Right, &()); let overshoot = point.0 - cursor.start().display.lines; - (cursor.start().buffer.lines + overshoot).to_offset(self.buffer.read(ctx)) + self.buffer + .to_offset(cursor.start().buffer.lines + overshoot) } #[cfg(test)] - pub fn clip_offset( - &self, - offset: DisplayOffset, - bias: Bias, - ctx: &AppContext, - ) -> DisplayOffset { + pub fn clip_offset(&self, offset: DisplayOffset, bias: Bias) -> DisplayOffset { let mut cursor = self.transforms.cursor::(); cursor.seek(&offset, SeekBias::Right, &()); if let Some(transform) = cursor.item() { @@ -475,7 +506,7 @@ impl FoldMapSnapshot { } else { let overshoot = offset.0 - transform_start; let buffer_offset = cursor.start().buffer.bytes + overshoot; - let clipped_buffer_offset = self.buffer.read(ctx).clip_offset(buffer_offset, bias); + let clipped_buffer_offset = self.buffer.clip_offset(buffer_offset, bias); DisplayOffset( (offset.0 as isize + (clipped_buffer_offset as isize - buffer_offset as isize)) as usize, @@ -486,7 +517,7 @@ impl FoldMapSnapshot { } } - pub fn clip_point(&self, point: DisplayPoint, bias: Bias, ctx: &AppContext) -> DisplayPoint { + pub fn clip_point(&self, point: DisplayPoint, bias: Bias) -> DisplayPoint { let mut cursor = self.transforms.cursor::(); cursor.seek(&point, SeekBias::Right, &()); if let Some(transform) = cursor.item() { @@ -500,8 +531,7 @@ impl FoldMapSnapshot { } else { let overshoot = point.0 - transform_start; let buffer_position = cursor.start().buffer.lines + overshoot; - let clipped_buffer_position = - self.buffer.read(ctx).clip_point(buffer_position, bias); + let clipped_buffer_position = self.buffer.clip_point(buffer_position, bias); DisplayPoint::new( point.row(), ((point.column() as i32) + clipped_buffer_position.column as i32 @@ -667,7 +697,7 @@ impl<'a> Iterator for BufferRows<'a> { pub struct Chunks<'a> { transform_cursor: Cursor<'a, Transform, DisplayOffset, TransformSummary>, - buffer_chunks: buffer::ChunksIter<'a>, + buffer_chunks: buffer::Chunks<'a>, buffer_offset: usize, } @@ -685,7 +715,7 @@ impl<'a> Iterator for Chunks<'a> { // advance the transform and buffer cursors to the end of the fold. 
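+        // Unlike the old `advance_to`, the rope cursor's `seek` can move backwards as
+        // well as forwards, so the target offset no longer needs to be ahead of the cursor.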
if let Some(display_text) = transform.display_text { self.buffer_offset += transform.summary.buffer.bytes; - self.buffer_chunks.advance_to(self.buffer_offset); + self.buffer_chunks.seek(self.buffer_offset); while self.buffer_offset >= self.transform_cursor.end().buffer.bytes && self.transform_cursor.item().is_some() @@ -718,6 +748,70 @@ impl<'a> Iterator for Chunks<'a> { } } +pub struct HighlightedChunks<'a> { + transform_cursor: Cursor<'a, Transform, DisplayOffset, TransformSummary>, + buffer_chunks: buffer::HighlightedChunks<'a>, + buffer_chunk: Option<(usize, &'a str, StyleId)>, + buffer_offset: usize, +} + +impl<'a> Iterator for HighlightedChunks<'a> { + type Item = (&'a str, StyleId); + + fn next(&mut self) -> Option { + let transform = if let Some(item) = self.transform_cursor.item() { + item + } else { + return None; + }; + + // If we're in a fold, then return the fold's display text and + // advance the transform and buffer cursors to the end of the fold. + if let Some(display_text) = transform.display_text { + self.buffer_chunk.take(); + self.buffer_offset += transform.summary.buffer.bytes; + self.buffer_chunks.seek(self.buffer_offset); + + while self.buffer_offset >= self.transform_cursor.end().buffer.bytes + && self.transform_cursor.item().is_some() + { + self.transform_cursor.next(); + } + + return Some((display_text, StyleId::default())); + } + + // Retrieve a chunk from the current location in the buffer. + if self.buffer_chunk.is_none() { + let chunk_offset = self.buffer_chunks.offset(); + self.buffer_chunk = self + .buffer_chunks + .next() + .map(|(chunk, capture_ix)| (chunk_offset, chunk, capture_ix)); + } + + // Otherwise, take a chunk from the buffer's text. + if let Some((chunk_offset, mut chunk, capture_ix)) = self.buffer_chunk { + let offset_in_chunk = self.buffer_offset - chunk_offset; + chunk = &chunk[offset_in_chunk..]; + + // Truncate the chunk so that it ends at the next fold. 
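+            // If the chunk reaches the fold boundary, advance to the next transform;
+            // otherwise the whole chunk is consumed and the next call fetches a new one.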
+ let region_end = self.transform_cursor.end().buffer.bytes - self.buffer_offset; + if chunk.len() >= region_end { + chunk = &chunk[0..region_end]; + self.transform_cursor.next(); + } else { + self.buffer_chunk.take(); + } + + self.buffer_offset += chunk.len(); + return Some((chunk, capture_ix)); + } + + None + } +} + impl<'a> sum_tree::Dimension<'a, TransformSummary> for DisplayPoint { fn add_summary(&mut self, summary: &'a TransformSummary) { self.0 += &summary.display.lines; @@ -1046,11 +1140,10 @@ mod tests { let offset = map.snapshot(app.as_ref()).clip_offset( DisplayOffset(rng.gen_range(0..=map.len(app.as_ref()))), Bias::Right, - app.as_ref(), ); assert_eq!( map.snapshot(app.as_ref()) - .chunks_at(offset, app.as_ref()) + .chunks_at(offset) .collect::(), &expected_text[offset.0..], ); @@ -1133,9 +1226,7 @@ mod tests { impl FoldMap { fn text(&self, app: &AppContext) -> String { - self.snapshot(app) - .chunks_at(DisplayOffset(0), app) - .collect() + self.snapshot(app).chunks_at(DisplayOffset(0)).collect() } fn merged_fold_ranges(&self, app: &AppContext) -> Vec> { diff --git a/zed/src/editor/display_map/mod.rs b/zed/src/editor/display_map/mod.rs index b2874a5805822830fbcd3ebf9ae64a688ac46c90..4b5fcd36abe8f6b84fda783e5d6f1e580f5db730 100644 --- a/zed/src/editor/display_map/mod.rs +++ b/zed/src/editor/display_map/mod.rs @@ -1,10 +1,12 @@ mod fold_map; +use crate::settings::StyleId; + use super::{buffer, Anchor, Bias, Buffer, Edit, Point, ToOffset, ToPoint}; pub use fold_map::BufferRows; use fold_map::{FoldMap, FoldMapSnapshot}; use gpui::{AppContext, ModelHandle}; -use std::ops::Range; +use std::{mem, ops::Range}; pub struct DisplayMap { buffer: ModelHandle, @@ -55,21 +57,23 @@ impl DisplayMap { self.fold_map.unfold(ranges, ctx) } + pub fn intersects_fold(&self, offset: T, ctx: &AppContext) -> bool { + self.fold_map.intersects_fold(offset, ctx) + } + pub fn is_line_folded(&self, display_row: u32, ctx: &AppContext) -> bool { self.fold_map.is_line_folded(display_row, ctx) } pub fn text(&self, ctx: &AppContext) -> String { - self.snapshot(ctx) - .chunks_at(DisplayPoint::zero(), ctx) - .collect() + self.snapshot(ctx).chunks_at(DisplayPoint::zero()).collect() } pub fn line(&self, display_row: u32, ctx: &AppContext) -> String { let mut result = String::new(); for chunk in self .snapshot(ctx) - .chunks_at(DisplayPoint::new(display_row, 0), ctx) + .chunks_at(DisplayPoint::new(display_row, 0)) { if let Some(ix) = chunk.find('\n') { result.push_str(&chunk[0..ix]); @@ -86,7 +90,7 @@ impl DisplayMap { let mut is_blank = true; for c in self .snapshot(ctx) - .chars_at(DisplayPoint::new(display_row, 0), ctx) + .chars_at(DisplayPoint::new(display_row, 0)) { if c == ' ' { indent += 1; @@ -104,9 +108,8 @@ impl DisplayMap { .column() } - // TODO - make this delegate to the DisplayMapSnapshot pub fn max_point(&self, ctx: &AppContext) -> DisplayPoint { - self.fold_map.max_point(ctx).expand_tabs(self, ctx) + self.snapshot(ctx).max_point().expand_tabs(self, ctx) } pub fn longest_row(&self, ctx: &AppContext) -> u32 { @@ -136,12 +139,15 @@ impl DisplayMapSnapshot { self.folds_snapshot.buffer_rows(start_row) } - pub fn chunks_at<'a>(&'a self, point: DisplayPoint, app: &'a AppContext) -> Chunks<'a> { - let (point, expanded_char_column, to_next_stop) = - self.collapse_tabs(point, Bias::Left, app); + pub fn max_point(&self) -> DisplayPoint { + self.expand_tabs(self.folds_snapshot.max_point()) + } + + pub fn chunks_at(&self, point: DisplayPoint) -> Chunks { + let (point, expanded_char_column, to_next_stop) = 
self.collapse_tabs(point, Bias::Left); let fold_chunks = self .folds_snapshot - .chunks_at(self.folds_snapshot.to_display_offset(point, app), app); + .chunks_at(self.folds_snapshot.to_display_offset(point)); Chunks { fold_chunks, column: expanded_char_column, @@ -151,18 +157,28 @@ impl DisplayMapSnapshot { } } - pub fn chars_at<'a>( - &'a self, - point: DisplayPoint, - app: &'a AppContext, - ) -> impl Iterator + 'a { - self.chunks_at(point, app).flat_map(str::chars) + pub fn highlighted_chunks_for_rows(&mut self, rows: Range) -> HighlightedChunks { + let start = DisplayPoint::new(rows.start, 0); + let start = self.folds_snapshot.to_display_offset(start); + let end = DisplayPoint::new(rows.end, 0).min(self.max_point()); + let end = self.folds_snapshot.to_display_offset(end); + HighlightedChunks { + fold_chunks: self.folds_snapshot.highlighted_chunks(start..end), + column: 0, + tab_size: self.tab_size, + chunk: "", + style_id: Default::default(), + } } - pub fn column_to_chars(&self, display_row: u32, target: u32, ctx: &AppContext) -> u32 { + pub fn chars_at<'a>(&'a self, point: DisplayPoint) -> impl Iterator + 'a { + self.chunks_at(point).flat_map(str::chars) + } + + pub fn column_to_chars(&self, display_row: u32, target: u32) -> u32 { let mut count = 0; let mut column = 0; - for c in self.chars_at(DisplayPoint::new(display_row, 0), ctx) { + for c in self.chars_at(DisplayPoint::new(display_row, 0)) { if column >= target { break; } @@ -172,10 +188,10 @@ impl DisplayMapSnapshot { count } - pub fn column_from_chars(&self, display_row: u32, char_count: u32, ctx: &AppContext) -> u32 { + pub fn column_from_chars(&self, display_row: u32, char_count: u32) -> u32 { let mut count = 0; let mut column = 0; - for c in self.chars_at(DisplayPoint::new(display_row, 0), ctx) { + for c in self.chars_at(DisplayPoint::new(display_row, 0)) { if c == '\n' || count >= char_count { break; } @@ -185,32 +201,26 @@ impl DisplayMapSnapshot { column } - pub fn clip_point(&self, point: DisplayPoint, bias: Bias, ctx: &AppContext) -> DisplayPoint { + pub fn clip_point(&self, point: DisplayPoint, bias: Bias) -> DisplayPoint { self.expand_tabs( self.folds_snapshot - .clip_point(self.collapse_tabs(point, bias, ctx).0, bias, ctx), - ctx, + .clip_point(self.collapse_tabs(point, bias).0, bias), ) } - fn expand_tabs(&self, mut point: DisplayPoint, ctx: &AppContext) -> DisplayPoint { + fn expand_tabs(&self, mut point: DisplayPoint) -> DisplayPoint { let chars = self .folds_snapshot - .chars_at(DisplayPoint(Point::new(point.row(), 0)), ctx); + .chars_at(DisplayPoint(Point::new(point.row(), 0))); let expanded = expand_tabs(chars, point.column() as usize, self.tab_size); *point.column_mut() = expanded as u32; point } - fn collapse_tabs( - &self, - mut point: DisplayPoint, - bias: Bias, - ctx: &AppContext, - ) -> (DisplayPoint, usize, usize) { + fn collapse_tabs(&self, mut point: DisplayPoint, bias: Bias) -> (DisplayPoint, usize, usize) { let chars = self .folds_snapshot - .chars_at(DisplayPoint(Point::new(point.row(), 0)), ctx); + .chars_at(DisplayPoint(Point::new(point.row(), 0))); let expanded = point.column() as usize; let (collapsed, expanded_char_column, to_next_stop) = collapse_tabs(chars, expanded, bias, self.tab_size); @@ -258,11 +268,11 @@ impl DisplayPoint { } fn expand_tabs(self, map: &DisplayMap, ctx: &AppContext) -> Self { - map.snapshot(ctx).expand_tabs(self, ctx) + map.snapshot(ctx).expand_tabs(self) } fn collapse_tabs(self, map: &DisplayMap, bias: Bias, ctx: &AppContext) -> Self { - 
map.snapshot(ctx).collapse_tabs(self, bias, ctx).0 + map.snapshot(ctx).collapse_tabs(self, bias).0 } } @@ -270,7 +280,7 @@ impl Point { pub fn to_display_point(self, map: &DisplayMap, ctx: &AppContext) -> DisplayPoint { let mut display_point = map.fold_map.to_display_point(self, ctx); let snapshot = map.fold_map.snapshot(ctx); - let chars = snapshot.chars_at(DisplayPoint::new(display_point.row(), 0), ctx); + let chars = snapshot.chars_at(DisplayPoint::new(display_point.row(), 0)); *display_point.column_mut() = expand_tabs(chars, display_point.column() as usize, map.tab_size) as u32; display_point @@ -336,6 +346,50 @@ impl<'a> Iterator for Chunks<'a> { } } +pub struct HighlightedChunks<'a> { + fold_chunks: fold_map::HighlightedChunks<'a>, + chunk: &'a str, + style_id: StyleId, + column: usize, + tab_size: usize, +} + +impl<'a> Iterator for HighlightedChunks<'a> { + type Item = (&'a str, StyleId); + + fn next(&mut self) -> Option { + if self.chunk.is_empty() { + if let Some((chunk, style_id)) = self.fold_chunks.next() { + self.chunk = chunk; + self.style_id = style_id; + } else { + return None; + } + } + + for (ix, c) in self.chunk.char_indices() { + match c { + '\t' => { + if ix > 0 { + let (prefix, suffix) = self.chunk.split_at(ix); + self.chunk = suffix; + return Some((prefix, self.style_id)); + } else { + self.chunk = &self.chunk[1..]; + let len = self.tab_size - self.column % self.tab_size; + self.column += len; + return Some((&SPACES[0..len], self.style_id)); + } + } + '\n' => self.column = 0, + _ => self.column += 1, + } + } + + Some((mem::take(&mut self.chunk), mem::take(&mut self.style_id))) + } +} + pub fn expand_tabs(chars: impl Iterator, column: usize, tab_size: usize) -> usize { let mut expanded_chars = 0; let mut expanded_bytes = 0; @@ -400,7 +454,13 @@ pub fn collapse_tabs( #[cfg(test)] mod tests { use super::*; - use crate::test::*; + use crate::{ + language::{Language, LanguageConfig}, + settings::Theme, + test::*, + }; + use buffer::History; + use std::sync::Arc; #[gpui::test] fn test_chunks_at(app: &mut gpui::MutableAppContext) { @@ -423,24 +483,125 @@ mod tests { assert_eq!( &map.snapshot(app.as_ref()) - .chunks_at(DisplayPoint::new(1, 0), app.as_ref()) + .chunks_at(DisplayPoint::new(1, 0)) .collect::()[0..10], " b bb" ); assert_eq!( &map.snapshot(app.as_ref()) - .chunks_at(DisplayPoint::new(1, 2), app.as_ref()) + .chunks_at(DisplayPoint::new(1, 2)) .collect::()[0..10], " b bbbb" ); assert_eq!( &map.snapshot(app.as_ref()) - .chunks_at(DisplayPoint::new(1, 6), app.as_ref()) + .chunks_at(DisplayPoint::new(1, 6)) .collect::()[0..13], " bbbbb\nc c" ); } + #[gpui::test] + async fn test_highlighted_chunks_at(mut app: gpui::TestAppContext) { + use unindent::Unindent as _; + + let grammar = tree_sitter_rust::language(); + let text = r#" + fn outer() {} + + mod module { + fn inner() {} + }"# + .unindent(); + let highlight_query = tree_sitter::Query::new( + grammar, + r#" + (mod_item name: (identifier) body: _ @mod.body) + (function_item name: (identifier) @fn.name)"#, + ) + .unwrap(); + let theme = Theme::parse( + r#" + [syntax] + "mod.body" = 0xff0000 + "fn.name" = 0x00ff00"#, + ) + .unwrap(); + let lang = Arc::new(Language { + config: LanguageConfig { + name: "Test".to_string(), + path_suffixes: vec![".test".to_string()], + ..Default::default() + }, + grammar: grammar.clone(), + highlight_query, + brackets_query: tree_sitter::Query::new(grammar, "").unwrap(), + theme_mapping: Default::default(), + }); + lang.set_theme(&theme); + + let buffer = app.add_model(|ctx| { + 
Buffer::from_history(0, History::new(text.into()), None, Some(lang), ctx) + }); + buffer.condition(&app, |buf, _| !buf.is_parsing()).await; + + let mut map = app.read(|ctx| DisplayMap::new(buffer, 2, ctx)); + assert_eq!( + app.read(|ctx| highlighted_chunks(0..5, &map, &theme, ctx)), + vec![ + ("fn ".to_string(), None), + ("outer".to_string(), Some("fn.name")), + ("() {}\n\nmod module ".to_string(), None), + ("{\n fn ".to_string(), Some("mod.body")), + ("inner".to_string(), Some("fn.name")), + ("() {}\n}".to_string(), Some("mod.body")), + ] + ); + assert_eq!( + app.read(|ctx| highlighted_chunks(3..5, &map, &theme, ctx)), + vec![ + (" fn ".to_string(), Some("mod.body")), + ("inner".to_string(), Some("fn.name")), + ("() {}\n}".to_string(), Some("mod.body")), + ] + ); + + app.read(|ctx| map.fold(vec![Point::new(0, 6)..Point::new(3, 2)], ctx)); + assert_eq!( + app.read(|ctx| highlighted_chunks(0..2, &map, &theme, ctx)), + vec![ + ("fn ".to_string(), None), + ("out".to_string(), Some("fn.name")), + ("…".to_string(), None), + (" fn ".to_string(), Some("mod.body")), + ("inner".to_string(), Some("fn.name")), + ("() {}\n}".to_string(), Some("mod.body")), + ] + ); + + fn highlighted_chunks<'a>( + rows: Range, + map: &DisplayMap, + theme: &'a Theme, + ctx: &AppContext, + ) -> Vec<(String, Option<&'a str>)> { + let mut chunks: Vec<(String, Option<&str>)> = Vec::new(); + for (chunk, style_id) in map.snapshot(ctx).highlighted_chunks_for_rows(rows) { + let style_name = theme.syntax_style_name(style_id); + if let Some((last_chunk, last_style_name)) = chunks.last_mut() { + if style_name == *last_style_name { + last_chunk.push_str(chunk); + } else { + chunks.push((chunk.to_string(), style_name)); + } + } else { + chunks.push((chunk.to_string(), style_name)); + } + } + chunks + } + } + #[gpui::test] fn test_clip_point(app: &mut gpui::MutableAppContext) { use Bias::{Left, Right}; @@ -470,7 +631,7 @@ mod tests { ), ] { assert_eq!( - map.clip_point(DisplayPoint::new(1, input_column as u32), bias, ctx), + map.clip_point(DisplayPoint::new(1, input_column as u32), bias), DisplayPoint::new(1, output_column as u32), "clip_point(({}, {}))", 1, @@ -520,7 +681,7 @@ mod tests { ); assert_eq!( map.snapshot(ctx) - .chunks_at(DisplayPoint::new(0, "✅ ".len() as u32), ctx) + .chunks_at(DisplayPoint::new(0, "✅ ".len() as u32)) .collect::(), " α\nβ \n🏀β γ" ); @@ -534,26 +695,20 @@ mod tests { ); assert_eq!( map.snapshot(ctx) - .chunks_at(DisplayPoint::new(0, "✅ ".len() as u32), ctx) + .chunks_at(DisplayPoint::new(0, "✅ ".len() as u32)) .collect::(), " α\nβ \n🏀β γ" ); // Clipping display points inside of multi-byte characters assert_eq!( - map.snapshot(ctx).clip_point( - DisplayPoint::new(0, "✅".len() as u32 - 1), - Bias::Left, - ctx - ), + map.snapshot(ctx) + .clip_point(DisplayPoint::new(0, "✅".len() as u32 - 1), Bias::Left), DisplayPoint::new(0, 0) ); assert_eq!( - map.snapshot(ctx).clip_point( - DisplayPoint::new(0, "✅".len() as u32 - 1), - Bias::Right, - ctx - ), + map.snapshot(ctx) + .clip_point(DisplayPoint::new(0, "✅".len() as u32 - 1), Bias::Right), DisplayPoint::new(0, "✅".len() as u32) ); } diff --git a/zed/src/editor/mod.rs b/zed/src/editor/mod.rs index d3c892367471b6a6a2aa46eabef36020853d5f2f..dc81ffbfbc8eab82a27496e0d4007462a593210c 100644 --- a/zed/src/editor/mod.rs +++ b/zed/src/editor/mod.rs @@ -9,7 +9,10 @@ pub use buffer_element::*; pub use buffer_view::*; pub use display_map::DisplayPoint; use display_map::*; -use std::{cmp, ops::Range}; +use std::{ + cmp, + ops::{Range, RangeInclusive}, +}; #[derive(Copy, 
Clone)] pub enum Bias { @@ -19,10 +22,15 @@ pub enum Bias { trait RangeExt { fn sorted(&self) -> Range; + fn to_inclusive(&self) -> RangeInclusive; } impl RangeExt for Range { fn sorted(&self) -> Self { cmp::min(&self.start, &self.end).clone()..cmp::max(&self.start, &self.end).clone() } + + fn to_inclusive(&self) -> RangeInclusive { + self.start.clone()..=self.end.clone() + } } diff --git a/zed/src/editor/movement.rs b/zed/src/editor/movement.rs index b40573e5e8ea0d4f91112e77967154beedf0e4ee..f7e89776ecf1d3fd24dc90e03b689ea5f63de22f 100644 --- a/zed/src/editor/movement.rs +++ b/zed/src/editor/movement.rs @@ -9,7 +9,7 @@ pub fn left(map: &DisplayMap, mut point: DisplayPoint, app: &AppContext) -> Resu *point.row_mut() -= 1; *point.column_mut() = map.line_len(point.row(), app); } - Ok(map.snapshot(app).clip_point(point, Bias::Left, app)) + Ok(map.snapshot(app).clip_point(point, Bias::Left)) } pub fn right(map: &DisplayMap, mut point: DisplayPoint, app: &AppContext) -> Result { @@ -20,7 +20,7 @@ pub fn right(map: &DisplayMap, mut point: DisplayPoint, app: &AppContext) -> Res *point.row_mut() += 1; *point.column_mut() = 0; } - Ok(map.snapshot(app).clip_point(point, Bias::Right, app)) + Ok(map.snapshot(app).clip_point(point, Bias::Right)) } pub fn up( @@ -33,12 +33,12 @@ pub fn up( let goal_column = if let SelectionGoal::Column(column) = goal { column } else { - map.column_to_chars(point.row(), point.column(), app) + map.column_to_chars(point.row(), point.column()) }; if point.row() > 0 { *point.row_mut() -= 1; - *point.column_mut() = map.column_from_chars(point.row(), goal_column, app); + *point.column_mut() = map.column_from_chars(point.row(), goal_column); } else { point = DisplayPoint::new(0, 0); } @@ -57,12 +57,12 @@ pub fn down( let goal_column = if let SelectionGoal::Column(column) = goal { column } else { - map.column_to_chars(point.row(), point.column(), app) + map.column_to_chars(point.row(), point.column()) }; if point.row() < max_point.row() { *point.row_mut() += 1; - *point.column_mut() = map.column_from_chars(point.row(), goal_column, app); + *point.column_mut() = map.column_from_chars(point.row(), goal_column); } else { point = max_point; } @@ -107,7 +107,7 @@ pub fn prev_word_boundary( let mut boundary = DisplayPoint::new(point.row(), 0); let mut column = 0; let mut prev_c = None; - for c in map.snapshot(app).chars_at(boundary, app) { + for c in map.snapshot(app).chars_at(boundary) { if column >= point.column() { break; } @@ -129,7 +129,7 @@ pub fn next_word_boundary( app: &AppContext, ) -> Result { let mut prev_c = None; - for c in map.snapshot(app).chars_at(point, app) { + for c in map.snapshot(app).chars_at(point) { if prev_c.is_some() && (c == '\n' || char_kind(prev_c.unwrap()) != char_kind(c)) { break; } diff --git a/zed/src/file_finder.rs b/zed/src/file_finder.rs index 7e57057e7215dee4a6ff67fc244aa2e45b99c34e..5c3cffff7b792f5dec627e918234d8a5339ac677 100644 --- a/zed/src/file_finder.rs +++ b/zed/src/file_finder.rs @@ -458,7 +458,11 @@ impl FileFinder { #[cfg(test)] mod tests { use super::*; - use crate::{editor, settings, test::temp_tree, workspace::Workspace}; + use crate::{ + editor, + test::{build_app_state, temp_tree}, + workspace::Workspace, + }; use serde_json::json; use std::fs; use tempdir::TempDir; @@ -474,9 +478,10 @@ mod tests { editor::init(ctx); }); - let settings = settings::channel(&app.font_cache()).unwrap().1; + let app_state = app.read(build_app_state); let (window_id, workspace) = app.add_window(|ctx| { - let mut workspace = Workspace::new(0, settings, 
ctx); + let mut workspace = + Workspace::new(0, app_state.settings, app_state.language_registry, ctx); workspace.add_worktree(tmp_dir.path(), ctx); workspace }); @@ -541,15 +546,21 @@ mod tests { "hi": "", "hiccup": "", })); - let settings = settings::channel(&app.font_cache()).unwrap().1; + let app_state = app.read(build_app_state); let (_, workspace) = app.add_window(|ctx| { - let mut workspace = Workspace::new(0, settings.clone(), ctx); + let mut workspace = Workspace::new( + 0, + app_state.settings.clone(), + app_state.language_registry.clone(), + ctx, + ); workspace.add_worktree(tmp_dir.path(), ctx); workspace }); app.read(|ctx| workspace.read(ctx).worktree_scans_complete(ctx)) .await; - let (_, finder) = app.add_window(|ctx| FileFinder::new(settings, workspace.clone(), ctx)); + let (_, finder) = + app.add_window(|ctx| FileFinder::new(app_state.settings, workspace.clone(), ctx)); let query = "hi".to_string(); finder @@ -598,15 +609,21 @@ mod tests { fs::create_dir(&dir_path).unwrap(); fs::write(&file_path, "").unwrap(); - let settings = settings::channel(&app.font_cache()).unwrap().1; + let app_state = app.read(build_app_state); let (_, workspace) = app.add_window(|ctx| { - let mut workspace = Workspace::new(0, settings.clone(), ctx); + let mut workspace = Workspace::new( + 0, + app_state.settings.clone(), + app_state.language_registry.clone(), + ctx, + ); workspace.add_worktree(&file_path, ctx); workspace }); app.read(|ctx| workspace.read(ctx).worktree_scans_complete(ctx)) .await; - let (_, finder) = app.add_window(|ctx| FileFinder::new(settings, workspace.clone(), ctx)); + let (_, finder) = + app.add_window(|ctx| FileFinder::new(app_state.settings, workspace.clone(), ctx)); // Even though there is only one worktree, that worktree's filename // is included in the matching, because the worktree is a single file. @@ -641,9 +658,17 @@ mod tests { "dir1": { "a.txt": "" }, "dir2": { "a.txt": "" } })); - let settings = settings::channel(&app.font_cache()).unwrap().1; - let (_, workspace) = app.add_window(|ctx| Workspace::new(0, settings.clone(), ctx)); + let app_state = app.read(build_app_state); + + let (_, workspace) = app.add_window(|ctx| { + Workspace::new( + 0, + app_state.settings.clone(), + app_state.language_registry.clone(), + ctx, + ) + }); workspace .update(&mut app, |workspace, ctx| { @@ -656,7 +681,8 @@ mod tests { app.read(|ctx| workspace.read(ctx).worktree_scans_complete(ctx)) .await; - let (_, finder) = app.add_window(|ctx| FileFinder::new(settings, workspace.clone(), ctx)); + let (_, finder) = + app.add_window(|ctx| FileFinder::new(app_state.settings, workspace.clone(), ctx)); // Run a search that matches two files with the same relative path. 
finder diff --git a/zed/src/language.rs b/zed/src/language.rs new file mode 100644 index 0000000000000000000000000000000000000000..9f977bf408fc4e402d884a58c3da47e10c8e5fb8 --- /dev/null +++ b/zed/src/language.rs @@ -0,0 +1,152 @@ +use crate::settings::{Theme, ThemeMap}; +use parking_lot::Mutex; +use rust_embed::RustEmbed; +use serde::Deserialize; +use std::{path::Path, str, sync::Arc}; +use tree_sitter::{Language as Grammar, Query}; +pub use tree_sitter::{Parser, Tree}; + +#[derive(RustEmbed)] +#[folder = "languages"] +pub struct LanguageDir; + +#[derive(Default, Deserialize)] +pub struct LanguageConfig { + pub name: String, + pub path_suffixes: Vec, +} + +#[derive(Deserialize)] +pub struct BracketPair { + pub start: String, + pub end: String, +} + +pub struct Language { + pub config: LanguageConfig, + pub grammar: Grammar, + pub highlight_query: Query, + pub brackets_query: Query, + pub theme_mapping: Mutex, +} + +pub struct LanguageRegistry { + languages: Vec>, +} + +impl Language { + pub fn theme_mapping(&self) -> ThemeMap { + self.theme_mapping.lock().clone() + } + + pub fn set_theme(&self, theme: &Theme) { + *self.theme_mapping.lock() = ThemeMap::new(self.highlight_query.capture_names(), theme); + } +} + +impl LanguageRegistry { + pub fn new() -> Self { + let grammar = tree_sitter_rust::language(); + let rust_config = toml::from_slice(&LanguageDir::get("rust/config.toml").unwrap()).unwrap(); + let rust_language = Language { + config: rust_config, + grammar, + highlight_query: Self::load_query(grammar, "rust/highlights.scm"), + brackets_query: Self::load_query(grammar, "rust/brackets.scm"), + theme_mapping: Mutex::new(ThemeMap::default()), + }; + + Self { + languages: vec![Arc::new(rust_language)], + } + } + + pub fn set_theme(&self, theme: &Theme) { + for language in &self.languages { + language.set_theme(theme); + } + } + + pub fn select_language(&self, path: impl AsRef) -> Option<&Arc> { + let path = path.as_ref(); + let filename = path.file_name().and_then(|name| name.to_str()); + let extension = path.extension().and_then(|name| name.to_str()); + let path_suffixes = [extension, filename]; + self.languages.iter().find(|language| { + language + .config + .path_suffixes + .iter() + .any(|suffix| path_suffixes.contains(&Some(suffix.as_str()))) + }) + } + + fn load_query(grammar: tree_sitter::Language, path: &str) -> Query { + Query::new( + grammar, + str::from_utf8(LanguageDir::get(path).unwrap().as_ref()).unwrap(), + ) + .unwrap() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_select_language() { + let grammar = tree_sitter_rust::language(); + let registry = LanguageRegistry { + languages: vec![ + Arc::new(Language { + config: LanguageConfig { + name: "Rust".to_string(), + path_suffixes: vec!["rs".to_string()], + ..Default::default() + }, + grammar, + highlight_query: Query::new(grammar, "").unwrap(), + brackets_query: Query::new(grammar, "").unwrap(), + theme_mapping: Default::default(), + }), + Arc::new(Language { + config: LanguageConfig { + name: "Make".to_string(), + path_suffixes: vec!["Makefile".to_string(), "mk".to_string()], + ..Default::default() + }, + grammar, + highlight_query: Query::new(grammar, "").unwrap(), + brackets_query: Query::new(grammar, "").unwrap(), + theme_mapping: Default::default(), + }), + ], + }; + + // matching file extension + assert_eq!( + registry.select_language("zed/lib.rs").map(get_name), + Some("Rust") + ); + assert_eq!( + registry.select_language("zed/lib.mk").map(get_name), + Some("Make") + ); + + // matching filename 
+ assert_eq!( + registry.select_language("zed/Makefile").map(get_name), + Some("Make") + ); + + // matching suffix that is not the full file extension or filename + assert_eq!(registry.select_language("zed/cars").map(get_name), None); + assert_eq!(registry.select_language("zed/a.cars").map(get_name), None); + assert_eq!(registry.select_language("zed/sumk").map(get_name), None); + + fn get_name(language: &Arc) -> &str { + language.config.name.as_str() + } + } +} diff --git a/zed/src/lib.rs b/zed/src/lib.rs index 7dd383f56e612e8f0db08d6affc8d649d6acd702..936185bf884277f091792737125693d1d4fbe0a1 100644 --- a/zed/src/lib.rs +++ b/zed/src/lib.rs @@ -1,6 +1,7 @@ pub mod assets; pub mod editor; pub mod file_finder; +pub mod language; pub mod menus; mod operation_queue; pub mod settings; @@ -11,3 +12,9 @@ mod time; mod util; pub mod workspace; mod worktree; + +#[derive(Clone)] +pub struct AppState { + pub settings: postage::watch::Receiver, + pub language_registry: std::sync::Arc, +} diff --git a/zed/src/main.rs b/zed/src/main.rs index 773acf147e03d55052ca85c8c819989dc40cd27f..8df011e7d2bba5f6d9222d64f29ad88a947aa3d4 100644 --- a/zed/src/main.rs +++ b/zed/src/main.rs @@ -4,19 +4,28 @@ use fs::OpenOptions; use log::LevelFilter; use simplelog::SimpleLogger; -use std::{fs, path::PathBuf}; +use std::{fs, path::PathBuf, sync::Arc}; use zed::{ - assets, editor, file_finder, menus, settings, + assets, editor, file_finder, language, menus, settings, workspace::{self, OpenParams}, + AppState, }; fn main() { init_logger(); let app = gpui::App::new(assets::Assets).unwrap(); - let (_, settings_rx) = settings::channel(&app.font_cache()).unwrap(); + + let (_, settings) = settings::channel(&app.font_cache()).unwrap(); + let language_registry = Arc::new(language::LanguageRegistry::new()); + language_registry.set_theme(&settings.borrow().theme); + let app_state = AppState { + language_registry, + settings, + }; + app.run(move |ctx| { - ctx.set_menus(menus::menus(settings_rx.clone())); + ctx.set_menus(menus::menus(app_state.settings.clone())); workspace::init(ctx); editor::init(ctx); file_finder::init(ctx); @@ -31,7 +40,7 @@ fn main() { "workspace:open_paths", OpenParams { paths, - settings: settings_rx, + app_state: app_state.clone(), }, ); } diff --git a/zed/src/settings.rs b/zed/src/settings.rs index 44b05c99faec208b334f5de8a73e589d94712102..7e60e6da233d89c6ceea7735ff1bb0f6bf05a258 100644 --- a/zed/src/settings.rs +++ b/zed/src/settings.rs @@ -1,6 +1,15 @@ -use anyhow::Result; -use gpui::font_cache::{FamilyId, FontCache}; +use super::assets::Assets; +use anyhow::{anyhow, Context, Result}; +use gpui::{ + color::ColorU, + font_cache::{FamilyId, FontCache}, + fonts::{Properties as FontProperties, Style as FontStyle, Weight as FontWeight}, +}; use postage::watch; +use serde::Deserialize; +use std::{collections::HashMap, sync::Arc}; + +const DEFAULT_STYLE_ID: StyleId = StyleId(u32::MAX); #[derive(Clone)] pub struct Settings { @@ -9,8 +18,23 @@ pub struct Settings { pub tab_size: usize, pub ui_font_family: FamilyId, pub ui_font_size: f32, + pub theme: Arc, +} + +#[derive(Clone, Default)] +pub struct Theme { + pub background_color: ColorU, + pub line_number_color: ColorU, + pub default_text_color: ColorU, + syntax_styles: Vec<(String, ColorU, FontProperties)>, } +#[derive(Clone, Debug)] +pub struct ThemeMap(Arc<[StyleId]>); + +#[derive(Clone, Copy, Debug)] +pub struct StyleId(u32); + impl Settings { pub fn new(font_cache: &FontCache) -> Result { Ok(Self { @@ -19,12 +43,259 @@ impl Settings { tab_size: 4, 
ui_font_family: font_cache.load_family(&["SF Pro", "Helvetica"])?, ui_font_size: 12.0, + theme: Arc::new( + Theme::parse(Assets::get("themes/light.toml").unwrap()) + .expect("Failed to parse built-in theme"), + ), }) } } +impl Theme { + pub fn parse(source: impl AsRef<[u8]>) -> Result { + #[derive(Deserialize)] + struct ThemeToml { + #[serde(default)] + syntax: HashMap, + #[serde(default)] + ui: HashMap, + } + + #[derive(Deserialize)] + #[serde(untagged)] + enum StyleToml { + Color(u32), + Full { + color: Option, + weight: Option, + #[serde(default)] + italic: bool, + }, + } + + let theme_toml: ThemeToml = + toml::from_slice(source.as_ref()).context("failed to parse theme TOML")?; + + let mut syntax_styles = Vec::<(String, ColorU, FontProperties)>::new(); + for (key, style) in theme_toml.syntax { + let (color, weight, italic) = match style { + StyleToml::Color(color) => (color, None, false), + StyleToml::Full { + color, + weight, + italic, + } => (color.unwrap_or(0), weight, italic), + }; + match syntax_styles.binary_search_by_key(&&key, |e| &e.0) { + Ok(i) | Err(i) => { + let mut properties = FontProperties::new(); + properties.weight = deserialize_weight(weight)?; + if italic { + properties.style = FontStyle::Italic; + } + syntax_styles.insert(i, (key, deserialize_color(color), properties)); + } + } + } + + let background_color = theme_toml + .ui + .get("background") + .copied() + .map_or(ColorU::from_u32(0xffffffff), deserialize_color); + let line_number_color = theme_toml + .ui + .get("line_numbers") + .copied() + .map_or(ColorU::black(), deserialize_color); + let default_text_color = theme_toml + .ui + .get("text") + .copied() + .map_or(ColorU::black(), deserialize_color); + + Ok(Theme { + background_color, + line_number_color, + default_text_color, + syntax_styles, + }) + } + + pub fn syntax_style(&self, id: StyleId) -> (ColorU, FontProperties) { + self.syntax_styles + .get(id.0 as usize) + .map_or((self.default_text_color, FontProperties::new()), |entry| { + (entry.1, entry.2) + }) + } + + #[cfg(test)] + pub fn syntax_style_name(&self, id: StyleId) -> Option<&str> { + self.syntax_styles.get(id.0 as usize).map(|e| e.0.as_str()) + } +} + +impl ThemeMap { + pub fn new(capture_names: &[String], theme: &Theme) -> Self { + // For each capture name in the highlight query, find the longest + // key in the theme's syntax styles that matches all of the + // dot-separated components of the capture name. 
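+        // For example, the capture name "function.async.rust" matches the keys
+        // "function" (one component) and "function.async" (two components), so it
+        // resolves to "function.async"; "function.method" does not match because
+        // "method" is not a component of the capture name.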
+impl ThemeMap {
+    pub fn new(capture_names: &[String], theme: &Theme) -> Self {
+        // For each capture name in the highlight query, find the longest
+        // key in the theme's syntax styles that matches all of the
+        // dot-separated components of the capture name.
+        ThemeMap(
+            capture_names
+                .iter()
+                .map(|capture_name| {
+                    theme
+                        .syntax_styles
+                        .iter()
+                        .enumerate()
+                        .filter_map(|(i, (key, _, _))| {
+                            let mut len = 0;
+                            let capture_parts = capture_name.split('.');
+                            for key_part in key.split('.') {
+                                if capture_parts.clone().any(|part| part == key_part) {
+                                    len += 1;
+                                } else {
+                                    return None;
+                                }
+                            }
+                            Some((i, len))
+                        })
+                        .max_by_key(|(_, len)| *len)
+                        .map_or(DEFAULT_STYLE_ID, |(i, _)| StyleId(i as u32))
+                })
+                .collect(),
+        )
+    }
+
+    pub fn get(&self, capture_id: u32) -> StyleId {
+        self.0
+            .get(capture_id as usize)
+            .copied()
+            .unwrap_or(DEFAULT_STYLE_ID)
+    }
+}
+
+impl Default for ThemeMap {
+    fn default() -> Self {
+        Self(Arc::new([]))
+    }
+}
+
+impl Default for StyleId {
+    fn default() -> Self {
+        DEFAULT_STYLE_ID
+    }
+}
+
 pub fn channel(
     font_cache: &FontCache,
 ) -> Result<(watch::Sender<Settings>, watch::Receiver<Settings>)> {
     Ok(watch::channel_with(Settings::new(font_cache)?))
 }
+
+fn deserialize_color(color: u32) -> ColorU {
+    ColorU::from_u32((color << 8) + 0xFF)
+}
+
+fn deserialize_weight(weight: Option<toml::Value>) -> Result<FontWeight> {
+    match &weight {
+        None => return Ok(FontWeight::NORMAL),
+        Some(toml::Value::Integer(i)) => return Ok(FontWeight(*i as f32)),
+        Some(toml::Value::String(s)) => match s.as_str() {
+            "normal" => return Ok(FontWeight::NORMAL),
+            "bold" => return Ok(FontWeight::BOLD),
+            "light" => return Ok(FontWeight::LIGHT),
+            "semibold" => return Ok(FontWeight::SEMIBOLD),
+            _ => {}
+        },
+        _ => {}
+    }
+    Err(anyhow!("Invalid weight {}", weight.unwrap()))
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_parse_theme() {
+        let theme = Theme::parse(
+            r#"
+            [ui]
+            background = 0x00ed00
+            line_numbers = 0xdddddd
+
+            [syntax]
+            "beta.two" = 0xAABBCC
+            "alpha.one" = {color = 0x112233, weight = "bold"}
+            "gamma.three" = {weight = "light", italic = true}
+            "#,
+        )
+        .unwrap();
+
+        assert_eq!(theme.background_color, ColorU::from_u32(0x00ED00FF));
+        assert_eq!(theme.line_number_color, ColorU::from_u32(0xddddddff));
+        assert_eq!(
+            theme.syntax_styles,
+            &[
+                (
+                    "alpha.one".to_string(),
+                    ColorU::from_u32(0x112233FF),
+                    *FontProperties::new().weight(FontWeight::BOLD)
+                ),
+                (
+                    "beta.two".to_string(),
+                    ColorU::from_u32(0xAABBCCFF),
+                    *FontProperties::new().weight(FontWeight::NORMAL)
+                ),
+                (
+                    "gamma.three".to_string(),
+                    ColorU::from_u32(0x000000FF),
+                    *FontProperties::new()
+                        .weight(FontWeight::LIGHT)
+                        .style(FontStyle::Italic),
+                ),
+            ]
+        );
+    }
+
+    #[test]
+    fn test_parse_empty_theme() {
+        Theme::parse("").unwrap();
+    }
+
+    #[test]
+    fn test_theme_map() {
+        let theme = Theme {
+            default_text_color: Default::default(),
+            background_color: ColorU::default(),
+            line_number_color: ColorU::default(),
+            syntax_styles: [
+                ("function", ColorU::from_u32(0x100000ff)),
+                ("function.method", ColorU::from_u32(0x200000ff)),
+                ("function.async", ColorU::from_u32(0x300000ff)),
+                ("variable.builtin.self.rust", ColorU::from_u32(0x400000ff)),
+                ("variable.builtin", ColorU::from_u32(0x500000ff)),
+                ("variable", ColorU::from_u32(0x600000ff)),
+            ]
+            .iter()
+            .map(|e| (e.0.to_string(), e.1, FontProperties::new()))
+            .collect(),
+        };
+
+        let capture_names = &[
+            "function.special".to_string(),
+            "function.async.rust".to_string(),
+            "variable.builtin.self".to_string(),
+        ];
+
+        let map = ThemeMap::new(capture_names, &theme);
+        assert_eq!(theme.syntax_style_name(map.get(0)), Some("function"));
+        assert_eq!(theme.syntax_style_name(map.get(1)), Some("function.async"));
+        assert_eq!(
+            theme.syntax_style_name(map.get(2)),
+            Some("variable.builtin")
+        );
+    }
+}
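`ThemeMap::new` above scores each style key by how many of its dot-separated parts appear in the capture name, rejects any key with an unmatched part, and keeps the highest-scoring key. The standalone sketch below reproduces just that scoring rule with plain strings; `best_style` is an illustrative helper, not part of the patch, and its asserts mirror the expectations of `test_theme_map`.

// Pick the style key with the most dot-separated parts, where every part of
// the key must appear somewhere in the capture name.
fn best_style<'a>(capture_name: &str, style_keys: &[&'a str]) -> Option<&'a str> {
    style_keys
        .iter()
        .filter_map(|key| {
            let capture_parts: Vec<&str> = capture_name.split('.').collect();
            let mut len = 0;
            for key_part in key.split('.') {
                if capture_parts.contains(&key_part) {
                    len += 1;
                } else {
                    return None;
                }
            }
            Some((key, len))
        })
        .max_by_key(|(_, len)| *len)
        .map(|(key, _)| *key)
}

fn main() {
    let keys = ["function", "function.async", "variable", "variable.builtin"];
    assert_eq!(best_style("function.special", &keys), Some("function"));
    assert_eq!(best_style("function.async.rust", &keys), Some("function.async"));
    assert_eq!(best_style("variable.builtin.self", &keys), Some("variable.builtin"));
    // A capture with no matching key falls back to the default style.
    assert_eq!(best_style("keyword", &keys), None);
}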
diff --git a/zed/src/test.rs b/zed/src/test.rs
index 1d155d4a5ab7a4687a8e3e8d15972edcdd5d3880..5efb0f3e3573f3165596b299b02c6dd844e10a4d 100644
--- a/zed/src/test.rs
+++ b/zed/src/test.rs
@@ -1,9 +1,11 @@
-use crate::time::ReplicaId;
+use crate::{language::LanguageRegistry, settings, time::ReplicaId, AppState};
 use ctor::ctor;
+use gpui::AppContext;
 use rand::Rng;
 use std::{
     collections::BTreeMap,
     path::{Path, PathBuf},
+    sync::Arc,
 };
 use tempdir::TempDir;
 
@@ -141,3 +143,12 @@ fn write_tree(path: &Path, tree: serde_json::Value) {
         panic!("You must pass a JSON object to this helper")
     }
 }
+
+pub fn build_app_state(ctx: &AppContext) -> AppState {
+    let settings = settings::channel(&ctx.font_cache()).unwrap().1;
+    let language_registry = Arc::new(LanguageRegistry::new());
+    AppState {
+        settings,
+        language_registry,
+    }
+}
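`build_app_state` gives tests the same cheaply cloneable bundle that `main` constructs: settings behind a watch channel plus a language registry behind an `Arc`, so every window or test shares one registry while settings changes fan out. A std-only sketch of that shape follows; `Settings`, `LanguageRegistry`, and `build_app_state` here are simplified stand-ins, not the gpui and postage types.

use std::sync::{Arc, RwLock};

// Simplified stand-ins for the real Settings and LanguageRegistry types.
struct Settings {
    tab_size: usize,
}

struct LanguageRegistry {
    languages: Vec<&'static str>,
}

// Cloning AppState is cheap: the registry is shared behind an Arc and the
// settings live behind a shared, watchable cell (RwLock here instead of a
// postage::watch channel).
#[derive(Clone)]
struct AppState {
    settings: Arc<RwLock<Settings>>,
    language_registry: Arc<LanguageRegistry>,
}

fn build_app_state() -> AppState {
    AppState {
        settings: Arc::new(RwLock::new(Settings { tab_size: 4 })),
        language_registry: Arc::new(LanguageRegistry {
            languages: vec!["Rust", "Make"],
        }),
    }
}

fn main() {
    let app_state = build_app_state();
    let for_window = app_state.clone(); // handed to a new window or test

    // Both clones observe the same registry and the same settings cell.
    assert!(Arc::ptr_eq(
        &app_state.language_registry,
        &for_window.language_registry
    ));
    app_state.settings.write().unwrap().tab_size = 2;
    assert_eq!(for_window.settings.read().unwrap().tab_size, 2);
    assert_eq!(for_window.language_registry.languages.len(), 2);
}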
diff --git a/zed/src/workspace.rs b/zed/src/workspace.rs
index 328ebf30e8413f637f7f486f52dca51e0ca5191a..bb77ca11817ad2d961586806eff11b14b85a8939 100644
--- a/zed/src/workspace.rs
+++ b/zed/src/workspace.rs
@@ -2,9 +2,11 @@ pub mod pane;
 pub mod pane_group;
 use crate::{
     editor::{Buffer, BufferView},
+    language::LanguageRegistry,
     settings::Settings,
     time::ReplicaId,
     worktree::{FileHandle, Worktree, WorktreeHandle},
+    AppState,
 };
 use futures_core::Future;
 use gpui::{
@@ -40,11 +42,11 @@ pub fn init(app: &mut MutableAppContext) {
 pub struct OpenParams {
     pub paths: Vec<PathBuf>,
-    pub settings: watch::Receiver<Settings>,
+    pub app_state: AppState,
 }
 
-fn open(settings: &watch::Receiver<Settings>, ctx: &mut MutableAppContext) {
-    let settings = settings.clone();
+fn open(app_state: &AppState, ctx: &mut MutableAppContext) {
+    let app_state = app_state.clone();
     ctx.prompt_for_paths(
         PathPromptOptions {
             files: true,
@@ -53,7 +55,7 @@ fn open(settings: &watch::Receiver<Settings>, ctx: &mut MutableAppContext) {
         },
         move |paths, ctx| {
             if let Some(paths) = paths {
-                ctx.dispatch_global_action("workspace:open_paths", OpenParams { paths, settings });
+                ctx.dispatch_global_action("workspace:open_paths", OpenParams { paths, app_state });
             }
         },
     );
@@ -84,7 +86,12 @@ fn open_paths(params: &OpenParams, app: &mut MutableAppContext) {
     // Add a new workspace if necessary
     app.add_window(|ctx| {
-        let mut view = Workspace::new(0, params.settings.clone(), ctx);
+        let mut view = Workspace::new(
+            0,
+            params.app_state.settings.clone(),
+            params.app_state.language_registry.clone(),
+            ctx,
+        );
         let open_paths = view.open_paths(&params.paths, ctx);
         ctx.foreground().spawn(open_paths).detach();
         view
@@ -284,6 +291,7 @@ pub struct State {
 pub struct Workspace {
     pub settings: watch::Receiver<Settings>,
+    language_registry: Arc<LanguageRegistry>,
     modal: Option<AnyViewHandle>,
     center: PaneGroup,
     panes: Vec<ViewHandle<Pane>>,
@@ -301,6 +309,7 @@ impl Workspace {
     pub fn new(
         replica_id: ReplicaId,
         settings: watch::Receiver<Settings>,
+        language_registry: Arc<LanguageRegistry>,
         ctx: &mut ViewContext<Self>,
     ) -> Self {
         let pane = ctx.add_view(|_| Pane::new(settings.clone()));
@@ -316,6 +325,7 @@
             panes: vec![pane.clone()],
             active_pane: pane.clone(),
             settings,
+            language_registry,
             replica_id,
             worktrees: Default::default(),
             items: Default::default(),
@@ -503,6 +513,7 @@
                 let (mut tx, rx) = postage::watch::channel();
                 entry.insert(rx);
                 let replica_id = self.replica_id;
+                let language_registry = self.language_registry.clone();
 
                 ctx.as_mut()
                     .spawn(|mut ctx| async move {
@@ -512,7 +523,14 @@
                         *tx.borrow_mut() = Some(match history {
                             Ok(history) => Ok(Box::new(ctx.add_model(|ctx| {
-                                Buffer::from_history(replica_id, history, Some(file), ctx)
+                                let language = language_registry.select_language(path);
+                                Buffer::from_history(
+                                    replica_id,
+                                    history,
+                                    Some(file),
+                                    language.cloned(),
+                                    ctx,
+                                )
                             }))),
                             Err(error) => Err(Arc::new(error)),
                         })
@@ -757,14 +775,17 @@ impl WorkspaceHandle for ViewHandle<Workspace> {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::{editor::BufferView, settings, test::temp_tree};
+    use crate::{
+        editor::BufferView,
+        test::{build_app_state, temp_tree},
+    };
     use serde_json::json;
     use std::{collections::HashSet, fs};
     use tempdir::TempDir;
 
     #[gpui::test]
     fn test_open_paths_action(app: &mut gpui::MutableAppContext) {
-        let settings = settings::channel(&app.font_cache()).unwrap().1;
+        let app_state = build_app_state(app.as_ref());
 
         init(app);
@@ -790,7 +811,7 @@ mod tests {
                     dir.path().join("a").to_path_buf(),
                     dir.path().join("b").to_path_buf(),
                 ],
-                settings: settings.clone(),
+                app_state: app_state.clone(),
             },
         );
         assert_eq!(app.window_ids().count(), 1);
@@ -799,7 +820,7 @@ mod tests {
             "workspace:open_paths",
             OpenParams {
                 paths: vec![dir.path().join("a").to_path_buf()],
-                settings: settings.clone(),
+                app_state: app_state.clone(),
             },
         );
         assert_eq!(app.window_ids().count(), 1);
@@ -815,7 +836,7 @@ mod tests {
                     dir.path().join("b").to_path_buf(),
                     dir.path().join("c").to_path_buf(),
                 ],
-                settings: settings.clone(),
+                app_state: app_state.clone(),
             },
         );
         assert_eq!(app.window_ids().count(), 2);
@@ -831,10 +852,11 @@ mod tests {
             },
         }));
 
-        let settings = settings::channel(&app.font_cache()).unwrap().1;
+        let app_state = app.read(build_app_state);
         let (_, workspace) = app.add_window(|ctx| {
-            let mut workspace = Workspace::new(0, settings, ctx);
+            let mut workspace =
+                Workspace::new(0, app_state.settings, app_state.language_registry, ctx);
             workspace.add_worktree(dir.path(), ctx);
             workspace
         });
@@ -935,9 +957,10 @@ mod tests {
             "b.txt": "",
         }));
 
-        let settings = settings::channel(&app.font_cache()).unwrap().1;
+        let app_state = app.read(build_app_state);
         let (_, workspace) = app.add_window(|ctx| {
-            let mut workspace = Workspace::new(0, settings, ctx);
+            let mut workspace =
+                Workspace::new(0, app_state.settings, app_state.language_registry, ctx);
             workspace.add_worktree(dir1.path(), ctx);
             workspace
         });
@@ -1003,9 +1026,10 @@ mod tests {
             "a.txt": "",
         }));
 
-        let settings = settings::channel(&app.font_cache()).unwrap().1;
+        let app_state = app.read(build_app_state);
         let (window_id, workspace) = app.add_window(|ctx| {
-            let mut workspace = Workspace::new(0, settings, ctx);
+            let mut workspace =
+                Workspace::new(0, app_state.settings, app_state.language_registry, ctx);
             workspace.add_worktree(dir.path(), ctx);
             workspace
         });
@@ -1046,9 +1070,10 @@
     #[gpui::test]
     async fn test_open_and_save_new_file(mut app: gpui::TestAppContext) {
         let dir = TempDir::new("test-new-file").unwrap();
-        let settings = settings::channel(&app.font_cache()).unwrap().1;
+        let app_state = app.read(build_app_state);
         let (_, workspace) = app.add_window(|ctx| {
-            let mut workspace = Workspace::new(0, settings, ctx);
+            let mut workspace =
+                Workspace::new(0, app_state.settings, app_state.language_registry, ctx);
             workspace.add_worktree(dir.path(), ctx);
             workspace
         });
@@ -1150,9 +1175,10 @@ mod tests {
             },
         }));
 
-        let settings = settings::channel(&app.font_cache()).unwrap().1;
+        let app_state = app.read(build_app_state);
         let (window_id, workspace) = app.add_window(|ctx| {
-            let mut workspace = Workspace::new(0, settings, ctx);
+            let mut workspace =
+                Workspace::new(0, app_state.settings, app_state.language_registry, ctx);
             workspace.add_worktree(dir.path(), ctx);
             workspace
         });
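In the buffer-opening hunk above, the workspace clones its `Arc<LanguageRegistry>` into a local binding before the `async move` block so the spawned task owns its own handle while the workspace keeps its own. The sketch below shows the same clone-before-move shape using `std::thread::spawn` in place of gpui's executor; the types and the trivial `select_language` are illustrative only.

use std::sync::Arc;
use std::thread;

// Simplified stand-ins for the real registry and workspace types.
struct LanguageRegistry {
    languages: Vec<&'static str>,
}

impl LanguageRegistry {
    // Trivial placeholder for select_language: only recognizes Rust files.
    fn select_language(&self, path: &str) -> Option<&'static str> {
        if path.ends_with(".rs") {
            self.languages.iter().copied().find(|name| *name == "Rust")
        } else {
            None
        }
    }
}

struct Workspace {
    language_registry: Arc<LanguageRegistry>,
}

impl Workspace {
    fn open_in_background(&self, path: String) -> thread::JoinHandle<Option<&'static str>> {
        // Clone the Arc into a local before the `move` closure, mirroring
        // `let language_registry = self.language_registry.clone();` in the patch,
        // so the background task owns a handle instead of borrowing `self`.
        let language_registry = self.language_registry.clone();
        thread::spawn(move || language_registry.select_language(&path))
    }
}

fn main() {
    let workspace = Workspace {
        language_registry: Arc::new(LanguageRegistry {
            languages: vec!["Rust", "Make"],
        }),
    };
    let handle = workspace.open_in_background("zed/src/main.rs".to_string());
    assert_eq!(handle.join().unwrap(), Some("Rust"));
}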
diff --git a/zed/src/worktree.rs b/zed/src/worktree.rs
index 33b23c8d698102f699529183f187466b9fe92c47..a50dacc4683de41d7a8c02f627746ab229122f5c 100644
--- a/zed/src/worktree.rs
+++ b/zed/src/worktree.rs
@@ -1483,7 +1483,8 @@ mod tests {
         let path = tree.update(&mut app, |tree, ctx| {
             let path = tree.files(0).next().unwrap().path().clone();
             assert_eq!(path.file_name().unwrap(), "file1");
-            smol::block_on(tree.save(&path, buffer.read(ctx).snapshot(), ctx.as_ref())).unwrap();
+            smol::block_on(tree.save(&path, buffer.read(ctx).snapshot().text(), ctx.as_ref()))
+                .unwrap();
             path
         });
 
@@ -1512,7 +1513,7 @@ mod tests {
         let file = app.update(|ctx| tree.file("", ctx)).await;
         app.update(|ctx| {
             assert_eq!(file.path().file_name(), None);
-            smol::block_on(file.save(buffer.read(ctx).snapshot(), ctx.as_ref())).unwrap();
+            smol::block_on(file.save(buffer.read(ctx).snapshot().text(), ctx.as_ref())).unwrap();
         });
 
         let history = app.read(|ctx| file.load_history(ctx)).await.unwrap();
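The worktree tests now hand `save` the snapshot's text rather than the snapshot itself, which keeps saving a plain write of text and keeps buffer internals out of the worktree's API surface. A minimal std-only sketch of that boundary, assuming a hypothetical `save_file` rather than the real `Worktree::save`:

use std::fs;
use std::io;
use std::path::Path;

// The save routine only needs the final text, not the buffer's snapshot type.
fn save_file(path: &Path, text: &str) -> io::Result<()> {
    fs::write(path, text)
}

fn main() -> io::Result<()> {
    let dir = std::env::temp_dir().join("zed-save-sketch");
    fs::create_dir_all(&dir)?;
    let path = dir.join("file1");

    // Whatever produced the text (a buffer snapshot, an editor, a test fixture)
    // is invisible to the save routine.
    save_file(&path, "the quick brown fox")?;
    assert_eq!(fs::read_to_string(&path)?, "the quick brown fox");
    Ok(())
}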