Detailed changes
@@ -6,14 +6,23 @@ jobs:
discord_release:
runs-on: ubuntu-latest
steps:
+ - name: Get appropriate URL
+ id: get-appropriate-url
+ run: |
+ if [ "${{ github.event.release.prerelease }}" == "true" ]; then
+ URL="https://zed.dev/releases/preview/latest"
+ else
+ URL="https://zed.dev/releases/stable/latest"
+ fi
+ echo "::set-output name=URL::$URL"
+
- name: Discord Webhook Action
uses: tsickert/discord-webhook@v5.3.0
- if: ${{ ! github.event.release.prerelease }}
with:
webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }}
content: |
📣 Zed ${{ github.event.release.tag_name }} was just released!
- Restart your Zed or head to https://zed.dev/releases/stable/latest to grab it.
+ Restart your Zed or head to ${{ steps.get-appropriate-url.outputs.URL }} to grab it.
${{ github.event.release.body }}
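
Note: the step above emits its output with the legacy `::set-output` workflow command, which GitHub has deprecated; the modern equivalent would be writing `echo "URL=$URL" >> "$GITHUB_OUTPUT"` in the same run step.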
@@ -0,0 +1,5 @@
+{
+ "JSON": {
+ "tab_size": 4
+ }
+}
@@ -125,50 +125,52 @@ dependencies = [
[[package]]
name = "alacritty_config"
-version = "0.1.1-dev"
-source = "git+https://github.com/zed-industries/alacritty?rev=a51dbe25d67e84d6ed4261e640d3954fbdd9be45#a51dbe25d67e84d6ed4261e640d3954fbdd9be45"
+version = "0.1.2-dev"
+source = "git+https://github.com/alacritty/alacritty?rev=7b9f32300ee0a249c0872302c97635b460e45ba5#7b9f32300ee0a249c0872302c97635b460e45ba5"
dependencies = [
"log",
"serde",
- "serde_yaml",
+ "toml 0.7.6",
+ "winit",
]
[[package]]
name = "alacritty_config_derive"
-version = "0.2.1-dev"
-source = "git+https://github.com/zed-industries/alacritty?rev=a51dbe25d67e84d6ed4261e640d3954fbdd9be45#a51dbe25d67e84d6ed4261e640d3954fbdd9be45"
+version = "0.2.2-dev"
+source = "git+https://github.com/alacritty/alacritty?rev=7b9f32300ee0a249c0872302c97635b460e45ba5#7b9f32300ee0a249c0872302c97635b460e45ba5"
dependencies = [
"proc-macro2",
"quote",
- "syn 1.0.109",
+ "syn 2.0.28",
]
[[package]]
name = "alacritty_terminal"
-version = "0.17.1-dev"
-source = "git+https://github.com/zed-industries/alacritty?rev=a51dbe25d67e84d6ed4261e640d3954fbdd9be45#a51dbe25d67e84d6ed4261e640d3954fbdd9be45"
+version = "0.20.0-dev"
+source = "git+https://github.com/alacritty/alacritty?rev=7b9f32300ee0a249c0872302c97635b460e45ba5#7b9f32300ee0a249c0872302c97635b460e45ba5"
dependencies = [
"alacritty_config",
"alacritty_config_derive",
"base64 0.13.1",
- "bitflags 1.3.2",
- "dirs 4.0.0",
+ "bitflags 2.3.3",
+ "home",
"libc",
"log",
"mio 0.6.23",
"mio-anonymous-pipes",
"mio-extras",
"miow 0.3.7",
- "nix",
+ "nix 0.26.2",
"parking_lot 0.12.1",
"regex-automata 0.1.10",
"serde",
"serde_yaml",
"signal-hook",
"signal-hook-mio",
+ "toml 0.7.6",
"unicode-width",
"vte",
- "winapi 0.3.9",
+ "windows-sys",
]
[[package]]
@@ -185,14 +187,14 @@ checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5"
[[package]]
name = "alsa"
-version = "0.7.0"
+version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8512c9117059663fb5606788fbca3619e2a91dac0e3fe516242eab1fa6be5e44"
+checksum = "e2562ad8dcf0f789f65c6fdaad8a8a9708ed6b488e649da28c01656ad66b8b47"
dependencies = [
"alsa-sys",
"bitflags 1.3.2",
"libc",
- "nix",
+ "nix 0.24.3",
]
[[package]]
@@ -211,6 +213,30 @@ version = "0.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec8ad6edb4840b78c5c3d88de606b22252d552b55f3a4699fbb10fc070ec3049"
+[[package]]
+name = "android-activity"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "64529721f27c2314ced0890ce45e469574a73e5e6fdd6e9da1860eb29285f5e0"
+dependencies = [
+ "android-properties",
+ "bitflags 1.3.2",
+ "cc",
+ "jni-sys",
+ "libc",
+ "log",
+ "ndk",
+ "ndk-context",
+ "ndk-sys",
+ "num_enum 0.6.1",
+]
+
+[[package]]
+name = "android-properties"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fc7eb209b1518d6bb87b283c20095f5228ecda460da70b44f0802523dea6da04"
+
[[package]]
name = "android-tzdata"
version = "0.1.1"
@@ -481,7 +507,7 @@ checksum = "0e97ce7de6cf12de5d7226c73f5ba9811622f4db3a5b91b55c53e987e5f91cba"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.27",
+ "syn 2.0.28",
]
[[package]]
@@ -529,7 +555,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.27",
+ "syn 2.0.28",
]
[[package]]
@@ -572,7 +598,7 @@ checksum = "cc6dde6e4ed435a4c1ee4e73592f5ba9da2151af10076cc04858746af9352d09"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.27",
+ "syn 2.0.28",
]
[[package]]
@@ -830,7 +856,7 @@ dependencies = [
"regex",
"rustc-hash",
"shlex",
- "syn 2.0.27",
+ "syn 2.0.28",
"which",
]
@@ -860,6 +886,9 @@ name = "bitflags"
version = "2.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42"
+dependencies = [
+ "serde",
+]
[[package]]
name = "bitvec"
@@ -897,6 +926,25 @@ dependencies = [
"generic-array",
]
+[[package]]
+name = "block-sys"
+version = "0.1.0-beta.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0fa55741ee90902547802152aaf3f8e5248aab7e21468089560d4c8840561146"
+dependencies = [
+ "objc-sys",
+]
+
+[[package]]
+name = "block2"
+version = "0.2.0-alpha.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8dd9e63c1744f755c2f60332b88de39d341e5e86239014ad839bd71c106dec42"
+dependencies = [
+ "block-sys",
+ "objc2-encode",
+]
+
[[package]]
name = "blocking"
version = "1.3.1"
@@ -992,7 +1040,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05"
dependencies = [
"memchr",
- "regex-automata 0.3.3",
+ "regex-automata 0.3.4",
"serde",
]
@@ -1078,6 +1126,20 @@ dependencies = [
"util",
]
+[[package]]
+name = "calloop"
+version = "0.10.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "52e0d00eb1ea24371a97d2da6201c6747a633dc6dc1988ef503403b4c59504a8"
+dependencies = [
+ "bitflags 1.3.2",
+ "log",
+ "nix 0.25.1",
+ "slotmap",
+ "thiserror",
+ "vec_map",
+]
+
[[package]]
name = "cap-fs-ext"
version = "0.24.4"
@@ -1186,6 +1248,12 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+[[package]]
+name = "cfg_aliases"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e"
+
[[package]]
name = "chrono"
version = "0.4.26"
@@ -1290,7 +1358,7 @@ dependencies = [
"heck 0.4.1",
"proc-macro2",
"quote",
- "syn 2.0.27",
+ "syn 2.0.28",
]
[[package]]
@@ -1357,7 +1425,7 @@ dependencies = [
"sum_tree",
"tempfile",
"thiserror",
- "time 0.3.23",
+ "time 0.3.24",
"tiny_http",
"url",
"util",
@@ -1459,10 +1527,10 @@ dependencies = [
"sha-1 0.9.8",
"sqlx",
"theme",
- "time 0.3.23",
+ "time 0.3.24",
"tokio",
"tokio-tungstenite",
- "toml",
+ "toml 0.5.11",
"tonic",
"tower",
"tracing",
@@ -1974,9 +2042,9 @@ dependencies = [
[[package]]
name = "curl-sys"
-version = "0.4.64+curl-8.2.0"
+version = "0.4.65+curl-8.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f96069f0b1cb1241c838740659a771ef143363f52772a9ce1bd9c04c75eee0dc"
+checksum = "961ba061c9ef2fe34bbd12b807152d96f0badd2bebe7b90ce6c8c8b7572a0986"
dependencies = [
"cc",
"libc",
@@ -1987,6 +2055,15 @@ dependencies = [
"winapi 0.3.9",
]
+[[package]]
+name = "cursor-icon"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "740bb192a8e2d1350119916954f4409ee7f62f149b536911eeb78ba5a20526bf"
+dependencies = [
+ "serde",
+]
+
[[package]]
name = "dashmap"
version = "5.5.0"
@@ -2047,6 +2124,15 @@ dependencies = [
"byteorder",
]
+[[package]]
+name = "deranged"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8810e7e2cf385b1e9b50d68264908ec367ba642c96d02edfe61c39e88e2a3c01"
+dependencies = [
+ "serde",
+]
+
[[package]]
name = "dhat"
version = "0.3.2"
@@ -2171,6 +2257,12 @@ dependencies = [
"winapi 0.3.9",
]
+[[package]]
+name = "dispatch"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bd0c93bb4b0c6d9b77f4435b0ae98c24d17f1c45b2ff844c6151a07256ca923b"
+
[[package]]
name = "dlib"
version = "0.5.2"
@@ -2258,7 +2350,7 @@ dependencies = [
"tree-sitter",
"tree-sitter-html",
"tree-sitter-rust",
- "tree-sitter-typescript 0.20.2 (git+https://github.com/tree-sitter/tree-sitter-typescript?rev=5d20856f34315b068c41edaee2ac8a100081d259)",
+ "tree-sitter-typescript",
"unindent",
"util",
"workspace",
@@ -2342,9 +2434,9 @@ dependencies = [
[[package]]
name = "errno"
-version = "0.3.1"
+version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a"
+checksum = "6b30f669a7961ef1631673d2766cc92f52d64f7ef354d4fe0ddfd30ed52f0f4f"
dependencies = [
"errno-dragonfly",
"libc",
@@ -2648,7 +2740,7 @@ dependencies = [
"smol",
"sum_tree",
"tempfile",
- "time 0.3.23",
+ "time 0.3.24",
"util",
]
@@ -2798,7 +2890,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.27",
+ "syn 2.0.28",
]
[[package]]
@@ -2950,9 +3042,9 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
[[package]]
name = "globset"
-version = "0.4.11"
+version = "0.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1391ab1f92ffcc08911957149833e682aa3fe252b9f45f966d2ef972274c97df"
+checksum = "aca8bbd8e0707c1887a8bbb7e6b40e228f251ff5d62c8220a4a7a53c73aff006"
dependencies = [
"aho-corasick 1.0.2",
"bstr",
@@ -3038,7 +3130,7 @@ dependencies = [
"smol",
"sqlez",
"sum_tree",
- "time 0.3.23",
+ "time 0.3.24",
"tiny-skia",
"usvg",
"util",
@@ -3230,6 +3322,15 @@ dependencies = [
"digest 0.10.7",
]
+[[package]]
+name = "home"
+version = "0.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb"
+dependencies = [
+ "windows-sys",
+]
+
[[package]]
name = "hound"
version = "3.5.0"
@@ -3759,7 +3860,7 @@ dependencies = [
"text",
"theme",
"tree-sitter",
- "tree-sitter-elixir 0.1.0 (git+https://github.com/elixir-lang/tree-sitter-elixir?rev=4ba9dab6e2602960d95b2b625f3386c27e08084e)",
+ "tree-sitter-elixir",
"tree-sitter-embedded-template",
"tree-sitter-heex",
"tree-sitter-html",
@@ -3768,7 +3869,7 @@ dependencies = [
"tree-sitter-python",
"tree-sitter-ruby",
"tree-sitter-rust",
- "tree-sitter-typescript 0.20.2 (git+https://github.com/tree-sitter/tree-sitter-typescript?rev=5d20856f34315b068c41edaee2ac8a100081d259)",
+ "tree-sitter-typescript",
"unicase",
"unindent",
"util",
@@ -3942,9 +4043,9 @@ checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519"
[[package]]
name = "linux-raw-sys"
-version = "0.4.3"
+version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0"
+checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503"
[[package]]
name = "lipsum"
@@ -4399,7 +4500,7 @@ dependencies = [
"bitflags 1.3.2",
"jni-sys",
"ndk-sys",
- "num_enum",
+ "num_enum 0.5.11",
"raw-window-handle",
"thiserror",
]
@@ -4439,9 +4540,33 @@ dependencies = [
"bitflags 1.3.2",
"cfg-if 1.0.0",
"libc",
+]
+
+[[package]]
+name = "nix"
+version = "0.25.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f346ff70e7dbfd675fe90590b92d59ef2de15a8779ae305ebcbfd3f0caf59be4"
+dependencies = [
+ "autocfg",
+ "bitflags 1.3.2",
+ "cfg-if 1.0.0",
+ "libc",
"memoffset 0.6.5",
]
+[[package]]
+name = "nix"
+version = "0.26.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a"
+dependencies = [
+ "bitflags 1.3.2",
+ "cfg-if 1.0.0",
+ "libc",
+ "static_assertions",
+]
+
[[package]]
name = "node_runtime"
version = "0.1.0"
@@ -4595,7 +4720,16 @@ version = "0.5.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f646caf906c20226733ed5b1374287eb97e3c2a5c227ce668c1f2ce20ae57c9"
dependencies = [
- "num_enum_derive",
+ "num_enum_derive 0.5.11",
+]
+
+[[package]]
+name = "num_enum"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a015b430d3c108a207fd776d2e2196aaf8b1cf8cf93253e3a097ff3085076a1"
+dependencies = [
+ "num_enum_derive 0.6.1",
]
[[package]]
@@ -4610,6 +4744,18 @@ dependencies = [
"syn 1.0.109",
]
+[[package]]
+name = "num_enum_derive"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6"
+dependencies = [
+ "proc-macro-crate 1.3.1",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.28",
+]
+
[[package]]
name = "nvim-rs"
version = "0.5.0"
@@ -4635,6 +4781,32 @@ dependencies = [
"objc_exception",
]
+[[package]]
+name = "objc-sys"
+version = "0.2.0-beta.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df3b9834c1e95694a05a828b59f55fa2afec6288359cda67146126b3f90a55d7"
+
+[[package]]
+name = "objc2"
+version = "0.3.0-beta.3.patch-leaks.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7e01640f9f2cb1220bbe80325e179e532cb3379ebcd1bf2279d703c19fe3a468"
+dependencies = [
+ "block2",
+ "objc-sys",
+ "objc2-encode",
+]
+
+[[package]]
+name = "objc2-encode"
+version = "2.0.0-pre.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "abfcac41015b00a120608fdaa6938c44cb983fee294351cc4bac7638b4e50512"
+dependencies = [
+ "objc-sys",
+]
+
[[package]]
name = "objc_exception"
version = "0.1.2"
@@ -4732,7 +4904,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.27",
+ "syn 2.0.28",
]
[[package]]
@@ -4753,6 +4925,15 @@ dependencies = [
"vcpkg",
]
+[[package]]
+name = "orbclient"
+version = "0.3.45"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "221d488cd70617f1bd599ed8ceb659df2147d9393717954d82a0f5e8032a6ab1"
+dependencies = [
+ "redox_syscall 0.3.5",
+]
+
[[package]]
name = "ordered-float"
version = "2.10.0"
@@ -4962,9 +5143,9 @@ checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94"
[[package]]
name = "pest"
-version = "2.7.1"
+version = "2.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0d2d1d55045829d65aad9d389139882ad623b33b904e7c9f1b10c5b8927298e5"
+checksum = "1acb4a4365a13f749a93f1a094a7805e5cfa0955373a9de860d962eaa3a5fe5a"
dependencies = [
"thiserror",
"ucd-trie",
@@ -5020,7 +5201,7 @@ checksum = "ec2e072ecce94ec471b13398d5402c188e76ac03cf74dd1a975161b23a3f6d9c"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.27",
+ "syn 2.0.28",
]
[[package]]
@@ -5058,7 +5239,7 @@ dependencies = [
"line-wrap",
"quick-xml",
"serde",
- "time 0.3.23",
+ "time 0.3.24",
]
[[package]]
@@ -5173,7 +5354,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c64d9ba0963cdcea2e1b2230fbae2bab30eb25a174be395c41e764bfb65dd62"
dependencies = [
"proc-macro2",
- "syn 2.0.27",
+ "syn 2.0.28",
]
[[package]]
@@ -5182,7 +5363,7 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785"
dependencies = [
- "toml",
+ "toml 0.5.11",
]
[[package]]
@@ -5287,7 +5468,7 @@ dependencies = [
"terminal",
"text",
"thiserror",
- "toml",
+ "toml 0.5.11",
"unindent",
"util",
]
@@ -5733,7 +5914,7 @@ checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575"
dependencies = [
"aho-corasick 1.0.2",
"memchr",
- "regex-automata 0.3.3",
+ "regex-automata 0.3.4",
"regex-syntax 0.7.4",
]
@@ -5748,9 +5929,9 @@ dependencies = [
[[package]]
name = "regex-automata"
-version = "0.3.3"
+version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310"
+checksum = "b7b6d6190b7594385f61bd3911cd1be99dfddcfc365a4160cc2ab5bff4aed294"
dependencies = [
"aho-corasick 1.0.2",
"memchr",
@@ -6045,7 +6226,7 @@ dependencies = [
"proc-macro2",
"quote",
"rust-embed-utils",
- "syn 2.0.27",
+ "syn 2.0.28",
"walkdir",
]
@@ -6062,13 +6243,12 @@ dependencies = [
[[package]]
name = "rust_decimal"
-version = "1.30.0"
+version = "1.31.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d0446843641c69436765a35a5a77088e28c2e6a12da93e84aa3ab1cd4aa5a042"
+checksum = "4a2ab0025103a60ecaaf3abf24db1db240a4e1c15837090d2c32f625ac98abea"
dependencies = [
"arrayvec 0.7.4",
"borsh",
- "bytecheck",
"byteorder",
"bytes 1.4.0",
"num-traits",
@@ -6122,7 +6302,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06"
dependencies = [
"bitflags 1.3.2",
- "errno 0.3.1",
+ "errno 0.3.2",
"io-lifetimes 1.0.11",
"libc",
"linux-raw-sys 0.3.8",
@@ -6136,9 +6316,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a962918ea88d644592894bc6dc55acc6c0956488adcebbfb6e273506b7fd6e5"
dependencies = [
"bitflags 2.3.3",
- "errno 0.3.1",
+ "errno 0.3.2",
"libc",
- "linux-raw-sys 0.4.3",
+ "linux-raw-sys 0.4.5",
"windows-sys",
]
@@ -6337,7 +6517,7 @@ dependencies = [
"serde_json",
"sqlx",
"thiserror",
- "time 0.3.23",
+ "time 0.3.24",
"tracing",
"url",
"uuid 1.4.1",
@@ -6365,7 +6545,7 @@ dependencies = [
"rust_decimal",
"sea-query-derive",
"serde_json",
- "time 0.3.23",
+ "time 0.3.24",
"uuid 1.4.1",
]
@@ -6380,7 +6560,7 @@ dependencies = [
"sea-query",
"serde_json",
"sqlx",
- "time 0.3.23",
+ "time 0.3.24",
"uuid 1.4.1",
]
@@ -6514,11 +6694,14 @@ dependencies = [
"tiktoken-rs 0.5.0",
"tree-sitter",
"tree-sitter-cpp",
- "tree-sitter-elixir 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "tree-sitter-json 0.19.0",
+ "tree-sitter-elixir",
+ "tree-sitter-json 0.20.0",
+ "tree-sitter-lua",
+ "tree-sitter-php",
+ "tree-sitter-ruby",
"tree-sitter-rust",
- "tree-sitter-toml 0.20.0",
- "tree-sitter-typescript 0.20.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tree-sitter-toml",
+ "tree-sitter-typescript",
"unindent",
"util",
"workspace",
@@ -6550,22 +6733,22 @@ checksum = "5a9f47faea3cad316faa914d013d24f471cd90bfca1a0c70f05a3f42c6441e99"
[[package]]
name = "serde"
-version = "1.0.175"
+version = "1.0.180"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5d25439cd7397d044e2748a6fe2432b5e85db703d6d097bd014b3c0ad1ebff0b"
+checksum = "0ea67f183f058fe88a4e3ec6e2788e003840893b91bac4559cabedd00863b3ed"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
-version = "1.0.175"
+version = "1.0.180"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b23f7ade6f110613c0d63858ddb8b94c1041f550eab58a16b371bdf2c9c80ab4"
+checksum = "24e744d7782b686ab3b73267ef05697159cc0e5abbed3f47f9933165e5219036"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.27",
+ "syn 2.0.28",
]
[[package]]
@@ -6590,9 +6773,9 @@ dependencies = [
[[package]]
name = "serde_json"
-version = "1.0.103"
+version = "1.0.104"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d03b412469450d4404fe8499a268edd7f8b79fecb074b0d812ad64ca21f4031b"
+checksum = "076066c5f1078eac5b722a31827a8832fe108bed65dfa75e233c89f8206e976c"
dependencies = [
"indexmap 2.0.0",
"itoa 1.0.9",
@@ -6614,13 +6797,22 @@ dependencies = [
[[package]]
name = "serde_repr"
-version = "0.1.15"
+version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e168eaaf71e8f9bd6037feb05190485708e019f4fd87d161b3c0a0d37daf85e5"
+checksum = "8725e1dfadb3a50f7e5ce0b1a540466f6ed3fe7a0fca2ac2b8b831d31316bd00"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.27",
+ "syn 2.0.28",
+]
+
+[[package]]
+name = "serde_spanned"
+version = "0.6.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96426c9936fd7a0124915f9185ea1d20aa9445cc9821142f0a73bc9207a2e186"
+dependencies = [
+ "serde",
]
[[package]]
@@ -6669,7 +6861,7 @@ dependencies = [
"smallvec",
"sqlez",
"staff_mode",
- "toml",
+ "toml 0.5.11",
"tree-sitter",
"tree-sitter-json 0.19.0",
"unindent",
@@ -6855,6 +7047,15 @@ version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "826167069c09b99d56f31e9ae5c99049e932a98c9dc2dac47645b08dbbf76ba7"
+[[package]]
+name = "slotmap"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e1e08e261d0e8f5c43123b7adf3e4ca1690d655377ac93a03b2c9d3e98de1342"
+dependencies = [
+ "version_check",
+]
+
[[package]]
name = "sluice"
version = "0.5.5"
@@ -6899,6 +7100,15 @@ dependencies = [
"pin-project-lite 0.1.12",
]
+[[package]]
+name = "smol_str"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "74212e6bbe9a4352329b2f68ba3130c15a3f26fe88ff22dbdc6cdd58fa85e99c"
+dependencies = [
+ "serde",
+]
+
[[package]]
name = "snippet"
version = "0.1.0"
@@ -7039,7 +7249,7 @@ dependencies = [
"sqlx-rt",
"stringprep",
"thiserror",
- "time 0.3.23",
+ "time 0.3.24",
"tokio-stream",
"url",
"uuid 1.4.1",
@@ -7288,9 +7498,9 @@ dependencies = [
[[package]]
name = "syn"
-version = "2.0.27"
+version = "2.0.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b60f673f44a8255b9c8c657daf66a596d435f2da81a555b06dc644d080ba45e0"
+checksum = "04361975b3f5e348b2189d8dc55bc942f278b2d482a6a0365de5bdd62d351567"
dependencies = [
"proc-macro2",
"quote",
@@ -7358,9 +7568,9 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
[[package]]
name = "target-lexicon"
-version = "0.12.10"
+version = "0.12.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1d2faeef5759ab89935255b1a4cd98e0baf99d1085e37d36599c625dac49ae8e"
+checksum = "9d0e916b1148c8e263850e1ebcbd046f333e0683c724876bb0da63ea4373dc8a"
[[package]]
name = "tempdir"
@@ -7502,7 +7712,7 @@ dependencies = [
"serde_derive",
"serde_json",
"settings",
- "toml",
+ "toml 0.5.11",
"util",
]
@@ -7543,7 +7753,7 @@ checksum = "090198534930841fab3a5d1bb637cde49e339654e606195f8d9c76eeb081dc96"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.27",
+ "syn 2.0.28",
]
[[package]]
@@ -7616,10 +7826,11 @@ dependencies = [
[[package]]
name = "time"
-version = "0.3.23"
+version = "0.3.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "59e399c068f43a5d116fedaf73b203fa4f9c519f17e2b34f63221d3792f81446"
+checksum = "b79eabcd964882a646b3584543ccabeae7869e9ac32a46f6f22b7a5bd405308b"
dependencies = [
+ "deranged",
"itoa 1.0.9",
"serde",
"time-core",
@@ -7634,9 +7845,9 @@ checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb"
[[package]]
name = "time-macros"
-version = "0.2.10"
+version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "96ba15a897f3c86766b757e5ac7221554c6750054d74d5b28844fce5fb36a6c4"
+checksum = "eb71511c991639bb078fd5bf97757e03914361c48100d52878b8e52b46fb92cd"
dependencies = [
"time-core",
]
@@ -7732,7 +7943,7 @@ checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.27",
+ "syn 2.0.28",
]
[[package]]
@@ -7817,11 +8028,26 @@ dependencies = [
"serde",
]
+[[package]]
+name = "toml"
+version = "0.7.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c17e963a819c331dcacd7ab957d80bc2b9a9c1e71c804826d2f283dd65306542"
+dependencies = [
+ "serde",
+ "serde_spanned",
+ "toml_datetime",
+ "toml_edit",
+]
+
[[package]]
name = "toml_datetime"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b"
+dependencies = [
+ "serde",
+]
[[package]]
name = "toml_edit"
@@ -7830,6 +8056,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8123f27e969974a3dfba720fdb560be359f57b44302d280ba72e76a74480e8a"
dependencies = [
"indexmap 2.0.0",
+ "serde",
+ "serde_spanned",
"toml_datetime",
"winnow",
]
@@ -7937,7 +8165,7 @@ checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.27",
+ "syn 2.0.28",
]
[[package]]
@@ -8032,9 +8260,8 @@ dependencies = [
[[package]]
name = "tree-sitter-cpp"
-version = "0.20.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0dbedbf4066bfab725b3f9e2a21530507419a7d2f98621d3c13213502b734ec0"
+version = "0.20.0"
+source = "git+https://github.com/tree-sitter/tree-sitter-cpp?rev=f44509141e7e483323d2ec178f2d2e6c0fc041c1#f44509141e7e483323d2ec178f2d2e6c0fc041c1"
dependencies = [
"cc",
"tree-sitter",
@@ -8052,17 +8279,7 @@ dependencies = [
[[package]]
name = "tree-sitter-elixir"
version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9a9916f3e1c80b3c8aab8582604e97e8720cb9b893489b347cf999f80f9d469e"
-dependencies = [
- "cc",
- "tree-sitter",
-]
-
-[[package]]
-name = "tree-sitter-elixir"
-version = "0.1.0"
-source = "git+https://github.com/elixir-lang/tree-sitter-elixir?rev=4ba9dab6e2602960d95b2b625f3386c27e08084e#4ba9dab6e2602960d95b2b625f3386c27e08084e"
+source = "git+https://github.com/elixir-lang/tree-sitter-elixir?rev=a2861e88a730287a60c11ea9299c033c7d076e30#a2861e88a730287a60c11ea9299c033c7d076e30"
dependencies = [
"cc",
"tree-sitter",
@@ -8162,6 +8379,15 @@ dependencies = [
"tree-sitter",
]
+[[package]]
+name = "tree-sitter-nix"
+version = "0.0.1"
+source = "git+https://github.com/nix-community/tree-sitter-nix?rev=66e3e9ce9180ae08fc57372061006ef83f0abde7#66e3e9ce9180ae08fc57372061006ef83f0abde7"
+dependencies = [
+ "cc",
+ "tree-sitter",
+]
+
[[package]]
name = "tree-sitter-php"
version = "0.19.1"
@@ -109,9 +109,9 @@ pretty_assertions = "1.3.0"
tree-sitter-bash = { git = "https://github.com/tree-sitter/tree-sitter-bash", rev = "1b0321ee85701d5036c334a6f04761cdc672e64c" }
tree-sitter-c = "0.20.1"
-tree-sitter-cpp = "0.20.0"
+tree-sitter-cpp = { git = "https://github.com/tree-sitter/tree-sitter-cpp", rev="f44509141e7e483323d2ec178f2d2e6c0fc041c1" }
tree-sitter-css = { git = "https://github.com/tree-sitter/tree-sitter-css", rev = "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" }
-tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "4ba9dab6e2602960d95b2b625f3386c27e08084e" }
+tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "a2861e88a730287a60c11ea9299c033c7d076e30" }
tree-sitter-elm = { git = "https://github.com/elm-tooling/tree-sitter-elm", rev = "692c50c0b961364c40299e73c1306aecb5d20f40"}
tree-sitter-embedded-template = "0.20.0"
tree-sitter-glsl = { git = "https://github.com/theHamsta/tree-sitter-glsl", rev = "2a56fb7bc8bb03a1892b4741279dd0a8758b7fb3" }
@@ -131,6 +131,7 @@ tree-sitter-svelte = { git = "https://github.com/Himujjal/tree-sitter-svelte", r
tree-sitter-racket = { git = "https://github.com/zed-industries/tree-sitter-racket", rev = "eb010cf2c674c6fd9a6316a84e28ef90190fe51a"}
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "f545a41f57502e1b5ddf2a6668896c1b0620f930"}
tree-sitter-lua = "0.0.14"
+tree-sitter-nix = { git = "https://github.com/nix-community/tree-sitter-nix", rev = "66e3e9ce9180ae08fc57372061006ef83f0abde7" }
[patch.crates-io]
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "1c65ca24bc9a734ab70115188f465e12eecf224e" }
@@ -1,6 +1,6 @@
# syntax = docker/dockerfile:1.2
-FROM rust:1.70-bullseye as builder
+FROM rust:1.71-bullseye as builder
WORKDIR app
COPY . .
@@ -1,159 +1,179 @@
{
- "suffixes": {
- "aac": "audio",
- "bash": "terminal",
- "bmp": "image",
- "c": "code",
- "conf": "settings",
- "cpp": "code",
- "cc": "code",
- "css": "code",
- "doc": "document",
- "docx": "document",
- "eslintrc": "eslint",
- "eslintrc.js": "eslint",
- "eslintrc.json": "eslint",
- "flac": "audio",
- "fish": "terminal",
- "gitattributes": "vcs",
- "gitignore": "vcs",
- "gitmodules": "vcs",
- "gif": "image",
- "go": "code",
- "h": "code",
- "handlebars": "code",
- "hbs": "template",
- "htm": "template",
- "html": "template",
- "svelte": "template",
- "hpp": "code",
- "ico": "image",
- "ini": "settings",
- "java": "code",
- "jpeg": "image",
- "jpg": "image",
- "js": "code",
- "json": "storage",
- "lock": "lock",
- "log": "log",
- "md": "document",
- "mdx": "document",
- "mp3": "audio",
- "mp4": "video",
- "ods": "document",
- "odp": "document",
- "odt": "document",
- "ogg": "video",
- "pdf": "document",
- "php": "code",
- "png": "image",
- "ppt": "document",
- "pptx": "document",
- "prettierrc": "prettier",
- "prettierignore": "prettier",
- "ps1": "terminal",
- "psd": "image",
- "py": "code",
- "rb": "code",
- "rkt": "code",
- "rs": "rust",
- "rtf": "document",
- "scm": "code",
- "sh": "terminal",
- "bashrc": "terminal",
- "bash_profile": "terminal",
- "bash_aliases": "terminal",
- "bash_logout": "terminal",
- "profile": "terminal",
- "zshrc": "terminal",
- "zshenv": "terminal",
- "zsh_profile": "terminal",
- "zsh_aliases": "terminal",
- "zsh_histfile": "terminal",
- "zlogin": "terminal",
- "sql": "code",
- "svg": "image",
- "swift": "code",
- "tiff": "image",
- "toml": "toml",
- "ts": "typescript",
- "tsx": "code",
- "txt": "document",
- "wav": "audio",
- "webm": "video",
- "xls": "document",
- "xlsx": "document",
- "xml": "template",
- "yaml": "settings",
- "yml": "settings",
- "zsh": "terminal"
- },
- "types": {
- "audio": {
- "icon": "icons/file_icons/audio.svg"
- },
- "code": {
- "icon": "icons/file_icons/code.svg"
- },
- "collapsed_chevron": {
- "icon": "icons/file_icons/chevron_right.svg"
- },
- "collapsed_folder": {
- "icon": "icons/file_icons/folder.svg"
- },
- "default": {
- "icon": "icons/file_icons/file.svg"
- },
- "document": {
- "icon": "icons/file_icons/book.svg"
- },
- "eslint": {
- "icon": "icons/file_icons/eslint.svg"
- },
- "expanded_chevron": {
- "icon": "icons/file_icons/chevron_down.svg"
- },
- "expanded_folder": {
- "icon": "icons/file_icons/folder_open.svg"
- },
- "image": {
- "icon": "icons/file_icons/image.svg"
- },
- "lock": {
- "icon": "icons/file_icons/lock.svg"
- },
- "log": {
- "icon": "icons/file_icons/info.svg"
- },
- "prettier": {
- "icon": "icons/file_icons/prettier.svg"
- },
- "rust": {
- "icon": "icons/file_icons/rust.svg"
- },
- "settings": {
- "icon": "icons/file_icons/settings.svg"
- },
- "storage": {
- "icon": "icons/file_icons/database.svg"
- },
- "template": {
- "icon": "icons/file_icons/html.svg"
- },
- "terminal": {
- "icon": "icons/file_icons/terminal.svg"
- },
- "toml": {
- "icon": "icons/file_icons/toml.svg"
- },
- "typescript": {
- "icon": "icons/file_icons/typescript.svg"
- },
- "vcs": {
- "icon": "icons/file_icons/git.svg"
- },
- "video": {
- "icon": "icons/file_icons/video.svg"
+ "suffixes": {
+ "aac": "audio",
+ "accdb": "storage",
+ "bak": "backup",
+ "bash": "terminal",
+ "bash_aliases": "terminal",
+ "bash_logout": "terminal",
+ "bash_profile": "terminal",
+ "bashrc": "terminal",
+ "bmp": "image",
+ "c": "code",
+ "cc": "code",
+ "conf": "settings",
+ "cpp": "code",
+ "css": "code",
+ "csv": "storage",
+ "dat": "storage",
+ "db": "storage",
+ "dbf": "storage",
+ "dll": "storage",
+ "doc": "document",
+ "docx": "document",
+ "eslintrc": "eslint",
+ "eslintrc.js": "eslint",
+ "eslintrc.json": "eslint",
+ "fmp": "storage",
+ "fp7": "storage",
+ "flac": "audio",
+ "fish": "terminal",
+ "frm": "storage",
+ "gdb": "storage",
+ "gitattributes": "vcs",
+ "gitignore": "vcs",
+ "gitmodules": "vcs",
+ "gif": "image",
+ "go": "code",
+ "h": "code",
+ "handlebars": "code",
+ "hbs": "template",
+ "htm": "template",
+ "html": "template",
+ "ib": "storage",
+ "ico": "image",
+ "ini": "settings",
+ "java": "code",
+ "jpeg": "image",
+ "jpg": "image",
+ "js": "code",
+ "json": "storage",
+ "ldf": "storage",
+ "lock": "lock",
+ "log": "log",
+ "mdb": "storage",
+ "md": "document",
+ "mdf": "storage",
+ "mdx": "document",
+ "mp3": "audio",
+ "mp4": "video",
+ "myd": "storage",
+ "myi": "storage",
+ "ods": "document",
+ "odp": "document",
+ "odt": "document",
+ "ogg": "video",
+ "pdb": "storage",
+ "pdf": "document",
+ "php": "code",
+ "png": "image",
+ "ppt": "document",
+ "pptx": "document",
+ "prettierignore": "prettier",
+ "prettierrc": "prettier",
+ "profile": "terminal",
+ "ps1": "terminal",
+ "psd": "image",
+ "py": "code",
+ "rb": "code",
+ "rkt": "code",
+ "rs": "rust",
+ "rtf": "document",
+ "sav": "storage",
+ "scm": "code",
+ "sh": "terminal",
+ "sqlite": "storage",
+ "sdf": "storage",
+ "svelte": "template",
+ "svg": "image",
+ "swift": "code",
+ "ts": "typescript",
+ "tsx": "code",
+ "tiff": "image",
+ "toml": "toml",
+ "tsv": "storage",
+ "txt": "document",
+ "wav": "audio",
+ "webm": "video",
+ "xls": "document",
+ "xlsx": "document",
+ "xml": "template",
+ "yaml": "settings",
+ "yml": "settings",
+ "zlogin": "terminal",
+ "zsh": "terminal",
+ "zsh_aliases": "terminal",
+ "zshenv": "terminal",
+ "zsh_histfile": "terminal",
+ "zsh_profile": "terminal",
+ "zshrc": "terminal"
+ },
+ "types": {
+ "audio": {
+ "icon": "icons/file_icons/audio.svg"
+ },
+ "code": {
+ "icon": "icons/file_icons/code.svg"
+ },
+ "collapsed_chevron": {
+ "icon": "icons/file_icons/chevron_right.svg"
+ },
+ "collapsed_folder": {
+ "icon": "icons/file_icons/folder.svg"
+ },
+ "default": {
+ "icon": "icons/file_icons/file.svg"
+ },
+ "document": {
+ "icon": "icons/file_icons/book.svg"
+ },
+ "eslint": {
+ "icon": "icons/file_icons/eslint.svg"
+ },
+ "expanded_chevron": {
+ "icon": "icons/file_icons/chevron_down.svg"
+ },
+ "expanded_folder": {
+ "icon": "icons/file_icons/folder_open.svg"
+ },
+ "image": {
+ "icon": "icons/file_icons/image.svg"
+ },
+ "lock": {
+ "icon": "icons/file_icons/lock.svg"
+ },
+ "log": {
+ "icon": "icons/file_icons/info.svg"
+ },
+ "prettier": {
+ "icon": "icons/file_icons/prettier.svg"
+ },
+ "rust": {
+ "icon": "icons/file_icons/rust.svg"
+ },
+ "settings": {
+ "icon": "icons/file_icons/settings.svg"
+ },
+ "storage": {
+ "icon": "icons/file_icons/database.svg"
+ },
+ "template": {
+ "icon": "icons/file_icons/html.svg"
+ },
+ "terminal": {
+ "icon": "icons/file_icons/terminal.svg"
+ },
+ "toml": {
+ "icon": "icons/file_icons/toml.svg"
+ },
+ "typescript": {
+ "icon": "icons/file_icons/typescript.svg"
+ },
+ "vcs": {
+ "icon": "icons/file_icons/git.svg"
+ },
+ "video": {
+ "icon": "icons/file_icons/video.svg"
+ }
}
- }
}
@@ -22,6 +22,7 @@
"alt-cmd-right": "pane::ActivateNextItem",
"cmd-w": "pane::CloseActiveItem",
"alt-cmd-t": "pane::CloseInactiveItems",
+ "ctrl-alt-cmd-w": "workspace::CloseInactiveTabsAndPanes",
"cmd-k u": "pane::CloseCleanItems",
"cmd-k cmd-w": "pane::CloseAllItems",
"cmd-shift-w": "workspace::CloseWindow",
@@ -226,6 +227,13 @@
"alt-enter": "search::SelectAllMatches"
}
},
+ {
+ "context": "BufferSearchBar > Editor",
+ "bindings": {
+ "up": "search::PreviousHistoryQuery",
+ "down": "search::NextHistoryQuery"
+ }
+ },
{
"context": "ProjectSearchBar",
"bindings": {
@@ -233,6 +241,13 @@
"alt-tab": "project_search::CycleMode",
}
},
+ {
+ "context": "ProjectSearchBar > Editor",
+ "bindings": {
+ "up": "search::PreviousHistoryQuery",
+ "down": "search::NextHistoryQuery"
+ }
+ },
{
"context": "ProjectSearchView",
"bindings": {
@@ -1637,6 +1637,7 @@ impl ConversationEditor {
let mut editor = Editor::for_buffer(conversation.read(cx).buffer.clone(), None, cx);
editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx);
editor.set_show_gutter(false, cx);
+ editor.set_show_wrap_guides(false, cx);
editor
});
@@ -183,7 +183,7 @@ async fn apply_server_operation(
let username;
{
let mut plan = plan.lock();
- let mut user = plan.user(user_id);
+ let user = plan.user(user_id);
if user.online {
return false;
}
@@ -374,7 +374,7 @@ impl CollabTitlebarItem {
"Share Feedback",
feedback::feedback_editor::GiveFeedback,
),
- ContextMenuItem::action("Sign out", SignOut),
+ ContextMenuItem::action("Sign Out", SignOut),
]
} else {
vec![
@@ -338,9 +338,9 @@ impl Copilot {
let (server, fake_server) =
LanguageServer::fake("copilot".into(), Default::default(), cx.to_async());
let http = util::http::FakeHttpClient::create(|_| async { unreachable!() });
- let this = cx.add_model(|cx| Self {
+ let this = cx.add_model(|_| Self {
http: http.clone(),
- node_runtime: NodeRuntime::instance(http, cx.background().clone()),
+ node_runtime: NodeRuntime::instance(http),
server: CopilotServer::Running(RunningCopilotServer {
lsp: Arc::new(server),
sign_in_status: SignInStatus::Authorized,
@@ -397,7 +397,7 @@ impl InlayMap {
buffer_snapshot: MultiBufferSnapshot,
mut buffer_edits: Vec<text::Edit<usize>>,
) -> (InlaySnapshot, Vec<InlayEdit>) {
- let mut snapshot = &mut self.snapshot;
+ let snapshot = &mut self.snapshot;
if buffer_edits.is_empty() {
if snapshot.buffer.trailing_excerpt_update_count()
@@ -572,7 +572,6 @@ impl InlayMap {
})
.collect();
let buffer_snapshot = snapshot.buffer.clone();
- drop(snapshot);
let (snapshot, edits) = self.sync(buffer_snapshot, buffer_edits);
(snapshot, edits)
}
@@ -635,7 +634,6 @@ impl InlayMap {
}
log::info!("removing inlays: {:?}", to_remove);
- drop(snapshot);
let (snapshot, edits) = self.splice(to_remove, to_insert);
(snapshot, edits)
}
@@ -543,6 +543,7 @@ pub struct Editor {
show_local_selections: bool,
mode: EditorMode,
show_gutter: bool,
+ show_wrap_guides: Option<bool>,
placeholder_text: Option<Arc<str>>,
highlighted_rows: Option<Range<u32>>,
#[allow(clippy::type_complexity)]
@@ -1375,6 +1376,7 @@ impl Editor {
show_local_selections: true,
mode,
show_gutter: mode == EditorMode::Full,
+ show_wrap_guides: None,
placeholder_text: None,
highlighted_rows: None,
background_highlights: Default::default(),
@@ -1537,7 +1539,7 @@ impl Editor {
self.collapse_matches = collapse_matches;
}
- fn range_for_match<T: std::marker::Copy>(&self, range: &Range<T>) -> Range<T> {
+ pub fn range_for_match<T: std::marker::Copy>(&self, range: &Range<T>) -> Range<T> {
if self.collapse_matches {
return range.start..range.start;
}
@@ -4219,7 +4221,7 @@ impl Editor {
_: &SortLinesCaseSensitive,
cx: &mut ViewContext<Self>,
) {
- self.manipulate_lines(cx, |text| text.sort())
+ self.manipulate_lines(cx, |lines| lines.sort())
}
pub fn sort_lines_case_insensitive(
@@ -4227,7 +4229,7 @@ impl Editor {
_: &SortLinesCaseInsensitive,
cx: &mut ViewContext<Self>,
) {
- self.manipulate_lines(cx, |text| text.sort_by_key(|line| line.to_lowercase()))
+ self.manipulate_lines(cx, |lines| lines.sort_by_key(|line| line.to_lowercase()))
}
pub fn reverse_lines(&mut self, _: &ReverseLines, cx: &mut ViewContext<Self>) {
@@ -4265,19 +4267,19 @@ impl Editor {
let text = buffer
.text_for_range(start_point..end_point)
.collect::<String>();
- let mut text = text.split("\n").collect_vec();
+ let mut lines = text.split("\n").collect_vec();
- let text_len = text.len();
- callback(&mut text);
+ let lines_len = lines.len();
+ callback(&mut lines);
// This is a current limitation with selections.
// If we wanted to support removing or adding lines, we'd need to fix the logic associated with selections.
debug_assert!(
- text.len() == text_len,
+ lines.len() == lines_len,
"callback should not change the number of lines"
);
- edits.push((start_point..end_point, text.join("\n")));
+ edits.push((start_point..end_point, lines.join("\n")));
let start_anchor = buffer.anchor_after(start_point);
let end_anchor = buffer.anchor_before(end_point);
@@ -6374,8 +6376,8 @@ impl Editor {
.range
.to_offset(definition.target.buffer.read(cx));
+ let range = self.range_for_match(&range);
if Some(&definition.target.buffer) == self.buffer.read(cx).as_singleton().as_ref() {
- let range = self.range_for_match(&range);
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges([range]);
});
@@ -6392,7 +6394,6 @@ impl Editor {
// When selecting a definition in a different buffer, disable the nav history
// to avoid creating a history entry at the previous cursor location.
pane.update(cx, |pane, _| pane.disable_history());
- let range = target_editor.range_for_match(&range);
target_editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges([range]);
});
@@ -7188,6 +7189,10 @@ impl Editor {
pub fn wrap_guides(&self, cx: &AppContext) -> SmallVec<[(usize, bool); 2]> {
let mut wrap_guides = smallvec::smallvec![];
+ if self.show_wrap_guides == Some(false) {
+ return wrap_guides;
+ }
+
let settings = self.buffer.read(cx).settings_at(0, cx);
if settings.show_wrap_guides {
if let SoftWrap::Column(soft_wrap) = self.soft_wrap_mode(cx) {
@@ -7245,6 +7250,11 @@ impl Editor {
cx.notify();
}
+ pub fn set_show_wrap_guides(&mut self, show_gutter: bool, cx: &mut ViewContext<Self>) {
+ self.show_wrap_guides = Some(show_gutter);
+ cx.notify();
+ }
+
pub fn reveal_in_finder(&mut self, _: &RevealInFinder, cx: &mut ViewContext<Self>) {
if let Some(buffer) = self.buffer().read(cx).as_singleton() {
if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) {
@@ -546,8 +546,20 @@ impl EditorElement {
});
}
+ let scroll_left =
+ layout.position_map.snapshot.scroll_position().x() * layout.position_map.em_width;
+
for (wrap_position, active) in layout.wrap_guides.iter() {
- let x = text_bounds.origin_x() + wrap_position + layout.position_map.em_width / 2.;
+ let x =
+ (text_bounds.origin_x() + wrap_position + layout.position_map.em_width / 2.)
+ - scroll_left;
+
+ if x < text_bounds.origin_x()
+ || (layout.show_scrollbars && x > self.scrollbar_left(&bounds))
+ {
+ continue;
+ }
+
let color = if *active {
self.style.active_wrap_guide
} else {
@@ -1036,6 +1048,10 @@ impl EditorElement {
scene.pop_layer();
}
+ fn scrollbar_left(&self, bounds: &RectF) -> f32 {
+ bounds.max_x() - self.style.theme.scrollbar.width
+ }
+
fn paint_scrollbar(
&mut self,
scene: &mut SceneBuilder,
@@ -1054,7 +1070,7 @@ impl EditorElement {
let top = bounds.min_y();
let bottom = bounds.max_y();
let right = bounds.max_x();
- let left = right - style.width;
+ let left = self.scrollbar_left(&bounds);
let row_range = &layout.scrollbar_row_range;
let max_row = layout.max_row as f32 + (row_range.end - row_range.start);
@@ -571,7 +571,6 @@ fn new_update_task(
if let Some(buffer) =
refresh_multi_buffer.buffer(pending_refresh_query.buffer_id)
{
- drop(refresh_multi_buffer);
editor.inlay_hint_cache.update_tasks.insert(
pending_refresh_query.excerpt_id,
UpdateTask {
@@ -1128,6 +1128,12 @@ impl AppContext {
self.keystroke_matcher.clear_bindings();
}
+ pub fn binding_for_action(&self, action: &dyn Action) -> Option<&Binding> {
+ self.keystroke_matcher
+ .bindings_for_action(action.id())
+ .find(|binding| binding.action().eq(action))
+ }
+
pub fn default_global<T: 'static + Default>(&mut self) -> &T {
let type_id = TypeId::of::<T>();
self.update(|this| {
@@ -844,8 +844,8 @@ impl LanguageRegistry {
}
}
}
- Err(err) => {
- log::error!("failed to load language {name} - {err}");
+ Err(e) => {
+ log::error!("failed to load language {name}:\n{:?}", e);
let mut state = this.state.write();
state.mark_language_loaded(id);
if let Some(mut txs) = state.loading_languages.remove(&id) {
@@ -853,7 +853,7 @@ impl LanguageRegistry {
let _ = tx.send(Err(anyhow!(
"failed to load language {}: {}",
name,
- err
+ e
)));
}
}
@@ -1188,25 +1188,39 @@ impl Language {
pub fn with_queries(mut self, queries: LanguageQueries) -> Result<Self> {
if let Some(query) = queries.highlights {
- self = self.with_highlights_query(query.as_ref())?;
+ self = self
+ .with_highlights_query(query.as_ref())
+ .context("Error loading highlights query")?;
}
if let Some(query) = queries.brackets {
- self = self.with_brackets_query(query.as_ref())?;
+ self = self
+ .with_brackets_query(query.as_ref())
+ .context("Error loading brackets query")?;
}
if let Some(query) = queries.indents {
- self = self.with_indents_query(query.as_ref())?;
+ self = self
+ .with_indents_query(query.as_ref())
+ .context("Error loading indents query")?;
}
if let Some(query) = queries.outline {
- self = self.with_outline_query(query.as_ref())?;
+ self = self
+ .with_outline_query(query.as_ref())
+ .context("Error loading outline query")?;
}
if let Some(query) = queries.embedding {
- self = self.with_embedding_query(query.as_ref())?;
+ self = self
+ .with_embedding_query(query.as_ref())
+ .context("Error loading embedding query")?;
}
if let Some(query) = queries.injections {
- self = self.with_injection_query(query.as_ref())?;
+ self = self
+ .with_injection_query(query.as_ref())
+ .context("Error loading injection query")?;
}
if let Some(query) = queries.overrides {
- self = self.with_override_query(query.as_ref())?;
+ self = self
+ .with_override_query(query.as_ref())
+ .context("Error loading override query")?;
}
Ok(self)
}
@@ -58,11 +58,14 @@ fn build_bridge(swift_target: &SwiftTarget) {
"cargo:rerun-if-changed={}/Package.resolved",
SWIFT_PACKAGE_NAME
);
+
let swift_package_root = swift_package_root();
+ let swift_target_folder = swift_target_folder();
if !Command::new("swift")
.arg("build")
.args(["--configuration", &env::var("PROFILE").unwrap()])
.args(["--triple", &swift_target.target.triple])
+ .args(["--build-path".into(), swift_target_folder])
.current_dir(&swift_package_root)
.status()
.unwrap()
@@ -128,6 +131,12 @@ fn swift_package_root() -> PathBuf {
env::current_dir().unwrap().join(SWIFT_PACKAGE_NAME)
}
+fn swift_target_folder() -> PathBuf {
+ env::current_dir()
+ .unwrap()
+ .join(format!("../../target/{SWIFT_PACKAGE_NAME}"))
+}
+
fn copy_dir(source: &Path, destination: &Path) {
assert!(
Command::new("rm")
@@ -155,8 +164,7 @@ fn copy_dir(source: &Path, destination: &Path) {
impl SwiftTarget {
fn out_dir_path(&self) -> PathBuf {
- swift_package_root()
- .join(".build")
+ swift_target_folder()
.join(&self.target.unversioned_triple)
.join(env::var("PROFILE").unwrap())
}
@@ -1,9 +1,6 @@
use anyhow::{anyhow, bail, Context, Result};
use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
-use futures::lock::Mutex;
-use futures::{future::Shared, FutureExt};
-use gpui::{executor::Background, Task};
use serde::Deserialize;
use smol::{fs, io::BufReader, process::Command};
use std::process::{Output, Stdio};
@@ -33,20 +30,12 @@ pub struct NpmInfoDistTags {
pub struct NodeRuntime {
http: Arc<dyn HttpClient>,
- background: Arc<Background>,
- installation_path: Mutex<Option<Shared<Task<Result<PathBuf, Arc<anyhow::Error>>>>>>,
}
impl NodeRuntime {
- pub fn instance(http: Arc<dyn HttpClient>, background: Arc<Background>) -> Arc<NodeRuntime> {
+ pub fn instance(http: Arc<dyn HttpClient>) -> Arc<NodeRuntime> {
RUNTIME_INSTANCE
- .get_or_init(|| {
- Arc::new(NodeRuntime {
- http,
- background,
- installation_path: Mutex::new(None),
- })
- })
+ .get_or_init(|| Arc::new(NodeRuntime { http }))
.clone()
}
@@ -61,7 +50,9 @@ impl NodeRuntime {
subcommand: &str,
args: &[&str],
) -> Result<Output> {
- let attempt = |installation_path: PathBuf| async move {
+ let attempt = || async move {
+ let installation_path = self.install_if_needed().await?;
+
let mut env_path = installation_path.join("bin").into_os_string();
if let Some(existing_path) = std::env::var_os("PATH") {
if !existing_path.is_empty() {
@@ -92,10 +83,9 @@ impl NodeRuntime {
command.output().await.map_err(|e| anyhow!("{e}"))
};
- let installation_path = self.install_if_needed().await?;
- let mut output = attempt(installation_path.clone()).await;
+ let mut output = attempt().await;
if output.is_err() {
- output = attempt(installation_path).await;
+ output = attempt().await;
if output.is_err() {
return Err(anyhow!(
"failed to launch npm subcommand {subcommand} subcommand"
@@ -167,23 +157,8 @@ impl NodeRuntime {
}
async fn install_if_needed(&self) -> Result<PathBuf> {
- let task = self
- .installation_path
- .lock()
- .await
- .get_or_insert_with(|| {
- let http = self.http.clone();
- self.background
- .spawn(async move { Self::install(http).await.map_err(Arc::new) })
- .shared()
- })
- .clone();
-
- task.await.map_err(|e| anyhow!("{}", e))
- }
+ log::info!("Node runtime install_if_needed");
- async fn install(http: Arc<dyn HttpClient>) -> Result<PathBuf> {
- log::info!("installing Node runtime");
let arch = match consts::ARCH {
"x86_64" => "x64",
"aarch64" => "arm64",
@@ -214,7 +189,8 @@ impl NodeRuntime {
let file_name = format!("node-{VERSION}-darwin-{arch}.tar.gz");
let url = format!("https://nodejs.org/dist/{VERSION}/{file_name}");
- let mut response = http
+ let mut response = self
+ .http
.get(&url, Default::default(), true)
.await
.context("error downloading Node binary tarball")?;
@@ -1,7 +1,6 @@
-use crate::{worktree::WorktreeHandle, Event, *};
+use crate::{search::PathMatcher, worktree::WorktreeHandle, Event, *};
use fs::{FakeFs, LineEnding, RealFs};
use futures::{future, StreamExt};
-use globset::Glob;
use gpui::{executor::Deterministic, test::subscribe, AppContext};
use language::{
language_settings::{AllLanguageSettings, LanguageSettingsContent},
@@ -3641,7 +3640,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query,
false,
true,
- vec![Glob::new("*.odd").unwrap().compile_matcher()],
+ vec![PathMatcher::new("*.odd").unwrap()],
Vec::new()
),
cx
@@ -3659,7 +3658,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query,
false,
true,
- vec![Glob::new("*.rs").unwrap().compile_matcher()],
+ vec![PathMatcher::new("*.rs").unwrap()],
Vec::new()
),
cx
@@ -3681,8 +3680,8 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
false,
true,
vec![
- Glob::new("*.ts").unwrap().compile_matcher(),
- Glob::new("*.odd").unwrap().compile_matcher(),
+ PathMatcher::new("*.ts").unwrap(),
+ PathMatcher::new("*.odd").unwrap(),
],
Vec::new()
),
@@ -3705,9 +3704,9 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
false,
true,
vec![
- Glob::new("*.rs").unwrap().compile_matcher(),
- Glob::new("*.ts").unwrap().compile_matcher(),
- Glob::new("*.odd").unwrap().compile_matcher(),
+ PathMatcher::new("*.rs").unwrap(),
+ PathMatcher::new("*.ts").unwrap(),
+ PathMatcher::new("*.odd").unwrap(),
],
Vec::new()
),
@@ -3752,7 +3751,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
false,
true,
Vec::new(),
- vec![Glob::new("*.odd").unwrap().compile_matcher()],
+ vec![PathMatcher::new("*.odd").unwrap()],
),
cx
)
@@ -3775,7 +3774,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
false,
true,
Vec::new(),
- vec![Glob::new("*.rs").unwrap().compile_matcher()],
+ vec![PathMatcher::new("*.rs").unwrap()],
),
cx
)
@@ -3797,8 +3796,8 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
true,
Vec::new(),
vec![
- Glob::new("*.ts").unwrap().compile_matcher(),
- Glob::new("*.odd").unwrap().compile_matcher(),
+ PathMatcher::new("*.ts").unwrap(),
+ PathMatcher::new("*.odd").unwrap(),
],
),
cx
@@ -3821,9 +3820,9 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
true,
Vec::new(),
vec![
- Glob::new("*.rs").unwrap().compile_matcher(),
- Glob::new("*.ts").unwrap().compile_matcher(),
- Glob::new("*.odd").unwrap().compile_matcher(),
+ PathMatcher::new("*.rs").unwrap(),
+ PathMatcher::new("*.ts").unwrap(),
+ PathMatcher::new("*.odd").unwrap(),
],
),
cx
@@ -3860,8 +3859,8 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query,
false,
true,
- vec![Glob::new("*.odd").unwrap().compile_matcher()],
- vec![Glob::new("*.odd").unwrap().compile_matcher()],
+ vec![PathMatcher::new("*.odd").unwrap()],
+ vec![PathMatcher::new("*.odd").unwrap()],
),
cx
)
@@ -3878,8 +3877,8 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query,
false,
true,
- vec![Glob::new("*.ts").unwrap().compile_matcher()],
- vec![Glob::new("*.ts").unwrap().compile_matcher()],
+ vec![PathMatcher::new("*.ts").unwrap()],
+ vec![PathMatcher::new("*.ts").unwrap()],
),
cx
)
@@ -3897,12 +3896,12 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
false,
true,
vec![
- Glob::new("*.ts").unwrap().compile_matcher(),
- Glob::new("*.odd").unwrap().compile_matcher()
+ PathMatcher::new("*.ts").unwrap(),
+ PathMatcher::new("*.odd").unwrap()
],
vec![
- Glob::new("*.ts").unwrap().compile_matcher(),
- Glob::new("*.odd").unwrap().compile_matcher()
+ PathMatcher::new("*.ts").unwrap(),
+ PathMatcher::new("*.odd").unwrap()
],
),
cx
@@ -3921,12 +3920,12 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
false,
true,
vec![
- Glob::new("*.ts").unwrap().compile_matcher(),
- Glob::new("*.odd").unwrap().compile_matcher()
+ PathMatcher::new("*.ts").unwrap(),
+ PathMatcher::new("*.odd").unwrap()
],
vec![
- Glob::new("*.rs").unwrap().compile_matcher(),
- Glob::new("*.odd").unwrap().compile_matcher()
+ PathMatcher::new("*.rs").unwrap(),
+ PathMatcher::new("*.odd").unwrap()
],
),
cx
@@ -1,5 +1,5 @@
use aho_corasick::{AhoCorasick, AhoCorasickBuilder};
-use anyhow::Result;
+use anyhow::{Context, Result};
use client::proto;
use globset::{Glob, GlobMatcher};
use itertools::Itertools;
@@ -9,7 +9,7 @@ use smol::future::yield_now;
use std::{
io::{BufRead, BufReader, Read},
ops::Range,
- path::Path,
+ path::{Path, PathBuf},
sync::Arc,
};
@@ -20,8 +20,8 @@ pub enum SearchQuery {
query: Arc<str>,
whole_word: bool,
case_sensitive: bool,
- files_to_include: Vec<GlobMatcher>,
- files_to_exclude: Vec<GlobMatcher>,
+ files_to_include: Vec<PathMatcher>,
+ files_to_exclude: Vec<PathMatcher>,
},
Regex {
regex: Regex,
@@ -29,18 +29,43 @@ pub enum SearchQuery {
multiline: bool,
whole_word: bool,
case_sensitive: bool,
- files_to_include: Vec<GlobMatcher>,
- files_to_exclude: Vec<GlobMatcher>,
+ files_to_include: Vec<PathMatcher>,
+ files_to_exclude: Vec<PathMatcher>,
},
}
+#[derive(Clone, Debug)]
+pub struct PathMatcher {
+ maybe_path: PathBuf,
+ glob: GlobMatcher,
+}
+
+impl std::fmt::Display for PathMatcher {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ self.maybe_path.to_string_lossy().fmt(f)
+ }
+}
+
+impl PathMatcher {
+ pub fn new(maybe_glob: &str) -> Result<Self, globset::Error> {
+ Ok(PathMatcher {
+ glob: Glob::new(&maybe_glob)?.compile_matcher(),
+ maybe_path: PathBuf::from(maybe_glob),
+ })
+ }
+
+ pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
+ other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other)
+ }
+}
+
impl SearchQuery {
pub fn text(
query: impl ToString,
whole_word: bool,
case_sensitive: bool,
- files_to_include: Vec<GlobMatcher>,
- files_to_exclude: Vec<GlobMatcher>,
+ files_to_include: Vec<PathMatcher>,
+ files_to_exclude: Vec<PathMatcher>,
) -> Self {
let query = query.to_string();
let search = AhoCorasickBuilder::new()
@@ -61,8 +86,8 @@ impl SearchQuery {
query: impl ToString,
whole_word: bool,
case_sensitive: bool,
- files_to_include: Vec<GlobMatcher>,
- files_to_exclude: Vec<GlobMatcher>,
+ files_to_include: Vec<PathMatcher>,
+ files_to_exclude: Vec<PathMatcher>,
) -> Result<Self> {
let mut query = query.to_string();
let initial_query = Arc::from(query.as_str());
@@ -96,16 +121,16 @@ impl SearchQuery {
message.query,
message.whole_word,
message.case_sensitive,
- deserialize_globs(&message.files_to_include)?,
- deserialize_globs(&message.files_to_exclude)?,
+ deserialize_path_matches(&message.files_to_include)?,
+ deserialize_path_matches(&message.files_to_exclude)?,
)
} else {
Ok(Self::text(
message.query,
message.whole_word,
message.case_sensitive,
- deserialize_globs(&message.files_to_include)?,
- deserialize_globs(&message.files_to_exclude)?,
+ deserialize_path_matches(&message.files_to_include)?,
+ deserialize_path_matches(&message.files_to_exclude)?,
))
}
}
@@ -120,12 +145,12 @@ impl SearchQuery {
files_to_include: self
.files_to_include()
.iter()
- .map(|g| g.glob().to_string())
+ .map(|matcher| matcher.to_string())
.join(","),
files_to_exclude: self
.files_to_exclude()
.iter()
- .map(|g| g.glob().to_string())
+ .map(|matcher| matcher.to_string())
.join(","),
}
}
@@ -266,7 +291,7 @@ impl SearchQuery {
matches!(self, Self::Regex { .. })
}
- pub fn files_to_include(&self) -> &[GlobMatcher] {
+ pub fn files_to_include(&self) -> &[PathMatcher] {
match self {
Self::Text {
files_to_include, ..
@@ -277,7 +302,7 @@ impl SearchQuery {
}
}
- pub fn files_to_exclude(&self) -> &[GlobMatcher] {
+ pub fn files_to_exclude(&self) -> &[PathMatcher] {
match self {
Self::Text {
files_to_exclude, ..
@@ -306,11 +331,63 @@ impl SearchQuery {
}
}
-fn deserialize_globs(glob_set: &str) -> Result<Vec<GlobMatcher>> {
+fn deserialize_path_matches(glob_set: &str) -> anyhow::Result<Vec<PathMatcher>> {
glob_set
.split(',')
.map(str::trim)
.filter(|glob_str| !glob_str.is_empty())
- .map(|glob_str| Ok(Glob::new(glob_str)?.compile_matcher()))
+ .map(|glob_str| {
+ PathMatcher::new(glob_str)
+ .with_context(|| format!("deserializing path match glob {glob_str}"))
+ })
.collect()
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn path_matcher_creation_for_valid_paths() {
+ for valid_path in [
+ "file",
+ "Cargo.toml",
+ ".DS_Store",
+ "~/dir/another_dir/",
+ "./dir/file",
+ "dir/[a-z].txt",
+ "../dir/filé",
+ ] {
+ let path_matcher = PathMatcher::new(valid_path).unwrap_or_else(|e| {
+ panic!("Valid path {valid_path} should be accepted, but got: {e}")
+ });
+ assert!(
+ path_matcher.is_match(valid_path),
+ "Path matcher for valid path {valid_path} should match itself"
+ )
+ }
+ }
+
+ #[test]
+ fn path_matcher_creation_for_globs() {
+ for invalid_glob in ["dir/[].txt", "dir/[a-z.txt", "dir/{file"] {
+ match PathMatcher::new(invalid_glob) {
+ Ok(_) => panic!("Invalid glob {invalid_glob} should not be accepted"),
+ Err(_expected) => {}
+ }
+ }
+
+ for valid_glob in [
+ "dir/?ile",
+ "dir/*.txt",
+ "dir/**/file",
+ "dir/[a-z].txt",
+ "{dir,file}",
+ ] {
+ match PathMatcher::new(valid_glob) {
+ Ok(_expected) => {}
+ Err(e) => panic!("Valid glob {valid_glob} should be accepted, but got: {e}"),
+ }
+ }
+ }
+}
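
The new `PathMatcher` tries a literal path-prefix match before falling back to glob matching, so entering a bare directory name such as `src` now filters results without needing a trailing `/**`. Below is a minimal standalone sketch of that idea, assuming only the `globset` crate; the `main` function and sample paths are illustrative.

```rust
use std::path::{Path, PathBuf};

use globset::{Glob, GlobMatcher};

/// Sketch of the combined matcher: a plain prefix such as "src" matches
/// everything under that directory, while "**/*.toml" is treated as a glob.
struct PathMatcher {
    maybe_path: PathBuf,
    glob: GlobMatcher,
}

impl PathMatcher {
    fn new(maybe_glob: &str) -> Result<Self, globset::Error> {
        Ok(Self {
            glob: Glob::new(maybe_glob)?.compile_matcher(),
            maybe_path: PathBuf::from(maybe_glob),
        })
    }

    fn is_match(&self, other: impl AsRef<Path>) -> bool {
        // Literal prefix match first, then the compiled glob.
        other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other.as_ref())
    }
}

fn main() -> Result<(), globset::Error> {
    let by_prefix = PathMatcher::new("src")?;
    assert!(by_prefix.is_match("src/main.rs")); // no "src/**" required

    let by_glob = PathMatcher::new("**/*.toml")?;
    assert!(by_glob.is_match("crates/project/Cargo.toml"));
    Ok(())
}
```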
@@ -2369,7 +2369,7 @@ impl BackgroundScannerState {
}
// Remove any git repositories whose .git entry no longer exists.
- let mut snapshot = &mut self.snapshot;
+ let snapshot = &mut self.snapshot;
let mut repositories = mem::take(&mut snapshot.git_repositories);
let mut repository_entries = mem::take(&mut snapshot.repository_entries);
repositories.retain(|work_directory_id, _| {
@@ -115,6 +115,7 @@ actions!(
[
ExpandSelectedEntry,
CollapseSelectedEntry,
+ CollapseAllEntries,
NewDirectory,
NewFile,
Copy,
@@ -140,6 +141,7 @@ pub fn init(assets: impl AssetSource, cx: &mut AppContext) {
file_associations::init(assets, cx);
cx.add_action(ProjectPanel::expand_selected_entry);
cx.add_action(ProjectPanel::collapse_selected_entry);
+ cx.add_action(ProjectPanel::collapse_all_entries);
cx.add_action(ProjectPanel::select_prev);
cx.add_action(ProjectPanel::select_next);
cx.add_action(ProjectPanel::new_file);
@@ -514,6 +516,12 @@ impl ProjectPanel {
}
}
+ pub fn collapse_all_entries(&mut self, _: &CollapseAllEntries, cx: &mut ViewContext<Self>) {
+ self.expanded_dir_ids.clear();
+ self.update_visible_entries(None, cx);
+ cx.notify();
+ }
+
fn toggle_expanded(&mut self, entry_id: ProjectEntryId, cx: &mut ViewContext<Self>) {
if let Some(worktree_id) = self.project.read(cx).worktree_id_for_entry(entry_id, cx) {
if let Some(expanded_dir_ids) = self.expanded_dir_ids.get_mut(&worktree_id) {
@@ -2678,6 +2686,63 @@ mod tests {
);
}
+ #[gpui::test]
+ async fn test_collapse_all_entries(cx: &mut gpui::TestAppContext) {
+ init_test_with_editor(cx);
+
+ let fs = FakeFs::new(cx.background());
+ fs.insert_tree(
+ "/project_root",
+ json!({
+ "dir_1": {
+ "nested_dir": {
+ "file_a.py": "# File contents",
+ "file_b.py": "# File contents",
+ "file_c.py": "# File contents",
+ },
+ "file_1.py": "# File contents",
+ "file_2.py": "# File contents",
+ "file_3.py": "# File contents",
+ },
+ "dir_2": {
+ "file_1.py": "# File contents",
+ "file_2.py": "# File contents",
+ "file_3.py": "# File contents",
+ }
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs.clone(), ["/project_root".as_ref()], cx).await;
+ let (_, workspace) = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
+ let panel = workspace.update(cx, |workspace, cx| ProjectPanel::new(workspace, cx));
+
+ panel.update(cx, |panel, cx| {
+ panel.collapse_all_entries(&CollapseAllEntries, cx)
+ });
+ cx.foreground().run_until_parked();
+ assert_eq!(
+ visible_entries_as_strings(&panel, 0..10, cx),
+ &["v project_root", " > dir_1", " > dir_2",]
+ );
+
+ // Open dir_1 and make sure nested_dir was collapsed when running collapse_all_entries
+ toggle_expand_dir(&panel, "project_root/dir_1", cx);
+ cx.foreground().run_until_parked();
+ assert_eq!(
+ visible_entries_as_strings(&panel, 0..10, cx),
+ &[
+ "v project_root",
+ " v dir_1 <== selected",
+ " > nested_dir",
+ " file_1.py",
+ " file_2.py",
+ " file_3.py",
+ " > dir_2",
+ ]
+ );
+ }
+
fn toggle_expand_dir(
panel: &ViewHandle<ProjectPanel>,
path: impl AsRef<Path>,
@@ -2878,3 +2943,4 @@ mod tests {
});
}
}
+// TODO - a workspace command?
@@ -1,6 +1,6 @@
use crate::{
- SearchOptions, SelectAllMatches, SelectNextMatch, SelectPrevMatch, ToggleCaseSensitive,
- ToggleRegex, ToggleWholeWord,
+ NextHistoryQuery, PreviousHistoryQuery, SearchHistory, SearchOptions, SelectAllMatches,
+ SelectNextMatch, SelectPrevMatch, ToggleCaseSensitive, ToggleRegex, ToggleWholeWord,
};
use collections::HashMap;
use editor::Editor;
@@ -46,6 +46,8 @@ pub fn init(cx: &mut AppContext) {
cx.add_action(BufferSearchBar::select_prev_match_on_pane);
cx.add_action(BufferSearchBar::select_all_matches_on_pane);
cx.add_action(BufferSearchBar::handle_editor_cancel);
+ cx.add_action(BufferSearchBar::next_history_query);
+ cx.add_action(BufferSearchBar::previous_history_query);
add_toggle_option_action::<ToggleCaseSensitive>(SearchOptions::CASE_SENSITIVE, cx);
add_toggle_option_action::<ToggleWholeWord>(SearchOptions::WHOLE_WORD, cx);
add_toggle_option_action::<ToggleRegex>(SearchOptions::REGEX, cx);
@@ -65,7 +67,7 @@ fn add_toggle_option_action<A: Action>(option: SearchOptions, cx: &mut AppContex
}
pub struct BufferSearchBar {
- pub query_editor: ViewHandle<Editor>,
+ query_editor: ViewHandle<Editor>,
active_searchable_item: Option<Box<dyn SearchableItemHandle>>,
active_match_index: Option<usize>,
active_searchable_item_subscription: Option<Subscription>,
@@ -76,6 +78,7 @@ pub struct BufferSearchBar {
default_options: SearchOptions,
query_contains_error: bool,
dismissed: bool,
+ search_history: SearchHistory,
}
impl Entity for BufferSearchBar {
@@ -106,6 +109,48 @@ impl View for BufferSearchBar {
.map(|active_searchable_item| active_searchable_item.supported_options())
.unwrap_or_default();
+ let previous_query_keystrokes =
+ cx.binding_for_action(&PreviousHistoryQuery {})
+ .map(|binding| {
+ binding
+ .keystrokes()
+ .iter()
+ .map(|k| k.to_string())
+ .collect::<Vec<_>>()
+ });
+ let next_query_keystrokes = cx.binding_for_action(&NextHistoryQuery {}).map(|binding| {
+ binding
+ .keystrokes()
+ .iter()
+ .map(|k| k.to_string())
+ .collect::<Vec<_>>()
+ });
+ let new_placeholder_text = match (previous_query_keystrokes, next_query_keystrokes) {
+ (Some(previous_query_keystrokes), Some(next_query_keystrokes)) => {
+ format!(
+ "Search ({}/{} for previous/next query)",
+ previous_query_keystrokes.join(" "),
+ next_query_keystrokes.join(" ")
+ )
+ }
+ (None, Some(next_query_keystrokes)) => {
+ format!(
+ "Search ({} for next query)",
+ next_query_keystrokes.join(" ")
+ )
+ }
+ (Some(previous_query_keystrokes), None) => {
+ format!(
+ "Search ({} for previous query)",
+ previous_query_keystrokes.join(" ")
+ )
+ }
+ (None, None) => String::new(),
+ };
+ self.query_editor.update(cx, |editor, cx| {
+ editor.set_placeholder_text(new_placeholder_text, cx);
+ });
+
Flex::row()
.with_child(
Flex::row()
@@ -258,6 +303,7 @@ impl BufferSearchBar {
pending_search: None,
query_contains_error: false,
dismissed: true,
+ search_history: SearchHistory::default(),
}
}
@@ -341,7 +387,7 @@ impl BufferSearchBar {
cx: &mut ViewContext<Self>,
) -> oneshot::Receiver<()> {
let options = options.unwrap_or(self.default_options);
- if query != self.query_editor.read(cx).text(cx) || self.search_options != options {
+ if query != self.query(cx) || self.search_options != options {
self.query_editor.update(cx, |query_editor, cx| {
query_editor.buffer().update(cx, |query_buffer, cx| {
let len = query_buffer.len(cx);
@@ -674,7 +720,7 @@ impl BufferSearchBar {
fn update_matches(&mut self, cx: &mut ViewContext<Self>) -> oneshot::Receiver<()> {
let (done_tx, done_rx) = oneshot::channel();
- let query = self.query_editor.read(cx).text(cx);
+ let query = self.query(cx);
self.pending_search.take();
if let Some(active_searchable_item) = self.active_searchable_item.as_ref() {
if query.is_empty() {
@@ -707,6 +753,7 @@ impl BufferSearchBar {
)
};
+ let query_text = query.as_str().to_string();
let matches = active_searchable_item.find_matches(query, cx);
let active_searchable_item = active_searchable_item.downgrade();
@@ -720,6 +767,7 @@ impl BufferSearchBar {
.insert(active_searchable_item.downgrade(), matches);
this.update_match_index(cx);
+ this.search_history.add(query_text);
if !this.dismissed {
let matches = this
.searchable_items_with_matches
@@ -753,6 +801,28 @@ impl BufferSearchBar {
cx.notify();
}
}
+
+ fn next_history_query(&mut self, _: &NextHistoryQuery, cx: &mut ViewContext<Self>) {
+ if let Some(new_query) = self.search_history.next().map(str::to_string) {
+ let _ = self.search(&new_query, Some(self.search_options), cx);
+ } else {
+ self.search_history.reset_selection();
+ let _ = self.search("", Some(self.search_options), cx);
+ }
+ }
+
+ fn previous_history_query(&mut self, _: &PreviousHistoryQuery, cx: &mut ViewContext<Self>) {
+ if self.query(cx).is_empty() {
+ if let Some(new_query) = self.search_history.current().map(str::to_string) {
+ let _ = self.search(&new_query, Some(self.search_options), cx);
+ return;
+ }
+ }
+
+ if let Some(new_query) = self.search_history.previous().map(str::to_string) {
+ let _ = self.search(&new_query, Some(self.search_options), cx);
+ }
+ }
}
#[cfg(test)]
@@ -1333,4 +1403,154 @@ mod tests {
);
});
}
+
+ #[gpui::test]
+ async fn test_search_query_history(cx: &mut TestAppContext) {
+ crate::project_search::tests::init_test(cx);
+
+ let buffer_text = r#"
+ A regular expression (shortened as regex or regexp;[1] also referred to as
+ rational expression[2][3]) is a sequence of characters that specifies a search
+ pattern in text. Usually such patterns are used by string-searching algorithms
+ for "find" or "find and replace" operations on strings, or for input validation.
+ "#
+ .unindent();
+ let buffer = cx.add_model(|cx| Buffer::new(0, buffer_text, cx));
+ let (window_id, _root_view) = cx.add_window(|_| EmptyView);
+
+ let editor = cx.add_view(window_id, |cx| Editor::for_buffer(buffer.clone(), None, cx));
+
+ let search_bar = cx.add_view(window_id, |cx| {
+ let mut search_bar = BufferSearchBar::new(cx);
+ search_bar.set_active_pane_item(Some(&editor), cx);
+ search_bar.show(cx);
+ search_bar
+ });
+
+ // Add 3 search items into the history.
+ search_bar
+ .update(cx, |search_bar, cx| search_bar.search("a", None, cx))
+ .await
+ .unwrap();
+ search_bar
+ .update(cx, |search_bar, cx| search_bar.search("b", None, cx))
+ .await
+ .unwrap();
+ search_bar
+ .update(cx, |search_bar, cx| {
+ search_bar.search("c", Some(SearchOptions::CASE_SENSITIVE), cx)
+ })
+ .await
+ .unwrap();
+ // Ensure that the latest search is active.
+ search_bar.read_with(cx, |search_bar, cx| {
+ assert_eq!(search_bar.query(cx), "c");
+ assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
+ });
+
+ // Next history query after the latest should set the query to the empty string.
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.next_history_query(&NextHistoryQuery, cx);
+ });
+ search_bar.read_with(cx, |search_bar, cx| {
+ assert_eq!(search_bar.query(cx), "");
+ assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
+ });
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.next_history_query(&NextHistoryQuery, cx);
+ });
+ search_bar.read_with(cx, |search_bar, cx| {
+ assert_eq!(search_bar.query(cx), "");
+ assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
+ });
+
+ // First previous query for empty current query should set the query to the latest.
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.previous_history_query(&PreviousHistoryQuery, cx);
+ });
+ search_bar.read_with(cx, |search_bar, cx| {
+ assert_eq!(search_bar.query(cx), "c");
+ assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
+ });
+
+ // Further previous items should go over the history in reverse order.
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.previous_history_query(&PreviousHistoryQuery, cx);
+ });
+ search_bar.read_with(cx, |search_bar, cx| {
+ assert_eq!(search_bar.query(cx), "b");
+ assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
+ });
+
+ // Previous items should never go behind the first history item.
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.previous_history_query(&PreviousHistoryQuery, cx);
+ });
+ search_bar.read_with(cx, |search_bar, cx| {
+ assert_eq!(search_bar.query(cx), "a");
+ assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
+ });
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.previous_history_query(&PreviousHistoryQuery, cx);
+ });
+ search_bar.read_with(cx, |search_bar, cx| {
+ assert_eq!(search_bar.query(cx), "a");
+ assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
+ });
+
+ // Next items should go over the history in the original order.
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.next_history_query(&NextHistoryQuery, cx);
+ });
+ search_bar.read_with(cx, |search_bar, cx| {
+ assert_eq!(search_bar.query(cx), "b");
+ assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
+ });
+
+ search_bar
+ .update(cx, |search_bar, cx| search_bar.search("ba", None, cx))
+ .await
+ .unwrap();
+ search_bar.read_with(cx, |search_bar, cx| {
+ assert_eq!(search_bar.query(cx), "ba");
+ assert_eq!(search_bar.search_options, SearchOptions::NONE);
+ });
+
+ // New search input should add another entry to history and move the selection to the end of the history.
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.previous_history_query(&PreviousHistoryQuery, cx);
+ });
+ search_bar.read_with(cx, |search_bar, cx| {
+ assert_eq!(search_bar.query(cx), "c");
+ assert_eq!(search_bar.search_options, SearchOptions::NONE);
+ });
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.previous_history_query(&PreviousHistoryQuery, cx);
+ });
+ search_bar.read_with(cx, |search_bar, cx| {
+ assert_eq!(search_bar.query(cx), "b");
+ assert_eq!(search_bar.search_options, SearchOptions::NONE);
+ });
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.next_history_query(&NextHistoryQuery, cx);
+ });
+ search_bar.read_with(cx, |search_bar, cx| {
+ assert_eq!(search_bar.query(cx), "c");
+ assert_eq!(search_bar.search_options, SearchOptions::NONE);
+ });
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.next_history_query(&NextHistoryQuery, cx);
+ });
+ search_bar.read_with(cx, |search_bar, cx| {
+ assert_eq!(search_bar.query(cx), "ba");
+ assert_eq!(search_bar.search_options, SearchOptions::NONE);
+ });
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.next_history_query(&NextHistoryQuery, cx);
+ });
+ search_bar.read_with(cx, |search_bar, cx| {
+ assert_eq!(search_bar.query(cx), "");
+ assert_eq!(search_bar.search_options, SearchOptions::NONE);
+ });
+ }
}
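
The placeholder logic above (and its duplicate in the project search view below) only advertises the history keybindings that are actually bound. Here is a self-contained sketch of the strings it produces; the `ctrl-p`/`ctrl-n` keystrokes are hypothetical stand-ins for whatever the user's keymap defines.

```rust
// Sketch of the placeholder formatting used by the search bars above.
fn history_placeholder(previous: Option<&[&str]>, next: Option<&[&str]>) -> String {
    match (previous, next) {
        (Some(previous), Some(next)) => format!(
            "Search ({}/{} for previous/next query)",
            previous.join(" "),
            next.join(" ")
        ),
        (None, Some(next)) => format!("Search ({} for next query)", next.join(" ")),
        (Some(previous), None) => format!("Search ({} for previous query)", previous.join(" ")),
        (None, None) => String::new(),
    }
}

fn main() {
    assert_eq!(
        history_placeholder(Some(&["ctrl-p"][..]), Some(&["ctrl-n"][..])),
        "Search (ctrl-p/ctrl-n for previous/next query)"
    );
    assert_eq!(history_placeholder(None, None), "");
}
```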
@@ -1,14 +1,14 @@
use crate::{
- SearchOptions, SelectNextMatch, SelectPrevMatch, ToggleCaseSensitive, ToggleWholeWord,
+ NextHistoryQuery, PreviousHistoryQuery, SearchHistory, SearchOptions, SelectNextMatch,
+ SelectPrevMatch, ToggleCaseSensitive, ToggleWholeWord,
};
-use anyhow::Result;
+use anyhow::Context;
use collections::HashMap;
use editor::{
items::active_match_index, scroll::autoscroll::Autoscroll, Anchor, Editor, MultiBuffer,
SelectAll, MAX_TAB_TITLE_LEN,
};
use futures::StreamExt;
-use globset::{Glob, GlobMatcher};
use gpui::color::Color;
use gpui::geometry::rect::RectF;
use gpui::geometry::vector::IntoVector2F;
@@ -24,7 +24,10 @@ use gpui::{
use gpui::{scene::Path, LayoutContext};
use menu::Confirm;
use postage::stream::Stream;
-use project::{search::SearchQuery, Entry, Project};
+use project::{
+ search::{PathMatcher, SearchQuery},
+ Entry, Project,
+};
use semantic_index::SemanticIndex;
use smallvec::SmallVec;
use std::{
@@ -69,6 +72,8 @@ pub fn init(cx: &mut AppContext) {
cx.add_action(ProjectSearchBar::select_next_match);
cx.add_action(ProjectSearchBar::select_prev_match);
cx.add_action(ProjectSearchBar::cycle_mode);
+ cx.add_action(ProjectSearchBar::next_history_query);
+ cx.add_action(ProjectSearchBar::previous_history_query);
cx.capture_action(ProjectSearchBar::tab);
cx.capture_action(ProjectSearchBar::tab_previous);
add_toggle_option_action::<ToggleCaseSensitive>(SearchOptions::CASE_SENSITIVE, cx);
@@ -107,6 +112,7 @@ struct ProjectSearch {
match_ranges: Vec<Range<Anchor>>,
active_query: Option<SearchQuery>,
search_id: usize,
+ search_history: SearchHistory,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -228,6 +234,7 @@ impl ProjectSearch {
match_ranges: Default::default(),
active_query: None,
search_id: 0,
+ search_history: SearchHistory::default(),
}
}
@@ -241,6 +248,7 @@ impl ProjectSearch {
match_ranges: self.match_ranges.clone(),
active_query: self.active_query.clone(),
search_id: self.search_id,
+ search_history: self.search_history.clone(),
})
}
@@ -255,6 +263,7 @@ impl ProjectSearch {
.project
.update(cx, |project, cx| project.search(query.clone(), cx));
self.search_id += 1;
+ self.search_history.add(query.as_str().to_string());
self.active_query = Some(query);
self.match_ranges.clear();
self.pending_search = Some(cx.spawn_weak(|this, mut cx| async move {
@@ -290,27 +299,22 @@ impl ProjectSearch {
cx.notify();
}
- fn semantic_search(
- &mut self,
- query: String,
- include_files: Vec<GlobMatcher>,
- exclude_files: Vec<GlobMatcher>,
- cx: &mut ModelContext<Self>,
- ) {
+ fn semantic_search(&mut self, query: SearchQuery, cx: &mut ModelContext<Self>) {
let search = SemanticIndex::global(cx).map(|index| {
index.update(cx, |semantic_index, cx| {
semantic_index.search_project(
self.project.clone(),
- query.clone(),
+ query.as_str().to_owned(),
10,
- include_files,
- exclude_files,
+ query.files_to_include().to_vec(),
+ query.files_to_exclude().to_vec(),
cx,
)
})
});
self.search_id += 1;
self.match_ranges.clear();
+ self.search_history.add(query.as_str().to_string());
self.pending_search = Some(cx.spawn(|this, mut cx| async move {
let results = search?.await.log_err()?;
@@ -415,6 +419,49 @@ impl View for ProjectSearchView {
],
};
+ let previous_query_keystrokes =
+ cx.binding_for_action(&PreviousHistoryQuery {})
+ .map(|binding| {
+ binding
+ .keystrokes()
+ .iter()
+ .map(|k| k.to_string())
+ .collect::<Vec<_>>()
+ });
+ let next_query_keystrokes =
+ cx.binding_for_action(&NextHistoryQuery {}).map(|binding| {
+ binding
+ .keystrokes()
+ .iter()
+ .map(|k| k.to_string())
+ .collect::<Vec<_>>()
+ });
+ let new_placeholder_text = match (previous_query_keystrokes, next_query_keystrokes) {
+ (Some(previous_query_keystrokes), Some(next_query_keystrokes)) => {
+ format!(
+ "Search ({}/{} for previous/next query)",
+ previous_query_keystrokes.join(" "),
+ next_query_keystrokes.join(" ")
+ )
+ }
+ (None, Some(next_query_keystrokes)) => {
+ format!(
+ "Search ({} for next query)",
+ next_query_keystrokes.join(" ")
+ )
+ }
+ (Some(previous_query_keystrokes), None) => {
+ format!(
+ "Search ({} for previous query)",
+ previous_query_keystrokes.join(" ")
+ )
+ }
+ (None, None) => String::new(),
+ };
+ self.query_editor.update(cx, |editor, cx| {
+ editor.set_placeholder_text(new_placeholder_text, cx);
+ });
+
MouseEventHandler::<Status, _>::new(0, cx, |_, _| {
Flex::column()
.with_child(Flex::column().contained().flex(1., true))
@@ -641,6 +688,9 @@ impl Item for ProjectSearchView {
}
impl ProjectSearchView {
+ fn toggle_search_option(&mut self, option: SearchOptions) {
+ self.search_options.toggle(option);
+ }
fn activate_search_mode(&mut self, mode: SearchMode, cx: &mut ViewContext<Self>) {
self.model.update(cx, |model, _| model.kill_search());
self.current_mode = mode;
@@ -815,8 +865,7 @@ impl ProjectSearchView {
if !dir_entry.is_dir() {
return;
}
- let filter_path = dir_entry.path.join("**");
- let Some(filter_str) = filter_path.to_str() else { return; };
+ let Some(filter_str) = dir_entry.path.to_str() else { return; };
let model = cx.add_model(|cx| ProjectSearch::new(workspace.project().clone(), cx));
let search = cx.add_view(|cx| ProjectSearchView::new(model, cx));
@@ -891,16 +940,13 @@ impl ProjectSearchView {
return;
}
- let query = self.query_editor.read(cx).text(cx);
- if let Some((included_files, exclude_files)) =
- self.get_included_and_excluded_globsets(cx)
- {
- self.model.update(cx, |model, cx| {
- model.semantic_search(query, included_files, exclude_files, cx)
- });
+ if let Some(query) = self.build_search_query(cx) {
+ self.model
+ .update(cx, |model, cx| model.semantic_search(query, cx));
}
}
}
+
_ => {
if let Some(query) = self.build_search_query(cx) {
self.model.update(cx, |model, cx| model.search(query, cx));
@@ -909,45 +955,10 @@ impl ProjectSearchView {
}
}
- fn get_included_and_excluded_globsets(
- &mut self,
- cx: &mut ViewContext<Self>,
- ) -> Option<(Vec<GlobMatcher>, Vec<GlobMatcher>)> {
- let included_files =
- match Self::load_glob_set(&self.included_files_editor.read(cx).text(cx)) {
- Ok(included_files) => {
- self.panels_with_errors.remove(&InputPanel::Include);
- included_files
- }
- Err(_e) => {
- self.panels_with_errors.insert(InputPanel::Include);
- cx.notify();
- return None;
- }
- };
- let excluded_files =
- match Self::load_glob_set(&self.excluded_files_editor.read(cx).text(cx)) {
- Ok(excluded_files) => {
- self.panels_with_errors.remove(&InputPanel::Exclude);
- excluded_files
- }
- Err(_e) => {
- self.panels_with_errors.insert(InputPanel::Exclude);
- cx.notify();
- return None;
- }
- };
-
- Some((included_files, excluded_files))
- }
- fn toggle_search_option(&mut self, option: SearchOptions) {
- self.search_options.toggle(option);
- self.semantic = None;
- }
fn build_search_query(&mut self, cx: &mut ViewContext<Self>) -> Option<SearchQuery> {
let text = self.query_editor.read(cx).text(cx);
let included_files =
- match Self::load_glob_set(&self.included_files_editor.read(cx).text(cx)) {
+ match Self::parse_path_matches(&self.included_files_editor.read(cx).text(cx)) {
Ok(included_files) => {
self.panels_with_errors.remove(&InputPanel::Include);
included_files
@@ -959,7 +970,7 @@ impl ProjectSearchView {
}
};
let excluded_files =
- match Self::load_glob_set(&self.excluded_files_editor.read(cx).text(cx)) {
+ match Self::parse_path_matches(&self.excluded_files_editor.read(cx).text(cx)) {
Ok(excluded_files) => {
self.panels_with_errors.remove(&InputPanel::Exclude);
excluded_files
@@ -999,11 +1010,14 @@ impl ProjectSearchView {
}
}
- fn load_glob_set(text: &str) -> Result<Vec<GlobMatcher>> {
+ fn parse_path_matches(text: &str) -> anyhow::Result<Vec<PathMatcher>> {
text.split(',')
.map(str::trim)
- .filter(|glob_str| !glob_str.is_empty())
- .map(|glob_str| anyhow::Ok(Glob::new(glob_str)?.compile_matcher()))
+ .filter(|maybe_glob_str| !maybe_glob_str.is_empty())
+ .map(|maybe_glob_str| {
+ PathMatcher::new(maybe_glob_str)
+ .with_context(|| format!("parsing {maybe_glob_str} as path matcher"))
+ })
.collect()
}
@@ -1016,6 +1030,7 @@ impl ProjectSearchView {
let range_to_select = match_ranges[new_index].clone();
self.results_editor.update(cx, |editor, cx| {
+ let range_to_select = editor.range_for_match(&range_to_select);
editor.unfold_ranges([range_to_select.clone()], false, true, cx);
editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges([range_to_select])
@@ -1057,8 +1072,12 @@ impl ProjectSearchView {
let is_new_search = self.search_id != prev_search_id;
self.results_editor.update(cx, |editor, cx| {
if is_new_search {
+ let range_to_select = match_ranges
+ .first()
+ .clone()
+ .map(|range| editor.range_for_match(range));
editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
- s.select_ranges(match_ranges.first().cloned())
+ s.select_ranges(range_to_select)
});
}
editor.highlight_background::<Self>(
@@ -1597,6 +1616,47 @@ impl ProjectSearchBar {
false
}
}
+
+ fn next_history_query(&mut self, _: &NextHistoryQuery, cx: &mut ViewContext<Self>) {
+ if let Some(search_view) = self.active_project_search.as_ref() {
+ search_view.update(cx, |search_view, cx| {
+ let new_query = search_view.model.update(cx, |model, _| {
+ if let Some(new_query) = model.search_history.next().map(str::to_string) {
+ new_query
+ } else {
+ model.search_history.reset_selection();
+ String::new()
+ }
+ });
+ search_view.set_query(&new_query, cx);
+ });
+ }
+ }
+
+ fn previous_history_query(&mut self, _: &PreviousHistoryQuery, cx: &mut ViewContext<Self>) {
+ if let Some(search_view) = self.active_project_search.as_ref() {
+ search_view.update(cx, |search_view, cx| {
+ if search_view.query_editor.read(cx).text(cx).is_empty() {
+ if let Some(new_query) = search_view
+ .model
+ .read(cx)
+ .search_history
+ .current()
+ .map(str::to_string)
+ {
+ search_view.set_query(&new_query, cx);
+ return;
+ }
+ }
+
+ if let Some(new_query) = search_view.model.update(cx, |model, _| {
+ model.search_history.previous().map(str::to_string)
+ }) {
+ search_view.set_query(&new_query, cx);
+ }
+ });
+ }
+ }
}
impl Entity for ProjectSearchBar {
@@ -1869,6 +1929,7 @@ pub mod tests {
use editor::DisplayPoint;
use gpui::{color::Color, executor::Deterministic, TestAppContext};
use project::FakeFs;
+ use semantic_index::semantic_index_settings::SemanticIndexSettings;
use serde_json::json;
use settings::SettingsStore;
use std::sync::Arc;
@@ -2270,7 +2331,7 @@ pub mod tests {
search_view.included_files_editor.update(cx, |editor, cx| {
assert_eq!(
editor.display_text(cx),
- a_dir_entry.path.join("**").display().to_string(),
+ a_dir_entry.path.to_str().unwrap(),
"New search in directory should have included dir entry path"
);
});
@@ -2294,6 +2355,192 @@ pub mod tests {
});
}
+ #[gpui::test]
+ async fn test_search_query_history(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.background());
+ fs.insert_tree(
+ "/dir",
+ json!({
+ "one.rs": "const ONE: usize = 1;",
+ "two.rs": "const TWO: usize = one::ONE + one::ONE;",
+ "three.rs": "const THREE: usize = one::ONE + two::TWO;",
+ "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
+ }),
+ )
+ .await;
+ let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+ let (window_id, workspace) = cx.add_window(|cx| Workspace::test_new(project, cx));
+ workspace.update(cx, |workspace, cx| {
+ ProjectSearchView::deploy(workspace, &workspace::NewSearch, cx)
+ });
+
+ let search_view = cx.read(|cx| {
+ workspace
+ .read(cx)
+ .active_pane()
+ .read(cx)
+ .active_item()
+ .and_then(|item| item.downcast::<ProjectSearchView>())
+ .expect("Search view expected to appear after new search event trigger")
+ });
+
+ let search_bar = cx.add_view(window_id, |cx| {
+ let mut search_bar = ProjectSearchBar::new();
+ search_bar.set_active_pane_item(Some(&search_view), cx);
+ // search_bar.show(cx);
+ search_bar
+ });
+
+ // Add 3 search items into the history + another unsubmitted one.
+ search_view.update(cx, |search_view, cx| {
+ search_view.search_options = SearchOptions::CASE_SENSITIVE;
+ search_view
+ .query_editor
+ .update(cx, |query_editor, cx| query_editor.set_text("ONE", cx));
+ search_view.search(cx);
+ });
+ cx.foreground().run_until_parked();
+ search_view.update(cx, |search_view, cx| {
+ search_view
+ .query_editor
+ .update(cx, |query_editor, cx| query_editor.set_text("TWO", cx));
+ search_view.search(cx);
+ });
+ cx.foreground().run_until_parked();
+ search_view.update(cx, |search_view, cx| {
+ search_view
+ .query_editor
+ .update(cx, |query_editor, cx| query_editor.set_text("THREE", cx));
+ search_view.search(cx);
+ });
+ cx.foreground().run_until_parked();
+ search_view.update(cx, |search_view, cx| {
+ search_view.query_editor.update(cx, |query_editor, cx| {
+ query_editor.set_text("JUST_TEXT_INPUT", cx)
+ });
+ });
+ cx.foreground().run_until_parked();
+
+ // Ensure that the latest input with search settings is active.
+ search_view.update(cx, |search_view, cx| {
+ assert_eq!(
+ search_view.query_editor.read(cx).text(cx),
+ "JUST_TEXT_INPUT"
+ );
+ assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
+ });
+
+ // Next history query after the latest should set the query to the empty string.
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.next_history_query(&NextHistoryQuery, cx);
+ });
+ search_view.update(cx, |search_view, cx| {
+ assert_eq!(search_view.query_editor.read(cx).text(cx), "");
+ assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
+ });
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.next_history_query(&NextHistoryQuery, cx);
+ });
+ search_view.update(cx, |search_view, cx| {
+ assert_eq!(search_view.query_editor.read(cx).text(cx), "");
+ assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
+ });
+
+ // First previous query for empty current query should set the query to the latest submitted one.
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.previous_history_query(&PreviousHistoryQuery, cx);
+ });
+ search_view.update(cx, |search_view, cx| {
+ assert_eq!(search_view.query_editor.read(cx).text(cx), "THREE");
+ assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
+ });
+
+ // Further previous items should go over the history in reverse order.
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.previous_history_query(&PreviousHistoryQuery, cx);
+ });
+ search_view.update(cx, |search_view, cx| {
+ assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO");
+ assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
+ });
+
+ // Previous items should never go behind the first history item.
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.previous_history_query(&PreviousHistoryQuery, cx);
+ });
+ search_view.update(cx, |search_view, cx| {
+ assert_eq!(search_view.query_editor.read(cx).text(cx), "ONE");
+ assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
+ });
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.previous_history_query(&PreviousHistoryQuery, cx);
+ });
+ search_view.update(cx, |search_view, cx| {
+ assert_eq!(search_view.query_editor.read(cx).text(cx), "ONE");
+ assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
+ });
+
+ // Next items should go over the history in the original order.
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.next_history_query(&NextHistoryQuery, cx);
+ });
+ search_view.update(cx, |search_view, cx| {
+ assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO");
+ assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
+ });
+
+ search_view.update(cx, |search_view, cx| {
+ search_view
+ .query_editor
+ .update(cx, |query_editor, cx| query_editor.set_text("TWO_NEW", cx));
+ search_view.search(cx);
+ });
+ cx.foreground().run_until_parked();
+ search_view.update(cx, |search_view, cx| {
+ assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO_NEW");
+ assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
+ });
+
+ // New search input should add another entry to history and move the selection to the end of the history.
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.previous_history_query(&PreviousHistoryQuery, cx);
+ });
+ search_view.update(cx, |search_view, cx| {
+ assert_eq!(search_view.query_editor.read(cx).text(cx), "THREE");
+ assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
+ });
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.previous_history_query(&PreviousHistoryQuery, cx);
+ });
+ search_view.update(cx, |search_view, cx| {
+ assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO");
+ assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
+ });
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.next_history_query(&NextHistoryQuery, cx);
+ });
+ search_view.update(cx, |search_view, cx| {
+ assert_eq!(search_view.query_editor.read(cx).text(cx), "THREE");
+ assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
+ });
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.next_history_query(&NextHistoryQuery, cx);
+ });
+ search_view.update(cx, |search_view, cx| {
+ assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO_NEW");
+ assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
+ });
+ search_bar.update(cx, |search_bar, cx| {
+ search_bar.next_history_query(&NextHistoryQuery, cx);
+ });
+ search_view.update(cx, |search_view, cx| {
+ assert_eq!(search_view.query_editor.read(cx).text(cx), "");
+ assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
+ });
+ }
+
pub fn init_test(cx: &mut TestAppContext) {
cx.foreground().forbid_parking();
let fonts = cx.font_cache();
@@ -2303,6 +2550,7 @@ pub mod tests {
cx.update(|cx| {
cx.set_global(SettingsStore::test(cx));
cx.set_global(ActiveSearches::default());
+ settings::register::<SemanticIndexSettings>(cx);
theme::init((), cx);
cx.update_global::<SettingsStore, _, _>(|store, _| {
@@ -3,6 +3,7 @@ pub use buffer_search::BufferSearchBar;
use gpui::{actions, Action, AppContext};
use project::search::SearchQuery;
pub use project_search::{ProjectSearchBar, ProjectSearchView};
+use smallvec::SmallVec;
pub mod buffer_search;
pub mod project_search;
@@ -21,6 +22,8 @@ actions!(
SelectNextMatch,
SelectPrevMatch,
SelectAllMatches,
+ NextHistoryQuery,
+ PreviousHistoryQuery,
]
);
@@ -65,3 +68,187 @@ impl SearchOptions {
options
}
}
+
+const SEARCH_HISTORY_LIMIT: usize = 20;
+
+#[derive(Default, Debug, Clone)]
+pub struct SearchHistory {
+ history: SmallVec<[String; SEARCH_HISTORY_LIMIT]>,
+ selected: Option<usize>,
+}
+
+impl SearchHistory {
+ pub fn add(&mut self, search_string: String) {
+ if let Some(i) = self.selected {
+ if search_string == self.history[i] {
+ return;
+ }
+ }
+
+ if let Some(previously_searched) = self.history.last_mut() {
+ if search_string.find(previously_searched.as_str()).is_some() {
+ *previously_searched = search_string;
+ self.selected = Some(self.history.len() - 1);
+ return;
+ }
+ }
+
+ self.history.push(search_string);
+ if self.history.len() > SEARCH_HISTORY_LIMIT {
+ self.history.remove(0);
+ }
+ self.selected = Some(self.history.len() - 1);
+ }
+
+ pub fn next(&mut self) -> Option<&str> {
+ let history_size = self.history.len();
+ if history_size == 0 {
+ return None;
+ }
+
+ let selected = self.selected?;
+ if selected == history_size - 1 {
+ return None;
+ }
+ let next_index = selected + 1;
+ self.selected = Some(next_index);
+ Some(&self.history[next_index])
+ }
+
+ pub fn current(&self) -> Option<&str> {
+ Some(&self.history[self.selected?])
+ }
+
+ pub fn previous(&mut self) -> Option<&str> {
+ let history_size = self.history.len();
+ if history_size == 0 {
+ return None;
+ }
+
+ let prev_index = match self.selected {
+ Some(selected_index) => {
+ if selected_index == 0 {
+ return None;
+ } else {
+ selected_index - 1
+ }
+ }
+ None => history_size - 1,
+ };
+
+ self.selected = Some(prev_index);
+ Some(&self.history[prev_index])
+ }
+
+ pub fn reset_selection(&mut self) {
+ self.selected = None;
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_add() {
+ let mut search_history = SearchHistory::default();
+ assert_eq!(
+ search_history.current(),
+ None,
+ "No current selection should be set fo the default search history"
+ );
+
+ search_history.add("rust".to_string());
+ assert_eq!(
+ search_history.current(),
+ Some("rust"),
+ "Newly added item should be selected"
+ );
+
+ // check if duplicates are not added
+ search_history.add("rust".to_string());
+ assert_eq!(
+ search_history.history.len(),
+ 1,
+ "Should not add a duplicate"
+ );
+ assert_eq!(search_history.current(), Some("rust"));
+
+ // check if new string containing the previous string replaces it
+ search_history.add("rustlang".to_string());
+ assert_eq!(
+ search_history.history.len(),
+ 1,
+ "Should replace previous item if it's a substring"
+ );
+ assert_eq!(search_history.current(), Some("rustlang"));
+
+ // push enough items to test SEARCH_HISTORY_LIMIT
+ for i in 0..SEARCH_HISTORY_LIMIT * 2 {
+ search_history.add(format!("item{i}"));
+ }
+ assert!(search_history.history.len() <= SEARCH_HISTORY_LIMIT);
+ }
+
+ #[test]
+ fn test_next_and_previous() {
+ let mut search_history = SearchHistory::default();
+ assert_eq!(
+ search_history.next(),
+ None,
+ "Default search history should not have a next item"
+ );
+
+ search_history.add("Rust".to_string());
+ assert_eq!(search_history.next(), None);
+ search_history.add("JavaScript".to_string());
+ assert_eq!(search_history.next(), None);
+ search_history.add("TypeScript".to_string());
+ assert_eq!(search_history.next(), None);
+
+ assert_eq!(search_history.current(), Some("TypeScript"));
+
+ assert_eq!(search_history.previous(), Some("JavaScript"));
+ assert_eq!(search_history.current(), Some("JavaScript"));
+
+ assert_eq!(search_history.previous(), Some("Rust"));
+ assert_eq!(search_history.current(), Some("Rust"));
+
+ assert_eq!(search_history.previous(), None);
+ assert_eq!(search_history.current(), Some("Rust"));
+
+ assert_eq!(search_history.next(), Some("JavaScript"));
+ assert_eq!(search_history.current(), Some("JavaScript"));
+
+ assert_eq!(search_history.next(), Some("TypeScript"));
+ assert_eq!(search_history.current(), Some("TypeScript"));
+
+ assert_eq!(search_history.next(), None);
+ assert_eq!(search_history.current(), Some("TypeScript"));
+ }
+
+ #[test]
+ fn test_reset_selection() {
+ let mut search_history = SearchHistory::default();
+ search_history.add("Rust".to_string());
+ search_history.add("JavaScript".to_string());
+ search_history.add("TypeScript".to_string());
+
+ assert_eq!(search_history.current(), Some("TypeScript"));
+ search_history.reset_selection();
+ assert_eq!(search_history.current(), None);
+ assert_eq!(
+ search_history.previous(),
+ Some("TypeScript"),
+ "Should start from the end after reset on previous item query"
+ );
+
+ search_history.previous();
+ assert_eq!(search_history.current(), Some("JavaScript"));
+ search_history.previous();
+ assert_eq!(search_history.current(), Some("Rust"));
+
+ search_history.reset_selection();
+ assert_eq!(search_history.current(), None);
+ }
+}
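
One non-obvious rule in `SearchHistory::add` above is that incremental typing does not flood the history: a new query that contains the previous entry as a substring replaces it in place, and only the last `SEARCH_HISTORY_LIMIT` entries are kept. A minimal self-contained sketch of that rule, using a plain `Vec` in place of `SmallVec`:

```rust
const SEARCH_HISTORY_LIMIT: usize = 20;

#[derive(Default)]
struct SearchHistory {
    history: Vec<String>,
    selected: Option<usize>,
}

impl SearchHistory {
    fn add(&mut self, search_string: String) {
        // Ignore exact duplicates of the currently selected entry.
        if let Some(i) = self.selected {
            if self.history[i] == search_string {
                return;
            }
        }
        // Coalesce incremental typing: "rust" replaces an earlier "rus".
        if let Some(last) = self.history.last_mut() {
            if search_string.contains(last.as_str()) {
                *last = search_string;
                self.selected = Some(self.history.len() - 1);
                return;
            }
        }
        self.history.push(search_string);
        if self.history.len() > SEARCH_HISTORY_LIMIT {
            self.history.remove(0);
        }
        self.selected = Some(self.history.len() - 1);
    }
}

fn main() {
    let mut history = SearchHistory::default();
    for query in ["r", "ru", "rust", "tokio"] {
        history.add(query.to_string());
    }
    // Only the final form of the incrementally typed query survives.
    assert_eq!(history.history, vec!["rust".to_string(), "tokio".to_string()]);
}
```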
@@ -54,9 +54,12 @@ tempdir.workspace = true
ctor.workspace = true
env_logger.workspace = true
-tree-sitter-typescript = "*"
-tree-sitter-json = "*"
-tree-sitter-rust = "*"
-tree-sitter-toml = "*"
-tree-sitter-cpp = "*"
-tree-sitter-elixir = "*"
+tree-sitter-typescript.workspace = true
+tree-sitter-json.workspace = true
+tree-sitter-rust.workspace = true
+tree-sitter-toml.workspace = true
+tree-sitter-cpp.workspace = true
+tree-sitter-elixir.workspace = true
+tree-sitter-lua.workspace = true
+tree-sitter-ruby.workspace = true
+tree-sitter-php.workspace = true
@@ -1,7 +1,6 @@
use crate::{parsing::Document, SEMANTIC_INDEX_VERSION};
use anyhow::{anyhow, Context, Result};
-use globset::GlobMatcher;
-use project::Fs;
+use project::{search::PathMatcher, Fs};
use rpc::proto::Timestamp;
use rusqlite::{
params,
@@ -290,8 +289,8 @@ impl VectorDatabase {
pub fn retrieve_included_file_ids(
&self,
worktree_ids: &[i64],
- include_globs: Vec<GlobMatcher>,
- exclude_globs: Vec<GlobMatcher>,
+ includes: &[PathMatcher],
+ excludes: &[PathMatcher],
) -> Result<Vec<i64>> {
let mut file_query = self.db.prepare(
"
@@ -310,13 +309,9 @@ impl VectorDatabase {
while let Some(row) = rows.next()? {
let file_id = row.get(0)?;
let relative_path = row.get_ref(1)?.as_str()?;
- let included = include_globs.is_empty()
- || include_globs
- .iter()
- .any(|glob| glob.is_match(relative_path));
- let excluded = exclude_globs
- .iter()
- .any(|glob| glob.is_match(relative_path));
+ let included =
+ includes.is_empty() || includes.iter().any(|glob| glob.is_match(relative_path));
+ let excluded = excludes.iter().any(|glob| glob.is_match(relative_path));
if included && !excluded {
file_ids.push(file_id);
}
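
The loop above applies a simple rule: an empty include list means everything is included, and any exclude match always wins. A short sketch of that predicate, assuming `project::search::PathMatcher` from the search change earlier in this set is in scope:

```rust
use project::search::PathMatcher;

// Keep a path only if it is included (or the include list is empty) and not excluded.
fn keep_path(relative_path: &str, includes: &[PathMatcher], excludes: &[PathMatcher]) -> bool {
    let included =
        includes.is_empty() || includes.iter().any(|matcher| matcher.is_match(relative_path));
    let excluded = excludes.iter().any(|matcher| matcher.is_match(relative_path));
    included && !excluded
}
```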
@@ -21,7 +21,9 @@ const CODE_CONTEXT_TEMPLATE: &str =
"The below code snippet is from file '<path>'\n\n```<language>\n<item>\n```";
const ENTIRE_FILE_TEMPLATE: &str =
"The below snippet is from file '<path>'\n\n```<language>\n<item>\n```";
-pub const PARSEABLE_ENTIRE_FILE_TYPES: &[&str] = &["TOML", "YAML", "CSS"];
+const MARKDOWN_CONTEXT_TEMPLATE: &str = "The below file contents is from file '<path>'\n\n<item>";
+pub const PARSEABLE_ENTIRE_FILE_TYPES: &[&str] =
+ &["TOML", "YAML", "CSS", "HEEX", "ERB", "SVELTE", "HTML"];
pub struct CodeContextRetriever {
pub parser: Parser,
@@ -59,7 +61,7 @@ impl CodeContextRetriever {
let document_span = ENTIRE_FILE_TEMPLATE
.replace("<path>", relative_path.to_string_lossy().as_ref())
.replace("<language>", language_name.as_ref())
- .replace("item", &content);
+ .replace("<item>", &content);
Ok(vec![Document {
range: 0..content.len(),
@@ -69,6 +71,19 @@ impl CodeContextRetriever {
}])
}
+ fn parse_markdown_file(&self, relative_path: &Path, content: &str) -> Result<Vec<Document>> {
+ let document_span = MARKDOWN_CONTEXT_TEMPLATE
+ .replace("<path>", relative_path.to_string_lossy().as_ref())
+ .replace("<item>", &content);
+
+ Ok(vec![Document {
+ range: 0..content.len(),
+ content: document_span,
+ embedding: Vec::new(),
+ name: "Markdown".to_string(),
+ }])
+ }
+
fn get_matches_in_file(
&mut self,
content: &str,
@@ -135,6 +150,8 @@ impl CodeContextRetriever {
if PARSEABLE_ENTIRE_FILE_TYPES.contains(&language_name.as_ref()) {
return self.parse_entire_file(relative_path, language_name, &content);
+ } else if &language_name.to_string() == &"Markdown".to_string() {
+ return self.parse_markdown_file(relative_path, &content);
}
let mut documents = self.parse_file(content, language)?;
@@ -200,7 +217,12 @@ impl CodeContextRetriever {
let mut document_content = String::new();
for context_range in &context_match.context_ranges {
- document_content.push_str(&content[context_range.clone()]);
+ add_content_from_range(
+ &mut document_content,
+ content,
+ context_range.clone(),
+ context_match.start_col,
+ );
document_content.push_str("\n");
}
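
For reference, the markdown template introduced above is filled in with plain string replacement, and the hunk above also fixes the whole-file template to replace the `<item>` placeholder rather than the literal text `item`. A runnable sketch (the path and contents are illustrative):

```rust
// Same template string as MARKDOWN_CONTEXT_TEMPLATE above.
const MARKDOWN_CONTEXT_TEMPLATE: &str = "The below file contents is from file '<path>'\n\n<item>";

fn main() {
    let rendered = MARKDOWN_CONTEXT_TEMPLATE
        .replace("<path>", "docs/README.md")
        .replace("<item>", "# Heading\nBody text");
    assert_eq!(
        rendered,
        "The below file contents is from file 'docs/README.md'\n\n# Heading\nBody text"
    );
}
```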
@@ -11,13 +11,12 @@ use anyhow::{anyhow, Result};
use db::VectorDatabase;
use embedding::{EmbeddingProvider, OpenAIEmbeddings};
use futures::{channel::oneshot, Future};
-use globset::GlobMatcher;
use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task, WeakModelHandle};
use language::{Anchor, Buffer, Language, LanguageRegistry};
use parking_lot::Mutex;
use parsing::{CodeContextRetriever, Document, PARSEABLE_ENTIRE_FILE_TYPES};
use postage::watch;
-use project::{Fs, Project, WorktreeId};
+use project::{search::PathMatcher, Fs, Project, WorktreeId};
use smol::channel;
use std::{
cmp::Ordering,
@@ -613,6 +612,7 @@ impl SemanticIndex {
.await
{
if !PARSEABLE_ENTIRE_FILE_TYPES.contains(&language.name().as_ref())
+ && &language.name().as_ref() != &"Markdown"
&& language
.grammar()
.and_then(|grammar| grammar.embedding_config.as_ref())
@@ -682,8 +682,8 @@ impl SemanticIndex {
project: ModelHandle<Project>,
phrase: String,
limit: usize,
- include_globs: Vec<GlobMatcher>,
- exclude_globs: Vec<GlobMatcher>,
+ includes: Vec<PathMatcher>,
+ excludes: Vec<PathMatcher>,
cx: &mut ModelContext<Self>,
) -> Task<Result<Vec<SearchResult>>> {
let project_state = if let Some(state) = self.projects.get(&project.downgrade()) {
@@ -714,11 +714,8 @@ impl SemanticIndex {
.next()
.unwrap();
- let file_ids = database.retrieve_included_file_ids(
- &worktree_db_ids,
- include_globs,
- exclude_globs,
- )?;
+ let file_ids =
+ database.retrieve_included_file_ids(&worktree_db_ids, &includes, &excludes)?;
let batch_n = cx.background().num_cpus();
let ids_len = file_ids.clone().len();
@@ -7,11 +7,10 @@ use crate::{
};
use anyhow::Result;
use async_trait::async_trait;
-use globset::Glob;
use gpui::{Task, TestAppContext};
use language::{Language, LanguageConfig, LanguageRegistry, ToOffset};
use pretty_assertions::assert_eq;
-use project::{project_settings::ProjectSettings, FakeFs, Fs, Project};
+use project::{project_settings::ProjectSettings, search::PathMatcher, FakeFs, Fs, Project};
use rand::{rngs::StdRng, Rng};
use serde_json::json;
use settings::SettingsStore;
@@ -121,8 +120,8 @@ async fn test_semantic_index(cx: &mut TestAppContext) {
);
// Test Include Files Functionality
- let include_files = vec![Glob::new("*.rs").unwrap().compile_matcher()];
- let exclude_files = vec![Glob::new("*.rs").unwrap().compile_matcher()];
+ let include_files = vec![PathMatcher::new("*.rs").unwrap()];
+ let exclude_files = vec![PathMatcher::new("*.rs").unwrap()];
let rust_only_search_results = store
.update(cx, |store, cx| {
store.search_project(
@@ -486,6 +485,79 @@ async fn test_code_context_retrieval_javascript() {
)
}
+#[gpui::test]
+async fn test_code_context_retrieval_lua() {
+ let language = lua_lang();
+ let mut retriever = CodeContextRetriever::new();
+
+ let text = r#"
+ -- Creates a new class
+ -- @param baseclass The Baseclass of this class, or nil.
+ -- @return A new class reference.
+ function classes.class(baseclass)
+ -- Create the class definition and metatable.
+ local classdef = {}
+ -- Find the super class, either Object or user-defined.
+ baseclass = baseclass or classes.Object
+ -- If this class definition does not know of a function, it will 'look up' to the Baseclass via the __index of the metatable.
+ setmetatable(classdef, { __index = baseclass })
+ -- All class instances have a reference to the class object.
+ classdef.class = classdef
+ --- Recursivly allocates the inheritance tree of the instance.
+ -- @param mastertable The 'root' of the inheritance tree.
+ -- @return Returns the instance with the allocated inheritance tree.
+ function classdef.alloc(mastertable)
+ -- All class instances have a reference to a superclass object.
+ local instance = { super = baseclass.alloc(mastertable) }
+ -- Any functions this instance does not know of will 'look up' to the superclass definition.
+ setmetatable(instance, { __index = classdef, __newindex = mastertable })
+ return instance
+ end
+ end
+ "#.unindent();
+
+ let documents = retriever.parse_file(&text, language.clone()).unwrap();
+
+ assert_documents_eq(
+ &documents,
+ &[
+ (r#"
+ -- Creates a new class
+ -- @param baseclass The Baseclass of this class, or nil.
+ -- @return A new class reference.
+ function classes.class(baseclass)
+ -- Create the class definition and metatable.
+ local classdef = {}
+ -- Find the super class, either Object or user-defined.
+ baseclass = baseclass or classes.Object
+ -- If this class definition does not know of a function, it will 'look up' to the Baseclass via the __index of the metatable.
+ setmetatable(classdef, { __index = baseclass })
+ -- All class instances have a reference to the class object.
+ classdef.class = classdef
+ --- Recursivly allocates the inheritance tree of the instance.
+ -- @param mastertable The 'root' of the inheritance tree.
+ -- @return Returns the instance with the allocated inheritance tree.
+ function classdef.alloc(mastertable)
+ --[ ... ]--
+ --[ ... ]--
+ end
+ end"#.unindent(),
+ 114),
+ (r#"
+ --- Recursivly allocates the inheritance tree of the instance.
+ -- @param mastertable The 'root' of the inheritance tree.
+ -- @return Returns the instance with the allocated inheritance tree.
+ function classdef.alloc(mastertable)
+ -- All class instances have a reference to a superclass object.
+ local instance = { super = baseclass.alloc(mastertable) }
+ -- Any functions this instance does not know of will 'look up' to the superclass definition.
+ setmetatable(instance, { __index = classdef, __newindex = mastertable })
+ return instance
+ end"#.unindent(), 809),
+ ]
+ );
+}
+
#[gpui::test]
async fn test_code_context_retrieval_elixir() {
let language = elixir_lang();
@@ -754,6 +826,346 @@ async fn test_code_context_retrieval_cpp() {
);
}
+#[gpui::test]
+async fn test_code_context_retrieval_ruby() {
+ let language = ruby_lang();
+ let mut retriever = CodeContextRetriever::new();
+
+ let text = r#"
+ # This concern is inspired by "sudo mode" on GitHub. It
+ # is a way to re-authenticate a user before allowing them
+ # to see or perform an action.
+ #
+ # Add `before_action :require_challenge!` to actions you
+ # want to protect.
+ #
+ # The user will be shown a page to enter the challenge (which
+ # is either the password, or just the username when no
+ # password exists). Upon passing, there is a grace period
+ # during which no challenge will be asked from the user.
+ #
+ # Accessing challenge-protected resources during the grace
+ # period will refresh the grace period.
+ module ChallengableConcern
+ extend ActiveSupport::Concern
+
+ CHALLENGE_TIMEOUT = 1.hour.freeze
+
+ def require_challenge!
+ return if skip_challenge?
+
+ if challenge_passed_recently?
+ session[:challenge_passed_at] = Time.now.utc
+ return
+ end
+
+ @challenge = Form::Challenge.new(return_to: request.url)
+
+ if params.key?(:form_challenge)
+ if challenge_passed?
+ session[:challenge_passed_at] = Time.now.utc
+ else
+ flash.now[:alert] = I18n.t('challenge.invalid_password')
+ render_challenge
+ end
+ else
+ render_challenge
+ end
+ end
+
+ def challenge_passed?
+ current_user.valid_password?(challenge_params[:current_password])
+ end
+ end
+
+ class Animal
+ include Comparable
+
+ attr_reader :legs
+
+ def initialize(name, legs)
+ @name, @legs = name, legs
+ end
+
+ def <=>(other)
+ legs <=> other.legs
+ end
+ end
+
+ # Singleton method for car object
+ def car.wheels
+ puts "There are four wheels"
+ end"#
+ .unindent();
+
+ let documents = retriever.parse_file(&text, language.clone()).unwrap();
+
+ assert_documents_eq(
+ &documents,
+ &[
+ (
+ r#"
+ # This concern is inspired by "sudo mode" on GitHub. It
+ # is a way to re-authenticate a user before allowing them
+ # to see or perform an action.
+ #
+ # Add `before_action :require_challenge!` to actions you
+ # want to protect.
+ #
+ # The user will be shown a page to enter the challenge (which
+ # is either the password, or just the username when no
+ # password exists). Upon passing, there is a grace period
+ # during which no challenge will be asked from the user.
+ #
+ # Accessing challenge-protected resources during the grace
+ # period will refresh the grace period.
+ module ChallengableConcern
+ extend ActiveSupport::Concern
+
+ CHALLENGE_TIMEOUT = 1.hour.freeze
+
+ def require_challenge!
+ # ...
+ end
+
+ def challenge_passed?
+ # ...
+ end
+ end"#
+ .unindent(),
+ 558,
+ ),
+ (
+ r#"
+ def require_challenge!
+ return if skip_challenge?
+
+ if challenge_passed_recently?
+ session[:challenge_passed_at] = Time.now.utc
+ return
+ end
+
+ @challenge = Form::Challenge.new(return_to: request.url)
+
+ if params.key?(:form_challenge)
+ if challenge_passed?
+ session[:challenge_passed_at] = Time.now.utc
+ else
+ flash.now[:alert] = I18n.t('challenge.invalid_password')
+ render_challenge
+ end
+ else
+ render_challenge
+ end
+ end"#
+ .unindent(),
+ 663,
+ ),
+ (
+ r#"
+ def challenge_passed?
+ current_user.valid_password?(challenge_params[:current_password])
+ end"#
+ .unindent(),
+ 1254,
+ ),
+ (
+ r#"
+ class Animal
+ include Comparable
+
+ attr_reader :legs
+
+ def initialize(name, legs)
+ # ...
+ end
+
+ def <=>(other)
+ # ...
+ end
+ end"#
+ .unindent(),
+ 1363,
+ ),
+ (
+ r#"
+ def initialize(name, legs)
+ @name, @legs = name, legs
+ end"#
+ .unindent(),
+ 1427,
+ ),
+ (
+ r#"
+ def <=>(other)
+ legs <=> other.legs
+ end"#
+ .unindent(),
+ 1501,
+ ),
+ (
+ r#"
+ # Singleton method for car object
+ def car.wheels
+ puts "There are four wheels"
+ end"#
+ .unindent(),
+ 1591,
+ ),
+ ],
+ );
+}
+
+#[gpui::test]
+async fn test_code_context_retrieval_php() {
+ let language = php_lang();
+ let mut retriever = CodeContextRetriever::new();
+
+ let text = r#"
+ <?php
+
+ namespace LevelUp\Experience\Concerns;
+
+ /*
+ This is a multiple-lines comment block
+ that spans over multiple
+ lines
+ */
+ function functionName() {
+ echo "Hello world!";
+ }
+
+ trait HasAchievements
+ {
+ /**
+ * @throws \Exception
+ */
+ public function grantAchievement(Achievement $achievement, $progress = null): void
+ {
+ if ($progress > 100) {
+ throw new Exception(message: 'Progress cannot be greater than 100');
+ }
+
+ if ($this->achievements()->find($achievement->id)) {
+ throw new Exception(message: 'User already has this Achievement');
+ }
+
+ $this->achievements()->attach($achievement, [
+ 'progress' => $progress ?? null,
+ ]);
+
+ $this->when(value: ($progress === null) || ($progress === 100), callback: fn (): ?array => event(new AchievementAwarded(achievement: $achievement, user: $this)));
+ }
+
+ public function achievements(): BelongsToMany
+ {
+ return $this->belongsToMany(related: Achievement::class)
+ ->withPivot(columns: 'progress')
+ ->where('is_secret', false)
+ ->using(AchievementUser::class);
+ }
+ }
+
+ interface Multiplier
+ {
+ public function qualifies(array $data): bool;
+
+ public function setMultiplier(): int;
+ }
+
+ enum AuditType: string
+ {
+ case Add = 'add';
+ case Remove = 'remove';
+ case Reset = 'reset';
+ case LevelUp = 'level_up';
+ }
+
+ ?>"#
+ .unindent();
+
+ let documents = retriever.parse_file(&text, language.clone()).unwrap();
+
+ assert_documents_eq(
+ &documents,
+ &[
+ (
+ r#"
+ /*
+ This is a multiple-lines comment block
+ that spans over multiple
+ lines
+ */
+ function functionName() {
+ echo "Hello world!";
+ }"#
+ .unindent(),
+ 123,
+ ),
+ (
+ r#"
+ trait HasAchievements
+ {
+ /**
+ * @throws \Exception
+ */
+ public function grantAchievement(Achievement $achievement, $progress = null): void
+ {/* ... */}
+
+ public function achievements(): BelongsToMany
+ {/* ... */}
+ }"#
+ .unindent(),
+ 177,
+ ),
+ (r#"
+ /**
+ * @throws \Exception
+ */
+ public function grantAchievement(Achievement $achievement, $progress = null): void
+ {
+ if ($progress > 100) {
+ throw new Exception(message: 'Progress cannot be greater than 100');
+ }
+
+ if ($this->achievements()->find($achievement->id)) {
+ throw new Exception(message: 'User already has this Achievement');
+ }
+
+ $this->achievements()->attach($achievement, [
+ 'progress' => $progress ?? null,
+ ]);
+
+ $this->when(value: ($progress === null) || ($progress === 100), callback: fn (): ?array => event(new AchievementAwarded(achievement: $achievement, user: $this)));
+ }"#.unindent(), 245),
+ (r#"
+ public function achievements(): BelongsToMany
+ {
+ return $this->belongsToMany(related: Achievement::class)
+ ->withPivot(columns: 'progress')
+ ->where('is_secret', false)
+ ->using(AchievementUser::class);
+ }"#.unindent(), 902),
+ (r#"
+ interface Multiplier
+ {
+ public function qualifies(array $data): bool;
+
+ public function setMultiplier(): int;
+ }"#.unindent(),
+ 1146),
+ (r#"
+ enum AuditType: string
+ {
+ case Add = 'add';
+ case Remove = 'remove';
+ case Reset = 'reset';
+ case LevelUp = 'level_up';
+ }"#.unindent(), 1265)
+ ],
+ );
+}
+
#[gpui::test]
fn test_dot_product(mut rng: StdRng) {
assert_eq!(dot(&[1., 0., 0., 0., 0.], &[0., 1., 0., 0., 0.]), 0.);
@@ -1084,6 +1496,131 @@ fn cpp_lang() -> Arc<Language> {
)
}
+fn lua_lang() -> Arc<Language> {
+ Arc::new(
+ Language::new(
+ LanguageConfig {
+ name: "Lua".into(),
+ path_suffixes: vec!["lua".into()],
+ collapsed_placeholder: "--[ ... ]--".to_string(),
+ ..Default::default()
+ },
+ Some(tree_sitter_lua::language()),
+ )
+ .with_embedding_query(
+ r#"
+ (
+ (comment)* @context
+ .
+ (function_declaration
+ "function" @name
+ name: (_) @name
+ (comment)* @collapse
+ body: (block) @collapse
+ ) @item
+ )
+ "#,
+ )
+ .unwrap(),
+ )
+}
+
+fn php_lang() -> Arc<Language> {
+ Arc::new(
+ Language::new(
+ LanguageConfig {
+ name: "PHP".into(),
+ path_suffixes: vec!["php".into()],
+ collapsed_placeholder: "/* ... */".into(),
+ ..Default::default()
+ },
+ Some(tree_sitter_php::language()),
+ )
+ .with_embedding_query(
+ r#"
+ (
+ (comment)* @context
+ .
+ [
+ (function_definition
+ "function" @name
+ name: (_) @name
+ body: (_
+ "{" @keep
+ "}" @keep) @collapse
+ )
+
+ (trait_declaration
+ "trait" @name
+ name: (_) @name)
+
+ (method_declaration
+ "function" @name
+ name: (_) @name
+ body: (_
+ "{" @keep
+ "}" @keep) @collapse
+ )
+
+ (interface_declaration
+ "interface" @name
+ name: (_) @name
+ )
+
+ (enum_declaration
+ "enum" @name
+ name: (_) @name
+ )
+
+ ] @item
+ )
+ "#,
+ )
+ .unwrap(),
+ )
+}
+
+fn ruby_lang() -> Arc<Language> {
+ Arc::new(
+ Language::new(
+ LanguageConfig {
+ name: "Ruby".into(),
+ path_suffixes: vec!["rb".into()],
+ collapsed_placeholder: "# ...".to_string(),
+ ..Default::default()
+ },
+ Some(tree_sitter_ruby::language()),
+ )
+ .with_embedding_query(
+ r#"
+ (
+ (comment)* @context
+ .
+ [
+ (module
+ "module" @name
+ name: (_) @name)
+ (method
+ "def" @name
+ name: (_) @name
+ body: (body_statement) @collapse)
+ (class
+ "class" @name
+ name: (_) @name)
+ (singleton_method
+ "def" @name
+ object: (_) @name
+ "." @name
+ name: (_) @name
+ body: (body_statement) @collapse)
+ ] @item
+ )
+ "#,
+ )
+ .unwrap(),
+ )
+}
+
fn elixir_lang() -> Arc<Language> {
Arc::new(
Language::new(
@@ -202,7 +202,7 @@ where
self.position = D::default();
}
- let mut entry = self.stack.last_mut().unwrap();
+ let entry = self.stack.last_mut().unwrap();
if !descending {
if entry.index == 0 {
self.stack.pop();
@@ -438,6 +438,7 @@ where
} => {
if ascending {
entry.index += 1;
+ entry.position = self.position.clone();
}
for (child_tree, child_summary) in child_trees[entry.index..]
@@ -738,7 +738,7 @@ mod tests {
for _ in 0..num_operations {
let splice_end = rng.gen_range(0..tree.extent::<Count>(&()).0 + 1);
let splice_start = rng.gen_range(0..splice_end + 1);
- let count = rng.gen_range(0..3);
+ let count = rng.gen_range(0..10);
let tree_end = tree.extent::<Count>(&());
let new_items = rng
.sample_iter(distributions::Standard)
@@ -805,10 +805,12 @@ mod tests {
}
assert_eq!(filter_cursor.item(), None);
- let mut pos = rng.gen_range(0..tree.extent::<Count>(&()).0 + 1);
let mut before_start = false;
let mut cursor = tree.cursor::<Count>();
- cursor.seek(&Count(pos), Bias::Right, &());
+ let start_pos = rng.gen_range(0..=reference_items.len());
+ cursor.seek(&Count(start_pos), Bias::Right, &());
+ let mut pos = rng.gen_range(start_pos..=reference_items.len());
+ cursor.seek_forward(&Count(pos), Bias::Right, &());
for i in 0..10 {
assert_eq!(cursor.start().0, pos);
@@ -16,7 +16,7 @@ db = { path = "../db" }
theme = { path = "../theme" }
util = { path = "../util" }
-alacritty_terminal = { git = "https://github.com/zed-industries/alacritty", rev = "a51dbe25d67e84d6ed4261e640d3954fbdd9be45" }
+alacritty_terminal = { git = "https://github.com/alacritty/alacritty", rev = "7b9f32300ee0a249c0872302c97635b460e45ba5" }
procinfo = { git = "https://github.com/zed-industries/wezterm", rev = "5cd757e5f2eb039ed0c6bb6512223e69d5efc64d", default-features = false }
smallvec.workspace = true
smol.workspace = true
@@ -114,11 +114,7 @@ fn rgb_for_index(i: &u8) -> (u8, u8, u8) {
//Convenience method to convert from a GPUI color to an alacritty Rgb
pub fn to_alac_rgb(color: Color) -> AlacRgb {
- AlacRgb {
- r: color.r,
- g: color.g,
- b: color.g,
- }
+ AlacRgb::new(color.r, color.g, color.b)
}
#[cfg(test)]
@@ -1,20 +1,64 @@
-use gpui::{elements::Label, AnyElement, Element, Entity, View, ViewContext};
+use gpui::{
+ elements::{Empty, Label},
+ AnyElement, Element, Entity, Subscription, View, ViewContext,
+};
+use settings::SettingsStore;
use workspace::{item::ItemHandle, StatusItemView};
-use crate::state::Mode;
+use crate::{state::Mode, Vim, VimEvent, VimModeSetting};
pub struct ModeIndicator {
- pub mode: Mode,
+ pub mode: Option<Mode>,
+ _subscription: Subscription,
}
impl ModeIndicator {
- pub fn new(mode: Mode) -> Self {
- Self { mode }
+ pub fn new(cx: &mut ViewContext<Self>) -> Self {
+ let handle = cx.handle().downgrade();
+
+ let _subscription = cx.subscribe_global::<VimEvent, _>(move |&event, cx| {
+ if let Some(mode_indicator) = handle.upgrade(cx) {
+ match event {
+ VimEvent::ModeChanged { mode } => {
+ cx.update_window(mode_indicator.window_id(), |cx| {
+ mode_indicator.update(cx, move |mode_indicator, cx| {
+ mode_indicator.set_mode(mode, cx);
+ })
+ });
+ }
+ }
+ }
+ });
+
+ cx.observe_global::<SettingsStore, _>(move |mode_indicator, cx| {
+ if settings::get::<VimModeSetting>(cx).0 {
+ mode_indicator.mode = cx
+ .has_global::<Vim>()
+ .then(|| cx.global::<Vim>().state.mode);
+ } else {
+ mode_indicator.mode.take();
+ }
+ })
+ .detach();
+
+ // Vim doesn't exist in some tests
+ let mode = cx
+ .has_global::<Vim>()
+ .then(|| {
+ let vim = cx.global::<Vim>();
+ vim.enabled.then(|| vim.state.mode)
+ })
+ .flatten();
+
+ Self {
+ mode,
+ _subscription,
+ }
}
pub fn set_mode(&mut self, mode: Mode, cx: &mut ViewContext<Self>) {
- if mode != self.mode {
- self.mode = mode;
+ if self.mode != Some(mode) {
+ self.mode = Some(mode);
cx.notify();
}
}
@@ -30,11 +74,16 @@ impl View for ModeIndicator {
}
fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> {
+ let Some(mode) = self.mode.as_ref() else {
+ return Empty::new().into_any();
+ };
+
let theme = &theme::current(cx).workspace.status_bar;
+
// we always choose text to be 12 monospace characters
// so that as the mode indicator changes, the rest of the
// UI stays still.
- let text = match self.mode {
+ let text = match mode {
Mode::Normal => "-- NORMAL --",
Mode::Insert => "-- INSERT --",
Mode::Visual { line: false } => "-- VISUAL --",
@@ -93,7 +93,7 @@ fn search_submit(workspace: &mut Workspace, _: &SearchSubmit, cx: &mut ViewConte
pane.update(cx, |pane, cx| {
if let Some(search_bar) = pane.toolbar().read(cx).item_of_type::<BufferSearchBar>() {
search_bar.update(cx, |search_bar, cx| {
- let mut state = &mut vim.state.search;
+ let state = &mut vim.state.search;
let mut count = state.count;
// in the case that the query has changed, the search bar
@@ -222,7 +222,7 @@ mod test {
});
search_bar.read_with(cx.cx, |bar, cx| {
- assert_eq!(bar.query_editor.read(cx).text(cx), "cc");
+ assert_eq!(bar.query(cx), "cc");
});
deterministic.run_until_parked();
@@ -99,7 +99,7 @@ async fn test_buffer_search(cx: &mut gpui::TestAppContext) {
});
search_bar.read_with(cx.cx, |bar, cx| {
- assert_eq!(bar.query_editor.read(cx).text(cx), "");
+ assert_eq!(bar.query(cx), "");
})
}
@@ -175,7 +175,7 @@ async fn test_selection_on_search(cx: &mut gpui::TestAppContext) {
});
search_bar.read_with(cx.cx, |bar, cx| {
- assert_eq!(bar.query_editor.read(cx).text(cx), "cc");
+ assert_eq!(bar.query(cx), "cc");
});
// wait for the query editor change event to fire.
@@ -215,7 +215,7 @@ async fn test_status_indicator(
assert_eq!(
cx.workspace(|_, cx| mode_indicator.read(cx).mode),
- Mode::Normal
+ Some(Mode::Normal)
);
// shows the correct mode
@@ -223,7 +223,7 @@ async fn test_status_indicator(
deterministic.run_until_parked();
assert_eq!(
cx.workspace(|_, cx| mode_indicator.read(cx).mode),
- Mode::Insert
+ Some(Mode::Insert)
);
// shows even in search
@@ -231,7 +231,7 @@ async fn test_status_indicator(
deterministic.run_until_parked();
assert_eq!(
cx.workspace(|_, cx| mode_indicator.read(cx).mode),
- Mode::Visual { line: false }
+ Some(Mode::Visual { line: false })
);
// hides if vim mode is disabled
@@ -239,15 +239,15 @@ async fn test_status_indicator(
deterministic.run_until_parked();
cx.workspace(|workspace, cx| {
let status_bar = workspace.status_bar().read(cx);
- let mode_indicator = status_bar.item_of_type::<ModeIndicator>();
- assert!(mode_indicator.is_none());
+ let mode_indicator = status_bar.item_of_type::<ModeIndicator>().unwrap();
+ assert!(mode_indicator.read(cx).mode.is_none());
});
cx.enable_vim();
deterministic.run_until_parked();
cx.workspace(|workspace, cx| {
let status_bar = workspace.status_bar().read(cx);
- let mode_indicator = status_bar.item_of_type::<ModeIndicator>();
- assert!(mode_indicator.is_some());
+ let mode_indicator = status_bar.item_of_type::<ModeIndicator>().unwrap();
+ assert!(mode_indicator.read(cx).mode.is_some());
});
}
@@ -43,6 +43,10 @@ impl<'a> VimTestContext<'a> {
toolbar.add_item(project_search_bar, cx);
})
});
+ workspace.status_bar().update(cx, |status_bar, cx| {
+ let vim_mode_indicator = cx.add_view(ModeIndicator::new);
+ status_bar.add_right_item(vim_mode_indicator, cx);
+ });
});
Self { cx }
@@ -43,6 +43,11 @@ struct Number(u8);
actions!(vim, [Tab, Enter]);
impl_actions!(vim, [Number, SwitchMode, PushOperator]);
+#[derive(Copy, Clone, Debug)]
+enum VimEvent {
+ ModeChanged { mode: Mode },
+}
+
pub fn init(cx: &mut AppContext) {
settings::register::<VimModeSetting>(cx);
@@ -121,8 +126,6 @@ pub fn observe_keystrokes(cx: &mut WindowContext) {
pub struct Vim {
active_editor: Option<WeakViewHandle<Editor>>,
editor_subscription: Option<Subscription>,
- mode_indicator: Option<ViewHandle<ModeIndicator>>,
-
enabled: bool,
state: VimState,
}
@@ -181,9 +184,7 @@ impl Vim {
self.state.mode = mode;
self.state.operator_stack.clear();
- if let Some(mode_indicator) = &self.mode_indicator {
- mode_indicator.update(cx, |mode_indicator, cx| mode_indicator.set_mode(mode, cx))
- }
+ cx.emit_global(VimEvent::ModeChanged { mode });
// Sync editor settings like clip mode
self.sync_vim_settings(cx);
@@ -271,44 +272,6 @@ impl Vim {
}
}
- fn sync_mode_indicator(cx: &mut WindowContext) {
- let Some(workspace) = cx.root_view()
- .downcast_ref::<Workspace>()
- .map(|workspace| workspace.downgrade()) else {
- return;
- };
-
- cx.spawn(|mut cx| async move {
- workspace.update(&mut cx, |workspace, cx| {
- Vim::update(cx, |vim, cx| {
- workspace.status_bar().update(cx, |status_bar, cx| {
- let current_position = status_bar.position_of_item::<ModeIndicator>();
-
- if vim.enabled && current_position.is_none() {
- if vim.mode_indicator.is_none() {
- vim.mode_indicator =
- Some(cx.add_view(|_| ModeIndicator::new(vim.state.mode)));
- };
- let mode_indicator = vim.mode_indicator.as_ref().unwrap();
- let position = status_bar
- .position_of_item::<language_selector::ActiveBufferLanguage>();
- if let Some(position) = position {
- status_bar.insert_item_after(position, mode_indicator.clone(), cx)
- } else {
- status_bar.add_left_item(mode_indicator.clone(), cx)
- }
- } else if !vim.enabled {
- if let Some(position) = current_position {
- status_bar.remove_item_at(position, cx)
- }
- }
- })
- })
- })
- })
- .detach_and_log_err(cx);
- }
-
fn set_enabled(&mut self, enabled: bool, cx: &mut AppContext) {
if self.enabled != enabled {
self.enabled = enabled;
@@ -359,8 +322,6 @@ impl Vim {
self.unhook_vim_settings(editor, cx);
}
});
-
- Vim::sync_mode_indicator(cx);
}
fn unhook_vim_settings(&self, editor: &mut Editor, cx: &mut ViewContext<Editor>) {
@@ -746,6 +746,10 @@ impl Pane {
_: &CloseAllItems,
cx: &mut ViewContext<Self>,
) -> Option<Task<Result<()>>> {
+ if self.items.is_empty() {
+ return None;
+ }
+
Some(self.close_items(cx, move |_| true))
}
@@ -122,6 +122,7 @@ actions!(
NewFile,
NewWindow,
CloseWindow,
+ CloseInactiveTabsAndPanes,
AddFolderToProject,
Unfollow,
Save,
@@ -240,6 +241,7 @@ pub fn init(app_state: Arc<AppState>, cx: &mut AppContext) {
cx.add_async_action(Workspace::follow_next_collaborator);
cx.add_async_action(Workspace::close);
+ cx.add_async_action(Workspace::close_inactive_items_and_panes);
cx.add_global_action(Workspace::close_global);
cx.add_global_action(restart);
cx.add_async_action(Workspace::save_all);
@@ -1671,6 +1673,45 @@ impl Workspace {
}
}
+ pub fn close_inactive_items_and_panes(
+ &mut self,
+ _: &CloseInactiveTabsAndPanes,
+ cx: &mut ViewContext<Self>,
+ ) -> Option<Task<Result<()>>> {
+ let current_pane = self.active_pane();
+
+ let mut tasks = Vec::new();
+
+ if let Some(current_pane_close) = current_pane.update(cx, |pane, cx| {
+ pane.close_inactive_items(&CloseInactiveItems, cx)
+ }) {
+ tasks.push(current_pane_close);
+ };
+
+ for pane in self.panes() {
+ if pane.id() == current_pane.id() {
+ continue;
+ }
+
+ if let Some(close_pane_items) = pane.update(cx, |pane: &mut Pane, cx| {
+ pane.close_all_items(&CloseAllItems, cx)
+ }) {
+ tasks.push(close_pane_items)
+ }
+ }
+
+ if tasks.is_empty() {
+ None
+ } else {
+ Some(cx.spawn(|_, _| async move {
+ for task in tasks {
+ task.await?
+ }
+ Ok(())
+ }))
+ }
+ }
+
pub fn toggle_dock(&mut self, dock_side: DockPosition, cx: &mut ViewContext<Self>) {
let dock = match dock_side {
DockPosition::Left => &self.left_dock,
@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathansobo@gmail.com>"]
description = "The fast, collaborative code editor."
edition = "2021"
name = "zed"
-version = "0.98.0"
+version = "0.99.0"
publish = false
[lib]
@@ -128,6 +128,7 @@ tree-sitter-svelte.workspace = true
tree-sitter-racket.workspace = true
tree-sitter-yaml.workspace = true
tree-sitter-lua.workspace = true
+tree-sitter-nix.workspace = true
url = "2.2"
urlencoding = "2.1.2"
@@ -152,8 +152,10 @@ pub fn init(languages: Arc<LanguageRegistry>, node_runtime: Arc<NodeRuntime>) {
tree_sitter_php::language(),
vec![Arc::new(php::IntelephenseLspAdapter::new(node_runtime))],
);
+
language("elm", tree_sitter_elm::language(), vec![]);
language("glsl", tree_sitter_glsl::language(), vec![]);
+ language("nix", tree_sitter_nix::language(), vec![]);
}
#[cfg(any(test, feature = "test-support"))]
@@ -7,3 +7,4 @@ brackets = [
{ start = "[", end = "]", close = true, newline = true },
{ start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] },
]
+collapsed_placeholder = "--[ ... ]--"
@@ -0,0 +1,10 @@
+(
+ (comment)* @context
+ .
+ (function_declaration
+ "function" @name
+ name: (_) @name
+ (comment)* @collapse
+ body: (block) @collapse
+ ) @item
+)
@@ -0,0 +1,11 @@
+name = "Nix"
+path_suffixes = ["nix"]
+line_comment = "# "
+block_comment = ["/* ", " */"]
+autoclose_before = ";:.,=}])>` \n\t\""
+brackets = [
+ { start = "{", end = "}", close = true, newline = true },
+ { start = "[", end = "]", close = true, newline = true },
+ { start = "(", end = ")", close = true, newline = true },
+ { start = "<", end = ">", close = true, newline = true },
+]
@@ -0,0 +1,95 @@
+(comment) @comment
+
+[
+ "if"
+ "then"
+ "else"
+ "let"
+ "inherit"
+ "in"
+ "rec"
+ "with"
+ "assert"
+ "or"
+] @keyword
+
+[
+ (string_expression)
+ (indented_string_expression)
+] @string
+
+[
+ (path_expression)
+ (hpath_expression)
+ (spath_expression)
+] @string.special.path
+
+(uri_expression) @link_uri
+
+[
+ (integer_expression)
+ (float_expression)
+] @number
+
+(interpolation
+ "${" @punctuation.special
+ "}" @punctuation.special) @embedded
+
+(escape_sequence) @escape
+(dollar_escape) @escape
+
+(function_expression
+ universal: (identifier) @parameter
+)
+
+(formal
+ name: (identifier) @parameter
+ "?"? @punctuation.delimiter)
+
+(select_expression
+ attrpath: (attrpath (identifier)) @property)
+
+(apply_expression
+ function: [
+ (variable_expression (identifier)) @function
+ (select_expression
+ attrpath: (attrpath
+ attr: (identifier) @function .))])
+
+(unary_expression
+ operator: _ @operator)
+
+(binary_expression
+ operator: _ @operator)
+
+(variable_expression (identifier) @variable)
+
+(binding
+ attrpath: (attrpath (identifier)) @property)
+
+"=" @operator
+
+[
+ ";"
+ "."
+ ","
+] @punctuation.delimiter
+
+[
+ "("
+ ")"
+ "["
+ "]"
+ "{"
+ "}"
+] @punctuation.bracket
+
+(identifier) @variable
+
+((identifier) @function.builtin
@@ -9,3 +9,4 @@ brackets = [
{ start = "(", end = ")", close = true, newline = true },
{ start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] },
]
+collapsed_placeholder = "/* ... */"
@@ -0,0 +1,36 @@
+(
+ (comment)* @context
+ .
+ [
+ (function_definition
+ "function" @name
+ name: (_) @name
+ body: (_
+ "{" @keep
+ "}" @keep) @collapse
+ )
+
+ (trait_declaration
+ "trait" @name
+ name: (_) @name)
+
+ (method_declaration
+ "function" @name
+ name: (_) @name
+ body: (_
+ "{" @keep
+ "}" @keep) @collapse
+ )
+
+ (interface_declaration
+ "interface" @name
+ name: (_) @name
+ )
+
+ (enum_declaration
+ "enum" @name
+ name: (_) @name
+ )
+
+ ] @item
+ )
@@ -8,8 +8,6 @@
name: (_) @name
) @item
-
-
(method_declaration
"function" @context
name: (_) @name
@@ -24,3 +22,8 @@
"enum" @context
name: (_) @name
) @item
+
+(trait_declaration
+ "trait" @context
+ name: (_) @name
+ ) @item
@@ -10,3 +10,4 @@ brackets = [
{ start = "\"", end = "\"", close = true, newline = false, not_in = ["comment", "string"] },
{ start = "'", end = "'", close = true, newline = false, not_in = ["comment", "string"] },
]
+collapsed_placeholder = "# ..."
@@ -0,0 +1,22 @@
+(
+ (comment)* @context
+ .
+ [
+ (module
+ "module" @name
+ name: (_) @name)
+ (method
+ "def" @name
+ name: (_) @name
+ body: (body_statement) @collapse)
+ (class
+ "class" @name
+ name: (_) @name)
+ (singleton_method
+ "def" @name
+ object: (_) @name
+ "." @name
+ name: (_) @name
+ body: (body_statement) @collapse)
+ ] @item
+ )
@@ -45,6 +45,7 @@ use std::{
use sum_tree::Bias;
use terminal_view::{get_working_directory, TerminalSettings, TerminalView};
use util::{
+ channel::ReleaseChannel,
http::{self, HttpClient},
paths::PathLikeWithPosition,
};
@@ -136,7 +137,7 @@ fn main() {
languages.set_executor(cx.background().clone());
languages.set_language_server_download_dir(paths::LANGUAGES_DIR.clone());
let languages = Arc::new(languages);
- let node_runtime = NodeRuntime::instance(http.clone(), cx.background().to_owned());
+ let node_runtime = NodeRuntime::instance(http.clone());
languages::init(languages.clone(), node_runtime.clone());
let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http.clone(), cx));
@@ -415,22 +416,41 @@ fn init_panic_hook(app: &App, installation_id: Option<String>) {
panic::set_hook(Box::new(move |info| {
let prior_panic_count = PANIC_COUNT.fetch_add(1, Ordering::SeqCst);
if prior_panic_count > 0 {
- std::panic::resume_unwind(Box::new(()));
+ // Give the panicking thread time to write the panic file
+ loop {
+ std::thread::yield_now();
+ }
}
- let app_version = ZED_APP_VERSION
- .or_else(|| platform.app_version().ok())
- .map_or("dev".to_string(), |v| v.to_string());
-
let thread = thread::current();
- let thread = thread.name().unwrap_or("<unnamed>");
+ let thread_name = thread.name().unwrap_or("<unnamed>");
- let payload = info.payload();
- let payload = None
- .or_else(|| payload.downcast_ref::<&str>().map(|s| s.to_string()))
- .or_else(|| payload.downcast_ref::<String>().map(|s| s.clone()))
+ let payload = info
+ .payload()
+ .downcast_ref::<&str>()
+ .map(|s| s.to_string())
+ .or_else(|| info.payload().downcast_ref::<String>().map(|s| s.clone()))
.unwrap_or_else(|| "Box<Any>".to_string());
+ if *util::channel::RELEASE_CHANNEL == ReleaseChannel::Dev {
+ let location = info.location().unwrap();
+ let backtrace = Backtrace::new();
+ eprintln!(
+ "Thread {:?} panicked with {:?} at {}:{}:{}\n{:?}",
+ thread_name,
+ payload,
+ location.file(),
+ location.line(),
+ location.column(),
+ backtrace,
+ );
+ std::process::exit(-1);
+ }
+
+ let app_version = ZED_APP_VERSION
+ .or_else(|| platform.app_version().ok())
+ .map_or("dev".to_string(), |v| v.to_string());
+
let backtrace = Backtrace::new();
let mut backtrace = backtrace
.frames()
@@ -447,7 +467,7 @@ fn init_panic_hook(app: &App, installation_id: Option<String>) {
}
let panic_data = Panic {
- thread: thread.into(),
+ thread: thread_name.into(),
payload: payload.into(),
location_data: info.location().map(|location| LocationData {
file: location.file().into(),
@@ -717,7 +737,7 @@ async fn watch_languages(_: Arc<dyn Fs>, _: Arc<LanguageRegistry>) -> Option<()>
}
#[cfg(not(debug_assertions))]
-fn watch_file_types(fs: Arc<dyn Fs>, cx: &mut AppContext) {}
+fn watch_file_types(_fs: Arc<dyn Fs>, _cx: &mut AppContext) {}
fn connect_to_cli(
server_name: &str,
@@ -308,6 +308,7 @@ pub fn initialize_workspace(
);
let active_buffer_language =
cx.add_view(|_| language_selector::ActiveBufferLanguage::new(workspace));
+ let vim_mode_indicator = cx.add_view(|cx| vim::ModeIndicator::new(cx));
let feedback_button = cx.add_view(|_| {
feedback::deploy_feedback_button::DeployFeedbackButton::new(workspace)
});
@@ -319,6 +320,7 @@ pub fn initialize_workspace(
status_bar.add_right_item(feedback_button, cx);
status_bar.add_right_item(copilot, cx);
status_bar.add_right_item(active_buffer_language, cx);
+ status_bar.add_right_item(vim_mode_indicator, cx);
status_bar.add_right_item(cursor_position, cx);
});
@@ -543,7 +545,6 @@ pub fn handle_keymap_file_changes(
reload_keymaps(cx, &keymap_content);
}
})
- .detach();
}));
}
}
@@ -2362,7 +2363,7 @@ mod tests {
languages.set_executor(cx.background().clone());
let languages = Arc::new(languages);
let http = FakeHttpClient::with_404_response();
- let node_runtime = NodeRuntime::instance(http, cx.background().to_owned());
+ let node_runtime = NodeRuntime::instance(http);
languages::init(languages.clone(), node_runtime);
for name in languages.language_names() {
languages.language_for_name(&name);
@@ -0,0 +1,29 @@
+[⬅ Back to Index](../index.md)
+
+# Generating Theme Types
+
+
+## How to generate theme types:
+
+Run the script:
+
+```bash
+./script/build-theme-types
+```
+
+The types are generated in `styles/src/types/zed.ts`.
+
+
+## How it works:
+
+1. Rust types
+
+ The `crates/theme` crate contains the theme types.
+ The `schemars` crate is used to generate a JSON schema from the theme structs.
+ Every struct that represents a theme type has a `#[derive(JsonSchema)]` attribute.
+
+ The task located at `crates/xtask/src/main.rs` generates the JSON schema from the theme structs.
+
+2. TypeScript types
+
+ The `npm run build-types` script in the `styles` package generates TypeScript types from the JSON schema and saves them to `styles/src/types/zed.ts`.
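The pipeline described above boils down to deriving `JsonSchema` on the theme structs, serializing the resulting schema, and feeding it to a TypeScript generator. A minimal sketch of the `schemars` step, with a hypothetical `StatusBarTheme` struct standing in for the real theme types:

```rust
// Sketch only: `StatusBarTheme` and its fields are invented for illustration;
// the actual theme structs live in `crates/theme`.
use schemars::{schema_for, JsonSchema};

#[derive(JsonSchema)]
struct StatusBarTheme {
    height: f32,
    icon_width: f32,
    message: String,
}

fn main() {
    // `schema_for!` builds a JSON Schema for the type; printing it as JSON is
    // roughly what the xtask then hands to `npm run build-types`.
    let schema = schema_for!(StatusBarTheme);
    println!("{}", serde_json::to_string_pretty(&schema).unwrap());
}
```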
@@ -1,4 +1,4 @@
[toolchain]
-channel = "1.70"
+channel = "1.71"
components = [ "rustfmt" ]
targets = [ "x86_64-apple-darwin", "aarch64-apple-darwin", "wasm32-wasi" ]
@@ -170,8 +170,8 @@ export default function editor(): any {
line_number: with_opacity(foreground(layer), 0.35),
line_number_active: foreground(layer),
rename_fade: 0.6,
- wrap_guide: with_opacity(foreground(layer), 0.1),
- active_wrap_guide: with_opacity(foreground(layer), 0.2),
+ wrap_guide: with_opacity(foreground(layer), 0.05),
+ active_wrap_guide: with_opacity(foreground(layer), 0.1),
unnecessary_code_fade: 0.5,
selection: theme.players[0],
whitespace: theme.ramps.neutral(0.5).hex(),
@@ -44,7 +44,7 @@ export default function status_bar(): any {
icon_spacing: 4,
icon_width: 14,
height: 18,
- message: text(layer, "sans"),
+ message: text(layer, "sans", { size: "xs" }),
icon_color: foreground(layer),
},
state: {