Detailed changes
@@ -11,7 +11,7 @@ env:
jobs:
publish:
- name: Publish collab server image
+ name: Publish collab server image
runs-on:
- self-hosted
- deploy
@@ -22,6 +22,9 @@ jobs:
- name: Sign into DigitalOcean docker registry
run: doctl registry login
+ - name: Prune Docker system
+ run: docker system prune
+
- name: Checkout repo
uses: actions/checkout@v3
with:
@@ -41,6 +44,6 @@ jobs:
- name: Build docker image
run: docker build . --tag registry.digitalocean.com/zed/collab:v${COLLAB_VERSION}
-
+
- name: Publish docker image
run: docker push registry.digitalocean.com/zed/collab:v${COLLAB_VERSION}
@@ -36,11 +36,11 @@ dependencies = [
[[package]]
name = "addr2line"
-version = "0.20.0"
+version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f4fa78e18c64fce05e902adecd7a5eed15a5e0a3439f7b0e169f0252214865e3"
+checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb"
dependencies = [
- "gimli 0.27.3",
+ "gimli 0.28.0",
]
[[package]]
@@ -88,9 +88,9 @@ dependencies = [
[[package]]
name = "aho-corasick"
-version = "1.0.2"
+version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41"
+checksum = "6748e8def348ed4d14996fa801f4122cd763fff530258cdc03f64b25f89d3a5a"
dependencies = [
"memchr",
]
@@ -140,7 +140,7 @@ source = "git+https://github.com/zed-industries/alacritty?rev=33306142195b354ef3
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.28",
+ "syn 2.0.29",
]
[[package]]
@@ -151,7 +151,7 @@ dependencies = [
"alacritty_config",
"alacritty_config_derive",
"base64 0.13.1",
- "bitflags 2.3.3",
+ "bitflags 2.4.0",
"home",
"libc",
"log",
@@ -244,9 +244,9 @@ dependencies = [
[[package]]
name = "anstyle"
-version = "1.0.1"
+version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3a30da5c5f2d5e72842e00bcb57657162cdabef0931f40e2deb9b4140440cecd"
+checksum = "15c4c2c83f81532e5845a733998b6971faca23490340a418e9b72a3ec9de12ea"
[[package]]
name = "anstyle-parse"
@@ -268,9 +268,9 @@ dependencies = [
[[package]]
name = "anstyle-wincon"
-version = "1.0.1"
+version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "180abfa45703aebe0093f79badacc01b8fd4ea2e35118747e5811127f926e188"
+checksum = "c677ab05e09154296dd37acecd46420c17b9713e8366facafa8fc0885167cf4c"
dependencies = [
"anstyle",
"windows-sys",
@@ -278,9 +278,9 @@ dependencies = [
[[package]]
name = "anyhow"
-version = "1.0.72"
+version = "1.0.75"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3b13c32d80ecc7ab747b80c3784bce54ee8a7a0cc4fbda9bf4cda2cf6fe90854"
+checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6"
[[package]]
name = "arrayref"
@@ -337,7 +337,7 @@ dependencies = [
"futures-core",
"futures-io",
"once_cell",
- "pin-project-lite 0.2.10",
+ "pin-project-lite 0.2.12",
"tokio",
]
@@ -351,7 +351,7 @@ dependencies = [
"futures-core",
"futures-io",
"memchr",
- "pin-project-lite 0.2.10",
+ "pin-project-lite 0.2.12",
]
[[package]]
@@ -411,15 +411,15 @@ dependencies = [
"polling",
"rustix 0.37.23",
"slab",
- "socket2",
+ "socket2 0.4.9",
"waker-fn",
]
[[package]]
name = "async-lock"
-version = "2.7.0"
+version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fa24f727524730b077666307f2734b4a1a1c57acb79193127dcc8914d5242dd7"
+checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b"
dependencies = [
"event-listener",
]
@@ -482,7 +482,7 @@ checksum = "0e97ce7de6cf12de5d7226c73f5ba9811622f4db3a5b91b55c53e987e5f91cba"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.28",
+ "syn 2.0.29",
]
[[package]]
@@ -505,7 +505,7 @@ dependencies = [
"log",
"memchr",
"once_cell",
- "pin-project-lite 0.2.10",
+ "pin-project-lite 0.2.12",
"pin-utils",
"slab",
"wasm-bindgen-futures",
@@ -519,7 +519,7 @@ checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51"
dependencies = [
"async-stream-impl",
"futures-core",
- "pin-project-lite 0.2.10",
+ "pin-project-lite 0.2.12",
]
[[package]]
@@ -530,7 +530,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.28",
+ "syn 2.0.29",
]
[[package]]
@@ -567,13 +567,13 @@ dependencies = [
[[package]]
name = "async-trait"
-version = "0.1.72"
+version = "0.1.73"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cc6dde6e4ed435a4c1ee4e73592f5ba9da2151af10076cc04858746af9352d09"
+checksum = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.28",
+ "syn 2.0.29",
]
[[package]]
@@ -586,7 +586,7 @@ dependencies = [
"futures-io",
"futures-util",
"log",
- "pin-project-lite 0.2.10",
+ "pin-project-lite 0.2.12",
"tungstenite 0.16.0",
]
@@ -681,12 +681,12 @@ dependencies = [
"http",
"http-body",
"hyper",
- "itoa 1.0.9",
+ "itoa",
"matchit",
"memchr",
"mime",
"percent-encoding",
- "pin-project-lite 0.2.10",
+ "pin-project-lite 0.2.12",
"serde",
"serde_json",
"serde_urlencoded",
@@ -727,7 +727,7 @@ dependencies = [
"futures-util",
"http",
"mime",
- "pin-project-lite 0.2.10",
+ "pin-project-lite 0.2.12",
"serde",
"serde_json",
"tokio",
@@ -739,16 +739,16 @@ dependencies = [
[[package]]
name = "backtrace"
-version = "0.3.68"
+version = "0.3.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12"
+checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837"
dependencies = [
- "addr2line 0.20.0",
+ "addr2line 0.21.0",
"cc",
"cfg-if 1.0.0",
"libc",
"miniz_oxide 0.7.1",
- "object 0.31.1",
+ "object 0.32.0",
"rustc-demangle",
]
@@ -831,7 +831,7 @@ dependencies = [
"regex",
"rustc-hash",
"shlex",
- "syn 2.0.28",
+ "syn 2.0.29",
"which",
]
@@ -858,9 +858,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
-version = "2.3.3"
+version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42"
+checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635"
dependencies = [
"serde",
]
@@ -996,7 +996,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05"
dependencies = [
"memchr",
- "regex-automata 0.3.4",
+ "regex-automata 0.3.6",
"serde",
]
@@ -1063,6 +1063,7 @@ dependencies = [
"anyhow",
"async-broadcast",
"audio",
+ "channel",
"client",
"collections",
"fs",
@@ -1156,11 +1157,12 @@ checksum = "a2698f953def977c68f935bb0dfa959375ad4638570e969e2f1e9f433cbf1af6"
[[package]]
name = "cc"
-version = "1.0.79"
+version = "1.0.83"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f"
+checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0"
dependencies = [
"jobserver",
+ "libc",
]
[[package]]
@@ -1190,6 +1192,41 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+[[package]]
+name = "channel"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "client",
+ "collections",
+ "db",
+ "feature_flags",
+ "futures 0.3.28",
+ "gpui",
+ "image",
+ "language",
+ "lazy_static",
+ "log",
+ "parking_lot 0.11.2",
+ "postage",
+ "rand 0.8.5",
+ "rpc",
+ "schemars",
+ "serde",
+ "serde_derive",
+ "settings",
+ "smol",
+ "sum_tree",
+ "tempfile",
+ "text",
+ "thiserror",
+ "time 0.3.27",
+ "tiny_http",
+ "url",
+ "util",
+ "uuid 1.4.1",
+]
+
[[package]]
name = "chrono"
version = "0.4.26"
@@ -1251,9 +1288,9 @@ dependencies = [
[[package]]
name = "clap"
-version = "4.3.19"
+version = "4.3.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5fd304a20bff958a57f04c4e96a2e7594cc4490a0e809cbd48bb6437edaa452d"
+checksum = "fb690e81c7840c0d7aade59f242ea3b41b9bc27bcd5997890e7702ae4b32e487"
dependencies = [
"clap_builder",
"clap_derive 4.3.12",
@@ -1262,9 +1299,9 @@ dependencies = [
[[package]]
name = "clap_builder"
-version = "4.3.19"
+version = "4.3.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "01c6a3f08f1fe5662a35cfe393aec09c4df95f60ee93b7556505260f75eee9e1"
+checksum = "5ed2e96bc16d8d740f6f48d663eddf4b8a0983e79210fd55479b7bcd0a69860e"
dependencies = [
"anstream",
"anstyle",
@@ -1294,7 +1331,7 @@ dependencies = [
"heck 0.4.1",
"proc-macro2",
"quote",
- "syn 2.0.28",
+ "syn 2.0.29",
]
[[package]]
@@ -1337,6 +1374,7 @@ dependencies = [
"async-tungstenite",
"collections",
"db",
+ "feature_flags",
"futures 0.3.28",
"gpui",
"image",
@@ -1351,11 +1389,11 @@ dependencies = [
"serde_derive",
"settings",
"smol",
- "staff_mode",
"sum_tree",
"tempfile",
+ "text",
"thiserror",
- "time 0.3.24",
+ "time 0.3.27",
"tiny_http",
"url",
"util",
@@ -1409,7 +1447,7 @@ dependencies = [
[[package]]
name = "collab"
-version = "0.17.0"
+version = "0.18.0"
dependencies = [
"anyhow",
"async-tungstenite",
@@ -1418,8 +1456,11 @@ dependencies = [
"axum-extra",
"base64 0.13.1",
"call",
+ "channel",
"clap 3.2.25",
"client",
+ "clock",
+ "collab_ui",
"collections",
"ctor",
"dashmap",
@@ -1444,6 +1485,7 @@ dependencies = [
"pretty_assertions",
"project",
"prometheus",
+ "prost 0.8.0",
"rand 0.8.5",
"reqwest",
"rpc",
@@ -1456,8 +1498,9 @@ dependencies = [
"settings",
"sha-1 0.9.8",
"sqlx",
+ "text",
"theme",
- "time 0.3.24",
+ "time 0.3.27",
"tokio",
"tokio-tungstenite",
"toml 0.5.11",
@@ -1478,16 +1521,19 @@ dependencies = [
"anyhow",
"auto_update",
"call",
+ "channel",
"client",
"clock",
"collections",
"context_menu",
"db",
"editor",
+ "feature_flags",
"feedback",
"futures 0.3.28",
"fuzzy",
"gpui",
+ "language",
"log",
"menu",
"picker",
@@ -1498,7 +1544,6 @@ dependencies = [
"serde",
"serde_derive",
"settings",
- "staff_mode",
"theme",
"theme_selector",
"util",
@@ -1556,6 +1601,19 @@ dependencies = [
"workspace",
]
+[[package]]
+name = "component_test"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "gpui",
+ "project",
+ "settings",
+ "theme",
+ "util",
+ "workspace",
+]
+
[[package]]
name = "concurrent-queue"
version = "2.2.0"
@@ -1984,7 +2042,7 @@ dependencies = [
"openssl-probe",
"openssl-sys",
"schannel",
- "socket2",
+ "socket2 0.4.9",
"winapi 0.3.9",
]
@@ -2005,9 +2063,9 @@ dependencies = [
[[package]]
name = "dashmap"
-version = "5.5.0"
+version = "5.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6943ae99c34386c84a470c499d3414f66502a41340aa895406e0d2e4a207b91d"
+checksum = "edd72493923899c6f10c641bdbdeddc7183d6396641d99c1a0d1597f37f92e28"
dependencies = [
"cfg-if 1.0.0",
"hashbrown 0.14.0",
@@ -2065,9 +2123,9 @@ dependencies = [
[[package]]
name = "deranged"
-version = "0.3.6"
+version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8810e7e2cf385b1e9b50d68264908ec367ba642c96d02edfe61c39e88e2a3c01"
+checksum = "f2696e8a945f658fd14dc3b87242e6b80cd0f36ff04ea560fa39082368847946"
dependencies = [
"serde",
]
@@ -2255,9 +2313,9 @@ dependencies = [
[[package]]
name = "dyn-clone"
-version = "1.0.12"
+version = "1.0.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "304e6508efa593091e97a9abbc10f90aa7ca635b6d2784feff3c89d41dd12272"
+checksum = "bbfc4744c1b8f2a09adc0e55242f60b1af195d88596bd8700be74418c056c555"
[[package]]
name = "editor"
@@ -2320,9 +2378,9 @@ checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"
[[package]]
name = "encoding_rs"
-version = "0.8.32"
+version = "0.8.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394"
+checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1"
dependencies = [
"cfg-if 1.0.0",
]
@@ -2370,9 +2428,9 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
[[package]]
name = "erased-serde"
-version = "0.3.28"
+version = "0.3.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da96524cc884f6558f1769b6c46686af2fe8e8b4cd253bd5a3cdba8181b8e070"
+checksum = "fc978899517288e3ebbd1a3bfc1d9537dbb87eeab149e53ea490e63bcdff561a"
dependencies = [
"serde",
]
@@ -2471,6 +2529,14 @@ version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764"
+[[package]]
+name = "feature_flags"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "gpui",
+]
+
[[package]]
name = "feedback"
version = "0.1.0"
@@ -2535,13 +2601,13 @@ dependencies = [
[[package]]
name = "filetime"
-version = "0.2.21"
+version = "0.2.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5cbc844cecaee9d4443931972e1289c8ff485cb4cc2767cb03ca139ed6885153"
+checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0"
dependencies = [
"cfg-if 1.0.0",
"libc",
- "redox_syscall 0.2.16",
+ "redox_syscall 0.3.5",
"windows-sys",
]
@@ -2553,9 +2619,9 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flate2"
-version = "1.0.26"
+version = "1.0.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743"
+checksum = "c6c98ee8095e9d1dcbf2fcc6d95acccb90d1c81db1e44725c6a984b1dbdfb010"
dependencies = [
"crc32fast",
"miniz_oxide 0.7.1",
@@ -2696,7 +2762,7 @@ dependencies = [
"smol",
"sum_tree",
"tempfile",
- "time 0.3.24",
+ "time 0.3.27",
"util",
]
@@ -2834,7 +2900,7 @@ dependencies = [
"futures-io",
"memchr",
"parking",
- "pin-project-lite 0.2.10",
+ "pin-project-lite 0.2.12",
"waker-fn",
]
@@ -2846,7 +2912,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.28",
+ "syn 2.0.29",
]
[[package]]
@@ -2875,7 +2941,7 @@ dependencies = [
"futures-sink",
"futures-task",
"memchr",
- "pin-project-lite 0.2.10",
+ "pin-project-lite 0.2.12",
"pin-utils",
"slab",
"tokio-io",
@@ -2953,9 +3019,9 @@ dependencies = [
[[package]]
name = "gimli"
-version = "0.27.3"
+version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e"
+checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0"
[[package]]
name = "git"
@@ -2998,11 +3064,11 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
[[package]]
name = "globset"
-version = "0.4.12"
+version = "0.4.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aca8bbd8e0707c1887a8bbb7e6b40e228f251ff5d62c8220a4a7a53c73aff006"
+checksum = "759c97c1e17c55525b57192c06a267cda0ac5210b222d6b82189a2338fa1c13d"
dependencies = [
- "aho-corasick 1.0.2",
+ "aho-corasick 1.0.4",
"bstr",
"fnv",
"log",
@@ -3089,7 +3155,7 @@ dependencies = [
"sqlez",
"sum_tree",
"taffy",
- "time 0.3.24",
+ "time 0.3.27",
"tiny-skia",
"usvg",
"util",
@@ -3115,9 +3181,9 @@ checksum = "eec1c01eb1de97451ee0d60de7d81cf1e72aabefb021616027f3d1c3ec1c723c"
[[package]]
name = "h2"
-version = "0.3.20"
+version = "0.3.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "97ec8491ebaf99c8eaa73058b045fe58073cd6be7f596ac993ced0b0a0c01049"
+checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833"
dependencies = [
"bytes 1.4.0",
"fnv",
@@ -3311,7 +3377,7 @@ checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482"
dependencies = [
"bytes 1.4.0",
"fnv",
- "itoa 1.0.9",
+ "itoa",
]
[[package]]
@@ -3322,7 +3388,7 @@ checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1"
dependencies = [
"bytes 1.4.0",
"http",
- "pin-project-lite 0.2.10",
+ "pin-project-lite 0.2.12",
]
[[package]]
@@ -3339,9 +3405,9 @@ checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904"
[[package]]
name = "httpdate"
-version = "1.0.2"
+version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421"
+checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
[[package]]
name = "human_bytes"
@@ -3370,9 +3436,9 @@ dependencies = [
"http-body",
"httparse",
"httpdate",
- "itoa 1.0.9",
- "pin-project-lite 0.2.10",
- "socket2",
+ "itoa",
+ "pin-project-lite 0.2.12",
+ "socket2 0.4.9",
"tokio",
"tower-service",
"tracing",
@@ -3386,7 +3452,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1"
dependencies = [
"hyper",
- "pin-project-lite 0.2.10",
+ "pin-project-lite 0.2.12",
"tokio",
"tokio-io-timeout",
]
@@ -3604,7 +3670,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b"
dependencies = [
"hermit-abi 0.3.2",
- "rustix 0.38.4",
+ "rustix 0.38.8",
"windows-sys",
]
@@ -3644,12 +3710,6 @@ dependencies = [
"either",
]
-[[package]]
-name = "itoa"
-version = "0.4.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4"
-
[[package]]
name = "itoa"
version = "1.0.9"
@@ -4076,9 +4136,9 @@ dependencies = [
[[package]]
name = "log"
-version = "0.4.19"
+version = "0.4.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
+checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
dependencies = [
"serde",
"value-bag",
@@ -4109,9 +4169,9 @@ dependencies = [
[[package]]
name = "lsp-types"
-version = "0.94.0"
+version = "0.94.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0b63735a13a1f9cd4f4835223d828ed9c2e35c8c5e61837774399f558b6a1237"
+checksum = "c66bfd44a06ae10647fe3f8214762e9369fd4248df1350924b4ef9e770a85ea1"
dependencies = [
"bitflags 1.3.2",
"serde",
@@ -4209,9 +4269,9 @@ dependencies = [
[[package]]
name = "memchr"
-version = "2.5.0"
+version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
+checksum = "76fc44e2588d5b436dbc3c6cf62aef290f90dab6235744a93dfe1cc18f451e2c"
[[package]]
name = "memfd"
@@ -4567,9 +4627,9 @@ dependencies = [
[[package]]
name = "num-bigint"
-version = "0.4.3"
+version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f"
+checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0"
dependencies = [
"autocfg",
"num-integer",
@@ -4725,9 +4785,9 @@ dependencies = [
[[package]]
name = "object"
-version = "0.31.1"
+version = "0.32.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1"
+checksum = "77ac5bbd07aea88c60a577a1ce218075ffd59208b2d7ca97adf9bfc5aeb21ebe"
dependencies = [
"memchr",
]
@@ -4769,9 +4829,9 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
[[package]]
name = "openssl"
-version = "0.10.55"
+version = "0.10.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "345df152bc43501c5eb9e4654ff05f794effb78d4efe3d53abc158baddc0703d"
+checksum = "729b745ad4a5575dd06a3e1af1414bd330ee561c01b3899eb584baeaa8def17e"
dependencies = [
"bitflags 1.3.2",
"cfg-if 1.0.0",
@@ -4790,7 +4850,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.28",
+ "syn 2.0.29",
]
[[package]]
@@ -4801,9 +4861,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
[[package]]
name = "openssl-sys"
-version = "0.9.90"
+version = "0.9.91"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "374533b0e45f3a7ced10fcaeccca020e66656bc03dac384f852e4e5a7a8104a6"
+checksum = "866b5f16f90776b9bb8dc1e1802ac6f0513de3a7a7465867bfbc563dc737faac"
dependencies = [
"cc",
"libc",
@@ -4938,7 +4998,7 @@ dependencies = [
"libc",
"redox_syscall 0.3.5",
"smallvec",
- "windows-targets 0.48.1",
+ "windows-targets 0.48.5",
]
[[package]]
@@ -5030,12 +5090,12 @@ dependencies = [
[[package]]
name = "petgraph"
-version = "0.6.3"
+version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4dd7d28ee937e54fe3080c91faa1c3a46c06de6252988a7f4592ba2310ef22a4"
+checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9"
dependencies = [
"fixedbitset",
- "indexmap 1.9.3",
+ "indexmap 2.0.0",
]
[[package]]
@@ -5063,22 +5123,22 @@ checksum = "db8bcd96cb740d03149cbad5518db9fd87126a10ab519c011893b1754134c468"
[[package]]
name = "pin-project"
-version = "1.1.2"
+version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "030ad2bc4db10a8944cb0d837f158bdfec4d4a4873ab701a95046770d11f8842"
+checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422"
dependencies = [
"pin-project-internal",
]
[[package]]
name = "pin-project-internal"
-version = "1.1.2"
+version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ec2e072ecce94ec471b13398d5402c188e76ac03cf74dd1a975161b23a3f6d9c"
+checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.28",
+ "syn 2.0.29",
]
[[package]]
@@ -5089,9 +5149,9 @@ checksum = "257b64915a082f7811703966789728173279bdebb956b143dbcd23f6f970a777"
[[package]]
name = "pin-project-lite"
-version = "0.2.10"
+version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c40d25201921e5ff0c862a505c6557ea88568a4e3ace775ab55e93f2f4f9d57"
+checksum = "12cc1b0bf1727a77a54b6654e7b5f1af8604923edc8b81885f8ec92f9e3f0a05"
[[package]]
name = "pin-utils"
@@ -5142,7 +5202,7 @@ dependencies = [
"line-wrap",
"quick-xml",
"serde",
- "time 0.3.24",
+ "time 0.3.27",
]
[[package]]
@@ -5207,7 +5267,7 @@ dependencies = [
"concurrent-queue",
"libc",
"log",
- "pin-project-lite 0.2.10",
+ "pin-project-lite 0.2.12",
"windows-sys",
]
@@ -5257,7 +5317,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c64d9ba0963cdcea2e1b2230fbae2bab30eb25a174be395c41e764bfb65dd62"
dependencies = [
"proc-macro2",
- "syn 2.0.28",
+ "syn 2.0.29",
]
[[package]]
@@ -5597,9 +5657,9 @@ dependencies = [
[[package]]
name = "quote"
-version = "1.0.32"
+version = "1.0.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "50f3b39ccfb720540debaa0164757101c08ecb8d326b15358ce76a62c7e85965"
+checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
dependencies = [
"proc-macro2",
]
@@ -5832,13 +5892,13 @@ dependencies = [
[[package]]
name = "regex"
-version = "1.9.1"
+version = "1.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575"
+checksum = "81bc1d4caf89fac26a70747fe603c130093b53c773888797a6329091246d651a"
dependencies = [
- "aho-corasick 1.0.2",
+ "aho-corasick 1.0.4",
"memchr",
- "regex-automata 0.3.4",
+ "regex-automata 0.3.6",
"regex-syntax 0.7.4",
]
@@ -5853,11 +5913,11 @@ dependencies = [
[[package]]
name = "regex-automata"
-version = "0.3.4"
+version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b7b6d6190b7594385f61bd3911cd1be99dfddcfc365a4160cc2ab5bff4aed294"
+checksum = "fed1ceff11a1dddaee50c9dc8e4938bd106e9d89ae372f192311e7da498e3b69"
dependencies = [
- "aho-corasick 1.0.2",
+ "aho-corasick 1.0.4",
"memchr",
"regex-syntax 0.7.4",
]
@@ -5906,9 +5966,9 @@ dependencies = [
[[package]]
name = "reqwest"
-version = "0.11.18"
+version = "0.11.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cde824a14b7c14f85caff81225f411faacc04a2013f41670f41443742b1c1c55"
+checksum = "20b9b67e2ca7dd9e9f9285b759de30ff538aab981abaaf7bc9bd90b84a0126c3"
dependencies = [
"base64 0.21.2",
"bytes 1.4.0",
@@ -5927,7 +5987,7 @@ dependencies = [
"native-tls",
"once_cell",
"percent-encoding",
- "pin-project-lite 0.2.10",
+ "pin-project-lite 0.2.12",
"serde",
"serde_json",
"serde_urlencoded",
@@ -6129,9 +6189,9 @@ dependencies = [
[[package]]
name = "rust-embed"
-version = "6.8.1"
+version = "8.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a36224c3276f8c4ebc8c20f158eca7ca4359c8db89991c4925132aaaf6702661"
+checksum = "b1e7d90385b59f0a6bf3d3b757f3ca4ece2048265d70db20a2016043d4509a40"
dependencies = [
"rust-embed-impl",
"rust-embed-utils",
@@ -6140,22 +6200,22 @@ dependencies = [
[[package]]
name = "rust-embed-impl"
-version = "6.8.1"
+version = "8.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "49b94b81e5b2c284684141a2fb9e2a31be90638caf040bf9afbc5a0416afe1ac"
+checksum = "3c3d8c6fd84090ae348e63a84336b112b5c3918b3bf0493a581f7bd8ee623c29"
dependencies = [
"proc-macro2",
"quote",
"rust-embed-utils",
- "syn 2.0.28",
+ "syn 2.0.29",
"walkdir",
]
[[package]]
name = "rust-embed-utils"
-version = "7.8.1"
+version = "8.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d38ff6bf570dc3bb7100fce9f7b60c33fa71d80e88da3f2580df4ff2bdded74"
+checksum = "873feff8cb7bf86fdf0a71bb21c95159f4e4a37dd7a4bd1855a940909b583ada"
dependencies = [
"globset",
"sha2 0.10.7",
@@ -6,6 +6,7 @@ members = [
"crates/auto_update",
"crates/breadcrumbs",
"crates/call",
+ "crates/channel",
"crates/cli",
"crates/client",
"crates/clock",
@@ -13,6 +14,7 @@ members = [
"crates/collab_ui",
"crates/collections",
"crates/command_palette",
+ "crates/component_test",
"crates/context_menu",
"crates/copilot",
"crates/copilot_button",
@@ -60,7 +62,7 @@ members = [
"crates/snippet",
"crates/sqlez",
"crates/sqlez_macros",
- "crates/staff_mode",
+ "crates/feature_flags",
"crates/sum_tree",
"crates/terminal",
"crates/text",
@@ -95,10 +97,11 @@ log = { version = "0.4.16", features = ["kv_unstable_serde"] }
ordered-float = { version = "2.1.1" }
parking_lot = { version = "0.11.1" }
postage = { version = "0.5", features = ["futures-traits"] }
+prost = { version = "0.8" }
rand = { version = "0.8.5" }
refineable = { path = "./crates/refineable" }
regex = { version = "1.5" }
-rust-embed = { version = "6.3", features = ["include-exclude"] }
+rust-embed = { version = "8.0", features = ["include-exclude"] }
schemars = { version = "0.8" }
serde = { version = "1.0", features = ["derive", "rc"] }
serde_derive = { version = "1.0", features = ["deserialize_in_place"] }
@@ -1,6 +1,6 @@
# syntax = docker/dockerfile:1.2
-FROM rust:1.71-bullseye as builder
+FROM rust:1.72-bullseye as builder
WORKDIR app
COPY . .
@@ -521,7 +521,8 @@
"ctrl-alt-cmd-f": "workspace::FollowNextCollaborator",
// TODO: Move this to a dock open action
"cmd-shift-c": "collab_panel::ToggleFocus",
- "cmd-alt-i": "zed::DebugElements"
+ "cmd-alt-i": "zed::DebugElements",
+ "ctrl-shift-:": "editor::ToggleInlayHints",
}
},
{
@@ -137,10 +137,67 @@
"partialWord": true
}
],
+ "g j": [
+ "vim::Down",
+ {
+ "displayLines": true
+ }
+ ],
+ "g down": [
+ "vim::Down",
+ {
+ "displayLines": true
+ }
+ ],
+ "g k": [
+ "vim::Up",
+ {
+ "displayLines": true
+ }
+ ],
+ "g up": [
+ "vim::Up",
+ {
+ "displayLines": true
+ }
+ ],
+ "g $": [
+ "vim::EndOfLine",
+ {
+ "displayLines": true
+ }
+ ],
+ "g end": [
+ "vim::EndOfLine",
+ {
+ "displayLines": true
+ }
+ ],
+ "g 0": [
+ "vim::StartOfLine",
+ {
+ "displayLines": true
+ }
+ ],
+ "g home": [
+ "vim::StartOfLine",
+ {
+ "displayLines": true
+ }
+ ],
+ "g ^": [
+ "vim::FirstNonWhitespace",
+ {
+ "displayLines": true
+ }
+ ],
// z commands
"z t": "editor::ScrollCursorTop",
"z z": "editor::ScrollCursorCenter",
"z b": "editor::ScrollCursorBottom",
+ "z c": "editor::Fold",
+ "z o": "editor::UnfoldLines",
+ "z f": "editor::FoldSelectedRanges",
// Count support
"1": [
"vim::Number",
@@ -98,6 +98,7 @@
// Whether to show selections in the scrollbar.
"selections": true
},
+ "relative_line_numbers": false,
// Inlay hint related settings
"inlay_hints": {
// Global switch to toggle hints on and off, switched off by default.
@@ -284,8 +285,6 @@
// "directory": "~/zed/projects/"
// }
// }
- //
- //
"working_directory": "current_project_directory",
// Set the cursor blinking behavior in the terminal.
// May take 4 values:
@@ -334,13 +333,32 @@
// "line_height": {
// "custom": 2
// },
- "line_height": "comfortable"
+ "line_height": "comfortable",
+ // Activate the python virtual environment, if one is found, in the
+ // terminal's working directory (as resolved by the working_directory
+ // setting). Set this to "off" to disable this behavior.
+ "detect_venv": {
+ "on": {
+ // Default directories to search for virtual environments, relative
+ // to the current working directory. We recommend overriding this
+ // in your project's settings, rather than globally.
+ "directories": [
+ ".env",
+ "env",
+ ".venv",
+ "venv"
+ ],
+ // Can also be 'csh' and 'fish'
+ "activate_script": "default"
+ }
+ }
// Set the terminal's font size. If this option is not included,
// the terminal will default to matching the buffer's font size.
- // "font_size": "15"
+ // "font_size": "15",
// Set the terminal's font family. If this option is not included,
// the terminal will default to matching the buffer's font family.
- // "font_family": "Zed Mono"
+ // "font_family": "Zed Mono",
+ // ---
},
// Settings for semantic_index
"semantic_index": {
@@ -855,14 +855,14 @@ impl Conversation {
) -> Self {
let markdown = language_registry.language_for_name("Markdown");
let buffer = cx.add_model(|cx| {
- let mut buffer = Buffer::new(0, "", cx);
+ let mut buffer = Buffer::new(0, cx.model_id() as u64, "");
buffer.set_language_registry(language_registry);
cx.spawn_weak(|buffer, mut cx| async move {
let markdown = markdown.await?;
let buffer = buffer
.upgrade(&cx)
.ok_or_else(|| anyhow!("buffer was dropped"))?;
- buffer.update(&mut cx, |buffer, cx| {
+ buffer.update(&mut cx, |buffer: &mut Buffer, cx| {
buffer.set_language(Some(markdown), cx)
});
anyhow::Ok(())
@@ -944,7 +944,7 @@ impl Conversation {
let mut message_anchors = Vec::new();
let mut next_message_id = MessageId(0);
let buffer = cx.add_model(|cx| {
- let mut buffer = Buffer::new(0, saved_conversation.text, cx);
+ let mut buffer = Buffer::new(0, cx.model_id() as u64, saved_conversation.text);
for message in saved_conversation.messages {
message_anchors.push(MessageAnchor {
id: message.id,
@@ -958,7 +958,7 @@ impl Conversation {
let buffer = buffer
.upgrade(&cx)
.ok_or_else(|| anyhow!("buffer was dropped"))?;
- buffer.update(&mut cx, |buffer, cx| {
+ buffer.update(&mut cx, |buffer: &mut Buffer, cx| {
buffer.set_language(Some(markdown), cx)
});
anyhow::Ok(())
@@ -1128,7 +1128,9 @@ impl Conversation {
stream: true,
};
- let Some(api_key) = self.api_key.borrow().clone() else { continue };
+ let Some(api_key) = self.api_key.borrow().clone() else {
+ continue;
+ };
let stream = stream_completion(api_key, cx.background().clone(), request);
let assistant_message = self
.insert_message_after(
@@ -1484,7 +1486,9 @@ impl Conversation {
}) {
current_message = messages.next();
}
- let Some(message) = current_message.as_ref() else { break };
+ let Some(message) = current_message.as_ref() else {
+ break;
+ };
// Skip offsets that are in the same message.
while offsets.peek().map_or(false, |offset| {
@@ -1921,7 +1925,10 @@ impl ConversationEditor {
let Some(panel) = workspace.panel::<AssistantPanel>(cx) else {
return;
};
- let Some(editor) = workspace.active_item(cx).and_then(|item| item.act_as::<Editor>(cx)) else {
+ let Some(editor) = workspace
+ .active_item(cx)
+ .and_then(|item| item.act_as::<Editor>(cx))
+ else {
return;
};
@@ -20,6 +20,7 @@ test-support = [
[dependencies]
audio = { path = "../audio" }
+channel = { path = "../channel" }
client = { path = "../client" }
collections = { path = "../collections" }
gpui = { path = "../gpui" }
@@ -7,9 +7,8 @@ use std::sync::Arc;
use anyhow::{anyhow, Result};
use audio::Audio;
use call_settings::CallSettings;
-use client::{
- proto, ChannelId, ClickhouseEvent, Client, TelemetrySettings, TypedEnvelope, User, UserStore,
-};
+use channel::ChannelId;
+use client::{proto, ClickhouseEvent, Client, TelemetrySettings, TypedEnvelope, User, UserStore};
use collections::HashSet;
use futures::{future::Shared, FutureExt};
use postage::watch;
@@ -644,7 +644,9 @@ impl Room {
if let Some(participants) = remote_participants.log_err() {
for (participant, user) in room.participants.into_iter().zip(participants) {
- let Some(peer_id) = participant.peer_id else { continue };
+ let Some(peer_id) = participant.peer_id else {
+ continue;
+ };
this.participant_user_ids.insert(participant.user_id);
let old_projects = this
@@ -0,0 +1,51 @@
+[package]
+name = "channel"
+version = "0.1.0"
+edition = "2021"
+publish = false
+
+[lib]
+path = "src/channel.rs"
+doctest = false
+
+[features]
+test-support = ["collections/test-support", "gpui/test-support", "rpc/test-support"]
+
+[dependencies]
+client = { path = "../client" }
+collections = { path = "../collections" }
+db = { path = "../db" }
+gpui = { path = "../gpui" }
+util = { path = "../util" }
+rpc = { path = "../rpc" }
+text = { path = "../text" }
+language = { path = "../language" }
+settings = { path = "../settings" }
+feature_flags = { path = "../feature_flags" }
+sum_tree = { path = "../sum_tree" }
+
+anyhow.workspace = true
+futures.workspace = true
+image = "0.23"
+lazy_static.workspace = true
+log.workspace = true
+parking_lot.workspace = true
+postage.workspace = true
+rand.workspace = true
+schemars.workspace = true
+smol.workspace = true
+thiserror.workspace = true
+time.workspace = true
+tiny_http = "0.8"
+uuid = { version = "1.1.2", features = ["v4"] }
+url = "2.2"
+serde.workspace = true
+serde_derive.workspace = true
+tempfile = "3"
+
+[dev-dependencies]
+collections = { path = "../collections", features = ["test-support"] }
+gpui = { path = "../gpui", features = ["test-support"] }
+rpc = { path = "../rpc", features = ["test-support"] }
+settings = { path = "../settings", features = ["test-support"] }
+util = { path = "../util", features = ["test-support"] }
@@ -0,0 +1,14 @@
+mod channel_store;
+
+pub mod channel_buffer;
+use std::sync::Arc;
+
+pub use channel_store::*;
+use client::Client;
+
+#[cfg(test)]
+mod channel_store_tests;
+
+pub fn init(client: &Arc<Client>) {
+ channel_buffer::init(client);
+}
@@ -0,0 +1,197 @@
+use crate::Channel;
+use anyhow::Result;
+use client::Client;
+use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle};
+use rpc::{proto, TypedEnvelope};
+use std::sync::Arc;
+use util::ResultExt;
+
+pub(crate) fn init(client: &Arc<Client>) {
+ client.add_model_message_handler(ChannelBuffer::handle_update_channel_buffer);
+ client.add_model_message_handler(ChannelBuffer::handle_add_channel_buffer_collaborator);
+ client.add_model_message_handler(ChannelBuffer::handle_remove_channel_buffer_collaborator);
+}
+
+pub struct ChannelBuffer {
+ pub(crate) channel: Arc<Channel>,
+ connected: bool,
+ collaborators: Vec<proto::Collaborator>,
+ buffer: ModelHandle<language::Buffer>,
+ client: Arc<Client>,
+ subscription: Option<client::Subscription>,
+}
+
+pub enum Event {
+ CollaboratorsChanged,
+ Disconnected,
+}
+
+impl Entity for ChannelBuffer {
+ type Event = Event;
+
+ fn release(&mut self, _: &mut AppContext) {
+ if self.connected {
+ self.client
+ .send(proto::LeaveChannelBuffer {
+ channel_id: self.channel.id,
+ })
+ .log_err();
+ }
+ }
+}
+
+impl ChannelBuffer {
+ pub(crate) async fn new(
+ channel: Arc<Channel>,
+ client: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<ModelHandle<Self>> {
+ let response = client
+ .request(proto::JoinChannelBuffer {
+ channel_id: channel.id,
+ })
+ .await?;
+
+ let base_text = response.base_text;
+ let operations = response
+ .operations
+ .into_iter()
+ .map(language::proto::deserialize_operation)
+ .collect::<Result<Vec<_>, _>>()?;
+
+ let collaborators = response.collaborators;
+
+ let buffer = cx.add_model(|_| {
+ language::Buffer::remote(response.buffer_id, response.replica_id as u16, base_text)
+ });
+ buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
+
+ let subscription = client.subscribe_to_entity(channel.id)?;
+
+ anyhow::Ok(cx.add_model(|cx| {
+ cx.subscribe(&buffer, Self::on_buffer_update).detach();
+
+ Self {
+ buffer,
+ client,
+ connected: true,
+ collaborators,
+ channel,
+ subscription: Some(subscription.set_model(&cx.handle(), &mut cx.to_async())),
+ }
+ }))
+ }
+
+ async fn handle_update_channel_buffer(
+ this: ModelHandle<Self>,
+ update_channel_buffer: TypedEnvelope<proto::UpdateChannelBuffer>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ let ops = update_channel_buffer
+ .payload
+ .operations
+ .into_iter()
+ .map(language::proto::deserialize_operation)
+ .collect::<Result<Vec<_>, _>>()?;
+
+ this.update(&mut cx, |this, cx| {
+ cx.notify();
+ this.buffer
+ .update(cx, |buffer, cx| buffer.apply_ops(ops, cx))
+ })?;
+
+ Ok(())
+ }
+
+ async fn handle_add_channel_buffer_collaborator(
+ this: ModelHandle<Self>,
+ envelope: TypedEnvelope<proto::AddChannelBufferCollaborator>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ let collaborator = envelope.payload.collaborator.ok_or_else(|| {
+ anyhow::anyhow!(
+ "Should have gotten a collaborator in the AddChannelBufferCollaborator message"
+ )
+ })?;
+
+ this.update(&mut cx, |this, cx| {
+ this.collaborators.push(collaborator);
+ cx.emit(Event::CollaboratorsChanged);
+ cx.notify();
+ });
+
+ Ok(())
+ }
+
+ async fn handle_remove_channel_buffer_collaborator(
+ this: ModelHandle<Self>,
+ message: TypedEnvelope<proto::RemoveChannelBufferCollaborator>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ this.update(&mut cx, |this, cx| {
+ this.collaborators.retain(|collaborator| {
+ if collaborator.peer_id == message.payload.peer_id {
+ this.buffer.update(cx, |buffer, cx| {
+ buffer.remove_peer(collaborator.replica_id as u16, cx)
+ });
+ false
+ } else {
+ true
+ }
+ });
+ cx.emit(Event::CollaboratorsChanged);
+ cx.notify();
+ });
+
+ Ok(())
+ }
+
+ fn on_buffer_update(
+ &mut self,
+ _: ModelHandle<language::Buffer>,
+ event: &language::Event,
+ _: &mut ModelContext<Self>,
+ ) {
+ if let language::Event::Operation(operation) = event {
+ let operation = language::proto::serialize_operation(operation);
+ self.client
+ .send(proto::UpdateChannelBuffer {
+ channel_id: self.channel.id,
+ operations: vec![operation],
+ })
+ .log_err();
+ }
+ }
+
+ pub fn buffer(&self) -> ModelHandle<language::Buffer> {
+ self.buffer.clone()
+ }
+
+ pub fn collaborators(&self) -> &[proto::Collaborator] {
+ &self.collaborators
+ }
+
+ pub fn channel(&self) -> Arc<Channel> {
+ self.channel.clone()
+ }
+
+ pub(crate) fn disconnect(&mut self, cx: &mut ModelContext<Self>) {
+ if self.connected {
+ self.connected = false;
+ self.subscription.take();
+ cx.emit(Event::Disconnected);
+ cx.notify()
+ }
+ }
+
+ pub fn is_connected(&self) -> bool {
+ self.connected
+ }
+
+ pub fn replica_id(&self, cx: &AppContext) -> u16 {
+ self.buffer.read(cx).replica_id()
+ }
+}
@@ -1,19 +1,14 @@
-use crate::Status;
-use crate::{Client, Subscription, User, UserStore};
-use anyhow::anyhow;
-use anyhow::Result;
-use collections::HashMap;
-use collections::HashSet;
-use futures::channel::mpsc;
-use futures::Future;
-use futures::StreamExt;
-use gpui::{AsyncAppContext, Entity, ModelContext, ModelHandle, Task};
+use crate::channel_buffer::ChannelBuffer;
+use anyhow::{anyhow, Result};
+use client::{Client, Status, Subscription, User, UserId, UserStore};
+use collections::{hash_map, HashMap, HashSet};
+use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt};
+use gpui::{AsyncAppContext, Entity, ModelContext, ModelHandle, Task, WeakModelHandle};
use rpc::{proto, TypedEnvelope};
use std::sync::Arc;
use util::ResultExt;
pub type ChannelId = u64;
-pub type UserId = u64;
pub struct ChannelStore {
channels_by_id: HashMap<ChannelId, Arc<Channel>>,
@@ -23,6 +18,7 @@ pub struct ChannelStore {
channels_with_admin_privileges: HashSet<ChannelId>,
outgoing_invites: HashSet<(ChannelId, UserId)>,
update_channels_tx: mpsc::UnboundedSender<proto::UpdateChannels>,
+ opened_buffers: HashMap<ChannelId, OpenedChannelBuffer>,
client: Arc<Client>,
user_store: ModelHandle<UserStore>,
_rpc_subscription: Subscription,
@@ -57,6 +53,11 @@ pub enum ChannelMemberStatus {
NotMember,
}
+enum OpenedChannelBuffer {
+ Open(WeakModelHandle<ChannelBuffer>),
+ Loading(Shared<Task<Result<ModelHandle<ChannelBuffer>, Arc<anyhow::Error>>>>),
+}
+
impl ChannelStore {
pub fn new(
client: Arc<Client>,
@@ -70,16 +71,14 @@ impl ChannelStore {
let mut connection_status = client.status();
let watch_connection_status = cx.spawn_weak(|this, mut cx| async move {
while let Some(status) = connection_status.next().await {
- if matches!(status, Status::ConnectionLost | Status::SignedOut) {
+ if !status.is_connected() {
if let Some(this) = this.upgrade(&cx) {
this.update(&mut cx, |this, cx| {
- this.channels_by_id.clear();
- this.channel_invitations.clear();
- this.channel_participants.clear();
- this.channels_with_admin_privileges.clear();
- this.channel_paths.clear();
- this.outgoing_invites.clear();
- cx.notify();
+ if matches!(status, Status::ConnectionLost | Status::SignedOut) {
+ this.handle_disconnect(cx);
+ } else {
+ this.disconnect_buffers(cx);
+ }
});
} else {
break;
@@ -87,6 +86,7 @@ impl ChannelStore {
}
}
});
+
Self {
channels_by_id: HashMap::default(),
channel_invitations: Vec::default(),
@@ -94,6 +94,7 @@ impl ChannelStore {
channel_participants: Default::default(),
channels_with_admin_privileges: Default::default(),
outgoing_invites: Default::default(),
+ opened_buffers: Default::default(),
update_channels_tx,
client,
user_store,
@@ -114,6 +115,16 @@ impl ChannelStore {
}
}
+ pub fn has_children(&self, channel_id: ChannelId) -> bool {
+ self.channel_paths.iter().any(|path| {
+ if let Some(ix) = path.iter().position(|id| *id == channel_id) {
+ path.len() > ix + 1
+ } else {
+ false
+ }
+ })
+ }
+
pub fn channel_count(&self) -> usize {
self.channel_paths.len()
}
@@ -141,6 +152,74 @@ impl ChannelStore {
self.channels_by_id.get(&channel_id)
}
+ pub fn open_channel_buffer(
+ &mut self,
+ channel_id: ChannelId,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<ModelHandle<ChannelBuffer>>> {
+ // Make sure that a given channel buffer is only opened once per
+ // app instance, even if this method is called multiple times
+ // with the same channel id while the first task is still running.
+ let task = loop {
+ match self.opened_buffers.entry(channel_id) {
+ hash_map::Entry::Occupied(e) => match e.get() {
+ OpenedChannelBuffer::Open(buffer) => {
+ if let Some(buffer) = buffer.upgrade(cx) {
+ break Task::ready(Ok(buffer)).shared();
+ } else {
+ self.opened_buffers.remove(&channel_id);
+ continue;
+ }
+ }
+ OpenedChannelBuffer::Loading(task) => break task.clone(),
+ },
+ hash_map::Entry::Vacant(e) => {
+ let client = self.client.clone();
+ let task = cx
+ .spawn(|this, cx| async move {
+ let channel = this.read_with(&cx, |this, _| {
+ this.channel_for_id(channel_id).cloned().ok_or_else(|| {
+ Arc::new(anyhow!("no channel for id: {}", channel_id))
+ })
+ })?;
+
+ ChannelBuffer::new(channel, client, cx)
+ .await
+ .map_err(Arc::new)
+ })
+ .shared();
+ e.insert(OpenedChannelBuffer::Loading(task.clone()));
+ cx.spawn({
+ let task = task.clone();
+ |this, mut cx| async move {
+ let result = task.await;
+ this.update(&mut cx, |this, cx| match result {
+ Ok(buffer) => {
+ cx.observe_release(&buffer, move |this, _, _| {
+ this.opened_buffers.remove(&channel_id);
+ })
+ .detach();
+ this.opened_buffers.insert(
+ channel_id,
+ OpenedChannelBuffer::Open(buffer.downgrade()),
+ );
+ }
+ Err(error) => {
+ log::error!("failed to open channel buffer {error:?}");
+ this.opened_buffers.remove(&channel_id);
+ }
+ });
+ }
+ })
+ .detach();
+ break task;
+ }
+ }
+ };
+ cx.foreground()
+ .spawn(async move { task.await.map_err(|error| anyhow!("{}", error)) })
+ }
+
pub fn is_user_admin(&self, channel_id: ChannelId) -> bool {
self.channel_paths.iter().any(|path| {
if let Some(ix) = path.iter().position(|id| *id == channel_id) {
@@ -403,6 +482,27 @@ impl ChannelStore {
Ok(())
}
+ fn handle_disconnect(&mut self, cx: &mut ModelContext<'_, ChannelStore>) {
+ self.disconnect_buffers(cx);
+ self.channels_by_id.clear();
+ self.channel_invitations.clear();
+ self.channel_participants.clear();
+ self.channels_with_admin_privileges.clear();
+ self.channel_paths.clear();
+ self.outgoing_invites.clear();
+ cx.notify();
+ }
+
+ fn disconnect_buffers(&mut self, cx: &mut ModelContext<ChannelStore>) {
+ for (_, buffer) in self.opened_buffers.drain() {
+ if let OpenedChannelBuffer::Open(buffer) = buffer {
+ if let Some(buffer) = buffer.upgrade(cx) {
+ buffer.update(cx, |buffer, cx| buffer.disconnect(cx));
+ }
+ }
+ }
+ }
+
pub(crate) fn update_channels(
&mut self,
payload: proto::UpdateChannels,
@@ -437,38 +537,44 @@ impl ChannelStore {
.retain(|channel_id, _| !payload.remove_channels.contains(channel_id));
self.channels_with_admin_privileges
.retain(|channel_id| !payload.remove_channels.contains(channel_id));
- }
- for channel in payload.channels {
- if let Some(existing_channel) = self.channels_by_id.get_mut(&channel.id) {
- // FIXME: We may be missing a path for this existing channel in certain cases
- let existing_channel = Arc::make_mut(existing_channel);
- existing_channel.name = channel.name;
- continue;
+ for channel_id in &payload.remove_channels {
+ let channel_id = *channel_id;
+ if let Some(OpenedChannelBuffer::Open(buffer)) =
+ self.opened_buffers.remove(&channel_id)
+ {
+ if let Some(buffer) = buffer.upgrade(cx) {
+ buffer.update(cx, ChannelBuffer::disconnect);
+ }
+ }
}
+ }
- self.channels_by_id.insert(
- channel.id,
- Arc::new(Channel {
- id: channel.id,
- name: channel.name,
- }),
- );
-
- if let Some(parent_id) = channel.parent_id {
- let mut ix = 0;
- while ix < self.channel_paths.len() {
- let path = &self.channel_paths[ix];
- if path.ends_with(&[parent_id]) {
- let mut new_path = path.clone();
- new_path.push(channel.id);
- self.channel_paths.insert(ix + 1, new_path);
+ for channel_proto in payload.channels {
+ if let Some(existing_channel) = self.channels_by_id.get_mut(&channel_proto.id) {
+ Arc::make_mut(existing_channel).name = channel_proto.name;
+ } else {
+ let channel = Arc::new(Channel {
+ id: channel_proto.id,
+ name: channel_proto.name,
+ });
+ self.channels_by_id.insert(channel.id, channel.clone());
+
+ if let Some(parent_id) = channel_proto.parent_id {
+ let mut ix = 0;
+ while ix < self.channel_paths.len() {
+ let path = &self.channel_paths[ix];
+ if path.ends_with(&[parent_id]) {
+ let mut new_path = path.clone();
+ new_path.push(channel.id);
+ self.channel_paths.insert(ix + 1, new_path);
+ ix += 1;
+ }
ix += 1;
}
- ix += 1;
+ } else {
+ self.channel_paths.push(vec![channel.id]);
}
- } else {
- self.channel_paths.push(vec![channel.id]);
}
}
@@ -1,4 +1,7 @@
use super::*;
+use client::{Client, UserStore};
+use gpui::{AppContext, ModelHandle};
+use rpc::proto;
use util::http::FakeHttpClient;
#[gpui::test]
@@ -17,8 +17,9 @@ db = { path = "../db" }
gpui = { path = "../gpui" }
util = { path = "../util" }
rpc = { path = "../rpc" }
+text = { path = "../text" }
settings = { path = "../settings" }
-staff_mode = { path = "../staff_mode" }
+feature_flags = { path = "../feature_flags" }
sum_tree = { path = "../sum_tree" }
anyhow.workspace = true
@@ -1,10 +1,6 @@
#[cfg(any(test, feature = "test-support"))]
pub mod test;
-#[cfg(test)]
-mod channel_store_tests;
-
-pub mod channel_store;
pub mod telemetry;
pub mod user;
@@ -48,7 +44,6 @@ use util::channel::ReleaseChannel;
use util::http::HttpClient;
use util::{ResultExt, TryFutureExt};
-pub use channel_store::*;
pub use rpc::*;
pub use telemetry::ClickhouseEvent;
pub use user::*;
@@ -135,8 +135,6 @@ impl Telemetry {
}
}
- /// This method takes the entire TelemetrySettings struct in order to force client code
- /// to pull the struct out of the settings global. Do not remove!
pub fn set_authenticated_user_info(
self: &Arc<Self>,
metrics_id: Option<String>,
@@ -168,6 +168,7 @@ impl FakeServer {
GetPrivateUserInfoResponse {
metrics_id: "the-metrics-id".into(),
staff: false,
+ flags: Default::default(),
},
)
.await;
@@ -1,18 +1,20 @@
use super::{proto, Client, Status, TypedEnvelope};
use anyhow::{anyhow, Context, Result};
use collections::{hash_map::Entry, HashMap, HashSet};
+use feature_flags::FeatureFlagAppExt;
use futures::{channel::mpsc, future, AsyncReadExt, Future, StreamExt};
use gpui::{AsyncAppContext, Entity, ImageData, ModelContext, ModelHandle, Task};
use postage::{sink::Sink, watch};
use rpc::proto::{RequestMessage, UsersResponse};
-use staff_mode::StaffMode;
use std::sync::{Arc, Weak};
use util::http::HttpClient;
use util::TryFutureExt as _;
+pub type UserId = u64;
+
#[derive(Default, Debug)]
pub struct User {
- pub id: u64,
+ pub id: UserId,
pub github_login: String,
pub avatar: Option<Arc<ImageData>>,
}
@@ -143,26 +145,23 @@ impl UserStore {
let fetch_metrics_id =
client.request(proto::GetPrivateUserInfo {}).log_err();
let (user, info) = futures::join!(fetch_user, fetch_metrics_id);
- cx.read(|cx| {
- client.telemetry.set_authenticated_user_info(
- info.as_ref().map(|info| info.metrics_id.clone()),
- info.as_ref().map(|info| info.staff).unwrap_or(false),
- cx,
- )
- });
- cx.update(|cx| {
- cx.update_default_global(|staff_mode: &mut StaffMode, _| {
- if !staff_mode.0 {
- *staff_mode = StaffMode(
- info.as_ref()
- .map(|info| info.staff)
- .unwrap_or_default(),
- )
- }
- ()
+ if let Some(info) = info {
+ cx.update(|cx| {
+ cx.update_flags(info.staff, info.flags);
+ client.telemetry.set_authenticated_user_info(
+ Some(info.metrics_id.clone()),
+ info.staff,
+ cx,
+ )
});
- });
+ } else {
+ cx.read(|cx| {
+ client
+ .telemetry
+ .set_authenticated_user_info(None, false, cx)
+ });
+ }
current_user_tx.send(user).await.ok();
@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathan@zed.dev>"]
default-run = "collab"
edition = "2021"
name = "collab"
-version = "0.17.0"
+version = "0.18.0"
publish = false
[[bin]]
@@ -14,8 +14,10 @@ name = "seed"
required-features = ["seed-support"]
[dependencies]
+clock = { path = "../clock" }
collections = { path = "../collections" }
live_kit_server = { path = "../live_kit_server" }
+text = { path = "../text" }
rpc = { path = "../rpc" }
util = { path = "../util" }
@@ -35,6 +37,7 @@ log.workspace = true
nanoid = "0.4"
parking_lot.workspace = true
prometheus = "0.13"
+prost.workspace = true
rand.workspace = true
reqwest = { version = "0.11", features = ["json"], optional = true }
scrypt = "0.7"
@@ -62,6 +65,7 @@ collections = { path = "../collections", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
call = { path = "../call", features = ["test-support"] }
client = { path = "../client", features = ["test-support"] }
+channel = { path = "../channel" }
editor = { path = "../editor", features = ["test-support"] }
language = { path = "../language", features = ["test-support"] }
fs = { path = "../fs", features = ["test-support"] }
@@ -74,6 +78,7 @@ rpc = { path = "../rpc", features = ["test-support"] }
settings = { path = "../settings", features = ["test-support"] }
theme = { path = "../theme" }
workspace = { path = "../workspace", features = ["test-support"] }
+collab_ui = { path = "../collab_ui", features = ["test-support"] }
ctor.workspace = true
env_logger.workspace = true
@@ -208,3 +208,63 @@ CREATE TABLE "channel_members" (
);
CREATE UNIQUE INDEX "index_channel_members_on_channel_id_and_user_id" ON "channel_members" ("channel_id", "user_id");
+
+CREATE TABLE "buffers" (
+ "id" INTEGER PRIMARY KEY AUTOINCREMENT,
+ "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
+ "epoch" INTEGER NOT NULL DEFAULT 0
+);
+
+CREATE INDEX "index_buffers_on_channel_id" ON "buffers" ("channel_id");
+
+CREATE TABLE "buffer_operations" (
+ "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
+ "epoch" INTEGER NOT NULL,
+ "replica_id" INTEGER NOT NULL,
+ "lamport_timestamp" INTEGER NOT NULL,
+ "value" BLOB NOT NULL,
+ PRIMARY KEY(buffer_id, epoch, lamport_timestamp, replica_id)
+);
+
+CREATE TABLE "buffer_snapshots" (
+ "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
+ "epoch" INTEGER NOT NULL,
+ "text" TEXT NOT NULL,
+ "operation_serialization_version" INTEGER NOT NULL,
+ PRIMARY KEY(buffer_id, epoch)
+);
+
+CREATE TABLE "channel_buffer_collaborators" (
+ "id" INTEGER PRIMARY KEY AUTOINCREMENT,
+ "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
+ "connection_id" INTEGER NOT NULL,
+ "connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE,
+ "connection_lost" BOOLEAN NOT NULL DEFAULT false,
+ "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
+ "replica_id" INTEGER NOT NULL
+);
+
+CREATE INDEX "index_channel_buffer_collaborators_on_channel_id" ON "channel_buffer_collaborators" ("channel_id");
+CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_and_replica_id" ON "channel_buffer_collaborators" ("channel_id", "replica_id");
+CREATE INDEX "index_channel_buffer_collaborators_on_connection_server_id" ON "channel_buffer_collaborators" ("connection_server_id");
+CREATE INDEX "index_channel_buffer_collaborators_on_connection_id" ON "channel_buffer_collaborators" ("connection_id");
+CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_connection_id_and_server_id" ON "channel_buffer_collaborators" ("channel_id", "connection_id", "connection_server_id");
+
+
+CREATE TABLE "feature_flags" (
+ "id" INTEGER PRIMARY KEY AUTOINCREMENT,
+ "flag" TEXT NOT NULL UNIQUE
+);
+
+CREATE INDEX "index_feature_flags" ON "feature_flags" ("id");
+
+
+CREATE TABLE "user_features" (
+ "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
+ "feature_id" INTEGER NOT NULL REFERENCES feature_flags (id) ON DELETE CASCADE,
+ PRIMARY KEY (user_id, feature_id)
+);
+
+CREATE UNIQUE INDEX "index_user_features_user_id_and_feature_id" ON "user_features" ("user_id", "feature_id");
+CREATE INDEX "index_user_features_on_user_id" ON "user_features" ("user_id");
+CREATE INDEX "index_user_features_on_feature_id" ON "user_features" ("feature_id");
@@ -0,0 +1,40 @@
+CREATE TABLE "buffers" (
+ "id" SERIAL PRIMARY KEY,
+ "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
+ "epoch" INTEGER NOT NULL DEFAULT 0
+);
+
+CREATE INDEX "index_buffers_on_channel_id" ON "buffers" ("channel_id");
+
+CREATE TABLE "buffer_operations" (
+ "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
+ "epoch" INTEGER NOT NULL,
+ "replica_id" INTEGER NOT NULL,
+ "lamport_timestamp" INTEGER NOT NULL,
+ "value" BYTEA NOT NULL,
+ PRIMARY KEY(buffer_id, epoch, lamport_timestamp, replica_id)
+);
+
+CREATE TABLE "buffer_snapshots" (
+ "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
+ "epoch" INTEGER NOT NULL,
+ "text" TEXT NOT NULL,
+ "operation_serialization_version" INTEGER NOT NULL,
+ PRIMARY KEY(buffer_id, epoch)
+);
+
+CREATE TABLE "channel_buffer_collaborators" (
+ "id" SERIAL PRIMARY KEY,
+ "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
+ "connection_id" INTEGER NOT NULL,
+ "connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE,
+ "connection_lost" BOOLEAN NOT NULL DEFAULT FALSE,
+ "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
+ "replica_id" INTEGER NOT NULL
+);
+
+CREATE INDEX "index_channel_buffer_collaborators_on_channel_id" ON "channel_buffer_collaborators" ("channel_id");
+CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_and_replica_id" ON "channel_buffer_collaborators" ("channel_id", "replica_id");
+CREATE INDEX "index_channel_buffer_collaborators_on_connection_server_id" ON "channel_buffer_collaborators" ("connection_server_id");
+CREATE INDEX "index_channel_buffer_collaborators_on_connection_id" ON "channel_buffer_collaborators" ("connection_id");
+CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_connection_id_and_server_id" ON "channel_buffer_collaborators" ("channel_id", "connection_id", "connection_server_id");
@@ -0,0 +1,16 @@
+CREATE TABLE "feature_flags" (
+ "id" SERIAL PRIMARY KEY,
+ "flag" VARCHAR(255) NOT NULL UNIQUE
+);
+
+CREATE UNIQUE INDEX "index_feature_flags" ON "feature_flags" ("id");
+
+CREATE TABLE "user_features" (
+ "user_id" INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
+ "feature_id" INTEGER NOT NULL REFERENCES feature_flags(id) ON DELETE CASCADE,
+ PRIMARY KEY (user_id, feature_id)
+);
+
+CREATE UNIQUE INDEX "index_user_features_user_id_and_feature_id" ON "user_features" ("user_id", "feature_id");
+CREATE INDEX "index_user_features_on_user_id" ON "user_features" ("user_id");
+CREATE INDEX "index_user_features_on_feature_id" ON "user_features" ("feature_id");
@@ -1,7 +1,8 @@
#[cfg(test)]
-mod db_tests;
+pub mod tests;
+
#[cfg(test)]
-pub mod test_db;
+pub use tests::TestDb;
mod ids;
mod queries;
@@ -52,6 +53,8 @@ pub struct Database {
runtime: Option<tokio::runtime::Runtime>,
}
+// The `Database` type has so many methods that its impl blocks are split into
+// separate files in the `queries` folder.
impl Database {
pub async fn new(options: ConnectOptions, executor: Executor) -> Result<Self> {
Ok(Self {
@@ -246,7 +249,9 @@ impl Database {
let mut tx = Arc::new(Some(tx));
let result = f(TransactionHandle(tx.clone())).await;
let Some(tx) = Arc::get_mut(&mut tx).and_then(|tx| tx.take()) else {
- return Err(anyhow!("couldn't complete transaction because it's still in use"))?;
+ return Err(anyhow!(
+ "couldn't complete transaction because it's still in use"
+ ))?;
};
Ok((tx, result))
@@ -110,6 +110,7 @@ fn value_to_integer(v: Value) -> Result<i32, ValueTypeErr> {
}
}
+id_type!(BufferId);
id_type!(AccessTokenId);
id_type!(ChannelId);
id_type!(ChannelMemberId);
@@ -123,3 +124,5 @@ id_type!(ReplicaId);
id_type!(ServerId);
id_type!(SignupId);
id_type!(UserId);
+id_type!(ChannelBufferCollaboratorId);
+id_type!(FlagId);
@@ -1,6 +1,7 @@
use super::*;
pub mod access_tokens;
+pub mod buffers;
pub mod channels;
pub mod contacts;
pub mod projects;
@@ -0,0 +1,588 @@
+use super::*;
+use prost::Message;
+use text::{EditOperation, InsertionTimestamp, UndoOperation};
+
+impl Database {
+ pub async fn join_channel_buffer(
+ &self,
+ channel_id: ChannelId,
+ user_id: UserId,
+ connection: ConnectionId,
+ ) -> Result<proto::JoinChannelBufferResponse> {
+ self.transaction(|tx| async move {
+ let tx = tx;
+
+ self.check_user_is_channel_member(channel_id, user_id, &tx)
+ .await?;
+
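+            // Look up the channel's buffer, creating it with an empty epoch-0 snapshot on first join.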
+ let buffer = channel::Model {
+ id: channel_id,
+ ..Default::default()
+ }
+ .find_related(buffer::Entity)
+ .one(&*tx)
+ .await?;
+
+ let buffer = if let Some(buffer) = buffer {
+ buffer
+ } else {
+ let buffer = buffer::ActiveModel {
+ channel_id: ActiveValue::Set(channel_id),
+ ..Default::default()
+ }
+ .insert(&*tx)
+ .await?;
+ buffer_snapshot::ActiveModel {
+ buffer_id: ActiveValue::Set(buffer.id),
+ epoch: ActiveValue::Set(0),
+ text: ActiveValue::Set(String::new()),
+ operation_serialization_version: ActiveValue::Set(
+ storage::SERIALIZATION_VERSION,
+ ),
+ }
+ .insert(&*tx)
+ .await?;
+ buffer
+ };
+
+ // Join the collaborators
+ let mut collaborators = channel_buffer_collaborator::Entity::find()
+ .filter(channel_buffer_collaborator::Column::ChannelId.eq(channel_id))
+ .all(&*tx)
+ .await?;
+ let replica_ids = collaborators
+ .iter()
+ .map(|c| c.replica_id)
+ .collect::<HashSet<_>>();
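+            // Assign the lowest replica id not already taken by an existing collaborator.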
+ let mut replica_id = ReplicaId(0);
+ while replica_ids.contains(&replica_id) {
+ replica_id.0 += 1;
+ }
+ let collaborator = channel_buffer_collaborator::ActiveModel {
+ channel_id: ActiveValue::Set(channel_id),
+ connection_id: ActiveValue::Set(connection.id as i32),
+ connection_server_id: ActiveValue::Set(ServerId(connection.owner_id as i32)),
+ user_id: ActiveValue::Set(user_id),
+ replica_id: ActiveValue::Set(replica_id),
+ ..Default::default()
+ }
+ .insert(&*tx)
+ .await?;
+ collaborators.push(collaborator);
+
+ // Assemble the buffer state
+ let (base_text, operations) = self.get_buffer_state(&buffer, &tx).await?;
+
+ Ok(proto::JoinChannelBufferResponse {
+ buffer_id: buffer.id.to_proto(),
+ replica_id: replica_id.to_proto() as u32,
+ base_text,
+ operations,
+ collaborators: collaborators
+ .into_iter()
+ .map(|collaborator| proto::Collaborator {
+ peer_id: Some(collaborator.connection().into()),
+ user_id: collaborator.user_id.to_proto(),
+ replica_id: collaborator.replica_id.0 as u32,
+ })
+ .collect(),
+ })
+ })
+ .await
+ }
+
+ pub async fn leave_channel_buffer(
+ &self,
+ channel_id: ChannelId,
+ connection: ConnectionId,
+ ) -> Result<Vec<ConnectionId>> {
+ self.transaction(|tx| async move {
+ self.leave_channel_buffer_internal(channel_id, connection, &*tx)
+ .await
+ })
+ .await
+ }
+
+ pub async fn leave_channel_buffer_internal(
+ &self,
+ channel_id: ChannelId,
+ connection: ConnectionId,
+ tx: &DatabaseTransaction,
+ ) -> Result<Vec<ConnectionId>> {
+ let result = channel_buffer_collaborator::Entity::delete_many()
+ .filter(
+ Condition::all()
+ .add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id))
+ .add(channel_buffer_collaborator::Column::ConnectionId.eq(connection.id as i32))
+ .add(
+ channel_buffer_collaborator::Column::ConnectionServerId
+ .eq(connection.owner_id as i32),
+ ),
+ )
+ .exec(&*tx)
+ .await?;
+ if result.rows_affected == 0 {
+            Err(anyhow!("not a collaborator on this channel buffer"))?;
+ }
+
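+        // Collect the connections of the collaborators that remain on this channel buffer.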
+ let mut connections = Vec::new();
+ let mut rows = channel_buffer_collaborator::Entity::find()
+ .filter(
+ Condition::all().add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)),
+ )
+ .stream(&*tx)
+ .await?;
+ while let Some(row) = rows.next().await {
+ let row = row?;
+ connections.push(ConnectionId {
+ id: row.connection_id as u32,
+ owner_id: row.connection_server_id.0 as u32,
+ });
+ }
+
+ drop(rows);
+
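+        // If that was the last collaborator, fold the accumulated operations into a fresh snapshot.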
+ if connections.is_empty() {
+ self.snapshot_buffer(channel_id, &tx).await?;
+ }
+
+ Ok(connections)
+ }
+
+ pub async fn leave_channel_buffers(
+ &self,
+ connection: ConnectionId,
+ ) -> Result<Vec<(ChannelId, Vec<ConnectionId>)>> {
+ self.transaction(|tx| async move {
+ #[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
+ enum QueryChannelIds {
+ ChannelId,
+ }
+
+ let channel_ids: Vec<ChannelId> = channel_buffer_collaborator::Entity::find()
+ .select_only()
+ .column(channel_buffer_collaborator::Column::ChannelId)
+ .filter(Condition::all().add(
+ channel_buffer_collaborator::Column::ConnectionId.eq(connection.id as i32),
+ ))
+ .into_values::<_, QueryChannelIds>()
+ .all(&*tx)
+ .await?;
+
+ let mut result = Vec::new();
+ for channel_id in channel_ids {
+ let collaborators = self
+ .leave_channel_buffer_internal(channel_id, connection, &*tx)
+ .await?;
+ result.push((channel_id, collaborators));
+ }
+
+ Ok(result)
+ })
+ .await
+ }
+
+ #[cfg(debug_assertions)]
+ pub async fn get_channel_buffer_collaborators(
+ &self,
+ channel_id: ChannelId,
+ ) -> Result<Vec<UserId>> {
+ self.transaction(|tx| async move {
+ #[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
+ enum QueryUserIds {
+ UserId,
+ }
+
+ let users: Vec<UserId> = channel_buffer_collaborator::Entity::find()
+ .select_only()
+ .column(channel_buffer_collaborator::Column::UserId)
+ .filter(
+ Condition::all()
+ .add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)),
+ )
+ .into_values::<_, QueryUserIds>()
+ .all(&*tx)
+ .await?;
+
+ Ok(users)
+ })
+ .await
+ }
+
+ pub async fn update_channel_buffer(
+ &self,
+ channel_id: ChannelId,
+ user: UserId,
+ operations: &[proto::Operation],
+ ) -> Result<Vec<ConnectionId>> {
+ self.transaction(move |tx| async move {
+ self.check_user_is_channel_member(channel_id, user, &*tx)
+ .await?;
+
+ let buffer = buffer::Entity::find()
+ .filter(buffer::Column::ChannelId.eq(channel_id))
+ .one(&*tx)
+ .await?
+ .ok_or_else(|| anyhow!("no such buffer"))?;
+
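+            // Fetch the serialization version recorded by the current epoch's snapshot before encoding the incoming operations.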
+ #[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
+ enum QueryVersion {
+ OperationSerializationVersion,
+ }
+
+ let serialization_version: i32 = buffer
+ .find_related(buffer_snapshot::Entity)
+ .select_only()
+ .column(buffer_snapshot::Column::OperationSerializationVersion)
+ .filter(buffer_snapshot::Column::Epoch.eq(buffer.epoch))
+ .into_values::<_, QueryVersion>()
+ .one(&*tx)
+ .await?
+ .ok_or_else(|| anyhow!("missing buffer snapshot"))?;
+
+ let operations = operations
+ .iter()
+ .filter_map(|op| operation_to_storage(op, &buffer, serialization_version))
+ .collect::<Vec<_>>();
+ if !operations.is_empty() {
+ buffer_operation::Entity::insert_many(operations)
+ .exec(&*tx)
+ .await?;
+ }
+
+ let mut connections = Vec::new();
+ let mut rows = channel_buffer_collaborator::Entity::find()
+ .filter(
+ Condition::all()
+ .add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)),
+ )
+ .stream(&*tx)
+ .await?;
+ while let Some(row) = rows.next().await {
+ let row = row?;
+ connections.push(ConnectionId {
+ id: row.connection_id as u32,
+ owner_id: row.connection_server_id.0 as u32,
+ });
+ }
+
+ Ok(connections)
+ })
+ .await
+ }
+
+ async fn get_buffer_state(
+ &self,
+ buffer: &buffer::Model,
+ tx: &DatabaseTransaction,
+ ) -> Result<(String, Vec<proto::Operation>)> {
+ let id = buffer.id;
+ let (base_text, version) = if buffer.epoch > 0 {
+ let snapshot = buffer_snapshot::Entity::find()
+ .filter(
+ buffer_snapshot::Column::BufferId
+ .eq(id)
+ .and(buffer_snapshot::Column::Epoch.eq(buffer.epoch)),
+ )
+ .one(&*tx)
+ .await?
+ .ok_or_else(|| anyhow!("no such snapshot"))?;
+
+ let version = snapshot.operation_serialization_version;
+ (snapshot.text, version)
+ } else {
+ (String::new(), storage::SERIALIZATION_VERSION)
+ };
+
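+        // Load every operation recorded against the buffer's current epoch.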
+ let mut rows = buffer_operation::Entity::find()
+ .filter(
+ buffer_operation::Column::BufferId
+ .eq(id)
+ .and(buffer_operation::Column::Epoch.eq(buffer.epoch)),
+ )
+ .stream(&*tx)
+ .await?;
+ let mut operations = Vec::new();
+ while let Some(row) = rows.next().await {
+ let row = row?;
+
+ let operation = operation_from_storage(row, version)?;
+ operations.push(proto::Operation {
+ variant: Some(operation),
+ })
+ }
+
+ Ok((base_text, operations))
+ }
+
+ async fn snapshot_buffer(&self, channel_id: ChannelId, tx: &DatabaseTransaction) -> Result<()> {
+ let buffer = channel::Model {
+ id: channel_id,
+ ..Default::default()
+ }
+ .find_related(buffer::Entity)
+ .one(&*tx)
+ .await?
+ .ok_or_else(|| anyhow!("no such buffer"))?;
+
+ let (base_text, operations) = self.get_buffer_state(&buffer, tx).await?;
+ if operations.is_empty() {
+ return Ok(());
+ }
+
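+        // Replay the stored operations on the old base text, then persist the result as the next epoch's snapshot.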
+ let mut text_buffer = text::Buffer::new(0, 0, base_text);
+ text_buffer
+ .apply_ops(operations.into_iter().filter_map(operation_from_wire))
+ .unwrap();
+
+ let base_text = text_buffer.text();
+ let epoch = buffer.epoch + 1;
+
+ buffer_snapshot::Model {
+ buffer_id: buffer.id,
+ epoch,
+ text: base_text,
+ operation_serialization_version: storage::SERIALIZATION_VERSION,
+ }
+ .into_active_model()
+ .insert(tx)
+ .await?;
+
+ buffer::ActiveModel {
+ id: ActiveValue::Unchanged(buffer.id),
+ epoch: ActiveValue::Set(epoch),
+ ..Default::default()
+ }
+ .save(tx)
+ .await?;
+
+ Ok(())
+ }
+}
+
+fn operation_to_storage(
+ operation: &proto::Operation,
+ buffer: &buffer::Model,
+ _format: i32,
+) -> Option<buffer_operation::ActiveModel> {
+ let (replica_id, lamport_timestamp, value) = match operation.variant.as_ref()? {
+ proto::operation::Variant::Edit(operation) => (
+ operation.replica_id,
+ operation.lamport_timestamp,
+ storage::Operation {
+ local_timestamp: operation.local_timestamp,
+ version: version_to_storage(&operation.version),
+ is_undo: false,
+ edit_ranges: operation
+ .ranges
+ .iter()
+ .map(|range| storage::Range {
+ start: range.start,
+ end: range.end,
+ })
+ .collect(),
+ edit_texts: operation.new_text.clone(),
+ undo_counts: Vec::new(),
+ },
+ ),
+ proto::operation::Variant::Undo(operation) => (
+ operation.replica_id,
+ operation.lamport_timestamp,
+ storage::Operation {
+ local_timestamp: operation.local_timestamp,
+ version: version_to_storage(&operation.version),
+ is_undo: true,
+ edit_ranges: Vec::new(),
+ edit_texts: Vec::new(),
+ undo_counts: operation
+ .counts
+ .iter()
+ .map(|entry| storage::UndoCount {
+ replica_id: entry.replica_id,
+ local_timestamp: entry.local_timestamp,
+ count: entry.count,
+ })
+ .collect(),
+ },
+ ),
+ _ => None?,
+ };
+
+ Some(buffer_operation::ActiveModel {
+ buffer_id: ActiveValue::Set(buffer.id),
+ epoch: ActiveValue::Set(buffer.epoch),
+ replica_id: ActiveValue::Set(replica_id as i32),
+ lamport_timestamp: ActiveValue::Set(lamport_timestamp as i32),
+ value: ActiveValue::Set(value.encode_to_vec()),
+ })
+}
+
+fn operation_from_storage(
+ row: buffer_operation::Model,
+ _format_version: i32,
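+    // Notify the existing collaborators that a new collaborator has joined the channel buffer.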
+) -> Result<proto::operation::Variant, Error> {
+ let operation =
+ storage::Operation::decode(row.value.as_slice()).map_err(|error| anyhow!("{}", error))?;
+ let version = version_from_storage(&operation.version);
+ Ok(if operation.is_undo {
+ proto::operation::Variant::Undo(proto::operation::Undo {
+ replica_id: row.replica_id as u32,
+ local_timestamp: operation.local_timestamp as u32,
+ lamport_timestamp: row.lamport_timestamp as u32,
+ version,
+ counts: operation
+ .undo_counts
+ .iter()
+ .map(|entry| proto::UndoCount {
+ replica_id: entry.replica_id,
+ local_timestamp: entry.local_timestamp,
+ count: entry.count,
+ })
+ .collect(),
+ })
+ } else {
+ proto::operation::Variant::Edit(proto::operation::Edit {
+ replica_id: row.replica_id as u32,
+ local_timestamp: operation.local_timestamp as u32,
+ lamport_timestamp: row.lamport_timestamp as u32,
+ version,
+ ranges: operation
+ .edit_ranges
+ .into_iter()
+ .map(|range| proto::Range {
+ start: range.start,
+ end: range.end,
+ })
+ .collect(),
+ new_text: operation.edit_texts,
+ })
+ })
+}
+
+fn version_to_storage(version: &Vec<proto::VectorClockEntry>) -> Vec<storage::VectorClockEntry> {
+ version
+ .iter()
+ .map(|entry| storage::VectorClockEntry {
+ replica_id: entry.replica_id,
+ timestamp: entry.timestamp,
+ })
+ .collect()
+}
+
+fn version_from_storage(version: &Vec<storage::VectorClockEntry>) -> Vec<proto::VectorClockEntry> {
+ version
+ .iter()
+ .map(|entry| proto::VectorClockEntry {
+ replica_id: entry.replica_id,
+ timestamp: entry.timestamp,
+ })
+ .collect()
+}
+
+// This is currently a manual copy of the deserialization code in the client's language crate
+pub fn operation_from_wire(operation: proto::Operation) -> Option<text::Operation> {
+ match operation.variant? {
+ proto::operation::Variant::Edit(edit) => Some(text::Operation::Edit(EditOperation {
+ timestamp: InsertionTimestamp {
+ replica_id: edit.replica_id as text::ReplicaId,
+ local: edit.local_timestamp,
+ lamport: edit.lamport_timestamp,
+ },
+ version: version_from_wire(&edit.version),
+ ranges: edit
+ .ranges
+ .into_iter()
+ .map(|range| {
+ text::FullOffset(range.start as usize)..text::FullOffset(range.end as usize)
+ })
+ .collect(),
+ new_text: edit.new_text.into_iter().map(Arc::from).collect(),
+ })),
+ proto::operation::Variant::Undo(undo) => Some(text::Operation::Undo {
+ lamport_timestamp: clock::Lamport {
+ replica_id: undo.replica_id as text::ReplicaId,
+ value: undo.lamport_timestamp,
+ },
+ undo: UndoOperation {
+ id: clock::Local {
+ replica_id: undo.replica_id as text::ReplicaId,
+ value: undo.local_timestamp,
+ },
+ version: version_from_wire(&undo.version),
+ counts: undo
+ .counts
+ .into_iter()
+ .map(|c| {
+ (
+ clock::Local {
+ replica_id: c.replica_id as text::ReplicaId,
+ value: c.local_timestamp,
+ },
+ c.count,
+ )
+ })
+ .collect(),
+ },
+ }),
+ _ => None,
+ }
+}
+
+fn version_from_wire(message: &[proto::VectorClockEntry]) -> clock::Global {
+ let mut version = clock::Global::new();
+ for entry in message {
+ version.observe(clock::Local {
+ replica_id: entry.replica_id as text::ReplicaId,
+ value: entry.timestamp,
+ });
+ }
+ version
+}
+
+mod storage {
+ #![allow(non_snake_case)]
+ use prost::Message;
+ pub const SERIALIZATION_VERSION: i32 = 1;
+
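+    // Prost message persisted in the "value" column of buffer_operations.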
+ #[derive(Message)]
+ pub struct Operation {
+ #[prost(uint32, tag = "1")]
+ pub local_timestamp: u32,
+ #[prost(message, repeated, tag = "2")]
+ pub version: Vec<VectorClockEntry>,
+ #[prost(bool, tag = "3")]
+ pub is_undo: bool,
+ #[prost(message, repeated, tag = "4")]
+ pub edit_ranges: Vec<Range>,
+ #[prost(string, repeated, tag = "5")]
+ pub edit_texts: Vec<String>,
+ #[prost(message, repeated, tag = "6")]
+ pub undo_counts: Vec<UndoCount>,
+ }
+
+ #[derive(Message)]
+ pub struct VectorClockEntry {
+ #[prost(uint32, tag = "1")]
+ pub replica_id: u32,
+ #[prost(uint32, tag = "2")]
+ pub timestamp: u32,
+ }
+
+ #[derive(Message)]
+ pub struct Range {
+ #[prost(uint64, tag = "1")]
+ pub start: u64,
+ #[prost(uint64, tag = "2")]
+ pub end: u64,
+ }
+
+ #[derive(Message)]
+ pub struct UndoCount {
+ #[prost(uint32, tag = "1")]
+ pub replica_id: u32,
+ #[prost(uint32, tag = "2")]
+ pub local_timestamp: u32,
+ #[prost(uint32, tag = "3")]
+ pub count: u32,
+ }
+}
@@ -465,9 +465,9 @@ impl Database {
let mut rejoined_projects = Vec::new();
for rejoined_project in &rejoin_room.rejoined_projects {
let project_id = ProjectId::from_proto(rejoined_project.id);
- let Some(project) = project::Entity::find_by_id(project_id)
- .one(&*tx)
- .await? else { continue };
+ let Some(project) = project::Entity::find_by_id(project_id).one(&*tx).await? else {
+ continue;
+ };
let mut worktrees = Vec::new();
let db_worktrees = project.find_related(worktree::Entity).all(&*tx).await?;
@@ -903,15 +903,35 @@ impl Database {
),
)
.one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("not a participant in any room"))?;
+ .await?;
- room_participant::Entity::update(room_participant::ActiveModel {
- answering_connection_lost: ActiveValue::set(true),
- ..participant.into_active_model()
- })
- .exec(&*tx)
- .await?;
+ if let Some(participant) = participant {
+ room_participant::Entity::update(room_participant::ActiveModel {
+ answering_connection_lost: ActiveValue::set(true),
+ ..participant.into_active_model()
+ })
+ .exec(&*tx)
+ .await?;
+ }
+
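+            // Also mark any channel buffer collaborators on this connection as having lost their connection.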
+ channel_buffer_collaborator::Entity::update_many()
+ .filter(
+ Condition::all()
+ .add(
+ channel_buffer_collaborator::Column::ConnectionId
+ .eq(connection.id as i32),
+ )
+ .add(
+ channel_buffer_collaborator::Column::ConnectionServerId
+ .eq(connection.owner_id as i32),
+ ),
+ )
+ .set(channel_buffer_collaborator::ActiveModel {
+ connection_lost: ActiveValue::set(true),
+ ..Default::default()
+ })
+ .exec(&*tx)
+ .await?;
Ok(())
})
@@ -240,4 +240,58 @@ impl Database {
result.push('%');
result
}
+
+ #[cfg(debug_assertions)]
+ pub async fn create_user_flag(&self, flag: &str) -> Result<FlagId> {
+ self.transaction(|tx| async move {
+ let flag = feature_flag::Entity::insert(feature_flag::ActiveModel {
+ flag: ActiveValue::set(flag.to_string()),
+ ..Default::default()
+ })
+ .exec(&*tx)
+ .await?
+ .last_insert_id;
+
+ Ok(flag)
+ })
+ .await
+ }
+
+ #[cfg(debug_assertions)]
+ pub async fn add_user_flag(&self, user: UserId, flag: FlagId) -> Result<()> {
+ self.transaction(|tx| async move {
+ user_feature::Entity::insert(user_feature::ActiveModel {
+ user_id: ActiveValue::set(user),
+ feature_id: ActiveValue::set(flag),
+ })
+ .exec(&*tx)
+ .await?;
+
+ Ok(())
+ })
+ .await
+ }
+
+ pub async fn get_user_flags(&self, user: UserId) -> Result<Vec<String>> {
+ self.transaction(|tx| async move {
+ #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
+ enum QueryAs {
+ Flag,
+ }
+
+ let flags = user::Model {
+ id: user,
+ ..Default::default()
+ }
+ .find_linked(user::UserFlags)
+ .select_only()
+ .column(feature_flag::Column::Flag)
+ .into_values::<_, QueryAs>()
+ .all(&*tx)
+ .await?;
+
+ Ok(flags)
+ })
+ .await
+ }
}
@@ -1,8 +1,13 @@
pub mod access_token;
+pub mod buffer;
+pub mod buffer_operation;
+pub mod buffer_snapshot;
pub mod channel;
+pub mod channel_buffer_collaborator;
pub mod channel_member;
pub mod channel_path;
pub mod contact;
+pub mod feature_flag;
pub mod follower;
pub mod language_server;
pub mod project;
@@ -12,6 +17,7 @@ pub mod room_participant;
pub mod server;
pub mod signup;
pub mod user;
+pub mod user_feature;
pub mod worktree;
pub mod worktree_diagnostic_summary;
pub mod worktree_entry;
@@ -0,0 +1,45 @@
+use crate::db::{BufferId, ChannelId};
+use sea_orm::entity::prelude::*;
+
+#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
+#[sea_orm(table_name = "buffers")]
+pub struct Model {
+ #[sea_orm(primary_key)]
+ pub id: BufferId,
+ pub epoch: i32,
+ pub channel_id: ChannelId,
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {
+ #[sea_orm(has_many = "super::buffer_operation::Entity")]
+ Operations,
+ #[sea_orm(has_many = "super::buffer_snapshot::Entity")]
+ Snapshots,
+ #[sea_orm(
+ belongs_to = "super::channel::Entity",
+ from = "Column::ChannelId",
+ to = "super::channel::Column::Id"
+ )]
+ Channel,
+}
+
+impl Related<super::buffer_operation::Entity> for Entity {
+ fn to() -> RelationDef {
+ Relation::Operations.def()
+ }
+}
+
+impl Related<super::buffer_snapshot::Entity> for Entity {
+ fn to() -> RelationDef {
+ Relation::Snapshots.def()
+ }
+}
+
+impl Related<super::channel::Entity> for Entity {
+ fn to() -> RelationDef {
+ Relation::Channel.def()
+ }
+}
+
+impl ActiveModelBehavior for ActiveModel {}
@@ -0,0 +1,34 @@
+use crate::db::BufferId;
+use sea_orm::entity::prelude::*;
+
+#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
+#[sea_orm(table_name = "buffer_operations")]
+pub struct Model {
+ #[sea_orm(primary_key)]
+ pub buffer_id: BufferId,
+ #[sea_orm(primary_key)]
+ pub epoch: i32,
+ #[sea_orm(primary_key)]
+ pub lamport_timestamp: i32,
+ #[sea_orm(primary_key)]
+ pub replica_id: i32,
+ pub value: Vec<u8>,
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {
+ #[sea_orm(
+ belongs_to = "super::buffer::Entity",
+ from = "Column::BufferId",
+ to = "super::buffer::Column::Id"
+ )]
+ Buffer,
+}
+
+impl Related<super::buffer::Entity> for Entity {
+ fn to() -> RelationDef {
+ Relation::Buffer.def()
+ }
+}
+
+impl ActiveModelBehavior for ActiveModel {}
@@ -0,0 +1,31 @@
+use crate::db::BufferId;
+use sea_orm::entity::prelude::*;
+
+#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
+#[sea_orm(table_name = "buffer_snapshots")]
+pub struct Model {
+ #[sea_orm(primary_key)]
+ pub buffer_id: BufferId,
+ #[sea_orm(primary_key)]
+ pub epoch: i32,
+ pub text: String,
+ pub operation_serialization_version: i32,
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {
+ #[sea_orm(
+ belongs_to = "super::buffer::Entity",
+ from = "Column::BufferId",
+ to = "super::buffer::Column::Id"
+ )]
+ Buffer,
+}
+
+impl Related<super::buffer::Entity> for Entity {
+ fn to() -> RelationDef {
+ Relation::Buffer.def()
+ }
+}
+
+impl ActiveModelBehavior for ActiveModel {}
@@ -15,8 +15,12 @@ impl ActiveModelBehavior for ActiveModel {}
pub enum Relation {
#[sea_orm(has_one = "super::room::Entity")]
Room,
+ #[sea_orm(has_one = "super::buffer::Entity")]
+ Buffer,
#[sea_orm(has_many = "super::channel_member::Entity")]
Member,
+ #[sea_orm(has_many = "super::channel_buffer_collaborator::Entity")]
+ BufferCollaborators,
}
impl Related<super::channel_member::Entity> for Entity {
@@ -30,3 +34,15 @@ impl Related<super::room::Entity> for Entity {
Relation::Room.def()
}
}
+
+impl Related<super::buffer::Entity> for Entity {
+ fn to() -> RelationDef {
+ Relation::Buffer.def()
+ }
+}
+
+impl Related<super::channel_buffer_collaborator::Entity> for Entity {
+ fn to() -> RelationDef {
+ Relation::BufferCollaborators.def()
+ }
+}
@@ -0,0 +1,43 @@
+use crate::db::{ChannelBufferCollaboratorId, ChannelId, ReplicaId, ServerId, UserId};
+use rpc::ConnectionId;
+use sea_orm::entity::prelude::*;
+
+#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
+#[sea_orm(table_name = "channel_buffer_collaborators")]
+pub struct Model {
+ #[sea_orm(primary_key)]
+ pub id: ChannelBufferCollaboratorId,
+ pub channel_id: ChannelId,
+ pub connection_id: i32,
+ pub connection_server_id: ServerId,
+ pub connection_lost: bool,
+ pub user_id: UserId,
+ pub replica_id: ReplicaId,
+}
+
+impl Model {
+ pub fn connection(&self) -> ConnectionId {
+ ConnectionId {
+ owner_id: self.connection_server_id.0 as u32,
+ id: self.connection_id as u32,
+ }
+ }
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {
+ #[sea_orm(
+ belongs_to = "super::channel::Entity",
+ from = "Column::ChannelId",
+ to = "super::channel::Column::Id"
+ )]
+ Channel,
+}
+
+impl Related<super::channel::Entity> for Entity {
+ fn to() -> RelationDef {
+ Relation::Channel.def()
+ }
+}
+
+impl ActiveModelBehavior for ActiveModel {}
@@ -0,0 +1,40 @@
+use sea_orm::entity::prelude::*;
+
+use crate::db::FlagId;
+
+#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
+#[sea_orm(table_name = "feature_flags")]
+pub struct Model {
+ #[sea_orm(primary_key)]
+ pub id: FlagId,
+ pub flag: String,
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {
+ #[sea_orm(has_many = "super::user_feature::Entity")]
+ UserFeature,
+}
+
+impl Related<super::user_feature::Entity> for Entity {
+ fn to() -> RelationDef {
+ Relation::UserFeature.def()
+ }
+}
+
+impl ActiveModelBehavior for ActiveModel {}
+
+pub struct FlaggedUsers;
+
+impl Linked for FlaggedUsers {
+ type FromEntity = Entity;
+
+ type ToEntity = super::user::Entity;
+
+ fn link(&self) -> Vec<RelationDef> {
+ vec![
+ super::user_feature::Relation::Flag.def().rev(),
+ super::user_feature::Relation::User.def(),
+ ]
+ }
+}
@@ -28,6 +28,8 @@ pub enum Relation {
HostedProjects,
#[sea_orm(has_many = "super::channel_member::Entity")]
ChannelMemberships,
+ #[sea_orm(has_many = "super::user_feature::Entity")]
+ UserFeatures,
}
impl Related<super::access_token::Entity> for Entity {
@@ -54,4 +56,25 @@ impl Related<super::channel_member::Entity> for Entity {
}
}
+impl Related<super::user_feature::Entity> for Entity {
+ fn to() -> RelationDef {
+ Relation::UserFeatures.def()
+ }
+}
+
impl ActiveModelBehavior for ActiveModel {}
+
+pub struct UserFlags;
+
+impl Linked for UserFlags {
+ type FromEntity = Entity;
+
+ type ToEntity = super::feature_flag::Entity;
+
+ fn link(&self) -> Vec<RelationDef> {
+ vec![
+ super::user_feature::Relation::User.def().rev(),
+ super::user_feature::Relation::Flag.def(),
+ ]
+ }
+}
@@ -0,0 +1,42 @@
+use sea_orm::entity::prelude::*;
+
+use crate::db::{FlagId, UserId};
+
+#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
+#[sea_orm(table_name = "user_features")]
+pub struct Model {
+ #[sea_orm(primary_key)]
+ pub user_id: UserId,
+ #[sea_orm(primary_key)]
+ pub feature_id: FlagId,
+}
+
+#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
+pub enum Relation {
+ #[sea_orm(
+ belongs_to = "super::feature_flag::Entity",
+ from = "Column::FeatureId",
+ to = "super::feature_flag::Column::Id"
+ )]
+ Flag,
+ #[sea_orm(
+ belongs_to = "super::user::Entity",
+ from = "Column::UserId",
+ to = "super::user::Column::Id"
+ )]
+ User,
+}
+
+impl Related<super::feature_flag::Entity> for Entity {
+ fn to() -> RelationDef {
+ Relation::Flag.def()
+ }
+}
+
+impl Related<super::user::Entity> for Entity {
+ fn to() -> RelationDef {
+ Relation::User.def()
+ }
+}
+
+impl ActiveModelBehavior for ActiveModel {}
@@ -1,3 +1,7 @@
+mod buffer_tests;
+mod db_tests;
+mod feature_flag_tests;
+
use super::*;
use gpui::executor::Background;
use parking_lot::Mutex;
@@ -91,6 +95,26 @@ impl TestDb {
}
}
+#[macro_export]
+macro_rules! test_both_dbs {
+ ($test_name:ident, $postgres_test_name:ident, $sqlite_test_name:ident) => {
+ #[gpui::test]
+ async fn $postgres_test_name() {
+ let test_db = crate::db::TestDb::postgres(
+ gpui::executor::Deterministic::new(0).build_background(),
+ );
+ $test_name(test_db.db()).await;
+ }
+
+ #[gpui::test]
+ async fn $sqlite_test_name() {
+ let test_db =
+ crate::db::TestDb::sqlite(gpui::executor::Deterministic::new(0).build_background());
+ $test_name(test_db.db()).await;
+ }
+ };
+}
+
impl Drop for TestDb {
fn drop(&mut self) {
let db = self.db.take().unwrap();
@@ -0,0 +1,165 @@
+use super::*;
+use crate::test_both_dbs;
+use language::proto;
+use text::Buffer;
+
+test_both_dbs!(
+ test_channel_buffers,
+ test_channel_buffers_postgres,
+ test_channel_buffers_sqlite
+);
+
+async fn test_channel_buffers(db: &Arc<Database>) {
+ let a_id = db
+ .create_user(
+ "user_a@example.com",
+ false,
+ NewUserParams {
+ github_login: "user_a".into(),
+ github_user_id: 101,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id;
+ let b_id = db
+ .create_user(
+ "user_b@example.com",
+ false,
+ NewUserParams {
+ github_login: "user_b".into(),
+ github_user_id: 102,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id;
+
+ // This user will not be a part of the channel
+ let c_id = db
+ .create_user(
+ "user_c@example.com",
+ false,
+ NewUserParams {
+ github_login: "user_c".into(),
+                github_user_id: 103,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id;
+
+ let owner_id = db.create_server("production").await.unwrap().0 as u32;
+
+ let zed_id = db.create_root_channel("zed", "1", a_id).await.unwrap();
+
+ db.invite_channel_member(zed_id, b_id, a_id, false)
+ .await
+ .unwrap();
+
+ db.respond_to_channel_invite(zed_id, b_id, true)
+ .await
+ .unwrap();
+
+ let connection_id_a = ConnectionId { owner_id, id: 1 };
+ let _ = db
+ .join_channel_buffer(zed_id, a_id, connection_id_a)
+ .await
+ .unwrap();
+
+ let mut buffer_a = Buffer::new(0, 0, "".to_string());
+ let mut operations = Vec::new();
+ operations.push(buffer_a.edit([(0..0, "hello world")]));
+ operations.push(buffer_a.edit([(5..5, ", cruel")]));
+ operations.push(buffer_a.edit([(0..5, "goodbye")]));
+ operations.push(buffer_a.undo().unwrap().1);
+ assert_eq!(buffer_a.text(), "hello, cruel world");
+
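+    // Convert the text operations into protos before storing them via the database.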
+ let operations = operations
+ .into_iter()
+ .map(|op| proto::serialize_operation(&language::Operation::Buffer(op)))
+ .collect::<Vec<_>>();
+
+ db.update_channel_buffer(zed_id, a_id, &operations)
+ .await
+ .unwrap();
+
+ let connection_id_b = ConnectionId { owner_id, id: 2 };
+ let buffer_response_b = db
+ .join_channel_buffer(zed_id, b_id, connection_id_b)
+ .await
+ .unwrap();
+
+ let mut buffer_b = Buffer::new(0, 0, buffer_response_b.base_text);
+ buffer_b
+ .apply_ops(buffer_response_b.operations.into_iter().map(|operation| {
+ let operation = proto::deserialize_operation(operation).unwrap();
+ if let language::Operation::Buffer(operation) = operation {
+ operation
+ } else {
+ unreachable!()
+ }
+ }))
+ .unwrap();
+
+ assert_eq!(buffer_b.text(), "hello, cruel world");
+
+ // Ensure that C fails to open the buffer
+ assert!(db
+ .join_channel_buffer(zed_id, c_id, ConnectionId { owner_id, id: 3 })
+ .await
+ .is_err());
+
+ // Ensure that both collaborators have shown up
+ assert_eq!(
+ buffer_response_b.collaborators,
+ &[
+ rpc::proto::Collaborator {
+ user_id: a_id.to_proto(),
+ peer_id: Some(rpc::proto::PeerId { id: 1, owner_id }),
+ replica_id: 0,
+ },
+ rpc::proto::Collaborator {
+ user_id: b_id.to_proto(),
+ peer_id: Some(rpc::proto::PeerId { id: 2, owner_id }),
+ replica_id: 1,
+ }
+ ]
+ );
+
+ // Ensure that get_channel_buffer_collaborators works
+    let zed_collaborators = db.get_channel_buffer_collaborators(zed_id).await.unwrap();
+    assert_eq!(zed_collaborators, &[a_id, b_id]);
+
+ let collaborators = db
+ .leave_channel_buffer(zed_id, connection_id_b)
+ .await
+ .unwrap();
+
+    assert_eq!(collaborators, &[connection_id_a]);
+
+ let cargo_id = db.create_root_channel("cargo", "2", a_id).await.unwrap();
+ let _ = db
+ .join_channel_buffer(cargo_id, a_id, connection_id_a)
+ .await
+ .unwrap();
+
+ db.leave_channel_buffers(connection_id_a).await.unwrap();
+
+ let zed_collaborators = db.get_channel_buffer_collaborators(zed_id).await.unwrap();
+ let cargo_collaborators = db.get_channel_buffer_collaborators(cargo_id).await.unwrap();
+ assert_eq!(zed_collaborators, &[]);
+ assert_eq!(cargo_collaborators, &[]);
+
+ // When everyone has left the channel, the operations are collapsed into
+ // a new base text.
+ let buffer_response_b = db
+ .join_channel_buffer(zed_id, b_id, connection_id_b)
+ .await
+ .unwrap();
+ assert_eq!(buffer_response_b.base_text, "hello, cruel world");
+ assert_eq!(buffer_response_b.operations, &[]);
+}
@@ -1,242 +1,234 @@
use super::*;
+use crate::test_both_dbs;
use gpui::executor::{Background, Deterministic};
use pretty_assertions::{assert_eq, assert_ne};
use std::sync::Arc;
-use test_db::TestDb;
-
-macro_rules! test_both_dbs {
- ($postgres_test_name:ident, $sqlite_test_name:ident, $db:ident, $body:block) => {
- #[gpui::test]
- async fn $postgres_test_name() {
- let test_db = TestDb::postgres(Deterministic::new(0).build_background());
- let $db = test_db.db();
- $body
- }
-
- #[gpui::test]
- async fn $sqlite_test_name() {
- let test_db = TestDb::sqlite(Deterministic::new(0).build_background());
- let $db = test_db.db();
- $body
- }
- };
-}
+use tests::TestDb;
test_both_dbs!(
+ test_get_users,
test_get_users_by_ids_postgres,
- test_get_users_by_ids_sqlite,
- db,
- {
- let mut user_ids = Vec::new();
- let mut user_metric_ids = Vec::new();
- for i in 1..=4 {
- let user = db
- .create_user(
- &format!("user{i}@example.com"),
- false,
- NewUserParams {
- github_login: format!("user{i}"),
- github_user_id: i,
- invite_count: 0,
- },
- )
- .await
- .unwrap();
- user_ids.push(user.user_id);
- user_metric_ids.push(user.metrics_id);
- }
-
- assert_eq!(
- db.get_users_by_ids(user_ids.clone()).await.unwrap(),
- vec![
- User {
- id: user_ids[0],
- github_login: "user1".to_string(),
- github_user_id: Some(1),
- email_address: Some("user1@example.com".to_string()),
- admin: false,
- metrics_id: user_metric_ids[0].parse().unwrap(),
- ..Default::default()
- },
- User {
- id: user_ids[1],
- github_login: "user2".to_string(),
- github_user_id: Some(2),
- email_address: Some("user2@example.com".to_string()),
- admin: false,
- metrics_id: user_metric_ids[1].parse().unwrap(),
- ..Default::default()
- },
- User {
- id: user_ids[2],
- github_login: "user3".to_string(),
- github_user_id: Some(3),
- email_address: Some("user3@example.com".to_string()),
- admin: false,
- metrics_id: user_metric_ids[2].parse().unwrap(),
- ..Default::default()
- },
- User {
- id: user_ids[3],
- github_login: "user4".to_string(),
- github_user_id: Some(4),
- email_address: Some("user4@example.com".to_string()),
- admin: false,
- metrics_id: user_metric_ids[3].parse().unwrap(),
- ..Default::default()
- }
- ]
- );
- }
+ test_get_users_by_ids_sqlite
);
-test_both_dbs!(
- test_get_or_create_user_by_github_account_postgres,
- test_get_or_create_user_by_github_account_sqlite,
- db,
- {
- let user_id1 = db
- .create_user(
- "user1@example.com",
- false,
- NewUserParams {
- github_login: "login1".into(),
- github_user_id: 101,
- invite_count: 0,
- },
- )
- .await
- .unwrap()
- .user_id;
- let user_id2 = db
+async fn test_get_users(db: &Arc<Database>) {
+ let mut user_ids = Vec::new();
+ let mut user_metric_ids = Vec::new();
+ for i in 1..=4 {
+ let user = db
.create_user(
- "user2@example.com",
+ &format!("user{i}@example.com"),
false,
NewUserParams {
- github_login: "login2".into(),
- github_user_id: 102,
+ github_login: format!("user{i}"),
+ github_user_id: i,
invite_count: 0,
},
)
.await
- .unwrap()
- .user_id;
-
- let user = db
- .get_or_create_user_by_github_account("login1", None, None)
- .await
- .unwrap()
.unwrap();
- assert_eq!(user.id, user_id1);
- assert_eq!(&user.github_login, "login1");
- assert_eq!(user.github_user_id, Some(101));
-
- assert!(db
- .get_or_create_user_by_github_account("non-existent-login", None, None)
- .await
- .unwrap()
- .is_none());
+ user_ids.push(user.user_id);
+ user_metric_ids.push(user.metrics_id);
+ }
- let user = db
- .get_or_create_user_by_github_account("the-new-login2", Some(102), None)
- .await
- .unwrap()
- .unwrap();
- assert_eq!(user.id, user_id2);
- assert_eq!(&user.github_login, "the-new-login2");
- assert_eq!(user.github_user_id, Some(102));
+ assert_eq!(
+ db.get_users_by_ids(user_ids.clone()).await.unwrap(),
+ vec![
+ User {
+ id: user_ids[0],
+ github_login: "user1".to_string(),
+ github_user_id: Some(1),
+ email_address: Some("user1@example.com".to_string()),
+ admin: false,
+ metrics_id: user_metric_ids[0].parse().unwrap(),
+ ..Default::default()
+ },
+ User {
+ id: user_ids[1],
+ github_login: "user2".to_string(),
+ github_user_id: Some(2),
+ email_address: Some("user2@example.com".to_string()),
+ admin: false,
+ metrics_id: user_metric_ids[1].parse().unwrap(),
+ ..Default::default()
+ },
+ User {
+ id: user_ids[2],
+ github_login: "user3".to_string(),
+ github_user_id: Some(3),
+ email_address: Some("user3@example.com".to_string()),
+ admin: false,
+ metrics_id: user_metric_ids[2].parse().unwrap(),
+ ..Default::default()
+ },
+ User {
+ id: user_ids[3],
+ github_login: "user4".to_string(),
+ github_user_id: Some(4),
+ email_address: Some("user4@example.com".to_string()),
+ admin: false,
+ metrics_id: user_metric_ids[3].parse().unwrap(),
+ ..Default::default()
+ }
+ ]
+ );
+}
- let user = db
- .get_or_create_user_by_github_account("login3", Some(103), Some("user3@example.com"))
- .await
- .unwrap()
- .unwrap();
- assert_eq!(&user.github_login, "login3");
- assert_eq!(user.github_user_id, Some(103));
- assert_eq!(user.email_address, Some("user3@example.com".into()));
- }
+test_both_dbs!(
+ test_get_or_create_user_by_github_account,
+ test_get_or_create_user_by_github_account_postgres,
+ test_get_or_create_user_by_github_account_sqlite
);
+async fn test_get_or_create_user_by_github_account(db: &Arc<Database>) {
+ let user_id1 = db
+ .create_user(
+ "user1@example.com",
+ false,
+ NewUserParams {
+ github_login: "login1".into(),
+ github_user_id: 101,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id;
+ let user_id2 = db
+ .create_user(
+ "user2@example.com",
+ false,
+ NewUserParams {
+ github_login: "login2".into(),
+ github_user_id: 102,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id;
+
+ let user = db
+ .get_or_create_user_by_github_account("login1", None, None)
+ .await
+ .unwrap()
+ .unwrap();
+ assert_eq!(user.id, user_id1);
+ assert_eq!(&user.github_login, "login1");
+ assert_eq!(user.github_user_id, Some(101));
+
+ assert!(db
+ .get_or_create_user_by_github_account("non-existent-login", None, None)
+ .await
+ .unwrap()
+ .is_none());
+
+ let user = db
+ .get_or_create_user_by_github_account("the-new-login2", Some(102), None)
+ .await
+ .unwrap()
+ .unwrap();
+ assert_eq!(user.id, user_id2);
+ assert_eq!(&user.github_login, "the-new-login2");
+ assert_eq!(user.github_user_id, Some(102));
+
+ let user = db
+ .get_or_create_user_by_github_account("login3", Some(103), Some("user3@example.com"))
+ .await
+ .unwrap()
+ .unwrap();
+ assert_eq!(&user.github_login, "login3");
+ assert_eq!(user.github_user_id, Some(103));
+ assert_eq!(user.email_address, Some("user3@example.com".into()));
+}
+
test_both_dbs!(
+ test_create_access_tokens,
test_create_access_tokens_postgres,
- test_create_access_tokens_sqlite,
- db,
- {
- let user = db
- .create_user(
- "u1@example.com",
- false,
- NewUserParams {
- github_login: "u1".into(),
- github_user_id: 1,
- invite_count: 0,
- },
- )
- .await
- .unwrap()
- .user_id;
-
- let token_1 = db.create_access_token(user, "h1", 2).await.unwrap();
- let token_2 = db.create_access_token(user, "h2", 2).await.unwrap();
- assert_eq!(
- db.get_access_token(token_1).await.unwrap(),
- access_token::Model {
- id: token_1,
- user_id: user,
- hash: "h1".into(),
- }
- );
- assert_eq!(
- db.get_access_token(token_2).await.unwrap(),
- access_token::Model {
- id: token_2,
- user_id: user,
- hash: "h2".into()
- }
- );
+ test_create_access_tokens_sqlite
+);
- let token_3 = db.create_access_token(user, "h3", 2).await.unwrap();
- assert_eq!(
- db.get_access_token(token_3).await.unwrap(),
- access_token::Model {
- id: token_3,
- user_id: user,
- hash: "h3".into()
- }
- );
- assert_eq!(
- db.get_access_token(token_2).await.unwrap(),
- access_token::Model {
- id: token_2,
- user_id: user,
- hash: "h2".into()
- }
- );
- assert!(db.get_access_token(token_1).await.is_err());
-
- let token_4 = db.create_access_token(user, "h4", 2).await.unwrap();
- assert_eq!(
- db.get_access_token(token_4).await.unwrap(),
- access_token::Model {
- id: token_4,
- user_id: user,
- hash: "h4".into()
- }
- );
- assert_eq!(
- db.get_access_token(token_3).await.unwrap(),
- access_token::Model {
- id: token_3,
- user_id: user,
- hash: "h3".into()
- }
- );
- assert!(db.get_access_token(token_2).await.is_err());
- assert!(db.get_access_token(token_1).await.is_err());
- }
+async fn test_create_access_tokens(db: &Arc<Database>) {
+ let user = db
+ .create_user(
+ "u1@example.com",
+ false,
+ NewUserParams {
+ github_login: "u1".into(),
+ github_user_id: 1,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id;
+
+ let token_1 = db.create_access_token(user, "h1", 2).await.unwrap();
+ let token_2 = db.create_access_token(user, "h2", 2).await.unwrap();
+ assert_eq!(
+ db.get_access_token(token_1).await.unwrap(),
+ access_token::Model {
+ id: token_1,
+ user_id: user,
+ hash: "h1".into(),
+ }
+ );
+ assert_eq!(
+ db.get_access_token(token_2).await.unwrap(),
+ access_token::Model {
+ id: token_2,
+ user_id: user,
+ hash: "h2".into()
+ }
+ );
+
+ let token_3 = db.create_access_token(user, "h3", 2).await.unwrap();
+ assert_eq!(
+ db.get_access_token(token_3).await.unwrap(),
+ access_token::Model {
+ id: token_3,
+ user_id: user,
+ hash: "h3".into()
+ }
+ );
+ assert_eq!(
+ db.get_access_token(token_2).await.unwrap(),
+ access_token::Model {
+ id: token_2,
+ user_id: user,
+ hash: "h2".into()
+ }
+ );
+ assert!(db.get_access_token(token_1).await.is_err());
+
+ let token_4 = db.create_access_token(user, "h4", 2).await.unwrap();
+ assert_eq!(
+ db.get_access_token(token_4).await.unwrap(),
+ access_token::Model {
+ id: token_4,
+ user_id: user,
+ hash: "h4".into()
+ }
+ );
+ assert_eq!(
+ db.get_access_token(token_3).await.unwrap(),
+ access_token::Model {
+ id: token_3,
+ user_id: user,
+ hash: "h3".into()
+ }
+ );
+ assert!(db.get_access_token(token_2).await.is_err());
+ assert!(db.get_access_token(token_1).await.is_err());
+}
+
+test_both_dbs!(
+ test_add_contacts,
+ test_add_contacts_postgres,
+ test_add_contacts_sqlite
);
-test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
+async fn test_add_contacts(db: &Arc<Database>) {
let mut user_ids = Vec::new();
for i in 0..3 {
user_ids.push(
@@ -403,9 +395,15 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
busy: false,
}],
);
-});
+}
-test_both_dbs!(test_metrics_id_postgres, test_metrics_id_sqlite, db, {
+test_both_dbs!(
+ test_metrics_id,
+ test_metrics_id_postgres,
+ test_metrics_id_sqlite
+);
+
+async fn test_metrics_id(db: &Arc<Database>) {
let NewUserResult {
user_id: user1,
metrics_id: metrics_id1,
@@ -444,82 +442,83 @@ test_both_dbs!(test_metrics_id_postgres, test_metrics_id_sqlite, db, {
assert_eq!(metrics_id1.len(), 36);
assert_eq!(metrics_id2.len(), 36);
assert_ne!(metrics_id1, metrics_id2);
-});
+}
test_both_dbs!(
+ test_project_count,
test_project_count_postgres,
- test_project_count_sqlite,
- db,
- {
- let owner_id = db.create_server("test").await.unwrap().0 as u32;
+ test_project_count_sqlite
+);
- let user1 = db
- .create_user(
- &format!("admin@example.com"),
- true,
- NewUserParams {
- github_login: "admin".into(),
- github_user_id: 0,
- invite_count: 0,
- },
- )
- .await
- .unwrap();
- let user2 = db
- .create_user(
- &format!("user@example.com"),
- false,
- NewUserParams {
- github_login: "user".into(),
- github_user_id: 1,
- invite_count: 0,
- },
- )
- .await
- .unwrap();
+async fn test_project_count(db: &Arc<Database>) {
+ let owner_id = db.create_server("test").await.unwrap().0 as u32;
- let room_id = RoomId::from_proto(
- db.create_room(user1.user_id, ConnectionId { owner_id, id: 0 }, "")
- .await
- .unwrap()
- .id,
- );
- db.call(
- room_id,
- user1.user_id,
- ConnectionId { owner_id, id: 0 },
- user2.user_id,
- None,
+ let user1 = db
+ .create_user(
+ &format!("admin@example.com"),
+ true,
+ NewUserParams {
+ github_login: "admin".into(),
+ github_user_id: 0,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap();
+ let user2 = db
+ .create_user(
+ &format!("user@example.com"),
+ false,
+ NewUserParams {
+ github_login: "user".into(),
+ github_user_id: 1,
+ invite_count: 0,
+ },
)
.await
.unwrap();
- db.join_room(room_id, user2.user_id, ConnectionId { owner_id, id: 1 })
- .await
- .unwrap();
- assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);
- db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[])
+ let room_id = RoomId::from_proto(
+ db.create_room(user1.user_id, ConnectionId { owner_id, id: 0 }, "")
.await
- .unwrap();
- assert_eq!(db.project_count_excluding_admins().await.unwrap(), 1);
+ .unwrap()
+ .id,
+ );
+ db.call(
+ room_id,
+ user1.user_id,
+ ConnectionId { owner_id, id: 0 },
+ user2.user_id,
+ None,
+ )
+ .await
+ .unwrap();
+ db.join_room(room_id, user2.user_id, ConnectionId { owner_id, id: 1 })
+ .await
+ .unwrap();
+ assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);
- db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[])
- .await
- .unwrap();
- assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
+ db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[])
+ .await
+ .unwrap();
+ assert_eq!(db.project_count_excluding_admins().await.unwrap(), 1);
- // Projects shared by admins aren't counted.
- db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[])
- .await
- .unwrap();
- assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
+ db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[])
+ .await
+ .unwrap();
+ assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
- db.leave_room(ConnectionId { owner_id, id: 1 })
- .await
- .unwrap();
- assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);
- }
-);
+ // Projects shared by admins aren't counted.
+ db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[])
+ .await
+ .unwrap();
+ assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
+
+ db.leave_room(ConnectionId { owner_id, id: 1 })
+ .await
+ .unwrap();
+ assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);
+}
#[test]
fn test_fuzzy_like_string() {
@@ -878,7 +877,9 @@ async fn test_invite_codes() {
assert!(db.has_contact(user5, user1).await.unwrap());
}
-test_both_dbs!(test_channels_postgres, test_channels_sqlite, db, {
+test_both_dbs!(test_channels, test_channels_postgres, test_channels_sqlite);
+
+async fn test_channels(db: &Arc<Database>) {
let a_id = db
.create_user(
"user1@example.com",
@@ -1063,267 +1064,270 @@ test_both_dbs!(test_channels_postgres, test_channels_sqlite, db, {
assert!(db.get_channel(rust_id, a_id).await.unwrap().is_none());
assert!(db.get_channel(cargo_id, a_id).await.unwrap().is_none());
assert!(db.get_channel(cargo_ra_id, a_id).await.unwrap().is_none());
-});
+}
test_both_dbs!(
+ test_joining_channels,
test_joining_channels_postgres,
- test_joining_channels_sqlite,
- db,
- {
- let owner_id = db.create_server("test").await.unwrap().0 as u32;
+ test_joining_channels_sqlite
+);
- let user_1 = db
- .create_user(
- "user1@example.com",
- false,
- NewUserParams {
- github_login: "user1".into(),
- github_user_id: 5,
- invite_count: 0,
- },
- )
- .await
- .unwrap()
- .user_id;
- let user_2 = db
- .create_user(
- "user2@example.com",
- false,
- NewUserParams {
- github_login: "user2".into(),
- github_user_id: 6,
- invite_count: 0,
- },
- )
- .await
- .unwrap()
- .user_id;
+async fn test_joining_channels(db: &Arc<Database>) {
+ let owner_id = db.create_server("test").await.unwrap().0 as u32;
- let channel_1 = db
- .create_root_channel("channel_1", "1", user_1)
- .await
- .unwrap();
- let room_1 = db.room_id_for_channel(channel_1).await.unwrap();
+ let user_1 = db
+ .create_user(
+ "user1@example.com",
+ false,
+ NewUserParams {
+ github_login: "user1".into(),
+ github_user_id: 5,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id;
+ let user_2 = db
+ .create_user(
+ "user2@example.com",
+ false,
+ NewUserParams {
+ github_login: "user2".into(),
+ github_user_id: 6,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id;
- // can join a room with membership to its channel
- let joined_room = db
- .join_room(room_1, user_1, ConnectionId { owner_id, id: 1 })
- .await
- .unwrap();
- assert_eq!(joined_room.room.participants.len(), 1);
+ let channel_1 = db
+ .create_root_channel("channel_1", "1", user_1)
+ .await
+ .unwrap();
+ let room_1 = db.room_id_for_channel(channel_1).await.unwrap();
- drop(joined_room);
- // cannot join a room without membership to its channel
- assert!(db
- .join_room(room_1, user_2, ConnectionId { owner_id, id: 1 })
- .await
- .is_err());
- }
-);
+ // can join a room with membership to its channel
+ let joined_room = db
+ .join_room(room_1, user_1, ConnectionId { owner_id, id: 1 })
+ .await
+ .unwrap();
+ assert_eq!(joined_room.room.participants.len(), 1);
+
+ drop(joined_room);
+ // cannot join a room without membership to its channel
+ assert!(db
+ .join_room(room_1, user_2, ConnectionId { owner_id, id: 1 })
+ .await
+ .is_err());
+}
test_both_dbs!(
+ test_channel_invites,
test_channel_invites_postgres,
- test_channel_invites_sqlite,
- db,
- {
- db.create_server("test").await.unwrap();
+ test_channel_invites_sqlite
+);
- let user_1 = db
- .create_user(
- "user1@example.com",
- false,
- NewUserParams {
- github_login: "user1".into(),
- github_user_id: 5,
- invite_count: 0,
- },
- )
- .await
- .unwrap()
- .user_id;
- let user_2 = db
- .create_user(
- "user2@example.com",
- false,
- NewUserParams {
- github_login: "user2".into(),
- github_user_id: 6,
- invite_count: 0,
- },
- )
- .await
- .unwrap()
- .user_id;
+async fn test_channel_invites(db: &Arc<Database>) {
+ db.create_server("test").await.unwrap();
- let user_3 = db
- .create_user(
- "user3@example.com",
- false,
- NewUserParams {
- github_login: "user3".into(),
- github_user_id: 7,
- invite_count: 0,
- },
- )
- .await
- .unwrap()
- .user_id;
+ let user_1 = db
+ .create_user(
+ "user1@example.com",
+ false,
+ NewUserParams {
+ github_login: "user1".into(),
+ github_user_id: 5,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id;
+ let user_2 = db
+ .create_user(
+ "user2@example.com",
+ false,
+ NewUserParams {
+ github_login: "user2".into(),
+ github_user_id: 6,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id;
- let channel_1_1 = db
- .create_root_channel("channel_1", "1", user_1)
- .await
- .unwrap();
+ let user_3 = db
+ .create_user(
+ "user3@example.com",
+ false,
+ NewUserParams {
+ github_login: "user3".into(),
+ github_user_id: 7,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id;
- let channel_1_2 = db
- .create_root_channel("channel_2", "2", user_1)
- .await
- .unwrap();
+ let channel_1_1 = db
+ .create_root_channel("channel_1", "1", user_1)
+ .await
+ .unwrap();
- db.invite_channel_member(channel_1_1, user_2, user_1, false)
- .await
- .unwrap();
- db.invite_channel_member(channel_1_2, user_2, user_1, false)
- .await
- .unwrap();
- db.invite_channel_member(channel_1_1, user_3, user_1, true)
- .await
- .unwrap();
+ let channel_1_2 = db
+ .create_root_channel("channel_2", "2", user_1)
+ .await
+ .unwrap();
- let user_2_invites = db
- .get_channel_invites_for_user(user_2) // -> [channel_1_1, channel_1_2]
- .await
- .unwrap()
- .into_iter()
- .map(|channel| channel.id)
- .collect::<Vec<_>>();
+ db.invite_channel_member(channel_1_1, user_2, user_1, false)
+ .await
+ .unwrap();
+ db.invite_channel_member(channel_1_2, user_2, user_1, false)
+ .await
+ .unwrap();
+ db.invite_channel_member(channel_1_1, user_3, user_1, true)
+ .await
+ .unwrap();
- assert_eq!(user_2_invites, &[channel_1_1, channel_1_2]);
+ let user_2_invites = db
+ .get_channel_invites_for_user(user_2) // -> [channel_1_1, channel_1_2]
+ .await
+ .unwrap()
+ .into_iter()
+ .map(|channel| channel.id)
+ .collect::<Vec<_>>();
- let user_3_invites = db
- .get_channel_invites_for_user(user_3) // -> [channel_1_1]
- .await
- .unwrap()
- .into_iter()
- .map(|channel| channel.id)
- .collect::<Vec<_>>();
+ assert_eq!(user_2_invites, &[channel_1_1, channel_1_2]);
- assert_eq!(user_3_invites, &[channel_1_1]);
+ let user_3_invites = db
+ .get_channel_invites_for_user(user_3) // -> [channel_1_1]
+ .await
+ .unwrap()
+ .into_iter()
+ .map(|channel| channel.id)
+ .collect::<Vec<_>>();
- let members = db
- .get_channel_member_details(channel_1_1, user_1)
- .await
- .unwrap();
- assert_eq!(
- members,
- &[
- proto::ChannelMember {
- user_id: user_1.to_proto(),
- kind: proto::channel_member::Kind::Member.into(),
- admin: true,
- },
- proto::ChannelMember {
- user_id: user_2.to_proto(),
- kind: proto::channel_member::Kind::Invitee.into(),
- admin: false,
- },
- proto::ChannelMember {
- user_id: user_3.to_proto(),
- kind: proto::channel_member::Kind::Invitee.into(),
- admin: true,
- },
- ]
- );
+ assert_eq!(user_3_invites, &[channel_1_1]);
- db.respond_to_channel_invite(channel_1_1, user_2, true)
- .await
- .unwrap();
+ let members = db
+ .get_channel_member_details(channel_1_1, user_1)
+ .await
+ .unwrap();
+ assert_eq!(
+ members,
+ &[
+ proto::ChannelMember {
+ user_id: user_1.to_proto(),
+ kind: proto::channel_member::Kind::Member.into(),
+ admin: true,
+ },
+ proto::ChannelMember {
+ user_id: user_2.to_proto(),
+ kind: proto::channel_member::Kind::Invitee.into(),
+ admin: false,
+ },
+ proto::ChannelMember {
+ user_id: user_3.to_proto(),
+ kind: proto::channel_member::Kind::Invitee.into(),
+ admin: true,
+ },
+ ]
+ );
- let channel_1_3 = db
- .create_channel("channel_3", Some(channel_1_1), "1", user_1)
- .await
- .unwrap();
+ db.respond_to_channel_invite(channel_1_1, user_2, true)
+ .await
+ .unwrap();
- let members = db
- .get_channel_member_details(channel_1_3, user_1)
- .await
- .unwrap();
- assert_eq!(
- members,
- &[
- proto::ChannelMember {
- user_id: user_1.to_proto(),
- kind: proto::channel_member::Kind::Member.into(),
- admin: true,
- },
- proto::ChannelMember {
- user_id: user_2.to_proto(),
- kind: proto::channel_member::Kind::AncestorMember.into(),
- admin: false,
- },
- ]
- );
- }
-);
+ let channel_1_3 = db
+ .create_channel("channel_3", Some(channel_1_1), "1", user_1)
+ .await
+ .unwrap();
+
+ let members = db
+ .get_channel_member_details(channel_1_3, user_1)
+ .await
+ .unwrap();
+ assert_eq!(
+ members,
+ &[
+ proto::ChannelMember {
+ user_id: user_1.to_proto(),
+ kind: proto::channel_member::Kind::Member.into(),
+ admin: true,
+ },
+ proto::ChannelMember {
+ user_id: user_2.to_proto(),
+ kind: proto::channel_member::Kind::AncestorMember.into(),
+ admin: false,
+ },
+ ]
+ );
+}
test_both_dbs!(
+ test_channel_renames,
test_channel_renames_postgres,
- test_channel_renames_sqlite,
- db,
- {
- db.create_server("test").await.unwrap();
+ test_channel_renames_sqlite
+);
- let user_1 = db
- .create_user(
- "user1@example.com",
- false,
- NewUserParams {
- github_login: "user1".into(),
- github_user_id: 5,
- invite_count: 0,
- },
- )
- .await
- .unwrap()
- .user_id;
+async fn test_channel_renames(db: &Arc<Database>) {
+ db.create_server("test").await.unwrap();
- let user_2 = db
- .create_user(
- "user2@example.com",
- false,
- NewUserParams {
- github_login: "user2".into(),
- github_user_id: 6,
- invite_count: 0,
- },
- )
- .await
- .unwrap()
- .user_id;
+ let user_1 = db
+ .create_user(
+ "user1@example.com",
+ false,
+ NewUserParams {
+ github_login: "user1".into(),
+ github_user_id: 5,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id;
- let zed_id = db.create_root_channel("zed", "1", user_1).await.unwrap();
+ let user_2 = db
+ .create_user(
+ "user2@example.com",
+ false,
+ NewUserParams {
+ github_login: "user2".into(),
+ github_user_id: 6,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id;
- db.rename_channel(zed_id, user_1, "#zed-archive")
- .await
- .unwrap();
+ let zed_id = db.create_root_channel("zed", "1", user_1).await.unwrap();
- let zed_archive_id = zed_id;
+ db.rename_channel(zed_id, user_1, "#zed-archive")
+ .await
+ .unwrap();
- let (channel, _) = db
- .get_channel(zed_archive_id, user_1)
- .await
- .unwrap()
- .unwrap();
- assert_eq!(channel.name, "zed-archive");
+ let zed_archive_id = zed_id;
- let non_permissioned_rename = db
- .rename_channel(zed_archive_id, user_2, "hacked-lol")
- .await;
- assert!(non_permissioned_rename.is_err());
+ let (channel, _) = db
+ .get_channel(zed_archive_id, user_1)
+ .await
+ .unwrap()
+ .unwrap();
+ assert_eq!(channel.name, "zed-archive");
- let bad_name_rename = db.rename_channel(zed_id, user_1, "#").await;
- assert!(bad_name_rename.is_err())
- }
-);
+ let non_permissioned_rename = db
+ .rename_channel(zed_archive_id, user_2, "hacked-lol")
+ .await;
+ assert!(non_permissioned_rename.is_err());
+
+ let bad_name_rename = db.rename_channel(zed_id, user_1, "#").await;
+ assert!(bad_name_rename.is_err())
+}
#[gpui::test]
async fn test_multiple_signup_overwrite() {
@@ -0,0 +1,60 @@
+use crate::{
+ db::{Database, NewUserParams},
+ test_both_dbs,
+};
+use std::sync::Arc;
+
+test_both_dbs!(
+ test_get_user_flags,
+ test_get_user_flags_postgres,
+ test_get_user_flags_sqlite
+);
+
+async fn test_get_user_flags(db: &Arc<Database>) {
+    let user_1 = db
+        .create_user(
+            "user1@example.com",
+            false,
+            NewUserParams {
+                github_login: "user1".into(),
+                github_user_id: 1,
+                invite_count: 0,
+            },
+        )
+        .await
+        .unwrap()
+        .user_id;
+
+    let user_2 = db
+        .create_user(
+            "user2@example.com",
+            false,
+            NewUserParams {
+                github_login: "user2".into(),
+                github_user_id: 2,
+                invite_count: 0,
+            },
+        )
+        .await
+        .unwrap()
+        .user_id;
+
+    const CHANNELS_ALPHA: &str = "channels-alpha";
+    const NEW_SEARCH: &str = "new-search";
+
+ let channels_flag = db.create_user_flag(CHANNELS_ALPHA).await.unwrap();
+ let search_flag = db.create_user_flag(NEW_SEARCH).await.unwrap();
+
+ db.add_user_flag(user_1, channels_flag).await.unwrap();
+ db.add_user_flag(user_1, search_flag).await.unwrap();
+
+ db.add_user_flag(user_2, channels_flag).await.unwrap();
+
+ let mut user_1_flags = db.get_user_flags(user_1).await.unwrap();
+ user_1_flags.sort();
+ assert_eq!(user_1_flags, &[CHANNELS_ALPHA, NEW_SEARCH]);
+
+ let mut user_2_flags = db.get_user_flags(user_2).await.unwrap();
+ user_2_flags.sort();
+ assert_eq!(user_2_flags, &[CHANNELS_ALPHA]);
+}
@@ -35,8 +35,8 @@ use lazy_static::lazy_static;
use prometheus::{register_int_gauge, IntGauge};
use rpc::{
proto::{
- self, AnyTypedEnvelope, EntityMessage, EnvelopedMessage, LiveKitConnectionInfo,
- RequestMessage,
+ self, Ack, AddChannelBufferCollaborator, AnyTypedEnvelope, EntityMessage, EnvelopedMessage,
+ LiveKitConnectionInfo, RequestMessage,
},
Connection, ConnectionId, Peer, Receipt, TypedEnvelope,
};
@@ -248,6 +248,9 @@ impl Server {
.add_request_handler(remove_channel_member)
.add_request_handler(set_channel_member_admin)
.add_request_handler(rename_channel)
+ .add_request_handler(join_channel_buffer)
+ .add_request_handler(leave_channel_buffer)
+ .add_message_handler(update_channel_buffer)
.add_request_handler(get_channel_members)
.add_request_handler(respond_to_channel_invite)
.add_request_handler(join_channel)
@@ -851,6 +854,10 @@ async fn connection_lost(
.await
.trace_err();
+ leave_channel_buffers_for_session(&session)
+ .await
+ .trace_err();
+
futures::select_biased! {
_ = executor.sleep(RECONNECT_TIMEOUT).fuse() => {
leave_room_for_session(&session).await.trace_err();
@@ -866,6 +873,8 @@ async fn connection_lost(
}
}
update_user_contacts(session.user_id, &session).await?;
+
+
}
_ = teardown.changed().fuse() => {}
}
@@ -2478,6 +2487,104 @@ async fn join_channel(
Ok(())
}
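+/// Handle a request to open a channel's shared buffer: record the new collaborator in the
+/// database, reply with the current buffer state, and notify the existing collaborators.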
+async fn join_channel_buffer(
+ request: proto::JoinChannelBuffer,
+ response: Response<proto::JoinChannelBuffer>,
+ session: Session,
+) -> Result<()> {
+ let db = session.db().await;
+ let channel_id = ChannelId::from_proto(request.channel_id);
+
+ let open_response = db
+ .join_channel_buffer(channel_id, session.user_id, session.connection_id)
+ .await?;
+
+ let replica_id = open_response.replica_id;
+ let collaborators = open_response.collaborators.clone();
+
+ response.send(open_response)?;
+
+ let update = AddChannelBufferCollaborator {
+ channel_id: channel_id.to_proto(),
+ collaborator: Some(proto::Collaborator {
+ user_id: session.user_id.to_proto(),
+ peer_id: Some(session.connection_id.into()),
+ replica_id,
+ }),
+ };
+ channel_buffer_updated(
+ session.connection_id,
+ collaborators
+ .iter()
+ .filter_map(|collaborator| Some(collaborator.peer_id?.into())),
+ &update,
+ &session.peer,
+ );
+
+ Ok(())
+}
+
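+/// Persist buffer operations sent by a client and relay them to the channel's other collaborators.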
+async fn update_channel_buffer(
+ request: proto::UpdateChannelBuffer,
+ session: Session,
+) -> Result<()> {
+ let db = session.db().await;
+ let channel_id = ChannelId::from_proto(request.channel_id);
+
+ let collaborators = db
+ .update_channel_buffer(channel_id, session.user_id, &request.operations)
+ .await?;
+
+ channel_buffer_updated(
+ session.connection_id,
+ collaborators,
+ &proto::UpdateChannelBuffer {
+ channel_id: channel_id.to_proto(),
+ operations: request.operations,
+ },
+ &session.peer,
+ );
+ Ok(())
+}
+
+async fn leave_channel_buffer(
+ request: proto::LeaveChannelBuffer,
+ response: Response<proto::LeaveChannelBuffer>,
+ session: Session,
+) -> Result<()> {
+ let db = session.db().await;
+ let channel_id = ChannelId::from_proto(request.channel_id);
+
+ let collaborators_to_notify = db
+ .leave_channel_buffer(channel_id, session.connection_id)
+ .await?;
+
+ response.send(Ack {})?;
+
+ channel_buffer_updated(
+ session.connection_id,
+ collaborators_to_notify,
+ &proto::RemoveChannelBufferCollaborator {
+ channel_id: channel_id.to_proto(),
+ peer_id: Some(session.connection_id.into()),
+ },
+ &session.peer,
+ );
+
+ Ok(())
+}
+
+fn channel_buffer_updated<T: EnvelopedMessage>(
+ sender_id: ConnectionId,
+ collaborators: impl IntoIterator<Item = ConnectionId>,
+ message: &T,
+ peer: &Peer,
+) {
+ broadcast(Some(sender_id), collaborators.into_iter(), |peer_id| {
+ peer.send(peer_id.into(), message.clone())
+ });
+}
+
async fn update_diff_base(request: proto::UpdateDiffBase, session: Session) -> Result<()> {
let project_id = ProjectId::from_proto(request.project_id);
let project_connection_ids = session
@@ -2502,20 +2609,19 @@ async fn get_private_user_info(
response: Response<proto::GetPrivateUserInfo>,
session: Session,
) -> Result<()> {
- let metrics_id = session
- .db()
- .await
- .get_user_metrics_id(session.user_id)
- .await?;
- let user = session
- .db()
- .await
+ let db = session.db().await;
+
+ let metrics_id = db.get_user_metrics_id(session.user_id).await?;
+ let user = db
.get_user_by_id(session.user_id)
.await?
.ok_or_else(|| anyhow!("user not found"))?;
+ let flags = db.get_user_flags(session.user_id).await?;
+
response.send(proto::GetPrivateUserInfoResponse {
metrics_id,
staff: user.admin,
+ flags,
})?;
Ok(())
}
@@ -2803,6 +2909,28 @@ async fn leave_room_for_session(session: &Session) -> Result<()> {
Ok(())
}
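+/// Drop this connection from every channel buffer it had open, notifying the remaining
+/// collaborators in each buffer that the peer has left.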
+async fn leave_channel_buffers_for_session(session: &Session) -> Result<()> {
+ let left_channel_buffers = session
+ .db()
+ .await
+ .leave_channel_buffers(session.connection_id)
+ .await?;
+
+ for (channel_id, connections) in left_channel_buffers {
+ channel_buffer_updated(
+ session.connection_id,
+ connections,
+ &proto::RemoveChannelBufferCollaborator {
+ channel_id: channel_id.to_proto(),
+ peer_id: Some(session.connection_id.into()),
+ },
+ &session.peer,
+ );
+ }
+
+ Ok(())
+}
+
fn project_left(project: &db::LeftProject, session: &Session) {
for connection_id in &project.connection_ids {
if project.host_user_id == session.user_id {
@@ -1,14 +1,14 @@
use crate::{
- db::{test_db::TestDb, NewUserParams, UserId},
+ db::{tests::TestDb, NewUserParams, UserId},
executor::Executor,
rpc::{Server, CLEANUP_TIMEOUT},
AppState,
};
use anyhow::anyhow;
use call::{ActiveCall, Room};
+use channel::ChannelStore;
use client::{
- self, proto::PeerId, ChannelStore, Client, Connection, Credentials, EstablishConnectionError,
- UserStore,
+ self, proto::PeerId, Client, Connection, Credentials, EstablishConnectionError, UserStore,
};
use collections::{HashMap, HashSet};
use fs::FakeFs;
@@ -31,6 +31,7 @@ use std::{
use util::http::FakeHttpClient;
use workspace::Workspace;
+mod channel_buffer_tests;
mod channel_tests;
mod integration_tests;
mod randomized_integration_tests;
@@ -210,6 +211,7 @@ impl TestServer {
workspace::init(app_state.clone(), cx);
audio::init((), cx);
call::init(client.clone(), user_store.clone(), cx);
+ channel::init(&client);
});
client
@@ -0,0 +1,426 @@
+use crate::{rpc::RECONNECT_TIMEOUT, tests::TestServer};
+use call::ActiveCall;
+use channel::Channel;
+use client::UserId;
+use collab_ui::channel_view::ChannelView;
+use collections::HashMap;
+use futures::future;
+use gpui::{executor::Deterministic, ModelHandle, TestAppContext};
+use rpc::{proto, RECEIVE_TIMEOUT};
+use serde_json::json;
+use std::sync::Arc;
+
+#[gpui::test]
+async fn test_core_channel_buffers(
+ deterministic: Arc<Deterministic>,
+ cx_a: &mut TestAppContext,
+ cx_b: &mut TestAppContext,
+) {
+ deterministic.forbid_parking();
+ let mut server = TestServer::start(&deterministic).await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
+
+ let zed_id = server
+ .make_channel("zed", (&client_a, cx_a), &mut [(&client_b, cx_b)])
+ .await;
+
+ // Client A joins the channel buffer
+ let channel_buffer_a = client_a
+ .channel_store()
+ .update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx))
+ .await
+ .unwrap();
+
+ // Client A edits the buffer
+ let buffer_a = channel_buffer_a.read_with(cx_a, |buffer, _| buffer.buffer());
+
+ buffer_a.update(cx_a, |buffer, cx| {
+ buffer.edit([(0..0, "hello world")], None, cx)
+ });
+ buffer_a.update(cx_a, |buffer, cx| {
+ buffer.edit([(5..5, ", cruel")], None, cx)
+ });
+ buffer_a.update(cx_a, |buffer, cx| {
+ buffer.edit([(0..5, "goodbye")], None, cx)
+ });
+ buffer_a.update(cx_a, |buffer, cx| buffer.undo(cx));
+ deterministic.run_until_parked();
+
+ assert_eq!(buffer_text(&buffer_a, cx_a), "hello, cruel world");
+
+ // Client B joins the channel buffer
+ let channel_buffer_b = client_b
+ .channel_store()
+ .update(cx_b, |channel, cx| channel.open_channel_buffer(zed_id, cx))
+ .await
+ .unwrap();
+
+ channel_buffer_b.read_with(cx_b, |buffer, _| {
+ assert_collaborators(
+ buffer.collaborators(),
+ &[client_a.user_id(), client_b.user_id()],
+ );
+ });
+
+ // Client B sees the correct text, and then edits it
+ let buffer_b = channel_buffer_b.read_with(cx_b, |buffer, _| buffer.buffer());
+ assert_eq!(
+ buffer_b.read_with(cx_b, |buffer, _| buffer.remote_id()),
+ buffer_a.read_with(cx_a, |buffer, _| buffer.remote_id())
+ );
+ assert_eq!(buffer_text(&buffer_b, cx_b), "hello, cruel world");
+ buffer_b.update(cx_b, |buffer, cx| {
+ buffer.edit([(7..12, "beautiful")], None, cx)
+ });
+
+ // Both A and B see the new edit
+ deterministic.run_until_parked();
+ assert_eq!(buffer_text(&buffer_a, cx_a), "hello, beautiful world");
+ assert_eq!(buffer_text(&buffer_b, cx_b), "hello, beautiful world");
+
+ // Client A closes the channel buffer.
+ cx_a.update(|_| drop(channel_buffer_a));
+ deterministic.run_until_parked();
+
+ // Client B sees that client A is gone from the channel buffer.
+ channel_buffer_b.read_with(cx_b, |buffer, _| {
+ assert_collaborators(&buffer.collaborators(), &[client_b.user_id()]);
+ });
+
+ // Client A rejoins the channel buffer
+ let _channel_buffer_a = client_a
+ .channel_store()
+ .update(cx_a, |channels, cx| {
+ channels.open_channel_buffer(zed_id, cx)
+ })
+ .await
+ .unwrap();
+ deterministic.run_until_parked();
+
+ // Sanity test, make sure we saw A rejoining
+ channel_buffer_b.read_with(cx_b, |buffer, _| {
+ assert_collaborators(
+ &buffer.collaborators(),
+ &[client_b.user_id(), client_a.user_id()],
+ );
+ });
+
+ // Client A loses connection.
+ server.forbid_connections();
+ server.disconnect_client(client_a.peer_id().unwrap());
+ deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
+
+ // Client B observes A disconnect
+ channel_buffer_b.read_with(cx_b, |buffer, _| {
+ assert_collaborators(&buffer.collaborators(), &[client_b.user_id()]);
+ });
+
+ // TODO:
+ // - Test synchronizing offline updates, what happens to A's channel buffer when A disconnects
+ // - Test interaction with channel deletion while buffer is open
+}
+
+#[gpui::test]
+async fn test_channel_buffer_replica_ids(
+ deterministic: Arc<Deterministic>,
+ cx_a: &mut TestAppContext,
+ cx_b: &mut TestAppContext,
+ cx_c: &mut TestAppContext,
+) {
+ deterministic.forbid_parking();
+ let mut server = TestServer::start(&deterministic).await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
+ let client_c = server.create_client(cx_c, "user_c").await;
+
+ let channel_id = server
+ .make_channel(
+ "zed",
+ (&client_a, cx_a),
+ &mut [(&client_b, cx_b), (&client_c, cx_c)],
+ )
+ .await;
+
+ let active_call_a = cx_a.read(ActiveCall::global);
+ let active_call_b = cx_b.read(ActiveCall::global);
+ let active_call_c = cx_c.read(ActiveCall::global);
+
+ // Clients A and B join a channel.
+ active_call_a
+ .update(cx_a, |call, cx| call.join_channel(channel_id, cx))
+ .await
+ .unwrap();
+ active_call_b
+ .update(cx_b, |call, cx| call.join_channel(channel_id, cx))
+ .await
+ .unwrap();
+
+ // Clients A, B, and C join a channel buffer
+ // C first so that the replica IDs in the project and the channel buffer are different
+ let channel_buffer_c = client_c
+ .channel_store()
+ .update(cx_c, |channel, cx| {
+ channel.open_channel_buffer(channel_id, cx)
+ })
+ .await
+ .unwrap();
+ let channel_buffer_b = client_b
+ .channel_store()
+ .update(cx_b, |channel, cx| {
+ channel.open_channel_buffer(channel_id, cx)
+ })
+ .await
+ .unwrap();
+ let channel_buffer_a = client_a
+ .channel_store()
+ .update(cx_a, |channel, cx| {
+ channel.open_channel_buffer(channel_id, cx)
+ })
+ .await
+ .unwrap();
+
+ // Client B shares a project
+ client_b
+ .fs()
+ .insert_tree("/dir", json!({ "file.txt": "contents" }))
+ .await;
+ let (project_b, _) = client_b.build_local_project("/dir", cx_b).await;
+ let shared_project_id = active_call_b
+ .update(cx_b, |call, cx| call.share_project(project_b.clone(), cx))
+ .await
+ .unwrap();
+
+ // Client A joins the project
+ let project_a = client_a.build_remote_project(shared_project_id, cx_a).await;
+ deterministic.run_until_parked();
+
+ // Client C is in a separate project.
+ client_c.fs().insert_tree("/dir", json!({})).await;
+ let (separate_project_c, _) = client_c.build_local_project("/dir", cx_c).await;
+
+ // Note that each user has a different replica id in the projects vs the
+ // channel buffer.
+ channel_buffer_a.read_with(cx_a, |channel_buffer, cx| {
+ assert_eq!(project_a.read(cx).replica_id(), 1);
+ assert_eq!(channel_buffer.buffer().read(cx).replica_id(), 2);
+ });
+ channel_buffer_b.read_with(cx_b, |channel_buffer, cx| {
+ assert_eq!(project_b.read(cx).replica_id(), 0);
+ assert_eq!(channel_buffer.buffer().read(cx).replica_id(), 1);
+ });
+ channel_buffer_c.read_with(cx_c, |channel_buffer, cx| {
+ // C is not in the project
+ assert_eq!(channel_buffer.buffer().read(cx).replica_id(), 0);
+ });
+
+ let channel_window_a =
+ cx_a.add_window(|cx| ChannelView::new(project_a.clone(), channel_buffer_a.clone(), cx));
+ let channel_window_b =
+ cx_b.add_window(|cx| ChannelView::new(project_b.clone(), channel_buffer_b.clone(), cx));
+ let channel_window_c = cx_c.add_window(|cx| {
+ ChannelView::new(separate_project_c.clone(), channel_buffer_c.clone(), cx)
+ });
+
+ let channel_view_a = channel_window_a.root(cx_a);
+ let channel_view_b = channel_window_b.root(cx_b);
+ let channel_view_c = channel_window_c.root(cx_c);
+
+ // For clients A and B, the replica ids in the channel buffer are mapped
+ // so that they match the same users' replica ids in their shared project.
+ channel_view_a.read_with(cx_a, |view, cx| {
+ assert_eq!(
+ view.editor.read(cx).replica_id_map().unwrap(),
+ &[(1, 0), (2, 1)].into_iter().collect::<HashMap<_, _>>()
+ );
+ });
+ channel_view_b.read_with(cx_b, |view, cx| {
+ assert_eq!(
+ view.editor.read(cx).replica_id_map().unwrap(),
+ &[(1, 0), (2, 1)].into_iter().collect::<HashMap<u16, u16>>(),
+ )
+ });
+
+ // Client C only sees themself, as they're not part of any shared project
+ channel_view_c.read_with(cx_c, |view, cx| {
+ assert_eq!(
+ view.editor.read(cx).replica_id_map().unwrap(),
+ &[(0, 0)].into_iter().collect::<HashMap<u16, u16>>(),
+ );
+ });
+
+ // Client C joins the project that clients A and B are in.
+ active_call_c
+ .update(cx_c, |call, cx| call.join_channel(channel_id, cx))
+ .await
+ .unwrap();
+ let project_c = client_c.build_remote_project(shared_project_id, cx_c).await;
+ deterministic.run_until_parked();
+ project_c.read_with(cx_c, |project, _| {
+ assert_eq!(project.replica_id(), 2);
+ });
+
+ // For clients A and B, client C's replica id in the channel buffer is
+ // now mapped to their replica id in the shared project.
+ channel_view_a.read_with(cx_a, |view, cx| {
+ assert_eq!(
+ view.editor.read(cx).replica_id_map().unwrap(),
+ &[(1, 0), (2, 1), (0, 2)]
+ .into_iter()
+ .collect::<HashMap<_, _>>()
+ );
+ });
+ channel_view_b.read_with(cx_b, |view, cx| {
+ assert_eq!(
+ view.editor.read(cx).replica_id_map().unwrap(),
+ &[(1, 0), (2, 1), (0, 2)]
+ .into_iter()
+ .collect::<HashMap<_, _>>(),
+ )
+ });
+}
+
+#[gpui::test]
+async fn test_reopen_channel_buffer(deterministic: Arc<Deterministic>, cx_a: &mut TestAppContext) {
+ deterministic.forbid_parking();
+ let mut server = TestServer::start(&deterministic).await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+
+ let zed_id = server.make_channel("zed", (&client_a, cx_a), &mut []).await;
+
+ let channel_buffer_1 = client_a
+ .channel_store()
+ .update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx));
+ let channel_buffer_2 = client_a
+ .channel_store()
+ .update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx));
+ let channel_buffer_3 = client_a
+ .channel_store()
+ .update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx));
+
+ // All concurrent tasks for opening a channel buffer return the same model handle.
+ let (channel_buffer_1, channel_buffer_2, channel_buffer_3) =
+ future::try_join3(channel_buffer_1, channel_buffer_2, channel_buffer_3)
+ .await
+ .unwrap();
+ let model_id = channel_buffer_1.id();
+ assert_eq!(channel_buffer_1, channel_buffer_2);
+ assert_eq!(channel_buffer_1, channel_buffer_3);
+
+ channel_buffer_1.update(cx_a, |buffer, cx| {
+ buffer.buffer().update(cx, |buffer, cx| {
+ buffer.edit([(0..0, "hello")], None, cx);
+ })
+ });
+ deterministic.run_until_parked();
+
+ cx_a.update(|_| {
+ drop(channel_buffer_1);
+ drop(channel_buffer_2);
+ drop(channel_buffer_3);
+ });
+ deterministic.run_until_parked();
+
+ // The channel buffer can be reopened after dropping it.
+ let channel_buffer = client_a
+ .channel_store()
+ .update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx))
+ .await
+ .unwrap();
+ assert_ne!(channel_buffer.id(), model_id);
+ channel_buffer.update(cx_a, |buffer, cx| {
+ buffer.buffer().update(cx, |buffer, _| {
+ assert_eq!(buffer.text(), "hello");
+ })
+ });
+}
+
+#[gpui::test]
+async fn test_channel_buffer_disconnect(
+ deterministic: Arc<Deterministic>,
+ cx_a: &mut TestAppContext,
+ cx_b: &mut TestAppContext,
+) {
+ deterministic.forbid_parking();
+ let mut server = TestServer::start(&deterministic).await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
+
+ let channel_id = server
+ .make_channel("zed", (&client_a, cx_a), &mut [(&client_b, cx_b)])
+ .await;
+
+ let channel_buffer_a = client_a
+ .channel_store()
+ .update(cx_a, |channel, cx| {
+ channel.open_channel_buffer(channel_id, cx)
+ })
+ .await
+ .unwrap();
+
+ let channel_buffer_b = client_b
+ .channel_store()
+ .update(cx_b, |channel, cx| {
+ channel.open_channel_buffer(channel_id, cx)
+ })
+ .await
+ .unwrap();
+
+ server.forbid_connections();
+ server.disconnect_client(client_a.peer_id().unwrap());
+ deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
+
+ channel_buffer_a.update(cx_a, |buffer, _| {
+ assert_eq!(
+ buffer.channel().as_ref(),
+ &Channel {
+ id: channel_id,
+ name: "zed".to_string()
+ }
+ );
+ assert!(!buffer.is_connected());
+ });
+
+ deterministic.run_until_parked();
+
+ server.allow_connections();
+ deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
+
+ deterministic.run_until_parked();
+
+ client_a
+ .channel_store()
+ .update(cx_a, |channel_store, _| {
+ channel_store.remove_channel(channel_id)
+ })
+ .await
+ .unwrap();
+ deterministic.run_until_parked();
+
+ // Channel buffer observed the deletion
+ channel_buffer_b.update(cx_b, |buffer, _| {
+ assert_eq!(
+ buffer.channel().as_ref(),
+ &Channel {
+ id: channel_id,
+ name: "zed".to_string()
+ }
+ );
+ assert!(!buffer.is_connected());
+ });
+}
+
+#[track_caller]
+fn assert_collaborators(collaborators: &[proto::Collaborator], ids: &[Option<UserId>]) {
+ assert_eq!(
+ collaborators
+ .into_iter()
+ .map(|collaborator| collaborator.user_id)
+ .collect::<Vec<_>>(),
+ ids.into_iter().map(|id| id.unwrap()).collect::<Vec<_>>()
+ );
+}
+
+fn buffer_text(channel_buffer: &ModelHandle<language::Buffer>, cx: &mut TestAppContext) -> String {
+ channel_buffer.read_with(cx, |buffer, _| buffer.text())
+}
@@ -3,7 +3,8 @@ use crate::{
tests::{room_participants, RoomParticipants, TestServer},
};
use call::ActiveCall;
-use client::{ChannelId, ChannelMembership, ChannelStore, User};
+use channel::{ChannelId, ChannelMembership, ChannelStore};
+use client::User;
use gpui::{executor::Deterministic, ModelHandle, TestAppContext};
use rpc::{proto, RECEIVE_TIMEOUT};
use std::sync::Arc;
@@ -798,7 +799,7 @@ async fn test_lost_channel_creation(
deterministic.run_until_parked();
- // Sanity check
+ // Sanity check, B has the invitation
assert_channel_invitations(
client_b.channel_store(),
cx_b,
@@ -810,6 +811,7 @@ async fn test_lost_channel_creation(
}],
);
+ // A creates a subchannel while the invite is still pending.
let subchannel_id = client_a
.channel_store()
.update(cx_a, |channel_store, cx| {
@@ -840,7 +842,7 @@ async fn test_lost_channel_creation(
],
);
- // Accept the invite
+ // Client B accepts the invite
client_b
.channel_store()
.update(cx_b, |channel_store, _| {
@@ -851,7 +853,7 @@ async fn test_lost_channel_creation(
deterministic.run_until_parked();
- // B should now see the channel
+ // Client B should now see the channel
assert_channels(
client_b.channel_store(),
cx_b,
@@ -4,7 +4,7 @@ use crate::{
};
use call::{room, ActiveCall, ParticipantLocation, Room};
use client::{User, RECEIVE_TIMEOUT};
-use collections::HashSet;
+use collections::{HashMap, HashSet};
use editor::{
test::editor_test_context::EditorTestContext, ConfirmCodeAction, ConfirmCompletion,
ConfirmRename, Editor, ExcerptRange, MultiBuffer, Redo, Rename, ToggleCodeActions, Undo,
@@ -4821,15 +4821,16 @@ async fn test_project_search(
let project_b = client_b.build_remote_project(project_id, cx_b).await;
// Perform a search as the guest.
- let results = project_b
- .update(cx_b, |project, cx| {
- project.search(
- SearchQuery::text("world", false, false, Vec::new(), Vec::new()),
- cx,
- )
- })
- .await
- .unwrap();
+ let mut results = HashMap::default();
+ let mut search_rx = project_b.update(cx_b, |project, cx| {
+ project.search(
+ SearchQuery::text("world", false, false, Vec::new(), Vec::new()),
+ cx,
+ )
+ });
+ while let Some((buffer, ranges)) = search_rx.next().await {
+ results.entry(buffer).or_insert(ranges);
+ }
let mut ranges_by_path = results
.into_iter()
@@ -5320,7 +5321,7 @@ async fn test_collaborating_with_code_actions(
.unwrap();
let mut fake_language_server = fake_language_servers.next().await.unwrap();
- fake_language_server
+ let mut requests = fake_language_server
.handle_request::<lsp::request::CodeActionRequest, _, _>(|params, _| async move {
assert_eq!(
params.text_document.uri,
@@ -5329,9 +5330,9 @@ async fn test_collaborating_with_code_actions(
assert_eq!(params.range.start, lsp::Position::new(0, 0));
assert_eq!(params.range.end, lsp::Position::new(0, 0));
Ok(None)
- })
- .next()
- .await;
+ });
+ deterministic.advance_clock(editor::CODE_ACTIONS_DEBOUNCE_TIMEOUT * 2);
+ requests.next().await;
// Move cursor to a location that contains code actions.
editor_b.update(cx_b, |editor, cx| {
@@ -5341,7 +5342,7 @@ async fn test_collaborating_with_code_actions(
cx.focus(&editor_b);
});
- fake_language_server
+ let mut requests = fake_language_server
.handle_request::<lsp::request::CodeActionRequest, _, _>(|params, _| async move {
assert_eq!(
params.text_document.uri,
@@ -5393,9 +5394,9 @@ async fn test_collaborating_with_code_actions(
..Default::default()
},
)]))
- })
- .next()
- .await;
+ });
+ deterministic.advance_clock(editor::CODE_ACTIONS_DEBOUNCE_TIMEOUT * 2);
+ requests.next().await;
// Toggle code actions and wait for them to display.
editor_b.update(cx_b, |editor, cx| {
@@ -7863,6 +7864,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
client_a.language_registry().add(Arc::clone(&language));
client_b.language_registry().add(language);
+ // Client A opens a project.
client_a
.fs()
.insert_tree(
@@ -7883,6 +7885,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
.await
.unwrap();
+ // Client B joins the project
let project_b = client_b.build_remote_project(project_id, cx_b).await;
active_call_b
.update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
@@ -7892,6 +7895,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
cx_a.foreground().start_waiting();
+    // The host opens a Rust file.
let _buffer_a = project_a
.update(cx_a, |project, cx| {
project.open_local_buffer("/a/main.rs", cx)
@@ -7899,7 +7903,6 @@ async fn test_mutual_editor_inlay_hint_cache_update(
.await
.unwrap();
let fake_language_server = fake_language_servers.next().await.unwrap();
- let next_call_id = Arc::new(AtomicU32::new(0));
let editor_a = workspace_a
.update(cx_a, |workspace, cx| {
workspace.open_path((worktree_id, "main.rs"), None, true, cx)
@@ -7908,6 +7911,9 @@ async fn test_mutual_editor_inlay_hint_cache_update(
.unwrap()
.downcast::<Editor>()
.unwrap();
+
+ // Set up the language server to return an additional inlay hint on each request.
+ let next_call_id = Arc::new(AtomicU32::new(0));
fake_language_server
.handle_request::<lsp::request::InlayHintRequest, _, _>(move |params, _| {
let task_next_call_id = Arc::clone(&next_call_id);
@@ -7916,33 +7922,28 @@ async fn test_mutual_editor_inlay_hint_cache_update(
params.text_document.uri,
lsp::Url::from_file_path("/a/main.rs").unwrap(),
);
- let mut current_call_id = Arc::clone(&task_next_call_id).fetch_add(1, SeqCst);
- let mut new_hints = Vec::with_capacity(current_call_id as usize);
- loop {
- new_hints.push(lsp::InlayHint {
- position: lsp::Position::new(0, current_call_id),
- label: lsp::InlayHintLabel::String(current_call_id.to_string()),
- kind: None,
- text_edits: None,
- tooltip: None,
- padding_left: None,
- padding_right: None,
- data: None,
- });
- if current_call_id == 0 {
- break;
- }
- current_call_id -= 1;
- }
- Ok(Some(new_hints))
+ let call_count = task_next_call_id.fetch_add(1, SeqCst);
+ Ok(Some(
+ (0..=call_count)
+ .map(|ix| lsp::InlayHint {
+ position: lsp::Position::new(0, ix),
+ label: lsp::InlayHintLabel::String(ix.to_string()),
+ kind: None,
+ text_edits: None,
+ tooltip: None,
+ padding_left: None,
+ padding_right: None,
+ data: None,
+ })
+ .collect(),
+ ))
}
})
.next()
.await
.unwrap();
- cx_a.foreground().finish_waiting();
- cx_a.foreground().run_until_parked();
+ deterministic.run_until_parked();
let mut edits_made = 1;
editor_a.update(cx_a, |editor, _| {
@@ -7968,7 +7969,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
.downcast::<Editor>()
.unwrap();
- cx_b.foreground().run_until_parked();
+ deterministic.run_until_parked();
editor_b.update(cx_b, |editor, _| {
assert_eq!(
vec!["0".to_string(), "1".to_string()],
@@ -7989,25 +7990,25 @@ async fn test_mutual_editor_inlay_hint_cache_update(
cx.focus(&editor_b);
edits_made += 1;
});
- cx_a.foreground().run_until_parked();
- cx_b.foreground().run_until_parked();
+
+ deterministic.run_until_parked();
editor_a.update(cx_a, |editor, _| {
assert_eq!(
- vec!["0".to_string(), "1".to_string(), "2".to_string()],
+ vec![
+ "0".to_string(),
+ "1".to_string(),
+ "2".to_string(),
+ "3".to_string()
+ ],
extract_hint_labels(editor),
- "Host should get hints from the 1st edit and 1st LSP query"
+ "Guest should get hints the 1st edit and 2nd LSP query"
);
let inlay_cache = editor.inlay_hint_cache();
assert_eq!(inlay_cache.version(), edits_made);
});
editor_b.update(cx_b, |editor, _| {
assert_eq!(
- vec![
- "0".to_string(),
- "1".to_string(),
- "2".to_string(),
- "3".to_string()
- ],
+ vec!["0".to_string(), "1".to_string(), "2".to_string(),],
extract_hint_labels(editor),
"Guest should get hints the 1st edit and 2nd LSP query"
);
@@ -8021,8 +8022,8 @@ async fn test_mutual_editor_inlay_hint_cache_update(
cx.focus(&editor_a);
edits_made += 1;
});
- cx_a.foreground().run_until_parked();
- cx_b.foreground().run_until_parked();
+
+ deterministic.run_until_parked();
editor_a.update(cx_a, |editor, _| {
assert_eq!(
vec![
@@ -8061,8 +8062,8 @@ async fn test_mutual_editor_inlay_hint_cache_update(
.await
.expect("inlay refresh request failed");
edits_made += 1;
- cx_a.foreground().run_until_parked();
- cx_b.foreground().run_until_parked();
+
+ deterministic.run_until_parked();
editor_a.update(cx_a, |editor, _| {
assert_eq!(
vec![
@@ -6,7 +6,7 @@ use crate::{
use anyhow::{anyhow, Result};
use call::ActiveCall;
use client::RECEIVE_TIMEOUT;
-use collections::BTreeMap;
+use collections::{BTreeMap, HashMap};
use editor::Bias;
use fs::{repository::GitFileStatus, FakeFs, Fs as _};
use futures::StreamExt as _;
@@ -121,7 +121,9 @@ async fn test_random_collaboration(
let mut operation_channels = Vec::new();
loop {
- let Some((next_operation, applied)) = plan.lock().next_server_operation(&clients) else { break };
+ let Some((next_operation, applied)) = plan.lock().next_server_operation(&clients) else {
+ break;
+ };
applied.store(true, SeqCst);
let did_apply = apply_server_operation(
deterministic.clone(),
@@ -224,7 +226,9 @@ async fn apply_server_operation(
let client_ix = clients
.iter()
.position(|(client, cx)| client.current_user_id(cx) == removed_user_id);
- let Some(client_ix) = client_ix else { return false };
+ let Some(client_ix) = client_ix else {
+ return false;
+ };
let user_connection_ids = server
.connection_pool
.lock()
@@ -718,7 +722,7 @@ async fn apply_client_operation(
if detach { "detaching" } else { "awaiting" }
);
- let search = project.update(cx, |project, cx| {
+ let mut search = project.update(cx, |project, cx| {
project.search(
SearchQuery::text(query, false, false, Vec::new(), Vec::new()),
cx,
@@ -726,15 +730,13 @@ async fn apply_client_operation(
});
drop(project);
let search = cx.background().spawn(async move {
- search
- .await
- .map_err(|err| anyhow!("search request failed: {:?}", err))
+ let mut results = HashMap::default();
+ while let Some((buffer, ranges)) = search.next().await {
+ results.entry(buffer).or_insert(ranges);
+ }
+ results
});
- if detach {
- cx.update(|cx| search.detach_and_log_err(cx));
- } else {
- search.await?;
- }
+ search.await;
}
ClientOperation::WriteFsEntry {
@@ -1591,10 +1593,11 @@ impl TestPlan {
81.. => match self.rng.gen_range(0..100_u32) {
// Add a worktree to a local project
0..=50 => {
- let Some(project) = client
- .local_projects()
- .choose(&mut self.rng)
- .cloned() else { continue };
+ let Some(project) =
+ client.local_projects().choose(&mut self.rng).cloned()
+ else {
+ continue;
+ };
let project_root_name = root_name_for_project(&project, cx);
let mut paths = client.fs().paths(false);
paths.remove(0);
@@ -1611,7 +1614,9 @@ impl TestPlan {
// Add an entry to a worktree
_ => {
- let Some(project) = choose_random_project(client, &mut self.rng) else { continue };
+ let Some(project) = choose_random_project(client, &mut self.rng) else {
+ continue;
+ };
let project_root_name = root_name_for_project(&project, cx);
let is_local = project.read_with(cx, |project, _| project.is_local());
let worktree = project.read_with(cx, |project, cx| {
@@ -1645,7 +1650,9 @@ impl TestPlan {
// Query and mutate buffers
60..=90 => {
- let Some(project) = choose_random_project(client, &mut self.rng) else { continue };
+ let Some(project) = choose_random_project(client, &mut self.rng) else {
+ continue;
+ };
let project_root_name = root_name_for_project(&project, cx);
let is_local = project.read_with(cx, |project, _| project.is_local());
@@ -1656,7 +1663,10 @@ impl TestPlan {
.buffers_for_project(&project)
.iter()
.choose(&mut self.rng)
- .cloned() else { continue };
+ .cloned()
+ else {
+ continue;
+ };
let full_path = buffer
.read_with(cx, |buffer, cx| buffer.file().unwrap().full_path(cx));
@@ -2026,7 +2036,10 @@ async fn simulate_client(
client.app_state.languages.add(Arc::new(language));
while let Some(batch_id) = operation_rx.next().await {
- let Some((operation, applied)) = plan.lock().next_client_operation(&client, batch_id, &cx) else { break };
+ let Some((operation, applied)) = plan.lock().next_client_operation(&client, batch_id, &cx)
+ else {
+ break;
+ };
applied.store(true, SeqCst);
match apply_client_operation(&client, operation, &mut cx).await {
Ok(()) => {}
@@ -26,6 +26,7 @@ auto_update = { path = "../auto_update" }
db = { path = "../db" }
call = { path = "../call" }
client = { path = "../client" }
+channel = { path = "../channel" }
clock = { path = "../clock" }
collections = { path = "../collections" }
context_menu = { path = "../context_menu" }
@@ -33,12 +34,13 @@ editor = { path = "../editor" }
feedback = { path = "../feedback" }
fuzzy = { path = "../fuzzy" }
gpui = { path = "../gpui" }
+language = { path = "../language" }
menu = { path = "../menu" }
picker = { path = "../picker" }
project = { path = "../project" }
recent_projects = {path = "../recent_projects"}
settings = { path = "../settings" }
-staff_mode = {path = "../staff_mode"}
+feature_flags = {path = "../feature_flags"}
theme = { path = "../theme" }
theme_selector = { path = "../theme_selector" }
vcs_menu = { path = "../vcs_menu" }
@@ -0,0 +1,355 @@
+use anyhow::{anyhow, Result};
+use channel::{
+ channel_buffer::{self, ChannelBuffer},
+ ChannelId,
+};
+use client::proto;
+use clock::ReplicaId;
+use collections::HashMap;
+use editor::Editor;
+use gpui::{
+ actions,
+ elements::{ChildView, Label},
+ geometry::vector::Vector2F,
+ AnyElement, AnyViewHandle, AppContext, Element, Entity, ModelHandle, Subscription, Task, View,
+ ViewContext, ViewHandle,
+};
+use project::Project;
+use std::any::Any;
+use workspace::{
+ item::{FollowableItem, Item, ItemHandle},
+ register_followable_item,
+ searchable::SearchableItemHandle,
+ ItemNavHistory, Pane, ViewId, Workspace, WorkspaceId,
+};
+
+actions!(channel_view, [Deploy]);
+
+pub(crate) fn init(cx: &mut AppContext) {
+ register_followable_item::<ChannelView>(cx)
+}
+
+pub struct ChannelView {
+ pub editor: ViewHandle<Editor>,
+ project: ModelHandle<Project>,
+ channel_buffer: ModelHandle<ChannelBuffer>,
+ remote_id: Option<ViewId>,
+ _editor_event_subscription: Subscription,
+}
+
+impl ChannelView {
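+    /// Return a `ChannelView` for the channel's notes buffer, reusing a view that is
+    /// already open in `pane` when one exists.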
+ pub fn open(
+ channel_id: ChannelId,
+ pane: ViewHandle<Pane>,
+ workspace: ViewHandle<Workspace>,
+ cx: &mut AppContext,
+ ) -> Task<Result<ViewHandle<Self>>> {
+ let workspace = workspace.read(cx);
+ let project = workspace.project().to_owned();
+ let channel_store = workspace.app_state().channel_store.clone();
+ let markdown = workspace
+ .app_state()
+ .languages
+ .language_for_name("Markdown");
+ let channel_buffer =
+ channel_store.update(cx, |store, cx| store.open_channel_buffer(channel_id, cx));
+
+ cx.spawn(|mut cx| async move {
+ let channel_buffer = channel_buffer.await?;
+ let markdown = markdown.await?;
+ channel_buffer.update(&mut cx, |buffer, cx| {
+ buffer.buffer().update(cx, |buffer, cx| {
+ buffer.set_language(Some(markdown), cx);
+ })
+ });
+
+ pane.update(&mut cx, |pane, cx| {
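+/// Broadcast a channel buffer message from `sender_id` to the given collaborators' connections.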
+ pane.items_of_type::<Self>()
+ .find(|channel_view| channel_view.read(cx).channel_buffer == channel_buffer)
+ .unwrap_or_else(|| cx.add_view(|cx| Self::new(project, channel_buffer, cx)))
+ })
+ .ok_or_else(|| anyhow!("pane was dropped"))
+ })
+ }
+
+ pub fn new(
+ project: ModelHandle<Project>,
+ channel_buffer: ModelHandle<ChannelBuffer>,
+ cx: &mut ViewContext<Self>,
+ ) -> Self {
+ let buffer = channel_buffer.read(cx).buffer();
+ // buffer.update(cx, |buffer, cx| buffer.set_language(language, cx));
+ let editor = cx.add_view(|cx| Editor::for_buffer(buffer, None, cx));
+ let _editor_event_subscription = cx.subscribe(&editor, |_, _, e, cx| cx.emit(e.clone()));
+
+ cx.subscribe(&project, Self::handle_project_event).detach();
+ cx.subscribe(&channel_buffer, Self::handle_channel_buffer_event)
+ .detach();
+
+ let this = Self {
+ editor,
+ project,
+ channel_buffer,
+ remote_id: None,
+ _editor_event_subscription,
+ };
+ this.refresh_replica_id_map(cx);
+ this
+ }
+
+ fn handle_project_event(
+ &mut self,
+ _: ModelHandle<Project>,
+ event: &project::Event,
+ cx: &mut ViewContext<Self>,
+ ) {
+ match event {
+ project::Event::RemoteIdChanged(_) => {}
+ project::Event::DisconnectedFromHost => {}
+ project::Event::Closed => {}
+ project::Event::CollaboratorUpdated { .. } => {}
+ project::Event::CollaboratorLeft(_) => {}
+ project::Event::CollaboratorJoined(_) => {}
+ _ => return,
+ }
+ self.refresh_replica_id_map(cx);
+ }
+
+ fn handle_channel_buffer_event(
+ &mut self,
+ _: ModelHandle<ChannelBuffer>,
+ event: &channel_buffer::Event,
+ cx: &mut ViewContext<Self>,
+ ) {
+ match event {
+ channel_buffer::Event::CollaboratorsChanged => {
+ self.refresh_replica_id_map(cx);
+ }
+ channel_buffer::Event::Disconnected => self.editor.update(cx, |editor, cx| {
+ editor.set_read_only(true);
+ cx.notify();
+ }),
+ }
+ }
+
+ /// Build a mapping of channel buffer replica ids to the corresponding
+ /// replica ids in the current project.
+ ///
+ /// Using this mapping, a given user can be displayed with the same color
+ /// in the channel buffer as in other files in the project. Users who are
+ /// in the channel buffer but not the project will not have a color.
+ fn refresh_replica_id_map(&self, cx: &mut ViewContext<Self>) {
+ let mut project_replica_ids_by_channel_buffer_replica_id = HashMap::default();
+ let project = self.project.read(cx);
+ let channel_buffer = self.channel_buffer.read(cx);
+ project_replica_ids_by_channel_buffer_replica_id
+ .insert(channel_buffer.replica_id(cx), project.replica_id());
+ project_replica_ids_by_channel_buffer_replica_id.extend(
+ channel_buffer
+ .collaborators()
+ .iter()
+ .filter_map(|channel_buffer_collaborator| {
+ project
+ .collaborators()
+ .values()
+ .find_map(|project_collaborator| {
+ (project_collaborator.user_id == channel_buffer_collaborator.user_id)
+ .then_some((
+ channel_buffer_collaborator.replica_id as ReplicaId,
+ project_collaborator.replica_id,
+ ))
+ })
+ }),
+ );
+
+ self.editor.update(cx, |editor, cx| {
+ editor.set_replica_id_map(Some(project_replica_ids_by_channel_buffer_replica_id), cx)
+ });
+ }
+}
+
+impl Entity for ChannelView {
+ type Event = editor::Event;
+}
+
+impl View for ChannelView {
+ fn ui_name() -> &'static str {
+ "ChannelView"
+ }
+
+ fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> {
+ ChildView::new(self.editor.as_any(), cx).into_any()
+ }
+
+ fn focus_in(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
+ if cx.is_self_focused() {
+ cx.focus(self.editor.as_any())
+ }
+ }
+}
+
+impl Item for ChannelView {
+ fn tab_content<V: 'static>(
+ &self,
+ _: Option<usize>,
+ style: &theme::Tab,
+ cx: &gpui::AppContext,
+ ) -> AnyElement<V> {
+ let channel_name = &self.channel_buffer.read(cx).channel().name;
+ let label = if self.channel_buffer.read(cx).is_connected() {
+ format!("#{}", channel_name)
+ } else {
+ format!("#{} (disconnected)", channel_name)
+ };
+ Label::new(label, style.label.to_owned()).into_any()
+ }
+
+ fn clone_on_split(&self, _: WorkspaceId, cx: &mut ViewContext<Self>) -> Option<Self> {
+ Some(Self::new(
+ self.project.clone(),
+ self.channel_buffer.clone(),
+ cx,
+ ))
+ }
+
+ fn is_singleton(&self, _cx: &AppContext) -> bool {
+ true
+ }
+
+ fn navigate(&mut self, data: Box<dyn Any>, cx: &mut ViewContext<Self>) -> bool {
+ self.editor
+ .update(cx, |editor, cx| editor.navigate(data, cx))
+ }
+
+ fn deactivated(&mut self, cx: &mut ViewContext<Self>) {
+ self.editor
+ .update(cx, |editor, cx| Item::deactivated(editor, cx))
+ }
+
+ fn set_nav_history(&mut self, history: ItemNavHistory, cx: &mut ViewContext<Self>) {
+ self.editor
+ .update(cx, |editor, cx| Item::set_nav_history(editor, history, cx))
+ }
+
+ fn as_searchable(&self, _: &ViewHandle<Self>) -> Option<Box<dyn SearchableItemHandle>> {
+ Some(Box::new(self.editor.clone()))
+ }
+
+ fn show_toolbar(&self) -> bool {
+ true
+ }
+
+ fn pixel_position_of_cursor(&self, cx: &AppContext) -> Option<Vector2F> {
+ self.editor.read(cx).pixel_position_of_cursor(cx)
+ }
+}
+
+impl FollowableItem for ChannelView {
+ fn remote_id(&self) -> Option<workspace::ViewId> {
+ self.remote_id
+ }
+
+ fn to_state_proto(&self, cx: &AppContext) -> Option<proto::view::Variant> {
+ let channel = self.channel_buffer.read(cx).channel();
+ Some(proto::view::Variant::ChannelView(
+ proto::view::ChannelView {
+ channel_id: channel.id,
+ editor: if let Some(proto::view::Variant::Editor(proto)) =
+ self.editor.read(cx).to_state_proto(cx)
+ {
+ Some(proto)
+ } else {
+ None
+ },
+ },
+ ))
+ }
+
+ fn from_state_proto(
+ pane: ViewHandle<workspace::Pane>,
+ workspace: ViewHandle<workspace::Workspace>,
+ remote_id: workspace::ViewId,
+ state: &mut Option<proto::view::Variant>,
+ cx: &mut AppContext,
+ ) -> Option<gpui::Task<anyhow::Result<ViewHandle<Self>>>> {
+ let Some(proto::view::Variant::ChannelView(_)) = state else {
+ return None;
+ };
+ let Some(proto::view::Variant::ChannelView(state)) = state.take() else {
+ unreachable!()
+ };
+
+ let open = ChannelView::open(state.channel_id, pane, workspace, cx);
+
+ Some(cx.spawn(|mut cx| async move {
+ let this = open.await?;
+
+ let task = this
+ .update(&mut cx, |this, cx| {
+ this.remote_id = Some(remote_id);
+
+ if let Some(state) = state.editor {
+ Some(this.editor.update(cx, |editor, cx| {
+ editor.apply_update_proto(
+ &this.project,
+ proto::update_view::Variant::Editor(proto::update_view::Editor {
+ selections: state.selections,
+ pending_selection: state.pending_selection,
+ scroll_top_anchor: state.scroll_top_anchor,
+ scroll_x: state.scroll_x,
+ scroll_y: state.scroll_y,
+ ..Default::default()
+ }),
+ cx,
+ )
+ }))
+ } else {
+ None
+ }
+ })
+ .ok_or_else(|| anyhow!("window was closed"))?;
+
+ if let Some(task) = task {
+ task.await?;
+ }
+
+ Ok(this)
+ }))
+ }
+
+ fn add_event_to_update_proto(
+ &self,
+ event: &Self::Event,
+ update: &mut Option<proto::update_view::Variant>,
+ cx: &AppContext,
+ ) -> bool {
+ self.editor
+ .read(cx)
+ .add_event_to_update_proto(event, update, cx)
+ }
+
+ fn apply_update_proto(
+ &mut self,
+ project: &ModelHandle<Project>,
+ message: proto::update_view::Variant,
+ cx: &mut ViewContext<Self>,
+ ) -> gpui::Task<anyhow::Result<()>> {
+ self.editor.update(cx, |editor, cx| {
+ editor.apply_update_proto(project, message, cx)
+ })
+ }
+
+ fn set_leader_replica_id(
+ &mut self,
+ leader_replica_id: Option<u16>,
+ cx: &mut ViewContext<Self>,
+ ) {
+ self.editor.update(cx, |editor, cx| {
+ editor.set_leader_replica_id(leader_replica_id, cx)
+ })
+ }
+
+ fn should_unfollow_on_event(event: &Self::Event, cx: &AppContext) -> bool {
+ Editor::should_unfollow_on_event(event, cx)
+ }
+}
@@ -4,28 +4,30 @@ mod panel_settings;
use anyhow::Result;
use call::ActiveCall;
-use client::{
- proto::PeerId, Channel, ChannelEvent, ChannelId, ChannelStore, Client, Contact, User, UserStore,
-};
-
+use channel::{Channel, ChannelEvent, ChannelId, ChannelStore};
+use client::{proto::PeerId, Client, Contact, User, UserStore};
use context_menu::{ContextMenu, ContextMenuItem};
use db::kvp::KEY_VALUE_STORE;
use editor::{Cancel, Editor};
+
+use feature_flags::{ChannelsAlpha, FeatureFlagAppExt, FeatureFlagViewExt};
use futures::StreamExt;
use fuzzy::{match_strings, StringMatchCandidate};
use gpui::{
actions,
elements::{
- Canvas, ChildView, Empty, Flex, Image, Label, List, ListOffset, ListState,
- MouseEventHandler, Orientation, OverlayPositionMode, Padding, ParentElement, Stack, Svg,
+ Canvas, ChildView, Component, Empty, Flex, Image, Label, List, ListOffset, ListState,
+ MouseEventHandler, Orientation, OverlayPositionMode, Padding, ParentElement, SafeStylable,
+ Stack, Svg,
},
+ fonts::TextStyle,
geometry::{
rect::RectF,
vector::{vec2f, Vector2F},
},
impl_actions,
platform::{CursorStyle, MouseButton, PromptLevel},
- serde_json, AnyElement, AppContext, AsyncAppContext, Element, Entity, ModelHandle,
+ serde_json, AnyElement, AppContext, AsyncAppContext, Element, Entity, FontCache, ModelHandle,
Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle,
};
use menu::{Confirm, SelectNext, SelectPrev};
@@ -33,9 +35,8 @@ use panel_settings::{CollaborationPanelDockPosition, CollaborationPanelSettings}
use project::{Fs, Project};
use serde_derive::{Deserialize, Serialize};
use settings::SettingsStore;
-use staff_mode::StaffMode;
use std::{borrow::Cow, mem, sync::Arc};
-use theme::IconButton;
+use theme::{components::ComponentExt, IconButton};
use util::{iife, ResultExt, TryFutureExt};
use workspace::{
dock::{DockPosition, Panel},
@@ -43,7 +44,10 @@ use workspace::{
Workspace,
};
-use crate::face_pile::FacePile;
+use crate::{
+ channel_view::{self, ChannelView},
+ face_pile::FacePile,
+};
use channel_modal::ChannelModal;
use self::contact_finder::ContactFinder;
@@ -53,6 +57,11 @@ struct RemoveChannel {
channel_id: u64,
}
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
+struct ToggleCollapse {
+ channel_id: u64,
+}
+
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
struct NewChannel {
channel_id: u64,
@@ -73,7 +82,21 @@ struct RenameChannel {
channel_id: u64,
}
-actions!(collab_panel, [ToggleFocus, Remove, Secondary]);
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
+struct OpenChannelBuffer {
+ channel_id: u64,
+}
+
+actions!(
+ collab_panel,
+ [
+ ToggleFocus,
+ Remove,
+ Secondary,
+ CollapseSelectedChannel,
+ ExpandSelectedChannel
+ ]
+);
impl_actions!(
collab_panel,
@@ -82,7 +105,9 @@ impl_actions!(
NewChannel,
InviteMembers,
ManageMembers,
- RenameChannel
+ RenameChannel,
+ ToggleCollapse,
+ OpenChannelBuffer
]
);
@@ -92,6 +117,7 @@ pub fn init(_client: Arc<Client>, cx: &mut AppContext) {
settings::register::<panel_settings::CollaborationPanelSettings>(cx);
contact_finder::init(cx);
channel_modal::init(cx);
+ channel_view::init(cx);
cx.add_action(CollabPanel::cancel);
cx.add_action(CollabPanel::select_next);
@@ -105,6 +131,10 @@ pub fn init(_client: Arc<Client>, cx: &mut AppContext) {
cx.add_action(CollabPanel::manage_members);
cx.add_action(CollabPanel::rename_selected_channel);
cx.add_action(CollabPanel::rename_channel);
+ cx.add_action(CollabPanel::toggle_channel_collapsed);
+ cx.add_action(CollabPanel::collapse_selected_channel);
+ cx.add_action(CollabPanel::expand_selected_channel);
+ cx.add_action(CollabPanel::open_channel_buffer);
}
#[derive(Debug)]
@@ -147,13 +177,15 @@ pub struct CollabPanel {
list_state: ListState<Self>,
subscriptions: Vec<Subscription>,
collapsed_sections: Vec<Section>,
+ collapsed_channels: Vec<ChannelId>,
workspace: WeakViewHandle<Workspace>,
context_menu_on_selected: bool,
}
#[derive(Serialize, Deserialize)]
-struct SerializedChannelsPanel {
+struct SerializedCollabPanel {
width: Option<f32>,
+ collapsed_channels: Option<Vec<ChannelId>>,
}
#[derive(Debug)]
@@ -198,6 +230,9 @@ enum ListEntry {
channel: Arc<Channel>,
depth: usize,
},
+ ChannelNotes {
+ channel_id: ChannelId,
+ },
ChannelEditor {
depth: usize,
},
@@ -341,6 +376,12 @@ impl CollabPanel {
return channel_row;
}
}
+ ListEntry::ChannelNotes { channel_id } => this.render_channel_notes(
+ *channel_id,
+ &theme.collab_panel,
+ is_selected,
+ cx,
+ ),
ListEntry::ChannelInvite(channel) => Self::render_channel_invite(
channel.clone(),
this.channel_store.clone(),
@@ -398,6 +439,7 @@ impl CollabPanel {
subscriptions: Vec::default(),
match_candidates: Vec::default(),
collapsed_sections: vec![Section::Offline],
+ collapsed_channels: Vec::default(),
workspace: workspace.weak_handle(),
client: workspace.app_state().client.clone(),
context_menu_on_selected: true,
@@ -431,9 +473,10 @@ impl CollabPanel {
}));
this.subscriptions
.push(cx.observe(&active_call, |this, _, cx| this.update_entries(true, cx)));
- this.subscriptions.push(
- cx.observe_global::<StaffMode, _>(move |this, cx| this.update_entries(true, cx)),
- );
+ this.subscriptions
+ .push(cx.observe_flag::<ChannelsAlpha, _>(move |_, this, cx| {
+ this.update_entries(true, cx)
+ }));
this.subscriptions.push(cx.subscribe(
&this.channel_store,
|this, _channel_store, e, cx| match e {
@@ -469,7 +512,7 @@ impl CollabPanel {
.log_err()
.flatten()
{
- Some(serde_json::from_str::<SerializedChannelsPanel>(&panel)?)
+ Some(serde_json::from_str::<SerializedCollabPanel>(&panel)?)
} else {
None
};
@@ -479,6 +522,9 @@ impl CollabPanel {
if let Some(serialized_panel) = serialized_panel {
panel.update(cx, |panel, cx| {
panel.width = serialized_panel.width;
+ panel.collapsed_channels = serialized_panel
+ .collapsed_channels
+ .unwrap_or_else(|| Vec::new());
cx.notify();
});
}
@@ -489,12 +535,16 @@ impl CollabPanel {
fn serialize(&mut self, cx: &mut ViewContext<Self>) {
let width = self.width;
+ let collapsed_channels = self.collapsed_channels.clone();
self.pending_serialization = cx.background().spawn(
async move {
KEY_VALUE_STORE
.write_kvp(
COLLABORATION_PANEL_KEY.into(),
- serde_json::to_string(&SerializedChannelsPanel { width })?,
+ serde_json::to_string(&SerializedCollabPanel {
+ width,
+ collapsed_channels: Some(collapsed_channels),
+ })?,
)
.await?;
anyhow::Ok(())
@@ -518,6 +568,10 @@ impl CollabPanel {
if !self.collapsed_sections.contains(&Section::ActiveCall) {
let room = room.read(cx);
+ if let Some(channel_id) = room.channel_id() {
+ self.entries.push(ListEntry::ChannelNotes { channel_id })
+ }
+
// Populate the active user.
if let Some(user) = user_store.current_user() {
self.match_candidates.clear();
@@ -622,7 +676,8 @@ impl CollabPanel {
}
let mut request_entries = Vec::new();
- if self.include_channels_section(cx) {
+
+ if cx.has_flag::<ChannelsAlpha>() {
self.entries.push(ListEntry::Header(Section::Channels, 0));
if channel_store.channel_count() > 0 || self.channel_editing_state.is_some() {
@@ -657,10 +712,24 @@ impl CollabPanel {
self.entries.push(ListEntry::ChannelEditor { depth: 0 });
}
}
+ let mut collapse_depth = None;
for mat in matches {
let (depth, channel) =
channel_store.channel_at_index(mat.candidate_id).unwrap();
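+                // Skip channels nested beneath a collapsed ancestor; `collapse_depth` tracks the
+                // depth of the nearest collapsed ancestor currently in effect.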
+ if collapse_depth.is_none() && self.is_channel_collapsed(channel.id) {
+ collapse_depth = Some(depth);
+ } else if let Some(collapsed_depth) = collapse_depth {
+ if depth > collapsed_depth {
+ continue;
+ }
+ if self.is_channel_collapsed(channel.id) {
+ collapse_depth = Some(depth);
+ } else {
+ collapse_depth = None;
+ }
+ }
+
match &self.channel_editing_state {
Some(ChannelEditingState::Create { parent_id, .. })
if *parent_id == Some(channel.id) =>
@@ -963,25 +1032,19 @@ impl CollabPanel {
) -> AnyElement<Self> {
enum JoinProject {}
- let font_cache = cx.font_cache();
- let host_avatar_height = theme
+ let host_avatar_width = theme
.contact_avatar
.width
.or(theme.contact_avatar.height)
.unwrap_or(0.);
- let row = &theme.project_row.inactive_state().default;
let tree_branch = theme.tree_branch;
- let line_height = row.name.text.line_height(font_cache);
- let cap_height = row.name.text.cap_height(font_cache);
- let baseline_offset =
- row.name.text.baseline_offset(font_cache) + (theme.row_height - line_height) / 2.;
let project_name = if worktree_root_names.is_empty() {
"untitled".to_string()
} else {
worktree_root_names.join(", ")
};
- MouseEventHandler::new::<JoinProject, _>(project_id as usize, cx, |mouse_state, _| {
+ MouseEventHandler::new::<JoinProject, _>(project_id as usize, cx, |mouse_state, cx| {
let tree_branch = *tree_branch.in_state(is_selected).style_for(mouse_state);
let row = theme
.project_row
@@ -989,39 +1052,20 @@ impl CollabPanel {
.style_for(mouse_state);
Flex::row()
+ .with_child(render_tree_branch(
+ tree_branch,
+ &row.name.text,
+ is_last,
+ vec2f(host_avatar_width, theme.row_height),
+ cx.font_cache(),
+ ))
.with_child(
- Stack::new()
- .with_child(Canvas::new(move |scene, bounds, _, _, _| {
- let start_x =
- bounds.min_x() + (bounds.width() / 2.) - (tree_branch.width / 2.);
- let end_x = bounds.max_x();
- let start_y = bounds.min_y();
- let end_y = bounds.min_y() + baseline_offset - (cap_height / 2.);
-
- scene.push_quad(gpui::Quad {
- bounds: RectF::from_points(
- vec2f(start_x, start_y),
- vec2f(
- start_x + tree_branch.width,
- if is_last { end_y } else { bounds.max_y() },
- ),
- ),
- background: Some(tree_branch.color),
- border: gpui::Border::default(),
- corner_radii: (0.).into(),
- });
- scene.push_quad(gpui::Quad {
- bounds: RectF::from_points(
- vec2f(start_x, end_y),
- vec2f(end_x, end_y + tree_branch.width),
- ),
- background: Some(tree_branch.color),
- border: gpui::Border::default(),
- corner_radii: (0.).into(),
- });
- }))
+ Svg::new("icons/file_icons/folder.svg")
+ .with_color(theme.channel_hash.color)
.constrained()
- .with_width(host_avatar_height),
+ .with_width(theme.channel_hash.width)
+ .aligned()
+ .left(),
)
.with_child(
Label::new(project_name, row.name.text.clone())
@@ -1196,7 +1240,7 @@ impl CollabPanel {
});
if let Some(name) = channel_name {
- Cow::Owned(format!("Current Call - #{}", name))
+ Cow::Owned(format!("#{}", name))
} else {
Cow::Borrowed("Current Call")
}
@@ -1332,7 +1376,7 @@ impl CollabPanel {
.with_cursor_style(CursorStyle::PointingHand)
.on_click(MouseButton::Left, move |_, this, cx| {
if can_collapse {
- this.toggle_expanded(section, cx);
+ this.toggle_section_expanded(section, cx);
}
})
}
@@ -1479,6 +1523,11 @@ impl CollabPanel {
cx: &AppContext,
) -> AnyElement<Self> {
Flex::row()
+ .with_child(
+ Empty::new()
+ .constrained()
+ .with_width(theme.collab_panel.disclosure.button_space()),
+ )
.with_child(
Svg::new("icons/hash.svg")
.with_color(theme.collab_panel.channel_hash.color)
@@ -1537,6 +1586,10 @@ impl CollabPanel {
cx: &mut ViewContext<Self>,
) -> AnyElement<Self> {
let channel_id = channel.id;
+ let has_children = self.channel_store.read(cx).has_children(channel_id);
+ let disclosed =
+ has_children.then(|| !self.collapsed_channels.binary_search(&channel_id).is_ok());
+
let is_active = iife!({
let call_channel = ActiveCall::global(cx)
.read(cx)
@@ -1550,7 +1603,7 @@ impl CollabPanel {
const FACEPILE_LIMIT: usize = 3;
MouseEventHandler::new::<Channel, _>(channel.id as usize, cx, |state, cx| {
- Flex::row()
+ Flex::<Self>::row()
.with_child(
Svg::new("icons/hash.svg")
.with_color(theme.channel_hash.color)
@@ -1599,6 +1652,11 @@ impl CollabPanel {
}
})
.align_children_center()
+ .styleable_component()
+ .disclosable(disclosed, Box::new(ToggleCollapse { channel_id }))
+ .with_id(channel_id as usize)
+ .with_style(theme.disclosure.clone())
+ .element()
.constrained()
.with_height(theme.row_height)
.contained()
@@ -1618,6 +1676,61 @@ impl CollabPanel {
.into_any()
}
+ fn render_channel_notes(
+ &self,
+ channel_id: ChannelId,
+ theme: &theme::CollabPanel,
+ is_selected: bool,
+ cx: &mut ViewContext<Self>,
+ ) -> AnyElement<Self> {
+ enum ChannelNotes {}
+ let host_avatar_width = theme
+ .contact_avatar
+ .width
+ .or(theme.contact_avatar.height)
+ .unwrap_or(0.);
+
+ MouseEventHandler::new::<ChannelNotes, _>(channel_id as usize, cx, |state, cx| {
+ let tree_branch = *theme.tree_branch.in_state(is_selected).style_for(state);
+ let row = theme.project_row.in_state(is_selected).style_for(state);
+
+ Flex::<Self>::row()
+ .with_child(render_tree_branch(
+ tree_branch,
+ &row.name.text,
+ true,
+ vec2f(host_avatar_width, theme.row_height),
+ cx.font_cache(),
+ ))
+ .with_child(
+ Svg::new("icons/radix/file.svg")
+ .with_color(theme.channel_hash.color)
+ .constrained()
+ .with_width(theme.channel_hash.width)
+ .aligned()
+ .left(),
+ )
+ .with_child(
+ Label::new("notes", theme.channel_name.text.clone())
+ .contained()
+ .with_style(theme.channel_name.container)
+ .aligned()
+ .left()
+ .flex(1., true),
+ )
+ .constrained()
+ .with_height(theme.row_height)
+ .contained()
+ .with_style(*theme.channel_row.style_for(is_selected, state))
+ .with_padding_left(theme.channel_row.default_style().padding.left)
+ })
+ .on_click(MouseButton::Left, move |_, this, cx| {
+ this.open_channel_buffer(&OpenChannelBuffer { channel_id }, cx);
+ })
+ .with_cursor_style(CursorStyle::PointingHand)
+ .into_any()
+ }
+
fn render_channel_invite(
channel: Arc<Channel>,
channel_store: ModelHandle<ChannelStore>,
@@ -1801,53 +1914,58 @@ impl CollabPanel {
.into_any()
}
- fn include_channels_section(&self, cx: &AppContext) -> bool {
- if cx.has_global::<StaffMode>() {
- cx.global::<StaffMode>().0
- } else {
- false
- }
- }
-
fn deploy_channel_context_menu(
&mut self,
position: Option<Vector2F>,
channel_id: u64,
cx: &mut ViewContext<Self>,
) {
- if self.channel_store.read(cx).is_user_admin(channel_id) {
- self.context_menu_on_selected = position.is_none();
-
- self.context_menu.update(cx, |context_menu, cx| {
- context_menu.set_position_mode(if self.context_menu_on_selected {
- OverlayPositionMode::Local
- } else {
- OverlayPositionMode::Window
- });
+ self.context_menu_on_selected = position.is_none();
- context_menu.show(
- position.unwrap_or_default(),
- if self.context_menu_on_selected {
- gpui::elements::AnchorCorner::TopRight
- } else {
- gpui::elements::AnchorCorner::BottomLeft
- },
- vec![
- ContextMenuItem::action("New Subchannel", NewChannel { channel_id }),
- ContextMenuItem::Separator,
- ContextMenuItem::action("Invite to Channel", InviteMembers { channel_id }),
- ContextMenuItem::Separator,
- ContextMenuItem::action("Rename", RenameChannel { channel_id }),
- ContextMenuItem::action("Manage", ManageMembers { channel_id }),
- ContextMenuItem::Separator,
- ContextMenuItem::action("Delete", RemoveChannel { channel_id }),
- ],
- cx,
- );
+ self.context_menu.update(cx, |context_menu, cx| {
+ context_menu.set_position_mode(if self.context_menu_on_selected {
+ OverlayPositionMode::Local
+ } else {
+ OverlayPositionMode::Window
});
- cx.notify();
- }
+ let expand_action_name = if self.is_channel_collapsed(channel_id) {
+ "Expand Subchannels"
+ } else {
+ "Collapse Subchannels"
+ };
+
+ let mut items = vec![
+ ContextMenuItem::action(expand_action_name, ToggleCollapse { channel_id }),
+ ContextMenuItem::action("Open Notes", OpenChannelBuffer { channel_id }),
+ ];
+
+ if self.channel_store.read(cx).is_user_admin(channel_id) {
+ items.extend([
+ ContextMenuItem::Separator,
+ ContextMenuItem::action("New Subchannel", NewChannel { channel_id }),
+ ContextMenuItem::action("Rename", RenameChannel { channel_id }),
+ ContextMenuItem::Separator,
+ ContextMenuItem::action("Invite Members", InviteMembers { channel_id }),
+ ContextMenuItem::action("Manage Members", ManageMembers { channel_id }),
+ ContextMenuItem::Separator,
+ ContextMenuItem::action("Delete", RemoveChannel { channel_id }),
+ ]);
+ }
+
+ context_menu.show(
+ position.unwrap_or_default(),
+ if self.context_menu_on_selected {
+ gpui::elements::AnchorCorner::TopRight
+ } else {
+ gpui::elements::AnchorCorner::BottomLeft
+ },
+ items,
+ cx,
+ );
+ });
+
+ cx.notify();
}
fn cancel(&mut self, _: &Cancel, cx: &mut ViewContext<Self>) {
@@ -1912,7 +2030,7 @@ impl CollabPanel {
| Section::Online
| Section::Offline
| Section::ChannelInvites => {
- self.toggle_expanded(*section, cx);
+ self.toggle_section_expanded(*section, cx);
}
},
ListEntry::Contact { contact, calling } => {
@@ -2000,7 +2118,7 @@ impl CollabPanel {
}
}
- fn toggle_expanded(&mut self, section: Section, cx: &mut ViewContext<Self>) {
+ fn toggle_section_expanded(&mut self, section: Section, cx: &mut ViewContext<Self>) {
if let Some(ix) = self.collapsed_sections.iter().position(|s| *s == section) {
self.collapsed_sections.remove(ix);
} else {
@@ -2009,6 +2127,55 @@ impl CollabPanel {
self.update_entries(false, cx);
}
+ fn collapse_selected_channel(
+ &mut self,
+ _: &CollapseSelectedChannel,
+ cx: &mut ViewContext<Self>,
+ ) {
+ let Some(channel_id) = self.selected_channel().map(|channel| channel.id) else {
+ return;
+ };
+
+ if self.is_channel_collapsed(channel_id) {
+ return;
+ }
+
+ self.toggle_channel_collapsed(&ToggleCollapse { channel_id }, cx)
+ }
+
+ fn expand_selected_channel(&mut self, _: &ExpandSelectedChannel, cx: &mut ViewContext<Self>) {
+ let Some(channel_id) = self.selected_channel().map(|channel| channel.id) else {
+ return;
+ };
+
+ if !self.is_channel_collapsed(channel_id) {
+ return;
+ }
+
+ self.toggle_channel_collapsed(&ToggleCollapse { channel_id }, cx)
+ }
+
+ fn toggle_channel_collapsed(&mut self, action: &ToggleCollapse, cx: &mut ViewContext<Self>) {
+ let channel_id = action.channel_id;
+
+ match self.collapsed_channels.binary_search(&channel_id) {
+ Ok(ix) => {
+ self.collapsed_channels.remove(ix);
+ }
+ Err(ix) => {
+ self.collapsed_channels.insert(ix, channel_id);
+ }
+ };
+ self.serialize(cx);
+ self.update_entries(true, cx);
+ cx.notify();
+ cx.focus_self();
+ }
+
+ fn is_channel_collapsed(&self, channel: ChannelId) -> bool {
+ self.collapsed_channels.binary_search(&channel).is_ok()
+ }
+
fn leave_call(cx: &mut ViewContext<Self>) {
ActiveCall::global(cx)
.update(cx, |call, cx| call.hang_up(cx))
@@ -2048,6 +2215,8 @@ impl CollabPanel {
}
fn new_subchannel(&mut self, action: &NewChannel, cx: &mut ViewContext<Self>) {
+ self.collapsed_channels
+ .retain(|&channel| channel != action.channel_id);
self.channel_editing_state = Some(ChannelEditingState::Create {
parent_id: Some(action.channel_id),
pending_name: None,
@@ -2103,6 +2272,21 @@ impl CollabPanel {
}
}
+ fn open_channel_buffer(&mut self, action: &OpenChannelBuffer, cx: &mut ViewContext<Self>) {
+ if let Some(workspace) = self.workspace.upgrade(cx) {
+ let pane = workspace.read(cx).active_pane().clone();
+ let channel_view = ChannelView::open(action.channel_id, pane.clone(), workspace, cx);
+ cx.spawn(|_, mut cx| async move {
+ let channel_view = channel_view.await?;
+ pane.update(&mut cx, |pane, cx| {
+ pane.add_item(Box::new(channel_view), true, true, None, cx)
+ });
+ anyhow::Ok(())
+ })
+ .detach();
+ }
+ }
+
fn show_inline_context_menu(&mut self, _: &menu::ShowContextMenu, cx: &mut ViewContext<Self>) {
let Some(channel) = self.selected_channel() else {
return;
@@ -2261,6 +2445,51 @@ impl CollabPanel {
}
}
+fn render_tree_branch(
+ branch_style: theme::TreeBranch,
+ row_style: &TextStyle,
+ is_last: bool,
+ size: Vector2F,
+ font_cache: &FontCache,
+) -> gpui::elements::ConstrainedBox<CollabPanel> {
+ let line_height = row_style.line_height(font_cache);
+ let cap_height = row_style.cap_height(font_cache);
+ let baseline_offset = row_style.baseline_offset(font_cache) + (size.y() - line_height) / 2.;
+
+ Canvas::new(move |scene, bounds, _, _, _| {
+ scene.paint_layer(None, |scene| {
+ let start_x = bounds.min_x() + (bounds.width() / 2.) - (branch_style.width / 2.);
+ let end_x = bounds.max_x();
+ let start_y = bounds.min_y();
+ let end_y = bounds.min_y() + baseline_offset - (cap_height / 2.);
+
+ scene.push_quad(gpui::Quad {
+ bounds: RectF::from_points(
+ vec2f(start_x, start_y),
+ vec2f(
+ start_x + branch_style.width,
+ if is_last { end_y } else { bounds.max_y() },
+ ),
+ ),
+ background: Some(branch_style.color),
+ border: gpui::Border::default(),
+ corner_radii: (0.).into(),
+ });
+ scene.push_quad(gpui::Quad {
+ bounds: RectF::from_points(
+ vec2f(start_x, end_y),
+ vec2f(end_x, end_y + branch_style.width),
+ ),
+ background: Some(branch_style.color),
+ border: gpui::Border::default(),
+ corner_radii: (0.).into(),
+ });
+ })
+ })
+ .constrained()
+ .with_width(size.x())
+}
+
impl View for CollabPanel {
fn ui_name() -> &'static str {
"CollabPanel"
@@ -2470,6 +2699,14 @@ impl PartialEq for ListEntry {
return channel_1.id == channel_2.id && depth_1 == depth_2;
}
}
+ ListEntry::ChannelNotes { channel_id } => {
+ if let ListEntry::ChannelNotes {
+ channel_id: other_id,
+ } = other
+ {
+ return channel_id == other_id;
+ }
+ }
ListEntry::ChannelInvite(channel_1) => {
if let ListEntry::ChannelInvite(channel_2) = other {
return channel_1.id == channel_2.id;
@@ -1,4 +1,5 @@
-use client::{proto, ChannelId, ChannelMembership, ChannelStore, User, UserId, UserStore};
+use channel::{ChannelId, ChannelMembership, ChannelStore};
+use client::{proto, User, UserId, UserStore};
use context_menu::{ContextMenu, ContextMenuItem};
use fuzzy::{match_strings, StringMatchCandidate};
use gpui::{
@@ -151,12 +152,9 @@ impl View for ChannelModal {
let theme = &theme::current(cx).collab_panel.tabbed_modal;
let mode = self.picker.read(cx).delegate().mode;
- let Some(channel) = self
- .channel_store
- .read(cx)
- .channel_for_id(self.channel_id) else {
- return Empty::new().into_any()
- };
+ let Some(channel) = self.channel_store.read(cx).channel_for_id(self.channel_id) else {
+ return Empty::new().into_any();
+ };
enum InviteMembers {}
enum ManageMembers {}
@@ -1,3 +1,4 @@
+pub mod channel_view;
pub mod collab_panel;
mod collab_titlebar_item;
mod contact_notification;
@@ -0,0 +1,18 @@
+[package]
+name = "component_test"
+version = "0.1.0"
+edition = "2021"
+publish = false
+
+[lib]
+path = "src/component_test.rs"
+doctest = false
+
+[dependencies]
+anyhow.workspace = true
+gpui = { path = "../gpui" }
+settings = { path = "../settings" }
+util = { path = "../util" }
+theme = { path = "../theme" }
+workspace = { path = "../workspace" }
+project = { path = "../project" }
@@ -0,0 +1,121 @@
+use gpui::{
+ actions,
+ elements::{Component, Flex, ParentElement, SafeStylable},
+ AppContext, Element, Entity, ModelHandle, Task, View, ViewContext, ViewHandle, WeakViewHandle,
+};
+use project::Project;
+use theme::components::{action_button::Button, label::Label, ComponentExt};
+use workspace::{
+ item::Item, register_deserializable_item, ItemId, Pane, PaneBackdrop, Workspace, WorkspaceId,
+};
+
+pub fn init(cx: &mut AppContext) {
+ cx.add_action(ComponentTest::toggle_disclosure);
+ cx.add_action(ComponentTest::toggle_toggle);
+ cx.add_action(ComponentTest::deploy);
+ register_deserializable_item::<ComponentTest>(cx);
+}
+
+actions!(
+ test,
+ [NoAction, ToggleDisclosure, ToggleToggle, NewComponentTest]
+);
+
+struct ComponentTest {
+ disclosed: bool,
+ toggled: bool,
+}
+
+impl ComponentTest {
+ fn new() -> Self {
+ Self {
+ disclosed: false,
+ toggled: false,
+ }
+ }
+
+ fn deploy(workspace: &mut Workspace, _: &NewComponentTest, cx: &mut ViewContext<Workspace>) {
+ workspace.add_item(Box::new(cx.add_view(|_| ComponentTest::new())), cx);
+ }
+
+ fn toggle_disclosure(&mut self, _: &ToggleDisclosure, cx: &mut ViewContext<Self>) {
+ self.disclosed = !self.disclosed;
+ cx.notify();
+ }
+
+ fn toggle_toggle(&mut self, _: &ToggleToggle, cx: &mut ViewContext<Self>) {
+ self.toggled = !self.toggled;
+ cx.notify();
+ }
+}
+
+impl Entity for ComponentTest {
+ type Event = ();
+}
+
+impl View for ComponentTest {
+ fn ui_name() -> &'static str {
+ "Component Test"
+ }
+
+ fn render(&mut self, cx: &mut gpui::ViewContext<Self>) -> gpui::AnyElement<Self> {
+ let theme = theme::current(cx);
+
+ PaneBackdrop::new(
+ cx.view_id(),
+ Flex::column()
+ .with_spacing(10.)
+ .with_child(
+ Button::action(NoAction)
+ .with_tooltip("Here's what a tooltip looks like", theme.tooltip.clone())
+ .with_contents(Label::new("Click me!"))
+ .with_style(theme.component_test.button.clone())
+ .element(),
+ )
+ .with_child(
+ Button::action(ToggleToggle)
+ .with_tooltip("Here's what a tooltip looks like", theme.tooltip.clone())
+ .with_contents(Label::new("Toggle me!"))
+ .toggleable(self.toggled)
+ .with_style(theme.component_test.toggle.clone())
+ .element(),
+ )
+ .with_child(
+ Label::new("A disclosure")
+ .disclosable(Some(self.disclosed), Box::new(ToggleDisclosure))
+ .with_style(theme.component_test.disclosure.clone())
+ .element(),
+ )
+ .constrained()
+ .with_width(200.)
+ .aligned()
+ .into_any(),
+ )
+ .into_any()
+ }
+}
+
+impl Item for ComponentTest {
+ fn tab_content<V: 'static>(
+ &self,
+ _: Option<usize>,
+ style: &theme::Tab,
+ _: &AppContext,
+ ) -> gpui::AnyElement<V> {
+ gpui::elements::Label::new("Component test", style.label.clone()).into_any()
+ }
+
+ fn serialized_item_kind() -> Option<&'static str> {
+ Some("ComponentTest")
+ }
+
+ fn deserialize(
+ _project: ModelHandle<Project>,
+ _workspace: WeakViewHandle<Workspace>,
+ _workspace_id: WorkspaceId,
+ _item_id: ItemId,
+ cx: &mut ViewContext<Pane>,
+ ) -> Task<anyhow::Result<ViewHandle<Self>>> {
+ Task::ready(Ok(cx.add_view(|_| Self::new())))
+ }
+}
@@ -980,7 +980,7 @@ mod tests {
deterministic.forbid_parking();
let (copilot, mut lsp) = Copilot::fake(cx);
- let buffer_1 = cx.add_model(|cx| Buffer::new(0, "Hello", cx));
+ let buffer_1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "Hello"));
let buffer_1_uri: lsp::Url = format!("buffer://{}", buffer_1.id()).parse().unwrap();
copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_1, cx));
assert_eq!(
@@ -996,7 +996,7 @@ mod tests {
}
);
- let buffer_2 = cx.add_model(|cx| Buffer::new(0, "Goodbye", cx));
+ let buffer_2 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "Goodbye"));
let buffer_2_uri: lsp::Url = format!("buffer://{}", buffer_2.id()).parse().unwrap();
copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_2, cx));
assert_eq!(
@@ -4,7 +4,10 @@ mod inlay_map;
mod tab_map;
mod wrap_map;
-use crate::{Anchor, AnchorRangeExt, InlayId, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint};
+use crate::{
+ link_go_to_definition::{DocumentRange, InlayRange},
+ Anchor, AnchorRangeExt, InlayId, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint,
+};
pub use block_map::{BlockMap, BlockPoint};
use collections::{HashMap, HashSet};
use fold_map::FoldMap;
@@ -27,7 +30,8 @@ pub use block_map::{
BlockDisposition, BlockId, BlockProperties, BlockStyle, RenderBlock, TransformBlock,
};
-pub use self::inlay_map::Inlay;
+pub use self::fold_map::FoldPoint;
+pub use self::inlay_map::{Inlay, InlayOffset, InlayPoint};
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum FoldStatus {
@@ -39,7 +43,7 @@ pub trait ToDisplayPoint {
fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint;
}
-type TextHighlights = TreeMap<Option<TypeId>, Arc<(HighlightStyle, Vec<Range<Anchor>>)>>;
+type TextHighlights = TreeMap<Option<TypeId>, Arc<(HighlightStyle, Vec<DocumentRange>)>>;
pub struct DisplayMap {
buffer: ModelHandle<MultiBuffer>,
@@ -211,11 +215,28 @@ impl DisplayMap {
ranges: Vec<Range<Anchor>>,
style: HighlightStyle,
) {
- self.text_highlights
- .insert(Some(type_id), Arc::new((style, ranges)));
+ self.text_highlights.insert(
+ Some(type_id),
+ Arc::new((style, ranges.into_iter().map(DocumentRange::Text).collect())),
+ );
}
- pub fn text_highlights(&self, type_id: TypeId) -> Option<(HighlightStyle, &[Range<Anchor>])> {
+ pub fn highlight_inlays(
+ &mut self,
+ type_id: TypeId,
+ ranges: Vec<InlayRange>,
+ style: HighlightStyle,
+ ) {
+ self.text_highlights.insert(
+ Some(type_id),
+ Arc::new((
+ style,
+ ranges.into_iter().map(DocumentRange::Inlay).collect(),
+ )),
+ );
+ }
+
+ pub fn text_highlights(&self, type_id: TypeId) -> Option<(HighlightStyle, &[DocumentRange])> {
let highlights = self.text_highlights.get(&Some(type_id))?;
Some((highlights.0, &highlights.1))
}
@@ -223,7 +244,7 @@ impl DisplayMap {
pub fn clear_text_highlights(
&mut self,
type_id: TypeId,
- ) -> Option<Arc<(HighlightStyle, Vec<Range<Anchor>>)>> {
+ ) -> Option<Arc<(HighlightStyle, Vec<DocumentRange>)>> {
self.text_highlights.remove(&Some(type_id))
}
@@ -290,7 +311,7 @@ impl DisplayMap {
pub struct DisplaySnapshot {
pub buffer_snapshot: MultiBufferSnapshot,
- fold_snapshot: fold_map::FoldSnapshot,
+ pub fold_snapshot: fold_map::FoldSnapshot,
inlay_snapshot: inlay_map::InlaySnapshot,
tab_snapshot: tab_map::TabSnapshot,
wrap_snapshot: wrap_map::WrapSnapshot,
@@ -387,12 +408,49 @@ impl DisplaySnapshot {
}
fn display_point_to_point(&self, point: DisplayPoint, bias: Bias) -> Point {
+ self.inlay_snapshot
+ .to_buffer_point(self.display_point_to_inlay_point(point, bias))
+ }
+
+ pub fn display_point_to_inlay_offset(&self, point: DisplayPoint, bias: Bias) -> InlayOffset {
+ self.inlay_snapshot
+ .to_offset(self.display_point_to_inlay_point(point, bias))
+ }
+
+ pub fn anchor_to_inlay_offset(&self, anchor: Anchor) -> InlayOffset {
+ self.inlay_snapshot
+ .to_inlay_offset(anchor.to_offset(&self.buffer_snapshot))
+ }
+
+ pub fn inlay_offset_to_display_point(&self, offset: InlayOffset, bias: Bias) -> DisplayPoint {
+ let inlay_point = self.inlay_snapshot.to_point(offset);
+ let fold_point = self.fold_snapshot.to_fold_point(inlay_point, bias);
+ let tab_point = self.tab_snapshot.to_tab_point(fold_point);
+ let wrap_point = self.wrap_snapshot.tab_point_to_wrap_point(tab_point);
+ let block_point = self.block_snapshot.to_block_point(wrap_point);
+ DisplayPoint(block_point)
+ }
+
+ fn display_point_to_inlay_point(&self, point: DisplayPoint, bias: Bias) -> InlayPoint {
let block_point = point.0;
let wrap_point = self.block_snapshot.to_wrap_point(block_point);
let tab_point = self.wrap_snapshot.to_tab_point(wrap_point);
let fold_point = self.tab_snapshot.to_fold_point(tab_point, bias).0;
- let inlay_point = fold_point.to_inlay_point(&self.fold_snapshot);
- self.inlay_snapshot.to_buffer_point(inlay_point)
+ fold_point.to_inlay_point(&self.fold_snapshot)
+ }
+
+ pub fn display_point_to_fold_point(&self, point: DisplayPoint, bias: Bias) -> FoldPoint {
+ let block_point = point.0;
+ let wrap_point = self.block_snapshot.to_wrap_point(block_point);
+ let tab_point = self.wrap_snapshot.to_tab_point(wrap_point);
+ self.tab_snapshot.to_fold_point(tab_point, bias).0
+ }
+
+ pub fn fold_point_to_display_point(&self, fold_point: FoldPoint) -> DisplayPoint {
+ let tab_point = self.tab_snapshot.to_tab_point(fold_point);
+ let wrap_point = self.wrap_snapshot.tab_point_to_wrap_point(tab_point);
+ let block_point = self.block_snapshot.to_block_point(wrap_point);
+ DisplayPoint(block_point)
}
pub fn max_point(&self) -> DisplayPoint {
@@ -428,15 +486,15 @@ impl DisplaySnapshot {
&self,
display_rows: Range<u32>,
language_aware: bool,
- hint_highlights: Option<HighlightStyle>,
- suggestion_highlights: Option<HighlightStyle>,
+ hint_highlight_style: Option<HighlightStyle>,
+ suggestion_highlight_style: Option<HighlightStyle>,
) -> DisplayChunks<'_> {
self.block_snapshot.chunks(
display_rows,
language_aware,
Some(&self.text_highlights),
- hint_highlights,
- suggestion_highlights,
+ hint_highlight_style,
+ suggestion_highlight_style,
)
}
@@ -757,7 +815,7 @@ impl DisplaySnapshot {
#[cfg(any(test, feature = "test-support"))]
pub fn highlight_ranges<Tag: ?Sized + 'static>(
&self,
- ) -> Option<Arc<(HighlightStyle, Vec<Range<Anchor>>)>> {
+ ) -> Option<Arc<(HighlightStyle, Vec<DocumentRange>)>> {
let type_id = TypeId::of::<Tag>();
self.text_highlights.get(&Some(type_id)).cloned()
}
@@ -1319,7 +1377,8 @@ pub mod tests {
cx.update(|cx| init_test(cx, |s| s.defaults.tab_size = Some(2.try_into().unwrap())));
- let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
+ let buffer = cx
+ .add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
buffer.condition(cx, |buf, _| !buf.is_parsing()).await;
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
@@ -1408,7 +1467,8 @@ pub mod tests {
cx.update(|cx| init_test(cx, |_| {}));
- let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
+ let buffer = cx
+ .add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
buffer.condition(cx, |buf, _| !buf.is_parsing()).await;
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
@@ -1480,7 +1540,8 @@ pub mod tests {
let (text, highlighted_ranges) = marked_text_ranges(r#"constˇ «a»: B = "c «d»""#, false);
- let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
+ let buffer = cx
+ .add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
buffer.condition(cx, |buf, _| !buf.is_parsing()).await;
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
@@ -589,8 +589,8 @@ impl BlockSnapshot {
rows: Range<u32>,
language_aware: bool,
text_highlights: Option<&'a TextHighlights>,
- hint_highlights: Option<HighlightStyle>,
- suggestion_highlights: Option<HighlightStyle>,
+ hint_highlight_style: Option<HighlightStyle>,
+ suggestion_highlight_style: Option<HighlightStyle>,
) -> BlockChunks<'a> {
let max_output_row = cmp::min(rows.end, self.transforms.summary().output_rows);
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>();
@@ -623,8 +623,8 @@ impl BlockSnapshot {
input_start..input_end,
language_aware,
text_highlights,
- hint_highlights,
- suggestion_highlights,
+ hint_highlight_style,
+ suggestion_highlight_style,
),
input_chunk: Default::default(),
transforms: cursor,
@@ -652,8 +652,8 @@ impl FoldSnapshot {
range: Range<FoldOffset>,
language_aware: bool,
text_highlights: Option<&'a TextHighlights>,
- hint_highlights: Option<HighlightStyle>,
- suggestion_highlights: Option<HighlightStyle>,
+ hint_highlight_style: Option<HighlightStyle>,
+ suggestion_highlight_style: Option<HighlightStyle>,
) -> FoldChunks<'a> {
let mut transform_cursor = self.transforms.cursor::<(FoldOffset, InlayOffset)>();
@@ -675,8 +675,8 @@ impl FoldSnapshot {
inlay_start..inlay_end,
language_aware,
text_highlights,
- hint_highlights,
- suggestion_highlights,
+ hint_highlight_style,
+ suggestion_highlight_style,
),
inlay_chunk: None,
inlay_offset: inlay_start,
@@ -1,4 +1,5 @@
use crate::{
+ link_go_to_definition::DocumentRange,
multi_buffer::{MultiBufferChunks, MultiBufferRows},
Anchor, InlayId, MultiBufferSnapshot, ToOffset,
};
@@ -183,7 +184,7 @@ pub struct InlayBufferRows<'a> {
max_buffer_row: u32,
}
-#[derive(Copy, Clone, Eq, PartialEq)]
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
struct HighlightEndpoint {
offset: InlayOffset,
is_start: bool,
@@ -210,6 +211,7 @@ pub struct InlayChunks<'a> {
buffer_chunks: MultiBufferChunks<'a>,
buffer_chunk: Option<Chunk<'a>>,
inlay_chunks: Option<text::Chunks<'a>>,
+ inlay_chunk: Option<&'a str>,
output_offset: InlayOffset,
max_output_offset: InlayOffset,
hint_highlight_style: Option<HighlightStyle>,
@@ -297,13 +299,31 @@ impl<'a> Iterator for InlayChunks<'a> {
- self.transforms.start().0;
inlay.text.chunks_in_range(start.0..end.0)
});
+ let inlay_chunk = self
+ .inlay_chunk
+ .get_or_insert_with(|| inlay_chunks.next().unwrap());
+ let (chunk, remainder) = inlay_chunk.split_at(
+ inlay_chunk
+ .len()
+ .min(next_highlight_endpoint.0 - self.output_offset.0),
+ );
+ *inlay_chunk = remainder;
+ if inlay_chunk.is_empty() {
+ self.inlay_chunk = None;
+ }
- let chunk = inlay_chunks.next().unwrap();
self.output_offset.0 += chunk.len();
- let highlight_style = match inlay.id {
+ let mut highlight_style = match inlay.id {
InlayId::Suggestion(_) => self.suggestion_highlight_style,
InlayId::Hint(_) => self.hint_highlight_style,
};
+ if !self.active_highlights.is_empty() {
+ for active_highlight in self.active_highlights.values() {
+ highlight_style
+ .get_or_insert(Default::default())
+ .highlight(*active_highlight);
+ }
+ }
Chunk {
text: chunk,
highlight_style,
@@ -973,8 +993,8 @@ impl InlaySnapshot {
range: Range<InlayOffset>,
language_aware: bool,
text_highlights: Option<&'a TextHighlights>,
- hint_highlights: Option<HighlightStyle>,
- suggestion_highlights: Option<HighlightStyle>,
+ hint_highlight_style: Option<HighlightStyle>,
+ suggestion_highlight_style: Option<HighlightStyle>,
) -> InlayChunks<'a> {
let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>();
cursor.seek(&range.start, Bias::Right, &());
@@ -983,52 +1003,56 @@ impl InlaySnapshot {
if let Some(text_highlights) = text_highlights {
if !text_highlights.is_empty() {
while cursor.start().0 < range.end {
- if true {
- let transform_start = self.buffer.anchor_after(
- self.to_buffer_offset(cmp::max(range.start, cursor.start().0)),
- );
-
- let transform_end = {
- let overshoot = InlayOffset(range.end.0 - cursor.start().0 .0);
- self.buffer.anchor_before(self.to_buffer_offset(cmp::min(
- cursor.end(&()).0,
- cursor.start().0 + overshoot,
- )))
+ let transform_start = self.buffer.anchor_after(
+ self.to_buffer_offset(cmp::max(range.start, cursor.start().0)),
+ );
+ let transform_start =
+ self.to_inlay_offset(transform_start.to_offset(&self.buffer));
+
+ let transform_end = {
+ let overshoot = InlayOffset(range.end.0 - cursor.start().0 .0);
+ self.buffer.anchor_before(self.to_buffer_offset(cmp::min(
+ cursor.end(&()).0,
+ cursor.start().0 + overshoot,
+ )))
+ };
+ let transform_end = self.to_inlay_offset(transform_end.to_offset(&self.buffer));
+
+ for (tag, text_highlights) in text_highlights.iter() {
+ let style = text_highlights.0;
+ let ranges = &text_highlights.1;
+
+ let start_ix = match ranges.binary_search_by(|probe| {
+ let cmp = self
+ .document_to_inlay_range(probe)
+ .end
+ .cmp(&transform_start);
+ if cmp.is_gt() {
+ cmp::Ordering::Greater
+ } else {
+ cmp::Ordering::Less
+ }
+ }) {
+ Ok(i) | Err(i) => i,
};
-
- for (tag, highlights) in text_highlights.iter() {
- let style = highlights.0;
- let ranges = &highlights.1;
-
- let start_ix = match ranges.binary_search_by(|probe| {
- let cmp = probe.end.cmp(&transform_start, &self.buffer);
- if cmp.is_gt() {
- cmp::Ordering::Greater
- } else {
- cmp::Ordering::Less
- }
- }) {
- Ok(i) | Err(i) => i,
- };
- for range in &ranges[start_ix..] {
- if range.start.cmp(&transform_end, &self.buffer).is_ge() {
- break;
- }
-
- highlight_endpoints.push(HighlightEndpoint {
- offset: self
- .to_inlay_offset(range.start.to_offset(&self.buffer)),
- is_start: true,
- tag: *tag,
- style,
- });
- highlight_endpoints.push(HighlightEndpoint {
- offset: self.to_inlay_offset(range.end.to_offset(&self.buffer)),
- is_start: false,
- tag: *tag,
- style,
- });
+ for range in &ranges[start_ix..] {
+ let range = self.document_to_inlay_range(range);
+ if range.start.cmp(&transform_end).is_ge() {
+ break;
}
+
+ highlight_endpoints.push(HighlightEndpoint {
+ offset: range.start,
+ is_start: true,
+ tag: *tag,
+ style,
+ });
+ highlight_endpoints.push(HighlightEndpoint {
+ offset: range.end,
+ is_start: false,
+ tag: *tag,
+ style,
+ });
}
}
@@ -1046,17 +1070,30 @@ impl InlaySnapshot {
transforms: cursor,
buffer_chunks,
inlay_chunks: None,
+ inlay_chunk: None,
buffer_chunk: None,
output_offset: range.start,
max_output_offset: range.end,
- hint_highlight_style: hint_highlights,
- suggestion_highlight_style: suggestion_highlights,
+ hint_highlight_style,
+ suggestion_highlight_style,
highlight_endpoints: highlight_endpoints.into_iter().peekable(),
active_highlights: Default::default(),
snapshot: self,
}
}
+ fn document_to_inlay_range(&self, range: &DocumentRange) -> Range<InlayOffset> {
+ match range {
+ DocumentRange::Text(text_range) => {
+ self.to_inlay_offset(text_range.start.to_offset(&self.buffer))
+ ..self.to_inlay_offset(text_range.end.to_offset(&self.buffer))
+ }
+ DocumentRange::Inlay(inlay_range) => {
+ inlay_range.highlight_start..inlay_range.highlight_end
+ }
+ }
+ }
+
#[cfg(test)]
pub fn text(&self) -> String {
self.chunks(Default::default()..self.len(), false, None, None, None)
@@ -1107,13 +1144,12 @@ fn push_isomorphic(sum_tree: &mut SumTree<Transform>, summary: TextSummary) {
#[cfg(test)]
mod tests {
use super::*;
- use crate::{InlayId, MultiBuffer};
+ use crate::{link_go_to_definition::InlayRange, InlayId, MultiBuffer};
use gpui::AppContext;
- use project::{InlayHint, InlayHintLabel};
+ use project::{InlayHint, InlayHintLabel, ResolveState};
use rand::prelude::*;
use settings::SettingsStore;
use std::{cmp::Reverse, env, sync::Arc};
- use sum_tree::TreeMap;
use text::Patch;
use util::post_inc;
@@ -1125,12 +1161,12 @@ mod tests {
Anchor::min(),
&InlayHint {
label: InlayHintLabel::String("a".to_string()),
- buffer_id: 0,
position: text::Anchor::default(),
padding_left: false,
padding_right: false,
tooltip: None,
kind: None,
+ resolve_state: ResolveState::Resolved,
},
)
.text
@@ -1145,12 +1181,12 @@ mod tests {
Anchor::min(),
&InlayHint {
label: InlayHintLabel::String("a".to_string()),
- buffer_id: 0,
position: text::Anchor::default(),
padding_left: true,
padding_right: true,
tooltip: None,
kind: None,
+ resolve_state: ResolveState::Resolved,
},
)
.text
@@ -1165,12 +1201,12 @@ mod tests {
Anchor::min(),
&InlayHint {
label: InlayHintLabel::String(" a ".to_string()),
- buffer_id: 0,
position: text::Anchor::default(),
padding_left: false,
padding_right: false,
tooltip: None,
kind: None,
+ resolve_state: ResolveState::Resolved,
},
)
.text
@@ -1185,12 +1221,12 @@ mod tests {
Anchor::min(),
&InlayHint {
label: InlayHintLabel::String(" a ".to_string()),
- buffer_id: 0,
position: text::Anchor::default(),
padding_left: true,
padding_right: true,
tooltip: None,
kind: None,
+ resolve_state: ResolveState::Resolved,
},
)
.text
@@ -1542,26 +1578,6 @@ mod tests {
let mut buffer_snapshot = buffer.read(cx).snapshot(cx);
let mut next_inlay_id = 0;
log::info!("buffer text: {:?}", buffer_snapshot.text());
-
- let mut highlights = TreeMap::default();
- let highlight_count = rng.gen_range(0_usize..10);
- let mut highlight_ranges = (0..highlight_count)
- .map(|_| buffer_snapshot.random_byte_range(0, &mut rng))
- .collect::<Vec<_>>();
- highlight_ranges.sort_by_key(|range| (range.start, Reverse(range.end)));
- log::info!("highlighting ranges {:?}", highlight_ranges);
- let highlight_ranges = highlight_ranges
- .into_iter()
- .map(|range| {
- buffer_snapshot.anchor_before(range.start)..buffer_snapshot.anchor_after(range.end)
- })
- .collect::<Vec<_>>();
-
- highlights.insert(
- Some(TypeId::of::<()>()),
- Arc::new((HighlightStyle::default(), highlight_ranges)),
- );
-
let (mut inlay_map, mut inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
for _ in 0..operations {
let mut inlay_edits = Patch::default();
@@ -1624,6 +1640,38 @@ mod tests {
);
}
+ let mut highlights = TextHighlights::default();
+ let highlight_count = rng.gen_range(0_usize..10);
+ let mut highlight_ranges = (0..highlight_count)
+ .map(|_| buffer_snapshot.random_byte_range(0, &mut rng))
+ .collect::<Vec<_>>();
+ highlight_ranges.sort_by_key(|range| (range.start, Reverse(range.end)));
+ log::info!("highlighting ranges {:?}", highlight_ranges);
+ let highlight_ranges = if rng.gen_bool(0.5) {
+ highlight_ranges
+ .into_iter()
+ .map(|range| InlayRange {
+ inlay_position: buffer_snapshot.anchor_before(range.start),
+ highlight_start: inlay_snapshot.to_inlay_offset(range.start),
+ highlight_end: inlay_snapshot.to_inlay_offset(range.end),
+ })
+ .map(DocumentRange::Inlay)
+ .collect::<Vec<_>>()
+ } else {
+ highlight_ranges
+ .into_iter()
+ .map(|range| {
+ buffer_snapshot.anchor_before(range.start)
+ ..buffer_snapshot.anchor_after(range.end)
+ })
+ .map(DocumentRange::Text)
+ .collect::<Vec<_>>()
+ };
+ highlights.insert(
+ Some(TypeId::of::<()>()),
+ Arc::new((HighlightStyle::default(), highlight_ranges)),
+ );
+
for _ in 0..5 {
let mut end = rng.gen_range(0..=inlay_snapshot.len().0);
end = expected_text.clip_offset(end, Bias::Right);
@@ -224,8 +224,8 @@ impl TabSnapshot {
range: Range<TabPoint>,
language_aware: bool,
text_highlights: Option<&'a TextHighlights>,
- hint_highlights: Option<HighlightStyle>,
- suggestion_highlights: Option<HighlightStyle>,
+ hint_highlight_style: Option<HighlightStyle>,
+ suggestion_highlight_style: Option<HighlightStyle>,
) -> TabChunks<'a> {
let (input_start, expanded_char_column, to_next_stop) =
self.to_fold_point(range.start, Bias::Left);
@@ -246,8 +246,8 @@ impl TabSnapshot {
input_start..input_end,
language_aware,
text_highlights,
- hint_highlights,
- suggestion_highlights,
+ hint_highlight_style,
+ suggestion_highlight_style,
),
input_column,
column: expanded_char_column,
@@ -576,8 +576,8 @@ impl WrapSnapshot {
rows: Range<u32>,
language_aware: bool,
text_highlights: Option<&'a TextHighlights>,
- hint_highlights: Option<HighlightStyle>,
- suggestion_highlights: Option<HighlightStyle>,
+ hint_highlight_style: Option<HighlightStyle>,
+ suggestion_highlight_style: Option<HighlightStyle>,
) -> WrapChunks<'a> {
let output_start = WrapPoint::new(rows.start, 0);
let output_end = WrapPoint::new(rows.end, 0);
@@ -595,8 +595,8 @@ impl WrapSnapshot {
input_start..input_end,
language_aware,
text_highlights,
- hint_highlights,
- suggestion_highlights,
+ hint_highlight_style,
+ suggestion_highlight_style,
),
input_chunk: Default::default(),
output_position: output_start,
@@ -23,7 +23,7 @@ pub mod test;
use ::git::diff::DiffHunk;
use aho_corasick::AhoCorasick;
-use anyhow::{anyhow, Result};
+use anyhow::{anyhow, Context, Result};
use blink_manager::BlinkManager;
use client::{ClickhouseEvent, TelemetrySettings};
use clock::{Global, ReplicaId};
@@ -60,21 +60,24 @@ use itertools::Itertools;
pub use language::{char_kind, CharKind};
use language::{
language_settings::{self, all_language_settings, InlayHintSettings},
- AutoindentMode, BracketPair, Buffer, CodeAction, CodeLabel, Completion, CursorShape,
- Diagnostic, DiagnosticSeverity, File, IndentKind, IndentSize, Language, OffsetRangeExt,
- OffsetUtf16, Point, Selection, SelectionGoal, TransactionId,
+ point_from_lsp, AutoindentMode, BracketPair, Buffer, CodeAction, CodeLabel, Completion,
+ CursorShape, Diagnostic, DiagnosticSeverity, File, IndentKind, IndentSize, Language,
+ LanguageServerName, OffsetRangeExt, OffsetUtf16, Point, Selection, SelectionGoal,
+ TransactionId,
};
use link_go_to_definition::{
- hide_link_definition, show_link_definition, LinkDefinitionKind, LinkGoToDefinitionState,
+ hide_link_definition, show_link_definition, DocumentRange, GoToDefinitionLink, InlayRange,
+ LinkGoToDefinitionState,
};
use log::error;
+use lsp::LanguageServerId;
use multi_buffer::ToOffsetUtf16;
pub use multi_buffer::{
Anchor, AnchorRangeExt, ExcerptId, ExcerptRange, MultiBuffer, MultiBufferSnapshot, ToOffset,
ToPoint,
};
use ordered_float::OrderedFloat;
-use project::{FormatTrigger, Location, LocationLink, Project, ProjectPath, ProjectTransaction};
+use project::{FormatTrigger, Location, Project, ProjectPath, ProjectTransaction};
use rand::{seq::SliceRandom, thread_rng};
use scroll::{
autoscroll::Autoscroll, OngoingScroll, ScrollAnchor, ScrollManager, ScrollbarAutoHide,
@@ -108,6 +111,8 @@ const MAX_LINE_LEN: usize = 1024;
const MIN_NAVIGATION_HISTORY_ROW_DELTA: i64 = 10;
const MAX_SELECTION_HISTORY_LEN: usize = 1024;
const COPILOT_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(75);
+pub const CODE_ACTIONS_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(250);
+pub const DOCUMENT_HIGHLIGHTS_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(75);
pub const FORMAT_TIMEOUT: Duration = Duration::from_secs(2);
@@ -535,6 +540,8 @@ type CompletionId = usize;
type GetFieldEditorTheme = dyn Fn(&theme::Theme) -> theme::FieldEditor;
type OverrideTextStyle = dyn Fn(&EditorStyle) -> Option<HighlightStyle>;
+type BackgroundHighlight = (fn(&Theme) -> Color, Vec<DocumentRange>);
+
pub struct Editor {
handle: WeakViewHandle<Self>,
buffer: ModelHandle<MultiBuffer>,
@@ -559,12 +566,12 @@ pub struct Editor {
blink_manager: ModelHandle<BlinkManager>,
show_local_selections: bool,
mode: EditorMode,
+ replica_id_mapping: Option<HashMap<ReplicaId, ReplicaId>>,
show_gutter: bool,
show_wrap_guides: Option<bool>,
placeholder_text: Option<Arc<str>>,
highlighted_rows: Option<Range<u32>>,
- #[allow(clippy::type_complexity)]
- background_highlights: BTreeMap<TypeId, (fn(&Theme) -> Color, Vec<Range<Anchor>>)>,
+ background_highlights: BTreeMap<TypeId, BackgroundHighlight>,
nav_history: Option<ItemNavHistory>,
context_menu: Option<ContextMenu>,
mouse_context_menu: ViewHandle<context_menu::ContextMenu>,
@@ -1246,6 +1253,19 @@ enum InlayHintRefreshReason {
NewLinesShown,
BufferEdited(HashSet<Arc<Language>>),
RefreshRequested,
+ ExcerptsRemoved(Vec<ExcerptId>),
+}
+impl InlayHintRefreshReason {
+ fn description(&self) -> &'static str {
+ match self {
+ Self::Toggle(_) => "toggle",
+ Self::SettingsChange(_) => "settings change",
+ Self::NewLinesShown => "new lines shown",
+ Self::BufferEdited(_) => "buffer edited",
+ Self::RefreshRequested => "refresh requested",
+ Self::ExcerptsRemoved(_) => "excerpts removed",
+ }
+ }
}
impl Editor {
@@ -1253,7 +1273,7 @@ impl Editor {
field_editor_style: Option<Arc<GetFieldEditorTheme>>,
cx: &mut ViewContext<Self>,
) -> Self {
- let buffer = cx.add_model(|cx| Buffer::new(0, String::new(), cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, String::new()));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
Self::new(EditorMode::SingleLine, buffer, None, field_editor_style, cx)
}
@@ -1262,7 +1282,7 @@ impl Editor {
field_editor_style: Option<Arc<GetFieldEditorTheme>>,
cx: &mut ViewContext<Self>,
) -> Self {
- let buffer = cx.add_model(|cx| Buffer::new(0, String::new(), cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, String::new()));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
Self::new(EditorMode::Full, buffer, None, field_editor_style, cx)
}
@@ -1272,7 +1292,7 @@ impl Editor {
field_editor_style: Option<Arc<GetFieldEditorTheme>>,
cx: &mut ViewContext<Self>,
) -> Self {
- let buffer = cx.add_model(|cx| Buffer::new(0, String::new(), cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, String::new()));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
Self::new(
EditorMode::AutoHeight { max_lines },
@@ -1394,6 +1414,7 @@ impl Editor {
blink_manager: blink_manager.clone(),
show_local_selections: true,
mode,
+ replica_id_mapping: None,
show_gutter: mode == EditorMode::Full,
show_wrap_guides: None,
placeholder_text: None,
@@ -1604,6 +1625,19 @@ impl Editor {
self.read_only = read_only;
}
+ pub fn replica_id_map(&self) -> Option<&HashMap<ReplicaId, ReplicaId>> {
+ self.replica_id_mapping.as_ref()
+ }
+
+ pub fn set_replica_id_map(
+ &mut self,
+ mapping: Option<HashMap<ReplicaId, ReplicaId>>,
+ cx: &mut ViewContext<Self>,
+ ) {
+ self.replica_id_mapping = mapping;
+ cx.notify();
+ }
+
fn selections_did_change(
&mut self,
local: bool,
@@ -2722,6 +2756,7 @@ impl Editor {
return;
}
+ let reason_description = reason.description();
let (invalidate_cache, required_languages) = match reason {
InlayHintRefreshReason::Toggle(enabled) => {
self.inlay_hint_cache.enabled = enabled;
@@ -2758,6 +2793,16 @@ impl Editor {
ControlFlow::Continue(()) => (InvalidationStrategy::RefreshRequested, None),
}
}
+ InlayHintRefreshReason::ExcerptsRemoved(excerpts_removed) => {
+ if let Some(InlaySplice {
+ to_remove,
+ to_insert,
+ }) = self.inlay_hint_cache.remove_excerpts(excerpts_removed)
+ {
+ self.splice_inlay_hints(to_remove, to_insert, cx);
+ }
+ return;
+ }
InlayHintRefreshReason::NewLinesShown => (InvalidationStrategy::None, None),
InlayHintRefreshReason::BufferEdited(buffer_languages) => {
(InvalidationStrategy::BufferEdited, Some(buffer_languages))
@@ -2771,6 +2816,7 @@ impl Editor {
to_remove,
to_insert,
}) = self.inlay_hint_cache.spawn_hint_refresh(
+ reason_description,
self.excerpt_visible_offsets(required_languages.as_ref(), cx),
invalidate_cache,
cx,
@@ -3248,7 +3294,7 @@ impl Editor {
}
fn refresh_code_actions(&mut self, cx: &mut ViewContext<Self>) -> Option<()> {
- let project = self.project.as_ref()?;
+ let project = self.project.clone()?;
let buffer = self.buffer.read(cx);
let newest_selection = self.selections.newest_anchor().clone();
let (start_buffer, start) = buffer.text_anchor_for_position(newest_selection.start, cx)?;
@@ -3257,11 +3303,15 @@ impl Editor {
return None;
}
- let actions = project.update(cx, |project, cx| {
- project.code_actions(&start_buffer, start..end, cx)
- });
self.code_actions_task = Some(cx.spawn(|this, mut cx| async move {
- let actions = actions.await;
+ cx.background().timer(CODE_ACTIONS_DEBOUNCE_TIMEOUT).await;
+
+ let actions = project
+ .update(&mut cx, |project, cx| {
+ project.code_actions(&start_buffer, start..end, cx)
+ })
+ .await;
+
this.update(&mut cx, |this, cx| {
this.available_code_actions = actions.log_err().and_then(|actions| {
if actions.is_empty() {
@@ -3282,7 +3332,7 @@ impl Editor {
return None;
}
- let project = self.project.as_ref()?;
+ let project = self.project.clone()?;
let buffer = self.buffer.read(cx);
let newest_selection = self.selections.newest_anchor().clone();
let cursor_position = newest_selection.head();
@@ -3293,12 +3343,19 @@ impl Editor {
return None;
}
- let highlights = project.update(cx, |project, cx| {
- project.document_highlights(&cursor_buffer, cursor_buffer_position, cx)
- });
-
self.document_highlights_task = Some(cx.spawn(|this, mut cx| async move {
- if let Some(highlights) = highlights.await.log_err() {
+ cx.background()
+ .timer(DOCUMENT_HIGHLIGHTS_DEBOUNCE_TIMEOUT)
+ .await;
+
+ let highlights = project
+ .update(&mut cx, |project, cx| {
+ project.document_highlights(&cursor_buffer, cursor_buffer_position, cx)
+ })
+ .await
+ .log_err();
+
+ if let Some(highlights) = highlights {
this.update(&mut cx, |this, cx| {
if this.pending_rename.is_some() {
return;
@@ -6227,7 +6284,9 @@ impl Editor {
) {
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.move_offsets_with(|snapshot, selection| {
- let Some(enclosing_bracket_ranges) = snapshot.enclosing_bracket_ranges(selection.start..selection.end) else {
+ let Some(enclosing_bracket_ranges) =
+ snapshot.enclosing_bracket_ranges(selection.start..selection.end)
+ else {
return;
};
@@ -6239,7 +6298,8 @@ impl Editor {
let close = close.to_inclusive();
let length = close.end() - open.start;
let inside = selection.start >= open.end && selection.end <= *close.start();
- let in_bracket_range = open.to_inclusive().contains(&selection.head()) || close.contains(&selection.head());
+ let in_bracket_range = open.to_inclusive().contains(&selection.head())
+ || close.contains(&selection.head());
// If best is next to a bracket and current isn't, skip
if !in_bracket_range && best_in_bracket_range {
@@ -6254,19 +6314,21 @@ impl Editor {
best_length = length;
best_inside = inside;
best_in_bracket_range = in_bracket_range;
- best_destination = Some(if close.contains(&selection.start) && close.contains(&selection.end) {
- if inside {
- open.end
- } else {
- open.start
- }
- } else {
- if inside {
- *close.start()
+ best_destination = Some(
+ if close.contains(&selection.start) && close.contains(&selection.end) {
+ if inside {
+ open.end
+ } else {
+ open.start
+ }
} else {
- *close.end()
- }
- });
+ if inside {
+ *close.start()
+ } else {
+ *close.end()
+ }
+ },
+ );
}
if let Some(destination) = best_destination {
@@ -6510,7 +6572,9 @@ impl Editor {
split: bool,
cx: &mut ViewContext<Self>,
) {
- let Some(workspace) = self.workspace(cx) else { return };
+ let Some(workspace) = self.workspace(cx) else {
+ return;
+ };
let buffer = self.buffer.read(cx);
let head = self.selections.newest::<usize>(cx).head();
let (buffer, head) = if let Some(text_anchor) = buffer.text_anchor_for_position(head, cx) {
@@ -6528,7 +6592,14 @@ impl Editor {
cx.spawn_labeled("Fetching Definition...", |editor, mut cx| async move {
let definitions = definitions.await?;
editor.update(&mut cx, |editor, cx| {
- editor.navigate_to_definitions(definitions, split, cx);
+ editor.navigate_to_definitions(
+ definitions
+ .into_iter()
+ .map(GoToDefinitionLink::Text)
+ .collect(),
+ split,
+ cx,
+ );
})?;
Ok::<(), anyhow::Error>(())
})
@@ -6537,76 +6608,178 @@ impl Editor {
pub fn navigate_to_definitions(
&mut self,
- mut definitions: Vec<LocationLink>,
+ mut definitions: Vec<GoToDefinitionLink>,
split: bool,
cx: &mut ViewContext<Editor>,
) {
- let Some(workspace) = self.workspace(cx) else { return };
+ let Some(workspace) = self.workspace(cx) else {
+ return;
+ };
let pane = workspace.read(cx).active_pane().clone();
// If there is one definition, just open it directly
if definitions.len() == 1 {
let definition = definitions.pop().unwrap();
- let range = definition
- .target
- .range
- .to_offset(definition.target.buffer.read(cx));
-
- let range = self.range_for_match(&range);
- if Some(&definition.target.buffer) == self.buffer.read(cx).as_singleton().as_ref() {
- self.change_selections(Some(Autoscroll::fit()), cx, |s| {
- s.select_ranges([range]);
- });
- } else {
- cx.window_context().defer(move |cx| {
- let target_editor: ViewHandle<Self> = workspace.update(cx, |workspace, cx| {
- if split {
- workspace.split_project_item(definition.target.buffer.clone(), cx)
+ let target_task = match definition {
+ GoToDefinitionLink::Text(link) => Task::Ready(Some(Ok(Some(link.target)))),
+ GoToDefinitionLink::InlayHint(lsp_location, server_id) => {
+ self.compute_target_location(lsp_location, server_id, cx)
+ }
+ };
+ cx.spawn(|editor, mut cx| async move {
+ let target = target_task.await.context("target resolution task")?;
+ if let Some(target) = target {
+ editor.update(&mut cx, |editor, cx| {
+ let range = target.range.to_offset(target.buffer.read(cx));
+ let range = editor.range_for_match(&range);
+ if Some(&target.buffer) == editor.buffer.read(cx).as_singleton().as_ref() {
+ editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
+ s.select_ranges([range]);
+ });
} else {
- workspace.open_project_item(definition.target.buffer.clone(), cx)
+ cx.window_context().defer(move |cx| {
+ let target_editor: ViewHandle<Self> =
+ workspace.update(cx, |workspace, cx| {
+ if split {
+ workspace.split_project_item(target.buffer.clone(), cx)
+ } else {
+ workspace.open_project_item(target.buffer.clone(), cx)
+ }
+ });
+ target_editor.update(cx, |target_editor, cx| {
+ // When selecting a definition in a different buffer, disable the nav history
+ // to avoid creating a history entry at the previous cursor location.
+ pane.update(cx, |pane, _| pane.disable_history());
+ target_editor.change_selections(
+ Some(Autoscroll::fit()),
+ cx,
+ |s| {
+ s.select_ranges([range]);
+ },
+ );
+ pane.update(cx, |pane, _| pane.enable_history());
+ });
+ });
}
- });
- target_editor.update(cx, |target_editor, cx| {
- // When selecting a definition in a different buffer, disable the nav history
- // to avoid creating a history entry at the previous cursor location.
- pane.update(cx, |pane, _| pane.disable_history());
- target_editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
- s.select_ranges([range]);
- });
- pane.update(cx, |pane, _| pane.enable_history());
- });
- });
- }
+ })
+ } else {
+ Ok(())
+ }
+ })
+ .detach_and_log_err(cx);
} else if !definitions.is_empty() {
let replica_id = self.replica_id(cx);
- cx.window_context().defer(move |cx| {
- let title = definitions
- .iter()
- .find(|definition| definition.origin.is_some())
- .and_then(|definition| {
- definition.origin.as_ref().map(|origin| {
- let buffer = origin.buffer.read(cx);
- format!(
- "Definitions for {}",
- buffer
- .text_for_range(origin.range.clone())
- .collect::<String>()
- )
- })
+ cx.spawn(|editor, mut cx| async move {
+ let (title, location_tasks) = editor
+ .update(&mut cx, |editor, cx| {
+ let title = definitions
+ .iter()
+ .find_map(|definition| match definition {
+ GoToDefinitionLink::Text(link) => {
+ link.origin.as_ref().map(|origin| {
+ let buffer = origin.buffer.read(cx);
+ format!(
+ "Definitions for {}",
+ buffer
+ .text_for_range(origin.range.clone())
+ .collect::<String>()
+ )
+ })
+ }
+ GoToDefinitionLink::InlayHint(_, _) => None,
+ })
+ .unwrap_or("Definitions".to_string());
+ let location_tasks = definitions
+ .into_iter()
+ .map(|definition| match definition {
+ GoToDefinitionLink::Text(link) => {
+ Task::Ready(Some(Ok(Some(link.target))))
+ }
+ GoToDefinitionLink::InlayHint(lsp_location, server_id) => {
+ editor.compute_target_location(lsp_location, server_id, cx)
+ }
+ })
+ .collect::<Vec<_>>();
+ (title, location_tasks)
})
- .unwrap_or("Definitions".to_owned());
- let locations = definitions
+ .context("location tasks preparation")?;
+
+ let locations = futures::future::join_all(location_tasks)
+ .await
.into_iter()
- .map(|definition| definition.target)
- .collect();
- workspace.update(cx, |workspace, cx| {
+ .filter_map(|location| location.transpose())
+ .collect::<Result<_>>()
+ .context("location tasks")?;
+ workspace.update(&mut cx, |workspace, cx| {
Self::open_locations_in_multibuffer(
workspace, locations, replica_id, title, split, cx,
)
});
- });
+
+ anyhow::Ok(())
+ })
+ .detach_and_log_err(cx);
}
}
+ fn compute_target_location(
+ &self,
+ lsp_location: lsp::Location,
+ server_id: LanguageServerId,
+ cx: &mut ViewContext<Editor>,
+ ) -> Task<anyhow::Result<Option<Location>>> {
+ let Some(project) = self.project.clone() else {
+ return Task::Ready(Some(Ok(None)));
+ };
+
+ cx.spawn(move |editor, mut cx| async move {
+ let location_task = editor.update(&mut cx, |editor, cx| {
+ project.update(cx, |project, cx| {
+ let language_server_name =
+ editor.buffer.read(cx).as_singleton().and_then(|buffer| {
+ project
+ .language_server_for_buffer(buffer.read(cx), server_id, cx)
+ .map(|(_, lsp_adapter)| {
+ LanguageServerName(Arc::from(lsp_adapter.name()))
+ })
+ });
+ language_server_name.map(|language_server_name| {
+ project.open_local_buffer_via_lsp(
+ lsp_location.uri.clone(),
+ server_id,
+ language_server_name,
+ cx,
+ )
+ })
+ })
+ })?;
+ let location = match location_task {
+ Some(task) => Some({
+ let target_buffer_handle = task.await.context("open local buffer")?;
+ let range = {
+ target_buffer_handle.update(&mut cx, |target_buffer, _| {
+ let target_start = target_buffer.clip_point_utf16(
+ point_from_lsp(lsp_location.range.start),
+ Bias::Left,
+ );
+ let target_end = target_buffer.clip_point_utf16(
+ point_from_lsp(lsp_location.range.end),
+ Bias::Left,
+ );
+ target_buffer.anchor_after(target_start)
+ ..target_buffer.anchor_before(target_end)
+ })
+ };
+ Location {
+ buffer: target_buffer_handle,
+ range,
+ }
+ }),
+ None => None,
+ };
+ Ok(location)
+ })
+ }
+
pub fn find_all_references(
workspace: &mut Workspace,
_: &FindAllReferences,
@@ -6742,10 +6915,18 @@ impl Editor {
let rename_range = if let Some(range) = prepare_rename.await? {
Some(range)
} else {
- this.read_with(&cx, |this, cx| {
+ this.update(&mut cx, |this, cx| {
let buffer = this.buffer.read(cx).snapshot(cx);
+ let display_snapshot = this
+ .display_map
+ .update(cx, |display_map, cx| display_map.snapshot(cx));
let mut buffer_highlights = this
- .document_highlights_for_position(selection.head(), &buffer)
+ .document_highlights_for_position(
+ selection.head(),
+ &buffer,
+ &display_snapshot,
+ )
+ .filter_map(|highlight| highlight.as_text_range())
.filter(|highlight| {
highlight.start.excerpt_id() == selection.head().excerpt_id()
&& highlight.end.excerpt_id() == selection.head().excerpt_id()
@@ -6800,11 +6981,15 @@ impl Editor {
let ranges = this
.clear_background_highlights::<DocumentHighlightWrite>(cx)
.into_iter()
- .flat_map(|(_, ranges)| ranges)
+ .flat_map(|(_, ranges)| {
+ ranges.into_iter().filter_map(|range| range.as_text_range())
+ })
.chain(
this.clear_background_highlights::<DocumentHighlightRead>(cx)
.into_iter()
- .flat_map(|(_, ranges)| ranges),
+ .flat_map(|(_, ranges)| {
+ ranges.into_iter().filter_map(|range| range.as_text_range())
+ }),
)
.collect();
@@ -7161,7 +7346,7 @@ impl Editor {
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
- let selections = self.selections.all::<Point>(cx);
+ let selections = self.selections.all_adjusted(cx);
for selection in selections {
let range = selection.range().sorted();
let buffer_start_row = range.start.row;
@@ -7237,7 +7422,17 @@ impl Editor {
pub fn fold_selected_ranges(&mut self, _: &FoldSelectedRanges, cx: &mut ViewContext<Self>) {
let selections = self.selections.all::<Point>(cx);
- let ranges = selections.into_iter().map(|s| s.start..s.end);
+ let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
+ let line_mode = self.selections.line_mode;
+ let ranges = selections.into_iter().map(|s| {
+ if line_mode {
+ let start = Point::new(s.start.row, 0);
+ let end = Point::new(s.end.row, display_map.buffer_snapshot.line_len(s.end.row));
+ start..end
+ } else {
+ s.start..s.end
+ }
+ });
self.fold_ranges(ranges, true, cx);
}
@@ -7472,16 +7667,36 @@ impl Editor {
color_fetcher: fn(&Theme) -> Color,
cx: &mut ViewContext<Self>,
) {
- self.background_highlights
- .insert(TypeId::of::<T>(), (color_fetcher, ranges));
+ self.background_highlights.insert(
+ TypeId::of::<T>(),
+ (
+ color_fetcher,
+ ranges.into_iter().map(DocumentRange::Text).collect(),
+ ),
+ );
+ cx.notify();
+ }
+
+ pub fn highlight_inlay_background<T: 'static>(
+ &mut self,
+ ranges: Vec<InlayRange>,
+ color_fetcher: fn(&Theme) -> Color,
+ cx: &mut ViewContext<Self>,
+ ) {
+ self.background_highlights.insert(
+ TypeId::of::<T>(),
+ (
+ color_fetcher,
+ ranges.into_iter().map(DocumentRange::Inlay).collect(),
+ ),
+ );
cx.notify();
}
- #[allow(clippy::type_complexity)]
pub fn clear_background_highlights<T: 'static>(
&mut self,
cx: &mut ViewContext<Self>,
- ) -> Option<(fn(&Theme) -> Color, Vec<Range<Anchor>>)> {
+ ) -> Option<BackgroundHighlight> {
let highlights = self.background_highlights.remove(&TypeId::of::<T>());
if highlights.is_some() {
cx.notify();
@@ -7506,7 +7721,8 @@ impl Editor {
&'a self,
position: Anchor,
buffer: &'a MultiBufferSnapshot,
- ) -> impl 'a + Iterator<Item = &Range<Anchor>> {
+ display_snapshot: &'a DisplaySnapshot,
+ ) -> impl 'a + Iterator<Item = &DocumentRange> {
let read_highlights = self
.background_highlights
.get(&TypeId::of::<DocumentHighlightRead>())
@@ -7515,14 +7731,16 @@ impl Editor {
.background_highlights
.get(&TypeId::of::<DocumentHighlightWrite>())
.map(|h| &h.1);
- let left_position = position.bias_left(buffer);
- let right_position = position.bias_right(buffer);
+ let left_position = display_snapshot.anchor_to_inlay_offset(position.bias_left(buffer));
+ let right_position = display_snapshot.anchor_to_inlay_offset(position.bias_right(buffer));
read_highlights
.into_iter()
.chain(write_highlights)
.flat_map(move |ranges| {
let start_ix = match ranges.binary_search_by(|probe| {
- let cmp = probe.end.cmp(&left_position, buffer);
+ let cmp = document_to_inlay_range(probe, display_snapshot)
+ .end
+ .cmp(&left_position);
if cmp.is_ge() {
Ordering::Greater
} else {
@@ -7533,9 +7751,12 @@ impl Editor {
};
let right_position = right_position.clone();
- ranges[start_ix..]
- .iter()
- .take_while(move |range| range.start.cmp(&right_position, buffer).is_le())
+ ranges[start_ix..].iter().take_while(move |range| {
+ document_to_inlay_range(range, display_snapshot)
+ .start
+ .cmp(&right_position)
+ .is_le()
+ })
})
}
@@ -7545,12 +7766,15 @@ impl Editor {
display_snapshot: &DisplaySnapshot,
theme: &Theme,
) -> Vec<(Range<DisplayPoint>, Color)> {
+ let search_range = display_snapshot.anchor_to_inlay_offset(search_range.start)
+ ..display_snapshot.anchor_to_inlay_offset(search_range.end);
let mut results = Vec::new();
- let buffer = &display_snapshot.buffer_snapshot;
for (color_fetcher, ranges) in self.background_highlights.values() {
let color = color_fetcher(theme);
let start_ix = match ranges.binary_search_by(|probe| {
- let cmp = probe.end.cmp(&search_range.start, buffer);
+ let cmp = document_to_inlay_range(probe, display_snapshot)
+ .end
+ .cmp(&search_range.start);
if cmp.is_gt() {
Ordering::Greater
} else {
@@ -7560,61 +7784,16 @@ impl Editor {
Ok(i) | Err(i) => i,
};
for range in &ranges[start_ix..] {
- if range.start.cmp(&search_range.end, buffer).is_ge() {
+ let range = document_to_inlay_range(range, display_snapshot);
+ if range.start.cmp(&search_range.end).is_ge() {
break;
}
- let start = range
- .start
- .to_point(buffer)
- .to_display_point(display_snapshot);
- let end = range
- .end
- .to_point(buffer)
- .to_display_point(display_snapshot);
- results.push((start..end, color))
- }
- }
- results
- }
- pub fn background_highlights_in_range_for<T: 'static>(
- &self,
- search_range: Range<Anchor>,
- display_snapshot: &DisplaySnapshot,
- theme: &Theme,
- ) -> Vec<(Range<DisplayPoint>, Color)> {
- let mut results = Vec::new();
- let buffer = &display_snapshot.buffer_snapshot;
- let Some((color_fetcher, ranges)) = self.background_highlights
- .get(&TypeId::of::<T>()) else {
- return vec![];
- };
- let color = color_fetcher(theme);
- let start_ix = match ranges.binary_search_by(|probe| {
- let cmp = probe.end.cmp(&search_range.start, buffer);
- if cmp.is_gt() {
- Ordering::Greater
- } else {
- Ordering::Less
- }
- }) {
- Ok(i) | Err(i) => i,
- };
- for range in &ranges[start_ix..] {
- if range.start.cmp(&search_range.end, buffer).is_ge() {
- break;
+ let start = display_snapshot.inlay_offset_to_display_point(range.start, Bias::Left);
+ let end = display_snapshot.inlay_offset_to_display_point(range.end, Bias::Right);
+ results.push((start..end, color))
}
- let start = range
- .start
- .to_point(buffer)
- .to_display_point(display_snapshot);
- let end = range
- .end
- .to_point(buffer)
- .to_display_point(display_snapshot);
- results.push((start..end, color))
}
-
results
}
@@ -7624,15 +7803,17 @@ impl Editor {
display_snapshot: &DisplaySnapshot,
count: usize,
) -> Vec<RangeInclusive<DisplayPoint>> {
+ let search_range = display_snapshot.anchor_to_inlay_offset(search_range.start)
+ ..display_snapshot.anchor_to_inlay_offset(search_range.end);
let mut results = Vec::new();
- let buffer = &display_snapshot.buffer_snapshot;
- let Some((_, ranges)) = self.background_highlights
- .get(&TypeId::of::<T>()) else {
- return vec![];
- };
+ let Some((_, ranges)) = self.background_highlights.get(&TypeId::of::<T>()) else {
+ return vec![];
+ };
let start_ix = match ranges.binary_search_by(|probe| {
- let cmp = probe.end.cmp(&search_range.start, buffer);
+ let cmp = document_to_inlay_range(probe, display_snapshot)
+ .end
+ .cmp(&search_range.start);
if cmp.is_gt() {
Ordering::Greater
} else {
@@ -7652,19 +7833,24 @@ impl Editor {
let mut start_row: Option<Point> = None;
let mut end_row: Option<Point> = None;
if ranges.len() > count {
- return vec![];
+ return Vec::new();
}
for range in &ranges[start_ix..] {
- if range.start.cmp(&search_range.end, buffer).is_ge() {
+ let range = document_to_inlay_range(range, display_snapshot);
+ if range.start.cmp(&search_range.end).is_ge() {
break;
}
- let end = range.end.to_point(buffer);
+ let end = display_snapshot
+ .inlay_offset_to_display_point(range.end, Bias::Right)
+ .to_point(display_snapshot);
if let Some(current_row) = &end_row {
if end.row == current_row.row {
continue;
}
}
- let start = range.start.to_point(buffer);
+ let start = display_snapshot
+ .inlay_offset_to_display_point(range.start, Bias::Left)
+ .to_point(display_snapshot);
if start_row.is_none() {
assert_eq!(end_row, None);
@@ -7702,24 +7888,32 @@ impl Editor {
cx.notify();
}
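+ /// Highlights the given inlay ranges with `style` via the display map, keyed by the marker type `T`.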
+ pub fn highlight_inlays<T: 'static>(
+ &mut self,
+ ranges: Vec<InlayRange>,
+ style: HighlightStyle,
+ cx: &mut ViewContext<Self>,
+ ) {
+ self.display_map.update(cx, |map, _| {
+ map.highlight_inlays(TypeId::of::<T>(), ranges, style)
+ });
+ cx.notify();
+ }
+
pub fn text_highlights<'a, T: 'static>(
&'a self,
cx: &'a AppContext,
- ) -> Option<(HighlightStyle, &'a [Range<Anchor>])> {
+ ) -> Option<(HighlightStyle, &'a [DocumentRange])> {
self.display_map.read(cx).text_highlights(TypeId::of::<T>())
}
- pub fn clear_text_highlights<T: 'static>(
- &mut self,
- cx: &mut ViewContext<Self>,
- ) -> Option<Arc<(HighlightStyle, Vec<Range<Anchor>>)>> {
- let highlights = self
+ pub fn clear_text_highlights<T: 'static>(&mut self, cx: &mut ViewContext<Self>) {
+ let text_highlights = self
.display_map
.update(cx, |map, _| map.clear_text_highlights(TypeId::of::<T>()));
- if highlights.is_some() {
+ if text_highlights.is_some() {
cx.notify();
}
- highlights
}
pub fn show_local_cursors(&self, cx: &AppContext) -> bool {
@@ -7737,7 +7931,9 @@ impl Editor {
cx: &mut ViewContext<Self>,
) {
match event {
- multi_buffer::Event::Edited => {
+ multi_buffer::Event::Edited {
+ singleton_buffer_edited,
+ } => {
self.refresh_active_diagnostics(cx);
self.refresh_code_actions(cx);
if self.has_active_copilot_suggestion(cx) {
@@ -7745,30 +7941,32 @@ impl Editor {
}
cx.emit(Event::BufferEdited);
- if let Some(project) = &self.project {
- let project = project.read(cx);
- let languages_affected = multibuffer
- .read(cx)
- .all_buffers()
- .into_iter()
- .filter_map(|buffer| {
- let buffer = buffer.read(cx);
- let language = buffer.language()?;
- if project.is_local()
- && project.language_servers_for_buffer(buffer, cx).count() == 0
- {
- None
- } else {
- Some(language)
- }
- })
- .cloned()
- .collect::<HashSet<_>>();
- if !languages_affected.is_empty() {
- self.refresh_inlay_hints(
- InlayHintRefreshReason::BufferEdited(languages_affected),
- cx,
- );
+ if *singleton_buffer_edited {
+ if let Some(project) = &self.project {
+ let project = project.read(cx);
+ let languages_affected = multibuffer
+ .read(cx)
+ .all_buffers()
+ .into_iter()
+ .filter_map(|buffer| {
+ let buffer = buffer.read(cx);
+ let language = buffer.language()?;
+ if project.is_local()
+ && project.language_servers_for_buffer(buffer, cx).count() == 0
+ {
+ None
+ } else {
+ Some(language)
+ }
+ })
+ .cloned()
+ .collect::<HashSet<_>>();
+ if !languages_affected.is_empty() {
+ self.refresh_inlay_hints(
+ InlayHintRefreshReason::BufferEdited(languages_affected),
+ cx,
+ );
+ }
}
}
}
@@ -7776,12 +7974,16 @@ impl Editor {
buffer,
predecessor,
excerpts,
- } => cx.emit(Event::ExcerptsAdded {
- buffer: buffer.clone(),
- predecessor: *predecessor,
- excerpts: excerpts.clone(),
- }),
+ } => {
+ cx.emit(Event::ExcerptsAdded {
+ buffer: buffer.clone(),
+ predecessor: *predecessor,
+ excerpts: excerpts.clone(),
+ });
+ self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx);
+ }
multi_buffer::Event::ExcerptsRemoved { ids } => {
+ self.refresh_inlay_hints(InlayHintRefreshReason::ExcerptsRemoved(ids.clone()), cx);
cx.emit(Event::ExcerptsRemoved { ids: ids.clone() })
}
multi_buffer::Event::Reparsed => cx.emit(Event::Reparsed),
@@ -7926,6 +8128,7 @@ impl Editor {
Some(
ranges
.iter()
+ .filter_map(|range| range.as_text_range())
.map(move |range| {
range.start.to_offset_utf16(&snapshot)..range.end.to_offset_utf16(&snapshot)
})
@@ -7964,9 +8167,7 @@ impl Editor {
suggestion_accepted: bool,
cx: &AppContext,
) {
- let Some(project) = &self.project else {
- return
- };
+ let Some(project) = &self.project else { return };
// If None, we are either getting suggestions in a new, unsaved file, or in a file without an extension
let file_extension = self
@@ -7995,9 +8196,7 @@ impl Editor {
file_extension: Option<String>,
cx: &AppContext,
) {
- let Some(project) = &self.project else {
- return
- };
+ let Some(project) = &self.project else { return };
// If None, we are in a file without an extension
let file = self
@@ -9,6 +9,7 @@ pub struct EditorSettings {
pub show_completions_on_input: bool,
pub use_on_type_format: bool,
pub scrollbar: Scrollbar,
+ pub relative_line_numbers: bool,
}
#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
@@ -34,6 +35,7 @@ pub struct EditorSettingsContent {
pub show_completions_on_input: Option<bool>,
pub use_on_type_format: Option<bool>,
pub scrollbar: Option<ScrollbarContent>,
+ pub relative_line_numbers: Option<bool>,
}
#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
@@ -42,7 +42,7 @@ fn test_edit_events(cx: &mut TestAppContext) {
init_test(cx, |_| {});
let buffer = cx.add_model(|cx| {
- let mut buffer = language::Buffer::new(0, "123456", cx);
+ let mut buffer = language::Buffer::new(0, cx.model_id() as u64, "123456");
buffer.set_group_interval(Duration::from_secs(1));
buffer
});
@@ -174,7 +174,7 @@ fn test_undo_redo_with_selection_restoration(cx: &mut TestAppContext) {
init_test(cx, |_| {});
let mut now = Instant::now();
- let buffer = cx.add_model(|cx| language::Buffer::new(0, "123456", cx));
+ let buffer = cx.add_model(|cx| language::Buffer::new(0, cx.model_id() as u64, "123456"));
let group_interval = buffer.read_with(cx, |buffer, _| buffer.transaction_group_interval());
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let editor = cx
@@ -247,7 +247,7 @@ fn test_ime_composition(cx: &mut TestAppContext) {
init_test(cx, |_| {});
let buffer = cx.add_model(|cx| {
- let mut buffer = language::Buffer::new(0, "abcde", cx);
+ let mut buffer = language::Buffer::new(0, cx.model_id() as u64, "abcde");
// Ensure automatic grouping doesn't occur.
buffer.set_group_interval(Duration::ZERO);
buffer
@@ -1434,6 +1434,74 @@ async fn test_scroll_page_up_page_down(cx: &mut gpui::TestAppContext) {
});
}
+#[gpui::test]
+async fn test_autoscroll(cx: &mut gpui::TestAppContext) {
+ init_test(cx, |_| {});
+ let mut cx = EditorTestContext::new(cx).await;
+
+ let line_height = cx.update_editor(|editor, cx| {
+ editor.set_vertical_scroll_margin(2, cx);
+ editor.style(cx).text.line_height(cx.font_cache())
+ });
+
+ let window = cx.window;
+ window.simulate_resize(vec2f(1000., 6.0 * line_height), &mut cx);
+
+ cx.set_state(
+ &r#"ˇone
+ two
+ three
+ four
+ five
+ six
+ seven
+ eight
+ nine
+ ten
+ "#,
+ );
+ cx.update_editor(|editor, cx| {
+ assert_eq!(editor.snapshot(cx).scroll_position(), vec2f(0., 0.0));
+ });
+
+ // Add a cursor below the visible area. Since both cursors cannot fit
+ // on screen, the editor autoscrolls to reveal the newest cursor, and
+ // allows the vertical scroll margin below that cursor.
+ cx.update_editor(|editor, cx| {
+ editor.change_selections(Some(Autoscroll::fit()), cx, |selections| {
+ selections.select_ranges([
+ Point::new(0, 0)..Point::new(0, 0),
+ Point::new(6, 0)..Point::new(6, 0),
+ ]);
+ })
+ });
+ cx.update_editor(|editor, cx| {
+ assert_eq!(editor.snapshot(cx).scroll_position(), vec2f(0., 3.0));
+ });
+
+ // Move down. The editor scrolls down to track the newest cursor.
+ cx.update_editor(|editor, cx| {
+ editor.move_down(&Default::default(), cx);
+ });
+ cx.update_editor(|editor, cx| {
+ assert_eq!(editor.snapshot(cx).scroll_position(), vec2f(0., 4.0));
+ });
+
+ // Add a cursor above the visible area. Since both cursors fit on screen,
+ // the editor scrolls to show both.
+ cx.update_editor(|editor, cx| {
+ editor.change_selections(Some(Autoscroll::fit()), cx, |selections| {
+ selections.select_ranges([
+ Point::new(1, 0)..Point::new(1, 0),
+ Point::new(6, 0)..Point::new(6, 0),
+ ]);
+ })
+ });
+ cx.update_editor(|editor, cx| {
+ assert_eq!(editor.snapshot(cx).scroll_position(), vec2f(0., 1.0));
+ });
+}
+
#[gpui::test]
async fn test_move_page_up_page_down(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
@@ -2213,10 +2281,12 @@ fn test_indent_outdent_with_excerpts(cx: &mut TestAppContext) {
None,
));
- let toml_buffer =
- cx.add_model(|cx| Buffer::new(0, "a = 1\nb = 2\n", cx).with_language(toml_language, cx));
+ let toml_buffer = cx.add_model(|cx| {
+ Buffer::new(0, cx.model_id() as u64, "a = 1\nb = 2\n").with_language(toml_language, cx)
+ });
let rust_buffer = cx.add_model(|cx| {
- Buffer::new(0, "const c: usize = 3;\n", cx).with_language(rust_language, cx)
+ Buffer::new(0, cx.model_id() as u64, "const c: usize = 3;\n")
+ .with_language(rust_language, cx)
});
let multibuffer = cx.add_model(|cx| {
let mut multibuffer = MultiBuffer::new(0);
@@ -3686,7 +3756,8 @@ async fn test_select_larger_smaller_syntax_node(cx: &mut gpui::TestAppContext) {
"#
.unindent();
- let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
+ let buffer =
+ cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let view = cx.add_window(|cx| build_editor(buffer, cx)).root(cx);
view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
@@ -3849,7 +3920,8 @@ async fn test_autoindent_selections(cx: &mut gpui::TestAppContext) {
let text = "fn a() {}";
- let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
+ let buffer =
+ cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let editor = cx.add_window(|cx| build_editor(buffer, cx)).root(cx);
editor
@@ -4412,7 +4484,8 @@ async fn test_surround_with_pair(cx: &mut gpui::TestAppContext) {
"#
.unindent();
- let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
+ let buffer =
+ cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let view = cx.add_window(|cx| build_editor(buffer, cx)).root(cx);
view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
@@ -4560,7 +4633,8 @@ async fn test_delete_autoclose_pair(cx: &mut gpui::TestAppContext) {
"#
.unindent();
- let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
+ let buffer =
+ cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let editor = cx.add_window(|cx| build_editor(buffer, cx)).root(cx);
editor
@@ -5766,7 +5840,7 @@ async fn test_toggle_block_comment(cx: &mut gpui::TestAppContext) {
fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) {
init_test(cx, |_| {});
- let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(3, 4, 'a'), cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(3, 4, 'a')));
let multibuffer = cx.add_model(|cx| {
let mut multibuffer = MultiBuffer::new(0);
multibuffer.push_excerpts(
@@ -5850,7 +5924,7 @@ fn test_editing_overlapping_excerpts(cx: &mut TestAppContext) {
primary: None,
}
});
- let buffer = cx.add_model(|cx| Buffer::new(0, initial_text, cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, initial_text));
let multibuffer = cx.add_model(|cx| {
let mut multibuffer = MultiBuffer::new(0);
multibuffer.push_excerpts(buffer, excerpt_ranges, cx);
@@ -5908,7 +5982,7 @@ fn test_editing_overlapping_excerpts(cx: &mut TestAppContext) {
fn test_refresh_selections(cx: &mut TestAppContext) {
init_test(cx, |_| {});
- let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(3, 4, 'a'), cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(3, 4, 'a')));
let mut excerpt1_id = None;
let multibuffer = cx.add_model(|cx| {
let mut multibuffer = MultiBuffer::new(0);
@@ -5995,7 +6069,7 @@ fn test_refresh_selections(cx: &mut TestAppContext) {
fn test_refresh_selections_while_selecting_with_mouse(cx: &mut TestAppContext) {
init_test(cx, |_| {});
- let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(3, 4, 'a'), cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(3, 4, 'a')));
let mut excerpt1_id = None;
let multibuffer = cx.add_model(|cx| {
let mut multibuffer = MultiBuffer::new(0);
@@ -6092,7 +6166,8 @@ async fn test_extra_newline_insertion(cx: &mut gpui::TestAppContext) {
"{{} }\n", //
);
- let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
+ let buffer =
+ cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let view = cx.add_window(|cx| build_editor(buffer, cx)).root(cx);
view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
@@ -6384,7 +6459,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut gpui::TestAppContext) {
.update(|cx| {
Editor::from_state_proto(
pane.clone(),
- project.clone(),
+ workspace.clone(),
ViewId {
creator: Default::default(),
id: 0,
@@ -6479,7 +6554,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut gpui::TestAppContext) {
.update(|cx| {
Editor::from_state_proto(
pane.clone(),
- project.clone(),
+ workspace.clone(),
ViewId {
creator: Default::default(),
id: 0,
@@ -7092,8 +7167,8 @@ async fn test_copilot_multibuffer(
let (copilot, copilot_lsp) = Copilot::fake(cx);
cx.update(|cx| cx.set_global(copilot));
- let buffer_1 = cx.add_model(|cx| Buffer::new(0, "a = 1\nb = 2\n", cx));
- let buffer_2 = cx.add_model(|cx| Buffer::new(0, "c = 3\nd = 4\n", cx));
+ let buffer_1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "a = 1\nb = 2\n"));
+ let buffer_2 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "c = 3\nd = 4\n"));
let multibuffer = cx.add_model(|cx| {
let mut multibuffer = MultiBuffer::new(0);
multibuffer.push_excerpts(
@@ -13,6 +13,7 @@ use crate::{
},
link_go_to_definition::{
go_to_fetched_definition, go_to_fetched_type_definition, update_go_to_definition_link,
+ update_inlay_link_and_hover_points, GoToDefinitionTrigger,
},
mouse_context_menu, EditorSettings, EditorStyle, GutterHover, UnfoldAt,
};
@@ -62,6 +63,7 @@ struct SelectionLayout {
head: DisplayPoint,
cursor_shape: CursorShape,
is_newest: bool,
+ is_local: bool,
range: Range<DisplayPoint>,
active_rows: Range<u32>,
}
@@ -73,6 +75,7 @@ impl SelectionLayout {
cursor_shape: CursorShape,
map: &DisplaySnapshot,
is_newest: bool,
+ is_local: bool,
) -> Self {
let point_selection = selection.map(|p| p.to_point(&map.buffer_snapshot));
let display_selection = point_selection.map(|p| p.to_display_point(map));
@@ -109,6 +112,7 @@ impl SelectionLayout {
head,
cursor_shape,
is_newest,
+ is_local,
range,
active_rows,
}
@@ -284,13 +288,13 @@ impl EditorElement {
return false;
}
- let (position, target_position) = position_map.point_for_position(text_bounds, position);
-
+ let point_for_position = position_map.point_for_position(text_bounds, position);
+ let position = point_for_position.previous_valid;
if shift && alt {
editor.select(
SelectPhase::BeginColumnar {
position,
- goal_column: target_position.column(),
+ goal_column: point_for_position.exact_unclipped.column(),
},
cx,
);
@@ -326,9 +330,13 @@ impl EditorElement {
if !text_bounds.contains_point(position) {
return false;
}
-
- let (point, _) = position_map.point_for_position(text_bounds, position);
- mouse_context_menu::deploy_context_menu(editor, position, point, cx);
+ let point_for_position = position_map.point_for_position(text_bounds, position);
+ mouse_context_menu::deploy_context_menu(
+ editor,
+ position,
+ point_for_position.previous_valid,
+ cx,
+ );
true
}
@@ -350,17 +358,15 @@ impl EditorElement {
}
if !pending_nonempty_selections && cmd && text_bounds.contains_point(position) {
- let (point, target_point) = position_map.point_for_position(text_bounds, position);
-
- if point == target_point {
- if shift {
- go_to_fetched_type_definition(editor, point, alt, cx);
- } else {
- go_to_fetched_definition(editor, point, alt, cx);
- }
-
- return true;
+ let point = position_map.point_for_position(text_bounds, position);
+ let could_be_inlay = point.as_valid().is_none();
+ if shift || could_be_inlay {
+ go_to_fetched_type_definition(editor, point, alt, cx);
+ } else {
+ go_to_fetched_definition(editor, point, alt, cx);
}
+
+ return true;
}
end_selection
@@ -380,17 +386,20 @@ impl EditorElement {
// This will be handled more correctly once https://github.com/zed-industries/zed/issues/1218 is completed
// Don't trigger hover popover if mouse is hovering over context menu
let point = if text_bounds.contains_point(position) {
- let (point, target_point) = position_map.point_for_position(text_bounds, position);
- if point == target_point {
- Some(point)
- } else {
- None
- }
+ position_map
+ .point_for_position(text_bounds, position)
+ .as_valid()
} else {
None
};
- update_go_to_definition_link(editor, point, cmd, shift, cx);
+ update_go_to_definition_link(
+ editor,
+ point.map(GoToDefinitionTrigger::Text),
+ cmd,
+ shift,
+ cx,
+ );
if editor.has_pending_selection() {
let mut scroll_delta = Vector2F::zero();
@@ -419,13 +428,12 @@ impl EditorElement {
))
}
- let (position, target_position) =
- position_map.point_for_position(text_bounds, position);
+ let point_for_position = position_map.point_for_position(text_bounds, position);
editor.select(
SelectPhase::Update {
- position,
- goal_column: target_position.column(),
+ position: point_for_position.previous_valid,
+ goal_column: point_for_position.exact_unclipped.column(),
scroll_position: (position_map.snapshot.scroll_position() + scroll_delta)
.clamp(Vector2F::zero(), position_map.scroll_max),
},
@@ -452,10 +460,34 @@ impl EditorElement {
) -> bool {
// This will be handled more correctly once https://github.com/zed-industries/zed/issues/1218 is completed
// Don't trigger hover popover if mouse is hovering over context menu
- let point = position_to_display_point(position, text_bounds, position_map);
-
- update_go_to_definition_link(editor, point, cmd, shift, cx);
- hover_at(editor, point, cx);
+ if text_bounds.contains_point(position) {
+ let point_for_position = position_map.point_for_position(text_bounds, position);
+ match point_for_position.as_valid() {
+ Some(point) => {
+ update_go_to_definition_link(
+ editor,
+ Some(GoToDefinitionTrigger::Text(point)),
+ cmd,
+ shift,
+ cx,
+ );
+ hover_at(editor, Some(point), cx);
+ }
+ None => {
+ update_inlay_link_and_hover_points(
+ &position_map.snapshot,
+ point_for_position,
+ editor,
+ cmd,
+ shift,
+ cx,
+ );
+ }
+ }
+ } else {
+ update_go_to_definition_link(editor, None, cmd, shift, cx);
+ hover_at(editor, None, cx);
+ }
true
}
@@ -763,7 +795,6 @@ impl EditorElement {
cx: &mut PaintContext<Editor>,
) {
let style = &self.style;
- let local_replica_id = editor.replica_id(cx);
let scroll_position = layout.position_map.snapshot.scroll_position();
let start_row = layout.visible_display_row_range.start;
let scroll_top = scroll_position.y() * layout.position_map.line_height;
@@ -852,15 +883,13 @@ impl EditorElement {
for (replica_id, selections) in &layout.selections {
let replica_id = *replica_id;
- let selection_style = style.replica_selection_style(replica_id);
+ let selection_style = if let Some(replica_id) = replica_id {
+ style.replica_selection_style(replica_id)
+ } else {
+ &style.absent_selection
+ };
for selection in selections {
- if !selection.range.is_empty()
- && (replica_id == local_replica_id
- || Some(replica_id) == editor.leader_replica_id)
- {
- invisible_display_ranges.push(selection.range.clone());
- }
self.paint_highlighted_range(
scene,
selection.range.clone(),
@@ -874,7 +903,10 @@ impl EditorElement {
bounds,
);
- if editor.show_local_cursors(cx) || replica_id != local_replica_id {
+ if selection.is_local && !selection.range.is_empty() {
+ invisible_display_ranges.push(selection.range.clone());
+ }
+ if !selection.is_local || editor.show_local_cursors(cx) {
let cursor_position = selection.head;
if layout
.visible_display_row_range
@@ -906,7 +938,7 @@ impl EditorElement {
&text,
cursor_row_layout.font_size(),
&[(
- text.len(),
+ text.chars().count(),
RunStyle {
font_id,
color: style.background,
@@ -1405,10 +1437,61 @@ impl EditorElement {
.collect()
}
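+ /// Maps each display row in `rows` to its distance in buffer lines from `relative_to`, counting only rows that begin a buffer line, for relative line numbering.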
+ fn calculate_relative_line_numbers(
+ &self,
+ snapshot: &EditorSnapshot,
+ rows: &Range<u32>,
+ relative_to: Option<u32>,
+ ) -> HashMap<u32, u32> {
+ let mut relative_rows: HashMap<u32, u32> = Default::default();
+ let Some(relative_to) = relative_to else {
+ return relative_rows;
+ };
+
+ let start = rows.start.min(relative_to);
+ let end = rows.end.max(relative_to);
+
+ let buffer_rows = snapshot
+ .buffer_rows(start)
+ .take(1 + (end - start) as usize)
+ .collect::<Vec<_>>();
+
+ let head_idx = relative_to - start;
+ let mut delta = 1;
+ let mut i = head_idx + 1;
+ while i < buffer_rows.len() as u32 {
+ if buffer_rows[i as usize].is_some() {
+ if rows.contains(&(i + start)) {
+ relative_rows.insert(i + start, delta);
+ }
+ delta += 1;
+ }
+ i += 1;
+ }
+ delta = 1;
+ i = head_idx.min(buffer_rows.len() as u32 - 1);
+ while i > 0 && buffer_rows[i as usize].is_none() {
+ i -= 1;
+ }
+
+ while i > 0 {
+ i -= 1;
+ if buffer_rows[i as usize].is_some() {
+ if rows.contains(&(i + start)) {
+ relative_rows.insert(i + start, delta);
+ }
+ delta += 1;
+ }
+ }
+
+ relative_rows
+ }
+
fn layout_line_numbers(
&self,
rows: Range<u32>,
active_rows: &BTreeMap<u32, bool>,
+ newest_selection_head: DisplayPoint,
is_singleton: bool,
snapshot: &EditorSnapshot,
cx: &ViewContext<Editor>,
@@ -1421,6 +1504,15 @@ impl EditorElement {
let mut line_number_layouts = Vec::with_capacity(rows.len());
let mut fold_statuses = Vec::with_capacity(rows.len());
let mut line_number = String::new();
+ let is_relative = settings::get::<EditorSettings>(cx).relative_line_numbers;
+ let relative_to = if is_relative {
+ Some(newest_selection_head.row())
+ } else {
+ None
+ };
+
+ let relative_rows = self.calculate_relative_line_numbers(&snapshot, &rows, relative_to);
+
for (ix, row) in snapshot
.buffer_rows(rows.start)
.take((rows.end - rows.start) as usize)
@@ -1435,7 +1527,11 @@ impl EditorElement {
if let Some(buffer_row) = row {
if include_line_numbers {
line_number.clear();
- write!(&mut line_number, "{}", buffer_row + 1).unwrap();
+ let default_number = buffer_row + 1;
+ let number = relative_rows
+ .get(&(ix as u32 + rows.start))
+ .unwrap_or(&default_number);
+ write!(&mut line_number, "{}", number).unwrap();
line_number_layouts.push(Some(cx.text_layout_cache().layout_str(
&line_number,
style.text.font_size,
@@ -2079,14 +2175,11 @@ impl Element<Editor> for EditorElement {
scroll_height
.min(constraint.max_along(Axis::Vertical))
.max(constraint.min_along(Axis::Vertical))
+ .max(line_height)
.min(line_height * max_lines as f32),
)
} else if let EditorMode::SingleLine = snapshot.mode {
- size.set_y(
- line_height
- .min(constraint.max_along(Axis::Vertical))
- .max(constraint.min_along(Axis::Vertical)),
- )
+ size.set_y(line_height.max(constraint.min_along(Axis::Vertical)))
} else if size.y().is_infinite() {
size.set_y(scroll_height);
}
@@ -2124,7 +2217,7 @@ impl Element<Editor> for EditorElement {
.anchor_before(DisplayPoint::new(end_row, 0).to_offset(&snapshot, Bias::Right))
};
- let mut selections: Vec<(ReplicaId, Vec<SelectionLayout>)> = Vec::new();
+ let mut selections: Vec<(Option<ReplicaId>, Vec<SelectionLayout>)> = Vec::new();
let mut active_rows = BTreeMap::new();
let mut fold_ranges = Vec::new();
let is_singleton = editor.is_singleton(cx);
@@ -2155,8 +2248,14 @@ impl Element<Editor> for EditorElement {
.buffer_snapshot
.remote_selections_in_range(&(start_anchor..end_anchor))
{
+ let replica_id = if let Some(mapping) = &editor.replica_id_mapping {
+ mapping.get(&replica_id).copied()
+ } else {
+ None
+ };
+
// The local selections match the leader's selections.
- if Some(replica_id) == editor.leader_replica_id {
+ if replica_id.is_some() && replica_id == editor.leader_replica_id {
continue;
}
remote_selections
@@ -2168,6 +2267,7 @@ impl Element<Editor> for EditorElement {
cursor_shape,
&snapshot.display_snapshot,
false,
+ false,
));
}
selections.extend(remote_selections);
@@ -2191,6 +2291,7 @@ impl Element<Editor> for EditorElement {
editor.cursor_shape,
&snapshot.display_snapshot,
is_newest,
+ true,
);
if is_newest {
newest_selection_head = Some(layout.head);
@@ -2206,11 +2307,18 @@ impl Element<Editor> for EditorElement {
}
// Render the local selections in the leader's color when following.
- let local_replica_id = editor
- .leader_replica_id
- .unwrap_or_else(|| editor.replica_id(cx));
+ let local_replica_id = if let Some(leader_replica_id) = editor.leader_replica_id {
+ leader_replica_id
+ } else {
+ let replica_id = editor.replica_id(cx);
+ if let Some(mapping) = &editor.replica_id_mapping {
+ mapping.get(&replica_id).copied().unwrap_or(replica_id)
+ } else {
+ replica_id
+ }
+ };
- selections.push((local_replica_id, layouts));
+ selections.push((Some(local_replica_id), layouts));
}
let scrollbar_settings = &settings::get::<EditorSettings>(cx).scrollbar;
@@ -2244,9 +2352,23 @@ impl Element<Editor> for EditorElement {
})
.collect();
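+ // Fall back to computing the newest selection's head when it was not laid out above, so relative line numbers always have an anchor row.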
+ let head_for_relative = newest_selection_head.unwrap_or_else(|| {
+ let newest = editor.selections.newest::<Point>(cx);
+ SelectionLayout::new(
+ newest,
+ editor.selections.line_mode,
+ editor.cursor_shape,
+ &snapshot.display_snapshot,
+ true,
+ true,
+ )
+ .head
+ });
+
let (line_number_layouts, fold_statuses) = self.layout_line_numbers(
start_row..end_row,
&active_rows,
+ head_for_relative,
is_singleton,
&snapshot,
cx,
@@ -2591,7 +2713,7 @@ pub struct LayoutState {
blocks: Vec<BlockLayout>,
highlighted_ranges: Vec<(Range<DisplayPoint>, Color)>,
fold_ranges: Vec<(BufferRow, Range<DisplayPoint>, Color)>,
- selections: Vec<(ReplicaId, Vec<SelectionLayout>)>,
+ selections: Vec<(Option<ReplicaId>, Vec<SelectionLayout>)>,
scrollbar_row_range: Range<f32>,
show_scrollbars: bool,
is_singleton: bool,
@@ -2614,22 +2736,42 @@ struct PositionMap {
snapshot: EditorSnapshot,
}
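+/// The display points nearest to a screen position: the closest valid points on either side, plus the exact unclipped point, which may fall on an inlay or past the end of a line.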
+#[derive(Debug, Copy, Clone)]
+pub struct PointForPosition {
+ pub previous_valid: DisplayPoint,
+ pub next_valid: DisplayPoint,
+ pub exact_unclipped: DisplayPoint,
+ pub column_overshoot_after_line_end: u32,
+}
+
+impl PointForPosition {
+ #[cfg(test)]
+ pub fn valid(valid: DisplayPoint) -> Self {
+ Self {
+ previous_valid: valid,
+ next_valid: valid,
+ exact_unclipped: valid,
+ column_overshoot_after_line_end: 0,
+ }
+ }
+
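+ /// Returns the point when the position maps onto valid buffer text; `None` when it falls on an inlay or beyond the line end.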
+ pub fn as_valid(&self) -> Option<DisplayPoint> {
+ if self.previous_valid == self.exact_unclipped && self.next_valid == self.exact_unclipped {
+ Some(self.previous_valid)
+ } else {
+ None
+ }
+ }
+}
+
impl PositionMap {
- /// Returns two display points:
- /// 1. The nearest *valid* position in the editor
- /// 2. An unclipped, potentially *invalid* position that maps directly to
- /// the given pixel position.
- fn point_for_position(
- &self,
- text_bounds: RectF,
- position: Vector2F,
- ) -> (DisplayPoint, DisplayPoint) {
+ fn point_for_position(&self, text_bounds: RectF, position: Vector2F) -> PointForPosition {
let scroll_position = self.snapshot.scroll_position();
let position = position - text_bounds.origin();
let y = position.y().max(0.0).min(self.size.y());
let x = position.x() + (scroll_position.x() * self.em_width);
let row = (y / self.line_height + scroll_position.y()) as u32;
- let (column, x_overshoot) = if let Some(line) = self
+ let (column, x_overshoot_after_line_end) = if let Some(line) = self
.line_layouts
.get(row as usize - scroll_position.y() as usize)
.map(|line_with_spaces| &line_with_spaces.line)
@@ -2643,11 +2785,18 @@ impl PositionMap {
(0, x)
};
- let mut target_point = DisplayPoint::new(row, column);
- let point = self.snapshot.clip_point(target_point, Bias::Left);
- *target_point.column_mut() += (x_overshoot / self.em_advance) as u32;
-
- (point, target_point)
+ let mut exact_unclipped = DisplayPoint::new(row, column);
+ let previous_valid = self.snapshot.clip_point(exact_unclipped, Bias::Left);
+ let next_valid = self.snapshot.clip_point(exact_unclipped, Bias::Right);
+
+ let column_overshoot_after_line_end = (x_overshoot_after_line_end / self.em_advance) as u32;
+ *exact_unclipped.column_mut() += column_overshoot_after_line_end;
+ PointForPosition {
+ previous_valid,
+ next_valid,
+ exact_unclipped,
+ column_overshoot_after_line_end,
+ }
}
}
@@ -2901,23 +3050,6 @@ impl HighlightedRange {
}
}
-fn position_to_display_point(
- position: Vector2F,
- text_bounds: RectF,
- position_map: &PositionMap,
-) -> Option<DisplayPoint> {
- if text_bounds.contains_point(position) {
- let (point, target_point) = position_map.point_for_position(text_bounds, position);
- if point == target_point {
- Some(point)
- } else {
- None
- }
- } else {
- None
- }
-}
-
fn range_to_bounds(
range: &Range<DisplayPoint>,
content_origin: Vector2F,
@@ -2995,7 +3127,6 @@ mod tests {
#[gpui::test]
fn test_layout_line_numbers(cx: &mut TestAppContext) {
init_test(cx, |_| {});
-
let editor = cx
.add_window(|cx| {
let buffer = MultiBuffer::build_simple(&sample_text(6, 6, 'a'), cx);
@@ -3007,10 +3138,50 @@ mod tests {
let layouts = editor.update(cx, |editor, cx| {
let snapshot = editor.snapshot(cx);
element
- .layout_line_numbers(0..6, &Default::default(), false, &snapshot, cx)
+ .layout_line_numbers(
+ 0..6,
+ &Default::default(),
+ DisplayPoint::new(0, 0),
+ false,
+ &snapshot,
+ cx,
+ )
.0
});
assert_eq!(layouts.len(), 6);
+
+ let relative_rows = editor.update(cx, |editor, cx| {
+ let snapshot = editor.snapshot(cx);
+ element.calculate_relative_line_numbers(&snapshot, &(0..6), Some(3))
+ });
+ assert_eq!(relative_rows[&0], 3);
+ assert_eq!(relative_rows[&1], 2);
+ assert_eq!(relative_rows[&2], 1);
+ // current line has no relative number
+ assert_eq!(relative_rows[&4], 1);
+ assert_eq!(relative_rows[&5], 2);
+
+ // works if cursor is before screen
+ let relative_rows = editor.update(cx, |editor, cx| {
+ let snapshot = editor.snapshot(cx);
+
+ element.calculate_relative_line_numbers(&snapshot, &(3..6), Some(1))
+ });
+ assert_eq!(relative_rows.len(), 3);
+ assert_eq!(relative_rows[&3], 2);
+ assert_eq!(relative_rows[&4], 3);
+ assert_eq!(relative_rows[&5], 4);
+
+ // works if cursor is after screen
+ let relative_rows = editor.update(cx, |editor, cx| {
+ let snapshot = editor.snapshot(cx);
+
+ element.calculate_relative_line_numbers(&snapshot, &(0..3), Some(6))
+ });
+ assert_eq!(relative_rows.len(), 3);
+ assert_eq!(relative_rows[&0], 5);
+ assert_eq!(relative_rows[&1], 4);
+ assert_eq!(relative_rows[&2], 3);
}
#[gpui::test]
@@ -1,6 +1,8 @@
use crate::{
- display_map::ToDisplayPoint, Anchor, AnchorRangeExt, DisplayPoint, Editor, EditorSettings,
- EditorSnapshot, EditorStyle, RangeToAnchorExt,
+ display_map::{InlayOffset, ToDisplayPoint},
+ link_go_to_definition::{DocumentRange, InlayRange},
+ Anchor, AnchorRangeExt, DisplayPoint, Editor, EditorSettings, EditorSnapshot, EditorStyle,
+ ExcerptId, RangeToAnchorExt,
};
use futures::FutureExt;
use gpui::{
@@ -11,7 +13,7 @@ use gpui::{
AnyElement, AppContext, CursorRegion, Element, ModelHandle, MouseRegion, Task, ViewContext,
};
use language::{Bias, DiagnosticEntry, DiagnosticSeverity, Language, LanguageRegistry};
-use project::{HoverBlock, HoverBlockKind, Project};
+use project::{HoverBlock, HoverBlockKind, InlayHintLabelPart, Project};
use std::{ops::Range, sync::Arc, time::Duration};
use util::TryFutureExt;
@@ -46,6 +48,106 @@ pub fn hover_at(editor: &mut Editor, point: Option<DisplayPoint>, cx: &mut ViewC
}
}
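+/// A hover request originating from an inlay hint: the excerpt, the triggering inlay offset, the label range to highlight, and the tooltip to show.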
+pub struct InlayHover {
+ pub excerpt: ExcerptId,
+ pub triggered_from: InlayOffset,
+ pub range: InlayRange,
+ pub tooltip: HoverBlock,
+}
+
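+/// Finds the hint label part containing `hovered_offset`, returning the part and its inlay offset range.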
+pub fn find_hovered_hint_part(
+ label_parts: Vec<InlayHintLabelPart>,
+ hint_range: Range<InlayOffset>,
+ hovered_offset: InlayOffset,
+) -> Option<(InlayHintLabelPart, Range<InlayOffset>)> {
+ if hovered_offset >= hint_range.start && hovered_offset <= hint_range.end {
+ let mut hovered_character = (hovered_offset - hint_range.start).0;
+ let mut part_start = hint_range.start;
+ for part in label_parts {
+ let part_len = part.value.chars().count();
+ if hovered_character > part_len {
+ hovered_character -= part_len;
+ part_start.0 += part_len;
+ } else {
+ let part_end = InlayOffset(part_start.0 + part_len);
+ return Some((part, part_start..part_end));
+ }
+ }
+ }
+ None
+}
+
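+/// Shows a hover popover for an inlay hint label part after the usual hover delay, highlighting the hovered part in the background.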
+pub fn hover_at_inlay(editor: &mut Editor, inlay_hover: InlayHover, cx: &mut ViewContext<Editor>) {
+ if settings::get::<EditorSettings>(cx).hover_popover_enabled {
+ if editor.pending_rename.is_some() {
+ return;
+ }
+
+ let Some(project) = editor.project.clone() else {
+ return;
+ };
+
+ if let Some(InfoPopover { symbol_range, .. }) = &editor.hover_state.info_popover {
+ if let DocumentRange::Inlay(range) = symbol_range {
+ if (range.highlight_start..range.highlight_end)
+ .contains(&inlay_hover.triggered_from)
+ {
+ // Hover triggered from same location as last time. Don't show again.
+ return;
+ }
+ }
+ hide_hover(editor, cx);
+ }
+
+ let snapshot = editor.snapshot(cx);
+ // Don't request again if the location is the same as the previous request
+ if let Some(triggered_from) = editor.hover_state.triggered_from {
+ if inlay_hover.triggered_from
+ == snapshot
+ .display_snapshot
+ .anchor_to_inlay_offset(triggered_from)
+ {
+ return;
+ }
+ }
+
+ let task = cx.spawn(|this, mut cx| {
+ async move {
+ cx.background()
+ .timer(Duration::from_millis(HOVER_DELAY_MILLIS))
+ .await;
+ this.update(&mut cx, |this, _| {
+ this.hover_state.diagnostic_popover = None;
+ })?;
+
+ let hover_popover = InfoPopover {
+ project: project.clone(),
+ symbol_range: DocumentRange::Inlay(inlay_hover.range),
+ blocks: vec![inlay_hover.tooltip],
+ language: None,
+ rendered_content: None,
+ };
+
+ this.update(&mut cx, |this, cx| {
+ // Highlight the selected symbol using a background highlight
+ this.highlight_inlay_background::<HoverState>(
+ vec![inlay_hover.range],
+ |theme| theme.editor.hover_popover.highlight,
+ cx,
+ );
+ this.hover_state.info_popover = Some(hover_popover);
+ cx.notify();
+ })?;
+
+ anyhow::Ok(())
+ }
+ .log_err()
+ });
+
+ editor.hover_state.info_task = Some(task);
+ }
+}
+
/// Hides the type information popup.
/// Triggered by the `Hover` action when the cursor is not over a symbol or when the
/// selections changed.
@@ -110,8 +212,13 @@ fn show_hover(
if !ignore_timeout {
if let Some(InfoPopover { symbol_range, .. }) = &editor.hover_state.info_popover {
if symbol_range
- .to_offset(&snapshot.buffer_snapshot)
- .contains(&multibuffer_offset)
+ .as_text_range()
+ .map(|range| {
+ range
+ .to_offset(&snapshot.buffer_snapshot)
+ .contains(&multibuffer_offset)
+ })
+ .unwrap_or(false)
{
// Hover triggered from same location as last time. Don't show again.
return;
@@ -219,7 +326,7 @@ fn show_hover(
Some(InfoPopover {
project: project.clone(),
- symbol_range: range,
+ symbol_range: DocumentRange::Text(range),
blocks: hover_result.contents,
language: hover_result.language,
rendered_content: None,
@@ -227,10 +334,13 @@ fn show_hover(
});
this.update(&mut cx, |this, cx| {
- if let Some(hover_popover) = hover_popover.as_ref() {
+ if let Some(symbol_range) = hover_popover
+ .as_ref()
+ .and_then(|hover_popover| hover_popover.symbol_range.as_text_range())
+ {
// Highlight the selected symbol using a background highlight
this.highlight_background::<HoverState>(
- vec![hover_popover.symbol_range.clone()],
+ vec![symbol_range],
|theme| theme.editor.hover_popover.highlight,
cx,
);
@@ -497,7 +607,10 @@ impl HoverState {
.or_else(|| {
self.info_popover
.as_ref()
- .map(|info_popover| &info_popover.symbol_range.start)
+ .map(|info_popover| match &info_popover.symbol_range {
+ DocumentRange::Text(range) => &range.start,
+ DocumentRange::Inlay(range) => &range.inlay_position,
+ })
})?;
let point = anchor.to_display_point(&snapshot.display_snapshot);
@@ -522,7 +635,7 @@ impl HoverState {
#[derive(Debug, Clone)]
pub struct InfoPopover {
pub project: ModelHandle<Project>,
- pub symbol_range: Range<Anchor>,
+ symbol_range: DocumentRange,
pub blocks: Vec<HoverBlock>,
language: Option<Arc<Language>>,
rendered_content: Option<RenderedInfo>,
@@ -692,10 +805,17 @@ impl DiagnosticPopover {
#[cfg(test)]
mod tests {
use super::*;
- use crate::{editor_tests::init_test, test::editor_lsp_test_context::EditorLspTestContext};
+ use crate::{
+ editor_tests::init_test,
+ element::PointForPosition,
+ inlay_hint_cache::tests::{cached_hint_labels, visible_hint_labels},
+ link_go_to_definition::update_inlay_link_and_hover_points,
+ test::editor_lsp_test_context::EditorLspTestContext,
+ };
+ use collections::BTreeSet;
use gpui::fonts::Weight;
use indoc::indoc;
- use language::{Diagnostic, DiagnosticSet};
+ use language::{language_settings::InlayHintSettings, Diagnostic, DiagnosticSet};
use lsp::LanguageServerId;
use project::{HoverBlock, HoverBlockKind};
use smol::stream::StreamExt;
@@ -1131,4 +1251,327 @@ mod tests {
editor
});
}
+
+ #[gpui::test]
+ async fn test_hover_inlay_label_parts(cx: &mut gpui::TestAppContext) {
+ init_test(cx, |settings| {
+ settings.defaults.inlay_hints = Some(InlayHintSettings {
+ enabled: true,
+ show_type_hints: true,
+ show_parameter_hints: true,
+ show_other_hints: true,
+ })
+ });
+
+ let mut cx = EditorLspTestContext::new_rust(
+ lsp::ServerCapabilities {
+ inlay_hint_provider: Some(lsp::OneOf::Right(
+ lsp::InlayHintServerCapabilities::Options(lsp::InlayHintOptions {
+ resolve_provider: Some(true),
+ ..Default::default()
+ }),
+ )),
+ ..Default::default()
+ },
+ cx,
+ )
+ .await;
+
+ cx.set_state(indoc! {"
+ struct TestStruct;
+
+ // ==================
+
+ struct TestNewType<T>(T);
+
+ fn main() {
+ let variableˇ = TestNewType(TestStruct);
+ }
+ "});
+
+ let hint_start_offset = cx.ranges(indoc! {"
+ struct TestStruct;
+
+ // ==================
+
+ struct TestNewType<T>(T);
+
+ fn main() {
+ let variableˇ = TestNewType(TestStruct);
+ }
+ "})[0]
+ .start;
+ let hint_position = cx.to_lsp(hint_start_offset);
+ let new_type_target_range = cx.lsp_range(indoc! {"
+ struct TestStruct;
+
+ // ==================
+
+ struct «TestNewType»<T>(T);
+
+ fn main() {
+ let variable = TestNewType(TestStruct);
+ }
+ "});
+ let struct_target_range = cx.lsp_range(indoc! {"
+ struct «TestStruct»;
+
+ // ==================
+
+ struct TestNewType<T>(T);
+
+ fn main() {
+ let variable = TestNewType(TestStruct);
+ }
+ "});
+
+ let uri = cx.buffer_lsp_url.clone();
+ let new_type_label = "TestNewType";
+ let struct_label = "TestStruct";
+ let entire_hint_label = ": TestNewType<TestStruct>";
+ let closure_uri = uri.clone();
+ cx.lsp
+ .handle_request::<lsp::request::InlayHintRequest, _, _>(move |params, _| {
+ let task_uri = closure_uri.clone();
+ async move {
+ assert_eq!(params.text_document.uri, task_uri);
+ Ok(Some(vec![lsp::InlayHint {
+ position: hint_position,
+ label: lsp::InlayHintLabel::LabelParts(vec![lsp::InlayHintLabelPart {
+ value: entire_hint_label.to_string(),
+ ..Default::default()
+ }]),
+ kind: Some(lsp::InlayHintKind::TYPE),
+ text_edits: None,
+ tooltip: None,
+ padding_left: Some(false),
+ padding_right: Some(false),
+ data: None,
+ }]))
+ }
+ })
+ .next()
+ .await;
+ cx.foreground().run_until_parked();
+ cx.update_editor(|editor, cx| {
+ let expected_layers = vec![entire_hint_label.to_string()];
+ assert_eq!(expected_layers, cached_hint_labels(editor));
+ assert_eq!(expected_layers, visible_hint_labels(editor, cx));
+ });
+
+ let inlay_range = cx
+ .ranges(indoc! {"
+ struct TestStruct;
+
+ // ==================
+
+ struct TestNewType<T>(T);
+
+ fn main() {
+ let variable« »= TestNewType(TestStruct);
+ }
+ "})
+ .get(0)
+ .cloned()
+ .unwrap();
+ let new_type_hint_part_hover_position = cx.update_editor(|editor, cx| {
+ let snapshot = editor.snapshot(cx);
+ let previous_valid = inlay_range.start.to_display_point(&snapshot);
+ let next_valid = inlay_range.end.to_display_point(&snapshot);
+ assert_eq!(previous_valid.row(), next_valid.row());
+ assert!(previous_valid.column() < next_valid.column());
+ let exact_unclipped = DisplayPoint::new(
+ previous_valid.row(),
+ previous_valid.column()
+ + (entire_hint_label.find(new_type_label).unwrap() + new_type_label.len() / 2)
+ as u32,
+ );
+ PointForPosition {
+ previous_valid,
+ next_valid,
+ exact_unclipped,
+ column_overshoot_after_line_end: 0,
+ }
+ });
+ cx.update_editor(|editor, cx| {
+ update_inlay_link_and_hover_points(
+ &editor.snapshot(cx),
+ new_type_hint_part_hover_position,
+ editor,
+ true,
+ false,
+ cx,
+ );
+ });
+
+ let resolve_closure_uri = uri.clone();
+ cx.lsp
+ .handle_request::<lsp::request::InlayHintResolveRequest, _, _>(
+ move |mut hint_to_resolve, _| {
+ let mut resolved_hint_positions = BTreeSet::new();
+ let task_uri = resolve_closure_uri.clone();
+ async move {
+ let inserted = resolved_hint_positions.insert(hint_to_resolve.position);
+ assert!(inserted, "Hint {hint_to_resolve:?} was resolved twice");
+
+ // `: TestNewType<TestStruct>`
+ hint_to_resolve.label = lsp::InlayHintLabel::LabelParts(vec![
+ lsp::InlayHintLabelPart {
+ value: ": ".to_string(),
+ ..Default::default()
+ },
+ lsp::InlayHintLabelPart {
+ value: new_type_label.to_string(),
+ location: Some(lsp::Location {
+ uri: task_uri.clone(),
+ range: new_type_target_range,
+ }),
+ tooltip: Some(lsp::InlayHintLabelPartTooltip::String(format!(
+ "A tooltip for `{new_type_label}`"
+ ))),
+ ..Default::default()
+ },
+ lsp::InlayHintLabelPart {
+ value: "<".to_string(),
+ ..Default::default()
+ },
+ lsp::InlayHintLabelPart {
+ value: struct_label.to_string(),
+ location: Some(lsp::Location {
+ uri: task_uri,
+ range: struct_target_range,
+ }),
+ tooltip: Some(lsp::InlayHintLabelPartTooltip::MarkupContent(
+ lsp::MarkupContent {
+ kind: lsp::MarkupKind::Markdown,
+ value: format!("A tooltip for `{struct_label}`"),
+ },
+ )),
+ ..Default::default()
+ },
+ lsp::InlayHintLabelPart {
+ value: ">".to_string(),
+ ..Default::default()
+ },
+ ]);
+
+ Ok(hint_to_resolve)
+ }
+ },
+ )
+ .next()
+ .await;
+ cx.foreground().run_until_parked();
+
+ cx.update_editor(|editor, cx| {
+ update_inlay_link_and_hover_points(
+ &editor.snapshot(cx),
+ new_type_hint_part_hover_position,
+ editor,
+ true,
+ false,
+ cx,
+ );
+ });
+ cx.foreground()
+ .advance_clock(Duration::from_millis(HOVER_DELAY_MILLIS + 100));
+ cx.foreground().run_until_parked();
+ cx.update_editor(|editor, cx| {
+ let snapshot = editor.snapshot(cx);
+ let hover_state = &editor.hover_state;
+ assert!(hover_state.diagnostic_popover.is_none() && hover_state.info_popover.is_some());
+ let popover = hover_state.info_popover.as_ref().unwrap();
+ let buffer_snapshot = editor.buffer().update(cx, |buffer, cx| buffer.snapshot(cx));
+ let entire_inlay_start = snapshot.display_point_to_inlay_offset(
+ inlay_range.start.to_display_point(&snapshot),
+ Bias::Left,
+ );
+
+ let expected_new_type_label_start = InlayOffset(entire_inlay_start.0 + ": ".len());
+ assert_eq!(
+ popover.symbol_range,
+ DocumentRange::Inlay(InlayRange {
+ inlay_position: buffer_snapshot.anchor_at(inlay_range.start, Bias::Right),
+ highlight_start: expected_new_type_label_start,
+ highlight_end: InlayOffset(
+ expected_new_type_label_start.0 + new_type_label.len()
+ ),
+ }),
+ "Popover range should match the new type label part"
+ );
+ assert_eq!(
+ popover
+ .rendered_content
+ .as_ref()
+ .expect("should have label text for new type hint")
+ .text,
+ format!("A tooltip for `{new_type_label}`"),
+ "Rendered text should not anyhow alter backticks"
+ );
+ });
+
+ let struct_hint_part_hover_position = cx.update_editor(|editor, cx| {
+ let snapshot = editor.snapshot(cx);
+ let previous_valid = inlay_range.start.to_display_point(&snapshot);
+ let next_valid = inlay_range.end.to_display_point(&snapshot);
+ assert_eq!(previous_valid.row(), next_valid.row());
+ assert!(previous_valid.column() < next_valid.column());
+ let exact_unclipped = DisplayPoint::new(
+ previous_valid.row(),
+ previous_valid.column()
+ + (entire_hint_label.find(struct_label).unwrap() + struct_label.len() / 2)
+ as u32,
+ );
+ PointForPosition {
+ previous_valid,
+ next_valid,
+ exact_unclipped,
+ column_overshoot_after_line_end: 0,
+ }
+ });
+ cx.update_editor(|editor, cx| {
+ update_inlay_link_and_hover_points(
+ &editor.snapshot(cx),
+ struct_hint_part_hover_position,
+ editor,
+ true,
+ false,
+ cx,
+ );
+ });
+ cx.foreground()
+ .advance_clock(Duration::from_millis(HOVER_DELAY_MILLIS + 100));
+ cx.foreground().run_until_parked();
+ cx.update_editor(|editor, cx| {
+ let snapshot = editor.snapshot(cx);
+ let hover_state = &editor.hover_state;
+ assert!(hover_state.diagnostic_popover.is_none() && hover_state.info_popover.is_some());
+ let popover = hover_state.info_popover.as_ref().unwrap();
+ let buffer_snapshot = editor.buffer().update(cx, |buffer, cx| buffer.snapshot(cx));
+ let entire_inlay_start = snapshot.display_point_to_inlay_offset(
+ inlay_range.start.to_display_point(&snapshot),
+ Bias::Left,
+ );
+ let expected_struct_label_start =
+ InlayOffset(entire_inlay_start.0 + ": ".len() + new_type_label.len() + "<".len());
+ assert_eq!(
+ popover.symbol_range,
+ DocumentRange::Inlay(InlayRange {
+ inlay_position: buffer_snapshot.anchor_at(inlay_range.start, Bias::Right),
+ highlight_start: expected_struct_label_start,
+ highlight_end: InlayOffset(expected_struct_label_start.0 + struct_label.len()),
+ }),
+ "Popover range should match the struct label part"
+ );
+ assert_eq!(
+ popover
+ .rendered_content
+ .as_ref()
+ .expect("should have label text for struct hint")
+ .text,
+ format!("A tooltip for {struct_label}"),
+ "Rendered markdown element should remove backticks from text"
+ );
+ });
+ }
}
@@ -2,6 +2,7 @@ use std::{
cmp,
ops::{ControlFlow, Range},
sync::Arc,
+ time::Duration,
};
use crate::{
@@ -9,15 +10,17 @@ use crate::{
};
use anyhow::Context;
use clock::Global;
+use futures::future;
use gpui::{ModelContext, ModelHandle, Task, ViewContext};
use language::{language_settings::InlayHintKind, Buffer, BufferSnapshot};
-use log::error;
use parking_lot::RwLock;
-use project::InlayHint;
+use project::{InlayHint, ResolveState};
use collections::{hash_map, HashMap, HashSet};
use language::language_settings::InlayHintSettings;
+use smol::lock::Semaphore;
use sum_tree::Bias;
+use text::{ToOffset, ToPoint};
use util::post_inc;
pub struct InlayHintCache {
@@ -26,6 +29,7 @@ pub struct InlayHintCache {
version: usize,
pub(super) enabled: bool,
update_tasks: HashMap<ExcerptId, TasksForRanges>,
+ lsp_request_limiter: Arc<Semaphore>,
}
#[derive(Debug)]
@@ -60,7 +64,7 @@ struct ExcerptHintsUpdate {
excerpt_id: ExcerptId,
remove_from_visible: Vec<InlayId>,
remove_from_cache: HashSet<InlayId>,
- add_to_cache: HashSet<InlayHint>,
+ add_to_cache: Vec<InlayHint>,
}
#[derive(Debug, Clone, Copy)]
@@ -69,6 +73,7 @@ struct ExcerptQuery {
excerpt_id: ExcerptId,
cache_version: usize,
invalidate: InvalidationStrategy,
+ reason: &'static str,
}
impl InvalidationStrategy {
@@ -81,7 +86,11 @@ impl InvalidationStrategy {
}
impl TasksForRanges {
- fn new(sorted_ranges: Vec<Range<language::Anchor>>, task: Task<()>) -> Self {
+ fn new(query_ranges: QueryRanges, task: Task<()>) -> Self {
+ let mut sorted_ranges = Vec::new();
+ sorted_ranges.extend(query_ranges.before_visible);
+ sorted_ranges.extend(query_ranges.visible);
+ sorted_ranges.extend(query_ranges.after_visible);
Self {
tasks: vec![task],
sorted_ranges,
@@ -91,82 +100,138 @@ impl TasksForRanges {
fn update_cached_tasks(
&mut self,
buffer_snapshot: &BufferSnapshot,
- query_range: Range<text::Anchor>,
+ query_ranges: QueryRanges,
invalidate: InvalidationStrategy,
- spawn_task: impl FnOnce(Vec<Range<language::Anchor>>) -> Task<()>,
+ spawn_task: impl FnOnce(QueryRanges) -> Task<()>,
) {
- let ranges_to_query = match invalidate {
- InvalidationStrategy::None => {
- let mut ranges_to_query = Vec::new();
- let mut latest_cached_range = None::<&mut Range<language::Anchor>>;
- for cached_range in self
- .sorted_ranges
- .iter_mut()
- .skip_while(|cached_range| {
- cached_range
- .end
- .cmp(&query_range.start, buffer_snapshot)
- .is_lt()
- })
- .take_while(|cached_range| {
- cached_range
- .start
- .cmp(&query_range.end, buffer_snapshot)
- .is_le()
- })
- {
- match latest_cached_range {
- Some(latest_cached_range) => {
- if latest_cached_range.end.offset.saturating_add(1)
- < cached_range.start.offset
- {
- ranges_to_query.push(latest_cached_range.end..cached_range.start);
- cached_range.start = latest_cached_range.end;
- }
- }
- None => {
- if query_range
- .start
- .cmp(&cached_range.start, buffer_snapshot)
- .is_lt()
- {
- ranges_to_query.push(query_range.start..cached_range.start);
- cached_range.start = query_range.start;
- }
- }
- }
- latest_cached_range = Some(cached_range);
- }
+ let query_ranges = if invalidate.should_invalidate() {
+ self.tasks.clear();
+ self.sorted_ranges.clear();
+ query_ranges
+ } else {
+ let mut non_cached_query_ranges = query_ranges;
+ non_cached_query_ranges.before_visible = non_cached_query_ranges
+ .before_visible
+ .into_iter()
+ .flat_map(|query_range| {
+ self.remove_cached_ranges_from_query(buffer_snapshot, query_range)
+ })
+ .collect();
+ non_cached_query_ranges.visible = non_cached_query_ranges
+ .visible
+ .into_iter()
+ .flat_map(|query_range| {
+ self.remove_cached_ranges_from_query(buffer_snapshot, query_range)
+ })
+ .collect();
+ non_cached_query_ranges.after_visible = non_cached_query_ranges
+ .after_visible
+ .into_iter()
+ .flat_map(|query_range| {
+ self.remove_cached_ranges_from_query(buffer_snapshot, query_range)
+ })
+ .collect();
+ non_cached_query_ranges
+ };
- match latest_cached_range {
- Some(latest_cached_range) => {
- if latest_cached_range.end.offset.saturating_add(1) < query_range.end.offset
- {
- ranges_to_query.push(latest_cached_range.end..query_range.end);
- latest_cached_range.end = query_range.end;
- }
+ if !query_ranges.is_empty() {
+ self.tasks.push(spawn_task(query_ranges));
+ }
+ }
+
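+ /// Returns the parts of `query_range` not already covered by cached ranges, recording them as cached so they are only queried once.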
+ fn remove_cached_ranges_from_query(
+ &mut self,
+ buffer_snapshot: &BufferSnapshot,
+ query_range: Range<language::Anchor>,
+ ) -> Vec<Range<language::Anchor>> {
+ let mut ranges_to_query = Vec::new();
+ let mut latest_cached_range = None::<&mut Range<language::Anchor>>;
+ for cached_range in self
+ .sorted_ranges
+ .iter_mut()
+ .skip_while(|cached_range| {
+ cached_range
+ .end
+ .cmp(&query_range.start, buffer_snapshot)
+ .is_lt()
+ })
+ .take_while(|cached_range| {
+ cached_range
+ .start
+ .cmp(&query_range.end, buffer_snapshot)
+ .is_le()
+ })
+ {
+ match latest_cached_range {
+ Some(latest_cached_range) => {
+ if latest_cached_range.end.offset.saturating_add(1) < cached_range.start.offset
+ {
+ ranges_to_query.push(latest_cached_range.end..cached_range.start);
+ cached_range.start = latest_cached_range.end;
}
- None => {
- ranges_to_query.push(query_range.clone());
- self.sorted_ranges.push(query_range);
- self.sorted_ranges.sort_by(|range_a, range_b| {
- range_a.start.cmp(&range_b.start, buffer_snapshot)
- });
+ }
+ None => {
+ if query_range
+ .start
+ .cmp(&cached_range.start, buffer_snapshot)
+ .is_lt()
+ {
+ ranges_to_query.push(query_range.start..cached_range.start);
+ cached_range.start = query_range.start;
}
}
+ }
+ latest_cached_range = Some(cached_range);
+ }
- ranges_to_query
+ match latest_cached_range {
+ Some(latest_cached_range) => {
+ if latest_cached_range.end.offset.saturating_add(1) < query_range.end.offset {
+ ranges_to_query.push(latest_cached_range.end..query_range.end);
+ latest_cached_range.end = query_range.end;
+ }
}
- InvalidationStrategy::RefreshRequested | InvalidationStrategy::BufferEdited => {
- self.tasks.clear();
- self.sorted_ranges.clear();
- vec![query_range]
+ None => {
+ ranges_to_query.push(query_range.clone());
+ self.sorted_ranges.push(query_range);
+ self.sorted_ranges
+ .sort_by(|range_a, range_b| range_a.start.cmp(&range_b.start, buffer_snapshot));
}
- };
-
- if !ranges_to_query.is_empty() {
- self.tasks.push(spawn_task(ranges_to_query));
}
+
+ ranges_to_query
+ }
+
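+ /// Trims or splits cached ranges overlapping `range` so the affected region is queried again on the next update.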
+ fn invalidate_range(&mut self, buffer: &BufferSnapshot, range: &Range<language::Anchor>) {
+ self.sorted_ranges = self
+ .sorted_ranges
+ .drain(..)
+ .filter_map(|mut cached_range| {
+ if cached_range.start.cmp(&range.end, buffer).is_gt()
+ || cached_range.end.cmp(&range.start, buffer).is_lt()
+ {
+ Some(vec![cached_range])
+ } else if cached_range.start.cmp(&range.start, buffer).is_ge()
+ && cached_range.end.cmp(&range.end, buffer).is_le()
+ {
+ None
+ } else if range.start.cmp(&cached_range.start, buffer).is_ge()
+ && range.end.cmp(&cached_range.end, buffer).is_le()
+ {
+ Some(vec![
+ cached_range.start..range.start,
+ range.end..cached_range.end,
+ ])
+ } else if cached_range.start.cmp(&range.start, buffer).is_ge() {
+ cached_range.start = range.end;
+ Some(vec![cached_range])
+ } else {
+ cached_range.end = range.start;
+ Some(vec![cached_range])
+ }
+ })
+ .flatten()
+ .collect();
}
}
@@ -178,6 +243,7 @@ impl InlayHintCache {
hints: HashMap::default(),
update_tasks: HashMap::default(),
version: 0,
+ lsp_request_limiter: Arc::new(Semaphore::new(MAX_CONCURRENT_LSP_REQUESTS)),
}
}
@@ -234,6 +300,7 @@ impl InlayHintCache {
pub fn spawn_hint_refresh(
&mut self,
+ reason: &'static str,
excerpts_to_query: HashMap<ExcerptId, (ModelHandle<Buffer>, Global, Range<usize>)>,
invalidate: InvalidationStrategy,
cx: &mut ViewContext<Editor>,
@@ -262,7 +329,14 @@ impl InlayHintCache {
cx.spawn(|editor, mut cx| async move {
editor
.update(&mut cx, |editor, cx| {
- spawn_new_update_tasks(editor, excerpts_to_query, invalidate, cache_version, cx)
+ spawn_new_update_tasks(
+ editor,
+ reason,
+ excerpts_to_query,
+ invalidate,
+ cache_version,
+ cx,
+ )
})
.ok();
})
@@ -314,7 +388,10 @@ impl InlayHintCache {
shown_excerpt_hints_to_remove.retain(|(shown_anchor, shown_hint_id)| {
let Some(buffer) = shown_anchor
.buffer_id
- .and_then(|buffer_id| multi_buffer.buffer(buffer_id)) else { return false };
+ .and_then(|buffer_id| multi_buffer.buffer(buffer_id))
+ else {
+ return false;
+ };
let buffer_snapshot = buffer.read(cx).snapshot();
loop {
match excerpt_cache.peek() {
@@ -380,12 +457,45 @@ impl InlayHintCache {
}
}
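+ /// Drops cached hints and pending update tasks for the removed excerpts, returning a splice that removes their inlays from the display.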
+ pub fn remove_excerpts(&mut self, excerpts_removed: Vec<ExcerptId>) -> Option<InlaySplice> {
+ let mut to_remove = Vec::new();
+ for excerpt_to_remove in excerpts_removed {
+ self.update_tasks.remove(&excerpt_to_remove);
+ if let Some(cached_hints) = self.hints.remove(&excerpt_to_remove) {
+ let cached_hints = cached_hints.read();
+ to_remove.extend(cached_hints.hints.iter().map(|(id, _)| *id));
+ }
+ }
+ if to_remove.is_empty() {
+ None
+ } else {
+ self.version += 1;
+ Some(InlaySplice {
+ to_remove,
+ to_insert: Vec::new(),
+ })
+ }
+ }
+
pub fn clear(&mut self) {
- self.version += 1;
+ if !self.update_tasks.is_empty() || !self.hints.is_empty() {
+ self.version += 1;
+ }
self.update_tasks.clear();
self.hints.clear();
}
+ pub fn hint_by_id(&self, excerpt_id: ExcerptId, hint_id: InlayId) -> Option<InlayHint> {
+ self.hints
+ .get(&excerpt_id)?
+ .read()
+ .hints
+ .iter()
+ .find(|&(id, _)| id == &hint_id)
+ .map(|(_, hint)| hint)
+ .cloned()
+ }
+
pub fn hints(&self) -> Vec<InlayHint> {
let mut hints = Vec::new();
for excerpt_hints in self.hints.values() {
@@ -398,10 +508,80 @@ impl InlayHintCache {
pub fn version(&self) -> usize {
self.version
}
+
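+ /// If the hint is still unresolved, asks the project to resolve it via the language server and writes the resolved hint back into the cache.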
+ pub fn spawn_hint_resolve(
+ &self,
+ buffer_id: u64,
+ excerpt_id: ExcerptId,
+ id: InlayId,
+ cx: &mut ViewContext<'_, '_, Editor>,
+ ) {
+ if let Some(excerpt_hints) = self.hints.get(&excerpt_id) {
+ let mut guard = excerpt_hints.write();
+ if let Some(cached_hint) = guard
+ .hints
+ .iter_mut()
+ .find(|(hint_id, _)| hint_id == &id)
+ .map(|(_, hint)| hint)
+ {
+ if let ResolveState::CanResolve(server_id, _) = &cached_hint.resolve_state {
+ let hint_to_resolve = cached_hint.clone();
+ let server_id = *server_id;
+ cached_hint.resolve_state = ResolveState::Resolving;
+ drop(guard);
+ cx.spawn(|editor, mut cx| async move {
+ let resolved_hint_task = editor.update(&mut cx, |editor, cx| {
+ editor
+ .buffer()
+ .read(cx)
+ .buffer(buffer_id)
+ .and_then(|buffer| {
+ let project = editor.project.as_ref()?;
+ Some(project.update(cx, |project, cx| {
+ project.resolve_inlay_hint(
+ hint_to_resolve,
+ buffer,
+ server_id,
+ cx,
+ )
+ }))
+ })
+ })?;
+ if let Some(resolved_hint_task) = resolved_hint_task {
+ let mut resolved_hint =
+ resolved_hint_task.await.context("hint resolve task")?;
+ editor.update(&mut cx, |editor, _| {
+ if let Some(excerpt_hints) =
+ editor.inlay_hint_cache.hints.get(&excerpt_id)
+ {
+ let mut guard = excerpt_hints.write();
+ if let Some(cached_hint) = guard
+ .hints
+ .iter_mut()
+ .find(|(hint_id, _)| hint_id == &id)
+ .map(|(_, hint)| hint)
+ {
+ if cached_hint.resolve_state == ResolveState::Resolving {
+ resolved_hint.resolve_state = ResolveState::Resolved;
+ *cached_hint = resolved_hint;
+ }
+ }
+ }
+ })?;
+ }
+
+ anyhow::Ok(())
+ })
+ .detach_and_log_err(cx);
+ }
+ }
+ }
+ }
}
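
The resolve flow above marks a cached hint as Resolving before the LSP request is spawned, and only writes the resolved value back if the state is still Resolving, so a refresh that replaced the hint in the meantime is not clobbered. A minimal standalone sketch of that compare-before-commit pattern (the names below are illustrative, not Zed's actual types):

    #[derive(Clone, Debug, PartialEq)]
    enum ResolveState {
        CanResolve,
        Resolving,
        Resolved,
    }

    #[derive(Clone, Debug)]
    struct Hint {
        label: String,
        state: ResolveState,
    }

    fn start_resolve(hint: &mut Hint) -> Option<Hint> {
        // Snapshot the hint and mark it as in-flight before the async work starts.
        if hint.state == ResolveState::CanResolve {
            hint.state = ResolveState::Resolving;
            Some(hint.clone())
        } else {
            None
        }
    }

    fn finish_resolve(hint: &mut Hint, mut resolved: Hint) {
        // Commit only if nothing replaced or re-resolved the hint while we were away.
        if hint.state == ResolveState::Resolving {
            resolved.state = ResolveState::Resolved;
            *hint = resolved;
        }
    }

    fn main() {
        let mut cached = Hint { label: ": i32".into(), state: ResolveState::CanResolve };
        let snapshot = start_resolve(&mut cached).expect("hint should be resolvable");
        // ...imagine an LSP resolve round-trip producing a richer hint here...
        let resolved = Hint { label: ": i32 (resolved)".into(), ..snapshot };
        finish_resolve(&mut cached, resolved);
        assert_eq!(cached.state, ResolveState::Resolved);
        println!("{cached:?}");
    }
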
fn spawn_new_update_tasks(
editor: &mut Editor,
+ reason: &'static str,
excerpts_to_query: HashMap<ExcerptId, (ModelHandle<Buffer>, Global, Range<usize>)>,
invalidate: InvalidationStrategy,
update_cache_version: usize,
@@ -435,11 +615,11 @@ fn spawn_new_update_tasks(
}
};
- let (multi_buffer_snapshot, Some(query_range)) =
+ let (multi_buffer_snapshot, Some(query_ranges)) =
editor.buffer.update(cx, |multi_buffer, cx| {
(
multi_buffer.snapshot(cx),
- determine_query_range(
+ determine_query_ranges(
multi_buffer,
excerpt_id,
&excerpt_buffer,
@@ -447,22 +627,27 @@ fn spawn_new_update_tasks(
cx,
),
)
- }) else { return; };
+ })
+ else {
+ return;
+ };
let query = ExcerptQuery {
buffer_id,
excerpt_id,
cache_version: update_cache_version,
invalidate,
+ reason,
};
- let new_update_task = |fetch_ranges| {
+ let new_update_task = |query_ranges| {
new_update_task(
query,
- fetch_ranges,
+ query_ranges,
multi_buffer_snapshot,
buffer_snapshot.clone(),
Arc::clone(&visible_hints),
cached_excerpt_hints,
+ Arc::clone(&editor.inlay_hint_cache.lsp_request_limiter),
cx,
)
};
@@ -471,82 +656,184 @@ fn spawn_new_update_tasks(
hash_map::Entry::Occupied(mut o) => {
o.get_mut().update_cached_tasks(
&buffer_snapshot,
- query_range,
+ query_ranges,
invalidate,
new_update_task,
);
}
hash_map::Entry::Vacant(v) => {
v.insert(TasksForRanges::new(
- vec![query_range.clone()],
- new_update_task(vec![query_range]),
+ query_ranges.clone(),
+ new_update_task(query_ranges),
));
}
}
}
}
-fn determine_query_range(
+#[derive(Debug, Clone)]
+struct QueryRanges {
+ before_visible: Vec<Range<language::Anchor>>,
+ visible: Vec<Range<language::Anchor>>,
+ after_visible: Vec<Range<language::Anchor>>,
+}
+
+impl QueryRanges {
+ fn is_empty(&self) -> bool {
+ self.before_visible.is_empty() && self.visible.is_empty() && self.after_visible.is_empty()
+ }
+}
+
+fn determine_query_ranges(
multi_buffer: &mut MultiBuffer,
excerpt_id: ExcerptId,
excerpt_buffer: &ModelHandle<Buffer>,
excerpt_visible_range: Range<usize>,
cx: &mut ModelContext<'_, MultiBuffer>,
-) -> Option<Range<language::Anchor>> {
+) -> Option<QueryRanges> {
let full_excerpt_range = multi_buffer
.excerpts_for_buffer(excerpt_buffer, cx)
.into_iter()
.find(|(id, _)| id == &excerpt_id)
.map(|(_, range)| range.context)?;
-
let buffer = excerpt_buffer.read(cx);
+ let snapshot = buffer.snapshot();
let excerpt_visible_len = excerpt_visible_range.end - excerpt_visible_range.start;
- let start_offset = excerpt_visible_range
- .start
- .saturating_sub(excerpt_visible_len)
- .max(full_excerpt_range.start.offset);
- let start = buffer.anchor_before(buffer.clip_offset(start_offset, Bias::Left));
- let end_offset = excerpt_visible_range
+
+ let visible_range = if excerpt_visible_range.start == excerpt_visible_range.end {
+ return None;
+ } else {
+ vec![
+ buffer.anchor_before(snapshot.clip_offset(excerpt_visible_range.start, Bias::Left))
+ ..buffer.anchor_after(snapshot.clip_offset(excerpt_visible_range.end, Bias::Right)),
+ ]
+ };
+
+ let full_excerpt_range_end_offset = full_excerpt_range.end.to_offset(&snapshot);
+ let after_visible_range_start = excerpt_visible_range
.end
- .saturating_add(excerpt_visible_len)
- .min(full_excerpt_range.end.offset)
+ .saturating_add(1)
+ .min(full_excerpt_range_end_offset)
.min(buffer.len());
- let end = buffer.anchor_after(buffer.clip_offset(end_offset, Bias::Right));
- if start.cmp(&end, buffer).is_eq() {
- None
+ let after_visible_range = if after_visible_range_start == full_excerpt_range_end_offset {
+ Vec::new()
} else {
- Some(start..end)
- }
+ let after_range_end_offset = after_visible_range_start
+ .saturating_add(excerpt_visible_len)
+ .min(full_excerpt_range_end_offset)
+ .min(buffer.len());
+ vec![
+ buffer.anchor_before(snapshot.clip_offset(after_visible_range_start, Bias::Left))
+ ..buffer.anchor_after(snapshot.clip_offset(after_range_end_offset, Bias::Right)),
+ ]
+ };
+
+ let full_excerpt_range_start_offset = full_excerpt_range.start.to_offset(&snapshot);
+ let before_visible_range_end = excerpt_visible_range
+ .start
+ .saturating_sub(1)
+ .max(full_excerpt_range_start_offset);
+ let before_visible_range = if before_visible_range_end == full_excerpt_range_start_offset {
+ Vec::new()
+ } else {
+ let before_range_start_offset = before_visible_range_end
+ .saturating_sub(excerpt_visible_len)
+ .max(full_excerpt_range_start_offset);
+ vec![
+ buffer.anchor_before(snapshot.clip_offset(before_range_start_offset, Bias::Left))
+ ..buffer.anchor_after(snapshot.clip_offset(before_visible_range_end, Bias::Right)),
+ ]
+ };
+
+ Some(QueryRanges {
+ before_visible: before_visible_range,
+ visible: visible_range,
+ after_visible: after_visible_range,
+ })
}
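
Compared with the old determine_query_range, the new function returns three groups of ranges rather than one padded range: the visible slice of the excerpt, plus up to one visible-length chunk above and below it, each clamped to the excerpt bounds. A rough offset-only sketch of that split (ignoring anchors, bias clipping, and the multibuffer plumbing):

    use std::ops::Range;

    struct QueryRanges {
        before_visible: Option<Range<usize>>,
        visible: Range<usize>,
        after_visible: Option<Range<usize>>,
    }

    fn split_ranges(excerpt: Range<usize>, visible: Range<usize>) -> Option<QueryRanges> {
        if visible.is_empty() {
            return None;
        }
        let len = visible.end - visible.start;
        // One visible-length chunk above the viewport, clamped to the excerpt start.
        let before_start = visible.start.saturating_sub(len).max(excerpt.start);
        let before = (before_start < visible.start).then(|| before_start..visible.start);
        // One visible-length chunk below the viewport, clamped to the excerpt end.
        let after_end = visible.end.saturating_add(len).min(excerpt.end);
        let after = (visible.end < after_end).then(|| visible.end..after_end);
        Some(QueryRanges {
            before_visible: before,
            visible,
            after_visible: after,
        })
    }

    fn main() {
        let ranges = split_ranges(0..10_000, 2_000..2_500).unwrap();
        assert_eq!(ranges.visible, 2_000..2_500);
        assert_eq!(ranges.before_visible, Some(1_500..2_000));
        assert_eq!(ranges.after_visible, Some(2_500..3_000));
    }
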
+const MAX_CONCURRENT_LSP_REQUESTS: usize = 5;
+const INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS: u64 = 400;
+
fn new_update_task(
query: ExcerptQuery,
- hint_fetch_ranges: Vec<Range<language::Anchor>>,
+ query_ranges: QueryRanges,
multi_buffer_snapshot: MultiBufferSnapshot,
buffer_snapshot: BufferSnapshot,
visible_hints: Arc<Vec<Inlay>>,
cached_excerpt_hints: Option<Arc<RwLock<CachedExcerptHints>>>,
+ lsp_request_limiter: Arc<Semaphore>,
cx: &mut ViewContext<'_, '_, Editor>,
) -> Task<()> {
- cx.spawn(|editor, cx| async move {
- let task_update_results =
- futures::future::join_all(hint_fetch_ranges.into_iter().map(|range| {
- fetch_and_update_hints(
- editor.clone(),
- multi_buffer_snapshot.clone(),
- buffer_snapshot.clone(),
- Arc::clone(&visible_hints),
- cached_excerpt_hints.as_ref().map(Arc::clone),
- query,
- range,
- cx.clone(),
+ cx.spawn(|editor, mut cx| async move {
+ let closure_cx = cx.clone();
+ let fetch_and_update_hints = |invalidate, range| {
+ fetch_and_update_hints(
+ editor.clone(),
+ multi_buffer_snapshot.clone(),
+ buffer_snapshot.clone(),
+ Arc::clone(&visible_hints),
+ cached_excerpt_hints.as_ref().map(Arc::clone),
+ query,
+ invalidate,
+ range,
+ Arc::clone(&lsp_request_limiter),
+ closure_cx.clone(),
+ )
+ };
+ let visible_range_update_results = future::join_all(query_ranges.visible.into_iter().map(
+ |visible_range| async move {
+ (
+ visible_range.clone(),
+ fetch_and_update_hints(query.invalidate.should_invalidate(), visible_range)
+ .await,
)
- }))
- .await;
+ },
+ ))
+ .await;
+
+ let hint_delay = cx.background().timer(Duration::from_millis(
+ INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS,
+ ));
+
+ let mut query_range_failed = |range: &Range<language::Anchor>, e: anyhow::Error| {
+ log::error!("inlay hint update task for range {range:?} failed: {e:#}");
+ editor
+ .update(&mut cx, |editor, _| {
+ if let Some(task_ranges) = editor
+ .inlay_hint_cache
+ .update_tasks
+ .get_mut(&query.excerpt_id)
+ {
+ task_ranges.invalidate_range(&buffer_snapshot, &range);
+ }
+ })
+ .ok()
+ };
- for result in task_update_results {
+ for (range, result) in visible_range_update_results {
+ if let Err(e) = result {
+ query_range_failed(&range, e);
+ }
+ }
+
+ hint_delay.await;
+ let invisible_range_update_results = future::join_all(
+ query_ranges
+ .before_visible
+ .into_iter()
+ .chain(query_ranges.after_visible.into_iter())
+ .map(|invisible_range| async move {
+ (
+ invisible_range.clone(),
+ fetch_and_update_hints(false, invisible_range).await,
+ )
+ }),
+ )
+ .await;
+ for (range, result) in invisible_range_update_results {
if let Err(e) = result {
- error!("inlay hint update task failed: {e:#}");
+ query_range_failed(&range, e);
}
}
})
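
The rewritten task queries the visible ranges first, logs and invalidates any range whose request failed so it can be retried later, and only after a fixed delay (INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS) queries the before/after ranges. A simplified sketch of that ordering, using the futures and tokio crates in place of gpui's executor:

    use std::time::Duration;

    use futures::future::join_all;

    async fn fetch_hints(range: std::ops::Range<usize>) -> Result<usize, String> {
        // Stand-in for the per-range inlay hint request to the language server.
        Ok(range.len())
    }

    #[tokio::main]
    async fn main() {
        let visible = vec![2_000..2_500];
        let invisible = vec![1_500..2_000, 2_500..3_000];

        // Visible ranges are queried immediately...
        let first = join_all(visible.into_iter().map(fetch_hints)).await;
        println!("visible results: {first:?}");

        // ...while off-screen ranges wait behind a fixed delay, so fast scrolling
        // does not flood the language server with requests for text never shown.
        tokio::time::sleep(Duration::from_millis(400)).await;
        let second = join_all(invisible.into_iter().map(fetch_hints)).await;
        println!("invisible results: {second:?}");
    }
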
@@ -559,11 +846,53 @@ async fn fetch_and_update_hints(
visible_hints: Arc<Vec<Inlay>>,
cached_excerpt_hints: Option<Arc<RwLock<CachedExcerptHints>>>,
query: ExcerptQuery,
+ invalidate: bool,
fetch_range: Range<language::Anchor>,
+ lsp_request_limiter: Arc<Semaphore>,
mut cx: gpui::AsyncAppContext,
) -> anyhow::Result<()> {
+ let (lsp_request_guard, got_throttled) = if query.invalidate.should_invalidate() {
+ (None, false)
+ } else {
+ match lsp_request_limiter.try_acquire() {
+ Some(guard) => (Some(guard), false),
+ None => (Some(lsp_request_limiter.acquire().await), true),
+ }
+ };
+ let fetch_range_to_log =
+ fetch_range.start.to_point(&buffer_snapshot)..fetch_range.end.to_point(&buffer_snapshot);
let inlay_hints_fetch_task = editor
.update(&mut cx, |editor, cx| {
+ if got_throttled {
+ let query_not_around_visible_range = match editor.excerpt_visible_offsets(None, cx).remove(&query.excerpt_id) {
+ Some((_, _, current_visible_range)) => {
+ let visible_offset_length = current_visible_range.len();
+ let double_visible_range = current_visible_range
+ .start
+ .saturating_sub(visible_offset_length)
+ ..current_visible_range
+ .end
+ .saturating_add(visible_offset_length)
+ .min(buffer_snapshot.len());
+ !double_visible_range
+ .contains(&fetch_range.start.to_offset(&buffer_snapshot))
+ && !double_visible_range
+ .contains(&fetch_range.end.to_offset(&buffer_snapshot))
+ },
+ None => true,
+ };
+ if query_not_around_visible_range {
+ log::trace!("Fetching inlay hints for range {fetch_range_to_log:?} got throttled and fell off the current visible range, skipping.");
+ if let Some(task_ranges) = editor
+ .inlay_hint_cache
+ .update_tasks
+ .get_mut(&query.excerpt_id)
+ {
+ task_ranges.invalidate_range(&buffer_snapshot, &fetch_range);
+ }
+ return None;
+ }
+ }
editor
.buffer()
.read(cx)
@@ -578,16 +907,34 @@ async fn fetch_and_update_hints(
.ok()
.flatten();
let new_hints = match inlay_hints_fetch_task {
- Some(task) => task.await.context("inlay hint fetch task")?,
+ Some(fetch_task) => {
+ log::debug!(
+ "Fetching inlay hints for range {fetch_range_to_log:?}, reason: {query_reason}, invalidate: {invalidate}",
+ query_reason = query.reason,
+ );
+ log::trace!(
+ "Currently visible hints: {visible_hints:?}, cached hints present: {}",
+ cached_excerpt_hints.is_some(),
+ );
+ fetch_task.await.context("inlay hint fetch task")?
+ }
None => return Ok(()),
};
+ drop(lsp_request_guard);
+ log::debug!(
+ "Fetched {} hints for range {fetch_range_to_log:?}",
+ new_hints.len()
+ );
+ log::trace!("Fetched hints: {new_hints:?}");
+
let background_task_buffer_snapshot = buffer_snapshot.clone();
    let background_fetch_range = fetch_range.clone();
let new_update = cx
.background()
.spawn(async move {
calculate_hint_updates(
- query,
+ query.excerpt_id,
+ invalidate,
backround_fetch_range,
new_hints,
&background_task_buffer_snapshot,
@@ -597,12 +944,20 @@ async fn fetch_and_update_hints(
})
.await;
if let Some(new_update) = new_update {
+ log::info!(
+ "Applying update for range {fetch_range_to_log:?}: remove from editor: {}, remove from cache: {}, add to cache: {}",
+ new_update.remove_from_visible.len(),
+ new_update.remove_from_cache.len(),
+ new_update.add_to_cache.len()
+ );
+ log::trace!("New update: {new_update:?}");
editor
.update(&mut cx, |editor, cx| {
apply_hint_update(
editor,
new_update,
query,
+ invalidate,
buffer_snapshot,
multi_buffer_snapshot,
cx,
@@ -614,14 +969,15 @@ async fn fetch_and_update_hints(
}
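
fetch_and_update_hints now takes a permit from the new lsp_request_limiter for non-invalidating requests: it tries a non-blocking acquire first, and if it had to wait it re-checks whether the queried range is still near the viewport before spending an LSP round-trip. A minimal sketch of the same throttling idea using tokio's Semaphore (Zed's own gpui Semaphore has a slightly different API):

    use std::sync::Arc;

    use tokio::sync::Semaphore;

    const MAX_CONCURRENT_LSP_REQUESTS: usize = 5;

    async fn fetch_with_limit(limiter: Arc<Semaphore>, range: std::ops::Range<usize>) -> usize {
        // Fast path: a permit is free, fire the request immediately.
        let (_permit, got_throttled) = match limiter.clone().try_acquire_owned() {
            Ok(permit) => (permit, false),
            // Slow path: wait for a permit and remember that we were throttled, so
            // the caller can skip ranges that scrolled out of view in the meantime.
            Err(_) => (limiter.acquire_owned().await.expect("semaphore closed"), true),
        };
        if got_throttled {
            // Zed re-checks visibility here and bails out instead of querying
            // a range that has drifted far from the viewport while we waited.
        }
        range.len() // stand-in for the actual request; the permit drops afterwards
    }

    #[tokio::main]
    async fn main() {
        let limiter = Arc::new(Semaphore::new(MAX_CONCURRENT_LSP_REQUESTS));
        let tasks: Vec<_> = (0..8usize)
            .map(|i| tokio::spawn(fetch_with_limit(limiter.clone(), i * 100..i * 100 + 50)))
            .collect();
        for task in tasks {
            println!("fetched {} offsets", task.await.unwrap());
        }
    }
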
fn calculate_hint_updates(
- query: ExcerptQuery,
+ excerpt_id: ExcerptId,
+ invalidate: bool,
fetch_range: Range<language::Anchor>,
new_excerpt_hints: Vec<InlayHint>,
buffer_snapshot: &BufferSnapshot,
cached_excerpt_hints: Option<Arc<RwLock<CachedExcerptHints>>>,
visible_hints: &[Inlay],
) -> Option<ExcerptHintsUpdate> {
- let mut add_to_cache: HashSet<InlayHint> = HashSet::default();
+ let mut add_to_cache = Vec::<InlayHint>::new();
let mut excerpt_hints_to_persist = HashMap::default();
for new_hint in new_excerpt_hints {
if !contains_position(&fetch_range, new_hint.position, buffer_snapshot) {
@@ -634,13 +990,21 @@ fn calculate_hint_updates(
probe.1.position.cmp(&new_hint.position, buffer_snapshot)
}) {
Ok(ix) => {
- let (cached_inlay_id, cached_hint) = &cached_excerpt_hints.hints[ix];
- if cached_hint == &new_hint {
- excerpt_hints_to_persist.insert(*cached_inlay_id, cached_hint.kind);
- false
- } else {
- true
+ let mut missing_from_cache = true;
+ for (cached_inlay_id, cached_hint) in &cached_excerpt_hints.hints[ix..] {
+ if new_hint
+ .position
+ .cmp(&cached_hint.position, buffer_snapshot)
+ .is_gt()
+ {
+ break;
+ }
+ if cached_hint == &new_hint {
+ excerpt_hints_to_persist.insert(*cached_inlay_id, cached_hint.kind);
+ missing_from_cache = false;
+ }
}
+ missing_from_cache
}
Err(_) => true,
}
@@ -648,17 +1012,17 @@ fn calculate_hint_updates(
None => true,
};
if missing_from_cache {
- add_to_cache.insert(new_hint);
+ add_to_cache.push(new_hint);
}
}
let mut remove_from_visible = Vec::new();
let mut remove_from_cache = HashSet::default();
- if query.invalidate.should_invalidate() {
+ if invalidate {
remove_from_visible.extend(
visible_hints
.iter()
- .filter(|hint| hint.position.excerpt_id == query.excerpt_id)
+ .filter(|hint| hint.position.excerpt_id == excerpt_id)
.map(|inlay_hint| inlay_hint.id)
.filter(|hint_id| !excerpt_hints_to_persist.contains_key(hint_id)),
);
@@ -681,7 +1045,7 @@ fn calculate_hint_updates(
None
} else {
Some(ExcerptHintsUpdate {
- excerpt_id: query.excerpt_id,
+ excerpt_id,
remove_from_visible,
remove_from_cache,
add_to_cache,
@@ -702,6 +1066,7 @@ fn apply_hint_update(
editor: &mut Editor,
new_update: ExcerptHintsUpdate,
query: ExcerptQuery,
+ invalidate: bool,
buffer_snapshot: BufferSnapshot,
multi_buffer_snapshot: MultiBufferSnapshot,
cx: &mut ViewContext<'_, '_, Editor>,
@@ -740,11 +1105,21 @@ fn apply_hint_update(
.binary_search_by(|probe| probe.1.position.cmp(&new_hint.position, &buffer_snapshot))
{
Ok(i) => {
- if cached_hints[i].1.text() == new_hint.text() {
- None
- } else {
- Some(i)
+ let mut insert_position = Some(i);
+ for (_, cached_hint) in &cached_hints[i..] {
+ if new_hint
+ .position
+ .cmp(&cached_hint.position, &buffer_snapshot)
+ .is_gt()
+ {
+ break;
+ }
+ if cached_hint.text() == new_hint.text() {
+ insert_position = None;
+ break;
+ }
}
+ insert_position
}
Err(i) => Some(i),
};
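
Both calculate_hint_updates and apply_hint_update now treat the index returned by binary_search_by as pointing into a run of hints that share a position and walk forward through that run, since several hints can legitimately sit on the same anchor. The sketch below shows the same idea over plain offsets, using partition_point to land at the start of the run:

    // Each cached entry is (position, label); several labels may share one position.
    fn contains_hint(cached: &[(usize, &str)], new_pos: usize, new_label: &str) -> bool {
        // Land on the first entry at or after new_pos, then walk the run of equal
        // positions; comparing only a single entry would miss duplicates.
        let start = cached.partition_point(|(pos, _)| *pos < new_pos);
        cached[start..]
            .iter()
            .take_while(|(pos, _)| *pos == new_pos)
            .any(|(_, label)| *label == new_label)
    }

    fn main() {
        let cached = [(3, ": usize"), (7, ": &str"), (7, "len:"), (12, ": bool")];
        assert!(contains_hint(&cached, 7, "len:"));
        assert!(!contains_hint(&cached, 7, ": String"));
        assert!(!contains_hint(&cached, 9, ": i32"));
    }
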
@@ -769,7 +1144,7 @@ fn apply_hint_update(
cached_excerpt_hints.buffer_version = buffer_snapshot.version().clone();
drop(cached_excerpt_hints);
- if query.invalidate.should_invalidate() {
+ if invalidate {
let mut outdated_excerpt_caches = HashSet::default();
for (excerpt_id, excerpt_hints) in &editor.inlay_hint_cache().hints {
let excerpt_hints = excerpt_hints.read();
@@ -806,8 +1181,8 @@ fn apply_hint_update(
}
#[cfg(test)]
-mod tests {
- use std::sync::atomic::{AtomicBool, AtomicU32, Ordering};
+pub mod tests {
+ use std::sync::atomic::{AtomicBool, AtomicU32, AtomicUsize, Ordering};
use crate::{
scroll::{autoscroll::Autoscroll, scroll_amount::ScrollAmount},
@@ -885,13 +1260,13 @@ mod tests {
let mut edits_made = 1;
editor.update(cx, |editor, cx| {
- let expected_layers = vec!["0".to_string()];
+ let expected_hints = vec!["0".to_string()];
assert_eq!(
- expected_layers,
+ expected_hints,
cached_hint_labels(editor),
"Should get its first hints when opening the editor"
);
- assert_eq!(expected_layers, visible_hint_labels(editor, cx));
+ assert_eq!(expected_hints, visible_hint_labels(editor, cx));
let inlay_cache = editor.inlay_hint_cache();
assert_eq!(
inlay_cache.allowed_hint_kinds, allowed_hint_kinds,
@@ -910,13 +1285,13 @@ mod tests {
});
cx.foreground().run_until_parked();
editor.update(cx, |editor, cx| {
- let expected_layers = vec!["0".to_string(), "1".to_string()];
+ let expected_hints = vec!["0".to_string(), "1".to_string()];
assert_eq!(
- expected_layers,
+ expected_hints,
cached_hint_labels(editor),
"Should get new hints after an edit"
);
- assert_eq!(expected_layers, visible_hint_labels(editor, cx));
+ assert_eq!(expected_hints, visible_hint_labels(editor, cx));
let inlay_cache = editor.inlay_hint_cache();
assert_eq!(
inlay_cache.allowed_hint_kinds, allowed_hint_kinds,
@@ -935,13 +1310,13 @@ mod tests {
edits_made += 1;
cx.foreground().run_until_parked();
editor.update(cx, |editor, cx| {
- let expected_layers = vec!["0".to_string(), "1".to_string(), "2".to_string()];
+ let expected_hints = vec!["0".to_string(), "1".to_string(), "2".to_string()];
assert_eq!(
- expected_layers,
+ expected_hints,
cached_hint_labels(editor),
"Should get new hints after hint refresh/ request"
);
- assert_eq!(expected_layers, visible_hint_labels(editor, cx));
+ assert_eq!(expected_hints, visible_hint_labels(editor, cx));
let inlay_cache = editor.inlay_hint_cache();
assert_eq!(
inlay_cache.allowed_hint_kinds, allowed_hint_kinds,
@@ -49,13 +49,18 @@ impl FollowableItem for Editor {
fn from_state_proto(
pane: ViewHandle<workspace::Pane>,
- project: ModelHandle<Project>,
+ workspace: ViewHandle<Workspace>,
remote_id: ViewId,
state: &mut Option<proto::view::Variant>,
cx: &mut AppContext,
) -> Option<Task<Result<ViewHandle<Self>>>> {
- let Some(proto::view::Variant::Editor(_)) = state else { return None };
- let Some(proto::view::Variant::Editor(state)) = state.take() else { unreachable!() };
+ let project = workspace.read(cx).project().to_owned();
+ let Some(proto::view::Variant::Editor(_)) = state else {
+ return None;
+ };
+ let Some(proto::view::Variant::Editor(state)) = state.take() else {
+ unreachable!()
+ };
let client = project.read(cx).client();
let replica_id = project.read(cx).replica_id();
@@ -340,10 +345,16 @@ async fn update_editor_from_message(
let mut insertions = message.inserted_excerpts.into_iter().peekable();
while let Some(insertion) = insertions.next() {
- let Some(excerpt) = insertion.excerpt else { continue };
- let Some(previous_excerpt_id) = insertion.previous_excerpt_id else { continue };
+ let Some(excerpt) = insertion.excerpt else {
+ continue;
+ };
+ let Some(previous_excerpt_id) = insertion.previous_excerpt_id else {
+ continue;
+ };
let buffer_id = excerpt.buffer_id;
- let Some(buffer) = project.read(cx).buffer_for_id(buffer_id, cx) else { continue };
+ let Some(buffer) = project.read(cx).buffer_for_id(buffer_id, cx) else {
+ continue;
+ };
let adjacent_excerpts = iter::from_fn(|| {
let insertion = insertions.peek()?;
@@ -614,7 +625,7 @@ impl Item for Editor {
fn workspace_deactivated(&mut self, cx: &mut ViewContext<Self>) {
hide_link_definition(self, cx);
- self.link_go_to_definition_state.last_mouse_location = None;
+ self.link_go_to_definition_state.last_trigger_point = None;
}
fn is_dirty(&self, cx: &AppContext) -> bool {
@@ -753,7 +764,7 @@ impl Item for Editor {
Some(Box::new(handle.clone()))
}
- fn pixel_position_of_cursor(&self) -> Option<Vector2F> {
+ fn pixel_position_of_cursor(&self, _: &AppContext) -> Option<Vector2F> {
self.pixel_position_of_newest_cursor
}
@@ -1,22 +1,108 @@
-use crate::{Anchor, DisplayPoint, Editor, EditorSnapshot, SelectPhase};
+use crate::{
+ display_map::{DisplaySnapshot, InlayOffset},
+ element::PointForPosition,
+ hover_popover::{self, InlayHover},
+ Anchor, DisplayPoint, Editor, EditorSnapshot, SelectPhase,
+};
use gpui::{Task, ViewContext};
use language::{Bias, ToOffset};
-use project::LocationLink;
+use lsp::LanguageServerId;
+use project::{
+ HoverBlock, HoverBlockKind, InlayHintLabelPartTooltip, InlayHintTooltip, LocationLink,
+ ResolveState,
+};
use std::ops::Range;
use util::TryFutureExt;
#[derive(Debug, Default)]
pub struct LinkGoToDefinitionState {
- pub last_mouse_location: Option<Anchor>,
- pub symbol_range: Option<Range<Anchor>>,
+ pub last_trigger_point: Option<TriggerPoint>,
+ pub symbol_range: Option<DocumentRange>,
pub kind: Option<LinkDefinitionKind>,
- pub definitions: Vec<LocationLink>,
+ pub definitions: Vec<GoToDefinitionLink>,
pub task: Option<Task<Option<()>>>,
}
+#[derive(Debug)]
+pub enum GoToDefinitionTrigger {
+ Text(DisplayPoint),
+ InlayHint(InlayRange, lsp::Location, LanguageServerId),
+}
+
+#[derive(Debug, Clone)]
+pub enum GoToDefinitionLink {
+ Text(LocationLink),
+ InlayHint(lsp::Location, LanguageServerId),
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct InlayRange {
+ pub inlay_position: Anchor,
+ pub highlight_start: InlayOffset,
+ pub highlight_end: InlayOffset,
+}
+
+#[derive(Debug, Clone)]
+pub enum TriggerPoint {
+ Text(Anchor),
+ InlayHint(InlayRange, lsp::Location, LanguageServerId),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum DocumentRange {
+ Text(Range<Anchor>),
+ Inlay(InlayRange),
+}
+
+impl DocumentRange {
+ pub fn as_text_range(&self) -> Option<Range<Anchor>> {
+ match self {
+ Self::Text(range) => Some(range.clone()),
+ Self::Inlay(_) => None,
+ }
+ }
+
+ fn point_within_range(&self, trigger_point: &TriggerPoint, snapshot: &EditorSnapshot) -> bool {
+ match (self, trigger_point) {
+ (DocumentRange::Text(range), TriggerPoint::Text(point)) => {
+ let point_after_start = range.start.cmp(point, &snapshot.buffer_snapshot).is_le();
+ point_after_start && range.end.cmp(point, &snapshot.buffer_snapshot).is_ge()
+ }
+ (DocumentRange::Inlay(range), TriggerPoint::InlayHint(point, _, _)) => {
+ range.highlight_start.cmp(&point.highlight_end).is_le()
+ && range.highlight_end.cmp(&point.highlight_end).is_ge()
+ }
+ (DocumentRange::Inlay(_), TriggerPoint::Text(_))
+ | (DocumentRange::Text(_), TriggerPoint::InlayHint(_, _, _)) => false,
+ }
+ }
+}
+
+impl TriggerPoint {
+ fn anchor(&self) -> &Anchor {
+ match self {
+ TriggerPoint::Text(anchor) => anchor,
+ TriggerPoint::InlayHint(range, _, _) => &range.inlay_position,
+ }
+ }
+
+ pub fn definition_kind(&self, shift: bool) -> LinkDefinitionKind {
+ match self {
+ TriggerPoint::Text(_) => {
+ if shift {
+ LinkDefinitionKind::Type
+ } else {
+ LinkDefinitionKind::Symbol
+ }
+ }
+ TriggerPoint::InlayHint(_, _, _) => LinkDefinitionKind::Type,
+ }
+ }
+}
+
pub fn update_go_to_definition_link(
editor: &mut Editor,
- point: Option<DisplayPoint>,
+ origin: Option<GoToDefinitionTrigger>,
cmd_held: bool,
shift_held: bool,
cx: &mut ViewContext<Editor>,
@@ -25,23 +111,43 @@ pub fn update_go_to_definition_link(
// Store new mouse point as an anchor
let snapshot = editor.snapshot(cx);
- let point = point.map(|point| {
- snapshot
- .buffer_snapshot
- .anchor_before(point.to_offset(&snapshot.display_snapshot, Bias::Left))
- });
+ let trigger_point = match origin {
+ Some(GoToDefinitionTrigger::Text(p)) => {
+ Some(TriggerPoint::Text(snapshot.buffer_snapshot.anchor_before(
+ p.to_offset(&snapshot.display_snapshot, Bias::Left),
+ )))
+ }
+ Some(GoToDefinitionTrigger::InlayHint(p, lsp_location, language_server_id)) => {
+ Some(TriggerPoint::InlayHint(p, lsp_location, language_server_id))
+ }
+ None => None,
+ };
// If the new point is the same as the previously stored one, return early
if let (Some(a), Some(b)) = (
- &point,
- &editor.link_go_to_definition_state.last_mouse_location,
+ &trigger_point,
+ &editor.link_go_to_definition_state.last_trigger_point,
) {
- if a.cmp(b, &snapshot.buffer_snapshot).is_eq() {
- return;
+ match (a, b) {
+ (TriggerPoint::Text(anchor_a), TriggerPoint::Text(anchor_b)) => {
+ if anchor_a.cmp(anchor_b, &snapshot.buffer_snapshot).is_eq() {
+ return;
+ }
+ }
+ (TriggerPoint::InlayHint(range_a, _, _), TriggerPoint::InlayHint(range_b, _, _)) => {
+ if range_a
+ .inlay_position
+ .cmp(&range_b.inlay_position, &snapshot.buffer_snapshot)
+ .is_eq()
+ {
+ return;
+ }
+ }
+ _ => {}
}
}
- editor.link_go_to_definition_state.last_mouse_location = point.clone();
+ editor.link_go_to_definition_state.last_trigger_point = trigger_point.clone();
if pending_nonempty_selection {
hide_link_definition(editor, cx);
@@ -49,14 +155,9 @@ pub fn update_go_to_definition_link(
}
if cmd_held {
- if let Some(point) = point {
- let kind = if shift_held {
- LinkDefinitionKind::Type
- } else {
- LinkDefinitionKind::Symbol
- };
-
- show_link_definition(kind, editor, point, snapshot, cx);
+ if let Some(trigger_point) = trigger_point {
+ let kind = trigger_point.definition_kind(shift_held);
+ show_link_definition(kind, editor, trigger_point, snapshot, cx);
return;
}
}
@@ -64,6 +165,182 @@ pub fn update_go_to_definition_link(
hide_link_definition(editor, cx);
}
+pub fn update_inlay_link_and_hover_points(
+ snapshot: &DisplaySnapshot,
+ point_for_position: PointForPosition,
+ editor: &mut Editor,
+ cmd_held: bool,
+ shift_held: bool,
+ cx: &mut ViewContext<'_, '_, Editor>,
+) {
+ let hint_start_offset =
+ snapshot.display_point_to_inlay_offset(point_for_position.previous_valid, Bias::Left);
+ let hint_end_offset =
+ snapshot.display_point_to_inlay_offset(point_for_position.next_valid, Bias::Right);
+ let hovered_offset = if point_for_position.column_overshoot_after_line_end == 0 {
+ Some(snapshot.display_point_to_inlay_offset(point_for_position.exact_unclipped, Bias::Left))
+ } else {
+ None
+ };
+ let mut go_to_definition_updated = false;
+ let mut hover_updated = false;
+ if let Some(hovered_offset) = hovered_offset {
+ let buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
+ let previous_valid_anchor = buffer_snapshot.anchor_at(
+ point_for_position.previous_valid.to_point(snapshot),
+ Bias::Left,
+ );
+ let next_valid_anchor = buffer_snapshot.anchor_at(
+ point_for_position.next_valid.to_point(snapshot),
+ Bias::Right,
+ );
+ if let Some(hovered_hint) = editor
+ .visible_inlay_hints(cx)
+ .into_iter()
+ .skip_while(|hint| {
+ hint.position
+ .cmp(&previous_valid_anchor, &buffer_snapshot)
+ .is_lt()
+ })
+ .take_while(|hint| {
+ hint.position
+ .cmp(&next_valid_anchor, &buffer_snapshot)
+ .is_le()
+ })
+ .max_by_key(|hint| hint.id)
+ {
+ let inlay_hint_cache = editor.inlay_hint_cache();
+ let excerpt_id = previous_valid_anchor.excerpt_id;
+ if let Some(cached_hint) = inlay_hint_cache.hint_by_id(excerpt_id, hovered_hint.id) {
+ match cached_hint.resolve_state {
+ ResolveState::CanResolve(_, _) => {
+ if let Some(buffer_id) = previous_valid_anchor.buffer_id {
+ inlay_hint_cache.spawn_hint_resolve(
+ buffer_id,
+ excerpt_id,
+ hovered_hint.id,
+ cx,
+ );
+ }
+ }
+ ResolveState::Resolved => {
+ let mut actual_hint_start = hint_start_offset;
+ let mut actual_hint_end = hint_end_offset;
+ if cached_hint.padding_left {
+ actual_hint_start.0 += 1;
+ actual_hint_end.0 += 1;
+ }
+ if cached_hint.padding_right {
+ actual_hint_start.0 += 1;
+ actual_hint_end.0 += 1;
+ }
+ match cached_hint.label {
+ project::InlayHintLabel::String(_) => {
+ if let Some(tooltip) = cached_hint.tooltip {
+ hover_popover::hover_at_inlay(
+ editor,
+ InlayHover {
+ excerpt: excerpt_id,
+ tooltip: match tooltip {
+ InlayHintTooltip::String(text) => HoverBlock {
+ text,
+ kind: HoverBlockKind::PlainText,
+ },
+ InlayHintTooltip::MarkupContent(content) => {
+ HoverBlock {
+ text: content.value,
+ kind: content.kind,
+ }
+ }
+ },
+ triggered_from: hovered_offset,
+ range: InlayRange {
+ inlay_position: hovered_hint.position,
+ highlight_start: actual_hint_start,
+ highlight_end: actual_hint_end,
+ },
+ },
+ cx,
+ );
+ hover_updated = true;
+ }
+ }
+ project::InlayHintLabel::LabelParts(label_parts) => {
+ if let Some((hovered_hint_part, part_range)) =
+ hover_popover::find_hovered_hint_part(
+ label_parts,
+ actual_hint_start..actual_hint_end,
+ hovered_offset,
+ )
+ {
+ if let Some(tooltip) = hovered_hint_part.tooltip {
+ hover_popover::hover_at_inlay(
+ editor,
+ InlayHover {
+ excerpt: excerpt_id,
+ tooltip: match tooltip {
+ InlayHintLabelPartTooltip::String(text) => {
+ HoverBlock {
+ text,
+ kind: HoverBlockKind::PlainText,
+ }
+ }
+ InlayHintLabelPartTooltip::MarkupContent(
+ content,
+ ) => HoverBlock {
+ text: content.value,
+ kind: content.kind,
+ },
+ },
+ triggered_from: hovered_offset,
+ range: InlayRange {
+ inlay_position: hovered_hint.position,
+ highlight_start: part_range.start,
+ highlight_end: part_range.end,
+ },
+ },
+ cx,
+ );
+ hover_updated = true;
+ }
+ if let Some((language_server_id, location)) =
+ hovered_hint_part.location
+ {
+ go_to_definition_updated = true;
+ update_go_to_definition_link(
+ editor,
+ Some(GoToDefinitionTrigger::InlayHint(
+ InlayRange {
+ inlay_position: hovered_hint.position,
+ highlight_start: part_range.start,
+ highlight_end: part_range.end,
+ },
+ location,
+ language_server_id,
+ )),
+ cmd_held,
+ shift_held,
+ cx,
+ );
+ }
+ }
+ }
+ };
+ }
+ ResolveState::Resolving => {}
+ }
+ }
+ }
+ }
+
+ if !go_to_definition_updated {
+ update_go_to_definition_link(editor, None, cmd_held, shift_held, cx);
+ }
+ if !hover_updated {
+ hover_popover::hover_at(editor, None, cx);
+ }
+}
+
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum LinkDefinitionKind {
Symbol,
@@ -73,7 +350,7 @@ pub enum LinkDefinitionKind {
pub fn show_link_definition(
definition_kind: LinkDefinitionKind,
editor: &mut Editor,
- trigger_point: Anchor,
+ trigger_point: TriggerPoint,
snapshot: EditorSnapshot,
cx: &mut ViewContext<Editor>,
) {
@@ -86,10 +363,11 @@ pub fn show_link_definition(
return;
}
+ let trigger_anchor = trigger_point.anchor();
let (buffer, buffer_position) = if let Some(output) = editor
.buffer
.read(cx)
- .text_anchor_for_position(trigger_point.clone(), cx)
+ .text_anchor_for_position(trigger_anchor.clone(), cx)
{
output
} else {
@@ -99,7 +377,7 @@ pub fn show_link_definition(
let excerpt_id = if let Some((excerpt_id, _, _)) = editor
.buffer()
.read(cx)
- .excerpt_containing(trigger_point.clone(), cx)
+ .excerpt_containing(trigger_anchor.clone(), cx)
{
excerpt_id
} else {
@@ -114,52 +392,57 @@ pub fn show_link_definition(
// Don't request again if the location is within the symbol region of a previous request with the same kind
if let Some(symbol_range) = &editor.link_go_to_definition_state.symbol_range {
- let point_after_start = symbol_range
- .start
- .cmp(&trigger_point, &snapshot.buffer_snapshot)
- .is_le();
-
- let point_before_end = symbol_range
- .end
- .cmp(&trigger_point, &snapshot.buffer_snapshot)
- .is_ge();
-
- let point_within_range = point_after_start && point_before_end;
- if point_within_range && same_kind {
+ if same_kind && symbol_range.point_within_range(&trigger_point, &snapshot) {
return;
}
}
let task = cx.spawn(|this, mut cx| {
async move {
- // query the LSP for definition info
- let definition_request = cx.update(|cx| {
- project.update(cx, |project, cx| match definition_kind {
- LinkDefinitionKind::Symbol => project.definition(&buffer, buffer_position, cx),
-
- LinkDefinitionKind::Type => {
- project.type_definition(&buffer, buffer_position, cx)
- }
- })
- });
+ let result = match &trigger_point {
+ TriggerPoint::Text(_) => {
+ // query the LSP for definition info
+ cx.update(|cx| {
+ project.update(cx, |project, cx| match definition_kind {
+ LinkDefinitionKind::Symbol => {
+ project.definition(&buffer, buffer_position, cx)
+ }
- let result = definition_request.await.ok().map(|definition_result| {
- (
- definition_result.iter().find_map(|link| {
- link.origin.as_ref().map(|origin| {
- let start = snapshot
- .buffer_snapshot
- .anchor_in_excerpt(excerpt_id.clone(), origin.range.start);
- let end = snapshot
- .buffer_snapshot
- .anchor_in_excerpt(excerpt_id.clone(), origin.range.end);
-
- start..end
+ LinkDefinitionKind::Type => {
+ project.type_definition(&buffer, buffer_position, cx)
+ }
})
- }),
- definition_result,
- )
- });
+ })
+ .await
+ .ok()
+ .map(|definition_result| {
+ (
+ definition_result.iter().find_map(|link| {
+ link.origin.as_ref().map(|origin| {
+ let start = snapshot
+ .buffer_snapshot
+ .anchor_in_excerpt(excerpt_id.clone(), origin.range.start);
+ let end = snapshot
+ .buffer_snapshot
+ .anchor_in_excerpt(excerpt_id.clone(), origin.range.end);
+ DocumentRange::Text(start..end)
+ })
+ }),
+ definition_result
+ .into_iter()
+ .map(GoToDefinitionLink::Text)
+ .collect(),
+ )
+ })
+ }
+ TriggerPoint::InlayHint(trigger_source, lsp_location, server_id) => Some((
+ Some(DocumentRange::Inlay(*trigger_source)),
+ vec![GoToDefinitionLink::InlayHint(
+ lsp_location.clone(),
+ *server_id,
+ )],
+ )),
+ };
this.update(&mut cx, |this, cx| {
// Clear any existing highlights
@@ -178,43 +461,67 @@ pub fn show_link_definition(
// the current location.
let any_definition_does_not_contain_current_location =
definitions.iter().any(|definition| {
- let target = &definition.target;
- if target.buffer == buffer {
- let range = &target.range;
- // Expand range by one character as lsp definition ranges include positions adjacent
- // but not contained by the symbol range
- let start = buffer_snapshot.clip_offset(
- range.start.to_offset(&buffer_snapshot).saturating_sub(1),
- Bias::Left,
- );
- let end = buffer_snapshot.clip_offset(
- range.end.to_offset(&buffer_snapshot) + 1,
- Bias::Right,
- );
- let offset = buffer_position.to_offset(&buffer_snapshot);
- !(start <= offset && end >= offset)
- } else {
- true
+ match &definition {
+ GoToDefinitionLink::Text(link) => {
+ if link.target.buffer == buffer {
+ let range = &link.target.range;
+ // Expand range by one character as lsp definition ranges include positions adjacent
+ // but not contained by the symbol range
+ let start = buffer_snapshot.clip_offset(
+ range
+ .start
+ .to_offset(&buffer_snapshot)
+ .saturating_sub(1),
+ Bias::Left,
+ );
+ let end = buffer_snapshot.clip_offset(
+ range.end.to_offset(&buffer_snapshot) + 1,
+ Bias::Right,
+ );
+ let offset = buffer_position.to_offset(&buffer_snapshot);
+ !(start <= offset && end >= offset)
+ } else {
+ true
+ }
+ }
+ GoToDefinitionLink::InlayHint(_, _) => true,
}
});
if any_definition_does_not_contain_current_location {
- // If no symbol range returned from language server, use the surrounding word.
- let highlight_range = symbol_range.unwrap_or_else(|| {
- let snapshot = &snapshot.buffer_snapshot;
- let (offset_range, _) = snapshot.surrounding_word(trigger_point);
-
- snapshot.anchor_before(offset_range.start)
- ..snapshot.anchor_after(offset_range.end)
- });
-
// Highlight symbol using theme link definition highlight style
let style = theme::current(cx).editor.link_definition;
- this.highlight_text::<LinkGoToDefinitionState>(
- vec![highlight_range],
- style,
- cx,
- );
+ let highlight_range =
+ symbol_range.unwrap_or_else(|| match &trigger_point {
+ TriggerPoint::Text(trigger_anchor) => {
+ let snapshot = &snapshot.buffer_snapshot;
+ // If no symbol range returned from language server, use the surrounding word.
+ let (offset_range, _) =
+ snapshot.surrounding_word(*trigger_anchor);
+ DocumentRange::Text(
+ snapshot.anchor_before(offset_range.start)
+ ..snapshot.anchor_after(offset_range.end),
+ )
+ }
+ TriggerPoint::InlayHint(inlay_coordinates, _, _) => {
+ DocumentRange::Inlay(*inlay_coordinates)
+ }
+ });
+
+ match highlight_range {
+ DocumentRange::Text(text_range) => this
+ .highlight_text::<LinkGoToDefinitionState>(
+ vec![text_range],
+ style,
+ cx,
+ ),
+ DocumentRange::Inlay(inlay_coordinates) => this
+ .highlight_inlays::<LinkGoToDefinitionState>(
+ vec![inlay_coordinates],
+ style,
+ cx,
+ ),
+ }
} else {
hide_link_definition(this, cx);
}
@@ -245,7 +552,7 @@ pub fn hide_link_definition(editor: &mut Editor, cx: &mut ViewContext<Editor>) {
pub fn go_to_fetched_definition(
editor: &mut Editor,
- point: DisplayPoint,
+ point: PointForPosition,
split: bool,
cx: &mut ViewContext<Editor>,
) {
@@ -254,7 +561,7 @@ pub fn go_to_fetched_definition(
pub fn go_to_fetched_type_definition(
editor: &mut Editor,
- point: DisplayPoint,
+ point: PointForPosition,
split: bool,
cx: &mut ViewContext<Editor>,
) {
@@ -264,7 +571,7 @@ pub fn go_to_fetched_type_definition(
fn go_to_fetched_definition_of_kind(
kind: LinkDefinitionKind,
editor: &mut Editor,
- point: DisplayPoint,
+ point: PointForPosition,
split: bool,
cx: &mut ViewContext<Editor>,
) {
@@ -282,16 +589,18 @@ fn go_to_fetched_definition_of_kind(
} else {
editor.select(
SelectPhase::Begin {
- position: point,
+ position: point.next_valid,
add: false,
click_count: 1,
},
cx,
);
- match kind {
- LinkDefinitionKind::Symbol => editor.go_to_definition(&Default::default(), cx),
- LinkDefinitionKind::Type => editor.go_to_type_definition(&Default::default(), cx),
+ if point.as_valid().is_some() {
+ match kind {
+ LinkDefinitionKind::Symbol => editor.go_to_definition(&Default::default(), cx),
+ LinkDefinitionKind::Type => editor.go_to_type_definition(&Default::default(), cx),
+ }
}
}
}
@@ -299,14 +608,21 @@ fn go_to_fetched_definition_of_kind(
#[cfg(test)]
mod tests {
use super::*;
- use crate::{editor_tests::init_test, test::editor_lsp_test_context::EditorLspTestContext};
+ use crate::{
+ display_map::ToDisplayPoint,
+ editor_tests::init_test,
+ inlay_hint_cache::tests::{cached_hint_labels, visible_hint_labels},
+ test::editor_lsp_test_context::EditorLspTestContext,
+ };
use futures::StreamExt;
use gpui::{
platform::{self, Modifiers, ModifiersChangedEvent},
View,
};
use indoc::indoc;
+ use language::language_settings::InlayHintSettings;
use lsp::request::{GotoDefinition, GotoTypeDefinition};
+ use util::assert_set_eq;
#[gpui::test]
async fn test_link_go_to_type_definition(cx: &mut gpui::TestAppContext) {
@@ -355,7 +671,13 @@ mod tests {
// Press cmd+shift to trigger highlight
cx.update_editor(|editor, cx| {
- update_go_to_definition_link(editor, Some(hover_point), true, true, cx);
+ update_go_to_definition_link(
+ editor,
+ Some(GoToDefinitionTrigger::Text(hover_point)),
+ true,
+ true,
+ cx,
+ );
});
requests.next().await;
cx.foreground().run_until_parked();
@@ -406,7 +728,7 @@ mod tests {
});
cx.update_editor(|editor, cx| {
- go_to_fetched_type_definition(editor, hover_point, false, cx);
+ go_to_fetched_type_definition(editor, PointForPosition::valid(hover_point), false, cx);
});
requests.next().await;
cx.foreground().run_until_parked();
@@ -461,7 +783,13 @@ mod tests {
});
cx.update_editor(|editor, cx| {
- update_go_to_definition_link(editor, Some(hover_point), true, false, cx);
+ update_go_to_definition_link(
+ editor,
+ Some(GoToDefinitionTrigger::Text(hover_point)),
+ true,
+ false,
+ cx,
+ );
});
requests.next().await;
cx.foreground().run_until_parked();
@@ -482,7 +810,7 @@ mod tests {
"});
// Response without source range still highlights word
- cx.update_editor(|editor, _| editor.link_go_to_definition_state.last_mouse_location = None);
+ cx.update_editor(|editor, _| editor.link_go_to_definition_state.last_trigger_point = None);
let mut requests = cx.handle_request::<GotoDefinition, _, _>(move |url, _, _| async move {
Ok(Some(lsp::GotoDefinitionResponse::Link(vec![
lsp::LocationLink {
@@ -495,7 +823,13 @@ mod tests {
])))
});
cx.update_editor(|editor, cx| {
- update_go_to_definition_link(editor, Some(hover_point), true, false, cx);
+ update_go_to_definition_link(
+ editor,
+ Some(GoToDefinitionTrigger::Text(hover_point)),
+ true,
+ false,
+ cx,
+ );
});
requests.next().await;
cx.foreground().run_until_parked();
@@ -517,7 +851,13 @@ mod tests {
Ok(Some(lsp::GotoDefinitionResponse::Link(vec![])))
});
cx.update_editor(|editor, cx| {
- update_go_to_definition_link(editor, Some(hover_point), true, false, cx);
+ update_go_to_definition_link(
+ editor,
+ Some(GoToDefinitionTrigger::Text(hover_point)),
+ true,
+ false,
+ cx,
+ );
});
requests.next().await;
cx.foreground().run_until_parked();
@@ -534,7 +874,13 @@ mod tests {
fn do_work() { teˇst(); }
"});
cx.update_editor(|editor, cx| {
- update_go_to_definition_link(editor, Some(hover_point), false, false, cx);
+ update_go_to_definition_link(
+ editor,
+ Some(GoToDefinitionTrigger::Text(hover_point)),
+ false,
+ false,
+ cx,
+ );
});
cx.foreground().run_until_parked();
@@ -593,7 +939,13 @@ mod tests {
// Moving the mouse restores the highlights.
cx.update_editor(|editor, cx| {
- update_go_to_definition_link(editor, Some(hover_point), true, false, cx);
+ update_go_to_definition_link(
+ editor,
+ Some(GoToDefinitionTrigger::Text(hover_point)),
+ true,
+ false,
+ cx,
+ );
});
cx.foreground().run_until_parked();
cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
@@ -607,7 +959,13 @@ mod tests {
fn do_work() { tesˇt(); }
"});
cx.update_editor(|editor, cx| {
- update_go_to_definition_link(editor, Some(hover_point), true, false, cx);
+ update_go_to_definition_link(
+ editor,
+ Some(GoToDefinitionTrigger::Text(hover_point)),
+ true,
+ false,
+ cx,
+ );
});
cx.foreground().run_until_parked();
cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
@@ -617,7 +975,7 @@ mod tests {
// Cmd click with existing definition doesn't re-request and dismisses highlight
cx.update_editor(|editor, cx| {
- go_to_fetched_definition(editor, hover_point, false, cx);
+ go_to_fetched_definition(editor, PointForPosition::valid(hover_point), false, cx);
});
    // Assert selection moved to the definition
cx.lsp
@@ -626,6 +984,7 @@ mod tests {
// the cached location instead
Ok(Some(lsp::GotoDefinitionResponse::Link(vec![])))
});
+ cx.foreground().run_until_parked();
cx.assert_editor_state(indoc! {"
fn «testˇ»() { do_work(); }
fn do_work() { test(); }
@@ -658,7 +1017,7 @@ mod tests {
])))
});
cx.update_editor(|editor, cx| {
- go_to_fetched_definition(editor, hover_point, false, cx);
+ go_to_fetched_definition(editor, PointForPosition::valid(hover_point), false, cx);
});
requests.next().await;
cx.foreground().run_until_parked();
@@ -703,7 +1062,13 @@ mod tests {
});
});
cx.update_editor(|editor, cx| {
- update_go_to_definition_link(editor, Some(hover_point), true, false, cx);
+ update_go_to_definition_link(
+ editor,
+ Some(GoToDefinitionTrigger::Text(hover_point)),
+ true,
+ false,
+ cx,
+ );
});
cx.foreground().run_until_parked();
assert!(requests.try_next().is_err());
@@ -713,4 +1078,217 @@ mod tests {
"});
cx.foreground().run_until_parked();
}
+
+ #[gpui::test]
+ async fn test_link_go_to_inlay(cx: &mut gpui::TestAppContext) {
+ init_test(cx, |settings| {
+ settings.defaults.inlay_hints = Some(InlayHintSettings {
+ enabled: true,
+ show_type_hints: true,
+ show_parameter_hints: true,
+ show_other_hints: true,
+ })
+ });
+
+ let mut cx = EditorLspTestContext::new_rust(
+ lsp::ServerCapabilities {
+ inlay_hint_provider: Some(lsp::OneOf::Left(true)),
+ ..Default::default()
+ },
+ cx,
+ )
+ .await;
+ cx.set_state(indoc! {"
+ struct TestStruct;
+
+ fn main() {
+ let variableˇ = TestStruct;
+ }
+ "});
+ let hint_start_offset = cx.ranges(indoc! {"
+ struct TestStruct;
+
+ fn main() {
+ let variableˇ = TestStruct;
+ }
+ "})[0]
+ .start;
+ let hint_position = cx.to_lsp(hint_start_offset);
+ let target_range = cx.lsp_range(indoc! {"
+ struct «TestStruct»;
+
+ fn main() {
+ let variable = TestStruct;
+ }
+ "});
+
+ let expected_uri = cx.buffer_lsp_url.clone();
+ let hint_label = ": TestStruct";
+ cx.lsp
+ .handle_request::<lsp::request::InlayHintRequest, _, _>(move |params, _| {
+ let expected_uri = expected_uri.clone();
+ async move {
+ assert_eq!(params.text_document.uri, expected_uri);
+ Ok(Some(vec![lsp::InlayHint {
+ position: hint_position,
+ label: lsp::InlayHintLabel::LabelParts(vec![lsp::InlayHintLabelPart {
+ value: hint_label.to_string(),
+ location: Some(lsp::Location {
+ uri: params.text_document.uri,
+ range: target_range,
+ }),
+ ..Default::default()
+ }]),
+ kind: Some(lsp::InlayHintKind::TYPE),
+ text_edits: None,
+ tooltip: None,
+ padding_left: Some(false),
+ padding_right: Some(false),
+ data: None,
+ }]))
+ }
+ })
+ .next()
+ .await;
+ cx.foreground().run_until_parked();
+ cx.update_editor(|editor, cx| {
+ let expected_layers = vec![hint_label.to_string()];
+ assert_eq!(expected_layers, cached_hint_labels(editor));
+ assert_eq!(expected_layers, visible_hint_labels(editor, cx));
+ });
+
+ let inlay_range = cx
+ .ranges(indoc! {"
+ struct TestStruct;
+
+ fn main() {
+ let variable« »= TestStruct;
+ }
+ "})
+ .get(0)
+ .cloned()
+ .unwrap();
+ let hint_hover_position = cx.update_editor(|editor, cx| {
+ let snapshot = editor.snapshot(cx);
+ let previous_valid = inlay_range.start.to_display_point(&snapshot);
+ let next_valid = inlay_range.end.to_display_point(&snapshot);
+ assert_eq!(previous_valid.row(), next_valid.row());
+ assert!(previous_valid.column() < next_valid.column());
+ let exact_unclipped = DisplayPoint::new(
+ previous_valid.row(),
+ previous_valid.column() + (hint_label.len() / 2) as u32,
+ );
+ PointForPosition {
+ previous_valid,
+ next_valid,
+ exact_unclipped,
+ column_overshoot_after_line_end: 0,
+ }
+ });
+ // Press cmd to trigger highlight
+ cx.update_editor(|editor, cx| {
+ update_inlay_link_and_hover_points(
+ &editor.snapshot(cx),
+ hint_hover_position,
+ editor,
+ true,
+ false,
+ cx,
+ );
+ });
+ cx.foreground().run_until_parked();
+ cx.update_editor(|editor, cx| {
+ let snapshot = editor.snapshot(cx);
+ let actual_ranges = snapshot
+ .highlight_ranges::<LinkGoToDefinitionState>()
+ .map(|ranges| ranges.as_ref().clone().1)
+ .unwrap_or_default()
+ .into_iter()
+ .map(|range| match range {
+ DocumentRange::Text(range) => {
+ panic!("Unexpected regular text selection range {range:?}")
+ }
+ DocumentRange::Inlay(inlay_range) => inlay_range,
+ })
+ .collect::<Vec<_>>();
+
+ let buffer_snapshot = editor.buffer().update(cx, |buffer, cx| buffer.snapshot(cx));
+ let expected_highlight_start = snapshot.display_point_to_inlay_offset(
+ inlay_range.start.to_display_point(&snapshot),
+ Bias::Left,
+ );
+ let expected_ranges = vec![InlayRange {
+ inlay_position: buffer_snapshot.anchor_at(inlay_range.start, Bias::Right),
+ highlight_start: expected_highlight_start,
+ highlight_end: InlayOffset(expected_highlight_start.0 + hint_label.len()),
+ }];
+ assert_set_eq!(actual_ranges, expected_ranges);
+ });
+
+        // Releasing cmd causes the highlight to go away
+ cx.update_editor(|editor, cx| {
+ editor.modifiers_changed(
+ &platform::ModifiersChangedEvent {
+ modifiers: Modifiers {
+ cmd: false,
+ ..Default::default()
+ },
+ ..Default::default()
+ },
+ cx,
+ );
+ });
+ // Assert no link highlights
+ cx.update_editor(|editor, cx| {
+ let snapshot = editor.snapshot(cx);
+ let actual_ranges = snapshot
+ .highlight_ranges::<LinkGoToDefinitionState>()
+ .map(|ranges| ranges.as_ref().clone().1)
+ .unwrap_or_default()
+ .into_iter()
+ .map(|range| match range {
+ DocumentRange::Text(range) => {
+ panic!("Unexpected regular text selection range {range:?}")
+ }
+ DocumentRange::Inlay(inlay_range) => inlay_range,
+ })
+ .collect::<Vec<_>>();
+
+ assert!(actual_ranges.is_empty(), "When no cmd is pressed, should have no hint label selected, but got: {actual_ranges:?}");
+ });
+
+ // Cmd+click without existing definition requests and jumps
+ cx.update_editor(|editor, cx| {
+ editor.modifiers_changed(
+ &platform::ModifiersChangedEvent {
+ modifiers: Modifiers {
+ cmd: true,
+ ..Default::default()
+ },
+ ..Default::default()
+ },
+ cx,
+ );
+ update_inlay_link_and_hover_points(
+ &editor.snapshot(cx),
+ hint_hover_position,
+ editor,
+ true,
+ false,
+ cx,
+ );
+ });
+ cx.foreground().run_until_parked();
+ cx.update_editor(|editor, cx| {
+ go_to_fetched_type_definition(editor, hint_hover_position, false, cx);
+ });
+ cx.foreground().run_until_parked();
+ cx.assert_editor_state(indoc! {"
+ struct «TestStructˇ»;
+
+ fn main() {
+ let variable = TestStruct;
+ }
+ "});
+ }
}
@@ -756,7 +756,8 @@ mod tests {
.select_font(family_id, &Default::default())
.unwrap();
- let buffer = cx.add_model(|cx| Buffer::new(0, "abc\ndefg\nhijkl\nmn", cx));
+ let buffer =
+ cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "abc\ndefg\nhijkl\nmn"));
let multibuffer = cx.add_model(|cx| {
let mut multibuffer = MultiBuffer::new(0);
multibuffer.push_excerpts(
@@ -6,7 +6,7 @@ use clock::ReplicaId;
use collections::{BTreeMap, Bound, HashMap, HashSet};
use futures::{channel::mpsc, SinkExt};
use git::diff::DiffHunk;
-use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task};
+use gpui::{AppContext, Entity, ModelContext, ModelHandle};
pub use language::Completion;
use language::{
char_kind,
@@ -67,7 +67,9 @@ pub enum Event {
ExcerptsEdited {
ids: Vec<ExcerptId>,
},
- Edited,
+ Edited {
+ sigleton_buffer_edited: bool,
+ },
Reloaded,
DiffBaseChanged,
LanguageChanged,
@@ -788,59 +790,59 @@ impl MultiBuffer {
pub fn stream_excerpts_with_context_lines(
&mut self,
- excerpts: Vec<(ModelHandle<Buffer>, Vec<Range<text::Anchor>>)>,
+ buffer: ModelHandle<Buffer>,
+ ranges: Vec<Range<text::Anchor>>,
context_line_count: u32,
cx: &mut ModelContext<Self>,
- ) -> (Task<()>, mpsc::Receiver<Range<Anchor>>) {
+ ) -> mpsc::Receiver<Range<Anchor>> {
let (mut tx, rx) = mpsc::channel(256);
- let task = cx.spawn(|this, mut cx| async move {
- for (buffer, ranges) in excerpts {
- let (buffer_id, buffer_snapshot) =
- buffer.read_with(&cx, |buffer, _| (buffer.remote_id(), buffer.snapshot()));
-
- let mut excerpt_ranges = Vec::new();
- let mut range_counts = Vec::new();
- cx.background()
- .scoped(|scope| {
- scope.spawn(async {
- let (ranges, counts) =
- build_excerpt_ranges(&buffer_snapshot, &ranges, context_line_count);
- excerpt_ranges = ranges;
- range_counts = counts;
- });
- })
- .await;
-
- let mut ranges = ranges.into_iter();
- let mut range_counts = range_counts.into_iter();
- for excerpt_ranges in excerpt_ranges.chunks(100) {
- let excerpt_ids = this.update(&mut cx, |this, cx| {
- this.push_excerpts(buffer.clone(), excerpt_ranges.iter().cloned(), cx)
+ cx.spawn(|this, mut cx| async move {
+ let (buffer_id, buffer_snapshot) =
+ buffer.read_with(&cx, |buffer, _| (buffer.remote_id(), buffer.snapshot()));
+
+ let mut excerpt_ranges = Vec::new();
+ let mut range_counts = Vec::new();
+ cx.background()
+ .scoped(|scope| {
+ scope.spawn(async {
+ let (ranges, counts) =
+ build_excerpt_ranges(&buffer_snapshot, &ranges, context_line_count);
+ excerpt_ranges = ranges;
+ range_counts = counts;
});
+ })
+ .await;
- for (excerpt_id, range_count) in
- excerpt_ids.into_iter().zip(range_counts.by_ref())
- {
- for range in ranges.by_ref().take(range_count) {
- let start = Anchor {
- buffer_id: Some(buffer_id),
- excerpt_id: excerpt_id.clone(),
- text_anchor: range.start,
- };
- let end = Anchor {
- buffer_id: Some(buffer_id),
- excerpt_id: excerpt_id.clone(),
- text_anchor: range.end,
- };
- if tx.send(start..end).await.is_err() {
- break;
- }
+ let mut ranges = ranges.into_iter();
+ let mut range_counts = range_counts.into_iter();
+ for excerpt_ranges in excerpt_ranges.chunks(100) {
+ let excerpt_ids = this.update(&mut cx, |this, cx| {
+ this.push_excerpts(buffer.clone(), excerpt_ranges.iter().cloned(), cx)
+ });
+
+ for (excerpt_id, range_count) in excerpt_ids.into_iter().zip(range_counts.by_ref())
+ {
+ for range in ranges.by_ref().take(range_count) {
+ let start = Anchor {
+ buffer_id: Some(buffer_id),
+ excerpt_id: excerpt_id.clone(),
+ text_anchor: range.start,
+ };
+ let end = Anchor {
+ buffer_id: Some(buffer_id),
+ excerpt_id: excerpt_id.clone(),
+ text_anchor: range.end,
+ };
+ if tx.send(start..end).await.is_err() {
+ break;
}
}
}
}
- });
- (task, rx)
+ })
+ .detach();
+
+ rx
}
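
stream_excerpts_with_context_lines no longer returns a Task alongside the receiver; the producer is detached and the caller just drains the channel, which ends when the detached task drops its sender. The same shape with std primitives (a thread and sync_channel standing in for gpui tasks and the futures channel):

    use std::sync::mpsc::{sync_channel, Receiver};
    use std::thread;

    fn stream_ranges(ranges: Vec<std::ops::Range<usize>>) -> Receiver<std::ops::Range<usize>> {
        let (tx, rx) = sync_channel(256);
        // The producer is detached: nobody joins it, and dropping `tx` at the end
        // of the thread is what terminates the consumer's loop below.
        thread::spawn(move || {
            for range in ranges {
                if tx.send(range).is_err() {
                    break; // receiver gone, stop early
                }
            }
        });
        rx
    }

    fn main() {
        let rx = stream_ranges(vec![0..10, 20..35, 40..42]);
        for range in rx {
            println!("excerpt range: {range:?}");
        }
    }
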
pub fn push_excerpts<O>(
@@ -1022,7 +1024,9 @@ impl MultiBuffer {
old: edit_start..edit_start,
new: edit_start..edit_end,
}]);
- cx.emit(Event::Edited);
+ cx.emit(Event::Edited {
+ sigleton_buffer_edited: false,
+ });
cx.emit(Event::ExcerptsAdded {
buffer,
predecessor: prev_excerpt_id,
@@ -1046,7 +1050,9 @@ impl MultiBuffer {
old: 0..prev_len,
new: 0..0,
}]);
- cx.emit(Event::Edited);
+ cx.emit(Event::Edited {
+ sigleton_buffer_edited: false,
+ });
cx.emit(Event::ExcerptsRemoved { ids });
cx.notify();
}
@@ -1254,7 +1260,9 @@ impl MultiBuffer {
}
self.subscriptions.publish_mut(edits);
- cx.emit(Event::Edited);
+ cx.emit(Event::Edited {
+ sigleton_buffer_edited: false,
+ });
cx.emit(Event::ExcerptsRemoved { ids });
cx.notify();
}
@@ -1315,7 +1323,9 @@ impl MultiBuffer {
cx: &mut ModelContext<Self>,
) {
cx.emit(match event {
- language::Event::Edited => Event::Edited,
+ language::Event::Edited => Event::Edited {
+ sigleton_buffer_edited: true,
+ },
language::Event::DirtyChanged => Event::DirtyChanged,
language::Event::Saved => Event::Saved,
language::Event::FileHandleChanged => Event::FileHandleChanged,
@@ -1560,7 +1570,7 @@ impl MultiBuffer {
#[cfg(any(test, feature = "test-support"))]
impl MultiBuffer {
pub fn build_simple(text: &str, cx: &mut gpui::AppContext) -> ModelHandle<Self> {
- let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text));
cx.add_model(|cx| Self::singleton(buffer, cx))
}
@@ -1570,7 +1580,7 @@ impl MultiBuffer {
) -> ModelHandle<Self> {
let multi = cx.add_model(|_| Self::new(0));
for (text, ranges) in excerpts {
- let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text));
let excerpt_ranges = ranges.into_iter().map(|range| ExcerptRange {
context: range,
primary: None,
@@ -1662,7 +1672,7 @@ impl MultiBuffer {
if excerpt_ids.is_empty() || (rng.gen() && excerpt_ids.len() < max_excerpts) {
let buffer_handle = if rng.gen() || self.buffers.borrow().is_empty() {
let text = RandomCharIter::new(&mut *rng).take(10).collect::<String>();
- buffers.push(cx.add_model(|cx| Buffer::new(0, text, cx)));
+ buffers.push(cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text)));
let buffer = buffers.last().unwrap().read(cx);
log::info!(
"Creating new buffer {} with text: {:?}",
@@ -2756,7 +2766,9 @@ impl MultiBufferSnapshot {
// Get the ranges of the innermost pair of brackets.
let mut result: Option<(Range<usize>, Range<usize>)> = None;
- let Some(enclosing_bracket_ranges) = self.enclosing_bracket_ranges(range.clone()) else { return None; };
+ let Some(enclosing_bracket_ranges) = self.enclosing_bracket_ranges(range.clone()) else {
+ return None;
+ };
for (open, close) in enclosing_bracket_ranges {
let len = close.end - open.start;
@@ -4010,7 +4022,8 @@ mod tests {
#[gpui::test]
fn test_singleton(cx: &mut AppContext) {
- let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'a'), cx));
+ let buffer =
+ cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(6, 6, 'a')));
let multibuffer = cx.add_model(|cx| MultiBuffer::singleton(buffer.clone(), cx));
let snapshot = multibuffer.read(cx).snapshot(cx);
@@ -4037,7 +4050,7 @@ mod tests {
#[gpui::test]
fn test_remote(cx: &mut AppContext) {
- let host_buffer = cx.add_model(|cx| Buffer::new(0, "a", cx));
+ let host_buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "a"));
let guest_buffer = cx.add_model(|cx| {
let state = host_buffer.read(cx).to_proto();
let ops = cx
@@ -4068,15 +4081,17 @@ mod tests {
#[gpui::test]
fn test_excerpt_boundaries_and_clipping(cx: &mut AppContext) {
- let buffer_1 = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'a'), cx));
- let buffer_2 = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'g'), cx));
+ let buffer_1 =
+ cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(6, 6, 'a')));
+ let buffer_2 =
+ cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(6, 6, 'g')));
let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
let events = Rc::new(RefCell::new(Vec::<Event>::new()));
multibuffer.update(cx, |_, cx| {
let events = events.clone();
cx.subscribe(&multibuffer, move |_, _, event, _| {
- if let Event::Edited = event {
+ if let Event::Edited { .. } = event {
events.borrow_mut().push(event.clone())
}
})
@@ -4131,7 +4146,17 @@ mod tests {
// Adding excerpts emits an edited event.
assert_eq!(
events.borrow().as_slice(),
- &[Event::Edited, Event::Edited, Event::Edited]
+ &[
+ Event::Edited {
+ sigleton_buffer_edited: false
+ },
+ Event::Edited {
+ sigleton_buffer_edited: false
+ },
+ Event::Edited {
+ sigleton_buffer_edited: false
+ }
+ ]
);
let snapshot = multibuffer.read(cx).snapshot(cx);
@@ -4292,8 +4317,10 @@ mod tests {
#[gpui::test]
fn test_excerpt_events(cx: &mut AppContext) {
- let buffer_1 = cx.add_model(|cx| Buffer::new(0, sample_text(10, 3, 'a'), cx));
- let buffer_2 = cx.add_model(|cx| Buffer::new(0, sample_text(10, 3, 'm'), cx));
+ let buffer_1 =
+ cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(10, 3, 'a')));
+ let buffer_2 =
+ cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(10, 3, 'm')));
let leader_multibuffer = cx.add_model(|_| MultiBuffer::new(0));
let follower_multibuffer = cx.add_model(|_| MultiBuffer::new(0));
@@ -4310,7 +4337,7 @@ mod tests {
excerpts,
} => follower.insert_excerpts_with_ids_after(predecessor, buffer, excerpts, cx),
Event::ExcerptsRemoved { ids } => follower.remove_excerpts(ids, cx),
- Event::Edited => {
+ Event::Edited { .. } => {
*follower_edit_event_count.borrow_mut() += 1;
}
_ => {}
@@ -4398,7 +4425,8 @@ mod tests {
#[gpui::test]
fn test_push_excerpts_with_context_lines(cx: &mut AppContext) {
- let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(20, 3, 'a'), cx));
+ let buffer =
+ cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(20, 3, 'a')));
let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| {
multibuffer.push_excerpts_with_context_lines(
@@ -4434,9 +4462,10 @@ mod tests {
#[gpui::test]
async fn test_stream_excerpts_with_context_lines(cx: &mut TestAppContext) {
- let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(20, 3, 'a'), cx));
+ let buffer =
+ cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(20, 3, 'a')));
let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
- let (task, anchor_ranges) = multibuffer.update(cx, |multibuffer, cx| {
+ let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| {
let snapshot = buffer.read(cx);
let ranges = vec![
snapshot.anchor_before(Point::new(3, 2))..snapshot.anchor_before(Point::new(4, 2)),
@@ -4444,12 +4473,10 @@ mod tests {
snapshot.anchor_before(Point::new(15, 0))
..snapshot.anchor_before(Point::new(15, 0)),
];
- multibuffer.stream_excerpts_with_context_lines(vec![(buffer.clone(), ranges)], 2, cx)
+ multibuffer.stream_excerpts_with_context_lines(buffer.clone(), ranges, 2, cx)
});
let anchor_ranges = anchor_ranges.collect::<Vec<_>>().await;
- // Ensure task is finished when stream completes.
- task.await;
let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx));
assert_eq!(
@@ -4482,7 +4509,7 @@ mod tests {
#[gpui::test]
fn test_singleton_multibuffer_anchors(cx: &mut AppContext) {
- let buffer = cx.add_model(|cx| Buffer::new(0, "abcd", cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "abcd"));
let multibuffer = cx.add_model(|cx| MultiBuffer::singleton(buffer.clone(), cx));
let old_snapshot = multibuffer.read(cx).snapshot(cx);
buffer.update(cx, |buffer, cx| {
@@ -4502,8 +4529,8 @@ mod tests {
#[gpui::test]
fn test_multibuffer_anchors(cx: &mut AppContext) {
- let buffer_1 = cx.add_model(|cx| Buffer::new(0, "abcd", cx));
- let buffer_2 = cx.add_model(|cx| Buffer::new(0, "efghi", cx));
+ let buffer_1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "abcd"));
+ let buffer_2 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "efghi"));
let multibuffer = cx.add_model(|cx| {
let mut multibuffer = MultiBuffer::new(0);
multibuffer.push_excerpts(
@@ -4560,8 +4587,8 @@ mod tests {
#[gpui::test]
fn test_resolving_anchors_after_replacing_their_excerpts(cx: &mut AppContext) {
- let buffer_1 = cx.add_model(|cx| Buffer::new(0, "abcd", cx));
- let buffer_2 = cx.add_model(|cx| Buffer::new(0, "ABCDEFGHIJKLMNOP", cx));
+ let buffer_1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "abcd"));
+ let buffer_2 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "ABCDEFGHIJKLMNOP"));
let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
// Create an insertion id in buffer 1 that doesn't exist in buffer 2.
@@ -4956,7 +4983,9 @@ mod tests {
let base_text = util::RandomCharIter::new(&mut rng)
.take(10)
.collect::<String>();
- buffers.push(cx.add_model(|cx| Buffer::new(0, base_text, cx)));
+ buffers.push(
+ cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, base_text)),
+ );
buffers.last().unwrap()
} else {
buffers.choose(&mut rng).unwrap()
@@ -5297,8 +5326,8 @@ mod tests {
fn test_history(cx: &mut AppContext) {
cx.set_global(SettingsStore::test(cx));
- let buffer_1 = cx.add_model(|cx| Buffer::new(0, "1234", cx));
- let buffer_2 = cx.add_model(|cx| Buffer::new(0, "5678", cx));
+ let buffer_1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "1234"));
+ let buffer_2 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "5678"));
let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
let group_interval = multibuffer.read(cx).history.group_interval;
multibuffer.update(cx, |multibuffer, cx| {
@@ -65,47 +65,52 @@ impl Editor {
self.set_scroll_position(scroll_position, cx);
}
- let (autoscroll, local) =
- if let Some(autoscroll) = self.scroll_manager.autoscroll_request.take() {
- autoscroll
- } else {
- return false;
- };
-
- let first_cursor_top;
- let last_cursor_bottom;
+ let Some((autoscroll, local)) = self.scroll_manager.autoscroll_request.take() else {
+ return false;
+ };
+
+ let mut target_top;
+ let mut target_bottom;
if let Some(highlighted_rows) = &self.highlighted_rows {
- first_cursor_top = highlighted_rows.start as f32;
- last_cursor_bottom = first_cursor_top + 1.;
- } else if autoscroll == Autoscroll::newest() {
- let newest_selection = self.selections.newest::<Point>(cx);
- first_cursor_top = newest_selection.head().to_display_point(&display_map).row() as f32;
- last_cursor_bottom = first_cursor_top + 1.;
+ target_top = highlighted_rows.start as f32;
+ target_bottom = target_top + 1.;
} else {
let selections = self.selections.all::<Point>(cx);
- first_cursor_top = selections
+ target_top = selections
.first()
.unwrap()
.head()
.to_display_point(&display_map)
.row() as f32;
- last_cursor_bottom = selections
+ target_bottom = selections
.last()
.unwrap()
.head()
.to_display_point(&display_map)
.row() as f32
+ 1.0;
+
+ // If the selections can't all fit on screen, scroll to the newest.
+ if autoscroll == Autoscroll::newest()
+ || autoscroll == Autoscroll::fit() && target_bottom - target_top > visible_lines
+ {
+ let newest_selection_top = selections
+ .iter()
+ .max_by_key(|s| s.id)
+ .unwrap()
+ .head()
+ .to_display_point(&display_map)
+ .row() as f32;
+ target_top = newest_selection_top;
+ target_bottom = newest_selection_top + 1.;
+ }
}
let margin = if matches!(self.mode, EditorMode::AutoHeight { .. }) {
0.
} else {
- ((visible_lines - (last_cursor_bottom - first_cursor_top)) / 2.0).floor()
+ ((visible_lines - (target_bottom - target_top)) / 2.0).floor()
};
- if margin < 0.0 {
- return false;
- }
let strategy = match autoscroll {
Autoscroll::Strategy(strategy) => strategy,
@@ -113,8 +118,8 @@ impl Editor {
let last_autoscroll = &self.scroll_manager.last_autoscroll;
if let Some(last_autoscroll) = last_autoscroll {
if self.scroll_manager.anchor.offset == last_autoscroll.0
- && first_cursor_top == last_autoscroll.1
- && last_cursor_bottom == last_autoscroll.2
+ && target_top == last_autoscroll.1
+ && target_bottom == last_autoscroll.2
{
last_autoscroll.3.next()
} else {
@@ -129,37 +134,41 @@ impl Editor {
match strategy {
AutoscrollStrategy::Fit | AutoscrollStrategy::Newest => {
let margin = margin.min(self.scroll_manager.vertical_scroll_margin);
- let target_top = (first_cursor_top - margin).max(0.0);
- let target_bottom = last_cursor_bottom + margin;
+ let target_top = (target_top - margin).max(0.0);
+ let target_bottom = target_bottom + margin;
let start_row = scroll_position.y();
let end_row = start_row + visible_lines;
- if target_top < start_row {
+ let needs_scroll_up = target_top < start_row;
+ let needs_scroll_down = target_bottom >= end_row;
+
+ if needs_scroll_up && !needs_scroll_down {
scroll_position.set_y(target_top);
self.set_scroll_position_internal(scroll_position, local, true, cx);
- } else if target_bottom >= end_row {
+ }
+ if !needs_scroll_up && needs_scroll_down {
scroll_position.set_y(target_bottom - visible_lines);
self.set_scroll_position_internal(scroll_position, local, true, cx);
}
}
AutoscrollStrategy::Center => {
- scroll_position.set_y((first_cursor_top - margin).max(0.0));
+ scroll_position.set_y((target_top - margin).max(0.0));
self.set_scroll_position_internal(scroll_position, local, true, cx);
}
AutoscrollStrategy::Top => {
- scroll_position.set_y((first_cursor_top).max(0.0));
+ scroll_position.set_y((target_top).max(0.0));
self.set_scroll_position_internal(scroll_position, local, true, cx);
}
AutoscrollStrategy::Bottom => {
- scroll_position.set_y((last_cursor_bottom - visible_lines).max(0.0));
+ scroll_position.set_y((target_bottom - visible_lines).max(0.0));
self.set_scroll_position_internal(scroll_position, local, true, cx);
}
}
self.scroll_manager.last_autoscroll = Some((
self.scroll_manager.anchor.offset,
- first_cursor_top,
- last_cursor_bottom,
+ target_top,
+ target_bottom,
strategy,
));
@@ -225,6 +225,7 @@ impl<'a> EditorTestContext<'a> {
.map(|h| h.1.clone())
.unwrap_or_default()
.into_iter()
+ .filter_map(|range| range.as_text_range())
.map(|range| range.to_offset(&snapshot.buffer_snapshot))
.collect()
});
@@ -240,6 +241,7 @@ impl<'a> EditorTestContext<'a> {
.map(|ranges| ranges.as_ref().clone().1)
.unwrap_or_default()
.into_iter()
+ .filter_map(|range| range.as_text_range())
.map(|range| range.to_offset(&snapshot.buffer_snapshot))
.collect();
assert_set_eq!(actual_ranges, expected_ranges);
@@ -1,11 +1,11 @@
[package]
-name = "staff_mode"
+name = "feature_flags"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
-path = "src/staff_mode.rs"
+path = "src/feature_flags.rs"
[dependencies]
gpui = { path = "../gpui" }
@@ -0,0 +1,79 @@
+use gpui::{AppContext, Subscription, ViewContext};
+
+#[derive(Default)]
+struct FeatureFlags {
+ flags: Vec<String>,
+ staff: bool,
+}
+
+impl FeatureFlags {
+ fn has_flag(&self, flag: &str) -> bool {
+        self.staff || self.flags.iter().any(|f| f.as_str() == flag)
+ }
+}
+
+pub trait FeatureFlag {
+ const NAME: &'static str;
+}
+
+pub enum ChannelsAlpha {}
+
+impl FeatureFlag for ChannelsAlpha {
+ const NAME: &'static str = "channels_alpha";
+}
+
+pub trait FeatureFlagViewExt<V: 'static> {
+ fn observe_flag<T: FeatureFlag, F>(&mut self, callback: F) -> Subscription
+ where
+ F: Fn(bool, &mut V, &mut ViewContext<V>) + 'static;
+}
+
+impl<V: 'static> FeatureFlagViewExt<V> for ViewContext<'_, '_, V> {
+ fn observe_flag<T: FeatureFlag, F>(&mut self, callback: F) -> Subscription
+ where
+ F: Fn(bool, &mut V, &mut ViewContext<V>) + 'static,
+ {
+ self.observe_global::<FeatureFlags, _>(move |v, cx| {
+ let feature_flags = cx.global::<FeatureFlags>();
+ callback(feature_flags.has_flag(<T as FeatureFlag>::NAME), v, cx);
+ })
+ }
+}
+
+pub trait FeatureFlagAppExt {
+ fn update_flags(&mut self, staff: bool, flags: Vec<String>);
+ fn set_staff(&mut self, staff: bool);
+ fn has_flag<T: FeatureFlag>(&self) -> bool;
+ fn is_staff(&self) -> bool;
+}
+
+impl FeatureFlagAppExt for AppContext {
+ fn update_flags(&mut self, staff: bool, flags: Vec<String>) {
+ self.update_default_global::<FeatureFlags, _, _>(|feature_flags, _| {
+ feature_flags.staff = staff;
+ feature_flags.flags = flags;
+ })
+ }
+
+ fn set_staff(&mut self, staff: bool) {
+ self.update_default_global::<FeatureFlags, _, _>(|feature_flags, _| {
+ feature_flags.staff = staff;
+ })
+ }
+
+ fn has_flag<T: FeatureFlag>(&self) -> bool {
+ if self.has_global::<FeatureFlags>() {
+ self.global::<FeatureFlags>().has_flag(T::NAME)
+ } else {
+ false
+ }
+ }
+
+    fn is_staff(&self) -> bool {
+        if self.has_global::<FeatureFlags>() {
+            self.global::<FeatureFlags>().staff
+        } else {
+            false
+        }
+    }
+}
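
A hedged usage sketch of the new feature_flags API (the two helper functions below are hypothetical; the trait methods are the ones defined above):

    use feature_flags::{ChannelsAlpha, FeatureFlagAppExt, FeatureFlagViewExt};
    use gpui::{AppContext, Subscription, ViewContext};

    // App-level check: staff accounts implicitly pass every flag.
    fn channels_enabled(cx: &AppContext) -> bool {
        cx.has_flag::<ChannelsAlpha>()
    }

    // View-level: re-run the callback whenever the FeatureFlags global changes.
    fn observe_channels<V: 'static>(cx: &mut ViewContext<V>) -> Subscription {
        cx.observe_flag::<ChannelsAlpha, _>(|enabled, _view, cx| {
            if enabled {
                cx.notify();
            }
        })
    }
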
@@ -2,7 +2,7 @@ use button_component::Button;
use gpui::{
color::Color,
- elements::{Component, ContainerStyle, Flex, Label, ParentElement},
+ elements::{ContainerStyle, Flex, Label, ParentElement, StatefulComponent},
fonts::{self, TextStyle},
platform::WindowOptions,
AnyElement, App, Element, Entity, View, ViewContext,
@@ -114,7 +114,7 @@ mod theme {
// Component creation:
mod toggleable_button {
use gpui::{
- elements::{Component, ContainerStyle, LabelStyle},
+ elements::{ContainerStyle, LabelStyle, StatefulComponent},
scene::MouseClick,
EventContext, View,
};
@@ -156,7 +156,7 @@ mod toggleable_button {
}
}
- impl<V: View> Component<V> for ToggleableButton<V> {
+ impl<V: View> StatefulComponent<V> for ToggleableButton<V> {
fn render(self, v: &mut V, cx: &mut gpui::ViewContext<V>) -> gpui::AnyElement<V> {
let button = if let Some(style) = self.style {
self.button.with_style(*style.style_for(self.active))
@@ -171,7 +171,7 @@ mod toggleable_button {
mod button_component {
use gpui::{
- elements::{Component, ContainerStyle, Label, LabelStyle, MouseEventHandler},
+ elements::{ContainerStyle, Label, LabelStyle, MouseEventHandler, StatefulComponent},
platform::MouseButton,
scene::MouseClick,
AnyElement, Element, EventContext, TypeTag, View, ViewContext,
@@ -212,7 +212,7 @@ mod button_component {
}
}
- impl<V: View> Component<V> for Button<V> {
+ impl<V: View> StatefulComponent<V> for Button<V> {
fn render(self, _: &mut V, cx: &mut ViewContext<V>) -> AnyElement<V> {
let click_handler = self.click_handler;
@@ -3299,15 +3299,15 @@ impl<'a, 'b, V: 'static> ViewContext<'a, 'b, V> {
let region_id = MouseRegionId::new(tag, self.view_id, region_id);
MouseState {
hovered: self.window.hovered_region_ids.contains(&region_id),
- clicked: if let Some((clicked_region_id, button)) = self.window.clicked_region {
- if region_id == clicked_region_id {
- Some(button)
- } else {
- None
- }
- } else {
- None
- },
+ mouse_down: !self.window.clicked_region_ids.is_empty(),
+ clicked: self
+ .window
+ .clicked_region_ids
+ .iter()
+ .find(|click_region_id| **click_region_id == region_id)
+ // If we've gotten here, there should always be a clicked region.
+ // But let's be defensive and return None if there isn't.
+ .and_then(|_| self.window.clicked_region.map(|(_, button)| button)),
accessed_hovered: false,
accessed_clicked: false,
}
@@ -3798,14 +3798,20 @@ impl<'a, T> DerefMut for Reference<'a, T> {
pub struct MouseState {
pub(crate) hovered: bool,
pub(crate) clicked: Option<MouseButton>,
+ pub(crate) mouse_down: bool,
pub(crate) accessed_hovered: bool,
pub(crate) accessed_clicked: bool,
}
impl MouseState {
+ pub fn dragging(&mut self) -> bool {
+ self.accessed_hovered = true;
+ self.hovered && self.mouse_down
+ }
+
pub fn hovered(&mut self) -> bool {
self.accessed_hovered = true;
- self.hovered
+ self.hovered && (!self.mouse_down || self.clicked.is_some())
}
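// Editorial sketch (not part of this change): how the new `dragging` helper and
// the adjusted `hovered` semantics might be used from a render closure that
// receives `&mut MouseState` (the color choices are placeholders):
//
//     let color = if state.dragging() {
//         Color::red()        // hovered while a mouse button is held down
//     } else if state.hovered() {
//         Color::blue()       // hovered with no button held, or clicked in this region
//     } else {
//         Color::white()
//     };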
pub fn clicked(&mut self) -> Option<MouseButton> {
@@ -4656,12 +4662,13 @@ impl AnyWeakModelHandle {
}
}
-#[derive(Copy)]
pub struct WeakViewHandle<T> {
any_handle: AnyWeakViewHandle,
view_type: PhantomData<T>,
}
+impl<T> Copy for WeakViewHandle<T> {}
+
impl<T> Debug for WeakViewHandle<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct(&format!("WeakViewHandle<{}>", type_name::<T>()))
@@ -624,10 +624,11 @@ impl<'a> WindowContext<'a> {
}
}
- if self
- .window
- .platform_window
- .is_topmost_for_position(*position)
+ if pressed_button.is_none()
+ && self
+ .window
+ .platform_window
+ .is_topmost_for_position(*position)
{
self.platform().set_cursor_style(style_to_assign);
}
@@ -791,6 +792,11 @@ impl<'a> WindowContext<'a> {
if clicked_region_ids.contains(&mouse_region.id()) {
if mouse_region.bounds.contains_point(self.mouse_position()) {
valid_regions.push(mouse_region.clone());
+ } else {
+ // Let the view know that it is no longer clicked
+ if mouse_region.notify_on_click {
+ notified_views.insert(mouse_region.id().view_id());
+ }
}
}
}
@@ -234,6 +234,27 @@ pub trait Element<V: 'static>: 'static {
{
MouseEventHandler::for_child::<Tag>(self.into_any(), region_id)
}
+
+ fn component(self) -> StatelessElementAdapter
+ where
+ Self: Sized,
+ {
+ StatelessElementAdapter::new(self.into_any())
+ }
+
+ fn stateful_component(self) -> StatefulElementAdapter<V>
+ where
+ Self: Sized,
+ {
+ StatefulElementAdapter::new(self.into_any())
+ }
+
+ fn styleable_component(self) -> StylableAdapter<StatelessElementAdapter>
+ where
+ Self: Sized,
+ {
+ StatelessElementAdapter::new(self.into_any()).stylable()
+ }
}
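// Editorial sketch (not part of this change): the three helpers added above wrap
// any element for the component system; `Empty` is used only as a stand-in:
//
//     let stateless = Empty::new().component();           // -> StatelessElementAdapter
//     let stateful = Empty::new().stateful_component();   // -> StatefulElementAdapter<V> (V inferred)
//     let styleable = Empty::new().styleable_component(); // -> StylableAdapter<StatelessElementAdapter>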
trait AnyElementState<V> {
@@ -1,47 +1,96 @@
-use std::marker::PhantomData;
+use std::{any::Any, marker::PhantomData};
use pathfinder_geometry::{rect::RectF, vector::Vector2F};
use crate::{
- AnyElement, Element, LayoutContext, PaintContext, SceneBuilder, SizeConstraint, View,
- ViewContext,
+ AnyElement, Element, LayoutContext, PaintContext, SceneBuilder, SizeConstraint, ViewContext,
};
use super::Empty;
-pub trait GeneralComponent {
- fn render<V: View>(self, v: &mut V, cx: &mut ViewContext<V>) -> AnyElement<V>;
- fn element<V: View>(self) -> ComponentAdapter<V, Self>
+/// The core stateless component trait, simply rendering an element tree
+pub trait Component {
+ fn render<V: 'static>(self, cx: &mut ViewContext<V>) -> AnyElement<V>;
+
+ fn element<V: 'static>(self) -> ComponentAdapter<V, Self>
where
Self: Sized,
{
ComponentAdapter::new(self)
}
+
+ fn stylable(self) -> StylableAdapter<Self>
+ where
+ Self: Sized,
+ {
+ StylableAdapter::new(self)
+ }
+
+ fn stateful<V: 'static>(self) -> StatefulAdapter<Self, V>
+ where
+ Self: Sized,
+ {
+ StatefulAdapter::new(self)
+ }
}
-pub trait StyleableComponent {
+/// Allows a component's styles to be rebound in a simple way.
+pub trait Stylable: Component {
type Style: Clone;
- type Output: GeneralComponent;
+
+ fn with_style(self, style: Self::Style) -> Self;
+}
+
+/// This trait models the typestate pattern for a component's style,
+/// enforcing at compile time that a component is only usable after
+/// it has been styled, while still allowing for late binding of the
+/// styling information.
+pub trait SafeStylable {
+ type Style: Clone;
+ type Output: Component;
fn with_style(self, style: Self::Style) -> Self::Output;
}
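// Editorial sketch (not part of this change): the typestate flow enforced by
// SafeStylable. An unstyled component is given its Style, and only the resulting
// Output is usable as a component; `MyComponent` and `my_style` are placeholders:
//
//     let element = MyComponent::new()   // implements SafeStylable
//         .with_style(my_style)          // -> Self::Output, a styled Component
//         .element();                    // -> ComponentAdapter, renderable as an Element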
-impl GeneralComponent for () {
- fn render<V: View>(self, _: &mut V, _: &mut ViewContext<V>) -> AnyElement<V> {
- Empty::new().into_any()
+/// All stylable components can trivially implement SafeStylable
+impl<C: Stylable> SafeStylable for C {
+ type Style = C::Style;
+
+ type Output = C;
+
+ fn with_style(self, style: Self::Style) -> Self::Output {
+ self.with_style(style)
+ }
+}
+
+/// Allows converting an unstylable component into a stylable one
+/// by using `()` as the style type
+pub struct StylableAdapter<C: Component> {
+ component: C,
+}
+
+impl<C: Component> StylableAdapter<C> {
+ pub fn new(component: C) -> Self {
+ Self { component }
}
}
-impl StyleableComponent for () {
+impl<C: Component> SafeStylable for StylableAdapter<C> {
type Style = ();
- type Output = ();
+
+ type Output = C;
fn with_style(self, _: Self::Style) -> Self::Output {
- ()
+ self.component
}
}
-pub trait Component<V: View> {
+/// This is a secondary trait for stylable components that rely on their
+/// view's state. This is useful for components that, for example, want to
+/// take click handler callbacks. Unfortunately, the generic bound on the
+/// Component trait makes it incompatible with the stateless components above,
+/// so let's just replicate them for now.
+pub trait StatefulComponent<V: 'static> {
fn render(self, v: &mut V, cx: &mut ViewContext<V>) -> AnyElement<V>;
fn element(self) -> ComponentAdapter<V, Self>
@@ -50,21 +99,63 @@ pub trait Component<V: View> {
{
ComponentAdapter::new(self)
}
+
+ fn styleable(self) -> StatefulStylableAdapter<Self, V>
+ where
+ Self: Sized,
+ {
+ StatefulStylableAdapter::new(self)
+ }
+
+ fn stateless(self) -> StatelessElementAdapter
+ where
+ Self: Sized + 'static,
+ {
+ StatelessElementAdapter::new(self.element().into_any())
+ }
}
-impl<V: View, C: GeneralComponent> Component<V> for C {
- fn render(self, v: &mut V, cx: &mut ViewContext<V>) -> AnyElement<V> {
- self.render(v, cx)
+/// It is trivial to convert stateless components to stateful components, so let's
+/// do so en masse. Note that the reverse is impossible without a helper.
+impl<V: 'static, C: Component> StatefulComponent<V> for C {
+ fn render(self, _: &mut V, cx: &mut ViewContext<V>) -> AnyElement<V> {
+ self.render(cx)
}
}
-// StylableComponent -> GeneralComponent
-pub struct StylableComponentAdapter<C: Component<V>, V: View> {
+/// Same as Stylable, but generic over a view type
+pub trait StatefulStylable<V: 'static>: StatefulComponent<V> {
+ type Style: Clone;
+
+ fn with_style(self, style: Self::Style) -> Self;
+}
+
+/// Same as SafeStylable, but generic over a view type
+pub trait StatefulSafeStylable<V: 'static> {
+ type Style: Clone;
+ type Output: StatefulComponent<V>;
+
+ fn with_style(self, style: Self::Style) -> Self::Output;
+}
+
+/// Converting from stateless to stateful
+impl<V: 'static, C: SafeStylable> StatefulSafeStylable<V> for C {
+ type Style = C::Style;
+
+ type Output = C::Output;
+
+ fn with_style(self, style: Self::Style) -> Self::Output {
+ self.with_style(style)
+ }
+}
+
+// A helper for converting stateless components into stateful ones
+pub struct StatefulAdapter<C, V> {
component: C,
phantom: std::marker::PhantomData<V>,
}
-impl<C: Component<V>, V: View> StylableComponentAdapter<C, V> {
+impl<C: Component, V: 'static> StatefulAdapter<C, V> {
pub fn new(component: C) -> Self {
Self {
component,
@@ -73,7 +164,31 @@ impl<C: Component<V>, V: View> StylableComponentAdapter<C, V> {
}
}
-impl<C: GeneralComponent, V: View> StyleableComponent for StylableComponentAdapter<C, V> {
+impl<C: Component, V: 'static> StatefulComponent<V> for StatefulAdapter<C, V> {
+ fn render(self, _: &mut V, cx: &mut ViewContext<V>) -> AnyElement<V> {
+ self.component.render(cx)
+ }
+}
+
+// A helper for converting stateful but style-less components into stylable ones
+// by using `()` as the style type
+pub struct StatefulStylableAdapter<C: StatefulComponent<V>, V: 'static> {
+ component: C,
+ phantom: std::marker::PhantomData<V>,
+}
+
+impl<C: StatefulComponent<V>, V: 'static> StatefulStylableAdapter<C, V> {
+ pub fn new(component: C) -> Self {
+ Self {
+ component,
+ phantom: std::marker::PhantomData,
+ }
+ }
+}
+
+impl<C: StatefulComponent<V>, V: 'static> StatefulSafeStylable<V>
+ for StatefulStylableAdapter<C, V>
+{
type Style = ();
type Output = C;
@@ -83,13 +198,37 @@ impl<C: GeneralComponent, V: View> StyleableComponent for StylableComponentAdapt
}
}
-// Element -> Component
-pub struct ElementAdapter<V: View> {
+/// A way of erasing the view generic from an element, useful
+/// for wrapping up an explicit element tree into stateless
+/// components
+pub struct StatelessElementAdapter {
+ element: Box<dyn Any>,
+}
+
+impl StatelessElementAdapter {
+ pub fn new<V: 'static>(element: AnyElement<V>) -> Self {
+ StatelessElementAdapter {
+ element: Box::new(element) as Box<dyn Any>,
+ }
+ }
+}
+
+impl Component for StatelessElementAdapter {
+ fn render<V: 'static>(self, _: &mut ViewContext<V>) -> AnyElement<V> {
+ *self
+ .element
+ .downcast::<AnyElement<V>>()
+ .expect("Don't move elements out of their view :(")
+ }
+}
+
+// For converting elements into stateful components
+pub struct StatefulElementAdapter<V: 'static> {
element: AnyElement<V>,
_phantom: std::marker::PhantomData<V>,
}
-impl<V: View> ElementAdapter<V> {
+impl<V: 'static> StatefulElementAdapter<V> {
pub fn new(element: AnyElement<V>) -> Self {
Self {
element,
@@ -98,20 +237,35 @@ impl<V: View> ElementAdapter<V> {
}
}
-impl<V: View> Component<V> for ElementAdapter<V> {
+impl<V: 'static> StatefulComponent<V> for StatefulElementAdapter<V> {
fn render(self, _: &mut V, _: &mut ViewContext<V>) -> AnyElement<V> {
self.element
}
}
-// Component -> Element
-pub struct ComponentAdapter<V: View, E> {
+/// A convenient shorthand for creating an empty component.
+impl Component for () {
+ fn render<V: 'static>(self, _: &mut ViewContext<V>) -> AnyElement<V> {
+ Empty::new().into_any()
+ }
+}
+
+impl Stylable for () {
+ type Style = ();
+
+ fn with_style(self, _: Self::Style) -> Self {
+ ()
+ }
+}
+
+// For converting components back into Elements
+pub struct ComponentAdapter<V: 'static, E> {
component: Option<E>,
element: Option<AnyElement<V>>,
phantom: PhantomData<V>,
}
-impl<E, V: View> ComponentAdapter<V, E> {
+impl<E, V: 'static> ComponentAdapter<V, E> {
pub fn new(e: E) -> Self {
Self {
component: Some(e),
@@ -121,7 +275,7 @@ impl<E, V: View> ComponentAdapter<V, E> {
}
}
-impl<V: View, C: Component<V> + 'static> Element<V> for ComponentAdapter<V, C> {
+impl<V: 'static, C: StatefulComponent<V> + 'static> Element<V> for ComponentAdapter<V, C> {
type LayoutState = ();
type PaintState = ();
@@ -184,6 +338,7 @@ impl<V: View, C: Component<V> + 'static> Element<V> for ComponentAdapter<V, C> {
) -> serde_json::Value {
serde_json::json!({
"type": "ComponentAdapter",
+ "component": std::any::type_name::<C>(),
"child": self.element.as_ref().map(|el| el.debug(view, cx)),
})
}
@@ -44,6 +44,14 @@ impl ContainerStyle {
..Default::default()
}
}
+
+ pub fn additional_length(&self) -> f32 {
+ self.padding.left
+ + self.padding.right
+ + self.border.width * 2.
+ + self.margin.left
+ + self.margin.right
+ }
}
pub struct Container<V> {
@@ -22,6 +22,7 @@ pub struct Flex<V> {
children: Vec<AnyElement<V>>,
scroll_state: Option<(ElementStateHandle<Rc<ScrollState>>, usize)>,
child_alignment: f32,
+ spacing: f32,
}
impl<V: 'static> Flex<V> {
@@ -31,6 +32,7 @@ impl<V: 'static> Flex<V> {
children: Default::default(),
scroll_state: None,
child_alignment: -1.,
+ spacing: 0.,
}
}
@@ -51,6 +53,11 @@ impl<V: 'static> Flex<V> {
self
}
+ pub fn with_spacing(mut self, spacing: f32) -> Self {
+ self.spacing = spacing;
+ self
+ }
+
pub fn scrollable<Tag>(
mut self,
element_id: usize,
@@ -81,7 +88,7 @@ impl<V: 'static> Flex<V> {
cx: &mut LayoutContext<V>,
) {
let cross_axis = self.axis.invert();
- for child in &mut self.children {
+ for child in self.children.iter_mut() {
if let Some(metadata) = child.metadata::<FlexParentData>() {
if let Some((flex, expanded)) = metadata.flex {
if expanded != layout_expanded {
@@ -132,12 +139,12 @@ impl<V: 'static> Element<V> for Flex<V> {
cx: &mut LayoutContext<V>,
) -> (Vector2F, Self::LayoutState) {
let mut total_flex = None;
- let mut fixed_space = 0.0;
+ let mut fixed_space = self.children.len().saturating_sub(1) as f32 * self.spacing;
let mut contains_float = false;
let cross_axis = self.axis.invert();
let mut cross_axis_max: f32 = 0.0;
- for child in &mut self.children {
+ for child in self.children.iter_mut() {
let metadata = child.metadata::<FlexParentData>();
contains_float |= metadata.map_or(false, |metadata| metadata.float);
@@ -315,7 +322,7 @@ impl<V: 'static> Element<V> for Flex<V> {
}
}
- for child in &mut self.children {
+ for child in self.children.iter_mut() {
if remaining_space > 0. {
if let Some(metadata) = child.metadata::<FlexParentData>() {
if metadata.float {
@@ -354,8 +361,8 @@ impl<V: 'static> Element<V> for Flex<V> {
child.paint(scene, aligned_child_origin, visible_bounds, view, cx);
match self.axis {
- Axis::Horizontal => child_origin += vec2f(child.size().x(), 0.0),
- Axis::Vertical => child_origin += vec2f(0.0, child.size().y()),
+ Axis::Horizontal => child_origin += vec2f(child.size().x() + self.spacing, 0.0),
+ Axis::Vertical => child_origin += vec2f(0.0, child.size().y() + self.spacing),
}
}
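
A rough usage sketch of the new Flex spacing option (the label text and `style` are placeholders; Flex::column, with_child, and Label are the existing gpui builders):

    Flex::column()
        .with_spacing(4.)
        .with_child(Label::new("first", style.clone()).into_any())
        .with_child(Label::new("second", style.clone()).into_any())

Each child after the first is offset by the spacing along the main axis, and the spacing between children is counted as fixed space during layout.
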
@@ -67,7 +67,9 @@ impl KeymapContextPredicate {
}
pub fn eval(&self, contexts: &[KeymapContext]) -> bool {
- let Some(context) = contexts.first() else { return false };
+ let Some(context) = contexts.first() else {
+ return false;
+ };
match self {
Self::Identifier(name) => (&context.set).contains(name.as_str()),
Self::Equal(left, right) => context
@@ -14,7 +14,7 @@ use crate::{
CodeLabel, LanguageScope, Outline,
};
use anyhow::{anyhow, Result};
-use clock::ReplicaId;
+pub use clock::ReplicaId;
use fs::LineEnding;
use futures::FutureExt as _;
use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, Task};
@@ -347,13 +347,17 @@ impl CharKind {
}
impl Buffer {
- pub fn new<T: Into<String>>(
- replica_id: ReplicaId,
- base_text: T,
- cx: &mut ModelContext<Self>,
- ) -> Self {
+ pub fn new<T: Into<String>>(replica_id: ReplicaId, id: u64, base_text: T) -> Self {
Self::build(
- TextBuffer::new(replica_id, cx.model_id() as u64, base_text.into()),
+ TextBuffer::new(replica_id, id, base_text.into()),
+ None,
+ None,
+ )
+ }
+
+ pub fn remote(remote_id: u64, replica_id: ReplicaId, base_text: String) -> Self {
+ Self::build(
+ TextBuffer::new(replica_id, remote_id, base_text),
None,
None,
)
@@ -2476,7 +2480,9 @@ impl BufferSnapshot {
matches.advance();
- let Some((open, close)) = open.zip(close) else { continue };
+ let Some((open, close)) = open.zip(close) else {
+ continue;
+ };
let bracket_range = open.start..=close.end;
if !bracket_range.overlaps(&range) {
@@ -43,8 +43,8 @@ fn test_line_endings(cx: &mut gpui::AppContext) {
init_settings(cx, |_| {});
cx.add_model(|cx| {
- let mut buffer =
- Buffer::new(0, "one\r\ntwo\rthree", cx).with_language(Arc::new(rust_lang()), cx);
+ let mut buffer = Buffer::new(0, cx.model_id() as u64, "one\r\ntwo\rthree")
+ .with_language(Arc::new(rust_lang()), cx);
assert_eq!(buffer.text(), "one\ntwo\nthree");
assert_eq!(buffer.line_ending(), LineEnding::Windows);
@@ -138,8 +138,8 @@ fn test_edit_events(cx: &mut gpui::AppContext) {
let buffer_1_events = Rc::new(RefCell::new(Vec::new()));
let buffer_2_events = Rc::new(RefCell::new(Vec::new()));
- let buffer1 = cx.add_model(|cx| Buffer::new(0, "abcdef", cx));
- let buffer2 = cx.add_model(|cx| Buffer::new(1, "abcdef", cx));
+ let buffer1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "abcdef"));
+ let buffer2 = cx.add_model(|cx| Buffer::new(1, cx.model_id() as u64, "abcdef"));
let buffer1_ops = Rc::new(RefCell::new(Vec::new()));
buffer1.update(cx, {
let buffer1_ops = buffer1_ops.clone();
@@ -222,7 +222,7 @@ fn test_edit_events(cx: &mut gpui::AppContext) {
#[gpui::test]
async fn test_apply_diff(cx: &mut gpui::TestAppContext) {
let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
- let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text));
let anchor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(3, 3)));
let text = "a\nccc\ndddd\nffffff\n";
@@ -254,7 +254,7 @@ async fn test_normalize_whitespace(cx: &mut gpui::TestAppContext) {
]
.join("\n");
- let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text));
// Spawn a task to format the buffer's whitespace.
// Pause so that the formatting task starts running.
@@ -318,8 +318,9 @@ async fn test_normalize_whitespace(cx: &mut gpui::TestAppContext) {
#[gpui::test]
async fn test_reparse(cx: &mut gpui::TestAppContext) {
let text = "fn a() {}";
- let buffer =
- cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));
+ let buffer = cx.add_model(|cx| {
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
+ });
// Wait for the initial text to parse
buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
@@ -443,7 +444,8 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
#[gpui::test]
async fn test_resetting_language(cx: &mut gpui::TestAppContext) {
let buffer = cx.add_model(|cx| {
- let mut buffer = Buffer::new(0, "{}", cx).with_language(Arc::new(rust_lang()), cx);
+ let mut buffer =
+ Buffer::new(0, cx.model_id() as u64, "{}").with_language(Arc::new(rust_lang()), cx);
buffer.set_sync_parse_timeout(Duration::ZERO);
buffer
});
@@ -491,8 +493,9 @@ async fn test_outline(cx: &mut gpui::TestAppContext) {
"#
.unindent();
- let buffer =
- cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));
+ let buffer = cx.add_model(|cx| {
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
+ });
let outline = buffer
.read_with(cx, |buffer, _| buffer.snapshot().outline(None))
.unwrap();
@@ -576,8 +579,9 @@ async fn test_outline_nodes_with_newlines(cx: &mut gpui::TestAppContext) {
"#
.unindent();
- let buffer =
- cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));
+ let buffer = cx.add_model(|cx| {
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
+ });
let outline = buffer
.read_with(cx, |buffer, _| buffer.snapshot().outline(None))
.unwrap();
@@ -613,7 +617,9 @@ async fn test_outline_with_extra_context(cx: &mut gpui::TestAppContext) {
"#
.unindent();
- let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(language), cx));
+ let buffer = cx.add_model(|cx| {
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(language), cx)
+ });
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
// extra context nodes are included in the outline.
@@ -655,8 +661,9 @@ async fn test_symbols_containing(cx: &mut gpui::TestAppContext) {
"#
.unindent();
- let buffer =
- cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));
+ let buffer = cx.add_model(|cx| {
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
+ });
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
// point is at the start of an item
@@ -877,7 +884,8 @@ fn test_enclosing_bracket_ranges_where_brackets_are_not_outermost_children(cx: &
fn test_range_for_syntax_ancestor(cx: &mut AppContext) {
cx.add_model(|cx| {
let text = "fn a() { b(|c| {}) }";
- let buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
+ let buffer =
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
let snapshot = buffer.snapshot();
assert_eq!(
@@ -917,7 +925,8 @@ fn test_autoindent_with_soft_tabs(cx: &mut AppContext) {
cx.add_model(|cx| {
let text = "fn a() {}";
- let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
+ let mut buffer =
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx);
assert_eq!(buffer.text(), "fn a() {\n \n}");
@@ -959,7 +968,8 @@ fn test_autoindent_with_hard_tabs(cx: &mut AppContext) {
cx.add_model(|cx| {
let text = "fn a() {}";
- let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
+ let mut buffer =
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx);
assert_eq!(buffer.text(), "fn a() {\n\t\n}");
@@ -1000,6 +1010,7 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
cx.add_model(|cx| {
let mut buffer = Buffer::new(
0,
+ cx.model_id() as u64,
"
fn a() {
c;
@@ -1007,7 +1018,6 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
}
"
.unindent(),
- cx,
)
.with_language(Arc::new(rust_lang()), cx);
@@ -1073,6 +1083,7 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
cx.add_model(|cx| {
let mut buffer = Buffer::new(
0,
+ cx.model_id() as u64,
"
fn a() {
b();
@@ -1080,7 +1091,6 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
"
.replace("|", "") // marker to preserve trailing whitespace
.unindent(),
- cx,
)
.with_language(Arc::new(rust_lang()), cx);
@@ -1136,13 +1146,13 @@ fn test_autoindent_does_not_adjust_lines_within_newly_created_errors(cx: &mut Ap
cx.add_model(|cx| {
let mut buffer = Buffer::new(
0,
+ cx.model_id() as u64,
"
fn a() {
i
}
"
.unindent(),
- cx,
)
.with_language(Arc::new(rust_lang()), cx);
@@ -1198,11 +1208,11 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut AppContext) {
cx.add_model(|cx| {
let mut buffer = Buffer::new(
0,
+ cx.model_id() as u64,
"
fn a() {}
"
.unindent(),
- cx,
)
.with_language(Arc::new(rust_lang()), cx);
@@ -1254,7 +1264,8 @@ fn test_autoindent_with_edit_at_end_of_buffer(cx: &mut AppContext) {
cx.add_model(|cx| {
let text = "a\nb";
- let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
+ let mut buffer =
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
buffer.edit(
[(0..1, "\n"), (2..3, "\n")],
Some(AutoindentMode::EachLine),
@@ -1280,7 +1291,8 @@ fn test_autoindent_multi_line_insertion(cx: &mut AppContext) {
"
.unindent();
- let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
+ let mut buffer =
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
buffer.edit(
[(Point::new(3, 0)..Point::new(3, 0), "e(\n f()\n);\n")],
Some(AutoindentMode::EachLine),
@@ -1317,7 +1329,8 @@ fn test_autoindent_block_mode(cx: &mut AppContext) {
}
"#
.unindent();
- let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
+ let mut buffer =
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
// When this text was copied, both of the quotation marks were at the same
// indent level, but the indentation of the first line was not included in
@@ -1402,7 +1415,8 @@ fn test_autoindent_block_mode_without_original_indent_columns(cx: &mut AppContex
}
"#
.unindent();
- let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
+ let mut buffer =
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
// The original indent columns are not known, so this text is
// auto-indented in a block as if the first line was copied in
@@ -1481,7 +1495,7 @@ fn test_autoindent_language_without_indents_query(cx: &mut AppContext) {
"
.unindent();
- let mut buffer = Buffer::new(0, text, cx).with_language(
+ let mut buffer = Buffer::new(0, cx.model_id() as u64, text).with_language(
Arc::new(Language::new(
LanguageConfig {
name: "Markdown".into(),
@@ -1557,7 +1571,7 @@ fn test_autoindent_with_injected_languages(cx: &mut AppContext) {
false,
);
- let mut buffer = Buffer::new(0, text, cx);
+ let mut buffer = Buffer::new(0, cx.model_id() as u64, text);
buffer.set_language_registry(language_registry);
buffer.set_language(Some(html_language), cx);
buffer.edit(
@@ -1593,7 +1607,8 @@ fn test_autoindent_query_with_outdent_captures(cx: &mut AppContext) {
});
cx.add_model(|cx| {
- let mut buffer = Buffer::new(0, "", cx).with_language(Arc::new(ruby_lang()), cx);
+ let mut buffer =
+ Buffer::new(0, cx.model_id() as u64, "").with_language(Arc::new(ruby_lang()), cx);
let text = r#"
class C
@@ -1683,7 +1698,8 @@ fn test_language_scope_at_with_javascript(cx: &mut AppContext) {
let text = r#"a["b"] = <C d="e"></C>;"#;
- let buffer = Buffer::new(0, text, cx).with_language(Arc::new(language), cx);
+ let buffer =
+ Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(language), cx);
let snapshot = buffer.snapshot();
let config = snapshot.language_scope_at(0).unwrap();
@@ -1762,7 +1778,8 @@ fn test_language_scope_at_with_rust(cx: &mut AppContext) {
"#
.unindent();
- let buffer = Buffer::new(0, text.clone(), cx).with_language(Arc::new(language), cx);
+ let buffer = Buffer::new(0, cx.model_id() as u64, text.clone())
+ .with_language(Arc::new(language), cx);
let snapshot = buffer.snapshot();
// By default, all brackets are enabled
@@ -1806,7 +1823,7 @@ fn test_language_scope_at_with_combined_injections(cx: &mut AppContext) {
language_registry.add(Arc::new(html_lang()));
language_registry.add(Arc::new(erb_lang()));
- let mut buffer = Buffer::new(0, text, cx);
+ let mut buffer = Buffer::new(0, cx.model_id() as u64, text);
buffer.set_language_registry(language_registry.clone());
buffer.set_language(
language_registry
@@ -1838,7 +1855,7 @@ fn test_serialization(cx: &mut gpui::AppContext) {
let mut now = Instant::now();
let buffer1 = cx.add_model(|cx| {
- let mut buffer = Buffer::new(0, "abc", cx);
+ let mut buffer = Buffer::new(0, cx.model_id() as u64, "abc");
buffer.edit([(3..3, "D")], None, cx);
now += Duration::from_secs(1);
@@ -1893,7 +1910,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
let mut replica_ids = Vec::new();
let mut buffers = Vec::new();
let network = Rc::new(RefCell::new(Network::new(rng.clone())));
- let base_buffer = cx.add_model(|cx| Buffer::new(0, base_text.as_str(), cx));
+ let base_buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, base_text.as_str()));
for i in 0..rng.gen_range(min_peers..=max_peers) {
let buffer = cx.add_model(|cx| {
@@ -2394,7 +2411,8 @@ fn assert_bracket_pairs(
) {
let (expected_text, selection_ranges) = marked_text_ranges(selection_text, false);
let buffer = cx.add_model(|cx| {
- Buffer::new(0, expected_text.clone(), cx).with_language(Arc::new(language), cx)
+ Buffer::new(0, cx.model_id() as u64, expected_text.clone())
+ .with_language(Arc::new(language), cx)
});
let buffer = buffer.update(cx, |buffer, _cx| buffer.snapshot());
@@ -18,7 +18,7 @@ use futures::{
FutureExt, TryFutureExt as _,
};
use gpui::{executor::Background, AppContext, AsyncAppContext, Task};
-use highlight_map::HighlightMap;
+pub use highlight_map::HighlightMap;
use lazy_static::lazy_static;
use lsp::{CodeActionKind, LanguageServerBinary};
use parking_lot::{Mutex, RwLock};
@@ -207,6 +207,7 @@ pub fn serialize_anchor(anchor: &Anchor) -> proto::Anchor {
}
}
+// This behavior is currently copied in the collab database, for snapshotting channel notes
pub fn deserialize_operation(message: proto::Operation) -> Result<crate::Operation> {
Ok(
match message
@@ -310,7 +310,9 @@ impl SyntaxSnapshot {
// Ignore edits that end before the start of this layer, and don't consider them
// for any subsequent layers at this same depth.
loop {
- let Some((_, edit_range)) = edits.get(first_edit_ix_for_depth) else { continue 'outer };
+ let Some((_, edit_range)) = edits.get(first_edit_ix_for_depth) else {
+ continue 'outer;
+ };
if edit_range.end.cmp(&layer.range.start, text).is_le() {
first_edit_ix_for_depth += 1;
} else {
@@ -391,7 +393,9 @@ impl SyntaxSnapshot {
.filter::<_, ()>(|summary| summary.contains_unknown_injections);
cursor.next(text);
while let Some(layer) = cursor.item() {
- let SyntaxLayerContent::Pending { language_name } = &layer.content else { unreachable!() };
+ let SyntaxLayerContent::Pending { language_name } = &layer.content else {
+ unreachable!()
+ };
if registry
.language_for_name_or_extension(language_name)
.now_or_never()
@@ -533,7 +537,9 @@ impl SyntaxSnapshot {
let content = match step.language {
ParseStepLanguage::Loaded { language } => {
- let Some(grammar) = language.grammar() else { continue };
+ let Some(grammar) = language.grammar() else {
+ continue;
+ };
let tree;
let changed_ranges;
@@ -932,8 +932,12 @@ fn check_interpolation(
.zip(new_syntax_map.layers.iter())
{
assert_eq!(old_layer.range, new_layer.range);
- let Some(old_tree) = old_layer.content.tree() else { continue };
- let Some(new_tree) = new_layer.content.tree() else { continue };
+ let Some(old_tree) = old_layer.content.tree() else {
+ continue;
+ };
+ let Some(new_tree) = new_layer.content.tree() else {
+ continue;
+ };
let old_start_byte = old_layer.range.start.to_offset(old_buffer);
let new_start_byte = new_layer.range.start.to_offset(new_buffer);
let old_start_point = old_layer.range.start.to_point(old_buffer).to_ts_point();
@@ -176,7 +176,9 @@ impl LogStore {
cx.notify();
LanguageServerState {
rpc_state: None,
- log_buffer: cx.add_model(|cx| Buffer::new(0, "", cx)).clone(),
+ log_buffer: cx
+ .add_model(|cx| Buffer::new(0, cx.model_id() as u64, ""))
+ .clone(),
}
})
.log_buffer
@@ -241,7 +243,7 @@ impl LogStore {
let rpc_state = server_state.rpc_state.get_or_insert_with(|| {
let io_tx = self.io_tx.clone();
let language = project.read(cx).languages().language_for_name("JSON");
- let buffer = cx.add_model(|cx| Buffer::new(0, "", cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, ""));
cx.spawn_weak({
let buffer = buffer.clone();
|_, mut cx| async move {
@@ -327,7 +329,7 @@ impl LspLogView {
.projects
.get(&project.downgrade())
.and_then(|project| project.servers.keys().copied().next());
- let buffer = cx.add_model(|cx| Buffer::new(0, "", cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, ""));
let mut this = Self {
editor: Self::editor_for_buffer(project.clone(), buffer, cx),
project,
@@ -549,7 +551,9 @@ impl View for LspLogToolbarItemView {
fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> {
let theme = theme::current(cx).clone();
- let Some(log_view) = self.log_view.as_ref() else { return Empty::new().into_any() };
+ let Some(log_view) = self.log_view.as_ref() else {
+ return Empty::new().into_any();
+ };
let log_view = log_view.read(cx);
let menu_rows = log_view.menu_items(cx).unwrap_or_default();
@@ -77,7 +77,7 @@ pub enum Subscription {
}
#[derive(Serialize, Deserialize)]
-struct Request<'a, T> {
+pub struct Request<'a, T> {
jsonrpc: &'static str,
id: usize,
method: &'a str,
@@ -435,7 +435,13 @@ impl LanguageServer {
}),
inlay_hint: Some(InlayHintClientCapabilities {
resolve_support: Some(InlayHintResolveClientCapabilities {
- properties: vec!["textEdits".to_string(), "tooltip".to_string()],
+ properties: vec![
+ "textEdits".to_string(),
+ "tooltip".to_string(),
+ "label.tooltip".to_string(),
+ "label.location".to_string(),
+ "label.command".to_string(),
+ ],
}),
dynamic_registration: Some(false),
}),
@@ -1,21 +1,23 @@
use crate::{
DocumentHighlight, Hover, HoverBlock, HoverBlockKind, InlayHint, InlayHintLabel,
InlayHintLabelPart, InlayHintLabelPartTooltip, InlayHintTooltip, Location, LocationLink,
- MarkupContent, Project, ProjectTransaction,
+ MarkupContent, Project, ProjectTransaction, ResolveState,
};
use anyhow::{anyhow, Context, Result};
use async_trait::async_trait;
use client::proto::{self, PeerId};
use fs::LineEnding;
+use futures::future;
use gpui::{AppContext, AsyncAppContext, ModelHandle};
use language::{
language_settings::{language_settings, InlayHintKind},
point_from_lsp, point_to_lsp,
proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
- range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CachedLspAdapter, CharKind, CodeAction,
- Completion, OffsetRangeExt, PointUtf16, ToOffset, ToPointUtf16, Transaction, Unclipped,
+ range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CharKind,
+ CodeAction, Completion, OffsetRangeExt, PointUtf16, ToOffset, ToPointUtf16, Transaction,
+ Unclipped,
};
-use lsp::{DocumentHighlightKind, LanguageServer, LanguageServerId, ServerCapabilities};
+use lsp::{DocumentHighlightKind, LanguageServer, LanguageServerId, OneOf, ServerCapabilities};
use std::{cmp::Reverse, ops::Range, path::Path, sync::Arc};
pub fn lsp_formatting_options(tab_size: u32) -> lsp::FormattingOptions {
@@ -1431,7 +1433,7 @@ impl LspCommand for GetCompletions {
})
});
- Ok(futures::future::join_all(completions).await)
+ Ok(future::join_all(completions).await)
}
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetCompletions {
@@ -1499,7 +1501,7 @@ impl LspCommand for GetCompletions {
let completions = message.completions.into_iter().map(|completion| {
language::proto::deserialize_completion(completion, language.clone())
});
- futures::future::try_join_all(completions).await
+ future::try_join_all(completions).await
}
fn buffer_id_from_proto(message: &proto::GetCompletions) -> u64 {
@@ -1653,7 +1655,11 @@ impl LspCommand for OnTypeFormatting {
type ProtoRequest = proto::OnTypeFormatting;
fn check_capabilities(&self, server_capabilities: &lsp::ServerCapabilities) -> bool {
- let Some(on_type_formatting_options) = &server_capabilities.document_on_type_formatting_provider else { return false };
+ let Some(on_type_formatting_options) =
+ &server_capabilities.document_on_type_formatting_provider
+ else {
+ return false;
+ };
on_type_formatting_options
.first_trigger_character
.contains(&self.trigger)
@@ -1767,7 +1773,9 @@ impl LspCommand for OnTypeFormatting {
_: ModelHandle<Buffer>,
_: AsyncAppContext,
) -> Result<Option<Transaction>> {
- let Some(transaction) = message.transaction else { return Ok(None) };
+ let Some(transaction) = message.transaction else {
+ return Ok(None);
+ };
Ok(Some(language::proto::deserialize_transaction(transaction)?))
}
@@ -1776,6 +1784,377 @@ impl LspCommand for OnTypeFormatting {
}
}
+impl InlayHints {
+ pub async fn lsp_to_project_hint(
+ lsp_hint: lsp::InlayHint,
+ buffer_handle: &ModelHandle<Buffer>,
+ server_id: LanguageServerId,
+ resolve_state: ResolveState,
+ force_no_type_left_padding: bool,
+ cx: &mut AsyncAppContext,
+ ) -> anyhow::Result<InlayHint> {
+ let kind = lsp_hint.kind.and_then(|kind| match kind {
+ lsp::InlayHintKind::TYPE => Some(InlayHintKind::Type),
+ lsp::InlayHintKind::PARAMETER => Some(InlayHintKind::Parameter),
+ _ => None,
+ });
+
+ let position = cx.update(|cx| {
+ let buffer = buffer_handle.read(cx);
+ let position = buffer.clip_point_utf16(point_from_lsp(lsp_hint.position), Bias::Left);
+ if kind == Some(InlayHintKind::Parameter) {
+ buffer.anchor_before(position)
+ } else {
+ buffer.anchor_after(position)
+ }
+ });
+ let label = Self::lsp_inlay_label_to_project(lsp_hint.label, server_id)
+ .await
+ .context("lsp to project inlay hint conversion")?;
+ let padding_left = if force_no_type_left_padding && kind == Some(InlayHintKind::Type) {
+ false
+ } else {
+ lsp_hint.padding_left.unwrap_or(false)
+ };
+
+ Ok(InlayHint {
+ position,
+ padding_left,
+ padding_right: lsp_hint.padding_right.unwrap_or(false),
+ label,
+ kind,
+ tooltip: lsp_hint.tooltip.map(|tooltip| match tooltip {
+ lsp::InlayHintTooltip::String(s) => InlayHintTooltip::String(s),
+ lsp::InlayHintTooltip::MarkupContent(markup_content) => {
+ InlayHintTooltip::MarkupContent(MarkupContent {
+ kind: match markup_content.kind {
+ lsp::MarkupKind::PlainText => HoverBlockKind::PlainText,
+ lsp::MarkupKind::Markdown => HoverBlockKind::Markdown,
+ },
+ value: markup_content.value,
+ })
+ }
+ }),
+ resolve_state,
+ })
+ }
+
+ async fn lsp_inlay_label_to_project(
+ lsp_label: lsp::InlayHintLabel,
+ server_id: LanguageServerId,
+ ) -> anyhow::Result<InlayHintLabel> {
+ let label = match lsp_label {
+ lsp::InlayHintLabel::String(s) => InlayHintLabel::String(s),
+ lsp::InlayHintLabel::LabelParts(lsp_parts) => {
+ let mut parts = Vec::with_capacity(lsp_parts.len());
+ for lsp_part in lsp_parts {
+ parts.push(InlayHintLabelPart {
+ value: lsp_part.value,
+ tooltip: lsp_part.tooltip.map(|tooltip| match tooltip {
+ lsp::InlayHintLabelPartTooltip::String(s) => {
+ InlayHintLabelPartTooltip::String(s)
+ }
+ lsp::InlayHintLabelPartTooltip::MarkupContent(markup_content) => {
+ InlayHintLabelPartTooltip::MarkupContent(MarkupContent {
+ kind: match markup_content.kind {
+ lsp::MarkupKind::PlainText => HoverBlockKind::PlainText,
+ lsp::MarkupKind::Markdown => HoverBlockKind::Markdown,
+ },
+ value: markup_content.value,
+ })
+ }
+ }),
+ location: Some(server_id).zip(lsp_part.location),
+ });
+ }
+ InlayHintLabel::LabelParts(parts)
+ }
+ };
+
+ Ok(label)
+ }
+
+ pub fn project_to_proto_hint(response_hint: InlayHint) -> proto::InlayHint {
+ let (state, lsp_resolve_state) = match response_hint.resolve_state {
+ ResolveState::Resolved => (0, None),
+ ResolveState::CanResolve(server_id, resolve_data) => (
+ 1,
+ resolve_data
+ .map(|json_data| {
+ serde_json::to_string(&json_data)
+ .expect("failed to serialize resolve json data")
+ })
+ .map(|value| proto::resolve_state::LspResolveState {
+ server_id: server_id.0 as u64,
+ value,
+ }),
+ ),
+ ResolveState::Resolving => (2, None),
+ };
+ let resolve_state = Some(proto::ResolveState {
+ state,
+ lsp_resolve_state,
+ });
+ proto::InlayHint {
+ position: Some(language::proto::serialize_anchor(&response_hint.position)),
+ padding_left: response_hint.padding_left,
+ padding_right: response_hint.padding_right,
+ label: Some(proto::InlayHintLabel {
+ label: Some(match response_hint.label {
+ InlayHintLabel::String(s) => proto::inlay_hint_label::Label::Value(s),
+ InlayHintLabel::LabelParts(label_parts) => {
+ proto::inlay_hint_label::Label::LabelParts(proto::InlayHintLabelParts {
+ parts: label_parts.into_iter().map(|label_part| {
+ let location_url = label_part.location.as_ref().map(|(_, location)| location.uri.to_string());
+ let location_range_start = label_part.location.as_ref().map(|(_, location)| point_from_lsp(location.range.start).0).map(|point| proto::PointUtf16 { row: point.row, column: point.column });
+ let location_range_end = label_part.location.as_ref().map(|(_, location)| point_from_lsp(location.range.end).0).map(|point| proto::PointUtf16 { row: point.row, column: point.column });
+ proto::InlayHintLabelPart {
+ value: label_part.value,
+ tooltip: label_part.tooltip.map(|tooltip| {
+ let proto_tooltip = match tooltip {
+ InlayHintLabelPartTooltip::String(s) => proto::inlay_hint_label_part_tooltip::Content::Value(s),
+ InlayHintLabelPartTooltip::MarkupContent(markup_content) => proto::inlay_hint_label_part_tooltip::Content::MarkupContent(proto::MarkupContent {
+ is_markdown: markup_content.kind == HoverBlockKind::Markdown,
+ value: markup_content.value,
+ }),
+ };
+ proto::InlayHintLabelPartTooltip {content: Some(proto_tooltip)}
+ }),
+ location_url,
+ location_range_start,
+ location_range_end,
+ language_server_id: label_part.location.as_ref().map(|(server_id, _)| server_id.0 as u64),
+ }}).collect()
+ })
+ }
+ }),
+ }),
+ kind: response_hint.kind.map(|kind| kind.name().to_string()),
+ tooltip: response_hint.tooltip.map(|response_tooltip| {
+ let proto_tooltip = match response_tooltip {
+ InlayHintTooltip::String(s) => proto::inlay_hint_tooltip::Content::Value(s),
+ InlayHintTooltip::MarkupContent(markup_content) => {
+ proto::inlay_hint_tooltip::Content::MarkupContent(proto::MarkupContent {
+ is_markdown: markup_content.kind == HoverBlockKind::Markdown,
+ value: markup_content.value,
+ })
+ }
+ };
+ proto::InlayHintTooltip {
+ content: Some(proto_tooltip),
+ }
+ }),
+ resolve_state,
+ }
+ }
+
+ pub fn proto_to_project_hint(message_hint: proto::InlayHint) -> anyhow::Result<InlayHint> {
+ let resolve_state = message_hint.resolve_state.as_ref().unwrap_or_else(|| {
+ panic!("incorrect proto inlay hint message: no resolve state in hint {message_hint:?}",)
+ });
+ let resolve_state_data = resolve_state
+ .lsp_resolve_state.as_ref()
+ .map(|lsp_resolve_state| {
+ serde_json::from_str::<Option<lsp::LSPAny>>(&lsp_resolve_state.value)
+ .with_context(|| format!("incorrect proto inlay hint message: non-json resolve state {lsp_resolve_state:?}"))
+ .map(|state| (LanguageServerId(lsp_resolve_state.server_id as usize), state))
+ })
+ .transpose()?;
+ let resolve_state = match resolve_state.state {
+ 0 => ResolveState::Resolved,
+ 1 => {
+ let (server_id, lsp_resolve_state) = resolve_state_data.with_context(|| {
+ format!(
+ "No lsp resolve data for the hint that can be resolved: {message_hint:?}"
+ )
+ })?;
+ ResolveState::CanResolve(server_id, lsp_resolve_state)
+ }
+ 2 => ResolveState::Resolving,
+ invalid => {
+ anyhow::bail!("Unexpected resolve state {invalid} for hint {message_hint:?}")
+ }
+ };
+ Ok(InlayHint {
+ position: message_hint
+ .position
+ .and_then(language::proto::deserialize_anchor)
+ .context("invalid position")?,
+ label: match message_hint
+ .label
+ .and_then(|label| label.label)
+ .context("missing label")?
+ {
+ proto::inlay_hint_label::Label::Value(s) => InlayHintLabel::String(s),
+ proto::inlay_hint_label::Label::LabelParts(parts) => {
+ let mut label_parts = Vec::new();
+ for part in parts.parts {
+ label_parts.push(InlayHintLabelPart {
+ value: part.value,
+ tooltip: part.tooltip.map(|tooltip| match tooltip.content {
+ Some(proto::inlay_hint_label_part_tooltip::Content::Value(s)) => {
+ InlayHintLabelPartTooltip::String(s)
+ }
+ Some(
+ proto::inlay_hint_label_part_tooltip::Content::MarkupContent(
+ markup_content,
+ ),
+ ) => InlayHintLabelPartTooltip::MarkupContent(MarkupContent {
+ kind: if markup_content.is_markdown {
+ HoverBlockKind::Markdown
+ } else {
+ HoverBlockKind::PlainText
+ },
+ value: markup_content.value,
+ }),
+ None => InlayHintLabelPartTooltip::String(String::new()),
+ }),
+ location: {
+ match part
+ .location_url
+ .zip(
+ part.location_range_start.and_then(|start| {
+ Some(start..part.location_range_end?)
+ }),
+ )
+ .zip(part.language_server_id)
+ {
+ Some(((uri, range), server_id)) => Some((
+ LanguageServerId(server_id as usize),
+ lsp::Location {
+ uri: lsp::Url::parse(&uri)
+ .context("invalid uri in hint part {part:?}")?,
+ range: lsp::Range::new(
+ point_to_lsp(PointUtf16::new(
+ range.start.row,
+ range.start.column,
+ )),
+ point_to_lsp(PointUtf16::new(
+ range.end.row,
+ range.end.column,
+ )),
+ ),
+ },
+ )),
+ None => None,
+ }
+ },
+ });
+ }
+
+ InlayHintLabel::LabelParts(label_parts)
+ }
+ },
+ padding_left: message_hint.padding_left,
+ padding_right: message_hint.padding_right,
+ kind: message_hint
+ .kind
+ .as_deref()
+ .and_then(InlayHintKind::from_name),
+ tooltip: message_hint.tooltip.and_then(|tooltip| {
+ Some(match tooltip.content? {
+ proto::inlay_hint_tooltip::Content::Value(s) => InlayHintTooltip::String(s),
+ proto::inlay_hint_tooltip::Content::MarkupContent(markup_content) => {
+ InlayHintTooltip::MarkupContent(MarkupContent {
+ kind: if markup_content.is_markdown {
+ HoverBlockKind::Markdown
+ } else {
+ HoverBlockKind::PlainText
+ },
+ value: markup_content.value,
+ })
+ }
+ })
+ }),
+ resolve_state,
+ })
+ }
+
+ pub fn project_to_lsp_hint(hint: InlayHint, snapshot: &BufferSnapshot) -> lsp::InlayHint {
+ lsp::InlayHint {
+ position: point_to_lsp(hint.position.to_point_utf16(snapshot)),
+ kind: hint.kind.map(|kind| match kind {
+ InlayHintKind::Type => lsp::InlayHintKind::TYPE,
+ InlayHintKind::Parameter => lsp::InlayHintKind::PARAMETER,
+ }),
+ text_edits: None,
+ tooltip: hint.tooltip.and_then(|tooltip| {
+ Some(match tooltip {
+ InlayHintTooltip::String(s) => lsp::InlayHintTooltip::String(s),
+ InlayHintTooltip::MarkupContent(markup_content) => {
+ lsp::InlayHintTooltip::MarkupContent(lsp::MarkupContent {
+ kind: match markup_content.kind {
+ HoverBlockKind::PlainText => lsp::MarkupKind::PlainText,
+ HoverBlockKind::Markdown => lsp::MarkupKind::Markdown,
+ HoverBlockKind::Code { .. } => return None,
+ },
+ value: markup_content.value,
+ })
+ }
+ })
+ }),
+ label: match hint.label {
+ InlayHintLabel::String(s) => lsp::InlayHintLabel::String(s),
+ InlayHintLabel::LabelParts(label_parts) => lsp::InlayHintLabel::LabelParts(
+ label_parts
+ .into_iter()
+ .map(|part| lsp::InlayHintLabelPart {
+ value: part.value,
+ tooltip: part.tooltip.and_then(|tooltip| {
+ Some(match tooltip {
+ InlayHintLabelPartTooltip::String(s) => {
+ lsp::InlayHintLabelPartTooltip::String(s)
+ }
+ InlayHintLabelPartTooltip::MarkupContent(markup_content) => {
+ lsp::InlayHintLabelPartTooltip::MarkupContent(
+ lsp::MarkupContent {
+ kind: match markup_content.kind {
+ HoverBlockKind::PlainText => {
+ lsp::MarkupKind::PlainText
+ }
+ HoverBlockKind::Markdown => {
+ lsp::MarkupKind::Markdown
+ }
+ HoverBlockKind::Code { .. } => return None,
+ },
+ value: markup_content.value,
+ },
+ )
+ }
+ })
+ }),
+ location: part.location.map(|(_, location)| location),
+ command: None,
+ })
+ .collect(),
+ ),
+ },
+ padding_left: Some(hint.padding_left),
+ padding_right: Some(hint.padding_right),
+ data: match hint.resolve_state {
+ ResolveState::CanResolve(_, data) => data,
+ ResolveState::Resolving | ResolveState::Resolved => None,
+ },
+ }
+ }
+
+ pub fn can_resolve_inlays(capabilities: &ServerCapabilities) -> bool {
+ capabilities
+ .inlay_hint_provider
+ .as_ref()
+ .and_then(|options| match options {
+ OneOf::Left(_is_supported) => None,
+ OneOf::Right(capabilities) => match capabilities {
+ lsp::InlayHintServerCapabilities::Options(o) => o.resolve_provider,
+ lsp::InlayHintServerCapabilities::RegistrationOptions(o) => {
+ o.inlay_hint_options.resolve_provider
+ }
+ },
+ })
+ .unwrap_or(false)
+ }
+}
+
#[async_trait(?Send)]
impl LspCommand for InlayHints {
type Response = Vec<InlayHint>;
@@ -1783,7 +2162,9 @@ impl LspCommand for InlayHints {
type ProtoRequest = proto::InlayHints;
fn check_capabilities(&self, server_capabilities: &lsp::ServerCapabilities) -> bool {
- let Some(inlay_hint_provider) = &server_capabilities.inlay_hint_provider else { return false };
+ let Some(inlay_hint_provider) = &server_capabilities.inlay_hint_provider else {
+ return false;
+ };
match inlay_hint_provider {
lsp::OneOf::Left(enabled) => *enabled,
lsp::OneOf::Right(inlay_hint_capabilities) => match inlay_hint_capabilities {
@@ -1816,8 +2197,9 @@ impl LspCommand for InlayHints {
buffer: ModelHandle<Buffer>,
server_id: LanguageServerId,
mut cx: AsyncAppContext,
- ) -> Result<Vec<InlayHint>> {
- let (lsp_adapter, _) = language_server_for_buffer(&project, &buffer, server_id, &mut cx)?;
+ ) -> anyhow::Result<Vec<InlayHint>> {
+ let (lsp_adapter, lsp_server) =
+ language_server_for_buffer(&project, &buffer, server_id, &mut cx)?;
// `typescript-language-server` adds padding to the left for type hints, turning
// `const foo: boolean` into `const foo : boolean` which looks odd.
        // `rust-analyzer` does not have the padding for this case, and we have to accommodate both.
@@ -1827,93 +2209,32 @@ impl LspCommand for InlayHints {
// Hence let's use a heuristic first to handle the most awkward case and look for more.
let force_no_type_left_padding =
lsp_adapter.name.0.as_ref() == "typescript-language-server";
- cx.read(|cx| {
- let origin_buffer = buffer.read(cx);
- Ok(message
- .unwrap_or_default()
- .into_iter()
- .map(|lsp_hint| {
- let kind = lsp_hint.kind.and_then(|kind| match kind {
- lsp::InlayHintKind::TYPE => Some(InlayHintKind::Type),
- lsp::InlayHintKind::PARAMETER => Some(InlayHintKind::Parameter),
- _ => None,
- });
- let position = origin_buffer
- .clip_point_utf16(point_from_lsp(lsp_hint.position), Bias::Left);
- let padding_left =
- if force_no_type_left_padding && kind == Some(InlayHintKind::Type) {
- false
- } else {
- lsp_hint.padding_left.unwrap_or(false)
- };
- InlayHint {
- buffer_id: origin_buffer.remote_id(),
- position: if kind == Some(InlayHintKind::Parameter) {
- origin_buffer.anchor_before(position)
- } else {
- origin_buffer.anchor_after(position)
- },
- padding_left,
- padding_right: lsp_hint.padding_right.unwrap_or(false),
- label: match lsp_hint.label {
- lsp::InlayHintLabel::String(s) => InlayHintLabel::String(s),
- lsp::InlayHintLabel::LabelParts(lsp_parts) => {
- InlayHintLabel::LabelParts(
- lsp_parts
- .into_iter()
- .map(|label_part| InlayHintLabelPart {
- value: label_part.value,
- tooltip: label_part.tooltip.map(
- |tooltip| {
- match tooltip {
- lsp::InlayHintLabelPartTooltip::String(s) => {
- InlayHintLabelPartTooltip::String(s)
- }
- lsp::InlayHintLabelPartTooltip::MarkupContent(
- markup_content,
- ) => InlayHintLabelPartTooltip::MarkupContent(
- MarkupContent {
- kind: format!("{:?}", markup_content.kind),
- value: markup_content.value,
- },
- ),
- }
- },
- ),
- location: label_part.location.map(|lsp_location| {
- let target_start = origin_buffer.clip_point_utf16(
- point_from_lsp(lsp_location.range.start),
- Bias::Left,
- );
- let target_end = origin_buffer.clip_point_utf16(
- point_from_lsp(lsp_location.range.end),
- Bias::Left,
- );
- Location {
- buffer: buffer.clone(),
- range: origin_buffer.anchor_after(target_start)
- ..origin_buffer.anchor_before(target_end),
- }
- }),
- })
- .collect(),
- )
- }
- },
- kind,
- tooltip: lsp_hint.tooltip.map(|tooltip| match tooltip {
- lsp::InlayHintTooltip::String(s) => InlayHintTooltip::String(s),
- lsp::InlayHintTooltip::MarkupContent(markup_content) => {
- InlayHintTooltip::MarkupContent(MarkupContent {
- kind: format!("{:?}", markup_content.kind),
- value: markup_content.value,
- })
- }
- }),
- }
- })
- .collect())
- })
+
+ let hints = message.unwrap_or_default().into_iter().map(|lsp_hint| {
+ let resolve_state = if InlayHints::can_resolve_inlays(lsp_server.capabilities()) {
+ ResolveState::CanResolve(lsp_server.server_id(), lsp_hint.data.clone())
+ } else {
+ ResolveState::Resolved
+ };
+
+ let buffer = buffer.clone();
+ cx.spawn(|mut cx| async move {
+ InlayHints::lsp_to_project_hint(
+ lsp_hint,
+ &buffer,
+ server_id,
+ resolve_state,
+ force_no_type_left_padding,
+ &mut cx,
+ )
+ .await
+ })
+ });
+ future::join_all(hints)
+ .await
+ .into_iter()
+ .collect::<anyhow::Result<_>>()
+ .context("lsp to project inlay hints conversion")
}
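
The conversion above fans every LSP hint out into its own future, awaits them with future::join_all, and then collects the Vec of Results into a single Result, so one failed conversion fails the whole response. A minimal sketch of that combination outside of gpui (the executor and error messages here are simplifications):

use anyhow::{anyhow, Result};
use futures::future;

async fn convert(n: u32) -> Result<String> {
    if n == 0 {
        Err(anyhow!("zero cannot be converted"))
    } else {
        Ok(n.to_string())
    }
}

async fn convert_all(inputs: Vec<u32>) -> Result<Vec<String>> {
    // Build one future per input, await them together, then collapse
    // Vec<Result<String>> into Result<Vec<String>>.
    let conversions = inputs.into_iter().map(convert);
    future::join_all(conversions).await.into_iter().collect()
}

fn main() {
    assert_eq!(
        futures::executor::block_on(convert_all(vec![1, 2, 3])).unwrap(),
        vec!["1", "2", "3"]
    );
    assert!(futures::executor::block_on(convert_all(vec![1, 0])).is_err());
}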
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::InlayHints {
@@ -1959,23 +2280,7 @@ impl LspCommand for InlayHints {
proto::InlayHintsResponse {
hints: response
.into_iter()
- .map(|response_hint| proto::InlayHint {
- position: Some(language::proto::serialize_anchor(&response_hint.position)),
- padding_left: response_hint.padding_left,
- padding_right: response_hint.padding_right,
- kind: response_hint.kind.map(|kind| kind.name().to_string()),
- // Do not pass extra data such as tooltips to clients: host can put tooltip data from the cache during resolution.
- tooltip: None,
- // Similarly, do not pass label parts to clients: host can return a detailed list during resolution.
- label: Some(proto::InlayHintLabel {
- label: Some(proto::inlay_hint_label::Label::Value(
- match response_hint.label {
- InlayHintLabel::String(s) => s,
- InlayHintLabel::LabelParts(_) => response_hint.text(),
- },
- )),
- }),
- })
+ .map(|response_hint| InlayHints::project_to_proto_hint(response_hint))
.collect(),
version: serialize_version(buffer_version),
}
@@ -1984,10 +2289,10 @@ impl LspCommand for InlayHints {
async fn response_from_proto(
self,
message: proto::InlayHintsResponse,
- project: ModelHandle<Project>,
+ _: ModelHandle<Project>,
buffer: ModelHandle<Buffer>,
mut cx: AsyncAppContext,
- ) -> Result<Vec<InlayHint>> {
+ ) -> anyhow::Result<Vec<InlayHint>> {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
@@ -1996,82 +2301,7 @@ impl LspCommand for InlayHints {
let mut hints = Vec::new();
for message_hint in message.hints {
- let buffer_id = message_hint
- .position
- .as_ref()
- .and_then(|location| location.buffer_id)
- .context("missing buffer id")?;
- let hint = InlayHint {
- buffer_id,
- position: message_hint
- .position
- .and_then(language::proto::deserialize_anchor)
- .context("invalid position")?,
- label: match message_hint
- .label
- .and_then(|label| label.label)
- .context("missing label")?
- {
- proto::inlay_hint_label::Label::Value(s) => InlayHintLabel::String(s),
- proto::inlay_hint_label::Label::LabelParts(parts) => {
- let mut label_parts = Vec::new();
- for part in parts.parts {
- label_parts.push(InlayHintLabelPart {
- value: part.value,
- tooltip: part.tooltip.map(|tooltip| match tooltip.content {
- Some(proto::inlay_hint_label_part_tooltip::Content::Value(s)) => InlayHintLabelPartTooltip::String(s),
- Some(proto::inlay_hint_label_part_tooltip::Content::MarkupContent(markup_content)) => InlayHintLabelPartTooltip::MarkupContent(MarkupContent {
- kind: markup_content.kind,
- value: markup_content.value,
- }),
- None => InlayHintLabelPartTooltip::String(String::new()),
- }),
- location: match part.location {
- Some(location) => {
- let target_buffer = project
- .update(&mut cx, |this, cx| {
- this.wait_for_remote_buffer(location.buffer_id, cx)
- })
- .await?;
- Some(Location {
- range: location
- .start
- .and_then(language::proto::deserialize_anchor)
- .context("invalid start")?
- ..location
- .end
- .and_then(language::proto::deserialize_anchor)
- .context("invalid end")?,
- buffer: target_buffer,
- })},
- None => None,
- },
- });
- }
-
- InlayHintLabel::LabelParts(label_parts)
- }
- },
- padding_left: message_hint.padding_left,
- padding_right: message_hint.padding_right,
- kind: message_hint
- .kind
- .as_deref()
- .and_then(InlayHintKind::from_name),
- tooltip: message_hint.tooltip.and_then(|tooltip| {
- Some(match tooltip.content? {
- proto::inlay_hint_tooltip::Content::Value(s) => InlayHintTooltip::String(s),
- proto::inlay_hint_tooltip::Content::MarkupContent(markup_content) => {
- InlayHintTooltip::MarkupContent(MarkupContent {
- kind: markup_content.kind,
- value: markup_content.value,
- })
- }
- })
- }),
- };
-
- hints.push(hint);
+ hints.push(InlayHints::proto_to_project_hint(message_hint)?);
}
Ok(hints)
@@ -11,7 +11,7 @@ mod project_tests;
mod worktree_tests;
use anyhow::{anyhow, Context, Result};
-use client::{proto, Client, TypedEnvelope, UserStore};
+use client::{proto, Client, TypedEnvelope, UserId, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use copilot::Copilot;
@@ -26,8 +26,8 @@ use futures::{
};
use globset::{Glob, GlobSet, GlobSetBuilder};
use gpui::{
- AnyModelHandle, AppContext, AsyncAppContext, BorrowAppContext, Entity, ModelContext,
- ModelHandle, Task, WeakModelHandle,
+ executor::Background, AnyModelHandle, AppContext, AsyncAppContext, BorrowAppContext, Entity,
+ ModelContext, ModelHandle, Task, WeakModelHandle,
};
use itertools::Itertools;
use language::{
@@ -37,11 +37,11 @@ use language::{
deserialize_anchor, deserialize_fingerprint, deserialize_line_ending, deserialize_version,
serialize_anchor, serialize_version,
},
- range_from_lsp, range_to_lsp, Bias, Buffer, CachedLspAdapter, CodeAction, CodeLabel,
- Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent, File as _,
- Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapterDelegate, OffsetRangeExt,
- Operation, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot, ToOffset,
- ToPointUtf16, Transaction, Unclipped,
+ range_from_lsp, range_to_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CodeAction,
+ CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent,
+ File as _, Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapterDelegate,
+ OffsetRangeExt, Operation, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot,
+ ToOffset, ToPointUtf16, Transaction, Unclipped,
};
use log::error;
use lsp::{
@@ -57,8 +57,8 @@ use serde::Serialize;
use settings::SettingsStore;
use sha2::{Digest, Sha256};
use similar::{ChangeTag, TextDiff};
+use smol::channel::{Receiver, Sender};
use std::{
- cell::RefCell,
cmp::{self, Ordering},
convert::TryInto,
hash::Hash,
@@ -67,7 +67,6 @@ use std::{
ops::Range,
path::{self, Component, Path, PathBuf},
process::Stdio,
- rc::Rc,
str,
sync::{
atomic::{AtomicUsize, Ordering::SeqCst},
@@ -250,6 +249,7 @@ enum ProjectClientState {
pub struct Collaborator {
pub peer_id: proto::PeerId,
pub replica_id: ReplicaId,
+ pub user_id: UserId,
}
#[derive(Clone, Debug, PartialEq)]
@@ -281,6 +281,7 @@ pub enum Event {
old_peer_id: proto::PeerId,
new_peer_id: proto::PeerId,
},
+ CollaboratorJoined(proto::PeerId),
CollaboratorLeft(proto::PeerId),
RefreshInlayHints,
}
@@ -331,15 +332,22 @@ pub struct Location {
pub range: Range<language::Anchor>,
}
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, PartialEq, Eq)]
pub struct InlayHint {
- pub buffer_id: u64,
pub position: language::Anchor,
pub label: InlayHintLabel,
pub kind: Option<InlayHintKind>,
pub padding_left: bool,
pub padding_right: bool,
pub tooltip: Option<InlayHintTooltip>,
+ pub resolve_state: ResolveState,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ResolveState {
+ Resolved,
+ CanResolve(LanguageServerId, Option<lsp::LSPAny>),
+ Resolving,
}
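
The new ResolveState models the lazy-resolution lifecycle of a hint: it starts out as CanResolve with the server id and whatever opaque data the server attached, is marked Resolving while a resolve request is presumably in flight, and ends up Resolved. A minimal state-machine sketch of that flow, using a String as a stand-in for the lsp::LSPAny payload and a made-up helper name:

#[derive(Debug, Clone, PartialEq)]
enum ResolveState {
    Resolved,
    CanResolve(usize, Option<String>), // server id + opaque resolve data (stand-in for lsp::LSPAny)
    Resolving,
}

// Hypothetical helper: only a CanResolve hint kicks off a resolve request.
fn start_resolve(state: &mut ResolveState) -> Option<(usize, Option<String>)> {
    match std::mem::replace(state, ResolveState::Resolving) {
        ResolveState::CanResolve(server_id, data) => Some((server_id, data)),
        other => {
            // Not resolvable (or already in flight/done): restore the previous state.
            *state = other;
            None
        }
    }
}

fn main() {
    let mut state = ResolveState::CanResolve(7, None);
    assert_eq!(start_resolve(&mut state), Some((7, None)));
    assert_eq!(state, ResolveState::Resolving);

    // A second attempt is a no-op: the request is already in flight.
    assert_eq!(start_resolve(&mut state), None);

    // Once resolved, there is nothing left to do either.
    state = ResolveState::Resolved;
    assert_eq!(start_resolve(&mut state), None);
}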
impl InlayHint {
@@ -351,34 +359,34 @@ impl InlayHint {
}
}
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintLabel {
String(String),
LabelParts(Vec<InlayHintLabelPart>),
}
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, PartialEq, Eq)]
pub struct InlayHintLabelPart {
pub value: String,
pub tooltip: Option<InlayHintLabelPartTooltip>,
- pub location: Option<Location>,
+ pub location: Option<(LanguageServerId, lsp::Location)>,
}
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintTooltip {
String(String),
MarkupContent(MarkupContent),
}
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintLabelPartTooltip {
String(String),
MarkupContent(MarkupContent),
}
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MarkupContent {
- pub kind: String,
+ pub kind: HoverBlockKind,
pub value: String,
}
@@ -412,7 +420,7 @@ pub struct HoverBlock {
pub kind: HoverBlockKind,
}
-#[derive(Clone, Debug, PartialEq)]
+#[derive(Clone, Debug, PartialEq, Eq)]
pub enum HoverBlockKind {
PlainText,
Markdown,
@@ -516,6 +524,28 @@ impl FormatTrigger {
}
}
}
+#[derive(Clone, Debug, PartialEq)]
+enum SearchMatchCandidate {
+ OpenBuffer {
+ buffer: ModelHandle<Buffer>,
+ // This might be an unnamed file without representation on filesystem
+ path: Option<Arc<Path>>,
+ },
+ Path {
+ worktree_id: WorktreeId,
+ path: Arc<Path>,
+ },
+}
+
+type SearchMatchCandidateIndex = usize;
+impl SearchMatchCandidate {
+ fn path(&self) -> Option<Arc<Path>> {
+ match self {
+ SearchMatchCandidate::OpenBuffer { path, .. } => path.clone(),
+ SearchMatchCandidate::Path { path, .. } => Some(path.clone()),
+ }
+ }
+}
impl Project {
pub fn init_settings(cx: &mut AppContext) {
@@ -549,6 +579,7 @@ impl Project {
client.add_model_request_handler(Self::handle_apply_code_action);
client.add_model_request_handler(Self::handle_on_type_formatting);
client.add_model_request_handler(Self::handle_inlay_hints);
+ client.add_model_request_handler(Self::handle_resolve_inlay_hint);
client.add_model_request_handler(Self::handle_refresh_inlay_hints);
client.add_model_request_handler(Self::handle_reload_buffers);
client.add_model_request_handler(Self::handle_synchronize_buffers);
@@ -1537,9 +1568,9 @@ impl Project {
if self.is_remote() {
return Err(anyhow!("creating buffers as a guest is not supported yet"));
}
-
+ let id = post_inc(&mut self.next_buffer_id);
let buffer = cx.add_model(|cx| {
- Buffer::new(self.replica_id(), text, cx)
+ Buffer::new(self.replica_id(), id, text)
.with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
});
self.register_buffer(&buffer, cx)?;
@@ -1677,7 +1708,7 @@ impl Project {
}
/// LanguageServerName is owned, because it is inserted into a map
- fn open_local_buffer_via_lsp(
+ pub fn open_local_buffer_via_lsp(
&mut self,
abs_path: lsp::Url,
language_server_id: LanguageServerId,
@@ -4967,7 +4998,7 @@ impl Project {
buffer_handle: ModelHandle<Buffer>,
range: Range<T>,
cx: &mut ModelContext<Self>,
- ) -> Task<Result<Vec<InlayHint>>> {
+ ) -> Task<anyhow::Result<Vec<InlayHint>>> {
let buffer = buffer_handle.read(cx);
let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
let range_start = range.start;
@@ -5017,192 +5048,79 @@ impl Project {
}
}
- #[allow(clippy::type_complexity)]
- pub fn search(
+ pub fn resolve_inlay_hint(
&self,
- query: SearchQuery,
+ hint: InlayHint,
+ buffer_handle: ModelHandle<Buffer>,
+ server_id: LanguageServerId,
cx: &mut ModelContext<Self>,
- ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
+ ) -> Task<anyhow::Result<InlayHint>> {
if self.is_local() {
- let snapshots = self
- .visible_worktrees(cx)
- .filter_map(|tree| {
- let tree = tree.read(cx).as_local()?;
- Some(tree.snapshot())
- })
- .collect::<Vec<_>>();
-
- let background = cx.background().clone();
- let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
- if path_count == 0 {
- return Task::ready(Ok(Default::default()));
+ let buffer = buffer_handle.read(cx);
+ let (_, lang_server) = if let Some((adapter, server)) =
+ self.language_server_for_buffer(buffer, server_id, cx)
+ {
+ (adapter.clone(), server.clone())
+ } else {
+ return Task::ready(Ok(hint));
+ };
+ if !InlayHints::can_resolve_inlays(lang_server.capabilities()) {
+ return Task::ready(Ok(hint));
}
- let workers = background.num_cpus().min(path_count);
- let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
- cx.background()
- .spawn({
- let fs = self.fs.clone();
- let background = cx.background().clone();
- let query = query.clone();
- async move {
- let fs = &fs;
- let query = &query;
- let matching_paths_tx = &matching_paths_tx;
- let paths_per_worker = (path_count + workers - 1) / workers;
- let snapshots = &snapshots;
- background
- .scoped(|scope| {
- for worker_ix in 0..workers {
- let worker_start_ix = worker_ix * paths_per_worker;
- let worker_end_ix = worker_start_ix + paths_per_worker;
- scope.spawn(async move {
- let mut snapshot_start_ix = 0;
- let mut abs_path = PathBuf::new();
- for snapshot in snapshots {
- let snapshot_end_ix =
- snapshot_start_ix + snapshot.visible_file_count();
- if worker_end_ix <= snapshot_start_ix {
- break;
- } else if worker_start_ix > snapshot_end_ix {
- snapshot_start_ix = snapshot_end_ix;
- continue;
- } else {
- let start_in_snapshot = worker_start_ix
- .saturating_sub(snapshot_start_ix);
- let end_in_snapshot =
- cmp::min(worker_end_ix, snapshot_end_ix)
- - snapshot_start_ix;
-
- for entry in snapshot
- .files(false, start_in_snapshot)
- .take(end_in_snapshot - start_in_snapshot)
- {
- if matching_paths_tx.is_closed() {
- break;
- }
- let matches = if query
- .file_matches(Some(&entry.path))
- {
- abs_path.clear();
- abs_path.push(&snapshot.abs_path());
- abs_path.push(&entry.path);
- if let Some(file) =
- fs.open_sync(&abs_path).await.log_err()
- {
- query.detect(file).unwrap_or(false)
- } else {
- false
- }
- } else {
- false
- };
-
- if matches {
- let project_path =
- (snapshot.id(), entry.path.clone());
- if matching_paths_tx
- .send(project_path)
- .await
- .is_err()
- {
- break;
- }
- }
- }
-
- snapshot_start_ix = snapshot_end_ix;
- }
- }
- });
- }
- })
- .await;
- }
- })
- .detach();
-
- let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
- let open_buffers = self
- .opened_buffers
- .values()
- .filter_map(|b| b.upgrade(cx))
- .collect::<HashSet<_>>();
- cx.spawn(|this, cx| async move {
- for buffer in &open_buffers {
- let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
- buffers_tx.send((buffer.clone(), snapshot)).await?;
- }
- let open_buffers = Rc::new(RefCell::new(open_buffers));
- while let Some(project_path) = matching_paths_rx.next().await {
- if buffers_tx.is_closed() {
- break;
- }
-
- let this = this.clone();
- let open_buffers = open_buffers.clone();
- let buffers_tx = buffers_tx.clone();
- cx.spawn(|mut cx| async move {
- if let Some(buffer) = this
- .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
- .await
- .log_err()
- {
- if open_buffers.borrow_mut().insert(buffer.clone()) {
- let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
- buffers_tx.send((buffer, snapshot)).await?;
- }
- }
-
- Ok::<_, anyhow::Error>(())
- })
- .detach();
- }
-
- Ok::<_, anyhow::Error>(())
+ let buffer_snapshot = buffer.snapshot();
+ cx.spawn(|_, mut cx| async move {
+ let resolve_task = lang_server.request::<lsp::request::InlayHintResolveRequest>(
+ InlayHints::project_to_lsp_hint(hint, &buffer_snapshot),
+ );
+ let resolved_hint = resolve_task
+ .await
+ .context("inlay hint resolve LSP request")?;
+ let resolved_hint = InlayHints::lsp_to_project_hint(
+ resolved_hint,
+ &buffer_handle,
+ server_id,
+ ResolveState::Resolved,
+ false,
+ &mut cx,
+ )
+ .await?;
+ Ok(resolved_hint)
})
- .detach_and_log_err(cx);
-
- let background = cx.background().clone();
- cx.background().spawn(async move {
- let query = &query;
- let mut matched_buffers = Vec::new();
- for _ in 0..workers {
- matched_buffers.push(HashMap::default());
+ } else if let Some(project_id) = self.remote_id() {
+ let client = self.client.clone();
+ let request = proto::ResolveInlayHint {
+ project_id,
+ buffer_id: buffer_handle.read(cx).remote_id(),
+ language_server_id: server_id.0 as u64,
+ hint: Some(InlayHints::project_to_proto_hint(hint.clone())),
+ };
+ cx.spawn(|_, _| async move {
+ let response = client
+ .request(request)
+ .await
+ .context("inlay hints proto request")?;
+ match response.hint {
+ Some(resolved_hint) => InlayHints::proto_to_project_hint(resolved_hint)
+ .context("inlay hints proto resolve response conversion"),
+ None => Ok(hint),
}
- background
- .scoped(|scope| {
- for worker_matched_buffers in matched_buffers.iter_mut() {
- let mut buffers_rx = buffers_rx.clone();
- scope.spawn(async move {
- while let Some((buffer, snapshot)) = buffers_rx.next().await {
- let buffer_matches = if query.file_matches(
- snapshot.file().map(|file| file.path().as_ref()),
- ) {
- query
- .search(&snapshot, None)
- .await
- .iter()
- .map(|range| {
- snapshot.anchor_before(range.start)
- ..snapshot.anchor_after(range.end)
- })
- .collect()
- } else {
- Vec::new()
- };
- if !buffer_matches.is_empty() {
- worker_matched_buffers
- .insert(buffer.clone(), buffer_matches);
- }
- }
- });
- }
- })
- .await;
- Ok(matched_buffers.into_iter().flatten().collect())
})
+ } else {
+ Task::ready(Err(anyhow!("project does not have a remote id")))
+ }
+ }
+
+ #[allow(clippy::type_complexity)]
+ pub fn search(
+ &self,
+ query: SearchQuery,
+ cx: &mut ModelContext<Self>,
+ ) -> Receiver<(ModelHandle<Buffer>, Vec<Range<Anchor>>)> {
+ if self.is_local() {
+ self.search_local(query, cx)
} else if let Some(project_id) = self.remote_id() {
+ let (tx, rx) = smol::channel::unbounded();
let request = self.client.request(query.to_proto(project_id));
cx.spawn(|this, mut cx| async move {
let response = request.await?;
@@ -5226,11 +5144,301 @@ impl Project {
.or_insert(Vec::new())
.push(start..end)
}
- Ok(result)
+ for (buffer, ranges) in result {
+ let _ = tx.send((buffer, ranges)).await;
+ }
+ Result::<(), anyhow::Error>::Ok(())
})
+ .detach_and_log_err(cx);
+ rx
} else {
- Task::ready(Ok(Default::default()))
+ unimplemented!();
+ }
+ }
+
+ pub fn search_local(
+ &self,
+ query: SearchQuery,
+ cx: &mut ModelContext<Self>,
+ ) -> Receiver<(ModelHandle<Buffer>, Vec<Range<Anchor>>)> {
+ // Local search is split into several phases.
+        // TL;DR: we do two passes; an initial pass picks the files that contain at least one match,
+        // and a second pass finds the positions of all the matches within those candidate files.
+ // The Receiver obtained from this function returns matches sorted by buffer path. Files without a buffer path are reported first.
+ //
+ // It gets a bit hairy though, because we must account for files that do not have a persistent representation
+ // on FS. Namely, if you have an untitled buffer or unsaved changes in a buffer, we want to scan that too.
+ //
+ // 1. We initialize a queue of match candidates and feed all opened buffers into it (== unsaved files / untitled buffers).
+        //    Then, we go through a worktree and check for files that match the predicate. If the file had an opened version, we skip the scan
+        //    of the FS version for that file altogether - after all, what we have in memory is more up-to-date than what's on disk.
+ // 2. At this point, we have a list of all potentially matching buffers/files.
+ // We sort that list by buffer path - this list is retained for later use.
+        //    We ensure that all buffers are now opened and available in the project.
+ // 3. We run a scan over all the candidate buffers on multiple background threads.
+ // We cannot assume that there will even be a match - while at least one match
+ // is guaranteed for files obtained from FS, the buffers we got from memory (unsaved files/unnamed buffers) might not have a match at all.
+        //    There is also an auxiliary background thread responsible for result gathering.
+        //    This is where the sorted list of buffers comes into play to maintain sorted order; whenever this background thread receives a notification (buffer has/doesn't have matches),
+        //    it records it. It reports matches in sorted order, even though it accepts them in unsorted order as well.
+        //    As soon as the match info for the next position in sorted order becomes available, it reports it (if it's a match) or skips to the next
+        //    entry - which might already be available thanks to out-of-order processing.
+ //
+ // We could also report matches fully out-of-order, without maintaining a sorted list of matching paths.
+        // However, this would mean that project search (the main user of this function) would have to do the sorting itself, on the fly.
+        // That isn't as straightforward as running an insertion sort, sadly, and it would also mean that project search would have to care about maintaining the match index
+        // in the face of a constantly updating list of sorted matches.
+        // Meanwhile, this implementation offers index stability, since the matches are already reported in sorted order.
+ let snapshots = self
+ .visible_worktrees(cx)
+ .filter_map(|tree| {
+ let tree = tree.read(cx).as_local()?;
+ Some(tree.snapshot())
+ })
+ .collect::<Vec<_>>();
+
+ let background = cx.background().clone();
+ let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
+ if path_count == 0 {
+ let (_, rx) = smol::channel::bounded(1024);
+ return rx;
+ }
+ let workers = background.num_cpus().min(path_count);
+ let (matching_paths_tx, matching_paths_rx) = smol::channel::bounded(1024);
+ let mut unnamed_files = vec![];
+ let opened_buffers = self
+ .opened_buffers
+ .iter()
+ .filter_map(|(_, b)| {
+ let buffer = b.upgrade(cx)?;
+ let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
+ if let Some(path) = snapshot.file().map(|file| file.path()) {
+ Some((path.clone(), (buffer, snapshot)))
+ } else {
+ unnamed_files.push(buffer);
+ None
+ }
+ })
+ .collect();
+ cx.background()
+ .spawn(Self::background_search(
+ unnamed_files,
+ opened_buffers,
+ cx.background().clone(),
+ self.fs.clone(),
+ workers,
+ query.clone(),
+ path_count,
+ snapshots,
+ matching_paths_tx,
+ ))
+ .detach();
+
+ let (buffers, buffers_rx) = Self::sort_candidates_and_open_buffers(matching_paths_rx, cx);
+ let background = cx.background().clone();
+ let (result_tx, result_rx) = smol::channel::bounded(1024);
+ cx.background()
+ .spawn(async move {
+ let Ok(buffers) = buffers.await else {
+ return;
+ };
+
+ let buffers_len = buffers.len();
+ if buffers_len == 0 {
+ return;
+ }
+ let query = &query;
+ let (finished_tx, mut finished_rx) = smol::channel::unbounded();
+ background
+ .scoped(|scope| {
+ #[derive(Clone)]
+ struct FinishedStatus {
+ entry: Option<(ModelHandle<Buffer>, Vec<Range<Anchor>>)>,
+ buffer_index: SearchMatchCandidateIndex,
+ }
+
+ for _ in 0..workers {
+ let finished_tx = finished_tx.clone();
+ let mut buffers_rx = buffers_rx.clone();
+ scope.spawn(async move {
+ while let Some((entry, buffer_index)) = buffers_rx.next().await {
+ let buffer_matches = if let Some((_, snapshot)) = entry.as_ref()
+ {
+ if query.file_matches(
+ snapshot.file().map(|file| file.path().as_ref()),
+ ) {
+ query
+ .search(&snapshot, None)
+ .await
+ .iter()
+ .map(|range| {
+ snapshot.anchor_before(range.start)
+ ..snapshot.anchor_after(range.end)
+ })
+ .collect()
+ } else {
+ Vec::new()
+ }
+ } else {
+ Vec::new()
+ };
+
+ let status = if !buffer_matches.is_empty() {
+ let entry = if let Some((buffer, _)) = entry.as_ref() {
+ Some((buffer.clone(), buffer_matches))
+ } else {
+ None
+ };
+ FinishedStatus {
+ entry,
+ buffer_index,
+ }
+ } else {
+ FinishedStatus {
+ entry: None,
+ buffer_index,
+ }
+ };
+ if finished_tx.send(status).await.is_err() {
+ break;
+ }
+ }
+ });
+ }
+ // Report sorted matches
+ scope.spawn(async move {
+ let mut current_index = 0;
+ let mut scratch = vec![None; buffers_len];
+ while let Some(status) = finished_rx.next().await {
+ debug_assert!(
+ scratch[status.buffer_index].is_none(),
+ "Got match status of position {} twice",
+ status.buffer_index
+ );
+ let index = status.buffer_index;
+ scratch[index] = Some(status);
+ while current_index < buffers_len {
+ let Some(current_entry) = scratch[current_index].take() else {
+ // We intentionally **do not** increment `current_index` here. When next element arrives
+ // from `finished_rx`, we will inspect the same position again, hoping for it to be Some(_)
+ // this time.
+ break;
+ };
+ if let Some(entry) = current_entry.entry {
+ result_tx.send(entry).await.log_err();
+ }
+ current_index += 1;
+ }
+ if current_index == buffers_len {
+ break;
+ }
+ }
+ });
+ })
+ .await;
+ })
+ .detach();
+ result_rx
+ }
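
The result-gathering worker described in the comment at the top of this function keeps one scratch slot per candidate, so it can accept match reports in any order yet emit them strictly in candidate-index order. A stripped-down synchronous sketch of that bookkeeping (report_in_order is a made-up name; the real code drives this with channels):

fn report_in_order<T>(finished: Vec<(usize, Option<T>)>, total: usize) -> Vec<T> {
    // One slot per candidate; a filled slot means "this candidate has been classified".
    let mut scratch: Vec<Option<Option<T>>> = (0..total).map(|_| None).collect();
    let mut emitted = Vec::new();
    let mut current_index = 0;
    for (index, entry) in finished {
        debug_assert!(scratch[index].is_none(), "got match status for {index} twice");
        scratch[index] = Some(entry);
        // Drain as far as the contiguous prefix of classified candidates allows.
        while current_index < total {
            let Some(slot) = scratch[current_index].take() else { break };
            if let Some(value) = slot {
                emitted.push(value);
            }
            current_index += 1;
        }
    }
    emitted
}

fn main() {
    // Candidate 1 finishes first, but nothing is emitted until candidate 0 is classified.
    let finished = vec![(1, Some("b.rs")), (0, None), (2, Some("c.rs"))];
    assert_eq!(report_in_order(finished, 3), vec!["b.rs", "c.rs"]);
}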
+ /// Pick paths that might potentially contain a match of a given search query.
+ async fn background_search(
+ unnamed_buffers: Vec<ModelHandle<Buffer>>,
+ opened_buffers: HashMap<Arc<Path>, (ModelHandle<Buffer>, BufferSnapshot)>,
+ background: Arc<Background>,
+ fs: Arc<dyn Fs>,
+ workers: usize,
+ query: SearchQuery,
+ path_count: usize,
+ snapshots: Vec<LocalSnapshot>,
+ matching_paths_tx: Sender<SearchMatchCandidate>,
+ ) {
+ let fs = &fs;
+ let query = &query;
+ let matching_paths_tx = &matching_paths_tx;
+ let snapshots = &snapshots;
+ let paths_per_worker = (path_count + workers - 1) / workers;
+ for buffer in unnamed_buffers {
+ matching_paths_tx
+ .send(SearchMatchCandidate::OpenBuffer {
+ buffer: buffer.clone(),
+ path: None,
+ })
+ .await
+ .log_err();
}
+ for (path, (buffer, _)) in opened_buffers.iter() {
+ matching_paths_tx
+ .send(SearchMatchCandidate::OpenBuffer {
+ buffer: buffer.clone(),
+ path: Some(path.clone()),
+ })
+ .await
+ .log_err();
+ }
+ background
+ .scoped(|scope| {
+ for worker_ix in 0..workers {
+ let worker_start_ix = worker_ix * paths_per_worker;
+ let worker_end_ix = worker_start_ix + paths_per_worker;
+ let unnamed_buffers = opened_buffers.clone();
+ scope.spawn(async move {
+ let mut snapshot_start_ix = 0;
+ let mut abs_path = PathBuf::new();
+ for snapshot in snapshots {
+ let snapshot_end_ix = snapshot_start_ix + snapshot.visible_file_count();
+ if worker_end_ix <= snapshot_start_ix {
+ break;
+ } else if worker_start_ix > snapshot_end_ix {
+ snapshot_start_ix = snapshot_end_ix;
+ continue;
+ } else {
+ let start_in_snapshot =
+ worker_start_ix.saturating_sub(snapshot_start_ix);
+ let end_in_snapshot =
+ cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
+
+ for entry in snapshot
+ .files(false, start_in_snapshot)
+ .take(end_in_snapshot - start_in_snapshot)
+ {
+ if matching_paths_tx.is_closed() {
+ break;
+ }
+ if unnamed_buffers.contains_key(&entry.path) {
+ continue;
+ }
+ let matches = if query.file_matches(Some(&entry.path)) {
+ abs_path.clear();
+ abs_path.push(&snapshot.abs_path());
+ abs_path.push(&entry.path);
+ if let Some(file) = fs.open_sync(&abs_path).await.log_err()
+ {
+ query.detect(file).unwrap_or(false)
+ } else {
+ false
+ }
+ } else {
+ false
+ };
+
+ if matches {
+ let project_path = SearchMatchCandidate::Path {
+ worktree_id: snapshot.id(),
+ path: entry.path.clone(),
+ };
+ if matching_paths_tx.send(project_path).await.is_err() {
+ break;
+ }
+ }
+ }
+
+ snapshot_start_ix = snapshot_end_ix;
+ }
+ }
+ });
+ }
+ })
+ .await;
}
// TODO: Wire this up to allow selecting a server?
@@ -5307,6 +5515,61 @@ impl Project {
Task::ready(Ok(Default::default()))
}
+ fn sort_candidates_and_open_buffers(
+ mut matching_paths_rx: Receiver<SearchMatchCandidate>,
+ cx: &mut ModelContext<Self>,
+ ) -> (
+ futures::channel::oneshot::Receiver<Vec<SearchMatchCandidate>>,
+ Receiver<(
+ Option<(ModelHandle<Buffer>, BufferSnapshot)>,
+ SearchMatchCandidateIndex,
+ )>,
+ ) {
+ let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
+ let (sorted_buffers_tx, sorted_buffers_rx) = futures::channel::oneshot::channel();
+ cx.spawn(|this, cx| async move {
+ let mut buffers = vec![];
+ while let Some(entry) = matching_paths_rx.next().await {
+ buffers.push(entry);
+ }
+ buffers.sort_by_key(|candidate| candidate.path());
+ let matching_paths = buffers.clone();
+ let _ = sorted_buffers_tx.send(buffers);
+ for (index, candidate) in matching_paths.into_iter().enumerate() {
+ if buffers_tx.is_closed() {
+ break;
+ }
+ let this = this.clone();
+ let buffers_tx = buffers_tx.clone();
+ cx.spawn(|mut cx| async move {
+ let buffer = match candidate {
+ SearchMatchCandidate::OpenBuffer { buffer, .. } => Some(buffer),
+ SearchMatchCandidate::Path { worktree_id, path } => this
+ .update(&mut cx, |this, cx| {
+ this.open_buffer((worktree_id, path), cx)
+ })
+ .await
+ .log_err(),
+ };
+ if let Some(buffer) = buffer {
+ let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
+ buffers_tx
+ .send((Some((buffer, snapshot)), index))
+ .await
+ .log_err();
+ } else {
+ buffers_tx.send((None, index)).await.log_err();
+ }
+
+ Ok::<_, anyhow::Error>(())
+ })
+ .detach();
+ }
+ })
+ .detach();
+ (sorted_buffers_rx, buffers_rx)
+ }
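
The sort_by_key(|candidate| candidate.path()) call above is what makes unnamed buffers come first in the candidate order: the key is an Option<Arc<Path>>, and Option's derived ordering places None before any Some value. A one-line check of that property:

fn main() {
    let mut keys = vec![Some("src/b.rs"), None, Some("src/a.rs")];
    keys.sort();
    // None (an unnamed buffer) sorts before every named path.
    assert_eq!(keys, vec![None, Some("src/a.rs"), Some("src/b.rs")]);
}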
+
pub fn find_or_create_local_worktree(
&mut self,
abs_path: impl AsRef<Path>,
@@ -5930,6 +6193,7 @@ impl Project {
let collaborator = Collaborator::from_proto(collaborator)?;
this.update(&mut cx, |this, cx| {
this.shared_buffers.remove(&collaborator.peer_id);
+ cx.emit(Event::CollaboratorJoined(collaborator.peer_id));
this.collaborators
.insert(collaborator.peer_id, collaborator);
cx.notify();
@@ -6813,6 +7077,40 @@ impl Project {
}))
}
+ async fn handle_resolve_inlay_hint(
+ this: ModelHandle<Self>,
+ envelope: TypedEnvelope<proto::ResolveInlayHint>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::ResolveInlayHintResponse> {
+ let proto_hint = envelope
+ .payload
+ .hint
+ .expect("incorrect protobuf resolve inlay hint message: missing the inlay hint");
+ let hint = InlayHints::proto_to_project_hint(proto_hint)
+ .context("resolved proto inlay hint conversion")?;
+ let buffer = this.update(&mut cx, |this, cx| {
+ this.opened_buffers
+ .get(&envelope.payload.buffer_id)
+ .and_then(|buffer| buffer.upgrade(cx))
+ .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
+ })?;
+ let response_hint = this
+ .update(&mut cx, |project, cx| {
+ project.resolve_inlay_hint(
+ hint,
+ buffer,
+ LanguageServerId(envelope.payload.language_server_id as usize),
+ cx,
+ )
+ })
+ .await
+ .context("inlay hints fetch")?;
+ Ok(proto::ResolveInlayHintResponse {
+ hint: Some(InlayHints::project_to_proto_hint(response_hint)),
+ })
+ }
+
async fn handle_refresh_inlay_hints(
this: ModelHandle<Self>,
_: TypedEnvelope<proto::RefreshInlayHints>,
@@ -6891,17 +7189,17 @@ impl Project {
) -> Result<proto::SearchProjectResponse> {
let peer_id = envelope.original_sender_id()?;
let query = SearchQuery::from_proto(envelope.payload)?;
- let result = this
- .update(&mut cx, |this, cx| this.search(query, cx))
- .await?;
+ let mut result = this.update(&mut cx, |this, cx| this.search(query, cx));
- this.update(&mut cx, |this, cx| {
+ cx.spawn(|mut cx| async move {
let mut locations = Vec::new();
- for (buffer, ranges) in result {
+ while let Some((buffer, ranges)) = result.next().await {
for range in ranges {
let start = serialize_anchor(&range.start);
let end = serialize_anchor(&range.end);
- let buffer_id = this.create_buffer_for_peer(&buffer, peer_id, cx);
+ let buffer_id = this.update(&mut cx, |this, cx| {
+ this.create_buffer_for_peer(&buffer, peer_id, cx)
+ });
locations.push(proto::Location {
buffer_id,
start: Some(start),
@@ -6911,6 +7209,7 @@ impl Project {
}
Ok(proto::SearchProjectResponse { locations })
})
+ .await
}
async fn handle_open_buffer_for_symbol(
@@ -7576,7 +7875,7 @@ impl Project {
self.language_servers_for_buffer(buffer, cx).next()
}
- fn language_server_for_buffer(
+ pub fn language_server_for_buffer(
&self,
buffer: &Buffer,
server_id: LanguageServerId,
@@ -7756,6 +8055,7 @@ impl Collaborator {
Ok(Self {
peer_id: message.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?,
replica_id: message.replica_id as ReplicaId,
+ user_id: message.user_id as UserId,
})
}
}
@@ -1,4 +1,4 @@
-use crate::{search::PathMatcher, worktree::WorktreeHandle, Event, *};
+use crate::{search::PathMatcher, worktree::WorktreeModelHandle, Event, *};
use fs::{FakeFs, LineEnding, RealFs};
use futures::{future, StreamExt};
use gpui::{executor::Deterministic, test::subscribe, AppContext};
@@ -3953,11 +3953,12 @@ async fn search(
query: SearchQuery,
cx: &mut gpui::TestAppContext,
) -> Result<HashMap<String, Vec<Range<usize>>>> {
- let results = project
- .update(cx, |project, cx| project.search(query, cx))
- .await?;
-
- Ok(results
+ let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
+ let mut result = HashMap::default();
+ while let Some((buffer, range)) = search_rx.next().await {
+ result.entry(buffer).or_insert(range);
+ }
+ Ok(result
.into_iter()
.map(|(buffer, ranges)| {
buffer.read_with(cx, |buffer, _| {
@@ -1,7 +1,13 @@
use crate::Project;
use gpui::{AnyWindowHandle, ModelContext, ModelHandle, WeakModelHandle};
-use std::path::PathBuf;
-use terminal::{Terminal, TerminalBuilder, TerminalSettings};
+use std::path::{Path, PathBuf};
+use terminal::{
+ terminal_settings::{self, TerminalSettings, VenvSettingsContent},
+ Terminal, TerminalBuilder,
+};
+
+#[cfg(target_os = "macos")]
+use std::os::unix::ffi::OsStrExt;
pub struct Terminals {
pub(crate) local_handles: Vec<WeakModelHandle<terminal::Terminal>>,
@@ -20,10 +26,12 @@ impl Project {
));
} else {
let settings = settings::get::<TerminalSettings>(cx);
+ let python_settings = settings.detect_venv.clone();
+ let shell = settings.shell.clone();
let terminal = TerminalBuilder::new(
working_directory.clone(),
- settings.shell.clone(),
+ shell.clone(),
settings.env.clone(),
Some(settings.blinking.clone()),
settings.alternate_scroll,
@@ -47,6 +55,15 @@ impl Project {
})
.detach();
+ if let Some(python_settings) = &python_settings.as_option() {
+ let activate_script_path =
+ self.find_activate_script_path(&python_settings, working_directory);
+ self.activate_python_virtual_environment(
+ activate_script_path,
+ &terminal_handle,
+ cx,
+ );
+ }
terminal_handle
});
@@ -54,6 +71,50 @@ impl Project {
}
}
+ pub fn find_activate_script_path(
+ &mut self,
+ settings: &VenvSettingsContent,
+ working_directory: Option<PathBuf>,
+ ) -> Option<PathBuf> {
+ // When we are unable to resolve the working directory, the terminal builder
+ // defaults to '/'. We should probably encode this directly somewhere, but for
+ // now, let's just hard code it here.
+ let working_directory = working_directory.unwrap_or_else(|| Path::new("/").to_path_buf());
+ let activate_script_name = match settings.activate_script {
+ terminal_settings::ActivateScript::Default => "activate",
+ terminal_settings::ActivateScript::Csh => "activate.csh",
+ terminal_settings::ActivateScript::Fish => "activate.fish",
+ };
+
+ for virtual_environment_name in settings.directories {
+ let mut path = working_directory.join(virtual_environment_name);
+ path.push("bin/");
+ path.push(activate_script_name);
+
+ if path.exists() {
+ return Some(path);
+ }
+ }
+
+ None
+ }
+
+ fn activate_python_virtual_environment(
+ &mut self,
+ activate_script: Option<PathBuf>,
+ terminal_handle: &ModelHandle<Terminal>,
+ cx: &mut ModelContext<Project>,
+ ) {
+ if let Some(activate_script) = activate_script {
+ // Paths are not strings so we need to jump through some hoops to format the command without `format!`
+ let mut command = Vec::from("source ".as_bytes());
+ command.extend_from_slice(activate_script.as_os_str().as_bytes());
+ command.push(b'\n');
+
+ terminal_handle.update(cx, |this, _| this.input_bytes(command));
+ }
+ }
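
Because the activate script path is an OsStr rather than a String, the command above is assembled as raw bytes instead of going through format!. A self-contained, Unix-only sketch of that construction (the path is made up for the example):

#[cfg(unix)]
fn main() {
    use std::os::unix::ffi::OsStrExt;
    use std::path::Path;

    let activate_script = Path::new("/work/.venv/bin/activate");
    // Same byte-level assembly as above: "source <script>\n" without requiring UTF-8.
    let mut command = Vec::from("source ".as_bytes());
    command.extend_from_slice(activate_script.as_os_str().as_bytes());
    command.push(b'\n');
    assert_eq!(command, b"source /work/.venv/bin/activate\n".to_vec());
}

#[cfg(not(unix))]
fn main() {}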
+
pub fn local_terminal_handles(&self) -> &Vec<WeakModelHandle<terminal::Terminal>> {
&self.terminals.local_handles
}
@@ -2317,9 +2317,10 @@ impl BackgroundScannerState {
for changed_path in changed_paths {
let Some(dot_git_dir) = changed_path
.ancestors()
- .find(|ancestor| ancestor.file_name() == Some(&*DOT_GIT)) else {
- continue;
- };
+ .find(|ancestor| ancestor.file_name() == Some(&*DOT_GIT))
+ else {
+ continue;
+ };
// Avoid processing the same repository multiple times, if multiple paths
// within it have changed.
@@ -2348,7 +2349,10 @@ impl BackgroundScannerState {
let Some(work_dir) = self
.snapshot
.entry_for_id(entry_id)
- .map(|entry| RepositoryWorkDirectory(entry.path.clone())) else { continue };
+ .map(|entry| RepositoryWorkDirectory(entry.path.clone()))
+ else {
+ continue;
+ };
log::info!("reload git repository {:?}", dot_git_dir);
let repository = repository.repo_ptr.lock();
@@ -4026,7 +4030,7 @@ struct UpdateIgnoreStatusJob {
scan_queue: Sender<ScanJob>,
}
-pub trait WorktreeHandle {
+pub trait WorktreeModelHandle {
#[cfg(any(test, feature = "test-support"))]
fn flush_fs_events<'a>(
&self,
@@ -4034,7 +4038,7 @@ pub trait WorktreeHandle {
) -> futures::future::LocalBoxFuture<'a, ()>;
}
-impl WorktreeHandle for ModelHandle<Worktree> {
+impl WorktreeModelHandle for ModelHandle<Worktree> {
    // The worktree's FS event stream sometimes delivers "redundant" events for FS changes that
// occurred before the worktree was constructed. These events can cause the worktree to perform
// extra directory scans, and emit extra scan-state notifications.
@@ -1,5 +1,5 @@
use crate::{
- worktree::{Event, Snapshot, WorktreeHandle},
+ worktree::{Event, Snapshot, WorktreeModelHandle},
Entry, EntryKind, PathChange, Worktree,
};
use anyhow::Result;
@@ -40,7 +40,9 @@ impl View for QuickActionBar {
}
fn render(&mut self, cx: &mut gpui::ViewContext<'_, '_, Self>) -> gpui::AnyElement<Self> {
- let Some(editor) = self.active_editor() else { return Empty::new().into_any(); };
+ let Some(editor) = self.active_editor() else {
+ return Empty::new().into_any();
+ };
let inlay_hints_enabled = editor.read(cx).inlay_hints_enabled();
let mut bar = Flex::row().with_child(render_quick_action_bar_button(
@@ -150,9 +152,10 @@ impl ToolbarItemView for QuickActionBar {
cx.notify();
}
}));
+ ToolbarItemLocation::PrimaryRight { flex: None }
+ } else {
+ ToolbarItemLocation::Hidden
}
-
- ToolbarItemLocation::PrimaryRight { flex: None }
}
None => {
self.active_item = None;
@@ -23,7 +23,7 @@ async-tungstenite = "0.16"
base64 = "0.13"
futures.workspace = true
parking_lot.workspace = true
-prost = "0.8"
+prost.workspace = true
rand.workspace = true
rsa = "0.4"
serde.workspace = true
@@ -128,6 +128,8 @@ message Envelope {
InlayHints inlay_hints = 116;
InlayHintsResponse inlay_hints_response = 117;
+ ResolveInlayHint resolve_inlay_hint = 137;
+ ResolveInlayHintResponse resolve_inlay_hint_response = 138;
RefreshInlayHints refresh_inlay_hints = 118;
CreateChannel create_channel = 119;
@@ -142,6 +144,13 @@ message Envelope {
GetChannelMembersResponse get_channel_members_response = 128;
SetChannelMemberAdmin set_channel_member_admin = 129;
RenameChannel rename_channel = 130;
+
+ JoinChannelBuffer join_channel_buffer = 131;
+ JoinChannelBufferResponse join_channel_buffer_response = 132;
+ UpdateChannelBuffer update_channel_buffer = 133;
+ LeaveChannelBuffer leave_channel_buffer = 134;
+ AddChannelBufferCollaborator add_channel_buffer_collaborator = 135;
+ RemoveChannelBufferCollaborator remove_channel_buffer_collaborator = 136;
}
}
@@ -411,6 +420,16 @@ message RemoveProjectCollaborator {
PeerId peer_id = 2;
}
+message AddChannelBufferCollaborator {
+ uint64 channel_id = 1;
+ Collaborator collaborator = 2;
+}
+
+message RemoveChannelBufferCollaborator {
+ uint64 channel_id = 1;
+ PeerId peer_id = 2;
+}
+
message GetDefinition {
uint64 project_id = 1;
uint64 buffer_id = 2;
@@ -540,6 +559,11 @@ message UpdateBuffer {
repeated Operation operations = 3;
}
+message UpdateChannelBuffer {
+ uint64 channel_id = 1;
+ repeated Operation operations = 2;
+}
+
message UpdateBufferFile {
uint64 project_id = 1;
uint64 buffer_id = 2;
@@ -732,6 +756,7 @@ message InlayHint {
bool padding_left = 4;
bool padding_right = 5;
InlayHintTooltip tooltip = 6;
+ ResolveState resolve_state = 7;
}
message InlayHintLabel {
@@ -748,7 +773,10 @@ message InlayHintLabelParts {
message InlayHintLabelPart {
string value = 1;
InlayHintLabelPartTooltip tooltip = 2;
- Location location = 3;
+ optional string location_url = 3;
+ PointUtf16 location_range_start = 4;
+ PointUtf16 location_range_end = 5;
+ optional uint64 language_server_id = 6;
}
message InlayHintTooltip {
@@ -765,12 +793,39 @@ message InlayHintLabelPartTooltip {
}
}
+message ResolveState {
+ State state = 1;
+ LspResolveState lsp_resolve_state = 2;
+
+ enum State {
+ Resolved = 0;
+ CanResolve = 1;
+ Resolving = 2;
+ }
+
+ message LspResolveState {
+ string value = 1;
+ uint64 server_id = 2;
+ }
+}
+
+message ResolveInlayHint {
+ uint64 project_id = 1;
+ uint64 buffer_id = 2;
+ uint64 language_server_id = 3;
+ InlayHint hint = 4;
+}
+
+message ResolveInlayHintResponse {
+ InlayHint hint = 1;
+}
+
message RefreshInlayHints {
uint64 project_id = 1;
}
message MarkupContent {
- string kind = 1;
+ bool is_markdown = 1;
string value = 2;
}
@@ -948,6 +1003,22 @@ message RenameChannel {
string name = 2;
}
+message JoinChannelBuffer {
+ uint64 channel_id = 1;
+}
+
+message JoinChannelBufferResponse {
+ uint64 buffer_id = 1;
+ uint32 replica_id = 2;
+ string base_text = 3;
+ repeated Operation operations = 4;
+ repeated Collaborator collaborators = 5;
+}
+
+message LeaveChannelBuffer {
+ uint64 channel_id = 1;
+}
+
message RespondToChannelInvite {
uint64 channel_id = 1;
bool accept = 2;
@@ -1043,6 +1114,7 @@ message GetPrivateUserInfo {}
message GetPrivateUserInfoResponse {
string metrics_id = 1;
bool staff = 2;
+ repeated string flags = 3;
}
// Entities
@@ -1082,6 +1154,7 @@ message View {
oneof variant {
Editor editor = 3;
+ ChannelView channel_view = 4;
}
message Editor {
@@ -1094,6 +1167,11 @@ message View {
float scroll_x = 7;
float scroll_y = 8;
}
+
+ message ChannelView {
+ uint64 channel_id = 1;
+ Editor editor = 2;
+ }
}
message Collaborator {
@@ -1144,7 +1222,6 @@ enum GitStatus {
Conflict = 2;
}
-
message BufferState {
uint64 id = 1;
optional File file = 2;
@@ -171,12 +171,12 @@ impl Peer {
let this = self.clone();
let response_channels = connection_state.response_channels.clone();
let handle_io = async move {
- tracing::debug!(%connection_id, "handle io future: start");
+ tracing::trace!(%connection_id, "handle io future: start");
let _end_connection = util::defer(|| {
response_channels.lock().take();
this.connections.write().remove(&connection_id);
- tracing::debug!(%connection_id, "handle io future: end");
+ tracing::trace!(%connection_id, "handle io future: end");
});
// Send messages on this frequency so the connection isn't closed.
@@ -188,68 +188,68 @@ impl Peer {
futures::pin_mut!(receive_timeout);
loop {
- tracing::debug!(%connection_id, "outer loop iteration start");
+ tracing::trace!(%connection_id, "outer loop iteration start");
let read_message = reader.read().fuse();
futures::pin_mut!(read_message);
loop {
- tracing::debug!(%connection_id, "inner loop iteration start");
+ tracing::trace!(%connection_id, "inner loop iteration start");
futures::select_biased! {
outgoing = outgoing_rx.next().fuse() => match outgoing {
Some(outgoing) => {
- tracing::debug!(%connection_id, "outgoing rpc message: writing");
+ tracing::trace!(%connection_id, "outgoing rpc message: writing");
futures::select_biased! {
result = writer.write(outgoing).fuse() => {
- tracing::debug!(%connection_id, "outgoing rpc message: done writing");
+ tracing::trace!(%connection_id, "outgoing rpc message: done writing");
result.context("failed to write RPC message")?;
- tracing::debug!(%connection_id, "keepalive interval: resetting after sending message");
+ tracing::trace!(%connection_id, "keepalive interval: resetting after sending message");
keepalive_timer.set(create_timer(KEEPALIVE_INTERVAL).fuse());
}
_ = create_timer(WRITE_TIMEOUT).fuse() => {
- tracing::debug!(%connection_id, "outgoing rpc message: writing timed out");
+ tracing::trace!(%connection_id, "outgoing rpc message: writing timed out");
Err(anyhow!("timed out writing message"))?;
}
}
}
None => {
- tracing::debug!(%connection_id, "outgoing rpc message: channel closed");
+ tracing::trace!(%connection_id, "outgoing rpc message: channel closed");
return Ok(())
},
},
_ = keepalive_timer => {
- tracing::debug!(%connection_id, "keepalive interval: pinging");
+ tracing::trace!(%connection_id, "keepalive interval: pinging");
futures::select_biased! {
result = writer.write(proto::Message::Ping).fuse() => {
- tracing::debug!(%connection_id, "keepalive interval: done pinging");
+ tracing::trace!(%connection_id, "keepalive interval: done pinging");
result.context("failed to send keepalive")?;
- tracing::debug!(%connection_id, "keepalive interval: resetting after pinging");
+ tracing::trace!(%connection_id, "keepalive interval: resetting after pinging");
keepalive_timer.set(create_timer(KEEPALIVE_INTERVAL).fuse());
}
_ = create_timer(WRITE_TIMEOUT).fuse() => {
- tracing::debug!(%connection_id, "keepalive interval: pinging timed out");
+ tracing::trace!(%connection_id, "keepalive interval: pinging timed out");
Err(anyhow!("timed out sending keepalive"))?;
}
}
}
incoming = read_message => {
let incoming = incoming.context("error reading rpc message from socket")?;
- tracing::debug!(%connection_id, "incoming rpc message: received");
- tracing::debug!(%connection_id, "receive timeout: resetting");
+ tracing::trace!(%connection_id, "incoming rpc message: received");
+ tracing::trace!(%connection_id, "receive timeout: resetting");
receive_timeout.set(create_timer(RECEIVE_TIMEOUT).fuse());
if let proto::Message::Envelope(incoming) = incoming {
- tracing::debug!(%connection_id, "incoming rpc message: processing");
+ tracing::trace!(%connection_id, "incoming rpc message: processing");
futures::select_biased! {
result = incoming_tx.send(incoming).fuse() => match result {
Ok(_) => {
- tracing::debug!(%connection_id, "incoming rpc message: processed");
+ tracing::trace!(%connection_id, "incoming rpc message: processed");
}
Err(_) => {
- tracing::debug!(%connection_id, "incoming rpc message: channel closed");
+ tracing::trace!(%connection_id, "incoming rpc message: channel closed");
return Ok(())
}
},
_ = create_timer(WRITE_TIMEOUT).fuse() => {
- tracing::debug!(%connection_id, "incoming rpc message: processing timed out");
+ tracing::trace!(%connection_id, "incoming rpc message: processing timed out");
Err(anyhow!("timed out processing incoming message"))?
}
}
@@ -257,7 +257,7 @@ impl Peer {
break;
},
_ = receive_timeout => {
- tracing::debug!(%connection_id, "receive timeout: delay between messages too long");
+ tracing::trace!(%connection_id, "receive timeout: delay between messages too long");
Err(anyhow!("delay between messages too long"))?
}
}
@@ -274,13 +274,13 @@ impl Peer {
let response_channels = response_channels.clone();
async move {
let message_id = incoming.id;
- tracing::debug!(?incoming, "incoming message future: start");
+ tracing::trace!(?incoming, "incoming message future: start");
let _end = util::defer(move || {
- tracing::debug!(%connection_id, message_id, "incoming message future: end");
+ tracing::trace!(%connection_id, message_id, "incoming message future: end");
});
if let Some(responding_to) = incoming.responding_to {
- tracing::debug!(
+ tracing::trace!(
%connection_id,
message_id,
responding_to,
@@ -290,7 +290,7 @@ impl Peer {
if let Some(tx) = channel {
let requester_resumed = oneshot::channel();
if let Err(error) = tx.send((incoming, requester_resumed.0)) {
- tracing::debug!(
+ tracing::trace!(
%connection_id,
message_id,
responding_to = responding_to,
@@ -299,14 +299,14 @@ impl Peer {
);
}
- tracing::debug!(
+ tracing::trace!(
%connection_id,
message_id,
responding_to,
"incoming response: waiting to resume requester"
);
let _ = requester_resumed.1.await;
- tracing::debug!(
+ tracing::trace!(
%connection_id,
message_id,
responding_to,
@@ -323,7 +323,7 @@ impl Peer {
None
} else {
- tracing::debug!(%connection_id, message_id, "incoming message: received");
+ tracing::trace!(%connection_id, message_id, "incoming message: received");
proto::build_typed_envelope(connection_id, incoming).or_else(|| {
tracing::error!(
%connection_id,
@@ -197,6 +197,8 @@ messages!(
(OnTypeFormattingResponse, Background),
(InlayHints, Background),
(InlayHintsResponse, Background),
+ (ResolveInlayHint, Background),
+ (ResolveInlayHintResponse, Background),
(RefreshInlayHints, Foreground),
(Ping, Foreground),
(PrepareRename, Background),
@@ -248,7 +250,13 @@ messages!(
(GetPrivateUserInfo, Foreground),
(GetPrivateUserInfoResponse, Foreground),
(GetChannelMembers, Foreground),
- (GetChannelMembersResponse, Foreground)
+ (GetChannelMembersResponse, Foreground),
+ (JoinChannelBuffer, Foreground),
+ (JoinChannelBufferResponse, Foreground),
+ (LeaveChannelBuffer, Background),
+ (UpdateChannelBuffer, Foreground),
+ (RemoveChannelBufferCollaborator, Foreground),
+ (AddChannelBufferCollaborator, Foreground),
);
request_messages!(
@@ -293,6 +301,7 @@ request_messages!(
(PrepareRename, PrepareRenameResponse),
(OnTypeFormatting, OnTypeFormattingResponse),
(InlayHints, InlayHintsResponse),
+ (ResolveInlayHint, ResolveInlayHintResponse),
(RefreshInlayHints, Ack),
(ReloadBuffers, ReloadBuffersResponse),
(RequestContact, Ack),
@@ -315,6 +324,8 @@ request_messages!(
(UpdateParticipantLocation, Ack),
(UpdateProject, Ack),
(UpdateWorktree, Ack),
+ (JoinChannelBuffer, JoinChannelBufferResponse),
+ (LeaveChannelBuffer, Ack)
);
entity_messages!(
@@ -347,6 +358,7 @@ entity_messages!(
PerformRename,
OnTypeFormatting,
InlayHints,
+ ResolveInlayHint,
RefreshInlayHints,
PrepareRename,
ReloadBuffers,
@@ -370,6 +382,13 @@ entity_messages!(
UpdateDiffBase
);
+entity_messages!(
+ channel_id,
+ UpdateChannelBuffer,
+ RemoveChannelBufferCollaborator,
+ AddChannelBufferCollaborator
+);
+
const KIB: usize = 1024;
const MIB: usize = KIB * 1024;
const MAX_BUFFER_LEN: usize = MIB;
@@ -6,4 +6,4 @@ pub use conn::Connection;
pub use peer::*;
mod macros;
-pub const PROTOCOL_VERSION: u32 = 60;
+pub const PROTOCOL_VERSION: u32 = 61;
@@ -1,6 +1,6 @@
use crate::{
history::SearchHistory,
- mode::{next_mode, SearchMode},
+ mode::{next_mode, SearchMode, Side},
search_bar::{render_nav_button, render_search_mode_button},
CycleMode, NextHistoryQuery, PreviousHistoryQuery, SearchOptions, SelectAllMatches,
SelectNextMatch, SelectPrevMatch, ToggleCaseSensitive, ToggleWholeWord,
@@ -156,11 +156,12 @@ impl View for BufferSearchBar {
self.query_editor.update(cx, |editor, cx| {
editor.set_placeholder_text(new_placeholder_text, cx);
});
- let search_button_for_mode = |mode, cx: &mut ViewContext<BufferSearchBar>| {
+ let search_button_for_mode = |mode, side, cx: &mut ViewContext<BufferSearchBar>| {
let is_active = self.current_mode == mode;
render_search_mode_button(
mode,
+ side,
is_active,
move |_, this, cx| {
this.activate_search_mode(mode, cx);
@@ -212,20 +213,11 @@ impl View for BufferSearchBar {
)
};
- let icon_style = theme.search.editor_icon.clone();
- let nav_column = Flex::row()
- .with_child(self.render_action_button("Select All", cx))
- .with_child(nav_button_for_direction("<", Direction::Prev, cx))
- .with_child(nav_button_for_direction(">", Direction::Next, cx))
- .with_child(Flex::row().with_children(match_count))
- .constrained()
- .with_height(theme.search.search_bar_row_height);
-
- let query = Flex::row()
+ let query_column = Flex::row()
.with_child(
- Svg::for_style(icon_style.icon)
+ Svg::for_style(theme.search.editor_icon.clone().icon)
.contained()
- .with_style(icon_style.container),
+ .with_style(theme.search.editor_icon.clone().container),
)
.with_child(ChildView::new(&self.query_editor, cx).flex(1., true))
.with_child(
@@ -244,49 +236,45 @@ impl View for BufferSearchBar {
.contained(),
)
.align_children_center()
- .flex(1., true);
- let editor_column = Flex::row()
- .with_child(
- query
- .contained()
- .with_style(query_container_style)
- .constrained()
- .with_min_width(theme.search.editor.min_width)
- .with_max_width(theme.search.editor.max_width)
- .with_height(theme.search.search_bar_row_height)
- .flex(1., false),
- )
.contained()
+ .with_style(query_container_style)
.constrained()
+ .with_min_width(theme.search.editor.min_width)
+ .with_max_width(theme.search.editor.max_width)
.with_height(theme.search.search_bar_row_height)
.flex(1., false);
+
let mode_column = Flex::row()
- .with_child(
- Flex::row()
- .with_child(search_button_for_mode(SearchMode::Text, cx))
- .with_child(search_button_for_mode(SearchMode::Regex, cx))
- .contained()
- .with_style(theme.search.modes_container),
- )
- .with_child(super::search_bar::render_close_button(
- "Dismiss Buffer Search",
- &theme.search,
+ .with_child(search_button_for_mode(
+ SearchMode::Text,
+ Some(Side::Left),
cx,
- |_, this, cx| this.dismiss(&Default::default(), cx),
- Some(Box::new(Dismiss)),
))
+ .with_child(search_button_for_mode(
+ SearchMode::Regex,
+ Some(Side::Right),
+ cx,
+ ))
+ .contained()
+ .with_style(theme.search.modes_container)
+ .constrained()
+ .with_height(theme.search.search_bar_row_height);
+
+ let nav_column = Flex::row()
+ .with_child(self.render_action_button("all", cx))
+ .with_child(Flex::row().with_children(match_count))
+ .with_child(nav_button_for_direction("<", Direction::Prev, cx))
+ .with_child(nav_button_for_direction(">", Direction::Next, cx))
.constrained()
.with_height(theme.search.search_bar_row_height)
- .aligned()
- .right()
.flex_float();
+
Flex::row()
- .with_child(editor_column)
- .with_child(nav_column)
+ .with_child(query_column)
.with_child(mode_column)
+ .with_child(nav_column)
.contained()
.with_style(theme.search.container)
- .aligned()
.into_any_named("search bar")
}
}
@@ -340,8 +328,9 @@ impl ToolbarItemView for BufferSearchBar {
ToolbarItemLocation::Hidden
}
}
+
fn row_count(&self, _: &ViewContext<Self>) -> usize {
- 2
+ 1
}
}
@@ -837,6 +826,7 @@ mod tests {
let buffer = cx.add_model(|cx| {
Buffer::new(
0,
+ cx.model_id() as u64,
r#"
A regular expression (shortened as regex or regexp;[1] also referred to as
rational expression[2][3]) is a sequence of characters that specifies a search
@@ -844,7 +834,6 @@ mod tests {
for "find" or "find and replace" operations on strings, or for input validation.
"#
.unindent(),
- cx,
)
});
let window = cx.add_window(|_| EmptyView);
@@ -1225,7 +1214,7 @@ mod tests {
expected_query_matches_count > 1,
"Should pick a query with multiple results"
);
- let buffer = cx.add_model(|cx| Buffer::new(0, buffer_text, cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, buffer_text));
let window = cx.add_window(|_| EmptyView);
let editor = window.add_view(cx, |cx| Editor::for_buffer(buffer.clone(), None, cx));
@@ -1412,7 +1401,7 @@ mod tests {
for "find" or "find and replace" operations on strings, or for input validation.
"#
.unindent();
- let buffer = cx.add_model(|cx| Buffer::new(0, buffer_text, cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, buffer_text));
let window = cx.add_window(|_| EmptyView);
let editor = window.add_view(cx, |cx| Editor::for_buffer(buffer.clone(), None, cx));
@@ -48,41 +48,18 @@ impl SearchMode {
SearchMode::Regex => Box::new(ActivateRegexMode),
}
}
-
- pub(crate) fn border_right(&self) -> bool {
- match self {
- SearchMode::Regex => true,
- SearchMode::Text => true,
- SearchMode::Semantic => true,
- }
- }
-
- pub(crate) fn border_left(&self) -> bool {
- match self {
- SearchMode::Text => true,
- _ => false,
- }
- }
-
- pub(crate) fn button_side(&self) -> Option<Side> {
- match self {
- SearchMode::Text => Some(Side::Left),
- SearchMode::Semantic => None,
- SearchMode::Regex => Some(Side::Right),
- }
- }
}
pub(crate) fn next_mode(mode: &SearchMode, semantic_enabled: bool) -> SearchMode {
- let next_text_state = if semantic_enabled {
- SearchMode::Semantic
- } else {
- SearchMode::Regex
- };
-
match mode {
- SearchMode::Text => next_text_state,
- SearchMode::Semantic => SearchMode::Regex,
- SearchMode::Regex => SearchMode::Text,
+ SearchMode::Text => SearchMode::Regex,
+ SearchMode::Regex => {
+ if semantic_enabled {
+ SearchMode::Semantic
+ } else {
+ SearchMode::Text
+ }
+ }
+ SearchMode::Semantic => SearchMode::Text,
}
}
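The rewritten next_mode changes the cycle order: Text now always advances to Regex, and Semantic is only reachable from Regex when semantic search is enabled. A self-contained sketch of the new behaviour, with the enum reduced to what the function needs:

```rust
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum SearchMode {
    Text,
    Regex,
    Semantic,
}

fn next_mode(mode: &SearchMode, semantic_enabled: bool) -> SearchMode {
    match mode {
        SearchMode::Text => SearchMode::Regex,
        SearchMode::Regex => {
            if semantic_enabled {
                SearchMode::Semantic
            } else {
                SearchMode::Text
            }
        }
        SearchMode::Semantic => SearchMode::Text,
    }
}

fn main() {
    // With semantic search enabled the cycle is Text -> Regex -> Semantic -> Text.
    assert_eq!(next_mode(&SearchMode::Text, true), SearchMode::Regex);
    assert_eq!(next_mode(&SearchMode::Regex, true), SearchMode::Semantic);
    assert_eq!(next_mode(&SearchMode::Semantic, true), SearchMode::Text);
    // Without it, Semantic is skipped entirely: Text -> Regex -> Text.
    assert_eq!(next_mode(&SearchMode::Regex, false), SearchMode::Text);
}
```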
@@ -1,6 +1,6 @@
use crate::{
history::SearchHistory,
- mode::SearchMode,
+ mode::{SearchMode, Side},
search_bar::{render_nav_button, render_option_button_icon, render_search_mode_button},
ActivateRegexMode, CycleMode, NextHistoryQuery, PreviousHistoryQuery, SearchOptions,
SelectNextMatch, SelectPrevMatch, ToggleCaseSensitive, ToggleWholeWord,
@@ -185,28 +185,26 @@ impl ProjectSearch {
self.active_query = Some(query);
self.match_ranges.clear();
self.pending_search = Some(cx.spawn_weak(|this, mut cx| async move {
- let matches = search.await.log_err()?;
+ let mut matches = search;
let this = this.upgrade(&cx)?;
- let mut matches = matches.into_iter().collect::<Vec<_>>();
- let (_task, mut match_ranges) = this.update(&mut cx, |this, cx| {
+ this.update(&mut cx, |this, cx| {
this.match_ranges.clear();
+ this.excerpts.update(cx, |this, cx| this.clear(cx));
this.no_results = Some(true);
- matches.sort_by_key(|(buffer, _)| buffer.read(cx).file().map(|file| file.path()));
- this.excerpts.update(cx, |excerpts, cx| {
- excerpts.clear(cx);
- excerpts.stream_excerpts_with_context_lines(matches, 1, cx)
- })
});
- while let Some(match_range) = match_ranges.next().await {
- this.update(&mut cx, |this, cx| {
- this.match_ranges.push(match_range);
- while let Ok(Some(match_range)) = match_ranges.try_next() {
- this.match_ranges.push(match_range);
- }
+ while let Some((buffer, anchors)) = matches.next().await {
+ let mut ranges = this.update(&mut cx, |this, cx| {
this.no_results = Some(false);
- cx.notify();
+ this.excerpts.update(cx, |excerpts, cx| {
+ excerpts.stream_excerpts_with_context_lines(buffer, anchors, 1, cx)
+ })
});
+
+ while let Some(range) = ranges.next().await {
+ this.update(&mut cx, |this, _| this.match_ranges.push(range));
+ }
+ this.update(&mut cx, |_, cx| cx.notify());
}
this.update(&mut cx, |this, cx| {
@@ -238,29 +236,31 @@ impl ProjectSearch {
self.no_results = Some(true);
self.pending_search = Some(cx.spawn(|this, mut cx| async move {
let results = search?.await.log_err()?;
+ let matches = results
+ .into_iter()
+ .map(|result| (result.buffer, vec![result.range.start..result.range.start]));
- let (_task, mut match_ranges) = this.update(&mut cx, |this, cx| {
+ this.update(&mut cx, |this, cx| {
this.excerpts.update(cx, |excerpts, cx| {
excerpts.clear(cx);
-
- let matches = results
- .into_iter()
- .map(|result| (result.buffer, vec![result.range.start..result.range.start]))
- .collect();
-
- excerpts.stream_excerpts_with_context_lines(matches, 3, cx)
})
});
-
- while let Some(match_range) = match_ranges.next().await {
- this.update(&mut cx, |this, cx| {
- this.match_ranges.push(match_range);
- while let Ok(Some(match_range)) = match_ranges.try_next() {
- this.match_ranges.push(match_range);
- }
+ for (buffer, ranges) in matches {
+ let mut match_ranges = this.update(&mut cx, |this, cx| {
this.no_results = Some(false);
- cx.notify();
+ this.excerpts.update(cx, |excerpts, cx| {
+ excerpts.stream_excerpts_with_context_lines(buffer, ranges, 3, cx)
+ })
});
+ while let Some(match_range) = match_ranges.next().await {
+ this.update(&mut cx, |this, cx| {
+ this.match_ranges.push(match_range);
+ while let Ok(Some(match_range)) = match_ranges.try_next() {
+ this.match_ranges.push(match_range);
+ }
+ cx.notify();
+ });
+ }
}
this.update(&mut cx, |this, cx| {
@@ -640,6 +640,7 @@ impl ProjectSearchView {
self.search_options = SearchOptions::none();
let project = self.model.read(cx).project.clone();
+
let index_task = semantic_index.update(cx, |semantic_index, cx| {
semantic_index.index_project(project, cx)
});
@@ -884,7 +885,9 @@ impl ProjectSearchView {
if !dir_entry.is_dir() {
return;
}
- let Some(filter_str) = dir_entry.path.to_str() else { return; };
+ let Some(filter_str) = dir_entry.path.to_str() else {
+ return;
+ };
let model = cx.add_model(|cx| ProjectSearch::new(workspace.project().clone(), cx));
let search = cx.add_view(|cx| ProjectSearchView::new(model, cx));
@@ -893,6 +896,7 @@ impl ProjectSearchView {
search
.included_files_editor
.update(cx, |editor, cx| editor.set_text(filter_str, cx));
+ search.filters_enabled = true;
search.focus_query_editor(cx)
});
}
@@ -1420,8 +1424,13 @@ impl View for ProjectSearchBar {
},
cx,
);
+
let search = _search.read(cx);
+ let is_semantic_available = SemanticIndex::enabled(cx);
let is_semantic_disabled = search.semantic_state.is_none();
+ let icon_style = theme.search.editor_icon.clone();
+ let is_active = search.active_match_index.is_some();
+
let render_option_button_icon = |path, option, cx: &mut ViewContext<Self>| {
crate::search_bar::render_option_button_icon(
self.is_option_enabled(option, cx),
@@ -1447,28 +1456,23 @@ impl View for ProjectSearchBar {
render_option_button_icon("icons/word_search_12.svg", SearchOptions::WHOLE_WORD, cx)
});
- let search = _search.read(cx);
- let icon_style = theme.search.editor_icon.clone();
-
- // Editor Functionality
- let query = Flex::row()
- .with_child(
- Svg::for_style(icon_style.icon)
- .contained()
- .with_style(icon_style.container),
- )
- .with_child(ChildView::new(&search.query_editor, cx).flex(1., true))
- .with_child(
- Flex::row()
- .with_child(filter_button)
- .with_children(case_sensitive)
- .with_children(whole_word)
- .flex(1., false)
- .constrained()
- .contained(),
+ let search_button_for_mode = |mode, side, cx: &mut ViewContext<ProjectSearchBar>| {
+ let is_active = if let Some(search) = self.active_project_search.as_ref() {
+ let search = search.read(cx);
+ search.current_mode == mode
+ } else {
+ false
+ };
+ render_search_mode_button(
+ mode,
+ side,
+ is_active,
+ move |_, this, cx| {
+ this.activate_search_mode(mode, cx);
+ },
+ cx,
)
- .align_children_center()
- .flex(1., true);
+ };
let search = _search.read(cx);
@@ -1486,50 +1490,6 @@ impl View for ProjectSearchBar {
theme.search.include_exclude_editor.input.container
};
- let included_files_view = ChildView::new(&search.included_files_editor, cx)
- .contained()
- .flex(1., true);
- let excluded_files_view = ChildView::new(&search.excluded_files_editor, cx)
- .contained()
- .flex(1., true);
- let filters = search.filters_enabled.then(|| {
- Flex::row()
- .with_child(
- included_files_view
- .contained()
- .with_style(include_container_style)
- .constrained()
- .with_height(theme.search.search_bar_row_height)
- .with_min_width(theme.search.include_exclude_editor.min_width)
- .with_max_width(theme.search.include_exclude_editor.max_width),
- )
- .with_child(
- excluded_files_view
- .contained()
- .with_style(exclude_container_style)
- .constrained()
- .with_height(theme.search.search_bar_row_height)
- .with_min_width(theme.search.include_exclude_editor.min_width)
- .with_max_width(theme.search.include_exclude_editor.max_width),
- )
- .contained()
- .with_padding_top(theme.workspace.toolbar.container.padding.bottom)
- });
-
- let editor_column = Flex::column()
- .with_child(
- query
- .contained()
- .with_style(query_container_style)
- .constrained()
- .with_min_width(theme.search.editor.min_width)
- .with_max_width(theme.search.editor.max_width)
- .with_height(theme.search.search_bar_row_height)
- .flex(1., false),
- )
- .with_children(filters)
- .flex(1., false);
-
let matches = search.active_match_index.map(|match_ix| {
Label::new(
format!(
@@ -1544,25 +1504,81 @@ impl View for ProjectSearchBar {
.aligned()
});
- let search_button_for_mode = |mode, cx: &mut ViewContext<ProjectSearchBar>| {
- let is_active = if let Some(search) = self.active_project_search.as_ref() {
- let search = search.read(cx);
- search.current_mode == mode
- } else {
- false
- };
- render_search_mode_button(
- mode,
- is_active,
- move |_, this, cx| {
- this.activate_search_mode(mode, cx);
- },
- cx,
+ let query_column = Flex::column()
+ .with_spacing(theme.search.search_row_spacing)
+ .with_child(
+ Flex::row()
+ .with_child(
+ Svg::for_style(icon_style.icon)
+ .contained()
+ .with_style(icon_style.container),
+ )
+ .with_child(ChildView::new(&search.query_editor, cx).flex(1., true))
+ .with_child(
+ Flex::row()
+ .with_child(filter_button)
+ .with_children(case_sensitive)
+ .with_children(whole_word)
+ .flex(1., false)
+ .constrained()
+ .contained(),
+ )
+ .align_children_center()
+ .contained()
+ .with_style(query_container_style)
+ .constrained()
+ .with_min_width(theme.search.editor.min_width)
+ .with_max_width(theme.search.editor.max_width)
+ .with_height(theme.search.search_bar_row_height)
+ .flex(1., false),
)
- };
- let is_active = search.active_match_index.is_some();
- let semantic_index = SemanticIndex::enabled(cx)
- .then(|| search_button_for_mode(SearchMode::Semantic, cx));
+ .with_children(search.filters_enabled.then(|| {
+ Flex::row()
+ .with_child(
+ ChildView::new(&search.included_files_editor, cx)
+ .contained()
+ .with_style(include_container_style)
+ .constrained()
+ .with_height(theme.search.search_bar_row_height)
+ .flex(1., true),
+ )
+ .with_child(
+ ChildView::new(&search.excluded_files_editor, cx)
+ .contained()
+ .with_style(exclude_container_style)
+ .constrained()
+ .with_height(theme.search.search_bar_row_height)
+ .flex(1., true),
+ )
+ .constrained()
+ .with_min_width(theme.search.editor.min_width)
+ .with_max_width(theme.search.editor.max_width)
+ .flex(1., false)
+ }))
+ .flex(1., false);
+
+ let mode_column =
+ Flex::row()
+ .with_child(search_button_for_mode(
+ SearchMode::Text,
+ Some(Side::Left),
+ cx,
+ ))
+ .with_child(search_button_for_mode(
+ SearchMode::Regex,
+ if is_semantic_available {
+ None
+ } else {
+ Some(Side::Right)
+ },
+ cx,
+ ))
+ .with_children(is_semantic_available.then(|| {
+ search_button_for_mode(SearchMode::Semantic, Some(Side::Right), cx)
+ }))
+ .contained()
+ .with_style(theme.search.modes_container);
+
let nav_button_for_direction = |label, direction, cx: &mut ViewContext<Self>| {
render_nav_button(
label,
@@ -1578,43 +1594,17 @@ impl View for ProjectSearchBar {
};
let nav_column = Flex::row()
+ .with_child(Flex::row().with_children(matches))
.with_child(nav_button_for_direction("<", Direction::Prev, cx))
.with_child(nav_button_for_direction(">", Direction::Next, cx))
- .with_child(Flex::row().with_children(matches))
- .constrained()
- .with_height(theme.search.search_bar_row_height);
-
- let mode_column = Flex::row()
- .with_child(
- Flex::row()
- .with_child(search_button_for_mode(SearchMode::Text, cx))
- .with_children(semantic_index)
- .with_child(search_button_for_mode(SearchMode::Regex, cx))
- .contained()
- .with_style(theme.search.modes_container),
- )
- .with_child(super::search_bar::render_close_button(
- "Dismiss Project Search",
- &theme.search,
- cx,
- |_, this, cx| {
- if let Some(search) = this.active_project_search.as_mut() {
- search.update(cx, |_, cx| cx.emit(ViewEvent::Dismiss))
- }
- },
- None,
- ))
.constrained()
.with_height(theme.search.search_bar_row_height)
- .aligned()
- .right()
- .top()
.flex_float();
Flex::row()
- .with_child(editor_column)
- .with_child(nav_column)
+ .with_child(query_column)
.with_child(mode_column)
+ .with_child(nav_column)
.contained()
.with_style(theme.search.container)
.into_any_named("project search")
@@ -1634,10 +1624,16 @@ impl ToolbarItemView for ProjectSearchBar {
self.subscription = None;
self.active_project_search = None;
if let Some(search) = active_pane_item.and_then(|i| i.downcast::<ProjectSearchView>()) {
+ search.update(cx, |search, cx| {
+ if search.current_mode == SearchMode::Semantic {
+ search.index_project(cx);
+ }
+ });
+
self.subscription = Some(cx.observe(&search, |_, _, cx| cx.notify()));
self.active_project_search = Some(search);
ToolbarItemLocation::PrimaryLeft {
- flex: Some((1., false)),
+ flex: Some((1., true)),
}
} else {
ToolbarItemLocation::Hidden
@@ -1645,13 +1641,12 @@ impl ToolbarItemView for ProjectSearchBar {
}
fn row_count(&self, cx: &ViewContext<Self>) -> usize {
- self.active_project_search
- .as_ref()
- .map(|search| {
- let offset = search.read(cx).filters_enabled as usize;
- 2 + offset
- })
- .unwrap_or_else(|| 2)
+ if let Some(search) = self.active_project_search.as_ref() {
+ if search.read(cx).filters_enabled {
+ return 2;
+ }
+ }
+ 1
}
}
@@ -2,13 +2,13 @@ use bitflags::bitflags;
pub use buffer_search::BufferSearchBar;
use gpui::{
actions,
- elements::{Component, StyleableComponent, TooltipStyle},
+ elements::{Component, SafeStylable, TooltipStyle},
Action, AnyElement, AppContext, Element, View,
};
pub use mode::SearchMode;
use project::search::SearchQuery;
pub use project_search::{ProjectSearchBar, ProjectSearchView};
-use theme::components::{action_button::ActionButton, ComponentExt, ToggleIconButtonStyle};
+use theme::components::{action_button::Button, svg::Svg, ComponentExt, ToggleIconButtonStyle};
pub mod buffer_search;
mod history;
@@ -89,15 +89,12 @@ impl SearchOptions {
tooltip_style: TooltipStyle,
button_style: ToggleIconButtonStyle,
) -> AnyElement<V> {
- ActionButton::new_dynamic(
- self.to_toggle_action(),
- format!("Toggle {}", self.label()),
- tooltip_style,
- )
- .with_contents(theme::components::svg::Svg::new(self.icon()))
- .toggleable(active)
- .with_style(button_style)
- .element()
- .into_any()
+ Button::dynamic_action(self.to_toggle_action())
+ .with_tooltip(format!("Toggle {}", self.label()), tooltip_style)
+ .with_contents(Svg::new(self.icon()))
+ .toggleable(active)
+ .with_style(button_style)
+ .element()
+ .into_any()
}
}
@@ -13,34 +13,6 @@ use crate::{
SelectNextMatch, SelectPrevMatch,
};
-pub(super) fn render_close_button<V: View>(
- tooltip: &'static str,
- theme: &theme::Search,
- cx: &mut ViewContext<V>,
- on_click: impl Fn(MouseClick, &mut V, &mut EventContext<V>) + 'static,
- dismiss_action: Option<Box<dyn Action>>,
-) -> AnyElement<V> {
- let tooltip_style = theme::current(cx).tooltip.clone();
-
- enum CloseButton {}
- MouseEventHandler::new::<CloseButton, _>(0, cx, |state, _| {
- let style = theme.dismiss_button.style_for(state);
- Svg::new("icons/x_mark_8.svg")
- .with_color(style.color)
- .constrained()
- .with_width(style.icon_width)
- .aligned()
- .contained()
- .with_style(style.container)
- .constrained()
- .with_height(theme.search_bar_row_height)
- })
- .on_click(MouseButton::Left, on_click)
- .with_cursor_style(CursorStyle::PointingHand)
- .with_tooltip::<CloseButton>(0, tooltip.to_string(), dismiss_action, tooltip_style, cx)
- .into_any()
-}
-
pub(super) fn render_nav_button<V: View>(
icon: &'static str,
direction: Direction,
@@ -111,6 +83,7 @@ pub(super) fn render_nav_button<V: View>(
pub(crate) fn render_search_mode_button<V: View>(
mode: SearchMode,
+ side: Option<Side>,
is_active: bool,
on_click: impl Fn(MouseClick, &mut V, &mut EventContext<V>) + 'static,
cx: &mut ViewContext<V>,
@@ -119,41 +92,41 @@ pub(crate) fn render_search_mode_button<V: View>(
enum SearchModeButton {}
MouseEventHandler::new::<SearchModeButton, _>(mode.region_id(), cx, |state, cx| {
let theme = theme::current(cx);
- let mut style = theme
+ let style = theme
.search
.mode_button
.in_state(is_active)
.style_for(state)
.clone();
- style.container.border.left = mode.border_left();
- style.container.border.right = mode.border_right();
- let label = Label::new(mode.label(), style.text.clone())
- .aligned()
- .contained();
- let mut container_style = style.container.clone();
- if let Some(button_side) = mode.button_side() {
+ let mut container_style = style.container;
+ if let Some(button_side) = side {
if button_side == Side::Left {
+ container_style.border.left = true;
container_style.corner_radii = CornerRadii {
bottom_right: 0.,
top_right: 0.,
..container_style.corner_radii
};
- label.with_style(container_style)
} else {
+ container_style.border.left = false;
container_style.corner_radii = CornerRadii {
bottom_left: 0.,
top_left: 0.,
..container_style.corner_radii
};
- label.with_style(container_style)
}
} else {
+ container_style.border.left = false;
container_style.corner_radii = CornerRadii::default();
- label.with_style(container_style)
}
- .constrained()
- .with_height(theme.search.search_bar_row_height)
+
+ Label::new(mode.label(), style.text)
+ .aligned()
+ .contained()
+ .with_style(container_style)
+ .constrained()
+ .with_height(theme.search.search_bar_row_height)
})
.on_click(MouseButton::Left, on_click)
.with_cursor_style(CursorStyle::PointingHand)
@@ -38,6 +38,7 @@ parking_lot.workspace = true
rand.workspace = true
schemars.workspace = true
globset.workspace = true
+sha1 = "0.10.5"
[dev-dependencies]
gpui = { path = "../gpui", features = ["test-support"] }
@@ -26,6 +26,9 @@ pub struct FileRecord {
#[derive(Debug)]
struct Embedding(pub Vec<f32>);
+#[derive(Debug)]
+struct Sha1(pub Vec<u8>);
+
impl FromSql for Embedding {
fn column_result(value: ValueRef) -> FromSqlResult<Self> {
let bytes = value.as_blob()?;
@@ -37,6 +40,17 @@ impl FromSql for Embedding {
}
}
+impl FromSql for Sha1 {
+ fn column_result(value: ValueRef) -> FromSqlResult<Self> {
+ let bytes = value.as_blob()?;
+ let sha1: Result<Vec<u8>, Box<bincode::ErrorKind>> = bincode::deserialize(bytes);
+ if sha1.is_err() {
+ return Err(rusqlite::types::FromSqlError::Other(sha1.unwrap_err()));
+ }
+ return Ok(Sha1(sha1.unwrap()));
+ }
+}
+
pub struct VectorDatabase {
db: rusqlite::Connection,
}
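The new sha1 column is stored the same way the embeddings are: bincode-encoded into a BLOB and decoded again in the FromSql impl. A standalone sketch of that round trip against an in-memory table (the single-column table is illustrative only, not the real schema):

```rust
use rusqlite::{params, Connection};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Illustrative in-memory table; the real change adds sha1 to the documents table.
    let db = Connection::open_in_memory()?;
    db.execute("CREATE TABLE documents (sha1 BLOB NOT NULL)", [])?;

    // Encode the digest bytes with bincode before inserting, as the insert loop above does.
    let digest: Vec<u8> = vec![0xab; 20];
    let sha_blob = bincode::serialize(&digest)?;
    db.execute("INSERT INTO documents (sha1) VALUES (?1)", params![sha_blob])?;

    // Decoding on read mirrors the `FromSql for Sha1` impl above.
    let stored: Vec<u8> = db.query_row("SELECT sha1 FROM documents", [], |row| row.get(0))?;
    let decoded: Vec<u8> = bincode::deserialize(&stored)?;
    assert_eq!(decoded, digest);
    Ok(())
}
```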
@@ -132,6 +146,7 @@ impl VectorDatabase {
end_byte INTEGER NOT NULL,
name VARCHAR NOT NULL,
embedding BLOB NOT NULL,
+ sha1 BLOB NOT NULL,
FOREIGN KEY(file_id) REFERENCES files(id) ON DELETE CASCADE
)",
[],
@@ -182,15 +197,17 @@ impl VectorDatabase {
// I imagine we can speed this up with a bulk insert of some kind.
for document in documents {
let embedding_blob = bincode::serialize(&document.embedding)?;
+ let sha_blob = bincode::serialize(&document.sha1)?;
self.db.execute(
- "INSERT INTO documents (file_id, start_byte, end_byte, name, embedding) VALUES (?1, ?2, ?3, ?4, $5)",
+ "INSERT INTO documents (file_id, start_byte, end_byte, name, embedding, sha1) VALUES (?1, ?2, ?3, ?4, ?5, ?6)",
params![
file_id,
document.range.start.to_string(),
document.range.end.to_string(),
document.name,
- embedding_blob
+ embedding_blob,
+ sha_blob
],
)?;
}
@@ -106,8 +106,8 @@ impl OpenAIEmbeddings {
#[async_trait]
impl EmbeddingProvider for OpenAIEmbeddings {
async fn embed_batch(&self, spans: Vec<&str>) -> Result<Vec<Vec<f32>>> {
- const BACKOFF_SECONDS: [usize; 3] = [45, 75, 125];
- const MAX_RETRIES: usize = 3;
+ const BACKOFF_SECONDS: [usize; 4] = [3, 5, 15, 45];
+ const MAX_RETRIES: usize = 4;
let api_key = OPENAI_API_KEY
.as_ref()
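The embedding retry schedule gains a fourth attempt and much shorter initial waits (3, 5, 15, 45 seconds instead of 45, 75, 125). The surrounding request loop is not shown in this hunk; below is only a sketch of how such a table typically drives retries, with an invented send_request stand-in and millisecond sleeps so the sketch finishes quickly (the real code is async and waits whole seconds).

```rust
use std::{thread, time::Duration};

const BACKOFF_SECONDS: [usize; 4] = [3, 5, 15, 45];
const MAX_RETRIES: usize = 4;

// Stand-in for the real embedding request; it "succeeds" only on the last try.
fn send_request(attempt: usize) -> Result<Vec<f32>, &'static str> {
    if attempt + 1 < MAX_RETRIES {
        Err("429 Too Many Requests")
    } else {
        Ok(vec![0.0; 1536])
    }
}

fn main() {
    for attempt in 0..MAX_RETRIES {
        match send_request(attempt) {
            Ok(embedding) => {
                println!("got {} dims after {} attempt(s)", embedding.len(), attempt + 1);
                return;
            }
            Err(err) => {
                // Wait per the table before retrying.
                let delay = BACKOFF_SECONDS[attempt.min(BACKOFF_SECONDS.len() - 1)];
                eprintln!("attempt {} failed ({err}), retrying in {delay}s", attempt + 1);
                thread::sleep(Duration::from_millis(delay as u64));
            }
        }
    }
}
```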
@@ -1,5 +1,6 @@
use anyhow::{anyhow, Ok, Result};
use language::{Grammar, Language};
+use sha1::{Digest, Sha1};
use std::{
cmp::{self, Reverse},
collections::HashSet,
@@ -15,6 +16,7 @@ pub struct Document {
pub range: Range<usize>,
pub content: String,
pub embedding: Vec<f32>,
+ pub sha1: [u8; 20],
}
const CODE_CONTEXT_TEMPLATE: &str =
@@ -63,11 +65,15 @@ impl CodeContextRetriever {
.replace("<language>", language_name.as_ref())
.replace("<item>", &content);
+ let mut sha1 = Sha1::new();
+ sha1.update(&document_span);
+
Ok(vec![Document {
range: 0..content.len(),
content: document_span,
embedding: Vec::new(),
name: language_name.to_string(),
+ sha1: sha1.finalize().into(),
}])
}
@@ -76,11 +82,15 @@ impl CodeContextRetriever {
.replace("<path>", relative_path.to_string_lossy().as_ref())
.replace("<item>", &content);
+ let mut sha1 = Sha1::new();
+ sha1.update(&document_span);
+
Ok(vec![Document {
range: 0..content.len(),
content: document_span,
embedding: Vec::new(),
name: "Markdown".to_string(),
+ sha1: sha1.finalize().into(),
}])
}
@@ -253,11 +263,15 @@ impl CodeContextRetriever {
);
}
+ let mut sha1 = Sha1::new();
+ sha1.update(&document_content);
+
documents.push(Document {
name,
content: document_content,
range: item_range.clone(),
embedding: vec![],
+ sha1: sha1.finalize().into(),
})
}
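Every parsed document span now carries a SHA-1 digest of its rendered content, using the sha1 crate added above. The diff only computes and stores the digest; one plausible use, sketched here under that assumption, is skipping re-embedding when the digest is unchanged (needs_reembedding is a hypothetical helper, not code from this change):

```rust
use sha1::{Digest, Sha1};

/// Compute the 20-byte digest of a rendered document span, exactly as the
/// retriever does before pushing a `Document`.
fn span_digest(document_span: &str) -> [u8; 20] {
    let mut sha1 = Sha1::new();
    sha1.update(document_span);
    sha1.finalize().into()
}

/// Hypothetical helper: only re-embed when the content actually changed.
fn needs_reembedding(stored: Option<&[u8; 20]>, current: &[u8; 20]) -> bool {
    stored != Some(current)
}

fn main() {
    let old = span_digest("The below code snippet in Rust\nfn main() {}");
    let new = span_digest("The below code snippet in Rust\nfn main() { println!(\"hi\"); }");
    assert!(needs_reembedding(Some(&old), &new));
    assert!(!needs_reembedding(Some(&new), &new));
}
```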
@@ -16,7 +16,7 @@ use language::{Anchor, Buffer, Language, LanguageRegistry};
use parking_lot::Mutex;
use parsing::{CodeContextRetriever, Document, PARSEABLE_ENTIRE_FILE_TYPES};
use postage::watch;
-use project::{search::PathMatcher, Fs, Project, WorktreeId};
+use project::{search::PathMatcher, Fs, PathChange, Project, ProjectEntryId, WorktreeId};
use smol::channel;
use std::{
cmp::Ordering,
@@ -33,8 +33,9 @@ use util::{
paths::EMBEDDINGS_DIR,
ResultExt,
};
+use workspace::WorkspaceCreated;
-const SEMANTIC_INDEX_VERSION: usize = 6;
+const SEMANTIC_INDEX_VERSION: usize = 7;
const EMBEDDINGS_BATCH_SIZE: usize = 80;
pub fn init(
@@ -54,6 +55,24 @@ pub fn init(
return;
}
+ cx.subscribe_global::<WorkspaceCreated, _>({
+ move |event, cx| {
+ let Some(semantic_index) = SemanticIndex::global(cx) else {
+ return;
+ };
+ let workspace = &event.0;
+ if let Some(workspace) = workspace.upgrade(cx) {
+ let project = workspace.read(cx).project().clone();
+ if project.read(cx).is_local() {
+ semantic_index.update(cx, |index, cx| {
+ index.initialize_project(project, cx).detach_and_log_err(cx)
+ });
+ }
+ }
+ }
+ })
+ .detach();
+
cx.spawn(move |mut cx| async move {
let semantic_index = SemanticIndex::new(
fs,
@@ -92,8 +111,11 @@ pub struct SemanticIndex {
struct ProjectState {
worktree_db_ids: Vec<(WorktreeId, i64)>,
+ _subscription: gpui::Subscription,
outstanding_job_count_rx: watch::Receiver<usize>,
_outstanding_job_count_tx: Arc<Mutex<watch::Sender<usize>>>,
+ job_queue_tx: channel::Sender<IndexOperation>,
+ _queue_update_task: Task<()>,
}
#[derive(Clone)]
@@ -112,6 +134,72 @@ impl JobHandle {
}
}
impl ProjectState {
+ fn new(
+ cx: &mut AppContext,
+ subscription: gpui::Subscription,
+ worktree_db_ids: Vec<(WorktreeId, i64)>,
+ outstanding_job_count_rx: watch::Receiver<usize>,
+ _outstanding_job_count_tx: Arc<Mutex<watch::Sender<usize>>>,
+ ) -> Self {
+ let (job_queue_tx, job_queue_rx) = channel::unbounded();
+ let _queue_update_task = cx.background().spawn({
+ let mut worktree_queue = HashMap::new();
+ async move {
+ while let Ok(operation) = job_queue_rx.recv().await {
+ Self::update_queue(&mut worktree_queue, operation);
+ }
+ }
+ });
+
+ Self {
+ worktree_db_ids,
+ outstanding_job_count_rx,
+ _outstanding_job_count_tx,
+ _subscription: subscription,
+ _queue_update_task,
+ job_queue_tx,
+ }
+ }
+
+ pub fn get_outstanding_count(&self) -> usize {
+ self.outstanding_job_count_rx.borrow().clone()
+ }
+
+ fn update_queue(queue: &mut HashMap<PathBuf, IndexOperation>, operation: IndexOperation) {
+ match operation {
+ IndexOperation::FlushQueue => {
+ let queue = std::mem::take(queue);
+ for (_, op) in queue {
+ match op {
+ IndexOperation::IndexFile {
+ absolute_path: _,
+ payload,
+ tx,
+ } => {
+ let _ = tx.try_send(payload);
+ }
+ IndexOperation::DeleteFile {
+ absolute_path: _,
+ payload,
+ tx,
+ } => {
+ let _ = tx.try_send(payload);
+ }
+ _ => {}
+ }
+ }
+ }
+ IndexOperation::IndexFile {
+ ref absolute_path, ..
+ }
+ | IndexOperation::DeleteFile {
+ ref absolute_path, ..
+ } => {
+ queue.insert(absolute_path.clone(), operation);
+ }
+ }
+ }
+
fn db_id_for_worktree_id(&self, id: WorktreeId) -> Option<i64> {
self.worktree_db_ids
.iter()
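ProjectState now funnels file events through a background queue: update_queue keeps only the latest pending operation per path, and FlushQueue drains whatever has accumulated into the parsing and database channels. A simplified, self-contained model of that coalescing (payloads reduced to paths, channel sends replaced by a Vec):

```rust
use std::collections::HashMap;
use std::path::PathBuf;

// Reduced stand-ins for IndexOperation::IndexFile / DeleteFile / FlushQueue.
#[derive(Debug, Clone, PartialEq)]
enum Op {
    Index(PathBuf),
    Delete(PathBuf),
    Flush,
}

fn update_queue(queue: &mut HashMap<PathBuf, Op>, op: Op, flushed: &mut Vec<Op>) {
    match op {
        // A flush drains everything queued so far, one operation per path.
        Op::Flush => flushed.extend(std::mem::take(queue).into_values()),
        // Later events for the same path replace earlier ones, so a file that is
        // modified five times and then deleted only produces a single delete.
        Op::Index(ref path) | Op::Delete(ref path) => {
            let key = path.clone();
            queue.insert(key, op);
        }
    }
}

fn main() {
    let mut queue = HashMap::new();
    let mut flushed = Vec::new();
    let path = PathBuf::from("src/main.rs");
    update_queue(&mut queue, Op::Index(path.clone()), &mut flushed);
    update_queue(&mut queue, Op::Index(path.clone()), &mut flushed);
    update_queue(&mut queue, Op::Delete(path.clone()), &mut flushed);
    update_queue(&mut queue, Op::Flush, &mut flushed);
    assert_eq!(flushed, vec![Op::Delete(path)]);
}
```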
@@ -137,6 +225,7 @@ impl ProjectState {
}
}
+#[derive(Clone)]
pub struct PendingFile {
worktree_db_id: i64,
relative_path: PathBuf,
@@ -145,6 +234,19 @@ pub struct PendingFile {
modified_time: SystemTime,
job_handle: JobHandle,
}
+enum IndexOperation {
+ IndexFile {
+ absolute_path: PathBuf,
+ payload: PendingFile,
+ tx: channel::Sender<PendingFile>,
+ },
+ DeleteFile {
+ absolute_path: PathBuf,
+ payload: DbOperation,
+ tx: channel::Sender<DbOperation>,
+ },
+ FlushQueue,
+}
pub struct SearchResult {
pub buffer: ModelHandle<Buffer>,
@@ -576,12 +678,112 @@ impl SemanticIndex {
})
}
- pub fn index_project(
+ fn project_entries_changed(
+ &self,
+ project: ModelHandle<Project>,
+ changes: Arc<[(Arc<Path>, ProjectEntryId, PathChange)]>,
+ cx: &mut ModelContext<'_, SemanticIndex>,
+ worktree_id: &WorktreeId,
+ ) -> Result<()> {
+ let parsing_files_tx = self.parsing_files_tx.clone();
+ let db_update_tx = self.db_update_tx.clone();
+ let (job_queue_tx, outstanding_job_tx, worktree_db_id) = {
+ let state = self
+ .projects
+ .get(&project.downgrade())
+ .ok_or(anyhow!("Project not yet initialized"))?;
+ let worktree_db_id = state
+ .db_id_for_worktree_id(*worktree_id)
+ .ok_or(anyhow!("Worktree ID in Database Not Available"))?;
+ (
+ state.job_queue_tx.clone(),
+ state._outstanding_job_count_tx.clone(),
+ worktree_db_id,
+ )
+ };
+
+ let language_registry = self.language_registry.clone();
+ let parsing_files_tx = parsing_files_tx.clone();
+ let db_update_tx = db_update_tx.clone();
+
+ let worktree = project
+ .read(cx)
+ .worktree_for_id(worktree_id.clone(), cx)
+ .ok_or(anyhow!("Worktree not available"))?
+ .read(cx)
+ .snapshot();
+ cx.spawn(|_, _| async move {
+ let worktree = worktree.clone();
+ for (path, entry_id, path_change) in changes.iter() {
+ let relative_path = path.to_path_buf();
+ let absolute_path = worktree.absolutize(path);
+
+ let Some(entry) = worktree.entry_for_id(*entry_id) else {
+ continue;
+ };
+ if entry.is_ignored || entry.is_symlink || entry.is_external {
+ continue;
+ }
+
+ log::trace!("File Event: {:?}, Path: {:?}", &path_change, &path);
+ match path_change {
+ PathChange::AddedOrUpdated | PathChange::Updated | PathChange::Added => {
+ if let Ok(language) = language_registry
+ .language_for_file(&relative_path, None)
+ .await
+ {
+ if !PARSEABLE_ENTIRE_FILE_TYPES.contains(&language.name().as_ref())
+ && &language.name().as_ref() != &"Markdown"
+ && language
+ .grammar()
+ .and_then(|grammar| grammar.embedding_config.as_ref())
+ .is_none()
+ {
+ continue;
+ }
+
+ let job_handle = JobHandle::new(&outstanding_job_tx);
+ let new_operation = IndexOperation::IndexFile {
+ absolute_path: absolute_path.clone(),
+ payload: PendingFile {
+ worktree_db_id,
+ relative_path,
+ absolute_path,
+ language,
+ modified_time: entry.mtime,
+ job_handle,
+ },
+ tx: parsing_files_tx.clone(),
+ };
+ let _ = job_queue_tx.try_send(new_operation);
+ }
+ }
+ PathChange::Removed => {
+ let new_operation = IndexOperation::DeleteFile {
+ absolute_path,
+ payload: DbOperation::Delete {
+ worktree_id: worktree_db_id,
+ path: relative_path,
+ },
+ tx: db_update_tx.clone(),
+ };
+ let _ = job_queue_tx.try_send(new_operation);
+ }
+ _ => {}
+ }
+ }
+ })
+ .detach();
+
+ Ok(())
+ }
+
+ pub fn initialize_project(
&mut self,
project: ModelHandle<Project>,
cx: &mut ModelContext<Self>,
- ) -> Task<Result<(usize, watch::Receiver<usize>)>> {
- let t0 = Instant::now();
+ ) -> Task<Result<()>> {
+ log::trace!("Initializing Project for Semantic Index");
let worktree_scans_complete = project
.read(cx)
.worktrees(cx)
@@ -592,6 +794,7 @@ impl SemanticIndex {
}
})
.collect::<Vec<_>>();
+
let worktree_db_ids = project
.read(cx)
.worktrees(cx)
@@ -600,15 +803,21 @@ impl SemanticIndex {
})
.collect::<Vec<_>>();
+ let _subscription = cx.subscribe(&project, |this, project, event, cx| {
+ if let project::Event::WorktreeUpdatedEntries(worktree_id, changes) = event {
+ let _ =
+ this.project_entries_changed(project.clone(), changes.clone(), cx, worktree_id);
+ };
+ });
+
let language_registry = self.language_registry.clone();
- let db_update_tx = self.db_update_tx.clone();
let parsing_files_tx = self.parsing_files_tx.clone();
+ let db_update_tx = self.db_update_tx.clone();
cx.spawn(|this, mut cx| async move {
futures::future::join_all(worktree_scans_complete).await;
let worktree_db_ids = futures::future::join_all(worktree_db_ids).await;
-
let worktrees = project.read_with(&cx, |project, cx| {
project
.worktrees(cx)
@@ -618,6 +827,7 @@ impl SemanticIndex {
let mut worktree_file_mtimes = HashMap::new();
let mut db_ids_by_worktree_id = HashMap::new();
+
for (worktree, db_id) in worktrees.iter().zip(worktree_db_ids) {
let db_id = db_id?;
db_ids_by_worktree_id.insert(worktree.id(), db_id);
@@ -628,34 +838,34 @@ impl SemanticIndex {
);
}
+ let worktree_db_ids = db_ids_by_worktree_id
+ .iter()
+ .map(|(a, b)| (*a, *b))
+ .collect();
+
let (job_count_tx, job_count_rx) = watch::channel_with(0);
let job_count_tx = Arc::new(Mutex::new(job_count_tx));
- this.update(&mut cx, |this, _| {
- this.projects.insert(
- project.downgrade(),
- ProjectState {
- worktree_db_ids: db_ids_by_worktree_id
- .iter()
- .map(|(a, b)| (*a, *b))
- .collect(),
- outstanding_job_count_rx: job_count_rx.clone(),
- _outstanding_job_count_tx: job_count_tx.clone(),
- },
- );
- });
+ let job_count_tx_longlived = job_count_tx.clone();
- cx.background()
+ let worktree_files = cx
+ .background()
.spawn(async move {
- let mut count = 0;
+ let mut worktree_files = Vec::new();
for worktree in worktrees.into_iter() {
let mut file_mtimes = worktree_file_mtimes.remove(&worktree.id()).unwrap();
+ let worktree_db_id = db_ids_by_worktree_id[&worktree.id()];
for file in worktree.files(false, 0) {
let absolute_path = worktree.absolutize(&file.path);
+ if file.is_external || file.is_ignored || file.is_symlink {
+ continue;
+ }
+
if let Ok(language) = language_registry
.language_for_file(&absolute_path, None)
.await
{
+ // Test if file is valid parseable file
if !PARSEABLE_ENTIRE_FILE_TYPES.contains(&language.name().as_ref())
&& &language.name().as_ref() != &"Markdown"
&& language
@@ -672,39 +882,84 @@ impl SemanticIndex {
.map_or(false, |existing_mtime| existing_mtime == file.mtime);
if !already_stored {
- count += 1;
-
let job_handle = JobHandle::new(&job_count_tx);
- parsing_files_tx
- .try_send(PendingFile {
- worktree_db_id: db_ids_by_worktree_id[&worktree.id()],
+ worktree_files.push(IndexOperation::IndexFile {
+ absolute_path: absolute_path.clone(),
+ payload: PendingFile {
+ worktree_db_id,
relative_path: path_buf,
absolute_path,
language,
job_handle,
modified_time: file.mtime,
- })
- .unwrap();
+ },
+ tx: parsing_files_tx.clone(),
+ });
}
}
}
- for file in file_mtimes.keys() {
- db_update_tx
- .try_send(DbOperation::Delete {
- worktree_id: db_ids_by_worktree_id[&worktree.id()],
- path: file.to_owned(),
- })
- .unwrap();
+ // Clean up entries from database that are no longer in the worktree.
+ for (path, _) in file_mtimes {
+ worktree_files.push(IndexOperation::DeleteFile {
+ absolute_path: worktree.absolutize(path.as_path()),
+ payload: DbOperation::Delete {
+ worktree_id: worktree_db_id,
+ path,
+ },
+ tx: db_update_tx.clone(),
+ });
}
}
- log::trace!(
- "walking worktree took {:?} milliseconds",
- t0.elapsed().as_millis()
- );
- anyhow::Ok((count, job_count_rx))
+ anyhow::Ok(worktree_files)
})
- .await
+ .await?;
+
+ this.update(&mut cx, |this, cx| {
+ let project_state = ProjectState::new(
+ cx,
+ _subscription,
+ worktree_db_ids,
+ job_count_rx,
+ job_count_tx_longlived,
+ );
+
+ for op in worktree_files {
+ let _ = project_state.job_queue_tx.try_send(op);
+ }
+
+ this.projects.insert(project.downgrade(), project_state);
+ });
+ Result::<(), _>::Ok(())
+ })
+ }
+
+ pub fn index_project(
+ &mut self,
+ project: ModelHandle<Project>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<(usize, watch::Receiver<usize>)>> {
+ let state = self.projects.get_mut(&project.downgrade());
+ let state = if state.is_none() {
+ return Task::Ready(Some(Err(anyhow!("Project not yet initialized"))));
+ } else {
+ state.unwrap()
+ };
+
+ // let parsing_files_tx = self.parsing_files_tx.clone();
+ // let db_update_tx = self.db_update_tx.clone();
+ let job_count_rx = state.outstanding_job_count_rx.clone();
+ let count = state.get_outstanding_count();
+
+ cx.spawn(|this, mut cx| async move {
+ this.update(&mut cx, |this, _| {
+ let Some(state) = this.projects.get_mut(&project.downgrade()) else {
+ return;
+ };
+ let _ = state.job_queue_tx.try_send(IndexOperation::FlushQueue);
+ });
+
+ Ok((count, job_count_rx))
})
}
@@ -86,6 +86,13 @@ async fn test_semantic_index(cx: &mut TestAppContext) {
.unwrap();
let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
+
+ let _ = store
+ .update(cx, |store, cx| {
+ store.initialize_project(project.clone(), cx)
+ })
+ .await;
+
let (file_count, outstanding_file_count) = store
.update(cx, |store, cx| store.index_project(project.clone(), cx))
.await
@@ -16,7 +16,7 @@ collections = { path = "../collections" }
gpui = { path = "../gpui" }
sqlez = { path = "../sqlez" }
fs = { path = "../fs" }
-staff_mode = { path = "../staff_mode" }
+feature_flags = { path = "../feature_flags" }
util = { path = "../util" }
anyhow.workspace = true
@@ -63,20 +63,23 @@ impl KeymapFile {
// string. But `RawValue` currently does not work inside of an untagged enum.
match action {
Value::Array(items) => {
- let Ok([name, data]): Result<[serde_json::Value; 2], _> = items.try_into() else {
+ let Ok([name, data]): Result<[serde_json::Value; 2], _> =
+ items.try_into()
+ else {
return Some(Err(anyhow!("Expected array of length 2")));
};
let serde_json::Value::String(name) = name else {
- return Some(Err(anyhow!("Expected first item in array to be a string.")))
+ return Some(Err(anyhow!(
+ "Expected first item in array to be a string."
+ )));
};
- cx.deserialize_action(
- &name,
- Some(data),
- )
- },
+ cx.deserialize_action(&name, Some(data))
+ }
Value::String(name) => cx.deserialize_action(&name, None),
Value::Null => Ok(no_action()),
- _ => return Some(Err(anyhow!("Expected two-element array, got {action:?}"))),
+ _ => {
+ return Some(Err(anyhow!("Expected two-element array, got {action:?}")))
+ }
}
.with_context(|| {
format!(
@@ -1,36 +0,0 @@
-use gpui::AppContext;
-
-#[derive(Debug, Default)]
-pub struct StaffMode(pub bool);
-
-impl std::ops::Deref for StaffMode {
- type Target = bool;
-
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-
-/// Despite what the type system requires me to tell you, the init function will only be called a once
-/// as soon as we know that the staff mode is enabled.
-pub fn staff_mode<F: FnMut(&mut AppContext) + 'static>(cx: &mut AppContext, mut init: F) {
- if **cx.default_global::<StaffMode>() {
- init(cx)
- } else {
- let mut once = Some(());
- cx.observe_global::<StaffMode, _>(move |cx| {
- if **cx.global::<StaffMode>() && once.take().is_some() {
- init(cx);
- }
- })
- .detach();
- }
-}
-
-/// Immediately checks and runs the init function if the staff mode is not enabled.
-/// This is only included for symettry with staff_mode() above
-pub fn not_staff_mode<F: FnOnce(&mut AppContext) + 'static>(cx: &mut AppContext, init: F) {
- if !**cx.default_global::<StaffMode>() {
- init(cx)
- }
-}
@@ -2,7 +2,7 @@ use std::{cmp::Ordering, fmt::Debug};
use crate::{Bias, Dimension, Edit, Item, KeyedItem, SeekTarget, SumTree, Summary};
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, PartialEq, Eq)]
pub struct TreeMap<K, V>(SumTree<MapEntry<K, V>>)
where
K: Clone + Debug + Default + Ord,
@@ -162,6 +162,16 @@ impl<K: Clone + Debug + Default + Ord, V: Clone + Debug> TreeMap<K, V> {
}
}
+impl<K: Debug, V: Debug> Debug for TreeMap<K, V>
+where
+ K: Clone + Debug + Default + Ord,
+ V: Clone + Debug,
+{
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_map().entries(self.iter()).finish()
+ }
+}
+
#[derive(Debug)]
struct MapSeekTargetAdaptor<'a, T>(&'a T);
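TreeMap trades the derived Debug for a manual impl so debug output shows the logical key/value pairs rather than the SumTree internals. The same f.debug_map() pattern, demonstrated on a trivial stand-in map:

```rust
use std::fmt;

// Toy map backed by a sorted Vec, standing in for TreeMap's SumTree.
struct VecMap<K, V>(Vec<(K, V)>);

impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for VecMap<K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // debug_map prints `{key: value, ...}` instead of the backing storage.
        f.debug_map()
            .entries(self.0.iter().map(|(k, v)| (k, v)))
            .finish()
    }
}

fn main() {
    let map = VecMap(vec![("a", 1), ("b", 2)]);
    assert_eq!(format!("{:?}", map), r#"{"a": 1, "b": 2}"#);
}
```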
@@ -1,5 +1,6 @@
pub mod mappings;
pub use alacritty_terminal;
+pub mod terminal_settings;
use alacritty_terminal::{
ansi::{ClearMode, Handler},
@@ -7,7 +8,7 @@ use alacritty_terminal::{
event::{Event as AlacTermEvent, EventListener, Notify, WindowSize},
event_loop::{EventLoop, Msg, Notifier},
grid::{Dimensions, Scroll as AlacScroll},
- index::{Column, Direction as AlacDirection, Line, Point},
+ index::{Boundary, Column, Direction as AlacDirection, Line, Point},
selection::{Selection, SelectionRange, SelectionType},
sync::FairMutex,
term::{
@@ -31,8 +32,8 @@ use mappings::mouse::{
};
use procinfo::LocalProcessInfo;
-use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
+use terminal_settings::{AlternateScroll, Shell, TerminalBlink, TerminalSettings};
use util::truncate_and_trailoff;
use std::{
@@ -48,7 +49,6 @@ use std::{
use thiserror::Error;
use gpui::{
- fonts,
geometry::vector::{vec2f, Vector2F},
keymap_matcher::Keystroke,
platform::{Modifiers, MouseButton, MouseMovedEvent, TouchPhase},
@@ -78,7 +78,7 @@ lazy_static! {
// * use more strict regex for `file://` protocol matching: original regex has `file:` inside, but we want to avoid matching `some::file::module` strings.
static ref URL_REGEX: RegexSearch = RegexSearch::new(r#"(ipfs:|ipns:|magnet:|mailto:|gemini://|gopher://|https://|http://|news:|file://|git://|ssh:|ftp://)[^\u{0000}-\u{001F}\u{007F}-\u{009F}<>"\s{-}\^⟨⟩`]+"#).unwrap();
- static ref WORD_REGEX: RegexSearch = RegexSearch::new(r#"[\w.:/@\-~]+"#).unwrap();
+ static ref WORD_REGEX: RegexSearch = RegexSearch::new(r#"[\w.\[\]:/@\-~]+"#).unwrap();
}
///Upward flowing events, for changing the title and such
@@ -134,122 +134,6 @@ pub fn init(cx: &mut AppContext) {
settings::register::<TerminalSettings>(cx);
}
-#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
-#[serde(rename_all = "snake_case")]
-pub enum TerminalDockPosition {
- Left,
- Bottom,
- Right,
-}
-
-#[derive(Deserialize)]
-pub struct TerminalSettings {
- pub shell: Shell,
- pub working_directory: WorkingDirectory,
- font_size: Option<f32>,
- pub font_family: Option<String>,
- pub line_height: TerminalLineHeight,
- pub font_features: Option<fonts::Features>,
- pub env: HashMap<String, String>,
- pub blinking: TerminalBlink,
- pub alternate_scroll: AlternateScroll,
- pub option_as_meta: bool,
- pub copy_on_select: bool,
- pub dock: TerminalDockPosition,
- pub default_width: f32,
- pub default_height: f32,
-}
-
-#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
-pub struct TerminalSettingsContent {
- pub shell: Option<Shell>,
- pub working_directory: Option<WorkingDirectory>,
- pub font_size: Option<f32>,
- pub font_family: Option<String>,
- pub line_height: Option<TerminalLineHeight>,
- pub font_features: Option<fonts::Features>,
- pub env: Option<HashMap<String, String>>,
- pub blinking: Option<TerminalBlink>,
- pub alternate_scroll: Option<AlternateScroll>,
- pub option_as_meta: Option<bool>,
- pub copy_on_select: Option<bool>,
- pub dock: Option<TerminalDockPosition>,
- pub default_width: Option<f32>,
- pub default_height: Option<f32>,
-}
-
-impl TerminalSettings {
- pub fn font_size(&self, cx: &AppContext) -> Option<f32> {
- self.font_size
- .map(|size| theme::adjusted_font_size(size, cx))
- }
-}
-
-impl settings::Setting for TerminalSettings {
- const KEY: Option<&'static str> = Some("terminal");
-
- type FileContent = TerminalSettingsContent;
-
- fn load(
- default_value: &Self::FileContent,
- user_values: &[&Self::FileContent],
- _: &AppContext,
- ) -> Result<Self> {
- Self::load_via_json_merge(default_value, user_values)
- }
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, Default)]
-#[serde(rename_all = "snake_case")]
-pub enum TerminalLineHeight {
- #[default]
- Comfortable,
- Standard,
- Custom(f32),
-}
-
-impl TerminalLineHeight {
- pub fn value(&self) -> f32 {
- match self {
- TerminalLineHeight::Comfortable => 1.618,
- TerminalLineHeight::Standard => 1.3,
- TerminalLineHeight::Custom(line_height) => f32::max(*line_height, 1.),
- }
- }
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
-#[serde(rename_all = "snake_case")]
-pub enum TerminalBlink {
- Off,
- TerminalControlled,
- On,
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
-#[serde(rename_all = "snake_case")]
-pub enum Shell {
- System,
- Program(String),
- WithArguments { program: String, args: Vec<String> },
-}
-
-#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
-#[serde(rename_all = "snake_case")]
-pub enum AlternateScroll {
- On,
- Off,
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
-#[serde(rename_all = "snake_case")]
-pub enum WorkingDirectory {
- CurrentProjectDirectory,
- FirstProjectDirectory,
- AlwaysHome,
- Always { directory: String },
-}
-
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
pub struct TerminalSize {
pub cell_width: f32,
@@ -840,14 +724,13 @@ impl Terminal {
self.last_content.size,
term.grid().display_offset(),
)
- .grid_clamp(term, alacritty_terminal::index::Boundary::Grid);
+ .grid_clamp(term, Boundary::Grid);
let link = term.grid().index(point).hyperlink();
let found_word = if link.is_some() {
let mut min_index = point;
loop {
- let new_min_index =
- min_index.sub(term, alacritty_terminal::index::Boundary::Cursor, 1);
+ let new_min_index = min_index.sub(term, Boundary::Cursor, 1);
if new_min_index == min_index {
break;
} else if term.grid().index(new_min_index).hyperlink() != link {
@@ -859,8 +742,7 @@ impl Terminal {
let mut max_index = point;
loop {
- let new_max_index =
- max_index.add(term, alacritty_terminal::index::Boundary::Cursor, 1);
+ let new_max_index = max_index.add(term, Boundary::Cursor, 1);
if new_max_index == max_index {
break;
} else if term.grid().index(new_max_index).hyperlink() != link {
@@ -877,11 +759,34 @@ impl Terminal {
} else if let Some(word_match) = regex_match_at(term, point, &WORD_REGEX) {
let maybe_url_or_path =
term.bounds_to_string(*word_match.start(), *word_match.end());
+ let original_match = word_match.clone();
+ let (sanitized_match, sanitized_word) =
+ if maybe_url_or_path.starts_with('[') && maybe_url_or_path.ends_with(']') {
+ (
+ Match::new(
+ word_match.start().add(term, Boundary::Cursor, 1),
+ word_match.end().sub(term, Boundary::Cursor, 1),
+ ),
+ maybe_url_or_path[1..maybe_url_or_path.len() - 1].to_owned(),
+ )
+ } else {
+ (word_match, maybe_url_or_path)
+ };
+
let is_url = match regex_match_at(term, point, &URL_REGEX) {
- Some(url_match) => url_match == word_match,
+ Some(url_match) => {
+ // `]` is a valid symbol in the `file://` URL, so the regex match will include it
+ // consider that when ensuring that the URL match is the same as the original word
+ if sanitized_match != original_match {
+ url_match.start() == sanitized_match.start()
+ && url_match.end() == original_match.end()
+ } else {
+ url_match == sanitized_match
+ }
+ }
None => false,
};
- Some((maybe_url_or_path, is_url, word_match))
+ Some((sanitized_word, is_url, sanitized_match))
} else {
None
};
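Two related changes here: WORD_REGEX now accepts [ and ] so paths printed inside brackets are matched, and the hovered match is then sanitized by trimming one bracket from each end before URL detection. The real code adjusts grid indices with Boundary::Cursor; this sketch shows the same trimming at the string level, using the regex crate in place of alacritty's RegexSearch:

```rust
use regex::Regex;

fn main() {
    // Same character class as the updated WORD_REGEX, expressed for the `regex` crate.
    let word = Regex::new(r"[\w.\[\]:/@\-~]+").unwrap();

    let line = "warning generated in [src/main.rs:42]";
    let hovered = word.find_iter(line).last().unwrap().as_str();
    assert_eq!(hovered, "[src/main.rs:42]");

    // Strip a single pair of surrounding brackets before deciding what to open.
    let sanitized = if hovered.starts_with('[') && hovered.ends_with(']') {
        &hovered[1..hovered.len() - 1]
    } else {
        hovered
    };
    assert_eq!(sanitized, "src/main.rs:42");
}
```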
@@ -1018,6 +923,10 @@ impl Terminal {
self.pty_tx.notify(input.into_bytes());
}
+ fn write_bytes_to_pty(&self, input: Vec<u8>) {
+ self.pty_tx.notify(input);
+ }
+
pub fn input(&mut self, input: String) {
self.events
.push_back(InternalEvent::Scroll(AlacScroll::Bottom));
@@ -1026,6 +935,14 @@ impl Terminal {
self.write_to_pty(input);
}
+ pub fn input_bytes(&mut self, input: Vec<u8>) {
+ self.events
+ .push_back(InternalEvent::Scroll(AlacScroll::Bottom));
+ self.events.push_back(InternalEvent::SetSelection(None));
+
+ self.write_bytes_to_pty(input);
+ }
+
pub fn try_keystroke(&mut self, keystroke: &Keystroke, alt_is_meta: bool) -> bool {
let esc = to_esc_str(keystroke, &self.last_content.mode, alt_is_meta);
if let Some(esc) = esc {
@@ -0,0 +1,163 @@
+use std::{collections::HashMap, path::PathBuf};
+
+use gpui::{fonts, AppContext};
+use schemars::JsonSchema;
+use serde_derive::{Deserialize, Serialize};
+
+#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
+#[serde(rename_all = "snake_case")]
+pub enum TerminalDockPosition {
+ Left,
+ Bottom,
+ Right,
+}
+
+#[derive(Deserialize)]
+pub struct TerminalSettings {
+ pub shell: Shell,
+ pub working_directory: WorkingDirectory,
+ font_size: Option<f32>,
+ pub font_family: Option<String>,
+ pub line_height: TerminalLineHeight,
+ pub font_features: Option<fonts::Features>,
+ pub env: HashMap<String, String>,
+ pub blinking: TerminalBlink,
+ pub alternate_scroll: AlternateScroll,
+ pub option_as_meta: bool,
+ pub copy_on_select: bool,
+ pub dock: TerminalDockPosition,
+ pub default_width: f32,
+ pub default_height: f32,
+ pub detect_venv: VenvSettings,
+}
+
+#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "snake_case")]
+pub enum VenvSettings {
+ #[default]
+ Off,
+ On {
+ activate_script: Option<ActivateScript>,
+ directories: Option<Vec<PathBuf>>,
+ },
+}
+
+pub struct VenvSettingsContent<'a> {
+ pub activate_script: ActivateScript,
+ pub directories: &'a [PathBuf],
+}
+
+impl VenvSettings {
+ pub fn as_option(&self) -> Option<VenvSettingsContent> {
+ match self {
+ VenvSettings::Off => None,
+ VenvSettings::On {
+ activate_script,
+ directories,
+ } => Some(VenvSettingsContent {
+ activate_script: activate_script.unwrap_or(ActivateScript::Default),
+ directories: directories.as_deref().unwrap_or(&[]),
+ }),
+ }
+ }
+}
+
+#[derive(Clone, Copy, Debug, Default, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "snake_case")]
+pub enum ActivateScript {
+ #[default]
+ Default,
+ Csh,
+ Fish,
+}
+
+#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
+pub struct TerminalSettingsContent {
+ pub shell: Option<Shell>,
+ pub working_directory: Option<WorkingDirectory>,
+ pub font_size: Option<f32>,
+ pub font_family: Option<String>,
+ pub line_height: Option<TerminalLineHeight>,
+ pub font_features: Option<fonts::Features>,
+ pub env: Option<HashMap<String, String>>,
+ pub blinking: Option<TerminalBlink>,
+ pub alternate_scroll: Option<AlternateScroll>,
+ pub option_as_meta: Option<bool>,
+ pub copy_on_select: Option<bool>,
+ pub dock: Option<TerminalDockPosition>,
+ pub default_width: Option<f32>,
+ pub default_height: Option<f32>,
+ pub detect_venv: Option<VenvSettings>,
+}
+
+impl TerminalSettings {
+ pub fn font_size(&self, cx: &AppContext) -> Option<f32> {
+ self.font_size
+ .map(|size| theme::adjusted_font_size(size, cx))
+ }
+}
+
+impl settings::Setting for TerminalSettings {
+ const KEY: Option<&'static str> = Some("terminal");
+
+ type FileContent = TerminalSettingsContent;
+
+ fn load(
+ default_value: &Self::FileContent,
+ user_values: &[&Self::FileContent],
+ _: &AppContext,
+ ) -> anyhow::Result<Self> {
+ Self::load_via_json_merge(default_value, user_values)
+ }
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, Default)]
+#[serde(rename_all = "snake_case")]
+pub enum TerminalLineHeight {
+ #[default]
+ Comfortable,
+ Standard,
+ Custom(f32),
+}
+
+impl TerminalLineHeight {
+ pub fn value(&self) -> f32 {
+ match self {
+ TerminalLineHeight::Comfortable => 1.618,
+ TerminalLineHeight::Standard => 1.3,
+ TerminalLineHeight::Custom(line_height) => f32::max(*line_height, 1.),
+ }
+ }
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[serde(rename_all = "snake_case")]
+pub enum TerminalBlink {
+ Off,
+ TerminalControlled,
+ On,
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[serde(rename_all = "snake_case")]
+pub enum Shell {
+ System,
+ Program(String),
+ WithArguments { program: String, args: Vec<String> },
+}
+
+#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[serde(rename_all = "snake_case")]
+pub enum AlternateScroll {
+ On,
+ Off,
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[serde(rename_all = "snake_case")]
+pub enum WorkingDirectory {
+ CurrentProjectDirectory,
+ FirstProjectDirectory,
+ AlwaysHome,
+ Always { directory: String },
+}
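Most of this file is the settings code moved out of the main terminal module unchanged; the new piece is detect_venv, which drives automatic Python virtual-environment activation. The consuming side is not in this diff, so the example below only exercises the settings types themselves, mirroring VenvSettings::as_option with the serde/schemars derives omitted; the directory names are placeholders.

```rust
use std::path::PathBuf;

// Reduced copies of the new settings types (serde/schemars derives omitted).
#[derive(Clone, Copy, Default)]
enum ActivateScript {
    #[default]
    Default,
    Csh,
    Fish,
}

enum VenvSettings {
    Off,
    On {
        activate_script: Option<ActivateScript>,
        directories: Option<Vec<PathBuf>>,
    },
}

struct VenvSettingsContent<'a> {
    activate_script: ActivateScript,
    directories: &'a [PathBuf],
}

impl VenvSettings {
    fn as_option(&self) -> Option<VenvSettingsContent> {
        match self {
            VenvSettings::Off => None,
            VenvSettings::On { activate_script, directories } => Some(VenvSettingsContent {
                activate_script: activate_script.unwrap_or(ActivateScript::Default),
                directories: directories.as_deref().unwrap_or(&[]),
            }),
        }
    }
}

fn main() {
    // Roughly the shape a user's `"detect_venv": {"on": {...}}` setting deserializes into.
    let settings = VenvSettings::On {
        activate_script: None,
        directories: Some(vec![PathBuf::from(".env"), PathBuf::from(".venv")]),
    };
    if let Some(venv) = settings.as_option() {
        // A consumer would look for these directories in the project root and,
        // if one exists, send the matching activate script to the terminal.
        assert_eq!(venv.directories.len(), 2);
        assert!(matches!(venv.activate_script, ActivateScript::Default));
    }
}
```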
@@ -25,7 +25,8 @@ use terminal::{
term::{cell::Flags, TermMode},
},
mappings::colors::convert_color,
- IndexedCell, Terminal, TerminalContent, TerminalSettings, TerminalSize,
+ terminal_settings::TerminalSettings,
+ IndexedCell, Terminal, TerminalContent, TerminalSize,
};
use theme::{TerminalStyle, ThemeSettings};
use util::ResultExt;
@@ -9,7 +9,7 @@ use gpui::{
use project::Fs;
use serde::{Deserialize, Serialize};
use settings::SettingsStore;
-use terminal::{TerminalDockPosition, TerminalSettings};
+use terminal::terminal_settings::{TerminalDockPosition, TerminalSettings};
use util::{ResultExt, TryFutureExt};
use workspace::{
dock::{DockPosition, Panel},
@@ -33,7 +33,8 @@ use terminal::{
index::Point,
term::{search::RegexSearch, TermMode},
},
- Event, MaybeNavigationTarget, Terminal, TerminalBlink, WorkingDirectory,
+ terminal_settings::{TerminalBlink, TerminalSettings, WorkingDirectory},
+ Event, MaybeNavigationTarget, Terminal,
};
use util::{paths::PathLikeWithPosition, ResultExt};
use workspace::{
@@ -44,8 +45,6 @@ use workspace::{
NewCenterTerminal, Pane, ToolbarItemLocation, Workspace, WorkspaceId,
};
-pub use terminal::TerminalSettings;
-
const CURSOR_BLINK_INTERVAL: Duration = Duration::from_millis(500);
///Event to transmit the scroll from the element to the view
@@ -12,7 +12,7 @@ mod undo_map;
pub use anchor::*;
use anyhow::{anyhow, Result};
-use clock::ReplicaId;
+pub use clock::ReplicaId;
use collections::{HashMap, HashSet};
use fs::LineEnding;
use locator::Locator;
@@ -1,23 +1,143 @@
-use gpui::elements::StyleableComponent;
+use gpui::{elements::SafeStylable, Action};
use crate::{Interactive, Toggleable};
-use self::{action_button::ButtonStyle, svg::SvgStyle, toggle::Toggle};
+use self::{action_button::ButtonStyle, disclosure::Disclosable, svg::SvgStyle, toggle::Toggle};
-pub type ToggleIconButtonStyle = Toggleable<Interactive<ButtonStyle<SvgStyle>>>;
+pub type IconButtonStyle = Interactive<ButtonStyle<SvgStyle>>;
+pub type ToggleIconButtonStyle = Toggleable<IconButtonStyle>;
-pub trait ComponentExt<C: StyleableComponent> {
+pub trait ComponentExt<C: SafeStylable> {
fn toggleable(self, active: bool) -> Toggle<C, ()>;
+ fn disclosable(self, disclosed: Option<bool>, action: Box<dyn Action>) -> Disclosable<C, ()>;
}
-impl<C: StyleableComponent> ComponentExt<C> for C {
+impl<C: SafeStylable> ComponentExt<C> for C {
fn toggleable(self, active: bool) -> Toggle<C, ()> {
Toggle::new(self, active)
}
+
+ /// Some(true) => disclosed => content is visible
+ /// Some(false) => closed => content is hidden
+ /// None => No disclosure button, but reserve disclosure spacing
+ fn disclosable(self, disclosed: Option<bool>, action: Box<dyn Action>) -> Disclosable<C, ()> {
+ Disclosable::new(disclosed, self, action)
+ }
+}
+
+pub mod disclosure {
+
+ use gpui::{
+ elements::{Component, ContainerStyle, Empty, Flex, ParentElement, SafeStylable},
+ Action, Element,
+ };
+ use schemars::JsonSchema;
+ use serde_derive::Deserialize;
+
+ use super::{action_button::Button, svg::Svg, IconButtonStyle};
+
+ #[derive(Clone, Default, Deserialize, JsonSchema)]
+ pub struct DisclosureStyle<S> {
+ pub button: IconButtonStyle,
+ #[serde(flatten)]
+ pub container: ContainerStyle,
+ pub spacing: f32,
+ #[serde(flatten)]
+ content: S,
+ }
+
+ impl<S> DisclosureStyle<S> {
+ pub fn button_space(&self) -> f32 {
+ self.spacing + self.button.button_width.unwrap()
+ }
+ }
+
+ pub struct Disclosable<C, S> {
+ disclosed: Option<bool>,
+ action: Box<dyn Action>,
+ id: usize,
+ content: C,
+ style: S,
+ }
+
+ impl Disclosable<(), ()> {
+ pub fn new<C>(
+ disclosed: Option<bool>,
+ content: C,
+ action: Box<dyn Action>,
+ ) -> Disclosable<C, ()> {
+ Disclosable {
+ disclosed,
+ content,
+ action,
+ id: 0,
+ style: (),
+ }
+ }
+ }
+
+ impl<C> Disclosable<C, ()> {
+ pub fn with_id(mut self, id: usize) -> Disclosable<C, ()> {
+ self.id = id;
+ self
+ }
+ }
+
+ impl<C: SafeStylable> SafeStylable for Disclosable<C, ()> {
+ type Style = DisclosureStyle<C::Style>;
+
+ type Output = Disclosable<C, Self::Style>;
+
+ fn with_style(self, style: Self::Style) -> Self::Output {
+ Disclosable {
+ disclosed: self.disclosed,
+ action: self.action,
+ content: self.content,
+ id: self.id,
+ style,
+ }
+ }
+ }
+
+ impl<C: SafeStylable> Component for Disclosable<C, DisclosureStyle<C::Style>> {
+ fn render<V: 'static>(self, cx: &mut gpui::ViewContext<V>) -> gpui::AnyElement<V> {
+ Flex::row()
+ .with_spacing(self.style.spacing)
+ .with_child(if let Some(disclosed) = self.disclosed {
+ Button::dynamic_action(self.action)
+ .with_id(self.id)
+ .with_contents(Svg::new(if disclosed {
+ "icons/file_icons/chevron_down.svg"
+ } else {
+ "icons/file_icons/chevron_right.svg"
+ }))
+ .with_style(self.style.button)
+ .element()
+ .into_any()
+ } else {
+ Empty::new()
+ .into_any()
+ .constrained()
+ // TODO: Why is this optional at all?
+ .with_width(self.style.button.button_width.unwrap())
+ .into_any()
+ })
+ .with_child(
+ self.content
+ .with_style(self.style.content)
+ .render(cx)
+ .flex(1., true),
+ )
+ .align_children_center()
+ .contained()
+ .with_style(self.style.container)
+ .into_any()
+ }
+ }
}
pub mod toggle {
- use gpui::elements::{GeneralComponent, StyleableComponent};
+ use gpui::elements::{Component, SafeStylable};
use crate::Toggleable;
@@ -27,7 +147,7 @@ pub mod toggle {
component: C,
}
- impl<C: StyleableComponent> Toggle<C, ()> {
+ impl<C: SafeStylable> Toggle<C, ()> {
pub fn new(component: C, active: bool) -> Self {
Toggle {
active,
@@ -37,7 +157,7 @@ pub mod toggle {
}
}
- impl<C: StyleableComponent> StyleableComponent for Toggle<C, ()> {
+ impl<C: SafeStylable> SafeStylable for Toggle<C, ()> {
type Style = Toggleable<C::Style>;
type Output = Toggle<C, Self::Style>;
@@ -51,15 +171,11 @@ pub mod toggle {
}
}
- impl<C: StyleableComponent> GeneralComponent for Toggle<C, Toggleable<C::Style>> {
- fn render<V: gpui::View>(
- self,
- v: &mut V,
- cx: &mut gpui::ViewContext<V>,
- ) -> gpui::AnyElement<V> {
+ impl<C: SafeStylable> Component for Toggle<C, Toggleable<C::Style>> {
+ fn render<V: 'static>(self, cx: &mut gpui::ViewContext<V>) -> gpui::AnyElement<V> {
self.component
.with_style(self.style.in_state(self.active).clone())
- .render(v, cx)
+ .render(cx)
}
}
}
@@ -68,96 +184,103 @@ pub mod action_button {
use std::borrow::Cow;
use gpui::{
- elements::{
- ContainerStyle, GeneralComponent, MouseEventHandler, StyleableComponent, TooltipStyle,
- },
+ elements::{Component, ContainerStyle, MouseEventHandler, SafeStylable, TooltipStyle},
platform::{CursorStyle, MouseButton},
- Action, Element, TypeTag, View,
+ Action, Element, TypeTag,
};
use schemars::JsonSchema;
use serde_derive::Deserialize;
use crate::Interactive;
- pub struct ActionButton<C, S> {
- action: Box<dyn Action>,
- tooltip: Cow<'static, str>,
- tooltip_style: TooltipStyle,
- tag: TypeTag,
- contents: C,
- style: Interactive<S>,
- }
-
#[derive(Clone, Deserialize, Default, JsonSchema)]
pub struct ButtonStyle<C> {
#[serde(flatten)]
- container: ContainerStyle,
- button_width: Option<f32>,
- button_height: Option<f32>,
+ pub container: ContainerStyle,
+ // TODO: These are incorrect for the intended usage of the buttons.
+ // The size should be constant, but putting them here duplicates them
+ // across the states the buttons can be in
+ pub button_width: Option<f32>,
+ pub button_height: Option<f32>,
#[serde(flatten)]
contents: C,
}
- impl ActionButton<(), ()> {
- pub fn new_dynamic(
- action: Box<dyn Action>,
- tooltip: impl Into<Cow<'static, str>>,
- tooltip_style: TooltipStyle,
- ) -> Self {
+ pub struct Button<C, S> {
+ action: Box<dyn Action>,
+ tooltip: Option<(Cow<'static, str>, TooltipStyle)>,
+ tag: TypeTag,
+ id: usize,
+ contents: C,
+ style: Interactive<S>,
+ }
+
+ impl Button<(), ()> {
+ pub fn dynamic_action(action: Box<dyn Action>) -> Button<(), ()> {
Self {
contents: (),
tag: action.type_tag(),
- style: Interactive::new_blank(),
- tooltip: tooltip.into(),
- tooltip_style,
action,
+ style: Interactive::new_blank(),
+ tooltip: None,
+ id: 0,
}
}
- pub fn new<A: Action + Clone>(
- action: A,
+ pub fn action<A: Action + Clone>(action: A) -> Self {
+ Self::dynamic_action(Box::new(action))
+ }
+
+ pub fn with_tooltip(
+ mut self,
tooltip: impl Into<Cow<'static, str>>,
tooltip_style: TooltipStyle,
) -> Self {
- Self::new_dynamic(Box::new(action), tooltip, tooltip_style)
+ self.tooltip = Some((tooltip.into(), tooltip_style));
+ self
+ }
+
+ pub fn with_id(mut self, id: usize) -> Self {
+ self.id = id;
+ self
}
- pub fn with_contents<C: StyleableComponent>(self, contents: C) -> ActionButton<C, ()> {
- ActionButton {
+ pub fn with_contents<C: SafeStylable>(self, contents: C) -> Button<C, ()> {
+ Button {
action: self.action,
tag: self.tag,
style: self.style,
tooltip: self.tooltip,
- tooltip_style: self.tooltip_style,
+ id: self.id,
contents,
}
}
}
- impl<C: StyleableComponent> StyleableComponent for ActionButton<C, ()> {
+ impl<C: SafeStylable> SafeStylable for Button<C, ()> {
type Style = Interactive<ButtonStyle<C::Style>>;
- type Output = ActionButton<C, ButtonStyle<C::Style>>;
+ type Output = Button<C, ButtonStyle<C::Style>>;
fn with_style(self, style: Self::Style) -> Self::Output {
- ActionButton {
+ Button {
action: self.action,
tag: self.tag,
contents: self.contents,
tooltip: self.tooltip,
- tooltip_style: self.tooltip_style,
+ id: self.id,
style,
}
}
}
- impl<C: StyleableComponent> GeneralComponent for ActionButton<C, ButtonStyle<C::Style>> {
- fn render<V: View>(self, v: &mut V, cx: &mut gpui::ViewContext<V>) -> gpui::AnyElement<V> {
- MouseEventHandler::new_dynamic(self.tag, 0, cx, |state, cx| {
+ impl<C: SafeStylable> Component for Button<C, ButtonStyle<C::Style>> {
+ fn render<V: 'static>(self, cx: &mut gpui::ViewContext<V>) -> gpui::AnyElement<V> {
+ let mut button = MouseEventHandler::new_dynamic(self.tag, self.id, cx, |state, cx| {
let style = self.style.style_for(state);
let mut contents = self
.contents
.with_style(style.contents.to_owned())
- .render(v, cx)
+ .render(cx)
.contained()
.with_style(style.container)
.constrained();
@@ -185,15 +308,15 @@ pub mod action_button {
}
})
.with_cursor_style(CursorStyle::PointingHand)
- .with_dynamic_tooltip(
- self.tag,
- 0,
- self.tooltip,
- Some(self.action),
- self.tooltip_style,
- cx,
- )
- .into_any()
+ .into_any();
+
+ if let Some((tooltip, style)) = self.tooltip {
+ button = button
+ .with_dynamic_tooltip(self.tag, 0, tooltip, Some(self.action), style, cx)
+ .into_any()
+ }
+
+ button
}
}
}
@@ -202,7 +325,7 @@ pub mod svg {
use std::borrow::Cow;
use gpui::{
- elements::{GeneralComponent, StyleableComponent},
+ elements::{Component, Empty, SafeStylable},
Element,
};
use schemars::JsonSchema;
@@ -225,6 +348,7 @@ pub mod svg {
pub enum IconSize {
IconSize { icon_size: f32 },
Dimensions { width: f32, height: f32 },
+ IconDimensions { icon_width: f32, icon_height: f32 },
}
#[derive(Deserialize)]
@@ -248,6 +372,14 @@ pub mod svg {
icon_height: height,
color,
},
+ IconSize::IconDimensions {
+ icon_width,
+ icon_height,
+ } => SvgStyle {
+ icon_width,
+ icon_height,
+ color,
+ },
};
Ok(result)
@@ -255,20 +387,27 @@ pub mod svg {
}
pub struct Svg<S> {
- path: Cow<'static, str>,
+ path: Option<Cow<'static, str>>,
style: S,
}
impl Svg<()> {
pub fn new(path: impl Into<Cow<'static, str>>) -> Self {
Self {
- path: path.into(),
+ path: Some(path.into()),
+ style: (),
+ }
+ }
+
+ pub fn optional(path: Option<impl Into<Cow<'static, str>>>) -> Self {
+ Self {
+ path: path.map(Into::into),
style: (),
}
}
}
- impl StyleableComponent for Svg<()> {
+ impl SafeStylable for Svg<()> {
type Style = SvgStyle;
type Output = Svg<SvgStyle>;
@@ -281,18 +420,19 @@ pub mod svg {
}
}
- impl GeneralComponent for Svg<SvgStyle> {
- fn render<V: gpui::View>(
- self,
- _: &mut V,
- _: &mut gpui::ViewContext<V>,
- ) -> gpui::AnyElement<V> {
- gpui::elements::Svg::new(self.path)
- .with_color(self.style.color)
- .constrained()
- .with_width(self.style.icon_width)
- .with_height(self.style.icon_height)
- .into_any()
+ impl Component for Svg<SvgStyle> {
+ fn render<V: 'static>(self, _: &mut gpui::ViewContext<V>) -> gpui::AnyElement<V> {
+ if let Some(path) = self.path {
+ gpui::elements::Svg::new(path)
+ .with_color(self.style.color)
+ .constrained()
+ } else {
+ Empty::new().constrained()
+ }
+ .constrained()
+ .with_width(self.style.icon_width)
+ .with_height(self.style.icon_height)
+ .into_any()
}
}
}
@@ -301,7 +441,8 @@ pub mod label {
use std::borrow::Cow;
use gpui::{
- elements::{GeneralComponent, LabelStyle, StyleableComponent},
+ elements::{Component, LabelStyle, SafeStylable},
+ fonts::TextStyle,
Element,
};
@@ -319,25 +460,21 @@ pub mod label {
}
}
- impl StyleableComponent for Label<()> {
- type Style = LabelStyle;
+ impl SafeStylable for Label<()> {
+ type Style = TextStyle;
type Output = Label<LabelStyle>;
fn with_style(self, style: Self::Style) -> Self::Output {
Label {
text: self.text,
- style,
+ style: style.into(),
}
}
}
- impl GeneralComponent for Label<LabelStyle> {
- fn render<V: gpui::View>(
- self,
- _: &mut V,
- _: &mut gpui::ViewContext<V>,
- ) -> gpui::AnyElement<V> {
+ impl Component for Label<LabelStyle> {
+ fn render<V: 'static>(self, _: &mut gpui::ViewContext<V>) -> gpui::AnyElement<V> {
gpui::elements::Label::new(self.text, self.style).into_any()
}
}
@@ -3,7 +3,7 @@ mod theme_registry;
mod theme_settings;
pub mod ui;
-use components::ToggleIconButtonStyle;
+use components::{action_button::ButtonStyle, disclosure::DisclosureStyle, ToggleIconButtonStyle};
use gpui::{
color::Color,
elements::{ContainerStyle, ImageStyle, LabelStyle, Shadow, SvgStyle, TooltipStyle},
@@ -14,7 +14,7 @@ use schemars::JsonSchema;
use serde::{de::DeserializeOwned, Deserialize};
use serde_json::Value;
use settings::SettingsStore;
-use std::{collections::HashMap, sync::Arc};
+use std::{collections::HashMap, ops::Deref, sync::Arc};
use ui::{CheckboxStyle, CopilotCTAButton, IconStyle, ModalStyle};
pub use theme_registry::*;
@@ -66,6 +66,7 @@ pub struct Theme {
pub feedback: FeedbackStyle,
pub welcome: WelcomeStyle,
pub titlebar: Titlebar,
+ pub component_test: ComponentTest,
}
#[derive(Deserialize, Default, Clone, JsonSchema)]
@@ -221,6 +222,7 @@ pub struct CopilotAuthAuthorized {
pub struct CollabPanel {
#[serde(flatten)]
pub container: ContainerStyle,
+ pub disclosure: DisclosureStyle<()>,
pub list_empty_state: Toggleable<Interactive<ContainedText>>,
pub list_empty_icon: Icon,
pub list_empty_label_container: ContainerStyle,
@@ -259,6 +261,13 @@ pub struct CollabPanel {
pub face_overlap: f32,
}
+#[derive(Deserialize, Default, JsonSchema)]
+pub struct ComponentTest {
+ pub button: Interactive<ButtonStyle<TextStyle>>,
+ pub toggle: Toggleable<Interactive<ButtonStyle<TextStyle>>>,
+ pub disclosure: DisclosureStyle<TextStyle>,
+}
+
#[derive(Deserialize, Default, JsonSchema)]
pub struct TabbedModal {
pub tab_button: Toggleable<Interactive<ContainedText>>,
@@ -428,11 +437,11 @@ pub struct Search {
pub match_index: ContainedText,
pub major_results_status: TextStyle,
pub minor_results_status: TextStyle,
- pub dismiss_button: Interactive<IconButton>,
pub editor_icon: IconStyle,
pub mode_button: Toggleable<Interactive<ContainedText>>,
pub nav_button: Toggleable<Interactive<ContainedLabel>>,
pub search_bar_row_height: f32,
+ pub search_row_spacing: f32,
pub option_button_height: f32,
pub modes_container: ContainerStyle,
}
@@ -747,6 +756,7 @@ pub struct Editor {
pub line_number: Color,
pub line_number_active: Color,
pub guest_selections: Vec<SelectionStyle>,
+ pub absent_selection: SelectionStyle,
pub syntax: Arc<SyntaxTheme>,
pub hint: HighlightStyle,
pub suggestion: HighlightStyle,
@@ -890,6 +900,14 @@ pub struct Interactive<T> {
pub disabled: Option<T>,
}
+impl<T> Deref for Interactive<T> {
+ type Target = T;
+
+ fn deref(&self) -> &Self::Target {
+ &self.default
+ }
+}
+
impl Interactive<()> {
pub fn new_blank() -> Self {
Self {
@@ -907,6 +925,14 @@ pub struct Toggleable<T> {
inactive: T,
}
+impl<T> Deref for Toggleable<T> {
+ type Target = T;
+
+ fn deref(&self) -> &Self::Target {
+ &self.inactive
+ }
+}
+
impl Toggleable<()> {
pub fn new_blank() -> Self {
Self {
@@ -16,7 +16,7 @@ gpui = { path = "../gpui" }
picker = { path = "../picker" }
theme = { path = "../theme" }
settings = { path = "../settings" }
-staff_mode = { path = "../staff_mode" }
+feature_flags = { path = "../feature_flags" }
workspace = { path = "../workspace" }
util = { path = "../util" }
log.workspace = true
@@ -1,9 +1,9 @@
+use feature_flags::FeatureFlagAppExt;
use fs::Fs;
use fuzzy::{match_strings, StringMatch, StringMatchCandidate};
use gpui::{actions, elements::*, AnyElement, AppContext, Element, MouseState, ViewContext};
use picker::{Picker, PickerDelegate, PickerEvent};
use settings::{update_settings_file, SettingsStore};
-use staff_mode::StaffMode;
use std::sync::Arc;
use theme::{Theme, ThemeMeta, ThemeRegistry, ThemeSettings};
use util::ResultExt;
@@ -54,7 +54,7 @@ impl ThemeSelectorDelegate {
fn new(fs: Arc<dyn Fs>, cx: &mut ViewContext<ThemeSelector>) -> Self {
let original_theme = theme::current(cx).clone();
- let staff_mode = **cx.default_global::<StaffMode>();
+ let staff_mode = cx.is_staff();
let registry = cx.global::<Arc<ThemeRegistry>>();
let mut theme_names = registry.list(staff_mode).collect::<Vec<_>>();
theme_names.sort_unstable_by(|a, b| a.is_light.cmp(&b.is_light).then(a.name.cmp(&b.name)));
@@ -107,20 +107,15 @@ impl PickerDelegate for BranchListDelegate {
let delegate = view.delegate();
let project = delegate.workspace.read(cx).project().read(&cx);
- let Some(worktree) = project
- .visible_worktrees(cx)
- .next()
- else {
+ let Some(worktree) = project.visible_worktrees(cx).next() else {
bail!("Cannot update branch list as there are no visible worktrees")
};
- let mut cwd = worktree .read(cx)
- .abs_path()
- .to_path_buf();
+ let mut cwd = worktree.read(cx).abs_path().to_path_buf();
cwd.push(".git");
- let Some(repo) = project.fs().open_repo(&cwd) else {bail!("Project does not have associated git repository.")};
- let mut branches = repo
- .lock()
- .branches()?;
+ let Some(repo) = project.fs().open_repo(&cwd) else {
+ bail!("Project does not have associated git repository.")
+ };
+ let mut branches = repo.lock().branches()?;
const RECENT_BRANCHES_COUNT: usize = 10;
if query.is_empty() && branches.len() > RECENT_BRANCHES_COUNT {
// Truncate list of recent branches
@@ -142,8 +137,13 @@ impl PickerDelegate for BranchListDelegate {
})
.collect::<Vec<_>>())
})
- .log_err() else { return; };
- let Some(candidates) = candidates.log_err() else {return;};
+ .log_err()
+ else {
+ return;
+ };
+ let Some(candidates) = candidates.log_err() else {
+ return;
+ };
let matches = if query.is_empty() {
candidates
.into_iter()
@@ -184,7 +184,11 @@ impl PickerDelegate for BranchListDelegate {
fn confirm(&mut self, _: bool, cx: &mut ViewContext<Picker<Self>>) {
let current_pick = self.selected_index();
- let Some(current_pick) = self.matches.get(current_pick).map(|pick| pick.string.clone()) else {
+ let Some(current_pick) = self
+ .matches
+ .get(current_pick)
+ .map(|pick| pick.string.clone())
+ else {
return;
};
cx.spawn(|picker, mut cx| async move {
@@ -1,8 +1,8 @@
-use std::sync::Arc;
+use std::{cmp, sync::Arc};
use editor::{
char_kind,
- display_map::{DisplaySnapshot, ToDisplayPoint},
+ display_map::{DisplaySnapshot, FoldPoint, ToDisplayPoint},
movement, Bias, CharKind, DisplayPoint, ToOffset,
};
use gpui::{actions, impl_actions, AppContext, WindowContext};
@@ -21,16 +21,16 @@ use crate::{
pub enum Motion {
Left,
Backspace,
- Down,
- Up,
+ Down { display_lines: bool },
+ Up { display_lines: bool },
Right,
NextWordStart { ignore_punctuation: bool },
NextWordEnd { ignore_punctuation: bool },
PreviousWordStart { ignore_punctuation: bool },
- FirstNonWhitespace,
+ FirstNonWhitespace { display_lines: bool },
CurrentLine,
- StartOfLine,
- EndOfLine,
+ StartOfLine { display_lines: bool },
+ EndOfLine { display_lines: bool },
StartOfParagraph,
EndOfParagraph,
StartOfDocument,
@@ -62,6 +62,41 @@ struct PreviousWordStart {
ignore_punctuation: bool,
}
+#[derive(Clone, Deserialize, PartialEq)]
+#[serde(rename_all = "camelCase")]
+struct Up {
+ #[serde(default)]
+ display_lines: bool,
+}
+
+#[derive(Clone, Deserialize, PartialEq)]
+#[serde(rename_all = "camelCase")]
+struct Down {
+ #[serde(default)]
+ display_lines: bool,
+}
+
+#[derive(Clone, Deserialize, PartialEq)]
+#[serde(rename_all = "camelCase")]
+struct FirstNonWhitespace {
+ #[serde(default)]
+ display_lines: bool,
+}
+
+#[derive(Clone, Deserialize, PartialEq)]
+#[serde(rename_all = "camelCase")]
+struct EndOfLine {
+ #[serde(default)]
+ display_lines: bool,
+}
+
+#[derive(Clone, Deserialize, PartialEq)]
+#[serde(rename_all = "camelCase")]
+struct StartOfLine {
+ #[serde(default)]
+ display_lines: bool,
+}
+
#[derive(Clone, Deserialize, PartialEq)]
struct RepeatFind {
#[serde(default)]
@@ -73,12 +108,7 @@ actions!(
[
Left,
Backspace,
- Down,
- Up,
Right,
- FirstNonWhitespace,
- StartOfLine,
- EndOfLine,
CurrentLine,
StartOfParagraph,
EndOfParagraph,
@@ -90,20 +120,63 @@ actions!(
);
impl_actions!(
vim,
- [NextWordStart, NextWordEnd, PreviousWordStart, RepeatFind]
+ [
+ NextWordStart,
+ NextWordEnd,
+ PreviousWordStart,
+ RepeatFind,
+ Up,
+ Down,
+ FirstNonWhitespace,
+ EndOfLine,
+ StartOfLine,
+ ]
);
pub fn init(cx: &mut AppContext) {
cx.add_action(|_: &mut Workspace, _: &Left, cx: _| motion(Motion::Left, cx));
cx.add_action(|_: &mut Workspace, _: &Backspace, cx: _| motion(Motion::Backspace, cx));
- cx.add_action(|_: &mut Workspace, _: &Down, cx: _| motion(Motion::Down, cx));
- cx.add_action(|_: &mut Workspace, _: &Up, cx: _| motion(Motion::Up, cx));
+ cx.add_action(|_: &mut Workspace, action: &Down, cx: _| {
+ motion(
+ Motion::Down {
+ display_lines: action.display_lines,
+ },
+ cx,
+ )
+ });
+ cx.add_action(|_: &mut Workspace, action: &Up, cx: _| {
+ motion(
+ Motion::Up {
+ display_lines: action.display_lines,
+ },
+ cx,
+ )
+ });
cx.add_action(|_: &mut Workspace, _: &Right, cx: _| motion(Motion::Right, cx));
- cx.add_action(|_: &mut Workspace, _: &FirstNonWhitespace, cx: _| {
- motion(Motion::FirstNonWhitespace, cx)
+ cx.add_action(|_: &mut Workspace, action: &FirstNonWhitespace, cx: _| {
+ motion(
+ Motion::FirstNonWhitespace {
+ display_lines: action.display_lines,
+ },
+ cx,
+ )
+ });
+ cx.add_action(|_: &mut Workspace, action: &StartOfLine, cx: _| {
+ motion(
+ Motion::StartOfLine {
+ display_lines: action.display_lines,
+ },
+ cx,
+ )
+ });
+ cx.add_action(|_: &mut Workspace, action: &EndOfLine, cx: _| {
+ motion(
+ Motion::EndOfLine {
+ display_lines: action.display_lines,
+ },
+ cx,
+ )
});
- cx.add_action(|_: &mut Workspace, _: &StartOfLine, cx: _| motion(Motion::StartOfLine, cx));
- cx.add_action(|_: &mut Workspace, _: &EndOfLine, cx: _| motion(Motion::EndOfLine, cx));
cx.add_action(|_: &mut Workspace, _: &CurrentLine, cx: _| motion(Motion::CurrentLine, cx));
cx.add_action(|_: &mut Workspace, _: &StartOfParagraph, cx: _| {
motion(Motion::StartOfParagraph, cx)
@@ -192,19 +265,25 @@ impl Motion {
pub fn linewise(&self) -> bool {
use Motion::*;
match self {
- Down | Up | StartOfDocument | EndOfDocument | CurrentLine | NextLineStart
- | StartOfParagraph | EndOfParagraph => true,
- EndOfLine
+ Down { .. }
+ | Up { .. }
+ | StartOfDocument
+ | EndOfDocument
+ | CurrentLine
+ | NextLineStart
+ | StartOfParagraph
+ | EndOfParagraph => true,
+ EndOfLine { .. }
| NextWordEnd { .. }
| Matching
| FindForward { .. }
| Left
| Backspace
| Right
- | StartOfLine
+ | StartOfLine { .. }
| NextWordStart { .. }
| PreviousWordStart { .. }
- | FirstNonWhitespace
+ | FirstNonWhitespace { .. }
| FindBackward { .. } => false,
}
}
@@ -213,21 +292,21 @@ impl Motion {
use Motion::*;
match self {
StartOfDocument | EndOfDocument | CurrentLine => true,
- Down
- | Up
- | EndOfLine
+ Down { .. }
+ | Up { .. }
+ | EndOfLine { .. }
| NextWordEnd { .. }
| Matching
| FindForward { .. }
| Left
| Backspace
| Right
- | StartOfLine
+ | StartOfLine { .. }
| StartOfParagraph
| EndOfParagraph
| NextWordStart { .. }
| PreviousWordStart { .. }
- | FirstNonWhitespace
+ | FirstNonWhitespace { .. }
| FindBackward { .. }
| NextLineStart => false,
}
@@ -236,12 +315,12 @@ impl Motion {
pub fn inclusive(&self) -> bool {
use Motion::*;
match self {
- Down
- | Up
+ Down { .. }
+ | Up { .. }
| StartOfDocument
| EndOfDocument
| CurrentLine
- | EndOfLine
+ | EndOfLine { .. }
| NextWordEnd { .. }
| Matching
| FindForward { .. }
@@ -249,12 +328,12 @@ impl Motion {
Left
| Backspace
| Right
- | StartOfLine
+ | StartOfLine { .. }
| StartOfParagraph
| EndOfParagraph
| NextWordStart { .. }
| PreviousWordStart { .. }
- | FirstNonWhitespace
+ | FirstNonWhitespace { .. }
| FindBackward { .. } => false,
}
}
@@ -272,8 +351,18 @@ impl Motion {
let (new_point, goal) = match self {
Left => (left(map, point, times), SelectionGoal::None),
Backspace => (backspace(map, point, times), SelectionGoal::None),
- Down => down(map, point, goal, times),
- Up => up(map, point, goal, times),
+ Down {
+ display_lines: false,
+ } => down(map, point, goal, times),
+ Down {
+ display_lines: true,
+ } => down_display(map, point, goal, times),
+ Up {
+ display_lines: false,
+ } => up(map, point, goal, times),
+ Up {
+ display_lines: true,
+ } => up_display(map, point, goal, times),
Right => (right(map, point, times), SelectionGoal::None),
NextWordStart { ignore_punctuation } => (
next_word_start(map, point, *ignore_punctuation, times),
@@ -287,9 +376,17 @@ impl Motion {
previous_word_start(map, point, *ignore_punctuation, times),
SelectionGoal::None,
),
- FirstNonWhitespace => (first_non_whitespace(map, point), SelectionGoal::None),
- StartOfLine => (start_of_line(map, point), SelectionGoal::None),
- EndOfLine => (end_of_line(map, point), SelectionGoal::None),
+ FirstNonWhitespace { display_lines } => (
+ first_non_whitespace(map, *display_lines, point),
+ SelectionGoal::None,
+ ),
+ StartOfLine { display_lines } => (
+ start_of_line(map, *display_lines, point),
+ SelectionGoal::None,
+ ),
+ EndOfLine { display_lines } => {
+ (end_of_line(map, *display_lines, point), SelectionGoal::None)
+ }
StartOfParagraph => (
movement::start_of_paragraph(map, point, times),
SelectionGoal::None,
@@ -298,7 +395,7 @@ impl Motion {
map.clip_at_line_end(movement::end_of_paragraph(map, point, times)),
SelectionGoal::None,
),
- CurrentLine => (end_of_line(map, point), SelectionGoal::None),
+ CurrentLine => (end_of_line(map, false, point), SelectionGoal::None),
StartOfDocument => (start_of_document(map, point, times), SelectionGoal::None),
EndOfDocument => (
end_of_document(map, point, maybe_times),
@@ -399,6 +496,33 @@ fn backspace(map: &DisplaySnapshot, mut point: DisplayPoint, times: usize) -> Di
}
fn down(
+ map: &DisplaySnapshot,
+ point: DisplayPoint,
+ mut goal: SelectionGoal,
+ times: usize,
+) -> (DisplayPoint, SelectionGoal) {
+ let start = map.display_point_to_fold_point(point, Bias::Left);
+
+ let goal_column = match goal {
+ SelectionGoal::Column(column) => column,
+ SelectionGoal::ColumnRange { end, .. } => end,
+ _ => {
+ goal = SelectionGoal::Column(start.column());
+ start.column()
+ }
+ };
+
+ let new_row = cmp::min(
+ start.row() + times as u32,
+ map.buffer_snapshot.max_point().row,
+ );
+ let new_col = cmp::min(goal_column, map.fold_snapshot.line_len(new_row));
+ let point = map.fold_point_to_display_point(FoldPoint::new(new_row, new_col));
+
+ (map.clip_point(point, Bias::Left), goal)
+}
+
+fn down_display(
map: &DisplaySnapshot,
mut point: DisplayPoint,
mut goal: SelectionGoal,
@@ -407,10 +531,35 @@ fn down(
for _ in 0..times {
(point, goal) = movement::down(map, point, goal, true);
}
+
(point, goal)
}
-fn up(
+pub(crate) fn up(
+ map: &DisplaySnapshot,
+ point: DisplayPoint,
+ mut goal: SelectionGoal,
+ times: usize,
+) -> (DisplayPoint, SelectionGoal) {
+ let start = map.display_point_to_fold_point(point, Bias::Left);
+
+ let goal_column = match goal {
+ SelectionGoal::Column(column) => column,
+ SelectionGoal::ColumnRange { end, .. } => end,
+ _ => {
+ goal = SelectionGoal::Column(start.column());
+ start.column()
+ }
+ };
+
+ let new_row = start.row().saturating_sub(times as u32);
+ let new_col = cmp::min(goal_column, map.fold_snapshot.line_len(new_row));
+ let point = map.fold_point_to_display_point(FoldPoint::new(new_row, new_col));
+
+ (map.clip_point(point, Bias::Left), goal)
+}
+
+fn up_display(
map: &DisplaySnapshot,
mut point: DisplayPoint,
mut goal: SelectionGoal,
@@ -419,6 +568,7 @@ fn up(
for _ in 0..times {
(point, goal) = movement::up(map, point, goal, true);
}
+
(point, goal)
}
@@ -509,8 +659,12 @@ fn previous_word_start(
point
}
-fn first_non_whitespace(map: &DisplaySnapshot, from: DisplayPoint) -> DisplayPoint {
- let mut last_point = DisplayPoint::new(from.row(), 0);
+fn first_non_whitespace(
+ map: &DisplaySnapshot,
+ display_lines: bool,
+ from: DisplayPoint,
+) -> DisplayPoint {
+ let mut last_point = start_of_line(map, display_lines, from);
let language = map.buffer_snapshot.language_at(from.to_point(map));
for (ch, point) in map.chars_at(last_point) {
if ch == '\n' {
@@ -527,12 +681,31 @@ fn first_non_whitespace(map: &DisplaySnapshot, from: DisplayPoint) -> DisplayPoi
map.clip_point(last_point, Bias::Left)
}
-fn start_of_line(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPoint {
- map.prev_line_boundary(point.to_point(map)).1
+pub(crate) fn start_of_line(
+ map: &DisplaySnapshot,
+ display_lines: bool,
+ point: DisplayPoint,
+) -> DisplayPoint {
+ if display_lines {
+ map.clip_point(DisplayPoint::new(point.row(), 0), Bias::Right)
+ } else {
+ map.prev_line_boundary(point.to_point(map)).1
+ }
}
-fn end_of_line(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPoint {
- map.clip_point(map.next_line_boundary(point.to_point(map)).1, Bias::Left)
+pub(crate) fn end_of_line(
+ map: &DisplaySnapshot,
+ display_lines: bool,
+ point: DisplayPoint,
+) -> DisplayPoint {
+ if display_lines {
+ map.clip_point(
+ DisplayPoint::new(point.row(), map.line_len(point.row())),
+ Bias::Left,
+ )
+ } else {
+ map.clip_point(map.next_line_boundary(point.to_point(map)).1, Bias::Left)
+ }
}
fn start_of_document(map: &DisplaySnapshot, point: DisplayPoint, line: usize) -> DisplayPoint {
@@ -654,11 +827,8 @@ fn find_backward(
}
fn next_line_start(map: &DisplaySnapshot, point: DisplayPoint, times: usize) -> DisplayPoint {
- let new_row = (point.row() + times as u32).min(map.max_buffer_row());
- first_non_whitespace(
- map,
- map.clip_point(DisplayPoint::new(new_row, 0), Bias::Left),
- )
+ let correct_line = down(map, point, SelectionGoal::None, times).0;
+ first_non_whitespace(map, false, correct_line)
}
#[cfg(test)]
@@ -10,7 +10,7 @@ mod yank;
use std::sync::Arc;
use crate::{
- motion::Motion,
+ motion::{self, Motion},
object::Object,
state::{Mode, Operator},
Vim,
@@ -78,13 +78,27 @@ pub fn init(cx: &mut AppContext) {
cx.add_action(|_: &mut Workspace, _: &ChangeToEndOfLine, cx| {
Vim::update(cx, |vim, cx| {
let times = vim.pop_number_operator(cx);
- change_motion(vim, Motion::EndOfLine, times, cx);
+ change_motion(
+ vim,
+ Motion::EndOfLine {
+ display_lines: false,
+ },
+ times,
+ cx,
+ );
})
});
cx.add_action(|_: &mut Workspace, _: &DeleteToEndOfLine, cx| {
Vim::update(cx, |vim, cx| {
let times = vim.pop_number_operator(cx);
- delete_motion(vim, Motion::EndOfLine, times, cx);
+ delete_motion(
+ vim,
+ Motion::EndOfLine {
+ display_lines: false,
+ },
+ times,
+ cx,
+ );
})
});
scroll::init(cx);
@@ -165,7 +179,10 @@ fn insert_first_non_whitespace(
vim.update_active_editor(cx, |editor, cx| {
editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.maybe_move_cursors_with(|map, cursor, goal| {
- Motion::FirstNonWhitespace.move_point(map, cursor, goal, None)
+ Motion::FirstNonWhitespace {
+ display_lines: false,
+ }
+ .move_point(map, cursor, goal, None)
});
});
});
@@ -178,7 +195,7 @@ fn insert_end_of_line(_: &mut Workspace, _: &InsertEndOfLine, cx: &mut ViewConte
vim.update_active_editor(cx, |editor, cx| {
editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.maybe_move_cursors_with(|map, cursor, goal| {
- Motion::EndOfLine.move_point(map, cursor, goal, None)
+ Motion::CurrentLine.move_point(map, cursor, goal, None)
});
});
});
@@ -197,19 +214,19 @@ fn insert_line_above(_: &mut Workspace, _: &InsertLineAbove, cx: &mut ViewContex
.collect();
let edits = selection_start_rows.into_iter().map(|row| {
let (indent, _) = map.line_indent(row);
- let start_of_line = map
- .clip_point(DisplayPoint::new(row, 0), Bias::Left)
- .to_point(&map);
+ let start_of_line =
+ motion::start_of_line(&map, false, DisplayPoint::new(row, 0))
+ .to_point(&map);
let mut new_text = " ".repeat(indent as usize);
new_text.push('\n');
(start_of_line..start_of_line, new_text)
});
editor.edit_with_autoindent(edits, cx);
editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
- s.move_cursors_with(|map, mut cursor, _| {
- *cursor.row_mut() -= 1;
- *cursor.column_mut() = map.line_len(cursor.row());
- (map.clip_point(cursor, Bias::Left), SelectionGoal::None)
+ s.move_cursors_with(|map, cursor, _| {
+ let previous_line = motion::up(map, cursor, SelectionGoal::None, 1).0;
+ let insert_point = motion::end_of_line(map, false, previous_line);
+ (insert_point, SelectionGoal::None)
});
});
});
@@ -223,22 +240,23 @@ fn insert_line_below(_: &mut Workspace, _: &InsertLineBelow, cx: &mut ViewContex
vim.update_active_editor(cx, |editor, cx| {
editor.transact(cx, |editor, cx| {
let (map, old_selections) = editor.selections.all_display(cx);
+
let selection_end_rows: HashSet<u32> = old_selections
.into_iter()
.map(|selection| selection.end.row())
.collect();
let edits = selection_end_rows.into_iter().map(|row| {
let (indent, _) = map.line_indent(row);
- let end_of_line = map
- .clip_point(DisplayPoint::new(row, map.line_len(row)), Bias::Left)
- .to_point(&map);
+ let end_of_line =
+ motion::end_of_line(&map, false, DisplayPoint::new(row, 0)).to_point(&map);
+
let mut new_text = "\n".to_string();
new_text.push_str(&" ".repeat(indent as usize));
(end_of_line..end_of_line, new_text)
});
editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.maybe_move_cursors_with(|map, cursor, goal| {
- Motion::EndOfLine.move_point(map, cursor, goal, None)
+ Motion::CurrentLine.move_point(map, cursor, goal, None)
});
});
editor.edit_with_autoindent(edits, cx);
@@ -10,7 +10,11 @@ pub fn change_motion(vim: &mut Vim, motion: Motion, times: Option<usize>, cx: &m
// Some motions ignore failure when switching to normal mode
let mut motion_succeeded = matches!(
motion,
- Motion::Left | Motion::Right | Motion::EndOfLine | Motion::Backspace | Motion::StartOfLine
+ Motion::Left
+ | Motion::Right
+ | Motion::EndOfLine { .. }
+ | Motion::Backspace
+ | Motion::StartOfLine { .. }
);
vim.update_active_editor(cx, |editor, cx| {
editor.transact(cx, |editor, cx| {
@@ -33,7 +33,7 @@ fn paste(_: &mut Workspace, action: &Paste, cx: &mut ViewContext<Workspace>) {
editor.set_clip_at_line_ends(false, cx);
let Some(item) = cx.read_from_clipboard() else {
- return
+ return;
};
let clipboard_text = Cow::Borrowed(item.text());
if clipboard_text.is_empty() {
@@ -15,7 +15,10 @@ pub fn substitute(vim: &mut Vim, count: Option<usize>, cx: &mut WindowContext) {
}
if line_mode {
Motion::CurrentLine.expand_selection(map, selection, None, false);
- if let Some((point, _)) = Motion::FirstNonWhitespace.move_point(
+ if let Some((point, _)) = (Motion::FirstNonWhitespace {
+ display_lines: false,
+ })
+ .move_point(
map,
selection.start,
selection.goal,
@@ -1,7 +1,6 @@
mod neovim_backed_binding_test_context;
mod neovim_backed_test_context;
mod neovim_connection;
-mod vim_binding_test_context;
mod vim_test_context;
use std::sync::Arc;
@@ -10,7 +9,6 @@ use command_palette::CommandPalette;
use editor::DisplayPoint;
pub use neovim_backed_binding_test_context::*;
pub use neovim_backed_test_context::*;
-pub use vim_binding_test_context::*;
pub use vim_test_context::*;
use indoc::indoc;
@@ -287,3 +285,218 @@ async fn test_word_characters(cx: &mut gpui::TestAppContext) {
Mode::Visual,
)
}
+
+#[gpui::test]
+async fn test_wrapped_lines(cx: &mut gpui::TestAppContext) {
+ let mut cx = NeovimBackedTestContext::new(cx).await;
+
+ cx.set_shared_wrap(12).await;
+ // tests line wrap as follows:
+ // 1: twelve char
+ // twelve char
+ // 2: twelve char
+ cx.set_shared_state(indoc! { "
+ tˇwelve char twelve char
+ twelve char
+ "})
+ .await;
+ cx.simulate_shared_keystrokes(["j"]).await;
+ cx.assert_shared_state(indoc! { "
+ twelve char twelve char
+ tˇwelve char
+ "})
+ .await;
+ cx.simulate_shared_keystrokes(["k"]).await;
+ cx.assert_shared_state(indoc! { "
+ tˇwelve char twelve char
+ twelve char
+ "})
+ .await;
+ cx.simulate_shared_keystrokes(["g", "j"]).await;
+ cx.assert_shared_state(indoc! { "
+ twelve char tˇwelve char
+ twelve char
+ "})
+ .await;
+ cx.simulate_shared_keystrokes(["g", "j"]).await;
+ cx.assert_shared_state(indoc! { "
+ twelve char twelve char
+ tˇwelve char
+ "})
+ .await;
+
+ cx.simulate_shared_keystrokes(["g", "k"]).await;
+ cx.assert_shared_state(indoc! { "
+ twelve char tˇwelve char
+ twelve char
+ "})
+ .await;
+
+ cx.simulate_shared_keystrokes(["g", "^"]).await;
+ cx.assert_shared_state(indoc! { "
+ twelve char ˇtwelve char
+ twelve char
+ "})
+ .await;
+
+ cx.simulate_shared_keystrokes(["^"]).await;
+ cx.assert_shared_state(indoc! { "
+ ˇtwelve char twelve char
+ twelve char
+ "})
+ .await;
+
+ cx.simulate_shared_keystrokes(["g", "$"]).await;
+ cx.assert_shared_state(indoc! { "
+ twelve charˇ twelve char
+ twelve char
+ "})
+ .await;
+ cx.simulate_shared_keystrokes(["$"]).await;
+ cx.assert_shared_state(indoc! { "
+ twelve char twelve chaˇr
+ twelve char
+ "})
+ .await;
+
+ cx.set_shared_state(indoc! { "
+ tˇwelve char twelve char
+ twelve char
+ "})
+ .await;
+ cx.simulate_shared_keystrokes(["enter"]).await;
+ cx.assert_shared_state(indoc! { "
+ twelve char twelve char
+ ˇtwelve char
+ "})
+ .await;
+
+ cx.set_shared_state(indoc! { "
+ twelve char
+ tˇwelve char twelve char
+ twelve char
+ "})
+ .await;
+ cx.simulate_shared_keystrokes(["o", "o", "escape"]).await;
+ cx.assert_shared_state(indoc! { "
+ twelve char
+ twelve char twelve char
+ ˇo
+ twelve char
+ "})
+ .await;
+
+ cx.set_shared_state(indoc! { "
+ twelve char
+ tˇwelve char twelve char
+ twelve char
+ "})
+ .await;
+ cx.simulate_shared_keystrokes(["shift-a", "a", "escape"])
+ .await;
+ cx.assert_shared_state(indoc! { "
+ twelve char
+ twelve char twelve charˇa
+ twelve char
+ "})
+ .await;
+ cx.simulate_shared_keystrokes(["shift-i", "i", "escape"])
+ .await;
+ cx.assert_shared_state(indoc! { "
+ twelve char
+ ˇitwelve char twelve chara
+ twelve char
+ "})
+ .await;
+ cx.simulate_shared_keystrokes(["shift-d"]).await;
+ cx.assert_shared_state(indoc! { "
+ twelve char
+ ˇ
+ twelve char
+ "})
+ .await;
+
+ cx.set_shared_state(indoc! { "
+ twelve char
+ twelve char tˇwelve char
+ twelve char
+ "})
+ .await;
+ cx.simulate_shared_keystrokes(["shift-o", "o", "escape"])
+ .await;
+ cx.assert_shared_state(indoc! { "
+ twelve char
+ ˇo
+ twelve char twelve char
+ twelve char
+ "})
+ .await;
+}
+
+#[gpui::test]
+async fn test_folds(cx: &mut gpui::TestAppContext) {
+ let mut cx = NeovimBackedTestContext::new(cx).await;
+ cx.set_neovim_option("foldmethod=manual").await;
+
+ cx.set_shared_state(indoc! { "
+ fn boop() {
+ ˇbarp()
+ bazp()
+ }
+ "})
+ .await;
+ cx.simulate_shared_keystrokes(["shift-v", "j", "z", "f"])
+ .await;
+
+ // visual display is now:
+ // fn boop() {
+ // [FOLDED]
+ // }
+
+ // TODO: this should not be needed but currently zf does not
+ // return to normal mode.
+ cx.simulate_shared_keystrokes(["escape"]).await;
+
+ // skip over fold downward
+ cx.simulate_shared_keystrokes(["g", "g"]).await;
+ cx.assert_shared_state(indoc! { "
+ ˇfn boop() {
+ barp()
+ bazp()
+ }
+ "})
+ .await;
+
+ cx.simulate_shared_keystrokes(["j", "j"]).await;
+ cx.assert_shared_state(indoc! { "
+ fn boop() {
+ barp()
+ bazp()
+ ˇ}
+ "})
+ .await;
+
+ // skip over fold upward
+ cx.simulate_shared_keystrokes(["2", "k"]).await;
+ cx.assert_shared_state(indoc! { "
+ ˇfn boop() {
+ barp()
+ bazp()
+ }
+ "})
+ .await;
+
+ // yank the fold
+ cx.simulate_shared_keystrokes(["down", "y", "y"]).await;
+ cx.assert_shared_clipboard(" barp()\n bazp()\n").await;
+
+ // re-open
+ cx.simulate_shared_keystrokes(["z", "o"]).await;
+ cx.assert_shared_state(indoc! { "
+ fn boop() {
+ ˇ barp()
+ bazp()
+ }
+ "})
+ .await;
+}
@@ -1,9 +1,13 @@
use indoc::indoc;
+use settings::SettingsStore;
use std::ops::{Deref, DerefMut, Range};
use collections::{HashMap, HashSet};
use gpui::ContextHandle;
-use language::OffsetRangeExt;
+use language::{
+ language_settings::{AllLanguageSettings, SoftWrap},
+ OffsetRangeExt,
+};
use util::test::{generate_marked_text, marked_text_offsets};
use super::{neovim_connection::NeovimConnection, NeovimBackedBindingTestContext, VimTestContext};
@@ -127,6 +131,27 @@ impl<'a> NeovimBackedTestContext<'a> {
context_handle
}
+ pub async fn set_shared_wrap(&mut self, columns: u32) {
+ if columns < 12 {
+ panic!("nvim doesn't support columns < 12")
+ }
+ self.neovim.set_option("wrap").await;
+ self.neovim.set_option("columns=12").await;
+
+ self.update(|cx| {
+ cx.update_global(|settings: &mut SettingsStore, cx| {
+ settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
+ settings.defaults.soft_wrap = Some(SoftWrap::PreferredLineLength);
+ settings.defaults.preferred_line_length = Some(columns);
+ });
+ })
+ })
+ }
+
+ pub async fn set_neovim_option(&mut self, option: &str) {
+ self.neovim.set_option(option).await;
+ }
+
pub async fn assert_shared_state(&mut self, marked_text: &str) {
let neovim = self.neovim_state().await;
let editor = self.editor_state();
@@ -41,6 +41,7 @@ pub enum NeovimData {
Key(String),
Get { state: String, mode: Option<Mode> },
ReadRegister { name: char, value: String },
+ SetOption { value: String },
}
pub struct NeovimConnection {
@@ -222,6 +223,29 @@ impl NeovimConnection {
);
}
+ #[cfg(feature = "neovim")]
+ pub async fn set_option(&mut self, value: &str) {
+ self.nvim
+ .command_output(format!("set {}", value).as_str())
+ .await
+ .unwrap();
+
+ self.data.push_back(NeovimData::SetOption {
+ value: value.to_string(),
+ })
+ }
+
+ #[cfg(not(feature = "neovim"))]
+ pub async fn set_option(&mut self, value: &str) {
+ assert_eq!(
+ self.data.pop_front(),
+ Some(NeovimData::SetOption {
+ value: value.to_string(),
+ }),
+ "operation does not match recorded script. re-record with --features=neovim"
+ );
+ }
+
#[cfg(not(feature = "neovim"))]
pub async fn read_register(&mut self, register: char) -> String {
if let Some(NeovimData::Get { .. }) = self.data.front() {
@@ -1,64 +0,0 @@
-use std::ops::{Deref, DerefMut};
-
-use crate::*;
-
-use super::VimTestContext;
-
-pub struct VimBindingTestContext<'a, const COUNT: usize> {
- cx: VimTestContext<'a>,
- keystrokes_under_test: [&'static str; COUNT],
- mode_before: Mode,
- mode_after: Mode,
-}
-
-impl<'a, const COUNT: usize> VimBindingTestContext<'a, COUNT> {
- pub fn new(
- keystrokes_under_test: [&'static str; COUNT],
- mode_before: Mode,
- mode_after: Mode,
- cx: VimTestContext<'a>,
- ) -> Self {
- Self {
- cx,
- keystrokes_under_test,
- mode_before,
- mode_after,
- }
- }
-
- pub fn binding<const NEW_COUNT: usize>(
- self,
- keystrokes_under_test: [&'static str; NEW_COUNT],
- ) -> VimBindingTestContext<'a, NEW_COUNT> {
- VimBindingTestContext {
- keystrokes_under_test,
- cx: self.cx,
- mode_before: self.mode_before,
- mode_after: self.mode_after,
- }
- }
-
- pub fn assert(&mut self, initial_state: &str, state_after: &str) {
- self.cx.assert_binding(
- self.keystrokes_under_test,
- initial_state,
- self.mode_before,
- state_after,
- self.mode_after,
- )
- }
-}
-
-impl<'a, const COUNT: usize> Deref for VimBindingTestContext<'a, COUNT> {
- type Target = VimTestContext<'a>;
-
- fn deref(&self) -> &Self::Target {
- &self.cx
- }
-}
-
-impl<'a, const COUNT: usize> DerefMut for VimBindingTestContext<'a, COUNT> {
- fn deref_mut(&mut self) -> &mut Self::Target {
- &mut self.cx
- }
-}
@@ -8,8 +8,6 @@ use search::{BufferSearchBar, ProjectSearchBar};
use crate::{state::Operator, *};
-use super::VimBindingTestContext;
-
pub struct VimTestContext<'a> {
cx: EditorLspTestContext<'a>,
}
@@ -126,14 +124,6 @@ impl<'a> VimTestContext<'a> {
assert_eq!(self.mode(), mode_after, "{}", self.assertion_context());
assert_eq!(self.active_operator(), None, "{}", self.assertion_context());
}
-
- pub fn binding<const COUNT: usize>(
- mut self,
- keystrokes: [&'static str; COUNT],
- ) -> VimBindingTestContext<'a, COUNT> {
- let mode = self.mode();
- VimBindingTestContext::new(keystrokes, mode, mode, self)
- }
}
impl<'a> Deref for VimTestContext<'a> {
@@ -1,5 +1,6 @@
use editor::{ClipboardSelection, Editor};
use gpui::{AppContext, ClipboardItem};
+use language::Point;
pub fn copy_selections_content(editor: &mut Editor, linewise: bool, cx: &mut AppContext) {
let selections = editor.selections.all_adjusted(cx);
@@ -9,7 +10,7 @@ pub fn copy_selections_content(editor: &mut Editor, linewise: bool, cx: &mut App
{
let mut is_first = true;
for selection in selections.iter() {
- let start = selection.start;
+ let mut start = selection.start;
let end = selection.end;
if is_first {
is_first = false;
@@ -17,9 +18,25 @@ pub fn copy_selections_content(editor: &mut Editor, linewise: bool, cx: &mut App
text.push_str("\n");
}
let initial_len = text.len();
+
+ // if the file does not end with \n, and our line-mode selection ends on
+ // that line, we will have expanded the start of the selection to ensure it
+ // contains a newline (so that delete works as expected). We undo that change
+ // here.
+ let is_last_line = linewise
+ && end.row == buffer.max_buffer_row()
+ && buffer.max_point().column > 0
+ && start == Point::new(start.row, buffer.line_len(start.row));
+
+ if is_last_line {
+ start = Point::new(buffer.max_buffer_row(), 0);
+ }
for chunk in buffer.text_for_range(start..end) {
text.push_str(chunk);
}
+ if is_last_line {
+ text.push_str("\n");
+ }
clipboard_selections.push(ClipboardSelection {
len: text.len() - initial_len,
is_entire_line: linewise,
@@ -51,8 +51,15 @@ pub fn init(cx: &mut AppContext) {
pub fn visual_motion(motion: Motion, times: Option<usize>, cx: &mut WindowContext) {
Vim::update(cx, |vim, cx| {
vim.update_active_editor(cx, |editor, cx| {
- if vim.state().mode == Mode::VisualBlock && !matches!(motion, Motion::EndOfLine) {
- let is_up_or_down = matches!(motion, Motion::Up | Motion::Down);
+ if vim.state().mode == Mode::VisualBlock
+ && !matches!(
+ motion,
+ Motion::EndOfLine {
+ display_lines: false
+ }
+ )
+ {
+ let is_up_or_down = matches!(motion, Motion::Up { .. } | Motion::Down { .. });
visual_block_motion(is_up_or_down, editor, cx, |map, point, goal| {
motion.move_point(map, point, goal, times)
})
@@ -77,7 +84,10 @@ pub fn visual_motion(motion: Motion, times: Option<usize>, cx: &mut WindowContex
}
let Some((new_head, goal)) =
- motion.move_point(map, current_head, selection.goal, times) else { return };
+ motion.move_point(map, current_head, selection.goal, times)
+ else {
+ return;
+ };
selection.set_head(new_head, goal);
@@ -123,16 +133,21 @@ pub fn visual_block_motion(
let map = &s.display_map();
let mut head = s.newest_anchor().head().to_display_point(map);
let mut tail = s.oldest_anchor().tail().to_display_point(map);
- let mut goal = s.newest_anchor().goal;
- let was_reversed = tail.column() > head.column();
+ let (start, end) = match s.newest_anchor().goal {
+ SelectionGoal::ColumnRange { start, end } if preserve_goal => (start, end),
+ SelectionGoal::Column(start) if preserve_goal => (start, start + 1),
+ _ => (tail.column(), head.column()),
+ };
+ let goal = SelectionGoal::ColumnRange { start, end };
+ let was_reversed = tail.column() > head.column();
if !was_reversed && !preserve_goal {
head = movement::saturating_left(map, head);
}
let Some((new_head, _)) = move_selection(&map, head, goal) else {
- return
+ return;
};
head = new_head;
@@ -146,13 +161,6 @@ pub fn visual_block_motion(
head = movement::saturating_right(map, head)
}
- let (start, end) = match goal {
- SelectionGoal::ColumnRange { start, end } if preserve_goal => (start, end),
- SelectionGoal::Column(start) if preserve_goal => (start, start + 1),
- _ => (tail.column(), head.column()),
- };
- goal = SelectionGoal::ColumnRange { start, end };
-
let columns = if is_reversed {
head.column()..tail.column()
} else if head.column() == tail.column() {
@@ -391,7 +399,7 @@ mod test {
the lazy dog"
})
.await;
- let cursor = cx.update_editor(|editor, _| editor.pixel_position_of_cursor());
+ let cursor = cx.update_editor(|editor, cx| editor.pixel_position_of_cursor(cx));
// entering visual mode should select the character
// under cursor
@@ -400,7 +408,7 @@ mod test {
fox jumps over
the lazy dog"})
.await;
- cx.update_editor(|editor, _| assert_eq!(cursor, editor.pixel_position_of_cursor()));
+ cx.update_editor(|editor, cx| assert_eq!(cursor, editor.pixel_position_of_cursor(cx)));
// forwards motions should extend the selection
cx.simulate_shared_keystrokes(["w", "j"]).await;
@@ -430,7 +438,7 @@ mod test {
b
"})
.await;
- let cursor = cx.update_editor(|editor, _| editor.pixel_position_of_cursor());
+ let cursor = cx.update_editor(|editor, cx| editor.pixel_position_of_cursor(cx));
cx.simulate_shared_keystrokes(["v"]).await;
cx.assert_shared_state(indoc! {"
a
@@ -438,7 +446,7 @@ mod test {
ˇ»b
"})
.await;
- cx.update_editor(|editor, _| assert_eq!(cursor, editor.pixel_position_of_cursor()));
+ cx.update_editor(|editor, cx| assert_eq!(cursor, editor.pixel_position_of_cursor(cx)));
// toggles off again
cx.simulate_shared_keystrokes(["v"]).await;
@@ -510,7 +518,7 @@ mod test {
b
ˇ"})
.await;
- let cursor = cx.update_editor(|editor, _| editor.pixel_position_of_cursor());
+ let cursor = cx.update_editor(|editor, cx| editor.pixel_position_of_cursor(cx));
cx.simulate_shared_keystrokes(["shift-v"]).await;
cx.assert_shared_state(indoc! {"
a
@@ -518,7 +526,7 @@ mod test {
ˇ"})
.await;
assert_eq!(cx.mode(), cx.neovim_mode().await);
- cx.update_editor(|editor, _| assert_eq!(cursor, editor.pixel_position_of_cursor()));
+ cx.update_editor(|editor, cx| assert_eq!(cursor, editor.pixel_position_of_cursor(cx)));
cx.simulate_shared_keystrokes(["x"]).await;
cx.assert_shared_state(indoc! {"
a
@@ -563,38 +571,41 @@ mod test {
#[gpui::test]
async fn test_visual_line_delete(cx: &mut gpui::TestAppContext) {
- let mut cx = NeovimBackedTestContext::new(cx)
- .await
- .binding(["shift-v", "x"]);
- cx.assert(indoc! {"
+ let mut cx = NeovimBackedTestContext::new(cx).await;
+
+ cx.set_shared_state(indoc! {"
The quˇick brown
fox jumps over
the lazy dog"})
.await;
- // Test pasting code copied on delete
- cx.simulate_shared_keystroke("p").await;
+ cx.simulate_shared_keystrokes(["shift-v", "x"]).await;
cx.assert_state_matches().await;
- cx.assert_all(indoc! {"
- The quick brown
- fox juˇmps over
- the laˇzy dog"})
- .await;
- let mut cx = cx.binding(["shift-v", "j", "x"]);
- cx.assert(indoc! {"
- The quˇick brown
- fox jumps over
- the lazy dog"})
- .await;
// Test pasting code copied on delete
cx.simulate_shared_keystroke("p").await;
cx.assert_state_matches().await;
- cx.assert_all(indoc! {"
+ cx.set_shared_state(indoc! {"
The quick brown
- fox juˇmps over
+ fox jumps over
the laˇzy dog"})
.await;
+ cx.simulate_shared_keystrokes(["shift-v", "x"]).await;
+ cx.assert_state_matches().await;
+ cx.assert_shared_clipboard("the lazy dog\n").await;
+
+ for marked_text in cx.each_marked_position(indoc! {"
+ The quˇick brown
+ fox jumps over
+ the lazy dog"})
+ {
+ cx.set_shared_state(&marked_text).await;
+ cx.simulate_shared_keystrokes(["shift-v", "j", "x"]).await;
+ cx.assert_state_matches().await;
+ // Test pasting code copied on delete
+ cx.simulate_shared_keystroke("p").await;
+ cx.assert_state_matches().await;
+ }
cx.set_shared_state(indoc! {"
The ˇlong line
@@ -608,86 +619,57 @@ mod test {
#[gpui::test]
async fn test_visual_yank(cx: &mut gpui::TestAppContext) {
- let cx = VimTestContext::new(cx, true).await;
- let mut cx = cx.binding(["v", "w", "y"]);
- cx.assert("The quick ˇbrown", "The quick ˇbrown");
- cx.assert_clipboard_content(Some("brown"));
- let mut cx = cx.binding(["v", "w", "j", "y"]);
- cx.assert(
- indoc! {"
- The ˇquick brown
- fox jumps over
- the lazy dog"},
- indoc! {"
+ let mut cx = NeovimBackedTestContext::new(cx).await;
+
+ cx.set_shared_state("The quick ˇbrown").await;
+ cx.simulate_shared_keystrokes(["v", "w", "y"]).await;
+ cx.assert_shared_state("The quick ˇbrown").await;
+ cx.assert_shared_clipboard("brown").await;
+
+ cx.set_shared_state(indoc! {"
The ˇquick brown
fox jumps over
- the lazy dog"},
- );
- cx.assert_clipboard_content(Some(indoc! {"
- quick brown
- fox jumps o"}));
- cx.assert(
- indoc! {"
- The quick brown
- fox jumps over
- the ˇlazy dog"},
- indoc! {"
- The quick brown
- fox jumps over
- the ˇlazy dog"},
- );
- cx.assert_clipboard_content(Some("lazy d"));
- cx.assert(
- indoc! {"
- The quick brown
- fox jumps ˇover
- the lazy dog"},
- indoc! {"
- The quick brown
- fox jumps ˇover
- the lazy dog"},
- );
- cx.assert_clipboard_content(Some(indoc! {"
- over
- t"}));
+ the lazy dog"})
+ .await;
+ cx.simulate_shared_keystrokes(["v", "w", "j", "y"]).await;
+ cx.assert_shared_state(indoc! {"
+ The ˇquick brown
+ fox jumps over
+ the lazy dog"})
+ .await;
+ cx.assert_shared_clipboard(indoc! {"
+ quick brown
+ fox jumps o"})
+ .await;
+
+ cx.set_shared_state(indoc! {"
+ The quick brown
+ fox jumps over
+ the ˇlazy dog"})
+ .await;
+ cx.simulate_shared_keystrokes(["v", "w", "j", "y"]).await;
+ cx.assert_shared_state(indoc! {"
+ The quick brown
+ fox jumps over
+ the ˇlazy dog"})
+ .await;
+ cx.assert_shared_clipboard("lazy d").await;
+ cx.simulate_shared_keystrokes(["shift-v", "y"]).await;
+ cx.assert_shared_clipboard("the lazy dog\n").await;
+
let mut cx = cx.binding(["v", "b", "k", "y"]);
- cx.assert(
- indoc! {"
- The ˇquick brown
- fox jumps over
- the lazy dog"},
- indoc! {"
- ˇThe quick brown
- fox jumps over
- the lazy dog"},
- );
+ cx.set_shared_state(indoc! {"
+ The ˇquick brown
+ fox jumps over
+ the lazy dog"})
+ .await;
+ cx.simulate_shared_keystrokes(["v", "b", "k", "y"]).await;
+ cx.assert_shared_state(indoc! {"
+ ˇThe quick brown
+ fox jumps over
+ the lazy dog"})
+ .await;
cx.assert_clipboard_content(Some("The q"));
- cx.assert(
- indoc! {"
- The quick brown
- fox jumps over
- the ˇlazy dog"},
- indoc! {"
- The quick brown
- ˇfox jumps over
- the lazy dog"},
- );
- cx.assert_clipboard_content(Some(indoc! {"
- fox jumps over
- the l"}));
- cx.assert(
- indoc! {"
- The quick brown
- fox jumps ˇover
- the lazy dog"},
- indoc! {"
- The ˇquick brown
- fox jumps over
- the lazy dog"},
- );
- cx.assert_clipboard_content(Some(indoc! {"
- quick brown
- fox jumps o"}));
}
#[gpui::test]
@@ -814,6 +796,26 @@ mod test {
"
})
.await;
+
+ //https://github.com/zed-industries/community/issues/1950
+ cx.set_shared_state(indoc! {
+ "Theˇ quick brown
+
+ fox jumps over
+ the lazy dog
+ "
+ })
+ .await;
+ cx.simulate_shared_keystrokes(["l", "ctrl-v", "j", "j"])
+ .await;
+ cx.assert_shared_state(indoc! {
+ "The «qˇ»uick brown
+
+ fox «jˇ»umps over
+ the lazy dog
+ "
+ })
+ .await;
}
#[gpui::test]
@@ -0,0 +1,23 @@
+{"SetOption":{"value":"foldmethod=manual"}}
+{"Put":{"state":"fn boop() {\n ˇbarp()\n bazp()\n}\n"}}
+{"Key":"shift-v"}
+{"Key":"j"}
+{"Key":"z"}
+{"Key":"f"}
+{"Key":"escape"}
+{"Key":"g"}
+{"Key":"g"}
+{"Get":{"state":"ˇfn boop() {\n barp()\n bazp()\n}\n","mode":"Normal"}}
+{"Key":"j"}
+{"Key":"j"}
+{"Get":{"state":"fn boop() {\n barp()\n bazp()\nˇ}\n","mode":"Normal"}}
+{"Key":"2"}
+{"Key":"k"}
+{"Get":{"state":"ˇfn boop() {\n barp()\n bazp()\n}\n","mode":"Normal"}}
+{"Key":"down"}
+{"Key":"y"}
+{"Key":"y"}
+{"ReadRegister":{"name":"\"","value":" barp()\n bazp()\n"}}
+{"Key":"z"}
+{"Key":"o"}
+{"Get":{"state":"fn boop() {\nˇ barp()\n bazp()\n}\n","mode":"Normal"}}
@@ -30,3 +30,9 @@
{"Key":"o"}
{"Key":"escape"}
{"Get":{"state":"Theˇouick\nbroo\nfoxo\njumo over the\n\nlazy dog\n","mode":"Normal"}}
+{"Put":{"state":"Theˇ quick brown\n\nfox jumps over\nthe lazy dog\n"}}
+{"Key":"l"}
+{"Key":"ctrl-v"}
+{"Key":"j"}
+{"Key":"j"}
+{"Get":{"state":"The «qˇ»uick brown\n\nfox «jˇ»umps over\nthe lazy dog\n","mode":"VisualBlock"}}
@@ -4,14 +4,11 @@
{"Get":{"state":"fox juˇmps over\nthe lazy dog","mode":"Normal"}}
{"Key":"p"}
{"Get":{"state":"fox jumps over\nˇThe quick brown\nthe lazy dog","mode":"Normal"}}
-{"Put":{"state":"The quick brown\nfox juˇmps over\nthe lazy dog"}}
-{"Key":"shift-v"}
-{"Key":"x"}
-{"Get":{"state":"The quick brown\nthe laˇzy dog","mode":"Normal"}}
{"Put":{"state":"The quick brown\nfox jumps over\nthe laˇzy dog"}}
{"Key":"shift-v"}
{"Key":"x"}
{"Get":{"state":"The quick brown\nfox juˇmps over","mode":"Normal"}}
+{"ReadRegister":{"name":"\"","value":"the lazy dog\n"}}
{"Put":{"state":"The quˇick brown\nfox jumps over\nthe lazy dog"}}
{"Key":"shift-v"}
{"Key":"j"}
@@ -19,16 +16,6 @@
{"Get":{"state":"the laˇzy dog","mode":"Normal"}}
{"Key":"p"}
{"Get":{"state":"the lazy dog\nˇThe quick brown\nfox jumps over","mode":"Normal"}}
-{"Put":{"state":"The quick brown\nfox juˇmps over\nthe lazy dog"}}
-{"Key":"shift-v"}
-{"Key":"j"}
-{"Key":"x"}
-{"Get":{"state":"The quˇick brown","mode":"Normal"}}
-{"Put":{"state":"The quick brown\nfox jumps over\nthe laˇzy dog"}}
-{"Key":"shift-v"}
-{"Key":"j"}
-{"Key":"x"}
-{"Get":{"state":"The quick brown\nfox juˇmps over","mode":"Normal"}}
{"Put":{"state":"The ˇlong line\nshould not\ncrash\n"}}
{"Key":"shift-v"}
{"Key":"$"}
@@ -0,0 +1,29 @@
+{"Put":{"state":"The quick ˇbrown"}}
+{"Key":"v"}
+{"Key":"w"}
+{"Key":"y"}
+{"Get":{"state":"The quick ˇbrown","mode":"Normal"}}
+{"ReadRegister":{"name":"\"","value":"brown"}}
+{"Put":{"state":"The ˇquick brown\nfox jumps over\nthe lazy dog"}}
+{"Key":"v"}
+{"Key":"w"}
+{"Key":"j"}
+{"Key":"y"}
+{"Get":{"state":"The ˇquick brown\nfox jumps over\nthe lazy dog","mode":"Normal"}}
+{"ReadRegister":{"name":"\"","value":"quick brown\nfox jumps o"}}
+{"Put":{"state":"The quick brown\nfox jumps over\nthe ˇlazy dog"}}
+{"Key":"v"}
+{"Key":"w"}
+{"Key":"j"}
+{"Key":"y"}
+{"Get":{"state":"The quick brown\nfox jumps over\nthe ˇlazy dog","mode":"Normal"}}
+{"ReadRegister":{"name":"\"","value":"lazy d"}}
+{"Key":"shift-v"}
+{"Key":"y"}
+{"ReadRegister":{"name":"\"","value":"the lazy dog\n"}}
+{"Put":{"state":"The ˇquick brown\nfox jumps over\nthe lazy dog"}}
+{"Key":"v"}
+{"Key":"b"}
+{"Key":"k"}
+{"Key":"y"}
+{"Get":{"state":"ˇThe quick brown\nfox jumps over\nthe lazy dog","mode":"Normal"}}
@@ -0,0 +1,50 @@
+{"SetOption":{"value":"wrap"}}
+{"SetOption":{"value":"columns=12"}}
+{"Put":{"state":"tˇwelve char twelve char\ntwelve char\n"}}
+{"Key":"j"}
+{"Get":{"state":"twelve char twelve char\ntˇwelve char\n","mode":"Normal"}}
+{"Key":"k"}
+{"Get":{"state":"tˇwelve char twelve char\ntwelve char\n","mode":"Normal"}}
+{"Key":"g"}
+{"Key":"j"}
+{"Get":{"state":"twelve char tˇwelve char\ntwelve char\n","mode":"Normal"}}
+{"Key":"g"}
+{"Key":"j"}
+{"Get":{"state":"twelve char twelve char\ntˇwelve char\n","mode":"Normal"}}
+{"Key":"g"}
+{"Key":"k"}
+{"Get":{"state":"twelve char tˇwelve char\ntwelve char\n","mode":"Normal"}}
+{"Key":"g"}
+{"Key":"^"}
+{"Get":{"state":"twelve char ˇtwelve char\ntwelve char\n","mode":"Normal"}}
+{"Key":"^"}
+{"Get":{"state":"ˇtwelve char twelve char\ntwelve char\n","mode":"Normal"}}
+{"Key":"g"}
+{"Key":"$"}
+{"Get":{"state":"twelve charˇ twelve char\ntwelve char\n","mode":"Normal"}}
+{"Key":"$"}
+{"Get":{"state":"twelve char twelve chaˇr\ntwelve char\n","mode":"Normal"}}
+{"Put":{"state":"tˇwelve char twelve char\ntwelve char\n"}}
+{"Key":"enter"}
+{"Get":{"state":"twelve char twelve char\nˇtwelve char\n","mode":"Normal"}}
+{"Put":{"state":"twelve char\ntˇwelve char twelve char\ntwelve char\n"}}
+{"Key":"o"}
+{"Key":"o"}
+{"Key":"escape"}
+{"Get":{"state":"twelve char\ntwelve char twelve char\nˇo\ntwelve char\n","mode":"Normal"}}
+{"Put":{"state":"twelve char\ntˇwelve char twelve char\ntwelve char\n"}}
+{"Key":"shift-a"}
+{"Key":"a"}
+{"Key":"escape"}
+{"Get":{"state":"twelve char\ntwelve char twelve charˇa\ntwelve char\n","mode":"Normal"}}
+{"Key":"shift-i"}
+{"Key":"i"}
+{"Key":"escape"}
+{"Get":{"state":"twelve char\nˇitwelve char twelve chara\ntwelve char\n","mode":"Normal"}}
+{"Key":"shift-d"}
+{"Get":{"state":"twelve char\nˇ\ntwelve char\n","mode":"Normal"}}
+{"Put":{"state":"twelve char\ntwelve char tˇwelve char\ntwelve char\n"}}
+{"Key":"shift-o"}
+{"Key":"o"}
+{"Key":"escape"}
+{"Get":{"state":"twelve char\nˇo\ntwelve char twelve char\ntwelve char\n","mode":"Normal"}}
@@ -22,6 +22,7 @@ test-support = [
db = { path = "../db" }
call = { path = "../call" }
client = { path = "../client" }
+channel = { path = "../channel" }
collections = { path = "../collections" }
context_menu = { path = "../context_menu" }
drag_and_drop = { path = "../drag_and_drop" }
@@ -158,9 +158,7 @@ pub trait Item: View {
fn should_update_tab_on_event(_: &Self::Event) -> bool {
false
}
- fn is_edit_event(_: &Self::Event) -> bool {
- false
- }
+
fn act_as_type<'a>(
&'a self,
type_id: TypeId,
@@ -205,7 +203,7 @@ pub trait Item: View {
fn show_toolbar(&self) -> bool {
true
}
- fn pixel_position_of_cursor(&self) -> Option<Vector2F> {
+ fn pixel_position_of_cursor(&self, _: &AppContext) -> Option<Vector2F> {
None
}
}
@@ -623,7 +621,7 @@ impl<T: Item> ItemHandle for ViewHandle<T> {
}
fn pixel_position_of_cursor(&self, cx: &AppContext) -> Option<Vector2F> {
- self.read(cx).pixel_position_of_cursor()
+ self.read(cx).pixel_position_of_cursor(cx)
}
}
@@ -674,7 +672,7 @@ pub trait FollowableItem: Item {
fn to_state_proto(&self, cx: &AppContext) -> Option<proto::view::Variant>;
fn from_state_proto(
pane: ViewHandle<Pane>,
- project: ModelHandle<Project>,
+ project: ViewHandle<Workspace>,
id: ViewId,
state: &mut Option<proto::view::Variant>,
cx: &mut AppContext,
@@ -42,7 +42,11 @@ where
let mut handler = MouseEventHandler::above::<Tag, _>(region_id, cx, |state, cx| {
// Observing hovered will cause a render when the mouse enters regardless
// of if mouse position was accessed before
- let drag_position = if state.hovered() { drag_position } else { None };
+ let drag_position = if state.dragging() {
+ drag_position
+ } else {
+ None
+ };
Stack::new()
.with_child(render_child(state, cx))
.with_children(drag_position.map(|drag_position| {
@@ -742,8 +742,8 @@ mod element {
while proposed_current_pixel_change.abs() > 0. {
let Some(current_ix) = successors.next() else {
- break;
- };
+ break;
+ };
let next_target_size = f32::max(
size(current_ix + 1, flexes.as_slice()) - proposed_current_pixel_change,
@@ -81,10 +81,7 @@ impl View for Toolbar {
ToolbarItemLocation::PrimaryLeft { flex } => {
primary_items_row_count = primary_items_row_count.max(item.row_count(cx));
- let left_item = ChildView::new(item.as_any(), cx)
- .aligned()
- .contained()
- .with_margin_right(spacing);
+ let left_item = ChildView::new(item.as_any(), cx).aligned();
if let Some((flex, expanded)) = flex {
primary_left_items.push(left_item.flex(flex, expanded).into_any());
} else {
@@ -94,11 +91,7 @@ impl View for Toolbar {
ToolbarItemLocation::PrimaryRight { flex } => {
primary_items_row_count = primary_items_row_count.max(item.row_count(cx));
- let right_item = ChildView::new(item.as_any(), cx)
- .aligned()
- .contained()
- .with_margin_left(spacing)
- .flex_float();
+ let right_item = ChildView::new(item.as_any(), cx).aligned().flex_float();
if let Some((flex, expanded)) = flex {
primary_right_items.push(right_item.flex(flex, expanded).into_any());
} else {
@@ -120,7 +113,7 @@ impl View for Toolbar {
let container_style = theme.container;
let height = theme.height * primary_items_row_count as f32;
- let mut primary_items = Flex::row();
+ let mut primary_items = Flex::row().with_spacing(spacing);
primary_items.extend(primary_left_items);
primary_items.extend(primary_right_items);
@@ -12,9 +12,10 @@ mod workspace_settings;
use anyhow::{anyhow, Context, Result};
use call::ActiveCall;
+use channel::ChannelStore;
use client::{
proto::{self, PeerId},
- ChannelStore, Client, TypedEnvelope, UserStore,
+ Client, TypedEnvelope, UserStore,
};
use collections::{hash_map, HashMap, HashSet};
use drag_and_drop::DragAndDrop;
@@ -344,7 +345,7 @@ pub fn register_project_item<I: ProjectItem>(cx: &mut AppContext) {
type FollowableItemBuilder = fn(
ViewHandle<Pane>,
- ModelHandle<Project>,
+ ViewHandle<Workspace>,
ViewId,
&mut Option<proto::view::Variant>,
&mut AppContext,
@@ -361,8 +362,8 @@ pub fn register_followable_item<I: FollowableItem>(cx: &mut AppContext) {
builders.insert(
TypeId::of::<I>(),
(
- |pane, project, id, state, cx| {
- I::from_state_proto(pane, project, id, state, cx).map(|task| {
+ |pane, workspace, id, state, cx| {
+ I::from_state_proto(pane, workspace, id, state, cx).map(|task| {
cx.foreground()
.spawn(async move { Ok(Box::new(task.await?) as Box<_>) })
})
@@ -2313,8 +2314,12 @@ impl Workspace {
item_id_to_move: usize,
cx: &mut ViewContext<Self>,
) {
- let Some(pane_to_split) = pane_to_split.upgrade(cx) else { return; };
- let Some(from) = from.upgrade(cx) else { return; };
+ let Some(pane_to_split) = pane_to_split.upgrade(cx) else {
+ return;
+ };
+ let Some(from) = from.upgrade(cx) else {
+ return;
+ };
let new_pane = self.add_pane(cx);
self.move_item(from.clone(), new_pane.clone(), item_id_to_move, 0, cx);
@@ -2847,7 +2852,13 @@ impl Workspace {
views: Vec<proto::View>,
cx: &mut AsyncAppContext,
) -> Result<()> {
- let project = this.read_with(cx, |this, _| this.project.clone())?;
+ let this = this
+ .upgrade(cx)
+ .ok_or_else(|| anyhow!("workspace dropped"))?;
+ let project = this
+ .read_with(cx, |this, _| this.project.clone())
+ .ok_or_else(|| anyhow!("window dropped"))?;
+
let replica_id = project
.read_with(cx, |project, _| {
project
@@ -2873,12 +2884,11 @@ impl Workspace {
let id = ViewId::from_proto(id.clone())?;
let mut variant = view.variant.clone();
if variant.is_none() {
- Err(anyhow!("missing variant"))?;
+ Err(anyhow!("missing view variant"))?;
}
for build_item in &item_builders {
- let task = cx.update(|cx| {
- build_item(pane.clone(), project.clone(), id, &mut variant, cx)
- });
+ let task = cx
+ .update(|cx| build_item(pane.clone(), this.clone(), id, &mut variant, cx));
if let Some(task) = task {
item_tasks.push(task);
leader_view_ids.push(id);
@@ -2906,7 +2916,7 @@ impl Workspace {
}
Some(())
- })?;
+ });
}
Ok(())
}
@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathansobo@gmail.com>"]
description = "The fast, collaborative code editor."
edition = "2021"
name = "zed"
-version = "0.101.0"
+version = "0.102.0"
publish = false
[lib]
@@ -21,10 +21,12 @@ activity_indicator = { path = "../activity_indicator" }
auto_update = { path = "../auto_update" }
breadcrumbs = { path = "../breadcrumbs" }
call = { path = "../call" }
+channel = { path = "../channel" }
cli = { path = "../cli" }
collab_ui = { path = "../collab_ui" }
collections = { path = "../collections" }
command_palette = { path = "../command_palette" }
+component_test = { path = "../component_test" }
context_menu = { path = "../context_menu" }
client = { path = "../client" }
clock = { path = "../clock" }
@@ -58,7 +60,7 @@ quick_action_bar = { path = "../quick_action_bar" }
recent_projects = { path = "../recent_projects" }
rpc = { path = "../rpc" }
settings = { path = "../settings" }
-staff_mode = { path = "../staff_mode" }
+feature_flags = { path = "../feature_flags" }
sum_tree = { path = "../sum_tree" }
text = { path = "../text" }
terminal_view = { path = "../terminal_view" }
@@ -93,7 +95,7 @@ postage.workspace = true
rand.workspace = true
regex.workspace = true
rsa = "0.4"
-rust-embed = { version = "6.8.1" }
+rust-embed.workspace = true
serde.workspace = true
serde_derive.workspace = true
serde_json.workspace = true
@@ -289,7 +289,7 @@ mod tests {
let language = crate::languages::language("c", tree_sitter_c::language(), None).await;
cx.add_model(|cx| {
- let mut buffer = Buffer::new(0, "", cx).with_language(language, cx);
+ let mut buffer = Buffer::new(0, cx.model_id() as u64, "").with_language(language, cx);
// empty function
buffer.edit([(0..0, "int main() {}")], None, cx);
@@ -1,6 +1,7 @@
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use collections::HashMap;
+use feature_flags::FeatureFlagAppExt;
use futures::{future::BoxFuture, FutureExt, StreamExt};
use gpui::AppContext;
use language::{LanguageRegistry, LanguageServerName, LspAdapter, LspAdapterDelegate};
@@ -9,7 +10,6 @@ use node_runtime::NodeRuntime;
use serde_json::json;
use settings::{KeymapFile, SettingsJsonSchemaParams, SettingsStore};
use smol::fs;
-use staff_mode::StaffMode;
use std::{
any::Any,
ffi::OsString,
@@ -104,7 +104,7 @@ impl LspAdapter for JsonLspAdapter {
cx: &mut AppContext,
) -> Option<BoxFuture<'static, serde_json::Value>> {
let action_names = cx.all_action_names().collect::<Vec<_>>();
- let staff_mode = cx.default_global::<StaffMode>().0;
+ let staff_mode = cx.is_staff();
let language_names = &self.languages.language_names();
let settings_schema = cx.global::<SettingsStore>().json_schema(
&SettingsJsonSchemaParams {
@@ -89,7 +89,9 @@ impl LspAdapter for PythonLspAdapter {
// to allow our own fuzzy score to be used to break ties.
//
// see https://github.com/microsoft/pyright/blob/95ef4e103b9b2f129c9320427e51b73ea7cf78bd/packages/pyright-internal/src/languageService/completionProvider.ts#LL2873
- let Some(sort_text) = &mut item.sort_text else { return };
+ let Some(sort_text) = &mut item.sort_text else {
+ return;
+ };
let mut parts = sort_text.split('.');
let Some(first) = parts.next() else { return };
let Some(second) = parts.next() else { return };
@@ -208,7 +210,7 @@ mod tests {
});
cx.add_model(|cx| {
- let mut buffer = Buffer::new(0, "", cx).with_language(language, cx);
+ let mut buffer = Buffer::new(0, cx.model_id() as u64, "").with_language(language, cx);
let append = |buffer: &mut Buffer, text: &str, cx: &mut ModelContext<Buffer>| {
let ix = buffer.len();
buffer.edit([(ix..ix, text)], Some(AutoindentMode::EachLine), cx);
@@ -474,7 +474,7 @@ mod tests {
let language = crate::languages::language("rust", tree_sitter_rust::language(), None).await;
cx.add_model(|cx| {
- let mut buffer = Buffer::new(0, "", cx).with_language(language, cx);
+ let mut buffer = Buffer::new(0, cx.model_id() as u64, "").with_language(language, cx);
// indent between braces
buffer.set_text("fn a() {}", cx);
@@ -356,8 +356,9 @@ mod tests {
"#
.unindent();
- let buffer =
- cx.add_model(|cx| language::Buffer::new(0, text, cx).with_language(language, cx));
+ let buffer = cx.add_model(|cx| {
+ language::Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx)
+ });
let outline = buffer.read_with(cx, |buffer, _| buffer.snapshot().outline(None).unwrap());
assert_eq!(
outline
@@ -3,13 +3,12 @@
use anyhow::{anyhow, Context, Result};
use backtrace::Backtrace;
+use channel::ChannelStore;
use cli::{
ipc::{self, IpcSender},
CliRequest, CliResponse, IpcHandshake, FORCE_CLI_MODE_ENV_VAR_NAME,
};
-use client::{
- self, ChannelStore, TelemetrySettings, UserStore, ZED_APP_VERSION, ZED_SECRET_CLIENT_TOKEN,
-};
+use client::{self, TelemetrySettings, UserStore, ZED_APP_VERSION, ZED_SECRET_CLIENT_TOKEN};
use db::kvp::KEY_VALUE_STORE;
use editor::{scroll::autoscroll::Autoscroll, Editor};
use futures::{
@@ -32,7 +31,7 @@ use std::{
env,
ffi::OsStr,
fs::OpenOptions,
- io::Write as _,
+ io::{IsTerminal, Write as _},
os::unix::prelude::OsStrExt,
panic,
path::{Path, PathBuf},
@@ -54,8 +53,6 @@ use uuid::Uuid;
use welcome::{show_welcome_experience, FIRST_OPEN};
use fs::RealFs;
-#[cfg(debug_assertions)]
-use staff_mode::StaffMode;
use util::{channel::RELEASE_CHANNEL, paths, ResultExt, TryFutureExt};
use workspace::AppState;
use zed::{
@@ -123,7 +120,10 @@ fn main() {
cx.set_global(*RELEASE_CHANNEL);
#[cfg(debug_assertions)]
- cx.set_global(StaffMode(true));
+ {
+ use feature_flags::FeatureFlagAppExt;
+ cx.set_staff(true);
+ }
let mut store = SettingsStore::default();
store
@@ -159,6 +159,7 @@ fn main() {
outline::init(cx);
project_symbols::init(cx);
project_panel::init(Assets, cx);
+ channel::init(&client);
diagnostics::init(cx);
search::init(cx);
semantic_index::init(fs.clone(), http.clone(), languages.clone(), cx);
@@ -166,6 +167,7 @@ fn main() {
terminal_view::init(cx);
copilot::init(http.clone(), node_runtime, cx);
ai::init(cx);
+ component_test::init(cx);
cx.spawn(|cx| watch_themes(fs.clone(), cx)).detach();
cx.spawn(|_| watch_languages(fs.clone(), languages.clone()))
@@ -633,8 +635,7 @@ async fn load_login_shell_environment() -> Result<()> {
}
fn stdout_is_a_pty() -> bool {
- std::env::var(FORCE_CLI_MODE_ENV_VAR_NAME).ok().is_none()
- && unsafe { libc::isatty(libc::STDOUT_FILENO as i32) != 0 }
+ std::env::var(FORCE_CLI_MODE_ENV_VAR_NAME).ok().is_none() && std::io::stdout().is_terminal()
}
fn collect_path_args() -> Vec<PathBuf> {
@@ -1706,6 +1706,8 @@ mod tests {
.remove_file(Path::new("/root/a/file2"), Default::default())
.await
.unwrap();
+ cx.foreground().run_until_parked();
+
workspace
.update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
.await
@@ -1,4 +1,4 @@
[toolchain]
-channel = "1.71"
+channel = "1.72"
components = [ "rustfmt" ]
targets = [ "x86_64-apple-darwin", "aarch64-apple-darwin", "wasm32-wasi" ]
@@ -5,11 +5,29 @@ set -e
build_flag="--release"
target_dir="release"
open_result=false
+local_only=false
+overwrite_local_app=false
+bundle_name=""
+
+# Function for displaying help info
+help_info() {
+ echo "
+Usage: ${0##*/} [options] [bundle_name]
+Build the application bundle.
+
+Options:
+ -d Compile in debug mode and print the app bundle's path.
+ -l Compile for local architecture only and copy bundle to /Applications.
+ -o Open the resulting DMG or the app itself in local mode.
+ -f Overwrite the local app bundle if it exists.
+ -h Display this help and exit.
+ "
+}
# If -o option is specified, the folder of the resulting dmg will be opened in finder
# If -d is specified, Zed will be compiled in debug mode and the application's path printed
# If -od or -do is specified Zed will be bundled in debug and the application will be run.
-while getopts 'od' flag
+while getopts 'dlfoh' flag
do
case "${flag}" in
o) open_result=true;;
@@ -17,9 +35,21 @@ do
build_flag="";
target_dir="debug"
;;
+ l) local_only=true;;
+ f) overwrite_local_app=true;;
+ h)
+ help_info
+ exit 0
+ ;;
esac
done
+shift $((OPTIND-1))
+
+if [ "$1" ]; then
+ bundle_name=$1
+fi
+
export ZED_BUNDLE=true
export MACOSX_DEPLOYMENT_TARGET=10.15.7
@@ -33,14 +63,24 @@ rustup target add wasm32-wasi
# Deal with versions of macOS that don't include libstdc++ headers
export CXXFLAGS="-stdlib=libc++"
-echo "Compiling zed binary for aarch64-apple-darwin"
-cargo build ${build_flag} --package zed --target aarch64-apple-darwin
-echo "Compiling zed binary for x86_64-apple-darwin"
-cargo build ${build_flag} --package zed --target x86_64-apple-darwin
-echo "Compiling cli binary for aarch64-apple-darwin"
-cargo build ${build_flag} --package cli --target aarch64-apple-darwin
-echo "Compiling cli binary for x86_64-apple-darwin"
-cargo build ${build_flag} --package cli --target x86_64-apple-darwin
+version_info=$(rustc --version --verbose)
+host_line=$(echo "$version_info" | grep host)
+local_target_triple=${host_line#*: }
+
+if [ "$local_only" = true ]; then
+ echo "Building for local target only."
+ cargo build ${build_flag} --package zed
+ cargo build ${build_flag} --package cli
+else
+ echo "Compiling zed binary for aarch64-apple-darwin"
+ cargo build ${build_flag} --package zed --target aarch64-apple-darwin
+ echo "Compiling zed binary for x86_64-apple-darwin"
+ cargo build ${build_flag} --package zed --target x86_64-apple-darwin
+ echo "Compiling cli binary for aarch64-apple-darwin"
+ cargo build ${build_flag} --package cli --target aarch64-apple-darwin
+ echo "Compiling cli binary for x86_64-apple-darwin"
+ cargo build ${build_flag} --package cli --target x86_64-apple-darwin
+fi
echo "Creating application bundle"
pushd crates/zed
@@ -50,27 +90,34 @@ sed \
-i .backup \
"s/package.metadata.bundle-${channel}/package.metadata.bundle/" \
Cargo.toml
-app_path=$(cargo bundle ${build_flag} --target x86_64-apple-darwin --select-workspace-root | xargs)
+
+if [ "$local_only" = true ]; then
+ app_path=$(cargo bundle ${build_flag} --select-workspace-root | xargs)
+else
+ app_path=$(cargo bundle ${build_flag} --target x86_64-apple-darwin --select-workspace-root | xargs)
+fi
mv Cargo.toml.backup Cargo.toml
popd
echo "Bundled ${app_path}"
-echo "Creating fat binaries"
-lipo \
- -create \
- target/{x86_64-apple-darwin,aarch64-apple-darwin}/${target_dir}/Zed \
- -output \
- "${app_path}/Contents/MacOS/zed"
-lipo \
- -create \
- target/{x86_64-apple-darwin,aarch64-apple-darwin}/${target_dir}/cli \
- -output \
- "${app_path}/Contents/MacOS/cli"
+if [ "$local_only" = false ]; then
+ echo "Creating fat binaries"
+ lipo \
+ -create \
+ target/{x86_64-apple-darwin,aarch64-apple-darwin}/${target_dir}/Zed \
+ -output \
+ "${app_path}/Contents/MacOS/zed"
+ lipo \
+ -create \
+ target/{x86_64-apple-darwin,aarch64-apple-darwin}/${target_dir}/cli \
+ -output \
+ "${app_path}/Contents/MacOS/cli"
+fi
echo "Copying WebRTC.framework into the frameworks folder"
mkdir "${app_path}/Contents/Frameworks"
-cp -R target/x86_64-apple-darwin/${target_dir}/WebRTC.framework "${app_path}/Contents/Frameworks/"
+cp -R target/${local_target_triple}/${target_dir}/WebRTC.framework "${app_path}/Contents/Frameworks/"
if [[ -n $MACOS_CERTIFICATE && -n $MACOS_CERTIFICATE_PASSWORD && -n $APPLE_NOTARIZATION_USERNAME && -n $APPLE_NOTARIZATION_PASSWORD ]]; then
echo "Signing bundle with Apple-issued certificate"
@@ -99,31 +146,55 @@ if [ "$target_dir" = "debug" ]; then
exit 0
fi
-dmg_target_directory="target/${target_dir}"
-dmg_source_directory="${dmg_target_directory}/dmg"
-dmg_file_path="${dmg_target_directory}/Zed.dmg"
-
-echo "Creating DMG"
-rm -rf ${dmg_source_directory}
-mkdir -p ${dmg_source_directory}
-mv "${app_path}" "${dmg_source_directory}"
-
-ln -s /Applications ${dmg_source_directory}
-hdiutil create -volname Zed -srcfolder "${dmg_source_directory}" -ov -format UDZO "${dmg_file_path}"
-# If someone runs this bundle script locally, a symlink will be placed in `dmg_source_directory`.
-# This symlink causes CPU issues with Zed if the Zed codebase is the project being worked on, so we simply remove it for now.
-rm ${dmg_source_directory}/Applications
+if [ "$local_only" = true ]; then
+ # If bundle_name is not set or empty, use the basename of $app_path
+ if [ -z "$bundle_name" ]; then
+ bundle_name=$(basename "$app_path")
+ else
+ # If bundle_name doesn't end in .app, append it
+ if [[ "$bundle_name" != *.app ]]; then
+ bundle_name="$bundle_name.app"
+ fi
+ fi
-echo "Adding license agreement to DMG"
-npm install --global dmg-license minimist
-dmg-license script/eula/eula.json "${dmg_file_path}"
+ if [ "$overwrite_local_app" = true ]; then
+ rm -rf "/Applications/$bundle_name"
+ fi
+ mv "$app_path" "/Applications/$bundle_name"
-if [[ -n $MACOS_CERTIFICATE && -n $MACOS_CERTIFICATE_PASSWORD && -n $APPLE_NOTARIZATION_USERNAME && -n $APPLE_NOTARIZATION_PASSWORD ]]; then
- echo "Notarizing DMG with Apple"
- npm install -g notarize-cli
- npx notarize-cli --file "${dmg_file_path}" --bundle-id dev.zed.Zed --username "$APPLE_NOTARIZATION_USERNAME" --password "$APPLE_NOTARIZATION_PASSWORD"
-fi
+ if [ "$open_result" = true ]; then
+ open "/Applications/$bundle_name"
+ else
+ echo "Installed application bundle:"
+ echo "/Applications/$bundle_name"
+ fi
+else
+ echo "Creating DMG"
+ dmg_target_directory="target/${target_dir}"
+ dmg_source_directory="${dmg_target_directory}/dmg"
+ dmg_file_path="${dmg_target_directory}/Zed.dmg"
+
+ rm -rf ${dmg_source_directory}
+ mkdir -p ${dmg_source_directory}
+ mv "${app_path}" "${dmg_source_directory}"
+
+ ln -s /Applications ${dmg_source_directory}
+ hdiutil create -volname Zed -srcfolder "${dmg_source_directory}" -ov -format UDZO "${dmg_file_path}"
+ # If someone runs this bundle script locally, a symlink will be placed in `dmg_source_directory`.
+ # This symlink causes CPU issues with Zed if the Zed codebase is the project being worked on, so we simply remove it for now.
+ rm ${dmg_source_directory}/Applications
+
+ echo "Adding license agreement to DMG"
+ npm install --global dmg-license minimist
+ dmg-license script/eula/eula.json "${dmg_file_path}"
+
+ if [[ -n $MACOS_CERTIFICATE && -n $MACOS_CERTIFICATE_PASSWORD && -n $APPLE_NOTARIZATION_USERNAME && -n $APPLE_NOTARIZATION_PASSWORD ]]; then
+ echo "Notarizing DMG with Apple"
+ npm install -g notarize-cli
+ npx notarize-cli --file "${dmg_file_path}" --bundle-id dev.zed.Zed --username "$APPLE_NOTARIZATION_USERNAME" --password "$APPLE_NOTARIZATION_PASSWORD"
+ fi
-if [ "$open_result" = true ]; then
- open $dmg_target_directory
+ if [ "$open_result" = true ]; then
+ open $dmg_target_directory
+ fi
fi
@@ -44,10 +44,10 @@ export function icon_button({ color, margin, layer, variant, size }: IconButtonO
}
const padding = {
- top: size === Button.size.Small ? 0 : 2,
- bottom: size === Button.size.Small ? 0 : 2,
- left: size === Button.size.Small ? 0 : 4,
- right: size === Button.size.Small ? 0 : 4,
+ top: size === Button.size.Small ? 2 : 2,
+ bottom: size === Button.size.Small ? 2 : 2,
+ left: size === Button.size.Small ? 2 : 4,
+ right: size === Button.size.Small ? 2 : 4,
}
return interactive({
@@ -55,10 +55,10 @@ export function icon_button({ color, margin, layer, variant, size }: IconButtonO
corner_radius: 6,
padding: padding,
margin: m,
- icon_width: 14,
+ icon_width: 12,
icon_height: 14,
- button_width: 20,
- button_height: 16,
+ button_width: size === Button.size.Small ? 16 : 20,
+ button_height: 14,
},
state: {
default: {
@@ -1,78 +0,0 @@
-import { Interactive, interactive, toggleable, Toggleable } from "../element"
-import { TextStyle, background, text } from "../style_tree/components"
-import { useTheme } from "../theme"
-import { Button } from "./button"
-
-type LabelButtonStyle = {
- corder_radius: number
- background: string | null
- padding: {
- top: number
- bottom: number
- left: number
- right: number
- },
- margin: Button.Options['margin']
- button_height: number
-} & TextStyle
-
-/** Styles an Interactive<ContainedText> */
-export function label_button_style(
- options: Partial<Button.Options> = {
- variant: Button.variant.Default,
- shape: Button.shape.Rectangle,
- states: {
- hovered: true,
- pressed: true
- }
- }
-): Interactive<LabelButtonStyle> {
- const theme = useTheme()
-
- const base = Button.button_base(options)
- const layer = options.layer ?? theme.middle
- const color = options.color ?? "base"
-
- const default_state = {
- ...base,
- ...text(layer ?? theme.lowest, "sans", color),
- font_size: Button.FONT_SIZE,
- }
-
- return interactive({
- base: default_state,
- state: {
- hovered: {
- background: background(layer, options.background ?? color, "hovered")
- },
- clicked: {
- background: background(layer, options.background ?? color, "pressed")
- }
- }
- })
-}
-
-/** Styles an Toggleable<Interactive<ContainedText>> */
-export function toggle_label_button_style(
- options: Partial<Button.ToggleableOptions> = {
- variant: Button.variant.Default,
- shape: Button.shape.Rectangle,
- states: {
- hovered: true,
- pressed: true
- }
- }
-): Toggleable<Interactive<LabelButtonStyle>> {
- const activeOptions = {
- ...options,
- color: options.active_color || options.color,
- background: options.active_background || options.background
- }
-
- return toggleable({
- state: {
- inactive: label_button_style(options),
- active: label_button_style(activeOptions),
- },
- })
-}
@@ -0,0 +1,34 @@
+type MarginOptions = {
+ all?: number
+ left?: number
+ right?: number
+ top?: number
+ bottom?: number
+}
+
+export type MarginStyle = {
+ top: number
+ bottom: number
+ left: number
+ right: number
+}
+
+export const margin_style = (options: MarginOptions): MarginStyle => {
+ const { all, top, bottom, left, right } = options
+
+ if (all !== undefined) return {
+ top: all,
+ bottom: all,
+ left: all,
+ right: all
+ }
+
+ if (top === undefined && bottom === undefined && left === undefined && right === undefined) throw new Error("Margin must have at least one value")
+
+ return {
+ top: top || 0,
+ bottom: bottom || 0,
+ left: left || 0,
+ right: right || 0
+ }
+}
@@ -0,0 +1,34 @@
+type PaddingOptions = {
+ all?: number
+ left?: number
+ right?: number
+ top?: number
+ bottom?: number
+}
+
+export type PaddingStyle = {
+ top: number
+ bottom: number
+ left: number
+ right: number
+}
+
+export const padding_style = (options: PaddingOptions): PaddingStyle => {
+ const { all, top, bottom, left, right } = options
+
+ if (all !== undefined) return {
+ top: all,
+ bottom: all,
+ left: all,
+ right: all
+ }
+
+ if (top === undefined && bottom === undefined && left === undefined && right === undefined) throw new Error("Padding must have at least one value")
+
+ return {
+ top: top || 0,
+ bottom: bottom || 0,
+ left: left || 0,
+ right: right || 0
+ }
+}
@@ -17,6 +17,7 @@ interface TextButtonOptions {
variant?: Button.Variant
color?: keyof Theme["lowest"]
margin?: Partial<Margin>
+ disabled?: boolean
text_properties?: TextProperties
}
@@ -29,6 +30,7 @@ export function text_button({
color,
layer,
margin,
+ disabled,
text_properties,
}: TextButtonOptions = {}) {
const theme = useTheme()
@@ -65,13 +67,17 @@ export function text_button({
state: {
default: {
background: background_color,
- color: foreground(layer ?? theme.lowest, color),
+ color:
+ disabled
+ ? foreground(layer ?? theme.lowest, "disabled")
+ : foreground(layer ?? theme.lowest, color),
},
- hovered: {
- background: background(layer ?? theme.lowest, color, "hovered"),
- color: foreground(layer ?? theme.lowest, color, "hovered"),
- },
- clicked: {
+ hovered:
+ disabled ? {} : {
+ background: background(layer ?? theme.lowest, color, "hovered"),
+ color: foreground(layer ?? theme.lowest, color, "hovered"),
+ },
+ clicked: disabled ? {} : {
background: background(layer ?? theme.lowest, color, "pressed"),
color: foreground(layer ?? theme.lowest, color, "pressed"),
},
@@ -12,7 +12,6 @@ import simple_message_notification from "./simple_message_notification"
import project_shared_notification from "./project_shared_notification"
import tooltip from "./tooltip"
import terminal from "./terminal"
-import contact_finder from "./contact_finder"
import collab_panel from "./collab_panel"
import toolbar_dropdown_menu from "./toolbar_dropdown_menu"
import incoming_call_notification from "./incoming_call_notification"
@@ -22,6 +21,7 @@ import assistant from "./assistant"
import { titlebar } from "./titlebar"
import editor from "./editor"
import feedback from "./feedback"
+import component_test from "./component_test"
import { useTheme } from "../common"
export default function app(): any {
@@ -54,6 +54,7 @@ export default function app(): any {
tooltip: tooltip(),
terminal: terminal(),
assistant: assistant(),
- feedback: feedback()
+ feedback: feedback(),
+ component_test: component_test(),
}
}
@@ -14,6 +14,7 @@ import { indicator } from "../component/indicator"
export default function contacts_panel(): any {
const theme = useTheme()
+ const CHANNEL_SPACING = 4 as const
const NAME_MARGIN = 6 as const
const SPACING = 12 as const
const INDENT_SIZE = 8 as const
@@ -152,6 +153,10 @@ export default function contacts_panel(): any {
return {
...collab_modals(),
+ disclosure: {
+ button: icon_button({ variant: "ghost", size: "sm" }),
+ spacing: CHANNEL_SPACING,
+ },
log_in_button: interactive({
base: {
background: background(theme.middle),
@@ -194,7 +199,7 @@ export default function contacts_panel(): any {
add_channel_button: header_icon_button,
leave_call_button: header_icon_button,
row_height: ITEM_HEIGHT,
- channel_indent: INDENT_SIZE * 2,
+ channel_indent: INDENT_SIZE * 2 + 2,
section_icon_size: 14,
header_row: {
...text(layer, "sans", { size: "sm", weight: "bold" }),
@@ -264,7 +269,7 @@ export default function contacts_panel(): any {
channel_name: {
...text(layer, "sans", { size: "sm" }),
margin: {
- left: NAME_MARGIN,
+ left: CHANNEL_SPACING,
},
},
list_empty_label_container: {
@@ -0,0 +1,27 @@
+
+import { useTheme } from "../common"
+import { text_button } from "../component/text_button"
+import { icon_button } from "../component/icon_button"
+import { text } from "./components"
+import { toggleable } from "../element"
+
+export default function contacts_panel(): any {
+ const theme = useTheme()
+
+ return {
+ button: text_button({}),
+ toggle: toggleable({
+ base: text_button({}),
+ state: {
+ active: {
+ ...text_button({ color: "accent" })
+ }
+ }
+ }),
+ disclosure: {
+ ...text(theme.lowest, "sans", "base"),
+ button: icon_button({ variant: "ghost" }),
+ spacing: 4,
+ }
+ }
+}
@@ -184,6 +184,7 @@ export default function editor(): any {
theme.players[6],
theme.players[7],
],
+ absent_selection: theme.players[7],
autocomplete: {
background: background(theme.middle),
corner_radius: 8,
@@ -2,9 +2,23 @@ import { with_opacity } from "../theme/color"
import { background, border, foreground, text } from "./components"
import { interactive, toggleable } from "../element"
import { useTheme } from "../theme"
+import { text_button } from "../component/text_button"
+
+const search_results = () => {
+ const theme = useTheme()
+
+ return {
+ // TODO: Add an activeMatchBackground on the rust side to differentiate between active and inactive
+ match_background: with_opacity(
+ foreground(theme.highest, "accent"),
+ 0.4
+ ),
+ }
+}
export default function search(): any {
const theme = useTheme()
+ const SEARCH_ROW_SPACING = 12
// Search input
const editor = {
@@ -34,12 +48,8 @@ export default function search(): any {
}
return {
- padding: { top: 16, bottom: 16, left: 16, right: 16 },
- // TODO: Add an activeMatchBackground on the rust side to differentiate between active and inactive
- match_background: with_opacity(
- foreground(theme.highest, "accent"),
- 0.4
- ),
+ padding: { top: 4, bottom: 4 },
+
option_button: toggleable({
base: interactive({
base: {
@@ -153,47 +163,13 @@ export default function search(): any {
},
},
}),
+ // Search tool buttons
+ // HACK: This is not how disabled elements should be created
+ // Disabled elements should use a disabled state of an interactive element, not a toggleable element with the inactive state being disabled
action_button: toggleable({
- base: interactive({
- base: {
- ...text(theme.highest, "mono", "disabled"),
- background: background(theme.highest, "disabled"),
- corner_radius: 6,
- border: border(theme.highest, "disabled"),
- padding: {
- // bottom: 2,
- left: 10,
- right: 10,
- // top: 2,
- },
- margin: {
- right: 9,
- }
- },
- state: {
- hovered: {}
- },
- }),
state: {
- active: interactive({
- base: {
- ...text(theme.highest, "mono", "on"),
- background: background(theme.highest, "on"),
- border: border(theme.highest, "on"),
- },
- state: {
- hovered: {
- ...text(theme.highest, "mono", "on", "hovered"),
- background: background(theme.highest, "on", "hovered"),
- border: border(theme.highest, "on", "hovered"),
- },
- clicked: {
- ...text(theme.highest, "mono", "on", "pressed"),
- background: background(theme.highest, "on", "pressed"),
- border: border(theme.highest, "on", "pressed"),
- },
- },
- })
+ inactive: text_button({ variant: "ghost", layer: theme.highest, disabled: true, margin: { right: SEARCH_ROW_SPACING }, text_properties: { size: "sm" } }),
+ active: text_button({ variant: "ghost", layer: theme.highest, margin: { right: SEARCH_ROW_SPACING }, text_properties: { size: "sm" } })
}
}),
editor,
@@ -207,15 +183,15 @@ export default function search(): any {
border: border(theme.highest, "negative"),
},
match_index: {
- ...text(theme.highest, "mono", "variant"),
+ ...text(theme.highest, "mono", { size: "sm" }),
padding: {
- left: 9,
+ right: SEARCH_ROW_SPACING,
},
},
option_button_group: {
padding: {
- left: 12,
- right: 12,
+ left: SEARCH_ROW_SPACING,
+ right: SEARCH_ROW_SPACING,
},
},
include_exclude_inputs: {
@@ -232,52 +208,26 @@ export default function search(): any {
...text(theme.highest, "mono", "variant"),
size: 13,
},
- dismiss_button: interactive({
- base: {
- color: foreground(theme.highest, "variant"),
- icon_width: 14,
- button_width: 32,
- corner_radius: 6,
- padding: {
- // // top: 10,
- // bottom: 10,
- left: 10,
- right: 10,
- },
-
- background: background(theme.highest, "variant"),
-
- border: border(theme.highest, "on"),
- },
- state: {
- hovered: {
- color: foreground(theme.highest, "hovered"),
- background: background(theme.highest, "variant", "hovered")
- },
- clicked: {
- color: foreground(theme.highest, "pressed"),
- background: background(theme.highest, "variant", "pressed")
- },
- },
- }),
+ // Input Icon
editor_icon: {
icon: {
- color: foreground(theme.highest, "variant"),
- asset: "icons/magnifying_glass_12.svg",
+ color: foreground(theme.highest, "disabled"),
+ asset: "icons/magnifying_glass.svg",
dimensions: {
- width: 12,
- height: 12,
+ width: 14,
+ height: 14,
}
},
container: {
- margin: { right: 6 },
- padding: { left: 2, right: 2 },
+ margin: { right: 4 },
+ padding: { left: 1, right: 1 },
}
},
+ // Toggle group buttons - Text | Regex | Semantic
mode_button: toggleable({
base: interactive({
base: {
- ...text(theme.highest, "mono", "variant"),
+ ...text(theme.highest, "mono", "variant", { size: "sm" }),
background: background(theme.highest, "variant"),
border: {
@@ -285,21 +235,24 @@ export default function search(): any {
left: false,
right: false
},
-
+ margin: {
+ top: 1,
+ bottom: 1,
+ },
padding: {
- left: 10,
- right: 10,
+ left: 12,
+ right: 12,
},
corner_radius: 6,
},
state: {
hovered: {
- ...text(theme.highest, "mono", "variant", "hovered"),
+ ...text(theme.highest, "mono", "variant", "hovered", { size: "sm" }),
background: background(theme.highest, "variant", "hovered"),
border: border(theme.highest, "on", "hovered"),
},
clicked: {
- ...text(theme.highest, "mono", "variant", "pressed"),
+ ...text(theme.highest, "mono", "variant", "pressed", { size: "sm" }),
background: background(theme.highest, "variant", "pressed"),
border: border(theme.highest, "on", "pressed"),
},
@@ -308,20 +261,23 @@ export default function search(): any {
state: {
active: {
default: {
- ...text(theme.highest, "mono", "on"),
+ ...text(theme.highest, "mono", "on", { size: "sm" }),
background: background(theme.highest, "on")
},
hovered: {
- ...text(theme.highest, "mono", "on", "hovered"),
+ ...text(theme.highest, "mono", "on", "hovered", { size: "sm" }),
background: background(theme.highest, "on", "hovered")
},
clicked: {
- ...text(theme.highest, "mono", "on", "pressed"),
+ ...text(theme.highest, "mono", "on", "pressed", { size: "sm" }),
background: background(theme.highest, "on", "pressed")
},
},
},
}),
+ // Next/Previous Match buttons
+ // HACK: This is not how disabled elements should be created
+ // Disabled elements should use a disabled state of an interactive element, not a toggleable element with the inactive state being disabled
nav_button: toggleable({
state: {
inactive: interactive({
@@ -334,7 +290,10 @@ export default function search(): any {
left: false,
right: false,
},
-
+ margin: {
+ top: 1,
+ bottom: 1,
+ },
padding: {
left: 10,
right: 10,
@@ -354,7 +313,10 @@ export default function search(): any {
left: false,
right: false,
},
-
+ margin: {
+ top: 1,
+ bottom: 1,
+ },
padding: {
left: 10,
right: 10,
@@ -375,13 +337,10 @@ export default function search(): any {
})
}
}),
- search_bar_row_height: 32,
+ search_bar_row_height: 34,
+ search_row_spacing: 8,
option_button_height: 22,
- modes_container: {
- margin: {
- right: 9
- }
- }
-
+ modes_container: {},
+ ...search_results()
}
}
@@ -129,7 +129,7 @@ export default function workspace(): any {
status_bar: statusBar(),
titlebar: titlebar(),
toolbar: {
- height: 34,
+ height: 42,
background: background(theme.highest),
border: border(theme.highest, { bottom: true }),
item_spacing: 8,
@@ -138,7 +138,7 @@ export default function workspace(): any {
variant: "ghost",
active_color: "accent",
}),
- padding: { left: 8, right: 8, top: 4, bottom: 4 },
+ padding: { left: 8, right: 8 },
},
breadcrumb_height: 24,
breadcrumbs: interactive({
@@ -21,8 +21,7 @@
"experimentalDecorators": true,
"strictPropertyInitialization": false,
"skipLibCheck": true,
- "useUnknownInCatchVariables": false,
- "baseUrl": "."
+ "useUnknownInCatchVariables": false
},
"exclude": [
"node_modules"