Detailed changes
@@ -15,7 +15,7 @@ dependencies = [
"collections",
"env_logger 0.11.8",
"file_icons",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"image",
"indoc",
@@ -75,7 +75,7 @@ dependencies = [
"collections",
"ctor",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"language",
"log",
@@ -100,7 +100,7 @@ dependencies = [
"editor",
"extension_host",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"language",
"project",
@@ -164,7 +164,7 @@ dependencies = [
"eval_utils",
"feature_flags",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"git",
"gpui",
"gpui_tokio",
@@ -229,7 +229,7 @@ dependencies = [
"async-broadcast",
"async-trait",
"derive_more",
- "futures 0.3.31",
+ "futures 0.3.32",
"log",
"serde",
"serde_json",
@@ -265,7 +265,7 @@ dependencies = [
"env_logger 0.11.8",
"feature_flags",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"google_ai",
"gpui",
"gpui_tokio",
@@ -346,7 +346,7 @@ dependencies = [
"feature_flags",
"file_icons",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"git",
"gpui",
@@ -631,7 +631,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"chrono",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"schemars",
"serde",
@@ -752,7 +752,7 @@ name = "askpass"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"log",
"net",
@@ -947,7 +947,7 @@ name = "async-pipe"
version = "0.1.3"
source = "git+https://github.com/zed-industries/async-pipe-rs?rev=82d00a04211cf4e1236029aa03e6b6ce2a74c553#82d00a04211cf4e1236029aa03e6b6ce2a74c553"
dependencies = [
- "futures 0.3.31",
+ "futures 0.3.32",
"log",
]
@@ -1185,7 +1185,7 @@ dependencies = [
"clock",
"ctor",
"db",
- "futures 0.3.31",
+ "futures 0.3.32",
"futures-lite 1.13.0",
"gpui",
"http_client",
@@ -1864,7 +1864,7 @@ dependencies = [
"anyhow",
"aws-sdk-bedrockruntime",
"aws-smithy-types",
- "futures 0.3.31",
+ "futures 0.3.32",
"schemars",
"serde",
"serde_json",
@@ -2153,7 +2153,7 @@ version = "0.1.0"
dependencies = [
"clock",
"ctor",
- "futures 0.3.31",
+ "futures 0.3.32",
"git2",
"gpui",
"language",
@@ -2350,7 +2350,7 @@ dependencies = [
"collections",
"feature_flags",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_tokio",
"language",
@@ -2671,7 +2671,7 @@ dependencies = [
"client",
"clock",
"collections",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"http_client",
"language",
@@ -2866,7 +2866,7 @@ dependencies = [
"derive_more",
"feature_flags",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_tokio",
"http_client",
@@ -2922,7 +2922,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"cloud_api_types",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_tokio",
"http_client",
@@ -3054,7 +3054,7 @@ dependencies = [
"anyhow",
"edit_prediction",
"edit_prediction_types",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"http_client",
"icons",
@@ -3101,7 +3101,7 @@ dependencies = [
"extension",
"file_finder",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"git",
"git_hosting_providers",
"git_ui",
@@ -3178,7 +3178,7 @@ dependencies = [
"collections",
"db",
"editor",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"gpui",
"livekit_client",
@@ -3439,7 +3439,7 @@ dependencies = [
"async-trait",
"base64 0.22.1",
"collections",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"http_client",
"log",
@@ -3500,7 +3500,7 @@ dependencies = [
"edit_prediction_types",
"editor",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"icons",
"indoc",
@@ -3534,7 +3534,7 @@ dependencies = [
"collections",
"dirs 4.0.0",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"http_client",
"log",
@@ -3984,7 +3984,7 @@ version = "0.1.0"
dependencies = [
"cfg-if",
"crash-handler",
- "futures 0.3.31",
+ "futures 0.3.32",
"log",
"mach2 0.5.0",
"minidumper",
@@ -4320,7 +4320,7 @@ dependencies = [
"collections",
"dap-types",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"http_client",
"language",
@@ -4362,7 +4362,7 @@ dependencies = [
"dap",
"dotenvy",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"http_client",
"json_dotpath",
@@ -4533,7 +4533,7 @@ dependencies = [
"anyhow",
"dap",
"editor",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"project",
"serde_json",
@@ -4560,7 +4560,7 @@ dependencies = [
"editor",
"feature_flags",
"file_icons",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"gpui",
"hex",
@@ -4615,7 +4615,7 @@ name = "deepseek"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"schemars",
"serde",
@@ -4735,7 +4735,7 @@ dependencies = [
"async-trait",
"env_logger 0.11.8",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"http 1.3.1",
"http_client",
@@ -5124,7 +5124,7 @@ dependencies = [
"edit_prediction_types",
"feature_flags",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"heapless",
"indoc",
@@ -5185,7 +5185,7 @@ dependencies = [
"extension",
"flate2",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gaoya",
"gpui",
"gpui_platform",
@@ -5237,7 +5237,7 @@ dependencies = [
"clock",
"collections",
"env_logger 0.11.8",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"indoc",
"language",
@@ -5286,7 +5286,7 @@ dependencies = [
"editor",
"feature_flags",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"indoc",
"language",
@@ -5331,7 +5331,7 @@ dependencies = [
"feature_flags",
"file_icons",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"git",
"gpui",
@@ -5745,7 +5745,7 @@ dependencies = [
"extension",
"feature_flags",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_platform",
"gpui_tokio",
@@ -5855,7 +5855,7 @@ dependencies = [
"collections",
"dap",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"heck 0.5.0",
"http_client",
@@ -5923,7 +5923,7 @@ dependencies = [
"dap",
"extension",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_tokio",
"http_client",
@@ -6131,7 +6131,7 @@ dependencies = [
"ctor",
"editor",
"file_icons",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"gpui",
"menu",
@@ -6433,7 +6433,7 @@ dependencies = [
"collections",
"dunce",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"git",
"gpui",
"ignore",
@@ -6531,9 +6531,9 @@ checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678"
[[package]]
name = "futures"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876"
+checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d"
dependencies = [
"futures-channel",
"futures-core",
@@ -6546,9 +6546,9 @@ dependencies = [
[[package]]
name = "futures-channel"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10"
+checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d"
dependencies = [
"futures-core",
"futures-sink",
@@ -6569,15 +6569,15 @@ dependencies = [
[[package]]
name = "futures-core"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
+checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d"
[[package]]
name = "futures-executor"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f"
+checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d"
dependencies = [
"futures-core",
"futures-task",
@@ -6597,9 +6597,9 @@ dependencies = [
[[package]]
name = "futures-io"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6"
+checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718"
[[package]]
name = "futures-lite"
@@ -6631,9 +6631,9 @@ dependencies = [
[[package]]
name = "futures-macro"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
+checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b"
dependencies = [
"proc-macro2",
"quote",
@@ -6642,21 +6642,21 @@ dependencies = [
[[package]]
name = "futures-sink"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7"
+checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893"
[[package]]
name = "futures-task"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988"
+checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393"
[[package]]
name = "futures-util"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
+checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6"
dependencies = [
"futures 0.1.31",
"futures-channel",
@@ -6665,9 +6665,9 @@ dependencies = [
"futures-macro",
"futures-sink",
"futures-task",
+ "libc",
"memchr",
"pin-project-lite",
- "pin-utils",
"slab",
"tokio-io",
]
@@ -7094,7 +7094,7 @@ dependencies = [
"async-trait",
"collections",
"derive_more",
- "futures 0.3.31",
+ "futures 0.3.32",
"git2",
"gpui",
"http_client",
@@ -7170,7 +7170,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"async-trait",
- "futures 0.3.31",
+ "futures 0.3.32",
"git",
"gpui",
"http_client",
@@ -7201,7 +7201,7 @@ dependencies = [
"db",
"editor",
"file_icons",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"git",
"gpui",
@@ -7406,7 +7406,7 @@ name = "google_ai"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"schemars",
"serde",
@@ -7476,7 +7476,7 @@ dependencies = [
"env_logger 0.11.8",
"etagere",
"foreign-types 0.5.0",
- "futures 0.3.31",
+ "futures 0.3.32",
"futures-concurrency",
"getrandom 0.3.4",
"gpui_macros",
@@ -7551,7 +7551,7 @@ dependencies = [
"calloop-wayland-source",
"collections",
"filedescriptor",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_wgpu",
"http_client",
@@ -7605,7 +7605,7 @@ dependencies = [
"dispatch2",
"etagere",
"foreign-types 0.5.0",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"image",
"itertools 0.14.0",
@@ -7674,7 +7674,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"console_error_panic_hook",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_wgpu",
"http_client",
@@ -7725,7 +7725,7 @@ dependencies = [
"anyhow",
"collections",
"etagere",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"image",
"itertools 0.14.0",
@@ -8209,7 +8209,7 @@ dependencies = [
"async-tar",
"bytes 1.11.1",
"derive_more",
- "futures 0.3.31",
+ "futures 0.3.32",
"http 1.3.1",
"http-body 1.0.1",
"log",
@@ -9092,7 +9092,7 @@ dependencies = [
"async-trait",
"bytes 1.11.1",
"chrono",
- "futures 0.3.31",
+ "futures 0.3.32",
"serde",
"serde_json",
"thiserror 2.0.17",
@@ -9108,7 +9108,7 @@ dependencies = [
"anyhow",
"async-trait",
"async-tungstenite",
- "futures 0.3.31",
+ "futures 0.3.32",
"jupyter-protocol",
"serde",
"serde_json",
@@ -9226,7 +9226,7 @@ dependencies = [
"ec4rs",
"encoding_rs",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"globset",
"gpui",
@@ -9306,7 +9306,7 @@ dependencies = [
"collections",
"extension",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"language",
"log",
@@ -9332,7 +9332,7 @@ dependencies = [
"collections",
"credentials_provider",
"env_var",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"http_client",
"icons",
@@ -9375,7 +9375,7 @@ dependencies = [
"extension",
"extension_host",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"google_ai",
"gpui",
"gpui_tokio",
@@ -9452,7 +9452,7 @@ dependencies = [
"command_palette_hooks",
"edit_prediction",
"editor",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"itertools 0.14.0",
"language",
@@ -9488,7 +9488,7 @@ dependencies = [
"chrono",
"collections",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"globset",
"gpui",
"grammars",
@@ -9881,7 +9881,7 @@ dependencies = [
"core-video",
"coreaudio-rs 0.12.1",
"cpal",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_platform",
"gpui_tokio",
@@ -9925,7 +9925,7 @@ name = "lmstudio"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"schemars",
"serde",
@@ -9996,7 +9996,7 @@ dependencies = [
"async-pipe",
"collections",
"ctor",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_util",
"log",
@@ -10136,7 +10136,7 @@ dependencies = [
"collections",
"env_logger 0.11.8",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_platform",
"html5ever 0.27.0",
@@ -10583,7 +10583,7 @@ name = "mistral"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"schemars",
"serde",
@@ -10772,7 +10772,7 @@ name = "nc"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"net",
"smol",
]
@@ -10868,7 +10868,7 @@ dependencies = [
"async-std",
"async-tar",
"async-trait",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"log",
"paths",
@@ -11192,7 +11192,7 @@ version = "0.9.2"
source = "git+https://github.com/KillTheMule/nvim-rs?rev=764dd270c642f77f10f3e19d05cc178a6cbe69f3#764dd270c642f77f10f3e19d05cc178a6cbe69f3"
dependencies = [
"async-trait",
- "futures 0.3.31",
+ "futures 0.3.32",
"log",
"rmp",
"rmpv",
@@ -11392,7 +11392,7 @@ name = "ollama"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"schemars",
"serde",
@@ -11499,7 +11499,7 @@ name = "open_ai"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"log",
"rand 0.9.2",
@@ -11517,7 +11517,7 @@ version = "0.1.0"
dependencies = [
"editor",
"file_icons",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"gpui",
"picker",
@@ -11538,7 +11538,7 @@ name = "open_router"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"schemars",
"serde",
@@ -11553,7 +11553,7 @@ name = "opencode"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"google_ai",
"http_client",
"schemars",
@@ -12867,7 +12867,7 @@ checksum = "af3fb618632874fb76937c2361a7f22afd393c982a2165595407edc75b06d3c1"
dependencies = [
"atomic",
"crossbeam-queue",
- "futures 0.3.31",
+ "futures 0.3.32",
"log",
"parking_lot",
"pin-project",
@@ -13100,7 +13100,7 @@ dependencies = [
"extension",
"fancy-regex 0.17.0",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"git",
"git2",
@@ -13167,7 +13167,7 @@ dependencies = [
"askpass",
"clap",
"client",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_platform",
"http_client",
@@ -13228,7 +13228,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"editor",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"gpui",
"language",
@@ -13270,7 +13270,7 @@ dependencies = [
"chrono",
"collections",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"gpui",
"handlebars 4.5.0",
@@ -14025,7 +14025,7 @@ dependencies = [
"extension",
"extension_host",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"gpui",
"http_client",
@@ -14209,7 +14209,7 @@ dependencies = [
"base64 0.22.1",
"collections",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"log",
"parking_lot",
@@ -14237,7 +14237,7 @@ dependencies = [
"anyhow",
"askpass",
"auto_update",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"log",
"markdown",
@@ -14275,7 +14275,7 @@ dependencies = [
"extension_host",
"fork",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"git",
"git2",
"git_hosting_providers",
@@ -14357,7 +14357,7 @@ dependencies = [
"editor",
"feature_flags",
"file_icons",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"html_to_markdown",
"http_client",
@@ -14482,7 +14482,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"bytes 1.11.1",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui_util",
"http_client",
"http_client_tls",
@@ -14654,7 +14654,7 @@ dependencies = [
"async-tungstenite",
"base64 0.22.1",
"collections",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"parking_lot",
"proto",
@@ -14747,7 +14747,7 @@ dependencies = [
"chrono",
"data-encoding",
"dirs 6.0.0",
- "futures 0.3.31",
+ "futures 0.3.32",
"glob",
"jupyter-protocol",
"serde",
@@ -15121,7 +15121,7 @@ dependencies = [
"backtrace",
"chrono",
"flume",
- "futures 0.3.31",
+ "futures 0.3.32",
"parking_lot",
"rand 0.9.2",
"web-time",
@@ -15349,7 +15349,7 @@ dependencies = [
"collections",
"editor",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"itertools 0.14.0",
"language",
@@ -15628,7 +15628,7 @@ dependencies = [
"collections",
"ec4rs",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"indoc",
"inventory",
@@ -15732,7 +15732,7 @@ dependencies = [
"editor",
"feature_flags",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"gpui",
"heck 0.5.0",
@@ -16108,7 +16108,7 @@ dependencies = [
"collections",
"extension",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"indoc",
"parking_lot",
@@ -16220,7 +16220,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"collections",
- "futures 0.3.31",
+ "futures 0.3.32",
"indoc",
"libsqlite3-sys",
"log",
@@ -17266,7 +17266,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"collections",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"hex",
"log",
@@ -17313,7 +17313,7 @@ dependencies = [
name = "telemetry"
version = "0.1.0"
dependencies = [
- "futures 0.3.31",
+ "futures 0.3.32",
"serde",
"serde_json",
"telemetry_events",
@@ -17368,7 +17368,7 @@ dependencies = [
"alacritty_terminal",
"anyhow",
"collections",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"itertools 0.14.0",
"libc",
@@ -17414,7 +17414,7 @@ dependencies = [
"db",
"dirs 4.0.0",
"editor",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"itertools 0.14.0",
"language",
@@ -18050,7 +18050,7 @@ dependencies = [
"anyhow",
"convert_case 0.8.0",
"editor",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"gpui",
"language",
@@ -18926,7 +18926,7 @@ dependencies = [
"command-fds",
"dirs 4.0.0",
"dunce",
- "futures 0.3.31",
+ "futures 0.3.32",
"futures-lite 1.13.0",
"git2",
"globset",
@@ -19083,7 +19083,7 @@ dependencies = [
"db",
"editor",
"env_logger 0.11.8",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"git_ui",
"gpui",
@@ -19447,7 +19447,7 @@ version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7516db7f32decdadb1c3b8deb1b7d78b9df7606c5cc2f6241737c2ab3a0258e"
dependencies = [
- "futures 0.3.31",
+ "futures 0.3.32",
"js-sys",
"wasm-bindgen",
"web-sys",
@@ -19803,7 +19803,7 @@ dependencies = [
"cap-std",
"cap-time-ext",
"fs-set-times",
- "futures 0.3.31",
+ "futures 0.3.32",
"io-extras",
"io-lifetimes",
"rustix 1.1.2",
@@ -19827,7 +19827,7 @@ dependencies = [
"anyhow",
"async-trait",
"bytes 1.11.1",
- "futures 0.3.31",
+ "futures 0.3.32",
"wasmtime",
]
@@ -1519,7 +1519,7 @@ mod tests {
stream: &mut UnboundedReceiver<EditAgentOutputEvent>,
) -> Vec<EditAgentOutputEvent> {
let mut events = Vec::new();
- while let Ok(Some(event)) = stream.try_next() {
+ while let Ok(event) = stream.try_recv() {
events.push(event);
}
events
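
The same try_next to try_recv rewrite recurs throughout the rest of this patch. As these call sites imply (the upstream 0.3.32 API is inferred from the diff, not verified against its docs), try_next yielded Ok(Some(item)) per item, while try_recv yields Ok(item) directly and an Err once the channel is empty or closed, so the Option layer disappears from every match. A minimal sketch of the drain-loop pattern, written against a plain closure so it stands alone:

    // Old shape: while let Ok(Some(item)) = rx.try_next() { ... }
    // New shape: while let Ok(item) = rx.try_recv() { ... }
    fn drain<T, E>(mut try_recv: impl FnMut() -> Result<T, E>) -> Vec<T> {
        let mut items = Vec::new();
        while let Ok(item) = try_recv() {
            items.push(item);
        }
        items
    }

    fn main() {
        let mut queue = vec![1, 2, 3].into_iter();
        // Stand-in for a channel receiver: errors once exhausted.
        let items = drain(|| queue.next().ok_or(()));
        assert_eq!(items, vec![1, 2, 3]);
    }
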
@@ -6206,9 +6206,9 @@ async fn test_edit_file_tool_allow_rule_skips_confirmation(cx: &mut TestAppConte
cx.run_until_parked();
- let event = rx.try_next();
+ let event = rx.try_recv();
assert!(
- !matches!(event, Ok(Some(Ok(ThreadEvent::ToolCallAuthorization(_))))),
+ !matches!(event, Ok(Ok(ThreadEvent::ToolCallAuthorization(_)))),
"expected no authorization request for allowed .md file"
);
}
@@ -6350,9 +6350,9 @@ async fn test_fetch_tool_allow_rule_skips_confirmation(cx: &mut TestAppContext)
cx.run_until_parked();
- let event = rx.try_next();
+ let event = rx.try_recv();
assert!(
- !matches!(event, Ok(Some(Ok(ThreadEvent::ToolCallAuthorization(_))))),
+ !matches!(event, Ok(Ok(ThreadEvent::ToolCallAuthorization(_)))),
"expected no authorization request for allowed docs.rs URL"
);
}
@@ -383,8 +383,8 @@ mod tests {
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Expected a single authorization prompt",
);
@@ -450,8 +450,8 @@ mod tests {
assert!(result.is_err(), "Tool should fail when policy denies");
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Deny policy should not emit symlink authorization prompt",
);
@@ -370,8 +370,8 @@ mod tests {
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Expected a single authorization prompt",
);
@@ -440,8 +440,8 @@ mod tests {
assert!(result.is_err(), "Tool should fail when policy denies");
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Deny policy should not emit symlink authorization prompt",
);
@@ -439,8 +439,8 @@ mod tests {
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Expected a single authorization prompt",
);
@@ -513,8 +513,8 @@ mod tests {
assert!(result.is_err(), "Tool should fail when policy denies");
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Deny policy should not emit symlink authorization prompt",
);
@@ -1188,7 +1188,7 @@ mod tests {
})
.await
.unwrap();
- assert!(stream_rx.try_next().is_err());
+ assert!(stream_rx.try_recv().is_err());
// Test 4: Path with .zed in the middle should require confirmation
let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
@@ -1251,7 +1251,7 @@ mod tests {
})
.await
.unwrap();
- assert!(stream_rx.try_next().is_err());
+ assert!(stream_rx.try_recv().is_err());
// 5.3: Normal in-project path with allow — no confirmation needed
let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
@@ -1268,7 +1268,7 @@ mod tests {
})
.await
.unwrap();
- assert!(stream_rx.try_next().is_err());
+ assert!(stream_rx.try_recv().is_err());
// 5.4: With Confirm default, non-project paths still prompt
cx.update(|cx| {
@@ -1586,8 +1586,8 @@ mod tests {
assert!(result.is_err(), "Tool should fail when policy denies");
assert!(
!matches!(
- stream_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ stream_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Deny policy should not emit symlink authorization prompt",
);
@@ -1658,7 +1658,7 @@ mod tests {
} else {
auth.await.unwrap();
assert!(
- stream_rx.try_next().is_err(),
+ stream_rx.try_recv().is_err(),
"Failed for case: {} - path: {} - expected no confirmation but got one",
description,
path
@@ -1769,7 +1769,7 @@ mod tests {
} else {
auth.await.unwrap();
assert!(
- stream_rx.try_next().is_err(),
+ stream_rx.try_recv().is_err(),
"Failed for case: {} - path: {} - expected no confirmation but got one",
description,
path
@@ -1862,7 +1862,7 @@ mod tests {
stream_rx.expect_authorization().await;
} else {
assert!(
- stream_rx.try_next().is_err(),
+ stream_rx.try_recv().is_err(),
"Failed for case: {} - path: {} - expected no confirmation but got one",
description,
path
@@ -1963,7 +1963,7 @@ mod tests {
})
.await
.unwrap();
- assert!(stream_rx.try_next().is_err());
+ assert!(stream_rx.try_recv().is_err());
}
}
@@ -1007,13 +1007,11 @@ mod tests {
"Expected private path validation error, got: {error}"
);
- let event = event_rx.try_next();
+ let event = event_rx.try_recv();
assert!(
!matches!(
event,
- Ok(Some(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(
- _
- ))))
+ Ok(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(_)))
),
"No authorization should be requested when validation fails before listing",
);
@@ -1055,13 +1053,11 @@ mod tests {
"Normal path should succeed without authorization"
);
- let event = event_rx.try_next();
+ let event = event_rx.try_recv();
assert!(
!matches!(
event,
- Ok(Some(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(
- _
- ))))
+ Ok(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(_)))
),
"No authorization should be requested for normal paths",
);
@@ -1112,13 +1108,11 @@ mod tests {
"Intra-project symlink should succeed without authorization: {result:?}",
);
- let event = event_rx.try_next();
+ let event = event_rx.try_recv();
assert!(
!matches!(
event,
- Ok(Some(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(
- _
- ))))
+ Ok(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(_)))
),
"No authorization should be requested for intra-project symlinks",
);
@@ -390,8 +390,8 @@ mod tests {
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Expected a single authorization prompt",
);
@@ -457,8 +457,8 @@ mod tests {
assert!(result.is_err(), "Tool should fail when policy denies");
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Deny policy should not emit symlink authorization prompt",
);
@@ -1353,13 +1353,11 @@ mod test {
"Expected private-files validation error, got: {error}"
);
- let event = event_rx.try_next();
+ let event = event_rx.try_recv();
assert!(
!matches!(
event,
- Ok(Some(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(
- _
- ))))
+ Ok(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(_)))
),
"No authorization should be requested when validation fails before read",
);
@@ -589,8 +589,8 @@ mod tests {
assert!(result.is_err(), "Tool should fail when policy denies");
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Deny policy should not emit symlink authorization prompt",
);
@@ -662,8 +662,8 @@ mod tests {
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Expected a single authorization prompt",
);
@@ -584,8 +584,8 @@ mod tests {
assert!(result.is_err(), "Tool should fail when policy denies");
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Deny policy should not emit symlink authorization prompt",
);
@@ -657,8 +657,8 @@ mod tests {
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Expected a single authorization prompt",
);
@@ -2493,7 +2493,7 @@ mod tests {
})
.await
.unwrap();
- assert!(stream_rx.try_next().is_err());
+ assert!(stream_rx.try_recv().is_err());
// Test 4: Path with .zed in the middle should require confirmation
let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
@@ -2540,7 +2540,7 @@ mod tests {
cx.update(|cx| tool.authorize(&PathBuf::from("/etc/hosts"), "test 5.2", &stream_tx, cx))
.await
.unwrap();
- assert!(stream_rx.try_next().is_err());
+ assert!(stream_rx.try_recv().is_err());
// 5.3: Normal in-project path with allow — no confirmation needed
let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
@@ -2554,7 +2554,7 @@ mod tests {
})
.await
.unwrap();
- assert!(stream_rx.try_next().is_err());
+ assert!(stream_rx.try_recv().is_err());
// 5.4: With Confirm default, non-project paths still prompt
cx.update(|cx| {
@@ -2767,8 +2767,8 @@ mod tests {
assert!(result.is_err(), "Tool should fail when policy denies");
assert!(
!matches!(
- stream_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ stream_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Deny policy should not emit symlink authorization prompt",
);
@@ -2810,7 +2810,7 @@ mod tests {
} else {
auth.await.unwrap();
assert!(
- stream_rx.try_next().is_err(),
+ stream_rx.try_recv().is_err(),
"Failed for case: {} - path: {} - expected no confirmation but got one",
description,
path
@@ -2887,7 +2887,7 @@ mod tests {
} else {
auth.await.unwrap();
assert!(
- stream_rx.try_next().is_err(),
+ stream_rx.try_recv().is_err(),
"Failed for case: {} - path: {} - expected no confirmation but got one",
description,
path
@@ -2947,7 +2947,7 @@ mod tests {
stream_rx.expect_authorization().await;
} else {
assert!(
- stream_rx.try_next().is_err(),
+ stream_rx.try_recv().is_err(),
"Failed for case: {} - path: {} - expected no confirmation but got one",
description,
path
@@ -3015,7 +3015,7 @@ mod tests {
})
.await
.unwrap();
- assert!(stream_rx.try_next().is_err());
+ assert!(stream_rx.try_recv().is_err());
}
}
@@ -681,17 +681,17 @@ mod tests {
);
assert!(
!matches!(
- rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"invalid command should not request authorization"
);
assert!(
!matches!(
- rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallUpdate(
+ rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallUpdate(
acp_thread::ToolCallUpdate::UpdateFields(_)
- ))))
+ )))
),
"invalid command should not emit a terminal card update"
);
@@ -810,8 +810,8 @@ mod tests {
);
assert!(
!matches!(
- rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"hardcoded denial should not request authorization"
);
@@ -1058,8 +1058,8 @@ mod tests {
);
assert!(
!matches!(
- rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"rejected command {command:?} should not request authorization"
);
@@ -202,6 +202,7 @@ impl ModelInput {
.text(cx)
.parse::<u64>()
.map_err(|_| SharedString::from("Max Tokens must be a number"))?,
+ reasoning_effort: None,
capabilities: ModelCapabilities {
tools: self.capabilities.supports_tools.selected(),
images: self.capabilities.supports_images.selected(),
@@ -90,6 +90,7 @@ impl ProfileSelector {
if let Some((next_profile_id, _)) = profiles.get_index(next_index) {
self.provider.set_profile(next_profile_id.clone(), cx);
+ cx.notify();
}
}
@@ -91,14 +91,16 @@ impl TimeBucket {
}
fn fuzzy_match_positions(query: &str, text: &str) -> Option<Vec<usize>> {
- let query = query.to_lowercase();
- let text_lower = text.to_lowercase();
let mut positions = Vec::new();
let mut query_chars = query.chars().peekable();
- for (i, c) in text_lower.chars().enumerate() {
- if query_chars.peek() == Some(&c) {
- positions.push(i);
- query_chars.next();
+ for (byte_idx, candidate_char) in text.char_indices() {
+ if let Some(&query_char) = query_chars.peek() {
+ if candidate_char.eq_ignore_ascii_case(&query_char) {
+ positions.push(byte_idx);
+ query_chars.next();
+ }
+ } else {
+ break;
}
}
if query_chars.peek().is_none() {
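
The switch to char_indices() above matters because chars().enumerate() yields character indices, while the returned positions are consumed as byte offsets; the boundary assertions in the test module added in the next hunk check exactly this. A self-contained reconstruction of the failure mode (not the original call site):

    fn main() {
        let text = "🔥abc";
        // The old code reported 'a' at char index 1, but byte 1 falls
        // inside the 4-byte emoji, so it is not a valid slice point.
        assert!(!text.is_char_boundary(1));
        // char_indices() yields byte offsets instead: 'a' starts at byte 4.
        let (byte_index, ch) = text.char_indices().nth(1).unwrap();
        assert_eq!((byte_index, ch), (4, 'a'));
        assert!(text.is_char_boundary(byte_index));
    }
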
@@ -1283,3 +1285,57 @@ impl PickerDelegate for ProjectPickerDelegate {
)
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_fuzzy_match_positions_returns_byte_indices() {
+ // "🔥abc" — the fire emoji is 4 bytes, so 'a' starts at byte 4, 'b' at 5, 'c' at 6.
+ let text = "🔥abc";
+ let positions = fuzzy_match_positions("ab", text).expect("should match");
+ assert_eq!(positions, vec![4, 5]);
+
+ // Verify positions are valid char boundaries (this is the assertion that
+ // panicked before the fix).
+ for &pos in &positions {
+ assert!(
+ text.is_char_boundary(pos),
+ "position {pos} is not a valid UTF-8 boundary in {text:?}"
+ );
+ }
+ }
+
+ #[test]
+ fn test_fuzzy_match_positions_ascii_still_works() {
+ let positions = fuzzy_match_positions("he", "hello").expect("should match");
+ assert_eq!(positions, vec![0, 1]);
+ }
+
+ #[test]
+ fn test_fuzzy_match_positions_case_insensitive() {
+ let positions = fuzzy_match_positions("HE", "hello").expect("should match");
+ assert_eq!(positions, vec![0, 1]);
+ }
+
+ #[test]
+ fn test_fuzzy_match_positions_no_match() {
+ assert!(fuzzy_match_positions("xyz", "hello").is_none());
+ }
+
+ #[test]
+ fn test_fuzzy_match_positions_multi_byte_interior() {
+ // "café" — 'é' is 2 bytes (0xC3 0xA9), so 'f' starts at byte 4, 'é' at byte 5.
+ let text = "café";
+ let positions = fuzzy_match_positions("fé", text).expect("should match");
+ // 'c'=0, 'a'=1, 'f'=2, 'é'=3..4 — wait, let's verify:
+ // Actually: c=1 byte, a=1 byte, f=1 byte, é=2 bytes
+ // So byte positions: c=0, a=1, f=2, é=3
+ assert_eq!(positions, vec![2, 3]);
+ for &pos in &positions {
+ assert!(
+ text.is_char_boundary(pos),
+ "position {pos} is not a valid UTF-8 boundary in {text:?}"
+ );
+ }
+ }
+}
@@ -2733,10 +2733,7 @@ mod tests {
assert!(refreshed);
assert_eq!(provider.access_token().as_deref(), Some("new-access"));
- let notified_session = rx
- .try_next()
- .unwrap()
- .expect("channel should have a session");
+ let notified_session = rx.try_recv().expect("channel should have a session");
assert_eq!(notified_session.tokens.access_token, "new-access");
assert_eq!(
notified_session.tokens.refresh_token.as_deref(),
@@ -2768,10 +2765,7 @@ mod tests {
let refreshed = provider.try_refresh().await.unwrap();
assert!(refreshed);
- let notified_session = rx
- .try_next()
- .unwrap()
- .expect("channel should have a session");
+ let notified_session = rx.try_recv().expect("channel should have a session");
assert_eq!(notified_session.tokens.access_token, "new-access");
assert_eq!(
notified_session.tokens.refresh_token.as_deref(),
@@ -1045,7 +1045,7 @@ mod tests {
});
executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT);
- assert!(copilot_requests.try_next().is_err());
+ assert!(copilot_requests.try_recv().is_err());
_ = editor.update(cx, |editor, window, cx| {
editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
@@ -1055,7 +1055,7 @@ mod tests {
});
executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT);
- assert!(copilot_requests.try_next().is_ok());
+ assert!(copilot_requests.try_recv().is_ok());
}
fn handle_copilot_completion_request(
@@ -2043,6 +2043,7 @@ impl BlockMapWriter<'_> {
multi_buffer: &MultiBuffer,
cx: &App,
) {
+ let multi_buffer_snapshot = multi_buffer.snapshot(cx);
let mut ranges = Vec::new();
let mut companion_buffer_ids = HashSet::default();
for buffer_id in buffer_ids {
@@ -2051,7 +2052,7 @@ impl BlockMapWriter<'_> {
} else {
self.block_map.folded_buffers.remove(&buffer_id);
}
- ranges.extend(multi_buffer.range_for_buffer(buffer_id, cx));
+ ranges.extend(multi_buffer_snapshot.range_for_buffer(buffer_id));
if let Some(companion) = &self.companion
&& companion.inverse.is_some()
{
@@ -11741,10 +11741,9 @@ impl Editor {
buffer_ids.extend(snapshot.buffer_ids_for_range(selection.range()))
}
- let buffer = self.buffer().read(cx);
let ranges = buffer_ids
.into_iter()
- .flat_map(|buffer_id| buffer.range_for_buffer(buffer_id, cx))
+ .flat_map(|buffer_id| snapshot.range_for_buffer(buffer_id))
.collect::<Vec<_>>();
self.restore_hunks_in_ranges(ranges, window, cx);
@@ -1166,7 +1166,7 @@ mod tests {
});
cx.simulate_mouse_move(hover_point, None, Modifiers::secondary_key());
cx.background_executor.run_until_parked();
- assert!(requests.try_next().is_err());
+ assert!(requests.try_recv().is_err());
cx.assert_editor_text_highlights(
HighlightKey::HoveredLinkState,
indoc! {"
@@ -1701,12 +1701,7 @@ impl rwh::HasWindowHandle for MacWindow {
impl rwh::HasDisplayHandle for MacWindow {
fn display_handle(&self) -> Result<rwh::DisplayHandle<'_>, rwh::HandleError> {
- // SAFETY: This is a no-op on macOS
- unsafe {
- Ok(rwh::DisplayHandle::borrow_raw(
- rwh::AppKitDisplayHandle::new().into(),
- ))
- }
+ Ok(rwh::DisplayHandle::appkit())
}
}
@@ -540,10 +540,9 @@ impl rwh::HasWindowHandle for WindowsWindow {
}
}
-// todo(windows)
impl rwh::HasDisplayHandle for WindowsWindow {
fn display_handle(&self) -> std::result::Result<rwh::DisplayHandle<'_>, rwh::HandleError> {
- unimplemented!()
+ Ok(rwh::DisplayHandle::windows())
}
}
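
The two display-handle hunks above replace hand-rolled construction (an unsafe borrow on macOS, an unimplemented!() on Windows) with raw-window-handle's safe constructors, which exist for display handle types that carry no platform data. A sketch of the pattern, assuming the rwh 0.6 API surface these hunks appear to target:

    use raw_window_handle::{DisplayHandle, HandleError};

    // AppKit and Windows display handles are zero-sized, so rwh can hand
    // out 'static handles safely, with no unsafe borrow_raw required.
    fn appkit_display() -> Result<DisplayHandle<'static>, HandleError> {
        Ok(DisplayHandle::appkit())
    }

    fn windows_display() -> Result<DisplayHandle<'static>, HandleError> {
        Ok(DisplayHandle::windows())
    }
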
@@ -402,7 +402,7 @@ impl LanguageModel for OpenAiCompatibleLanguageModel {
self.model.capabilities.parallel_tool_calls,
self.model.capabilities.prompt_cache_key,
self.max_output_tokens(),
- None,
+ self.model.reasoning_effort.clone(),
);
let completions = self.stream_completion(request, cx);
async move {
@@ -417,7 +417,7 @@ impl LanguageModel for OpenAiCompatibleLanguageModel {
self.model.capabilities.parallel_tool_calls,
self.model.capabilities.prompt_cache_key,
self.max_output_tokens(),
- None,
+ self.model.reasoning_effort.clone(),
);
let completions = self.stream_response(request, cx);
async move {
@@ -154,6 +154,8 @@ impl MarkdownStyle {
base_text_style: text_style.clone(),
syntax: cx.theme().syntax().clone(),
selection_background_color: colors.element_selection_background,
+ rule_color: colors.border,
+ block_quote_border_color: colors.border,
code_block_overflow_x_scroll: true,
heading_level_styles: Some(HeadingLevelStyles {
h1: Some(TextStyleRefinement {
@@ -1834,14 +1834,6 @@ impl MultiBuffer {
cx.notify();
}
- pub fn range_for_buffer(&self, buffer_id: BufferId, cx: &App) -> Option<Range<Point>> {
- let snapshot = self.read(cx);
- let path_key = snapshot.path_key_index_for_buffer(buffer_id)?;
- let start = Anchor::in_buffer(path_key, text::Anchor::min_for_buffer(buffer_id));
- let end = Anchor::in_buffer(path_key, text::Anchor::max_for_buffer(buffer_id));
- Some((start..end).to_point(&snapshot))
- }
-
// If point is at the end of the buffer, the last excerpt is returned
pub fn point_to_buffer_offset<T: ToOffset>(
&self,
@@ -4792,10 +4784,10 @@ impl MultiBufferSnapshot {
let mut diff_transforms_cursor = self
.diff_transforms
.cursor::<Dimensions<ExcerptDimension<MBD>, OutputDimension<MBD>>>(());
- diff_transforms_cursor.next();
if let Some(excerpt) = item {
if !excerpt.contains(anchor, self) {
+ diff_transforms_cursor.seek(&excerpt_start_position, Bias::Left);
return self.summary_for_excerpt_position_without_hunks(
Bias::Left,
excerpt_start_position,
@@ -4822,9 +4814,7 @@ impl MultiBufferSnapshot {
position += summary - excerpt_buffer_start;
}
- if diff_transforms_cursor.start().0 < position {
- diff_transforms_cursor.seek_forward(&position, Bias::Left);
- }
+ diff_transforms_cursor.seek(&position, Bias::Left);
self.summary_for_anchor_with_excerpt_position(
*anchor,
position,
@@ -4832,7 +4822,7 @@ impl MultiBufferSnapshot {
&buffer_snapshot,
)
} else {
- diff_transforms_cursor.seek_forward(&excerpt_start_position, Bias::Left);
+ diff_transforms_cursor.seek(&excerpt_start_position, Bias::Left);
self.summary_for_excerpt_position_without_hunks(
Bias::Right,
excerpt_start_position,
@@ -5040,6 +5030,7 @@ impl MultiBufferSnapshot {
if let Some(excerpt) = cursor.item() {
let buffer_snapshot = excerpt.buffer_snapshot(self);
if !excerpt.contains(&excerpt_anchor, self) {
+ diff_transforms_cursor.seek_forward(&excerpt_start_position, Bias::Left);
let position = self.summary_for_excerpt_position_without_hunks(
Bias::Left,
excerpt_start_position,
@@ -6740,6 +6731,13 @@ impl MultiBufferSnapshot {
.graphemes(true)
.count()
}
+
+ pub fn range_for_buffer(&self, buffer_id: BufferId) -> Option<Range<Point>> {
+ let path_key = self.path_key_index_for_buffer(buffer_id)?;
+ let start = Anchor::in_buffer(path_key, text::Anchor::min_for_buffer(buffer_id));
+ let end = Anchor::in_buffer(path_key, text::Anchor::max_for_buffer(buffer_id));
+ Some((start..end).to_point(self))
+ }
}
#[cfg(any(test, feature = "test-support"))]
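
Moving range_for_buffer from MultiBuffer onto MultiBufferSnapshot lets callers (see the BlockMapWriter and Editor hunks earlier) take one snapshot and resolve many buffers against it, instead of threading cx through every call so each lookup re-reads the multibuffer. A toy analog of the refactor, with all names hypothetical:

    use std::collections::HashMap;
    use std::ops::Range;

    struct Snapshot {
        ranges: HashMap<u32, Range<usize>>, // buffer_id -> range
    }

    impl Snapshot {
        // The lookup lives on the immutable snapshot, so repeated calls
        // need no further access to the shared owner.
        fn range_for_buffer(&self, buffer_id: u32) -> Option<Range<usize>> {
            self.ranges.get(&buffer_id).cloned()
        }
    }

    fn main() {
        let snapshot = Snapshot {
            ranges: HashMap::from([(1, 0..10), (2, 10..25)]),
        };
        // One snapshot, many queries; ids without a range are skipped.
        let ranges: Vec<_> = [1u32, 2, 3]
            .into_iter()
            .flat_map(|id| snapshot.range_for_buffer(id))
            .collect();
        assert_eq!(ranges, vec![0..10, 10..25]);
    }
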
@@ -2898,10 +2898,11 @@ struct ReferenceExcerpt {
struct ReferenceRegion {
buffer_id: Option<BufferId>,
range: Range<usize>,
- buffer_range: Option<Range<Point>>,
+ buffer_range: Range<Point>,
+ // if this is a deleted hunk, the main buffer anchor to which the deleted content is attached
+ deleted_hunk_anchor: Option<text::Anchor>,
status: Option<DiffHunkStatus>,
- excerpt_range: Option<Range<text::Anchor>>,
- excerpt_path_key_index: Option<PathKeyIndex>,
+ excerpt: Option<ReferenceExcerpt>,
}
impl ReferenceMultibuffer {
@@ -3055,7 +3056,15 @@ impl ReferenceMultibuffer {
}
}
- fn expected_content(&self, cx: &App) -> (String, Vec<RowInfo>, HashSet<MultiBufferRow>) {
+ fn expected_content(
+ &self,
+ cx: &App,
+ ) -> (
+ String,
+ Vec<RowInfo>,
+ HashSet<MultiBufferRow>,
+ Vec<ReferenceRegion>,
+ ) {
use util::maybe;
let mut text = String::new();
@@ -3093,12 +3102,10 @@ impl ReferenceMultibuffer {
regions.push(ReferenceRegion {
buffer_id: Some(buffer_id),
range: len..text.len(),
- buffer_range: Some(
- (offset..hunk_base_range.start).to_point(&buffer),
- ),
+ buffer_range: (offset..hunk_base_range.start).to_point(&buffer),
status: None,
- excerpt_range: Some(excerpt.range.clone()),
- excerpt_path_key_index: Some(excerpt.path_key_index),
+ excerpt: Some(excerpt.clone()),
+ deleted_hunk_anchor: None,
});
}
}
@@ -3110,10 +3117,10 @@ impl ReferenceMultibuffer {
regions.push(ReferenceRegion {
buffer_id: Some(buffer_id),
range: len..text.len(),
- buffer_range: Some(hunk_base_range.to_point(&buffer)),
+ buffer_range: hunk_base_range.to_point(&buffer),
status: Some(DiffHunkStatus::deleted(hunk.secondary_status)),
- excerpt_range: Some(excerpt.range.clone()),
- excerpt_path_key_index: Some(excerpt.path_key_index),
+ excerpt: Some(excerpt.clone()),
+ deleted_hunk_anchor: None,
});
}
@@ -3127,10 +3134,10 @@ impl ReferenceMultibuffer {
regions.push(ReferenceRegion {
buffer_id: Some(buffer_id),
range: len..text.len(),
- buffer_range: Some((offset..buffer_range.end).to_point(&buffer)),
+ buffer_range: (offset..buffer_range.end).to_point(&buffer),
status: None,
- excerpt_range: Some(excerpt.range.clone()),
- excerpt_path_key_index: Some(excerpt.path_key_index),
+ excerpt: Some(excerpt.clone()),
+ deleted_hunk_anchor: None,
});
} else {
let diff = self.diffs.get(&buffer_id).unwrap().read(cx).snapshot(cx);
@@ -3181,10 +3188,10 @@ impl ReferenceMultibuffer {
regions.push(ReferenceRegion {
buffer_id: Some(buffer_id),
range: len..text.len(),
- buffer_range: Some((offset..hunk_range.start).to_point(&buffer)),
+ buffer_range: (offset..hunk_range.start).to_point(&buffer),
status: None,
- excerpt_range: Some(excerpt.range.clone()),
- excerpt_path_key_index: Some(excerpt.path_key_index),
+ excerpt: Some(excerpt.clone()),
+ deleted_hunk_anchor: None,
});
}
@@ -3201,12 +3208,10 @@ impl ReferenceMultibuffer {
regions.push(ReferenceRegion {
buffer_id: Some(base_buffer.remote_id()),
range: len..text.len(),
- buffer_range: Some(
- hunk.diff_base_byte_range.to_point(&base_buffer),
- ),
+ buffer_range: hunk.diff_base_byte_range.to_point(&base_buffer),
status: Some(DiffHunkStatus::deleted(hunk.secondary_status)),
- excerpt_range: Some(excerpt.range.clone()),
- excerpt_path_key_index: Some(excerpt.path_key_index),
+ excerpt: Some(excerpt.clone()),
+ deleted_hunk_anchor: Some(hunk.buffer_range.start),
});
}
@@ -3221,10 +3226,10 @@ impl ReferenceMultibuffer {
let region = ReferenceRegion {
buffer_id: Some(buffer_id),
range,
- buffer_range: Some((offset..hunk_range.end).to_point(&buffer)),
+ buffer_range: (offset..hunk_range.end).to_point(&buffer),
status: Some(DiffHunkStatus::added(hunk.secondary_status)),
- excerpt_range: Some(excerpt.range.clone()),
- excerpt_path_key_index: Some(excerpt.path_key_index),
+ excerpt: Some(excerpt.clone()),
+ deleted_hunk_anchor: None,
};
offset = hunk_range.end;
regions.push(region);
@@ -3238,10 +3243,10 @@ impl ReferenceMultibuffer {
regions.push(ReferenceRegion {
buffer_id: Some(buffer_id),
range: len..text.len(),
- buffer_range: Some((offset..buffer_range.end).to_point(&buffer)),
+ buffer_range: (offset..buffer_range.end).to_point(&buffer),
status: None,
- excerpt_range: Some(excerpt.range.clone()),
- excerpt_path_key_index: Some(excerpt.path_key_index),
+ excerpt: Some(excerpt.clone()),
+ deleted_hunk_anchor: None,
});
}
}
@@ -3251,13 +3256,16 @@ impl ReferenceMultibuffer {
regions.push(ReferenceRegion {
buffer_id: None,
range: 0..1,
- buffer_range: Some(Point::new(0, 0)..Point::new(0, 1)),
+ buffer_range: Point::new(0, 0)..Point::new(0, 1),
status: None,
- excerpt_range: None,
- excerpt_path_key_index: None,
+ excerpt: None,
+ deleted_hunk_anchor: None,
});
} else {
text.pop();
+ let region = regions.last_mut().unwrap();
+ assert!(region.deleted_hunk_anchor.is_none());
+ region.range.end -= 1;
}
// Retrieve the row info using the region that contains
@@ -3268,37 +3276,38 @@ impl ReferenceMultibuffer {
.map(|line| {
let row_info = regions
.iter()
- .position(|region| region.range.contains(&ix))
+ .rposition(|region| {
+ region.range.contains(&ix) || (ix == text.len() && ix == region.range.end)
+ })
.map_or(RowInfo::default(), |region_ix| {
let region = regions[region_ix].clone();
- let buffer_row = region.buffer_range.as_ref().map(|buffer_range| {
- buffer_range.start.row
- + text[region.range.start..ix].matches('\n').count() as u32
- });
- let main_buffer = self
- .excerpts
- .iter()
- .find(|e| e.range == region.excerpt_range.clone().unwrap())
- .map(|e| e.buffer.clone());
+ let buffer_row = region.buffer_range.start.row
+ + text[region.range.start..ix].matches('\n').count() as u32;
+ let main_buffer = region.excerpt.as_ref().map(|e| e.buffer.clone());
+ let excerpt_range = region.excerpt.as_ref().map(|e| &e.range);
let is_excerpt_start = region_ix == 0
- || &regions[region_ix - 1].excerpt_range != &region.excerpt_range
+ || regions[region_ix - 1].excerpt.as_ref().map(|e| &e.range)
+ != excerpt_range
|| regions[region_ix - 1].range.is_empty();
let mut is_excerpt_end = region_ix == regions.len() - 1
- || &regions[region_ix + 1].excerpt_range != &region.excerpt_range;
+ || regions[region_ix + 1].excerpt.as_ref().map(|e| &e.range)
+ != excerpt_range;
let is_start = !text[region.range.start..ix].contains('\n');
+ let is_last_region = region_ix == regions.len() - 1;
let mut is_end = if region.range.end > text.len() {
!text[ix..].contains('\n')
} else {
- text[ix..region.range.end.min(text.len())]
+ let remaining_newlines = text[ix..region.range.end.min(text.len())]
.matches('\n')
- .count()
- == 1
+ .count();
+ remaining_newlines == if is_last_region { 0 } else { 1 }
};
if region_ix < regions.len() - 1
&& !text[ix..].contains("\n")
&& (region.status == Some(DiffHunkStatus::added_none())
|| region.status.is_some_and(|s| s.is_deleted()))
- && regions[region_ix + 1].excerpt_range == region.excerpt_range
+ && regions[region_ix + 1].excerpt.as_ref().map(|e| &e.range)
+ == excerpt_range
&& regions[region_ix + 1].range.start == text.len()
{
is_end = true;
@@ -3308,7 +3317,6 @@ impl ReferenceMultibuffer {
MultiBufferRow(text[..ix].matches('\n').count() as u32);
let mut expand_direction = None;
if let Some(buffer) = &main_buffer {
- let buffer_row = buffer_row.unwrap();
let needs_expand_up = is_excerpt_start && is_start && buffer_row > 0;
let needs_expand_down = is_excerpt_end
&& is_end
@@ -3326,19 +3334,18 @@ impl ReferenceMultibuffer {
RowInfo {
buffer_id: region.buffer_id,
diff_status: region.status,
- buffer_row,
+ buffer_row: Some(buffer_row),
wrapped_buffer_row: None,
multibuffer_row: Some(multibuffer_row),
expand_info: maybe!({
let direction = expand_direction?;
- let excerpt_range = region.excerpt_range?;
- let path_key_index = region.excerpt_path_key_index?;
+ let excerpt = region.excerpt.as_ref()?;
Some(ExpandInfo {
direction,
start_anchor: Anchor::in_buffer(
- path_key_index,
- excerpt_range.start,
+ excerpt.path_key_index,
+ excerpt.range.start,
),
})
}),
@@ -3349,7 +3356,7 @@ impl ReferenceMultibuffer {
})
.collect();
- (text, row_infos, excerpt_boundary_rows)
+ (text, row_infos, excerpt_boundary_rows, regions)
}
fn diffs_updated(&mut self, cx: &mut App) {
@@ -3414,6 +3421,95 @@ impl ReferenceMultibuffer {
})
});
}
+
+ fn anchor_to_offset(&self, anchor: &Anchor, cx: &App) -> Option<MultiBufferOffset> {
+ if anchor.diff_base_anchor().is_some() {
+ panic!("reference multibuffer cannot yet resolve anchors inside deleted hunks");
+ }
+ let (anchor, snapshot, path_key) = self.anchor_to_buffer_anchor(anchor, cx)?;
+ // TODO(cole): maybe make this and expected_content share a common helper
+ let (text, _, _, regions) = self.expected_content(cx);
+
+ // Locate the first region that contains or is past the putative location of the buffer anchor
+ let ix = regions.partition_point(|region| {
+ let excerpt = region
+ .excerpt
+ .as_ref()
+ .expect("should have no buffers in empty reference multibuffer");
+ excerpt
+ .path_key
+ .cmp(&path_key)
+ .then_with(|| {
+ if excerpt.range.end.cmp(&anchor, &snapshot).is_lt() {
+ Ordering::Less
+ } else if excerpt.range.start.cmp(&anchor, &snapshot).is_gt() {
+ Ordering::Greater
+ } else {
+ Ordering::Equal
+ }
+ })
+ .then_with(|| {
+ if let Some(deleted_hunk_anchor) = region.deleted_hunk_anchor {
+ deleted_hunk_anchor.cmp(&anchor, &snapshot)
+ } else {
+ let point = anchor.to_point(&snapshot);
+ assert_eq!(region.buffer_id, Some(snapshot.remote_id()));
+ if region.buffer_range.end < point {
+ Ordering::Less
+ } else if region.buffer_range.start > point {
+ Ordering::Greater
+ } else {
+ Ordering::Equal
+ }
+ }
+ })
+ .is_lt()
+ });
+
+ let Some(region) = regions.get(ix) else {
+ return Some(MultiBufferOffset(text.len()));
+ };
+
+ let offset = if region.buffer_id == Some(snapshot.remote_id()) {
+ let buffer_offset = anchor.to_offset(&snapshot);
+ let buffer_range = region.buffer_range.to_offset(&snapshot);
+ assert!(buffer_offset <= buffer_range.end);
+ let overshoot = buffer_offset.saturating_sub(buffer_range.start);
+ region.range.start + overshoot
+ } else {
+ region.range.start
+ };
+ Some(MultiBufferOffset(offset))
+ }
+
+ fn anchor_to_buffer_anchor(
+ &self,
+ anchor: &Anchor,
+ cx: &App,
+ ) -> Option<(text::Anchor, BufferSnapshot, PathKey)> {
+ let (excerpt, anchor) = match anchor {
+ Anchor::Min => {
+ let excerpt = self.excerpts.first()?;
+ (excerpt, excerpt.range.start)
+ }
+ Anchor::Excerpt(excerpt_anchor) => (
+ self.excerpts.iter().find(|excerpt| {
+ excerpt.buffer.read(cx).remote_id() == excerpt_anchor.buffer_id()
+ })?,
+ excerpt_anchor.text_anchor,
+ ),
+ Anchor::Max => {
+ let excerpt = self.excerpts.last()?;
+ (excerpt, excerpt.range.end)
+ }
+ };
+
+ Some((
+ anchor,
+ excerpt.buffer.read(cx).snapshot(),
+ excerpt.path_key.clone(),
+ ))
+ }
}
#[gpui::test(iterations = 100)]
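
The anchor_to_offset helper added above locates its region with partition_point over a composite comparison: every region strictly before the anchor (by path key, then excerpt range, then position within the region) maps to Less, and the predicate is .is_lt(), so the search lands on the first region that contains or follows the anchor. A toy illustration of that partition_point idiom:

    fn main() {
        // Regions ordered by their half-open ranges, with a gap at 9..12.
        let regions = vec![0..4, 4..9, 12..20];
        let target = 10;
        // partition_point finds the first region not strictly before the
        // target, mirroring the .is_lt() predicate in anchor_to_offset.
        let ix = regions.partition_point(|r| r.end <= target);
        assert_eq!(ix, 2);
        assert_eq!(regions[ix], 12..20);
    }
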
@@ -3791,12 +3887,13 @@ fn mutate_excerpt_ranges(
_ => {
let end_row = rng.random_range(0..=buffer.max_point().row);
let start_row = rng.random_range(0..=end_row);
+ let end_col = buffer.line_len(end_row);
log::info!(
"Inserting excerpt for buffer {:?}, row range {:?}",
buffer.remote_id(),
start_row..end_row
);
- ranges_to_add.push(Point::new(start_row, 0)..Point::new(end_row, 0));
+ ranges_to_add.push(Point::new(start_row, 0)..Point::new(end_row, end_col));
}
}
}
@@ -3820,8 +3917,36 @@ fn check_multibuffer(
.collect::<HashSet<_>>();
let actual_row_infos = snapshot.row_infos(MultiBufferRow(0)).collect::<Vec<_>>();
- let (expected_text, expected_row_infos, expected_boundary_rows) =
+ let anchors_to_check = anchors
+ .iter()
+ .filter_map(|anchor| {
+ snapshot
+ .anchor_to_buffer_anchor(*anchor)
+ .map(|(anchor, _)| anchor)
+ })
+ // Intentionally mix in some anchors that are (in general) not contained in any excerpt
+ .chain(
+ reference
+ .excerpts
+ .iter()
+ .map(|excerpt| excerpt.buffer.read(cx).remote_id())
+ .dedup()
+ .flat_map(|buffer_id| {
+ [
+ text::Anchor::min_for_buffer(buffer_id),
+ text::Anchor::max_for_buffer(buffer_id),
+ ]
+ }),
+ )
+ .map(|anchor| snapshot.anchor_in_buffer(anchor).unwrap())
+ .collect::<Vec<_>>();
+
+ let (expected_text, expected_row_infos, expected_boundary_rows, _) =
reference.expected_content(cx);
+ let expected_anchor_offsets = anchors_to_check
+ .iter()
+ .map(|anchor| reference.anchor_to_offset(anchor, cx).unwrap())
+ .collect::<Vec<_>>();
let has_diff = actual_row_infos
.iter()
@@ -3949,6 +4074,15 @@ fn check_multibuffer(
);
}
+ let actual_anchor_offsets = anchors_to_check
+ .into_iter()
+ .map(|anchor| anchor.to_offset(&snapshot))
+ .collect::<Vec<_>>();
+ assert_eq!(
+ actual_anchor_offsets, expected_anchor_offsets,
+ "buffer anchor resolves to wrong offset"
+ );
+
for _ in 0..10 {
let end_ix = text_rope.clip_offset(rng.random_range(0..=text_rope.len()), Bias::Right);
assert_eq!(
@@ -5911,3 +6045,104 @@ fn test_cannot_seek_backward_after_excerpt_replacement(cx: &mut TestAppContext)
snapshot.summaries_for_anchors::<Point, _>(&[anchor_in_e_b2, anchor_in_e_b3]);
});
}
+
+#[gpui::test]
+fn test_resolving_max_anchor_for_buffer(cx: &mut TestAppContext) {
+ let dock_base_text = indoc! {"
+ 0
+ 1
+ 2
+ 3
+ 4
+ 5
+ 6
+ 7
+ 8
+ 9
+ 10
+ 11
+ 12
+ "};
+
+ let dock_text = indoc! {"
+ 0
+ 4
+ 5
+ 6
+ 10
+ 11
+ 12
+ "};
+
+ let dock_buffer = cx.new(|cx| Buffer::local(dock_text, cx));
+ let diff = cx.new(|cx| {
+ BufferDiff::new_with_base_text(dock_base_text, &dock_buffer.read(cx).snapshot(), cx)
+ });
+
+ let workspace_text = "second buffer\n";
+ let workspace_buffer = cx.new(|cx| Buffer::local(workspace_text, cx));
+
+ let dock_path = PathKey::with_sort_prefix(0, rel_path("").into_arc());
+ let workspace_path = PathKey::with_sort_prefix(1, rel_path("").into_arc());
+
+ let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
+
+ multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer.set_excerpt_ranges_for_path(
+ dock_path,
+ dock_buffer.clone(),
+ &dock_buffer.read(cx).snapshot(),
+ vec![
+ ExcerptRange::new(Point::zero()..Point::new(1, 1)),
+ ExcerptRange::new(Point::new(3, 0)..Point::new(4, 2)),
+ ],
+ cx,
+ );
+ multibuffer.set_excerpt_ranges_for_path(
+ workspace_path,
+ workspace_buffer.clone(),
+ &workspace_buffer.read(cx).snapshot(),
+ vec![ExcerptRange::new(
+ Point::zero()..workspace_buffer.read(cx).max_point(),
+ )],
+ cx,
+ );
+ multibuffer.add_diff(diff, cx);
+ multibuffer.set_all_diff_hunks_expanded(cx);
+ });
+
+ let snapshot = multibuffer.update(cx, |multibuffer, cx| multibuffer.snapshot(cx));
+ let diff = format_diff(
+ &snapshot.text(),
+ &snapshot.row_infos(MultiBufferRow(0)).collect::<Vec<_>>(),
+ &Default::default(),
+ None,
+ );
+ assert_eq!(
+ diff,
+ indoc! {"
+ 0
+ - 1
+ - 2
+ - 3
+ 4 [↓]
+ 6 [↑]
+ - 7
+ - 8
+ - 9
+ 10 [↓]
+ second buffer
+ "}
+ );
+
+ multibuffer.update(cx, |multibuffer, cx| {
+ let snapshot = multibuffer.snapshot(cx);
+ let point = snapshot
+ .anchor_in_buffer(text::Anchor::max_for_buffer(
+ dock_buffer.read(cx).remote_id(),
+ ))
+ .unwrap()
+ .to_point(&snapshot);
+ assert_eq!(point, Point::new(10, 0));
+ })
+}
@@ -6581,7 +6581,7 @@ impl Repository {
let state = RepositoryState::Local(state);
let mut jobs = VecDeque::new();
loop {
- while let Ok(Some(next_job)) = job_rx.try_next() {
+ while let Ok(next_job) = job_rx.try_recv() {
jobs.push_back(next_job);
}
@@ -6617,7 +6617,7 @@ impl Repository {
let state = RepositoryState::Remote(state);
let mut jobs = VecDeque::new();
loop {
- while let Ok(Some(next_job)) = job_rx.try_next() {
+ while let Ok(next_job) = job_rx.try_recv() {
jobs.push_back(next_job);
}
@@ -4763,6 +4763,19 @@ impl Project {
});
}
+ pub fn remove_worktree_for_main_worktree_path(
+ &mut self,
+ path: impl AsRef<Path>,
+ cx: &mut Context<Self>,
+ ) {
+ let path = path.as_ref();
+ self.worktree_store.update(cx, |worktree_store, cx| {
+ if let Some(worktree) = worktree_store.worktree_for_main_worktree_path(path, cx) {
+ worktree_store.remove_worktree(worktree.read(cx).id(), cx);
+ }
+ });
+ }
+
fn add_worktree(&mut self, worktree: &Entity<Worktree>, cx: &mut Context<Self>) {
self.worktree_store.update(cx, |worktree_store, cx| {
worktree_store.add(worktree, cx);
@@ -850,6 +850,21 @@ impl WorktreeStore {
self.send_project_updates(cx);
}
+ pub fn worktree_for_main_worktree_path(
+ &self,
+ path: &Path,
+ cx: &App,
+ ) -> Option<Entity<Worktree>> {
+ self.visible_worktrees(cx).find(|worktree| {
+ let worktree = worktree.read(cx);
+ if let Some(common_dir) = worktree.root_repo_common_dir() {
+ common_dir.parent() == Some(path)
+ } else {
+ worktree.abs_path().as_ref() == path
+ }
+ })
+ }
+
pub fn set_worktrees_reordered(&mut self, worktrees_reordered: bool) {
self.worktrees_reordered = worktrees_reordered;
}
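
worktree_for_main_worktree_path encodes a two-branch matching rule: when the worktree's root repository has a common dir (e.g. /repo/.git), the given path must be that dir's parent, which covers linked git worktrees whose own root lives elsewhere; otherwise the path must equal the worktree's root. A standalone restatement of the predicate, with the function name hypothetical:

    use std::path::Path;

    fn matches_main_path(
        root_repo_common_dir: Option<&Path>,
        worktree_abs_path: &Path,
        path: &Path,
    ) -> bool {
        match root_repo_common_dir {
            // Linked git worktree: match on the repository's main checkout.
            Some(common_dir) => common_dir.parent() == Some(path),
            // No repository: match on the worktree's own root path.
            None => worktree_abs_path == path,
        }
    }

    fn main() {
        assert!(matches_main_path(
            Some(Path::new("/repo/.git")),
            Path::new("/repo-feature-x"),
            Path::new("/repo"),
        ));
        assert!(matches_main_path(None, Path::new("/plain"), Path::new("/plain")));
    }
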
@@ -4448,7 +4448,7 @@ async fn test_definition(cx: &mut gpui::TestAppContext) {
// Assert no new language server started
cx.executor().run_until_parked();
- assert!(fake_servers.try_next().is_err());
+ assert!(fake_servers.try_recv().is_err());
assert_eq!(definitions.len(), 1);
let definition = definitions.pop().unwrap();
@@ -3406,17 +3406,15 @@ mod tests {
assert_eq!(initial_location, ToolbarItemLocation::Secondary);
- let mut events = cx.events(&search_bar);
+ let mut events = cx.events::<ToolbarItemEvent, BufferSearchBar>(&search_bar);
search_bar.update_in(cx, |search_bar, window, cx| {
search_bar.dismiss(&Dismiss, window, cx);
});
assert_eq!(
- events.try_next().unwrap(),
- Some(ToolbarItemEvent::ChangeLocation(
- ToolbarItemLocation::Hidden
- ))
+ events.try_recv().unwrap(),
+ (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::Hidden))
);
search_bar.update_in(cx, |search_bar, window, cx| {
@@ -3424,10 +3422,8 @@ mod tests {
});
assert_eq!(
- events.try_next().unwrap(),
- Some(ToolbarItemEvent::ChangeLocation(
- ToolbarItemLocation::Secondary
- ))
+ events.try_recv().unwrap(),
+ (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::Secondary))
);
}
@@ -3442,17 +3438,15 @@ mod tests {
assert_eq!(initial_location, ToolbarItemLocation::PrimaryLeft);
- let mut events = cx.events(&search_bar);
+ let mut events = cx.events::<ToolbarItemEvent, BufferSearchBar>(&search_bar);
search_bar.update_in(cx, |search_bar, window, cx| {
search_bar.dismiss(&Dismiss, window, cx);
});
assert_eq!(
- events.try_next().unwrap(),
- Some(ToolbarItemEvent::ChangeLocation(
- ToolbarItemLocation::PrimaryLeft
- ))
+ events.try_recv().unwrap(),
+ (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::PrimaryLeft))
);
search_bar.update_in(cx, |search_bar, window, cx| {
@@ -3460,10 +3454,8 @@ mod tests {
});
assert_eq!(
- events.try_next().unwrap(),
- Some(ToolbarItemEvent::ChangeLocation(
- ToolbarItemLocation::PrimaryLeft
- ))
+ events.try_recv().unwrap(),
+ (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::PrimaryLeft))
);
}
@@ -3482,17 +3474,15 @@ mod tests {
assert_eq!(initial_location, ToolbarItemLocation::Hidden);
- let mut events = cx.events(&search_bar);
+ let mut events = cx.events::<ToolbarItemEvent, BufferSearchBar>(&search_bar);
search_bar.update_in(cx, |search_bar, window, cx| {
search_bar.dismiss(&Dismiss, window, cx);
});
assert_eq!(
- events.try_next().unwrap(),
- Some(ToolbarItemEvent::ChangeLocation(
- ToolbarItemLocation::Hidden
- ))
+ events.try_recv().unwrap(),
+ (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::Hidden))
);
search_bar.update_in(cx, |search_bar, window, cx| {
@@ -3500,10 +3490,8 @@ mod tests {
});
assert_eq!(
- events.try_next().unwrap(),
- Some(ToolbarItemEvent::ChangeLocation(
- ToolbarItemLocation::Secondary
- ))
+ events.try_recv().unwrap(),
+ (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::Secondary))
);
}
@@ -278,6 +278,7 @@ pub struct OpenAiCompatibleAvailableModel {
pub max_tokens: u64,
pub max_output_tokens: Option<u64>,
pub max_completion_tokens: Option<u64>,
+ pub reasoning_effort: Option<OpenAiReasoningEffort>,
#[serde(default)]
pub capabilities: OpenAiCompatibleModelCapabilities,
}
@@ -689,12 +689,9 @@ impl Sidebar {
return;
};
- let paths: Vec<std::path::PathBuf> =
- path_list.paths().iter().map(|p| p.to_path_buf()).collect();
-
multi_workspace
- .update(cx, |mw, cx| {
- mw.open_project(paths, workspace::OpenMode::Activate, window, cx)
+ .update(cx, |this, cx| {
+ this.find_or_create_local_workspace(path_list.clone(), window, cx)
})
.detach_and_log_err(cx);
}
@@ -1439,10 +1436,7 @@ impl Sidebar {
})
}),
)
- .child({
- let workspace_for_new_thread = workspace.clone();
- let path_list_for_new_thread = path_list.clone();
-
+ .child(
h_flex()
.when(self.project_header_menu_ix != Some(ix), |this| {
this.visible_on_hover(group_name)
@@ -1450,13 +1444,7 @@ impl Sidebar {
.on_mouse_down(gpui::MouseButton::Left, |_, _, cx| {
cx.stop_propagation();
})
- .when_some(workspace, |this, workspace| {
- this.child(
- self.render_project_header_menu(
- ix, id_prefix, &workspace, &workspace, cx,
- ),
- )
- })
+ .child(self.render_project_header_menu(ix, id_prefix, key, cx))
.when(view_more_expanded && !is_collapsed, |this| {
this.child(
IconButton::new(
@@ -1478,12 +1466,10 @@ impl Sidebar {
})),
)
})
- .when(
- show_new_thread_button && workspace_for_new_thread.is_some(),
- |this| {
- let workspace_for_new_thread =
- workspace_for_new_thread.clone().unwrap();
- let path_list_for_new_thread = path_list_for_new_thread.clone();
+ .when_some(
+ workspace.filter(|_| show_new_thread_button),
+ |this, workspace| {
+ let path_list = path_list.clone();
this.child(
IconButton::new(
SharedString::from(format!(
@@ -1495,26 +1481,22 @@ impl Sidebar {
.tooltip(Tooltip::text("New Thread"))
.on_click(cx.listener(
move |this, _, window, cx| {
- this.collapsed_groups.remove(&path_list_for_new_thread);
+ this.collapsed_groups.remove(&path_list);
this.selection = None;
- this.create_new_thread(
- &workspace_for_new_thread,
- window,
- cx,
- );
+ this.create_new_thread(&workspace, window, cx);
},
)),
)
},
- )
- })
+ ),
+ )
.when(!is_active, |this| {
- let path_list_for_open = path_list.clone();
+ let path_list = path_list.clone();
this.cursor_pointer()
.hover(|s| s.bg(hover_color))
.tooltip(Tooltip::text("Open Workspace"))
.on_click(cx.listener(move |this, _, window, cx| {
- if let Some(workspace) = this.workspace_for_group(&path_list_for_open, cx) {
+ if let Some(workspace) = this.workspace_for_group(&path_list, cx) {
this.active_entry = Some(ActiveEntry::Draft(workspace.clone()));
if let Some(multi_workspace) = this.multi_workspace.upgrade() {
multi_workspace.update(cx, |multi_workspace, cx| {
@@ -1527,7 +1509,7 @@ impl Sidebar {
});
}
} else {
- this.open_workspace_for_group(&path_list_for_open, window, cx);
+ this.open_workspace_for_group(&path_list, window, cx);
}
}))
})
@@ -1538,14 +1520,12 @@ impl Sidebar {
&self,
ix: usize,
id_prefix: &str,
- workspace: &Entity<Workspace>,
- workspace_for_remove: &Entity<Workspace>,
+ project_group_key: &ProjectGroupKey,
cx: &mut Context<Self>,
) -> impl IntoElement {
- let workspace_for_menu = workspace.clone();
- let workspace_for_remove = workspace_for_remove.clone();
let multi_workspace = self.multi_workspace.clone();
let this = cx.weak_entity();
+ let project_group_key = project_group_key.clone();
PopoverMenu::new(format!("{id_prefix}project-header-menu-{ix}"))
.on_open(Rc::new({
@@ -1559,116 +1539,102 @@ impl Sidebar {
}
}))
.menu(move |window, cx| {
- let workspace = workspace_for_menu.clone();
- let workspace_for_remove = workspace_for_remove.clone();
let multi_workspace = multi_workspace.clone();
+ let project_group_key = project_group_key.clone();
let menu = ContextMenu::build_persistent(window, cx, move |menu, _window, cx| {
- let worktrees: Vec<_> = workspace
- .read(cx)
- .visible_worktrees(cx)
- .map(|worktree| {
- let worktree_read = worktree.read(cx);
- let id = worktree_read.id();
- let name: SharedString =
- worktree_read.root_name().as_unix_str().to_string().into();
- (id, name)
- })
- .collect();
-
- let worktree_count = worktrees.len();
-
let mut menu = menu
.header("Project Folders")
.end_slot_action(Box::new(menu::EndSlot));
- for (worktree_id, name) in &worktrees {
- let worktree_id = *worktree_id;
- let workspace_for_worktree = workspace.clone();
- let workspace_for_remove_worktree = workspace_for_remove.clone();
- let multi_workspace_for_worktree = multi_workspace.clone();
-
- let remove_handler = move |window: &mut Window, cx: &mut App| {
- if worktree_count <= 1 {
- if let Some(mw) = multi_workspace_for_worktree.upgrade() {
- let ws = workspace_for_remove_worktree.clone();
- mw.update(cx, |multi_workspace, cx| {
- multi_workspace.remove(&ws, window, cx);
- });
- }
- } else {
- workspace_for_worktree.update(cx, |workspace, cx| {
- workspace.project().update(cx, |project, cx| {
- project.remove_worktree(worktree_id, cx);
- });
- });
- }
+ for path in project_group_key.path_list().paths() {
+ let Some(name) = path.file_name() else {
+ continue;
};
-
+ let name: SharedString = name.to_string_lossy().into_owned().into();
+ let path = path.clone();
+ let project_group_key = project_group_key.clone();
+ let multi_workspace = multi_workspace.clone();
menu = menu.entry_with_end_slot_on_hover(
name.clone(),
None,
|_, _| {},
IconName::Close,
"Remove Folder".into(),
- remove_handler,
+ move |_window, cx| {
+ multi_workspace
+ .update(cx, |multi_workspace, cx| {
+ multi_workspace.remove_folder_from_project_group(
+ &project_group_key,
+ &path,
+ cx,
+ );
+ })
+ .ok();
+ },
);
}
- let workspace_for_add = workspace.clone();
- let multi_workspace_for_add = multi_workspace.clone();
let menu = menu.separator().entry(
"Add Folder to Project",
Some(Box::new(AddFolderToProject)),
- move |window, cx| {
- if let Some(mw) = multi_workspace_for_add.upgrade() {
- mw.update(cx, |mw, cx| {
- mw.activate(workspace_for_add.clone(), window, cx);
- });
+ {
+ let project_group_key = project_group_key.clone();
+ let multi_workspace = multi_workspace.clone();
+ move |window, cx| {
+ multi_workspace
+ .update(cx, |multi_workspace, cx| {
+ multi_workspace.prompt_to_add_folders_to_project_group(
+ &project_group_key,
+ window,
+ cx,
+ );
+ })
+ .ok();
}
- workspace_for_add.update(cx, |workspace, cx| {
- workspace.add_folder_to_project(&AddFolderToProject, window, cx);
- });
},
);
- let workspace_count = multi_workspace
+ let group_count = multi_workspace
.upgrade()
- .map_or(0, |mw| mw.read(cx).workspaces().len());
- let menu = if workspace_count > 1 {
- let workspace_for_move = workspace.clone();
- let multi_workspace_for_move = multi_workspace.clone();
+ .map_or(0, |mw| mw.read(cx).project_group_keys().count());
+ let menu = if group_count > 1 {
+ let project_group_key = project_group_key.clone();
+ let multi_workspace = multi_workspace.clone();
menu.entry(
"Move to New Window",
Some(Box::new(
zed_actions::agents_sidebar::MoveWorkspaceToNewWindow,
)),
move |window, cx| {
- if let Some(mw) = multi_workspace_for_move.upgrade() {
- mw.update(cx, |multi_workspace, cx| {
- multi_workspace.move_workspace_to_new_window(
- &workspace_for_move,
+ multi_workspace
+ .update(cx, |multi_workspace, cx| {
+ multi_workspace.move_project_group_to_new_window(
+ &project_group_key,
window,
cx,
);
- });
- }
+ })
+ .ok();
},
)
} else {
menu
};
- let workspace_for_remove = workspace_for_remove.clone();
- let multi_workspace_for_remove = multi_workspace.clone();
+ let project_group_key = project_group_key.clone();
+ let multi_workspace = multi_workspace.clone();
menu.separator()
.entry("Remove Project", None, move |window, cx| {
- if let Some(mw) = multi_workspace_for_remove.upgrade() {
- let ws = workspace_for_remove.clone();
- mw.update(cx, |multi_workspace, cx| {
- multi_workspace.remove(&ws, window, cx);
- });
- }
+ multi_workspace
+ .update(cx, |multi_workspace, cx| {
+ multi_workspace.remove_project_group(
+ &project_group_key,
+ window,
+ cx,
+ );
+ })
+ .ok();
})
});
@@ -2176,16 +2142,12 @@ impl Sidebar {
return;
};
- let paths: Vec<std::path::PathBuf> =
- path_list.paths().iter().map(|p| p.to_path_buf()).collect();
-
- let open_task = multi_workspace.update(cx, |mw, cx| {
- mw.open_project(paths, workspace::OpenMode::Activate, window, cx)
+ let open_task = multi_workspace.update(cx, |this, cx| {
+ this.find_or_create_local_workspace(path_list, window, cx)
});
cx.spawn_in(window, async move |this, cx| {
let workspace = open_task.await?;
-
this.update_in(cx, |this, window, cx| {
this.activate_thread(metadata, &workspace, window, cx);
})?;
@@ -65,6 +65,16 @@ impl PathList {
self.paths.is_empty()
}
+ /// Returns a new `PathList` with the given path removed.
+ pub fn without_path(&self, path_to_remove: &Path) -> PathList {
+ let paths: Vec<PathBuf> = self
+ .ordered_paths()
+ .filter(|p| p.as_path() != path_to_remove)
+ .cloned()
+ .collect();
+ PathList::new(&paths)
+ }
+
/// Get the paths in lexicographic order.
pub fn paths(&self) -> &[PathBuf] {
self.paths.as_ref()
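A usage sketch for the new `without_path` helper, written as a unit test might look inside the repo (it assumes the `PathList` API used throughout this diff: `new`, `paths`, `is_empty`):

```rust
use std::path::{Path, PathBuf};
use util::path_list::PathList;

#[test]
fn without_path_drops_only_the_given_path() {
    let list = PathList::new(&["/repo", "/other-project"]);

    // Dropping one path leaves the rest intact.
    let trimmed = list.without_path(Path::new("/other-project"));
    assert_eq!(trimmed.paths(), &[PathBuf::from("/repo")]);

    // Removing a path that is not present is a no-op.
    assert_eq!(list.without_path(Path::new("/missing")).paths().len(), 2);

    // Removing the last remaining path yields an empty list, which callers
    // such as `remove_folder_from_project_group` treat as "do nothing".
    assert!(trimmed.without_path(Path::new("/repo")).is_empty());
}
```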
@@ -9,7 +9,7 @@ use crate::{
};
use anyhow::Result;
use client::{Client, proto};
-use futures::{StreamExt, channel::mpsc};
+use futures::channel::mpsc;
use gpui::{
Action, AnyElement, AnyEntity, AnyView, App, AppContext, Context, Entity, EntityId,
EventEmitter, FocusHandle, Focusable, Font, Pixels, Point, Render, SharedString, Task,
@@ -777,8 +777,8 @@ impl<T: Item> ItemHandle for Entity<T> {
send_follower_updates = Some(cx.spawn_in(window, {
let pending_update = pending_update.clone();
async move |workspace, cx| {
- while let Some(mut leader_id) = pending_update_rx.next().await {
- while let Ok(Some(id)) = pending_update_rx.try_next() {
+ while let Ok(mut leader_id) = pending_update_rx.recv().await {
+ while let Ok(id) = pending_update_rx.try_recv() {
leader_id = id;
}
@@ -1,5 +1,6 @@
use anyhow::Result;
use feature_flags::{AgentV2FeatureFlag, FeatureFlagAppExt};
+use gpui::PathPromptOptions;
use gpui::{
AnyView, App, Context, DragMoveEvent, Entity, EntityId, EventEmitter, FocusHandle, Focusable,
ManagedView, MouseButton, Pixels, Render, Subscription, Task, Tiling, Window, WindowId,
@@ -7,14 +8,16 @@ use gpui::{
};
#[cfg(any(test, feature = "test-support"))]
use project::Project;
-use project::{DisableAiSettings, ProjectGroupKey};
+use project::{DirectoryLister, DisableAiSettings, ProjectGroupKey};
use settings::Settings;
pub use settings::SidebarSide;
use std::future::Future;
+use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use ui::prelude::*;
use util::ResultExt;
+use util::path_list::PathList;
use zed_actions::agents_sidebar::{MoveWorkspaceToNewWindow, ToggleThreadSwitcher};
use agent_settings::AgentSettings;
@@ -23,6 +26,7 @@ use ui::{ContextMenu, right_click_menu};
const SIDEBAR_RESIZE_HANDLE_SIZE: Pixels = px(6.0);
+use crate::AppState;
use crate::{
CloseIntent, CloseWindow, DockPosition, Event as WorkspaceEvent, Item, ModalView, OpenMode,
Panel, Workspace, WorkspaceId, client_side_decorations,
@@ -470,6 +474,16 @@ impl MultiWorkspace {
self.project_group_keys.push(project_group_key);
}
+ pub fn restore_project_group_keys(&mut self, keys: Vec<ProjectGroupKey>) {
+ let mut restored = keys;
+ for existing_key in &self.project_group_keys {
+ if !restored.contains(existing_key) {
+ restored.push(existing_key.clone());
+ }
+ }
+ self.project_group_keys = restored;
+ }
+
pub fn project_group_keys(&self) -> impl Iterator<Item = &ProjectGroupKey> {
self.project_group_keys.iter()
}
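`restore_project_group_keys` merges rather than overwrites: the persisted keys keep their saved order, and any key the window already has (for example from a workspace opened before restore finished) is appended instead of dropped. The same merge rule, sketched over plain strings:

```rust
// Restored keys come first in their persisted order; existing keys that are
// not in the restored list are appended rather than lost.
fn restore_keys(existing: &mut Vec<String>, restored_keys: Vec<String>) {
    let mut restored = restored_keys;
    for existing_key in existing.iter() {
        if !restored.contains(existing_key) {
            restored.push(existing_key.clone());
        }
    }
    *existing = restored;
}

fn main() {
    let mut current = vec!["b".to_string(), "c".to_string()];
    restore_keys(&mut current, vec!["a".to_string(), "b".to_string()]);
    assert_eq!(current, ["a", "b", "c"]);
}
```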
@@ -494,6 +508,176 @@ impl MultiWorkspace {
groups.into_iter()
}
+ pub fn workspaces_for_project_group(
+ &self,
+ project_group_key: &ProjectGroupKey,
+ cx: &App,
+ ) -> impl Iterator<Item = &Entity<Workspace>> {
+ self.workspaces
+ .iter()
+ .filter(move |ws| ws.read(cx).project_group_key(cx) == *project_group_key)
+ }
+
+ pub fn remove_folder_from_project_group(
+ &mut self,
+ project_group_key: &ProjectGroupKey,
+ path: &Path,
+ cx: &mut Context<Self>,
+ ) {
+ let new_path_list = project_group_key.path_list().without_path(path);
+ if new_path_list.is_empty() {
+ return;
+ }
+
+ let new_key = ProjectGroupKey::new(project_group_key.host(), new_path_list);
+
+ let workspaces: Vec<_> = self
+ .workspaces_for_project_group(project_group_key, cx)
+ .cloned()
+ .collect();
+
+ self.add_project_group_key(new_key);
+
+ for workspace in workspaces {
+ let project = workspace.read(cx).project().clone();
+ project.update(cx, |project, cx| {
+ project.remove_worktree_for_main_worktree_path(path, cx);
+ });
+ }
+
+ self.serialize(cx);
+ cx.notify();
+ }
+
+ pub fn prompt_to_add_folders_to_project_group(
+ &mut self,
+ key: &ProjectGroupKey,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let paths = self.workspace().update(cx, |workspace, cx| {
+ workspace.prompt_for_open_path(
+ PathPromptOptions {
+ files: false,
+ directories: true,
+ multiple: true,
+ prompt: None,
+ },
+ DirectoryLister::Project(workspace.project().clone()),
+ window,
+ cx,
+ )
+ });
+
+ let key = key.clone();
+ cx.spawn_in(window, async move |this, cx| {
+ if let Some(new_paths) = paths.await.ok().flatten() {
+ if !new_paths.is_empty() {
+ this.update(cx, |multi_workspace, cx| {
+ multi_workspace.add_folders_to_project_group(&key, new_paths, cx);
+ })?;
+ }
+ }
+ anyhow::Ok(())
+ })
+ .detach_and_log_err(cx);
+ }
+
+ pub fn add_folders_to_project_group(
+ &mut self,
+ project_group_key: &ProjectGroupKey,
+ new_paths: Vec<PathBuf>,
+ cx: &mut Context<Self>,
+ ) {
+ let mut all_paths: Vec<PathBuf> = project_group_key.path_list().paths().to_vec();
+ all_paths.extend(new_paths.iter().cloned());
+ let new_path_list = PathList::new(&all_paths);
+ let new_key = ProjectGroupKey::new(project_group_key.host(), new_path_list);
+
+ let workspaces: Vec<_> = self
+ .workspaces_for_project_group(project_group_key, cx)
+ .cloned()
+ .collect();
+
+ self.add_project_group_key(new_key);
+
+ for workspace in workspaces {
+ let project = workspace.read(cx).project().clone();
+ for path in &new_paths {
+ project
+ .update(cx, |project, cx| {
+ project.find_or_create_worktree(path, true, cx)
+ })
+ .detach_and_log_err(cx);
+ }
+ }
+
+ self.serialize(cx);
+ cx.notify();
+ }
+
+ pub fn remove_project_group(
+ &mut self,
+ key: &ProjectGroupKey,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.project_group_keys.retain(|k| k != key);
+
+ let workspaces: Vec<_> = self
+ .workspaces_for_project_group(key, cx)
+ .cloned()
+ .collect();
+ for workspace in workspaces {
+ self.remove(&workspace, window, cx);
+ }
+
+ self.serialize(cx);
+ cx.notify();
+ }
+
+ /// Finds an existing workspace in this multi-workspace whose paths match,
+ /// or creates a new one (deserializing its saved state from the database).
+ /// Never searches other windows or matches workspaces with a superset of
+ /// the requested paths.
+ pub fn find_or_create_local_workspace(
+ &mut self,
+ path_list: PathList,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<Entity<Workspace>>> {
+ if let Some(workspace) = self
+ .workspaces
+ .iter()
+ .find(|ws| ws.read(cx).project_group_key(cx).path_list() == &path_list)
+ .cloned()
+ {
+ self.activate(workspace.clone(), window, cx);
+ return Task::ready(Ok(workspace));
+ }
+
+ let paths = path_list.paths().to_vec();
+ let app_state = self.workspace().read(cx).app_state().clone();
+ let requesting_window = window.window_handle().downcast::<MultiWorkspace>();
+
+ cx.spawn(async move |_this, cx| {
+ let result = cx
+ .update(|cx| {
+ Workspace::new_local(
+ paths,
+ app_state,
+ requesting_window,
+ None,
+ None,
+ OpenMode::Activate,
+ cx,
+ )
+ })
+ .await?;
+ Ok(result.workspace)
+ })
+ }
+
pub fn workspace(&self) -> &Entity<Workspace> {
&self.workspaces[self.active_workspace_index]
}
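Per its doc comment, `find_or_create_local_workspace` reuses a workspace only on an exact path-list match; a workspace holding a superset of the requested paths is never matched, and a miss spawns a fresh `Workspace::new_local`. A simplified, hedged sketch of that reuse-or-create decision over plain data:

```rust
#[derive(Debug, PartialEq)]
enum Outcome {
    Reused(usize), // index of the exact-match workspace (it would be activated)
    CreateNew,     // no exact match: a new local workspace is created
}

fn find_or_create(open_workspaces: &[Vec<&str>], requested: &[&str]) -> Outcome {
    match open_workspaces
        .iter()
        .position(|paths| paths.as_slice() == requested)
    {
        Some(index) => Outcome::Reused(index),
        None => Outcome::CreateNew,
    }
}

fn main() {
    let open = vec![vec!["/repo", "/other-project"], vec!["/repo"]];
    // Exact match on the second workspace.
    assert_eq!(find_or_create(&open, &["/repo"]), Outcome::Reused(1));
    // A superset never matches; a new workspace would be created instead.
    assert_eq!(find_or_create(&open, &["/other-project"]), Outcome::CreateNew);
}
```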
@@ -892,7 +1076,7 @@ impl MultiWorkspace {
return;
}
- let app_state: Arc<crate::AppState> = workspace.read(cx).app_state().clone();
+ let app_state: Arc<AppState> = workspace.read(cx).app_state().clone();
cx.defer(move |cx| {
let options = (app_state.build_window_options)(None, cx);
@@ -909,7 +1093,58 @@ impl MultiWorkspace {
});
}
- // TODO: Move group to a new window?
+ pub fn move_project_group_to_new_window(
+ &mut self,
+ key: &ProjectGroupKey,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let workspaces: Vec<_> = self
+ .workspaces_for_project_group(key, cx)
+ .cloned()
+ .collect();
+ if workspaces.is_empty() {
+ return;
+ }
+
+ self.project_group_keys.retain(|k| k != key);
+
+ let mut removed = Vec::new();
+ for workspace in &workspaces {
+ if self.remove(workspace, window, cx) {
+ removed.push(workspace.clone());
+ }
+ }
+
+ if removed.is_empty() {
+ return;
+ }
+
+ let app_state = removed[0].read(cx).app_state().clone();
+
+ cx.defer(move |cx| {
+ let options = (app_state.build_window_options)(None, cx);
+
+ let first = removed[0].clone();
+ let rest = removed[1..].to_vec();
+
+ let Ok(new_window) = cx.open_window(options, |window, cx| {
+ cx.new(|cx| MultiWorkspace::new(first, window, cx))
+ }) else {
+ return;
+ };
+
+ new_window
+ .update(cx, |mw, window, cx| {
+ for workspace in rest {
+ mw.activate(workspace, window, cx);
+ }
+ window.activate_window();
+ })
+ .log_err();
+ });
+ }
+
fn move_active_workspace_to_new_window(
&mut self,
_: &MoveWorkspaceToNewWindow,
@@ -927,16 +1162,10 @@ impl MultiWorkspace {
window: &mut Window,
cx: &mut Context<Self>,
) -> Task<Result<Entity<Workspace>>> {
- let workspace = self.workspace().clone();
-
- let needs_close_prompt = !self.multi_workspace_enabled(cx);
- let open_mode = if self.multi_workspace_enabled(cx) {
- open_mode
+ if self.multi_workspace_enabled(cx) {
+ self.find_or_create_local_workspace(PathList::new(&paths), window, cx)
} else {
- OpenMode::Activate
- };
-
- if needs_close_prompt {
+ let workspace = self.workspace().clone();
cx.spawn_in(window, async move |_this, cx| {
let should_continue = workspace
.update_in(cx, |workspace, window, cx| {
@@ -953,10 +1182,6 @@ impl MultiWorkspace {
Ok(workspace)
}
})
- } else {
- workspace.update(cx, |workspace, cx| {
- workspace.open_workspace_for_paths(open_mode, paths, window, cx)
- })
}
}
}
@@ -337,15 +337,20 @@ pub fn read_serialized_multi_workspaces(
window_groups
.into_iter()
- .map(|group| {
+ .filter_map(|group| {
let window_id = group.first().and_then(|sw| sw.window_id);
let state = window_id
.map(|wid| read_multi_workspace_state(wid, cx))
.unwrap_or_default();
- model::SerializedMultiWorkspace {
- workspaces: group,
+ let active_workspace = state
+ .active_workspace_id
+ .and_then(|id| group.iter().position(|ws| ws.workspace_id == id))
+ .or(Some(0))
+ .and_then(|index| group.into_iter().nth(index))?;
+ Some(model::SerializedMultiWorkspace {
+ active_workspace,
state,
- }
+ })
})
.collect()
}
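The `filter_map` above changes what a serialized multi-workspace carries: instead of every workspace in the window, it keeps only the one to restore eagerly, namely the persisted active workspace if it is still present in the group, otherwise the first entry. Groups that end up empty are dropped entirely. The selection logic, sketched over `(workspace_id, paths)` pairs:

```rust
// Prefer the workspace whose id matches the persisted `active_workspace_id`,
// fall back to index 0, and yield `None` (dropping the group) when empty.
fn pick_active(
    group: Vec<(u64, &'static str)>,
    active_id: Option<u64>,
) -> Option<(u64, &'static str)> {
    let index = active_id
        .and_then(|id| group.iter().position(|(ws_id, _)| *ws_id == id))
        .or(Some(0))?;
    group.into_iter().nth(index)
}

fn main() {
    let group = vec![(1, "/a"), (2, "/b")];
    assert_eq!(pick_active(group.clone(), Some(2)), Some((2, "/b")));
    assert_eq!(pick_active(group.clone(), None), Some((1, "/a")));
    // An empty group produces no serialized multi-workspace at all.
    assert_eq!(pick_active(vec![], None), None);
}
```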
@@ -2488,11 +2493,20 @@ pub fn delete_unloaded_items(
#[cfg(test)]
mod tests {
use super::*;
- use crate::persistence::model::{
- SerializedItem, SerializedPane, SerializedPaneGroup, SerializedWorkspace, SessionWorkspace,
+ use crate::{
+ multi_workspace::MultiWorkspace,
+ persistence::{
+ model::{
+ SerializedItem, SerializedPane, SerializedPaneGroup, SerializedWorkspace,
+ SessionWorkspace,
+ },
+ read_multi_workspace_state,
+ },
};
- use gpui;
+ use feature_flags::FeatureFlagAppExt;
+ use gpui::AppContext as _;
use pretty_assertions::assert_eq;
+ use project::{Project, ProjectGroupKey};
use remote::SshConnectionOptions;
use serde_json::json;
use std::{thread, time::Duration};
@@ -2507,12 +2521,6 @@ mod tests {
#[gpui::test]
async fn test_multi_workspace_serializes_on_add_and_remove(cx: &mut gpui::TestAppContext) {
- use crate::multi_workspace::MultiWorkspace;
- use crate::persistence::read_multi_workspace_state;
- use feature_flags::FeatureFlagAppExt;
- use gpui::AppContext as _;
- use project::Project;
-
crate::tests::init_test(cx);
cx.update(|cx| {
@@ -4042,35 +4050,30 @@ mod tests {
let results = cx.update(|cx| read_serialized_multi_workspaces(session_workspaces, cx));
- // Should produce 3 groups: window 10, window 20, and the orphan.
+ // Should produce 3 results: window 10, window 20, and the orphan.
assert_eq!(results.len(), 3);
- // Window 10 group: 2 workspaces, active_workspace_id = 2, sidebar open.
+ // Window 10: active_workspace_id = 2 picks workspace 2 (paths /b), sidebar open.
let group_10 = &results[0];
- assert_eq!(group_10.workspaces.len(), 2);
+ assert_eq!(group_10.active_workspace.workspace_id, WorkspaceId(2));
assert_eq!(group_10.state.active_workspace_id, Some(WorkspaceId(2)));
assert_eq!(group_10.state.sidebar_open, true);
- // Window 20 group: 1 workspace, active_workspace_id = 3, sidebar closed.
+ // Window 20: active_workspace_id = 3 picks workspace 3 (paths /c), sidebar closed.
let group_20 = &results[1];
- assert_eq!(group_20.workspaces.len(), 1);
+ assert_eq!(group_20.active_workspace.workspace_id, WorkspaceId(3));
assert_eq!(group_20.state.active_workspace_id, Some(WorkspaceId(3)));
assert_eq!(group_20.state.sidebar_open, false);
- // Orphan group: no window_id, so state is default.
+ // Orphan: no active_workspace_id, falls back to first workspace (id 4).
let group_none = &results[2];
- assert_eq!(group_none.workspaces.len(), 1);
+ assert_eq!(group_none.active_workspace.workspace_id, WorkspaceId(4));
assert_eq!(group_none.state.active_workspace_id, None);
assert_eq!(group_none.state.sidebar_open, false);
}
#[gpui::test]
async fn test_flush_serialization_completes_before_quit(cx: &mut gpui::TestAppContext) {
- use crate::multi_workspace::MultiWorkspace;
- use feature_flags::FeatureFlagAppExt;
-
- use project::Project;
-
crate::tests::init_test(cx);
cx.update(|cx| {
@@ -4116,12 +4119,6 @@ mod tests {
#[gpui::test]
async fn test_create_workspace_serialization(cx: &mut gpui::TestAppContext) {
- use crate::multi_workspace::MultiWorkspace;
- use crate::persistence::read_multi_workspace_state;
- use feature_flags::FeatureFlagAppExt;
-
- use project::Project;
-
crate::tests::init_test(cx);
cx.update(|cx| {
@@ -4179,11 +4176,6 @@ mod tests {
#[gpui::test]
async fn test_remove_workspace_clears_session_binding(cx: &mut gpui::TestAppContext) {
- use crate::multi_workspace::MultiWorkspace;
- use feature_flags::FeatureFlagAppExt;
- use gpui::AppContext as _;
- use project::Project;
-
crate::tests::init_test(cx);
cx.update(|cx| {
@@ -4271,11 +4263,6 @@ mod tests {
#[gpui::test]
async fn test_remove_workspace_not_restored_as_zombie(cx: &mut gpui::TestAppContext) {
- use crate::multi_workspace::MultiWorkspace;
- use feature_flags::FeatureFlagAppExt;
- use gpui::AppContext as _;
- use project::Project;
-
crate::tests::init_test(cx);
cx.update(|cx| {
@@ -4378,11 +4365,6 @@ mod tests {
#[gpui::test]
async fn test_pending_removal_tasks_drained_on_flush(cx: &mut gpui::TestAppContext) {
- use crate::multi_workspace::MultiWorkspace;
- use feature_flags::FeatureFlagAppExt;
- use gpui::AppContext as _;
- use project::Project;
-
crate::tests::init_test(cx);
cx.update(|cx| {
@@ -4484,10 +4466,6 @@ mod tests {
#[gpui::test]
async fn test_create_workspace_bounds_observer_uses_fresh_id(cx: &mut gpui::TestAppContext) {
- use crate::multi_workspace::MultiWorkspace;
- use feature_flags::FeatureFlagAppExt;
- use project::Project;
-
crate::tests::init_test(cx);
cx.update(|cx| {
@@ -4545,10 +4523,6 @@ mod tests {
#[gpui::test]
async fn test_flush_serialization_writes_bounds(cx: &mut gpui::TestAppContext) {
- use crate::multi_workspace::MultiWorkspace;
- use feature_flags::FeatureFlagAppExt;
- use project::Project;
-
crate::tests::init_test(cx);
cx.update(|cx| {
@@ -4704,4 +4678,219 @@ mod tests {
assert_eq!(result[2].2.paths(), &[PathBuf::from("/plain-project")]);
assert_eq!(result[2].0, WorkspaceId(4));
}
+
+ #[gpui::test]
+ async fn test_restore_window_with_linked_worktree_and_multiple_project_groups(
+ cx: &mut gpui::TestAppContext,
+ ) {
+ crate::tests::init_test(cx);
+
+ cx.update(|cx| {
+ cx.set_staff(true);
+ cx.update_flags(true, vec!["agent-v2".to_string()]);
+ });
+
+ let fs = fs::FakeFs::new(cx.executor());
+
+ // Main git repo at /repo
+ fs.insert_tree(
+ "/repo",
+ json!({
+ ".git": {
+ "HEAD": "ref: refs/heads/main",
+ "worktrees": {
+ "feature": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/feature"
+ }
+ }
+ },
+ "src": { "main.rs": "" }
+ }),
+ )
+ .await;
+
+ // Linked worktree checkout pointing back to /repo
+ fs.insert_tree(
+ "/worktree-feature",
+ json!({
+ ".git": "gitdir: /repo/.git/worktrees/feature",
+ "src": { "lib.rs": "" }
+ }),
+ )
+ .await;
+
+ // --- Phase 1: Set up the original multi-workspace window ---
+
+ let project_1 = Project::test(fs.clone(), ["/repo".as_ref()], cx).await;
+ let project_1_linked_worktree =
+ Project::test(fs.clone(), ["/worktree-feature".as_ref()], cx).await;
+
+ // Wait for git discovery to finish.
+ cx.run_until_parked();
+
+ // Create a second, unrelated project so we have two distinct project groups.
+ fs.insert_tree(
+ "/other-project",
+ json!({
+ ".git": { "HEAD": "ref: refs/heads/main" },
+ "readme.md": ""
+ }),
+ )
+ .await;
+ let project_2 = Project::test(fs.clone(), ["/other-project".as_ref()], cx).await;
+ cx.run_until_parked();
+
+ // Create the MultiWorkspace with project_2, then add the main repo
+ // and its linked worktree. The linked worktree is added last and
+ // becomes the active workspace.
+ let (multi_workspace, cx) = cx
+ .add_window_view(|window, cx| MultiWorkspace::test_new(project_2.clone(), window, cx));
+
+ multi_workspace.update_in(cx, |mw, window, cx| {
+ mw.test_add_workspace(project_1.clone(), window, cx);
+ });
+
+ let workspace_worktree = multi_workspace.update_in(cx, |mw, window, cx| {
+ mw.test_add_workspace(project_1_linked_worktree.clone(), window, cx)
+ });
+
+ // Assign database IDs and set up session bindings so serialization
+ // writes real rows.
+ multi_workspace.update_in(cx, |mw, _, cx| {
+ for workspace in mw.workspaces() {
+ workspace.update(cx, |ws, _cx| {
+ ws.set_random_database_id();
+ });
+ }
+ });
+
+ // Flush serialization for each individual workspace (writes to SQLite)
+ // and for the MultiWorkspace (writes to KVP).
+ let tasks = multi_workspace.update_in(cx, |mw, window, cx| {
+ let session_id = mw.workspace().read(cx).session_id();
+ let window_id_u64 = window.window_handle().window_id().as_u64();
+
+ let mut tasks: Vec<Task<()>> = Vec::new();
+ for workspace in mw.workspaces() {
+ tasks.push(workspace.update(cx, |ws, cx| ws.flush_serialization(window, cx)));
+ if let Some(db_id) = workspace.read(cx).database_id() {
+ let db = WorkspaceDb::global(cx);
+ let session_id = session_id.clone();
+ tasks.push(cx.background_spawn(async move {
+ db.set_session_binding(db_id, session_id, Some(window_id_u64))
+ .await
+ .log_err();
+ }));
+ }
+ }
+ mw.serialize(cx);
+ tasks
+ });
+ cx.run_until_parked();
+ for task in tasks {
+ task.await;
+ }
+ cx.run_until_parked();
+
+ let active_db_id = workspace_worktree.read_with(cx, |ws, _| ws.database_id());
+ assert!(
+ active_db_id.is_some(),
+ "Active workspace should have a database ID"
+ );
+
+ // --- Phase 2: Read back and verify the serialized state ---
+
+ let session_id = multi_workspace
+ .read_with(cx, |mw, cx| mw.workspace().read(cx).session_id())
+ .unwrap();
+ let db = cx.update(|_, cx| WorkspaceDb::global(cx));
+ let session_workspaces = db
+ .last_session_workspace_locations(&session_id, None, fs.as_ref())
+ .await
+ .expect("should load session workspaces");
+ assert!(
+ !session_workspaces.is_empty(),
+ "Should have at least one session workspace"
+ );
+
+ let multi_workspaces =
+ cx.update(|_, cx| read_serialized_multi_workspaces(session_workspaces, cx));
+ assert_eq!(
+ multi_workspaces.len(),
+ 1,
+ "All workspaces share one window, so there should be exactly one multi-workspace"
+ );
+
+ let serialized = &multi_workspaces[0];
+ assert_eq!(
+ serialized.active_workspace.workspace_id,
+ active_db_id.unwrap(),
+ );
+ assert_eq!(serialized.state.project_group_keys.len(), 2);
+
+ // Verify the serialized project group keys round-trip back to the
+ // originals.
+ let restored_keys: Vec<ProjectGroupKey> = serialized
+ .state
+ .project_group_keys
+ .iter()
+ .cloned()
+ .map(Into::into)
+ .collect();
+ let expected_keys = vec![
+ ProjectGroupKey::new(None, PathList::new(&["/other-project"])),
+ ProjectGroupKey::new(None, PathList::new(&["/repo"])),
+ ];
+ assert_eq!(
+ restored_keys, expected_keys,
+ "Deserialized project group keys should match the originals"
+ );
+
+ // --- Phase 3: Restore the window and verify the result ---
+
+ let app_state =
+ multi_workspace.read_with(cx, |mw, cx| mw.workspace().read(cx).app_state().clone());
+
+ let serialized_mw = multi_workspaces.into_iter().next().unwrap();
+ let restored_handle: gpui::WindowHandle<MultiWorkspace> = cx
+ .update(|_, cx| {
+ cx.spawn(async move |mut cx| {
+ crate::restore_multiworkspace(serialized_mw, app_state, &mut cx).await
+ })
+ })
+ .await
+ .expect("restore_multiworkspace should succeed");
+
+ cx.run_until_parked();
+
+ // The restored window should have the same project group keys.
+ let restored_keys: Vec<ProjectGroupKey> = restored_handle
+ .read_with(cx, |mw: &MultiWorkspace, _cx| {
+ mw.project_group_keys().cloned().collect()
+ })
+ .unwrap();
+ assert_eq!(
+ restored_keys, expected_keys,
+ "Restored window should have the same project group keys as the original"
+ );
+
+ // The active workspace in the restored window should have the linked
+ // worktree paths.
+ let active_paths: Vec<PathBuf> = restored_handle
+ .read_with(cx, |mw: &MultiWorkspace, cx| {
+ mw.workspace()
+ .read(cx)
+ .root_paths(cx)
+ .into_iter()
+ .map(|p: Arc<Path>| p.to_path_buf())
+ .collect()
+ })
+ .unwrap();
+ assert_eq!(
+ active_paths,
+ vec![PathBuf::from("/worktree-feature")],
+ "The restored active workspace should be the linked worktree project"
+ );
+ }
}
@@ -77,6 +77,17 @@ impl From<ProjectGroupKey> for SerializedProjectGroupKey {
}
}
+impl From<SerializedProjectGroupKey> for ProjectGroupKey {
+ fn from(value: SerializedProjectGroupKey) -> Self {
+ let path_list = PathList::deserialize(&value.path_list);
+ let host = match value.location {
+ SerializedWorkspaceLocation::Local => None,
+ SerializedWorkspaceLocation::Remote(opts) => Some(opts),
+ };
+ ProjectGroupKey::new(host, path_list)
+ }
+}
+
/// Per-window state for a MultiWorkspace, persisted to KVP.
#[derive(Debug, Clone, Default, serde::Serialize, serde::Deserialize)]
pub struct MultiWorkspaceState {
@@ -88,11 +99,11 @@ pub struct MultiWorkspaceState {
}
/// The serialized state of a single MultiWorkspace window from a previous session:
-/// all workspaces that shared the window, which one was active, and whether the
-/// sidebar was open.
+/// the active workspace to restore plus window-level state (project group keys,
+/// sidebar).
#[derive(Debug, Clone)]
pub struct SerializedMultiWorkspace {
- pub workspaces: Vec<SessionWorkspace>,
+ pub active_workspace: SessionWorkspace,
pub state: MultiWorkspaceState,
}
@@ -84,8 +84,8 @@ use persistence::{SerializedWindowBounds, model::SerializedWorkspace};
pub use persistence::{
WorkspaceDb, delete_unloaded_items,
model::{
- DockStructure, ItemId, SerializedMultiWorkspace, SerializedWorkspaceLocation,
- SessionWorkspace,
+ DockStructure, ItemId, MultiWorkspaceState, SerializedMultiWorkspace,
+ SerializedWorkspaceLocation, SessionWorkspace,
},
read_serialized_multi_workspaces, resolve_worktree_workspaces,
};
@@ -8621,30 +8621,32 @@ pub async fn last_session_workspace_locations(
.log_err()
}
-pub struct MultiWorkspaceRestoreResult {
- pub window_handle: WindowHandle<MultiWorkspace>,
- pub errors: Vec<anyhow::Error>,
-}
-
pub async fn restore_multiworkspace(
multi_workspace: SerializedMultiWorkspace,
app_state: Arc<AppState>,
cx: &mut AsyncApp,
-) -> anyhow::Result<MultiWorkspaceRestoreResult> {
- let SerializedMultiWorkspace { workspaces, state } = multi_workspace;
- let mut group_iter = workspaces.into_iter();
- let first = group_iter
- .next()
- .context("window group must not be empty")?;
-
- let window_handle = if first.paths.is_empty() {
- cx.update(|cx| open_workspace_by_id(first.workspace_id, app_state.clone(), None, cx))
- .await?
+) -> anyhow::Result<WindowHandle<MultiWorkspace>> {
+ let SerializedMultiWorkspace {
+ active_workspace,
+ state,
+ } = multi_workspace;
+ let MultiWorkspaceState {
+ sidebar_open,
+ project_group_keys,
+ sidebar_state,
+ ..
+ } = state;
+
+ let window_handle = if active_workspace.paths.is_empty() {
+ cx.update(|cx| {
+ open_workspace_by_id(active_workspace.workspace_id, app_state.clone(), None, cx)
+ })
+ .await?
} else {
let OpenResult { window, .. } = cx
.update(|cx| {
Workspace::new_local(
- first.paths.paths().to_vec(),
+ active_workspace.paths.paths().to_vec(),
app_state.clone(),
None,
None,
@@ -8657,65 +8659,17 @@ pub async fn restore_multiworkspace(
window
};
- let mut errors = Vec::new();
-
- for session_workspace in group_iter {
- let error = if session_workspace.paths.is_empty() {
- cx.update(|cx| {
- open_workspace_by_id(
- session_workspace.workspace_id,
- app_state.clone(),
- Some(window_handle),
- cx,
- )
- })
- .await
- .err()
- } else {
- cx.update(|cx| {
- Workspace::new_local(
- session_workspace.paths.paths().to_vec(),
- app_state.clone(),
- Some(window_handle),
- None,
- None,
- OpenMode::Add,
- cx,
- )
- })
- .await
- .err()
- };
-
- if let Some(error) = error {
- errors.push(error);
- }
- }
-
- if let Some(target_id) = state.active_workspace_id {
+ if !project_group_keys.is_empty() {
+ let restored_keys: Vec<ProjectGroupKey> =
+ project_group_keys.into_iter().map(Into::into).collect();
window_handle
- .update(cx, |multi_workspace, window, cx| {
- let target_index = multi_workspace
- .workspaces()
- .iter()
- .position(|ws| ws.read(cx).database_id() == Some(target_id));
- let index = target_index.unwrap_or(0);
- if let Some(workspace) = multi_workspace.workspaces().get(index).cloned() {
- multi_workspace.activate(workspace, window, cx);
- }
- })
- .ok();
- } else {
- window_handle
- .update(cx, |multi_workspace, window, cx| {
- if let Some(workspace) = multi_workspace.workspaces().first().cloned() {
- multi_workspace.activate(workspace, window, cx);
- }
+ .update(cx, |multi_workspace, _window, _cx| {
+ multi_workspace.restore_project_group_keys(restored_keys);
})
.ok();
}
- if state.sidebar_open {
+ if sidebar_open {
window_handle
.update(cx, |multi_workspace, _, cx| {
multi_workspace.open_sidebar(cx);
@@ -8723,8 +8677,7 @@ pub async fn restore_multiworkspace(
.ok();
}
- if let Some(sidebar_state) = &state.sidebar_state {
- let sidebar_state = sidebar_state.clone();
+ if let Some(sidebar_state) = sidebar_state {
window_handle
.update(cx, |multi_workspace, window, cx| {
if let Some(sidebar) = multi_workspace.sidebar() {
@@ -8741,10 +8694,7 @@ pub async fn restore_multiworkspace(
})
.ok();
- Ok(MultiWorkspaceRestoreResult {
- window_handle,
- errors,
- })
+ Ok(window_handle)
}
actions!(
@@ -867,9 +867,8 @@ fn main() {
}
match open_rx
- .try_next()
+ .try_recv()
.ok()
- .flatten()
.and_then(|request| OpenRequest::parse(request, cx).log_err())
{
Some(request) => {
@@ -1364,16 +1363,10 @@ pub(crate) async fn restore_or_create_workspace(
let mut tasks = Vec::new();
for multi_workspace in multi_workspaces {
- match restore_multiworkspace(multi_workspace, app_state.clone(), cx).await {
- Ok(result) => {
- for error in result.errors {
- log::error!("Failed to restore workspace in group: {error:#}");
- results.push(Err(error));
- }
- }
- Err(e) => {
- results.push(Err(e));
- }
+ if let Err(error) = restore_multiworkspace(multi_workspace, app_state.clone(), cx).await
+ {
+ log::error!("Failed to restore workspace: {error:#}");
+ results.push(Err(error));
}
}
@@ -2071,7 +2071,7 @@ fn run_agent_thread_view_test(
let mut tool_content: Vec<acp::ToolCallContent> = Vec::new();
let mut tool_locations: Vec<acp::ToolCallLocation> = Vec::new();
- while let Ok(Some(event)) = event_receiver.try_next() {
+ while let Ok(event) = event_receiver.try_recv() {
if let Ok(agent::ThreadEvent::ToolCallUpdate(acp_thread::ToolCallUpdate::UpdateFields(
update,
))) = event
@@ -5957,7 +5957,9 @@ mod tests {
#[gpui::test]
async fn test_multi_workspace_session_restore(cx: &mut TestAppContext) {
use collections::HashMap;
+ use project::ProjectGroupKey;
use session::Session;
+ use util::path_list::PathList;
use workspace::{OpenMode, Workspace, WorkspaceId};
let app_state = init_test(cx);
@@ -6117,94 +6119,50 @@ mod tests {
.filter_map(|window| window.downcast::<MultiWorkspace>())
.collect()
});
+ assert_eq!(restored_windows.len(), 2);
+
+ // Identify restored windows by their active workspace root paths.
+ let (restored_a, restored_b) = {
+ let (mut with_dir1, mut with_dir3) = (None, None);
+ for window in &restored_windows {
+ let active_paths = window
+ .read_with(cx, |mw, cx| mw.workspace().read(cx).root_paths(cx))
+ .unwrap();
+ if active_paths.iter().any(|p| p.as_ref() == Path::new(dir1)) {
+ with_dir1 = Some(window);
+ } else {
+ with_dir3 = Some(window);
+ }
+ }
+ (
+ with_dir1.expect("expected a window with dir1 active"),
+ with_dir3.expect("expected a window with dir3 active"),
+ )
+ };
- assert_eq!(
- restored_windows.len(),
- 2,
- "expected 2 restored windows, got {}",
- restored_windows.len()
- );
-
- let workspace_counts: Vec<usize> = restored_windows
- .iter()
- .map(|window| {
- window
- .read_with(cx, |multi_workspace, _| multi_workspace.workspaces().len())
- .unwrap()
- })
- .collect();
- let mut sorted_counts = workspace_counts.clone();
- sorted_counts.sort();
- assert_eq!(
- sorted_counts,
- vec![1, 2],
- "expected one window with 1 workspace and one with 2, got {workspace_counts:?}"
- );
-
- let dir1_path: Arc<Path> = Path::new(dir1).into();
- let dir2_path: Arc<Path> = Path::new(dir2).into();
- let dir3_path: Arc<Path> = Path::new(dir3).into();
-
- let all_restored_paths: Vec<Vec<Vec<Arc<Path>>>> = restored_windows
- .iter()
- .map(|window| {
- window
- .read_with(cx, |multi_workspace, cx| {
- multi_workspace
- .workspaces()
- .iter()
- .map(|ws| ws.read(cx).root_paths(cx))
- .collect()
- })
- .unwrap()
+ // Window A (dir1+dir2): 1 workspace restored, but 2 project group keys.
+ restored_a
+ .read_with(cx, |mw, _| {
+ assert_eq!(
+ mw.project_group_keys().cloned().collect::<Vec<_>>(),
+ vec![
+ ProjectGroupKey::new(None, PathList::new(&[dir1])),
+ ProjectGroupKey::new(None, PathList::new(&[dir2])),
+ ]
+ );
+ assert_eq!(mw.workspaces().len(), 1);
})
- .collect();
-
- let two_ws_window = all_restored_paths
- .iter()
- .find(|paths| paths.len() == 2)
- .expect("expected a window with 2 workspaces");
- assert!(
- two_ws_window.iter().any(|p| p.contains(&dir1_path)),
- "2-workspace window should contain dir1, got {two_ws_window:?}"
- );
- assert!(
- two_ws_window.iter().any(|p| p.contains(&dir2_path)),
- "2-workspace window should contain dir2, got {two_ws_window:?}"
- );
-
- let one_ws_window = all_restored_paths
- .iter()
- .find(|paths| paths.len() == 1)
- .expect("expected a window with 1 workspace");
- assert!(
- one_ws_window[0].contains(&dir3_path),
- "1-workspace window should contain dir3, got {one_ws_window:?}"
- );
-
- // --- Verify the active workspace is preserved ---
- for window in &restored_windows {
- let (active_paths, workspace_count) = window
- .read_with(cx, |multi_workspace, cx| {
- let active = multi_workspace.workspace();
- (
- active.read(cx).root_paths(cx),
- multi_workspace.workspaces().len(),
- )
- })
- .unwrap();
+ .unwrap();
- if workspace_count == 2 {
- assert!(
- active_paths.contains(&dir1_path),
- "2-workspace window should have dir1 active, got {active_paths:?}"
- );
- } else {
- assert!(
- active_paths.contains(&dir3_path),
- "1-workspace window should have dir3 active, got {active_paths:?}"
+ // Window B (dir3): 1 workspace, 1 project group key.
+ restored_b
+ .read_with(cx, |mw, _| {
+ assert_eq!(
+ mw.project_group_keys().cloned().collect::<Vec<_>>(),
+ vec![ProjectGroupKey::new(None, PathList::new(&[dir3]))]
);
- }
- }
+ assert_eq!(mw.workspaces().len(), 1);
+ })
+ .unwrap();
}
}
@@ -1,6 +1,6 @@
[package]
name = "zed_glsl"
-version = "0.2.2"
+version = "0.2.3"
edition.workspace = true
publish.workspace = true
license = "Apache-2.0"
@@ -1,7 +1,7 @@
id = "glsl"
name = "GLSL"
description = "GLSL support."
-version = "0.2.2"
+version = "0.2.3"
schema_version = 1
authors = ["Mikayla Maki <mikayla@zed.dev>"]
repository = "https://github.com/zed-industries/zed"
@@ -0,0 +1,2 @@
+((comment) @injection.content
+ (#set! injection.language "comment"))
@@ -1,6 +1,6 @@
[package]
name = "zed_proto"
-version = "0.3.1"
+version = "0.3.2"
edition.workspace = true
publish.workspace = true
license = "Apache-2.0"
@@ -1,7 +1,7 @@
id = "proto"
name = "Proto"
description = "Protocol Buffers support."
-version = "0.3.1"
+version = "0.3.2"
schema_version = 1
authors = ["Zed Industries <support@zed.dev>"]
repository = "https://github.com/zed-industries/zed"
@@ -0,0 +1,2 @@
+((comment) @injection.content
+ (#set! injection.language "comment"))