From 4fd2baf92f47cc0cdfb6398fcd8442d52e8e915b Mon Sep 17 00:00:00 2001 From: Sean Hagstrom Date: Thu, 9 Apr 2026 02:10:39 -0700 Subject: [PATCH 01/67] editor: Add Ctrl+scroll wheel zoom for buffer font size (#53452) Closes https://github.com/zed-industries/zed/pull/53452 Release Notes: - Add event handling on editor to increase/decrease font-size when using the scroll-wheel and holding the secondary modifier (Ctrl on Linux/Windows, and Cmd on macOS) Screen Capture: https://github.com/user-attachments/assets/bf298be4-e2c9-470c-afef-b7e79c2d3ae6 --------- Co-authored-by: Dmitry Soluyanov --- crates/editor/src/element.rs | 120 +++++++++++++++++++++-------------- crates/zed/src/zed.rs | 97 +++++++++++++++++++++++++++- 2 files changed, 168 insertions(+), 49 deletions(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 3e41aaceb6955653bfedfd5bb8464ff6b4b66353..12a79fa6695ddff8371fa1b648056f43bec0cb98 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -7672,59 +7672,85 @@ impl EditorElement { .max(0.01); move |event: &ScrollWheelEvent, phase, window, cx| { - let scroll_sensitivity = { - if event.modifiers.alt { - fast_scroll_sensitivity - } else { - base_scroll_sensitivity - } - }; - if phase == DispatchPhase::Bubble && hitbox.should_handle_scroll(window) { - delta = delta.coalesce(event.delta); - editor.update(cx, |editor, cx| { - let position_map: &PositionMap = &position_map; - - let line_height = position_map.line_height; - let glyph_width = position_map.em_layout_width; - let (delta, axis) = match delta { - gpui::ScrollDelta::Pixels(mut pixels) => { - //Trackpad - let axis = position_map.snapshot.ongoing_scroll.filter(&mut pixels); - (pixels, axis) - } + if event.modifiers.secondary() { + let delta_y = match event.delta { + ScrollDelta::Pixels(pixels) => pixels.y.into(), + ScrollDelta::Lines(lines) => lines.y, + }; + + if delta_y > 0.0 { + window.dispatch_action( + Box::new(zed_actions::IncreaseBufferFontSize 
{ persist: false }), + cx, + ); + } else if delta_y < 0.0 { + window.dispatch_action( + Box::new(zed_actions::DecreaseBufferFontSize { persist: false }), + cx, + ); + } - gpui::ScrollDelta::Lines(lines) => { - //Not trackpad - let pixels = point(lines.x * glyph_width, lines.y * line_height); - (pixels, None) + cx.stop_propagation(); + } else { + let scroll_sensitivity = { + if event.modifiers.alt { + fast_scroll_sensitivity + } else { + base_scroll_sensitivity } }; - let current_scroll_position = position_map.snapshot.scroll_position(); - let x = (current_scroll_position.x * ScrollPixelOffset::from(glyph_width) - - ScrollPixelOffset::from(delta.x * scroll_sensitivity)) - / ScrollPixelOffset::from(glyph_width); - let y = (current_scroll_position.y * ScrollPixelOffset::from(line_height) - - ScrollPixelOffset::from(delta.y * scroll_sensitivity)) - / ScrollPixelOffset::from(line_height); - let mut scroll_position = - point(x, y).clamp(&point(0., 0.), &position_map.scroll_max); - let forbid_vertical_scroll = editor.scroll_manager.forbid_vertical_scroll(); - if forbid_vertical_scroll { - scroll_position.y = current_scroll_position.y; - } + delta = delta.coalesce(event.delta); + editor.update(cx, |editor, cx| { + let position_map: &PositionMap = &position_map; + + let line_height = position_map.line_height; + let glyph_width = position_map.em_layout_width; + let (delta, axis) = match delta { + gpui::ScrollDelta::Pixels(mut pixels) => { + //Trackpad + let axis = + position_map.snapshot.ongoing_scroll.filter(&mut pixels); + (pixels, axis) + } - if scroll_position != current_scroll_position { - editor.scroll(scroll_position, axis, window, cx); - cx.stop_propagation(); - } else if y < 0. { - // Due to clamping, we may fail to detect cases of overscroll to the top; - // We want the scroll manager to get an update in such cases and detect the change of direction - // on the next frame. 
- cx.notify(); - } - }); + gpui::ScrollDelta::Lines(lines) => { + //Not trackpad + let pixels = + point(lines.x * glyph_width, lines.y * line_height); + (pixels, None) + } + }; + + let current_scroll_position = position_map.snapshot.scroll_position(); + let x = (current_scroll_position.x + * ScrollPixelOffset::from(glyph_width) + - ScrollPixelOffset::from(delta.x * scroll_sensitivity)) + / ScrollPixelOffset::from(glyph_width); + let y = (current_scroll_position.y + * ScrollPixelOffset::from(line_height) + - ScrollPixelOffset::from(delta.y * scroll_sensitivity)) + / ScrollPixelOffset::from(line_height); + let mut scroll_position = + point(x, y).clamp(&point(0., 0.), &position_map.scroll_max); + let forbid_vertical_scroll = + editor.scroll_manager.forbid_vertical_scroll(); + if forbid_vertical_scroll { + scroll_position.y = current_scroll_position.y; + } + + if scroll_position != current_scroll_position { + editor.scroll(scroll_position, axis, window, cx); + cx.stop_propagation(); + } else if y < 0. { + // Due to clamping, we may fail to detect cases of overscroll to the top; + // We want the scroll manager to get an update in such cases and detect the change of direction + // on the next frame. 
+ cx.notify(); + } + }); + } } } }); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 6dbe602f082c436e9055bfc8949526f0bb8f37c9..3d4ada8a1b90020090eb74a8a6ea752fa7a44ab3 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -2406,8 +2406,8 @@ mod tests { DisplayPoint, Editor, MultiBufferOffset, SelectionEffects, display_map::DisplayRow, }; use gpui::{ - Action, AnyWindowHandle, App, AssetSource, BorrowAppContext, TestAppContext, UpdateGlobal, - VisualTestContext, WindowHandle, actions, + Action, AnyWindowHandle, App, AssetSource, BorrowAppContext, Modifiers, TestAppContext, + UpdateGlobal, VisualTestContext, WindowHandle, actions, point, px, }; use language::LanguageRegistry; use languages::{markdown_lang, rust_lang}; @@ -4089,6 +4089,99 @@ mod tests { buffer.assert_released(); } + #[gpui::test] + async fn test_editor_zoom_with_scroll_wheel(cx: &mut TestAppContext) { + let app_state = init_test(cx); + app_state + .fs + .as_fake() + .insert_tree(path!("/root"), json!({ "file.txt": "hello\nworld\n" })) + .await; + + let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; + let window = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = window + .read_with(cx, |mw, _| mw.workspace().clone()) + .unwrap(); + let cx = &mut VisualTestContext::from_window(*window, cx); + + let mouse_position = point(px(250.), px(250.)); + + let event_modifiers = { + #[cfg(target_os = "macos")] + { + Modifiers { + platform: true, + ..Modifiers::default() + } + } + + #[cfg(not(target_os = "macos"))] + { + Modifiers { + control: true, + ..Modifiers::default() + } + } + }; + + workspace + .update_in(cx, |workspace, window, cx| { + workspace.open_abs_path( + PathBuf::from(path!("/root/file.txt")), + OpenOptions::default(), + window, + cx, + ) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + cx.update(|window, cx| { + window.draw(cx).clear(); + }); + + let initial_font_size = 
+ cx.update(|_, cx| ThemeSettings::get_global(cx).buffer_font_size(cx).as_f32()); + + cx.simulate_event(gpui::ScrollWheelEvent { + position: mouse_position, + delta: gpui::ScrollDelta::Pixels(point(px(0.), px(1.))), + modifiers: event_modifiers, + ..Default::default() + }); + + let increased_font_size = + cx.update(|_, cx| ThemeSettings::get_global(cx).buffer_font_size(cx).as_f32()); + + assert!( + increased_font_size > initial_font_size, + "Editor buffer font-size should have increased from scroll-zoom" + ); + + cx.update(|window, cx| { + window.draw(cx).clear(); + }); + + cx.simulate_event(gpui::ScrollWheelEvent { + position: mouse_position, + delta: gpui::ScrollDelta::Pixels(point(px(0.), px(-1.))), + modifiers: event_modifiers, + ..Default::default() + }); + + let decreased_font_size = + cx.update(|_, cx| ThemeSettings::get_global(cx).buffer_font_size(cx).as_f32()); + + assert!( + decreased_font_size < increased_font_size, + "Editor buffer font-size should have decreased from scroll-zoom" + ); + } + #[gpui::test] async fn test_navigation(cx: &mut TestAppContext) { let app_state = init_test(cx); From 50856c9e74426fb85de356f3d447cad8dca7e579 Mon Sep 17 00:00:00 2001 From: Oleksiy Syvokon Date: Thu, 9 Apr 2026 13:48:04 +0300 Subject: [PATCH 02/67] ep: Make .prompt.expected_output optional (#53505) Release Notes: - N/A --- crates/edit_prediction_cli/src/example.rs | 3 ++- crates/edit_prediction_cli/src/format_prompt.rs | 7 +++---- crates/edit_prediction_cli/src/predict.rs | 2 +- crates/edit_prediction_cli/src/pull_examples.rs | 2 +- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/crates/edit_prediction_cli/src/example.rs b/crates/edit_prediction_cli/src/example.rs index 682671141d050836d25705b2732f11500f159209..3795a375d380e12557f1989a2b81dc77e1826c03 100644 --- a/crates/edit_prediction_cli/src/example.rs +++ b/crates/edit_prediction_cli/src/example.rs @@ -65,7 +65,8 @@ pub struct ExampleState { #[derive(Clone, Debug, Serialize, Deserialize)] pub 
struct ExamplePrompt { pub input: String, - pub expected_output: String, + #[serde(default)] + pub expected_output: Option, pub rejected_output: Option, // For DPO #[serde(default)] pub prefill: Option, diff --git a/crates/edit_prediction_cli/src/format_prompt.rs b/crates/edit_prediction_cli/src/format_prompt.rs index 24a6f1acd470fb8ee77e87d993079298f45b390c..ae0d60ecb508f3a7ab46daf0b35a8d741e39d5dc 100644 --- a/crates/edit_prediction_cli/src/format_prompt.rs +++ b/crates/edit_prediction_cli/src/format_prompt.rs @@ -43,7 +43,7 @@ pub async fn run_format_prompt( let prompt = TeacherPrompt::format_prompt(example, editable_range, context_range); example.prompt = Some(ExamplePrompt { input: prompt, - expected_output: String::new(), + expected_output: None, rejected_output: None, prefill: None, provider: args.provider, @@ -61,7 +61,7 @@ pub async fn run_format_prompt( TeacherMultiRegionPrompt::format_prompt(example, editable_range, context_range); example.prompt = Some(ExamplePrompt { input: prompt, - expected_output: String::new(), + expected_output: None, rejected_output: None, prefill: None, provider: args.provider, @@ -85,8 +85,7 @@ pub async fn run_format_prompt( zeta_format, ) .ok() - }) - .unwrap_or_default(); + }); let rejected_output = example.spec.rejected_patch.as_ref().and_then(|patch| { zeta2_output_for_patch(prompt_inputs, patch, None, zeta_format).ok() diff --git a/crates/edit_prediction_cli/src/predict.rs b/crates/edit_prediction_cli/src/predict.rs index f2a55455b36326b58daa0adada7ec39124ffc317..99d90f0f4e524256ee3e7ec8f1bfdd6af34c566b 100644 --- a/crates/edit_prediction_cli/src/predict.rs +++ b/crates/edit_prediction_cli/src/predict.rs @@ -195,7 +195,7 @@ pub async fn run_prediction( if matches!(provider, PredictionProvider::Zeta2(_)) { updated_example.prompt.get_or_insert(ExamplePrompt { input: prompt, - expected_output: String::new(), + expected_output: None, rejected_output: None, provider, prefill: None, diff --git 
a/crates/edit_prediction_cli/src/pull_examples.rs b/crates/edit_prediction_cli/src/pull_examples.rs index 9ea8ac3bda1fa17295dab29bb3d5c78eaa54d765..f1687f6be3d7420ca2af9e064b63aaeb2504af2f 100644 --- a/crates/edit_prediction_cli/src/pull_examples.rs +++ b/crates/edit_prediction_cli/src/pull_examples.rs @@ -1674,7 +1674,7 @@ fn build_rejected_example( example.spec.rejected_patch = Some(rejected_patch); example.prompt = prompt.map(|prompt| ExamplePrompt { input: prompt, - expected_output: String::new(), + expected_output: None, rejected_output: Some(output), prefill: None, provider: PredictionProvider::default(), From 72eb8425405911b902a03df4bab6f4c8cd94912a Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 9 Apr 2026 12:28:43 +0100 Subject: [PATCH 03/67] gpui: Throttle framerate to 30 for unfocused windows (#52970) This should reduce energy consumption when having agents running in background windows, as the spinner that is being animated won't refresh at the display framerate anymore. Release Notes: - N/A or Added/Fixed/Improved ... 
--- crates/gpui/src/window.rs | 25 +++++++++++++++++++------ 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 5d39998700ff41433b3f8d2281d3f00892165794..5778d6ac7372f4b13f14d4fa7d0ebca54a03fd1d 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -1211,18 +1211,31 @@ impl Window { .update(&mut cx, |_, _, cx| cx.thermal_state()) .log_err(); - if thermal_state == Some(ThermalState::Serious) - || thermal_state == Some(ThermalState::Critical) + // Throttle frame rate based on conditions: + // - Thermal pressure (Serious/Critical): cap to ~60fps + // - Inactive window (not focused): cap to ~30fps to save energy + let min_frame_interval = if !request_frame_options.force_render + && !request_frame_options.require_presentation + && next_frame_callbacks.borrow().is_empty() { - let now = Instant::now(); - let last_frame_time = last_frame_time.replace(Some(now)); + None + } else if !active.get() { + Some(Duration::from_micros(33333)) + } else if let Some(ThermalState::Critical | ThermalState::Serious) = thermal_state { + Some(Duration::from_micros(16667)) + } else { + None + }; - if let Some(last_frame) = last_frame_time - && now.duration_since(last_frame) < Duration::from_micros(16667) + let now = Instant::now(); + if let Some(min_interval) = min_frame_interval { + if let Some(last_frame) = last_frame_time.get() + && now.duration_since(last_frame) < min_interval { return; } } + last_frame_time.set(Some(now)); let next_frame_callbacks = next_frame_callbacks.take(); if !next_frame_callbacks.is_empty() { From 1391918bc705c7b67816c86b612d519f7b535529 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 9 Apr 2026 13:32:27 +0100 Subject: [PATCH 04/67] Remove storybook and story crates (#53511) Remove the standalone storybook binary and the story crate, as component previews are now handled by the component_preview crate. Also removes the stories features from the ui and title_bar crates. 
Release Notes: - N/A or Added/Fixed/Improved ... --- Cargo.lock | 83 ------- Cargo.toml | 4 - crates/story/Cargo.toml | 17 -- crates/story/LICENSE-GPL | 1 - crates/story/src/story.rs | 209 ------------------ crates/storybook/Cargo.toml | 41 ---- crates/storybook/LICENSE-GPL | 1 - crates/storybook/build.rs | 9 - crates/storybook/docs/thoughts.md | 57 ----- crates/storybook/src/actions.rs | 2 - crates/storybook/src/app_menus.rs | 7 - crates/storybook/src/assets.rs | 32 --- crates/storybook/src/stories.rs | 23 -- .../src/stories/auto_height_editor.rs | 36 --- crates/storybook/src/stories/cursor.rs | 109 --------- crates/storybook/src/stories/focus.rs | 123 ----------- crates/storybook/src/stories/indent_guides.rs | 82 ------- crates/storybook/src/stories/kitchen_sink.rs | 32 --- .../storybook/src/stories/overflow_scroll.rs | 41 ---- crates/storybook/src/stories/picker.rs | 206 ----------------- crates/storybook/src/stories/scroll.rs | 52 ----- crates/storybook/src/stories/text.rs | 120 ---------- .../storybook/src/stories/viewport_units.rs | 32 --- crates/storybook/src/stories/with_rem_size.rs | 61 ----- crates/storybook/src/story_selector.rs | 109 --------- crates/storybook/src/storybook.rs | 162 -------------- crates/theme/Cargo.toml | 1 - crates/theme/src/registry.rs | 17 +- crates/theme/src/theme.rs | 27 ++- crates/title_bar/Cargo.toml | 3 +- .../title_bar/src/stories/application_menu.rs | 29 --- crates/title_bar/src/title_bar.rs | 6 - crates/ui/Cargo.toml | 2 - crates/ui/src/components.rs | 6 - .../ui/src/components/stories/context_menu.rs | 81 ------- 35 files changed, 40 insertions(+), 1783 deletions(-) delete mode 100644 crates/story/Cargo.toml delete mode 120000 crates/story/LICENSE-GPL delete mode 100644 crates/story/src/story.rs delete mode 100644 crates/storybook/Cargo.toml delete mode 120000 crates/storybook/LICENSE-GPL delete mode 100644 crates/storybook/build.rs delete mode 100644 crates/storybook/docs/thoughts.md delete mode 100644 
crates/storybook/src/actions.rs delete mode 100644 crates/storybook/src/app_menus.rs delete mode 100644 crates/storybook/src/assets.rs delete mode 100644 crates/storybook/src/stories.rs delete mode 100644 crates/storybook/src/stories/auto_height_editor.rs delete mode 100644 crates/storybook/src/stories/cursor.rs delete mode 100644 crates/storybook/src/stories/focus.rs delete mode 100644 crates/storybook/src/stories/indent_guides.rs delete mode 100644 crates/storybook/src/stories/kitchen_sink.rs delete mode 100644 crates/storybook/src/stories/overflow_scroll.rs delete mode 100644 crates/storybook/src/stories/picker.rs delete mode 100644 crates/storybook/src/stories/scroll.rs delete mode 100644 crates/storybook/src/stories/text.rs delete mode 100644 crates/storybook/src/stories/viewport_units.rs delete mode 100644 crates/storybook/src/stories/with_rem_size.rs delete mode 100644 crates/storybook/src/story_selector.rs delete mode 100644 crates/storybook/src/storybook.rs delete mode 100644 crates/title_bar/src/stories/application_menu.rs delete mode 100644 crates/ui/src/components/stories/context_menu.rs diff --git a/Cargo.lock b/Cargo.lock index 8c0f51e2893f53c3e6ef78dbd5ef711f8d9dc95c..85d3f9ac3a68aa8a420910dfa61102f03c9812ef 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3418,19 +3418,6 @@ dependencies = [ "crossbeam-utils", ] -[[package]] -name = "console" -version = "0.15.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8" -dependencies = [ - "encode_unicode", - "libc", - "once_cell", - "unicode-width", - "windows-sys 0.59.0", -] - [[package]] name = "console_error_panic_hook" version = "0.1.7" @@ -4851,20 +4838,6 @@ dependencies = [ "zlog", ] -[[package]] -name = "dialoguer" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "658bce805d770f407bc62102fca7c2c64ceef2fbcb2b8bd19d2765ce093980de" -dependencies = [ - "console", - 
"fuzzy-matcher", - "shell-words", - "tempfile", - "thiserror 1.0.69", - "zeroize", -] - [[package]] name = "diff" version = "0.1.13" @@ -5546,12 +5519,6 @@ dependencies = [ "phf 0.11.3", ] -[[package]] -name = "encode_unicode" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" - [[package]] name = "encoding_rs" version = "0.8.35" @@ -6747,15 +6714,6 @@ dependencies = [ "util", ] -[[package]] -name = "fuzzy-matcher" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54614a3312934d066701a80f20f15fa3b56d67ac7722b39eea5b4c9dd1d66c94" -dependencies = [ - "thread_local", -] - [[package]] name = "fuzzy_nucleo" version = "0.1.0" @@ -16745,44 +16703,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" -[[package]] -name = "story" -version = "0.1.0" -dependencies = [ - "gpui", - "itertools 0.14.0", - "smallvec", -] - -[[package]] -name = "storybook" -version = "0.1.0" -dependencies = [ - "anyhow", - "clap", - "ctrlc", - "dialoguer", - "editor", - "fuzzy", - "gpui", - "gpui_platform", - "indoc", - "language", - "log", - "menu", - "picker", - "reqwest_client", - "rust-embed", - "settings", - "simplelog", - "story", - "strum 0.27.2", - "theme", - "theme_settings", - "title_bar", - "ui", -] - [[package]] name = "streaming-iterator" version = "0.1.9" @@ -17704,7 +17624,6 @@ version = "0.1.0" dependencies = [ "anyhow", "collections", - "derive_more", "gpui", "palette", "parking_lot", @@ -18027,7 +17946,6 @@ dependencies = [ "serde", "settings", "smallvec", - "story", "telemetry", "theme", "ui", @@ -18926,7 +18844,6 @@ dependencies = [ "schemars", "serde", "smallvec", - "story", "strum 0.27.2", "theme", "ui_macros", diff --git a/Cargo.toml b/Cargo.toml index 
59f8e265694f060c9f65b30143b79e324de1f08c..b9e99a5a87020a7eda8c8a2983bcf7b07fabc82c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -183,8 +183,6 @@ members = [ "crates/snippets_ui", "crates/sqlez", "crates/sqlez_macros", - "crates/story", - "crates/storybook", "crates/streaming_diff", "crates/sum_tree", "crates/svg_preview", @@ -437,7 +435,6 @@ snippet_provider = { path = "crates/snippet_provider" } snippets_ui = { path = "crates/snippets_ui" } sqlez = { path = "crates/sqlez" } sqlez_macros = { path = "crates/sqlez_macros" } -story = { path = "crates/story" } streaming_diff = { path = "crates/streaming_diff" } sum_tree = { path = "crates/sum_tree" } codestral = { path = "crates/codestral" } @@ -935,7 +932,6 @@ session = { codegen-units = 1 } sidebar = { codegen-units = 1 } snippet = { codegen-units = 1 } snippets_ui = { codegen-units = 1 } -story = { codegen-units = 1 } telemetry_events = { codegen-units = 1 } theme_selector = { codegen-units = 1 } time_format = { codegen-units = 1 } diff --git a/crates/story/Cargo.toml b/crates/story/Cargo.toml deleted file mode 100644 index 798461402de00c102af9325c091eb9edfdf89b09..0000000000000000000000000000000000000000 --- a/crates/story/Cargo.toml +++ /dev/null @@ -1,17 +0,0 @@ -[package] -name = "story" -version = "0.1.0" -edition.workspace = true -publish.workspace = true -license = "GPL-3.0-or-later" - -[lib] -path = "src/story.rs" - -[lints] -workspace = true - -[dependencies] -gpui.workspace = true -itertools.workspace = true -smallvec.workspace = true diff --git a/crates/story/LICENSE-GPL b/crates/story/LICENSE-GPL deleted file mode 120000 index 89e542f750cd3860a0598eff0dc34b56d7336dc4..0000000000000000000000000000000000000000 --- a/crates/story/LICENSE-GPL +++ /dev/null @@ -1 +0,0 @@ -../../LICENSE-GPL \ No newline at end of file diff --git a/crates/story/src/story.rs b/crates/story/src/story.rs deleted file mode 100644 index b59cb6fb99086de7eb22ab2645dc01dbe15fb959..0000000000000000000000000000000000000000 --- 
a/crates/story/src/story.rs +++ /dev/null @@ -1,209 +0,0 @@ -use gpui::{ - AnyElement, App, Div, SharedString, Window, colors::DefaultColors, div, prelude::*, px, rems, -}; -use itertools::Itertools; -use smallvec::SmallVec; - -pub struct Story {} - -impl Story { - pub fn container(cx: &App) -> gpui::Stateful
{ - div() - .id("story_container") - .overflow_y_scroll() - .w_full() - .min_h_full() - .flex() - .flex_col() - .text_color(cx.default_colors().text) - .bg(cx.default_colors().background) - } - - pub fn title(title: impl Into, cx: &App) -> impl Element { - div() - .text_xs() - .text_color(cx.default_colors().text) - .child(title.into()) - } - - pub fn title_for(cx: &App) -> impl Element { - Self::title(std::any::type_name::(), cx) - } - - pub fn section(cx: &App) -> Div { - div() - .p_4() - .m_4() - .border_1() - .border_color(cx.default_colors().separator) - } - - pub fn section_title(cx: &App) -> Div { - div().text_lg().text_color(cx.default_colors().text) - } - - pub fn group(cx: &App) -> Div { - div().my_2().bg(cx.default_colors().container) - } - - pub fn code_block(code: impl Into, cx: &App) -> Div { - div() - .size_full() - .p_2() - .max_w(rems(36.)) - .bg(cx.default_colors().container) - .rounded_sm() - .text_sm() - .text_color(cx.default_colors().text) - .overflow_hidden() - .child(code.into()) - } - - pub fn divider(cx: &App) -> Div { - div().my_2().h(px(1.)).bg(cx.default_colors().separator) - } - - pub fn description(description: impl Into, cx: &App) -> impl Element { - div() - .text_sm() - .text_color(cx.default_colors().text) - .min_w_96() - .child(description.into()) - } - - pub fn label(label: impl Into, cx: &App) -> impl Element { - div() - .text_xs() - .text_color(cx.default_colors().text) - .child(label.into()) - } - - /// Note: Not `ui::v_flex` as the `story` crate doesn't depend on the `ui` crate. 
- pub fn v_flex() -> Div { - div().flex().flex_col().gap_1() - } -} - -#[derive(IntoElement)] -pub struct StoryItem { - label: SharedString, - item: AnyElement, - description: Option, - usage: Option, -} - -impl StoryItem { - pub fn new(label: impl Into, item: impl IntoElement) -> Self { - Self { - label: label.into(), - item: item.into_any_element(), - description: None, - usage: None, - } - } - - pub fn description(mut self, description: impl Into) -> Self { - self.description = Some(description.into()); - self - } - - pub fn usage(mut self, code: impl Into) -> Self { - self.usage = Some(code.into()); - self - } -} - -impl RenderOnce for StoryItem { - fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { - let colors = cx.default_colors(); - - div() - .my_2() - .flex() - .gap_4() - .w_full() - .child( - Story::v_flex() - .px_2() - .w_1_2() - .min_h_px() - .child(Story::label(self.label, cx)) - .child( - div() - .rounded_sm() - .bg(colors.background) - .border_1() - .border_color(colors.border) - .py_1() - .px_2() - .overflow_hidden() - .child(self.item), - ) - .when_some(self.description, |this, description| { - this.child(Story::description(description, cx)) - }), - ) - .child( - Story::v_flex() - .px_2() - .flex_none() - .w_1_2() - .min_h_px() - .when_some(self.usage, |this, usage| { - this.child(Story::label("Example Usage", cx)) - .child(Story::code_block(usage, cx)) - }), - ) - } -} - -#[derive(IntoElement)] -pub struct StorySection { - description: Option, - children: SmallVec<[AnyElement; 2]>, -} - -impl Default for StorySection { - fn default() -> Self { - Self::new() - } -} - -impl StorySection { - pub fn new() -> Self { - Self { - description: None, - children: SmallVec::new(), - } - } - - pub fn description(mut self, description: impl Into) -> Self { - self.description = Some(description.into()); - self - } -} - -impl RenderOnce for StorySection { - fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { - let 
children: SmallVec<[AnyElement; 2]> = SmallVec::from_iter(Itertools::intersperse_with( - self.children.into_iter(), - || Story::divider(cx).into_any_element(), - )); - - Story::section(cx) - // Section title - .py_2() - // Section description - .when_some(self.description, |section, description| { - section.child(Story::description(description, cx)) - }) - .child(div().flex().flex_col().gap_2().children(children)) - .child(Story::divider(cx)) - } -} - -impl ParentElement for StorySection { - fn extend(&mut self, elements: impl IntoIterator) { - self.children.extend(elements) - } -} diff --git a/crates/storybook/Cargo.toml b/crates/storybook/Cargo.toml deleted file mode 100644 index b641e5cbd8b5ce5e66f9fb082e74ea42124f8993..0000000000000000000000000000000000000000 --- a/crates/storybook/Cargo.toml +++ /dev/null @@ -1,41 +0,0 @@ -[package] -name = "storybook" -version = "0.1.0" -edition.workspace = true -publish.workspace = true -license = "GPL-3.0-or-later" - -[lints] -workspace = true - -[[bin]] -name = "storybook" -path = "src/storybook.rs" - -[dependencies] -anyhow.workspace = true -clap = { workspace = true, features = ["derive", "string"] } -ctrlc = "3.4" -dialoguer = { version = "0.11.0", features = ["fuzzy-select"] } -editor.workspace = true -fuzzy.workspace = true -gpui = { workspace = true, default-features = true } -gpui_platform.workspace = true -indoc.workspace = true -language.workspace = true -log.workspace = true -menu.workspace = true -picker.workspace = true -reqwest_client.workspace = true -rust-embed.workspace = true -settings.workspace = true -theme_settings.workspace = true -simplelog.workspace = true -story.workspace = true -strum = { workspace = true, features = ["derive"] } -theme.workspace = true -title_bar = { workspace = true, features = ["stories"] } -ui = { workspace = true, features = ["stories"] } - -[dev-dependencies] -gpui = { workspace = true, features = ["test-support"] } diff --git a/crates/storybook/LICENSE-GPL 
b/crates/storybook/LICENSE-GPL deleted file mode 120000 index 89e542f750cd3860a0598eff0dc34b56d7336dc4..0000000000000000000000000000000000000000 --- a/crates/storybook/LICENSE-GPL +++ /dev/null @@ -1 +0,0 @@ -../../LICENSE-GPL \ No newline at end of file diff --git a/crates/storybook/build.rs b/crates/storybook/build.rs deleted file mode 100644 index 66791cae4218e34d6d1fa5e156fc900eb6cf8c59..0000000000000000000000000000000000000000 --- a/crates/storybook/build.rs +++ /dev/null @@ -1,9 +0,0 @@ -fn main() { - #[cfg(target_os = "windows")] - { - #[cfg(target_env = "msvc")] - { - println!("cargo:rustc-link-arg=/stack:{}", 8 * 1024 * 1024); - } - } -} diff --git a/crates/storybook/docs/thoughts.md b/crates/storybook/docs/thoughts.md deleted file mode 100644 index cdeef621f362f4ff0deff7551af894b314ed96d4..0000000000000000000000000000000000000000 --- a/crates/storybook/docs/thoughts.md +++ /dev/null @@ -1,57 +0,0 @@ -Much of element styling is now handled by an external engine. - -How do I make an element hover. - -There's a hover style. - -Hoverable needs to wrap another element. That element can be styled. 
- -```rs -struct Hoverable { - -} - -impl Element for Hoverable { - -} -``` - -```rs -#[derive(Styled, Interactive)] -pub struct Div { - declared_style: StyleRefinement, - interactions: Interactions -} - -pub trait Styled { - fn declared_style(&mut self) -> &mut StyleRefinement; - fn compute_style(&mut self) -> Style { - Style::default().refine(self.declared_style()) - } - - // All the tailwind classes, modifying self.declared_style() -} - -impl Style { - pub fn paint_background(layout: Layout, cx: &mut PaintContext); - pub fn paint_foreground(layout: Layout, cx: &mut PaintContext); -} - -pub trait Interactive { - fn interactions(&mut self) -> &mut Interactions; - - fn on_click(self, ) -} - -struct Interactions { - click: SmallVec<[; 1]>, -} -``` - -```rs -trait Stylable { - type Style; - - fn with_style(self, style: Self::Style) -> Self; -} -``` diff --git a/crates/storybook/src/actions.rs b/crates/storybook/src/actions.rs deleted file mode 100644 index 03ee5b580c55d8ffbbc745214f1298d2dd0a19be..0000000000000000000000000000000000000000 --- a/crates/storybook/src/actions.rs +++ /dev/null @@ -1,2 +0,0 @@ -use gpui::actions; -actions!(storybook, [Quit]); diff --git a/crates/storybook/src/app_menus.rs b/crates/storybook/src/app_menus.rs deleted file mode 100644 index c3045cf7999b851245a2f540c6318b7d0ef57b4f..0000000000000000000000000000000000000000 --- a/crates/storybook/src/app_menus.rs +++ /dev/null @@ -1,7 +0,0 @@ -use gpui::{Menu, MenuItem}; - -pub fn app_menus() -> Vec { - use crate::actions::Quit; - - vec![Menu::new("Storybook").items([MenuItem::action("Quit", Quit)])] -} diff --git a/crates/storybook/src/assets.rs b/crates/storybook/src/assets.rs deleted file mode 100644 index 4da4081212c0c4d97fe881e5e2b792462c2318e6..0000000000000000000000000000000000000000 --- a/crates/storybook/src/assets.rs +++ /dev/null @@ -1,32 +0,0 @@ -use std::borrow::Cow; - -use anyhow::{Context as _, Result}; -use gpui::{AssetSource, SharedString}; -use rust_embed::RustEmbed; - 
-#[derive(RustEmbed)] -#[folder = "../../assets"] -#[include = "fonts/**/*"] -#[include = "icons/**/*"] -#[include = "images/**/*"] -#[include = "themes/**/*"] -#[include = "sounds/**/*"] -#[include = "*.md"] -#[exclude = "*.DS_Store"] -pub struct Assets; - -impl AssetSource for Assets { - fn load(&self, path: &str) -> Result>> { - Self::get(path) - .map(|f| f.data) - .with_context(|| format!("could not find asset at path {path:?}")) - .map(Some) - } - - fn list(&self, path: &str) -> Result> { - Ok(Self::iter() - .filter(|p| p.starts_with(path)) - .map(SharedString::from) - .collect()) - } -} diff --git a/crates/storybook/src/stories.rs b/crates/storybook/src/stories.rs deleted file mode 100644 index 63992d259c7a1cb76a3684f53c55fe255522aced..0000000000000000000000000000000000000000 --- a/crates/storybook/src/stories.rs +++ /dev/null @@ -1,23 +0,0 @@ -mod auto_height_editor; -mod cursor; -mod focus; -mod indent_guides; -mod kitchen_sink; -mod overflow_scroll; -mod picker; -mod scroll; -mod text; -mod viewport_units; -mod with_rem_size; - -pub use auto_height_editor::*; -pub use cursor::*; -pub use focus::*; -pub use indent_guides::*; -pub use kitchen_sink::*; -pub use overflow_scroll::*; -pub use picker::*; -pub use scroll::*; -pub use text::*; -pub use viewport_units::*; -pub use with_rem_size::*; diff --git a/crates/storybook/src/stories/auto_height_editor.rs b/crates/storybook/src/stories/auto_height_editor.rs deleted file mode 100644 index 702d5774f2c7f353ae9ea600e9b17309f7051ead..0000000000000000000000000000000000000000 --- a/crates/storybook/src/stories/auto_height_editor.rs +++ /dev/null @@ -1,36 +0,0 @@ -use editor::Editor; -use gpui::{ - App, AppContext as _, Context, Entity, IntoElement, KeyBinding, ParentElement, Render, Styled, - Window, div, white, -}; - -pub struct AutoHeightEditorStory { - editor: Entity, -} - -impl AutoHeightEditorStory { - pub fn new(window: &mut Window, cx: &mut App) -> gpui::Entity { - cx.bind_keys([KeyBinding::new( - "enter", - 
editor::actions::Newline, - Some("Editor"), - )]); - cx.new(|cx| Self { - editor: cx.new(|cx| { - let mut editor = Editor::auto_height(1, 3, window, cx); - editor.set_soft_wrap_mode(language::language_settings::SoftWrap::EditorWidth, cx); - editor - }), - }) - } -} - -impl Render for AutoHeightEditorStory { - fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { - div() - .size_full() - .bg(white()) - .text_sm() - .child(div().w_32().bg(gpui::black()).child(self.editor.clone())) - } -} diff --git a/crates/storybook/src/stories/cursor.rs b/crates/storybook/src/stories/cursor.rs deleted file mode 100644 index 00bae999172a50ed9041d5e9fff2903d0c3fbc46..0000000000000000000000000000000000000000 --- a/crates/storybook/src/stories/cursor.rs +++ /dev/null @@ -1,109 +0,0 @@ -use gpui::{Div, Render, Stateful}; -use story::Story; -use ui::prelude::*; - -pub struct CursorStory; - -impl Render for CursorStory { - fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { - let all_cursors: [(&str, Box) -> Stateful
>); 19] = [ - ( - "cursor_default", - Box::new(|el: Stateful
| el.cursor_default()), - ), - ( - "cursor_pointer", - Box::new(|el: Stateful
| el.cursor_pointer()), - ), - ( - "cursor_text", - Box::new(|el: Stateful
| el.cursor_text()), - ), - ( - "cursor_move", - Box::new(|el: Stateful
| el.cursor_move()), - ), - ( - "cursor_not_allowed", - Box::new(|el: Stateful
| el.cursor_not_allowed()), - ), - ( - "cursor_context_menu", - Box::new(|el: Stateful
| el.cursor_context_menu()), - ), - ( - "cursor_crosshair", - Box::new(|el: Stateful
| el.cursor_crosshair()), - ), - ( - "cursor_vertical_text", - Box::new(|el: Stateful
| el.cursor_vertical_text()), - ), - ( - "cursor_alias", - Box::new(|el: Stateful
| el.cursor_alias()), - ), - ( - "cursor_copy", - Box::new(|el: Stateful
| el.cursor_copy()), - ), - ( - "cursor_no_drop", - Box::new(|el: Stateful
| el.cursor_no_drop()), - ), - ( - "cursor_grab", - Box::new(|el: Stateful
| el.cursor_grab()), - ), - ( - "cursor_grabbing", - Box::new(|el: Stateful
| el.cursor_grabbing()), - ), - ( - "cursor_col_resize", - Box::new(|el: Stateful
| el.cursor_col_resize()), - ), - ( - "cursor_row_resize", - Box::new(|el: Stateful
| el.cursor_row_resize()), - ), - ( - "cursor_n_resize", - Box::new(|el: Stateful
| el.cursor_n_resize()), - ), - ( - "cursor_e_resize", - Box::new(|el: Stateful
| el.cursor_e_resize()), - ), - ( - "cursor_s_resize", - Box::new(|el: Stateful
| el.cursor_s_resize()), - ), - ( - "cursor_w_resize", - Box::new(|el: Stateful
| el.cursor_w_resize()), - ), - ]; - - Story::container(cx) - .flex() - .gap_1() - .child(Story::title("cursor", cx)) - .children(all_cursors.map(|(name, apply_cursor)| { - div().gap_1().flex().text_color(gpui::white()).child( - div() - .flex() - .items_center() - .justify_center() - .id(name) - .map(apply_cursor) - .w_64() - .h_8() - .bg(gpui::red()) - .active(|style| style.bg(gpui::green())) - .text_sm() - .child(Story::label(name, cx)), - ) - })) - } -} diff --git a/crates/storybook/src/stories/focus.rs b/crates/storybook/src/stories/focus.rs deleted file mode 100644 index a64c272ba75a902f3debe3dedfe7b95c969e0d45..0000000000000000000000000000000000000000 --- a/crates/storybook/src/stories/focus.rs +++ /dev/null @@ -1,123 +0,0 @@ -use gpui::{ - App, Entity, FocusHandle, KeyBinding, Render, Subscription, Window, actions, div, prelude::*, -}; -use ui::prelude::*; - -actions!(focus, [ActionA, ActionB, ActionC]); - -pub struct FocusStory { - parent_focus: FocusHandle, - child_1_focus: FocusHandle, - child_2_focus: FocusHandle, - _focus_subscriptions: Vec, -} - -impl FocusStory { - pub fn model(window: &mut Window, cx: &mut App) -> Entity { - cx.bind_keys([ - KeyBinding::new("cmd-a", ActionA, Some("parent")), - KeyBinding::new("cmd-a", ActionB, Some("child-1")), - KeyBinding::new("cmd-c", ActionC, None), - ]); - - cx.new(|cx| { - let parent_focus = cx.focus_handle(); - let child_1_focus = cx.focus_handle(); - let child_2_focus = cx.focus_handle(); - let _focus_subscriptions = vec![ - cx.on_focus(&parent_focus, window, |_, _, _| { - println!("Parent focused"); - }), - cx.on_blur(&parent_focus, window, |_, _, _| { - println!("Parent blurred"); - }), - cx.on_focus(&child_1_focus, window, |_, _, _| { - println!("Child 1 focused"); - }), - cx.on_blur(&child_1_focus, window, |_, _, _| { - println!("Child 1 blurred"); - }), - cx.on_focus(&child_2_focus, window, |_, _, _| { - println!("Child 2 focused"); - }), - cx.on_blur(&child_2_focus, window, |_, _, _| { - println!("Child 
2 blurred"); - }), - ]; - - Self { - parent_focus, - child_1_focus, - child_2_focus, - _focus_subscriptions, - } - }) - } -} - -impl Render for FocusStory { - fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { - let theme = cx.theme(); - let color_1 = theme.status().created; - let color_2 = theme.status().modified; - let color_4 = theme.status().conflict; - let color_5 = theme.status().ignored; - let color_6 = theme.status().renamed; - let color_7 = theme.status().hint; - - div() - .id("parent") - .active(|style| style.bg(color_7)) - .track_focus(&self.parent_focus) - .key_context("parent") - .on_action(cx.listener(|_, _action: &ActionA, _window, _cx| { - println!("Action A dispatched on parent"); - })) - .on_action(cx.listener(|_, _action: &ActionB, _window, _cx| { - println!("Action B dispatched on parent"); - })) - .on_key_down(cx.listener(|_, event, _, _| println!("Key down on parent {:?}", event))) - .on_key_up(cx.listener(|_, event, _, _| println!("Key up on parent {:?}", event))) - .size_full() - .bg(color_1) - .focus(|style| style.bg(color_2)) - .child( - div() - .track_focus(&self.child_1_focus) - .key_context("child-1") - .on_action(cx.listener(|_, _action: &ActionB, _window, _cx| { - println!("Action B dispatched on child 1 during"); - })) - .w_full() - .h_6() - .bg(color_4) - .focus(|style| style.bg(color_5)) - .in_focus(|style| style.bg(color_6)) - .on_key_down( - cx.listener(|_, event, _, _| println!("Key down on child 1 {:?}", event)), - ) - .on_key_up( - cx.listener(|_, event, _, _| println!("Key up on child 1 {:?}", event)), - ) - .child("Child 1"), - ) - .child( - div() - .track_focus(&self.child_2_focus) - .key_context("child-2") - .on_action(cx.listener(|_, _action: &ActionC, _window, _cx| { - println!("Action C dispatched on child 2"); - })) - .w_full() - .h_6() - .bg(color_4) - .on_key_down( - cx.listener(|_, event, _, _| println!("Key down on child 2 {:?}", event)), - ) - .on_key_up( - cx.listener(|_, event, _, _| 
println!("Key up on child 2 {:?}", event)), - ) - .child("Child 2"), - ) - } -} diff --git a/crates/storybook/src/stories/indent_guides.rs b/crates/storybook/src/stories/indent_guides.rs deleted file mode 100644 index db23ea79bd43c267e02e4f81b1b0586b0c1d19cd..0000000000000000000000000000000000000000 --- a/crates/storybook/src/stories/indent_guides.rs +++ /dev/null @@ -1,82 +0,0 @@ -use std::ops::Range; - -use gpui::{Entity, Render, div, uniform_list}; -use gpui::{prelude::*, *}; -use ui::{AbsoluteLength, Color, DefiniteLength, Label, LabelCommon, px, v_flex}; - -use story::Story; - -const LENGTH: usize = 100; - -pub struct IndentGuidesStory { - depths: Vec, -} - -impl IndentGuidesStory { - pub fn model(_window: &mut Window, cx: &mut App) -> Entity { - let mut depths = Vec::new(); - depths.push(0); - depths.push(1); - depths.push(2); - for _ in 0..LENGTH - 6 { - depths.push(3); - } - depths.push(2); - depths.push(1); - depths.push(0); - - cx.new(|_cx| Self { depths }) - } -} - -impl Render for IndentGuidesStory { - fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { - Story::container(cx) - .child(Story::title("Indent guides", cx)) - .child( - v_flex().size_full().child( - uniform_list( - "some-list", - self.depths.len(), - cx.processor(move |this, range: Range, _window, _cx| { - this.depths - .iter() - .enumerate() - .skip(range.start) - .take(range.end - range.start) - .map(|(i, depth)| { - div() - .pl(DefiniteLength::Absolute(AbsoluteLength::Pixels(px( - 16. 
* (*depth as f32), - )))) - .child(Label::new(format!("Item {}", i)).color(Color::Info)) - }) - .collect() - }), - ) - .with_sizing_behavior(gpui::ListSizingBehavior::Infer) - .with_decoration( - ui::indent_guides( - px(16.), - ui::IndentGuideColors { - default: Color::Info.color(cx), - hover: Color::Accent.color(cx), - active: Color::Accent.color(cx), - }, - ) - .with_compute_indents_fn( - cx.entity(), - |this, range, _cx, _context| { - this.depths - .iter() - .skip(range.start) - .take(range.end - range.start) - .cloned() - .collect() - }, - ), - ), - ), - ) - } -} diff --git a/crates/storybook/src/stories/kitchen_sink.rs b/crates/storybook/src/stories/kitchen_sink.rs deleted file mode 100644 index aaddf733f8201874580e766055b8ea0cfb4c10fb..0000000000000000000000000000000000000000 --- a/crates/storybook/src/stories/kitchen_sink.rs +++ /dev/null @@ -1,32 +0,0 @@ -use gpui::{Entity, Render, prelude::*}; -use story::Story; -use strum::IntoEnumIterator; -use ui::prelude::*; - -use crate::story_selector::ComponentStory; - -pub struct KitchenSinkStory; - -impl KitchenSinkStory { - pub fn model(cx: &mut App) -> Entity { - cx.new(|_| Self) - } -} - -impl Render for KitchenSinkStory { - fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { - let component_stories = ComponentStory::iter() - .map(|selector| selector.story(window, cx)) - .collect::>(); - - Story::container(cx) - .id("kitchen-sink") - .overflow_y_scroll() - .child(Story::title("Kitchen Sink", cx)) - .child(Story::label("Components", cx)) - .child(div().flex().flex_col().children(component_stories)) - // Add a bit of space at the bottom of the kitchen sink so elements - // don't end up squished right up against the bottom of the screen. 
- .child(div().p_4()) - } -} diff --git a/crates/storybook/src/stories/overflow_scroll.rs b/crates/storybook/src/stories/overflow_scroll.rs deleted file mode 100644 index a9ba09d6a30bfe825e8275c6f2b5432dd8a1941b..0000000000000000000000000000000000000000 --- a/crates/storybook/src/stories/overflow_scroll.rs +++ /dev/null @@ -1,41 +0,0 @@ -use gpui::Render; -use story::Story; - -use ui::prelude::*; - -pub struct OverflowScrollStory; - -impl Render for OverflowScrollStory { - fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { - Story::container(cx) - .child(Story::title("Overflow Scroll", cx)) - .child(Story::label("`overflow_x_scroll`", cx)) - .child( - h_flex() - .id("overflow_x_scroll") - .gap_2() - .overflow_x_scroll() - .children((0..100).map(|i| { - div() - .p_4() - .debug_bg_cyan() - .child(SharedString::from(format!("Child {}", i + 1))) - })), - ) - .child(Story::label("`overflow_y_scroll`", cx)) - .child( - v_flex() - .w_full() - .flex_1() - .id("overflow_y_scroll") - .gap_2() - .overflow_y_scroll() - .children((0..100).map(|i| { - div() - .p_4() - .debug_bg_green() - .child(SharedString::from(format!("Child {}", i + 1))) - })), - ) - } -} diff --git a/crates/storybook/src/stories/picker.rs b/crates/storybook/src/stories/picker.rs deleted file mode 100644 index fa65fd085dc158a22666262a5ed84573eb744651..0000000000000000000000000000000000000000 --- a/crates/storybook/src/stories/picker.rs +++ /dev/null @@ -1,206 +0,0 @@ -use fuzzy::StringMatchCandidate; -use gpui::{App, Entity, KeyBinding, Render, SharedString, Styled, Task, Window, div, prelude::*}; -use picker::{Picker, PickerDelegate}; -use std::sync::Arc; -use ui::{Label, ListItem}; -use ui::{ListItemSpacing, prelude::*}; - -pub struct PickerStory { - picker: Entity>, -} - -struct Delegate { - candidates: Arc<[StringMatchCandidate]>, - matches: Vec, - selected_ix: usize, -} - -impl Delegate { - fn new(strings: &[&str]) -> Self { - Self { - candidates: strings - .iter() - 
.copied() - .enumerate() - .map(|(id, string)| StringMatchCandidate::new(id, string)) - .collect(), - matches: vec![], - selected_ix: 0, - } - } -} - -impl PickerDelegate for Delegate { - type ListItem = ListItem; - - fn match_count(&self) -> usize { - self.candidates.len() - } - - fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { - "Test".into() - } - - fn render_match( - &self, - ix: usize, - selected: bool, - _window: &mut Window, - _cx: &mut Context>, - ) -> Option { - let candidate_ix = self.matches.get(ix)?; - // TASK: Make StringMatchCandidate::string a SharedString - let candidate = SharedString::from(self.candidates[*candidate_ix].string.clone()); - - Some( - ListItem::new(ix) - .inset(true) - .spacing(ListItemSpacing::Sparse) - .toggle_state(selected) - .child(Label::new(candidate)), - ) - } - - fn selected_index(&self) -> usize { - self.selected_ix - } - - fn set_selected_index(&mut self, ix: usize, _: &mut Window, cx: &mut Context>) { - self.selected_ix = ix; - cx.notify(); - } - - fn confirm(&mut self, secondary: bool, _window: &mut Window, _cx: &mut Context>) { - let candidate_ix = self.matches[self.selected_ix]; - let candidate = self.candidates[candidate_ix].string.clone(); - - if secondary { - eprintln!("Secondary confirmed {}", candidate) - } else { - eprintln!("Confirmed {}", candidate) - } - } - - fn dismissed(&mut self, _: &mut Window, cx: &mut Context>) { - cx.quit(); - } - - fn update_matches( - &mut self, - query: String, - _: &mut Window, - cx: &mut Context>, - ) -> Task<()> { - let candidates = self.candidates.clone(); - self.matches = cx - .foreground_executor() - .block_on(fuzzy::match_strings( - &candidates, - &query, - true, - true, - 100, - &Default::default(), - cx.background_executor().clone(), - )) - .into_iter() - .map(|r| r.candidate_id) - .collect(); - self.selected_ix = 0; - Task::ready(()) - } -} - -impl PickerStory { - pub fn new(window: &mut Window, cx: &mut App) -> Entity { - cx.new(|cx| { - 
cx.bind_keys([ - KeyBinding::new("up", menu::SelectPrevious, Some("picker")), - KeyBinding::new("pageup", menu::SelectFirst, Some("picker")), - KeyBinding::new("shift-pageup", menu::SelectFirst, Some("picker")), - KeyBinding::new("ctrl-p", menu::SelectPrevious, Some("picker")), - KeyBinding::new("down", menu::SelectNext, Some("picker")), - KeyBinding::new("pagedown", menu::SelectLast, Some("picker")), - KeyBinding::new("shift-pagedown", menu::SelectFirst, Some("picker")), - KeyBinding::new("ctrl-n", menu::SelectNext, Some("picker")), - KeyBinding::new("cmd-up", menu::SelectFirst, Some("picker")), - KeyBinding::new("cmd-down", menu::SelectLast, Some("picker")), - KeyBinding::new("enter", menu::Confirm, Some("picker")), - KeyBinding::new("ctrl-enter", menu::SecondaryConfirm, Some("picker")), - KeyBinding::new("cmd-enter", menu::SecondaryConfirm, Some("picker")), - KeyBinding::new("escape", menu::Cancel, Some("picker")), - KeyBinding::new("ctrl-c", menu::Cancel, Some("picker")), - ]); - - PickerStory { - picker: cx.new(|cx| { - let mut delegate = Delegate::new(&[ - "Baguette (France)", - "Baklava (Turkey)", - "Beef Wellington (UK)", - "Biryani (India)", - "Borscht (Ukraine)", - "Bratwurst (Germany)", - "Bulgogi (Korea)", - "Burrito (USA)", - "Ceviche (Peru)", - "Chicken Tikka Masala (India)", - "Churrasco (Brazil)", - "Couscous (North Africa)", - "Croissant (France)", - "Dim Sum (China)", - "Empanada (Argentina)", - "Fajitas (Mexico)", - "Falafel (Middle East)", - "Feijoada (Brazil)", - "Fish and Chips (UK)", - "Fondue (Switzerland)", - "Goulash (Hungary)", - "Haggis (Scotland)", - "Kebab (Middle East)", - "Kimchi (Korea)", - "Lasagna (Italy)", - "Maple Syrup Pancakes (Canada)", - "Moussaka (Greece)", - "Pad Thai (Thailand)", - "Paella (Spain)", - "Pancakes (USA)", - "Pasta Carbonara (Italy)", - "Pavlova (Australia)", - "Peking Duck (China)", - "Pho (Vietnam)", - "Pierogi (Poland)", - "Pizza (Italy)", - "Poutine (Canada)", - "Pretzel (Germany)", - "Ramen (Japan)", - 
"Rendang (Indonesia)", - "Sashimi (Japan)", - "Satay (Indonesia)", - "Shepherd's Pie (Ireland)", - "Sushi (Japan)", - "Tacos (Mexico)", - "Tandoori Chicken (India)", - "Tortilla (Spain)", - "Tzatziki (Greece)", - "Wiener Schnitzel (Austria)", - ]); - delegate.update_matches("".into(), window, cx).detach(); - - let picker = Picker::uniform_list(delegate, window, cx); - picker.focus(window, cx); - picker - }), - } - }) - } -} - -impl Render for PickerStory { - fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { - div() - .bg(cx.theme().styles.colors.background) - .size_full() - .child(self.picker.clone()) - } -} diff --git a/crates/storybook/src/stories/scroll.rs b/crates/storybook/src/stories/scroll.rs deleted file mode 100644 index 8a4c7ea7689042675764bc55faf019a8cc8fc2a9..0000000000000000000000000000000000000000 --- a/crates/storybook/src/stories/scroll.rs +++ /dev/null @@ -1,52 +0,0 @@ -use gpui::{App, Entity, Render, SharedString, Styled, Window, div, prelude::*, px}; -use ui::Tooltip; -use ui::prelude::*; - -pub struct ScrollStory; - -impl ScrollStory { - pub fn model(cx: &mut App) -> Entity { - cx.new(|_| ScrollStory) - } -} - -impl Render for ScrollStory { - fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { - let theme = cx.theme(); - let color_1 = theme.status().created; - let color_2 = theme.status().modified; - - div() - .id("parent") - .bg(theme.colors().background) - .size_full() - .overflow_scroll() - .children((0..10).map(|row| { - div() - .w(px(1000.)) - .h(px(100.)) - .flex() - .flex_row() - .children((0..10).map(|column| { - let id = SharedString::from(format!("{}, {}", row, column)); - let bg = if row % 2 == column % 2 { - color_1 - } else { - color_2 - }; - div() - .id(id.clone()) - .tooltip(Tooltip::text(id)) - .bg(bg) - .size(px(100_f32)) - .when(row >= 5 && column >= 5, |d| { - d.overflow_scroll() - .child(div().size(px(50.)).bg(color_1)) - .child(div().size(px(50.)).bg(color_2)) - 
.child(div().size(px(50.)).bg(color_1)) - .child(div().size(px(50.)).bg(color_2)) - }) - })) - })) - } -} diff --git a/crates/storybook/src/stories/text.rs b/crates/storybook/src/stories/text.rs deleted file mode 100644 index 7ba2378307e8e7ff9827534978da3abf23261e6d..0000000000000000000000000000000000000000 --- a/crates/storybook/src/stories/text.rs +++ /dev/null @@ -1,120 +0,0 @@ -use gpui::{ - App, AppContext as _, Context, Entity, HighlightStyle, InteractiveText, IntoElement, - ParentElement, Render, Styled, StyledText, Window, div, green, red, -}; -use indoc::indoc; -use story::*; - -pub struct TextStory; - -impl TextStory { - pub fn model(cx: &mut App) -> Entity { - cx.new(|_| Self) - } -} - -impl Render for TextStory { - fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { - Story::container(cx) - .child(Story::title("Text", cx)) - .children(vec![ - StorySection::new() - .child( - StoryItem::new("Default", div().bg(gpui::blue()).child("Hello World!")) - .usage(indoc! {r##" - div() - .child("Hello World!") - "## - }), - ) - .child( - StoryItem::new( - "Wrapping Text", - div().max_w_96().child(concat!( - "The quick brown fox jumps over the lazy dog. ", - "Meanwhile, the lazy dog decided it was time for a change. ", - "He started daily workout routines, ate healthier and became the fastest dog in town.", - )), - ) - .description("Set a width or max-width to enable text wrapping.") - .usage(indoc! {r##" - div() - .max_w_96() - .child("Some text that you want to wrap.") - "## - }), - ) - .child( - StoryItem::new( - "tbd", - div().flex().w_96().child( - div().overflow_hidden().child(concat!( - "flex-row. width 96. overflow-hidden. The quick brown fox jumps over the lazy dog. ", - "Meanwhile, the lazy dog decided it was time for a change. 
", - "He started daily workout routines, ate healthier and became the fastest dog in town.", - )), - ), - ), - ) - .child( - StoryItem::new( - "Text in Horizontal Flex", - div().flex().w_96().bg(red()).child(concat!( - "flex-row. width 96. The quick brown fox jumps over the lazy dog. ", - "Meanwhile, the lazy dog decided it was time for a change. ", - "He started daily workout routines, ate healthier and became the fastest dog in town.", - )), - ) - .usage(indoc! {r##" - // NOTE: When rendering text in a horizontal flex container, - // Taffy will not pass width constraints down from the parent. - // To fix this, render text in a parent with overflow: hidden - - div() - .max_w_96() - .child("Some text that you want to wrap.") - "## - }), - ) - .child( - StoryItem::new( - "Interactive Text", - InteractiveText::new( - "interactive", - StyledText::new("Hello world, how is it going?").with_default_highlights( - &window.text_style(), - [ - ( - 6..11, - HighlightStyle { - background_color: Some(green()), - ..Default::default() - }, - ), - ], - ), - ) - .on_click(vec![2..4, 1..3, 7..9], |range_ix, _, _cx| { - println!("Clicked range {range_ix}"); - }), - ) - .usage(indoc! 
{r##" - InteractiveText::new( - "interactive", - StyledText::new("Hello world, how is it going?").with_highlights(&window.text_style(), [ - (6..11, HighlightStyle { - background_color: Some(green()), - ..Default::default() - }), - ]), - ) - .on_click(vec![2..4, 1..3, 7..9], |range_ix, _cx| { - println!("Clicked range {range_ix}"); - }) - "## - }), - ), - ]) - .into_element() - } -} diff --git a/crates/storybook/src/stories/viewport_units.rs b/crates/storybook/src/stories/viewport_units.rs deleted file mode 100644 index 1259a713ee888deedd3c1beb2a1ccd30a3eff252..0000000000000000000000000000000000000000 --- a/crates/storybook/src/stories/viewport_units.rs +++ /dev/null @@ -1,32 +0,0 @@ -use gpui::Render; -use story::Story; - -use ui::prelude::*; - -pub struct ViewportUnitsStory; - -impl Render for ViewportUnitsStory { - fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { - Story::container(cx).child( - div() - .flex() - .flex_row() - .child( - div() - .w(vw(0.5, window)) - .h(vh(0.8, window)) - .bg(gpui::red()) - .text_color(gpui::white()) - .child("50vw, 80vh"), - ) - .child( - div() - .w(vw(0.25, window)) - .h(vh(0.33, window)) - .bg(gpui::green()) - .text_color(gpui::white()) - .child("25vw, 33vh"), - ), - ) - } -} diff --git a/crates/storybook/src/stories/with_rem_size.rs b/crates/storybook/src/stories/with_rem_size.rs deleted file mode 100644 index eeca3fb89f6382dbc13d696f2701f0c6b28027ee..0000000000000000000000000000000000000000 --- a/crates/storybook/src/stories/with_rem_size.rs +++ /dev/null @@ -1,61 +0,0 @@ -use gpui::{AnyElement, Hsla, Render}; -use story::Story; - -use ui::{prelude::*, utils::WithRemSize}; - -pub struct WithRemSizeStory; - -impl Render for WithRemSizeStory { - fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { - Story::container(cx).child( - Example::new(16., gpui::red()) - .child( - Example::new(24., gpui::green()) - .child(Example::new(8., gpui::blue())) - 
.child(Example::new(16., gpui::yellow())), - ) - .child( - Example::new(12., gpui::green()) - .child(Example::new(48., gpui::blue())) - .child(Example::new(16., gpui::yellow())), - ), - ) - } -} - -#[derive(IntoElement)] -struct Example { - rem_size: Pixels, - border_color: Hsla, - children: Vec, -} - -impl Example { - pub fn new(rem_size: impl Into, border_color: Hsla) -> Self { - Self { - rem_size: rem_size.into(), - border_color, - children: Vec::new(), - } - } -} - -impl ParentElement for Example { - fn extend(&mut self, elements: impl IntoIterator) { - self.children.extend(elements); - } -} - -impl RenderOnce for Example { - fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement { - WithRemSize::new(self.rem_size).child( - v_flex() - .gap_2() - .p_2() - .border_2() - .border_color(self.border_color) - .child(Label::new(format!("1rem = {}px", f32::from(self.rem_size)))) - .children(self.children), - ) - } -} diff --git a/crates/storybook/src/story_selector.rs b/crates/storybook/src/story_selector.rs deleted file mode 100644 index 4c1113f70f54051bab973595f6f7e7bdbc9c0029..0000000000000000000000000000000000000000 --- a/crates/storybook/src/story_selector.rs +++ /dev/null @@ -1,109 +0,0 @@ -use std::str::FromStr; -use std::sync::OnceLock; - -use crate::stories::*; -use clap::ValueEnum; -use clap::builder::PossibleValue; -use gpui::AnyView; -use strum::{EnumIter, EnumString, IntoEnumIterator}; -use ui::prelude::*; - -#[derive(Debug, PartialEq, Eq, Clone, Copy, strum::Display, EnumString, EnumIter)] -#[strum(serialize_all = "snake_case")] -pub enum ComponentStory { - ApplicationMenu, - AutoHeightEditor, - ContextMenu, - Cursor, - Focus, - OverflowScroll, - Picker, - Scroll, - Text, - ViewportUnits, - WithRemSize, - IndentGuides, -} - -impl ComponentStory { - pub fn story(&self, window: &mut Window, cx: &mut App) -> AnyView { - match self { - Self::ApplicationMenu => cx - .new(|cx| title_bar::ApplicationMenuStory::new(window, cx)) - .into(), - 
Self::AutoHeightEditor => AutoHeightEditorStory::new(window, cx).into(), - Self::ContextMenu => cx.new(|_| ui::ContextMenuStory).into(), - Self::Cursor => cx.new(|_| crate::stories::CursorStory).into(), - Self::Focus => FocusStory::model(window, cx).into(), - Self::OverflowScroll => cx.new(|_| crate::stories::OverflowScrollStory).into(), - Self::Picker => PickerStory::new(window, cx).into(), - Self::Scroll => ScrollStory::model(cx).into(), - Self::Text => TextStory::model(cx).into(), - Self::ViewportUnits => cx.new(|_| crate::stories::ViewportUnitsStory).into(), - Self::WithRemSize => cx.new(|_| crate::stories::WithRemSizeStory).into(), - Self::IndentGuides => crate::stories::IndentGuidesStory::model(window, cx).into(), - } - } -} - -#[derive(Debug, PartialEq, Eq, Clone, Copy)] -pub enum StorySelector { - Component(ComponentStory), - KitchenSink, -} - -impl FromStr for StorySelector { - type Err = anyhow::Error; - - fn from_str(raw_story_name: &str) -> std::result::Result { - use anyhow::Context as _; - - let story = raw_story_name.to_ascii_lowercase(); - - if story == "kitchen_sink" { - return Ok(Self::KitchenSink); - } - - if let Some((_, story)) = story.split_once("components/") { - let component_story = ComponentStory::from_str(story) - .with_context(|| format!("story not found for component '{story}'"))?; - - return Ok(Self::Component(component_story)); - } - - anyhow::bail!("story not found for '{raw_story_name}'") - } -} - -impl StorySelector { - pub fn story(&self, window: &mut Window, cx: &mut App) -> AnyView { - match self { - Self::Component(component_story) => component_story.story(window, cx), - Self::KitchenSink => KitchenSinkStory::model(cx).into(), - } - } -} - -/// The list of all stories available in the storybook. 
-static ALL_STORY_SELECTORS: OnceLock> = OnceLock::new(); - -impl ValueEnum for StorySelector { - fn value_variants<'a>() -> &'a [Self] { - (ALL_STORY_SELECTORS.get_or_init(|| { - let component_stories = ComponentStory::iter().map(StorySelector::Component); - - component_stories - .chain(std::iter::once(StorySelector::KitchenSink)) - .collect::>() - })) as _ - } - - fn to_possible_value(&self) -> Option { - let value = match self { - Self::Component(story) => format!("components/{story}"), - Self::KitchenSink => "kitchen_sink".to_string(), - }; - - Some(PossibleValue::new(value)) - } -} diff --git a/crates/storybook/src/storybook.rs b/crates/storybook/src/storybook.rs deleted file mode 100644 index d3df9bbc3a078793ab8e00c71cd4cb5cb9810fa6..0000000000000000000000000000000000000000 --- a/crates/storybook/src/storybook.rs +++ /dev/null @@ -1,162 +0,0 @@ -mod actions; -mod app_menus; -mod assets; -mod stories; -mod story_selector; - -use std::sync::Arc; - -use clap::Parser; -use dialoguer::FuzzySelect; -use gpui::{ - AnyView, App, Bounds, Context, Render, Window, WindowBounds, WindowOptions, - colors::{Colors, GlobalColors}, - div, px, size, -}; -use log::LevelFilter; -use reqwest_client::ReqwestClient; -use settings::{KeymapFile, Settings as _}; -use simplelog::SimpleLogger; -use strum::IntoEnumIterator; -use theme_settings::ThemeSettings; -use ui::prelude::*; - -use crate::app_menus::app_menus; -use crate::assets::Assets; -use crate::story_selector::{ComponentStory, StorySelector}; -use actions::Quit; -pub use indoc::indoc; - -#[derive(Parser)] -#[command(author, version, about, long_about = None)] -struct Args { - #[arg(value_enum)] - story: Option, - - /// The name of the theme to use in the storybook. - /// - /// If not provided, the default theme will be used. 
- #[arg(long)] - theme: Option, -} - -fn main() { - SimpleLogger::init(LevelFilter::Info, Default::default()).expect("could not initialize logger"); - - menu::init(); - let args = Args::parse(); - - let story_selector = args.story.unwrap_or_else(|| { - let stories = ComponentStory::iter().collect::>(); - - ctrlc::set_handler(move || {}).unwrap(); - - let result = FuzzySelect::new() - .with_prompt("Choose a story to run:") - .items(&stories) - .interact(); - - let Ok(selection) = result else { - dialoguer::console::Term::stderr().show_cursor().unwrap(); - std::process::exit(0); - }; - - StorySelector::Component(stories[selection]) - }); - let theme_name = args.theme.unwrap_or("One Dark".to_string()); - - gpui_platform::application() - .with_assets(Assets) - .run(move |cx| { - load_embedded_fonts(cx).unwrap(); - - cx.set_global(GlobalColors(Arc::new(Colors::default()))); - - let http_client = ReqwestClient::user_agent("zed_storybook").unwrap(); - cx.set_http_client(Arc::new(http_client)); - - settings::init(cx); - theme_settings::init(theme::LoadThemes::All(Box::new(Assets)), cx); - - let selector = story_selector; - - let mut theme_settings = ThemeSettings::get_global(cx).clone(); - theme_settings.theme = - theme_settings::ThemeSelection::Static(settings::ThemeName(theme_name.into())); - ThemeSettings::override_global(theme_settings, cx); - - editor::init(cx); - init(cx); - load_storybook_keymap(cx); - cx.set_menus(app_menus()); - - let size = size(px(1500.), px(780.)); - let bounds = Bounds::centered(None, size, cx); - let _window = cx.open_window( - WindowOptions { - window_bounds: Some(WindowBounds::Windowed(bounds)), - ..Default::default() - }, - move |window, cx| { - theme_settings::setup_ui_font(window, cx); - - cx.new(|cx| StoryWrapper::new(selector.story(window, cx))) - }, - ); - - cx.activate(true); - }); -} - -#[derive(Clone)] -pub struct StoryWrapper { - story: AnyView, -} - -impl StoryWrapper { - pub(crate) fn new(story: AnyView) -> Self { - Self { story 
} - } -} - -impl Render for StoryWrapper { - fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { - div() - .flex() - .flex_col() - .size_full() - .font_family(".ZedMono") - .child(self.story.clone()) - } -} - -fn load_embedded_fonts(cx: &App) -> anyhow::Result<()> { - let font_paths = cx.asset_source().list("fonts")?; - let mut embedded_fonts = Vec::new(); - for font_path in font_paths { - if font_path.ends_with(".ttf") { - let font_bytes = cx - .asset_source() - .load(&font_path)? - .expect("Should never be None in the storybook"); - embedded_fonts.push(font_bytes); - } - } - - cx.text_system().add_fonts(embedded_fonts) -} - -fn load_storybook_keymap(cx: &mut App) { - cx.bind_keys(KeymapFile::load_asset("keymaps/storybook.json", None, cx).unwrap()); -} - -pub fn init(cx: &mut App) { - cx.on_action(quit); -} - -fn quit(_: &Quit, cx: &mut App) { - cx.spawn(async move |cx| { - cx.update(|cx| cx.quit()); - }) - .detach(); -} diff --git a/crates/theme/Cargo.toml b/crates/theme/Cargo.toml index 5bb624dd0c101aa978e296a7ff33c02b2faa99c1..77570b2ae4abd71bb54c11e3394f4514d9dc156e 100644 --- a/crates/theme/Cargo.toml +++ b/crates/theme/Cargo.toml @@ -19,7 +19,6 @@ doctest = false [dependencies] anyhow.workspace = true collections.workspace = true -derive_more.workspace = true gpui.workspace = true syntax_theme.workspace = true palette = { workspace = true, default-features = false, features = ["std"] } diff --git a/crates/theme/src/registry.rs b/crates/theme/src/registry.rs index 8630af9deda0d403e257bbc173f0f260ef32e184..6d8ae4ab86389de6ada63d12f2593863b21dcfae 100644 --- a/crates/theme/src/registry.rs +++ b/crates/theme/src/registry.rs @@ -3,7 +3,6 @@ use std::{fmt::Debug, path::Path}; use anyhow::Result; use collections::HashMap; -use derive_more::{Deref, DerefMut}; use gpui::{App, AssetSource, Global, SharedString}; use parking_lot::RwLock; use thiserror::Error; @@ -38,9 +37,23 @@ pub struct IconThemeNotFoundError(pub SharedString); /// 
inserting the [`ThemeRegistry`] into the context as a global. /// /// This should not be exposed outside of this module. -#[derive(Default, Deref, DerefMut)] +#[derive(Default)] struct GlobalThemeRegistry(Arc); +impl std::ops::DerefMut for GlobalThemeRegistry { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl std::ops::Deref for GlobalThemeRegistry { + type Target = Arc; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + impl Global for GlobalThemeRegistry {} struct ThemeRegistryState { diff --git a/crates/theme/src/theme.rs b/crates/theme/src/theme.rs index faa18bd3ce9ed71f4afed6d21d577d48b14680fb..cf4203dc763a6bbb04c8798d55e39b78f8e8a645 100644 --- a/crates/theme/src/theme.rs +++ b/crates/theme/src/theme.rs @@ -22,7 +22,6 @@ mod ui_density; use std::sync::Arc; -use derive_more::{Deref, DerefMut}; use gpui::BorrowAppContext; use gpui::Global; use gpui::{ @@ -129,18 +128,40 @@ impl ActiveTheme for App { } /// The appearance of the system. -#[derive(Debug, Clone, Copy, Deref)] +#[derive(Debug, Clone, Copy)] pub struct SystemAppearance(pub Appearance); +impl std::ops::Deref for SystemAppearance { + type Target = Appearance; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + impl Default for SystemAppearance { fn default() -> Self { Self(Appearance::Dark) } } -#[derive(Deref, DerefMut, Default)] +#[derive(Default)] struct GlobalSystemAppearance(SystemAppearance); +impl std::ops::DerefMut for GlobalSystemAppearance { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl std::ops::Deref for GlobalSystemAppearance { + type Target = SystemAppearance; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + impl Global for GlobalSystemAppearance {} impl SystemAppearance { diff --git a/crates/title_bar/Cargo.toml b/crates/title_bar/Cargo.toml index ef59ada28baa878d2cfc37ba52b4912e261274e8..eed94c839c0d0489944bf64725537234052c8e5f 100644 --- a/crates/title_bar/Cargo.toml +++ b/crates/title_bar/Cargo.toml @@ 
-14,7 +14,7 @@ doctest = false [features] default = [] -stories = ["dep:story"] + test-support = [ "call/test-support", "client/test-support", @@ -53,7 +53,6 @@ schemars.workspace = true serde.workspace = true settings.workspace = true smallvec.workspace = true -story = { workspace = true, optional = true } telemetry.workspace = true theme.workspace = true ui.workspace = true diff --git a/crates/title_bar/src/stories/application_menu.rs b/crates/title_bar/src/stories/application_menu.rs deleted file mode 100644 index f47f2a6c76b0781c6011993690d1aada95414545..0000000000000000000000000000000000000000 --- a/crates/title_bar/src/stories/application_menu.rs +++ /dev/null @@ -1,29 +0,0 @@ -use gpui::{Entity, Render}; -use story::{Story, StoryItem, StorySection}; - -use ui::prelude::*; - -use crate::application_menu::ApplicationMenu; - -pub struct ApplicationMenuStory { - menu: Entity, -} - -impl ApplicationMenuStory { - pub fn new(window: &mut Window, cx: &mut App) -> Self { - Self { - menu: cx.new(|cx| ApplicationMenu::new(window, cx)), - } - } -} - -impl Render for ApplicationMenuStory { - fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { - Story::container(cx) - .child(Story::title_for::(cx)) - .child(StorySection::new().child(StoryItem::new( - "Application Menu", - h_flex().child(self.menu.clone()), - ))) - } -} diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index 164cefc296e0a618e8698f1da5e387b84648ff96..34cad6f9540b1f8ba17aca08176b6950cdc7febe 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -5,9 +5,6 @@ mod plan_chip; mod title_bar_settings; mod update_version; -#[cfg(feature = "stories")] -mod stories; - use crate::application_menu::{ApplicationMenu, show_menus}; use crate::plan_chip::PlanChip; pub use platform_title_bar::{ @@ -56,9 +53,6 @@ use zed_actions::OpenRemote; pub use onboarding_banner::restore_banner; -#[cfg(feature = "stories")] -pub use 
stories::*; - const MAX_PROJECT_NAME_LENGTH: usize = 40; const MAX_BRANCH_NAME_LENGTH: usize = 40; const MAX_SHORT_SHA_LENGTH: usize = 8; diff --git a/crates/ui/Cargo.toml b/crates/ui/Cargo.toml index 05433bf8eebf78eccbbedff7a4bfcfb39b0022a7..4ae0e6d2e46b393d2962671de1f0a49f050fda19 100644 --- a/crates/ui/Cargo.toml +++ b/crates/ui/Cargo.toml @@ -24,7 +24,6 @@ menu.workspace = true schemars.workspace = true serde.workspace = true smallvec.workspace = true -story = { workspace = true, optional = true } strum.workspace = true theme.workspace = true ui_macros.workspace = true @@ -38,4 +37,3 @@ gpui = { workspace = true, features = ["test-support"] } [features] default = [] -stories = ["dep:story"] diff --git a/crates/ui/src/components.rs b/crates/ui/src/components.rs index 367d80d79c9af8722091e36c8e04bafb7ef0d8b5..6c0242a79130641376aefd744e2ffa08bbc65a40 100644 --- a/crates/ui/src/components.rs +++ b/crates/ui/src/components.rs @@ -40,9 +40,6 @@ mod toggle; mod tooltip; mod tree_view_item; -#[cfg(feature = "stories")] -mod stories; - pub use ai::*; pub use avatar::*; pub use banner::*; @@ -84,6 +81,3 @@ pub use tab_bar::*; pub use toggle::*; pub use tooltip::*; pub use tree_view_item::*; - -#[cfg(feature = "stories")] -pub use stories::*; diff --git a/crates/ui/src/components/stories/context_menu.rs b/crates/ui/src/components/stories/context_menu.rs deleted file mode 100644 index 197964adc86ef25b52eacd0631e4e7989b49bec0..0000000000000000000000000000000000000000 --- a/crates/ui/src/components/stories/context_menu.rs +++ /dev/null @@ -1,81 +0,0 @@ -use gpui::{Corner, Entity, Render, actions}; -use story::Story; - -use crate::prelude::*; -use crate::{ContextMenu, Label, right_click_menu}; - -actions!(stories, [PrintCurrentDate, PrintBestFood]); - -fn build_menu( - window: &mut Window, - cx: &mut App, - header: impl Into, -) -> Entity { - ContextMenu::build(window, cx, |menu, _, _| { - menu.header(header) - .separator() - .action("Print current time", 
Box::new(PrintCurrentDate)) - .entry( - "Print best food", - Some(Box::new(PrintBestFood)), - |window, cx| window.dispatch_action(Box::new(PrintBestFood), cx), - ) - }) -} - -pub struct ContextMenuStory; - -impl Render for ContextMenuStory { - fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { - Story::container(cx) - .on_action(|_: &PrintCurrentDate, _, _| { - println!("printing unix time!"); - if let Ok(unix_time) = std::time::UNIX_EPOCH.elapsed() { - println!("Current Unix time is {:?}", unix_time.as_secs()); - } - }) - .on_action(|_: &PrintBestFood, _, _| { - println!("burrito"); - }) - .flex() - .flex_row() - .justify_between() - .child( - div() - .flex() - .flex_col() - .justify_between() - .child( - right_click_menu("test2") - .trigger(|_, _, _| Label::new("TOP LEFT")) - .menu(move |window, cx| build_menu(window, cx, "top left")), - ) - .child( - right_click_menu("test1") - .trigger(|_, _, _| Label::new("BOTTOM LEFT")) - .anchor(Corner::BottomLeft) - .attach(Corner::TopLeft) - .menu(move |window, cx| build_menu(window, cx, "bottom left")), - ), - ) - .child( - div() - .flex() - .flex_col() - .justify_between() - .child( - right_click_menu("test3") - .trigger(|_, _, _| Label::new("TOP RIGHT")) - .anchor(Corner::TopRight) - .menu(move |window, cx| build_menu(window, cx, "top right")), - ) - .child( - right_click_menu("test4") - .trigger(|_, _, _| Label::new("BOTTOM RIGHT")) - .anchor(Corner::BottomRight) - .attach(Corner::TopRight) - .menu(move |window, cx| build_menu(window, cx, "bottom right")), - ), - ) - } -} From 37fd7f71a9920ebdd64e0e39ca9612ccd74ae665 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 9 Apr 2026 10:42:09 -0300 Subject: [PATCH 05/67] sidebar: Add setting to control side in the settings UI (#53516) Release Notes: - N/A --- assets/settings/default.json | 2 +- crates/settings_content/src/agent.rs | 4 ++-- crates/settings_ui/src/page_data.rs | 15 
++++++++++++++- crates/settings_ui/src/settings_ui.rs | 1 + 4 files changed, 18 insertions(+), 4 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 07d2ea111e3b2b9480979a7189094e445b21b655..56dff8b2ad632ed74045887ea274cfa112140b4b 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -964,7 +964,7 @@ // // Default: true "flexible": true, - // Where to position the sidebar. Can be 'left' or 'right'. + // Where to position the threads sidebar. Can be 'left' or 'right'. "sidebar_side": "left", // Default width when the agent panel is docked to the left or right. "default_width": 640, diff --git a/crates/settings_content/src/agent.rs b/crates/settings_content/src/agent.rs index 81edf85c8dd97e64567fab522bdcbbebed23997d..76891185c42ee36324c1cc160edfb27d63ecc0d6 100644 --- a/crates/settings_content/src/agent.rs +++ b/crates/settings_content/src/agent.rs @@ -33,7 +33,7 @@ pub enum NewThreadLocation { NewWorktree, } -/// Where to position the sidebar. +/// Where to position the threads sidebar. #[derive( Clone, Copy, @@ -114,7 +114,7 @@ pub struct AgentSettingsContent { /// /// Default: true pub flexible: Option, - /// Where to position the sidebar. + /// Where to position the threads sidebar. 
/// /// Default: left pub sidebar_side: Option, diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index 2fa48198dacaf9d9862ffd6e753e0ed735a6ca7b..1bab4984a1515627ef26042fa7937a328877df0a 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -7186,7 +7186,7 @@ fn collaboration_page() -> SettingsPage { } fn ai_page(cx: &App) -> SettingsPage { - fn general_section() -> [SettingsPageItem; 2] { + fn general_section() -> [SettingsPageItem; 3] { [ SettingsPageItem::SectionHeader("General"), SettingsPageItem::SettingItem(SettingItem { @@ -7202,6 +7202,19 @@ fn ai_page(cx: &App) -> SettingsPage { metadata: None, files: USER | PROJECT, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Threads Sidebar Side", + description: "Which side of the window the threads sidebar appears on.", + field: Box::new(SettingField { + json_path: Some("agent.sidebar_side"), + pick: |settings_content| settings_content.agent.as_ref()?.sidebar_side.as_ref(), + write: |settings_content, value| { + settings_content.agent.get_or_insert_default().sidebar_side = value; + }, + }), + metadata: None, + files: USER, + }), ] } diff --git a/crates/settings_ui/src/settings_ui.rs b/crates/settings_ui/src/settings_ui.rs index bbe05a3c23113b1faa968fdd9c084f604debc0c4..9a5a7dafea4708205569b53e7aa460510a9fbb1e 100644 --- a/crates/settings_ui/src/settings_ui.rs +++ b/crates/settings_ui/src/settings_ui.rs @@ -474,6 +474,7 @@ fn init_renderers(cx: &mut App) { .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) From 399d3d267ecf7ad5425bec673d818b963eab8f91 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 9 Apr 2026 10:42:21 -0300 Subject: [PATCH 06/67] docs: 
Update mentions to "assistant panel" (#53514) We don't use this terminology anymore; now it's "agent panel". Release Notes: - N/A --- crates/anthropic/src/anthropic.rs | 2 +- crates/deepseek/src/deepseek.rs | 2 +- crates/google_ai/src/google_ai.rs | 2 +- crates/mistral/src/mistral.rs | 2 +- crates/open_ai/src/open_ai.rs | 2 +- crates/settings_content/src/language_model.rs | 6 +++--- crates/vercel/src/vercel.rs | 2 +- crates/x_ai/src/x_ai.rs | 2 +- docs/src/migrate/intellij.md | 2 +- docs/src/migrate/pycharm.md | 2 +- docs/src/migrate/rustrover.md | 2 +- docs/src/migrate/webstorm.md | 2 +- docs/src/vim.md | 2 +- 13 files changed, 15 insertions(+), 15 deletions(-) diff --git a/crates/anthropic/src/anthropic.rs b/crates/anthropic/src/anthropic.rs index 48fa318d7c1d87e63725cef836baf9c945966206..ba79eb2315f96f52d7f695ae2e94c616763f94a0 100644 --- a/crates/anthropic/src/anthropic.rs +++ b/crates/anthropic/src/anthropic.rs @@ -109,7 +109,7 @@ pub enum Model { Custom { name: String, max_tokens: u64, - /// The name displayed in the UI, such as in the assistant panel model dropdown menu. + /// The name displayed in the UI, such as in the agent panel model dropdown menu. display_name: Option, /// Override this model with a different Anthropic model for tool calls. tool_override: Option, diff --git a/crates/deepseek/src/deepseek.rs b/crates/deepseek/src/deepseek.rs index 636258a5a132ce79cb5d15b1aaa25d6e4d3af643..19e1f6c2466512f4c4c96b6762885087202c65f3 100644 --- a/crates/deepseek/src/deepseek.rs +++ b/crates/deepseek/src/deepseek.rs @@ -56,7 +56,7 @@ pub enum Model { #[serde(rename = "custom")] Custom { name: String, - /// The name displayed in the UI, such as in the assistant panel model dropdown menu. + /// The name displayed in the UI, such as in the agent panel model dropdown menu. 
display_name: Option, max_tokens: u64, max_output_tokens: Option, diff --git a/crates/google_ai/src/google_ai.rs b/crates/google_ai/src/google_ai.rs index 5770c9a020b04bf280908993911b67ec3a5b980f..7917eb45c6292d05ede5267ba669a942348e575a 100644 --- a/crates/google_ai/src/google_ai.rs +++ b/crates/google_ai/src/google_ai.rs @@ -518,7 +518,7 @@ pub enum Model { #[serde(rename = "custom")] Custom { name: String, - /// The name displayed in the UI, such as in the assistant panel model dropdown menu. + /// The name displayed in the UI, such as in the agent panel model dropdown menu. display_name: Option, max_tokens: u64, #[serde(default)] diff --git a/crates/mistral/src/mistral.rs b/crates/mistral/src/mistral.rs index 0244f904468a5eb3e03b520a2687b31a1168f52b..e8227ca833eabcc712abf6f6b75e52e6f5b9583d 100644 --- a/crates/mistral/src/mistral.rs +++ b/crates/mistral/src/mistral.rs @@ -79,7 +79,7 @@ pub enum Model { #[serde(rename = "custom")] Custom { name: String, - /// The name displayed in the UI, such as in the assistant panel model dropdown menu. + /// The name displayed in the UI, such as in the agent panel model dropdown menu. display_name: Option, max_tokens: u64, max_output_tokens: Option, diff --git a/crates/open_ai/src/open_ai.rs b/crates/open_ai/src/open_ai.rs index 5423d9c5dcaa13589a8a7d658548b42fd467f67f..256b78f8a2ec921e842a846cbee75a4147745e00 100644 --- a/crates/open_ai/src/open_ai.rs +++ b/crates/open_ai/src/open_ai.rs @@ -98,7 +98,7 @@ pub enum Model { #[serde(rename = "custom")] Custom { name: String, - /// The name displayed in the UI, such as in the assistant panel model dropdown menu. + /// The name displayed in the UI, such as in the agent panel model dropdown menu. 
display_name: Option, max_tokens: u64, max_output_tokens: Option, diff --git a/crates/settings_content/src/language_model.rs b/crates/settings_content/src/language_model.rs index 00ecf42537459496102495c51628b54405968214..17beef9df25f7662caedd1380e867ad4aefbb7cc 100644 --- a/crates/settings_content/src/language_model.rs +++ b/crates/settings_content/src/language_model.rs @@ -39,7 +39,7 @@ pub struct AnthropicSettingsContent { pub struct AnthropicAvailableModel { /// The model's name in the Anthropic API. e.g. claude-3-5-sonnet-latest, claude-3-opus-20240229, etc pub name: String, - /// The model's name in Zed's UI, such as in the model selector dropdown menu in the assistant panel. + /// The model's name in Zed's UI, such as in the model selector dropdown menu in the agent panel. pub display_name: Option, /// The model's context window size. pub max_tokens: u64, @@ -109,7 +109,7 @@ pub struct OllamaSettingsContent { pub struct OllamaAvailableModel { /// The model name in the Ollama API (e.g. "llama3.2:latest") pub name: String, - /// The model's name in Zed's UI, such as in the model selector dropdown menu in the assistant panel. + /// The model's name in Zed's UI, such as in the model selector dropdown menu in the agent panel. pub display_name: Option, /// The Context Length parameter to the model (aka num_ctx or n_ctx) pub max_tokens: u64, @@ -388,7 +388,7 @@ pub struct ZedDotDevAvailableModel { pub provider: ZedDotDevAvailableProvider, /// The model's name in the provider's API. e.g. claude-3-5-sonnet-20240620 pub name: String, - /// The name displayed in the UI, such as in the assistant panel model dropdown menu. + /// The name displayed in the UI, such as in the agent panel model dropdown menu. pub display_name: Option, /// The size of the context window, indicating the maximum number of tokens the model can process. 
pub max_tokens: usize, diff --git a/crates/vercel/src/vercel.rs b/crates/vercel/src/vercel.rs index 8686fda53fbb1d19090f14ff944ec0641ac16c07..02ac291b9dd54a27aafa5af2a770131e87301184 100644 --- a/crates/vercel/src/vercel.rs +++ b/crates/vercel/src/vercel.rs @@ -13,7 +13,7 @@ pub enum Model { #[serde(rename = "custom")] Custom { name: String, - /// The name displayed in the UI, such as in the assistant panel model dropdown menu. + /// The name displayed in the UI, such as in the agent panel model dropdown menu. display_name: Option, max_tokens: u64, max_output_tokens: Option, diff --git a/crates/x_ai/src/x_ai.rs b/crates/x_ai/src/x_ai.rs index fd141a1723a28d235311d5d875bf4cc0388cab61..bc49a3e2b37d6ac83c66a2fba3af83ea7a451576 100644 --- a/crates/x_ai/src/x_ai.rs +++ b/crates/x_ai/src/x_ai.rs @@ -48,7 +48,7 @@ pub enum Model { #[serde(rename = "custom")] Custom { name: String, - /// The name displayed in the UI, such as in the assistant panel model dropdown menu. + /// The name displayed in the UI, such as in the agent panel model dropdown menu. 
display_name: Option, max_tokens: u64, max_output_tokens: Option, diff --git a/docs/src/migrate/intellij.md b/docs/src/migrate/intellij.md index adf0e20bef761385b66ad6bf55e387dd662088f4..74f7cf226c8620dbe122c59d952252b80c069362 100644 --- a/docs/src/migrate/intellij.md +++ b/docs/src/migrate/intellij.md @@ -119,7 +119,7 @@ If you chose the JetBrains keymap during onboarding, most of your shortcuts shou | Action | Shortcut | Notes | | ----------------- | -------------------------- | ------------------------------ | -| Toggle Right Dock | `Cmd + R` | Assistant panel, notifications | +| Toggle Right Dock | `Cmd + R` | Agent panel, notifications | | Split Panes | `Cmd + K`, then arrow keys | Create splits in any direction | ### How to Customize Keybindings diff --git a/docs/src/migrate/pycharm.md b/docs/src/migrate/pycharm.md index 0ce769b06bcc1363a4dde1d9ae3c138c0b4539f1..9f45135268e518476af00cc9ed8451d0f6a3e0ed 100644 --- a/docs/src/migrate/pycharm.md +++ b/docs/src/migrate/pycharm.md @@ -119,7 +119,7 @@ If you chose the JetBrains keymap during onboarding, most of your shortcuts shou | Action | Shortcut | Notes | | ----------------- | -------------------------- | ------------------------------ | -| Toggle Right Dock | `Cmd + R` | Assistant panel, notifications | +| Toggle Right Dock | `Cmd + R` | Agent panel, notifications | | Split Panes | `Cmd + K`, then arrow keys | Create splits in any direction | ### How to Customize Keybindings diff --git a/docs/src/migrate/rustrover.md b/docs/src/migrate/rustrover.md index 1e12202233ff1dc8f958b7acfc71a16723ed34ff..34cf03393e649f07202388675e02e9f8504c8d85 100644 --- a/docs/src/migrate/rustrover.md +++ b/docs/src/migrate/rustrover.md @@ -132,7 +132,7 @@ If you chose the JetBrains keymap during onboarding, most of your shortcuts shou | Action | Shortcut | Notes | | ----------------- | -------------------------- | ------------------------------ | -| Toggle Right Dock | `Cmd + R` | Assistant panel, notifications | +| Toggle Right 
Dock | `Cmd + R` | Agent panel, notifications | | Split Panes | `Cmd + K`, then arrow keys | Create splits in any direction | ### How to Customize Keybindings diff --git a/docs/src/migrate/webstorm.md b/docs/src/migrate/webstorm.md index 3708d8dec825caf23b831a4151ee60e95c04287d..e5313251ec12340c2a208c6a8b9cbeb116e85145 100644 --- a/docs/src/migrate/webstorm.md +++ b/docs/src/migrate/webstorm.md @@ -112,7 +112,7 @@ If you chose the JetBrains keymap during onboarding, most of your shortcuts shou | Action | Keybinding | Notes | | ----------------- | -------------------------------- | ------------------------------------------------------------- | -| Toggle Right Dock | {#kb workspace::ToggleRightDock} | Assistant panel, notifications | +| Toggle Right Dock | {#kb workspace::ToggleRightDock} | Agent panel, notifications | | Split Pane Right | {#kb pane::SplitRight} | Use other arrow keys to create splits in different directions | ### How to Customize Keybindings diff --git a/docs/src/vim.md b/docs/src/vim.md index 8e93edff081681a3e094c811e2d76822766ef67e..e60e084ac13cf935be49ab3b87304317df971d77 100644 --- a/docs/src/vim.md +++ b/docs/src/vim.md @@ -448,7 +448,7 @@ Here's a template with useful vim mode contexts to help you customize your vim m By default, you can navigate between the different files open in the editor with shortcuts like `ctrl+w` followed by one of `hjkl` to move to the left, down, up, or right, respectively. -But you cannot use the same shortcuts to move between all the editor docks (the terminal, project panel, assistant panel, ...). If you want to use the same shortcuts to navigate to the docks, you can add the following key bindings to your user keymap. +But you cannot use the same shortcuts to move between all the editor docks (the terminal, project panel, agent panel, ...). If you want to use the same shortcuts to navigate to the docks, you can add the following key bindings to your user keymap. 
```json [keymap] { From c998b4a053b2893e15b9d4cbb4ca57faf03e7108 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Thu, 9 Apr 2026 10:05:40 -0400 Subject: [PATCH 07/67] diagnostics: Fall back to `multibuffer_context_lines` when syntactic expansion produces a single-line range (#53526) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --- crates/diagnostics/src/diagnostics.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index dc3708e9307032a43b062289764656fa05b20d46..49e17e69b00c6061c5209c5ad5440c7ea816dd80 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -989,6 +989,7 @@ async fn context_range_for_entry( cx, ) .await + .filter(|rows| rows.start() != rows.end()) { Range { start: Point::new(*rows.start(), 0), From db3ea01097afb42338d7397fc4989dec69790b45 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 9 Apr 2026 10:33:08 -0400 Subject: [PATCH 08/67] client: Store organization configuration (#53450) This PR updates the `UserStore` to store the configuration for each organization. We'll be reading from this in subsequent PRs. Closes CLO-628. 
Release Notes: - N/A --- crates/client/src/test.rs | 1 + crates/client/src/user.rs | 12 ++++++++++++ crates/cloud_api_types/src/cloud_api_types.rs | 15 +++++++++++++++ 3 files changed, 28 insertions(+) diff --git a/crates/client/src/test.rs b/crates/client/src/test.rs index 99fc594e7fd38ea7621e6f62e56ccd74e77b2e9b..00d29fe537cd4879db945cb0fa1f3767c189403b 100644 --- a/crates/client/src/test.rs +++ b/crates/client/src/test.rs @@ -271,6 +271,7 @@ pub fn make_get_authenticated_user_response( organizations: vec![], default_organization_id: None, plans_by_organization: BTreeMap::new(), + configuration_by_organization: BTreeMap::new(), plan: PlanInfo { plan: KnownOrUnknown::Known(Plan::ZedPro), subscription_period: None, diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs index e9b9acf68573ef5a05d642c09ed96a4d8aa23580..270180cefd74b92075c1237a0f8320454a500b04 100644 --- a/crates/client/src/user.rs +++ b/crates/client/src/user.rs @@ -5,6 +5,7 @@ use cloud_api_client::websocket_protocol::MessageToClient; use cloud_api_client::{ GetAuthenticatedUserResponse, KnownOrUnknown, Organization, OrganizationId, Plan, PlanInfo, }; +use cloud_api_types::OrganizationConfiguration; use cloud_llm_client::{ EDIT_PREDICTIONS_USAGE_AMOUNT_HEADER_NAME, EDIT_PREDICTIONS_USAGE_LIMIT_HEADER_NAME, UsageLimit, }; @@ -117,6 +118,7 @@ pub struct UserStore { current_organization: Option>, organizations: Vec>, plans_by_organization: HashMap, + configuration_by_organization: HashMap, contacts: Vec>, incoming_contact_requests: Vec>, outgoing_contact_requests: Vec>, @@ -193,6 +195,7 @@ impl UserStore { current_organization: None, organizations: Vec::new(), plans_by_organization: HashMap::default(), + configuration_by_organization: HashMap::default(), plan_info: None, edit_prediction_usage: None, contacts: Default::default(), @@ -730,6 +733,13 @@ impl UserStore { self.plans_by_organization.get(organization_id).copied() } + pub fn current_organization_configuration(&self) -> 
Option<&OrganizationConfiguration> { + let current_organization = self.current_organization.as_ref()?; + + self.configuration_by_organization + .get(¤t_organization.id) + } + pub fn plan(&self) -> Option { #[cfg(debug_assertions)] if let Ok(plan) = std::env::var("ZED_SIMULATE_PLAN").as_ref() { @@ -865,6 +875,8 @@ impl UserStore { (organization_id, plan) }) .collect(); + self.configuration_by_organization = + response.configuration_by_organization.into_iter().collect(); self.edit_prediction_usage = Some(EditPredictionUsage(RequestUsage { limit: response.plan.usage.edit_predictions.limit, diff --git a/crates/cloud_api_types/src/cloud_api_types.rs b/crates/cloud_api_types/src/cloud_api_types.rs index a606b61923074b4eda42c861afddee9efba5f4b5..b4435f5bf0d53d6df3df54ef28bd99124b622421 100644 --- a/crates/cloud_api_types/src/cloud_api_types.rs +++ b/crates/cloud_api_types/src/cloud_api_types.rs @@ -26,6 +26,8 @@ pub struct GetAuthenticatedUserResponse { pub default_organization_id: Option, #[serde(default)] pub plans_by_organization: BTreeMap>, + #[serde(default)] + pub configuration_by_organization: BTreeMap, pub plan: PlanInfo, } @@ -50,6 +52,19 @@ pub struct Organization { pub is_personal: bool, } +#[derive(Debug, PartialEq, Serialize, Deserialize)] +pub struct OrganizationConfiguration { + pub is_zed_model_provider_enabled: bool, + pub is_agent_thread_feedback_enabled: bool, + pub edit_prediction: OrganizationEditPredictionConfiguration, +} + +#[derive(Debug, PartialEq, Serialize, Deserialize)] +pub struct OrganizationEditPredictionConfiguration { + pub is_enabled: bool, + pub is_feedback_enabled: bool, +} + #[derive(Debug, PartialEq, Serialize, Deserialize)] pub struct AcceptTermsOfServiceResponse { pub user: AuthenticatedUser, From 55c02b45b336e49c72b1f49837f455228c66916f Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 9 Apr 2026 10:58:00 -0400 Subject: [PATCH 09/67] agent_ui: Disable thread feedback based on organization configuration (#53454) This PR 
updates the agent thread UI to disable the thread feedback controls based on the organization's configuration. Closes CLO-629. Release Notes: - N/A --- .../src/conversation_view/thread_view.rs | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/crates/agent_ui/src/conversation_view/thread_view.rs b/crates/agent_ui/src/conversation_view/thread_view.rs index ae9bf17c76bde99cdacea9d5bb205074a1a4ee39..412778e054cab1596b2e9555a9cd4a12c3edb6ec 100644 --- a/crates/agent_ui/src/conversation_view/thread_view.rs +++ b/crates/agent_ui/src/conversation_view/thread_view.rs @@ -4872,9 +4872,20 @@ impl ThreadView { }, ); - if AgentSettings::get_global(cx).enable_feedback - && self.thread.read(cx).connection().telemetry().is_some() - { + let enable_thread_feedback = util::maybe!({ + let project = thread.read(cx).project().read(cx); + let user_store = project.user_store(); + if let Some(configuration) = user_store.read(cx).current_organization_configuration() { + if !configuration.is_agent_thread_feedback_enabled { + return false; + } + } + + AgentSettings::get_global(cx).enable_feedback + && self.thread.read(cx).connection().telemetry().is_some() + }); + + if enable_thread_feedback { let feedback = self.thread_feedback.feedback; let tooltip_meta = || { From b150663d45171d3a43b0b97f747ba65434b0eb0d Mon Sep 17 00:00:00 2001 From: Dong Date: Fri, 10 Apr 2026 00:13:51 +0800 Subject: [PATCH 10/67] markdown_preview: Support anchor link for headings (#53184) ## What does this PR did - Generate [GitHub-flavored heading slugs](https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax#section-links) for markdown headings - Handle `[label](#heading)` same-document anchor links that scroll the preview and editor to the target heading - Handle `[label](./file.md#heading)` cross-file anchor links that open the file, scroll the preview, and move the editor cursor to the 
heading https://github.com/user-attachments/assets/ecc468bf-bed0-4543-a988-703025a61bf8 ## What to test - [ ] Create a markdown file with `[Go to section](#section-name)` links, verify clicking scrolls preview and editor - [ ] Create two markdown files with cross-file links like `[See other](./other.md#heading)`, verify file opens and preview scrolls to heading - [ ] Verify duplicate headings produce correct slugs (`heading`, `heading-1`) - [ ] Verify external URLs (`https://...`) are unaffected Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [ ] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [ ] Performance impact has been considered and is acceptable Closes #18699 Release Notes: - Added support for anchor links for headings in Markdown Preview. --------- Co-authored-by: Smit Barmase --- crates/markdown/src/markdown.rs | 23 ++- crates/markdown/src/mermaid.rs | 2 +- crates/markdown/src/parser.rs | 152 ++++++++++++++++-- .../src/markdown_preview_view.rs | 70 +++++++- crates/util/src/markdown.rs | 108 +++++++++++++ 5 files changed, 332 insertions(+), 23 deletions(-) diff --git a/crates/markdown/src/markdown.rs b/crates/markdown/src/markdown.rs index e6ad1b1f2ac9154eaabc6d18dbcb9c8695ae019d..e873a458cdaf981635f14c4e3ab18456e700f048 100644 --- a/crates/markdown/src/markdown.rs +++ b/crates/markdown/src/markdown.rs @@ -272,6 +272,7 @@ pub struct MarkdownOptions { pub parse_links_only: bool, pub parse_html: bool, pub render_mermaid_diagrams: bool, + pub parse_heading_slugs: bool, } #[derive(Clone, Copy, PartialEq, Eq)] @@ -498,6 +499,16 @@ impl Markdown { self.pending_parse.is_some() } + pub fn scroll_to_heading(&mut self, slug: &str, cx: &mut Context) -> Option { + if let Some(source_index) = 
self.parsed_markdown.heading_slugs.get(slug).copied() { + self.autoscroll_request = Some(source_index); + cx.notify(); + Some(source_index) + } else { + None + } + } + pub fn source(&self) -> &str { &self.source } @@ -669,6 +680,7 @@ impl Markdown { let should_parse_links_only = self.options.parse_links_only; let should_parse_html = self.options.parse_html; let should_render_mermaid_diagrams = self.options.render_mermaid_diagrams; + let should_parse_heading_slugs = self.options.parse_heading_slugs; let language_registry = self.language_registry.clone(); let fallback = self.fallback_code_block_language.clone(); @@ -683,17 +695,20 @@ impl Markdown { root_block_starts: Arc::default(), html_blocks: BTreeMap::default(), mermaid_diagrams: BTreeMap::default(), + heading_slugs: HashMap::default(), }, Default::default(), ); } - let parsed = parse_markdown_with_options(&source, should_parse_html); + let parsed = + parse_markdown_with_options(&source, should_parse_html, should_parse_heading_slugs); let events = parsed.events; let language_names = parsed.language_names; let paths = parsed.language_paths; let root_block_starts = parsed.root_block_starts; let html_blocks = parsed.html_blocks; + let heading_slugs = parsed.heading_slugs; let mermaid_diagrams = if should_render_mermaid_diagrams { extract_mermaid_diagrams(&source, &events) } else { @@ -760,6 +775,7 @@ impl Markdown { root_block_starts: Arc::from(root_block_starts), html_blocks, mermaid_diagrams, + heading_slugs, }, images_by_source_offset, ) @@ -883,6 +899,7 @@ pub struct ParsedMarkdown { pub root_block_starts: Arc<[usize]>, pub(crate) html_blocks: BTreeMap, pub(crate) mermaid_diagrams: BTreeMap, + pub heading_slugs: HashMap, } impl ParsedMarkdown { @@ -3120,7 +3137,7 @@ mod tests { #[test] fn test_table_checkbox_detection() { let md = "| Done |\n|------|\n| [x] |\n| [ ] |"; - let events = crate::parser::parse_markdown_with_options(md, false).events; + let events = crate::parser::parse_markdown_with_options(md, 
false, false).events; let mut in_table = false; let mut cell_texts: Vec = Vec::new(); @@ -3338,7 +3355,7 @@ mod tests { } fn has_code_block(markdown: &str) -> bool { - let parsed_data = parse_markdown_with_options(markdown, false); + let parsed_data = parse_markdown_with_options(markdown, false, false); parsed_data .events .iter() diff --git a/crates/markdown/src/mermaid.rs b/crates/markdown/src/mermaid.rs index b8e40ebe7ec16cbbb8d9b11ab3edfc75da46f3a9..08f063e570517340035965e46e9a60edc32d9f77 100644 --- a/crates/markdown/src/mermaid.rs +++ b/crates/markdown/src/mermaid.rs @@ -371,7 +371,7 @@ mod tests { #[test] fn test_extract_mermaid_diagrams_parses_scale() { let markdown = "```mermaid 150\ngraph TD;\n```\n\n```rust\nfn main() {}\n```"; - let events = crate::parser::parse_markdown_with_options(markdown, false).events; + let events = crate::parser::parse_markdown_with_options(markdown, false, false).events; let diagrams = extract_mermaid_diagrams(markdown, &events); assert_eq!(diagrams.len(), 1); diff --git a/crates/markdown/src/parser.rs b/crates/markdown/src/parser.rs index 6de5b16a71053f9a61a3e9c2d66d91cd962540a0..c6c988083fddeac357b92d0b6604e0bbd564308f 100644 --- a/crates/markdown/src/parser.rs +++ b/crates/markdown/src/parser.rs @@ -1,12 +1,12 @@ +use collections::{BTreeMap, HashMap, HashSet}; use gpui::SharedString; use linkify::LinkFinder; pub use pulldown_cmark::TagEnd as MarkdownTagEnd; use pulldown_cmark::{ Alignment, CowStr, HeadingLevel, LinkType, MetadataBlockKind, Options, Parser, }; -use std::{collections::BTreeMap, ops::Range, sync::Arc}; - -use collections::HashSet; +use std::{ops::Range, sync::Arc}; +use util::markdown::generate_heading_slug; use crate::{html, path_range::PathWithRange}; @@ -37,6 +37,7 @@ pub(crate) struct ParsedMarkdownData { pub language_paths: HashSet>, pub root_block_starts: Vec, pub html_blocks: BTreeMap, + pub heading_slugs: HashMap, } impl ParseState { @@ -80,7 +81,78 @@ impl ParseState { } } -pub(crate) fn 
parse_markdown_with_options(text: &str, parse_html: bool) -> ParsedMarkdownData { +const MAX_DUPLICATE_HEADING_SLUGS: usize = 128; + +fn build_heading_slugs( + source: &str, + events: &[(Range, MarkdownEvent)], +) -> HashMap { + let mut slugs = HashMap::default(); + let mut slug_counts: HashMap = HashMap::default(); + let mut inside_heading = false; + let mut heading_text = String::new(); + let mut heading_source_start: Option = None; + + for (range, event) in events { + match event { + MarkdownEvent::Start(MarkdownTag::Heading { .. }) => { + inside_heading = true; + heading_text.clear(); + heading_source_start = None; + } + MarkdownEvent::End(MarkdownTagEnd::Heading(_)) => { + if inside_heading { + let source_offset = heading_source_start.unwrap_or(range.start); + let base_slug = generate_heading_slug(&heading_text); + let count = slug_counts.entry(base_slug.clone()).or_insert(0); + let mut slug = if *count == 0 { + base_slug.clone() + } else { + format!("{base_slug}-{count}") + }; + *count += 1; + while slugs.contains_key(slug.as_str()) { + let Some(count) = slug_counts.get_mut(&base_slug) else { + slug.clear(); + break; + }; + if *count >= MAX_DUPLICATE_HEADING_SLUGS { + slug.clear(); + break; + } + slug = format!("{base_slug}-{count}"); + *count += 1; + } + if !slug.is_empty() { + slugs.insert(SharedString::from(slug), source_offset); + } + inside_heading = false; + } + } + MarkdownEvent::Text | MarkdownEvent::Code if inside_heading => { + if heading_source_start.is_none() { + heading_source_start = Some(range.start); + } + heading_text.push_str(&source[range.clone()]); + } + MarkdownEvent::SubstitutedText(substituted) if inside_heading => { + if heading_source_start.is_none() { + heading_source_start = Some(range.start); + } + heading_text.push_str(substituted); + } + _ => {} + } + } + + slugs +} + +pub(crate) fn parse_markdown_with_options( + text: &str, + parse_html: bool, + parse_heading_slugs: bool, +) -> ParsedMarkdownData { let mut state = 
ParseState::default(); let mut language_names = HashSet::default(); let mut language_paths = HashSet::default(); @@ -440,12 +512,19 @@ pub(crate) fn parse_markdown_with_options(text: &str, parse_html: bool) -> Parse } } + let heading_slugs = if parse_heading_slugs { + build_heading_slugs(text, &state.events) + } else { + HashMap::default() + }; + ParsedMarkdownData { events: state.events, language_names, language_paths, root_block_starts: state.root_block_starts, html_blocks, + heading_slugs, } } @@ -697,7 +776,7 @@ mod tests { #[test] fn test_html_comments() { assert_eq!( - parse_markdown_with_options(" \nReturns", false), + parse_markdown_with_options(" \nReturns", false, false), ParsedMarkdownData { events: vec![ (2..30, RootStart), @@ -725,7 +804,8 @@ mod tests { assert_eq!( parse_markdown_with_options( "   https://some.url some \\`►\\` text", - false + false, + false, ), ParsedMarkdownData { events: vec![ @@ -764,7 +844,8 @@ mod tests { assert_eq!( parse_markdown_with_options( "You can use the [GitHub Search API](https://docs.github.com/en", - false + false, + false, ) .events, vec![ @@ -797,7 +878,8 @@ mod tests { assert_eq!( parse_markdown_with_options( "-- --- ... 
\"double quoted\" 'single quoted' ----------", - false + false, + false, ), ParsedMarkdownData { events: vec![ @@ -830,7 +912,7 @@ mod tests { #[test] fn test_code_block_metadata() { assert_eq!( - parse_markdown_with_options("```rust\nfn main() {\n let a = 1;\n}\n```", false), + parse_markdown_with_options("```rust\nfn main() {\n let a = 1;\n}\n```", false, false), ParsedMarkdownData { events: vec![ (0..37, RootStart), @@ -858,7 +940,7 @@ mod tests { } ); assert_eq!( - parse_markdown_with_options(" fn main() {}", false), + parse_markdown_with_options(" fn main() {}", false, false), ParsedMarkdownData { events: vec![ (4..16, RootStart), @@ -883,7 +965,7 @@ mod tests { } fn assert_code_block_does_not_emit_links(markdown: &str) { - let parsed = parse_markdown_with_options(markdown, false); + let parsed = parse_markdown_with_options(markdown, false, false); let mut code_block_depth = 0; let mut code_block_count = 0; let mut saw_text_inside_code_block = false; @@ -937,7 +1019,7 @@ mod tests { #[test] fn test_metadata_blocks_do_not_affect_root_blocks() { assert_eq!( - parse_markdown_with_options("+++\ntitle = \"Example\"\n+++\n\nParagraph", false), + parse_markdown_with_options("+++\ntitle = \"Example\"\n+++\n\nParagraph", false, false), ParsedMarkdownData { events: vec![ (27..36, RootStart), @@ -959,7 +1041,7 @@ mod tests { |------|---------| | [x] | Fix bug | | [ ] | Add feature |"; - let parsed = parse_markdown_with_options(markdown, false); + let parsed = parse_markdown_with_options(markdown, false, false); let mut in_table = false; let mut saw_task_list_marker = false; @@ -1038,7 +1120,8 @@ mod tests { assert_eq!( parse_markdown_with_options( "https:/\\/example.com is equivalent to https://example.com!", - false + false, + false, ) .events, vec![ @@ -1079,7 +1162,8 @@ mod tests { assert_eq!( parse_markdown_with_options( "Visit https://example.com/cat\\/é‍☕ for coffee!", - false + false, + false, ) .events, [ @@ -1106,4 +1190,42 @@ mod tests { ] ); } + + #[test] + fn 
test_heading_slugs() { + let parsed = parse_markdown_with_options( + "# Hello World\n\n## Code `block`\n\n### Third Level\n\n#### Fourth Level\n\n## Hello World", + false, + true, + ); + assert_eq!(parsed.heading_slugs.len(), 5); + assert!(parsed.heading_slugs.contains_key("hello-world")); + assert!(parsed.heading_slugs.contains_key("code-block")); + assert!(parsed.heading_slugs.contains_key("third-level")); + assert!(parsed.heading_slugs.contains_key("fourth-level")); + assert!(parsed.heading_slugs.contains_key("hello-world-1")); + } + + #[test] + fn test_heading_source_index_for_slug() { + let parsed = parse_markdown_with_options( + "# Duplicate\n\nText\n\n## Duplicate\n\nMore text", + false, + true, + ); + let first = parsed.heading_slugs.get("duplicate").copied(); + let second = parsed.heading_slugs.get("duplicate-1").copied(); + assert!(first.is_some()); + assert!(second.is_some()); + assert!(first.expect("first slug missing") < second.expect("second slug missing")); + } + + #[test] + fn test_heading_slug_collision_with_dedup_suffix() { + let parsed = parse_markdown_with_options("# Foo\n\n## Foo\n\n## Foo 1", false, true); + assert_eq!(parsed.heading_slugs.len(), 3); + assert!(parsed.heading_slugs.contains_key("foo")); + assert!(parsed.heading_slugs.contains_key("foo-1")); + assert!(parsed.heading_slugs.contains_key("foo-1-1")); + } } diff --git a/crates/markdown_preview/src/markdown_preview_view.rs b/crates/markdown_preview/src/markdown_preview_view.rs index 3e6423b36603e247ba5da2a2166a8357701fa5cd..b97a559edf8760084586218a5b39e677c49727f5 100644 --- a/crates/markdown_preview/src/markdown_preview_view.rs +++ b/crates/markdown_preview/src/markdown_preview_view.rs @@ -21,6 +21,7 @@ use project::search::SearchQuery; use settings::Settings; use theme_settings::ThemeSettings; use ui::{WithScrollbar, prelude::*}; +use util::markdown::split_local_url_fragment; use util::normalize_path; use workspace::item::{Item, ItemBufferKind, ItemHandle}; use 
workspace::searchable::{ @@ -218,6 +219,7 @@ impl MarkdownPreviewView { MarkdownOptions { parse_html: true, render_mermaid_diagrams: true, + parse_heading_slugs: true, ..Default::default() }, cx, @@ -580,8 +582,6 @@ impl MarkdownPreviewView { window: &mut Window, cx: &mut Context, ) -> MarkdownElement { - let workspace = self.workspace.clone(); - let base_directory = self.base_directory.clone(); let active_editor = self .active_editor .as_ref() @@ -615,8 +615,20 @@ impl MarkdownPreviewView { ) } }) - .on_url_click(move |url, window, cx| { - open_preview_url(url, base_directory.clone(), &workspace, window, cx); + .on_url_click({ + let view_handle = cx.entity().downgrade(); + let workspace = self.workspace.clone(); + let base_directory = self.base_directory.clone(); + move |url, window, cx| { + handle_url_click( + url, + &view_handle, + base_directory.clone(), + &workspace, + window, + cx, + ); + } }); if let Some(active_editor) = active_editor { @@ -655,6 +667,56 @@ impl MarkdownPreviewView { } } +fn handle_url_click( + url: SharedString, + view: &WeakEntity, + base_directory: Option, + workspace: &WeakEntity, + window: &mut Window, + cx: &mut App, +) { + let (path_part, fragment) = split_local_url_fragment(url.as_ref()); + + if path_part.is_empty() { + if let Some(fragment) = fragment { + let view = view.clone(); + let slug = SharedString::from(fragment.to_string()); + window.defer(cx, move |window, cx| { + if let Some(view) = view.upgrade() { + let markdown = view.read(cx).markdown.clone(); + let active_editor = view + .read(cx) + .active_editor + .as_ref() + .map(|state| state.editor.clone()); + + let source_index = + markdown.update(cx, |markdown, cx| markdown.scroll_to_heading(&slug, cx)); + + if let Some(source_index) = source_index { + if let Some(editor) = active_editor { + MarkdownPreviewView::move_cursor_to_source_index( + &editor, + source_index, + window, + cx, + ); + } + } + } + }); + } + } else { + open_preview_url( + 
SharedString::from(path_part.to_string()), + base_directory, + workspace, + window, + cx, + ); + } +} + fn open_preview_url( url: SharedString, base_directory: Option, diff --git a/crates/util/src/markdown.rs b/crates/util/src/markdown.rs index 303dbe0cf59d868209c4f350fa88a0b156f66464..e42ce13b59b8d9823e0c9765c4010ffa50c78d40 100644 --- a/crates/util/src/markdown.rs +++ b/crates/util/src/markdown.rs @@ -1,5 +1,62 @@ use std::fmt::{Display, Formatter}; +/// Generates a URL-friendly slug from heading text (e.g. "Hello World" → "hello-world"). +pub fn generate_heading_slug(text: &str) -> String { + text.trim() + .chars() + .filter_map(|c| { + if c.is_alphanumeric() || c == '-' || c == '_' { + Some(c.to_lowercase().next().unwrap_or(c)) + } else if c == ' ' { + Some('-') + } else { + None + } + }) + .collect() +} + +/// Returns true if the URL starts with a URI scheme (RFC 3986 §3.1). +fn has_uri_scheme(url: &str) -> bool { + let mut chars = url.chars(); + match chars.next() { + Some(c) if c.is_ascii_alphabetic() => {} + _ => return false, + } + for c in chars { + if c == ':' { + return true; + } + if !(c.is_ascii_alphanumeric() || c == '+' || c == '-' || c == '.') { + return false; + } + } + false +} + +/// Splits a relative URL into its path and `#fragment` parts. +/// Absolute URLs are returned as-is with no fragment. +pub fn split_local_url_fragment(url: &str) -> (&str, Option<&str>) { + if has_uri_scheme(url) { + return (url, None); + } + match url.find('#') { + Some(pos) => { + let path = &url[..pos]; + let fragment = &url[pos + 1..]; + ( + path, + if fragment.is_empty() { + None + } else { + Some(fragment) + }, + ) + } + None => (url, None), + } +} + /// Indicates that the wrapped `String` is markdown text. 
#[derive(Debug, Clone)] pub struct MarkdownString(pub String); @@ -265,4 +322,55 @@ mod tests { "it can't be downgraded later" ); } + + #[test] + fn test_split_local_url_fragment() { + assert_eq!(split_local_url_fragment("#heading"), ("", Some("heading"))); + assert_eq!( + split_local_url_fragment("./file.md#heading"), + ("./file.md", Some("heading")) + ); + assert_eq!(split_local_url_fragment("./file.md"), ("./file.md", None)); + assert_eq!( + split_local_url_fragment("https://example.com#frag"), + ("https://example.com#frag", None) + ); + assert_eq!( + split_local_url_fragment("mailto:user@example.com"), + ("mailto:user@example.com", None) + ); + assert_eq!(split_local_url_fragment("#"), ("", None)); + assert_eq!( + split_local_url_fragment("../other.md#section"), + ("../other.md", Some("section")) + ); + assert_eq!( + split_local_url_fragment("123:not-a-scheme#frag"), + ("123:not-a-scheme", Some("frag")) + ); + } + + #[test] + fn test_generate_heading_slug() { + assert_eq!(generate_heading_slug("Hello World"), "hello-world"); + assert_eq!(generate_heading_slug("Hello World"), "hello--world"); + assert_eq!(generate_heading_slug("Hello-World"), "hello-world"); + assert_eq!( + generate_heading_slug("Some **bold** text"), + "some-bold-text" + ); + assert_eq!(generate_heading_slug("Let's try with Ü"), "lets-try-with-ü"); + assert_eq!( + generate_heading_slug("heading with 123 numbers"), + "heading-with-123-numbers" + ); + assert_eq!( + generate_heading_slug("What about (parens)?"), + "what-about-parens" + ); + assert_eq!( + generate_heading_slug(" leading spaces "), + "leading-spaces" + ); + } } From d80ed5488436cf8e6f596ac3ce4dcf29454b39d3 Mon Sep 17 00:00:00 2001 From: Cameron Mcloughlin Date: Thu, 9 Apr 2026 17:15:28 +0100 Subject: [PATCH 11/67] sidebar: More vim actions (#53419) Release Notes: - N/A or Added/Fixed/Improved ... 
--- assets/keymaps/vim.json | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index cbfea5d7fddc3ccdc41bab0167f550ad5e53172a..464270274af7750903dda3130978501ba29dade7 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -1148,6 +1148,11 @@ "g g": "menu::SelectFirst", "shift-g": "menu::SelectLast", "/": "agents_sidebar::FocusSidebarFilter", + "d d": "agent::RemoveSelectedThread", + "o": "agents_sidebar::NewThreadInGroup", + "shift-o": "agents_sidebar::NewThreadInGroup", + "] p": "multi_workspace::NextProject", + "[ p": "multi_workspace::PreviousProject", "z a": "editor::ToggleFold", "z c": "menu::SelectParent", "z o": "menu::SelectChild", From 6184b2457c796efe19d4df2dc7286310db7797f3 Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Thu, 9 Apr 2026 12:38:07 -0400 Subject: [PATCH 12/67] Fix project symbol picker UTF-8 highlight panic (#53485) This panic was caused because we incorrectly assumed that each character was one byte when converting character indices to highlight range byte indices. 
Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #53479 Release Notes: - Fix a panic that could occur in the project symbol search picker --------- Co-authored-by: Lukas Wirth --- crates/project_symbols/src/project_symbols.rs | 111 +++++++++++++++++- 1 file changed, 107 insertions(+), 4 deletions(-) diff --git a/crates/project_symbols/src/project_symbols.rs b/crates/project_symbols/src/project_symbols.rs index 931e332d93d869bc31909643190d5b35f32409dc..8edcd9a80d1759d965dc38ecb1c88f0ea76056ad 100644 --- a/crates/project_symbols/src/project_symbols.rs +++ b/crates/project_symbols/src/project_symbols.rs @@ -288,7 +288,7 @@ impl PickerDelegate for ProjectSymbolsDelegate { let custom_highlights = string_match .positions .iter() - .map(|pos| (*pos..pos + 1, highlight_style)); + .map(|pos| (*pos..label.ceil_char_boundary(pos + 1), highlight_style)); let highlights = gpui::combine_highlights(custom_highlights, syntax_runs); @@ -299,9 +299,12 @@ impl PickerDelegate for ProjectSymbolsDelegate { .toggle_state(selected) .child( v_flex() - .child(LabelLike::new().child( - StyledText::new(label).with_default_highlights(&text_style, highlights), - )) + .child( + LabelLike::new().child( + StyledText::new(&label) + .with_default_highlights(&text_style, highlights), + ), + ) .child( h_flex() .child(Label::new(path).size(LabelSize::Small).color(Color::Muted)) @@ -483,6 +486,106 @@ mod tests { }); } + #[gpui::test] + async fn test_project_symbols_renders_utf8_match(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({ "test.rs": "" })) + .await; + + let project = 
Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(Arc::new(Language::new( + LanguageConfig { + name: "Rust".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["rs".to_string()], + ..Default::default() + }, + ..Default::default() + }, + None, + ))); + let mut fake_servers = language_registry.register_fake_lsp( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + workspace_symbol_provider: Some(OneOf::Left(true)), + ..Default::default() + }, + ..Default::default() + }, + ); + + let _buffer = project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx) + }) + .await + .unwrap(); + + let fake_symbols = [symbol("안녕", path!("/dir/test.rs"))]; + let fake_server = fake_servers.next().await.unwrap(); + fake_server.set_request_handler::( + move |params: lsp::WorkspaceSymbolParams, cx| { + let executor = cx.background_executor().clone(); + let fake_symbols = fake_symbols.clone(); + async move { + let candidates = fake_symbols + .iter() + .enumerate() + .map(|(id, symbol)| StringMatchCandidate::new(id, &symbol.name)) + .collect::>(); + let matches = fuzzy::match_strings( + &candidates, + ¶ms.query, + true, + true, + 100, + &Default::default(), + executor, + ) + .await; + + Ok(Some(lsp::WorkspaceSymbolResponse::Flat( + matches + .into_iter() + .map(|mat| fake_symbols[mat.candidate_id].clone()) + .collect(), + ))) + } + }, + ); + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + + let symbols = cx.new_window_entity(|window, cx| { + Picker::uniform_list( + ProjectSymbolsDelegate::new(workspace.downgrade(), project.clone()), + window, + cx, + ) + }); + + symbols.update_in(cx, |p, window, cx| { + p.update_matches("안".to_string(), 
window, cx); + }); + + cx.run_until_parked(); + symbols.read_with(cx, |symbols, _| { + assert_eq!(symbols.delegate.matches.len(), 1); + assert_eq!(symbols.delegate.matches[0].string, "안녕"); + }); + + symbols.update_in(cx, |p, window, cx| { + assert!(p.delegate.render_match(0, false, window, cx).is_some()); + }); + } + fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { let store = SettingsStore::test(cx); From b6562e8afadaeb73828dfd7dfa94fe8cf8876e18 Mon Sep 17 00:00:00 2001 From: Dino Date: Thu, 9 Apr 2026 17:41:58 +0100 Subject: [PATCH 13/67] fs: Return trashed file location (#52012) Update both `Fs::trash_dir` and `Fs::trash_file` to now return the location of the trashed directory or file, as well as adding the `trash-rs` create dependency and updating the `RealFs` implementation for these methods to simply leverage `trash::delete_with_info`. * Add `fs::Fs::TrashedEntry` struct, which allows us to track the original file path and the new path in the OS' trash * Update the `fs::Fs::trash_dir` and `fs::Fs::trash_file` signatures to now return `Result` instead of `Result<()>` * The `options` argument was removed because it was never used by implementations other than the default one, and with this change to the signature type, we no longer have a default implementation, so the `options` argument would no longer make sense * Update `fs::RealFs::trash_dir` and `fs::RealFs::trash_file` implementations to simply delegate to `trash-rs` and convert the result to a `TrashedEntry` * Add `fs::FakeFs::trash` so we can simulate the OS' trash during tests that touch the filesystem * Add `fs::FakeFs::trash_file` implementation to leverage `fs::FakeFs::trash` * Add `fs::FakeFs::trash_dir` implementation to leverage `fs::FakeFs::trash` --- Cargo.lock | 69 ++++++- crates/fs/Cargo.toml | 5 +- crates/fs/src/fs.rs | 317 +++++++++++++++++------------- crates/fs/tests/integration/fs.rs | 61 ++++++ crates/worktree/src/worktree.rs | 11 +- 5 files changed, 308 insertions(+), 155 
deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 85d3f9ac3a68aa8a420910dfa61102f03c9812ef..6fd8ea62fd5d5f31890fdb58e8b4cf4e8d108fa8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6462,7 +6462,6 @@ dependencies = [ "ashpd", "async-tar", "async-trait", - "cocoa 0.26.0", "collections", "dunce", "fs", @@ -6474,7 +6473,6 @@ dependencies = [ "libc", "log", "notify 8.2.0", - "objc", "parking_lot", "paths", "proto", @@ -6485,6 +6483,7 @@ dependencies = [ "tempfile", "text", "time", + "trash", "util", "windows 0.61.3", ] @@ -8444,7 +8443,7 @@ dependencies = [ "js-sys", "log", "wasm-bindgen", - "windows-core 0.62.2", + "windows-core 0.57.0", ] [[package]] @@ -18416,7 +18415,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2fb391ac70462b3097a755618fbf9c8f95ecc1eb379a414f7b46f202ed10db1f" dependencies = [ "cc", - "windows-targets 0.52.6", + "windows-targets 0.48.5", ] [[package]] @@ -18429,6 +18428,24 @@ dependencies = [ "strength_reduce", ] +[[package]] +name = "trash" +version = "5.2.5" +source = "git+https://github.com/zed-industries/trash-rs?rev=3bf27effd4eb8699f2e484d3326b852fe3e53af7#3bf27effd4eb8699f2e484d3326b852fe3e53af7" +dependencies = [ + "chrono", + "libc", + "log", + "objc2", + "objc2-foundation", + "once_cell", + "percent-encoding", + "scopeguard", + "urlencoding", + "windows 0.56.0", + "windows-core 0.56.0", +] + [[package]] name = "tree-sitter" version = "0.26.8" @@ -20539,6 +20556,16 @@ dependencies = [ "wasmtime-internal-math", ] +[[package]] +name = "windows" +version = "0.56.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1de69df01bdf1ead2f4ac895dc77c9351aefff65b2f3db429a343f9cbf05e132" +dependencies = [ + "windows-core 0.56.0", + "windows-targets 0.52.6", +] + [[package]] name = "windows" version = "0.57.0" @@ -20627,6 +20654,18 @@ dependencies = [ "windows-core 0.62.2", ] +[[package]] +name = "windows-core" +version = "0.56.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4698e52ed2d08f8658ab0c39512a7c00ee5fe2688c65f8c0a4f06750d729f2a6" +dependencies = [ + "windows-implement 0.56.0", + "windows-interface 0.56.0", + "windows-result 0.1.2", + "windows-targets 0.52.6", +] + [[package]] name = "windows-core" version = "0.57.0" @@ -20700,6 +20739,17 @@ dependencies = [ "windows-threading 0.2.1", ] +[[package]] +name = "windows-implement" +version = "0.56.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6fc35f58ecd95a9b71c4f2329b911016e6bec66b3f2e6a4aad86bd2e99e2f9b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + [[package]] name = "windows-implement" version = "0.57.0" @@ -20733,6 +20783,17 @@ dependencies = [ "syn 2.0.117", ] +[[package]] +name = "windows-interface" +version = "0.56.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08990546bf4edef8f431fa6326e032865f27138718c587dc21bc0265bbcb57cc" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + [[package]] name = "windows-interface" version = "0.57.0" diff --git a/crates/fs/Cargo.toml b/crates/fs/Cargo.toml index 371057c3f8abfd50eea34f0edfcc3e3f7d52df7b..f8c5ae9169972137fa922606a6e5428131c01e63 100644 --- a/crates/fs/Cargo.toml +++ b/crates/fs/Cargo.toml @@ -41,10 +41,7 @@ time.workspace = true util.workspace = true is_executable = "1.0.5" notify = "8.2.0" - -[target.'cfg(target_os = "macos")'.dependencies] -objc.workspace = true -cocoa = "0.26" +trash = { git = "https://github.com/zed-industries/trash-rs", rev = "3bf27effd4eb8699f2e484d3326b852fe3e53af7" } [target.'cfg(target_os = "windows")'.dependencies] windows.workspace = true diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 52cae537b6f00837b50123af0cae7c093699dedf..6b486cafe00d6f9103721571724898a7b7b6f428 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -1,13 +1,12 @@ pub mod fs_watcher; use parking_lot::Mutex; +use 
std::ffi::OsString; use std::sync::atomic::{AtomicU8, AtomicUsize, Ordering}; use std::time::Instant; use util::maybe; use anyhow::{Context as _, Result, anyhow}; -#[cfg(any(target_os = "linux", target_os = "freebsd"))] -use ashpd::desktop::trash; use futures::stream::iter; use gpui::App; use gpui::BackgroundExecutor; @@ -110,14 +109,27 @@ pub trait Fs: Send + Sync { ) -> Result<()>; async fn copy_file(&self, source: &Path, target: &Path, options: CopyOptions) -> Result<()>; async fn rename(&self, source: &Path, target: &Path, options: RenameOptions) -> Result<()>; + + /// Removes a directory from the filesystem. + /// There is no expectation that the directory will be preserved in the + /// system trash. async fn remove_dir(&self, path: &Path, options: RemoveOptions) -> Result<()>; - async fn trash_dir(&self, path: &Path, options: RemoveOptions) -> Result<()> { - self.remove_dir(path, options).await - } + + /// Moves a directory to the system trash. + /// Returns a [`TrashedEntry`] that can be used to keep track of the + /// location of the trashed directory in the system's trash. + async fn trash_dir(&self, path: &Path) -> Result; + + /// Removes a file from the filesystem. + /// There is no expectation that the file will be preserved in the system + /// trash. async fn remove_file(&self, path: &Path, options: RemoveOptions) -> Result<()>; - async fn trash_file(&self, path: &Path, options: RemoveOptions) -> Result<()> { - self.remove_file(path, options).await - } + + /// Moves a file to the system trash. + /// Returns a [`TrashedEntry`] that can be used to keep track of the + /// location of the trashed file in the system's trash. 
+ async fn trash_file(&self, path: &Path) -> Result; + async fn open_handle(&self, path: &Path) -> Result>; async fn open_sync(&self, path: &Path) -> Result>; async fn load(&self, path: &Path) -> Result { @@ -164,6 +176,35 @@ pub trait Fs: Send + Sync { } } +// We use our own type rather than `trash::TrashItem` directly to avoid carrying +// over fields we don't need (e.g. `time_deleted`) and to insulate callers and +// tests from changes to that crate's API surface. +/// Represents a file or directory that has been moved to the system trash, +/// retaining enough information to restore it to its original location. +#[derive(Clone)] +pub struct TrashedEntry { + /// Platform-specific identifier for the file/directory in the trash. + /// + /// * Freedesktop – Path to the `.trashinfo` file. + /// * macOS & Windows – Full path to the file/directory in the system's + /// trash. + pub id: OsString, + /// Original name of the file/directory before it was moved to the trash. + pub name: OsString, + /// Original parent directory. 
+ pub original_parent: PathBuf, +} + +impl From for TrashedEntry { + fn from(item: trash::TrashItem) -> Self { + Self { + id: item.id, + name: item.name, + original_parent: item.original_parent, + } + } +} + struct GlobalFs(Arc); impl Global for GlobalFs {} @@ -718,93 +759,12 @@ impl Fs for RealFs { } } - #[cfg(target_os = "macos")] - async fn trash_file(&self, path: &Path, _options: RemoveOptions) -> Result<()> { - use cocoa::{ - base::{id, nil}, - foundation::{NSAutoreleasePool, NSString}, - }; - use objc::{class, msg_send, sel, sel_impl}; - - unsafe { - /// Allow NSString::alloc use here because it sets autorelease - #[allow(clippy::disallowed_methods)] - unsafe fn ns_string(string: &str) -> id { - unsafe { NSString::alloc(nil).init_str(string).autorelease() } - } - - let url: id = msg_send![class!(NSURL), fileURLWithPath: ns_string(path.to_string_lossy().as_ref())]; - let array: id = msg_send![class!(NSArray), arrayWithObject: url]; - let workspace: id = msg_send![class!(NSWorkspace), sharedWorkspace]; - - let _: id = msg_send![workspace, recycleURLs: array completionHandler: nil]; - } - Ok(()) - } - - #[cfg(any(target_os = "linux", target_os = "freebsd"))] - async fn trash_file(&self, path: &Path, _options: RemoveOptions) -> Result<()> { - if let Ok(Some(metadata)) = self.metadata(path).await - && metadata.is_symlink - { - // TODO: trash_file does not support trashing symlinks yet - https://github.com/bilelmoussaoui/ashpd/issues/255 - return self.remove_file(path, RemoveOptions::default()).await; - } - let file = smol::fs::File::open(path).await?; - match trash::trash_file(&file.as_fd()).await { - Ok(_) => Ok(()), - Err(err) => { - log::error!("Failed to trash file: {}", err); - // Trashing files can fail if you don't have a trashing dbus service configured. - // In that case, delete the file directly instead. 
- return self.remove_file(path, RemoveOptions::default()).await; - } - } - } - - #[cfg(target_os = "windows")] - async fn trash_file(&self, path: &Path, _options: RemoveOptions) -> Result<()> { - use util::paths::SanitizedPath; - use windows::{ - Storage::{StorageDeleteOption, StorageFile}, - core::HSTRING, - }; - // todo(windows) - // When new version of `windows-rs` release, make this operation `async` - let path = path.canonicalize()?; - let path = SanitizedPath::new(&path); - let path_string = path.to_string(); - let file = StorageFile::GetFileFromPathAsync(&HSTRING::from(path_string))?.get()?; - file.DeleteAsync(StorageDeleteOption::Default)?.get()?; - Ok(()) - } - - #[cfg(target_os = "macos")] - async fn trash_dir(&self, path: &Path, options: RemoveOptions) -> Result<()> { - self.trash_file(path, options).await - } - - #[cfg(any(target_os = "linux", target_os = "freebsd"))] - async fn trash_dir(&self, path: &Path, options: RemoveOptions) -> Result<()> { - self.trash_file(path, options).await + async fn trash_file(&self, path: &Path) -> Result { + Ok(trash::delete_with_info(path)?.into()) } - #[cfg(target_os = "windows")] - async fn trash_dir(&self, path: &Path, _options: RemoveOptions) -> Result<()> { - use util::paths::SanitizedPath; - use windows::{ - Storage::{StorageDeleteOption, StorageFolder}, - core::HSTRING, - }; - - // todo(windows) - // When new version of `windows-rs` release, make this operation `async` - let path = path.canonicalize()?; - let path = SanitizedPath::new(&path); - let path_string = path.to_string(); - let folder = StorageFolder::GetFolderFromPathAsync(&HSTRING::from(path_string))?.get()?; - folder.DeleteAsync(StorageDeleteOption::Default)?.get()?; - Ok(()) + async fn trash_dir(&self, path: &Path) -> Result { + self.trash_file(path).await } async fn open_sync(&self, path: &Path) -> Result> { @@ -1287,6 +1247,7 @@ struct FakeFsState { path_write_counts: std::collections::HashMap, moves: std::collections::HashMap, 
job_event_subscribers: Arc>>, + trash: Vec<(TrashedEntry, FakeFsEntry)>, } #[cfg(feature = "test-support")] @@ -1572,6 +1533,7 @@ impl FakeFs { path_write_counts: Default::default(), moves: Default::default(), job_event_subscribers: Arc::new(Mutex::new(Vec::new())), + trash: Vec::new(), })), }); @@ -2397,6 +2359,90 @@ impl FakeFs { fn simulate_random_delay(&self) -> impl futures::Future { self.executor.simulate_random_delay() } + + /// Returns list of all tracked trash entries. + pub fn trash_entries(&self) -> Vec { + self.state + .lock() + .trash + .iter() + .map(|(entry, _)| entry.clone()) + .collect() + } + + async fn remove_dir_inner( + &self, + path: &Path, + options: RemoveOptions, + ) -> Result> { + self.simulate_random_delay().await; + + let path = normalize_path(path); + let parent_path = path.parent().context("cannot remove the root")?; + let base_name = path.file_name().context("cannot remove the root")?; + + let mut state = self.state.lock(); + let parent_entry = state.entry(parent_path)?; + let entry = parent_entry + .dir_entries(parent_path)? 
+ .entry(base_name.to_str().unwrap().into()); + + let removed = match entry { + btree_map::Entry::Vacant(_) => { + if !options.ignore_if_not_exists { + anyhow::bail!("{path:?} does not exist"); + } + + None + } + btree_map::Entry::Occupied(mut entry) => { + { + let children = entry.get_mut().dir_entries(&path)?; + if !options.recursive && !children.is_empty() { + anyhow::bail!("{path:?} is not empty"); + } + } + + Some(entry.remove()) + } + }; + + state.emit_event([(path, Some(PathEventKind::Removed))]); + Ok(removed) + } + + async fn remove_file_inner( + &self, + path: &Path, + options: RemoveOptions, + ) -> Result> { + self.simulate_random_delay().await; + + let path = normalize_path(path); + let parent_path = path.parent().context("cannot remove the root")?; + let base_name = path.file_name().unwrap(); + let mut state = self.state.lock(); + let parent_entry = state.entry(parent_path)?; + let entry = parent_entry + .dir_entries(parent_path)? + .entry(base_name.to_str().unwrap().into()); + let removed = match entry { + btree_map::Entry::Vacant(_) => { + if !options.ignore_if_not_exists { + anyhow::bail!("{path:?} does not exist"); + } + + None + } + btree_map::Entry::Occupied(mut entry) => { + entry.get_mut().file_content(&path)?; + Some(entry.remove()) + } + }; + + state.emit_event([(path, Some(PathEventKind::Removed))]); + Ok(removed) + } } #[cfg(feature = "test-support")] @@ -2696,62 +2742,57 @@ impl Fs for FakeFs { } async fn remove_dir(&self, path: &Path, options: RemoveOptions) -> Result<()> { - self.simulate_random_delay().await; + self.remove_dir_inner(path, options).await.map(|_| ()) + } - let path = normalize_path(path); - let parent_path = path.parent().context("cannot remove the root")?; - let base_name = path.file_name().context("cannot remove the root")?; + async fn trash_dir(&self, path: &Path) -> Result { + let normalized_path = normalize_path(path); + let parent_path = normalized_path.parent().context("cannot remove the root")?; + let base_name = 
normalized_path.file_name().unwrap(); + let options = RemoveOptions { + recursive: true, + ..Default::default() + }; - let mut state = self.state.lock(); - let parent_entry = state.entry(parent_path)?; - let entry = parent_entry - .dir_entries(parent_path)? - .entry(base_name.to_str().unwrap().into()); + match self.remove_dir_inner(path, options).await? { + Some(fake_entry) => { + let trashed_entry = TrashedEntry { + id: base_name.to_str().unwrap().into(), + name: base_name.to_str().unwrap().into(), + original_parent: parent_path.to_path_buf(), + }; - match entry { - btree_map::Entry::Vacant(_) => { - if !options.ignore_if_not_exists { - anyhow::bail!("{path:?} does not exist"); - } - } - btree_map::Entry::Occupied(mut entry) => { - { - let children = entry.get_mut().dir_entries(&path)?; - if !options.recursive && !children.is_empty() { - anyhow::bail!("{path:?} is not empty"); - } - } - entry.remove(); + let mut state = self.state.lock(); + state.trash.push((trashed_entry.clone(), fake_entry)); + Ok(trashed_entry) } + None => anyhow::bail!("{normalized_path:?} does not exist"), } - state.emit_event([(path, Some(PathEventKind::Removed))]); - Ok(()) } async fn remove_file(&self, path: &Path, options: RemoveOptions) -> Result<()> { - self.simulate_random_delay().await; + self.remove_file_inner(path, options).await.map(|_| ()) + } - let path = normalize_path(path); - let parent_path = path.parent().context("cannot remove the root")?; - let base_name = path.file_name().unwrap(); - let mut state = self.state.lock(); - let parent_entry = state.entry(parent_path)?; - let entry = parent_entry - .dir_entries(parent_path)? 
- .entry(base_name.to_str().unwrap().into()); - match entry { - btree_map::Entry::Vacant(_) => { - if !options.ignore_if_not_exists { - anyhow::bail!("{path:?} does not exist"); - } - } - btree_map::Entry::Occupied(mut entry) => { - entry.get_mut().file_content(&path)?; - entry.remove(); + async fn trash_file(&self, path: &Path) -> Result { + let normalized_path = normalize_path(path); + let parent_path = normalized_path.parent().context("cannot remove the root")?; + let base_name = normalized_path.file_name().unwrap(); + + match self.remove_file_inner(path, Default::default()).await? { + Some(fake_entry) => { + let trashed_entry = TrashedEntry { + id: base_name.to_str().unwrap().into(), + name: base_name.to_str().unwrap().into(), + original_parent: parent_path.to_path_buf(), + }; + + let mut state = self.state.lock(); + state.trash.push((trashed_entry.clone(), fake_entry)); + Ok(trashed_entry) } + None => anyhow::bail!("{normalized_path:?} does not exist"), } - state.emit_event([(path, Some(PathEventKind::Removed))]); - Ok(()) } async fn open_sync(&self, path: &Path) -> Result> { diff --git a/crates/fs/tests/integration/fs.rs b/crates/fs/tests/integration/fs.rs index 34c1430a995402bd1e28817785c3b4ff707d4abd..83956c76c9f1dbe44ae4899ff14f7f8939d0006d 100644 --- a/crates/fs/tests/integration/fs.rs +++ b/crates/fs/tests/integration/fs.rs @@ -626,6 +626,67 @@ async fn test_realfs_symlink_loop_metadata(executor: BackgroundExecutor) { // don't care about len or mtime on symlinks? 
} +#[gpui::test] +async fn test_fake_fs_trash_file(executor: BackgroundExecutor) { + let fs = FakeFs::new(executor.clone()); + fs.insert_tree( + path!("/root"), + json!({ + "file_a.txt": "File A", + "file_b.txt": "File B", + }), + ) + .await; + + let root_path = PathBuf::from(path!("/root")); + let path = path!("/root/file_a.txt").as_ref(); + let trashed_entry = fs + .trash_file(path) + .await + .expect("should be able to trash {path:?}"); + + assert_eq!(trashed_entry.name, "file_a.txt"); + assert_eq!(trashed_entry.original_parent, root_path); + assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/file_b.txt"))]); + + let trash_entries = fs.trash_entries(); + assert_eq!(trash_entries.len(), 1); + assert_eq!(trash_entries[0].name, "file_a.txt"); + assert_eq!(trash_entries[0].original_parent, root_path); +} + +#[gpui::test] +async fn test_fake_fs_trash_dir(executor: BackgroundExecutor) { + let fs = FakeFs::new(executor.clone()); + fs.insert_tree( + path!("/root"), + json!({ + "src": { + "file_a.txt": "File A", + "file_b.txt": "File B", + }, + "file_c.txt": "File C", + }), + ) + .await; + + let root_path = PathBuf::from(path!("/root")); + let path = path!("/root/src").as_ref(); + let trashed_entry = fs + .trash_dir(path) + .await + .expect("should be able to trash {path:?}"); + + assert_eq!(trashed_entry.name, "src"); + assert_eq!(trashed_entry.original_parent, root_path); + assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/file_c.txt"))]); + + let trash_entries = fs.trash_entries(); + assert_eq!(trash_entries.len(), 1); + assert_eq!(trash_entries[0].name, "src"); + assert_eq!(trash_entries[0].original_parent, root_path); +} + #[gpui::test] #[ignore = "stress test; run explicitly when needed"] async fn test_realfs_watch_stress_reports_missed_paths( diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index fea550e0c2ca1987fd3d9fa88c48f38596c4dd1b..7046e4afdfd95ba341b7b87846c9e3e4520849f6 100644 --- a/crates/worktree/src/worktree.rs +++ 
b/crates/worktree/src/worktree.rs @@ -1693,19 +1693,12 @@ impl LocalWorktree { let delete = cx.background_spawn(async move { if entry.is_file() { if trash { - fs.trash_file(&abs_path, Default::default()).await?; + fs.trash_file(&abs_path).await?; } else { fs.remove_file(&abs_path, Default::default()).await?; } } else if trash { - fs.trash_dir( - &abs_path, - RemoveOptions { - recursive: true, - ignore_if_not_exists: false, - }, - ) - .await?; + fs.trash_dir(&abs_path).await?; } else { fs.remove_dir( &abs_path, From fd285c8ec73af9a71ad77b88f3bed08224808b1a Mon Sep 17 00:00:00 2001 From: Dino Date: Thu, 9 Apr 2026 18:01:47 +0100 Subject: [PATCH 14/67] fs: Add support for restoring trashed files (#52014) Introduce a new `fs::Fs::restore` method which, given a `fs::TrashedEntry` should attempt to restore the file or directory back to its original path. --- crates/fs/src/fs.rs | 96 +++++++++++++++++++++- crates/fs/tests/integration/fs.rs | 129 ++++++++++++++++++++++++++++++ 2 files changed, 224 insertions(+), 1 deletion(-) diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 6b486cafe00d6f9103721571724898a7b7b6f428..bdeb139088bf33e1251bc23a5583a0ee3c9f4bf2 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -170,6 +170,13 @@ pub trait Fs: Send + Sync { async fn is_case_sensitive(&self) -> bool; fn subscribe_to_jobs(&self) -> JobEventReceiver; + /// Restores a given `TrashedEntry`, moving it from the system's trash back + /// to the original path. + async fn restore( + &self, + trashed_entry: TrashedEntry, + ) -> std::result::Result<(), TrashRestoreError>; + #[cfg(feature = "test-support")] fn as_fake(&self) -> Arc { panic!("called as_fake on a real fs"); @@ -181,7 +188,7 @@ pub trait Fs: Send + Sync { // tests from changes to that crate's API surface. /// Represents a file or directory that has been moved to the system trash, /// retaining enough information to restore it to its original location. 
-#[derive(Clone)] +#[derive(Clone, PartialEq)] pub struct TrashedEntry { /// Platform-specific identifier for the file/directory in the trash. /// @@ -205,6 +212,41 @@ impl From for TrashedEntry { } } +impl TrashedEntry { + fn into_trash_item(self) -> trash::TrashItem { + trash::TrashItem { + id: self.id, + name: self.name, + original_parent: self.original_parent, + // `TrashedEntry` doesn't preserve `time_deleted` as we don't + // currently need it for restore, so we default it to 0 here. + time_deleted: 0, + } + } +} + +#[derive(Debug)] +pub enum TrashRestoreError { + /// The specified `path` was not found in the system's trash. + NotFound { path: PathBuf }, + /// A file or directory already exists at the restore destination. + Collision { path: PathBuf }, + /// Any other platform-specific error. + Unknown { description: String }, +} + +impl From for TrashRestoreError { + fn from(err: trash::Error) -> Self { + match err { + trash::Error::RestoreCollision { path, .. } => Self::Collision { path }, + trash::Error::Unknown { description } => Self::Unknown { description }, + other => Self::Unknown { + description: other.to_string(), + }, + } + } +} + struct GlobalFs(Arc); impl Global for GlobalFs {} @@ -1212,6 +1254,13 @@ impl Fs for RealFs { ); res } + + async fn restore( + &self, + trashed_entry: TrashedEntry, + ) -> std::result::Result<(), TrashRestoreError> { + trash::restore_all([trashed_entry.into_trash_item()]).map_err(Into::into) + } } #[cfg(not(any(target_os = "linux", target_os = "freebsd")))] @@ -3043,6 +3092,51 @@ impl Fs for FakeFs { receiver } + async fn restore( + &self, + trashed_entry: TrashedEntry, + ) -> std::result::Result<(), TrashRestoreError> { + let mut state = self.state.lock(); + + let Some((trashed_entry, fake_entry)) = state + .trash + .iter() + .find(|(entry, _)| *entry == trashed_entry) + .cloned() + else { + return Err(TrashRestoreError::NotFound { + path: PathBuf::from(trashed_entry.id), + }); + }; + + let path = trashed_entry + 
.original_parent + .join(trashed_entry.name.clone()); + + let result = state.write_path(&path, |entry| match entry { + btree_map::Entry::Vacant(entry) => { + entry.insert(fake_entry); + Ok(()) + } + btree_map::Entry::Occupied(_) => { + anyhow::bail!("Failed to restore {:?}", path); + } + }); + + match result { + Ok(_) => { + state.trash.retain(|(entry, _)| *entry != trashed_entry); + Ok(()) + } + Err(_) => { + // For now we'll just assume that this failed because it was a + // collision error, which I think that, for the time being, is + // the only case where this could fail? + Err(TrashRestoreError::Collision { path }) + } + } + } + #[cfg(feature = "test-support")] fn as_fake(&self) -> Arc { self.this.upgrade().unwrap() diff --git a/crates/fs/tests/integration/fs.rs b/crates/fs/tests/integration/fs.rs index 83956c76c9f1dbe44ae4899ff14f7f8939d0006d..fce8a98dea64fb153cacb5998f005a5cbd5cc11a 100644 --- a/crates/fs/tests/integration/fs.rs +++ b/crates/fs/tests/integration/fs.rs @@ -1,5 +1,6 @@ use std::{ collections::BTreeSet, + ffi::OsString, io::Write, path::{Path, PathBuf}, time::Duration, @@ -687,6 +688,134 @@ async fn test_fake_fs_trash_dir(executor: BackgroundExecutor) { assert_eq!(trash_entries[0].original_parent, root_path); } +#[gpui::test] +async fn test_fake_fs_restore(executor: BackgroundExecutor) { + let fs = FakeFs::new(executor.clone()); + fs.insert_tree( + path!("/root"), + json!({ + "src": { + "file_a.txt": "File A", + "file_b.txt": "File B", + }, + "file_c.txt": "File C", + }), + ) + .await; + + // Providing a non-existent `TrashedEntry` should result in an error. + let id: OsString = "/trash/file_c.txt".into(); + let name: OsString = "file_c.txt".into(); + let original_parent = PathBuf::from(path!("/root")); + let trashed_entry = TrashedEntry { + id, + name, + original_parent, + }; + let result = fs.restore(trashed_entry).await; + assert!(matches!(result, Err(TrashRestoreError::NotFound { .. 
}))); + + // Attempt deleting a file, asserting that the filesystem no longer reports + // it as part of its list of files, restore it and verify that the list of + // files and trash has been updated accordingly. + let path = path!("/root/src/file_a.txt").as_ref(); + let trashed_entry = fs.trash_file(path).await.unwrap(); + + assert_eq!(fs.trash_entries().len(), 1); + assert_eq!( + fs.files(), + vec![ + PathBuf::from(path!("/root/file_c.txt")), + PathBuf::from(path!("/root/src/file_b.txt")) + ] + ); + + fs.restore(trashed_entry).await.unwrap(); + + assert_eq!(fs.trash_entries().len(), 0); + assert_eq!( + fs.files(), + vec![ + PathBuf::from(path!("/root/file_c.txt")), + PathBuf::from(path!("/root/src/file_a.txt")), + PathBuf::from(path!("/root/src/file_b.txt")) + ] + ); + + // Deleting and restoring a directory should also remove all of its files + // but create a single trashed entry, which should be removed after + // restoration. + let path = path!("/root/src/").as_ref(); + let trashed_entry = fs.trash_dir(path).await.unwrap(); + + assert_eq!(fs.trash_entries().len(), 1); + assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/file_c.txt"))]); + + fs.restore(trashed_entry).await.unwrap(); + + assert_eq!( + fs.files(), + vec![ + PathBuf::from(path!("/root/file_c.txt")), + PathBuf::from(path!("/root/src/file_a.txt")), + PathBuf::from(path!("/root/src/file_b.txt")) + ] + ); + assert_eq!(fs.trash_entries().len(), 0); + + // A collision error should be returned in case a file is being restored to + // a path where a file already exists. 
+ let path = path!("/root/src/file_a.txt").as_ref(); + let trashed_entry = fs.trash_file(path).await.unwrap(); + + assert_eq!(fs.trash_entries().len(), 1); + assert_eq!( + fs.files(), + vec![ + PathBuf::from(path!("/root/file_c.txt")), + PathBuf::from(path!("/root/src/file_b.txt")) + ] + ); + + fs.write(path, "New File A".as_bytes()).await.unwrap(); + + assert_eq!(fs.trash_entries().len(), 1); + assert_eq!( + fs.files(), + vec![ + PathBuf::from(path!("/root/file_c.txt")), + PathBuf::from(path!("/root/src/file_a.txt")), + PathBuf::from(path!("/root/src/file_b.txt")) + ] + ); + + let file_contents = fs.files_with_contents(path); + assert!(fs.restore(trashed_entry).await.is_err()); + assert_eq!( + file_contents, + vec![(PathBuf::from(path), b"New File A".to_vec())] + ); + + // A collision error should be returned in case a directory is being + // restored to a path where a directory already exists. + let path = path!("/root/src/").as_ref(); + let trashed_entry = fs.trash_dir(path).await.unwrap(); + + assert_eq!(fs.trash_entries().len(), 2); + assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/file_c.txt"))]); + + fs.create_dir(path).await.unwrap(); + + assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/file_c.txt"))]); + assert_eq!(fs.trash_entries().len(), 2); + + let result = fs.restore(trashed_entry).await; + assert!(result.is_err()); + + assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/file_c.txt"))]); + assert_eq!(fs.trash_entries().len(), 2); +} + #[gpui::test] #[ignore = "stress test; run explicitly when needed"] async fn test_realfs_watch_stress_reports_missed_paths( From e25885bbe6e3ff7af565756290173a2f60b7acc8 Mon Sep 17 00:00:00 2001 From: Dino Date: Thu, 9 Apr 2026 18:49:16 +0100 Subject: [PATCH 15/67] project_panel: Add redo and restore support (#53311) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Introduce `project_panel::Redo` action - Update all platform keymaps in order to map 
`redo`/`ctrl-shift-z`/`cmd-shift-z` to the `project_panel::Redo` action ### Restore Entry Support - Update both `Project::delete_entry` and `Worktree::delete_entry` to return the resulting `fs::TrashedEntry` - Introduce both `Project::restore_entry` and `Worktree::restore_entry` to allow restoring an entry in a worktree, given the `fs::TrashedEntry` - Worth pointing out that support for restoring is not yet implemented for remote worktrees, as that will be dealt with in a separate pull request ### Undo Manager - Split `ProjectPanelOperation` into two different enums, `Change` and `Operation` - While thinking through this, we noticed that simply recording the operation that user was performing was not enough, specifically in the case where undoing would restore the file, as in that specific case, we needed the `trash::TrashedEntry` in order to be able to restore, so we actually needed the result of executing the operation. - Having that in mind, we decided to separate the operation (intent) from the change (result), and record the change instead. With the change being recorded, we can easily building the operation that needs to be executed in order to invert that change. - For example, if an user creates a new file, we record the `ProjectPath` where the file was created, so that undoing can be a matter of trashing that file. When undoing, we keep track of the `trash::TrashedEntry` resulting from trashing the originally created file, such that, redoing is a matter of restoring the `trash::TrashedEntry`. - Refer to the documentation in the `project_panel::undo` module for a better breakdown on how this is implemented/handled. 
- Introduce a task queue for dealing with recording changes, as well as undo and redo requests in a sequential manner - This meant moving some of the details in `UndoManager` to a `project_panel::undo::Inner` implementation, and `UndoManager` now serves as a simple wrapper/client around the inner implementation, simply communicating with it to record changes and handle undo/redo requests - Callers that depend on the `UndoManager` now simply record which changes they wish to track, which are then sent to the undo manager's inner implementation - Same for the undo and redo requests, those are simply sent to the undo manager's inner implementation, which then deals with picking the correct change from the history and executing its inverse operation - Introduce support for tracking restore changes and operations - `project_panel::undo::Change::Restored` – Keeps track that the file/directory associated with the `ProjectPath` was a result of restoring a trashed entry, for which we now that reverting is simply a matter of trashing the path again - `project_panel::undo::Operation::Restore` – Keeps track of both the worktree id and the `TrashedEntry`, from which we can build the original `ProjectPath` where the trashed entry needs to be restored - Move project panel's undo tests to a separate module `project_panel::tests::undo` to avoid growing the `project::project_panel_tests` module into a monolithic test module - Some of the functions in `project::project_panel_tests` were made `pub(crate)` in order for us to be able to call those from `project_panel::tests::undo` ### FS Changes - Refactored the `Fs::trash_file` and `Fs::trash_dir` methods into a single `Fs::trash` method - This can now be done because `RealFs::trash_dir` and `RealFs::trash_file` were simply calling `trash::delete_with_info`, so we can simplify the trait - Tests have also been simplified to reflect this new change, so we no longer need a separate test for trashing a file and trashing a directory - 
Update `Fs::trash` and `Fs::restore` to be async - On the `RealFs` implementation we're now spawning a thread to perform the trash/restore operation Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Relates to #5039 Release Notes: - N/A --------- Co-authored-by: Yara Co-authored-by: Miguel Raz Guzmán Macedo Co-authored-by: Marshall Bowers --- Cargo.lock | 3 + Cargo.toml | 2 +- assets/keymaps/default-linux.json | 2 + assets/keymaps/default-macos.json | 1 + assets/keymaps/default-windows.json | 1 + crates/action_log/src/action_log.rs | 9 +- crates/feature_flags/src/flags.rs | 2 +- crates/fs/Cargo.toml | 1 + crates/fs/src/fs.rs | 106 ++- crates/fs/tests/integration/fs.rs | 72 +- crates/project/src/project.rs | 25 +- crates/project_panel/Cargo.toml | 2 + crates/project_panel/src/project_panel.rs | 162 ++-- .../project_panel/src/project_panel_tests.rs | 568 +------------- crates/project_panel/src/tests.rs | 1 + crates/project_panel/src/tests/undo.rs | 384 +++++++++ crates/project_panel/src/undo.rs | 740 ++++++++++++------ crates/worktree/src/worktree.rs | 126 ++- crates/worktree/tests/integration/main.rs | 9 +- 19 files changed, 1236 insertions(+), 980 deletions(-) create mode 100644 crates/project_panel/src/tests.rs create mode 100644 crates/project_panel/src/tests/undo.rs diff --git a/Cargo.lock b/Cargo.lock index 6fd8ea62fd5d5f31890fdb58e8b4cf4e8d108fa8..67495074258f02a658b5b95eb9b8e6625d6cbeb0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6482,6 +6482,7 @@ dependencies = [ "smol", "tempfile", "text", + "thiserror 2.0.17", "time", "trash", "util", @@ -13349,6 +13350,8 @@ dependencies = [ "editor", "feature_flags", "file_icons", + 
"fs", + "futures 0.3.32", "git", "git_ui", "gpui", diff --git a/Cargo.toml b/Cargo.toml index b9e99a5a87020a7eda8c8a2983bcf7b07fabc82c..9825c8319a7bb3440782b155d9952619096bdfd5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -571,7 +571,7 @@ encoding_rs = "0.8" exec = "0.3.1" fancy-regex = "0.17.0" fork = "0.4.0" -futures = "0.3" +futures = "0.3.32" futures-concurrency = "7.7.1" futures-lite = "1.13" gh-workflow = { git = "https://github.com/zed-industries/gh-workflow", rev = "37f3c0575d379c218a9c455ee67585184e40d43f" } diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index fb305fe768931dd6f52f1b5d890ad6771b7b5cac..6433a420b87be8cf0678dc615e7c4736eed9d0b0 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -936,6 +936,8 @@ "alt-ctrl-shift-c": "workspace::CopyRelativePath", "undo": "project_panel::Undo", "ctrl-z": "project_panel::Undo", + "redo": "project_panel::Redo", + "ctrl-shift-z": "project_panel::Redo", "enter": "project_panel::Rename", "f2": "project_panel::Rename", "backspace": ["project_panel::Trash", { "skip_prompt": false }], diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 5fb408640b2c5083f4d3379bf927178c96bed4b6..5e27dbd99e41861f0c9bace86c03d98788264d81 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -991,6 +991,7 @@ "cmd-alt-c": "workspace::CopyPath", "alt-cmd-shift-c": "workspace::CopyRelativePath", "cmd-z": "project_panel::Undo", + "cmd-shift-z": "project_panel::Redo", "enter": "project_panel::Rename", "f2": "project_panel::Rename", "backspace": ["project_panel::Trash", { "skip_prompt": false }], diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index 34d161577ee315857becf7c9e3c9353402e56876..1e33a71d8815e72daba07adbc76b330795ff9d52 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -929,6 +929,7 @@ "shift-alt-c": 
"project_panel::CopyPath", "ctrl-k ctrl-shift-c": "workspace::CopyRelativePath", "ctrl-z": "project_panel::Undo", + "ctrl-shift-z": "project_panel::Redo", "enter": "project_panel::Rename", "f2": "project_panel::Rename", "backspace": ["project_panel::Trash", { "skip_prompt": false }], diff --git a/crates/action_log/src/action_log.rs b/crates/action_log/src/action_log.rs index 1f17d38f7d2a2770350026f2f145a53723ef7481..cd17392704e1c6c932a3e4d8716b1c6f37489576 100644 --- a/crates/action_log/src/action_log.rs +++ b/crates/action_log/src/action_log.rs @@ -777,7 +777,7 @@ impl ActionLog { initial_version == current_version && current_content == tracked_content; if is_ai_only_content { - buffer + let task = buffer .read(cx) .entry_id(cx) .and_then(|entry_id| { @@ -785,7 +785,12 @@ impl ActionLog { project.delete_entry(entry_id, false, cx) }) }) - .unwrap_or(Task::ready(Ok(()))) + .unwrap_or_else(|| Task::ready(Ok(None))); + + cx.background_spawn(async move { + task.await?; + Ok(()) + }) } else { // Not sure how to disentangle edits made by the user // from edits made by the AI at this point. 
diff --git a/crates/feature_flags/src/flags.rs b/crates/feature_flags/src/flags.rs index 4a206a2bb4c48db951f1364d1aa408947165c24b..474f5b35bb536349ce7c4693f5dbedd6ef8b474a 100644 --- a/crates/feature_flags/src/flags.rs +++ b/crates/feature_flags/src/flags.rs @@ -63,6 +63,6 @@ impl FeatureFlag for ProjectPanelUndoRedoFeatureFlag { const NAME: &'static str = "project-panel-undo-redo"; fn enabled_for_staff() -> bool { - false + true } } diff --git a/crates/fs/Cargo.toml b/crates/fs/Cargo.toml index f8c5ae9169972137fa922606a6e5428131c01e63..e7b8dcd4ebda7810ef8087e112ae43819702bdf6 100644 --- a/crates/fs/Cargo.toml +++ b/crates/fs/Cargo.toml @@ -32,6 +32,7 @@ parking_lot.workspace = true paths.workspace = true rope.workspace = true proto.workspace = true +thiserror.workspace = true serde.workspace = true serde_json.workspace = true smol.workspace = true diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index bdeb139088bf33e1251bc23a5583a0ee3c9f4bf2..e44f557646239da5dd84354e364422cf16e14233 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -115,21 +115,16 @@ pub trait Fs: Send + Sync { /// system trash. async fn remove_dir(&self, path: &Path, options: RemoveOptions) -> Result<()>; - /// Moves a directory to the system trash. + /// Moves a file or directory to the system trash. /// Returns a [`TrashedEntry`] that can be used to keep track of the - /// location of the trashed directory in the system's trash. - async fn trash_dir(&self, path: &Path) -> Result; + /// location of the trashed item in the system's trash. + async fn trash(&self, path: &Path, options: RemoveOptions) -> Result; /// Removes a file from the filesystem. /// There is no expectation that the file will be preserved in the system /// trash. async fn remove_file(&self, path: &Path, options: RemoveOptions) -> Result<()>; - /// Moves a file to the system trash. - /// Returns a [`TrashedEntry`] that can be used to keep track of the - /// location of the trashed file in the system's trash. 
- async fn trash_file(&self, path: &Path) -> Result; - async fn open_handle(&self, path: &Path) -> Result>; async fn open_sync(&self, path: &Path) -> Result>; async fn load(&self, path: &Path) -> Result { @@ -175,7 +170,7 @@ pub trait Fs: Send + Sync { async fn restore( &self, trashed_entry: TrashedEntry, - ) -> std::result::Result<(), TrashRestoreError>; + ) -> std::result::Result; #[cfg(feature = "test-support")] fn as_fake(&self) -> Arc { @@ -188,7 +183,7 @@ pub trait Fs: Send + Sync { // tests from changes to that crate's API surface. /// Represents a file or directory that has been moved to the system trash, /// retaining enough information to restore it to its original location. -#[derive(Clone, PartialEq)] +#[derive(Clone, PartialEq, Debug)] pub struct TrashedEntry { /// Platform-specific identifier for the file/directory in the trash. /// @@ -196,9 +191,9 @@ pub struct TrashedEntry { /// * macOS & Windows – Full path to the file/directory in the system's /// trash. pub id: OsString, - /// Original name of the file/directory before it was moved to the trash. + /// Name of the file/directory at the time of trashing, including extension. pub name: OsString, - /// Original parent directory. + /// Absolute path to the parent directory at the time of trashing. pub original_parent: PathBuf, } @@ -225,13 +220,13 @@ impl TrashedEntry { } } -#[derive(Debug)] +#[derive(Debug, thiserror::Error)] pub enum TrashRestoreError { - /// The specified `path` was not found in the system's trash. + #[error("The specified `path` ({}) was not found in the system's trash.", path.display())] NotFound { path: PathBuf }, - /// A file or directory already exists at the restore destination. + #[error("File or directory ({}) already exists at the restore destination.", path.display())] Collision { path: PathBuf }, - /// Any other platform-specific error. 
+ #[error("Unknown error ({description})")] Unknown { description: String }, } @@ -801,12 +796,26 @@ impl Fs for RealFs { } } - async fn trash_file(&self, path: &Path) -> Result { - Ok(trash::delete_with_info(path)?.into()) - } + async fn trash(&self, path: &Path, _options: RemoveOptions) -> Result { + // We must make the path absolute or trash will make a weird abomination + // of the zed working directory (not usually the worktree) and whatever + // the path variable holds. + let path = self + .canonicalize(path) + .await + .context("Could not canonicalize the path of the file")?; + + let (tx, rx) = futures::channel::oneshot::channel(); + std::thread::Builder::new() + .name("trash file or dir".to_string()) + .spawn(|| tx.send(trash::delete_with_info(path))) + .expect("The os can spawn threads"); - async fn trash_dir(&self, path: &Path) -> Result { - self.trash_file(path).await + Ok(rx + .await + .context("Tx dropped or fs.restore panicked")? + .context("Could not trash file or dir")? + .into()) } async fn open_sync(&self, path: &Path) -> Result> { @@ -1258,8 +1267,19 @@ impl Fs for RealFs { async fn restore( &self, trashed_entry: TrashedEntry, - ) -> std::result::Result<(), TrashRestoreError> { - trash::restore_all([trashed_entry.into_trash_item()]).map_err(Into::into) + ) -> std::result::Result { + let restored_item_path = trashed_entry.original_parent.join(&trashed_entry.name); + + let (tx, rx) = futures::channel::oneshot::channel(); + std::thread::Builder::new() + .name("restore trashed item".to_string()) + .spawn(move || { + let res = trash::restore_all([trashed_entry.into_trash_item()]); + tx.send(res) + }) + .expect("The OS can spawn a threads"); + rx.await.expect("Restore all never panics")?; + Ok(restored_item_path) } } @@ -2794,16 +2814,17 @@ impl Fs for FakeFs { self.remove_dir_inner(path, options).await.map(|_| ()) } - async fn trash_dir(&self, path: &Path) -> Result { + async fn trash(&self, path: &Path, options: RemoveOptions) -> Result { let 
normalized_path = normalize_path(path); let parent_path = normalized_path.parent().context("cannot remove the root")?; let base_name = normalized_path.file_name().unwrap(); - let options = RemoveOptions { - recursive: true, - ..Default::default() + let result = if self.is_dir(path).await { + self.remove_dir_inner(path, options).await? + } else { + self.remove_file_inner(path, options).await? }; - match self.remove_dir_inner(path, options).await? { + match result { Some(fake_entry) => { let trashed_entry = TrashedEntry { id: base_name.to_str().unwrap().into(), @@ -2823,27 +2844,6 @@ impl Fs for FakeFs { self.remove_file_inner(path, options).await.map(|_| ()) } - async fn trash_file(&self, path: &Path) -> Result { - let normalized_path = normalize_path(path); - let parent_path = normalized_path.parent().context("cannot remove the root")?; - let base_name = normalized_path.file_name().unwrap(); - - match self.remove_file_inner(path, Default::default()).await? { - Some(fake_entry) => { - let trashed_entry = TrashedEntry { - id: base_name.to_str().unwrap().into(), - name: base_name.to_str().unwrap().into(), - original_parent: parent_path.to_path_buf(), - }; - - let mut state = self.state.lock(); - state.trash.push((trashed_entry.clone(), fake_entry)); - Ok(trashed_entry) - } - None => anyhow::bail!("{normalized_path:?} does not exist"), - } - } - async fn open_sync(&self, path: &Path) -> Result> { let bytes = self.load_internal(path).await?; Ok(Box::new(io::Cursor::new(bytes))) @@ -3092,10 +3092,7 @@ impl Fs for FakeFs { receiver } - async fn restore( - &self, - trashed_entry: TrashedEntry, - ) -> std::result::Result<(), TrashRestoreError> { + async fn restore(&self, trashed_entry: TrashedEntry) -> Result { let mut state = self.state.lock(); let Some((trashed_entry, fake_entry)) = state @@ -3126,7 +3123,8 @@ impl Fs for FakeFs { match result { Ok(_) => { state.trash.retain(|(entry, _)| *entry != trashed_entry); - Ok(()) + state.emit_event([(path.clone(), 
Some(PathEventKind::Created))]); + Ok(path) } Err(_) => { // For now we'll just assume that this failed because it was a diff --git a/crates/fs/tests/integration/fs.rs b/crates/fs/tests/integration/fs.rs index fce8a98dea64fb153cacb5998f005a5cbd5cc11a..97ec90bea09651bc888dfdea332ad6a4964ede2f 100644 --- a/crates/fs/tests/integration/fs.rs +++ b/crates/fs/tests/integration/fs.rs @@ -628,64 +628,66 @@ async fn test_realfs_symlink_loop_metadata(executor: BackgroundExecutor) { } #[gpui::test] -async fn test_fake_fs_trash_file(executor: BackgroundExecutor) { +async fn test_fake_fs_trash(executor: BackgroundExecutor) { let fs = FakeFs::new(executor.clone()); fs.insert_tree( path!("/root"), json!({ + "src": { + "file_c.txt": "File C", + "file_d.txt": "File D" + }, "file_a.txt": "File A", "file_b.txt": "File B", }), ) .await; + // Trashing a file. let root_path = PathBuf::from(path!("/root")); let path = path!("/root/file_a.txt").as_ref(); let trashed_entry = fs - .trash_file(path) + .trash(path, Default::default()) .await .expect("should be able to trash {path:?}"); assert_eq!(trashed_entry.name, "file_a.txt"); assert_eq!(trashed_entry.original_parent, root_path); - assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/file_b.txt"))]); + assert_eq!( + fs.files(), + vec![ + PathBuf::from(path!("/root/file_b.txt")), + PathBuf::from(path!("/root/src/file_c.txt")), + PathBuf::from(path!("/root/src/file_d.txt")) + ] + ); let trash_entries = fs.trash_entries(); assert_eq!(trash_entries.len(), 1); assert_eq!(trash_entries[0].name, "file_a.txt"); assert_eq!(trash_entries[0].original_parent, root_path); -} -#[gpui::test] -async fn test_fake_fs_trash_dir(executor: BackgroundExecutor) { - let fs = FakeFs::new(executor.clone()); - fs.insert_tree( - path!("/root"), - json!({ - "src": { - "file_a.txt": "File A", - "file_b.txt": "File B", - }, - "file_c.txt": "File C", - }), - ) - .await; - - let root_path = PathBuf::from(path!("/root")); + // Trashing a directory. 
let path = path!("/root/src").as_ref(); let trashed_entry = fs - .trash_dir(path) + .trash( + path, + RemoveOptions { + recursive: true, + ..Default::default() + }, + ) .await .expect("should be able to trash {path:?}"); assert_eq!(trashed_entry.name, "src"); assert_eq!(trashed_entry.original_parent, root_path); - assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/file_c.txt"))]); + assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/file_b.txt"))]); let trash_entries = fs.trash_entries(); - assert_eq!(trash_entries.len(), 1); - assert_eq!(trash_entries[0].name, "src"); - assert_eq!(trash_entries[0].original_parent, root_path); + assert_eq!(trash_entries.len(), 2); + assert_eq!(trash_entries[1].name, "src"); + assert_eq!(trash_entries[1].original_parent, root_path); } #[gpui::test] @@ -704,8 +706,8 @@ async fn test_fake_fs_restore(executor: BackgroundExecutor) { .await; // Providing a non-existent `TrashedEntry` should result in an error. - let id: OsString = "/trash/file_c.txt".into(); - let name: OsString = "file_c.txt".into(); + let id = OsString::from("/trash/file_c.txt"); + let name = OsString::from("file_c.txt"); let original_parent = PathBuf::from(path!("/root")); let trashed_entry = TrashedEntry { id, @@ -719,7 +721,7 @@ async fn test_fake_fs_restore(executor: BackgroundExecutor) { // it as part of its list of files, restore it and verify that the list of // files and trash has been updated accordingly. let path = path!("/root/src/file_a.txt").as_ref(); - let trashed_entry = fs.trash_file(path).await.unwrap(); + let trashed_entry = fs.trash(path, Default::default()).await.unwrap(); assert_eq!(fs.trash_entries().len(), 1); assert_eq!( @@ -745,8 +747,12 @@ async fn test_fake_fs_restore(executor: BackgroundExecutor) { // Deleting and restoring a directory should also remove all of its files // but create a single trashed entry, which should be removed after // restoration. 
+ let options = RemoveOptions { + recursive: true, + ..Default::default() + }; let path = path!("/root/src/").as_ref(); - let trashed_entry = fs.trash_dir(path).await.unwrap(); + let trashed_entry = fs.trash(path, options).await.unwrap(); assert_eq!(fs.trash_entries().len(), 1); assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/file_c.txt"))]); @@ -766,7 +772,7 @@ async fn test_fake_fs_restore(executor: BackgroundExecutor) { // A collision error should be returned in case a file is being restored to // a path where a file already exists. let path = path!("/root/src/file_a.txt").as_ref(); - let trashed_entry = fs.trash_file(path).await.unwrap(); + let trashed_entry = fs.trash(path, Default::default()).await.unwrap(); assert_eq!(fs.trash_entries().len(), 1); assert_eq!( @@ -798,8 +804,12 @@ async fn test_fake_fs_restore(executor: BackgroundExecutor) { // A collision error should be returned in case a directory is being // restored to a path where a directory already exists. + let options = RemoveOptions { + recursive: true, + ..Default::default() + }; let path = path!("/root/src/").as_ref(); - let trashed_entry = fs.trash_dir(path).await.unwrap(); + let trashed_entry = fs.trash(path, options).await.unwrap(); assert_eq!(fs.trash_entries().len(), 2); assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/file_c.txt"))]); diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index e992f86fd2fbc49d27f94b8bc80fe0666c162c15..abfea741aeac56bfb921560a505e11281a254fe2 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -2566,7 +2566,7 @@ impl Project { path: ProjectPath, trash: bool, cx: &mut Context, - ) -> Option>> { + ) -> Option>>> { let entry = self.entry_for_path(&path, cx)?; self.delete_entry(entry.id, trash, cx) } @@ -2577,7 +2577,7 @@ impl Project { entry_id: ProjectEntryId, trash: bool, cx: &mut Context, - ) -> Option>> { + ) -> Option>>> { let worktree = self.worktree_for_entry(entry_id, cx)?; 
cx.emit(Event::DeletedEntry(worktree.read(cx).id(), entry_id)); worktree.update(cx, |worktree, cx| { @@ -2585,6 +2585,27 @@ impl Project { }) } + #[inline] + pub fn restore_entry( + &self, + worktree_id: WorktreeId, + trash_entry: TrashedEntry, + cx: &mut Context<'_, Self>, + ) -> Task> { + let Some(worktree) = self.worktree_for_id(worktree_id, cx) else { + return Task::ready(Err(anyhow!("No worktree for id {worktree_id:?}"))); + }; + + cx.spawn(async move |_, cx| { + Worktree::restore_entry(trash_entry, worktree, cx) + .await + .map(|rel_path_buf| ProjectPath { + worktree_id: worktree_id, + path: Arc::from(rel_path_buf.as_rel_path()), + }) + }) + } + #[inline] pub fn expand_entry( &mut self, diff --git a/crates/project_panel/Cargo.toml b/crates/project_panel/Cargo.toml index 2192b8daf3a301d580a3cef73426f6348508a566..62ebe3eb9f5aa06bc7a1a06e611a71c8f1f6215a 100644 --- a/crates/project_panel/Cargo.toml +++ b/crates/project_panel/Cargo.toml @@ -22,6 +22,7 @@ collections.workspace = true command_palette_hooks.workspace = true editor.workspace = true file_icons.workspace = true +futures.workspace = true git_ui.workspace = true git.workspace = true gpui.workspace = true @@ -48,6 +49,7 @@ zed_actions.workspace = true telemetry.workspace = true notifications.workspace = true feature_flags.workspace = true +fs.workspace = true [dev-dependencies] client = { workspace = true, features = ["test-support"] } diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 3d10903eaa7881a75199eb6b1f981479659498f4..b409962d9fd20621ad4c1153ab723cf9e08d85a0 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -47,16 +47,16 @@ use settings::{ update_settings_file, }; use smallvec::SmallVec; -use std::ops::Neg; -use std::{any::TypeId, time::Instant}; use std::{ + any::TypeId, cell::OnceCell, cmp, collections::HashSet, + ops::Neg, ops::Range, path::{Path, PathBuf}, sync::Arc, - time::Duration, 
+ time::{Duration, Instant}, }; use theme_settings::ThemeSettings; use ui::{ @@ -84,7 +84,7 @@ use zed_actions::{ use crate::{ project_panel_settings::ProjectPanelScrollbarProxy, - undo::{ProjectPanelOperation, UndoManager}, + undo::{Change, UndoManager}, }; const PROJECT_PANEL_KEY: &str = "ProjectPanel"; @@ -401,6 +401,8 @@ actions!( CompareMarkedFiles, /// Undoes the last file operation. Undo, + /// Redoes the last undone file operation. + Redo, ] ); @@ -861,6 +863,7 @@ impl ProjectPanel { .detach(); let scroll_handle = UniformListScrollHandle::new(); + let weak_project_panel = cx.weak_entity(); let mut this = Self { project: project.clone(), hover_scroll_task: None, @@ -896,7 +899,7 @@ impl ProjectPanel { unfolded_dir_ids: Default::default(), }, update_visible_entries_task: Default::default(), - undo_manager: UndoManager::new(workspace.weak_handle()), + undo_manager: UndoManager::new(workspace.weak_handle(), weak_project_panel, &cx), }; this.update_visible_entries(None, false, false, window, cx); @@ -1176,6 +1179,11 @@ impl ProjectPanel { "Undo", Box::new(Undo), ) + .action_disabled_when( + !self.undo_manager.can_redo(), + "Redo", + Box::new(Redo), + ) }) .when(is_remote, |menu| { menu.separator() @@ -1874,16 +1882,12 @@ impl ProjectPanel { // Record the operation if the edit was applied if new_entry.is_ok() { let operation = if let Some(old_entry) = edited_entry { - ProjectPanelOperation::Rename { - old_path: (worktree_id, old_entry.path).into(), - new_path: new_project_path, - } + Change::Renamed((worktree_id, old_entry.path).into(), new_project_path) } else { - ProjectPanelOperation::Create { - project_path: new_project_path, - } + Change::Created(new_project_path) }; - project_panel.undo_manager.record(operation); + + project_panel.undo_manager.record([operation]).log_err(); } cx.notify(); @@ -2136,9 +2140,12 @@ impl ProjectPanel { } } - pub fn undo(&mut self, _: &Undo, _window: &mut Window, cx: &mut Context) { - self.undo_manager.undo(cx); - cx.notify(); + 
pub fn undo(&mut self, _: &Undo, _window: &mut Window, _cx: &mut Context) { + self.undo_manager.undo().log_err(); + } + + pub fn redo(&mut self, _: &Redo, _window: &mut Window, _cx: &mut Context) { + self.undo_manager.redo().log_err(); } fn rename_impl( @@ -2331,6 +2338,7 @@ impl ProjectPanel { Some(( selection.entry_id, + selection.worktree_id, project_path.path.file_name()?.to_string(), )) }) @@ -2346,7 +2354,7 @@ impl ProjectPanel { "Are you sure you want to permanently delete" }; let prompt = match file_paths.first() { - Some((_, path)) if file_paths.len() == 1 => { + Some((_, _, path)) if file_paths.len() == 1 => { let unsaved_warning = if dirty_buffers > 0 { "\n\nIt has unsaved changes, which will be lost." } else { @@ -2361,7 +2369,7 @@ impl ProjectPanel { let truncated_path_counts = file_paths.len() - CUTOFF_POINT; let mut paths = file_paths .iter() - .map(|(_, path)| path.clone()) + .map(|(_, _, path)| path.clone()) .take(CUTOFF_POINT) .collect::>(); paths.truncate(CUTOFF_POINT); @@ -2372,7 +2380,7 @@ impl ProjectPanel { } paths } else { - file_paths.iter().map(|(_, path)| path.clone()).collect() + file_paths.iter().map(|(_, _, path)| path.clone()).collect() }; let unsaved_warning = if dirty_buffers == 0 { String::new() @@ -2409,8 +2417,11 @@ impl ProjectPanel { { return anyhow::Ok(()); } - for (entry_id, _) in file_paths { - panel + + let mut changes = Vec::new(); + + for (entry_id, worktree_id, _) in file_paths { + let trashed_entry = panel .update(cx, |panel, cx| { panel .project @@ -2418,8 +2429,19 @@ impl ProjectPanel { .context("no such entry") })?? .await?; + + // Keep track of trashed change so that we can then record + // all of the changes at once, such that undoing and redoing + // restores or trashes all files in batch. 
+ if trash && let Some(trashed_entry) = trashed_entry { + changes.push(Change::Trashed(worktree_id, trashed_entry)); + } } panel.update_in(cx, |panel, window, cx| { + if trash { + panel.undo_manager.record(changes).log_err(); + } + if let Some(next_selection) = next_selection { panel.update_visible_entries( Some((next_selection.worktree_id, next_selection.entry_id)), @@ -3071,8 +3093,8 @@ impl ProjectPanel { enum PasteTask { Rename { task: Task>, - old_path: ProjectPath, - new_path: ProjectPath, + from: ProjectPath, + to: ProjectPath, }, Copy { task: Task>>, @@ -3089,14 +3111,14 @@ impl ProjectPanel { let clip_entry_id = clipboard_entry.entry_id; let destination: ProjectPath = (worktree_id, new_path).into(); let task = if clipboard_entries.is_cut() { - let old_path = self.project.read(cx).path_for_entry(clip_entry_id, cx)?; + let original_path = self.project.read(cx).path_for_entry(clip_entry_id, cx)?; let task = self.project.update(cx, |project, cx| { project.rename_entry(clip_entry_id, destination.clone(), cx) }); PasteTask::Rename { task, - old_path, - new_path: destination, + from: original_path, + to: destination, } } else { let task = self.project.update(cx, |project, cx| { @@ -3113,21 +3135,16 @@ impl ProjectPanel { cx.spawn_in(window, async move |project_panel, mut cx| { let mut last_succeed = None; - let mut operations = Vec::new(); + let mut changes = Vec::new(); for task in paste_tasks { match task { - PasteTask::Rename { - task, - old_path, - new_path, - } => { + PasteTask::Rename { task, from, to } => { if let Some(CreatedEntry::Included(entry)) = task .await .notify_workspace_async_err(workspace.clone(), &mut cx) { - operations - .push(ProjectPanelOperation::Rename { old_path, new_path }); + changes.push(Change::Renamed(from, to)); last_succeed = Some(entry); } } @@ -3136,9 +3153,7 @@ impl ProjectPanel { .await .notify_workspace_async_err(workspace.clone(), &mut cx) { - operations.push(ProjectPanelOperation::Create { - project_path: destination, - }); 
+ changes.push(Change::Created(destination)); last_succeed = Some(entry); } } @@ -3147,7 +3162,7 @@ impl ProjectPanel { project_panel .update(cx, |this, _| { - this.undo_manager.record_batch(operations); + this.undo_manager.record(changes).log_err(); }) .ok(); @@ -4371,6 +4386,20 @@ impl ProjectPanel { this.marked_entries.clear(); this.update_visible_entries(new_selection, false, false, window, cx); } + + let changes: Vec = opened_entries + .iter() + .filter_map(|entry_id| { + worktree.read(cx).entry_for_id(*entry_id).map(|entry| { + Change::Created(ProjectPath { + worktree_id, + path: entry.path.clone(), + }) + }) + }) + .collect(); + + this.undo_manager.record(changes).log_err(); }) } .log_err() @@ -4449,33 +4478,30 @@ impl ProjectPanel { cx.spawn_in(window, async move |project_panel, cx| { let mut last_succeed = None; - let mut operations = Vec::new(); + let mut changes = Vec::new(); for task in copy_tasks.into_iter() { if let Some(Some(entry)) = task.await.log_err() { last_succeed = Some(entry.id); - operations.push(ProjectPanelOperation::Create { - project_path: (worktree_id, entry.path).into(), - }); + changes.push(Change::Created((worktree_id, entry.path).into())); } } // update selection if let Some(entry_id) = last_succeed { - project_panel - .update_in(cx, |project_panel, window, cx| { - project_panel.selection = Some(SelectedEntry { - worktree_id, - entry_id, - }); - - project_panel.undo_manager.record_batch(operations); + project_panel.update_in(cx, |project_panel, window, cx| { + project_panel.selection = Some(SelectedEntry { + worktree_id, + entry_id, + }); + // if only one entry was dragged and it was disambiguated, open the rename editor + if item_count == 1 && disambiguation_range.is_some() { + project_panel.rename_impl(disambiguation_range, window, cx); + } - // if only one entry was dragged and it was disambiguated, open the rename editor - if item_count == 1 && disambiguation_range.is_some() { - project_panel.rename_impl(disambiguation_range, 
window, cx); - } - }) - .ok(); + project_panel.undo_manager.record(changes) + })??; } + + std::result::Result::Ok::<(), anyhow::Error>(()) }) .detach(); Some(()) @@ -4551,7 +4577,7 @@ impl ProjectPanel { let workspace = self.workspace.clone(); if folded_selection_info.is_empty() { cx.spawn_in(window, async move |project_panel, mut cx| { - let mut operations = Vec::new(); + let mut changes = Vec::new(); for (entry_id, task) in move_tasks { if let Some(CreatedEntry::Included(new_entry)) = task .await @@ -4560,16 +4586,16 @@ impl ProjectPanel { if let (Some(old_path), Some(worktree_id)) = (old_paths.get(&entry_id), destination_worktree_id) { - operations.push(ProjectPanelOperation::Rename { - old_path: old_path.clone(), - new_path: (worktree_id, new_entry.path).into(), - }); + changes.push(Change::Renamed( + old_path.clone(), + (worktree_id, new_entry.path).into(), + )); } } } project_panel .update(cx, |this, _| { - this.undo_manager.record_batch(operations); + this.undo_manager.record(changes).log_err(); }) .ok(); }) @@ -4587,10 +4613,10 @@ impl ProjectPanel { if let (Some(old_path), Some(worktree_id)) = (old_paths.get(&entry_id), destination_worktree_id) { - operations.push(ProjectPanelOperation::Rename { - old_path: old_path.clone(), - new_path: (worktree_id, new_entry.path.clone()).into(), - }); + operations.push(Change::Renamed( + old_path.clone(), + (worktree_id, new_entry.path.clone()).into(), + )); } move_results.push((entry_id, new_entry)); } @@ -4602,7 +4628,7 @@ impl ProjectPanel { project_panel .update(cx, |this, _| { - this.undo_manager.record_batch(operations); + this.undo_manager.record(operations).log_err(); }) .ok(); @@ -6640,6 +6666,7 @@ impl Render for ProjectPanel { .on_action(cx.listener(Self::compare_marked_files)) .when(cx.has_flag::(), |el| { el.on_action(cx.listener(Self::undo)) + .on_action(cx.listener(Self::redo)) }) .when(!project.is_read_only(cx), |el| { el.on_action(cx.listener(Self::new_file)) @@ -7333,3 +7360,4 @@ fn 
git_status_indicator(git_status: GitSummary) -> Option<(&'static str, Color)> #[cfg(test)] mod project_panel_tests; +mod tests; diff --git a/crates/project_panel/src/project_panel_tests.rs b/crates/project_panel/src/project_panel_tests.rs index a49b32a694620d4313d4496390d21d85839e4230..db4e8ba3b6b0d7733bf6bee0cf778aa5f5281d40 100644 --- a/crates/project_panel/src/project_panel_tests.rs +++ b/crates/project_panel/src/project_panel_tests.rs @@ -1,4 +1,5 @@ use super::*; +// use crate::undo::tests::{build_create_operation, build_rename_operation}; use collections::HashSet; use editor::MultiBufferOffset; use gpui::{Empty, Entity, TestAppContext, VisualTestContext}; @@ -1994,555 +1995,6 @@ async fn test_copy_paste_nested_and_root_entries(cx: &mut gpui::TestAppContext) ); } -#[gpui::test] -async fn test_undo_rename(cx: &mut gpui::TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "a.txt": "", - "b.txt": "", - }), - ) - .await; - - let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; - let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); - let workspace = window - .read_with(cx, |mw, _| mw.workspace().clone()) - .unwrap(); - let cx = &mut VisualTestContext::from_window(window.into(), cx); - let panel = workspace.update_in(cx, ProjectPanel::new); - cx.run_until_parked(); - - select_path(&panel, "root/a.txt", cx); - panel.update_in(cx, |panel, window, cx| panel.rename(&Rename, window, cx)); - cx.run_until_parked(); - - let confirm = panel.update_in(cx, |panel, window, cx| { - panel - .filename_editor - .update(cx, |editor, cx| editor.set_text("renamed.txt", window, cx)); - panel.confirm_edit(true, window, cx).unwrap() - }); - confirm.await.unwrap(); - cx.run_until_parked(); - - assert!( - find_project_entry(&panel, "root/renamed.txt", cx).is_some(), - "File should be renamed to renamed.txt" - ); - assert_eq!( - find_project_entry(&panel, 
"root/a.txt", cx), - None, - "Original file should no longer exist" - ); - - panel.update_in(cx, |panel, window, cx| { - panel.undo(&Undo, window, cx); - }); - cx.run_until_parked(); - - assert!( - find_project_entry(&panel, "root/a.txt", cx).is_some(), - "File should be restored to original name after undo" - ); - assert_eq!( - find_project_entry(&panel, "root/renamed.txt", cx), - None, - "Renamed file should no longer exist after undo" - ); -} - -#[gpui::test] -async fn test_undo_create_file(cx: &mut gpui::TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "existing.txt": "", - }), - ) - .await; - - let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; - let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); - let workspace = window - .read_with(cx, |mw, _| mw.workspace().clone()) - .unwrap(); - let cx = &mut VisualTestContext::from_window(window.into(), cx); - let panel = workspace.update_in(cx, ProjectPanel::new); - cx.run_until_parked(); - - select_path(&panel, "root", cx); - panel.update_in(cx, |panel, window, cx| panel.new_file(&NewFile, window, cx)); - cx.run_until_parked(); - - let confirm = panel.update_in(cx, |panel, window, cx| { - panel - .filename_editor - .update(cx, |editor, cx| editor.set_text("new.txt", window, cx)); - panel.confirm_edit(true, window, cx).unwrap() - }); - confirm.await.unwrap(); - cx.run_until_parked(); - - assert!( - find_project_entry(&panel, "root/new.txt", cx).is_some(), - "New file should exist" - ); - - panel.update_in(cx, |panel, window, cx| { - panel.undo(&Undo, window, cx); - }); - cx.run_until_parked(); - - assert_eq!( - find_project_entry(&panel, "root/new.txt", cx), - None, - "New file should be removed after undo" - ); - assert!( - find_project_entry(&panel, "root/existing.txt", cx).is_some(), - "Existing file should still be present" - ); -} - -#[gpui::test] -async fn 
test_undo_create_directory(cx: &mut gpui::TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "existing.txt": "", - }), - ) - .await; - - let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; - let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); - let workspace = window - .read_with(cx, |mw, _| mw.workspace().clone()) - .unwrap(); - let cx = &mut VisualTestContext::from_window(window.into(), cx); - let panel = workspace.update_in(cx, ProjectPanel::new); - cx.run_until_parked(); - - select_path(&panel, "root", cx); - panel.update_in(cx, |panel, window, cx| { - panel.new_directory(&NewDirectory, window, cx) - }); - cx.run_until_parked(); - - let confirm = panel.update_in(cx, |panel, window, cx| { - panel - .filename_editor - .update(cx, |editor, cx| editor.set_text("new_dir", window, cx)); - panel.confirm_edit(true, window, cx).unwrap() - }); - confirm.await.unwrap(); - cx.run_until_parked(); - - assert!( - find_project_entry(&panel, "root/new_dir", cx).is_some(), - "New directory should exist" - ); - - panel.update_in(cx, |panel, window, cx| { - panel.undo(&Undo, window, cx); - }); - cx.run_until_parked(); - - assert_eq!( - find_project_entry(&panel, "root/new_dir", cx), - None, - "New directory should be removed after undo" - ); -} - -#[gpui::test] -async fn test_undo_cut_paste(cx: &mut gpui::TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "src": { - "file.txt": "content", - }, - "dst": {}, - }), - ) - .await; - - let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; - let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); - let workspace = window - .read_with(cx, |mw, _| mw.workspace().clone()) - .unwrap(); - let cx = &mut VisualTestContext::from_window(window.into(), cx); - let panel = workspace.update_in(cx, 
ProjectPanel::new); - cx.run_until_parked(); - - toggle_expand_dir(&panel, "root/src", cx); - - select_path_with_mark(&panel, "root/src/file.txt", cx); - panel.update_in(cx, |panel, window, cx| { - panel.cut(&Default::default(), window, cx); - }); - - select_path(&panel, "root/dst", cx); - panel.update_in(cx, |panel, window, cx| { - panel.paste(&Default::default(), window, cx); - }); - cx.run_until_parked(); - - assert!( - find_project_entry(&panel, "root/dst/file.txt", cx).is_some(), - "File should be moved to dst" - ); - assert_eq!( - find_project_entry(&panel, "root/src/file.txt", cx), - None, - "File should no longer be in src" - ); - - panel.update_in(cx, |panel, window, cx| { - panel.undo(&Undo, window, cx); - }); - cx.run_until_parked(); - - assert!( - find_project_entry(&panel, "root/src/file.txt", cx).is_some(), - "File should be back in src after undo" - ); - assert_eq!( - find_project_entry(&panel, "root/dst/file.txt", cx), - None, - "File should no longer be in dst after undo" - ); -} - -#[gpui::test] -async fn test_undo_drag_single_entry(cx: &mut gpui::TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "src": { - "main.rs": "", - }, - "dst": {}, - }), - ) - .await; - - let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; - let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); - let workspace = window - .read_with(cx, |mw, _| mw.workspace().clone()) - .unwrap(); - let cx = &mut VisualTestContext::from_window(window.into(), cx); - let panel = workspace.update_in(cx, ProjectPanel::new); - cx.run_until_parked(); - - toggle_expand_dir(&panel, "root/src", cx); - - panel.update(cx, |panel, _| panel.marked_entries.clear()); - select_path_with_mark(&panel, "root/src/main.rs", cx); - drag_selection_to(&panel, "root/dst", false, cx); - - assert!( - find_project_entry(&panel, "root/dst/main.rs", cx).is_some(), - "File should be in dst after 
drag" - ); - assert_eq!( - find_project_entry(&panel, "root/src/main.rs", cx), - None, - "File should no longer be in src after drag" - ); - - panel.update_in(cx, |panel, window, cx| { - panel.undo(&Undo, window, cx); - }); - cx.run_until_parked(); - - assert!( - find_project_entry(&panel, "root/src/main.rs", cx).is_some(), - "File should be back in src after undo" - ); - assert_eq!( - find_project_entry(&panel, "root/dst/main.rs", cx), - None, - "File should no longer be in dst after undo" - ); -} - -#[gpui::test] -async fn test_undo_drag_multiple_entries(cx: &mut gpui::TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "src": { - "alpha.txt": "", - "beta.txt": "", - }, - "dst": {}, - }), - ) - .await; - - let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; - let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); - let workspace = window - .read_with(cx, |mw, _| mw.workspace().clone()) - .unwrap(); - let cx = &mut VisualTestContext::from_window(window.into(), cx); - let panel = workspace.update_in(cx, ProjectPanel::new); - cx.run_until_parked(); - - toggle_expand_dir(&panel, "root/src", cx); - - panel.update(cx, |panel, _| panel.marked_entries.clear()); - select_path_with_mark(&panel, "root/src/alpha.txt", cx); - select_path_with_mark(&panel, "root/src/beta.txt", cx); - drag_selection_to(&panel, "root/dst", false, cx); - - assert!( - find_project_entry(&panel, "root/dst/alpha.txt", cx).is_some(), - "alpha.txt should be in dst after drag" - ); - assert!( - find_project_entry(&panel, "root/dst/beta.txt", cx).is_some(), - "beta.txt should be in dst after drag" - ); - - // A single undo should revert the entire batch - panel.update_in(cx, |panel, window, cx| { - panel.undo(&Undo, window, cx); - }); - cx.run_until_parked(); - - assert!( - find_project_entry(&panel, "root/src/alpha.txt", cx).is_some(), - "alpha.txt should be back in src after 
undo" - ); - assert!( - find_project_entry(&panel, "root/src/beta.txt", cx).is_some(), - "beta.txt should be back in src after undo" - ); - assert_eq!( - find_project_entry(&panel, "root/dst/alpha.txt", cx), - None, - "alpha.txt should no longer be in dst after undo" - ); - assert_eq!( - find_project_entry(&panel, "root/dst/beta.txt", cx), - None, - "beta.txt should no longer be in dst after undo" - ); -} - -#[gpui::test] -async fn test_multiple_sequential_undos(cx: &mut gpui::TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "a.txt": "", - }), - ) - .await; - - let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; - let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); - let workspace = window - .read_with(cx, |mw, _| mw.workspace().clone()) - .unwrap(); - let cx = &mut VisualTestContext::from_window(window.into(), cx); - let panel = workspace.update_in(cx, ProjectPanel::new); - cx.run_until_parked(); - - select_path(&panel, "root/a.txt", cx); - panel.update_in(cx, |panel, window, cx| panel.rename(&Rename, window, cx)); - cx.run_until_parked(); - let confirm = panel.update_in(cx, |panel, window, cx| { - panel - .filename_editor - .update(cx, |editor, cx| editor.set_text("b.txt", window, cx)); - panel.confirm_edit(true, window, cx).unwrap() - }); - confirm.await.unwrap(); - cx.run_until_parked(); - - assert!(find_project_entry(&panel, "root/b.txt", cx).is_some()); - - select_path(&panel, "root", cx); - panel.update_in(cx, |panel, window, cx| panel.new_file(&NewFile, window, cx)); - cx.run_until_parked(); - let confirm = panel.update_in(cx, |panel, window, cx| { - panel - .filename_editor - .update(cx, |editor, cx| editor.set_text("c.txt", window, cx)); - panel.confirm_edit(true, window, cx).unwrap() - }); - confirm.await.unwrap(); - cx.run_until_parked(); - - assert!(find_project_entry(&panel, "root/b.txt", cx).is_some()); - 
assert!(find_project_entry(&panel, "root/c.txt", cx).is_some()); - - panel.update_in(cx, |panel, window, cx| { - panel.undo(&Undo, window, cx); - }); - cx.run_until_parked(); - - assert_eq!( - find_project_entry(&panel, "root/c.txt", cx), - None, - "c.txt should be removed after first undo" - ); - assert!( - find_project_entry(&panel, "root/b.txt", cx).is_some(), - "b.txt should still exist after first undo" - ); - - panel.update_in(cx, |panel, window, cx| { - panel.undo(&Undo, window, cx); - }); - cx.run_until_parked(); - - assert!( - find_project_entry(&panel, "root/a.txt", cx).is_some(), - "a.txt should be restored after second undo" - ); - assert_eq!( - find_project_entry(&panel, "root/b.txt", cx), - None, - "b.txt should no longer exist after second undo" - ); -} - -#[gpui::test] -async fn test_undo_with_empty_stack(cx: &mut gpui::TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "a.txt": "", - }), - ) - .await; - - let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; - let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); - let workspace = window - .read_with(cx, |mw, _| mw.workspace().clone()) - .unwrap(); - let cx = &mut VisualTestContext::from_window(window.into(), cx); - let panel = workspace.update_in(cx, ProjectPanel::new); - cx.run_until_parked(); - - panel.update_in(cx, |panel, window, cx| { - panel.undo(&Undo, window, cx); - }); - cx.run_until_parked(); - - assert!( - find_project_entry(&panel, "root/a.txt", cx).is_some(), - "File tree should be unchanged after undo on empty stack" - ); -} - -#[gpui::test] -async fn test_undo_batch(cx: &mut gpui::TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "src": { - "main.rs": "// Code!" 
- } - }), - ) - .await; - - let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; - let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); - let workspace = window - .read_with(cx, |mw, _| mw.workspace().clone()) - .unwrap(); - let cx = &mut VisualTestContext::from_window(window.into(), cx); - let panel = workspace.update_in(cx, ProjectPanel::new); - let worktree_id = project.update(cx, |project, cx| { - project.visible_worktrees(cx).next().unwrap().read(cx).id() - }); - cx.run_until_parked(); - - // Since there currently isn't a way to both create a folder and the file - // within it as two separate operations batched under the same - // `ProjectPanelOperation::Batch` operation, we'll simply record those - // ourselves, knowing that the filesystem already has the folder and file - // being provided in the operations. - panel.update(cx, |panel, _cx| { - panel.undo_manager.record_batch(vec![ - ProjectPanelOperation::Create { - project_path: ProjectPath { - worktree_id, - path: Arc::from(rel_path("src/main.rs")), - }, - }, - ProjectPanelOperation::Create { - project_path: ProjectPath { - worktree_id, - path: Arc::from(rel_path("src/")), - }, - }, - ]); - }); - - // Ensure that `src/main.rs` is present in the filesystem before proceeding, - // otherwise this test is irrelevant. 
- assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/src/main.rs"))]); - assert_eq!( - fs.directories(false), - vec![ - PathBuf::from(path!("/")), - PathBuf::from(path!("/root/")), - PathBuf::from(path!("/root/src/")) - ] - ); - - panel.update_in(cx, |panel, window, cx| { - panel.undo(&Undo, window, cx); - }); - cx.run_until_parked(); - - assert_eq!(fs.files().len(), 0); - assert_eq!( - fs.directories(false), - vec![PathBuf::from(path!("/")), PathBuf::from(path!("/root/"))] - ); -} - #[gpui::test] async fn test_paste_external_paths(cx: &mut gpui::TestAppContext) { init_test(cx); @@ -7348,7 +6800,11 @@ async fn test_selection_fallback_to_next_highest_worktree(cx: &mut gpui::TestApp ); } -fn toggle_expand_dir(panel: &Entity, path: &str, cx: &mut VisualTestContext) { +pub(crate) fn toggle_expand_dir( + panel: &Entity, + path: &str, + cx: &mut VisualTestContext, +) { let path = rel_path(path); panel.update_in(cx, |panel, window, cx| { for worktree in panel.project.read(cx).worktrees(cx).collect::>() { @@ -9700,7 +9156,7 @@ async fn test_hide_hidden_entries(cx: &mut gpui::TestAppContext) { ); } -fn select_path(panel: &Entity, path: &str, cx: &mut VisualTestContext) { +pub(crate) fn select_path(panel: &Entity, path: &str, cx: &mut VisualTestContext) { let path = rel_path(path); panel.update_in(cx, |panel, window, cx| { for worktree in panel.project.read(cx).worktrees(cx).collect::>() { @@ -9722,7 +9178,11 @@ fn select_path(panel: &Entity, path: &str, cx: &mut VisualTestCont cx.run_until_parked(); } -fn select_path_with_mark(panel: &Entity, path: &str, cx: &mut VisualTestContext) { +pub(crate) fn select_path_with_mark( + panel: &Entity, + path: &str, + cx: &mut VisualTestContext, +) { let path = rel_path(path); panel.update(cx, |panel, cx| { for worktree in panel.project.read(cx).worktrees(cx).collect::>() { @@ -9810,7 +9270,7 @@ fn set_folded_active_ancestor( }); } -fn drag_selection_to( +pub(crate) fn drag_selection_to( panel: &Entity, target_path: &str, is_file: 
bool, @@ -9835,7 +9295,7 @@ fn drag_selection_to( cx.executor().run_until_parked(); } -fn find_project_entry( +pub(crate) fn find_project_entry( panel: &Entity, path: &str, cx: &mut VisualTestContext, diff --git a/crates/project_panel/src/tests.rs b/crates/project_panel/src/tests.rs new file mode 100644 index 0000000000000000000000000000000000000000..e726758b64b0f5fa9acec59dfa53690f7f9f6f6a --- /dev/null +++ b/crates/project_panel/src/tests.rs @@ -0,0 +1 @@ +pub(crate) mod undo; diff --git a/crates/project_panel/src/tests/undo.rs b/crates/project_panel/src/tests/undo.rs new file mode 100644 index 0000000000000000000000000000000000000000..4315a6ecb4c3d2e5b64487b15144d1956d046228 --- /dev/null +++ b/crates/project_panel/src/tests/undo.rs @@ -0,0 +1,384 @@ +#![cfg(test)] + +use collections::HashSet; +use fs::{FakeFs, Fs}; +use gpui::{Entity, VisualTestContext}; +use project::Project; +use serde_json::{Value, json}; +use std::path::Path; +use std::sync::Arc; +use workspace::MultiWorkspace; + +use crate::project_panel_tests::{self, find_project_entry, select_path}; +use crate::{NewDirectory, NewFile, ProjectPanel, Redo, Rename, Trash, Undo}; + +struct TestContext { + panel: Entity, + fs: Arc, + cx: VisualTestContext, +} + +// Using the `util::path` macro requires a string literal, which would mean that +// callers of, for example, `rename`, would now need to know about `/` and +// use `path!` in tests. +// +// As such, we define it as a function here to make the helper methods more +// ergonomic for our use case. 
+fn path(path: impl AsRef) -> String { + let path = path.as_ref(); + #[cfg(target_os = "windows")] + { + let mut path = path.replace("/", "\\"); + if path.starts_with("\\") { + path = format!("C:{}", &path); + } + path + } + + #[cfg(not(target_os = "windows"))] + { + path.to_string() + } +} + +impl TestContext { + async fn undo(&mut self) { + self.panel.update_in(&mut self.cx, |panel, window, cx| { + panel.undo(&Undo, window, cx); + }); + self.cx.run_until_parked(); + } + async fn redo(&mut self) { + self.panel.update_in(&mut self.cx, |panel, window, cx| { + panel.redo(&Redo, window, cx); + }); + self.cx.run_until_parked(); + } + + /// Note this only works when every file has an extension + fn assert_fs_state_is(&mut self, state: &[&str]) { + let state: HashSet<_> = state + .into_iter() + .map(|s| path(format!("/workspace/{s}"))) + .chain([path("/workspace"), path("/")]) + .map(|s| Path::new(&s).to_path_buf()) + .collect(); + + let dirs: HashSet<_> = state + .iter() + .map(|p| match p.extension() { + Some(_) => p.parent().unwrap_or(Path::new(&path("/"))).to_owned(), + None => p.clone(), + }) + .collect(); + + assert_eq!( + self.fs + .directories(true) + .into_iter() + .collect::>(), + dirs + ); + assert_eq!( + self.fs.paths(true).into_iter().collect::>(), + state + ); + } + + fn assert_exists(&mut self, file: &str) { + assert!( + find_project_entry(&self.panel, &format!("workspace/{file}"), &mut self.cx).is_some(), + "{file} should exist" + ); + } + + fn assert_not_exists(&mut self, file: &str) { + assert_eq!( + find_project_entry(&self.panel, &format!("workspace/{file}"), &mut self.cx), + None, + "{file} should not exist" + ); + } + + async fn rename(&mut self, from: &str, to: &str) { + let from = format!("workspace/{from}"); + let Self { panel, cx, .. 
} = self; + select_path(&panel, &from, cx); + panel.update_in(cx, |panel, window, cx| panel.rename(&Rename, window, cx)); + cx.run_until_parked(); + + let confirm = panel.update_in(cx, |panel, window, cx| { + panel + .filename_editor + .update(cx, |editor, cx| editor.set_text(to, window, cx)); + panel.confirm_edit(true, window, cx).unwrap() + }); + confirm.await.unwrap(); + cx.run_until_parked(); + } + + async fn create_file(&mut self, path: &str) { + let Self { panel, cx, .. } = self; + select_path(&panel, "workspace", cx); + panel.update_in(cx, |panel, window, cx| panel.new_file(&NewFile, window, cx)); + cx.run_until_parked(); + + let confirm = panel.update_in(cx, |panel, window, cx| { + panel + .filename_editor + .update(cx, |editor, cx| editor.set_text(path, window, cx)); + panel.confirm_edit(true, window, cx).unwrap() + }); + confirm.await.unwrap(); + cx.run_until_parked(); + } + + async fn create_directory(&mut self, path: &str) { + let Self { panel, cx, .. } = self; + + select_path(&panel, "workspace", cx); + panel.update_in(cx, |panel, window, cx| { + panel.new_directory(&NewDirectory, window, cx) + }); + cx.run_until_parked(); + + let confirm = panel.update_in(cx, |panel, window, cx| { + panel + .filename_editor + .update(cx, |editor, cx| editor.set_text(path, window, cx)); + panel.confirm_edit(true, window, cx).unwrap() + }); + confirm.await.unwrap(); + cx.run_until_parked(); + } + + /// Drags the `files` to the provided `directory`. + fn drag(&mut self, files: &[&str], directory: &str) { + self.panel + .update(&mut self.cx, |panel, _| panel.marked_entries.clear()); + files.into_iter().for_each(|file| { + project_panel_tests::select_path_with_mark( + &self.panel, + &format!("workspace/{file}"), + &mut self.cx, + ) + }); + project_panel_tests::drag_selection_to( + &self.panel, + &format!("workspace/{directory}"), + false, + &mut self.cx, + ); + } + + /// Only supports files in root (otherwise would need toggle_expand_dir). 
+ /// For undo redo the paths themselves do not matter so this is fine + async fn cut(&mut self, file: &str) { + project_panel_tests::select_path_with_mark( + &self.panel, + &format!("workspace/{file}"), + &mut self.cx, + ); + self.panel.update_in(&mut self.cx, |panel, window, cx| { + panel.cut(&Default::default(), window, cx); + }); + } + + /// Only supports files in root (otherwise would need toggle_expand_dir). + /// For undo redo the paths themselves do not matter so this is fine + async fn paste(&mut self, directory: &str) { + select_path(&self.panel, &format!("workspace/{directory}"), &mut self.cx); + self.panel.update_in(&mut self.cx, |panel, window, cx| { + panel.paste(&Default::default(), window, cx); + }); + self.cx.run_until_parked(); + } + + async fn trash(&mut self, paths: &[&str]) { + paths.iter().for_each(|p| { + project_panel_tests::select_path_with_mark( + &self.panel, + &format!("workspace/{p}"), + &mut self.cx, + ) + }); + + self.panel.update_in(&mut self.cx, |panel, window, cx| { + panel.trash(&Trash { skip_prompt: true }, window, cx); + }); + + self.cx.run_until_parked(); + } + + /// The test tree is: + /// ```txt + /// a.txt + /// b.txt + /// ``` + /// a and b are empty, x has the text "content" inside + async fn new(cx: &mut gpui::TestAppContext) -> TestContext { + Self::new_with_tree( + cx, + json!({ + "a.txt": "", + "b.txt": "", + }), + ) + .await + } + + async fn new_with_tree(cx: &mut gpui::TestAppContext, tree: Value) -> TestContext { + project_panel_tests::init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/workspace", tree).await; + let project = Project::test(fs.clone(), ["/workspace".as_ref()], cx).await; + let window = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = window + .read_with(cx, |mw, _| mw.workspace().clone()) + .unwrap(); + let mut cx = VisualTestContext::from_window(window.into(), cx); + let panel = workspace.update_in(&mut cx, 
ProjectPanel::new); + cx.run_until_parked(); + + TestContext { panel, fs, cx } + } +} + +#[gpui::test] +async fn rename_undo_redo(cx: &mut gpui::TestAppContext) { + let mut cx = TestContext::new(cx).await; + + cx.rename("a.txt", "renamed.txt").await; + cx.assert_fs_state_is(&["b.txt", "renamed.txt"]); + + cx.undo().await; + cx.assert_fs_state_is(&["a.txt", "b.txt"]); + + cx.redo().await; + cx.assert_fs_state_is(&["b.txt", "renamed.txt"]); +} + +#[gpui::test] +async fn create_undo_redo(cx: &mut gpui::TestAppContext) { + let mut cx = TestContext::new(cx).await; + let path = path("/workspace/c.txt"); + + cx.create_file("c.txt").await; + cx.assert_exists("c.txt"); + + // We'll now insert some content into `c.txt` in order to ensure that, after + // undoing the trash operation, i.e., when the file is restored, the actual + // file's contents are preserved instead of a new one with the same path + // being created. + cx.fs.write(Path::new(&path), b"Hello!").await.unwrap(); + + cx.undo().await; + cx.assert_not_exists("c.txt"); + + cx.redo().await; + cx.assert_exists("c.txt"); + assert_eq!(cx.fs.load(Path::new(&path)).await.unwrap(), "Hello!"); +} + +#[gpui::test] +async fn create_dir_undo(cx: &mut gpui::TestAppContext) { + let mut cx = TestContext::new(cx).await; + + cx.create_directory("new_dir").await; + cx.assert_exists("new_dir"); + cx.undo().await; + cx.assert_not_exists("new_dir"); +} + +#[gpui::test] +async fn cut_paste_undo(cx: &mut gpui::TestAppContext) { + let mut cx = TestContext::new(cx).await; + + cx.create_directory("files").await; + cx.cut("a.txt").await; + cx.paste("files").await; + cx.assert_fs_state_is(&["b.txt", "files/", "files/a.txt"]); + + cx.undo().await; + cx.assert_fs_state_is(&["a.txt", "b.txt", "files/"]); + + cx.redo().await; + cx.assert_fs_state_is(&["b.txt", "files/", "files/a.txt"]); +} + +#[gpui::test] +async fn drag_undo_redo(cx: &mut gpui::TestAppContext) { + let mut cx = TestContext::new(cx).await; + + cx.create_directory("src").await; + 
cx.create_file("src/a.rs").await; + cx.assert_fs_state_is(&["a.txt", "b.txt", "src/", "src/a.rs"]); + + cx.drag(&["src/a.rs"], ""); + cx.assert_fs_state_is(&["a.txt", "b.txt", "a.rs", "src/"]); + + cx.undo().await; + cx.assert_fs_state_is(&["a.txt", "b.txt", "src/", "src/a.rs"]); + + cx.redo().await; + cx.assert_fs_state_is(&["a.txt", "b.txt", "a.rs", "src/"]); +} + +#[gpui::test] +async fn drag_multiple_undo_redo(cx: &mut gpui::TestAppContext) { + let mut cx = TestContext::new(cx).await; + + cx.create_directory("src").await; + cx.create_file("src/x.rs").await; + cx.create_file("src/y.rs").await; + + cx.drag(&["src/x.rs", "src/y.rs"], ""); + cx.assert_fs_state_is(&["a.txt", "b.txt", "x.rs", "y.rs", "src/"]); + + cx.undo().await; + cx.assert_fs_state_is(&["a.txt", "b.txt", "src/", "src/x.rs", "src/y.rs"]); + + cx.redo().await; + cx.assert_fs_state_is(&["a.txt", "b.txt", "x.rs", "y.rs", "src/"]); +} + +#[gpui::test] +async fn two_sequential_undos(cx: &mut gpui::TestAppContext) { + let mut cx = TestContext::new(cx).await; + + cx.rename("a.txt", "x.txt").await; + cx.create_file("y.txt").await; + cx.assert_fs_state_is(&["b.txt", "x.txt", "y.txt"]); + + cx.undo().await; + cx.assert_fs_state_is(&["b.txt", "x.txt"]); + + cx.undo().await; + cx.assert_fs_state_is(&["a.txt", "b.txt"]); +} + +#[gpui::test] +async fn undo_without_history(cx: &mut gpui::TestAppContext) { + let mut cx = TestContext::new(cx).await; + + // Undoing without any history should just result in the filesystem state + // remaining unchanged. 
+ cx.undo().await; + cx.assert_fs_state_is(&["a.txt", "b.txt"]) +} + +#[gpui::test] +async fn trash_undo_redo(cx: &mut gpui::TestAppContext) { + let mut cx = TestContext::new(cx).await; + + cx.trash(&["a.txt", "b.txt"]).await; + cx.assert_fs_state_is(&[]); + + cx.undo().await; + cx.assert_fs_state_is(&["a.txt", "b.txt"]); + + cx.redo().await; + cx.assert_fs_state_is(&[]); +} diff --git a/crates/project_panel/src/undo.rs b/crates/project_panel/src/undo.rs index 3a8baa23c55db8f3572174ee667196936e633281..ca4fc9ed375a9a297955f8613fa3df9fe24d568a 100644 --- a/crates/project_panel/src/undo.rs +++ b/crates/project_panel/src/undo.rs @@ -1,286 +1,558 @@ -use anyhow::anyhow; -use gpui::{AppContext, SharedString, Task, WeakEntity}; -use project::ProjectPath; -use std::collections::VecDeque; -use ui::{App, IntoElement, Label, ParentElement, Styled, v_flex}; +//! # Undo Manager +//! +//! ## Operations and Results +//! +//! Undo and Redo actions execute an operation against the filesystem, producing +//! a result that is recorded back into the history in place of the original +//! entry. Each result is the semantic inverse of its paired operation, so the +//! cycle can repeat for continued undo and redo. +//! +//! Operations Results +//! ───────────────────────────────── ────────────────────────────────────── +//! Create(ProjectPath) → Created(ProjectPath) +//! Trash(ProjectPath) → Trashed(TrashedEntry) +//! Rename(ProjectPath, ProjectPath) → Renamed(ProjectPath, ProjectPath) +//! Restore(TrashedEntry) → Restored(ProjectPath) +//! Batch(Vec) → Batch(Vec) +//! +//! +//! ## History and Cursor +//! +//! The undo manager maintains an operation history with a cursor position (↑). +//! Recording an operation appends it to the history and advances the cursor to +//! the end. The cursor separates past entries (left of ↑) from future entries +//! (right of ↑). +//! +//! ─ **Undo**: Takes the history entry just *before* ↑, executes its inverse, +//! 
records the result back in its place, and moves ↑ one step to the left. +//! ─ **Redo**: Takes the history entry just *at* ↑, executes its inverse, +//! records the result back in its place, and advances ↑ one step to the right. +//! +//! +//! ## Example +//! +//! User Operation Create(src/main.rs) +//! History +//! 0 Created(src/main.rs) +//! 1 +++cursor+++ +//! +//! User Operation Rename(README.md, readme.md) +//! History +//! 0 Created(src/main.rs) +//! 1 Renamed(README.md, readme.md) +//! 2 +++cursor+++ +//! +//! User Operation Create(CONTRIBUTING.md) +//! History +//! 0 Created(src/main.rs) +//! 1 Renamed(README.md, readme.md) +//! 2 Created(CONTRIBUTING.md) ──┐ +//! 3 +++cursor+++ │(before the cursor) +//! │ +//! ┌──────────────────────────────┴─────────────────────────────────────────────┐ +//! Redoing will take the result at the cursor position, convert that into the +//! operation that can revert that result, execute that operation and replace +//! the result in the history with the new result, obtained from running the +//! inverse operation, advancing the cursor position. +//! └──────────────────────────────┬─────────────────────────────────────────────┘ +//! │ +//! │ +//! User Operation Undo v +//! Execute Created(CONTRIBUTING.md) ────────> Trash(CONTRIBUTING.md) +//! Record Trashed(TrashedEntry(1)) +//! History +//! 0 Created(src/main.rs) +//! 1 Renamed(README.md, readme.md) ─┐ +//! 2 +++cursor+++ │(before the cursor) +//! 2 Trashed(TrashedEntry(1)) │ +//! │ +//! User Operation Undo v +//! Execute Renamed(README.md, readme.md) ───> Rename(readme.md, README.md) +//! Record Renamed(readme.md, README.md) +//! History +//! 0 Created(src/main.rs) +//! 1 +++cursor+++ +//! 1 Renamed(readme.md, README.md) ─┐ (at the cursor) +//! 2 Trashed(TrashedEntry(1)) │ +//! │ +//! ┌──────────────────────────────────┴─────────────────────────────────────────┐ +//! Redoing will take the result at the cursor position, convert that into the +//! 
operation that can revert that result, execute that operation and replace +//! the result in the history with the new result, obtained from running the +//! inverse operation, advancing the cursor position. +//! └──────────────────────────────────┬─────────────────────────────────────────┘ +//! │ +//! │ +//! User Operation Redo v +//! Execute Renamed(readme.md, README.md) ───> Rename(README.md, readme.md) +//! Record Renamed(README.md, readme.md) +//! History +//! 0 Created(src/main.rs) +//! 1 Renamed(README.md, readme.md) +//! 2 +++cursor+++ +//! 2 Trashed(TrashedEntry(1))────┐ (at the cursor) +//! │ +//! User Operation Redo v +//! Execute Trashed(TrashedEntry(1)) ────────> Restore(TrashedEntry(1)) +//! Record Restored(ProjectPath) +//! History +//! 0 Created(src/main.rs) +//! 1 Renamed(README.md, readme.md) +//! 2 Restored(ProjectPath) +//! 2 +++cursor+++ + +//! +//! create A; A +//! rename A -> B; B +//! undo (rename B -> A) (takes 10s for some reason) B (still b cause it's hanging for 10s) +//! remove B _ +//! create B B +//! put important content in B B +//! undo manger renames (does not hang) A +//! remove A _ +//! user sad + +//! +//! create A; A +//! rename A -> B; B +//! undo (rename B -> A) (takes 10s for some reason) B (still b cause it's hanging for 10s) +//! create C B +//! -- src/c.rs +//! -- + +//! +//! create docs/files/ directory docs/files/ +//! create docs/files/a.txt docs/files/ +//! undo (rename B -> A) (takes 10s for some reason) B (still b cause it's hanging for 10s) +//! create C B +//! -- src/c.rs +//! -- + +//! 
List of "tainted files" that the user may not operate on + +use crate::ProjectPanel; +use anyhow::{Context, Result, anyhow}; +use fs::TrashedEntry; +use futures::channel::mpsc; +use gpui::{AppContext, AsyncApp, SharedString, Task, WeakEntity}; +use project::{ProjectPath, WorktreeId}; +use std::sync::atomic::{AtomicBool, Ordering}; +use std::{collections::VecDeque, sync::Arc}; +use ui::App; use workspace::{ Workspace, notifications::{NotificationId, simple_message_notification::MessageNotification}, }; +use worktree::CreatedEntry; -const MAX_UNDO_OPERATIONS: usize = 10_000; +enum Operation { + Trash(ProjectPath), + Rename(ProjectPath, ProjectPath), + Restore(WorktreeId, TrashedEntry), + Batch(Vec), +} -#[derive(Clone)] -pub enum ProjectPanelOperation { - Batch(Vec), - Create { - project_path: ProjectPath, - }, - Rename { - old_path: ProjectPath, - new_path: ProjectPath, - }, +impl Operation { + async fn execute(self, undo_manager: &Inner, cx: &mut AsyncApp) -> Result { + Ok(match self { + Operation::Trash(project_path) => { + let trash_entry = undo_manager.trash(&project_path, cx).await?; + Change::Trashed(project_path.worktree_id, trash_entry) + } + Operation::Rename(from, to) => { + undo_manager.rename(&from, &to, cx).await?; + Change::Renamed(from, to) + } + Operation::Restore(worktree_id, trashed_entry) => { + let project_path = undo_manager.restore(worktree_id, trashed_entry, cx).await?; + Change::Restored(project_path) + } + Operation::Batch(operations) => { + let mut res = Vec::new(); + for op in operations { + res.push(Box::pin(op.execute(undo_manager, cx)).await?); + } + Change::Batched(res) + } + }) + } } -pub struct UndoManager { +#[derive(Clone, Debug)] +pub(crate) enum Change { + Created(ProjectPath), + Trashed(WorktreeId, TrashedEntry), + Renamed(ProjectPath, ProjectPath), + Restored(ProjectPath), + Batched(Vec), +} + +impl Change { + fn to_inverse(self) -> Operation { + match self { + Change::Created(project_path) => Operation::Trash(project_path), + 
Change::Trashed(worktree_id, trashed_entry) => { + Operation::Restore(worktree_id, trashed_entry) + } + Change::Renamed(from, to) => Operation::Rename(to, from), + Change::Restored(project_path) => Operation::Trash(project_path), + // When inverting a batch of operations, we reverse the order of + // operations to handle dependencies between them. For example, if a + // batch contains the following order of operations: + // + // 1. Create `src/` + // 2. Create `src/main.rs` + // + // If we first tried to revert the directory creation, it would fail + // because there's still files inside the directory. + Change::Batched(changes) => { + Operation::Batch(changes.into_iter().rev().map(Change::to_inverse).collect()) + } + } + } +} + +// Imagine pressing undo 10000+ times?! +const MAX_UNDO_OPERATIONS: usize = 10_000; + +struct Inner { workspace: WeakEntity, - stack: VecDeque, - /// Maximum number of operations to keep on the undo stack. + panel: WeakEntity, + history: VecDeque, + cursor: usize, + /// Maximum number of operations to keep on the undo history. 
limit: usize, + can_undo: Arc, + can_redo: Arc, + rx: mpsc::Receiver, +} + +/// pls arc this +#[derive(Clone)] +pub struct UndoManager { + tx: mpsc::Sender, + can_undo: Arc, + can_redo: Arc, } impl UndoManager { - pub fn new(workspace: WeakEntity) -> Self { - Self::new_with_limit(workspace, MAX_UNDO_OPERATIONS) + pub fn new( + workspace: WeakEntity, + panel: WeakEntity, + cx: &App, + ) -> Self { + let (tx, rx) = mpsc::channel(1024); + let inner = Inner::new(workspace, panel, rx); + + let this = Self { + tx, + can_undo: Arc::clone(&inner.can_undo), + can_redo: Arc::clone(&inner.can_redo), + }; + + cx.spawn(async move |cx| inner.manage_undo_and_redo(cx.clone()).await) + .detach(); + + this } - pub fn new_with_limit(workspace: WeakEntity, limit: usize) -> Self { + pub fn undo(&mut self) -> Result<()> { + self.tx + .try_send(UndoMessage::Undo) + .context("Undo and redo task can not keep up") + } + pub fn redo(&mut self) -> Result<()> { + self.tx + .try_send(UndoMessage::Redo) + .context("Undo and redo task can not keep up") + } + pub fn record(&mut self, changes: impl IntoIterator) -> Result<()> { + self.tx + .try_send(UndoMessage::Changed(changes.into_iter().collect())) + .context("Undo and redo task can not keep up") + } + /// just for the UI, an undo may still fail if there are concurrent file + /// operations happening. + pub fn can_undo(&self) -> bool { + self.can_undo.load(Ordering::Relaxed) + } + /// just for the UI, an undo may still fail if there are concurrent file + /// operations happening. 
+ pub fn can_redo(&self) -> bool { + self.can_redo.load(Ordering::Relaxed) + } +} + +#[derive(Debug)] +enum UndoMessage { + Changed(Vec), + Undo, + Redo, +} + +impl UndoMessage { + fn error_title(&self) -> &'static str { + match self { + UndoMessage::Changed(_) => { + "this is a bug in the manage_undo_and_redo task please report" + } + UndoMessage::Undo => "Undo failed", + UndoMessage::Redo => "Redo failed", + } + } +} + +impl Inner { + async fn manage_undo_and_redo(mut self, mut cx: AsyncApp) { + loop { + let Ok(new) = self.rx.recv().await else { + // project panel got closed + return; + }; + + let error_title = new.error_title(); + let res = match new { + UndoMessage::Changed(changes) => { + self.record(changes); + Ok(()) + } + UndoMessage::Undo => { + let res = self.undo(&mut cx).await; + let _ = self.panel.update(&mut cx, |_, cx| cx.notify()); + res + } + UndoMessage::Redo => { + let res = self.redo(&mut cx).await; + let _ = self.panel.update(&mut cx, |_, cx| cx.notify()); + res + } + }; + + if let Err(e) = res { + Self::show_error(error_title, self.workspace.clone(), e.to_string(), &mut cx); + } + + self.can_undo.store(self.can_undo(), Ordering::Relaxed); + self.can_redo.store(self.can_redo(), Ordering::Relaxed); + } + } +} + +impl Inner { + pub fn new( + workspace: WeakEntity, + panel: WeakEntity, + rx: mpsc::Receiver, + ) -> Self { + Self::new_with_limit(workspace, panel, MAX_UNDO_OPERATIONS, rx) + } + + pub fn new_with_limit( + workspace: WeakEntity, + panel: WeakEntity, + limit: usize, + rx: mpsc::Receiver, + ) -> Self { Self { workspace, + panel, + history: VecDeque::new(), + cursor: 0usize, limit, - stack: VecDeque::new(), + can_undo: Arc::new(AtomicBool::new(false)), + can_redo: Arc::new(AtomicBool::new(false)), + rx, } } pub fn can_undo(&self) -> bool { - !self.stack.is_empty() + self.cursor > 0 } - pub fn undo(&mut self, cx: &mut App) { - if let Some(operation) = self.stack.pop_back() { - let task = self.revert_operation(operation, cx); - let 
workspace = self.workspace.clone(); - - cx.spawn(async move |cx| { - let errors = task.await; - if !errors.is_empty() { - cx.update(|cx| { - let messages = errors - .iter() - .map(|err| SharedString::from(err.to_string())) - .collect(); - - Self::show_errors(workspace, messages, cx) - }) - } - }) - .detach(); + pub fn can_redo(&self) -> bool { + self.cursor < self.history.len() + } + + pub async fn undo(&mut self, cx: &mut AsyncApp) -> Result<()> { + if !self.can_undo() { + return Ok(()); } + + // Undo failure: + // + // History + // 0 Created(src/main.rs) + // 1 Renamed(README.md, readme.md) ─┐ + // 2 +++cursor+++ │(before the cursor) + // 2 Trashed(TrashedEntry(1)) │ + // │ + // User Operation Undo v + // Failed execute Renamed(README.md, readme.md) ───> Rename(readme.md, README.md) + // Record nothing + // History + // 0 Created(src/main.rs) + // 1 +++cursor+++ + // 1 Trashed(TrashedEntry(1)) ----- + // |(at the cursor) + // User Operation Redo v + // Execute Trashed(TrashedEntry(1)) ────────> Restore(TrashedEntry(1)) + // Record Restored(ProjectPath) + // History + // 0 Created(src/main.rs) + // 1 Restored(ProjectPath) + // 1 +++cursor+++ + + // We always want to move the cursor back regardless of whether undoing + // succeeds or fails, otherwise the cursor could end up pointing to a + // position outside of the history, as we remove the change before the + // cursor, in case undo fails. + let before_cursor = self.cursor - 1; // see docs above + self.cursor -= 1; // take a step back into the past + + // If undoing fails, the user would be in a stuck state from which + // manual intervention would likely be needed in order to undo. As such, + // we remove the change from the `history` even before attempting to + // execute its inversion. 
+ let undo_change = self + .history + .remove(before_cursor) + .expect("we can undo") + .to_inverse() + .execute(self, cx) + .await?; + self.history.insert(before_cursor, undo_change); + Ok(()) } - pub fn record(&mut self, operation: ProjectPanelOperation) { - if self.stack.len() >= self.limit { - self.stack.pop_front(); + pub async fn redo(&mut self, cx: &mut AsyncApp) -> Result<()> { + if !self.can_redo() { + return Ok(()); } - self.stack.push_back(operation); + // If redoing fails, the user would be in a stuck state from which + // manual intervention would likely be needed in order to redo. As such, + // we remove the change from the `history` even before attempting to + // execute its inversion. + let redo_change = self + .history + .remove(self.cursor) + .expect("we can redo") + .to_inverse() + .execute(self, cx) + .await?; + self.history.insert(self.cursor, redo_change); + self.cursor += 1; + Ok(()) } - pub fn record_batch(&mut self, operations: impl IntoIterator) { - let mut operations = operations.into_iter().collect::>(); - let operation = match operations.len() { + /// Passed in changes will always be performed as a single step + pub fn record(&mut self, mut changes: Vec) { + let change = match changes.len() { 0 => return, - 1 => operations.pop().unwrap(), - _ => ProjectPanelOperation::Batch(operations), + 1 => changes.remove(0), + _ => Change::Batched(changes), }; - self.record(operation); + // When recording a new change, discard any changes that could still be + // redone. + if self.cursor < self.history.len() { + self.history.drain(self.cursor..); + } + + // Ensure that the number of recorded changes does not exceed the + // maximum amount of tracked changes. + if self.history.len() >= self.limit { + self.history.pop_front(); + } else { + self.cursor += 1; + } + + self.history.push_back(change); } - /// Attempts to revert the provided `operation`, returning a vector of errors - /// in case there was any failure while reverting the operation. 
- /// - /// For all operations other than [`crate::undo::ProjectPanelOperation::Batch`], a maximum - /// of one error is returned. - fn revert_operation( + async fn rename( &self, - operation: ProjectPanelOperation, - cx: &mut App, - ) -> Task> { - match operation { - ProjectPanelOperation::Create { project_path } => { - let Some(workspace) = self.workspace.upgrade() else { - return Task::ready(vec![anyhow!("Failed to obtain workspace.")]); - }; - - let result = workspace.update(cx, |workspace, cx| { - workspace.project().update(cx, |project, cx| { - let entry_id = project - .entry_for_path(&project_path, cx) - .map(|entry| entry.id) - .ok_or_else(|| anyhow!("No entry for path."))?; - - project - .delete_entry(entry_id, true, cx) - .ok_or_else(|| anyhow!("Failed to trash entry.")) - }) - }); - - let task = match result { - Ok(task) => task, - Err(err) => return Task::ready(vec![err]), - }; - - cx.spawn(async move |_| match task.await { - Ok(_) => vec![], - Err(err) => vec![err], - }) - } - ProjectPanelOperation::Rename { old_path, new_path } => { - let Some(workspace) = self.workspace.upgrade() else { - return Task::ready(vec![anyhow!("Failed to obtain workspace.")]); - }; - - let result = workspace.update(cx, |workspace, cx| { - workspace.project().update(cx, |project, cx| { - let entry_id = project - .entry_for_path(&new_path, cx) - .map(|entry| entry.id) - .ok_or_else(|| anyhow!("No entry for path."))?; - - Ok(project.rename_entry(entry_id, old_path.clone(), cx)) - }) - }); - - let task = match result { - Ok(task) => task, - Err(err) => return Task::ready(vec![err]), - }; - - cx.spawn(async move |_| match task.await { - Ok(_) => vec![], - Err(err) => vec![err], + from: &ProjectPath, + to: &ProjectPath, + cx: &mut AsyncApp, + ) -> Result { + let Some(workspace) = self.workspace.upgrade() else { + return Err(anyhow!("Failed to obtain workspace.")); + }; + + let res: Result>> = workspace.update(cx, |workspace, cx| { + workspace.project().update(cx, |project, cx| { 
+ let entry_id = project + .entry_for_path(from, cx) + .map(|entry| entry.id) + .ok_or_else(|| anyhow!("No entry for path."))?; + + Ok(project.rename_entry(entry_id, to.clone(), cx)) + }) + }); + + res?.await + } + + async fn trash(&self, project_path: &ProjectPath, cx: &mut AsyncApp) -> Result { + let Some(workspace) = self.workspace.upgrade() else { + return Err(anyhow!("Failed to obtain workspace.")); + }; + + workspace + .update(cx, |workspace, cx| { + workspace.project().update(cx, |project, cx| { + let entry_id = project + .entry_for_path(&project_path, cx) + .map(|entry| entry.id) + .ok_or_else(|| anyhow!("No entry for path."))?; + + project + .delete_entry(entry_id, true, cx) + .ok_or_else(|| anyhow!("Worktree entry should exist")) }) - } - ProjectPanelOperation::Batch(operations) => { - // When reverting operations in a batch, we reverse the order of - // operations to handle dependencies between them. For example, - // if a batch contains the following order of operations: - // - // 1. Create `src/` - // 2. Create `src/main.rs` - // - // If we first try to revert the directory creation, it would - // fail because there's still files inside the directory. - // Operations are also reverted sequentially in order to avoid - // this same problem. - let tasks: Vec<_> = operations - .into_iter() - .rev() - .map(|operation| self.revert_operation(operation, cx)) - .collect(); - - cx.spawn(async move |_| { - let mut errors = Vec::new(); - for task in tasks { - errors.extend(task.await); - } - errors + })? 
+ .await + .and_then(|entry| { + entry.ok_or_else(|| anyhow!("When trashing we should always get a trashentry")) + }) + } + + async fn restore( + &self, + worktree_id: WorktreeId, + trashed_entry: TrashedEntry, + cx: &mut AsyncApp, + ) -> Result { + let Some(workspace) = self.workspace.upgrade() else { + return Err(anyhow!("Failed to obtain workspace.")); + }; + + workspace + .update(cx, |workspace, cx| { + workspace.project().update(cx, |project, cx| { + project.restore_entry(worktree_id, trashed_entry, cx) }) - } - } + }) + .await } - /// Displays a notification with the list of provided errors ensuring that, - /// when more than one error is provided, which can be the case when dealing - /// with undoing a [`crate::undo::ProjectPanelOperation::Batch`], a list is - /// displayed with each of the errors, instead of a single message. - fn show_errors(workspace: WeakEntity, messages: Vec, cx: &mut App) { + /// Displays a notification with the provided `title` and `error`. + fn show_error( + title: impl Into, + workspace: WeakEntity, + error: String, + cx: &mut AsyncApp, + ) { workspace .update(cx, move |workspace, cx| { let notification_id = NotificationId::Named(SharedString::new_static("project_panel_undo")); workspace.show_notification(notification_id, cx, move |cx| { - cx.new(|cx| { - if let [err] = messages.as_slice() { - MessageNotification::new(err.to_string(), cx) - .with_title("Failed to undo Project Panel Operation") - } else { - MessageNotification::new_from_builder(cx, move |_, _| { - v_flex() - .gap_1() - .children( - messages - .iter() - .map(|message| Label::new(format!("- {message}"))), - ) - .into_any_element() - }) - .with_title("Failed to undo Project Panel Operations") - } - }) + cx.new(|cx| MessageNotification::new(error, cx).with_title(title)) }) }) .ok(); } } - -#[cfg(test)] -mod test { - use crate::{ - ProjectPanel, project_panel_tests, - undo::{ProjectPanelOperation, UndoManager}, - }; - use gpui::{Entity, TestAppContext, VisualTestContext}; 
- use project::{FakeFs, Project, ProjectPath}; - use std::sync::Arc; - use util::rel_path::rel_path; - use workspace::MultiWorkspace; - - struct TestContext { - project: Entity, - panel: Entity, - } - - async fn init_test(cx: &mut TestAppContext) -> TestContext { - project_panel_tests::init_test(cx); - - let fs = FakeFs::new(cx.executor()); - let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; - let window = - cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); - let workspace = window - .read_with(cx, |mw, _| mw.workspace().clone()) - .unwrap(); - let cx = &mut VisualTestContext::from_window(window.into(), cx); - let panel = workspace.update_in(cx, ProjectPanel::new); - cx.run_until_parked(); - - TestContext { project, panel } - } - - #[gpui::test] - async fn test_limit(cx: &mut TestAppContext) { - let test_context = init_test(cx).await; - let worktree_id = test_context.project.update(cx, |project, cx| { - project.visible_worktrees(cx).next().unwrap().read(cx).id() - }); - - let build_create_operation = |file_name: &str| ProjectPanelOperation::Create { - project_path: ProjectPath { - path: Arc::from(rel_path(file_name)), - worktree_id, - }, - }; - - // Since we're updating the `ProjectPanel`'s undo manager with one whose - // limit is 3 operations, we only need to create 4 operations which - // we'll record, in order to confirm that the oldest operation is - // evicted. 
- let operation_a = build_create_operation("file_a.txt"); - let operation_b = build_create_operation("file_b.txt"); - let operation_c = build_create_operation("file_c.txt"); - let operation_d = build_create_operation("file_d.txt"); - - test_context.panel.update(cx, move |panel, _cx| { - panel.undo_manager = UndoManager::new_with_limit(panel.workspace.clone(), 3); - panel.undo_manager.record(operation_a); - panel.undo_manager.record(operation_b); - panel.undo_manager.record(operation_c); - panel.undo_manager.record(operation_d); - - assert_eq!(panel.undo_manager.stack.len(), 3); - }); - } -} diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 7046e4afdfd95ba341b7b87846c9e3e4520849f6..ab8d448d793fb63a1c5bb5fe1c3bb05886c0866d 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -8,7 +8,8 @@ use clock::ReplicaId; use collections::{HashMap, HashSet, VecDeque}; use encoding_rs::Encoding; use fs::{ - Fs, MTime, PathEvent, PathEventKind, RemoveOptions, Watcher, copy_recursive, read_dir_items, + Fs, MTime, PathEvent, PathEventKind, RemoveOptions, TrashedEntry, Watcher, copy_recursive, + read_dir_items, }; use futures::{ FutureExt as _, Stream, StreamExt, @@ -70,7 +71,7 @@ use text::{LineEnding, Rope}; use util::{ ResultExt, maybe, paths::{PathMatcher, PathStyle, SanitizedPath, home_dir}, - rel_path::RelPath, + rel_path::{RelPath, RelPathBuf}, }; pub use worktree_settings::WorktreeSettings; @@ -848,7 +849,7 @@ impl Worktree { entry_id: ProjectEntryId, trash: bool, cx: &mut Context, - ) -> Option>> { + ) -> Option>>> { let task = match self { Worktree::Local(this) => this.delete_entry(entry_id, trash, cx), Worktree::Remote(this) => this.delete_entry(entry_id, trash, cx), @@ -870,6 +871,20 @@ impl Worktree { Some(task) } + pub async fn restore_entry( + trash_entry: TrashedEntry, + worktree: Entity, + cx: &mut AsyncApp, + ) -> Result { + let is_local = worktree.read_with(cx, |this, _| this.is_local()); + if 
is_local { + LocalWorktree::restore_entry(trash_entry, worktree, cx).await + } else { + // TODO(dino): Add support for restoring entries in remote worktrees. + Err(anyhow!("Unsupported")) + } + } + fn get_children_ids_recursive(&self, path: &RelPath, ids: &mut Vec) { let children_iter = self.child_entries(path); for child in children_iter { @@ -1685,35 +1700,46 @@ impl LocalWorktree { entry_id: ProjectEntryId, trash: bool, cx: &Context, - ) -> Option>> { + ) -> Option>>> { let entry = self.entry_for_id(entry_id)?.clone(); let abs_path = self.absolutize(&entry.path); let fs = self.fs.clone(); let delete = cx.background_spawn(async move { - if entry.is_file() { - if trash { - fs.trash_file(&abs_path).await?; - } else { + let trashed_entry = match (entry.is_file(), trash) { + (true, true) => Some(fs.trash(&abs_path, Default::default()).await?), + (false, true) => Some( + fs.trash( + &abs_path, + RemoveOptions { + recursive: true, + ignore_if_not_exists: false, + }, + ) + .await?, + ), + (true, false) => { fs.remove_file(&abs_path, Default::default()).await?; + None } - } else if trash { - fs.trash_dir(&abs_path).await?; - } else { - fs.remove_dir( - &abs_path, - RemoveOptions { - recursive: true, - ignore_if_not_exists: false, - }, - ) - .await?; - } - anyhow::Ok(entry.path) + (false, false) => { + fs.remove_dir( + &abs_path, + RemoveOptions { + recursive: true, + ignore_if_not_exists: false, + }, + ) + .await?; + None + } + }; + + anyhow::Ok((trashed_entry, entry.path)) }); Some(cx.spawn(async move |this, cx| { - let path = delete.await?; + let (trashed_entry, path) = delete.await?; this.update(cx, |this, _| { this.as_local_mut() .unwrap() @@ -1721,10 +1747,39 @@ impl LocalWorktree { })? 
.recv() .await; - Ok(()) + + Ok(trashed_entry) })) } + pub async fn restore_entry( + trash_entry: TrashedEntry, + this: Entity, + cx: &mut AsyncApp, + ) -> Result { + let Some((fs, worktree_abs_path, path_style)) = this.read_with(cx, |this, _cx| { + let local_worktree = match this { + Worktree::Local(local_worktree) => local_worktree, + Worktree::Remote(_) => return None, + }; + + let fs = local_worktree.fs.clone(); + let path_style = local_worktree.path_style(); + Some((fs, Arc::clone(local_worktree.abs_path()), path_style)) + }) else { + return Err(anyhow!("Localworktree should not change into a remote one")); + }; + + let path_buf = fs.restore(trash_entry).await?; + let path = path_buf + .strip_prefix(worktree_abs_path) + .context("Could not strip prefix")?; + let path = RelPath::new(&path, path_style)?; + let path = path.into_owned(); + + Ok(path) + } + pub fn copy_external_entries( &self, target_directory: Arc, @@ -2092,7 +2147,7 @@ impl RemoteWorktree { entry_id: ProjectEntryId, trash: bool, cx: &Context, - ) -> Option>> { + ) -> Option>>> { let response = self.client.request(proto::DeleteProjectEntry { project_id: self.project_id, entry_id: entry_id.to_proto(), @@ -2112,6 +2167,12 @@ impl RemoteWorktree { let snapshot = &mut this.background_snapshot.lock().0; snapshot.delete_entry(entry_id); this.snapshot = snapshot.clone(); + + // TODO: How can we actually track the deleted entry when + // working in remote? We likely only need to keep this + // information on the remote side in order to support restoring + // the trashed file. 
+ None }) })) } @@ -2578,15 +2639,14 @@ impl Snapshot { } pub fn entry_for_path(&self, path: &RelPath) -> Option<&Entry> { - self.traverse_from_path(true, true, true, path) - .entry() - .and_then(|entry| { - if entry.path.as_ref() == path { - Some(entry) - } else { - None - } - }) + let entry = self.traverse_from_path(true, true, true, path).entry(); + entry.and_then(|entry| { + if entry.path.as_ref() == path { + Some(entry) + } else { + None + } + }) } /// Resolves a path to an executable using the following heuristics: diff --git a/crates/worktree/tests/integration/main.rs b/crates/worktree/tests/integration/main.rs index 633a04ad7ac1b7cb0aea93ddcc60ca38fba5fe98..47ce5e6b0a98baab6c710cd4116bef52f45dc8a1 100644 --- a/crates/worktree/tests/integration/main.rs +++ b/crates/worktree/tests/integration/main.rs @@ -2207,7 +2207,14 @@ fn randomly_mutate_worktree( match rng.random_range(0_u32..100) { 0..=33 if entry.path.as_ref() != RelPath::empty() => { log::info!("deleting entry {:?} ({})", entry.path, entry.id.to_usize()); - worktree.delete_entry(entry.id, false, cx).unwrap() + let task = worktree + .delete_entry(entry.id, false, cx) + .unwrap_or_else(|| Task::ready(Ok(None))); + + cx.background_spawn(async move { + task.await?; + Ok(()) + }) } _ => { if entry.is_dir() { From e7ba1715688433c92586e7493decc11e424e804e Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Thu, 9 Apr 2026 13:54:34 -0400 Subject: [PATCH 16/67] Clean up orphaned files on git worktree creation failure (#53287) Update `await_and_rollback_on_failure` in `agent_panel.rs` to comprehensively clean up both git metadata and filesystem artifacts when worktree creation fails. Release Notes: - Clean up files and git metadata when worktree creation fails during new agent thread setup. 
--- crates/agent_ui/src/agent_panel.rs | 369 +++++++++++++++++++++++++++-- 1 file changed, 343 insertions(+), 26 deletions(-) diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index f5bc572f853d770981d36853222cf10f7108a26b..3c735832f7ca30deca30977f12506697df25841f 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -2795,6 +2795,7 @@ impl AgentPanel { PathBuf, futures::channel::oneshot::Receiver>, )>, + fs: Arc, cx: &mut AsyncWindowContext, ) -> Result> { let mut created_paths: Vec = Vec::new(); @@ -2803,10 +2804,10 @@ impl AgentPanel { let mut first_error: Option = None; for (repo, new_path, receiver) in creation_infos { + repos_and_paths.push((repo.clone(), new_path.clone())); match receiver.await { Ok(Ok(())) => { - created_paths.push(new_path.clone()); - repos_and_paths.push((repo, new_path)); + created_paths.push(new_path); } Ok(Err(err)) => { if first_error.is_none() { @@ -2825,34 +2826,66 @@ impl AgentPanel { return Ok(created_paths); }; - // Rollback all successfully created worktrees - let mut rollback_receivers = Vec::new(); + // Rollback all attempted worktrees (both successful and failed) + let mut rollback_futures = Vec::new(); for (rollback_repo, rollback_path) in &repos_and_paths { - if let Ok(receiver) = cx.update(|_, cx| { - rollback_repo.update(cx, |repo, _cx| { - repo.remove_worktree(rollback_path.clone(), true) + let receiver = cx + .update(|_, cx| { + rollback_repo.update(cx, |repo, _cx| { + repo.remove_worktree(rollback_path.clone(), true) + }) }) - }) { - rollback_receivers.push((rollback_path.clone(), receiver)); - } + .ok(); + + rollback_futures.push((rollback_path.clone(), receiver)); } + let mut rollback_failures: Vec = Vec::new(); - for (path, receiver) in rollback_receivers { - match receiver.await { - Ok(Ok(())) => {} - Ok(Err(rollback_err)) => { - log::error!( - "failed to rollback worktree at {}: {rollback_err}", - path.display() - ); - 
rollback_failures.push(format!("{}: {rollback_err}", path.display())); + for (path, receiver_opt) in rollback_futures { + let mut git_remove_failed = false; + + if let Some(receiver) = receiver_opt { + match receiver.await { + Ok(Ok(())) => {} + Ok(Err(rollback_err)) => { + log::error!( + "git worktree remove failed for {}: {rollback_err}", + path.display() + ); + git_remove_failed = true; + } + Err(canceled) => { + log::error!( + "git worktree remove failed for {}: {canceled}", + path.display() + ); + git_remove_failed = true; + } } - Err(rollback_err) => { - log::error!( - "failed to rollback worktree at {}: {rollback_err}", - path.display() - ); - rollback_failures.push(format!("{}: {rollback_err}", path.display())); + } else { + log::error!( + "failed to dispatch git worktree remove for {}", + path.display() + ); + git_remove_failed = true; + } + + // `git worktree remove` normally removes this directory, but since + // `git worktree remove` failed (or wasn't dispatched), manually rm the directory. 
+ if git_remove_failed { + if let Err(fs_err) = fs + .remove_dir( + &path, + fs::RemoveOptions { + recursive: true, + ignore_if_not_exists: true, + }, + ) + .await + { + let msg = format!("{}: failed to remove directory: {fs_err}", path.display()); + log::error!("{}", msg); + rollback_failures.push(msg); } } } @@ -3058,8 +3091,10 @@ impl AgentPanel { } }; + let fs = cx.update(|_, cx| ::global(cx))?; + let created_paths = - match Self::await_and_rollback_on_failure(creation_infos, cx).await { + match Self::await_and_rollback_on_failure(creation_infos, fs, cx).await { Ok(paths) => paths, Err(err) => { this.update_in(cx, |this, window, cx| { @@ -4769,6 +4804,7 @@ mod tests { }; use acp_thread::{StubAgentConnection, ThreadStatus}; use agent_servers::CODEX_ID; + use feature_flags::FeatureFlagAppExt; use fs::FakeFs; use gpui::{TestAppContext, VisualTestContext}; use project::Project; @@ -6671,6 +6707,287 @@ mod tests { }); } + #[gpui::test] + async fn test_rollback_all_succeed_returns_ok(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + cx.update(|cx| { + cx.update_flags(true, vec!["agent-v2".to_string()]); + agent::ThreadStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + ::set_global(fs.clone(), cx); + }); + + fs.insert_tree( + "/project", + json!({ + ".git": {}, + "src": { "main.rs": "fn main() {}" } + }), + ) + .await; + + let project = Project::test(fs.clone(), [Path::new("/project")], cx).await; + cx.executor().run_until_parked(); + + let repository = project.read_with(cx, |project, cx| { + project.repositories(cx).values().next().unwrap().clone() + }); + + let multi_workspace = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + let path_a = PathBuf::from("/worktrees/branch/project_a"); + let path_b = PathBuf::from("/worktrees/branch/project_b"); + + let (sender_a, receiver_a) = futures::channel::oneshot::channel::>(); + let (sender_b, receiver_b) = 
futures::channel::oneshot::channel::>(); + sender_a.send(Ok(())).unwrap(); + sender_b.send(Ok(())).unwrap(); + + let creation_infos = vec![ + (repository.clone(), path_a.clone(), receiver_a), + (repository.clone(), path_b.clone(), receiver_b), + ]; + + let fs_clone = fs.clone(); + let result = multi_workspace + .update(cx, |_, window, cx| { + window.spawn(cx, async move |cx| { + AgentPanel::await_and_rollback_on_failure(creation_infos, fs_clone, cx).await + }) + }) + .unwrap() + .await; + + let paths = result.expect("all succeed should return Ok"); + assert_eq!(paths, vec![path_a, path_b]); + } + + #[gpui::test] + async fn test_rollback_on_failure_attempts_all_worktrees(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + cx.update(|cx| { + cx.update_flags(true, vec!["agent-v2".to_string()]); + agent::ThreadStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + ::set_global(fs.clone(), cx); + }); + + fs.insert_tree( + "/project", + json!({ + ".git": {}, + "src": { "main.rs": "fn main() {}" } + }), + ) + .await; + + let project = Project::test(fs.clone(), [Path::new("/project")], cx).await; + cx.executor().run_until_parked(); + + let repository = project.read_with(cx, |project, cx| { + project.repositories(cx).values().next().unwrap().clone() + }); + + // Actually create a worktree so it exists in FakeFs for rollback to find. + let success_path = PathBuf::from("/worktrees/branch/project"); + cx.update(|cx| { + repository.update(cx, |repo, _| { + repo.create_worktree( + git::repository::CreateWorktreeTarget::NewBranch { + branch_name: "branch".to_string(), + base_sha: None, + }, + success_path.clone(), + ) + }) + }) + .await + .unwrap() + .unwrap(); + cx.executor().run_until_parked(); + + // Verify the worktree directory exists before rollback. 
+ assert!( + fs.is_dir(&success_path).await, + "worktree directory should exist before rollback" + ); + + let multi_workspace = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + // Build creation_infos: one success, one failure. + let failed_path = PathBuf::from("/worktrees/branch/failed_project"); + + let (sender_ok, receiver_ok) = futures::channel::oneshot::channel::>(); + let (sender_err, receiver_err) = futures::channel::oneshot::channel::>(); + sender_ok.send(Ok(())).unwrap(); + sender_err + .send(Err(anyhow!("branch already exists"))) + .unwrap(); + + let creation_infos = vec![ + (repository.clone(), success_path.clone(), receiver_ok), + (repository.clone(), failed_path.clone(), receiver_err), + ]; + + let fs_clone = fs.clone(); + let result = multi_workspace + .update(cx, |_, window, cx| { + window.spawn(cx, async move |cx| { + AgentPanel::await_and_rollback_on_failure(creation_infos, fs_clone, cx).await + }) + }) + .unwrap() + .await; + + assert!( + result.is_err(), + "should return error when any creation fails" + ); + let err_msg = result.unwrap_err().to_string(); + assert!( + err_msg.contains("branch already exists"), + "error should mention the original failure: {err_msg}" + ); + + // The successful worktree should have been rolled back by git. 
+ cx.executor().run_until_parked(); + assert!( + !fs.is_dir(&success_path).await, + "successful worktree directory should be removed by rollback" + ); + } + + #[gpui::test] + async fn test_rollback_on_canceled_receiver(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + cx.update(|cx| { + cx.update_flags(true, vec!["agent-v2".to_string()]); + agent::ThreadStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + ::set_global(fs.clone(), cx); + }); + + fs.insert_tree( + "/project", + json!({ + ".git": {}, + "src": { "main.rs": "fn main() {}" } + }), + ) + .await; + + let project = Project::test(fs.clone(), [Path::new("/project")], cx).await; + cx.executor().run_until_parked(); + + let repository = project.read_with(cx, |project, cx| { + project.repositories(cx).values().next().unwrap().clone() + }); + + let multi_workspace = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + let path = PathBuf::from("/worktrees/branch/project"); + + // Drop the sender to simulate a canceled receiver. 
+ let (_sender, receiver) = futures::channel::oneshot::channel::>(); + drop(_sender); + + let creation_infos = vec![(repository.clone(), path.clone(), receiver)]; + + let fs_clone = fs.clone(); + let result = multi_workspace + .update(cx, |_, window, cx| { + window.spawn(cx, async move |cx| { + AgentPanel::await_and_rollback_on_failure(creation_infos, fs_clone, cx).await + }) + }) + .unwrap() + .await; + + assert!( + result.is_err(), + "should return error when receiver is canceled" + ); + let err_msg = result.unwrap_err().to_string(); + assert!( + err_msg.contains("canceled"), + "error should mention cancellation: {err_msg}" + ); + } + + #[gpui::test] + async fn test_rollback_cleans_up_orphan_directories(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + cx.update(|cx| { + cx.update_flags(true, vec!["agent-v2".to_string()]); + agent::ThreadStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + ::set_global(fs.clone(), cx); + }); + + fs.insert_tree( + "/project", + json!({ + ".git": {}, + "src": { "main.rs": "fn main() {}" } + }), + ) + .await; + + let project = Project::test(fs.clone(), [Path::new("/project")], cx).await; + cx.executor().run_until_parked(); + + let repository = project.read_with(cx, |project, cx| { + project.repositories(cx).values().next().unwrap().clone() + }); + + let multi_workspace = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + // Simulate the orphan state: create_dir_all was called but git + // worktree add failed, leaving a directory with leftover files. 
+ let orphan_path = PathBuf::from("/worktrees/branch/orphan_project"); + fs.insert_tree( + "/worktrees/branch/orphan_project", + json!({ "leftover.txt": "junk" }), + ) + .await; + + assert!( + fs.is_dir(&orphan_path).await, + "orphan dir should exist before rollback" + ); + + let (sender, receiver) = futures::channel::oneshot::channel::>(); + sender.send(Err(anyhow!("hook failed"))).unwrap(); + + let creation_infos = vec![(repository.clone(), orphan_path.clone(), receiver)]; + + let fs_clone = fs.clone(); + let result = multi_workspace + .update(cx, |_, window, cx| { + window.spawn(cx, async move |cx| { + AgentPanel::await_and_rollback_on_failure(creation_infos, fs_clone, cx).await + }) + }) + .unwrap() + .await; + + cx.executor().run_until_parked(); + + assert!(result.is_err()); + assert!( + !fs.is_dir(&orphan_path).await, + "orphan worktree directory should be removed by filesystem cleanup" + ); + } + #[gpui::test] async fn test_worktree_creation_for_remote_project( cx: &mut TestAppContext, From b3bcfc6060be636b1d334e4db20b242324af6a68 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Thu, 9 Apr 2026 14:04:57 -0400 Subject: [PATCH 17/67] Remove `Fix with Assistant` (#53521) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - The "Fix with Assistant" code action for diagnostics has been removed. The inline assistant remains available and can be deployed with the `assistant: inline assist` action. 
--- crates/agent_ui/src/inline_assistant.rs | 178 +----------------------- 1 file changed, 5 insertions(+), 173 deletions(-) diff --git a/crates/agent_ui/src/inline_assistant.rs b/crates/agent_ui/src/inline_assistant.rs index 39d70790e0d4a18554b2a1c11510e529d921cd1b..f2beb719cc7e5638cfc36f339419bda405a8e773 100644 --- a/crates/agent_ui/src/inline_assistant.rs +++ b/crates/agent_ui/src/inline_assistant.rs @@ -1,10 +1,8 @@ use language_models::provider::anthropic::telemetry::{ AnthropicCompletionType, AnthropicEventData, AnthropicEventType, report_anthropic_event, }; -use std::cmp; use std::mem; use std::ops::Range; -use std::rc::Rc; use std::sync::Arc; use uuid::Uuid; @@ -27,8 +25,8 @@ use editor::RowExt; use editor::SelectionEffects; use editor::scroll::ScrollOffset; use editor::{ - Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorEvent, HighlightKey, MultiBuffer, - MultiBufferSnapshot, ToOffset as _, ToPoint, + Anchor, AnchorRangeExt, Editor, EditorEvent, HighlightKey, MultiBuffer, MultiBufferSnapshot, + ToOffset as _, ToPoint, actions::SelectAll, display_map::{ BlockContext, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, EditorMargins, @@ -45,15 +43,14 @@ use language::{Buffer, Point, Selection, TransactionId}; use language_model::{ConfigurationError, ConfiguredModel, LanguageModelRegistry}; use multi_buffer::MultiBufferRow; use parking_lot::Mutex; -use project::{CodeAction, DisableAiSettings, LspAction, Project, ProjectTransaction}; +use project::{DisableAiSettings, Project}; use prompt_store::{PromptBuilder, PromptStore}; use settings::{Settings, SettingsStore}; use terminal_view::{TerminalView, terminal_panel::TerminalPanel}; -use text::{OffsetRangeExt, ToPoint as _}; use ui::prelude::*; use util::{RangeExt, ResultExt, maybe}; -use workspace::{ItemHandle, Toast, Workspace, dock::Panel, notifications::NotificationId}; +use workspace::{Toast, Workspace, dock::Panel, notifications::NotificationId}; use zed_actions::agent::OpenSettings; pub 
fn init(fs: Arc, prompt_builder: Arc, cx: &mut App) { @@ -184,7 +181,7 @@ impl InlineAssistant { fn handle_workspace_event( &mut self, - workspace: Entity, + _workspace: Entity, event: &workspace::Event, window: &mut Window, cx: &mut App, @@ -203,51 +200,10 @@ impl InlineAssistant { } } } - workspace::Event::ItemAdded { item } => { - self.register_workspace_item(&workspace, item.as_ref(), window, cx); - } _ => (), } } - fn register_workspace_item( - &mut self, - workspace: &Entity, - item: &dyn ItemHandle, - window: &mut Window, - cx: &mut App, - ) { - let is_ai_enabled = !DisableAiSettings::get_global(cx).disable_ai; - - if let Some(editor) = item.act_as::(cx) { - editor.update(cx, |editor, cx| { - if is_ai_enabled { - editor.add_code_action_provider( - Rc::new(AssistantCodeActionProvider { - editor: cx.entity().downgrade(), - workspace: workspace.downgrade(), - }), - window, - cx, - ); - - if DisableAiSettings::get_global(cx).disable_ai { - // Cancel any active edit predictions - if editor.has_active_edit_prediction() { - editor.cancel(&Default::default(), window, cx); - } - } - } else { - editor.remove_code_action_provider( - ASSISTANT_CODE_ACTION_PROVIDER_ID.into(), - window, - cx, - ); - } - }); - } - } - pub fn inline_assist( workspace: &mut Workspace, action: &zed_actions::assistant::InlineAssist, @@ -1875,130 +1831,6 @@ struct InlineAssistDecorations { end_block_id: CustomBlockId, } -struct AssistantCodeActionProvider { - editor: WeakEntity, - workspace: WeakEntity, -} - -const ASSISTANT_CODE_ACTION_PROVIDER_ID: &str = "assistant"; - -impl CodeActionProvider for AssistantCodeActionProvider { - fn id(&self) -> Arc { - ASSISTANT_CODE_ACTION_PROVIDER_ID.into() - } - - fn code_actions( - &self, - buffer: &Entity, - range: Range, - _: &mut Window, - cx: &mut App, - ) -> Task>> { - if !AgentSettings::get_global(cx).enabled(cx) { - return Task::ready(Ok(Vec::new())); - } - - let snapshot = buffer.read(cx).snapshot(); - let mut range = range.to_point(&snapshot); - 
- // Expand the range to line boundaries. - range.start.column = 0; - range.end.column = snapshot.line_len(range.end.row); - - let mut has_diagnostics = false; - for diagnostic in snapshot.diagnostics_in_range::<_, Point>(range.clone(), false) { - range.start = cmp::min(range.start, diagnostic.range.start); - range.end = cmp::max(range.end, diagnostic.range.end); - has_diagnostics = true; - } - if has_diagnostics { - let symbols_containing_start = snapshot.symbols_containing(range.start, None); - if let Some(symbol) = symbols_containing_start.last() { - range.start = cmp::min(range.start, symbol.range.start.to_point(&snapshot)); - range.end = cmp::max(range.end, symbol.range.end.to_point(&snapshot)); - } - let symbols_containing_end = snapshot.symbols_containing(range.end, None); - if let Some(symbol) = symbols_containing_end.last() { - range.start = cmp::min(range.start, symbol.range.start.to_point(&snapshot)); - range.end = cmp::max(range.end, symbol.range.end.to_point(&snapshot)); - } - - Task::ready(Ok(vec![CodeAction { - server_id: language::LanguageServerId(0), - range: snapshot.anchor_before(range.start)..snapshot.anchor_after(range.end), - lsp_action: LspAction::Action(Box::new(lsp::CodeAction { - title: "Fix with Assistant".into(), - ..Default::default() - })), - resolved: true, - }])) - } else { - Task::ready(Ok(Vec::new())) - } - } - - fn apply_code_action( - &self, - _buffer: Entity, - action: CodeAction, - _push_to_history: bool, - window: &mut Window, - cx: &mut App, - ) -> Task> { - let editor = self.editor.clone(); - let workspace = self.workspace.clone(); - let prompt_store = PromptStore::global(cx); - window.spawn(cx, async move |cx| { - let workspace = workspace.upgrade().context("workspace was released")?; - let (thread_store, history) = cx.update(|_window, cx| { - let panel = workspace - .read(cx) - .panel::(cx) - .context("missing agent panel")? 
- .read(cx); - - let history = panel - .connection_store() - .read(cx) - .entry(&crate::Agent::NativeAgent) - .and_then(|e| e.read(cx).history()) - .map(|h| h.downgrade()); - - anyhow::Ok((panel.thread_store().clone(), history)) - })??; - let editor = editor.upgrade().context("editor was released")?; - let range = editor - .update(cx, |editor, cx| { - editor.buffer().update(cx, |multibuffer, cx| { - let multibuffer_snapshot = multibuffer.read(cx); - multibuffer_snapshot.buffer_anchor_range_to_anchor_range(action.range) - }) - }) - .context("invalid range")?; - - let prompt_store = prompt_store.await.ok(); - cx.update_global(|assistant: &mut InlineAssistant, window, cx| { - let assist_id = assistant.suggest_assist( - &editor, - range, - "Fix Diagnostics".into(), - None, - true, - workspace, - thread_store, - prompt_store, - history, - window, - cx, - ); - assistant.start_assist(assist_id, window, cx); - })?; - - Ok(ProjectTransaction::default()) - }) - } -} - fn merge_ranges(ranges: &mut Vec>, buffer: &MultiBufferSnapshot) { ranges.sort_unstable_by(|a, b| { a.start From 58e59b1822f1779563f7ea26bca1832aebfe6d08 Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Thu, 9 Apr 2026 14:11:13 -0400 Subject: [PATCH 18/67] Allow canceling unarchive (#53463) Now you can cancel the Unarchive operation if it's taking too long. (No release notes because this unarchive behavior isn't even on Preview yet.) 
Release Notes: - N/A --- crates/agent_ui/src/threads_archive_view.rs | 52 ++++- crates/sidebar/src/sidebar.rs | 228 +++++++++++--------- 2 files changed, 177 insertions(+), 103 deletions(-) diff --git a/crates/agent_ui/src/threads_archive_view.rs b/crates/agent_ui/src/threads_archive_view.rs index 4e9d8b2e0883e6648d729f2cf39832dd6bca41a8..44d3e71c170111f6d647c74f237d57705d55f183 100644 --- a/crates/agent_ui/src/threads_archive_view.rs +++ b/crates/agent_ui/src/threads_archive_view.rs @@ -26,7 +26,7 @@ use picker::{ use project::{AgentId, AgentServerStore}; use settings::Settings as _; use theme::ActiveTheme; -use ui::ThreadItem; +use ui::{AgentThreadStatus, ThreadItem}; use ui::{ Divider, KeyBinding, ListItem, ListItemSpacing, ListSubHeader, Tooltip, WithScrollbar, prelude::*, utils::platform_title_bar_height, @@ -113,6 +113,7 @@ fn fuzzy_match_positions(query: &str, text: &str) -> Option> { pub enum ThreadsArchiveViewEvent { Close, Unarchive { thread: ThreadMetadata }, + CancelRestore { session_id: acp::SessionId }, } impl EventEmitter for ThreadsArchiveView {} @@ -131,6 +132,7 @@ pub struct ThreadsArchiveView { workspace: WeakEntity, agent_connection_store: WeakEntity, agent_server_store: WeakEntity, + restoring: HashSet, } impl ThreadsArchiveView { @@ -199,6 +201,7 @@ impl ThreadsArchiveView { workspace, agent_connection_store, agent_server_store, + restoring: HashSet::default(), }; this.update_items(cx); @@ -213,6 +216,16 @@ impl ThreadsArchiveView { self.selection = None; } + pub fn mark_restoring(&mut self, session_id: &acp::SessionId, cx: &mut Context) { + self.restoring.insert(session_id.clone()); + cx.notify(); + } + + pub fn clear_restoring(&mut self, session_id: &acp::SessionId, cx: &mut Context) { + self.restoring.remove(session_id); + cx.notify(); + } + pub fn focus_filter_editor(&self, window: &mut Window, cx: &mut App) { let handle = self.filter_editor.read(cx).focus_handle(cx); handle.focus(window, cx); @@ -323,11 +336,16 @@ impl ThreadsArchiveView 
{ window: &mut Window, cx: &mut Context, ) { + if self.restoring.contains(&thread.session_id) { + return; + } + if thread.folder_paths.is_empty() { self.show_project_picker_for_thread(thread, window, cx); return; } + self.mark_restoring(&thread.session_id, cx); self.selection = None; self.reset_filter_editor_text(window, cx); cx.emit(ThreadsArchiveViewEvent::Unarchive { thread }); @@ -510,7 +528,9 @@ impl ThreadsArchiveView { IconName::Sparkle }; - ThreadItem::new(id, thread.title.clone()) + let is_restoring = self.restoring.contains(&thread.session_id); + + let base = ThreadItem::new(id, thread.title.clone()) .icon(icon) .when_some(icon_from_external_svg, |this, svg| { this.custom_icon_from_external_svg(svg) @@ -527,8 +547,31 @@ impl ThreadsArchiveView { this.hovered_index = None; } cx.notify(); - })) - .action_slot( + })); + + if is_restoring { + base.status(AgentThreadStatus::Running) + .action_slot( + IconButton::new("cancel-restore", IconName::Close) + .style(ButtonStyle::Filled) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .tooltip(Tooltip::text("Cancel Restore")) + .on_click({ + let session_id = thread.session_id.clone(); + cx.listener(move |this, _, _, cx| { + this.clear_restoring(&session_id, cx); + cx.emit(ThreadsArchiveViewEvent::CancelRestore { + session_id: session_id.clone(), + }); + cx.stop_propagation(); + }) + }), + ) + .tooltip(Tooltip::text("Restoring\u{2026}")) + .into_any_element() + } else { + base.action_slot( IconButton::new("delete-thread", IconName::Trash) .style(ButtonStyle::Filled) .icon_size(IconSize::Small) @@ -561,6 +604,7 @@ impl ThreadsArchiveView { }) }) .into_any_element() + } } } } diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs index 4d3030c1e37206831ea71beb7466e66d528ee3cd..5d660b2087198ffa96cc363764088826150d6c6b 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -441,6 +441,7 @@ pub struct Sidebar { _thread_switcher_subscriptions: Vec, 
pending_remote_thread_activation: Option, view: SidebarView, + restoring_tasks: HashMap>, recent_projects_popover_handle: PopoverMenuHandle, project_header_menu_ix: Option, _subscriptions: Vec, @@ -528,6 +529,7 @@ impl Sidebar { _thread_switcher_subscriptions: Vec::new(), pending_remote_thread_activation: None, view: SidebarView::default(), + restoring_tasks: HashMap::new(), recent_projects_popover_handle: PopoverMenuHandle::default(), project_header_menu_ix: None, _subscriptions: Vec::new(), @@ -2335,10 +2337,15 @@ impl Sidebar { cx: &mut Context, ) { let session_id = metadata.session_id.clone(); - - ThreadMetadataStore::global(cx).update(cx, |store, cx| store.unarchive(&session_id, cx)); + let weak_archive_view = match &self.view { + SidebarView::Archive(view) => Some(view.downgrade()), + _ => None, + }; if metadata.folder_paths.paths().is_empty() { + ThreadMetadataStore::global(cx) + .update(cx, |store, cx| store.unarchive(&session_id, cx)); + let active_workspace = self .multi_workspace .upgrade() @@ -2353,12 +2360,11 @@ impl Sidebar { { self.activate_thread_in_other_window(metadata, workspace, target_window, cx); } else { - // Archived thread metadata doesn't carry the remote host, - // so we construct a local-only key as a best-effort fallback. let key = ProjectGroupKey::new(None, path_list.clone()); self.open_workspace_and_activate_thread(metadata, path_list, &key, window, cx); } } + self.show_thread_list(window, cx); return; } @@ -2368,109 +2374,131 @@ impl Sidebar { .get_archived_worktrees_for_thread(session_id.0.to_string(), cx); let path_list = metadata.folder_paths.clone(); - cx.spawn_in(window, async move |this, cx| { - let archived_worktrees = task.await?; - - // No archived worktrees means the thread wasn't associated with a - // linked worktree that got deleted, so we just need to find (or - // open) a workspace that matches the thread's folder paths. 
- if archived_worktrees.is_empty() { - this.update_in(cx, |this, window, cx| { - if let Some(workspace) = - this.find_current_workspace_for_path_list(&path_list, cx) - { - this.activate_thread_locally(&metadata, &workspace, false, window, cx); - } else if let Some((target_window, workspace)) = - this.find_open_workspace_for_path_list(&path_list, cx) - { - this.activate_thread_in_other_window( - metadata, - workspace, - target_window, - cx, - ); - } else { - let key = ProjectGroupKey::new(None, path_list.clone()); - this.open_workspace_and_activate_thread( - metadata, path_list, &key, window, cx, - ); - } - })?; - return anyhow::Ok(()); - } + let task_session_id = session_id.clone(); + let restore_task = cx.spawn_in(window, async move |this, cx| { + let result: anyhow::Result<()> = async { + let archived_worktrees = task.await?; - // Restore each archived worktree back to disk via git. If the - // worktree already exists (e.g. a previous unarchive of a different - // thread on the same worktree already restored it), it's reused - // as-is. We track (old_path, restored_path) pairs so we can update - // the thread's folder_paths afterward. - let mut path_replacements: Vec<(PathBuf, PathBuf)> = Vec::new(); - for row in &archived_worktrees { - match thread_worktree_archive::restore_worktree_via_git(row, &mut *cx).await { - Ok(restored_path) => { - // The worktree is on disk now; clean up the DB record - // and git ref we created during archival. 
- thread_worktree_archive::cleanup_archived_worktree_record(row, &mut *cx) + if archived_worktrees.is_empty() { + this.update_in(cx, |this, window, cx| { + this.restoring_tasks.remove(&session_id); + ThreadMetadataStore::global(cx) + .update(cx, |store, cx| store.unarchive(&session_id, cx)); + + if let Some(workspace) = + this.find_current_workspace_for_path_list(&path_list, cx) + { + this.activate_thread_locally(&metadata, &workspace, false, window, cx); + } else if let Some((target_window, workspace)) = + this.find_open_workspace_for_path_list(&path_list, cx) + { + this.activate_thread_in_other_window( + metadata, + workspace, + target_window, + cx, + ); + } else { + let key = ProjectGroupKey::new(None, path_list.clone()); + this.open_workspace_and_activate_thread( + metadata, path_list, &key, window, cx, + ); + } + this.show_thread_list(window, cx); + })?; + return anyhow::Ok(()); + } + + let mut path_replacements: Vec<(PathBuf, PathBuf)> = Vec::new(); + for row in &archived_worktrees { + match thread_worktree_archive::restore_worktree_via_git(row, &mut *cx).await { + Ok(restored_path) => { + thread_worktree_archive::cleanup_archived_worktree_record( + row, &mut *cx, + ) .await; - path_replacements.push((row.worktree_path.clone(), restored_path)); - } - Err(error) => { - log::error!("Failed to restore worktree: {error:#}"); - this.update_in(cx, |this, _window, cx| { - if let Some(multi_workspace) = this.multi_workspace.upgrade() { - let workspace = multi_workspace.read(cx).workspace().clone(); - workspace.update(cx, |workspace, cx| { - struct RestoreWorktreeErrorToast; - workspace.show_toast( - Toast::new( - NotificationId::unique::(), - format!("Failed to restore worktree: {error:#}"), - ) - .autohide(), - cx, - ); - }); - } - }) - .ok(); - return anyhow::Ok(()); + path_replacements.push((row.worktree_path.clone(), restored_path)); + } + Err(error) => { + log::error!("Failed to restore worktree: {error:#}"); + this.update_in(cx, |this, _window, cx| { + 
this.restoring_tasks.remove(&session_id); + if let Some(weak_archive_view) = &weak_archive_view { + weak_archive_view + .update(cx, |view, cx| { + view.clear_restoring(&session_id, cx); + }) + .ok(); + } + + if let Some(multi_workspace) = this.multi_workspace.upgrade() { + let workspace = multi_workspace.read(cx).workspace().clone(); + workspace.update(cx, |workspace, cx| { + struct RestoreWorktreeErrorToast; + workspace.show_toast( + Toast::new( + NotificationId::unique::( + ), + format!("Failed to restore worktree: {error:#}"), + ) + .autohide(), + cx, + ); + }); + } + }) + .ok(); + return anyhow::Ok(()); + } } } - } - if !path_replacements.is_empty() { - // Update the thread's stored folder_paths: swap each old - // worktree path for the restored path (which may differ if - // the worktree was restored to a new location). - cx.update(|_window, cx| { - store.update(cx, |store, cx| { - store.update_restored_worktree_paths(&session_id, &path_replacements, cx); - }); - })?; + if !path_replacements.is_empty() { + cx.update(|_window, cx| { + store.update(cx, |store, cx| { + store.update_restored_worktree_paths( + &session_id, + &path_replacements, + cx, + ); + }); + })?; - // Re-read the metadata (now with updated paths) and open - // the workspace so the user lands in the restored worktree. 
- let updated_metadata = - cx.update(|_window, cx| store.read(cx).entry(&session_id).cloned())?; + let updated_metadata = + cx.update(|_window, cx| store.read(cx).entry(&session_id).cloned())?; - if let Some(updated_metadata) = updated_metadata { - let new_paths = updated_metadata.folder_paths.clone(); - this.update_in(cx, |this, window, cx| { - let key = ProjectGroupKey::new(None, new_paths.clone()); - this.open_workspace_and_activate_thread( - updated_metadata, - new_paths, - &key, - window, - cx, - ); - })?; + if let Some(updated_metadata) = updated_metadata { + let new_paths = updated_metadata.folder_paths.clone(); + + cx.update(|_window, cx| { + store.update(cx, |store, cx| { + store.unarchive(&updated_metadata.session_id, cx); + }); + })?; + + this.update_in(cx, |this, window, cx| { + this.restoring_tasks.remove(&session_id); + let key = ProjectGroupKey::new(None, new_paths.clone()); + this.open_workspace_and_activate_thread( + updated_metadata, + new_paths, + &key, + window, + cx, + ); + this.show_thread_list(window, cx); + })?; + } } - } - anyhow::Ok(()) - }) - .detach_and_log_err(cx); + anyhow::Ok(()) + } + .await; + if let Err(error) = result { + log::error!("{error:#}"); + } + }); + self.restoring_tasks.insert(task_session_id, restore_task); } fn expand_selected_entry( @@ -4367,9 +4395,11 @@ impl Sidebar { this.show_thread_list(window, cx); } ThreadsArchiveViewEvent::Unarchive { thread } => { - this.show_thread_list(window, cx); this.activate_archived_thread(thread.clone(), window, cx); } + ThreadsArchiveViewEvent::CancelRestore { session_id } => { + this.restoring_tasks.remove(session_id); + } }, ); From 5d32b56e07fa7fb69144ef14590cdd1b99ef6bec Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Thu, 9 Apr 2026 14:11:55 -0400 Subject: [PATCH 19/67] Disambiguate project names (#52848) Disambiguate project names in the sidebar (and project picker) so that we don't show e.g. 
`zed, zed` but rather `foo/zed, bar/zed` if the last path component is the same but they are different absolute paths. Release Notes: - N/A --- crates/project/src/project.rs | 34 +- crates/recent_projects/src/recent_projects.rs | 57 +++- crates/sidebar/src/sidebar.rs | 23 +- crates/util/src/disambiguate.rs | 202 ++++++++++++ crates/util/src/util.rs | 1 + crates/workspace/src/pane.rs | 33 +- crates/zed/src/visual_test_runner.rs | 294 ++++++++++++++++++ 7 files changed, 587 insertions(+), 57 deletions(-) create mode 100644 crates/util/src/disambiguate.rs diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index abfea741aeac56bfb921560a505e11281a254fe2..39e0cc9a0a00f4cd5861e60b1b100a8afef93eb8 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -6131,30 +6131,48 @@ impl ProjectGroupKey { Self { paths, host } } - pub fn display_name(&self) -> SharedString { + pub fn path_list(&self) -> &PathList { + &self.paths + } + + pub fn display_name( + &self, + path_detail_map: &std::collections::HashMap, + ) -> SharedString { let mut names = Vec::with_capacity(self.paths.paths().len()); for abs_path in self.paths.paths() { - if let Some(name) = abs_path.file_name() { - names.push(name.to_string_lossy().to_string()); + let detail = path_detail_map.get(abs_path).copied().unwrap_or(0); + let suffix = path_suffix(abs_path, detail); + if !suffix.is_empty() { + names.push(suffix); } } if names.is_empty() { - // TODO: Can we do something better in this case? 
"Empty Workspace".into() } else { names.join(", ").into() } } - pub fn path_list(&self) -> &PathList { - &self.paths - } - pub fn host(&self) -> Option { self.host.clone() } } +pub fn path_suffix(path: &Path, detail: usize) -> String { + let mut components: Vec<_> = path + .components() + .rev() + .filter_map(|component| match component { + std::path::Component::Normal(s) => Some(s.to_string_lossy()), + _ => None, + }) + .take(detail + 1) + .collect(); + components.reverse(); + components.join("/") +} + pub struct PathMatchCandidateSet { pub snapshot: Snapshot, pub include_ignored: bool, diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index 9bf496817103246abece6891ead8fd32196cef3b..9a6015ba843b06dfe678fee1b5de2fac38295849 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -99,27 +99,40 @@ pub async fn get_recent_projects( .await .unwrap_or_default(); - let entries: Vec = workspaces + let filtered: Vec<_> = workspaces .into_iter() .filter(|(id, _, _, _)| Some(*id) != current_workspace_id) .filter(|(_, location, _, _)| matches!(location, SerializedWorkspaceLocation::Local)) + .collect(); + + let mut all_paths: Vec = filtered + .iter() + .flat_map(|(_, _, path_list, _)| path_list.paths().iter().cloned()) + .collect(); + all_paths.sort(); + all_paths.dedup(); + let path_details = + util::disambiguate::compute_disambiguation_details(&all_paths, |path, detail| { + project::path_suffix(path, detail) + }); + let path_detail_map: std::collections::HashMap = + all_paths.into_iter().zip(path_details).collect(); + + let entries: Vec = filtered + .into_iter() .map(|(workspace_id, _, path_list, timestamp)| { let paths: Vec = path_list.paths().to_vec(); let ordered_paths: Vec<&PathBuf> = path_list.ordered_paths().collect(); - let name = if ordered_paths.len() == 1 { - ordered_paths[0] - .file_name() - .map(|n| n.to_string_lossy().to_string()) - 
.unwrap_or_else(|| ordered_paths[0].to_string_lossy().to_string()) - } else { - ordered_paths - .iter() - .filter_map(|p| p.file_name()) - .map(|n| n.to_string_lossy().to_string()) - .collect::>() - .join(", ") - }; + let name = ordered_paths + .iter() + .map(|p| { + let detail = path_detail_map.get(*p).copied().unwrap_or(0); + project::path_suffix(p, detail) + }) + .filter(|s| !s.is_empty()) + .collect::>() + .join(", "); let full_path = ordered_paths .iter() @@ -172,6 +185,19 @@ fn get_open_folders(workspace: &Workspace, cx: &App) -> Vec { .map(|wt| wt.read(cx).id()) }); + let mut all_paths: Vec = visible_worktrees + .iter() + .map(|wt| wt.read(cx).abs_path().to_path_buf()) + .collect(); + all_paths.sort(); + all_paths.dedup(); + let path_details = + util::disambiguate::compute_disambiguation_details(&all_paths, |path, detail| { + project::path_suffix(path, detail) + }); + let path_detail_map: std::collections::HashMap = + all_paths.into_iter().zip(path_details).collect(); + let git_store = project.git_store().read(cx); let repositories: Vec<_> = git_store.repositories().values().cloned().collect(); @@ -180,8 +206,9 @@ fn get_open_folders(workspace: &Workspace, cx: &App) -> Vec { .map(|worktree| { let worktree_ref = worktree.read(cx); let worktree_id = worktree_ref.id(); - let name = SharedString::from(worktree_ref.root_name().as_unix_str().to_string()); let path = worktree_ref.abs_path().to_path_buf(); + let detail = path_detail_map.get(&path).copied().unwrap_or(0); + let name = SharedString::from(project::path_suffix(&path, detail)); let branch = get_branch_for_worktree(worktree_ref, &repositories, cx); let is_active = active_worktree_id == Some(worktree_id); OpenFolderEntry { diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs index 5d660b2087198ffa96cc363764088826150d6c6b..57fe5a04ac6e656f790d72b1a99dff3e14fa8ead 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -883,12 +883,27 @@ impl Sidebar { (icon, 
icon_from_external_svg) }; - for (group_key, group_workspaces) in mw.project_groups(cx) { + let groups: Vec<_> = mw.project_groups(cx).collect(); + + let mut all_paths: Vec = groups + .iter() + .flat_map(|(key, _)| key.path_list().paths().iter().cloned()) + .collect(); + all_paths.sort(); + all_paths.dedup(); + let path_details = + util::disambiguate::compute_disambiguation_details(&all_paths, |path, detail| { + project::path_suffix(path, detail) + }); + let path_detail_map: HashMap = + all_paths.into_iter().zip(path_details).collect(); + + for (group_key, group_workspaces) in &groups { if group_key.path_list().paths().is_empty() { continue; } - let label = group_key.display_name(); + let label = group_key.display_name(&path_detail_map); let is_collapsed = self.collapsed_groups.contains(&group_key); let should_load_threads = !is_collapsed || !query.is_empty(); @@ -989,7 +1004,7 @@ impl Sidebar { // Load any legacy threads for any single linked wortree of this project group. let mut linked_worktree_paths = HashSet::new(); - for workspace in &group_workspaces { + for workspace in group_workspaces { if workspace.read(cx).visible_worktrees(cx).count() != 1 { continue; } @@ -1192,7 +1207,7 @@ impl Sidebar { None }; let thread_store = ThreadMetadataStore::global(cx); - for ws in &group_workspaces { + for ws in group_workspaces { if Some(ws.entity_id()) == draft_ws_id { continue; } diff --git a/crates/util/src/disambiguate.rs b/crates/util/src/disambiguate.rs new file mode 100644 index 0000000000000000000000000000000000000000..490182598b52ab3419633d0c56700e85f91d81a9 --- /dev/null +++ b/crates/util/src/disambiguate.rs @@ -0,0 +1,202 @@ +use std::collections::HashMap; +use std::hash::Hash; + +/// Computes the minimum detail level needed for each item so that no two items +/// share the same description. 
Items whose descriptions are unique at level 0 +/// stay at 0; items that collide get their detail level incremented until either +/// the collision is resolved or increasing the level no longer changes the +/// description (preventing infinite loops for truly identical items). +/// +/// The `get_description` closure must return a sequence that eventually reaches +/// a "fixed point" where increasing `detail` no longer changes the output. If +/// an item reaches its fixed point, it is assumed it will no longer change and +/// will no longer be checked for collisions. +pub fn compute_disambiguation_details( + items: &[T], + get_description: impl Fn(&T, usize) -> D, +) -> Vec +where + D: Eq + Hash + Clone, +{ + let mut details = vec![0usize; items.len()]; + let mut descriptions: HashMap> = HashMap::default(); + let mut current_descriptions: Vec = + items.iter().map(|item| get_description(item, 0)).collect(); + + loop { + let mut any_collisions = false; + + for (index, (item, &detail)) in items.iter().zip(&details).enumerate() { + if detail > 0 { + let new_description = get_description(item, detail); + if new_description == current_descriptions[index] { + continue; + } + current_descriptions[index] = new_description; + } + descriptions + .entry(current_descriptions[index].clone()) + .or_insert_with(Vec::new) + .push(index); + } + + for (_, indices) in descriptions.drain() { + if indices.len() > 1 { + any_collisions = true; + for index in indices { + details[index] += 1; + } + } + } + + if !any_collisions { + break; + } + } + + details +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_no_conflicts() { + let items = vec!["alpha", "beta", "gamma"]; + let details = compute_disambiguation_details(&items, |item, _detail| item.to_string()); + assert_eq!(details, vec![0, 0, 0]); + } + + #[test] + fn test_simple_two_way_conflict() { + // Two items with the same base name but different parents. 
+ let items = vec![("src/foo.rs", "foo.rs"), ("lib/foo.rs", "foo.rs")]; + let details = compute_disambiguation_details(&items, |item, detail| match detail { + 0 => item.1.to_string(), + _ => item.0.to_string(), + }); + assert_eq!(details, vec![1, 1]); + } + + #[test] + fn test_three_way_conflict() { + let items = vec![ + ("foo.rs", "a/foo.rs"), + ("foo.rs", "b/foo.rs"), + ("foo.rs", "c/foo.rs"), + ]; + let details = compute_disambiguation_details(&items, |item, detail| match detail { + 0 => item.0.to_string(), + _ => item.1.to_string(), + }); + assert_eq!(details, vec![1, 1, 1]); + } + + #[test] + fn test_deeper_conflict() { + // At detail 0, all three show "file.rs". + // At detail 1, items 0 and 1 both show "src/file.rs", item 2 shows "lib/file.rs". + // At detail 2, item 0 shows "a/src/file.rs", item 1 shows "b/src/file.rs". + let items = vec![ + vec!["file.rs", "src/file.rs", "a/src/file.rs"], + vec!["file.rs", "src/file.rs", "b/src/file.rs"], + vec!["file.rs", "lib/file.rs", "x/lib/file.rs"], + ]; + let details = compute_disambiguation_details(&items, |item, detail| { + let clamped = detail.min(item.len() - 1); + item[clamped].to_string() + }); + assert_eq!(details, vec![2, 2, 1]); + } + + #[test] + fn test_mixed_conflicting_and_unique() { + let items = vec![ + ("src/foo.rs", "foo.rs"), + ("lib/foo.rs", "foo.rs"), + ("src/bar.rs", "bar.rs"), + ]; + let details = compute_disambiguation_details(&items, |item, detail| match detail { + 0 => item.1.to_string(), + _ => item.0.to_string(), + }); + assert_eq!(details, vec![1, 1, 0]); + } + + #[test] + fn test_identical_items_terminates() { + // All items return the same description at every detail level. + // The algorithm must terminate rather than looping forever. 
+ let items = vec!["same", "same", "same"]; + let details = compute_disambiguation_details(&items, |item, _detail| item.to_string()); + // After bumping to 1, the description doesn't change from level 0, + // so the items are skipped and the loop terminates. + assert_eq!(details, vec![1, 1, 1]); + } + + #[test] + fn test_single_item() { + let items = vec!["only"]; + let details = compute_disambiguation_details(&items, |item, _detail| item.to_string()); + assert_eq!(details, vec![0]); + } + + #[test] + fn test_empty_input() { + let items: Vec<&str> = vec![]; + let details = compute_disambiguation_details(&items, |item, _detail| item.to_string()); + let expected: Vec = vec![]; + assert_eq!(details, expected); + } + + #[test] + fn test_duplicate_paths_from_multiple_groups() { + use std::path::Path; + + // Simulates the sidebar scenario: a path like /Users/rtfeldman/code/zed + // appears in two project groups (e.g. "zed" alone and "zed, roc"). + // After deduplication, only unique paths should be disambiguated. + // + // Paths: + // /Users/rtfeldman/code/worktrees/zed/focal-arrow/zed (group 1) + // /Users/rtfeldman/code/zed (group 2) + // /Users/rtfeldman/code/zed (group 3, same path as group 2) + // /Users/rtfeldman/code/roc (group 3) + // + // A naive flat_map collects duplicates. The duplicate /code/zed entries + // collide with each other and drive the detail to the full path. + // The fix is to deduplicate before disambiguating. 
+ + fn path_suffix(path: &Path, detail: usize) -> String { + let mut components: Vec<_> = path + .components() + .rev() + .filter_map(|c| match c { + std::path::Component::Normal(s) => Some(s.to_string_lossy()), + _ => None, + }) + .take(detail + 1) + .collect(); + components.reverse(); + components.join("/") + } + + let all_paths: Vec<&Path> = vec![ + Path::new("/Users/rtfeldman/code/worktrees/zed/focal-arrow/zed"), + Path::new("/Users/rtfeldman/code/zed"), + Path::new("/Users/rtfeldman/code/roc"), + ]; + + let details = + compute_disambiguation_details(&all_paths, |path, detail| path_suffix(path, detail)); + + // focal-arrow/zed and code/zed both end in "zed", so they need detail 1. + // "roc" is unique at detail 0. + assert_eq!(details, vec![1, 1, 0]); + + assert_eq!(path_suffix(all_paths[0], details[0]), "focal-arrow/zed"); + assert_eq!(path_suffix(all_paths[1], details[1]), "code/zed"); + assert_eq!(path_suffix(all_paths[2], details[2]), "roc"); + } +} diff --git a/crates/util/src/util.rs b/crates/util/src/util.rs index bd8ab4e2d4d99864c5e0dc228410904f3338d7c6..3b704e50a531c5302024e215754cb9a866f0036b 100644 --- a/crates/util/src/util.rs +++ b/crates/util/src/util.rs @@ -1,5 +1,6 @@ pub mod archive; pub mod command; +pub mod disambiguate; pub mod fs; pub mod markdown; pub mod path_list; diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index cbcd60b734644cb61473bef85e27f2403e3c7d3c..785d4111a38ad859f415983209bcd1eeae484d5e 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -4897,36 +4897,9 @@ fn dirty_message_for(buffer_path: Option, path_style: PathStyle) -> } pub fn tab_details(items: &[Box], _window: &Window, cx: &App) -> Vec { - let mut tab_details = items.iter().map(|_| 0).collect::>(); - let mut tab_descriptions = HashMap::default(); - let mut done = false; - while !done { - done = true; - - // Store item indices by their tab description. 
- for (ix, (item, detail)) in items.iter().zip(&tab_details).enumerate() { - let description = item.tab_content_text(*detail, cx); - if *detail == 0 || description != item.tab_content_text(detail - 1, cx) { - tab_descriptions - .entry(description) - .or_insert(Vec::new()) - .push(ix); - } - } - - // If two or more items have the same tab description, increase their level - // of detail and try again. - for (_, item_ixs) in tab_descriptions.drain() { - if item_ixs.len() > 1 { - done = false; - for ix in item_ixs { - tab_details[ix] += 1; - } - } - } - } - - tab_details + util::disambiguate::compute_disambiguation_details(items, |item, detail| { + item.tab_content_text(detail, cx) + }) } pub fn render_item_indicator(item: Box, cx: &App) -> Option { diff --git a/crates/zed/src/visual_test_runner.rs b/crates/zed/src/visual_test_runner.rs index af1a60589483443e56506e7eeb7a8424d16a4143..2f043bfb0c9e66d4ee56bfc78d0b9d69244d3777 100644 --- a/crates/zed/src/visual_test_runner.rs +++ b/crates/zed/src/visual_test_runner.rs @@ -573,6 +573,27 @@ fn run_visual_tests(project_path: PathBuf, update_baseline: bool) -> Result<()> } } + // Run Test: Sidebar with duplicate project names + println!("\n--- Test: sidebar_duplicate_names ---"); + match run_sidebar_duplicate_project_names_visual_tests( + app_state.clone(), + &mut cx, + update_baseline, + ) { + Ok(TestResult::Passed) => { + println!("✓ sidebar_duplicate_names: PASSED"); + passed += 1; + } + Ok(TestResult::BaselineUpdated(_)) => { + println!("✓ sidebar_duplicate_names: Baselines updated"); + updated += 1; + } + Err(e) => { + eprintln!("✗ sidebar_duplicate_names: FAILED - {}", e); + failed += 1; + } + } + // Run Test 9: Tool Permissions Settings UI visual test println!("\n--- Test 9: tool_permissions_settings ---"); match run_tool_permissions_visual_tests(app_state.clone(), &mut cx, update_baseline) { @@ -3069,6 +3090,279 @@ fn run_git_command(args: &[&str], dir: &std::path::Path) -> Result<()> { Ok(()) } +#[cfg(target_os = 
"macos")] +/// Helper to create a project, add a worktree at the given path, and return the project. +fn create_project_with_worktree( + worktree_dir: &Path, + app_state: &Arc, + cx: &mut VisualTestAppContext, +) -> Result> { + let project = cx.update(|cx| { + project::Project::local( + app_state.client.clone(), + app_state.node_runtime.clone(), + app_state.user_store.clone(), + app_state.languages.clone(), + app_state.fs.clone(), + None, + project::LocalProjectFlags { + init_worktree_trust: false, + ..Default::default() + }, + cx, + ) + }); + + let add_task = cx.update(|cx| { + project.update(cx, |project, cx| { + project.find_or_create_worktree(worktree_dir, true, cx) + }) + }); + + cx.background_executor.allow_parking(); + cx.foreground_executor + .block_test(add_task) + .context("Failed to add worktree")?; + cx.background_executor.forbid_parking(); + + cx.run_until_parked(); + Ok(project) +} + +#[cfg(target_os = "macos")] +fn open_sidebar_test_window( + projects: Vec>, + app_state: &Arc, + cx: &mut VisualTestAppContext, +) -> Result> { + anyhow::ensure!(!projects.is_empty(), "need at least one project"); + + let window_size = size(px(400.0), px(600.0)); + let bounds = Bounds { + origin: point(px(0.0), px(0.0)), + size: window_size, + }; + + let mut projects_iter = projects.into_iter(); + let first_project = projects_iter + .next() + .ok_or_else(|| anyhow::anyhow!("need at least one project"))?; + let remaining: Vec<_> = projects_iter.collect(); + + let multi_workspace_window: WindowHandle = cx + .update(|cx| { + cx.open_window( + WindowOptions { + window_bounds: Some(WindowBounds::Windowed(bounds)), + focus: false, + show: false, + ..Default::default() + }, + |window, cx| { + let first_ws = cx.new(|cx| { + Workspace::new(None, first_project.clone(), app_state.clone(), window, cx) + }); + cx.new(|cx| { + let mut mw = MultiWorkspace::new(first_ws, window, cx); + for project in remaining { + let ws = cx.new(|cx| { + Workspace::new(None, project, app_state.clone(), 
window, cx) + }); + mw.activate(ws, window, cx); + } + mw + }) + }, + ) + }) + .context("Failed to open MultiWorkspace window")?; + + cx.run_until_parked(); + + // Create the sidebar outside the MultiWorkspace update to avoid a + // re-entrant read panic (Sidebar::new reads the MultiWorkspace). + let sidebar = cx + .update_window(multi_workspace_window.into(), |root_view, window, cx| { + let mw_handle: Entity = root_view + .downcast() + .map_err(|_| anyhow::anyhow!("Failed to downcast root view to MultiWorkspace"))?; + Ok::<_, anyhow::Error>(cx.new(|cx| sidebar::Sidebar::new(mw_handle, window, cx))) + }) + .context("Failed to create sidebar")??; + + multi_workspace_window + .update(cx, |mw, _window, cx| { + mw.register_sidebar(sidebar.clone(), cx); + }) + .context("Failed to register sidebar")?; + + cx.run_until_parked(); + + // Open the sidebar + multi_workspace_window + .update(cx, |mw, window, cx| { + mw.toggle_sidebar(window, cx); + }) + .context("Failed to toggle sidebar")?; + + // Let rendering settle + for _ in 0..10 { + cx.advance_clock(Duration::from_millis(100)); + cx.run_until_parked(); + } + + // Refresh the window + cx.update_window(multi_workspace_window.into(), |_, window, _cx| { + window.refresh(); + })?; + + cx.run_until_parked(); + + Ok(multi_workspace_window) +} + +#[cfg(target_os = "macos")] +fn cleanup_sidebar_test_window( + window: WindowHandle, + cx: &mut VisualTestAppContext, +) -> Result<()> { + window.update(cx, |mw, _window, cx| { + for workspace in mw.workspaces() { + let project = workspace.read(cx).project().clone(); + project.update(cx, |project, cx| { + let ids: Vec<_> = project.worktrees(cx).map(|wt| wt.read(cx).id()).collect(); + for id in ids { + project.remove_worktree(id, cx); + } + }); + } + })?; + + cx.run_until_parked(); + + cx.update_window(window.into(), |_, window, _cx| { + window.remove_window(); + })?; + + cx.run_until_parked(); + + for _ in 0..15 { + cx.advance_clock(Duration::from_millis(100)); + cx.run_until_parked(); 
+ } + + Ok(()) +} + +#[cfg(target_os = "macos")] +fn run_sidebar_duplicate_project_names_visual_tests( + app_state: Arc, + cx: &mut VisualTestAppContext, + update_baseline: bool, +) -> Result { + let temp_dir = tempfile::tempdir()?; + let temp_path = temp_dir.keep(); + let canonical_temp = temp_path.canonicalize()?; + + // Create directory structure where every leaf directory is named "zed" but + // lives at a distinct path. This lets us test that the sidebar correctly + // disambiguates projects whose names would otherwise collide. + // + // code/zed/ — project1 (single worktree) + // code/foo/zed/ — project2 (single worktree) + // code/bar/zed/ — project3, first worktree + // code/baz/zed/ — project3, second worktree + // + // No two projects share a worktree path, so ProjectGroupBuilder will + // place each in its own group. + let code_zed = canonical_temp.join("code").join("zed"); + let foo_zed = canonical_temp.join("code").join("foo").join("zed"); + let bar_zed = canonical_temp.join("code").join("bar").join("zed"); + let baz_zed = canonical_temp.join("code").join("baz").join("zed"); + std::fs::create_dir_all(&code_zed)?; + std::fs::create_dir_all(&foo_zed)?; + std::fs::create_dir_all(&bar_zed)?; + std::fs::create_dir_all(&baz_zed)?; + + cx.update(|cx| { + cx.update_flags(true, vec!["agent-v2".to_string()]); + }); + + let mut has_baseline_update = None; + + // Two single-worktree projects whose leaf name is "zed" + { + let project1 = create_project_with_worktree(&code_zed, &app_state, cx)?; + let project2 = create_project_with_worktree(&foo_zed, &app_state, cx)?; + + let window = open_sidebar_test_window(vec![project1, project2], &app_state, cx)?; + + let result = run_visual_test( + "sidebar_two_projects_same_leaf_name", + window.into(), + cx, + update_baseline, + ); + + cleanup_sidebar_test_window(window, cx)?; + match result? 
{ + TestResult::Passed => {} + TestResult::BaselineUpdated(path) => { + has_baseline_update = Some(path); + } + } + } + + // Three projects, third has two worktrees (all leaf names "zed") + // + // project1: code/zed + // project2: code/foo/zed + // project3: code/bar/zed + code/baz/zed + // + // Each project has a unique set of worktree paths, so they form + // separate groups. The sidebar must disambiguate all three. + { + let project1 = create_project_with_worktree(&code_zed, &app_state, cx)?; + let project2 = create_project_with_worktree(&foo_zed, &app_state, cx)?; + + let project3 = create_project_with_worktree(&bar_zed, &app_state, cx)?; + let add_second_worktree = cx.update(|cx| { + project3.update(cx, |project, cx| { + project.find_or_create_worktree(&baz_zed, true, cx) + }) + }); + cx.background_executor.allow_parking(); + cx.foreground_executor + .block_test(add_second_worktree) + .context("Failed to add second worktree to project 3")?; + cx.background_executor.forbid_parking(); + cx.run_until_parked(); + + let window = open_sidebar_test_window(vec![project1, project2, project3], &app_state, cx)?; + + let result = run_visual_test( + "sidebar_three_projects_with_multi_worktree", + window.into(), + cx, + update_baseline, + ); + + cleanup_sidebar_test_window(window, cx)?; + match result? 
{ + TestResult::Passed => {} + TestResult::BaselineUpdated(path) => { + has_baseline_update = Some(path); + } + } + } + + if let Some(path) = has_baseline_update { + Ok(TestResult::BaselineUpdated(path)) + } else { + Ok(TestResult::Passed) + } +} + #[cfg(all(target_os = "macos", feature = "visual-tests"))] fn run_start_thread_in_selector_visual_tests( app_state: Arc, From 78c2e0d778d76c3bdf2e7f79909c666d968d07f2 Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Thu, 9 Apr 2026 16:03:25 -0400 Subject: [PATCH 20/67] Scope worktree creation spinner to the thread that initiated it (#53544) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The "Creating Worktree…" spinner, error banner, and disabled selector state were stored as panel-wide state. This meant switching to a different thread while a worktree was being created would still show the spinner on the new thread. Release Notes: - N/A --- crates/agent_ui/src/agent_panel.rs | 63 ++++++++++++++++++++---------- 1 file changed, 43 insertions(+), 20 deletions(-) diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 3c735832f7ca30deca30977f12506697df25841f..adb255ea62454d4e3eb3dcaa3f0d6cf481f66472 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -56,8 +56,9 @@ use extension_host::ExtensionStore; use fs::Fs; use gpui::{ Action, Animation, AnimationExt, AnyElement, App, AsyncWindowContext, ClipboardItem, Corner, - DismissEvent, Entity, EventEmitter, ExternalPaths, FocusHandle, Focusable, KeyContext, Pixels, - Subscription, Task, UpdateGlobal, WeakEntity, prelude::*, pulsating_between, + DismissEvent, Entity, EntityId, EventEmitter, ExternalPaths, FocusHandle, Focusable, + KeyContext, Pixels, Subscription, Task, UpdateGlobal, WeakEntity, prelude::*, + pulsating_between, }; use language::LanguageRegistry; use language_model::LanguageModelRegistry; @@ -819,7 +820,7 @@ pub struct AgentPanel { 
agent_layout_onboarding_dismissed: AtomicBool, selected_agent: Agent, start_thread_in: StartThreadIn, - worktree_creation_status: Option, + worktree_creation_status: Option<(EntityId, WorktreeCreationStatus)>, _thread_view_subscription: Option, _active_thread_focus_subscription: Option, _worktree_creation_task: Option>, @@ -2903,7 +2904,9 @@ impl AgentPanel { window: &mut Window, cx: &mut Context, ) { - self.worktree_creation_status = Some(WorktreeCreationStatus::Error(message)); + if let Some((_, status)) = &mut self.worktree_creation_status { + *status = WorktreeCreationStatus::Error(message); + } if matches!(self.active_view, ActiveView::Uninitialized) { let selected_agent = self.selected_agent.clone(); self.new_agent_thread(selected_agent, window, cx); @@ -2920,12 +2923,17 @@ impl AgentPanel { ) { if matches!( self.worktree_creation_status, - Some(WorktreeCreationStatus::Creating) + Some((_, WorktreeCreationStatus::Creating)) ) { return; } - self.worktree_creation_status = Some(WorktreeCreationStatus::Creating); + let conversation_view_id = self + .active_conversation_view() + .map(|v| v.entity_id()) + .unwrap_or_else(|| EntityId::from(0u64)); + self.worktree_creation_status = + Some((conversation_view_id, WorktreeCreationStatus::Creating)); cx.notify(); let (git_repos, non_git_paths) = self.classify_worktrees(cx); @@ -3441,7 +3449,7 @@ impl Panel for AgentPanel { && matches!(self.active_view, ActiveView::Uninitialized) && !matches!( self.worktree_creation_status, - Some(WorktreeCreationStatus::Creating) + Some((_, WorktreeCreationStatus::Creating)) ) { let selected_agent = self.selected_agent.clone(); @@ -3681,13 +3689,19 @@ impl AgentPanel { !self.project.read(cx).repositories(cx).is_empty() } + fn is_active_view_creating_worktree(&self, _cx: &App) -> bool { + match &self.worktree_creation_status { + Some((view_id, WorktreeCreationStatus::Creating)) => { + self.active_conversation_view().map(|v| v.entity_id()) == Some(*view_id) + } + _ => false, + } + } + fn 
render_start_thread_in_selector(&self, cx: &mut Context) -> impl IntoElement { let focus_handle = self.focus_handle(cx); - let is_creating = matches!( - self.worktree_creation_status, - Some(WorktreeCreationStatus::Creating) - ); + let is_creating = self.is_active_view_creating_worktree(cx); let trigger_parts = self .start_thread_in @@ -3740,10 +3754,7 @@ impl AgentPanel { } fn render_new_worktree_branch_selector(&self, cx: &mut Context) -> impl IntoElement { - let is_creating = matches!( - self.worktree_creation_status, - Some(WorktreeCreationStatus::Creating) - ); + let is_creating = self.is_active_view_creating_worktree(cx); let project_ref = self.project.read(cx); let trigger_parts = self @@ -4211,7 +4222,11 @@ impl AgentPanel { } fn render_worktree_creation_status(&self, cx: &mut Context) -> Option { - let status = self.worktree_creation_status.as_ref()?; + let (view_id, status) = self.worktree_creation_status.as_ref()?; + let active_view_id = self.active_conversation_view().map(|v| v.entity_id()); + if active_view_id != Some(*view_id) { + return None; + } match status { WorktreeCreationStatus::Creating => Some( h_flex() @@ -4751,10 +4766,11 @@ impl AgentPanel { /// /// This is a test-only helper for visual tests. pub fn worktree_creation_status_for_tests(&self) -> Option<&WorktreeCreationStatus> { - self.worktree_creation_status.as_ref() + self.worktree_creation_status.as_ref().map(|(_, s)| s) } - /// Sets the worktree creation status directly. + /// Sets the worktree creation status directly, associating it with the + /// currently active conversation view. /// /// This is a test-only helper for visual tests that need to show the /// "Creating worktree…" spinner or error banners. 
@@ -4763,7 +4779,13 @@ impl AgentPanel { status: Option, cx: &mut Context, ) { - self.worktree_creation_status = status; + self.worktree_creation_status = status.map(|s| { + let view_id = self + .active_conversation_view() + .map(|v| v.entity_id()) + .unwrap_or_else(|| EntityId::from(0u64)); + (view_id, s) + }); cx.notify(); } @@ -6011,7 +6033,8 @@ mod tests { // Simulate worktree creation in progress and reset to Uninitialized panel.update_in(cx, |panel, window, cx| { - panel.worktree_creation_status = Some(WorktreeCreationStatus::Creating); + panel.worktree_creation_status = + Some((EntityId::from(0u64), WorktreeCreationStatus::Creating)); panel.active_view = ActiveView::Uninitialized; Panel::set_active(panel, true, window, cx); assert!( From bd50418150e4673149a8cc551949acacd0f49d36 Mon Sep 17 00:00:00 2001 From: KyleBarton Date: Thu, 9 Apr 2026 13:27:29 -0700 Subject: [PATCH 21/67] Fix dockerfile image and alias parsing (#53538) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #52928 Release Notes: - Fixed handling of multi-stage and stage-specified dockerfiles in dev container manifests - Fixed the way we find the base image in a dev container when build args need expansion --- Cargo.lock | 1 + crates/dev_container/Cargo.toml | 1 + crates/dev_container/src/devcontainer_json.rs | 41 +- .../src/devcontainer_manifest.rs | 516 +++++++++++++----- crates/dev_container/src/docker.rs | 2 + 5 files changed, 422 insertions(+), 139 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 67495074258f02a658b5b95eb9b8e6625d6cbeb0..1e5c390ac823bfe3b4c0839d2d98102b5a0e87f3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4793,6 +4793,7 
@@ dependencies = [ "paths", "picker", "project", + "regex", "serde", "serde_json", "serde_json_lenient", diff --git a/crates/dev_container/Cargo.toml b/crates/dev_container/Cargo.toml index e04b965b076fe1ba6c5a8f47e548b922dab55d4a..92c42f97a29ebae0d246f113203a5985042b4c27 100644 --- a/crates/dev_container/Cargo.toml +++ b/crates/dev_container/Cargo.toml @@ -19,6 +19,7 @@ futures.workspace = true log.workspace = true menu.workspace = true paths.workspace = true +regex.workspace = true picker.workspace = true project.workspace = true settings.workspace = true diff --git a/crates/dev_container/src/devcontainer_json.rs b/crates/dev_container/src/devcontainer_json.rs index 34ee99ed3834d76fbc24afc68aa663df037fa8da..f62f7cdde5941733582baa9d3baf761c60a53c81 100644 --- a/crates/dev_container/src/devcontainer_json.rs +++ b/crates/dev_container/src/devcontainer_json.rs @@ -138,7 +138,7 @@ pub(crate) struct ContainerBuild { context: Option, pub(crate) args: Option>, options: Option>, - target: Option, + pub(crate) target: Option, #[serde(default, deserialize_with = "deserialize_string_or_array")] cache_from: Option>, } @@ -185,8 +185,8 @@ pub(crate) enum LifecycleCommand { #[derive(Debug, PartialEq, Eq)] pub(crate) enum DevContainerBuildType { - Image, - Dockerfile, + Image(String), + Dockerfile(ContainerBuild), DockerCompose, None, } @@ -249,14 +249,15 @@ pub(crate) fn deserialize_devcontainer_json(json: &str) -> Result DevContainerBuildType { - if self.image.is_some() { - return DevContainerBuildType::Image; + if let Some(image) = &self.image { + DevContainerBuildType::Image(image.clone()) } else if self.docker_compose_file.is_some() { - return DevContainerBuildType::DockerCompose; - } else if self.build.is_some() { - return DevContainerBuildType::Dockerfile; + DevContainerBuildType::DockerCompose + } else if let Some(build) = &self.build { + DevContainerBuildType::Dockerfile(build.clone()) + } else { + DevContainerBuildType::None } - return DevContainerBuildType::None; } } 
@@ -911,7 +912,12 @@ mod test { } ); - assert_eq!(devcontainer.build_type(), DevContainerBuildType::Image); + assert_eq!( + devcontainer.build_type(), + DevContainerBuildType::Image(String::from( + "mcr.microsoft.com/devcontainers/base:ubuntu" + )) + ); } #[test] @@ -1366,7 +1372,20 @@ mod test { } ); - assert_eq!(devcontainer.build_type(), DevContainerBuildType::Dockerfile); + assert_eq!( + devcontainer.build_type(), + DevContainerBuildType::Dockerfile(ContainerBuild { + dockerfile: "DockerFile".to_string(), + context: Some("..".to_string()), + args: Some(HashMap::from([( + "MYARG".to_string(), + "MYVALUE".to_string() + )])), + options: Some(vec!["--some-option".to_string(), "--mount".to_string()]), + target: Some("development".to_string()), + cache_from: Some(vec!["some_image".to_string()]), + }) + ); } #[test] diff --git a/crates/dev_container/src/devcontainer_manifest.rs b/crates/dev_container/src/devcontainer_manifest.rs index 29dc0f9fe96d160d9362597fba4e10f86d026604..0ab80a9d2ad901d8d0eb12bef254c0f49aea0dfa 100644 --- a/crates/dev_container/src/devcontainer_manifest.rs +++ b/crates/dev_container/src/devcontainer_manifest.rs @@ -6,6 +6,8 @@ use std::{ sync::Arc, }; +use regex::Regex; + use fs::Fs; use http_client::HttpClient; use util::{ResultExt, command::Command}; @@ -217,11 +219,10 @@ impl DevContainerManifest { async fn dockerfile_location(&self) -> Option { let dev_container = self.dev_container(); match dev_container.build_type() { - DevContainerBuildType::Image => None, - DevContainerBuildType::Dockerfile => dev_container - .build - .as_ref() - .map(|build| self.config_directory.join(&build.dockerfile)), + DevContainerBuildType::Image(_) => None, + DevContainerBuildType::Dockerfile(build) => { + Some(self.config_directory.join(&build.dockerfile)) + } DevContainerBuildType::DockerCompose => { let Ok(docker_compose_manifest) = self.docker_compose_manifest().await else { return None; @@ -260,48 +261,50 @@ impl DevContainerManifest { /// - The image sourced 
in the docker-compose main service dockerfile, if one is specified /// If no such image is available, return an error async fn get_base_image_from_config(&self) -> Result { - if let Some(image) = &self.dev_container().image { - return Ok(image.to_string()); - } - if let Some(dockerfile) = self.dev_container().build.as_ref().map(|b| &b.dockerfile) { - let dockerfile_contents = self - .fs - .load(&self.config_directory.join(dockerfile)) - .await - .map_err(|e| { - log::error!("Error reading dockerfile: {e}"); - DevContainerError::DevContainerParseFailed - })?; - return image_from_dockerfile(self, dockerfile_contents); - } - if self.dev_container().docker_compose_file.is_some() { - let docker_compose_manifest = self.docker_compose_manifest().await?; - let (_, main_service) = find_primary_service(&docker_compose_manifest, &self)?; + match self.dev_container().build_type() { + DevContainerBuildType::Image(image) => { + return Ok(image); + } + DevContainerBuildType::Dockerfile(build) => { + let dockerfile_contents = self.expanded_dockerfile_content().await?; + return image_from_dockerfile(dockerfile_contents, &build.target).ok_or_else( + || { + log::error!("Unable to find base image in Dockerfile"); + DevContainerError::DevContainerParseFailed + }, + ); + } + DevContainerBuildType::DockerCompose => { + let docker_compose_manifest = self.docker_compose_manifest().await?; + let (_, main_service) = find_primary_service(&docker_compose_manifest, &self)?; - if let Some(dockerfile) = main_service - .build - .as_ref() - .and_then(|b| b.dockerfile.as_ref()) - { - let dockerfile_contents = self - .fs - .load(&self.config_directory.join(dockerfile)) - .await - .map_err(|e| { - log::error!("Error reading dockerfile: {e}"); + if let Some(_) = main_service + .build + .as_ref() + .and_then(|b| b.dockerfile.as_ref()) + { + let dockerfile_contents = self.expanded_dockerfile_content().await?; + return image_from_dockerfile( + dockerfile_contents, + 
&main_service.build.as_ref().and_then(|b| b.target.clone()), + ) + .ok_or_else(|| { + log::error!("Unable to find base image in Dockerfile"); DevContainerError::DevContainerParseFailed - })?; - return image_from_dockerfile(self, dockerfile_contents); + }); + } + if let Some(image) = &main_service.image { + return Ok(image.to_string()); + } + + log::error!("No valid base image found in docker-compose configuration"); + return Err(DevContainerError::DevContainerParseFailed); } - if let Some(image) = &main_service.image { - return Ok(image.to_string()); + DevContainerBuildType::None => { + log::error!("Not a valid devcontainer config for build"); + return Err(DevContainerError::NotInValidProject); } - - log::error!("No valid base image found in docker-compose configuration"); - return Err(DevContainerError::DevContainerParseFailed); } - log::error!("No valid base image found in dev container configuration"); - Err(DevContainerError::DevContainerParseFailed) } async fn download_feature_and_dockerfile_resources(&mut self) -> Result<(), DevContainerError> { @@ -505,7 +508,10 @@ impl DevContainerManifest { // --- Phase 3: Generate extended Dockerfile from the inflated manifests --- - let is_compose = dev_container.build_type() == DevContainerBuildType::DockerCompose; + let is_compose = match dev_container.build_type() { + DevContainerBuildType::DockerCompose => true, + _ => false, + }; let use_buildkit = self.docker_client.supports_compose_buildkit() || !is_compose; let dockerfile_base_content = if let Some(location) = &self.dockerfile_location().await { @@ -514,10 +520,29 @@ impl DevContainerManifest { None }; + let build_target = if is_compose { + find_primary_service(&self.docker_compose_manifest().await?, self)? 
+ .1 + .build + .and_then(|b| b.target) + } else { + dev_container.build.as_ref().and_then(|b| b.target.clone()) + }; + + let dockerfile_content = dockerfile_base_content + .map(|content| { + dockerfile_inject_alias( + &content, + "dev_container_auto_added_stage_label", + build_target, + ) + }) + .unwrap_or_default(); + let dockerfile_content = self.generate_dockerfile_extended( &container_user, &remote_user, - dockerfile_base_content, + dockerfile_content, use_buildkit, ); @@ -544,7 +569,7 @@ impl DevContainerManifest { &self, container_user: &str, remote_user: &str, - dockerfile_content: Option, + dockerfile_content: String, use_buildkit: bool, ) -> String { #[cfg(not(target_os = "windows"))] @@ -565,16 +590,6 @@ impl DevContainerManifest { let container_home_cmd = get_ent_passwd_shell_command(container_user); let remote_home_cmd = get_ent_passwd_shell_command(remote_user); - let dockerfile_content = dockerfile_content - .map(|content| { - if dockerfile_alias(&content).is_some() { - content - } else { - dockerfile_inject_alias(&content, "dev_container_auto_added_stage_label") - } - }) - .unwrap_or("".to_string()); - let dest = FEATURES_CONTAINER_TEMP_DEST_FOLDER; let feature_content_source_stage = if use_buildkit { @@ -694,20 +709,17 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true } let dev_container = self.dev_container(); match dev_container.build_type() { - DevContainerBuildType::Image => { + DevContainerBuildType::Image(base_image) => { let built_docker_image = self.build_docker_image().await?; - let Some(base_image) = dev_container.image.as_ref() else { - log::error!("Dev container is using and image which can't be referenced"); - return Err(DevContainerError::DevContainerParseFailed); - }; + let built_docker_image = self - .update_remote_user_uid(built_docker_image, base_image) + .update_remote_user_uid(built_docker_image, &base_image) .await?; let resources = self.build_merged_resources(built_docker_image)?; 
Ok(DevContainerBuildResources::Docker(resources)) } - DevContainerBuildType::Dockerfile => { + DevContainerBuildType::Dockerfile(_) => { let built_docker_image = self.build_docker_image().await?; let Some(features_build_info) = &self.features_build_info else { log::error!( @@ -892,6 +904,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true }), ), dockerfile: Some(dockerfile_path.display().to_string()), + target: Some("dev_containers_target_stage".to_string()), args: Some(build_args), additional_contexts, }), @@ -983,6 +996,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true features_build_info.empty_context_dir.display().to_string(), ), dockerfile: Some(dockerfile_path.display().to_string()), + target: Some("dev_containers_target_stage".to_string()), args: Some(build_args), additional_contexts, }), @@ -1252,11 +1266,8 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true }; match dev_container.build_type() { - DevContainerBuildType::Image => { - let Some(image_tag) = &dev_container.image else { - return Err(DevContainerError::DevContainerParseFailed); - }; - let base_image = self.docker_client.inspect(image_tag).await?; + DevContainerBuildType::Image(image_tag) => { + let base_image = self.docker_client.inspect(&image_tag).await?; if dev_container .features .as_ref() @@ -1266,7 +1277,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true return Ok(base_image); } } - DevContainerBuildType::Dockerfile => {} + DevContainerBuildType::Dockerfile(_) => {} DevContainerBuildType::DockerCompose | DevContainerBuildType::None => { return Err(DevContainerError::DevContainerParseFailed); } @@ -1390,7 +1401,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true DevContainerError::FilesystemError })?; - let updated_image_tag = format!("{}-uid", features_build_info.image_tag); + let updated_image_tag = 
features_build_info.image_tag.clone(); let mut command = Command::new(self.docker_client.docker_cli()); command.args(["build"]); @@ -1603,7 +1614,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true command.args(["-t", &features_build_info.image_tag]); - if dev_container.build_type() == DevContainerBuildType::Dockerfile { + if let DevContainerBuildType::Dockerfile(_) = dev_container.build_type() { command.arg(self.config_directory.display().to_string()); } else { // Use an empty folder as the build context to avoid pulling in unneeded files. @@ -1784,7 +1795,6 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true } for app_port in &self.dev_container().app_port { command.arg("-p"); - // Should just implement display for an AppPort struct which takes care of this; it might be a custom map like (literally) "8081:8080" command.arg(app_port); } @@ -1987,6 +1997,65 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true safe_id_lower(alternate_name) } } + + async fn expanded_dockerfile_content(&self) -> Result { + let Some(dockerfile_path) = self.dockerfile_location().await else { + log::error!("Tried to expand dockerfile for an image-type config"); + return Err(DevContainerError::DevContainerParseFailed); + }; + + let devcontainer_args = self + .dev_container() + .build + .as_ref() + .and_then(|b| b.args.clone()) + .unwrap_or_default(); + let contents = self.fs.load(&dockerfile_path).await.map_err(|e| { + log::error!("Failed to load Dockerfile: {e}"); + DevContainerError::FilesystemError + })?; + let mut parsed_lines: Vec = Vec::new(); + let mut inline_args: Vec<(String, String)> = Vec::new(); + let key_regex = Regex::new(r"(?:^|\s)(\w+)=").expect("valid regex"); + + for line in contents.lines() { + let mut parsed_line = line.to_string(); + // Replace from devcontainer args first, since they take precedence + for (key, value) in &devcontainer_args { + parsed_line = 
parsed_line.replace(&format!("${{{key}}}"), value) + } + for (key, value) in &inline_args { + parsed_line = parsed_line.replace(&format!("${{{key}}}"), value); + } + if let Some(arg_directives) = parsed_line.strip_prefix("ARG ") { + let trimmed = arg_directives.trim(); + let key_matches: Vec<_> = key_regex.captures_iter(trimmed).collect(); + for (i, captures) in key_matches.iter().enumerate() { + let key = captures[1].to_string(); + // Insert the devcontainer overrides here if needed + let value_start = captures.get(0).expect("full match").end(); + let value_end = if i + 1 < key_matches.len() { + key_matches[i + 1].get(0).expect("full match").start() + } else { + trimmed.len() + }; + let raw_value = trimmed[value_start..value_end].trim(); + let value = if raw_value.starts_with('"') + && raw_value.ends_with('"') + && raw_value.len() > 1 + { + &raw_value[1..raw_value.len() - 1] + } else { + raw_value + }; + inline_args.push((key, value.to_string())); + } + } + parsed_lines.push(parsed_line); + } + + Ok(parsed_lines.join("\n")) + } } /// Holds all the information needed to construct a `docker buildx build` command @@ -2237,46 +2306,37 @@ chmod +x ./install.sh Ok(script) } -// Dockerfile actions need to be moved to their own file -fn dockerfile_alias(dockerfile_content: &str) -> Option { - dockerfile_content - .lines() - .find(|line| line.starts_with("FROM")) - .and_then(|line| { - let words: Vec<&str> = line.split(" ").collect(); - if words.len() > 2 && words[words.len() - 2].to_lowercase() == "as" { - return Some(words[words.len() - 1].to_string()); - } else { - return None; - } - }) -} - -fn dockerfile_inject_alias(dockerfile_content: &str, alias: &str) -> String { - if dockerfile_alias(dockerfile_content).is_some() { - dockerfile_content.to_string() - } else { - dockerfile_content - .lines() - .map(|line| { - if line.starts_with("FROM") { - format!("{} AS {}", line, alias) - } else { - line.to_string() - } - }) - .collect::>() - .join("\n") +fn 
dockerfile_inject_alias( + dockerfile_content: &str, + alias: &str, + build_target: Option, +) -> String { + match image_from_dockerfile(dockerfile_content.to_string(), &build_target) { + Some(target) => format!( + r#"{dockerfile_content} +FROM {target} AS {alias}"# + ), + None => dockerfile_content.to_string(), } } -fn image_from_dockerfile( - devcontainer: &DevContainerManifest, - dockerfile_contents: String, -) -> Result { - let mut raw_contents = dockerfile_contents +fn image_from_dockerfile(dockerfile_contents: String, target: &Option) -> Option { + dockerfile_contents .lines() - .find(|line| line.starts_with("FROM")) + .filter(|line| line.starts_with("FROM")) + .rfind(|from_line| match &target { + Some(target) => { + let parts = from_line.split(' ').collect::>(); + if parts.len() >= 3 + && parts.get(parts.len() - 2).unwrap_or(&"").to_lowercase() == "as" + { + parts.last().unwrap_or(&"").to_lowercase() == target.to_lowercase() + } else { + false + } + } + None => true, + }) .and_then(|from_line| { from_line .split(' ') @@ -2284,21 +2344,6 @@ fn image_from_dockerfile( .get(1) .map(|s| s.to_string()) }) - .ok_or_else(|| { - log::error!("Could not find an image definition in dockerfile"); - DevContainerError::DevContainerParseFailed - })?; - - for (k, v) in devcontainer - .dev_container() - .build - .as_ref() - .and_then(|b| b.args.as_ref()) - .unwrap_or(&HashMap::new()) - { - raw_contents = raw_contents.replace(&format!("${{{}}}", k), v); - } - Ok(raw_contents) } // Container user things @@ -2386,6 +2431,7 @@ mod test { devcontainer_manifest::{ ConfigStatus, DevContainerManifest, DockerBuildResources, DockerComposeResources, DockerInspect, extract_feature_id, find_primary_service, get_remote_user_from_config, + image_from_dockerfile, }, docker::{ DockerClient, DockerComposeConfig, DockerComposeService, DockerComposeServiceBuild, @@ -3087,7 +3133,7 @@ RUN echo "export HISTFILE=/home/$USERNAME/commandhistory/.bash_history" >> "/hom # Copyright (c) Microsoft 
Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. ARG VARIANT="16-bullseye" -FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} AS dev_container_auto_added_stage_label +FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} RUN mkdir -p /workspaces && chown node:node /workspaces @@ -3100,6 +3146,7 @@ RUN echo "export HISTFILE=/home/$USERNAME/commandhistory/.bash_history" >> "/hom && mkdir -p /home/$USERNAME/commandhistory \ && touch /home/$USERNAME/commandhistory/.bash_history \ && chown -R $USERNAME /home/$USERNAME/commandhistory +FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} AS dev_container_auto_added_stage_label FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_feature_content_normalize USER root @@ -3426,13 +3473,14 @@ RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ &feature_dockerfile, r#"ARG _DEV_CONTAINERS_BASE_IMAGE=placeholder -FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm AS dev_container_auto_added_stage_label +FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm # Include lld linker to improve build times either by using environment variable # RUSTFLAGS="-C link-arg=-fuse-ld=lld" or with Cargo's configuration file (i.e see .cargo/config.toml). 
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ && apt-get -y install clang lld \ && apt-get autoremove -y && apt-get clean -y +FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm AS dev_container_auto_added_stage_label FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_feature_content_normalize USER root @@ -3748,13 +3796,14 @@ RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ &feature_dockerfile, r#"ARG _DEV_CONTAINERS_BASE_IMAGE=placeholder -FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm AS dev_container_auto_added_stage_label +FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm # Include lld linker to improve build times either by using environment variable # RUSTFLAGS="-C link-arg=-fuse-ld=lld" or with Cargo's configuration file (i.e see .cargo/config.toml). RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ && apt-get -y install clang lld \ && apt-get autoremove -y && apt-get clean -y +FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm AS dev_container_auto_added_stage_label FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_feature_content_normalize USER root @@ -3927,13 +3976,14 @@ RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ &feature_dockerfile, r#"ARG _DEV_CONTAINERS_BASE_IMAGE=placeholder -FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm AS dev_container_auto_added_stage_label +FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm # Include lld linker to improve build times either by using environment variable # RUSTFLAGS="-C link-arg=-fuse-ld=lld" or with Cargo's configuration file (i.e see .cargo/config.toml). 
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ && apt-get -y install clang lld \ && apt-get autoremove -y && apt-get clean -y +FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm AS dev_container_auto_added_stage_label FROM dev_container_feature_content_temp as dev_containers_feature_content_source @@ -4046,6 +4096,7 @@ ENV DOCKER_BUILDKIT=1 "VARIANT": "18-bookworm", "FOO": "bar", }, + "target": "development", }, "workspaceMount": "source=${localWorkspaceFolder},target=${containerWorkspaceFolder},type=bind,consistency=cached", "workspaceFolder": "/workspace2", @@ -4131,7 +4182,8 @@ ENV DOCKER_BUILDKIT=1 # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. ARG VARIANT="16-bullseye" -FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} +FROM mcr.microsoft.com/devcontainers/typescript-node:latest as predev +FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} as development RUN mkdir -p /workspaces && chown node:node /workspaces @@ -4174,7 +4226,8 @@ RUN echo "export HISTFILE=/home/$USERNAME/commandhistory/.bash_history" >> "/hom # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. 
ARG VARIANT="16-bullseye" -FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} AS dev_container_auto_added_stage_label +FROM mcr.microsoft.com/devcontainers/typescript-node:latest as predev +FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} as development RUN mkdir -p /workspaces && chown node:node /workspaces @@ -4187,6 +4240,7 @@ RUN echo "export HISTFILE=/home/$USERNAME/commandhistory/.bash_history" >> "/hom && mkdir -p /home/$USERNAME/commandhistory \ && touch /home/$USERNAME/commandhistory/.bash_history \ && chown -R $USERNAME /home/$USERNAME/commandhistory +FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} AS dev_container_auto_added_stage_label FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_feature_content_normalize USER root @@ -4477,6 +4531,211 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true ); } + #[gpui::test] + async fn test_gets_base_image_from_dockerfile(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + env_logger::try_init().ok(); + let given_devcontainer_contents = r#" + { + "name": "cli-${devcontainerId}", + "build": { + "dockerfile": "Dockerfile", + "args": { + "VERSION": "1.22", + } + }, + } + "#; + + let (test_dependencies, mut devcontainer_manifest) = + init_default_devcontainer_manifest(cx, given_devcontainer_contents) + .await + .unwrap(); + + test_dependencies + .fs + .atomic_write( + PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/Dockerfile"), + r#" +FROM dontgrabme as build_context +ARG VERSION=1.21 +ARG REPOSITORY=mybuild +ARG REGISTRY=docker.io/stuff + +ARG IMAGE=${REGISTRY}/${REPOSITORY}:${VERSION} + +FROM ${IMAGE} AS devcontainer + "# + .trim() + .to_string(), + ) + .await + .unwrap(); + + devcontainer_manifest.parse_nonremote_vars().unwrap(); + + let dockerfile_contents = devcontainer_manifest + .expanded_dockerfile_content() + .await + .unwrap(); + let base_image = image_from_dockerfile( + dockerfile_contents, + 
&devcontainer_manifest + .dev_container() + .build + .as_ref() + .and_then(|b| b.target.clone()), + ) + .unwrap(); + + assert_eq!(base_image, "docker.io/stuff/mybuild:1.22".to_string()); + } + + #[gpui::test] + async fn test_gets_base_image_from_dockerfile_with_target_specified(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + env_logger::try_init().ok(); + let given_devcontainer_contents = r#" + { + "name": "cli-${devcontainerId}", + "build": { + "dockerfile": "Dockerfile", + "args": { + "VERSION": "1.22", + }, + "target": "development" + }, + } + "#; + + let (test_dependencies, mut devcontainer_manifest) = + init_default_devcontainer_manifest(cx, given_devcontainer_contents) + .await + .unwrap(); + + test_dependencies + .fs + .atomic_write( + PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/Dockerfile"), + r#" +FROM dontgrabme as build_context +ARG VERSION=1.21 +ARG REPOSITORY=mybuild +ARG REGISTRY=docker.io/stuff + +ARG IMAGE=${REGISTRY}/${REPOSITORY}:${VERSION} +ARG DEV_IMAGE=${REGISTRY}/${REPOSITORY}:latest + +FROM ${DEV_IMAGE} AS development +FROM ${IMAGE} AS production + "# + .trim() + .to_string(), + ) + .await + .unwrap(); + + devcontainer_manifest.parse_nonremote_vars().unwrap(); + + let dockerfile_contents = devcontainer_manifest + .expanded_dockerfile_content() + .await + .unwrap(); + let base_image = image_from_dockerfile( + dockerfile_contents, + &devcontainer_manifest + .dev_container() + .build + .as_ref() + .and_then(|b| b.target.clone()), + ) + .unwrap(); + + assert_eq!(base_image, "docker.io/stuff/mybuild:latest".to_string()); + } + + #[gpui::test] + async fn test_expands_args_in_dockerfile(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + env_logger::try_init().ok(); + let given_devcontainer_contents = r#" + { + "name": "cli-${devcontainerId}", + "build": { + "dockerfile": "Dockerfile", + "args": { + "JSON_ARG": "some-value", + "ELIXIR_VERSION": "1.21", + } + }, + } + "#; + + let (test_dependencies, mut 
devcontainer_manifest) = + init_default_devcontainer_manifest(cx, given_devcontainer_contents) + .await + .unwrap(); + + test_dependencies + .fs + .atomic_write( + PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/Dockerfile"), + r#" +ARG INVALID_FORWARD_REFERENCE=${OTP_VERSION} +ARG ELIXIR_VERSION=1.20.0-rc.4 +ARG FOO=foo BAR=bar +ARG FOOBAR=${FOO}${BAR} +ARG OTP_VERSION=28.4.1 +ARG DEBIAN_VERSION=trixie-20260316-slim +ARG IMAGE="docker.io/hexpm/elixir:${ELIXIR_VERSION}-erlang-${OTP_VERSION}-debian-${DEBIAN_VERSION}" +ARG NESTED_MAP="{"key1": "val1", "key2": "val2"}" +ARG WRAPPING_MAP={"nested_map": ${NESTED_MAP}} +ARG FROM_JSON=${JSON_ARG} + +FROM ${IMAGE} AS devcontainer + "# + .trim() + .to_string(), + ) + .await + .unwrap(); + + devcontainer_manifest.parse_nonremote_vars().unwrap(); + + let expanded_dockerfile = devcontainer_manifest + .expanded_dockerfile_content() + .await + .unwrap(); + + assert_eq!( + &expanded_dockerfile, + r#" +ARG INVALID_FORWARD_REFERENCE=${OTP_VERSION} +ARG ELIXIR_VERSION=1.20.0-rc.4 +ARG FOO=foo BAR=bar +ARG FOOBAR=foobar +ARG OTP_VERSION=28.4.1 +ARG DEBIAN_VERSION=trixie-20260316-slim +ARG IMAGE="docker.io/hexpm/elixir:1.21-erlang-28.4.1-debian-trixie-20260316-slim" +ARG NESTED_MAP="{"key1": "val1", "key2": "val2"}" +ARG WRAPPING_MAP={"nested_map": {"key1": "val1", "key2": "val2"}} +ARG FROM_JSON=some-value + +FROM docker.io/hexpm/elixir:1.21-erlang-28.4.1-debian-trixie-20260316-slim AS devcontainer + "# + .trim() + ) + } + + #[test] + fn test_aliases_dockerfile_with_pre_existing_aliases_for_build() {} + + #[test] + fn test_aliases_dockerfile_with_no_aliases_for_build() {} + + #[test] + fn test_aliases_dockerfile_with_build_target_specified() {} + pub(crate) struct RecordedExecCommand { pub(crate) _container_id: String, pub(crate) _remote_folder: String, @@ -4641,6 +4900,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true dockerfile: Some("Dockerfile".to_string()), args: None, additional_contexts: 
None, + target: None, }), volumes: vec![MountDefinition { source: Some("../..".to_string()), diff --git a/crates/dev_container/src/docker.rs b/crates/dev_container/src/docker.rs index b913aea5fd068fdc75337284f05d99a2266dba05..c02bafb195ae20e203bffdf471c4a284a44a15e0 100644 --- a/crates/dev_container/src/docker.rs +++ b/crates/dev_container/src/docker.rs @@ -80,6 +80,8 @@ pub(crate) struct DockerComposeServiceBuild { #[serde(skip_serializing_if = "Option::is_none")] pub(crate) dockerfile: Option, #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) target: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub(crate) args: Option>, #[serde(skip_serializing_if = "Option::is_none")] pub(crate) additional_contexts: Option>, From b3bc52e28c91a00340686170e73ecc473449ef70 Mon Sep 17 00:00:00 2001 From: Eric Holk Date: Thu, 9 Apr 2026 14:20:58 -0700 Subject: [PATCH 22/67] Show workspace project group keys in workspace debug dump (#53556) Also indicates if the multiworkspace and the workspace disagree on the right key.
Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --- crates/sidebar/src/sidebar.rs | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs index 57fe5a04ac6e656f790d72b1a99dff3e14fa8ead..488127eb0bd04b064c2c6e3b1d8dc297ada9c477 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -4727,6 +4727,36 @@ pub fn dump_workspace_info( ) .ok(); + // project_group_key_for_workspace internally reads the workspace, + // so we can only call it for workspaces other than this_entity + // (which is already being updated). + if let Some(mw) = &multi_workspace { + if *ws == this_entity { + let workspace_key = workspace.project_group_key(cx); + writeln!(output, "ProjectGroupKey: {workspace_key:?}").ok(); + } else { + let effective_key = mw.read(cx).project_group_key_for_workspace(ws, cx); + let workspace_key = ws.read(cx).project_group_key(cx); + if effective_key != workspace_key { + writeln!( + output, + "ProjectGroupKey (multi_workspace): {effective_key:?}" + ) + .ok(); + writeln!( + output, + "ProjectGroupKey (workspace, DISAGREES): {workspace_key:?}" + ) + .ok(); + } else { + writeln!(output, "ProjectGroupKey: {effective_key:?}").ok(); + } + } + } else { + let workspace_key = workspace.project_group_key(cx); + writeln!(output, "ProjectGroupKey: {workspace_key:?}").ok(); + } + // The action handler is already inside an update on `this_entity`, // so we must avoid a nested read/update on that same entity. 
if *ws == this_entity { From 3436e510420e8ae1d9283bd49cc1c03bf61f627d Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Thu, 9 Apr 2026 17:47:24 -0700 Subject: [PATCH 23/67] Change name of web search tool (#53573) Self-Review Checklist: - [ ] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --- .github/workflows/run_tests.yml | 2 +- assets/settings/default.json | 4 +- crates/agent/src/tools/web_search_tool.rs | 2 +- crates/migrator/src/migrations.rs | 6 + .../src/migrations/m_2026_04_10/settings.rs | 64 ++++ crates/migrator/src/migrator.rs | 298 ++++++++++++++++++ .../src/pages/tool_permissions_setup.rs | 6 +- docs/.doc-examples/reference.md | 2 +- docs/src/ai/tool-permissions.md | 2 +- docs/src/ai/tools.md | 2 +- .../xtask/src/tasks/workflows/run_tests.rs | 2 +- 11 files changed, 379 insertions(+), 11 deletions(-) create mode 100644 crates/migrator/src/migrations/m_2026_04_10/settings.rs diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index 2fee8b3ae9fe54619639d35f521d4eec946eef37..a1e15e7beb8a7fe3f03536bf8a4fb41519aa4e0a 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -80,7 +80,7 @@ jobs: # If assets/ changed, add crates that depend on those assets if echo "$CHANGED_FILES" | grep -qP '^assets/'; then - FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "storybook" "assets" | sort -u) + FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "assets" | sort -u) fi # Combine all changed packages diff --git a/assets/settings/default.json b/assets/settings/default.json index 
56dff8b2ad632ed74045887ea274cfa112140b4b..799a34d6a6f4dea367cc2c5cc4ce774ff0ad312e 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -1075,7 +1075,7 @@ "terminal": true, "thinking": true, "update_plan": true, - "web_search": true, + "search_web": true, }, }, "ask": { @@ -1095,7 +1095,7 @@ "spawn_agent": true, "thinking": true, "update_plan": true, - "web_search": true, + "search_web": true, }, }, "minimal": { diff --git a/crates/agent/src/tools/web_search_tool.rs b/crates/agent/src/tools/web_search_tool.rs index c697a5b78f1fe8c84d6ed58db13f651a493ae8c3..75d7689fd7c8e22a4daf45f96f5517f7888977a4 100644 --- a/crates/agent/src/tools/web_search_tool.rs +++ b/crates/agent/src/tools/web_search_tool.rs @@ -53,7 +53,7 @@ impl AgentTool for WebSearchTool { type Input = WebSearchToolInput; type Output = WebSearchToolOutput; - const NAME: &'static str = "web_search"; + const NAME: &'static str = "search_web"; fn kind() -> acp::ToolKind { acp::ToolKind::Fetch diff --git a/crates/migrator/src/migrations.rs b/crates/migrator/src/migrations.rs index 625bd27e91e117662f9a47edaaac2ddaa7d2ba1c..ed9c6ff51513b706a7eda93fafe59438feb90c59 100644 --- a/crates/migrator/src/migrations.rs +++ b/crates/migrator/src/migrations.rs @@ -328,3 +328,9 @@ pub(crate) mod m_2026_04_01 { pub(crate) use settings::restructure_profiles_with_settings_key; } + +pub(crate) mod m_2026_04_10 { + mod settings; + + pub(crate) use settings::rename_web_search_to_search_web; +} diff --git a/crates/migrator/src/migrations/m_2026_04_10/settings.rs b/crates/migrator/src/migrations/m_2026_04_10/settings.rs new file mode 100644 index 0000000000000000000000000000000000000000..5430523149480772b8070f734f5c03daac8505d2 --- /dev/null +++ b/crates/migrator/src/migrations/m_2026_04_10/settings.rs @@ -0,0 +1,64 @@ +use anyhow::Result; +use serde_json::Value; + +use crate::migrations::migrate_settings; + +const AGENT_KEY: &str = "agent"; +const PROFILES_KEY: &str = "profiles"; +const SETTINGS_KEY: 
&str = "settings"; +const TOOL_PERMISSIONS_KEY: &str = "tool_permissions"; +const TOOLS_KEY: &str = "tools"; +const OLD_TOOL_NAME: &str = "web_search"; +const NEW_TOOL_NAME: &str = "search_web"; + +pub fn rename_web_search_to_search_web(value: &mut Value) -> Result<()> { + migrate_settings(value, &mut migrate_one) +} + +fn migrate_one(object: &mut serde_json::Map) -> Result<()> { + migrate_agent_value(object)?; + + // Root-level profiles have a `settings` wrapper after m_2026_04_01, + // but `migrate_settings` calls us with the profile map directly, + // so we need to look inside `settings` too. + if let Some(settings) = object.get_mut(SETTINGS_KEY).and_then(|v| v.as_object_mut()) { + migrate_agent_value(settings)?; + } + + Ok(()) +} + +fn migrate_agent_value(object: &mut serde_json::Map) -> Result<()> { + let Some(agent) = object.get_mut(AGENT_KEY).and_then(|v| v.as_object_mut()) else { + return Ok(()); + }; + + if let Some(tools) = agent + .get_mut(TOOL_PERMISSIONS_KEY) + .and_then(|v| v.as_object_mut()) + .and_then(|tp| tp.get_mut(TOOLS_KEY)) + .and_then(|v| v.as_object_mut()) + { + rename_key(tools); + } + + if let Some(profiles) = agent.get_mut(PROFILES_KEY).and_then(|v| v.as_object_mut()) { + for (_profile_name, profile) in profiles.iter_mut() { + if let Some(tools) = profile + .as_object_mut() + .and_then(|p| p.get_mut(TOOLS_KEY)) + .and_then(|v| v.as_object_mut()) + { + rename_key(tools); + } + } + } + + Ok(()) +} + +fn rename_key(tools: &mut serde_json::Map) { + if let Some(value) = tools.remove(OLD_TOOL_NAME) { + tools.insert(NEW_TOOL_NAME.to_string(), value); + } +} diff --git a/crates/migrator/src/migrator.rs b/crates/migrator/src/migrator.rs index f49d102213c446be17c7d240d272cf4b516d912c..4a9873f9b574d9194052156cbdb685f93bccfcb4 100644 --- a/crates/migrator/src/migrator.rs +++ b/crates/migrator/src/migrator.rs @@ -249,6 +249,7 @@ pub fn migrate_settings(text: &str) -> Result> { ), 
MigrationType::Json(migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum), MigrationType::Json(migrations::m_2026_04_01::restructure_profiles_with_settings_key), + MigrationType::Json(migrations::m_2026_04_10::rename_web_search_to_search_web), ]; run_migrations(text, migrations) } @@ -4682,4 +4683,301 @@ mod tests { None, ); } + + #[test] + fn test_rename_web_search_to_search_web_in_tool_permissions() { + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_04_10::rename_web_search_to_search_web, + )], + &r#" + { + "agent": { + "tool_permissions": { + "tools": { + "web_search": { + "allow": true + } + } + } + } + } + "# + .unindent(), + Some( + &r#" + { + "agent": { + "tool_permissions": { + "tools": { + "search_web": { + "allow": true + } + } + } + } + } + "# + .unindent(), + ), + ); + } + + #[test] + fn test_rename_web_search_to_search_web_in_profiles() { + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_04_10::rename_web_search_to_search_web, + )], + &r#" + { + "agent": { + "profiles": { + "write": { + "tools": { + "web_search": false + } + } + } + } + } + "# + .unindent(), + Some( + &r#" + { + "agent": { + "profiles": { + "write": { + "tools": { + "search_web": false + } + } + } + } + } + "# + .unindent(), + ), + ); + } + + #[test] + fn test_rename_web_search_to_search_web_no_change_when_already_migrated() { + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_04_10::rename_web_search_to_search_web, + )], + &r#" + { + "agent": { + "tool_permissions": { + "tools": { + "search_web": { + "allow": true + } + } + } + } + } + "# + .unindent(), + None, + ); + } + + #[test] + fn test_rename_web_search_to_search_web_no_clobber() { + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_04_10::rename_web_search_to_search_web, + )], + &r#" + { + "agent": { + "tool_permissions": { + "tools": { + "web_search": { + "allow": false + }, + "search_web": { + 
"allow": true + } + } + } + } + } + "# + .unindent(), + Some( + &r#" + { + "agent": { + "tool_permissions": { + "tools": { + "search_web": { + "allow": false + } + } + } + } + } + "# + .unindent(), + ), + ); + } + + #[test] + fn test_rename_web_search_to_search_web_platform_override() { + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_04_10::rename_web_search_to_search_web, + )], + &r#" + { + "linux": { + "agent": { + "tool_permissions": { + "tools": { + "web_search": { + "allow": true + } + } + } + } + } + } + "# + .unindent(), + Some( + &r#" + { + "linux": { + "agent": { + "tool_permissions": { + "tools": { + "search_web": { + "allow": true + } + } + } + } + } + } + "# + .unindent(), + ), + ); + } + + #[test] + fn test_rename_web_search_to_search_web_release_channel_override() { + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_04_10::rename_web_search_to_search_web, + )], + &r#" + { + "nightly": { + "agent": { + "tool_permissions": { + "tools": { + "web_search": { + "default": "allow" + } + } + } + } + } + } + "# + .unindent(), + Some( + &r#" + { + "nightly": { + "agent": { + "tool_permissions": { + "tools": { + "search_web": { + "default": "allow" + } + } + } + } + } + } + "# + .unindent(), + ), + ); + } + + #[test] + fn test_rename_web_search_to_search_web_no_agent() { + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_04_10::rename_web_search_to_search_web, + )], + &r#" + { + "buffer_font_size": 14 + } + "# + .unindent(), + None, + ); + } + + #[test] + fn test_rename_web_search_to_search_web_root_level_profile() { + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_04_10::rename_web_search_to_search_web, + )], + &r#" + { + "profiles": { + "Work": { + "settings": { + "agent": { + "tool_permissions": { + "tools": { + "web_search": { + "default": "allow" + } + } + } + } + } + } + } + } + "# + .unindent(), + Some( + &r#" + { + "profiles": { + 
"Work": { + "settings": { + "agent": { + "tool_permissions": { + "tools": { + "search_web": { + "default": "allow" + } + } + } + } + } + } + } + } + "# + .unindent(), + ), + ); + } } diff --git a/crates/settings_ui/src/pages/tool_permissions_setup.rs b/crates/settings_ui/src/pages/tool_permissions_setup.rs index 61d6c8c6f4cb09246bc3b6ee11e87e065ed52b3a..bbfcd1849dd561764a031a95cdc28fadfdeab87e 100644 --- a/crates/settings_ui/src/pages/tool_permissions_setup.rs +++ b/crates/settings_ui/src/pages/tool_permissions_setup.rs @@ -69,7 +69,7 @@ const TOOLS: &[ToolInfo] = &[ regex_explanation: "Patterns are matched against the URL being fetched.", }, ToolInfo { - id: "web_search", + id: "search_web", name: "Web Search", description: "Web search queries", regex_explanation: "Patterns are matched against the search query.", @@ -309,7 +309,7 @@ fn get_tool_render_fn( "create_directory" => render_create_directory_tool_config, "save_file" => render_save_file_tool_config, "fetch" => render_fetch_tool_config, - "web_search" => render_web_search_tool_config, + "search_web" => render_web_search_tool_config, "restore_file_from_disk" => render_restore_file_from_disk_tool_config, _ => render_terminal_tool_config, // fallback } @@ -1389,7 +1389,7 @@ tool_config_page_fn!(render_move_path_tool_config, "move_path"); tool_config_page_fn!(render_create_directory_tool_config, "create_directory"); tool_config_page_fn!(render_save_file_tool_config, "save_file"); tool_config_page_fn!(render_fetch_tool_config, "fetch"); -tool_config_page_fn!(render_web_search_tool_config, "web_search"); +tool_config_page_fn!(render_web_search_tool_config, "search_web"); tool_config_page_fn!( render_restore_file_from_disk_tool_config, "restore_file_from_disk" diff --git a/docs/.doc-examples/reference.md b/docs/.doc-examples/reference.md index ce774e02c8e3fac0914388c891898841209c13e0..5b250fc0047ba38d373bd39baf945f02addb18f5 100644 --- a/docs/.doc-examples/reference.md +++ b/docs/.doc-examples/reference.md @@ 
-68,7 +68,7 @@ Reads the content of a specified file in the project, allowing access to file co Allows the Agent to work through problems, brainstorm ideas, or plan without executing actions, useful for complex problem-solving. -### `web_search` {#web-search} +### `search_web` {#search-web} Searches the web for information, providing results with snippets and links from relevant web pages, useful for accessing real-time information. diff --git a/docs/src/ai/tool-permissions.md b/docs/src/ai/tool-permissions.md index 27ee114e343366ab2700580fa92eea010f40966b..61d599a9d1e04074420e7587428f4b6c8de20e0b 100644 --- a/docs/src/ai/tool-permissions.md +++ b/docs/src/ai/tool-permissions.md @@ -54,7 +54,7 @@ The `tool_permissions` setting lets you customize tool permissions by specifying | `restore_file_from_disk` | The file paths | | `save_file` | The file paths | | `fetch` | The URL | -| `web_search` | The search query | +| `search_web` | The search query | For MCP tools, use the format `mcp::`. For example, a tool called `create_issue` on a server called `github` would be `mcp:github:create_issue`. diff --git a/docs/src/ai/tools.md b/docs/src/ai/tools.md index bc57f3c378fbc03429fe84993c349b0a5b3ce0d0..3ada0ce025976a35250c6960745ef69ac0d2c5db 100644 --- a/docs/src/ai/tools.md +++ b/docs/src/ai/tools.md @@ -57,7 +57,7 @@ Reads the content of a specified file in the project, allowing access to file co Allows the Agent to work through problems, brainstorm ideas, or plan without executing actions, useful for complex problem-solving. -### `web_search` +### `search_web` Searches the web for information, providing results with snippets and links from relevant web pages, useful for accessing real-time information. 
diff --git a/tooling/xtask/src/tasks/workflows/run_tests.rs b/tooling/xtask/src/tasks/workflows/run_tests.rs index 4d936b89ff7e39c562b5f7e5c89842e55cadf929..b8d6e0636078289b80184edfea29a516774c1fd7 100644 --- a/tooling/xtask/src/tasks/workflows/run_tests.rs +++ b/tooling/xtask/src/tasks/workflows/run_tests.rs @@ -203,7 +203,7 @@ fn orchestrate_impl(rules: &[&PathCondition], target: OrchestrateTarget) -> Name # If assets/ changed, add crates that depend on those assets if echo "$CHANGED_FILES" | grep -qP '^assets/'; then - FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "storybook" "assets" | sort -u) + FILE_CHANGED_PKGS=$(printf '%s\n%s\n%s' "$FILE_CHANGED_PKGS" "settings" "assets" | sort -u) fi # Combine all changed packages From 4a1fb25f67b42cd930de8aa494dfeb96dd2d7f0a Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Thu, 9 Apr 2026 18:21:19 -0700 Subject: [PATCH 24/67] Handle changing root paths without splitting in the sidebar (#53566) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #ISSUE Release Notes: - N/A --------- Co-authored-by: Eric Holk --- crates/agent_ui/src/thread_metadata_store.rs | 30 + crates/sidebar/src/sidebar.rs | 179 ++- crates/sidebar/src/sidebar_tests.rs | 1348 ++++++++++++++--- crates/workspace/src/multi_workspace.rs | 209 ++- crates/workspace/src/multi_workspace_tests.rs | 154 -- crates/workspace/src/workspace.rs | 2 +- 6 files changed, 1476 insertions(+), 446 deletions(-) diff --git a/crates/agent_ui/src/thread_metadata_store.rs b/crates/agent_ui/src/thread_metadata_store.rs index 127f746a9edd35bc3b62b489277980868faba1c8..69c5377465a420b2e9f64e16139736fe04b65e5a 
100644 --- a/crates/agent_ui/src/thread_metadata_store.rs +++ b/crates/agent_ui/src/thread_metadata_store.rs @@ -477,6 +477,36 @@ impl ThreadMetadataStore { } } + pub fn update_main_worktree_paths( + &mut self, + old_paths: &PathList, + new_paths: PathList, + cx: &mut Context, + ) { + let session_ids = match self.threads_by_main_paths.remove(old_paths) { + Some(ids) if !ids.is_empty() => ids, + _ => return, + }; + + let new_index = self + .threads_by_main_paths + .entry(new_paths.clone()) + .or_default(); + + for session_id in &session_ids { + new_index.insert(session_id.clone()); + + if let Some(thread) = self.threads.get_mut(session_id) { + thread.main_worktree_paths = new_paths.clone(); + self.pending_thread_ops_tx + .try_send(DbOperation::Upsert(thread.clone())) + .log_err(); + } + } + + cx.notify(); + } + pub fn create_archived_worktree( &self, worktree_path: String, diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs index 488127eb0bd04b064c2c6e3b1d8dc297ada9c477..547d3bd83cf97986dbe0006a2454223fba255886 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -283,10 +283,8 @@ impl ListEntry { } } ListEntry::ProjectHeader { key, .. } => multi_workspace - .workspaces() - .find(|ws| PathList::new(&ws.read(cx).root_paths(cx)) == *key.path_list()) + .workspaces_for_project_group(key, cx) .cloned() - .into_iter() .collect(), ListEntry::ViewMore { .. } => Vec::new(), } @@ -365,35 +363,81 @@ fn workspace_path_list(workspace: &Entity, cx: &App) -> PathList { /// /// For each path in the thread's `folder_paths`, produces a /// [`WorktreeInfo`] with a short display name, full path, and whether -/// the worktree is the main checkout or a linked git worktree. +/// the worktree is the main checkout or a linked git worktree. When +/// multiple main paths exist and a linked worktree's short name alone +/// wouldn't identify which main project it belongs to, the main project +/// name is prefixed for disambiguation (e.g. 
`project:feature`). +/// +/// `linked_to_main` maps linked worktree abs paths to their main repo +/// abs path, used to pick the correct prefix. Falls back to a heuristic +/// when no mapping is available. fn worktree_info_from_thread_paths( folder_paths: &PathList, - group_key: &project::ProjectGroupKey, -) -> impl Iterator { - let main_paths = group_key.path_list().paths(); - folder_paths.paths().iter().filter_map(|path| { + main_worktree_paths: &PathList, + linked_to_main: &HashMap, +) -> Vec { + let main_paths = main_worktree_paths.paths(); + + let mut infos: Vec = Vec::new(); + let mut linked_short_names: Vec<(SharedString, SharedString)> = Vec::new(); + + for path in folder_paths.paths().iter() { let is_main = main_paths.iter().any(|mp| mp.as_path() == path.as_path()); if is_main { - let name = path.file_name()?.to_string_lossy().to_string(); - Some(WorktreeInfo { - name: SharedString::from(name), + let Some(name) = path.file_name() else { + continue; + }; + infos.push(WorktreeInfo { + name: SharedString::from(name.to_string_lossy().to_string()), full_path: SharedString::from(path.display().to_string()), highlight_positions: Vec::new(), kind: ui::WorktreeKind::Main, - }) + }); } else { - let main_path = main_paths - .iter() - .find(|mp| mp.file_name() == path.file_name()) - .or(main_paths.first())?; - Some(WorktreeInfo { - name: linked_worktree_short_name(main_path, path).unwrap_or_default(), + let Some(main_path) = linked_to_main + .get(&**path) + .and_then(|main| main_paths.iter().find(|mp| mp.as_path() == main.as_path())) + .or_else(|| { + main_paths + .iter() + .find(|mp| mp.file_name() == path.file_name()) + .or(main_paths.first()) + }) + else { + continue; + }; + let short_name = linked_worktree_short_name(main_path, path).unwrap_or_default(); + let project_name = main_path + .file_name() + .map(|n| SharedString::from(n.to_string_lossy().to_string())) + .unwrap_or_default(); + linked_short_names.push((short_name.clone(), project_name)); + 
infos.push(WorktreeInfo { + name: short_name, full_path: SharedString::from(path.display().to_string()), highlight_positions: Vec::new(), kind: ui::WorktreeKind::Linked, - }) + }); } - }) + } + + // When the group has multiple main worktree paths and the thread's + // folder paths don't all share the same short name, prefix each + // linked worktree chip with its main project name so the user knows + // which project it belongs to. + let all_same_name = infos.len() > 1 && infos.iter().all(|i| i.name == infos[0].name); + + if main_paths.len() > 1 && !all_same_name { + for (info, (_short_name, project_name)) in infos + .iter_mut() + .filter(|i| i.kind == ui::WorktreeKind::Linked) + .zip(linked_short_names.iter()) + { + info.name = SharedString::from(format!("{}:{}", project_name, info.name)); + } + } + + infos } /// Shows a [`RemoteConnectionModal`] on the given workspace and establishes @@ -480,6 +524,16 @@ impl Sidebar { MultiWorkspaceEvent::WorkspaceRemoved(_) => { this.update_entries(cx); } + MultiWorkspaceEvent::ProjectGroupKeyChanged { old_key, new_key } => { + ThreadMetadataStore::global(cx).update(cx, |store, cx| { + store.update_main_worktree_paths( + old_key.path_list(), + new_key.path_list().clone(), + cx, + ); + }); + this.update_entries(cx); + } }, ) .detach(); @@ -912,6 +966,21 @@ impl Sidebar { .as_ref() .is_some_and(|active| group_workspaces.contains(active)); + // Build a mapping from linked worktree paths to their main + // repo path, used to correctly attribute chips. + let linked_to_main: HashMap = group_workspaces + .iter() + .flat_map(|ws| root_repository_snapshots(ws, cx)) + .flat_map(|snapshot| { + let main_path = snapshot.original_repo_abs_path.to_path_buf(); + snapshot + .linked_worktrees() + .iter() + .map(move |wt| (wt.path.clone(), main_path.clone())) + .collect::>() + }) + .collect(); + // Collect live thread infos from all workspaces in this group. 
let live_infos: Vec<_> = group_workspaces .iter() @@ -949,26 +1018,28 @@ impl Sidebar { }; // Build a ThreadEntry from a metadata row. - let make_thread_entry = |row: ThreadMetadata, - workspace: ThreadEntryWorkspace| - -> ThreadEntry { - let (icon, icon_from_external_svg) = resolve_agent_icon(&row.agent_id); - let worktrees: Vec = - worktree_info_from_thread_paths(&row.folder_paths, &group_key).collect(); - ThreadEntry { - metadata: row, - icon, - icon_from_external_svg, - status: AgentThreadStatus::default(), - workspace, - is_live: false, - is_background: false, - is_title_generating: false, - highlight_positions: Vec::new(), - worktrees, - diff_stats: DiffStats::default(), - } - }; + let make_thread_entry = + |row: ThreadMetadata, workspace: ThreadEntryWorkspace| -> ThreadEntry { + let (icon, icon_from_external_svg) = resolve_agent_icon(&row.agent_id); + let worktrees = worktree_info_from_thread_paths( + &row.folder_paths, + &row.main_worktree_paths, + &linked_to_main, + ); + ThreadEntry { + metadata: row, + icon, + icon_from_external_svg, + status: AgentThreadStatus::default(), + workspace, + is_live: false, + is_background: false, + is_title_generating: false, + highlight_positions: Vec::new(), + worktrees, + diff_stats: DiffStats::default(), + } + }; // Main code path: one query per group via main_worktree_paths. 
// The main_worktree_paths column is set on all new threads and @@ -1184,11 +1255,17 @@ impl Sidebar { if is_draft_for_group { if let Some(ActiveEntry::Draft(draft_ws)) = &self.active_entry { let ws_path_list = workspace_path_list(draft_ws, cx); - let worktrees = worktree_info_from_thread_paths(&ws_path_list, &group_key); + let main_worktree_paths = + draft_ws.read(cx).project_group_key(cx).path_list().clone(); + let worktrees = worktree_info_from_thread_paths( + &ws_path_list, + &main_worktree_paths, + &linked_to_main, + ); entries.push(ListEntry::DraftThread { key: group_key.clone(), workspace: None, - worktrees: worktrees.collect(), + worktrees, }); } } @@ -1212,9 +1289,14 @@ impl Sidebar { continue; } let ws_path_list = workspace_path_list(ws, cx); - let has_linked_worktrees = - worktree_info_from_thread_paths(&ws_path_list, &group_key) - .any(|wt| wt.kind == ui::WorktreeKind::Linked); + let ws_main_paths = ws.read(cx).project_group_key(cx).path_list().clone(); + let has_linked_worktrees = worktree_info_from_thread_paths( + &ws_path_list, + &ws_main_paths, + &linked_to_main, + ) + .iter() + .any(|wt| wt.kind == ui::WorktreeKind::Linked); if !has_linked_worktrees { continue; } @@ -1227,8 +1309,11 @@ impl Sidebar { if has_threads { continue; } - let worktrees: Vec = - worktree_info_from_thread_paths(&ws_path_list, &group_key).collect(); + let worktrees = worktree_info_from_thread_paths( + &ws_path_list, + &ws_main_paths, + &linked_to_main, + ); entries.push(ListEntry::DraftThread { key: group_key.clone(), diff --git a/crates/sidebar/src/sidebar_tests.rs b/crates/sidebar/src/sidebar_tests.rs index 8ced8d6f71f6d88ff24a522404417ef7db3a6a7c..420eae134e11309e4d1cc90c45335f7fce76bb22 100644 --- a/crates/sidebar/src/sidebar_tests.rs +++ b/crates/sidebar/src/sidebar_tests.rs @@ -251,6 +251,32 @@ fn save_thread_metadata( cx.run_until_parked(); } +fn save_thread_metadata_with_main_paths( + session_id: &str, + title: &str, + folder_paths: PathList, + main_worktree_paths: 
PathList, + cx: &mut TestAppContext, +) { + let session_id = acp::SessionId::new(Arc::from(session_id)); + let title = SharedString::from(title.to_string()); + let updated_at = chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(); + let metadata = ThreadMetadata { + session_id, + agent_id: agent::ZED_AGENT_ID.clone(), + title, + updated_at, + created_at: None, + folder_paths, + main_worktree_paths, + archived: false, + }; + cx.update(|cx| { + ThreadMetadataStore::global(cx).update(cx, |store, cx| store.save_manually(metadata, cx)); + }); + cx.run_until_parked(); +} + fn focus_sidebar(sidebar: &Entity, cx: &mut gpui::VisualTestContext) { sidebar.update_in(cx, |_, window, cx| { cx.focus_self(window); @@ -322,6 +348,11 @@ fn visible_entries_as_strings( } else { "" }; + let is_active = sidebar + .active_entry + .as_ref() + .is_some_and(|active| active.matches_entry(entry)); + let active_indicator = if is_active { " (active)" } else { "" }; match entry { ListEntry::ProjectHeader { label, @@ -338,7 +369,7 @@ fn visible_entries_as_strings( } ListEntry::Thread(thread) => { let title = thread.metadata.title.as_ref(); - let active = if thread.is_live { " *" } else { "" }; + let live = if thread.is_live { " *" } else { "" }; let status_str = match thread.status { AgentThreadStatus::Running => " (running)", AgentThreadStatus::Error => " (error)", @@ -354,7 +385,7 @@ fn visible_entries_as_strings( "" }; let worktree = format_linked_worktree_chips(&thread.worktrees); - format!(" {title}{worktree}{active}{status_str}{notified}{selected}") + format!(" {title}{worktree}{live}{status_str}{notified}{active_indicator}{selected}") } ListEntry::ViewMore { is_fully_expanded, .. 
@@ -374,7 +405,7 @@ fn visible_entries_as_strings( if workspace.is_some() { format!(" [+ New Thread{}]{}", worktree, selected) } else { - format!(" [~ Draft{}]{}", worktree, selected) + format!(" [~ Draft{}]{}{}", worktree, active_indicator, selected) } } } @@ -543,7 +574,10 @@ async fn test_single_workspace_no_threads(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]"] + vec![ + // + "v [my-project]", + ] ); } @@ -579,6 +613,7 @@ async fn test_single_workspace_with_saved_threads(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [my-project]", " Fix crash in project panel", " Add inline diff view", @@ -609,7 +644,11 @@ async fn test_workspace_lifecycle(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project-a]", " Thread A1"] + vec![ + // + "v [project-a]", + " Thread A1", + ] ); // Add a second workspace @@ -620,7 +659,11 @@ async fn test_workspace_lifecycle(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project-a]", " Thread A1",] + vec![ + // + "v [project-a]", + " Thread A1", + ] ); } @@ -639,6 +682,7 @@ async fn test_view_more_pagination(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [my-project]", " Thread 12", " Thread 11", @@ -749,7 +793,11 @@ async fn test_collapse_and_expand_group(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Thread 1"] + vec![ + // + "v [my-project]", + " Thread 1", + ] ); // Collapse @@ -760,7 +808,10 @@ async fn test_collapse_and_expand_group(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["> [my-project]"] + vec![ + // + "> [my-project]", + ] ); // Expand @@ -771,7 +822,11 @@ async fn test_collapse_and_expand_group(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - 
vec!["v [my-project]", " Thread 1"] + vec![ + // + "v [my-project]", + " Thread 1", + ] ); } @@ -941,6 +996,7 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [expanded-project]", " Completed thread", " Running thread * (running) <== selected", @@ -1104,10 +1160,14 @@ async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestA assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Thread 1"] + vec![ + // + "v [my-project]", + " Thread 1", + ] ); - // Focus the sidebar and select the header (index 0) + // Focus the sidebar and select the header focus_sidebar(&sidebar, cx); sidebar.update_in(cx, |sidebar, _window, _cx| { sidebar.selection = Some(0); @@ -1119,7 +1179,10 @@ async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestA assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["> [my-project] <== selected"] + vec![ + // + "> [my-project] <== selected", + ] ); // Confirm again expands the group @@ -1128,7 +1191,11 @@ async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestA assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project] <== selected", " Thread 1",] + vec![ + // + "v [my-project] <== selected", + " Thread 1", + ] ); } @@ -1179,7 +1246,11 @@ async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContex assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Thread 1"] + vec![ + // + "v [my-project]", + " Thread 1", + ] ); // Focus sidebar and manually select the header (index 0). Press left to collapse. 
@@ -1193,7 +1264,10 @@ async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContex assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["> [my-project] <== selected"] + vec![ + // + "> [my-project] <== selected", + ] ); // Press right to expand @@ -1202,7 +1276,11 @@ async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContex assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project] <== selected", " Thread 1",] + vec![ + // + "v [my-project] <== selected", + " Thread 1", + ] ); // Press right again on already-expanded header moves selection down @@ -1229,7 +1307,11 @@ async fn test_keyboard_collapse_from_child_selects_parent(cx: &mut TestAppContex assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Thread 1 <== selected",] + vec![ + // + "v [my-project]", + " Thread 1 <== selected", + ] ); // Pressing left on a child collapses the parent group and selects it @@ -1239,7 +1321,10 @@ async fn test_keyboard_collapse_from_child_selects_parent(cx: &mut TestAppContex assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["> [my-project] <== selected"] + vec![ + // + "> [my-project] <== selected", + ] ); } @@ -1253,7 +1338,10 @@ async fn test_keyboard_navigation_on_empty_list(cx: &mut TestAppContext) { // An empty project has only the header. 
assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [empty-project]"] + vec![ + // + "v [empty-project]", + ] ); // Focus sidebar — focus_in does not set a selection @@ -1385,7 +1473,12 @@ async fn test_parallel_threads_shown_with_live_status(cx: &mut TestAppContext) { entries[1..].sort(); assert_eq!( entries, - vec!["v [my-project]", " Hello *", " Hello * (running)",] + vec![ + // + "v [my-project]", + " Hello * (active)", + " Hello * (running)", + ] ); } @@ -1478,7 +1571,11 @@ async fn test_background_thread_completion_triggers_notification(cx: &mut TestAp // Thread A is still running; no notification yet. assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project-a]", " Hello * (running)",] + vec![ + // + "v [project-a]", + " Hello * (running) (active)", + ] ); // Complete thread A's turn (transition Running → Completed). @@ -1488,7 +1585,11 @@ async fn test_background_thread_completion_triggers_notification(cx: &mut TestAp // The completed background thread shows a notification indicator. assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project-a]", " Hello * (!)",] + vec![ + // + "v [project-a]", + " Hello * (!) (active)", + ] ); } @@ -1528,6 +1629,7 @@ async fn test_search_narrows_visible_threads_to_matches(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [my-project]", " Fix crash in project panel", " Add inline diff view", @@ -1540,7 +1642,11 @@ async fn test_search_narrows_visible_threads_to_matches(cx: &mut TestAppContext) type_in_search(&sidebar, "diff", cx); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Add inline diff view <== selected",] + vec![ + // + "v [my-project]", + " Add inline diff view <== selected", + ] ); // User changes query to something with no matches — list is empty. 
@@ -1575,6 +1681,7 @@ async fn test_search_matches_regardless_of_case(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [my-project]", " Fix Crash In Project Panel <== selected", ] @@ -1585,6 +1692,7 @@ async fn test_search_matches_regardless_of_case(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [my-project]", " Fix Crash In Project Panel <== selected", ] @@ -1615,7 +1723,12 @@ async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContex // Confirm the full list is showing. assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Alpha thread", " Beta thread",] + vec![ + // + "v [my-project]", + " Alpha thread", + " Beta thread", + ] ); // User types a search query to filter down. @@ -1623,7 +1736,11 @@ async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContex type_in_search(&sidebar, "alpha", cx); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Alpha thread <== selected",] + vec![ + // + "v [my-project]", + " Alpha thread <== selected", + ] ); // User presses Escape — filter clears, full list is restored. 
@@ -1633,6 +1750,7 @@ async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContex assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [my-project]", " Alpha thread <== selected", " Beta thread", @@ -1689,6 +1807,7 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [project-a]", " Fix bug in sidebar", " Add tests for editor", @@ -1699,7 +1818,11 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC type_in_search(&sidebar, "sidebar", cx); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project-a]", " Fix bug in sidebar <== selected",] + vec![ + // + "v [project-a]", + " Fix bug in sidebar <== selected", + ] ); // "typo" only matches in the second workspace — the first header disappears. @@ -1715,6 +1838,7 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [project-a]", " Fix bug in sidebar <== selected", " Add tests for editor", @@ -1774,6 +1898,7 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [alpha-project]", " Fix bug in sidebar <== selected", " Add tests for editor", @@ -1785,7 +1910,11 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { type_in_search(&sidebar, "sidebar", cx); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [alpha-project]", " Fix bug in sidebar <== selected",] + vec![ + // + "v [alpha-project]", + " Fix bug in sidebar <== selected", + ] ); // "alpha sidebar" matches the workspace name "alpha-project" (fuzzy: a-l-p-h-a-s-i-d-e-b-a-r @@ -1795,7 +1924,11 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { type_in_search(&sidebar, "fix", cx); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v 
[alpha-project]", " Fix bug in sidebar <== selected",] + vec![ + // + "v [alpha-project]", + " Fix bug in sidebar <== selected", + ] ); // A query that matches a workspace name AND a thread in that same workspace. @@ -1804,6 +1937,7 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [alpha-project]", " Fix bug in sidebar <== selected", " Add tests for editor", @@ -1817,6 +1951,7 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [alpha-project]", " Fix bug in sidebar <== selected", " Add tests for editor", @@ -1866,7 +2001,11 @@ async fn test_search_finds_threads_hidden_behind_view_more(cx: &mut TestAppConte let filtered = visible_entries_as_strings(&sidebar, cx); assert_eq!( filtered, - vec!["v [my-project]", " Hidden gem thread <== selected",] + vec![ + // + "v [my-project]", + " Hidden gem thread <== selected", + ] ); assert!( !filtered.iter().any(|e| e.contains("View More")), @@ -1902,14 +2041,21 @@ async fn test_search_finds_threads_inside_collapsed_groups(cx: &mut TestAppConte assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["> [my-project] <== selected"] + vec![ + // + "> [my-project] <== selected", + ] ); // User types a search — the thread appears even though its group is collapsed. 
type_in_search(&sidebar, "important", cx); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["> [my-project]", " Important thread <== selected",] + vec![ + // + "> [my-project]", + " Important thread <== selected", + ] ); } @@ -1943,6 +2089,7 @@ async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [my-project]", " Fix crash in panel <== selected", " Fix lint warnings", @@ -1955,6 +2102,7 @@ async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [my-project]", " Fix crash in panel", " Fix lint warnings <== selected", @@ -1966,6 +2114,7 @@ async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [my-project]", " Fix crash in panel <== selected", " Fix lint warnings", @@ -2006,7 +2155,11 @@ async fn test_confirm_on_historical_thread_activates_workspace(cx: &mut TestAppC assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Historical Thread",] + vec![ + // + "v [my-project]", + " Historical Thread", + ] ); // Switch to workspace 1 so we can verify the confirm switches back. @@ -2067,7 +2220,12 @@ async fn test_click_clears_selection_and_focus_in_restores_it(cx: &mut TestAppCo assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Thread A", " Thread B",] + vec![ + // + "v [my-project]", + " Thread A", + " Thread B", + ] ); // Keyboard confirm preserves selection. @@ -2119,7 +2277,11 @@ async fn test_thread_title_update_propagates_to_sidebar(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Hello *"] + vec![ + // + "v [my-project]", + " Hello * (active)", + ] ); // Simulate the agent generating a title. 
The notification chain is: @@ -2141,7 +2303,11 @@ async fn test_thread_title_update_propagates_to_sidebar(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Friendly Greeting with AI *"] + vec![ + // + "v [my-project]", + " Friendly Greeting with AI * (active)", + ] ); } @@ -2292,177 +2458,816 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) { ); }); - let connection_b2 = StubAgentConnection::new(); - connection_b2.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( - acp::ContentChunk::new(DEFAULT_THREAD_TITLE.into()), - )]); - open_thread_with_connection(&panel_b, connection_b2, cx); - send_message(&panel_b, cx); - let session_id_b2 = active_session_id(&panel_b, cx); - save_test_thread_metadata(&session_id_b2, &project_b, cx).await; - cx.run_until_parked(); + let connection_b2 = StubAgentConnection::new(); + connection_b2.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new(DEFAULT_THREAD_TITLE.into()), + )]); + open_thread_with_connection(&panel_b, connection_b2, cx); + send_message(&panel_b, cx); + let session_id_b2 = active_session_id(&panel_b, cx); + save_test_thread_metadata(&session_id_b2, &project_b, cx).await; + cx.run_until_parked(); + + // Panel B is not the active workspace's panel (workspace A is + // active), so opening a thread there should not change focused_thread. + // This prevents running threads in background workspaces from causing + // the selection highlight to jump around. 
+ sidebar.read_with(cx, |sidebar, _cx| { + assert_active_thread( + sidebar, + &session_id_a, + "Opening a thread in a non-active panel should not change focused_thread", + ); + }); + + workspace_b.update_in(cx, |workspace, window, cx| { + workspace.focus_handle(cx).focus(window, cx); + }); + cx.run_until_parked(); + + sidebar.read_with(cx, |sidebar, _cx| { + assert_active_thread( + sidebar, + &session_id_a, + "Defocusing the sidebar should not change focused_thread", + ); + }); + + // Switching workspaces via the multi_workspace (simulates clicking + // a workspace header) should clear focused_thread. + multi_workspace.update_in(cx, |mw, window, cx| { + let workspace = mw.workspaces().find(|w| *w == &workspace_b).cloned(); + if let Some(workspace) = workspace { + mw.activate(workspace, window, cx); + } + }); + cx.run_until_parked(); + + sidebar.read_with(cx, |sidebar, _cx| { + assert_active_thread( + sidebar, + &session_id_b2, + "Switching workspace should seed focused_thread from the new active panel", + ); + assert!( + has_thread_entry(sidebar, &session_id_b2), + "The seeded thread should be present in the entries" + ); + }); + + // ── 8. Focusing the agent panel thread keeps focused_thread ──── + // Workspace B still has session_id_b2 loaded in the agent panel. + // Clicking into the thread (simulated by focusing its view) should + // keep focused_thread since it was already seeded on workspace switch. 
+ panel_b.update_in(cx, |panel, window, cx| { + if let Some(thread_view) = panel.active_conversation_view() { + thread_view.read(cx).focus_handle(cx).focus(window, cx); + } + }); + cx.run_until_parked(); + + sidebar.read_with(cx, |sidebar, _cx| { + assert_active_thread( + sidebar, + &session_id_b2, + "Focusing the agent panel thread should set focused_thread", + ); + assert!( + has_thread_entry(sidebar, &session_id_b2), + "The focused thread should be present in the entries" + ); + }); +} + +#[gpui::test] +async fn test_new_thread_button_works_after_adding_folder(cx: &mut TestAppContext) { + let project = init_test_project_with_agent_panel("/project-a", cx).await; + let fs = cx.update(|cx| ::global(cx)); + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); + + // Start a thread and send a message so it has history. + let connection = StubAgentConnection::new(); + connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Done".into()), + )]); + open_thread_with_connection(&panel, connection, cx); + send_message(&panel, cx); + let session_id = active_session_id(&panel, cx); + save_test_thread_metadata(&session_id, &project, cx).await; + cx.run_until_parked(); + + // Verify the thread appears in the sidebar. + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + // + "v [project-a]", + " Hello * (active)", + ] + ); + + // The "New Thread" button should NOT be in "active/draft" state + // because the panel has a thread with messages. + sidebar.read_with(cx, |sidebar, _cx| { + assert!( + matches!(&sidebar.active_entry, Some(ActiveEntry::Thread { .. })), + "Panel has a thread with messages, so active_entry should be Thread, got {:?}", + sidebar.active_entry, + ); + }); + + // Now add a second folder to the workspace, changing the path_list. 
+ fs.as_fake() + .insert_tree("/project-b", serde_json::json!({ "src": {} })) + .await; + project + .update(cx, |project, cx| { + project.find_or_create_worktree("/project-b", true, cx) + }) + .await + .expect("should add worktree"); + cx.run_until_parked(); + + // The workspace path_list is now [project-a, project-b]. The active + // thread's metadata was re-saved with the new paths by the agent panel's + // project subscription. The old [project-a] key is replaced by the new + // key since no other workspace claims it. + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + // + "v [project-a, project-b]", + " Hello * (active)", + ] + ); + + // The "New Thread" button must still be clickable (not stuck in + // "active/draft" state). Verify that `active_thread_is_draft` is + // false — the panel still has the old thread with messages. + sidebar.read_with(cx, |sidebar, _cx| { + assert!( + matches!(&sidebar.active_entry, Some(ActiveEntry::Thread { .. })), + "After adding a folder the panel still has a thread with messages, \ + so active_entry should be Thread, got {:?}", + sidebar.active_entry, + ); + }); + + // Actually click "New Thread" by calling create_new_thread and + // verify a new draft is created. + let workspace = multi_workspace.read_with(cx, |mw, _cx| mw.workspace().clone()); + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.create_new_thread(&workspace, window, cx); + }); + cx.run_until_parked(); + + // After creating a new thread, the panel should now be in draft + // state (no messages on the new thread). + sidebar.read_with(cx, |sidebar, _cx| { + assert_active_draft( + sidebar, + &workspace, + "After creating a new thread active_entry should be Draft", + ); + }); +} + +#[gpui::test] +async fn test_worktree_add_and_remove_migrates_threads(cx: &mut TestAppContext) { + // When a worktree is added to a project, the project group key changes + // and all historical threads should be migrated to the new key. 
Removing + // the worktree should migrate them back. + let (_fs, project) = init_multi_project_test(&["/project-a", "/project-b"], cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Save two threads against the initial project group [/project-a]. + save_n_test_threads(2, &project, cx).await; + sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + // + "v [project-a]", + " Thread 2", + " Thread 1", + ] + ); + + // Verify the metadata store has threads under the old key. + let old_key_paths = PathList::new(&[PathBuf::from("/project-a")]); + cx.update(|_window, cx| { + let store = ThreadMetadataStore::global(cx).read(cx); + assert_eq!( + store.entries_for_main_worktree_path(&old_key_paths).count(), + 2, + "should have 2 threads under old key before add" + ); + }); + + // Add a second worktree to the same project. + project + .update(cx, |project, cx| { + project.find_or_create_worktree("/project-b", true, cx) + }) + .await + .expect("should add worktree"); + cx.run_until_parked(); + + // The project group key should now be [/project-a, /project-b]. + let new_key_paths = PathList::new(&[PathBuf::from("/project-a"), PathBuf::from("/project-b")]); + + // Verify multi-workspace state: exactly one project group key, the new one. + multi_workspace.read_with(cx, |mw, _cx| { + let keys: Vec<_> = mw.project_group_keys().cloned().collect(); + assert_eq!( + keys.len(), + 1, + "should have exactly 1 project group key after add" + ); + assert_eq!( + keys[0].path_list(), + &new_key_paths, + "the key should be the new combined path list" + ); + }); + + // Verify threads were migrated to the new key. 
+ cx.update(|_window, cx| { + let store = ThreadMetadataStore::global(cx).read(cx); + assert_eq!( + store.entries_for_main_worktree_path(&old_key_paths).count(), + 0, + "should have 0 threads under old key after migration" + ); + assert_eq!( + store.entries_for_main_worktree_path(&new_key_paths).count(), + 2, + "should have 2 threads under new key after migration" + ); + }); + + // Sidebar should show threads under the new header. + sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + // + "v [project-a, project-b]", + " Thread 2", + " Thread 1", + ] + ); + + // Now remove the second worktree. + let worktree_id = project.read_with(cx, |project, cx| { + project + .visible_worktrees(cx) + .find(|wt| wt.read(cx).abs_path().as_ref() == Path::new("/project-b")) + .map(|wt| wt.read(cx).id()) + .expect("should find project-b worktree") + }); + project.update(cx, |project, cx| { + project.remove_worktree(worktree_id, cx); + }); + cx.run_until_parked(); + + // The key should revert to [/project-a]. + multi_workspace.read_with(cx, |mw, _cx| { + let keys: Vec<_> = mw.project_group_keys().cloned().collect(); + assert_eq!( + keys.len(), + 1, + "should have exactly 1 project group key after remove" + ); + assert_eq!( + keys[0].path_list(), + &old_key_paths, + "the key should revert to the original path list" + ); + }); + + // Threads should be migrated back to the old key. 
+ cx.update(|_window, cx| { + let store = ThreadMetadataStore::global(cx).read(cx); + assert_eq!( + store.entries_for_main_worktree_path(&new_key_paths).count(), + 0, + "should have 0 threads under new key after revert" + ); + assert_eq!( + store.entries_for_main_worktree_path(&old_key_paths).count(), + 2, + "should have 2 threads under old key after revert" + ); + }); + + sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + // + "v [project-a]", + " Thread 2", + " Thread 1", + ] + ); +} + +#[gpui::test] +async fn test_worktree_add_key_collision_removes_duplicate_workspace(cx: &mut TestAppContext) { + // When a worktree is added to workspace A and the resulting key matches + // an existing workspace B's key (and B has the same root paths), B + // should be removed as a true duplicate. + let (fs, project_a) = init_multi_project_test(&["/project-a", "/project-b"], cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Save a thread against workspace A [/project-a]. + save_named_thread_metadata("thread-a", "Thread A", &project_a, cx).await; + + // Create workspace B with both worktrees [/project-a, /project-b]. + let project_b = project::Project::test( + fs.clone() as Arc, + ["/project-a".as_ref(), "/project-b".as_ref()], + cx, + ) + .await; + let workspace_b = multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_b.clone(), window, cx) + }); + cx.run_until_parked(); + + // Switch back to workspace A so it's the active workspace when the collision happens. 
+ let workspace_a = + multi_workspace.read_with(cx, |mw, _| mw.workspaces().next().unwrap().clone()); + multi_workspace.update_in(cx, |mw, window, cx| { + mw.activate(workspace_a, window, cx); + }); + cx.run_until_parked(); + + // Save a thread against workspace B [/project-a, /project-b]. + save_named_thread_metadata("thread-b", "Thread B", &project_b, cx).await; + + sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); + cx.run_until_parked(); + + // Both project groups should be visible. + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + // + "v [project-a, project-b]", + " Thread B", + "v [project-a]", + " Thread A", + ] + ); + + let workspace_b_id = workspace_b.entity_id(); + + // Now add /project-b to workspace A's project, causing a key collision. + project_a + .update(cx, |project, cx| { + project.find_or_create_worktree("/project-b", true, cx) + }) + .await + .expect("should add worktree"); + cx.run_until_parked(); + + // Workspace B should have been removed (true duplicate — same root paths). + multi_workspace.read_with(cx, |mw, _cx| { + let workspace_ids: Vec<_> = mw.workspaces().map(|ws| ws.entity_id()).collect(); + assert!( + !workspace_ids.contains(&workspace_b_id), + "workspace B should have been removed after key collision" + ); + }); + + // There should be exactly one project group key now. + let combined_paths = PathList::new(&[PathBuf::from("/project-a"), PathBuf::from("/project-b")]); + multi_workspace.read_with(cx, |mw, _cx| { + let keys: Vec<_> = mw.project_group_keys().cloned().collect(); + assert_eq!( + keys.len(), + 1, + "should have exactly 1 project group key after collision" + ); + assert_eq!( + keys[0].path_list(), + &combined_paths, + "the remaining key should be the combined paths" + ); + }); + + // Both threads should be visible under the merged group. 
+ sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + // + "v [project-a, project-b]", + " Thread A", + " Thread B", + ] + ); +} + +#[gpui::test] +async fn test_worktree_collision_keeps_active_workspace(cx: &mut TestAppContext) { + // When workspace A adds a folder that makes it collide with workspace B, + // and B is the *active* workspace, A (the incoming one) should be + // dropped so the user stays on B. A linked worktree sibling of A + // should migrate into B's group. + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + // Set up /project-a with a linked worktree. + fs.insert_tree( + "/project-a", + serde_json::json!({ + ".git": { + "worktrees": { + "feature": { + "commondir": "../../", + "HEAD": "ref: refs/heads/feature", + }, + }, + }, + "src": {}, + }), + ) + .await; + fs.insert_tree( + "/wt-feature", + serde_json::json!({ + ".git": "gitdir: /project-a/.git/worktrees/feature", + "src": {}, + }), + ) + .await; + fs.add_linked_worktree_for_repo( + Path::new("/project-a/.git"), + false, + git::repository::Worktree { + path: PathBuf::from("/wt-feature"), + ref_name: Some("refs/heads/feature".into()), + sha: "aaa".into(), + is_main: false, + }, + ) + .await; + fs.insert_tree("/project-b", serde_json::json!({ ".git": {}, "src": {} })) + .await; + cx.update(|cx| ::set_global(fs.clone(), cx)); + + let project_a = project::Project::test(fs.clone(), ["/project-a".as_ref()], cx).await; + project_a.update(cx, |p, cx| p.git_scans_complete(cx)).await; + + // Linked worktree sibling of A. + let project_wt = project::Project::test(fs.clone(), ["/wt-feature".as_ref()], cx).await; + project_wt + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + + // Workspace B has both folders already. 
+ let project_b = project::Project::test( + fs.clone() as Arc, + ["/project-a".as_ref(), "/project-b".as_ref()], + cx, + ) + .await; + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Add agent panels to all workspaces. + let workspace_a_entity = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + add_agent_panel(&workspace_a_entity, cx); + + // Add the linked worktree workspace (sibling of A). + let workspace_wt = multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_wt.clone(), window, cx) + }); + add_agent_panel(&workspace_wt, cx); + cx.run_until_parked(); + + // Add workspace B (will become active). + let workspace_b = multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_b.clone(), window, cx) + }); + add_agent_panel(&workspace_b, cx); + cx.run_until_parked(); + + // Save threads in each group. + save_named_thread_metadata("thread-a", "Thread A", &project_a, cx).await; + save_thread_metadata_with_main_paths( + "thread-wt", + "Worktree Thread", + PathList::new(&[PathBuf::from("/wt-feature")]), + PathList::new(&[PathBuf::from("/project-a")]), + cx, + ); + save_named_thread_metadata("thread-b", "Thread B", &project_b, cx).await; + + sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); + cx.run_until_parked(); + + // B is active, A and wt-feature are in one group, B in another. 
+ assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspace().entity_id()), + workspace_b.entity_id(), + "workspace B should be active" + ); + multi_workspace.read_with(cx, |mw, _cx| { + assert_eq!(mw.project_group_keys().count(), 2, "should have 2 groups"); + assert_eq!(mw.workspaces().count(), 3, "should have 3 workspaces"); + }); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + // + "v [project-a, project-b]", + " [~ Draft] (active)", + " Thread B", + "v [project-a]", + " Thread A", + " Worktree Thread {wt-feature}", + ] + ); + + let workspace_a = multi_workspace.read_with(cx, |mw, _| { + mw.workspaces() + .find(|ws| { + ws.entity_id() != workspace_b.entity_id() + && ws.entity_id() != workspace_wt.entity_id() + }) + .unwrap() + .clone() + }); + + // Add /project-b to workspace A's project, causing a collision with B. + project_a + .update(cx, |project, cx| { + project.find_or_create_worktree("/project-b", true, cx) + }) + .await + .expect("should add worktree"); + cx.run_until_parked(); + + // Workspace A (the incoming duplicate) should have been dropped. + multi_workspace.read_with(cx, |mw, _cx| { + let workspace_ids: Vec<_> = mw.workspaces().map(|ws| ws.entity_id()).collect(); + assert!( + !workspace_ids.contains(&workspace_a.entity_id()), + "workspace A should have been dropped" + ); + }); + + // The active workspace should still be B. + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspace().entity_id()), + workspace_b.entity_id(), + "workspace B should still be active" + ); + + // The linked worktree sibling should have migrated into B's group + // (it got the folder add and now shares the same key). 
+ multi_workspace.read_with(cx, |mw, _cx| { + let workspace_ids: Vec<_> = mw.workspaces().map(|ws| ws.entity_id()).collect(); + assert!( + workspace_ids.contains(&workspace_wt.entity_id()), + "linked worktree workspace should still exist" + ); + assert_eq!( + mw.project_group_keys().count(), + 1, + "should have 1 group after merge" + ); + assert_eq!( + mw.workspaces().count(), + 2, + "should have 2 workspaces (B + linked worktree)" + ); + }); + + // The linked worktree workspace should have gotten the new folder. + let wt_worktree_count = + project_wt.read_with(cx, |project, cx| project.visible_worktrees(cx).count()); + assert_eq!( + wt_worktree_count, 2, + "linked worktree project should have gotten /project-b" + ); + + // After: everything merged under one group. Thread A migrated, + // worktree thread shows its chip, B's thread and draft remain. + sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + // + "v [project-a, project-b]", + " [~ Draft] (active)", + " [+ New Thread {project-a:wt-feature}]", + " Thread A", + " Worktree Thread {project-a:wt-feature}", + " Thread B", + ] + ); +} + +#[gpui::test] +async fn test_worktree_add_syncs_linked_worktree_sibling(cx: &mut TestAppContext) { + // When a worktree is added to the main workspace, a linked worktree + // sibling (different root paths, same project group key) should also + // get the new folder added to its project. 
+ init_test(cx); + let fs = FakeFs::new(cx.executor()); + + fs.insert_tree( + "/project", + serde_json::json!({ + ".git": { + "worktrees": { + "feature": { + "commondir": "../../", + "HEAD": "ref: refs/heads/feature", + }, + }, + }, + "src": {}, + }), + ) + .await; + + fs.insert_tree( + "/wt-feature", + serde_json::json!({ + ".git": "gitdir: /project/.git/worktrees/feature", + "src": {}, + }), + ) + .await; + + fs.add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { + path: PathBuf::from("/wt-feature"), + ref_name: Some("refs/heads/feature".into()), + sha: "aaa".into(), + is_main: false, + }, + ) + .await; + + // Create a second independent project to add as a folder later. + fs.insert_tree( + "/other-project", + serde_json::json!({ ".git": {}, "src": {} }), + ) + .await; - // Panel B is not the active workspace's panel (workspace A is - // active), so opening a thread there should not change focused_thread. - // This prevents running threads in background workspaces from causing - // the selection highlight to jump around. 
- sidebar.read_with(cx, |sidebar, _cx| { - assert_active_thread( - sidebar, - &session_id_a, - "Opening a thread in a non-active panel should not change focused_thread", - ); - }); + cx.update(|cx| ::set_global(fs.clone(), cx)); - workspace_b.update_in(cx, |workspace, window, cx| { - workspace.focus_handle(cx).focus(window, cx); - }); - cx.run_until_parked(); + let main_project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await; + let worktree_project = project::Project::test(fs.clone(), ["/wt-feature".as_ref()], cx).await; - sidebar.read_with(cx, |sidebar, _cx| { - assert_active_thread( - sidebar, - &session_id_a, - "Defocusing the sidebar should not change focused_thread", - ); - }); + main_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + worktree_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; - // Switching workspaces via the multi_workspace (simulates clicking - // a workspace header) should clear focused_thread. - multi_workspace.update_in(cx, |mw, window, cx| { - let workspace = mw.workspaces().find(|w| *w == &workspace_b).cloned(); - if let Some(workspace) = workspace { - mw.activate(workspace, window, cx); - } + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(main_project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Add agent panel to the main workspace. + let main_workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + add_agent_panel(&main_workspace, cx); + + // Open the linked worktree as a separate workspace. 
+ let wt_workspace = multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(worktree_project.clone(), window, cx) }); + add_agent_panel(&wt_workspace, cx); cx.run_until_parked(); - sidebar.read_with(cx, |sidebar, _cx| { - assert_active_thread( - sidebar, - &session_id_b2, - "Switching workspace should seed focused_thread from the new active panel", - ); - assert!( - has_thread_entry(sidebar, &session_id_b2), - "The seeded thread should be present in the entries" + // Both workspaces should share the same project group key [/project]. + multi_workspace.read_with(cx, |mw, _cx| { + assert_eq!( + mw.project_group_keys().count(), + 1, + "should have 1 project group key before add" ); + assert_eq!(mw.workspaces().count(), 2, "should have 2 workspaces"); }); - // ── 8. Focusing the agent panel thread keeps focused_thread ──── - // Workspace B still has session_id_b2 loaded in the agent panel. - // Clicking into the thread (simulated by focusing its view) should - // keep focused_thread since it was already seeded on workspace switch. - panel_b.update_in(cx, |panel, window, cx| { - if let Some(thread_view) = panel.active_conversation_view() { - thread_view.read(cx).focus_handle(cx).focus(window, cx); - } - }); - cx.run_until_parked(); + // Save threads against each workspace. + save_named_thread_metadata("main-thread", "Main Thread", &main_project, cx).await; + save_named_thread_metadata("wt-thread", "Worktree Thread", &worktree_project, cx).await; - sidebar.read_with(cx, |sidebar, _cx| { - assert_active_thread( - sidebar, - &session_id_b2, - "Focusing the agent panel thread should set focused_thread", - ); - assert!( - has_thread_entry(sidebar, &session_id_b2), - "The focused thread should be present in the entries" + // Verify both threads are under the old key [/project]. 
+ let old_key_paths = PathList::new(&[PathBuf::from("/project")]); + cx.update(|_window, cx| { + let store = ThreadMetadataStore::global(cx).read(cx); + assert_eq!( + store.entries_for_main_worktree_path(&old_key_paths).count(), + 2, + "should have 2 threads under old key before add" ); }); -} - -#[gpui::test] -async fn test_new_thread_button_works_after_adding_folder(cx: &mut TestAppContext) { - let project = init_test_project_with_agent_panel("/project-a", cx).await; - let fs = cx.update(|cx| ::global(cx)); - let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); - let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); - // Start a thread and send a message so it has history. - let connection = StubAgentConnection::new(); - connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( - acp::ContentChunk::new("Done".into()), - )]); - open_thread_with_connection(&panel, connection, cx); - send_message(&panel, cx); - let session_id = active_session_id(&panel, cx); - save_test_thread_metadata(&session_id, &project, cx).await; + sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); cx.run_until_parked(); - // Verify the thread appears in the sidebar. assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project-a]", " Hello *",] + vec![ + // + "v [project]", + " [~ Draft {wt-feature}] (active)", + " Worktree Thread {wt-feature}", + " Main Thread", + ] ); - // The "New Thread" button should NOT be in "active/draft" state - // because the panel has a thread with messages. - sidebar.read_with(cx, |sidebar, _cx| { - assert!( - matches!(&sidebar.active_entry, Some(ActiveEntry::Thread { .. })), - "Panel has a thread with messages, so active_entry should be Thread, got {:?}", - sidebar.active_entry, - ); - }); - - // Now add a second folder to the workspace, changing the path_list. 
- fs.as_fake() - .insert_tree("/project-b", serde_json::json!({ "src": {} })) - .await; - project + // Add /other-project as a folder to the main workspace. + main_project .update(cx, |project, cx| { - project.find_or_create_worktree("/project-b", true, cx) + project.find_or_create_worktree("/other-project", true, cx) }) .await .expect("should add worktree"); cx.run_until_parked(); - // The workspace path_list is now [project-a, project-b]. The active - // thread's metadata was re-saved with the new paths by the agent panel's - // project subscription, so it stays visible under the updated group. - // The old [project-a] group persists in the sidebar (empty) because - // project_group_keys is append-only. + // The linked worktree workspace should have gotten the new folder too. + let wt_worktree_count = + worktree_project.read_with(cx, |project, cx| project.visible_worktrees(cx).count()); assert_eq!( - visible_entries_as_strings(&sidebar, cx), - vec![ - "v [project-a, project-b]", // - " Hello *", - "v [project-a]", - ] + wt_worktree_count, 2, + "linked worktree project should have gotten the new folder" ); - // The "New Thread" button must still be clickable (not stuck in - // "active/draft" state). Verify that `active_thread_is_draft` is - // false — the panel still has the old thread with messages. - sidebar.read_with(cx, |sidebar, _cx| { - assert!( - matches!(&sidebar.active_entry, Some(ActiveEntry::Thread { .. })), - "After adding a folder the panel still has a thread with messages, \ - so active_entry should be Thread, got {:?}", - sidebar.active_entry, + // Both workspaces should still exist under one key. + multi_workspace.read_with(cx, |mw, _cx| { + assert_eq!(mw.workspaces().count(), 2, "both workspaces should survive"); + assert_eq!( + mw.project_group_keys().count(), + 1, + "should still have 1 project group key" ); }); - // Actually click "New Thread" by calling create_new_thread and - // verify a new draft is created. 
- let workspace = multi_workspace.read_with(cx, |mw, _cx| mw.workspace().clone()); - sidebar.update_in(cx, |sidebar, window, cx| { - sidebar.create_new_thread(&workspace, window, cx); + // Threads should have been migrated to the new key. + let new_key_paths = + PathList::new(&[PathBuf::from("/other-project"), PathBuf::from("/project")]); + cx.update(|_window, cx| { + let store = ThreadMetadataStore::global(cx).read(cx); + assert_eq!( + store.entries_for_main_worktree_path(&old_key_paths).count(), + 0, + "should have 0 threads under old key after migration" + ); + assert_eq!( + store.entries_for_main_worktree_path(&new_key_paths).count(), + 2, + "should have 2 threads under new key after migration" + ); }); + + // Both threads should still be visible in the sidebar. + sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); cx.run_until_parked(); - // After creating a new thread, the panel should now be in draft - // state (no messages on the new thread). - sidebar.read_with(cx, |sidebar, _cx| { - assert_active_draft( - sidebar, - &workspace, - "After creating a new thread active_entry should be Draft", - ); - }); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + // + "v [other-project, project]", + " [~ Draft {project:wt-feature}] (active)", + " Worktree Thread {project:wt-feature}", + " Main Thread", + ] + ); } #[gpui::test] @@ -2490,7 +3295,11 @@ async fn test_cmd_n_shows_new_thread_entry(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Hello *"] + vec![ + // + "v [my-project]", + " Hello * (active)", + ] ); // Simulate cmd-n @@ -2505,7 +3314,12 @@ async fn test_cmd_n_shows_new_thread_entry(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " [~ Draft]", " Hello *"], + vec![ + // + "v [my-project]", + " [~ Draft] (active)", + " Hello *", + ], "After Cmd-N the sidebar should show a highlighted Draft entry" ); @@ 
-2538,7 +3352,11 @@ async fn test_draft_with_server_session_shows_as_draft(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Hello *"] + vec![ + // + "v [my-project]", + " Hello * (active)", + ] ); // Open a new draft thread via a server connection. This gives the @@ -2550,7 +3368,12 @@ async fn test_draft_with_server_session_shows_as_draft(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " [~ Draft]", " Hello *"], + vec![ + // + "v [my-project]", + " [~ Draft] (active)", + " Hello *", + ], ); let workspace = multi_workspace.read_with(cx, |mw, _cx| mw.workspace().clone()); @@ -2644,7 +3467,11 @@ async fn test_cmd_n_shows_new_thread_entry_in_absorbed_worktree(cx: &mut TestApp assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project]", " Hello {wt-feature-a} *"] + vec![ + // + "v [project]", + " Hello {wt-feature-a} * (active)", + ] ); // Simulate Cmd-N in the worktree workspace. 
@@ -2659,9 +3486,10 @@ async fn test_cmd_n_shows_new_thread_entry_in_absorbed_worktree(cx: &mut TestApp assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [project]", - " [~ Draft {wt-feature-a}]", - " Hello {wt-feature-a} *" + " [~ Draft {wt-feature-a}] (active)", + " Hello {wt-feature-a} *", ], "After Cmd-N in an absorbed worktree, the sidebar should show \ a highlighted Draft entry under the main repo header" @@ -2736,7 +3564,11 @@ async fn test_search_matches_worktree_name(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project]", " Fix Bug {rosewood} <== selected"], + vec![ + // + "v [project]", + " Fix Bug {rosewood} <== selected", + ], ); } @@ -2757,16 +3589,28 @@ async fn test_git_worktree_added_live_updates_sidebar(cx: &mut TestAppContext) { cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - // Save a thread against a worktree path that doesn't exist yet. - save_named_thread_metadata("wt-thread", "Worktree Thread", &worktree_project, cx).await; + // Save a thread against a worktree path with the correct main + // worktree association (as if the git state had been resolved). + save_thread_metadata_with_main_paths( + "wt-thread", + "Worktree Thread", + PathList::new(&[PathBuf::from("/wt/rosewood")]), + PathList::new(&[PathBuf::from("/project")]), + cx, + ); multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); - // Thread is not visible yet — no worktree knows about this path. + // Thread is visible because its main_worktree_paths match the group. + // The chip name is derived from the path even before git discovery. assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project]"] + vec![ + // + "v [project]", + " Worktree Thread {rosewood}", + ] ); // Now add the worktree to the git state and trigger a rescan. 
@@ -2787,7 +3631,11 @@ async fn test_git_worktree_added_live_updates_sidebar(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project]", " Worktree Thread {rosewood}",] + vec![ + // + "v [project]", + " Worktree Thread {rosewood}", + ] ); } @@ -2857,6 +3705,7 @@ async fn test_two_worktree_workspaces_absorbed_when_main_added(cx: &mut TestAppC assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [project]", " Thread A {wt-feature-a}", " Thread B {wt-feature-b}", @@ -2878,6 +3727,7 @@ async fn test_two_worktree_workspaces_absorbed_when_main_added(cx: &mut TestAppC assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [project]", " Thread A {wt-feature-a}", " Thread B {wt-feature-b}", @@ -2953,6 +3803,7 @@ async fn test_threadless_workspace_shows_new_thread_with_worktree_chip(cx: &mut assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [project]", " [+ New Thread {wt-feature-b}]", " Thread A {wt-feature-a}", @@ -3032,8 +3883,9 @@ async fn test_multi_worktree_thread_shows_multiple_chips(cx: &mut TestAppContext assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [project_a, project_b]", - " Cross Worktree Thread {olivetti}, {selectric}", + " Cross Worktree Thread {project_a:olivetti}, {project_b:selectric}", ] ); } @@ -3105,6 +3957,7 @@ async fn test_same_named_worktree_chips_are_deduplicated(cx: &mut TestAppContext assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [project_a, project_b]", " Same Branch Thread {olivetti}", ] @@ -3209,8 +4062,9 @@ async fn test_absorbed_worktree_running_thread_shows_live_status(cx: &mut TestAp assert_eq!( entries, vec![ + // "v [project]", - " [~ Draft]", + " [~ Draft] (active)", " Hello {wt-feature-a} * (running)", ] ); @@ -3296,8 +4150,9 @@ async fn test_absorbed_worktree_completion_triggers_notification(cx: &mut TestAp assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [project]", - " [~ Draft]", + 
" [~ Draft] (active)", " Hello {wt-feature-a} * (running)", ] ); @@ -3307,7 +4162,12 @@ async fn test_absorbed_worktree_completion_triggers_notification(cx: &mut TestAp assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project]", " [~ Draft]", " Hello {wt-feature-a} * (!)",] + vec![ + // + "v [project]", + " [~ Draft] (active)", + " Hello {wt-feature-a} * (!)", + ] ); } @@ -3363,7 +4223,11 @@ async fn test_clicking_worktree_thread_opens_workspace_when_none_exists(cx: &mut // Thread should appear under the main repo with a worktree chip. assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project]", " WT Thread {wt-feature-a}"], + vec![ + // + "v [project]", + " WT Thread {wt-feature-a}", + ], ); // Only 1 workspace should exist. @@ -3452,7 +4316,11 @@ async fn test_clicking_worktree_thread_does_not_briefly_render_as_separate_proje assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project]", " WT Thread {wt-feature-a}"], + vec![ + // + "v [project]", + " WT Thread {wt-feature-a}", + ], ); focus_sidebar(&sidebar, cx); @@ -4495,6 +5363,7 @@ async fn test_linked_worktree_threads_not_duplicated_across_groups(cx: &mut Test assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [other, project]", "v [project]", " Worktree Thread {wt-feature-a}", @@ -6282,19 +7151,23 @@ mod property_test { SwitchToThread { index: usize }, SwitchToProjectGroup { index: usize }, AddLinkedWorktree { project_group_index: usize }, + AddWorktreeToProject { project_group_index: usize }, + RemoveWorktreeFromProject { project_group_index: usize }, } - // Distribution (out of 20 slots): - // SaveThread: 5 slots (~25%) - // SaveWorktreeThread: 2 slots (~10%) - // ToggleAgentPanel: 1 slot (~5%) - // CreateDraftThread: 1 slot (~5%) - // AddProject: 1 slot (~5%) - // ArchiveThread: 2 slots (~10%) - // SwitchToThread: 2 slots (~10%) - // SwitchToProjectGroup: 2 slots (~10%) - // AddLinkedWorktree: 4 slots (~20%) - const DISTRIBUTION_SLOTS: u32 = 20; 
+ // Distribution (out of 24 slots): + // SaveThread: 5 slots (~21%) + // SaveWorktreeThread: 2 slots (~8%) + // ToggleAgentPanel: 1 slot (~4%) + // CreateDraftThread: 1 slot (~4%) + // AddProject: 1 slot (~4%) + // ArchiveThread: 2 slots (~8%) + // SwitchToThread: 2 slots (~8%) + // SwitchToProjectGroup: 2 slots (~8%) + // AddLinkedWorktree: 4 slots (~17%) + // AddWorktreeToProject: 2 slots (~8%) + // RemoveWorktreeFromProject: 2 slots (~8%) + const DISTRIBUTION_SLOTS: u32 = 24; impl TestState { fn generate_operation(&self, raw: u32, project_group_count: usize) -> Operation { @@ -6336,6 +7209,18 @@ mod property_test { 16..=19 => Operation::SaveThread { project_group_index: extra % project_group_count, }, + 20..=21 if project_group_count > 0 => Operation::AddWorktreeToProject { + project_group_index: extra % project_group_count, + }, + 20..=21 => Operation::SaveThread { + project_group_index: extra % project_group_count, + }, + 22..=23 if project_group_count > 0 => Operation::RemoveWorktreeFromProject { + project_group_index: extra % project_group_count, + }, + 22..=23 => Operation::SaveThread { + project_group_index: extra % project_group_count, + }, _ => unreachable!(), } } @@ -6593,6 +7478,57 @@ mod property_test { main_workspace_path: main_path.clone(), }); } + Operation::AddWorktreeToProject { + project_group_index, + } => { + let workspace = multi_workspace.read_with(cx, |mw, cx| { + let key = mw.project_group_keys().nth(project_group_index).unwrap(); + mw.workspaces_for_project_group(key, cx).next().cloned() + }); + let Some(workspace) = workspace else { return }; + let project = workspace.read_with(cx, |ws, _| ws.project().clone()); + + let new_path = state.next_workspace_path(); + state + .fs + .insert_tree(&new_path, serde_json::json!({ ".git": {}, "src": {} })) + .await; + + let result = project + .update(cx, |project, cx| { + project.find_or_create_worktree(&new_path, true, cx) + }) + .await; + if result.is_err() { + return; + } + cx.run_until_parked(); 
+ } + Operation::RemoveWorktreeFromProject { + project_group_index, + } => { + let workspace = multi_workspace.read_with(cx, |mw, cx| { + let key = mw.project_group_keys().nth(project_group_index).unwrap(); + mw.workspaces_for_project_group(key, cx).next().cloned() + }); + let Some(workspace) = workspace else { return }; + let project = workspace.read_with(cx, |ws, _| ws.project().clone()); + + let worktree_count = project.read_with(cx, |p, cx| p.visible_worktrees(cx).count()); + if worktree_count <= 1 { + return; + } + + let worktree_id = project.read_with(cx, |p, cx| { + p.visible_worktrees(cx).last().map(|wt| wt.read(cx).id()) + }); + if let Some(worktree_id) = worktree_id { + project.update(cx, |project, cx| { + project.remove_worktree(worktree_id, cx); + }); + cx.run_until_parked(); + } + } } } @@ -6620,6 +7556,7 @@ mod property_test { verify_all_threads_are_shown(sidebar, cx)?; verify_active_state_matches_current_workspace(sidebar, cx)?; verify_all_workspaces_are_reachable(sidebar, cx)?; + verify_workspace_group_key_integrity(sidebar, cx)?; Ok(()) } @@ -6871,6 +7808,15 @@ mod property_test { Ok(()) } + fn verify_workspace_group_key_integrity(sidebar: &Sidebar, cx: &App) -> anyhow::Result<()> { + let Some(multi_workspace) = sidebar.multi_workspace.upgrade() else { + anyhow::bail!("sidebar should still have an associated multi-workspace"); + }; + multi_workspace + .read(cx) + .assert_project_group_key_integrity(cx) + } + #[gpui::property_test(config = ProptestConfig { cases: 50, ..Default::default() diff --git a/crates/workspace/src/multi_workspace.rs b/crates/workspace/src/multi_workspace.rs index f4e8b47399e1420a4b01d380ad4a6532a0934a2d..bc9a5d59c74aa1cadc60ecbcb1f08b2afc3f3abd 100644 --- a/crates/workspace/src/multi_workspace.rs +++ b/crates/workspace/src/multi_workspace.rs @@ -101,6 +101,10 @@ pub enum MultiWorkspaceEvent { ActiveWorkspaceChanged, WorkspaceAdded(Entity), WorkspaceRemoved(EntityId), + ProjectGroupKeyChanged { + old_key: ProjectGroupKey, + 
new_key: ProjectGroupKey, + }, } pub enum SidebarEvent { @@ -302,7 +306,7 @@ pub struct MultiWorkspace { workspaces: Vec>, active_workspace: ActiveWorkspace, project_group_keys: Vec, - provisional_project_group_keys: HashMap, + workspace_group_keys: HashMap, sidebar: Option>, sidebar_open: bool, sidebar_overlay: Option, @@ -355,7 +359,7 @@ impl MultiWorkspace { Self { window_id: window.window_handle().window_id(), project_group_keys: Vec::new(), - provisional_project_group_keys: HashMap::default(), + workspace_group_keys: HashMap::default(), workspaces: Vec::new(), active_workspace: ActiveWorkspace::Transient(workspace), sidebar: None, @@ -559,19 +563,11 @@ impl MultiWorkspace { cx.subscribe_in(&project, window, { let workspace = workspace.downgrade(); move |this, _project, event, _window, cx| match event { - project::Event::WorktreeAdded(_) | project::Event::WorktreeRemoved(_) => { + project::Event::WorktreeAdded(_) + | project::Event::WorktreeRemoved(_) + | project::Event::WorktreeUpdatedRootRepoCommonDir(_) => { if let Some(workspace) = workspace.upgrade() { - this.add_project_group_key(workspace.read(cx).project_group_key(cx)); - } - } - project::Event::WorktreeUpdatedRootRepoCommonDir(_) => { - if let Some(workspace) = workspace.upgrade() { - this.maybe_clear_provisional_project_group_key(&workspace, cx); - this.add_project_group_key( - this.project_group_key_for_workspace(&workspace, cx), - ); - this.remove_stale_project_group_keys(cx); - cx.notify(); + this.handle_workspace_key_change(&workspace, cx); } } _ => {} @@ -587,7 +583,111 @@ impl MultiWorkspace { .detach(); } - pub fn add_project_group_key(&mut self, project_group_key: ProjectGroupKey) { + fn handle_workspace_key_change( + &mut self, + workspace: &Entity, + cx: &mut Context, + ) { + let workspace_id = workspace.entity_id(); + let old_key = self.project_group_key_for_workspace(workspace, cx); + let new_key = workspace.read(cx).project_group_key(cx); + + if new_key.path_list().paths().is_empty() || 
old_key == new_key { + return; + } + + let active_workspace = self.workspace().clone(); + + self.set_workspace_group_key(workspace, new_key.clone()); + + let changed_root_paths = workspace.read(cx).root_paths(cx); + let old_paths = old_key.path_list().paths(); + let new_paths = new_key.path_list().paths(); + + // Remove workspaces that already had the new key and have the same + // root paths (true duplicates that this workspace is replacing). + // + // NOTE: These are dropped without prompting for unsaved changes because + // the user explicitly added a folder that makes this workspace + // identical to the duplicate — they are intentionally overwriting it. + let duplicate_workspaces: Vec> = self + .workspaces + .iter() + .filter(|ws| { + ws.entity_id() != workspace_id + && self.project_group_key_for_workspace(ws, cx) == new_key + && ws.read(cx).root_paths(cx) == changed_root_paths + }) + .cloned() + .collect(); + + if duplicate_workspaces.contains(&active_workspace) { + // The active workspace is among the duplicates — drop the + // incoming workspace instead so the user stays where they are. + self.detach_workspace(workspace, cx); + self.workspaces.retain(|w| w != workspace); + } else { + for ws in &duplicate_workspaces { + self.detach_workspace(ws, cx); + self.workspaces.retain(|w| w != ws); + } + } + + // Propagate folder adds/removes to linked worktree siblings + // (different root paths, same old key) so they stay in the group. 
+ let group_workspaces: Vec> = self + .workspaces + .iter() + .filter(|ws| { + ws.entity_id() != workspace_id + && self.project_group_key_for_workspace(ws, cx) == old_key + }) + .cloned() + .collect(); + + for workspace in &group_workspaces { + // Pre-set this to stop later WorktreeAdded events from triggering + self.set_workspace_group_key(&workspace, new_key.clone()); + + let project = workspace.read(cx).project().clone(); + + for added_path in new_paths.iter().filter(|p| !old_paths.contains(p)) { + project + .update(cx, |project, cx| { + project.find_or_create_worktree(added_path, true, cx) + }) + .detach_and_log_err(cx); + } + + for removed_path in old_paths.iter().filter(|p| !new_paths.contains(p)) { + project.update(cx, |project, cx| { + project.remove_worktree_for_main_worktree_path(removed_path, cx); + }); + } + } + + // Restore the active workspace after removals may have shifted + // the index. If the previously active workspace was removed, + // fall back to the workspace whose key just changed. 
+ if let ActiveWorkspace::Persistent(_) = &self.active_workspace { + let target = if self.workspaces.contains(&active_workspace) { + &active_workspace + } else { + workspace + }; + if let Some(new_index) = self.workspaces.iter().position(|ws| ws == target) { + self.active_workspace = ActiveWorkspace::Persistent(new_index); + } + } + + self.remove_stale_project_group_keys(cx); + + cx.emit(MultiWorkspaceEvent::ProjectGroupKeyChanged { old_key, new_key }); + self.serialize(cx); + cx.notify(); + } + + fn add_project_group_key(&mut self, project_group_key: ProjectGroupKey) { if project_group_key.path_list().paths().is_empty() { return; } @@ -598,12 +698,12 @@ impl MultiWorkspace { self.project_group_keys.insert(0, project_group_key); } - pub fn set_provisional_project_group_key( + pub(crate) fn set_workspace_group_key( &mut self, workspace: &Entity, project_group_key: ProjectGroupKey, ) { - self.provisional_project_group_keys + self.workspace_group_keys .insert(workspace.entity_id(), project_group_key.clone()); self.add_project_group_key(project_group_key); } @@ -613,28 +713,12 @@ impl MultiWorkspace { workspace: &Entity, cx: &App, ) -> ProjectGroupKey { - self.provisional_project_group_keys + self.workspace_group_keys .get(&workspace.entity_id()) .cloned() .unwrap_or_else(|| workspace.read(cx).project_group_key(cx)) } - fn maybe_clear_provisional_project_group_key( - &mut self, - workspace: &Entity, - cx: &App, - ) { - let live_key = workspace.read(cx).project_group_key(cx); - if self - .provisional_project_group_keys - .get(&workspace.entity_id()) - .is_some_and(|key| *key == live_key) - { - self.provisional_project_group_keys - .remove(&workspace.entity_id()); - } - } - fn remove_stale_project_group_keys(&mut self, cx: &App) { let workspace_keys: HashSet = self .workspaces @@ -1045,7 +1129,6 @@ impl MultiWorkspace { self.promote_transient(old, cx); } else { self.detach_workspace(&old, cx); - cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(old.entity_id())); } } } else 
{ @@ -1056,7 +1139,6 @@ impl MultiWorkspace { }); if let Some(old) = self.active_workspace.set_transient(workspace) { self.detach_workspace(&old, cx); - cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(old.entity_id())); } } @@ -1083,7 +1165,7 @@ impl MultiWorkspace { /// Returns the index of the newly inserted workspace. fn promote_transient(&mut self, workspace: Entity, cx: &mut Context) -> usize { let project_group_key = self.project_group_key_for_workspace(&workspace, cx); - self.add_project_group_key(project_group_key); + self.set_workspace_group_key(&workspace, project_group_key); self.workspaces.push(workspace.clone()); cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace)); self.workspaces.len() - 1 @@ -1099,10 +1181,10 @@ impl MultiWorkspace { for workspace in std::mem::take(&mut self.workspaces) { if workspace != active { self.detach_workspace(&workspace, cx); - cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(workspace.entity_id())); } } self.project_group_keys.clear(); + self.workspace_group_keys.clear(); self.active_workspace = ActiveWorkspace::Transient(active); cx.notify(); } @@ -1128,7 +1210,7 @@ impl MultiWorkspace { workspace.set_multi_workspace(weak_self, cx); }); - self.add_project_group_key(project_group_key); + self.set_workspace_group_key(&workspace, project_group_key); self.workspaces.push(workspace.clone()); cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace)); cx.notify(); @@ -1136,10 +1218,12 @@ impl MultiWorkspace { } } - /// Clears session state and DB binding for a workspace that is being - /// removed or replaced. The DB row is preserved so the workspace still - /// appears in the recent-projects list. + /// Detaches a workspace: clears session state, DB binding, cached + /// group key, and emits `WorkspaceRemoved`. The DB row is preserved + /// so the workspace still appears in the recent-projects list. 
fn detach_workspace(&mut self, workspace: &Entity, cx: &mut Context) { + self.workspace_group_keys.remove(&workspace.entity_id()); + cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(workspace.entity_id())); workspace.update(cx, |workspace, _cx| { workspace.session_id.take(); workspace._schedule_serialize_workspace.take(); @@ -1313,6 +1397,46 @@ impl MultiWorkspace { tasks } + #[cfg(any(test, feature = "test-support"))] + pub fn assert_project_group_key_integrity(&self, cx: &App) -> anyhow::Result<()> { + let stored_keys: HashSet<&ProjectGroupKey> = self.project_group_keys().collect(); + + let workspace_group_keys: HashSet<&ProjectGroupKey> = + self.workspace_group_keys.values().collect(); + let extra_keys = &workspace_group_keys - &stored_keys; + anyhow::ensure!( + extra_keys.is_empty(), + "workspace_group_keys values not in project_group_keys: {:?}", + extra_keys, + ); + + let cached_ids: HashSet = self.workspace_group_keys.keys().copied().collect(); + let workspace_ids: HashSet = + self.workspaces.iter().map(|ws| ws.entity_id()).collect(); + anyhow::ensure!( + cached_ids == workspace_ids, + "workspace_group_keys entity IDs don't match workspaces.\n\ + only in cache: {:?}\n\ + only in workspaces: {:?}", + &cached_ids - &workspace_ids, + &workspace_ids - &cached_ids, + ); + + for workspace in self.workspaces() { + let live_key = workspace.read(cx).project_group_key(cx); + let cached_key = &self.workspace_group_keys[&workspace.entity_id()]; + anyhow::ensure!( + *cached_key == live_key, + "workspace {:?} has live key {:?} but cached key {:?}", + workspace.entity_id(), + live_key, + cached_key, + ); + } + + Ok(()) + } + #[cfg(any(test, feature = "test-support"))] pub fn set_random_database_id(&mut self, cx: &mut Context) { self.workspace().update(cx, |workspace, _cx| { @@ -1471,7 +1595,6 @@ impl MultiWorkspace { for workspace in &removed_workspaces { this.detach_workspace(workspace, cx); - cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(workspace.entity_id())); } let 
removed_any = !removed_workspaces.is_empty(); diff --git a/crates/workspace/src/multi_workspace_tests.rs b/crates/workspace/src/multi_workspace_tests.rs index 259346fe097826b3dcc19fb8fad0b8f07ddd0488..9cab28c0ca4ab34b2189985e898285dd82dd4f32 100644 --- a/crates/workspace/src/multi_workspace_tests.rs +++ b/crates/workspace/src/multi_workspace_tests.rs @@ -185,157 +185,3 @@ async fn test_project_group_keys_duplicate_not_added(cx: &mut TestAppContext) { ); }); } - -#[gpui::test] -async fn test_project_group_keys_on_worktree_added(cx: &mut TestAppContext) { - init_test(cx); - let fs = FakeFs::new(cx.executor()); - fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; - fs.insert_tree("/root_b", json!({ "file.txt": "" })).await; - let project = Project::test(fs, ["/root_a".as_ref()], cx).await; - - let initial_key = project.read_with(cx, |p, cx| p.project_group_key(cx)); - - let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); - - multi_workspace.update(cx, |mw, cx| { - mw.open_sidebar(cx); - }); - - // Add a second worktree to the same project. 
- let (worktree, _) = project - .update(cx, |project, cx| { - project.find_or_create_worktree("/root_b", true, cx) - }) - .await - .unwrap(); - worktree - .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete()) - .await; - cx.run_until_parked(); - - let updated_key = project.read_with(cx, |p, cx| p.project_group_key(cx)); - assert_ne!( - initial_key, updated_key, - "key should change after adding a worktree" - ); - - multi_workspace.read_with(cx, |mw, _cx| { - let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); - assert_eq!( - keys.len(), - 2, - "should have both the original and updated key" - ); - assert_eq!(*keys[0], updated_key); - assert_eq!(*keys[1], initial_key); - }); -} - -#[gpui::test] -async fn test_project_group_keys_on_worktree_removed(cx: &mut TestAppContext) { - init_test(cx); - let fs = FakeFs::new(cx.executor()); - fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; - fs.insert_tree("/root_b", json!({ "file.txt": "" })).await; - let project = Project::test(fs, ["/root_a".as_ref(), "/root_b".as_ref()], cx).await; - - let initial_key = project.read_with(cx, |p, cx| p.project_group_key(cx)); - - let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); - - multi_workspace.update(cx, |mw, cx| { - mw.open_sidebar(cx); - }); - - // Remove one worktree. 
- let worktree_b_id = project.read_with(cx, |project, cx| { - project - .worktrees(cx) - .find(|wt| wt.read(cx).root_name().as_unix_str() == "root_b") - .unwrap() - .read(cx) - .id() - }); - project.update(cx, |project, cx| { - project.remove_worktree(worktree_b_id, cx); - }); - cx.run_until_parked(); - - let updated_key = project.read_with(cx, |p, cx| p.project_group_key(cx)); - assert_ne!( - initial_key, updated_key, - "key should change after removing a worktree" - ); - - multi_workspace.read_with(cx, |mw, _cx| { - let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); - assert_eq!( - keys.len(), - 2, - "should accumulate both the original and post-removal key" - ); - assert_eq!(*keys[0], updated_key); - assert_eq!(*keys[1], initial_key); - }); -} - -#[gpui::test] -async fn test_project_group_keys_across_multiple_workspaces_and_worktree_changes( - cx: &mut TestAppContext, -) { - init_test(cx); - let fs = FakeFs::new(cx.executor()); - fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; - fs.insert_tree("/root_b", json!({ "file.txt": "" })).await; - fs.insert_tree("/root_c", json!({ "file.txt": "" })).await; - let project_a = Project::test(fs.clone(), ["/root_a".as_ref()], cx).await; - let project_b = Project::test(fs.clone(), ["/root_b".as_ref()], cx).await; - - let key_a = project_a.read_with(cx, |p, cx| p.project_group_key(cx)); - let key_b = project_b.read_with(cx, |p, cx| p.project_group_key(cx)); - - let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); - - multi_workspace.update(cx, |mw, cx| { - mw.open_sidebar(cx); - }); - - multi_workspace.update_in(cx, |mw, window, cx| { - mw.test_add_workspace(project_b, window, cx); - }); - - multi_workspace.read_with(cx, |mw, _cx| { - assert_eq!(mw.project_group_keys().count(), 2); - }); - - // Now add a worktree to project_a. This should produce a third key. 
- let (worktree, _) = project_a - .update(cx, |project, cx| { - project.find_or_create_worktree("/root_c", true, cx) - }) - .await - .unwrap(); - worktree - .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete()) - .await; - cx.run_until_parked(); - - let key_a_updated = project_a.read_with(cx, |p, cx| p.project_group_key(cx)); - assert_ne!(key_a, key_a_updated); - - multi_workspace.read_with(cx, |mw, _cx| { - let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); - assert_eq!( - keys.len(), - 3, - "should have key_a, key_b, and the updated key_a with root_c" - ); - assert_eq!(*keys[0], key_a_updated); - assert_eq!(*keys[1], key_b); - assert_eq!(*keys[2], key_a); - }); -} diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 81224c0e2db520a278bfb21429e211ba9a4f09ae..d40b7abae0c036a5cdd227ec8a547bd3c10b262c 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -9886,7 +9886,7 @@ async fn open_remote_project_inner( }); if let Some(project_group_key) = provisional_project_group_key.clone() { - multi_workspace.set_provisional_project_group_key(&new_workspace, project_group_key); + multi_workspace.set_workspace_group_key(&new_workspace, project_group_key); } multi_workspace.activate(new_workspace.clone(), window, cx); new_workspace From ca5e44fd91ed5e92978de5ef90424dad290a9833 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Thu, 9 Apr 2026 19:07:57 -0700 Subject: [PATCH 25/67] Revert "Handle changing root paths without splitting in the sidebar" (#53579) Reverts zed-industries/zed#53566 --- crates/agent_ui/src/thread_metadata_store.rs | 30 - crates/sidebar/src/sidebar.rs | 179 +-- crates/sidebar/src/sidebar_tests.rs | 1364 +++-------------- crates/workspace/src/multi_workspace.rs | 209 +-- crates/workspace/src/multi_workspace_tests.rs | 154 ++ crates/workspace/src/workspace.rs | 2 +- 6 files changed, 454 insertions(+), 1484 deletions(-) diff --git 
a/crates/agent_ui/src/thread_metadata_store.rs b/crates/agent_ui/src/thread_metadata_store.rs index 69c5377465a420b2e9f64e16139736fe04b65e5a..127f746a9edd35bc3b62b489277980868faba1c8 100644 --- a/crates/agent_ui/src/thread_metadata_store.rs +++ b/crates/agent_ui/src/thread_metadata_store.rs @@ -477,36 +477,6 @@ impl ThreadMetadataStore { } } - pub fn update_main_worktree_paths( - &mut self, - old_paths: &PathList, - new_paths: PathList, - cx: &mut Context, - ) { - let session_ids = match self.threads_by_main_paths.remove(old_paths) { - Some(ids) if !ids.is_empty() => ids, - _ => return, - }; - - let new_index = self - .threads_by_main_paths - .entry(new_paths.clone()) - .or_default(); - - for session_id in &session_ids { - new_index.insert(session_id.clone()); - - if let Some(thread) = self.threads.get_mut(session_id) { - thread.main_worktree_paths = new_paths.clone(); - self.pending_thread_ops_tx - .try_send(DbOperation::Upsert(thread.clone())) - .log_err(); - } - } - - cx.notify(); - } - pub fn create_archived_worktree( &self, worktree_path: String, diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs index 547d3bd83cf97986dbe0006a2454223fba255886..488127eb0bd04b064c2c6e3b1d8dc297ada9c477 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -283,8 +283,10 @@ impl ListEntry { } } ListEntry::ProjectHeader { key, .. } => multi_workspace - .workspaces_for_project_group(key, cx) + .workspaces() + .find(|ws| PathList::new(&ws.read(cx).root_paths(cx)) == *key.path_list()) .cloned() + .into_iter() .collect(), ListEntry::ViewMore { .. } => Vec::new(), } @@ -363,81 +365,35 @@ fn workspace_path_list(workspace: &Entity, cx: &App) -> PathList { /// /// For each path in the thread's `folder_paths`, produces a /// [`WorktreeInfo`] with a short display name, full path, and whether -/// the worktree is the main checkout or a linked git worktree. 
When -/// multiple main paths exist and a linked worktree's short name alone -/// wouldn't identify which main project it belongs to, the main project -/// name is prefixed for disambiguation (e.g. `project:feature`). -/// -/// `linked_to_main` maps linked worktree abs paths to their main repo -/// abs path, used to pick the correct prefix. Falls back to a heuristic -/// when no mapping is available. +/// the worktree is the main checkout or a linked git worktree. fn worktree_info_from_thread_paths( folder_paths: &PathList, - main_worktree_paths: &PathList, - linked_to_main: &HashMap, -) -> Vec { - let main_paths = main_worktree_paths.paths(); - - let mut infos: Vec = Vec::new(); - let mut linked_short_names: Vec<(SharedString, SharedString)> = Vec::new(); - - for path in folder_paths.paths().iter() { + group_key: &project::ProjectGroupKey, +) -> impl Iterator { + let main_paths = group_key.path_list().paths(); + folder_paths.paths().iter().filter_map(|path| { let is_main = main_paths.iter().any(|mp| mp.as_path() == path.as_path()); if is_main { - let Some(name) = path.file_name() else { - continue; - }; - infos.push(WorktreeInfo { - name: SharedString::from(name.to_string_lossy().to_string()), + let name = path.file_name()?.to_string_lossy().to_string(); + Some(WorktreeInfo { + name: SharedString::from(name), full_path: SharedString::from(path.display().to_string()), highlight_positions: Vec::new(), kind: ui::WorktreeKind::Main, - }); + }) } else { - let Some(main_path) = linked_to_main - .get(&**path) - .and_then(|main| main_paths.iter().find(|mp| mp.as_path() == main.as_path())) - .or_else(|| { - main_paths - .iter() - .find(|mp| mp.file_name() == path.file_name()) - .or(main_paths.first()) - }) - else { - continue; - }; - let short_name = linked_worktree_short_name(main_path, path).unwrap_or_default(); - let project_name = main_path - .file_name() - .map(|n| SharedString::from(n.to_string_lossy().to_string())) - .unwrap_or_default(); - 
linked_short_names.push((short_name.clone(), project_name)); - infos.push(WorktreeInfo { - name: short_name, + let main_path = main_paths + .iter() + .find(|mp| mp.file_name() == path.file_name()) + .or(main_paths.first())?; + Some(WorktreeInfo { + name: linked_worktree_short_name(main_path, path).unwrap_or_default(), full_path: SharedString::from(path.display().to_string()), highlight_positions: Vec::new(), kind: ui::WorktreeKind::Linked, - }); - } - } - - // When the group has multiple main worktree paths and the thread's - // folder paths don't all share the same short name, prefix each - // linked worktree chip with its main project name so the user knows - // which project it belongs to. - let all_same_name = infos.len() > 1 && infos.iter().all(|i| i.name == infos[0].name); - - if main_paths.len() > 1 && !all_same_name { - for (info, (_short_name, project_name)) in infos - .iter_mut() - .filter(|i| i.kind == ui::WorktreeKind::Linked) - .zip(linked_short_names.iter()) - { - info.name = SharedString::from(format!("{}:{}", project_name, info.name)); + }) } - } - - infos + }) } /// Shows a [`RemoteConnectionModal`] on the given workspace and establishes @@ -524,16 +480,6 @@ impl Sidebar { MultiWorkspaceEvent::WorkspaceRemoved(_) => { this.update_entries(cx); } - MultiWorkspaceEvent::ProjectGroupKeyChanged { old_key, new_key } => { - ThreadMetadataStore::global(cx).update(cx, |store, cx| { - store.update_main_worktree_paths( - old_key.path_list(), - new_key.path_list().clone(), - cx, - ); - }); - this.update_entries(cx); - } }, ) .detach(); @@ -966,21 +912,6 @@ impl Sidebar { .as_ref() .is_some_and(|active| group_workspaces.contains(active)); - // Build a mapping from linked worktree paths to their main - // repo path, used to correctly attribute chips. 
- let linked_to_main: HashMap = group_workspaces - .iter() - .flat_map(|ws| root_repository_snapshots(ws, cx)) - .flat_map(|snapshot| { - let main_path = snapshot.original_repo_abs_path.to_path_buf(); - snapshot - .linked_worktrees() - .iter() - .map(move |wt| (wt.path.clone(), main_path.clone())) - .collect::>() - }) - .collect(); - // Collect live thread infos from all workspaces in this group. let live_infos: Vec<_> = group_workspaces .iter() @@ -1018,28 +949,26 @@ impl Sidebar { }; // Build a ThreadEntry from a metadata row. - let make_thread_entry = - |row: ThreadMetadata, workspace: ThreadEntryWorkspace| -> ThreadEntry { - let (icon, icon_from_external_svg) = resolve_agent_icon(&row.agent_id); - let worktrees = worktree_info_from_thread_paths( - &row.folder_paths, - &row.main_worktree_paths, - &linked_to_main, - ); - ThreadEntry { - metadata: row, - icon, - icon_from_external_svg, - status: AgentThreadStatus::default(), - workspace, - is_live: false, - is_background: false, - is_title_generating: false, - highlight_positions: Vec::new(), - worktrees, - diff_stats: DiffStats::default(), - } - }; + let make_thread_entry = |row: ThreadMetadata, + workspace: ThreadEntryWorkspace| + -> ThreadEntry { + let (icon, icon_from_external_svg) = resolve_agent_icon(&row.agent_id); + let worktrees: Vec = + worktree_info_from_thread_paths(&row.folder_paths, &group_key).collect(); + ThreadEntry { + metadata: row, + icon, + icon_from_external_svg, + status: AgentThreadStatus::default(), + workspace, + is_live: false, + is_background: false, + is_title_generating: false, + highlight_positions: Vec::new(), + worktrees, + diff_stats: DiffStats::default(), + } + }; // Main code path: one query per group via main_worktree_paths. 
// The main_worktree_paths column is set on all new threads and @@ -1255,17 +1184,11 @@ impl Sidebar { if is_draft_for_group { if let Some(ActiveEntry::Draft(draft_ws)) = &self.active_entry { let ws_path_list = workspace_path_list(draft_ws, cx); - let main_worktree_paths = - draft_ws.read(cx).project_group_key(cx).path_list().clone(); - let worktrees = worktree_info_from_thread_paths( - &ws_path_list, - &main_worktree_paths, - &linked_to_main, - ); + let worktrees = worktree_info_from_thread_paths(&ws_path_list, &group_key); entries.push(ListEntry::DraftThread { key: group_key.clone(), workspace: None, - worktrees, + worktrees: worktrees.collect(), }); } } @@ -1289,14 +1212,9 @@ impl Sidebar { continue; } let ws_path_list = workspace_path_list(ws, cx); - let ws_main_paths = ws.read(cx).project_group_key(cx).path_list().clone(); - let has_linked_worktrees = worktree_info_from_thread_paths( - &ws_path_list, - &ws_main_paths, - &linked_to_main, - ) - .iter() - .any(|wt| wt.kind == ui::WorktreeKind::Linked); + let has_linked_worktrees = + worktree_info_from_thread_paths(&ws_path_list, &group_key) + .any(|wt| wt.kind == ui::WorktreeKind::Linked); if !has_linked_worktrees { continue; } @@ -1309,11 +1227,8 @@ impl Sidebar { if has_threads { continue; } - let worktrees = worktree_info_from_thread_paths( - &ws_path_list, - &ws_main_paths, - &linked_to_main, - ); + let worktrees: Vec = + worktree_info_from_thread_paths(&ws_path_list, &group_key).collect(); entries.push(ListEntry::DraftThread { key: group_key.clone(), diff --git a/crates/sidebar/src/sidebar_tests.rs b/crates/sidebar/src/sidebar_tests.rs index 420eae134e11309e4d1cc90c45335f7fce76bb22..8ced8d6f71f6d88ff24a522404417ef7db3a6a7c 100644 --- a/crates/sidebar/src/sidebar_tests.rs +++ b/crates/sidebar/src/sidebar_tests.rs @@ -251,32 +251,6 @@ fn save_thread_metadata( cx.run_until_parked(); } -fn save_thread_metadata_with_main_paths( - session_id: &str, - title: &str, - folder_paths: PathList, - main_worktree_paths: 
PathList, - cx: &mut TestAppContext, -) { - let session_id = acp::SessionId::new(Arc::from(session_id)); - let title = SharedString::from(title.to_string()); - let updated_at = chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(); - let metadata = ThreadMetadata { - session_id, - agent_id: agent::ZED_AGENT_ID.clone(), - title, - updated_at, - created_at: None, - folder_paths, - main_worktree_paths, - archived: false, - }; - cx.update(|cx| { - ThreadMetadataStore::global(cx).update(cx, |store, cx| store.save_manually(metadata, cx)); - }); - cx.run_until_parked(); -} - fn focus_sidebar(sidebar: &Entity, cx: &mut gpui::VisualTestContext) { sidebar.update_in(cx, |_, window, cx| { cx.focus_self(window); @@ -348,11 +322,6 @@ fn visible_entries_as_strings( } else { "" }; - let is_active = sidebar - .active_entry - .as_ref() - .is_some_and(|active| active.matches_entry(entry)); - let active_indicator = if is_active { " (active)" } else { "" }; match entry { ListEntry::ProjectHeader { label, @@ -369,7 +338,7 @@ fn visible_entries_as_strings( } ListEntry::Thread(thread) => { let title = thread.metadata.title.as_ref(); - let live = if thread.is_live { " *" } else { "" }; + let active = if thread.is_live { " *" } else { "" }; let status_str = match thread.status { AgentThreadStatus::Running => " (running)", AgentThreadStatus::Error => " (error)", @@ -385,7 +354,7 @@ fn visible_entries_as_strings( "" }; let worktree = format_linked_worktree_chips(&thread.worktrees); - format!(" {title}{worktree}{live}{status_str}{notified}{active_indicator}{selected}") + format!(" {title}{worktree}{active}{status_str}{notified}{selected}") } ListEntry::ViewMore { is_fully_expanded, .. 
@@ -405,7 +374,7 @@ fn visible_entries_as_strings( if workspace.is_some() { format!(" [+ New Thread{}]{}", worktree, selected) } else { - format!(" [~ Draft{}]{}{}", worktree, active_indicator, selected) + format!(" [~ Draft{}]{}", worktree, selected) } } } @@ -574,10 +543,7 @@ async fn test_single_workspace_no_threads(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [my-project]", - ] + vec!["v [my-project]"] ); } @@ -613,7 +579,6 @@ async fn test_single_workspace_with_saved_threads(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - // "v [my-project]", " Fix crash in project panel", " Add inline diff view", @@ -644,11 +609,7 @@ async fn test_workspace_lifecycle(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project-a]", - " Thread A1", - ] + vec!["v [project-a]", " Thread A1"] ); // Add a second workspace @@ -659,11 +620,7 @@ async fn test_workspace_lifecycle(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project-a]", - " Thread A1", - ] + vec!["v [project-a]", " Thread A1",] ); } @@ -682,7 +639,6 @@ async fn test_view_more_pagination(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - // "v [my-project]", " Thread 12", " Thread 11", @@ -793,11 +749,7 @@ async fn test_collapse_and_expand_group(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [my-project]", - " Thread 1", - ] + vec!["v [my-project]", " Thread 1"] ); // Collapse @@ -808,10 +760,7 @@ async fn test_collapse_and_expand_group(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "> [my-project]", - ] + vec!["> [my-project]"] ); // Expand @@ -822,11 +771,7 @@ async fn test_collapse_and_expand_group(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ 
- // - "v [my-project]", - " Thread 1", - ] + vec!["v [my-project]", " Thread 1"] ); } @@ -996,7 +941,6 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - // "v [expanded-project]", " Completed thread", " Running thread * (running) <== selected", @@ -1160,14 +1104,10 @@ async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestA assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [my-project]", - " Thread 1", - ] + vec!["v [my-project]", " Thread 1"] ); - // Focus the sidebar and select the header + // Focus the sidebar and select the header (index 0) focus_sidebar(&sidebar, cx); sidebar.update_in(cx, |sidebar, _window, _cx| { sidebar.selection = Some(0); @@ -1179,10 +1119,7 @@ async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestA assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "> [my-project] <== selected", - ] + vec!["> [my-project] <== selected"] ); // Confirm again expands the group @@ -1191,11 +1128,7 @@ async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestA assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [my-project] <== selected", - " Thread 1", - ] + vec!["v [my-project] <== selected", " Thread 1",] ); } @@ -1246,11 +1179,7 @@ async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContex assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [my-project]", - " Thread 1", - ] + vec!["v [my-project]", " Thread 1"] ); // Focus sidebar and manually select the header (index 0). Press left to collapse. 
@@ -1264,10 +1193,7 @@ async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContex assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "> [my-project] <== selected", - ] + vec!["> [my-project] <== selected"] ); // Press right to expand @@ -1276,11 +1202,7 @@ async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContex assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [my-project] <== selected", - " Thread 1", - ] + vec!["v [my-project] <== selected", " Thread 1",] ); // Press right again on already-expanded header moves selection down @@ -1307,11 +1229,7 @@ async fn test_keyboard_collapse_from_child_selects_parent(cx: &mut TestAppContex assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [my-project]", - " Thread 1 <== selected", - ] + vec!["v [my-project]", " Thread 1 <== selected",] ); // Pressing left on a child collapses the parent group and selects it @@ -1321,10 +1239,7 @@ async fn test_keyboard_collapse_from_child_selects_parent(cx: &mut TestAppContex assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "> [my-project] <== selected", - ] + vec!["> [my-project] <== selected"] ); } @@ -1338,10 +1253,7 @@ async fn test_keyboard_navigation_on_empty_list(cx: &mut TestAppContext) { // An empty project has only the header. 
assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [empty-project]", - ] + vec!["v [empty-project]"] ); // Focus sidebar — focus_in does not set a selection @@ -1473,12 +1385,7 @@ async fn test_parallel_threads_shown_with_live_status(cx: &mut TestAppContext) { entries[1..].sort(); assert_eq!( entries, - vec![ - // - "v [my-project]", - " Hello * (active)", - " Hello * (running)", - ] + vec!["v [my-project]", " Hello *", " Hello * (running)",] ); } @@ -1571,11 +1478,7 @@ async fn test_background_thread_completion_triggers_notification(cx: &mut TestAp // Thread A is still running; no notification yet. assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project-a]", - " Hello * (running) (active)", - ] + vec!["v [project-a]", " Hello * (running)",] ); // Complete thread A's turn (transition Running → Completed). @@ -1585,11 +1488,7 @@ async fn test_background_thread_completion_triggers_notification(cx: &mut TestAp // The completed background thread shows a notification indicator. assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project-a]", - " Hello * (!) (active)", - ] + vec!["v [project-a]", " Hello * (!)",] ); } @@ -1629,7 +1528,6 @@ async fn test_search_narrows_visible_threads_to_matches(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - // "v [my-project]", " Fix crash in project panel", " Add inline diff view", @@ -1642,11 +1540,7 @@ async fn test_search_narrows_visible_threads_to_matches(cx: &mut TestAppContext) type_in_search(&sidebar, "diff", cx); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [my-project]", - " Add inline diff view <== selected", - ] + vec!["v [my-project]", " Add inline diff view <== selected",] ); // User changes query to something with no matches — list is empty. 
@@ -1681,7 +1575,6 @@ async fn test_search_matches_regardless_of_case(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - // "v [my-project]", " Fix Crash In Project Panel <== selected", ] @@ -1692,7 +1585,6 @@ async fn test_search_matches_regardless_of_case(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - // "v [my-project]", " Fix Crash In Project Panel <== selected", ] @@ -1723,12 +1615,7 @@ async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContex // Confirm the full list is showing. assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [my-project]", - " Alpha thread", - " Beta thread", - ] + vec!["v [my-project]", " Alpha thread", " Beta thread",] ); // User types a search query to filter down. @@ -1736,11 +1623,7 @@ async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContex type_in_search(&sidebar, "alpha", cx); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [my-project]", - " Alpha thread <== selected", - ] + vec!["v [my-project]", " Alpha thread <== selected",] ); // User presses Escape — filter clears, full list is restored. 
@@ -1750,7 +1633,6 @@ async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContex assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - // "v [my-project]", " Alpha thread <== selected", " Beta thread", @@ -1807,7 +1689,6 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - // "v [project-a]", " Fix bug in sidebar", " Add tests for editor", @@ -1818,11 +1699,7 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC type_in_search(&sidebar, "sidebar", cx); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project-a]", - " Fix bug in sidebar <== selected", - ] + vec!["v [project-a]", " Fix bug in sidebar <== selected",] ); // "typo" only matches in the second workspace — the first header disappears. @@ -1838,7 +1715,6 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - // "v [project-a]", " Fix bug in sidebar <== selected", " Add tests for editor", @@ -1898,7 +1774,6 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - // "v [alpha-project]", " Fix bug in sidebar <== selected", " Add tests for editor", @@ -1910,11 +1785,7 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { type_in_search(&sidebar, "sidebar", cx); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [alpha-project]", - " Fix bug in sidebar <== selected", - ] + vec!["v [alpha-project]", " Fix bug in sidebar <== selected",] ); // "alpha sidebar" matches the workspace name "alpha-project" (fuzzy: a-l-p-h-a-s-i-d-e-b-a-r @@ -1924,11 +1795,7 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { type_in_search(&sidebar, "fix", cx); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v 
[alpha-project]", - " Fix bug in sidebar <== selected", - ] + vec!["v [alpha-project]", " Fix bug in sidebar <== selected",] ); // A query that matches a workspace name AND a thread in that same workspace. @@ -1937,7 +1804,6 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - // "v [alpha-project]", " Fix bug in sidebar <== selected", " Add tests for editor", @@ -1951,7 +1817,6 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - // "v [alpha-project]", " Fix bug in sidebar <== selected", " Add tests for editor", @@ -2001,11 +1866,7 @@ async fn test_search_finds_threads_hidden_behind_view_more(cx: &mut TestAppConte let filtered = visible_entries_as_strings(&sidebar, cx); assert_eq!( filtered, - vec![ - // - "v [my-project]", - " Hidden gem thread <== selected", - ] + vec!["v [my-project]", " Hidden gem thread <== selected",] ); assert!( !filtered.iter().any(|e| e.contains("View More")), @@ -2041,21 +1902,14 @@ async fn test_search_finds_threads_inside_collapsed_groups(cx: &mut TestAppConte assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "> [my-project] <== selected", - ] + vec!["> [my-project] <== selected"] ); // User types a search — the thread appears even though its group is collapsed. 
type_in_search(&sidebar, "important", cx); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "> [my-project]", - " Important thread <== selected", - ] + vec!["> [my-project]", " Important thread <== selected",] ); } @@ -2089,7 +1943,6 @@ async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - // "v [my-project]", " Fix crash in panel <== selected", " Fix lint warnings", @@ -2102,7 +1955,6 @@ async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - // "v [my-project]", " Fix crash in panel", " Fix lint warnings <== selected", @@ -2114,7 +1966,6 @@ async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - // "v [my-project]", " Fix crash in panel <== selected", " Fix lint warnings", @@ -2155,11 +2006,7 @@ async fn test_confirm_on_historical_thread_activates_workspace(cx: &mut TestAppC assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [my-project]", - " Historical Thread", - ] + vec!["v [my-project]", " Historical Thread",] ); // Switch to workspace 1 so we can verify the confirm switches back. @@ -2220,12 +2067,7 @@ async fn test_click_clears_selection_and_focus_in_restores_it(cx: &mut TestAppCo assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [my-project]", - " Thread A", - " Thread B", - ] + vec!["v [my-project]", " Thread A", " Thread B",] ); // Keyboard confirm preserves selection. @@ -2277,11 +2119,7 @@ async fn test_thread_title_update_propagates_to_sidebar(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [my-project]", - " Hello * (active)", - ] + vec!["v [my-project]", " Hello *"] ); // Simulate the agent generating a title. 
The notification chain is: @@ -2303,11 +2141,7 @@ async fn test_thread_title_update_propagates_to_sidebar(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [my-project]", - " Friendly Greeting with AI * (active)", - ] + vec!["v [my-project]", " Friendly Greeting with AI *"] ); } @@ -2449,825 +2283,186 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) { sidebar.read_with(cx, |sidebar, _cx| { assert_active_thread( sidebar, - &session_id_a, - "Switching workspace should seed focused_thread from the new active panel", - ); - assert!( - has_thread_entry(sidebar, &session_id_a), - "The seeded thread should be present in the entries" - ); - }); - - let connection_b2 = StubAgentConnection::new(); - connection_b2.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( - acp::ContentChunk::new(DEFAULT_THREAD_TITLE.into()), - )]); - open_thread_with_connection(&panel_b, connection_b2, cx); - send_message(&panel_b, cx); - let session_id_b2 = active_session_id(&panel_b, cx); - save_test_thread_metadata(&session_id_b2, &project_b, cx).await; - cx.run_until_parked(); - - // Panel B is not the active workspace's panel (workspace A is - // active), so opening a thread there should not change focused_thread. - // This prevents running threads in background workspaces from causing - // the selection highlight to jump around. 
- sidebar.read_with(cx, |sidebar, _cx| { - assert_active_thread( - sidebar, - &session_id_a, - "Opening a thread in a non-active panel should not change focused_thread", - ); - }); - - workspace_b.update_in(cx, |workspace, window, cx| { - workspace.focus_handle(cx).focus(window, cx); - }); - cx.run_until_parked(); - - sidebar.read_with(cx, |sidebar, _cx| { - assert_active_thread( - sidebar, - &session_id_a, - "Defocusing the sidebar should not change focused_thread", - ); - }); - - // Switching workspaces via the multi_workspace (simulates clicking - // a workspace header) should clear focused_thread. - multi_workspace.update_in(cx, |mw, window, cx| { - let workspace = mw.workspaces().find(|w| *w == &workspace_b).cloned(); - if let Some(workspace) = workspace { - mw.activate(workspace, window, cx); - } - }); - cx.run_until_parked(); - - sidebar.read_with(cx, |sidebar, _cx| { - assert_active_thread( - sidebar, - &session_id_b2, - "Switching workspace should seed focused_thread from the new active panel", - ); - assert!( - has_thread_entry(sidebar, &session_id_b2), - "The seeded thread should be present in the entries" - ); - }); - - // ── 8. Focusing the agent panel thread keeps focused_thread ──── - // Workspace B still has session_id_b2 loaded in the agent panel. - // Clicking into the thread (simulated by focusing its view) should - // keep focused_thread since it was already seeded on workspace switch. 
- panel_b.update_in(cx, |panel, window, cx| { - if let Some(thread_view) = panel.active_conversation_view() { - thread_view.read(cx).focus_handle(cx).focus(window, cx); - } - }); - cx.run_until_parked(); - - sidebar.read_with(cx, |sidebar, _cx| { - assert_active_thread( - sidebar, - &session_id_b2, - "Focusing the agent panel thread should set focused_thread", - ); - assert!( - has_thread_entry(sidebar, &session_id_b2), - "The focused thread should be present in the entries" - ); - }); -} - -#[gpui::test] -async fn test_new_thread_button_works_after_adding_folder(cx: &mut TestAppContext) { - let project = init_test_project_with_agent_panel("/project-a", cx).await; - let fs = cx.update(|cx| ::global(cx)); - let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); - let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); - - // Start a thread and send a message so it has history. - let connection = StubAgentConnection::new(); - connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( - acp::ContentChunk::new("Done".into()), - )]); - open_thread_with_connection(&panel, connection, cx); - send_message(&panel, cx); - let session_id = active_session_id(&panel, cx); - save_test_thread_metadata(&session_id, &project, cx).await; - cx.run_until_parked(); - - // Verify the thread appears in the sidebar. - assert_eq!( - visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project-a]", - " Hello * (active)", - ] - ); - - // The "New Thread" button should NOT be in "active/draft" state - // because the panel has a thread with messages. - sidebar.read_with(cx, |sidebar, _cx| { - assert!( - matches!(&sidebar.active_entry, Some(ActiveEntry::Thread { .. })), - "Panel has a thread with messages, so active_entry should be Thread, got {:?}", - sidebar.active_entry, - ); - }); - - // Now add a second folder to the workspace, changing the path_list. 
- fs.as_fake() - .insert_tree("/project-b", serde_json::json!({ "src": {} })) - .await; - project - .update(cx, |project, cx| { - project.find_or_create_worktree("/project-b", true, cx) - }) - .await - .expect("should add worktree"); - cx.run_until_parked(); - - // The workspace path_list is now [project-a, project-b]. The active - // thread's metadata was re-saved with the new paths by the agent panel's - // project subscription. The old [project-a] key is replaced by the new - // key since no other workspace claims it. - assert_eq!( - visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project-a, project-b]", - " Hello * (active)", - ] - ); - - // The "New Thread" button must still be clickable (not stuck in - // "active/draft" state). Verify that `active_thread_is_draft` is - // false — the panel still has the old thread with messages. - sidebar.read_with(cx, |sidebar, _cx| { - assert!( - matches!(&sidebar.active_entry, Some(ActiveEntry::Thread { .. })), - "After adding a folder the panel still has a thread with messages, \ - so active_entry should be Thread, got {:?}", - sidebar.active_entry, - ); - }); - - // Actually click "New Thread" by calling create_new_thread and - // verify a new draft is created. - let workspace = multi_workspace.read_with(cx, |mw, _cx| mw.workspace().clone()); - sidebar.update_in(cx, |sidebar, window, cx| { - sidebar.create_new_thread(&workspace, window, cx); - }); - cx.run_until_parked(); - - // After creating a new thread, the panel should now be in draft - // state (no messages on the new thread). - sidebar.read_with(cx, |sidebar, _cx| { - assert_active_draft( - sidebar, - &workspace, - "After creating a new thread active_entry should be Draft", - ); - }); -} - -#[gpui::test] -async fn test_worktree_add_and_remove_migrates_threads(cx: &mut TestAppContext) { - // When a worktree is added to a project, the project group key changes - // and all historical threads should be migrated to the new key. 
Removing - // the worktree should migrate them back. - let (_fs, project) = init_multi_project_test(&["/project-a", "/project-b"], cx).await; - let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); - let sidebar = setup_sidebar(&multi_workspace, cx); - - // Save two threads against the initial project group [/project-a]. - save_n_test_threads(2, &project, cx).await; - sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); - cx.run_until_parked(); - - assert_eq!( - visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project-a]", - " Thread 2", - " Thread 1", - ] - ); - - // Verify the metadata store has threads under the old key. - let old_key_paths = PathList::new(&[PathBuf::from("/project-a")]); - cx.update(|_window, cx| { - let store = ThreadMetadataStore::global(cx).read(cx); - assert_eq!( - store.entries_for_main_worktree_path(&old_key_paths).count(), - 2, - "should have 2 threads under old key before add" - ); - }); - - // Add a second worktree to the same project. - project - .update(cx, |project, cx| { - project.find_or_create_worktree("/project-b", true, cx) - }) - .await - .expect("should add worktree"); - cx.run_until_parked(); - - // The project group key should now be [/project-a, /project-b]. - let new_key_paths = PathList::new(&[PathBuf::from("/project-a"), PathBuf::from("/project-b")]); - - // Verify multi-workspace state: exactly one project group key, the new one. - multi_workspace.read_with(cx, |mw, _cx| { - let keys: Vec<_> = mw.project_group_keys().cloned().collect(); - assert_eq!( - keys.len(), - 1, - "should have exactly 1 project group key after add" - ); - assert_eq!( - keys[0].path_list(), - &new_key_paths, - "the key should be the new combined path list" - ); - }); - - // Verify threads were migrated to the new key. 
- cx.update(|_window, cx| { - let store = ThreadMetadataStore::global(cx).read(cx); - assert_eq!( - store.entries_for_main_worktree_path(&old_key_paths).count(), - 0, - "should have 0 threads under old key after migration" - ); - assert_eq!( - store.entries_for_main_worktree_path(&new_key_paths).count(), - 2, - "should have 2 threads under new key after migration" - ); - }); - - // Sidebar should show threads under the new header. - sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); - cx.run_until_parked(); - - assert_eq!( - visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project-a, project-b]", - " Thread 2", - " Thread 1", - ] - ); - - // Now remove the second worktree. - let worktree_id = project.read_with(cx, |project, cx| { - project - .visible_worktrees(cx) - .find(|wt| wt.read(cx).abs_path().as_ref() == Path::new("/project-b")) - .map(|wt| wt.read(cx).id()) - .expect("should find project-b worktree") - }); - project.update(cx, |project, cx| { - project.remove_worktree(worktree_id, cx); - }); - cx.run_until_parked(); - - // The key should revert to [/project-a]. - multi_workspace.read_with(cx, |mw, _cx| { - let keys: Vec<_> = mw.project_group_keys().cloned().collect(); - assert_eq!( - keys.len(), - 1, - "should have exactly 1 project group key after remove" - ); - assert_eq!( - keys[0].path_list(), - &old_key_paths, - "the key should revert to the original path list" - ); - }); - - // Threads should be migrated back to the old key. 
- cx.update(|_window, cx| { - let store = ThreadMetadataStore::global(cx).read(cx); - assert_eq!( - store.entries_for_main_worktree_path(&new_key_paths).count(), - 0, - "should have 0 threads under new key after revert" - ); - assert_eq!( - store.entries_for_main_worktree_path(&old_key_paths).count(), - 2, - "should have 2 threads under old key after revert" - ); - }); - - sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); - cx.run_until_parked(); - - assert_eq!( - visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project-a]", - " Thread 2", - " Thread 1", - ] - ); -} - -#[gpui::test] -async fn test_worktree_add_key_collision_removes_duplicate_workspace(cx: &mut TestAppContext) { - // When a worktree is added to workspace A and the resulting key matches - // an existing workspace B's key (and B has the same root paths), B - // should be removed as a true duplicate. - let (fs, project_a) = init_multi_project_test(&["/project-a", "/project-b"], cx).await; - let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); - let sidebar = setup_sidebar(&multi_workspace, cx); - - // Save a thread against workspace A [/project-a]. - save_named_thread_metadata("thread-a", "Thread A", &project_a, cx).await; - - // Create workspace B with both worktrees [/project-a, /project-b]. - let project_b = project::Project::test( - fs.clone() as Arc, - ["/project-a".as_ref(), "/project-b".as_ref()], - cx, - ) - .await; - let workspace_b = multi_workspace.update_in(cx, |mw, window, cx| { - mw.test_add_workspace(project_b.clone(), window, cx) - }); - cx.run_until_parked(); - - // Switch back to workspace A so it's the active workspace when the collision happens. 
- let workspace_a = - multi_workspace.read_with(cx, |mw, _| mw.workspaces().next().unwrap().clone()); - multi_workspace.update_in(cx, |mw, window, cx| { - mw.activate(workspace_a, window, cx); - }); - cx.run_until_parked(); - - // Save a thread against workspace B [/project-a, /project-b]. - save_named_thread_metadata("thread-b", "Thread B", &project_b, cx).await; - - sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); - cx.run_until_parked(); - - // Both project groups should be visible. - assert_eq!( - visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project-a, project-b]", - " Thread B", - "v [project-a]", - " Thread A", - ] - ); - - let workspace_b_id = workspace_b.entity_id(); - - // Now add /project-b to workspace A's project, causing a key collision. - project_a - .update(cx, |project, cx| { - project.find_or_create_worktree("/project-b", true, cx) - }) - .await - .expect("should add worktree"); - cx.run_until_parked(); - - // Workspace B should have been removed (true duplicate — same root paths). - multi_workspace.read_with(cx, |mw, _cx| { - let workspace_ids: Vec<_> = mw.workspaces().map(|ws| ws.entity_id()).collect(); - assert!( - !workspace_ids.contains(&workspace_b_id), - "workspace B should have been removed after key collision" - ); - }); - - // There should be exactly one project group key now. - let combined_paths = PathList::new(&[PathBuf::from("/project-a"), PathBuf::from("/project-b")]); - multi_workspace.read_with(cx, |mw, _cx| { - let keys: Vec<_> = mw.project_group_keys().cloned().collect(); - assert_eq!( - keys.len(), - 1, - "should have exactly 1 project group key after collision" - ); - assert_eq!( - keys[0].path_list(), - &combined_paths, - "the remaining key should be the combined paths" - ); - }); - - // Both threads should be visible under the merged group. 
- sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); - cx.run_until_parked(); - - assert_eq!( - visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project-a, project-b]", - " Thread A", - " Thread B", - ] - ); -} - -#[gpui::test] -async fn test_worktree_collision_keeps_active_workspace(cx: &mut TestAppContext) { - // When workspace A adds a folder that makes it collide with workspace B, - // and B is the *active* workspace, A (the incoming one) should be - // dropped so the user stays on B. A linked worktree sibling of A - // should migrate into B's group. - init_test(cx); - let fs = FakeFs::new(cx.executor()); - - // Set up /project-a with a linked worktree. - fs.insert_tree( - "/project-a", - serde_json::json!({ - ".git": { - "worktrees": { - "feature": { - "commondir": "../../", - "HEAD": "ref: refs/heads/feature", - }, - }, - }, - "src": {}, - }), - ) - .await; - fs.insert_tree( - "/wt-feature", - serde_json::json!({ - ".git": "gitdir: /project-a/.git/worktrees/feature", - "src": {}, - }), - ) - .await; - fs.add_linked_worktree_for_repo( - Path::new("/project-a/.git"), - false, - git::repository::Worktree { - path: PathBuf::from("/wt-feature"), - ref_name: Some("refs/heads/feature".into()), - sha: "aaa".into(), - is_main: false, - }, - ) - .await; - fs.insert_tree("/project-b", serde_json::json!({ ".git": {}, "src": {} })) - .await; - cx.update(|cx| ::set_global(fs.clone(), cx)); - - let project_a = project::Project::test(fs.clone(), ["/project-a".as_ref()], cx).await; - project_a.update(cx, |p, cx| p.git_scans_complete(cx)).await; - - // Linked worktree sibling of A. - let project_wt = project::Project::test(fs.clone(), ["/wt-feature".as_ref()], cx).await; - project_wt - .update(cx, |p, cx| p.git_scans_complete(cx)) - .await; - - // Workspace B has both folders already. 
- let project_b = project::Project::test( - fs.clone() as Arc, - ["/project-a".as_ref(), "/project-b".as_ref()], - cx, - ) - .await; - - let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); - let sidebar = setup_sidebar(&multi_workspace, cx); - - // Add agent panels to all workspaces. - let workspace_a_entity = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); - add_agent_panel(&workspace_a_entity, cx); - - // Add the linked worktree workspace (sibling of A). - let workspace_wt = multi_workspace.update_in(cx, |mw, window, cx| { - mw.test_add_workspace(project_wt.clone(), window, cx) - }); - add_agent_panel(&workspace_wt, cx); - cx.run_until_parked(); - - // Add workspace B (will become active). - let workspace_b = multi_workspace.update_in(cx, |mw, window, cx| { - mw.test_add_workspace(project_b.clone(), window, cx) - }); - add_agent_panel(&workspace_b, cx); - cx.run_until_parked(); - - // Save threads in each group. - save_named_thread_metadata("thread-a", "Thread A", &project_a, cx).await; - save_thread_metadata_with_main_paths( - "thread-wt", - "Worktree Thread", - PathList::new(&[PathBuf::from("/wt-feature")]), - PathList::new(&[PathBuf::from("/project-a")]), - cx, - ); - save_named_thread_metadata("thread-b", "Thread B", &project_b, cx).await; - - sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); - cx.run_until_parked(); - - // B is active, A and wt-feature are in one group, B in another. 
- assert_eq!( - multi_workspace.read_with(cx, |mw, _| mw.workspace().entity_id()), - workspace_b.entity_id(), - "workspace B should be active" - ); - multi_workspace.read_with(cx, |mw, _cx| { - assert_eq!(mw.project_group_keys().count(), 2, "should have 2 groups"); - assert_eq!(mw.workspaces().count(), 3, "should have 3 workspaces"); - }); - - assert_eq!( - visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project-a, project-b]", - " [~ Draft] (active)", - " Thread B", - "v [project-a]", - " Thread A", - " Worktree Thread {wt-feature}", - ] - ); - - let workspace_a = multi_workspace.read_with(cx, |mw, _| { - mw.workspaces() - .find(|ws| { - ws.entity_id() != workspace_b.entity_id() - && ws.entity_id() != workspace_wt.entity_id() - }) - .unwrap() - .clone() - }); - - // Add /project-b to workspace A's project, causing a collision with B. - project_a - .update(cx, |project, cx| { - project.find_or_create_worktree("/project-b", true, cx) - }) - .await - .expect("should add worktree"); - cx.run_until_parked(); - - // Workspace A (the incoming duplicate) should have been dropped. - multi_workspace.read_with(cx, |mw, _cx| { - let workspace_ids: Vec<_> = mw.workspaces().map(|ws| ws.entity_id()).collect(); - assert!( - !workspace_ids.contains(&workspace_a.entity_id()), - "workspace A should have been dropped" - ); - }); - - // The active workspace should still be B. - assert_eq!( - multi_workspace.read_with(cx, |mw, _| mw.workspace().entity_id()), - workspace_b.entity_id(), - "workspace B should still be active" - ); - - // The linked worktree sibling should have migrated into B's group - // (it got the folder add and now shares the same key). 
- multi_workspace.read_with(cx, |mw, _cx| { - let workspace_ids: Vec<_> = mw.workspaces().map(|ws| ws.entity_id()).collect(); - assert!( - workspace_ids.contains(&workspace_wt.entity_id()), - "linked worktree workspace should still exist" - ); - assert_eq!( - mw.project_group_keys().count(), - 1, - "should have 1 group after merge" - ); - assert_eq!( - mw.workspaces().count(), - 2, - "should have 2 workspaces (B + linked worktree)" - ); - }); - - // The linked worktree workspace should have gotten the new folder. - let wt_worktree_count = - project_wt.read_with(cx, |project, cx| project.visible_worktrees(cx).count()); - assert_eq!( - wt_worktree_count, 2, - "linked worktree project should have gotten /project-b" - ); - - // After: everything merged under one group. Thread A migrated, - // worktree thread shows its chip, B's thread and draft remain. - sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); - cx.run_until_parked(); - - assert_eq!( - visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project-a, project-b]", - " [~ Draft] (active)", - " [+ New Thread {project-a:wt-feature}]", - " Thread A", - " Worktree Thread {project-a:wt-feature}", - " Thread B", - ] - ); -} - -#[gpui::test] -async fn test_worktree_add_syncs_linked_worktree_sibling(cx: &mut TestAppContext) { - // When a worktree is added to the main workspace, a linked worktree - // sibling (different root paths, same project group key) should also - // get the new folder added to its project. 
- init_test(cx); - let fs = FakeFs::new(cx.executor()); - - fs.insert_tree( - "/project", - serde_json::json!({ - ".git": { - "worktrees": { - "feature": { - "commondir": "../../", - "HEAD": "ref: refs/heads/feature", - }, - }, - }, - "src": {}, - }), - ) - .await; - - fs.insert_tree( - "/wt-feature", - serde_json::json!({ - ".git": "gitdir: /project/.git/worktrees/feature", - "src": {}, - }), - ) - .await; - - fs.add_linked_worktree_for_repo( - Path::new("/project/.git"), - false, - git::repository::Worktree { - path: PathBuf::from("/wt-feature"), - ref_name: Some("refs/heads/feature".into()), - sha: "aaa".into(), - is_main: false, - }, - ) - .await; - - // Create a second independent project to add as a folder later. - fs.insert_tree( - "/other-project", - serde_json::json!({ ".git": {}, "src": {} }), - ) - .await; - - cx.update(|cx| ::set_global(fs.clone(), cx)); + &session_id_a, + "Switching workspace should seed focused_thread from the new active panel", + ); + assert!( + has_thread_entry(sidebar, &session_id_a), + "The seeded thread should be present in the entries" + ); + }); - let main_project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await; - let worktree_project = project::Project::test(fs.clone(), ["/wt-feature".as_ref()], cx).await; + let connection_b2 = StubAgentConnection::new(); + connection_b2.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new(DEFAULT_THREAD_TITLE.into()), + )]); + open_thread_with_connection(&panel_b, connection_b2, cx); + send_message(&panel_b, cx); + let session_id_b2 = active_session_id(&panel_b, cx); + save_test_thread_metadata(&session_id_b2, &project_b, cx).await; + cx.run_until_parked(); - main_project - .update(cx, |p, cx| p.git_scans_complete(cx)) - .await; - worktree_project - .update(cx, |p, cx| p.git_scans_complete(cx)) - .await; + // Panel B is not the active workspace's panel (workspace A is + // active), so opening a thread there should not change 
focused_thread. + // This prevents running threads in background workspaces from causing + // the selection highlight to jump around. + sidebar.read_with(cx, |sidebar, _cx| { + assert_active_thread( + sidebar, + &session_id_a, + "Opening a thread in a non-active panel should not change focused_thread", + ); + }); - let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(main_project.clone(), window, cx)); - let sidebar = setup_sidebar(&multi_workspace, cx); + workspace_b.update_in(cx, |workspace, window, cx| { + workspace.focus_handle(cx).focus(window, cx); + }); + cx.run_until_parked(); - // Add agent panel to the main workspace. - let main_workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); - add_agent_panel(&main_workspace, cx); + sidebar.read_with(cx, |sidebar, _cx| { + assert_active_thread( + sidebar, + &session_id_a, + "Defocusing the sidebar should not change focused_thread", + ); + }); - // Open the linked worktree as a separate workspace. - let wt_workspace = multi_workspace.update_in(cx, |mw, window, cx| { - mw.test_add_workspace(worktree_project.clone(), window, cx) + // Switching workspaces via the multi_workspace (simulates clicking + // a workspace header) should clear focused_thread. + multi_workspace.update_in(cx, |mw, window, cx| { + let workspace = mw.workspaces().find(|w| *w == &workspace_b).cloned(); + if let Some(workspace) = workspace { + mw.activate(workspace, window, cx); + } }); - add_agent_panel(&wt_workspace, cx); cx.run_until_parked(); - // Both workspaces should share the same project group key [/project]. 
- multi_workspace.read_with(cx, |mw, _cx| { - assert_eq!( - mw.project_group_keys().count(), - 1, - "should have 1 project group key before add" + sidebar.read_with(cx, |sidebar, _cx| { + assert_active_thread( + sidebar, + &session_id_b2, + "Switching workspace should seed focused_thread from the new active panel", + ); + assert!( + has_thread_entry(sidebar, &session_id_b2), + "The seeded thread should be present in the entries" ); - assert_eq!(mw.workspaces().count(), 2, "should have 2 workspaces"); }); - // Save threads against each workspace. - save_named_thread_metadata("main-thread", "Main Thread", &main_project, cx).await; - save_named_thread_metadata("wt-thread", "Worktree Thread", &worktree_project, cx).await; + // ── 8. Focusing the agent panel thread keeps focused_thread ──── + // Workspace B still has session_id_b2 loaded in the agent panel. + // Clicking into the thread (simulated by focusing its view) should + // keep focused_thread since it was already seeded on workspace switch. + panel_b.update_in(cx, |panel, window, cx| { + if let Some(thread_view) = panel.active_conversation_view() { + thread_view.read(cx).focus_handle(cx).focus(window, cx); + } + }); + cx.run_until_parked(); - // Verify both threads are under the old key [/project]. 
- let old_key_paths = PathList::new(&[PathBuf::from("/project")]); - cx.update(|_window, cx| { - let store = ThreadMetadataStore::global(cx).read(cx); - assert_eq!( - store.entries_for_main_worktree_path(&old_key_paths).count(), - 2, - "should have 2 threads under old key before add" + sidebar.read_with(cx, |sidebar, _cx| { + assert_active_thread( + sidebar, + &session_id_b2, + "Focusing the agent panel thread should set focused_thread", + ); + assert!( + has_thread_entry(sidebar, &session_id_b2), + "The focused thread should be present in the entries" ); }); +} + +#[gpui::test] +async fn test_new_thread_button_works_after_adding_folder(cx: &mut TestAppContext) { + let project = init_test_project_with_agent_panel("/project-a", cx).await; + let fs = cx.update(|cx| ::global(cx)); + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); - sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); + // Start a thread and send a message so it has history. + let connection = StubAgentConnection::new(); + connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Done".into()), + )]); + open_thread_with_connection(&panel, connection, cx); + send_message(&panel, cx); + let session_id = active_session_id(&panel, cx); + save_test_thread_metadata(&session_id, &project, cx).await; cx.run_until_parked(); + // Verify the thread appears in the sidebar. assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project]", - " [~ Draft {wt-feature}] (active)", - " Worktree Thread {wt-feature}", - " Main Thread", - ] + vec!["v [project-a]", " Hello *",] ); - // Add /other-project as a folder to the main workspace. - main_project + // The "New Thread" button should NOT be in "active/draft" state + // because the panel has a thread with messages. 
+ sidebar.read_with(cx, |sidebar, _cx| { + assert!( + matches!(&sidebar.active_entry, Some(ActiveEntry::Thread { .. })), + "Panel has a thread with messages, so active_entry should be Thread, got {:?}", + sidebar.active_entry, + ); + }); + + // Now add a second folder to the workspace, changing the path_list. + fs.as_fake() + .insert_tree("/project-b", serde_json::json!({ "src": {} })) + .await; + project .update(cx, |project, cx| { - project.find_or_create_worktree("/other-project", true, cx) + project.find_or_create_worktree("/project-b", true, cx) }) .await .expect("should add worktree"); cx.run_until_parked(); - // The linked worktree workspace should have gotten the new folder too. - let wt_worktree_count = - worktree_project.read_with(cx, |project, cx| project.visible_worktrees(cx).count()); + // The workspace path_list is now [project-a, project-b]. The active + // thread's metadata was re-saved with the new paths by the agent panel's + // project subscription, so it stays visible under the updated group. + // The old [project-a] group persists in the sidebar (empty) because + // project_group_keys is append-only. assert_eq!( - wt_worktree_count, 2, - "linked worktree project should have gotten the new folder" + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [project-a, project-b]", // + " Hello *", + "v [project-a]", + ] ); - // Both workspaces should still exist under one key. - multi_workspace.read_with(cx, |mw, _cx| { - assert_eq!(mw.workspaces().count(), 2, "both workspaces should survive"); - assert_eq!( - mw.project_group_keys().count(), - 1, - "should still have 1 project group key" + // The "New Thread" button must still be clickable (not stuck in + // "active/draft" state). Verify that `active_thread_is_draft` is + // false — the panel still has the old thread with messages. + sidebar.read_with(cx, |sidebar, _cx| { + assert!( + matches!(&sidebar.active_entry, Some(ActiveEntry::Thread { .. 
})), + "After adding a folder the panel still has a thread with messages, \ + so active_entry should be Thread, got {:?}", + sidebar.active_entry, ); }); - // Threads should have been migrated to the new key. - let new_key_paths = - PathList::new(&[PathBuf::from("/other-project"), PathBuf::from("/project")]); - cx.update(|_window, cx| { - let store = ThreadMetadataStore::global(cx).read(cx); - assert_eq!( - store.entries_for_main_worktree_path(&old_key_paths).count(), - 0, - "should have 0 threads under old key after migration" - ); - assert_eq!( - store.entries_for_main_worktree_path(&new_key_paths).count(), - 2, - "should have 2 threads under new key after migration" - ); + // Actually click "New Thread" by calling create_new_thread and + // verify a new draft is created. + let workspace = multi_workspace.read_with(cx, |mw, _cx| mw.workspace().clone()); + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.create_new_thread(&workspace, window, cx); }); - - // Both threads should still be visible in the sidebar. - sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); cx.run_until_parked(); - assert_eq!( - visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [other-project, project]", - " [~ Draft {project:wt-feature}] (active)", - " Worktree Thread {project:wt-feature}", - " Main Thread", - ] - ); + // After creating a new thread, the panel should now be in draft + // state (no messages on the new thread). 
+ sidebar.read_with(cx, |sidebar, _cx| { + assert_active_draft( + sidebar, + &workspace, + "After creating a new thread active_entry should be Draft", + ); + }); } #[gpui::test] @@ -3295,11 +2490,7 @@ async fn test_cmd_n_shows_new_thread_entry(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [my-project]", - " Hello * (active)", - ] + vec!["v [my-project]", " Hello *"] ); // Simulate cmd-n @@ -3314,12 +2505,7 @@ async fn test_cmd_n_shows_new_thread_entry(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [my-project]", - " [~ Draft] (active)", - " Hello *", - ], + vec!["v [my-project]", " [~ Draft]", " Hello *"], "After Cmd-N the sidebar should show a highlighted Draft entry" ); @@ -3352,11 +2538,7 @@ async fn test_draft_with_server_session_shows_as_draft(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [my-project]", - " Hello * (active)", - ] + vec!["v [my-project]", " Hello *"] ); // Open a new draft thread via a server connection. This gives the @@ -3368,12 +2550,7 @@ async fn test_draft_with_server_session_shows_as_draft(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [my-project]", - " [~ Draft] (active)", - " Hello *", - ], + vec!["v [my-project]", " [~ Draft]", " Hello *"], ); let workspace = multi_workspace.read_with(cx, |mw, _cx| mw.workspace().clone()); @@ -3467,11 +2644,7 @@ async fn test_cmd_n_shows_new_thread_entry_in_absorbed_worktree(cx: &mut TestApp assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project]", - " Hello {wt-feature-a} * (active)", - ] + vec!["v [project]", " Hello {wt-feature-a} *"] ); // Simulate Cmd-N in the worktree workspace. 
@@ -3486,10 +2659,9 @@ async fn test_cmd_n_shows_new_thread_entry_in_absorbed_worktree(cx: &mut TestApp assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - // "v [project]", - " [~ Draft {wt-feature-a}] (active)", - " Hello {wt-feature-a} *", + " [~ Draft {wt-feature-a}]", + " Hello {wt-feature-a} *" ], "After Cmd-N in an absorbed worktree, the sidebar should show \ a highlighted Draft entry under the main repo header" @@ -3564,11 +2736,7 @@ async fn test_search_matches_worktree_name(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project]", - " Fix Bug {rosewood} <== selected", - ], + vec!["v [project]", " Fix Bug {rosewood} <== selected"], ); } @@ -3589,28 +2757,16 @@ async fn test_git_worktree_added_live_updates_sidebar(cx: &mut TestAppContext) { cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - // Save a thread against a worktree path with the correct main - // worktree association (as if the git state had been resolved). - save_thread_metadata_with_main_paths( - "wt-thread", - "Worktree Thread", - PathList::new(&[PathBuf::from("/wt/rosewood")]), - PathList::new(&[PathBuf::from("/project")]), - cx, - ); + // Save a thread against a worktree path that doesn't exist yet. + save_named_thread_metadata("wt-thread", "Worktree Thread", &worktree_project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); - // Thread is visible because its main_worktree_paths match the group. - // The chip name is derived from the path even before git discovery. + // Thread is not visible yet — no worktree knows about this path. assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project]", - " Worktree Thread {rosewood}", - ] + vec!["v [project]"] ); // Now add the worktree to the git state and trigger a rescan. 
@@ -3631,11 +2787,7 @@ async fn test_git_worktree_added_live_updates_sidebar(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project]", - " Worktree Thread {rosewood}", - ] + vec!["v [project]", " Worktree Thread {rosewood}",] ); } @@ -3705,7 +2857,6 @@ async fn test_two_worktree_workspaces_absorbed_when_main_added(cx: &mut TestAppC assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - // "v [project]", " Thread A {wt-feature-a}", " Thread B {wt-feature-b}", @@ -3727,7 +2878,6 @@ async fn test_two_worktree_workspaces_absorbed_when_main_added(cx: &mut TestAppC assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - // "v [project]", " Thread A {wt-feature-a}", " Thread B {wt-feature-b}", @@ -3803,7 +2953,6 @@ async fn test_threadless_workspace_shows_new_thread_with_worktree_chip(cx: &mut assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - // "v [project]", " [+ New Thread {wt-feature-b}]", " Thread A {wt-feature-a}", @@ -3883,9 +3032,8 @@ async fn test_multi_worktree_thread_shows_multiple_chips(cx: &mut TestAppContext assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - // "v [project_a, project_b]", - " Cross Worktree Thread {project_a:olivetti}, {project_b:selectric}", + " Cross Worktree Thread {olivetti}, {selectric}", ] ); } @@ -3957,7 +3105,6 @@ async fn test_same_named_worktree_chips_are_deduplicated(cx: &mut TestAppContext assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - // "v [project_a, project_b]", " Same Branch Thread {olivetti}", ] @@ -4062,9 +3209,8 @@ async fn test_absorbed_worktree_running_thread_shows_live_status(cx: &mut TestAp assert_eq!( entries, vec![ - // "v [project]", - " [~ Draft] (active)", + " [~ Draft]", " Hello {wt-feature-a} * (running)", ] ); @@ -4150,9 +3296,8 @@ async fn test_absorbed_worktree_completion_triggers_notification(cx: &mut TestAp assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - // "v [project]", - " [~ Draft] 
(active)", + " [~ Draft]", " Hello {wt-feature-a} * (running)", ] ); @@ -4162,12 +3307,7 @@ async fn test_absorbed_worktree_completion_triggers_notification(cx: &mut TestAp assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project]", - " [~ Draft] (active)", - " Hello {wt-feature-a} * (!)", - ] + vec!["v [project]", " [~ Draft]", " Hello {wt-feature-a} * (!)",] ); } @@ -4223,11 +3363,7 @@ async fn test_clicking_worktree_thread_opens_workspace_when_none_exists(cx: &mut // Thread should appear under the main repo with a worktree chip. assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project]", - " WT Thread {wt-feature-a}", - ], + vec!["v [project]", " WT Thread {wt-feature-a}"], ); // Only 1 workspace should exist. @@ -4316,11 +3452,7 @@ async fn test_clicking_worktree_thread_does_not_briefly_render_as_separate_proje assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project]", - " WT Thread {wt-feature-a}", - ], + vec!["v [project]", " WT Thread {wt-feature-a}"], ); focus_sidebar(&sidebar, cx); @@ -5363,7 +4495,6 @@ async fn test_linked_worktree_threads_not_duplicated_across_groups(cx: &mut Test assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - // "v [other, project]", "v [project]", " Worktree Thread {wt-feature-a}", @@ -7151,23 +6282,19 @@ mod property_test { SwitchToThread { index: usize }, SwitchToProjectGroup { index: usize }, AddLinkedWorktree { project_group_index: usize }, - AddWorktreeToProject { project_group_index: usize }, - RemoveWorktreeFromProject { project_group_index: usize }, } - // Distribution (out of 24 slots): - // SaveThread: 5 slots (~21%) - // SaveWorktreeThread: 2 slots (~8%) - // ToggleAgentPanel: 1 slot (~4%) - // CreateDraftThread: 1 slot (~4%) - // AddProject: 1 slot (~4%) - // ArchiveThread: 2 slots (~8%) - // SwitchToThread: 2 slots (~8%) - // SwitchToProjectGroup: 2 slots (~8%) - // AddLinkedWorktree: 4 slots (~17%) - // AddWorktreeToProject: 2 slots 
(~8%) - // RemoveWorktreeFromProject: 2 slots (~8%) - const DISTRIBUTION_SLOTS: u32 = 24; + // Distribution (out of 20 slots): + // SaveThread: 5 slots (~25%) + // SaveWorktreeThread: 2 slots (~10%) + // ToggleAgentPanel: 1 slot (~5%) + // CreateDraftThread: 1 slot (~5%) + // AddProject: 1 slot (~5%) + // ArchiveThread: 2 slots (~10%) + // SwitchToThread: 2 slots (~10%) + // SwitchToProjectGroup: 2 slots (~10%) + // AddLinkedWorktree: 4 slots (~20%) + const DISTRIBUTION_SLOTS: u32 = 20; impl TestState { fn generate_operation(&self, raw: u32, project_group_count: usize) -> Operation { @@ -7209,18 +6336,6 @@ mod property_test { 16..=19 => Operation::SaveThread { project_group_index: extra % project_group_count, }, - 20..=21 if project_group_count > 0 => Operation::AddWorktreeToProject { - project_group_index: extra % project_group_count, - }, - 20..=21 => Operation::SaveThread { - project_group_index: extra % project_group_count, - }, - 22..=23 if project_group_count > 0 => Operation::RemoveWorktreeFromProject { - project_group_index: extra % project_group_count, - }, - 22..=23 => Operation::SaveThread { - project_group_index: extra % project_group_count, - }, _ => unreachable!(), } } @@ -7478,57 +6593,6 @@ mod property_test { main_workspace_path: main_path.clone(), }); } - Operation::AddWorktreeToProject { - project_group_index, - } => { - let workspace = multi_workspace.read_with(cx, |mw, cx| { - let key = mw.project_group_keys().nth(project_group_index).unwrap(); - mw.workspaces_for_project_group(key, cx).next().cloned() - }); - let Some(workspace) = workspace else { return }; - let project = workspace.read_with(cx, |ws, _| ws.project().clone()); - - let new_path = state.next_workspace_path(); - state - .fs - .insert_tree(&new_path, serde_json::json!({ ".git": {}, "src": {} })) - .await; - - let result = project - .update(cx, |project, cx| { - project.find_or_create_worktree(&new_path, true, cx) - }) - .await; - if result.is_err() { - return; - } - 
cx.run_until_parked(); - } - Operation::RemoveWorktreeFromProject { - project_group_index, - } => { - let workspace = multi_workspace.read_with(cx, |mw, cx| { - let key = mw.project_group_keys().nth(project_group_index).unwrap(); - mw.workspaces_for_project_group(key, cx).next().cloned() - }); - let Some(workspace) = workspace else { return }; - let project = workspace.read_with(cx, |ws, _| ws.project().clone()); - - let worktree_count = project.read_with(cx, |p, cx| p.visible_worktrees(cx).count()); - if worktree_count <= 1 { - return; - } - - let worktree_id = project.read_with(cx, |p, cx| { - p.visible_worktrees(cx).last().map(|wt| wt.read(cx).id()) - }); - if let Some(worktree_id) = worktree_id { - project.update(cx, |project, cx| { - project.remove_worktree(worktree_id, cx); - }); - cx.run_until_parked(); - } - } } } @@ -7556,7 +6620,6 @@ mod property_test { verify_all_threads_are_shown(sidebar, cx)?; verify_active_state_matches_current_workspace(sidebar, cx)?; verify_all_workspaces_are_reachable(sidebar, cx)?; - verify_workspace_group_key_integrity(sidebar, cx)?; Ok(()) } @@ -7808,15 +6871,6 @@ mod property_test { Ok(()) } - fn verify_workspace_group_key_integrity(sidebar: &Sidebar, cx: &App) -> anyhow::Result<()> { - let Some(multi_workspace) = sidebar.multi_workspace.upgrade() else { - anyhow::bail!("sidebar should still have an associated multi-workspace"); - }; - multi_workspace - .read(cx) - .assert_project_group_key_integrity(cx) - } - #[gpui::property_test(config = ProptestConfig { cases: 50, ..Default::default() diff --git a/crates/workspace/src/multi_workspace.rs b/crates/workspace/src/multi_workspace.rs index bc9a5d59c74aa1cadc60ecbcb1f08b2afc3f3abd..f4e8b47399e1420a4b01d380ad4a6532a0934a2d 100644 --- a/crates/workspace/src/multi_workspace.rs +++ b/crates/workspace/src/multi_workspace.rs @@ -101,10 +101,6 @@ pub enum MultiWorkspaceEvent { ActiveWorkspaceChanged, WorkspaceAdded(Entity), WorkspaceRemoved(EntityId), - ProjectGroupKeyChanged { - 
old_key: ProjectGroupKey, - new_key: ProjectGroupKey, - }, } pub enum SidebarEvent { @@ -306,7 +302,7 @@ pub struct MultiWorkspace { workspaces: Vec>, active_workspace: ActiveWorkspace, project_group_keys: Vec, - workspace_group_keys: HashMap, + provisional_project_group_keys: HashMap, sidebar: Option>, sidebar_open: bool, sidebar_overlay: Option, @@ -359,7 +355,7 @@ impl MultiWorkspace { Self { window_id: window.window_handle().window_id(), project_group_keys: Vec::new(), - workspace_group_keys: HashMap::default(), + provisional_project_group_keys: HashMap::default(), workspaces: Vec::new(), active_workspace: ActiveWorkspace::Transient(workspace), sidebar: None, @@ -563,11 +559,19 @@ impl MultiWorkspace { cx.subscribe_in(&project, window, { let workspace = workspace.downgrade(); move |this, _project, event, _window, cx| match event { - project::Event::WorktreeAdded(_) - | project::Event::WorktreeRemoved(_) - | project::Event::WorktreeUpdatedRootRepoCommonDir(_) => { + project::Event::WorktreeAdded(_) | project::Event::WorktreeRemoved(_) => { if let Some(workspace) = workspace.upgrade() { - this.handle_workspace_key_change(&workspace, cx); + this.add_project_group_key(workspace.read(cx).project_group_key(cx)); + } + } + project::Event::WorktreeUpdatedRootRepoCommonDir(_) => { + if let Some(workspace) = workspace.upgrade() { + this.maybe_clear_provisional_project_group_key(&workspace, cx); + this.add_project_group_key( + this.project_group_key_for_workspace(&workspace, cx), + ); + this.remove_stale_project_group_keys(cx); + cx.notify(); } } _ => {} @@ -583,111 +587,7 @@ impl MultiWorkspace { .detach(); } - fn handle_workspace_key_change( - &mut self, - workspace: &Entity, - cx: &mut Context, - ) { - let workspace_id = workspace.entity_id(); - let old_key = self.project_group_key_for_workspace(workspace, cx); - let new_key = workspace.read(cx).project_group_key(cx); - - if new_key.path_list().paths().is_empty() || old_key == new_key { - return; - } - - let 
active_workspace = self.workspace().clone(); - - self.set_workspace_group_key(workspace, new_key.clone()); - - let changed_root_paths = workspace.read(cx).root_paths(cx); - let old_paths = old_key.path_list().paths(); - let new_paths = new_key.path_list().paths(); - - // Remove workspaces that already had the new key and have the same - // root paths (true duplicates that this workspace is replacing). - // - // NOTE: These are dropped without prompting for unsaved changes because - // the user explicitly added a folder that makes this workspace - // identical to the duplicate — they are intentionally overwriting it. - let duplicate_workspaces: Vec> = self - .workspaces - .iter() - .filter(|ws| { - ws.entity_id() != workspace_id - && self.project_group_key_for_workspace(ws, cx) == new_key - && ws.read(cx).root_paths(cx) == changed_root_paths - }) - .cloned() - .collect(); - - if duplicate_workspaces.contains(&active_workspace) { - // The active workspace is among the duplicates — drop the - // incoming workspace instead so the user stays where they are. - self.detach_workspace(workspace, cx); - self.workspaces.retain(|w| w != workspace); - } else { - for ws in &duplicate_workspaces { - self.detach_workspace(ws, cx); - self.workspaces.retain(|w| w != ws); - } - } - - // Propagate folder adds/removes to linked worktree siblings - // (different root paths, same old key) so they stay in the group. 
- let group_workspaces: Vec> = self - .workspaces - .iter() - .filter(|ws| { - ws.entity_id() != workspace_id - && self.project_group_key_for_workspace(ws, cx) == old_key - }) - .cloned() - .collect(); - - for workspace in &group_workspaces { - // Pre-set this to stop later WorktreeAdded events from triggering - self.set_workspace_group_key(&workspace, new_key.clone()); - - let project = workspace.read(cx).project().clone(); - - for added_path in new_paths.iter().filter(|p| !old_paths.contains(p)) { - project - .update(cx, |project, cx| { - project.find_or_create_worktree(added_path, true, cx) - }) - .detach_and_log_err(cx); - } - - for removed_path in old_paths.iter().filter(|p| !new_paths.contains(p)) { - project.update(cx, |project, cx| { - project.remove_worktree_for_main_worktree_path(removed_path, cx); - }); - } - } - - // Restore the active workspace after removals may have shifted - // the index. If the previously active workspace was removed, - // fall back to the workspace whose key just changed. 
- if let ActiveWorkspace::Persistent(_) = &self.active_workspace { - let target = if self.workspaces.contains(&active_workspace) { - &active_workspace - } else { - workspace - }; - if let Some(new_index) = self.workspaces.iter().position(|ws| ws == target) { - self.active_workspace = ActiveWorkspace::Persistent(new_index); - } - } - - self.remove_stale_project_group_keys(cx); - - cx.emit(MultiWorkspaceEvent::ProjectGroupKeyChanged { old_key, new_key }); - self.serialize(cx); - cx.notify(); - } - - fn add_project_group_key(&mut self, project_group_key: ProjectGroupKey) { + pub fn add_project_group_key(&mut self, project_group_key: ProjectGroupKey) { if project_group_key.path_list().paths().is_empty() { return; } @@ -698,12 +598,12 @@ impl MultiWorkspace { self.project_group_keys.insert(0, project_group_key); } - pub(crate) fn set_workspace_group_key( + pub fn set_provisional_project_group_key( &mut self, workspace: &Entity, project_group_key: ProjectGroupKey, ) { - self.workspace_group_keys + self.provisional_project_group_keys .insert(workspace.entity_id(), project_group_key.clone()); self.add_project_group_key(project_group_key); } @@ -713,12 +613,28 @@ impl MultiWorkspace { workspace: &Entity, cx: &App, ) -> ProjectGroupKey { - self.workspace_group_keys + self.provisional_project_group_keys .get(&workspace.entity_id()) .cloned() .unwrap_or_else(|| workspace.read(cx).project_group_key(cx)) } + fn maybe_clear_provisional_project_group_key( + &mut self, + workspace: &Entity, + cx: &App, + ) { + let live_key = workspace.read(cx).project_group_key(cx); + if self + .provisional_project_group_keys + .get(&workspace.entity_id()) + .is_some_and(|key| *key == live_key) + { + self.provisional_project_group_keys + .remove(&workspace.entity_id()); + } + } + fn remove_stale_project_group_keys(&mut self, cx: &App) { let workspace_keys: HashSet = self .workspaces @@ -1129,6 +1045,7 @@ impl MultiWorkspace { self.promote_transient(old, cx); } else { self.detach_workspace(&old, 
cx); + cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(old.entity_id())); } } } else { @@ -1139,6 +1056,7 @@ impl MultiWorkspace { }); if let Some(old) = self.active_workspace.set_transient(workspace) { self.detach_workspace(&old, cx); + cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(old.entity_id())); } } @@ -1165,7 +1083,7 @@ impl MultiWorkspace { /// Returns the index of the newly inserted workspace. fn promote_transient(&mut self, workspace: Entity, cx: &mut Context) -> usize { let project_group_key = self.project_group_key_for_workspace(&workspace, cx); - self.set_workspace_group_key(&workspace, project_group_key); + self.add_project_group_key(project_group_key); self.workspaces.push(workspace.clone()); cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace)); self.workspaces.len() - 1 @@ -1181,10 +1099,10 @@ impl MultiWorkspace { for workspace in std::mem::take(&mut self.workspaces) { if workspace != active { self.detach_workspace(&workspace, cx); + cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(workspace.entity_id())); } } self.project_group_keys.clear(); - self.workspace_group_keys.clear(); self.active_workspace = ActiveWorkspace::Transient(active); cx.notify(); } @@ -1210,7 +1128,7 @@ impl MultiWorkspace { workspace.set_multi_workspace(weak_self, cx); }); - self.set_workspace_group_key(&workspace, project_group_key); + self.add_project_group_key(project_group_key); self.workspaces.push(workspace.clone()); cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace)); cx.notify(); @@ -1218,12 +1136,10 @@ impl MultiWorkspace { } } - /// Detaches a workspace: clears session state, DB binding, cached - /// group key, and emits `WorkspaceRemoved`. The DB row is preserved - /// so the workspace still appears in the recent-projects list. + /// Clears session state and DB binding for a workspace that is being + /// removed or replaced. The DB row is preserved so the workspace still + /// appears in the recent-projects list. 
fn detach_workspace(&mut self, workspace: &Entity, cx: &mut Context) { - self.workspace_group_keys.remove(&workspace.entity_id()); - cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(workspace.entity_id())); workspace.update(cx, |workspace, _cx| { workspace.session_id.take(); workspace._schedule_serialize_workspace.take(); @@ -1397,46 +1313,6 @@ impl MultiWorkspace { tasks } - #[cfg(any(test, feature = "test-support"))] - pub fn assert_project_group_key_integrity(&self, cx: &App) -> anyhow::Result<()> { - let stored_keys: HashSet<&ProjectGroupKey> = self.project_group_keys().collect(); - - let workspace_group_keys: HashSet<&ProjectGroupKey> = - self.workspace_group_keys.values().collect(); - let extra_keys = &workspace_group_keys - &stored_keys; - anyhow::ensure!( - extra_keys.is_empty(), - "workspace_group_keys values not in project_group_keys: {:?}", - extra_keys, - ); - - let cached_ids: HashSet = self.workspace_group_keys.keys().copied().collect(); - let workspace_ids: HashSet = - self.workspaces.iter().map(|ws| ws.entity_id()).collect(); - anyhow::ensure!( - cached_ids == workspace_ids, - "workspace_group_keys entity IDs don't match workspaces.\n\ - only in cache: {:?}\n\ - only in workspaces: {:?}", - &cached_ids - &workspace_ids, - &workspace_ids - &cached_ids, - ); - - for workspace in self.workspaces() { - let live_key = workspace.read(cx).project_group_key(cx); - let cached_key = &self.workspace_group_keys[&workspace.entity_id()]; - anyhow::ensure!( - *cached_key == live_key, - "workspace {:?} has live key {:?} but cached key {:?}", - workspace.entity_id(), - live_key, - cached_key, - ); - } - - Ok(()) - } - #[cfg(any(test, feature = "test-support"))] pub fn set_random_database_id(&mut self, cx: &mut Context) { self.workspace().update(cx, |workspace, _cx| { @@ -1595,6 +1471,7 @@ impl MultiWorkspace { for workspace in &removed_workspaces { this.detach_workspace(workspace, cx); + cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(workspace.entity_id())); } let 
removed_any = !removed_workspaces.is_empty(); diff --git a/crates/workspace/src/multi_workspace_tests.rs b/crates/workspace/src/multi_workspace_tests.rs index 9cab28c0ca4ab34b2189985e898285dd82dd4f32..259346fe097826b3dcc19fb8fad0b8f07ddd0488 100644 --- a/crates/workspace/src/multi_workspace_tests.rs +++ b/crates/workspace/src/multi_workspace_tests.rs @@ -185,3 +185,157 @@ async fn test_project_group_keys_duplicate_not_added(cx: &mut TestAppContext) { ); }); } + +#[gpui::test] +async fn test_project_group_keys_on_worktree_added(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; + fs.insert_tree("/root_b", json!({ "file.txt": "" })).await; + let project = Project::test(fs, ["/root_a".as_ref()], cx).await; + + let initial_key = project.read_with(cx, |p, cx| p.project_group_key(cx)); + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); + }); + + // Add a second worktree to the same project. 
+ let (worktree, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/root_b", true, cx) + }) + .await + .unwrap(); + worktree + .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete()) + .await; + cx.run_until_parked(); + + let updated_key = project.read_with(cx, |p, cx| p.project_group_key(cx)); + assert_ne!( + initial_key, updated_key, + "key should change after adding a worktree" + ); + + multi_workspace.read_with(cx, |mw, _cx| { + let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); + assert_eq!( + keys.len(), + 2, + "should have both the original and updated key" + ); + assert_eq!(*keys[0], updated_key); + assert_eq!(*keys[1], initial_key); + }); +} + +#[gpui::test] +async fn test_project_group_keys_on_worktree_removed(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; + fs.insert_tree("/root_b", json!({ "file.txt": "" })).await; + let project = Project::test(fs, ["/root_a".as_ref(), "/root_b".as_ref()], cx).await; + + let initial_key = project.read_with(cx, |p, cx| p.project_group_key(cx)); + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); + }); + + // Remove one worktree. 
+ let worktree_b_id = project.read_with(cx, |project, cx| { + project + .worktrees(cx) + .find(|wt| wt.read(cx).root_name().as_unix_str() == "root_b") + .unwrap() + .read(cx) + .id() + }); + project.update(cx, |project, cx| { + project.remove_worktree(worktree_b_id, cx); + }); + cx.run_until_parked(); + + let updated_key = project.read_with(cx, |p, cx| p.project_group_key(cx)); + assert_ne!( + initial_key, updated_key, + "key should change after removing a worktree" + ); + + multi_workspace.read_with(cx, |mw, _cx| { + let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); + assert_eq!( + keys.len(), + 2, + "should accumulate both the original and post-removal key" + ); + assert_eq!(*keys[0], updated_key); + assert_eq!(*keys[1], initial_key); + }); +} + +#[gpui::test] +async fn test_project_group_keys_across_multiple_workspaces_and_worktree_changes( + cx: &mut TestAppContext, +) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; + fs.insert_tree("/root_b", json!({ "file.txt": "" })).await; + fs.insert_tree("/root_c", json!({ "file.txt": "" })).await; + let project_a = Project::test(fs.clone(), ["/root_a".as_ref()], cx).await; + let project_b = Project::test(fs.clone(), ["/root_b".as_ref()], cx).await; + + let key_a = project_a.read_with(cx, |p, cx| p.project_group_key(cx)); + let key_b = project_b.read_with(cx, |p, cx| p.project_group_key(cx)); + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); + }); + + multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_b, window, cx); + }); + + multi_workspace.read_with(cx, |mw, _cx| { + assert_eq!(mw.project_group_keys().count(), 2); + }); + + // Now add a worktree to project_a. This should produce a third key. 
+ let (worktree, _) = project_a + .update(cx, |project, cx| { + project.find_or_create_worktree("/root_c", true, cx) + }) + .await + .unwrap(); + worktree + .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete()) + .await; + cx.run_until_parked(); + + let key_a_updated = project_a.read_with(cx, |p, cx| p.project_group_key(cx)); + assert_ne!(key_a, key_a_updated); + + multi_workspace.read_with(cx, |mw, _cx| { + let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); + assert_eq!( + keys.len(), + 3, + "should have key_a, key_b, and the updated key_a with root_c" + ); + assert_eq!(*keys[0], key_a_updated); + assert_eq!(*keys[1], key_b); + assert_eq!(*keys[2], key_a); + }); +} diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index d40b7abae0c036a5cdd227ec8a547bd3c10b262c..81224c0e2db520a278bfb21429e211ba9a4f09ae 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -9886,7 +9886,7 @@ async fn open_remote_project_inner( }); if let Some(project_group_key) = provisional_project_group_key.clone() { - multi_workspace.set_workspace_group_key(&new_workspace, project_group_key); + multi_workspace.set_provisional_project_group_key(&new_workspace, project_group_key); } multi_workspace.activate(new_workspace.clone(), window, cx); new_workspace From f1d9afff663a1dbc9104da3c84df979e1b000a1d Mon Sep 17 00:00:00 2001 From: Eric Holk Date: Thu, 9 Apr 2026 19:22:55 -0700 Subject: [PATCH 26/67] Dismiss stale remote connection modal when switching back to local workspace (#53575) When the sidebar opens a remote SSH project, it shows a `RemoteConnectionModal` on the currently active (local) workspace. After the connection succeeds and a new remote workspace is created and activated, the modal on the local workspace was never dismissed. Switching back to the local workspace (e.g. by activating a thread) would re-render the local workspace's modal layer, revealing the stale "Starting proxy..." 
modal. Other code paths that show this modal (`recent_projects`, `git_ui`) already call `modal.finished(cx)` after the connection completes. The sidebar and agent panel paths were missing this cleanup. ## Changes - **`remote_connection`**: Added `dismiss_connection_modal()`, a public utility that finds and dismisses any active `RemoteConnectionModal` on a given workspace. - **`sidebar`**: Fixed two call sites (`open_workspace_for_group` and `open_workspace_and_activate_thread`) to dismiss the modal after the connection task completes, regardless of success or failure. - **`agent_ui`**: Fixed `open_worktree_workspace_and_start_thread` to dismiss the modal after workspace creation completes. Release Notes: - (Preview only) Fixed a spurious "Starting proxy..." modal appearing and hanging when switching back to a local project after opening a remote SSH project in a multi-project workspace. --- crates/agent_ui/src/agent_panel.rs | 47 ++++++++++--------- .../src/remote_connection.rs | 17 +++++++ crates/sidebar/src/sidebar.rs | 35 +++++++++----- 3 files changed, 66 insertions(+), 33 deletions(-) diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index adb255ea62454d4e3eb3dcaa3f0d6cf481f66472..6c555af7fa630f5f5cc5995f36ec8ee7007508b9 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -3190,28 +3190,33 @@ impl AgentPanel { let window_handle = window_handle .ok_or_else(|| anyhow!("No window handle available for workspace creation"))?; - let workspace_task = window_handle.update(cx, |multi_workspace, window, cx| { - let path_list = PathList::new(&all_paths); - let active_workspace = multi_workspace.workspace().clone(); - - multi_workspace.find_or_create_workspace( - path_list, - remote_connection_options, - None, - move |connection_options, window, cx| { - remote_connection::connect_with_modal( - &active_workspace, - connection_options, - window, - cx, - ) - }, - window, - cx, - ) - })?; + let 
(workspace_task, modal_workspace) = + window_handle.update(cx, |multi_workspace, window, cx| { + let path_list = PathList::new(&all_paths); + let active_workspace = multi_workspace.workspace().clone(); + let modal_workspace = active_workspace.clone(); + + let task = multi_workspace.find_or_create_workspace( + path_list, + remote_connection_options, + None, + move |connection_options, window, cx| { + remote_connection::connect_with_modal( + &active_workspace, + connection_options, + window, + cx, + ) + }, + window, + cx, + ); + (task, modal_workspace) + })?; - let new_workspace = workspace_task.await?; + let result = workspace_task.await; + remote_connection::dismiss_connection_modal(&modal_workspace, cx); + let new_workspace = result?; let panels_task = new_workspace.update(cx, |workspace, _cx| workspace.take_panels_task()); diff --git a/crates/remote_connection/src/remote_connection.rs b/crates/remote_connection/src/remote_connection.rs index 48024af741b2b850c6cc9ff7379183cda8385efd..8aa4622929d6086b99e840e6b52bd5f46c49c898 100644 --- a/crates/remote_connection/src/remote_connection.rs +++ b/crates/remote_connection/src/remote_connection.rs @@ -574,6 +574,23 @@ pub fn connect_with_modal( }) } +/// Dismisses any active [`RemoteConnectionModal`] on the given workspace. +/// +/// This should be called after a remote connection attempt completes +/// (success or failure) when the modal was shown on a workspace that may +/// outlive the connection flow — for example, when the modal is shown +/// on a local workspace before switching to a newly-created remote +/// workspace. +pub fn dismiss_connection_modal(workspace: &Entity, cx: &mut gpui::AsyncWindowContext) { + workspace + .update_in(cx, |workspace, _window, cx| { + if let Some(modal) = workspace.active_modal::(cx) { + modal.update(cx, |modal, cx| modal.finished(cx)); + } + }) + .ok(); +} + /// Creates a [`RemoteClient`] by reusing an existing connection from the /// global pool. No interactive UI is shown. 
This should only be called /// when [`remote::has_active_connection`] returns `true`. diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs index 488127eb0bd04b064c2c6e3b1d8dc297ada9c477..3cd7e0059ac165bcd5e738591363cb600abcd60f 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -753,19 +753,26 @@ impl Sidebar { let host = project_group_key.host(); let provisional_key = Some(project_group_key.clone()); let active_workspace = multi_workspace.read(cx).workspace().clone(); + let modal_workspace = active_workspace.clone(); - multi_workspace - .update(cx, |this, cx| { - this.find_or_create_workspace( - path_list, - host, - provisional_key, - |options, window, cx| connect_remote(active_workspace, options, window, cx), - window, - cx, - ) - }) - .detach_and_log_err(cx); + let task = multi_workspace.update(cx, |this, cx| { + this.find_or_create_workspace( + path_list, + host, + provisional_key, + |options, window, cx| connect_remote(active_workspace, options, window, cx), + window, + cx, + ) + }); + + cx.spawn_in(window, async move |_this, cx| { + let result = task.await; + remote_connection::dismiss_connection_modal(&modal_workspace, cx); + result?; + anyhow::Ok(()) + }) + .detach_and_log_err(cx); } /// Rebuilds the sidebar contents from current workspace and thread state. @@ -2292,6 +2299,7 @@ impl Sidebar { let host = project_group_key.host(); let provisional_key = Some(project_group_key.clone()); let active_workspace = multi_workspace.read(cx).workspace().clone(); + let modal_workspace = active_workspace.clone(); let open_task = multi_workspace.update(cx, |this, cx| { this.find_or_create_workspace( @@ -2306,6 +2314,9 @@ impl Sidebar { cx.spawn_in(window, async move |this, cx| { let result = open_task.await; + // Dismiss the modal as soon as the open attempt completes so + // failures or cancellations do not leave a stale connection modal behind. 
+ remote_connection::dismiss_connection_modal(&modal_workspace, cx); if result.is_err() || is_remote { this.update(cx, |this, _cx| { From b8766ef3af89db608509d49b3507185b699d5eb4 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 9 Apr 2026 20:38:21 -0600 Subject: [PATCH 27/67] Use -- in more places that call subcommands with paths (#53484) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Thanks to Dario Weißer for the suggestion Release Notes: - Fixed some potential edge cases when paths in a project started with `-`. --- crates/dev_container/src/docker.rs | 2 +- crates/git/src/blame.rs | 2 +- crates/git/src/repository.rs | 7 ++++--- crates/gpui_macos/src/platform.rs | 1 + 4 files changed, 7 insertions(+), 5 deletions(-) diff --git a/crates/dev_container/src/docker.rs b/crates/dev_container/src/docker.rs index c02bafb195ae20e203bffdf471c4a284a44a15e0..99ce7422eee36d56e2bc53fd31d150fe2f41b16d 100644 --- a/crates/dev_container/src/docker.rs +++ b/crates/dev_container/src/docker.rs @@ -196,7 +196,7 @@ impl Docker { async fn pull_image(&self, image: &String) -> Result<(), DevContainerError> { let mut command = Command::new(&self.docker_cli); - command.args(&["pull", image]); + command.args(&["pull", "--", image]); let output = command.output().await.map_err(|e| { log::error!("Error pulling image: {e}"); diff --git a/crates/git/src/blame.rs b/crates/git/src/blame.rs index 76e622fd6d7ae490c2c869c5ed02f02a48b45cab..7597e29aee8604ce54ee278a4e8bb61e24ac6886 100644 --- a/crates/git/src/blame.rs +++ b/crates/git/src/blame.rs @@ -58,7 +58,7 @@ async fn run_git_blame( let mut child = { let span = ztracing::debug_span!("spawning git-blame command", path = path.as_unix_str()); let _enter = span.enter(); - git.build_command(&["blame", "--incremental", "--contents", "-"]) + git.build_command(&["blame", "--incremental", "--contents", "-", "--"]) .arg(path.as_unix_str()) .stdin(Stdio::piped()) .stdout(Stdio::piped()) diff --git 
a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 6d17641c6ef9afafe7967f3d4bd5b37ef8c363d3..b1b9af106d93e02ee61fdc436dce7d95f9a7c107 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -1462,7 +1462,7 @@ impl GitRepository for RealGitRepository { log::debug!("indexing SHA: {sha}, path {path:?}"); let output = git - .build_command(&["update-index", "--add", "--cacheinfo", mode, sha]) + .build_command(&["update-index", "--add", "--cacheinfo", mode, sha, "--"]) .envs(env.iter()) .arg(path.as_unix_str()) .output() @@ -1476,7 +1476,7 @@ impl GitRepository for RealGitRepository { } else { log::debug!("removing path {path:?} from the index"); let output = git - .build_command(&["update-index", "--force-remove"]) + .build_command(&["update-index", "--force-remove", "--"]) .envs(env.iter()) .arg(path.as_unix_str()) .output() @@ -2114,7 +2114,7 @@ impl GitRepository for RealGitRepository { .spawn(async move { let git = git_binary?; let output = git - .build_command(&["stash", "push", "--quiet", "--include-untracked"]) + .build_command(&["stash", "push", "--quiet", "--include-untracked", "--"]) .envs(env.iter()) .args(paths.iter().map(|p| p.as_unix_str())) .output() @@ -3146,6 +3146,7 @@ fn git_status_args(path_prefixes: &[RepoPath]) -> Vec { OsString::from("--untracked-files=all"), OsString::from("--no-renames"), OsString::from("-z"), + OsString::from("--"), ]; args.extend(path_prefixes.iter().map(|path_prefix| { if path_prefix.is_empty() { diff --git a/crates/gpui_macos/src/platform.rs b/crates/gpui_macos/src/platform.rs index 291eec54df43459e8ee15cd35d73a9dfd6e4dd15..c0f721842a5f5dcacd809b5a6d896335ca4f0e45 100644 --- a/crates/gpui_macos/src/platform.rs +++ b/crates/gpui_macos/src/platform.rs @@ -858,6 +858,7 @@ impl Platform for MacPlatform { .background_executor .spawn(async move { if let Some(mut child) = new_command("open") + .arg("--") .arg(path) .spawn() .context("invoking open command") From 
377e78b8bf90a64ad868e73684d9a3b50cb1e16c Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Thu, 9 Apr 2026 22:45:01 -0400 Subject: [PATCH 28/67] agent: Support remote connection args in thread metadata database (#53550) This PR adds remote connection data to the threads metadata database. This fixes an issue where threads run on separate projects with the same remote/local path list would show up in the sidebar in both workspaces, instead of only the workspace they were originally created in. I added a migrator that uses the workspace persistence database to add the remote connection argument to threads that only have path list matches with a remote project. If a path list matches with both local/remote workspaces, we default to setting it as local. Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - Fixed agent threads with matching path lists appearing in the sidebar of both a remote and a local workspace instead of only the workspace they were created in. 
--------- Co-authored-by: Eric Holk --- crates/agent_ui/src/thread_import.rs | 117 +++++---- crates/agent_ui/src/thread_metadata_store.rs | 240 +++++++++++++++++-- crates/sidebar/src/sidebar_tests.rs | 20 ++ 3 files changed, 315 insertions(+), 62 deletions(-) diff --git a/crates/agent_ui/src/thread_import.rs b/crates/agent_ui/src/thread_import.rs index 5402b1c74353b73a522a068aa32dfd0a9dc85c60..41a23f894d8f406cbdbdcb03db3879437a45e40f 100644 --- a/crates/agent_ui/src/thread_import.rs +++ b/crates/agent_ui/src/thread_import.rs @@ -12,6 +12,7 @@ use gpui::{ }; use notifications::status_toast::{StatusToast, ToastIcon}; use project::{AgentId, AgentRegistryStore, AgentServerStore}; +use remote::RemoteConnectionOptions; use ui::{ Checkbox, KeyBinding, ListItem, ListItemSpacing, Modal, ModalFooter, ModalHeader, Section, prelude::*, @@ -436,19 +437,28 @@ fn find_threads_to_import( let mut wait_for_connection_tasks = Vec::new(); for store in stores { + let remote_connection = store + .read(cx) + .project() + .read(cx) + .remote_connection_options(cx); + for agent_id in agent_ids.clone() { let agent = Agent::from(agent_id.clone()); let server = agent.server(::global(cx), ThreadStore::global(cx)); let entry = store.update(cx, |store, cx| store.request_connection(agent, server, cx)); - wait_for_connection_tasks - .push(entry.read(cx).wait_for_connection().map(|s| (agent_id, s))); + + wait_for_connection_tasks.push(entry.read(cx).wait_for_connection().map({ + let remote_connection = remote_connection.clone(); + move |state| (agent_id, remote_connection, state) + })); } } let mut session_list_tasks = Vec::new(); cx.spawn(async move |cx| { let results = futures::future::join_all(wait_for_connection_tasks).await; - for (agent, result) in results { + for (agent_id, remote_connection, result) in results { let Some(state) = result.log_err() else { continue; }; @@ -457,18 +467,25 @@ fn find_threads_to_import( }; let task = cx.update(|cx| { 
list.list_sessions(AgentSessionListRequest::default(), cx) - .map(|r| (agent, r)) + .map({ + let remote_connection = remote_connection.clone(); + move |response| (agent_id, remote_connection, response) + }) }); session_list_tasks.push(task); } let mut sessions_by_agent = Vec::new(); let results = futures::future::join_all(session_list_tasks).await; - for (agent_id, result) in results { + for (agent_id, remote_connection, result) in results { let Some(response) = result.log_err() else { continue; }; - sessions_by_agent.push((agent_id, response.sessions)); + sessions_by_agent.push(SessionByAgent { + agent_id, + remote_connection, + sessions: response.sessions, + }); } Ok(collect_importable_threads( @@ -478,12 +495,23 @@ fn find_threads_to_import( }) } +struct SessionByAgent { + agent_id: AgentId, + remote_connection: Option, + sessions: Vec, +} + fn collect_importable_threads( - sessions_by_agent: Vec<(AgentId, Vec)>, + sessions_by_agent: Vec, mut existing_sessions: HashSet, ) -> Vec { let mut to_insert = Vec::new(); - for (agent_id, sessions) in sessions_by_agent { + for SessionByAgent { + agent_id, + remote_connection, + sessions, + } in sessions_by_agent + { for session in sessions { if !existing_sessions.insert(session.session_id.clone()) { continue; @@ -501,6 +529,7 @@ fn collect_importable_threads( created_at: session.created_at, folder_paths, main_worktree_paths: PathList::default(), + remote_connection: remote_connection.clone(), archived: true, }); } @@ -538,9 +567,10 @@ mod tests { let existing = HashSet::from_iter(vec![acp::SessionId::new("existing-1")]); let paths = PathList::new(&[Path::new("/project")]); - let sessions_by_agent = vec![( - AgentId::new("agent-a"), - vec![ + let sessions_by_agent = vec![SessionByAgent { + agent_id: AgentId::new("agent-a"), + remote_connection: None, + sessions: vec![ make_session( "existing-1", Some("Already There"), @@ -550,7 +580,7 @@ mod tests { ), make_session("new-1", Some("Brand New"), Some(paths), None, None), ], - 
)]; + }]; let result = collect_importable_threads(sessions_by_agent, existing); @@ -564,13 +594,14 @@ mod tests { let existing = HashSet::default(); let paths = PathList::new(&[Path::new("/project")]); - let sessions_by_agent = vec![( - AgentId::new("agent-a"), - vec![ + let sessions_by_agent = vec![SessionByAgent { + agent_id: AgentId::new("agent-a"), + remote_connection: None, + sessions: vec![ make_session("has-dirs", Some("With Dirs"), Some(paths), None, None), make_session("no-dirs", Some("No Dirs"), None, None, None), ], - )]; + }]; let result = collect_importable_threads(sessions_by_agent, existing); @@ -583,13 +614,14 @@ mod tests { let existing = HashSet::default(); let paths = PathList::new(&[Path::new("/project")]); - let sessions_by_agent = vec![( - AgentId::new("agent-a"), - vec![ + let sessions_by_agent = vec![SessionByAgent { + agent_id: AgentId::new("agent-a"), + remote_connection: None, + sessions: vec![ make_session("s1", Some("Thread 1"), Some(paths.clone()), None, None), make_session("s2", Some("Thread 2"), Some(paths), None, None), ], - )]; + }]; let result = collect_importable_threads(sessions_by_agent, existing); @@ -603,20 +635,22 @@ mod tests { let paths = PathList::new(&[Path::new("/project")]); let sessions_by_agent = vec![ - ( - AgentId::new("agent-a"), - vec![make_session( + SessionByAgent { + agent_id: AgentId::new("agent-a"), + remote_connection: None, + sessions: vec![make_session( "s1", Some("From A"), Some(paths.clone()), None, None, )], - ), - ( - AgentId::new("agent-b"), - vec![make_session("s2", Some("From B"), Some(paths), None, None)], - ), + }, + SessionByAgent { + agent_id: AgentId::new("agent-b"), + remote_connection: None, + sessions: vec![make_session("s2", Some("From B"), Some(paths), None, None)], + }, ]; let result = collect_importable_threads(sessions_by_agent, existing); @@ -640,26 +674,28 @@ mod tests { let paths = PathList::new(&[Path::new("/project")]); let sessions_by_agent = vec![ - ( - AgentId::new("agent-a"), 
- vec![make_session( + SessionByAgent { + agent_id: AgentId::new("agent-a"), + remote_connection: None, + sessions: vec![make_session( "shared-session", Some("From A"), Some(paths.clone()), None, None, )], - ), - ( - AgentId::new("agent-b"), - vec![make_session( + }, + SessionByAgent { + agent_id: AgentId::new("agent-b"), + remote_connection: None, + sessions: vec![make_session( "shared-session", Some("From B"), Some(paths), None, None, )], - ), + }, ]; let result = collect_importable_threads(sessions_by_agent, existing); @@ -679,13 +715,14 @@ mod tests { let existing = HashSet::from_iter(vec![acp::SessionId::new("s1"), acp::SessionId::new("s2")]); - let sessions_by_agent = vec![( - AgentId::new("agent-a"), - vec![ + let sessions_by_agent = vec![SessionByAgent { + agent_id: AgentId::new("agent-a"), + remote_connection: None, + sessions: vec![ make_session("s1", Some("T1"), Some(paths.clone()), None, None), make_session("s2", Some("T2"), Some(paths), None, None), ], - )]; + }]; let result = collect_importable_threads(sessions_by_agent, existing); assert!(result.is_empty()); diff --git a/crates/agent_ui/src/thread_metadata_store.rs b/crates/agent_ui/src/thread_metadata_store.rs index 127f746a9edd35bc3b62b489277980868faba1c8..101ea3c7369dae6dd88e8bc4499f048532d91a43 100644 --- a/crates/agent_ui/src/thread_metadata_store.rs +++ b/crates/agent_ui/src/thread_metadata_store.rs @@ -10,31 +10,37 @@ use anyhow::Context as _; use chrono::{DateTime, Utc}; use collections::{HashMap, HashSet}; use db::{ + kvp::KeyValueStore, sqlez::{ bindable::Column, domain::Domain, statement::Statement, thread_safe_connection::ThreadSafeConnection, }, sqlez_macros::sql, }; -use futures::{FutureExt as _, future::Shared}; +use fs::Fs; +use futures::{FutureExt, future::Shared}; use gpui::{AppContext as _, Entity, Global, Subscription, Task}; use project::AgentId; +use remote::RemoteConnectionOptions; use ui::{App, Context, SharedString}; use util::ResultExt as _; -use workspace::PathList; +use 
workspace::{PathList, SerializedWorkspaceLocation, WorkspaceDb}; use crate::DEFAULT_THREAD_TITLE; +const THREAD_REMOTE_CONNECTION_MIGRATION_KEY: &str = "thread-metadata-remote-connection-backfill"; + pub fn init(cx: &mut App) { ThreadMetadataStore::init_global(cx); - migrate_thread_metadata(cx); + let migration_task = migrate_thread_metadata(cx); + migrate_thread_remote_connections(cx, migration_task); } /// Migrate existing thread metadata from native agent thread store to the new metadata storage. /// We skip migrating threads that do not have a project. /// /// TODO: Remove this after N weeks of shipping the sidebar -fn migrate_thread_metadata(cx: &mut App) { +fn migrate_thread_metadata(cx: &mut App) -> Task> { let store = ThreadMetadataStore::global(cx); let db = store.read(cx).db.clone(); @@ -60,6 +66,7 @@ fn migrate_thread_metadata(cx: &mut App) { created_at: entry.created_at, folder_paths: entry.folder_paths, main_worktree_paths: PathList::default(), + remote_connection: None, archived: true, }) }) @@ -104,6 +111,84 @@ fn migrate_thread_metadata(cx: &mut App) { let _ = store.update(cx, |store, cx| store.reload(cx)); anyhow::Ok(()) }) +} + +fn migrate_thread_remote_connections(cx: &mut App, migration_task: Task>) { + let store = ThreadMetadataStore::global(cx); + let db = store.read(cx).db.clone(); + let kvp = KeyValueStore::global(cx); + let workspace_db = WorkspaceDb::global(cx); + let fs = ::global(cx); + + cx.spawn(async move |cx| -> anyhow::Result<()> { + migration_task.await?; + + if kvp + .read_kvp(THREAD_REMOTE_CONNECTION_MIGRATION_KEY)? 
+ .is_some() + { + return Ok(()); + } + + let recent_workspaces = workspace_db.recent_workspaces_on_disk(fs.as_ref()).await?; + + let mut local_path_lists = HashSet::::default(); + let mut remote_path_lists = HashMap::::default(); + + recent_workspaces + .iter() + .filter(|(_, location, path_list, _)| { + !path_list.is_empty() && matches!(location, &SerializedWorkspaceLocation::Local) + }) + .for_each(|(_, _, path_list, _)| { + local_path_lists.insert(path_list.clone()); + }); + + for (_, location, path_list, _) in recent_workspaces { + match location { + SerializedWorkspaceLocation::Remote(remote_connection) + if !local_path_lists.contains(&path_list) => + { + remote_path_lists + .entry(path_list) + .or_insert(remote_connection); + } + _ => {} + } + } + + let mut reloaded = false; + for metadata in db.list()? { + if metadata.remote_connection.is_some() { + continue; + } + + if let Some(remote_connection) = remote_path_lists + .get(&metadata.folder_paths) + .or_else(|| remote_path_lists.get(&metadata.main_worktree_paths)) + { + db.save(ThreadMetadata { + remote_connection: Some(remote_connection.clone()), + ..metadata + }) + .await?; + reloaded = true; + } + } + + let reloaded_task = reloaded + .then_some(store.update(cx, |store, cx| store.reload(cx))) + .unwrap_or(Task::ready(()).shared()); + + kvp.write_kvp( + THREAD_REMOTE_CONNECTION_MIGRATION_KEY.to_string(), + "1".to_string(), + ) + .await?; + reloaded_task.await; + + Ok(()) + }) .detach_and_log_err(cx); } @@ -121,6 +206,7 @@ pub struct ThreadMetadata { pub created_at: Option>, pub folder_paths: PathList, pub main_worktree_paths: PathList, + pub remote_connection: Option, pub archived: bool, } @@ -715,8 +801,8 @@ impl ThreadMetadataStore { let agent_id = thread_ref.connection().agent_id(); + let project = thread_ref.project().read(cx); let folder_paths = { - let project = thread_ref.project().read(cx); let paths: Vec> = project .visible_worktrees(cx) .map(|worktree| worktree.read(cx).abs_path()) @@ -724,12 
+810,9 @@ impl ThreadMetadataStore { PathList::new(&paths) }; - let main_worktree_paths = thread_ref - .project() - .read(cx) - .project_group_key(cx) - .path_list() - .clone(); + let project_group_key = project.project_group_key(cx); + let main_worktree_paths = project_group_key.path_list().clone(); + let remote_connection = project_group_key.host(); // Threads without a folder path (e.g. started in an empty // window) are archived by default so they don't get lost, @@ -747,6 +830,7 @@ impl ThreadMetadataStore { updated_at, folder_paths, main_worktree_paths, + remote_connection, archived, }; @@ -801,6 +885,7 @@ impl Domain for ThreadMetadataDb { PRIMARY KEY (session_id, archived_worktree_id) ) STRICT; ), + sql!(ALTER TABLE sidebar_threads ADD COLUMN remote_connection TEXT), ]; } @@ -817,7 +902,7 @@ impl ThreadMetadataDb { /// List all sidebar thread metadata, ordered by updated_at descending. pub fn list(&self) -> anyhow::Result> { self.select::( - "SELECT session_id, agent_id, title, updated_at, created_at, folder_paths, folder_paths_order, archived, main_worktree_paths, main_worktree_paths_order \ + "SELECT session_id, agent_id, title, updated_at, created_at, folder_paths, folder_paths_order, archived, main_worktree_paths, main_worktree_paths_order, remote_connection \ FROM sidebar_threads \ ORDER BY updated_at DESC" )?() @@ -847,11 +932,17 @@ impl ThreadMetadataDb { } else { (Some(main_serialized.paths), Some(main_serialized.order)) }; + let remote_connection = row + .remote_connection + .as_ref() + .map(serde_json::to_string) + .transpose() + .context("serialize thread metadata remote connection")?; let archived = row.archived; self.write(move |conn| { - let sql = "INSERT INTO sidebar_threads(session_id, agent_id, title, updated_at, created_at, folder_paths, folder_paths_order, archived, main_worktree_paths, main_worktree_paths_order) \ - VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10) \ + let sql = "INSERT INTO sidebar_threads(session_id, agent_id, title, 
updated_at, created_at, folder_paths, folder_paths_order, archived, main_worktree_paths, main_worktree_paths_order, remote_connection) \ + VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11) \ ON CONFLICT(session_id) DO UPDATE SET \ agent_id = excluded.agent_id, \ title = excluded.title, \ @@ -861,7 +952,8 @@ impl ThreadMetadataDb { folder_paths_order = excluded.folder_paths_order, \ archived = excluded.archived, \ main_worktree_paths = excluded.main_worktree_paths, \ - main_worktree_paths_order = excluded.main_worktree_paths_order"; + main_worktree_paths_order = excluded.main_worktree_paths_order, \ + remote_connection = excluded.remote_connection"; let mut stmt = Statement::prepare(conn, sql)?; let mut i = stmt.bind(&id, 1)?; i = stmt.bind(&agent_id, i)?; @@ -872,7 +964,8 @@ impl ThreadMetadataDb { i = stmt.bind(&folder_paths_order, i)?; i = stmt.bind(&archived, i)?; i = stmt.bind(&main_worktree_paths, i)?; - stmt.bind(&main_worktree_paths_order, i)?; + i = stmt.bind(&main_worktree_paths_order, i)?; + stmt.bind(&remote_connection, i)?; stmt.exec() }) .await @@ -1005,6 +1098,8 @@ impl Column for ThreadMetadata { Column::column(statement, next)?; let (main_worktree_paths_order_str, next): (Option, i32) = Column::column(statement, next)?; + let (remote_connection_json, next): (Option, i32) = + Column::column(statement, next)?; let agent_id = agent_id .map(|id| AgentId::new(id)) @@ -1035,6 +1130,12 @@ impl Column for ThreadMetadata { }) .unwrap_or_default(); + let remote_connection = remote_connection_json + .as_deref() + .map(serde_json::from_str::) + .transpose() + .context("deserialize thread metadata remote connection")?; + Ok(( ThreadMetadata { session_id: acp::SessionId::new(id), @@ -1044,6 +1145,7 @@ impl Column for ThreadMetadata { created_at, folder_paths, main_worktree_paths, + remote_connection, archived, }, next, @@ -1087,6 +1189,7 @@ mod tests { use gpui::TestAppContext; use project::FakeFs; use project::Project; + use remote::WslConnectionOptions; use 
std::path::Path; use std::rc::Rc; @@ -1126,19 +1229,37 @@ mod tests { created_at: Some(updated_at), folder_paths, main_worktree_paths: PathList::default(), + remote_connection: None, } } fn init_test(cx: &mut TestAppContext) { + let fs = FakeFs::new(cx.executor()); cx.update(|cx| { let settings_store = settings::SettingsStore::test(cx); cx.set_global(settings_store); + ::set_global(fs, cx); ThreadMetadataStore::init_global(cx); ThreadStore::init_global(cx); }); cx.run_until_parked(); } + fn clear_thread_metadata_remote_connection_backfill(cx: &mut TestAppContext) { + let kvp = cx.update(|cx| KeyValueStore::global(cx)); + smol::block_on(kvp.delete_kvp("thread-metadata-remote-connection-backfill".to_string())) + .unwrap(); + } + + fn run_thread_metadata_migrations(cx: &mut TestAppContext) { + clear_thread_metadata_remote_connection_backfill(cx); + cx.update(|cx| { + let migration_task = migrate_thread_metadata(cx); + migrate_thread_remote_connections(cx, migration_task); + }); + cx.run_until_parked(); + } + #[gpui::test] async fn test_store_initializes_cache_from_database(cx: &mut TestAppContext) { let first_paths = PathList::new(&[Path::new("/project-a")]); @@ -1340,6 +1461,7 @@ mod tests { created_at: Some(now - chrono::Duration::seconds(10)), folder_paths: project_a_paths.clone(), main_worktree_paths: PathList::default(), + remote_connection: None, archived: false, }; @@ -1397,8 +1519,7 @@ mod tests { cx.run_until_parked(); } - cx.update(|cx| migrate_thread_metadata(cx)); - cx.run_until_parked(); + run_thread_metadata_migrations(cx); let list = cx.update(|cx| { let store = ThreadMetadataStore::global(cx); @@ -1450,6 +1571,7 @@ mod tests { created_at: Some(existing_updated_at), folder_paths: project_paths.clone(), main_worktree_paths: PathList::default(), + remote_connection: None, archived: false, }; @@ -1478,8 +1600,7 @@ mod tests { save_task.await.unwrap(); cx.run_until_parked(); - cx.update(|cx| migrate_thread_metadata(cx)); - cx.run_until_parked(); + 
run_thread_metadata_migrations(cx); let list = cx.update(|cx| { let store = ThreadMetadataStore::global(cx); @@ -1490,6 +1611,82 @@ mod tests { assert_eq!(list[0].session_id.0.as_ref(), "existing-session"); } + #[gpui::test] + async fn test_migrate_thread_remote_connections_backfills_from_workspace_db( + cx: &mut TestAppContext, + ) { + init_test(cx); + + let folder_paths = PathList::new(&[Path::new("/remote-project")]); + let updated_at = Utc::now(); + let metadata = make_metadata( + "remote-session", + "Remote Thread", + updated_at, + folder_paths.clone(), + ); + + cx.update(|cx| { + let store = ThreadMetadataStore::global(cx); + store.update(cx, |store, cx| { + store.save(metadata, cx); + }); + }); + cx.run_until_parked(); + + let workspace_db = cx.update(|cx| WorkspaceDb::global(cx)); + let workspace_id = workspace_db.next_id().await.unwrap(); + let serialized_paths = folder_paths.serialize(); + let remote_connection_id = 1_i64; + workspace_db + .write(move |conn| { + let mut stmt = Statement::prepare( + conn, + "INSERT INTO remote_connections(id, kind, user, distro) VALUES (?1, ?2, ?3, ?4)", + )?; + let mut next_index = stmt.bind(&remote_connection_id, 1)?; + next_index = stmt.bind(&"wsl", next_index)?; + next_index = stmt.bind(&Some("anth".to_string()), next_index)?; + stmt.bind(&Some("Ubuntu".to_string()), next_index)?; + stmt.exec()?; + + let mut stmt = Statement::prepare( + conn, + "UPDATE workspaces SET paths = ?2, paths_order = ?3, remote_connection_id = ?4, timestamp = CURRENT_TIMESTAMP WHERE workspace_id = ?1", + )?; + let mut next_index = stmt.bind(&workspace_id, 1)?; + next_index = stmt.bind(&serialized_paths.paths, next_index)?; + next_index = stmt.bind(&serialized_paths.order, next_index)?; + stmt.bind(&Some(remote_connection_id as i32), next_index)?; + stmt.exec() + }) + .await + .unwrap(); + + clear_thread_metadata_remote_connection_backfill(cx); + cx.update(|cx| { + migrate_thread_remote_connections(cx, Task::ready(Ok(()))); + }); + 
cx.run_until_parked(); + + let metadata = cx.update(|cx| { + let store = ThreadMetadataStore::global(cx); + store + .read(cx) + .entry(&acp::SessionId::new("remote-session")) + .cloned() + .expect("expected migrated metadata row") + }); + + assert_eq!( + metadata.remote_connection, + Some(RemoteConnectionOptions::Wsl(WslConnectionOptions { + distro_name: "Ubuntu".to_string(), + user: Some("anth".to_string()), + })) + ); + } + #[gpui::test] async fn test_migrate_thread_metadata_archives_beyond_five_most_recent_per_project( cx: &mut TestAppContext, @@ -1538,8 +1735,7 @@ mod tests { cx.run_until_parked(); } - cx.update(|cx| migrate_thread_metadata(cx)); - cx.run_until_parked(); + run_thread_metadata_migrations(cx); let list = cx.update(|cx| { let store = ThreadMetadataStore::global(cx); diff --git a/crates/sidebar/src/sidebar_tests.rs b/crates/sidebar/src/sidebar_tests.rs index 8ced8d6f71f6d88ff24a522404417ef7db3a6a7c..6a3da0a1d07ae66b4012b87e4533ed163115f4c3 100644 --- a/crates/sidebar/src/sidebar_tests.rs +++ b/crates/sidebar/src/sidebar_tests.rs @@ -245,6 +245,7 @@ fn save_thread_metadata( folder_paths, main_worktree_paths, archived: false, + remote_connection: None, }; ThreadMetadataStore::global(cx).update(cx, |store, cx| store.save_manually(metadata, cx)); }); @@ -813,6 +814,7 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { updated_at: Utc::now(), created_at: Some(Utc::now()), archived: false, + remote_connection: None, }, icon: IconName::ZedAgent, icon_from_external_svg: None, @@ -836,6 +838,7 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { updated_at: Utc::now(), created_at: Some(Utc::now()), archived: false, + remote_connection: None, }, icon: IconName::ZedAgent, icon_from_external_svg: None, @@ -853,6 +856,7 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { metadata: ThreadMetadata { session_id: acp::SessionId::new(Arc::from("t-3")), agent_id: AgentId::new("zed-agent"), + remote_connection: 
None, folder_paths: PathList::default(), main_worktree_paths: PathList::default(), title: "Error thread".into(), @@ -872,10 +876,12 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { diff_stats: DiffStats::default(), }), // Thread with WaitingForConfirmation status, not active + // remote_connection: None, ListEntry::Thread(ThreadEntry { metadata: ThreadMetadata { session_id: acp::SessionId::new(Arc::from("t-4")), agent_id: AgentId::new("zed-agent"), + remote_connection: None, folder_paths: PathList::default(), main_worktree_paths: PathList::default(), title: "Waiting thread".into(), @@ -895,10 +901,12 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { diff_stats: DiffStats::default(), }), // Background thread that completed (should show notification) + // remote_connection: None, ListEntry::Thread(ThreadEntry { metadata: ThreadMetadata { session_id: acp::SessionId::new(Arc::from("t-5")), agent_id: AgentId::new("zed-agent"), + remote_connection: None, folder_paths: PathList::default(), main_worktree_paths: PathList::default(), title: "Notified thread".into(), @@ -2197,6 +2205,7 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) { folder_paths: PathList::default(), main_worktree_paths: PathList::default(), archived: false, + remote_connection: None, }, &workspace_a, false, @@ -2253,6 +2262,7 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) { folder_paths: PathList::default(), main_worktree_paths: PathList::default(), archived: false, + remote_connection: None, }, &workspace_b, false, @@ -3692,6 +3702,7 @@ async fn test_activate_archived_thread_with_saved_paths_activates_matching_works folder_paths: PathList::new(&[PathBuf::from("/project-b")]), main_worktree_paths: PathList::default(), archived: false, + remote_connection: None, }, window, cx, @@ -3757,6 +3768,7 @@ async fn test_activate_archived_thread_cwd_fallback_with_matching_workspace( folder_paths: 
PathList::new(&[std::path::PathBuf::from("/project-b")]), main_worktree_paths: PathList::default(), archived: false, + remote_connection: None, }, window, cx, @@ -3820,6 +3832,7 @@ async fn test_activate_archived_thread_no_paths_no_cwd_uses_active_workspace( folder_paths: PathList::default(), main_worktree_paths: PathList::default(), archived: false, + remote_connection: None, }, window, cx, @@ -3875,6 +3888,7 @@ async fn test_activate_archived_thread_saved_paths_opens_new_workspace(cx: &mut folder_paths: path_list_b, main_worktree_paths: PathList::default(), archived: false, + remote_connection: None, }, window, cx, @@ -3929,6 +3943,7 @@ async fn test_activate_archived_thread_reuses_workspace_in_another_window(cx: &m folder_paths: PathList::new(&[PathBuf::from("/project-b")]), main_worktree_paths: PathList::default(), archived: false, + remote_connection: None, }, window, cx, @@ -4006,6 +4021,7 @@ async fn test_activate_archived_thread_reuses_workspace_in_another_window_with_t folder_paths: PathList::new(&[PathBuf::from("/project-b")]), main_worktree_paths: PathList::default(), archived: false, + remote_connection: None, }, window, cx, @@ -4086,6 +4102,7 @@ async fn test_activate_archived_thread_prefers_current_window_for_matching_paths folder_paths: PathList::new(&[PathBuf::from("/project-a")]), main_worktree_paths: PathList::default(), archived: false, + remote_connection: None, }, window, cx, @@ -6006,6 +6023,7 @@ async fn test_legacy_thread_with_canonical_path_opens_main_repo_workspace(cx: &m folder_paths: PathList::new(&[PathBuf::from("/project")]), main_worktree_paths: PathList::default(), archived: false, + remote_connection: None, }; ThreadMetadataStore::global(cx).update(cx, |store, cx| store.save_manually(metadata, cx)); }); @@ -6361,6 +6379,7 @@ mod property_test { folder_paths: path_list, main_worktree_paths, archived: false, + remote_connection: None, }; cx.update(|_, cx| { ThreadMetadataStore::global(cx) @@ -7094,6 +7113,7 @@ async fn 
test_remote_project_integration_does_not_briefly_render_as_separate_pro folder_paths: PathList::new(&[PathBuf::from("/project-wt-1")]), main_worktree_paths, archived: false, + remote_connection: None, }; ThreadMetadataStore::global(cx).update(cx, |store, cx| store.save_manually(metadata, cx)); }); From f6aaa1600104c5197591475c1452d53b3eeb51f6 Mon Sep 17 00:00:00 2001 From: Ted Robertson <10043369+tredondo@users.noreply.github.com> Date: Thu, 9 Apr 2026 23:30:41 -0700 Subject: [PATCH 29/67] Update bug report template mode options (#53591) Removed 'Text Threads' option from the mode dropdown for AI issues, since #52757 removed the feature Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --- .github/ISSUE_TEMPLATE/10_bug_report.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/10_bug_report.yml b/.github/ISSUE_TEMPLATE/10_bug_report.yml index 5eb8e8a6299c5189384b6d060e12cd61a2249a3c..1050f5b9b4876a73267ccc0662bc306c198b08f0 100644 --- a/.github/ISSUE_TEMPLATE/10_bug_report.yml +++ b/.github/ISSUE_TEMPLATE/10_bug_report.yml @@ -101,7 +101,7 @@ body: placeholder: | - Provider: (Anthropic via ZedPro, Anthropic via API key, Copilot Chat, Mistral, OpenAI, etc.) - Model Name: (Claude Sonnet 4.5, Gemini 3.1 Pro, GPT-5) - - Mode: (Agent Panel, Inline Assistant, Terminal Assistant or Text Threads) + - Mode: (Agent Panel, Inline Assistant, or Terminal Assistant) - Other details (ACPs, MCPs, other settings, etc.): validations: required: false From 1fcf9749a6ed384ae346ddf2049ae76e0cc430a9 Mon Sep 17 00:00:00 2001 From: "Angel P." 
Date: Fri, 10 Apr 2026 02:38:50 -0400 Subject: [PATCH 30/67] Fix auto update status not being shown in the title bar (#53552) Fixes a UI regression introduced in https://github.com/zed-industries/zed/pull/48467, which prevented automatic update statuses from being shown in the title bar unless the update was checked for manually by the user, causing some users to unknowingly cancel updates in progress by closing the application, since there was no indication. https://github.com/user-attachments/assets/ea16a600-3db4-49dc-bca5-11c8fcfff619 Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes https://github.com/zed-industries/zed/issues/50162 Release Notes: - Fixed missing indication that an update was currently being downloaded or installed in the title bar --- crates/title_bar/src/update_version.rs | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/crates/title_bar/src/update_version.rs b/crates/title_bar/src/update_version.rs index 642187c3aba92a7366d75cb35d7875758097ab13..454474c6bc18d22df099a5b0062b91967cecb343 100644 --- a/crates/title_bar/src/update_version.rs +++ b/crates/title_bar/src/update_version.rs @@ -84,11 +84,11 @@ impl Render for UpdateVersion { AutoUpdateStatus::Checking if self.update_check_type.is_manual() => { UpdateButton::checking().into_any_element() } - AutoUpdateStatus::Downloading { version } if self.update_check_type.is_manual() => { + AutoUpdateStatus::Downloading { version } => { let tooltip = Self::version_tooltip_message(&version); UpdateButton::downloading(tooltip).into_any_element() } - AutoUpdateStatus::Installing { version } if self.update_check_type.is_manual() => { + 
AutoUpdateStatus::Installing { version } => { let tooltip = Self::version_tooltip_message(&version); UpdateButton::installing(tooltip).into_any_element() } @@ -116,10 +116,7 @@ impl Render for UpdateVersion { })) .into_any_element() } - AutoUpdateStatus::Idle - | AutoUpdateStatus::Checking { .. } - | AutoUpdateStatus::Downloading { .. } - | AutoUpdateStatus::Installing { .. } => Empty.into_any_element(), + AutoUpdateStatus::Idle | AutoUpdateStatus::Checking { .. } => Empty.into_any_element(), } } } From 081081e4f183c65564d7516ca5640ea93c61c7b7 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Fri, 10 Apr 2026 09:44:35 +0300 Subject: [PATCH 31/67] Update Rust crate wasmtime to v36.0.7 [SECURITY] (#53553) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [wasmtime](https://redirect.github.com/bytecodealliance/wasmtime) | workspace.dependencies | patch | `36.0.6` → `36.0.7` | --- > [!WARNING] > Some dependencies could not be looked up. Check the [Dependency Dashboard](../issues/15138) for more information. ### GitHub Vulnerability Alerts #### [CVE-2026-34941](https://redirect.github.com/bytecodealliance/wasmtime/security/advisories/GHSA-hx6p-xpx3-jvvv) ### Summary Wasmtime contains a vulnerability where when transcoding a UTF-16 string to the latin1+utf16 component-model encoding it would incorrectly validate the byte length of the input string when performing a bounds check. Specifically the number of code units were checked instead of the byte length, which is twice the size of the code units. This vulnerability can cause the host to read beyond the end of a WebAssembly's linear memory in an attempt to transcode nonexistent bytes. In Wasmtime's default configuration this will read unmapped memory on a guard page, terminating the process with a segfault. 
Wasmtime can be configured, however, without guard pages which would mean that host memory beyond the end of linear memory may be read and interpreted as UTF-16. A host segfault is a denial-of-service vulnerability in Wasmtime, and possibly being able to read beyond the end of linear memory is additionally a vulnerability. Note that reading beyond the end of linear memory requires nonstandard configuration of Wasmtime, specifically with guard pages disabled. ### Impact This is an out-of-bounds memory access. Any user running untrusted wasm components that use cross-component string passing (with UTF-16 source and latin1+utf16 destination encodings) is affected. - With guard pages: Denial of service. The host process crashes with SIGBUS/SIGSEGV. - Without guard pages: Potential information disclosure. The guest can read host memory beyond its linear memory allocation. Patches Wasmtime 24.0.7, 36.0.7, 42.0.2, and 43.0.1 have been issued to fix this bug. Users are recommended to update to these patched versions of Wasmtime. Workarounds There is no workaround for this bug. Hosts are recommended to update to a patched version of Wasmtime. #### [CVE-2026-34942](https://redirect.github.com/bytecodealliance/wasmtime/security/advisories/GHSA-jxhv-7h78-9775) ### Impact Wasmtime's implementation of transcoding strings into the Component Model's `utf16` or `latin1+utf16` encodings improperly verified the alignment of reallocated strings. This meant that unaligned pointers could be passed to the host for transcoding which would trigger a host panic. This panic is possible to trigger from malicious guests which transfer very specific strings across components with specific addresses. Host panics are considered a DoS vector in Wasmtime as the panic conditions are controlled by the guest in this situation. ### Patches Wasmtime 24.0.7, 36.0.7, 42.0.2, and 43.0.1 have been issued to fix this bug. Users are recommended to update to these patched versions of Wasmtime. 
### Workarounds There is no workaround for this bug. Hosts are recommended to update to a patched version of Wasmtime. #### [CVE-2026-34943](https://redirect.github.com/bytecodealliance/wasmtime/security/advisories/GHSA-m758-wjhj-p3jq) ### Impact Wasmtime contains a possible panic which can happen when a `flags`-typed component model value is lifted with the `Val` type. If bits are set outside of the set of flags the component model specifies that these bits should be ignored but Wasmtime will panic when this value is lifted. This panic only affects wasmtime's implementation of lifting into `Val`, not when using the `flags!` macro. This additionally only affects `flags`-typed values which are part of a WIT interface. This has the risk of being a guest-controlled panic within the host which Wasmtime considers a DoS vector. ### Patches Wasmtime 24.0.7, 36.0.7, 42.0.2, and 43.0.1 have been issued to fix this bug. Users are recommended to update to these patched versions of Wasmtime. ### Workarounds There is no workaround for this bug if a host meets the criteria to be affected. To be affected a host must be using `wasmtime::component::Val` and possibly work with a `flags` type in the component model. #### [CVE-2026-34944](https://redirect.github.com/bytecodealliance/wasmtime/security/advisories/GHSA-qqfj-4vcm-26hv) On x86-64 platforms with SSE3 disabled Wasmtime's compilation of the `f64x2.splat` WebAssembly instruction with Cranelift may load 8 more bytes than is necessary. When [signals-based-traps](https://docs.rs/wasmtime/latest/wasmtime/struct.Config.html#method.signals_based_traps) are disabled this can result in an uncaught segfault due to loading from unmapped guard pages. With guard pages disabled it's possible for out-of-sandbox data to be loaded, but this data is not visible to WebAssembly guests. 
### Details The `f64x2.splat` operator, when operating on a value loaded from a memory (for example with f64.load), compiles with Cranelift to code on x86-64 without SSE3 that loads 128 bits (16 bytes) rather than the expected 64 bits (8 bytes) from memory. When the address is in-bounds for a (correct) 8-byte load but not an (incorrect) 16-byte load, this can load beyond memory by up to 8 bytes. This can result in three different behaviors depending on Wasmtime's configuration: 1. If guard pages are disabled then this extra data will be loaded. The extra data is present in the upper bits of a register, but the upper bits are not visible to WebAssembly guests. Actually witnessing this data would require a different bug in Cranelift, of which none are known. Thus in this situation while it's something we're patching in Cranelift it's not a security issue. 2. If guard pages are enabled, and [signals-based-traps](https://docs.rs/wasmtime/latest/wasmtime/struct.Config.html#method.signals_based_traps) are enabled, then this operation will result in a safe WebAssembly trap. The trap is incorrect because the load is not out-of-bounds as defined by WebAssembly, but this mistakenly widened load will load bytes from an unmapped guard page, causing a segfault which is caught and handled as a Wasm trap. In this situation this is not a security issue, but we're patching Cranelift to fix the WebAssembly behavior. 3. If guard pages are enabled, and [signals-based-traps](https://docs.rs/wasmtime/latest/wasmtime/struct.Config.html#method.signals_based_traps) are disabled, then this operation results in an uncaught segfault. Like the previous case with guard pages enabled this will load from an unmapped guard page. Unlike before, however, signals-based-traps are disabled meaning that signal handlers aren't configured. The resulting segfault will, by default, terminate the process. 
This is a security issue from a DoS perspective, but does not represent an arbitrary read or write from WebAssembly, for example. Wasmtime's default configuration is case (2) in this case. That means that Wasmtime, by default, incorrectly executes this WebAssembly instruction but does not have insecure behavior. ### Impact If [signals-based-traps](https://docs.rs/wasmtime/latest/wasmtime/struct.Config.html#method.signals_based_traps) are disabled and guard pages are enabled then guests can trigger an uncaught segfault in the host, likely aborting the host process. This represents, for example, a DoS vector for WebAssembly guests. This bug does not affect Wasmtime's default configuration and requires [signals-based-traps](https://docs.rs/wasmtime/latest/wasmtime/struct.Config.html#method.signals_based_traps) to be disabled. This bug only affects the x86-64 target with the SSE3 feature disabled and the Cranelift backend (Wasmtime's default backend). ### Patches Wasmtime 24.0.7, 36.0.7, 42.0.2, and 43.0.1 have been issued to fix this bug. Users are recommended to update to these patched versions of Wasmtime. ### Workarounds This bug only affects x86-64 hosts where SSE3 is disabled. If SSE3 is enabled or if a non-x86-64 host is used then hosts are not affected. Otherwise there are no known workarounds to this issue. #### [CVE-2026-34945](https://redirect.github.com/bytecodealliance/wasmtime/security/advisories/GHSA-m9w2-8782-2946) ### Impact Wasmtime's Winch compiler contains a bug where a 64-bit table, part of the memory64 proposal of WebAssembly, incorrectly translated the `table.size` instruction. This bug could lead to disclosing data on the host's stack to WebAssembly guests. The host's stack can possibly contain sensitive data related to other host-originating operations which is not intended to be disclosed to guests. 
This bug specifically arose from a mistake where the return value of `table.size` was statically typed as a 32-bit integer, as opposed to consulting the table's index type to see how large the returned register could be. When combined with details about Winch's ABI, such as multi-value returns, this can be combined to read stack data from the host, within a guest. This information disclosure should not be possible in WebAssembly, violates spec semantics, and is a vulnerability in Wasmtime. ### Patches Wasmtime 36.0.7, 42.0.2, and 43.0.1 have been issued to fix this bug. Users are recommended to update to these patched versions of Wasmtime. ### Workarounds Users of Cranelift are not affected by this issue, but users of Winch have no workarounds other than disabling the `Config::wasm_memory64` proposal. #### [CVE-2026-34946](https://redirect.github.com/bytecodealliance/wasmtime/security/advisories/GHSA-q49f-xg75-m9xw) ### Impact Wasmtime's Winch compiler contains a vulnerability where the compilation of the `table.fill` instruction can result in a host panic. This means that a valid guest can be compiled with Winch, on any architecture, and cause the host to panic. This represents a denial-of-service vulnerability in Wasmtime due to guests being able to trigger a panic. The specific issue is that a historical refactoring, #​11254, changed how compiled code referenced tables within the `table.*` instructions. This refactoring forgot to update the Winch code paths associated as well, meaning that Winch was using the wrong indexing scheme. Due to the feature support of Winch the only problem that can result is tables being mixed up or nonexistent tables being used, meaning that the guest is limited to panicking the host (using a nonexistent table), or executing spec-incorrect behavior and modifying the wrong table. ### Patches Wasmtime 36.0.7, 42.0.2, and 43.0.1 have been issued to fix this bug. Users are recommended to update to these patched versions of Wasmtime. 
### Workarounds Users of Cranelift are not affected by this issue, but for users of Winch there is no workaround for this bug. Hosts are recommended to update to a patched version of Wasmtime. #### [CVE-2026-34971](https://redirect.github.com/bytecodealliance/wasmtime/security/advisories/GHSA-jhxm-h53p-jm7w) ### Impact Wasmtime's Cranelift compilation backend contains a bug on aarch64 when performing a certain shape of heap accesses which means that the wrong address is accessed. When combined with explicit bounds checks in a guest WebAssembly module this can create a situation where there are two diverging computations for the same address: one for the address to bounds-check and one for the address to load. This difference in address being operated on means that a guest module can pass a bounds check but then load a different address. Combined together this enables an arbitrary read/write primitive for guest WebAssembly when accessing host memory. This is a sandbox escape as guests are able to read/write arbitrary host memory. This vulnerability has a few ingredients, all of which must be met, for this situation to occur and bypass the sandbox restrictions: * This miscompiled shape of load only occurs on 64-bit WebAssembly linear memories, or when `Config::wasm_memory64` is enabled. 32-bit WebAssembly is not affected. * Spectre mitigations or signals-based-traps must be disabled. When spectre mitigations are enabled then the offending shape of load is not generated. When signals-based-traps are disabled then spectre mitigations are also automatically disabled. The specific bug in Cranelift is a miscompile of a load of the shape `load(iadd(base, ishl(index, amt)))` where `amt` is a constant. The `amt` value is masked incorrectly to test if it's a certain value, and this incorrect mask means that Cranelift can pattern-match this lowering rule during instruction selection erroneously, diverging from WebAssembly's and Cranelift's semantics. 
This incorrect lowering would, for example, load an address much further away than intended as the correct address's computation would have wrapped around to a smaller value instead. ### Patches Wasmtime 36.0.7, 42.0.2, and 43.0.1 have been issued to fix this bug. Users are recommended to update to these patched versions of Wasmtime. ### Workarounds This bug only affects users of Cranelift on aarch64. Cranelift on other platforms is not affected. Additionally this only affects 64-bit WebAssembly linear memories, so if `Config::wasm_memory64` is disabled then hosts are not affected. Note that `Config::wasm_memory64` is enabled by default. If spectre mitigations are enabled, which are enabled by default, then hosts are not affected by this issue. #### [CVE-2026-34988](https://redirect.github.com/bytecodealliance/wasmtime/security/advisories/GHSA-6wgr-89rj-399p) ### Impact Wasmtime's implementation of its pooling allocator contains a bug where in certain configurations the contents of linear memory can be leaked from one instance to the next. The implementation of resetting the virtual memory permissions for linear memory used the wrong predicate to determine if resetting was necessary, where the compilation process used a different predicate. This divergence meant that the pooling allocator incorrectly deduced at runtime that resetting virtual memory permissions was not necessary while compile time determined that virtual memory could be relied upon. Exposing this bug requires specific configuration values to be used. If any of these configurations are not applicable then this bug does not happen: * The pooling allocator must be in use. * The `Config::memory_guard_size` configuration option must be 0. * The `Config::memory_reservation` configuration must be less than 4GiB. * The pooling allocator must be configured with `max_memory_size` the same as the `memory_reservation` value. 
If all of these conditions are applicable then when a linear memory is reused the VM permissions of the previous iteration are not reset. This means that the compiled code, which is assuming out-of-bounds loads will segfault, will not actually segfault and can read the previous contents of linear memory if it was previously mapped. This represents a data leakage vulnerability between guest WebAssembly instances which breaks WebAssembly's semantics and additionally breaks the sandbox that Wasmtime provides. Wasmtime is not vulnerable to this issue with its default settings, nor with the default settings of the pooling allocator, but embeddings are still allowed to configure these values to cause this vulnerability. ### Patches Wasmtime 36.0.7, 42.0.2, and 43.0.1 have been issued to fix this bug. Users are recommended to update to these patched versions of Wasmtime. ### Workarounds All four conditions above must be met to be vulnerable to this bug, and users can work around this bug by adjusting any of the above conditions. For example it is strongly recommended that guard pages are configured for linear memories which would make this bug not applicable. #### [CVE-2026-35195](https://redirect.github.com/bytecodealliance/wasmtime/security/advisories/GHSA-394w-hwhg-8vgm) ### Impact Wasmtime's implementation of transcoding strings between components contains a bug where the return value of a guest component's `realloc` is not validated before the host attempts to write through the pointer. This enables a guest to cause the host to write arbitrary transcoded string bytes to an arbitrary location up to 4GiB away from the base of linear memory. These writes on the host could hit unmapped memory or could corrupt host data structures depending on Wasmtime's configuration. 
Wasmtime by default reserves 4GiB of virtual memory for a guest's linear memory meaning that this bug will by default on hosts cause the host to hit unmapped memory and abort the process due to an unhandled fault. Wasmtime can be configured, however, to reserve less memory for a guest and to remove all guard pages, so some configurations of Wasmtime may lead to corruption of data outside of a guest's linear memory, such as host data structures or other guests' linear memories. ### Patches Wasmtime 24.0.7, 36.0.7, 42.0.2, and 43.0.1 have been issued to fix this bug. Users are recommended to update to these patched versions of Wasmtime. ### Workarounds There is no known workaround for this issue and affected hosts/embeddings are recommended to upgrade. --- ### Release Notes
bytecodealliance/wasmtime (wasmtime) ### [`v36.0.7`](https://redirect.github.com/bytecodealliance/wasmtime/releases/tag/v36.0.7) [Compare Source](https://redirect.github.com/bytecodealliance/wasmtime/compare/v36.0.6...v36.0.7) #### 36.0.7 Released 2026-04-09. ##### Fixed - Miscompiled guest heap access enables sandbox escape on aarch64 Cranelift. [GHSA-jhxm-h53p-jm7w](https://redirect.github.com/bytecodealliance/wasmtime/security/advisories/GHSA-jhxm-h53p-jm7w) - Wasmtime with Winch compiler backend may allow a sandbox-escaping memory access. [GHSA-xx5w-cvp6-jv83](https://redirect.github.com/bytecodealliance/wasmtime/security/advisories/GHSA-xx5w-cvp6-jv83) - Out-of-bounds write or crash when transcoding component model strings. [GHSA-394w-hwhg-8vgm](https://redirect.github.com/bytecodealliance/wasmtime/security/advisories/GHSA-394w-hwhg-8vgm) - Host panic when Winch compiler executes `table.fill`. [GHSA-q49f-xg75-m9xw](https://redirect.github.com/bytecodealliance/wasmtime/security/advisories/GHSA-q49f-xg75-m9xw) - Wasmtime segfault or unused out-of-sandbox load with `f64x2.splat` operator on x86-64. [GHSA-qqfj-4vcm-26hv](https://redirect.github.com/bytecodealliance/wasmtime/security/advisories/GHSA-qqfj-4vcm-26hv) - Improperly masked return value from `table.grow` with Winch compiler backend. [GHSA-f984-pcp8-v2p7](https://redirect.github.com/bytecodealliance/wasmtime/security/advisories/GHSA-f984-pcp8-v2p7) - Panic when transcoding misaligned utf-16 strings. [GHSA-jxhv-7h78-9775](https://redirect.github.com/bytecodealliance/wasmtime/security/advisories/GHSA-jxhv-7h78-9775) - Panic when lifting `flags` component value. [GHSA-m758-wjhj-p3jq](https://redirect.github.com/bytecodealliance/wasmtime/security/advisories/GHSA-m758-wjhj-p3jq) - Heap OOB read in component model UTF-16 to latin1+utf16 string transcoding. 
[GHSA-hx6p-xpx3-jvvv](https://redirect.github.com/bytecodealliance/wasmtime/security/advisories/GHSA-hx6p-xpx3-jvvv) - Data leakage between pooling allocator instances. [GHSA-6wgr-89rj-399p](https://redirect.github.com/bytecodealliance/wasmtime/security/advisories/GHSA-6wgr-89rj-399p) - Host data leakage with 64-bit tables and Winch. [GHSA-m9w2-8782-2946](https://redirect.github.com/bytecodealliance/wasmtime/security/advisories/GHSA-m9w2-8782-2946)
--- ### Configuration 📅 **Schedule**: (in timezone America/New_York) - Branch creation - "" - Automerge - At any time (no schedule defined) 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 144 ++++++++++++++++++++++++++--------------------------- 1 file changed, 72 insertions(+), 72 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1e5c390ac823bfe3b4c0839d2d98102b5a0e87f3..5280fc72d074c22414b603a9b7092f2005f07a85 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3864,36 +3864,36 @@ dependencies = [ [[package]] name = "cranelift-assembler-x64" -version = "0.123.6" +version = "0.123.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba33ddc4e157cb1abe9da6c821e8824f99e56d057c2c22536850e0141f281d61" +checksum = "c8056d63fef9a6f88a1e7aae52bb08fcf48de8866d514c0dc52feb15975f5db5" dependencies = [ "cranelift-assembler-x64-meta", ] [[package]] name = "cranelift-assembler-x64-meta" -version = "0.123.6" +version = "0.123.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69b23dd6ea360e6fb28a3f3b40b7f126509668f58076a4729b2cfd656f26a0ad" +checksum = "57d063b40884a0d733223a45c5de1155395af4393cf7f900d5be8e2cbc094015" dependencies = [ "cranelift-srcgen", ] [[package]] name = "cranelift-bforest" -version = "0.123.6" +version = "0.123.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9d81afcee8fe27ee2536987df3fadcb2e161af4edb7dbe3ef36838d0ce74382" +checksum = "3c3add2881bae2d55cd7162906988dd70053cb7ece865ad793a6754b04d47df6" dependencies = [ "cranelift-entity", ] [[package]] name = "cranelift-bitset" -version 
= "0.123.6" +version = "0.123.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb33595f1279fe7af03b28245060e9085caf98b10ed3137461a85796eb83972a" +checksum = "dd73e32bc1ea4bddc4c770760c66fa24b2890991b0561af554219e603fcd7c34" dependencies = [ "serde", "serde_derive", @@ -3901,9 +3901,9 @@ dependencies = [ [[package]] name = "cranelift-codegen" -version = "0.123.6" +version = "0.123.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0230a6ac0660bfe31eb244cbb43dcd4f2b3c1c4e0addc3e0348c6053ea60272e" +checksum = "3e1da85f2636fe28244848861d1ed0f8dccdc6e98fc5db31aa5eb8878e7ff617" dependencies = [ "bumpalo", "cranelift-assembler-x64", @@ -3931,9 +3931,9 @@ dependencies = [ [[package]] name = "cranelift-codegen-meta" -version = "0.123.6" +version = "0.123.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96d6817fdc15cb8f236fc9d8e610767d3a03327ceca4abff7a14d8e2154c405e" +checksum = "ee3c8aba9d89832df27364b2e79dc2fe288daf4bd6c7347829e7f3f258ea5650" dependencies = [ "cranelift-assembler-x64-meta", "cranelift-codegen-shared", @@ -3944,24 +3944,24 @@ dependencies = [ [[package]] name = "cranelift-codegen-shared" -version = "0.123.6" +version = "0.123.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0403796328e9e2e7df2b80191cdbb473fd9ea3889eb45ef5632d0fef168ea032" +checksum = "ac9a9b09fe107fef6377caed20614586124184cffccb73611312ceb922a917e6" [[package]] name = "cranelift-control" -version = "0.123.6" +version = "0.123.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "188f04092279a3814e0b6235c2f9c2e34028e4beb72da7bfed55cbd184702bcc" +checksum = "50aef001c7ad250d5fdda2c7481cbfcabe6435c66106adf5760dcb9fb9a8ede4" dependencies = [ "arbitrary", ] [[package]] name = "cranelift-entity" -version = "0.123.6" +version = "0.123.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"43f5e7391167605d505fe66a337e1a69583b3f34b63d359ffa5a430313c555e8" +checksum = "cf3c84656a010df2b5afaedcbbbd94f1efe175b55e29864df7b99e64bfa40d56" dependencies = [ "cranelift-bitset", "serde", @@ -3970,9 +3970,9 @@ dependencies = [ [[package]] name = "cranelift-frontend" -version = "0.123.6" +version = "0.123.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea5440792eb2b5ba0a0976df371b9f94031bd853ae56f389de610bca7128a7cb" +checksum = "6aa1d2006915cddb63705db46dcfb8637fe08f91d26fbe59680d7257ec39d609" dependencies = [ "cranelift-codegen", "log", @@ -3982,15 +3982,15 @@ dependencies = [ [[package]] name = "cranelift-isle" -version = "0.123.6" +version = "0.123.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e5c05fab6fce38d729088f3fa1060eaa1ad54eefd473588887205ed2ab2f79e" +checksum = "6e4fecbcbb81273f9aff4559e26fc341f42663da420cca5ac84b34e74e9267e0" [[package]] name = "cranelift-native" -version = "0.123.6" +version = "0.123.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c9a0607a028edf5ba5bba7e7cf5ca1b7f0a030e3ae84dcd401e8b9b05192280" +checksum = "976a3d85f197a56ae34ee4d5a5e469855ac52804a09a513d0562d425da0ff56e" dependencies = [ "cranelift-codegen", "libc", @@ -3999,9 +3999,9 @@ dependencies = [ [[package]] name = "cranelift-srcgen" -version = "0.123.6" +version = "0.123.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb0f2da72eb2472aaac6cfba4e785af42b1f2d82f5155f30c9c30e8cce351e17" +checksum = "37fbd4aefce642145491ff862d2054a71b63d2d97b8dd1e280c9fdaf399598b7" [[package]] name = "crash-context" @@ -4925,7 +4925,7 @@ dependencies = [ "libc", "option-ext", "redox_users 0.5.2", - "windows-sys 0.61.2", + "windows-sys 0.59.0", ] [[package]] @@ -5691,7 +5691,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 
0.61.2", + "windows-sys 0.52.0", ] [[package]] @@ -7116,7 +7116,7 @@ dependencies = [ "gobject-sys", "libc", "system-deps 7.0.7", - "windows-sys 0.61.2", + "windows-sys 0.52.0", ] [[package]] @@ -11089,7 +11089,7 @@ version = "0.50.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" dependencies = [ - "windows-sys 0.61.2", + "windows-sys 0.59.0", ] [[package]] @@ -13535,7 +13535,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4" dependencies = [ "bytes 1.11.1", - "heck 0.5.0", + "heck 0.4.1", "itertools 0.12.1", "log", "multimap", @@ -13685,9 +13685,9 @@ checksum = "bd348ff538bc9caeda7ee8cad2d1d48236a1f443c1fa3913c6a02fe0043b1dd3" [[package]] name = "pulley-interpreter" -version = "36.0.6" +version = "36.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "499d922aa0f9faac8d92351416664f1b7acd914008a90fce2f0516d31efddf67" +checksum = "a078b4bdfd275fadeefc4f9ae3675ee5af302e69497da439956dd05257858970" dependencies = [ "cranelift-bitset", "log", @@ -13697,9 +13697,9 @@ dependencies = [ [[package]] name = "pulley-macros" -version = "36.0.6" +version = "36.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3848fb193d6dffca43a21f24ca9492f22aab88af1223d06bac7f8a0ef405b81" +checksum = "9dac91999883fd00b900eb5377be403c5cb8b93e10efcb571bf66454c2d9f231" dependencies = [ "proc-macro2", "quote", @@ -13841,7 +13841,7 @@ dependencies = [ "once_cell", "socket2 0.6.1", "tracing", - "windows-sys 0.60.2", + "windows-sys 0.52.0", ] [[package]] @@ -15033,7 +15033,7 @@ dependencies = [ "errno 0.3.14", "libc", "linux-raw-sys 0.11.0", - "windows-sys 0.61.2", + "windows-sys 0.52.0", ] [[package]] @@ -16286,7 +16286,7 @@ version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"c1c97747dbf44bb1ca44a561ece23508e99cb592e862f22222dcf42f51d1e451" dependencies = [ - "heck 0.5.0", + "heck 0.4.1", "proc-macro2", "quote", "syn 2.0.117", @@ -17500,7 +17500,7 @@ dependencies = [ "getrandom 0.3.4", "once_cell", "rustix 1.1.2", - "windows-sys 0.61.2", + "windows-sys 0.52.0", ] [[package]] @@ -19710,9 +19710,9 @@ dependencies = [ [[package]] name = "wasmtime" -version = "36.0.6" +version = "36.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a2f8736ddc86e03a9d0e4c477a37939cfc53cd1b052ee38a3133679b87ef830" +checksum = "b80d5ba38b9b00f60a0665e07dde38e91d884d4a78cd61d777c8cf081a1267c1" dependencies = [ "addr2line", "anyhow", @@ -19771,9 +19771,9 @@ dependencies = [ [[package]] name = "wasmtime-environ" -version = "36.0.6" +version = "36.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "733682a327755c77153ac7455b1ba8f2db4d9946c1738f8002fe1fbda1d52e83" +checksum = "44a45d60dea98308decb71a9f7bb35a629696d1fbf7127dbfde42cbc64b8fa33" dependencies = [ "anyhow", "cpp_demangle", @@ -19798,9 +19798,9 @@ dependencies = [ [[package]] name = "wasmtime-internal-asm-macros" -version = "36.0.6" +version = "36.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68288980a2e02bcb368d436da32565897033ea21918007e3f2bae18843326cf9" +checksum = "dd014b4001b6da03d79062d9ad5ec98fa62e34d50e30e46298545282cc2957e4" dependencies = [ "cfg-if", ] @@ -19817,9 +19817,9 @@ dependencies = [ [[package]] name = "wasmtime-internal-component-macro" -version = "36.0.6" +version = "36.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dea846da68f8e776c8a43bde3386022d7bb74e713b9654f7c0196e5ff2e4684" +checksum = "0f2942aa5d44b02061e0c6ab71b23090cf3b300b4519e3b80776ac38edde2e65" dependencies = [ "anyhow", "proc-macro2", @@ -19832,15 +19832,15 @@ dependencies = [ [[package]] name = "wasmtime-internal-component-util" -version = "36.0.6" +version = "36.0.7" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe1e5735b3c8251510d2a55311562772d6c6fca9438a3d0329eb6e38af4957d6" +checksum = "bcb6f974fe739e98034b7e6ec6feb2ab399f4cde7207675f26138bd9a1d65720" [[package]] name = "wasmtime-internal-cranelift" -version = "36.0.6" +version = "36.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89bb9ef571288e2be6b8a3c4763acc56c348dcd517500b1679d3ffad9e4a757" +checksum = "4047020866a80aa943e41133e607020e17562126cf81533362275272098a22b1" dependencies = [ "anyhow", "cfg-if", @@ -19865,9 +19865,9 @@ dependencies = [ [[package]] name = "wasmtime-internal-fiber" -version = "36.0.6" +version = "36.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b698d004b15ea1f1ae2d06e5e8b80080cbd684fd245220ce2fac3cdd5ecf87f2" +checksum = "7cd172b622993bb8f834f6ca3b7683dfdba72b12db0527824850fdec17c89e5a" dependencies = [ "anyhow", "cc", @@ -19881,9 +19881,9 @@ dependencies = [ [[package]] name = "wasmtime-internal-jit-debug" -version = "36.0.6" +version = "36.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c803a9fec05c3d7fa03474d4595079d546e77a3c71c1d09b21f74152e2165c17" +checksum = "1287e310fef4c8759a6b5caa0d44eff9a03ebcd6c273729cc39ce3e321a9e26a" dependencies = [ "cc", "wasmtime-internal-versioned-export-macros", @@ -19891,9 +19891,9 @@ dependencies = [ [[package]] name = "wasmtime-internal-jit-icache-coherence" -version = "36.0.6" +version = "36.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3866909d37f7929d902e6011847748147e8734e9d7e0353e78fb8b98f586aee" +checksum = "c02bca30ef670a31496d742d9facdbd0228debe766b1e9541655c0530ff5c953" dependencies = [ "anyhow", "cfg-if", @@ -19903,24 +19903,24 @@ dependencies = [ [[package]] name = "wasmtime-internal-math" -version = "36.0.6" +version = "36.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5a23b03fb14c64bd0dfcaa4653101f94ade76c34a3027ed2d6b373267536e45b" +checksum = "fd3a1f51a037ae2c048f0d76d36e27f0d22276295496c44f16a251f24690e003" dependencies = [ "libm", ] [[package]] name = "wasmtime-internal-slab" -version = "36.0.6" +version = "36.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbff220b88cdb990d34a20b13344e5da2e7b99959a5b1666106bec94b58d6364" +checksum = "ba6171aac3d66e4d69e50080bb6bc5205de2283513984a4118a93cb66dc02994" [[package]] name = "wasmtime-internal-unwinder" -version = "36.0.6" +version = "36.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13e1ad30e88988b20c0d1c56ea4b4fbc01a8c614653cbf12ca50c0dcc695e2f7" +checksum = "3fd1bc1783391a02176fb687159b1779fc10b71d5350adf09c1f3aa8442a02cc" dependencies = [ "anyhow", "cfg-if", @@ -19931,9 +19931,9 @@ dependencies = [ [[package]] name = "wasmtime-internal-versioned-export-macros" -version = "36.0.6" +version = "36.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "549aefdaa1398c2fcfbf69a7b882956bb5b6e8e5b600844ecb91a3b5bf658ca7" +checksum = "8097e2c8ca02ed65d31dda111faa0888ffbf28dc3ee74355e283118a8d293eb0" dependencies = [ "proc-macro2", "quote", @@ -19942,9 +19942,9 @@ dependencies = [ [[package]] name = "wasmtime-internal-winch" -version = "36.0.6" +version = "36.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cc96a84c5700171aeecf96fa9a9ab234f333f5afb295dabf3f8a812b70fe832" +checksum = "6a8cb36b61fbcff2c8bcd14f9f2651a6e52b019d0d329324620d7bc971b2b235" dependencies = [ "anyhow", "cranelift-codegen", @@ -19959,9 +19959,9 @@ dependencies = [ [[package]] name = "wasmtime-internal-wit-bindgen" -version = "36.0.6" +version = "36.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c28dc9efea511598c88564ac1974e0825c07d9c0de902dbf68f227431cd4ff8c" +checksum = "ff555cfb71577028616d65c00221c7fe6eef45a9ebb96fc6d34d4a41fa1de191" 
dependencies = [ "anyhow", "bitflags 2.10.0", @@ -20531,7 +20531,7 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.61.2", + "windows-sys 0.48.0", ] [[package]] @@ -20542,9 +20542,9 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "winch-codegen" -version = "36.0.6" +version = "36.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06c0ec09e8eb5e850e432da6271ed8c4a9d459a9db3850c38e98a3ee9d015e79" +checksum = "0989126b21d12c9923aa2de7ddbcf87db03037b24b7365041d9dd0095b69d8cb" dependencies = [ "anyhow", "cranelift-assembler-x64", From 857f81b04bfa8e52fc07f9e8d3bc14726957bf6e Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Fri, 10 Apr 2026 00:02:16 -0700 Subject: [PATCH 32/67] Fix more folder mutation things (#53585) Continuation of https://github.com/zed-industries/zed/pull/53566, now with proper thread root mutation. 
Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --- crates/agent_ui/src/agent_panel.rs | 2 +- crates/agent_ui/src/thread_import.rs | 7 +- crates/agent_ui/src/thread_metadata_store.rs | 452 ++++- .../agent_ui/src/thread_worktree_archive.rs | 4 +- crates/agent_ui/src/threads_archive_view.rs | 7 +- crates/sidebar/src/sidebar.rs | 189 +- crates/sidebar/src/sidebar_tests.rs | 1577 ++++++++++++++--- crates/workspace/src/multi_workspace.rs | 226 ++- crates/workspace/src/multi_workspace_tests.rs | 154 -- crates/workspace/src/workspace.rs | 2 +- 10 files changed, 2039 insertions(+), 581 deletions(-) diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 6c555af7fa630f5f5cc5995f36ec8ee7007508b9..2ff4cd18a78fd53c5d540e66670d6e6c9e51aa47 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -6485,7 +6485,7 @@ mod tests { let metadata = store .entry(session_id) .unwrap_or_else(|| panic!("{label} thread metadata should exist")); - metadata.folder_paths.clone() + metadata.folder_paths().clone() }); let mut sorted = metadata_paths.ordered_paths().cloned().collect::>(); sorted.sort(); diff --git a/crates/agent_ui/src/thread_import.rs b/crates/agent_ui/src/thread_import.rs index 41a23f894d8f406cbdbdcb03db3879437a45e40f..686ca5d6cd4fdfede7eb4a5ed70c90074972fdf4 100644 --- a/crates/agent_ui/src/thread_import.rs +++ b/crates/agent_ui/src/thread_import.rs @@ -18,12 +18,12 @@ use ui::{ prelude::*, }; use util::ResultExt; -use workspace::{ModalView, MultiWorkspace, PathList, Workspace}; +use workspace::{ModalView, MultiWorkspace, Workspace}; use crate::{ 
Agent, AgentPanel, agent_connection_store::AgentConnectionStore, - thread_metadata_store::{ThreadMetadata, ThreadMetadataStore}, + thread_metadata_store::{ThreadMetadata, ThreadMetadataStore, ThreadWorktreePaths}, }; pub struct AcpThreadImportOnboarding; @@ -527,8 +527,7 @@ fn collect_importable_threads( .unwrap_or_else(|| crate::DEFAULT_THREAD_TITLE.into()), updated_at: session.updated_at.unwrap_or_else(|| Utc::now()), created_at: session.created_at, - folder_paths, - main_worktree_paths: PathList::default(), + worktree_paths: ThreadWorktreePaths::from_folder_paths(&folder_paths), remote_connection: remote_connection.clone(), archived: true, }); diff --git a/crates/agent_ui/src/thread_metadata_store.rs b/crates/agent_ui/src/thread_metadata_store.rs index 101ea3c7369dae6dd88e8bc4499f048532d91a43..4ba68b400a60320e95bfd645ee662f6483dc6cf4 100644 --- a/crates/agent_ui/src/thread_metadata_store.rs +++ b/crates/agent_ui/src/thread_metadata_store.rs @@ -64,8 +64,7 @@ fn migrate_thread_metadata(cx: &mut App) -> Task> { title: entry.title, updated_at: entry.updated_at, created_at: entry.created_at, - folder_paths: entry.folder_paths, - main_worktree_paths: PathList::default(), + worktree_paths: ThreadWorktreePaths::from_folder_paths(&entry.folder_paths), remote_connection: None, archived: true, }) @@ -82,11 +81,11 @@ fn migrate_thread_metadata(cx: &mut App) -> Task> { if is_first_migration { let mut per_project: HashMap> = HashMap::default(); for entry in &mut to_migrate { - if entry.folder_paths.is_empty() { + if entry.worktree_paths.is_empty() { continue; } per_project - .entry(entry.folder_paths.clone()) + .entry(entry.worktree_paths.folder_path_list().clone()) .or_default() .push(entry); } @@ -164,8 +163,8 @@ fn migrate_thread_remote_connections(cx: &mut App, migration_task: Task); impl Global for GlobalThreadMetadataStore {} +/// Paired worktree paths for a thread. Each folder path has a corresponding +/// main worktree path at the same position. 
The two lists are always the +/// same length and are modified together via `add_path` / `remove_main_path`. +/// +/// For non-linked worktrees, the main path and folder path are identical. +/// For linked worktrees, the main path is the original repo and the folder +/// path is the linked worktree location. +/// +/// Internally stores two `PathList`s with matching insertion order so that +/// `ordered_paths()` on both yields positionally-paired results. +#[derive(Default, Debug, Clone)] +pub struct ThreadWorktreePaths { + folder_paths: PathList, + main_worktree_paths: PathList, +} + +impl PartialEq for ThreadWorktreePaths { + fn eq(&self, other: &Self) -> bool { + self.folder_paths == other.folder_paths + && self.main_worktree_paths == other.main_worktree_paths + } +} + +impl ThreadWorktreePaths { + /// Build from a project's current state. Each visible worktree is paired + /// with its main repo path (resolved via git), falling back to the + /// worktree's own path if no git repo is found. + pub fn from_project(project: &project::Project, cx: &App) -> Self { + let (mains, folders): (Vec, Vec) = project + .visible_worktrees(cx) + .map(|worktree| { + let snapshot = worktree.read(cx).snapshot(); + let folder_path = snapshot.abs_path().to_path_buf(); + let main_path = snapshot + .root_repo_common_dir() + .and_then(|dir| Some(dir.parent()?.to_path_buf())) + .unwrap_or_else(|| folder_path.clone()); + (main_path, folder_path) + }) + .unzip(); + Self { + folder_paths: PathList::new(&folders), + main_worktree_paths: PathList::new(&mains), + } + } + + /// Build from two parallel `PathList`s that already share the same + /// insertion order. Used for deserialization from DB. + /// + /// Returns an error if the two lists have different lengths, which + /// indicates corrupted data from a prior migration bug. 
+ pub fn from_path_lists( + main_worktree_paths: PathList, + folder_paths: PathList, + ) -> anyhow::Result { + anyhow::ensure!( + main_worktree_paths.paths().len() == folder_paths.paths().len(), + "main_worktree_paths has {} entries but folder_paths has {}", + main_worktree_paths.paths().len(), + folder_paths.paths().len(), + ); + Ok(Self { + folder_paths, + main_worktree_paths, + }) + } + + /// Build for non-linked worktrees where main == folder for every path. + pub fn from_folder_paths(folder_paths: &PathList) -> Self { + Self { + folder_paths: folder_paths.clone(), + main_worktree_paths: folder_paths.clone(), + } + } + + pub fn is_empty(&self) -> bool { + self.folder_paths.is_empty() + } + + /// The folder paths (for workspace matching / `threads_by_paths` index). + pub fn folder_path_list(&self) -> &PathList { + &self.folder_paths + } + + /// The main worktree paths (for group key / `threads_by_main_paths` index). + pub fn main_worktree_path_list(&self) -> &PathList { + &self.main_worktree_paths + } + + /// Iterate the (main_worktree_path, folder_path) pairs in insertion order. + pub fn ordered_pairs(&self) -> impl Iterator { + self.main_worktree_paths + .ordered_paths() + .zip(self.folder_paths.ordered_paths()) + } + + /// Add a new path pair. If the exact (main, folder) pair already exists, + /// this is a no-op. Rebuilds both internal `PathList`s to maintain + /// consistent ordering. 
+ pub fn add_path(&mut self, main_path: &Path, folder_path: &Path) { + let already_exists = self + .ordered_pairs() + .any(|(m, f)| m.as_path() == main_path && f.as_path() == folder_path); + if already_exists { + return; + } + let (mut mains, mut folders): (Vec, Vec) = self + .ordered_pairs() + .map(|(m, f)| (m.clone(), f.clone())) + .unzip(); + mains.push(main_path.to_path_buf()); + folders.push(folder_path.to_path_buf()); + self.main_worktree_paths = PathList::new(&mains); + self.folder_paths = PathList::new(&folders); + } + + /// Remove all pairs whose main worktree path matches the given path. + /// This removes the corresponding entries from both lists. + pub fn remove_main_path(&mut self, main_path: &Path) { + let (mains, folders): (Vec, Vec) = self + .ordered_pairs() + .filter(|(m, _)| m.as_path() != main_path) + .map(|(m, f)| (m.clone(), f.clone())) + .unzip(); + self.main_worktree_paths = PathList::new(&mains); + self.folder_paths = PathList::new(&folders); + } +} + /// Lightweight metadata for any thread (native or ACP), enough to populate /// the sidebar list and route to the correct load path when clicked. 
#[derive(Debug, Clone, PartialEq)] @@ -204,17 +332,25 @@ pub struct ThreadMetadata { pub title: SharedString, pub updated_at: DateTime, pub created_at: Option>, - pub folder_paths: PathList, - pub main_worktree_paths: PathList, + pub worktree_paths: ThreadWorktreePaths, pub remote_connection: Option, pub archived: bool, } +impl ThreadMetadata { + pub fn folder_paths(&self) -> &PathList { + self.worktree_paths.folder_path_list() + } + pub fn main_worktree_paths(&self) -> &PathList { + self.worktree_paths.main_worktree_path_list() + } +} + impl From<&ThreadMetadata> for acp_thread::AgentSessionInfo { fn from(meta: &ThreadMetadata) -> Self { Self { session_id: meta.session_id.clone(), - work_dirs: Some(meta.folder_paths.clone()), + work_dirs: Some(meta.folder_paths().clone()), title: Some(meta.title.clone()), updated_at: Some(meta.updated_at), created_at: meta.created_at, @@ -398,12 +534,12 @@ impl ThreadMetadataStore { for row in rows { this.threads_by_paths - .entry(row.folder_paths.clone()) + .entry(row.folder_paths().clone()) .or_default() .insert(row.session_id.clone()); - if !row.main_worktree_paths.is_empty() { + if !row.main_worktree_paths().is_empty() { this.threads_by_main_paths - .entry(row.main_worktree_paths.clone()) + .entry(row.main_worktree_paths().clone()) .or_default() .insert(row.session_id.clone()); } @@ -438,17 +574,17 @@ impl ThreadMetadataStore { fn save_internal(&mut self, metadata: ThreadMetadata) { if let Some(thread) = self.threads.get(&metadata.session_id) { - if thread.folder_paths != metadata.folder_paths { - if let Some(session_ids) = self.threads_by_paths.get_mut(&thread.folder_paths) { + if thread.folder_paths() != metadata.folder_paths() { + if let Some(session_ids) = self.threads_by_paths.get_mut(thread.folder_paths()) { session_ids.remove(&metadata.session_id); } } - if thread.main_worktree_paths != metadata.main_worktree_paths - && !thread.main_worktree_paths.is_empty() + if thread.main_worktree_paths() != 
metadata.main_worktree_paths() + && !thread.main_worktree_paths().is_empty() { if let Some(session_ids) = self .threads_by_main_paths - .get_mut(&thread.main_worktree_paths) + .get_mut(thread.main_worktree_paths()) { session_ids.remove(&metadata.session_id); } @@ -459,13 +595,13 @@ impl ThreadMetadataStore { .insert(metadata.session_id.clone(), metadata.clone()); self.threads_by_paths - .entry(metadata.folder_paths.clone()) + .entry(metadata.folder_paths().clone()) .or_default() .insert(metadata.session_id.clone()); - if !metadata.main_worktree_paths.is_empty() { + if !metadata.main_worktree_paths().is_empty() { self.threads_by_main_paths - .entry(metadata.main_worktree_paths.clone()) + .entry(metadata.main_worktree_paths().clone()) .or_default() .insert(metadata.session_id.clone()); } @@ -483,7 +619,11 @@ impl ThreadMetadataStore { ) { if let Some(thread) = self.threads.get(session_id) { self.save_internal(ThreadMetadata { - folder_paths: work_dirs, + worktree_paths: ThreadWorktreePaths::from_path_lists( + thread.main_worktree_paths().clone(), + work_dirs.clone(), + ) + .unwrap_or_else(|_| ThreadWorktreePaths::from_folder_paths(&work_dirs)), ..thread.clone() }); cx.notify(); @@ -524,7 +664,7 @@ impl ThreadMetadataStore { cx: &mut Context, ) { if let Some(thread) = self.threads.get(session_id).cloned() { - let mut paths: Vec = thread.folder_paths.paths().to_vec(); + let mut paths: Vec = thread.folder_paths().paths().to_vec(); for (old_path, new_path) in path_replacements { if let Some(pos) = paths.iter().position(|p| p == old_path) { paths[pos] = new_path.clone(); @@ -532,7 +672,11 @@ impl ThreadMetadataStore { } let new_folder_paths = PathList::new(&paths); self.save_internal(ThreadMetadata { - folder_paths: new_folder_paths, + worktree_paths: ThreadWorktreePaths::from_path_lists( + thread.main_worktree_paths().clone(), + new_folder_paths.clone(), + ) + .unwrap_or_else(|_| ThreadWorktreePaths::from_folder_paths(&new_folder_paths)), ..thread }); cx.notify(); @@ 
-546,7 +690,7 @@ impl ThreadMetadataStore { cx: &mut Context, ) { if let Some(thread) = self.threads.get(session_id).cloned() { - let mut paths: Vec = thread.folder_paths.paths().to_vec(); + let mut paths: Vec = thread.folder_paths().paths().to_vec(); for (old_path, new_path) in path_replacements { for path in &mut paths { if path == old_path { @@ -556,13 +700,69 @@ impl ThreadMetadataStore { } let new_folder_paths = PathList::new(&paths); self.save_internal(ThreadMetadata { - folder_paths: new_folder_paths, + worktree_paths: ThreadWorktreePaths::from_path_lists( + thread.main_worktree_paths().clone(), + new_folder_paths.clone(), + ) + .unwrap_or_else(|_| ThreadWorktreePaths::from_folder_paths(&new_folder_paths)), ..thread }); cx.notify(); } } + /// Apply a mutation to the worktree paths of all threads whose current + /// `main_worktree_paths` matches `current_main_paths`, then re-index. + pub fn change_worktree_paths( + &mut self, + current_main_paths: &PathList, + mutate: impl Fn(&mut ThreadWorktreePaths), + cx: &mut Context, + ) { + let session_ids: Vec<_> = self + .threads_by_main_paths + .get(current_main_paths) + .into_iter() + .flatten() + .cloned() + .collect(); + + if session_ids.is_empty() { + return; + } + + for session_id in &session_ids { + if let Some(thread) = self.threads.get_mut(session_id) { + if let Some(ids) = self + .threads_by_main_paths + .get_mut(thread.main_worktree_paths()) + { + ids.remove(session_id); + } + if let Some(ids) = self.threads_by_paths.get_mut(thread.folder_paths()) { + ids.remove(session_id); + } + + mutate(&mut thread.worktree_paths); + + self.threads_by_main_paths + .entry(thread.main_worktree_paths().clone()) + .or_default() + .insert(session_id.clone()); + self.threads_by_paths + .entry(thread.folder_paths().clone()) + .or_default() + .insert(session_id.clone()); + + self.pending_thread_ops_tx + .try_send(DbOperation::Upsert(thread.clone())) + .log_err(); + } + } + + cx.notify(); + } + pub fn create_archived_worktree( 
&self, worktree_path: String, @@ -655,13 +855,13 @@ impl ThreadMetadataStore { pub fn delete(&mut self, session_id: acp::SessionId, cx: &mut Context) { if let Some(thread) = self.threads.get(&session_id) { - if let Some(session_ids) = self.threads_by_paths.get_mut(&thread.folder_paths) { + if let Some(session_ids) = self.threads_by_paths.get_mut(thread.folder_paths()) { session_ids.remove(&session_id); } - if !thread.main_worktree_paths.is_empty() { + if !thread.main_worktree_paths().is_empty() { if let Some(session_ids) = self .threads_by_main_paths - .get_mut(&thread.main_worktree_paths) + .get_mut(thread.main_worktree_paths()) { session_ids.remove(&session_id); } @@ -802,16 +1002,9 @@ impl ThreadMetadataStore { let agent_id = thread_ref.connection().agent_id(); let project = thread_ref.project().read(cx); - let folder_paths = { - let paths: Vec> = project - .visible_worktrees(cx) - .map(|worktree| worktree.read(cx).abs_path()) - .collect(); - PathList::new(&paths) - }; + let worktree_paths = ThreadWorktreePaths::from_project(project, cx); let project_group_key = project.project_group_key(cx); - let main_worktree_paths = project_group_key.path_list().clone(); let remote_connection = project_group_key.host(); // Threads without a folder path (e.g. started in an empty @@ -820,7 +1013,7 @@ impl ThreadMetadataStore { // them from the archive. 
let archived = existing_thread .map(|t| t.archived) - .unwrap_or(folder_paths.is_empty()); + .unwrap_or(worktree_paths.is_empty()); let metadata = ThreadMetadata { session_id, @@ -828,8 +1021,7 @@ impl ThreadMetadataStore { title, created_at: Some(created_at), updated_at, - folder_paths, - main_worktree_paths, + worktree_paths, remote_connection, archived, }; @@ -919,19 +1111,19 @@ impl ThreadMetadataDb { let title = row.title.to_string(); let updated_at = row.updated_at.to_rfc3339(); let created_at = row.created_at.map(|dt| dt.to_rfc3339()); - let serialized = row.folder_paths.serialize(); - let (folder_paths, folder_paths_order) = if row.folder_paths.is_empty() { + let serialized = row.folder_paths().serialize(); + let (folder_paths, folder_paths_order) = if row.folder_paths().is_empty() { (None, None) } else { (Some(serialized.paths), Some(serialized.order)) }; - let main_serialized = row.main_worktree_paths.serialize(); - let (main_worktree_paths, main_worktree_paths_order) = if row.main_worktree_paths.is_empty() - { - (None, None) - } else { - (Some(main_serialized.paths), Some(main_serialized.order)) - }; + let main_serialized = row.main_worktree_paths().serialize(); + let (main_worktree_paths, main_worktree_paths_order) = + if row.main_worktree_paths().is_empty() { + (None, None) + } else { + (Some(main_serialized.paths), Some(main_serialized.order)) + }; let remote_connection = row .remote_connection .as_ref() @@ -1136,6 +1328,10 @@ impl Column for ThreadMetadata { .transpose() .context("deserialize thread metadata remote connection")?; + let worktree_paths = + ThreadWorktreePaths::from_path_lists(main_worktree_paths, folder_paths) + .unwrap_or_else(|_| ThreadWorktreePaths::default()); + Ok(( ThreadMetadata { session_id: acp::SessionId::new(id), @@ -1143,8 +1339,7 @@ impl Column for ThreadMetadata { title: title.into(), updated_at, created_at, - folder_paths, - main_worktree_paths, + worktree_paths, remote_connection, archived, }, @@ -1227,8 +1422,7 @@ mod 
tests { title: title.to_string().into(), updated_at, created_at: Some(updated_at), - folder_paths, - main_worktree_paths: PathList::default(), + worktree_paths: ThreadWorktreePaths::from_folder_paths(&folder_paths), remote_connection: None, } } @@ -1459,8 +1653,7 @@ mod tests { title: "Existing Metadata".into(), updated_at: now - chrono::Duration::seconds(10), created_at: Some(now - chrono::Duration::seconds(10)), - folder_paths: project_a_paths.clone(), - main_worktree_paths: PathList::default(), + worktree_paths: ThreadWorktreePaths::from_folder_paths(&project_a_paths), remote_connection: None, archived: false, }; @@ -1569,8 +1762,7 @@ mod tests { title: "Existing Metadata".into(), updated_at: existing_updated_at, created_at: Some(existing_updated_at), - folder_paths: project_paths.clone(), - main_worktree_paths: PathList::default(), + worktree_paths: ThreadWorktreePaths::from_folder_paths(&project_paths), remote_connection: None, archived: false, }; @@ -1747,7 +1939,7 @@ mod tests { // Project A: 5 most recent should be unarchived, 2 oldest should be archived let mut project_a_entries: Vec<_> = list .iter() - .filter(|m| m.folder_paths == project_a_paths) + .filter(|m| *m.folder_paths() == project_a_paths) .collect(); assert_eq!(project_a_entries.len(), 7); project_a_entries.sort_by(|a, b| b.updated_at.cmp(&a.updated_at)); @@ -1770,7 +1962,7 @@ mod tests { // Project B: all 3 should be unarchived (under the limit) let project_b_entries: Vec<_> = list .iter() - .filter(|m| m.folder_paths == project_b_paths) + .filter(|m| *m.folder_paths() == project_b_paths) .collect(); assert_eq!(project_b_entries.len(), 3); assert!(project_b_entries.iter().all(|m| !m.archived)); @@ -1934,7 +2126,7 @@ mod tests { let without_worktree = store .entry(&session_without_worktree) .expect("missing metadata for thread without project association"); - assert!(without_worktree.folder_paths.is_empty()); + assert!(without_worktree.folder_paths().is_empty()); assert!( 
without_worktree.archived, "expected thread without project association to be archived" @@ -1944,7 +2136,7 @@ mod tests { .entry(&session_with_worktree) .expect("missing metadata for thread with project association"); assert_eq!( - with_worktree.folder_paths, + *with_worktree.folder_paths(), PathList::new(&[Path::new("/project-a")]) ); assert!( @@ -2578,7 +2770,7 @@ mod tests { store.entry(&acp::SessionId::new("session-multi")).cloned() }); let entry = entry.unwrap(); - let paths = entry.folder_paths.paths(); + let paths = entry.folder_paths().paths(); assert_eq!(paths.len(), 3); assert!(paths.contains(&PathBuf::from("/restored/worktree-a"))); assert!(paths.contains(&PathBuf::from("/restored/worktree-b"))); @@ -2623,7 +2815,7 @@ mod tests { .cloned() }); let entry = entry.unwrap(); - let paths = entry.folder_paths.paths(); + let paths = entry.folder_paths().paths(); assert_eq!(paths.len(), 2); assert!(paths.contains(&PathBuf::from("/new/worktree-a"))); assert!(paths.contains(&PathBuf::from("/other/path"))); @@ -2669,7 +2861,7 @@ mod tests { store.entry(&acp::SessionId::new("session-multi")).cloned() }); let entry = entry.unwrap(); - let paths = entry.folder_paths.paths(); + let paths = entry.folder_paths().paths(); assert_eq!(paths.len(), 3); assert!(paths.contains(&PathBuf::from("/restored/worktree-a"))); assert!(paths.contains(&PathBuf::from("/restored/worktree-b"))); @@ -2714,7 +2906,7 @@ mod tests { .cloned() }); let entry = entry.unwrap(); - let paths = entry.folder_paths.paths(); + let paths = entry.folder_paths().paths(); assert_eq!(paths.len(), 2); assert!(paths.contains(&PathBuf::from("/new/worktree-a"))); assert!(paths.contains(&PathBuf::from("/other/path"))); @@ -2786,4 +2978,136 @@ mod tests { assert!(paths.contains(&Path::new("/projects/worktree-a"))); assert!(paths.contains(&Path::new("/projects/worktree-b"))); } + + // ── ThreadWorktreePaths tests ────────────────────────────────────── + + /// Helper to build a `ThreadWorktreePaths` from (main, 
folder) pairs. + fn make_worktree_paths(pairs: &[(&str, &str)]) -> ThreadWorktreePaths { + let (mains, folders): (Vec<&Path>, Vec<&Path>) = pairs + .iter() + .map(|(m, f)| (Path::new(*m), Path::new(*f))) + .unzip(); + ThreadWorktreePaths::from_path_lists(PathList::new(&mains), PathList::new(&folders)) + .unwrap() + } + + #[test] + fn test_thread_worktree_paths_full_add_then_remove_cycle() { + // Full scenario from the issue: + // 1. Start with linked worktree selectric → zed + // 2. Add cloud + // 3. Remove zed + + let mut paths = make_worktree_paths(&[("/projects/zed", "/worktrees/selectric/zed")]); + + // Step 2: add cloud + paths.add_path(Path::new("/projects/cloud"), Path::new("/projects/cloud")); + + assert_eq!(paths.ordered_pairs().count(), 2); + assert_eq!( + paths.folder_path_list(), + &PathList::new(&[ + Path::new("/worktrees/selectric/zed"), + Path::new("/projects/cloud"), + ]) + ); + assert_eq!( + paths.main_worktree_path_list(), + &PathList::new(&[Path::new("/projects/zed"), Path::new("/projects/cloud"),]) + ); + + // Step 3: remove zed + paths.remove_main_path(Path::new("/projects/zed")); + + assert_eq!(paths.ordered_pairs().count(), 1); + assert_eq!( + paths.folder_path_list(), + &PathList::new(&[Path::new("/projects/cloud")]) + ); + assert_eq!( + paths.main_worktree_path_list(), + &PathList::new(&[Path::new("/projects/cloud")]) + ); + } + + #[test] + fn test_thread_worktree_paths_add_is_idempotent() { + let mut paths = make_worktree_paths(&[("/projects/zed", "/projects/zed")]); + + paths.add_path(Path::new("/projects/zed"), Path::new("/projects/zed")); + + assert_eq!(paths.ordered_pairs().count(), 1); + } + + #[test] + fn test_thread_worktree_paths_remove_nonexistent_is_noop() { + let mut paths = make_worktree_paths(&[("/projects/zed", "/worktrees/selectric/zed")]); + + paths.remove_main_path(Path::new("/projects/nonexistent")); + + assert_eq!(paths.ordered_pairs().count(), 1); + } + + #[test] + fn 
test_thread_worktree_paths_from_path_lists_preserves_association() { + let folder = PathList::new(&[ + Path::new("/worktrees/selectric/zed"), + Path::new("/projects/cloud"), + ]); + let main = PathList::new(&[Path::new("/projects/zed"), Path::new("/projects/cloud")]); + + let paths = ThreadWorktreePaths::from_path_lists(main, folder).unwrap(); + + let pairs: Vec<_> = paths + .ordered_pairs() + .map(|(m, f)| (m.clone(), f.clone())) + .collect(); + assert_eq!(pairs.len(), 2); + assert!(pairs.contains(&( + PathBuf::from("/projects/zed"), + PathBuf::from("/worktrees/selectric/zed") + ))); + assert!(pairs.contains(&( + PathBuf::from("/projects/cloud"), + PathBuf::from("/projects/cloud") + ))); + } + + #[test] + fn test_thread_worktree_paths_main_deduplicates_linked_worktrees() { + // Two linked worktrees of the same main repo: the main_worktree_path_list + // deduplicates because PathList stores unique sorted paths, but + // ordered_pairs still has both entries. + let paths = make_worktree_paths(&[ + ("/projects/zed", "/worktrees/selectric/zed"), + ("/projects/zed", "/worktrees/feature/zed"), + ]); + + // main_worktree_path_list has the duplicate main path twice + // (PathList keeps all entries from its input) + assert_eq!(paths.ordered_pairs().count(), 2); + assert_eq!( + paths.folder_path_list(), + &PathList::new(&[ + Path::new("/worktrees/selectric/zed"), + Path::new("/worktrees/feature/zed"), + ]) + ); + assert_eq!( + paths.main_worktree_path_list(), + &PathList::new(&[Path::new("/projects/zed"), Path::new("/projects/zed"),]) + ); + } + + #[test] + fn test_thread_worktree_paths_mismatched_lengths_returns_error() { + let folder = PathList::new(&[ + Path::new("/worktrees/selectric/zed"), + Path::new("/projects/cloud"), + ]); + let main = PathList::new(&[Path::new("/projects/zed")]); + + let result = ThreadWorktreePaths::from_path_lists(main, folder); + assert!(result.is_err()); + } } diff --git a/crates/agent_ui/src/thread_worktree_archive.rs 
b/crates/agent_ui/src/thread_worktree_archive.rs index 86c9fb946a911868439c991503dd0ace60e12aa8..4398a2154d4abd550535b247ab1a9e518f84b39d 100644 --- a/crates/agent_ui/src/thread_worktree_archive.rs +++ b/crates/agent_ui/src/thread_worktree_archive.rs @@ -192,7 +192,7 @@ pub fn path_is_referenced_by_other_unarchived_threads( .filter(|thread| !thread.archived) .any(|thread| { thread - .folder_paths + .folder_paths() .paths() .iter() .any(|other_path| other_path.as_path() == path) @@ -428,7 +428,7 @@ pub async fn persist_worktree_state(root: &RootPlan, cx: &mut AsyncApp) -> Resul .entries() .filter(|thread| { thread - .folder_paths + .folder_paths() .paths() .iter() .any(|p| p.as_path() == root.root_path) diff --git a/crates/agent_ui/src/threads_archive_view.rs b/crates/agent_ui/src/threads_archive_view.rs index 44d3e71c170111f6d647c74f237d57705d55f183..6e73584ef87f11810e4c860cc6ff4c8d8ff015a9 100644 --- a/crates/agent_ui/src/threads_archive_view.rs +++ b/crates/agent_ui/src/threads_archive_view.rs @@ -340,7 +340,7 @@ impl ThreadsArchiveView { return; } - if thread.folder_paths.is_empty() { + if thread.folder_paths().is_empty() { self.show_project_picker_for_thread(thread, window, cx); return; } @@ -537,7 +537,7 @@ impl ThreadsArchiveView { }) .timestamp(timestamp) .highlight_positions(highlight_positions.clone()) - .project_paths(thread.folder_paths.paths_owned()) + .project_paths(thread.folder_paths().paths_owned()) .focused(is_focused) .hovered(is_hovered) .on_hover(cx.listener(move |this, is_hovered, _window, cx| { @@ -930,7 +930,8 @@ impl ProjectPickerDelegate { window: &mut Window, cx: &mut Context>, ) { - self.thread.folder_paths = paths.clone(); + self.thread.worktree_paths = + super::thread_metadata_store::ThreadWorktreePaths::from_folder_paths(&paths); ThreadMetadataStore::global(cx).update(cx, |store, cx| { store.update_working_directories(&self.thread.session_id, paths, cx); }); diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs 
index 3cd7e0059ac165bcd5e738591363cb600abcd60f..9c126929a4705de4d3ffc9e6472332e86a07c2e8 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -4,7 +4,7 @@ use acp_thread::ThreadStatus; use action_log::DiffStats; use agent_client_protocol::{self as acp}; use agent_settings::AgentSettings; -use agent_ui::thread_metadata_store::{ThreadMetadata, ThreadMetadataStore}; +use agent_ui::thread_metadata_store::{ThreadMetadata, ThreadMetadataStore, ThreadWorktreePaths}; use agent_ui::thread_worktree_archive; use agent_ui::threads_archive_view::{ ThreadsArchiveView, ThreadsArchiveViewEvent, format_history_entry_timestamp, @@ -283,10 +283,8 @@ impl ListEntry { } } ListEntry::ProjectHeader { key, .. } => multi_workspace - .workspaces() - .find(|ws| PathList::new(&ws.read(cx).root_paths(cx)) == *key.path_list()) + .workspaces_for_project_group(key, cx) .cloned() - .into_iter() .collect(), ListEntry::ViewMore { .. } => Vec::new(), } @@ -365,35 +363,63 @@ fn workspace_path_list(workspace: &Entity, cx: &App) -> PathList { /// /// For each path in the thread's `folder_paths`, produces a /// [`WorktreeInfo`] with a short display name, full path, and whether -/// the worktree is the main checkout or a linked git worktree. -fn worktree_info_from_thread_paths( - folder_paths: &PathList, - group_key: &project::ProjectGroupKey, -) -> impl Iterator { - let main_paths = group_key.path_list().paths(); - folder_paths.paths().iter().filter_map(|path| { - let is_main = main_paths.iter().any(|mp| mp.as_path() == path.as_path()); - if is_main { - let name = path.file_name()?.to_string_lossy().to_string(); - Some(WorktreeInfo { - name: SharedString::from(name), - full_path: SharedString::from(path.display().to_string()), +/// the worktree is the main checkout or a linked git worktree. 
When +/// multiple main paths exist and a linked worktree's short name alone +/// wouldn't identify which main project it belongs to, the main project +/// name is prefixed for disambiguation (e.g. `project:feature`). +/// +fn worktree_info_from_thread_paths(worktree_paths: &ThreadWorktreePaths) -> Vec { + let mut infos: Vec = Vec::new(); + let mut linked_short_names: Vec<(SharedString, SharedString)> = Vec::new(); + let mut unique_main_count = HashSet::new(); + + for (main_path, folder_path) in worktree_paths.ordered_pairs() { + unique_main_count.insert(main_path.clone()); + let is_linked = main_path != folder_path; + + if is_linked { + let short_name = linked_worktree_short_name(main_path, folder_path).unwrap_or_default(); + let project_name = main_path + .file_name() + .map(|n| SharedString::from(n.to_string_lossy().to_string())) + .unwrap_or_default(); + linked_short_names.push((short_name.clone(), project_name)); + infos.push(WorktreeInfo { + name: short_name, + full_path: SharedString::from(folder_path.display().to_string()), highlight_positions: Vec::new(), - kind: ui::WorktreeKind::Main, - }) + kind: ui::WorktreeKind::Linked, + }); } else { - let main_path = main_paths - .iter() - .find(|mp| mp.file_name() == path.file_name()) - .or(main_paths.first())?; - Some(WorktreeInfo { - name: linked_worktree_short_name(main_path, path).unwrap_or_default(), - full_path: SharedString::from(path.display().to_string()), + let Some(name) = folder_path.file_name() else { + continue; + }; + infos.push(WorktreeInfo { + name: SharedString::from(name.to_string_lossy().to_string()), + full_path: SharedString::from(folder_path.display().to_string()), highlight_positions: Vec::new(), - kind: ui::WorktreeKind::Linked, - }) + kind: ui::WorktreeKind::Main, + }); } - }) + } + + // When the group has multiple main worktree paths and the thread's + // folder paths don't all share the same short name, prefix each + // linked worktree chip with its main project name so the user knows + 
// which project it belongs to. + let all_same_name = infos.len() > 1 && infos.iter().all(|i| i.name == infos[0].name); + + if unique_main_count.len() > 1 && !all_same_name { + for (info, (_short_name, project_name)) in infos + .iter_mut() + .filter(|i| i.kind == ui::WorktreeKind::Linked) + .zip(linked_short_names.iter()) + { + info.name = SharedString::from(format!("{}:{}", project_name, info.name)); + } + } + + infos } /// Shows a [`RemoteConnectionModal`] on the given workspace and establishes @@ -480,6 +506,34 @@ impl Sidebar { MultiWorkspaceEvent::WorkspaceRemoved(_) => { this.update_entries(cx); } + MultiWorkspaceEvent::WorktreePathAdded { + old_main_paths, + added_path, + } => { + let added_path = added_path.clone(); + ThreadMetadataStore::global(cx).update(cx, |store, cx| { + store.change_worktree_paths( + old_main_paths, + |paths| paths.add_path(&added_path, &added_path), + cx, + ); + }); + this.update_entries(cx); + } + MultiWorkspaceEvent::WorktreePathRemoved { + old_main_paths, + removed_path, + } => { + let removed_path = removed_path.clone(); + ThreadMetadataStore::global(cx).update(cx, |store, cx| { + store.change_worktree_paths( + old_main_paths, + |paths| paths.remove_main_path(&removed_path), + cx, + ); + }); + this.update_entries(cx); + } }, ) .detach(); @@ -947,35 +1001,33 @@ impl Sidebar { // Open; otherwise use Closed. let resolve_workspace = |row: &ThreadMetadata| -> ThreadEntryWorkspace { workspace_by_path_list - .get(&row.folder_paths) + .get(row.folder_paths()) .map(|ws| ThreadEntryWorkspace::Open((*ws).clone())) .unwrap_or_else(|| ThreadEntryWorkspace::Closed { - folder_paths: row.folder_paths.clone(), + folder_paths: row.folder_paths().clone(), project_group_key: group_key.clone(), }) }; // Build a ThreadEntry from a metadata row. 
- let make_thread_entry = |row: ThreadMetadata, - workspace: ThreadEntryWorkspace| - -> ThreadEntry { - let (icon, icon_from_external_svg) = resolve_agent_icon(&row.agent_id); - let worktrees: Vec = - worktree_info_from_thread_paths(&row.folder_paths, &group_key).collect(); - ThreadEntry { - metadata: row, - icon, - icon_from_external_svg, - status: AgentThreadStatus::default(), - workspace, - is_live: false, - is_background: false, - is_title_generating: false, - highlight_positions: Vec::new(), - worktrees, - diff_stats: DiffStats::default(), - } - }; + let make_thread_entry = + |row: ThreadMetadata, workspace: ThreadEntryWorkspace| -> ThreadEntry { + let (icon, icon_from_external_svg) = resolve_agent_icon(&row.agent_id); + let worktrees = worktree_info_from_thread_paths(&row.worktree_paths); + ThreadEntry { + metadata: row, + icon, + icon_from_external_svg, + status: AgentThreadStatus::default(), + workspace, + is_live: false, + is_background: false, + is_title_generating: false, + highlight_positions: Vec::new(), + worktrees, + diff_stats: DiffStats::default(), + } + }; // Main code path: one query per group via main_worktree_paths. // The main_worktree_paths column is set on all new threads and @@ -1190,12 +1242,15 @@ impl Sidebar { // Emit a DraftThread entry when the active draft belongs to this group. 
if is_draft_for_group { if let Some(ActiveEntry::Draft(draft_ws)) = &self.active_entry { - let ws_path_list = workspace_path_list(draft_ws, cx); - let worktrees = worktree_info_from_thread_paths(&ws_path_list, &group_key); + let ws_worktree_paths = ThreadWorktreePaths::from_project( + draft_ws.read(cx).project().read(cx), + cx, + ); + let worktrees = worktree_info_from_thread_paths(&ws_worktree_paths); entries.push(ListEntry::DraftThread { key: group_key.clone(), workspace: None, - worktrees: worktrees.collect(), + worktrees, }); } } @@ -1218,13 +1273,16 @@ impl Sidebar { if Some(ws.entity_id()) == draft_ws_id { continue; } - let ws_path_list = workspace_path_list(ws, cx); + let ws_worktree_paths = + ThreadWorktreePaths::from_project(ws.read(cx).project().read(cx), cx); let has_linked_worktrees = - worktree_info_from_thread_paths(&ws_path_list, &group_key) + worktree_info_from_thread_paths(&ws_worktree_paths) + .iter() .any(|wt| wt.kind == ui::WorktreeKind::Linked); if !has_linked_worktrees { continue; } + let ws_path_list = workspace_path_list(ws, cx); let store = thread_store.read(cx); let has_threads = store.entries_for_path(&ws_path_list).next().is_some() || store @@ -1234,8 +1292,7 @@ impl Sidebar { if has_threads { continue; } - let worktrees: Vec = - worktree_info_from_thread_paths(&ws_path_list, &group_key).collect(); + let worktrees = worktree_info_from_thread_paths(&ws_worktree_paths); entries.push(ListEntry::DraftThread { key: group_key.clone(), @@ -2170,7 +2227,7 @@ impl Sidebar { panel.load_agent_thread( Agent::from(metadata.agent_id.clone()), metadata.session_id.clone(), - Some(metadata.folder_paths.clone()), + Some(metadata.folder_paths().clone()), Some(metadata.title.clone()), focus, window, @@ -2368,7 +2425,7 @@ impl Sidebar { _ => None, }; - if metadata.folder_paths.paths().is_empty() { + if metadata.folder_paths().paths().is_empty() { ThreadMetadataStore::global(cx) .update(cx, |store, cx| store.unarchive(&session_id, cx)); @@ -2380,7 +2437,7 @@ 
impl Sidebar { if let Some(workspace) = active_workspace { self.activate_thread_locally(&metadata, &workspace, false, window, cx); } else { - let path_list = metadata.folder_paths.clone(); + let path_list = metadata.folder_paths().clone(); if let Some((target_window, workspace)) = self.find_open_workspace_for_path_list(&path_list, cx) { @@ -2398,7 +2455,7 @@ impl Sidebar { let task = store .read(cx) .get_archived_worktrees_for_thread(session_id.0.to_string(), cx); - let path_list = metadata.folder_paths.clone(); + let path_list = metadata.folder_paths().clone(); let task_session_id = session_id.clone(); let restore_task = cx.spawn_in(window, async move |this, cx| { @@ -2494,7 +2551,7 @@ impl Sidebar { cx.update(|_window, cx| store.read(cx).entry(&session_id).cloned())?; if let Some(updated_metadata) = updated_metadata { - let new_paths = updated_metadata.folder_paths.clone(); + let new_paths = updated_metadata.folder_paths().clone(); cx.update(|_window, cx| { store.update(cx, |store, cx| { @@ -2669,7 +2726,7 @@ impl Sidebar { .read(cx) .entry(session_id) .cloned(); - let thread_folder_paths = metadata.as_ref().map(|m| m.folder_paths.clone()); + let thread_folder_paths = metadata.as_ref().map(|m| m.folder_paths().clone()); // Compute which linked worktree roots should be archived from disk if // this thread is archived. 
This must happen before we remove any @@ -2696,7 +2753,7 @@ impl Sidebar { } } metadata - .folder_paths + .folder_paths() .ordered_paths() .filter_map(|path| { thread_worktree_archive::build_root_plan(path, &workspaces, cx) @@ -2902,7 +2959,7 @@ impl Sidebar { if let Some(metadata) = neighbor { if let Some(workspace) = self.multi_workspace.upgrade().and_then(|mw| { mw.read(cx) - .workspace_for_paths(&metadata.folder_paths, None, cx) + .workspace_for_paths(metadata.folder_paths(), None, cx) }) { self.activate_workspace(&workspace, window, cx); Self::load_agent_thread_in_workspace(&workspace, metadata, true, window, cx); diff --git a/crates/sidebar/src/sidebar_tests.rs b/crates/sidebar/src/sidebar_tests.rs index 6a3da0a1d07ae66b4012b87e4533ed163115f4c3..ea4ec36674878ca958a2f73af0adf749a40157f6 100644 --- a/crates/sidebar/src/sidebar_tests.rs +++ b/crates/sidebar/src/sidebar_tests.rs @@ -3,7 +3,7 @@ use acp_thread::{AcpThread, PermissionOptions, StubAgentConnection}; use agent::ThreadStore; use agent_ui::{ test_support::{active_session_id, open_thread_with_connection, send_message}, - thread_metadata_store::ThreadMetadata, + thread_metadata_store::{ThreadMetadata, ThreadWorktreePaths}, }; use chrono::DateTime; use fs::{FakeFs, Fs}; @@ -226,24 +226,14 @@ fn save_thread_metadata( cx: &mut TestAppContext, ) { cx.update(|cx| { - let (folder_paths, main_worktree_paths) = { - let project_ref = project.read(cx); - let paths: Vec> = project_ref - .visible_worktrees(cx) - .map(|worktree| worktree.read(cx).abs_path()) - .collect(); - let folder_paths = PathList::new(&paths); - let main_worktree_paths = project_ref.project_group_key(cx).path_list().clone(); - (folder_paths, main_worktree_paths) - }; + let worktree_paths = ThreadWorktreePaths::from_project(project.read(cx), cx); let metadata = ThreadMetadata { session_id, agent_id: agent::ZED_AGENT_ID.clone(), title, updated_at, created_at, - folder_paths, - main_worktree_paths, + worktree_paths, archived: false, 
remote_connection: None, }; @@ -252,6 +242,33 @@ fn save_thread_metadata( cx.run_until_parked(); } +fn save_thread_metadata_with_main_paths( + session_id: &str, + title: &str, + folder_paths: PathList, + main_worktree_paths: PathList, + cx: &mut TestAppContext, +) { + let session_id = acp::SessionId::new(Arc::from(session_id)); + let title = SharedString::from(title.to_string()); + let updated_at = chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(); + let metadata = ThreadMetadata { + session_id, + agent_id: agent::ZED_AGENT_ID.clone(), + title, + updated_at, + created_at: None, + worktree_paths: ThreadWorktreePaths::from_path_lists(main_worktree_paths, folder_paths) + .unwrap(), + archived: false, + remote_connection: None, + }; + cx.update(|cx| { + ThreadMetadataStore::global(cx).update(cx, |store, cx| store.save_manually(metadata, cx)); + }); + cx.run_until_parked(); +} + fn focus_sidebar(sidebar: &Entity, cx: &mut gpui::VisualTestContext) { sidebar.update_in(cx, |_, window, cx| { cx.focus_self(window); @@ -323,6 +340,11 @@ fn visible_entries_as_strings( } else { "" }; + let is_active = sidebar + .active_entry + .as_ref() + .is_some_and(|active| active.matches_entry(entry)); + let active_indicator = if is_active { " (active)" } else { "" }; match entry { ListEntry::ProjectHeader { label, @@ -339,7 +361,7 @@ fn visible_entries_as_strings( } ListEntry::Thread(thread) => { let title = thread.metadata.title.as_ref(); - let active = if thread.is_live { " *" } else { "" }; + let live = if thread.is_live { " *" } else { "" }; let status_str = match thread.status { AgentThreadStatus::Running => " (running)", AgentThreadStatus::Error => " (error)", @@ -355,7 +377,7 @@ fn visible_entries_as_strings( "" }; let worktree = format_linked_worktree_chips(&thread.worktrees); - format!(" {title}{worktree}{active}{status_str}{notified}{selected}") + format!(" {title}{worktree}{live}{status_str}{notified}{active_indicator}{selected}") } ListEntry::ViewMore { 
is_fully_expanded, .. @@ -375,7 +397,7 @@ fn visible_entries_as_strings( if workspace.is_some() { format!(" [+ New Thread{}]{}", worktree, selected) } else { - format!(" [~ Draft{}]{}", worktree, selected) + format!(" [~ Draft{}]{}{}", worktree, active_indicator, selected) } } } @@ -544,7 +566,10 @@ async fn test_single_workspace_no_threads(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]"] + vec![ + // + "v [my-project]", + ] ); } @@ -580,6 +605,7 @@ async fn test_single_workspace_with_saved_threads(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [my-project]", " Fix crash in project panel", " Add inline diff view", @@ -610,7 +636,11 @@ async fn test_workspace_lifecycle(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project-a]", " Thread A1"] + vec![ + // + "v [project-a]", + " Thread A1", + ] ); // Add a second workspace @@ -621,7 +651,11 @@ async fn test_workspace_lifecycle(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project-a]", " Thread A1",] + vec![ + // + "v [project-a]", + " Thread A1", + ] ); } @@ -640,6 +674,7 @@ async fn test_view_more_pagination(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [my-project]", " Thread 12", " Thread 11", @@ -750,7 +785,11 @@ async fn test_collapse_and_expand_group(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Thread 1"] + vec![ + // + "v [my-project]", + " Thread 1", + ] ); // Collapse @@ -761,7 +800,10 @@ async fn test_collapse_and_expand_group(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["> [my-project]"] + vec![ + // + "> [my-project]", + ] ); // Expand @@ -772,7 +814,11 @@ async fn test_collapse_and_expand_group(cx: &mut TestAppContext) { assert_eq!( 
visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Thread 1"] + vec![ + // + "v [my-project]", + " Thread 1", + ] ); } @@ -808,8 +854,7 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { metadata: ThreadMetadata { session_id: acp::SessionId::new(Arc::from("t-1")), agent_id: AgentId::new("zed-agent"), - folder_paths: PathList::default(), - main_worktree_paths: PathList::default(), + worktree_paths: ThreadWorktreePaths::default(), title: "Completed thread".into(), updated_at: Utc::now(), created_at: Some(Utc::now()), @@ -832,8 +877,7 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { metadata: ThreadMetadata { session_id: acp::SessionId::new(Arc::from("t-2")), agent_id: AgentId::new("zed-agent"), - folder_paths: PathList::default(), - main_worktree_paths: PathList::default(), + worktree_paths: ThreadWorktreePaths::default(), title: "Running thread".into(), updated_at: Utc::now(), created_at: Some(Utc::now()), @@ -856,13 +900,12 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { metadata: ThreadMetadata { session_id: acp::SessionId::new(Arc::from("t-3")), agent_id: AgentId::new("zed-agent"), - remote_connection: None, - folder_paths: PathList::default(), - main_worktree_paths: PathList::default(), + worktree_paths: ThreadWorktreePaths::default(), title: "Error thread".into(), updated_at: Utc::now(), created_at: Some(Utc::now()), archived: false, + remote_connection: None, }, icon: IconName::ZedAgent, icon_from_external_svg: None, @@ -881,13 +924,12 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { metadata: ThreadMetadata { session_id: acp::SessionId::new(Arc::from("t-4")), agent_id: AgentId::new("zed-agent"), - remote_connection: None, - folder_paths: PathList::default(), - main_worktree_paths: PathList::default(), + worktree_paths: ThreadWorktreePaths::default(), title: "Waiting thread".into(), updated_at: Utc::now(), created_at: Some(Utc::now()), archived: false, + 
remote_connection: None, }, icon: IconName::ZedAgent, icon_from_external_svg: None, @@ -906,13 +948,12 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { metadata: ThreadMetadata { session_id: acp::SessionId::new(Arc::from("t-5")), agent_id: AgentId::new("zed-agent"), - remote_connection: None, - folder_paths: PathList::default(), - main_worktree_paths: PathList::default(), + worktree_paths: ThreadWorktreePaths::default(), title: "Notified thread".into(), updated_at: Utc::now(), created_at: Some(Utc::now()), archived: false, + remote_connection: None, }, icon: IconName::ZedAgent, icon_from_external_svg: None, @@ -949,6 +990,7 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [expanded-project]", " Completed thread", " Running thread * (running) <== selected", @@ -1112,10 +1154,14 @@ async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestA assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Thread 1"] + vec![ + // + "v [my-project]", + " Thread 1", + ] ); - // Focus the sidebar and select the header (index 0) + // Focus the sidebar and select the header focus_sidebar(&sidebar, cx); sidebar.update_in(cx, |sidebar, _window, _cx| { sidebar.selection = Some(0); @@ -1127,7 +1173,10 @@ async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestA assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["> [my-project] <== selected"] + vec![ + // + "> [my-project] <== selected", + ] ); // Confirm again expands the group @@ -1136,7 +1185,11 @@ async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestA assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project] <== selected", " Thread 1",] + vec![ + // + "v [my-project] <== selected", + " Thread 1", + ] ); } @@ -1187,7 +1240,11 @@ async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut 
TestAppContex assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Thread 1"] + vec![ + // + "v [my-project]", + " Thread 1", + ] ); // Focus sidebar and manually select the header (index 0). Press left to collapse. @@ -1201,7 +1258,10 @@ async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContex assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["> [my-project] <== selected"] + vec![ + // + "> [my-project] <== selected", + ] ); // Press right to expand @@ -1210,7 +1270,11 @@ async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContex assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project] <== selected", " Thread 1",] + vec![ + // + "v [my-project] <== selected", + " Thread 1", + ] ); // Press right again on already-expanded header moves selection down @@ -1237,7 +1301,11 @@ async fn test_keyboard_collapse_from_child_selects_parent(cx: &mut TestAppContex assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Thread 1 <== selected",] + vec![ + // + "v [my-project]", + " Thread 1 <== selected", + ] ); // Pressing left on a child collapses the parent group and selects it @@ -1247,7 +1315,10 @@ async fn test_keyboard_collapse_from_child_selects_parent(cx: &mut TestAppContex assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["> [my-project] <== selected"] + vec![ + // + "> [my-project] <== selected", + ] ); } @@ -1261,7 +1332,10 @@ async fn test_keyboard_navigation_on_empty_list(cx: &mut TestAppContext) { // An empty project has only the header. 
assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [empty-project]"] + vec![ + // + "v [empty-project]", + ] ); // Focus sidebar — focus_in does not set a selection @@ -1393,7 +1467,12 @@ async fn test_parallel_threads_shown_with_live_status(cx: &mut TestAppContext) { entries[1..].sort(); assert_eq!( entries, - vec!["v [my-project]", " Hello *", " Hello * (running)",] + vec![ + // + "v [my-project]", + " Hello * (active)", + " Hello * (running)", + ] ); } @@ -1486,7 +1565,11 @@ async fn test_background_thread_completion_triggers_notification(cx: &mut TestAp // Thread A is still running; no notification yet. assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project-a]", " Hello * (running)",] + vec![ + // + "v [project-a]", + " Hello * (running) (active)", + ] ); // Complete thread A's turn (transition Running → Completed). @@ -1496,7 +1579,11 @@ async fn test_background_thread_completion_triggers_notification(cx: &mut TestAp // The completed background thread shows a notification indicator. assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project-a]", " Hello * (!)",] + vec![ + // + "v [project-a]", + " Hello * (!) (active)", + ] ); } @@ -1536,6 +1623,7 @@ async fn test_search_narrows_visible_threads_to_matches(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [my-project]", " Fix crash in project panel", " Add inline diff view", @@ -1548,7 +1636,11 @@ async fn test_search_narrows_visible_threads_to_matches(cx: &mut TestAppContext) type_in_search(&sidebar, "diff", cx); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Add inline diff view <== selected",] + vec![ + // + "v [my-project]", + " Add inline diff view <== selected", + ] ); // User changes query to something with no matches — list is empty. 
@@ -1583,6 +1675,7 @@ async fn test_search_matches_regardless_of_case(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [my-project]", " Fix Crash In Project Panel <== selected", ] @@ -1593,6 +1686,7 @@ async fn test_search_matches_regardless_of_case(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [my-project]", " Fix Crash In Project Panel <== selected", ] @@ -1623,7 +1717,12 @@ async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContex // Confirm the full list is showing. assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Alpha thread", " Beta thread",] + vec![ + // + "v [my-project]", + " Alpha thread", + " Beta thread", + ] ); // User types a search query to filter down. @@ -1631,7 +1730,11 @@ async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContex type_in_search(&sidebar, "alpha", cx); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Alpha thread <== selected",] + vec![ + // + "v [my-project]", + " Alpha thread <== selected", + ] ); // User presses Escape — filter clears, full list is restored. 
@@ -1641,6 +1744,7 @@ async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContex assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [my-project]", " Alpha thread <== selected", " Beta thread", @@ -1697,6 +1801,7 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [project-a]", " Fix bug in sidebar", " Add tests for editor", @@ -1707,7 +1812,11 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC type_in_search(&sidebar, "sidebar", cx); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project-a]", " Fix bug in sidebar <== selected",] + vec![ + // + "v [project-a]", + " Fix bug in sidebar <== selected", + ] ); // "typo" only matches in the second workspace — the first header disappears. @@ -1723,6 +1832,7 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [project-a]", " Fix bug in sidebar <== selected", " Add tests for editor", @@ -1782,6 +1892,7 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [alpha-project]", " Fix bug in sidebar <== selected", " Add tests for editor", @@ -1793,7 +1904,11 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { type_in_search(&sidebar, "sidebar", cx); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [alpha-project]", " Fix bug in sidebar <== selected",] + vec![ + // + "v [alpha-project]", + " Fix bug in sidebar <== selected", + ] ); // "alpha sidebar" matches the workspace name "alpha-project" (fuzzy: a-l-p-h-a-s-i-d-e-b-a-r @@ -1803,7 +1918,11 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { type_in_search(&sidebar, "fix", cx); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v 
[alpha-project]", " Fix bug in sidebar <== selected",] + vec![ + // + "v [alpha-project]", + " Fix bug in sidebar <== selected", + ] ); // A query that matches a workspace name AND a thread in that same workspace. @@ -1812,6 +1931,7 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [alpha-project]", " Fix bug in sidebar <== selected", " Add tests for editor", @@ -1825,6 +1945,7 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [alpha-project]", " Fix bug in sidebar <== selected", " Add tests for editor", @@ -1874,7 +1995,11 @@ async fn test_search_finds_threads_hidden_behind_view_more(cx: &mut TestAppConte let filtered = visible_entries_as_strings(&sidebar, cx); assert_eq!( filtered, - vec!["v [my-project]", " Hidden gem thread <== selected",] + vec![ + // + "v [my-project]", + " Hidden gem thread <== selected", + ] ); assert!( !filtered.iter().any(|e| e.contains("View More")), @@ -1910,14 +2035,21 @@ async fn test_search_finds_threads_inside_collapsed_groups(cx: &mut TestAppConte assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["> [my-project] <== selected"] + vec![ + // + "> [my-project] <== selected", + ] ); // User types a search — the thread appears even though its group is collapsed. 
type_in_search(&sidebar, "important", cx); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["> [my-project]", " Important thread <== selected",] + vec![ + // + "> [my-project]", + " Important thread <== selected", + ] ); } @@ -1951,6 +2083,7 @@ async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [my-project]", " Fix crash in panel <== selected", " Fix lint warnings", @@ -1963,6 +2096,7 @@ async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [my-project]", " Fix crash in panel", " Fix lint warnings <== selected", @@ -1974,6 +2108,7 @@ async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [my-project]", " Fix crash in panel <== selected", " Fix lint warnings", @@ -2014,7 +2149,11 @@ async fn test_confirm_on_historical_thread_activates_workspace(cx: &mut TestAppC assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Historical Thread",] + vec![ + // + "v [my-project]", + " Historical Thread", + ] ); // Switch to workspace 1 so we can verify the confirm switches back. @@ -2075,7 +2214,12 @@ async fn test_click_clears_selection_and_focus_in_restores_it(cx: &mut TestAppCo assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Thread A", " Thread B",] + vec![ + // + "v [my-project]", + " Thread A", + " Thread B", + ] ); // Keyboard confirm preserves selection. @@ -2127,7 +2271,11 @@ async fn test_thread_title_update_propagates_to_sidebar(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Hello *"] + vec![ + // + "v [my-project]", + " Hello * (active)", + ] ); // Simulate the agent generating a title. 
The notification chain is: @@ -2149,7 +2297,11 @@ async fn test_thread_title_update_propagates_to_sidebar(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Friendly Greeting with AI *"] + vec![ + // + "v [my-project]", + " Friendly Greeting with AI * (active)", + ] ); } @@ -2202,8 +2354,7 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) { title: "Test".into(), updated_at: Utc::now(), created_at: None, - folder_paths: PathList::default(), - main_worktree_paths: PathList::default(), + worktree_paths: ThreadWorktreePaths::default(), archived: false, remote_connection: None, }, @@ -2259,8 +2410,7 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) { title: "Thread B".into(), updated_at: Utc::now(), created_at: None, - folder_paths: PathList::default(), - main_worktree_paths: PathList::default(), + worktree_paths: ThreadWorktreePaths::default(), archived: false, remote_connection: None, }, @@ -2312,167 +2462,935 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) { save_test_thread_metadata(&session_id_b2, &project_b, cx).await; cx.run_until_parked(); - // Panel B is not the active workspace's panel (workspace A is - // active), so opening a thread there should not change focused_thread. - // This prevents running threads in background workspaces from causing - // the selection highlight to jump around. - sidebar.read_with(cx, |sidebar, _cx| { - assert_active_thread( - sidebar, - &session_id_a, - "Opening a thread in a non-active panel should not change focused_thread", - ); - }); + // Panel B is not the active workspace's panel (workspace A is + // active), so opening a thread there should not change focused_thread. + // This prevents running threads in background workspaces from causing + // the selection highlight to jump around. 
+ sidebar.read_with(cx, |sidebar, _cx| { + assert_active_thread( + sidebar, + &session_id_a, + "Opening a thread in a non-active panel should not change focused_thread", + ); + }); + + workspace_b.update_in(cx, |workspace, window, cx| { + workspace.focus_handle(cx).focus(window, cx); + }); + cx.run_until_parked(); + + sidebar.read_with(cx, |sidebar, _cx| { + assert_active_thread( + sidebar, + &session_id_a, + "Defocusing the sidebar should not change focused_thread", + ); + }); + + // Switching workspaces via the multi_workspace (simulates clicking + // a workspace header) should clear focused_thread. + multi_workspace.update_in(cx, |mw, window, cx| { + let workspace = mw.workspaces().find(|w| *w == &workspace_b).cloned(); + if let Some(workspace) = workspace { + mw.activate(workspace, window, cx); + } + }); + cx.run_until_parked(); + + sidebar.read_with(cx, |sidebar, _cx| { + assert_active_thread( + sidebar, + &session_id_b2, + "Switching workspace should seed focused_thread from the new active panel", + ); + assert!( + has_thread_entry(sidebar, &session_id_b2), + "The seeded thread should be present in the entries" + ); + }); + + // ── 8. Focusing the agent panel thread keeps focused_thread ──── + // Workspace B still has session_id_b2 loaded in the agent panel. + // Clicking into the thread (simulated by focusing its view) should + // keep focused_thread since it was already seeded on workspace switch. 
+ panel_b.update_in(cx, |panel, window, cx| { + if let Some(thread_view) = panel.active_conversation_view() { + thread_view.read(cx).focus_handle(cx).focus(window, cx); + } + }); + cx.run_until_parked(); + + sidebar.read_with(cx, |sidebar, _cx| { + assert_active_thread( + sidebar, + &session_id_b2, + "Focusing the agent panel thread should set focused_thread", + ); + assert!( + has_thread_entry(sidebar, &session_id_b2), + "The focused thread should be present in the entries" + ); + }); +} + +#[gpui::test] +async fn test_new_thread_button_works_after_adding_folder(cx: &mut TestAppContext) { + let project = init_test_project_with_agent_panel("/project-a", cx).await; + let fs = cx.update(|cx| ::global(cx)); + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); + + // Start a thread and send a message so it has history. + let connection = StubAgentConnection::new(); + connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Done".into()), + )]); + open_thread_with_connection(&panel, connection, cx); + send_message(&panel, cx); + let session_id = active_session_id(&panel, cx); + save_test_thread_metadata(&session_id, &project, cx).await; + cx.run_until_parked(); + + // Verify the thread appears in the sidebar. + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + // + "v [project-a]", + " Hello * (active)", + ] + ); + + // The "New Thread" button should NOT be in "active/draft" state + // because the panel has a thread with messages. + sidebar.read_with(cx, |sidebar, _cx| { + assert!( + matches!(&sidebar.active_entry, Some(ActiveEntry::Thread { .. })), + "Panel has a thread with messages, so active_entry should be Thread, got {:?}", + sidebar.active_entry, + ); + }); + + // Now add a second folder to the workspace, changing the path_list. 
+ fs.as_fake() + .insert_tree("/project-b", serde_json::json!({ "src": {} })) + .await; + project + .update(cx, |project, cx| { + project.find_or_create_worktree("/project-b", true, cx) + }) + .await + .expect("should add worktree"); + cx.run_until_parked(); + + // The workspace path_list is now [project-a, project-b]. The active + // thread's metadata was re-saved with the new paths by the agent panel's + // project subscription. The old [project-a] key is replaced by the new + // key since no other workspace claims it. + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + // + "v [project-a, project-b]", + " Hello * (active)", + ] + ); + + // The "New Thread" button must still be clickable (not stuck in + // "active/draft" state). Verify that `active_thread_is_draft` is + // false — the panel still has the old thread with messages. + sidebar.read_with(cx, |sidebar, _cx| { + assert!( + matches!(&sidebar.active_entry, Some(ActiveEntry::Thread { .. })), + "After adding a folder the panel still has a thread with messages, \ + so active_entry should be Thread, got {:?}", + sidebar.active_entry, + ); + }); + + // Actually click "New Thread" by calling create_new_thread and + // verify a new draft is created. + let workspace = multi_workspace.read_with(cx, |mw, _cx| mw.workspace().clone()); + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.create_new_thread(&workspace, window, cx); + }); + cx.run_until_parked(); + + // After creating a new thread, the panel should now be in draft + // state (no messages on the new thread). + sidebar.read_with(cx, |sidebar, _cx| { + assert_active_draft( + sidebar, + &workspace, + "After creating a new thread active_entry should be Draft", + ); + }); +} + +#[gpui::test] +async fn test_worktree_add_and_remove_migrates_threads(cx: &mut TestAppContext) { + // When a worktree is added to a project, the project group key changes + // and all historical threads should be migrated to the new key. 
Removing + // the worktree should migrate them back. + let (_fs, project) = init_multi_project_test(&["/project-a", "/project-b"], cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Save two threads against the initial project group [/project-a]. + save_n_test_threads(2, &project, cx).await; + sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + // + "v [project-a]", + " Thread 2", + " Thread 1", + ] + ); + + // Verify the metadata store has threads under the old key. + let old_key_paths = PathList::new(&[PathBuf::from("/project-a")]); + cx.update(|_window, cx| { + let store = ThreadMetadataStore::global(cx).read(cx); + assert_eq!( + store.entries_for_main_worktree_path(&old_key_paths).count(), + 2, + "should have 2 threads under old key before add" + ); + }); + + // Add a second worktree to the same project. + project + .update(cx, |project, cx| { + project.find_or_create_worktree("/project-b", true, cx) + }) + .await + .expect("should add worktree"); + cx.run_until_parked(); + + // The project group key should now be [/project-a, /project-b]. + let new_key_paths = PathList::new(&[PathBuf::from("/project-a"), PathBuf::from("/project-b")]); + + // Verify multi-workspace state: exactly one project group key, the new one. + multi_workspace.read_with(cx, |mw, _cx| { + let keys: Vec<_> = mw.project_group_keys().cloned().collect(); + assert_eq!( + keys.len(), + 1, + "should have exactly 1 project group key after add" + ); + assert_eq!( + keys[0].path_list(), + &new_key_paths, + "the key should be the new combined path list" + ); + }); + + // Verify threads were migrated to the new key. 
+ cx.update(|_window, cx| { + let store = ThreadMetadataStore::global(cx).read(cx); + assert_eq!( + store.entries_for_main_worktree_path(&old_key_paths).count(), + 0, + "should have 0 threads under old key after migration" + ); + assert_eq!( + store.entries_for_main_worktree_path(&new_key_paths).count(), + 2, + "should have 2 threads under new key after migration" + ); + }); + + // Sidebar should show threads under the new header. + sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + // + "v [project-a, project-b]", + " Thread 2", + " Thread 1", + ] + ); + + // Now remove the second worktree. + let worktree_id = project.read_with(cx, |project, cx| { + project + .visible_worktrees(cx) + .find(|wt| wt.read(cx).abs_path().as_ref() == Path::new("/project-b")) + .map(|wt| wt.read(cx).id()) + .expect("should find project-b worktree") + }); + project.update(cx, |project, cx| { + project.remove_worktree(worktree_id, cx); + }); + cx.run_until_parked(); + + // The key should revert to [/project-a]. + multi_workspace.read_with(cx, |mw, _cx| { + let keys: Vec<_> = mw.project_group_keys().cloned().collect(); + assert_eq!( + keys.len(), + 1, + "should have exactly 1 project group key after remove" + ); + assert_eq!( + keys[0].path_list(), + &old_key_paths, + "the key should revert to the original path list" + ); + }); + + // Threads should be migrated back to the old key. 
+ cx.update(|_window, cx| { + let store = ThreadMetadataStore::global(cx).read(cx); + assert_eq!( + store.entries_for_main_worktree_path(&new_key_paths).count(), + 0, + "should have 0 threads under new key after revert" + ); + assert_eq!( + store.entries_for_main_worktree_path(&old_key_paths).count(), + 2, + "should have 2 threads under old key after revert" + ); + }); + + sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + // + "v [project-a]", + " Thread 2", + " Thread 1", + ] + ); +} + +#[gpui::test] +async fn test_worktree_add_and_remove_preserves_thread_path_associations(cx: &mut TestAppContext) { + // Verifies that adding/removing folders to a project correctly updates + // each thread's worktree_paths (both folder_paths and main_worktree_paths) + // while preserving per-path associations for linked worktrees. + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/project", + serde_json::json!({ + ".git": {}, + "src": {}, + }), + ) + .await; + fs.add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { + path: PathBuf::from("/wt-feature"), + ref_name: Some("refs/heads/feature".into()), + sha: "aaa".into(), + is_main: false, + }, + ) + .await; + fs.insert_tree("/other-project", serde_json::json!({ ".git": {} })) + .await; + cx.update(|cx| ::set_global(fs.clone(), cx)); + + // Start with a linked worktree workspace: visible root is /wt-feature, + // main repo is /project. + let project = + project::Project::test(fs.clone() as Arc, ["/wt-feature".as_ref()], cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let _sidebar = setup_sidebar(&multi_workspace, cx); + + // Save a thread. It should have folder_paths=[/wt-feature], main=[/project]. 
+ save_named_thread_metadata("thread-1", "Thread 1", &project, cx).await; + + let session_id = acp::SessionId::new(Arc::from("thread-1")); + cx.update(|_window, cx| { + let store = ThreadMetadataStore::global(cx).read(cx); + let thread = store.entry(&session_id).expect("thread should exist"); + assert_eq!( + thread.folder_paths().paths(), + &[PathBuf::from("/wt-feature")], + "initial folder_paths should be the linked worktree" + ); + assert_eq!( + thread.main_worktree_paths().paths(), + &[PathBuf::from("/project")], + "initial main_worktree_paths should be the main repo" + ); + }); + + // Add /other-project to the workspace. + project + .update(cx, |project, cx| { + project.find_or_create_worktree("/other-project", true, cx) + }) + .await + .expect("should add worktree"); + cx.run_until_parked(); + + // Thread should now have both paths, with correct associations. + cx.update(|_window, cx| { + let store = ThreadMetadataStore::global(cx).read(cx); + let thread = store.entry(&session_id).expect("thread should exist"); + let pairs: Vec<_> = thread + .worktree_paths + .ordered_pairs() + .map(|(m, f)| (m.clone(), f.clone())) + .collect(); + assert!( + pairs.contains(&(PathBuf::from("/project"), PathBuf::from("/wt-feature"))), + "linked worktree association should be preserved, got: {:?}", + pairs + ); + assert!( + pairs.contains(&( + PathBuf::from("/other-project"), + PathBuf::from("/other-project") + )), + "new folder should have main == folder, got: {:?}", + pairs + ); + }); + + // Remove /other-project. + let worktree_id = project.read_with(cx, |project, cx| { + project + .visible_worktrees(cx) + .find(|wt| wt.read(cx).abs_path().as_ref() == Path::new("/other-project")) + .map(|wt| wt.read(cx).id()) + .expect("should find other-project worktree") + }); + project.update(cx, |project, cx| { + project.remove_worktree(worktree_id, cx); + }); + cx.run_until_parked(); + + // Thread should be back to original state. 
+ cx.update(|_window, cx| { + let store = ThreadMetadataStore::global(cx).read(cx); + let thread = store.entry(&session_id).expect("thread should exist"); + assert_eq!( + thread.folder_paths().paths(), + &[PathBuf::from("/wt-feature")], + "folder_paths should revert to just the linked worktree" + ); + assert_eq!( + thread.main_worktree_paths().paths(), + &[PathBuf::from("/project")], + "main_worktree_paths should revert to just the main repo" + ); + let pairs: Vec<_> = thread + .worktree_paths + .ordered_pairs() + .map(|(m, f)| (m.clone(), f.clone())) + .collect(); + assert_eq!( + pairs, + vec![(PathBuf::from("/project"), PathBuf::from("/wt-feature"))], + "linked worktree association should be preserved through add+remove cycle" + ); + }); +} + +#[gpui::test] +async fn test_worktree_add_key_collision_removes_duplicate_workspace(cx: &mut TestAppContext) { + // When a worktree is added to workspace A and the resulting key matches + // an existing workspace B's key (and B has the same root paths), B + // should be removed as a true duplicate. + let (fs, project_a) = init_multi_project_test(&["/project-a", "/project-b"], cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Save a thread against workspace A [/project-a]. + save_named_thread_metadata("thread-a", "Thread A", &project_a, cx).await; + + // Create workspace B with both worktrees [/project-a, /project-b]. + let project_b = project::Project::test( + fs.clone() as Arc, + ["/project-a".as_ref(), "/project-b".as_ref()], + cx, + ) + .await; + let workspace_b = multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_b.clone(), window, cx) + }); + cx.run_until_parked(); + + // Switch back to workspace A so it's the active workspace when the collision happens. 
+ let workspace_a = + multi_workspace.read_with(cx, |mw, _| mw.workspaces().next().unwrap().clone()); + multi_workspace.update_in(cx, |mw, window, cx| { + mw.activate(workspace_a, window, cx); + }); + cx.run_until_parked(); + + // Save a thread against workspace B [/project-a, /project-b]. + save_named_thread_metadata("thread-b", "Thread B", &project_b, cx).await; + + sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); + cx.run_until_parked(); + + // Both project groups should be visible. + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + // + "v [project-a, project-b]", + " Thread B", + "v [project-a]", + " Thread A", + ] + ); + + let workspace_b_id = workspace_b.entity_id(); + + // Now add /project-b to workspace A's project, causing a key collision. + project_a + .update(cx, |project, cx| { + project.find_or_create_worktree("/project-b", true, cx) + }) + .await + .expect("should add worktree"); + cx.run_until_parked(); + + // Workspace B should have been removed (true duplicate — same root paths). + multi_workspace.read_with(cx, |mw, _cx| { + let workspace_ids: Vec<_> = mw.workspaces().map(|ws| ws.entity_id()).collect(); + assert!( + !workspace_ids.contains(&workspace_b_id), + "workspace B should have been removed after key collision" + ); + }); + + // There should be exactly one project group key now. + let combined_paths = PathList::new(&[PathBuf::from("/project-a"), PathBuf::from("/project-b")]); + multi_workspace.read_with(cx, |mw, _cx| { + let keys: Vec<_> = mw.project_group_keys().cloned().collect(); + assert_eq!( + keys.len(), + 1, + "should have exactly 1 project group key after collision" + ); + assert_eq!( + keys[0].path_list(), + &combined_paths, + "the remaining key should be the combined paths" + ); + }); + + // Both threads should be visible under the merged group. 
+ sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + // + "v [project-a, project-b]", + " Thread A", + " Thread B", + ] + ); +} + +#[gpui::test] +async fn test_worktree_collision_keeps_active_workspace(cx: &mut TestAppContext) { + // When workspace A adds a folder that makes it collide with workspace B, + // and B is the *active* workspace, A (the incoming one) should be + // dropped so the user stays on B. A linked worktree sibling of A + // should migrate into B's group. + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + // Set up /project-a with a linked worktree. + fs.insert_tree( + "/project-a", + serde_json::json!({ + ".git": { + "worktrees": { + "feature": { + "commondir": "../../", + "HEAD": "ref: refs/heads/feature", + }, + }, + }, + "src": {}, + }), + ) + .await; + fs.insert_tree( + "/wt-feature", + serde_json::json!({ + ".git": "gitdir: /project-a/.git/worktrees/feature", + "src": {}, + }), + ) + .await; + fs.add_linked_worktree_for_repo( + Path::new("/project-a/.git"), + false, + git::repository::Worktree { + path: PathBuf::from("/wt-feature"), + ref_name: Some("refs/heads/feature".into()), + sha: "aaa".into(), + is_main: false, + }, + ) + .await; + fs.insert_tree("/project-b", serde_json::json!({ ".git": {}, "src": {} })) + .await; + cx.update(|cx| ::set_global(fs.clone(), cx)); + + let project_a = project::Project::test(fs.clone(), ["/project-a".as_ref()], cx).await; + project_a.update(cx, |p, cx| p.git_scans_complete(cx)).await; + + // Linked worktree sibling of A. + let project_wt = project::Project::test(fs.clone(), ["/wt-feature".as_ref()], cx).await; + project_wt + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + + // Workspace B has both folders already. 
+ let project_b = project::Project::test( + fs.clone() as Arc, + ["/project-a".as_ref(), "/project-b".as_ref()], + cx, + ) + .await; + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Add agent panels to all workspaces. + let workspace_a_entity = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + add_agent_panel(&workspace_a_entity, cx); + + // Add the linked worktree workspace (sibling of A). + let workspace_wt = multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_wt.clone(), window, cx) + }); + add_agent_panel(&workspace_wt, cx); + cx.run_until_parked(); + + // Add workspace B (will become active). + let workspace_b = multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_b.clone(), window, cx) + }); + add_agent_panel(&workspace_b, cx); + cx.run_until_parked(); + + // Save threads in each group. + save_named_thread_metadata("thread-a", "Thread A", &project_a, cx).await; + save_thread_metadata_with_main_paths( + "thread-wt", + "Worktree Thread", + PathList::new(&[PathBuf::from("/wt-feature")]), + PathList::new(&[PathBuf::from("/project-a")]), + cx, + ); + save_named_thread_metadata("thread-b", "Thread B", &project_b, cx).await; + + sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); + cx.run_until_parked(); + + // B is active, A and wt-feature are in one group, B in another. 
+ assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspace().entity_id()), + workspace_b.entity_id(), + "workspace B should be active" + ); + multi_workspace.read_with(cx, |mw, _cx| { + assert_eq!(mw.project_group_keys().count(), 2, "should have 2 groups"); + assert_eq!(mw.workspaces().count(), 3, "should have 3 workspaces"); + }); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + // + "v [project-a, project-b]", + " [~ Draft] (active)", + " Thread B", + "v [project-a]", + " Thread A", + " Worktree Thread {wt-feature}", + ] + ); + + let workspace_a = multi_workspace.read_with(cx, |mw, _| { + mw.workspaces() + .find(|ws| { + ws.entity_id() != workspace_b.entity_id() + && ws.entity_id() != workspace_wt.entity_id() + }) + .unwrap() + .clone() + }); + + // Add /project-b to workspace A's project, causing a collision with B. + project_a + .update(cx, |project, cx| { + project.find_or_create_worktree("/project-b", true, cx) + }) + .await + .expect("should add worktree"); + cx.run_until_parked(); + + // Workspace A (the incoming duplicate) should have been dropped. + multi_workspace.read_with(cx, |mw, _cx| { + let workspace_ids: Vec<_> = mw.workspaces().map(|ws| ws.entity_id()).collect(); + assert!( + !workspace_ids.contains(&workspace_a.entity_id()), + "workspace A should have been dropped" + ); + }); + + // The active workspace should still be B. + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspace().entity_id()), + workspace_b.entity_id(), + "workspace B should still be active" + ); + + // The linked worktree sibling should have migrated into B's group + // (it got the folder add and now shares the same key). 
+ multi_workspace.read_with(cx, |mw, _cx| { + let workspace_ids: Vec<_> = mw.workspaces().map(|ws| ws.entity_id()).collect(); + assert!( + workspace_ids.contains(&workspace_wt.entity_id()), + "linked worktree workspace should still exist" + ); + assert_eq!( + mw.project_group_keys().count(), + 1, + "should have 1 group after merge" + ); + assert_eq!( + mw.workspaces().count(), + 2, + "should have 2 workspaces (B + linked worktree)" + ); + }); + + // The linked worktree workspace should have gotten the new folder. + let wt_worktree_count = + project_wt.read_with(cx, |project, cx| project.visible_worktrees(cx).count()); + assert_eq!( + wt_worktree_count, 2, + "linked worktree project should have gotten /project-b" + ); + + // After: everything merged under one group. Thread A migrated, + // worktree thread shows its chip, B's thread and draft remain. + sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + // + "v [project-a, project-b]", + " [~ Draft] (active)", + " Thread A", + " Worktree Thread {project-a:wt-feature}", + " Thread B", + ] + ); +} + +#[gpui::test] +async fn test_worktree_add_syncs_linked_worktree_sibling(cx: &mut TestAppContext) { + // When a worktree is added to the main workspace, a linked worktree + // sibling (different root paths, same project group key) should also + // get the new folder added to its project. 
+ init_test(cx); + let fs = FakeFs::new(cx.executor()); + + fs.insert_tree( + "/project", + serde_json::json!({ + ".git": { + "worktrees": { + "feature": { + "commondir": "../../", + "HEAD": "ref: refs/heads/feature", + }, + }, + }, + "src": {}, + }), + ) + .await; + + fs.insert_tree( + "/wt-feature", + serde_json::json!({ + ".git": "gitdir: /project/.git/worktrees/feature", + "src": {}, + }), + ) + .await; + + fs.add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { + path: PathBuf::from("/wt-feature"), + ref_name: Some("refs/heads/feature".into()), + sha: "aaa".into(), + is_main: false, + }, + ) + .await; + + // Create a second independent project to add as a folder later. + fs.insert_tree( + "/other-project", + serde_json::json!({ ".git": {}, "src": {} }), + ) + .await; + + cx.update(|cx| ::set_global(fs.clone(), cx)); - workspace_b.update_in(cx, |workspace, window, cx| { - workspace.focus_handle(cx).focus(window, cx); - }); - cx.run_until_parked(); + let main_project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await; + let worktree_project = project::Project::test(fs.clone(), ["/wt-feature".as_ref()], cx).await; - sidebar.read_with(cx, |sidebar, _cx| { - assert_active_thread( - sidebar, - &session_id_a, - "Defocusing the sidebar should not change focused_thread", - ); - }); + main_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + worktree_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; - // Switching workspaces via the multi_workspace (simulates clicking - // a workspace header) should clear focused_thread. 
- multi_workspace.update_in(cx, |mw, window, cx| { - let workspace = mw.workspaces().find(|w| *w == &workspace_b).cloned(); - if let Some(workspace) = workspace { - mw.activate(workspace, window, cx); - } + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(main_project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Add agent panel to the main workspace. + let main_workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + add_agent_panel(&main_workspace, cx); + + // Open the linked worktree as a separate workspace. + let wt_workspace = multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(worktree_project.clone(), window, cx) }); + add_agent_panel(&wt_workspace, cx); cx.run_until_parked(); - sidebar.read_with(cx, |sidebar, _cx| { - assert_active_thread( - sidebar, - &session_id_b2, - "Switching workspace should seed focused_thread from the new active panel", - ); - assert!( - has_thread_entry(sidebar, &session_id_b2), - "The seeded thread should be present in the entries" + // Both workspaces should share the same project group key [/project]. + multi_workspace.read_with(cx, |mw, _cx| { + assert_eq!( + mw.project_group_keys().count(), + 1, + "should have 1 project group key before add" ); + assert_eq!(mw.workspaces().count(), 2, "should have 2 workspaces"); }); - // ── 8. Focusing the agent panel thread keeps focused_thread ──── - // Workspace B still has session_id_b2 loaded in the agent panel. - // Clicking into the thread (simulated by focusing its view) should - // keep focused_thread since it was already seeded on workspace switch. - panel_b.update_in(cx, |panel, window, cx| { - if let Some(thread_view) = panel.active_conversation_view() { - thread_view.read(cx).focus_handle(cx).focus(window, cx); - } - }); - cx.run_until_parked(); + // Save threads against each workspace. 
+ save_named_thread_metadata("main-thread", "Main Thread", &main_project, cx).await; + save_named_thread_metadata("wt-thread", "Worktree Thread", &worktree_project, cx).await; - sidebar.read_with(cx, |sidebar, _cx| { - assert_active_thread( - sidebar, - &session_id_b2, - "Focusing the agent panel thread should set focused_thread", - ); - assert!( - has_thread_entry(sidebar, &session_id_b2), - "The focused thread should be present in the entries" + // Verify both threads are under the old key [/project]. + let old_key_paths = PathList::new(&[PathBuf::from("/project")]); + cx.update(|_window, cx| { + let store = ThreadMetadataStore::global(cx).read(cx); + assert_eq!( + store.entries_for_main_worktree_path(&old_key_paths).count(), + 2, + "should have 2 threads under old key before add" ); }); -} - -#[gpui::test] -async fn test_new_thread_button_works_after_adding_folder(cx: &mut TestAppContext) { - let project = init_test_project_with_agent_panel("/project-a", cx).await; - let fs = cx.update(|cx| ::global(cx)); - let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); - let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); - // Start a thread and send a message so it has history. - let connection = StubAgentConnection::new(); - connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( - acp::ContentChunk::new("Done".into()), - )]); - open_thread_with_connection(&panel, connection, cx); - send_message(&panel, cx); - let session_id = active_session_id(&panel, cx); - save_test_thread_metadata(&session_id, &project, cx).await; + sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); cx.run_until_parked(); - // Verify the thread appears in the sidebar. 
assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project-a]", " Hello *",] + vec![ + // + "v [project]", + " [~ Draft {wt-feature}] (active)", + " Worktree Thread {wt-feature}", + " Main Thread", + ] ); - // The "New Thread" button should NOT be in "active/draft" state - // because the panel has a thread with messages. - sidebar.read_with(cx, |sidebar, _cx| { - assert!( - matches!(&sidebar.active_entry, Some(ActiveEntry::Thread { .. })), - "Panel has a thread with messages, so active_entry should be Thread, got {:?}", - sidebar.active_entry, - ); - }); - - // Now add a second folder to the workspace, changing the path_list. - fs.as_fake() - .insert_tree("/project-b", serde_json::json!({ "src": {} })) - .await; - project + // Add /other-project as a folder to the main workspace. + main_project .update(cx, |project, cx| { - project.find_or_create_worktree("/project-b", true, cx) + project.find_or_create_worktree("/other-project", true, cx) }) .await .expect("should add worktree"); cx.run_until_parked(); - // The workspace path_list is now [project-a, project-b]. The active - // thread's metadata was re-saved with the new paths by the agent panel's - // project subscription, so it stays visible under the updated group. - // The old [project-a] group persists in the sidebar (empty) because - // project_group_keys is append-only. + // The linked worktree workspace should have gotten the new folder too. + let wt_worktree_count = + worktree_project.read_with(cx, |project, cx| project.visible_worktrees(cx).count()); assert_eq!( - visible_entries_as_strings(&sidebar, cx), - vec![ - "v [project-a, project-b]", // - " Hello *", - "v [project-a]", - ] + wt_worktree_count, 2, + "linked worktree project should have gotten the new folder" ); - // The "New Thread" button must still be clickable (not stuck in - // "active/draft" state). Verify that `active_thread_is_draft` is - // false — the panel still has the old thread with messages. 
- sidebar.read_with(cx, |sidebar, _cx| { - assert!( - matches!(&sidebar.active_entry, Some(ActiveEntry::Thread { .. })), - "After adding a folder the panel still has a thread with messages, \ - so active_entry should be Thread, got {:?}", - sidebar.active_entry, + // Both workspaces should still exist under one key. + multi_workspace.read_with(cx, |mw, _cx| { + assert_eq!(mw.workspaces().count(), 2, "both workspaces should survive"); + assert_eq!( + mw.project_group_keys().count(), + 1, + "should still have 1 project group key" ); }); - // Actually click "New Thread" by calling create_new_thread and - // verify a new draft is created. - let workspace = multi_workspace.read_with(cx, |mw, _cx| mw.workspace().clone()); - sidebar.update_in(cx, |sidebar, window, cx| { - sidebar.create_new_thread(&workspace, window, cx); + // Threads should have been migrated to the new key. + let new_key_paths = + PathList::new(&[PathBuf::from("/other-project"), PathBuf::from("/project")]); + cx.update(|_window, cx| { + let store = ThreadMetadataStore::global(cx).read(cx); + assert_eq!( + store.entries_for_main_worktree_path(&old_key_paths).count(), + 0, + "should have 0 threads under old key after migration" + ); + assert_eq!( + store.entries_for_main_worktree_path(&new_key_paths).count(), + 2, + "should have 2 threads under new key after migration" + ); }); + + // Both threads should still be visible in the sidebar. + sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx)); cx.run_until_parked(); - // After creating a new thread, the panel should now be in draft - // state (no messages on the new thread). 
- sidebar.read_with(cx, |sidebar, _cx| { - assert_active_draft( - sidebar, - &workspace, - "After creating a new thread active_entry should be Draft", - ); - }); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + // + "v [other-project, project]", + " [~ Draft {project:wt-feature}] (active)", + " Worktree Thread {project:wt-feature}", + " Main Thread", + ] + ); } #[gpui::test] @@ -2500,7 +3418,11 @@ async fn test_cmd_n_shows_new_thread_entry(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Hello *"] + vec![ + // + "v [my-project]", + " Hello * (active)", + ] ); // Simulate cmd-n @@ -2515,7 +3437,12 @@ async fn test_cmd_n_shows_new_thread_entry(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " [~ Draft]", " Hello *"], + vec![ + // + "v [my-project]", + " [~ Draft] (active)", + " Hello *", + ], "After Cmd-N the sidebar should show a highlighted Draft entry" ); @@ -2548,7 +3475,11 @@ async fn test_draft_with_server_session_shows_as_draft(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " Hello *"] + vec![ + // + "v [my-project]", + " Hello * (active)", + ] ); // Open a new draft thread via a server connection. 
This gives the @@ -2560,7 +3491,12 @@ async fn test_draft_with_server_session_shows_as_draft(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " [~ Draft]", " Hello *"], + vec![ + // + "v [my-project]", + " [~ Draft] (active)", + " Hello *", + ], ); let workspace = multi_workspace.read_with(cx, |mw, _cx| mw.workspace().clone()); @@ -2654,7 +3590,11 @@ async fn test_cmd_n_shows_new_thread_entry_in_absorbed_worktree(cx: &mut TestApp assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project]", " Hello {wt-feature-a} *"] + vec![ + // + "v [project]", + " Hello {wt-feature-a} * (active)", + ] ); // Simulate Cmd-N in the worktree workspace. @@ -2669,9 +3609,10 @@ async fn test_cmd_n_shows_new_thread_entry_in_absorbed_worktree(cx: &mut TestApp assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [project]", - " [~ Draft {wt-feature-a}]", - " Hello {wt-feature-a} *" + " [~ Draft {wt-feature-a}] (active)", + " Hello {wt-feature-a} *", ], "After Cmd-N in an absorbed worktree, the sidebar should show \ a highlighted Draft entry under the main repo header" @@ -2746,7 +3687,11 @@ async fn test_search_matches_worktree_name(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project]", " Fix Bug {rosewood} <== selected"], + vec![ + // + "v [project]", + " Fix Bug {rosewood} <== selected", + ], ); } @@ -2767,16 +3712,28 @@ async fn test_git_worktree_added_live_updates_sidebar(cx: &mut TestAppContext) { cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - // Save a thread against a worktree path that doesn't exist yet. - save_named_thread_metadata("wt-thread", "Worktree Thread", &worktree_project, cx).await; + // Save a thread against a worktree path with the correct main + // worktree association (as if the git state had been resolved). 
+ save_thread_metadata_with_main_paths( + "wt-thread", + "Worktree Thread", + PathList::new(&[PathBuf::from("/wt/rosewood")]), + PathList::new(&[PathBuf::from("/project")]), + cx, + ); multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); - // Thread is not visible yet — no worktree knows about this path. + // Thread is visible because its main_worktree_paths match the group. + // The chip name is derived from the path even before git discovery. assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project]"] + vec![ + // + "v [project]", + " Worktree Thread {rosewood}", + ] ); // Now add the worktree to the git state and trigger a rescan. @@ -2797,7 +3754,11 @@ async fn test_git_worktree_added_live_updates_sidebar(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project]", " Worktree Thread {rosewood}",] + vec![ + // + "v [project]", + " Worktree Thread {rosewood}", + ] ); } @@ -2867,6 +3828,7 @@ async fn test_two_worktree_workspaces_absorbed_when_main_added(cx: &mut TestAppC assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [project]", " Thread A {wt-feature-a}", " Thread B {wt-feature-b}", @@ -2888,6 +3850,7 @@ async fn test_two_worktree_workspaces_absorbed_when_main_added(cx: &mut TestAppC assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [project]", " Thread A {wt-feature-a}", " Thread B {wt-feature-b}", @@ -2963,6 +3926,7 @@ async fn test_threadless_workspace_shows_new_thread_with_worktree_chip(cx: &mut assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [project]", " [+ New Thread {wt-feature-b}]", " Thread A {wt-feature-a}", @@ -3042,8 +4006,9 @@ async fn test_multi_worktree_thread_shows_multiple_chips(cx: &mut TestAppContext assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [project_a, project_b]", - " Cross Worktree Thread {olivetti}, {selectric}", + " Cross Worktree Thread {project_a:olivetti}, 
{project_b:selectric}", ] ); } @@ -3115,6 +4080,7 @@ async fn test_same_named_worktree_chips_are_deduplicated(cx: &mut TestAppContext assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [project_a, project_b]", " Same Branch Thread {olivetti}", ] @@ -3219,8 +4185,9 @@ async fn test_absorbed_worktree_running_thread_shows_live_status(cx: &mut TestAp assert_eq!( entries, vec![ + // "v [project]", - " [~ Draft]", + " [~ Draft] (active)", " Hello {wt-feature-a} * (running)", ] ); @@ -3306,8 +4273,9 @@ async fn test_absorbed_worktree_completion_triggers_notification(cx: &mut TestAp assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [project]", - " [~ Draft]", + " [~ Draft] (active)", " Hello {wt-feature-a} * (running)", ] ); @@ -3317,7 +4285,12 @@ async fn test_absorbed_worktree_completion_triggers_notification(cx: &mut TestAp assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project]", " [~ Draft]", " Hello {wt-feature-a} * (!)",] + vec![ + // + "v [project]", + " [~ Draft] (active)", + " Hello {wt-feature-a} * (!)", + ] ); } @@ -3373,7 +4346,11 @@ async fn test_clicking_worktree_thread_opens_workspace_when_none_exists(cx: &mut // Thread should appear under the main repo with a worktree chip. assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project]", " WT Thread {wt-feature-a}"], + vec![ + // + "v [project]", + " WT Thread {wt-feature-a}", + ], ); // Only 1 workspace should exist. 
@@ -3462,7 +4439,11 @@ async fn test_clicking_worktree_thread_does_not_briefly_render_as_separate_proje assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project]", " WT Thread {wt-feature-a}"], + vec![ + // + "v [project]", + " WT Thread {wt-feature-a}", + ], ); focus_sidebar(&sidebar, cx); @@ -3699,8 +4680,9 @@ async fn test_activate_archived_thread_with_saved_paths_activates_matching_works title: "Archived Thread".into(), updated_at: Utc::now(), created_at: None, - folder_paths: PathList::new(&[PathBuf::from("/project-b")]), - main_worktree_paths: PathList::default(), + worktree_paths: ThreadWorktreePaths::from_folder_paths(&PathList::new(&[ + PathBuf::from("/project-b"), + ])), archived: false, remote_connection: None, }, @@ -3713,7 +4695,7 @@ async fn test_activate_archived_thread_with_saved_paths_activates_matching_works assert_eq!( multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()), workspace_b, - "should have activated the workspace matching the saved path_list" + "should have switched to the workspace matching the saved paths" ); } @@ -3765,8 +4747,9 @@ async fn test_activate_archived_thread_cwd_fallback_with_matching_workspace( title: "CWD Thread".into(), updated_at: Utc::now(), created_at: None, - folder_paths: PathList::new(&[std::path::PathBuf::from("/project-b")]), - main_worktree_paths: PathList::default(), + worktree_paths: ThreadWorktreePaths::from_folder_paths(&PathList::new(&[ + std::path::PathBuf::from("/project-b"), + ])), archived: false, remote_connection: None, }, @@ -3829,8 +4812,7 @@ async fn test_activate_archived_thread_no_paths_no_cwd_uses_active_workspace( title: "Contextless Thread".into(), updated_at: Utc::now(), created_at: None, - folder_paths: PathList::default(), - main_worktree_paths: PathList::default(), + worktree_paths: ThreadWorktreePaths::default(), archived: false, remote_connection: None, }, @@ -3885,8 +4867,7 @@ async fn test_activate_archived_thread_saved_paths_opens_new_workspace(cx: &mut 
title: "New WS Thread".into(), updated_at: Utc::now(), created_at: None, - folder_paths: path_list_b, - main_worktree_paths: PathList::default(), + worktree_paths: ThreadWorktreePaths::from_folder_paths(&path_list_b), archived: false, remote_connection: None, }, @@ -3940,8 +4921,9 @@ async fn test_activate_archived_thread_reuses_workspace_in_another_window(cx: &m title: "Cross Window Thread".into(), updated_at: Utc::now(), created_at: None, - folder_paths: PathList::new(&[PathBuf::from("/project-b")]), - main_worktree_paths: PathList::default(), + worktree_paths: ThreadWorktreePaths::from_folder_paths(&PathList::new(&[ + PathBuf::from("/project-b"), + ])), archived: false, remote_connection: None, }, @@ -4018,8 +5000,9 @@ async fn test_activate_archived_thread_reuses_workspace_in_another_window_with_t title: "Cross Window Thread".into(), updated_at: Utc::now(), created_at: None, - folder_paths: PathList::new(&[PathBuf::from("/project-b")]), - main_worktree_paths: PathList::default(), + worktree_paths: ThreadWorktreePaths::from_folder_paths(&PathList::new(&[ + PathBuf::from("/project-b"), + ])), archived: false, remote_connection: None, }, @@ -4099,8 +5082,9 @@ async fn test_activate_archived_thread_prefers_current_window_for_matching_paths title: "Current Window Thread".into(), updated_at: Utc::now(), created_at: None, - folder_paths: PathList::new(&[PathBuf::from("/project-a")]), - main_worktree_paths: PathList::default(), + worktree_paths: ThreadWorktreePaths::from_folder_paths(&PathList::new(&[ + PathBuf::from("/project-a"), + ])), archived: false, remote_connection: None, }, @@ -4512,6 +5496,7 @@ async fn test_linked_worktree_threads_not_duplicated_across_groups(cx: &mut Test assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ + // "v [other, project]", "v [project]", " Worktree Thread {wt-feature-a}", @@ -6020,8 +7005,9 @@ async fn test_legacy_thread_with_canonical_path_opens_main_repo_workspace(cx: &m title: "Legacy Main Thread".into(), updated_at: 
chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), created_at: None, - folder_paths: PathList::new(&[PathBuf::from("/project")]), - main_worktree_paths: PathList::default(), + worktree_paths: ThreadWorktreePaths::from_folder_paths(&PathList::new(&[ + PathBuf::from("/project"), + ])), archived: false, remote_connection: None, }; @@ -6300,19 +7286,23 @@ mod property_test { SwitchToThread { index: usize }, SwitchToProjectGroup { index: usize }, AddLinkedWorktree { project_group_index: usize }, + AddWorktreeToProject { project_group_index: usize }, + RemoveWorktreeFromProject { project_group_index: usize }, } - // Distribution (out of 20 slots): - // SaveThread: 5 slots (~25%) - // SaveWorktreeThread: 2 slots (~10%) - // ToggleAgentPanel: 1 slot (~5%) - // CreateDraftThread: 1 slot (~5%) - // AddProject: 1 slot (~5%) - // ArchiveThread: 2 slots (~10%) - // SwitchToThread: 2 slots (~10%) - // SwitchToProjectGroup: 2 slots (~10%) - // AddLinkedWorktree: 4 slots (~20%) - const DISTRIBUTION_SLOTS: u32 = 20; + // Distribution (out of 24 slots): + // SaveThread: 5 slots (~21%) + // SaveWorktreeThread: 2 slots (~8%) + // ToggleAgentPanel: 1 slot (~4%) + // CreateDraftThread: 1 slot (~4%) + // AddProject: 1 slot (~4%) + // ArchiveThread: 2 slots (~8%) + // SwitchToThread: 2 slots (~8%) + // SwitchToProjectGroup: 2 slots (~8%) + // AddLinkedWorktree: 4 slots (~17%) + // AddWorktreeToProject: 2 slots (~8%) + // RemoveWorktreeFromProject: 2 slots (~8%) + const DISTRIBUTION_SLOTS: u32 = 24; impl TestState { fn generate_operation(&self, raw: u32, project_group_count: usize) -> Operation { @@ -6354,6 +7344,18 @@ mod property_test { 16..=19 => Operation::SaveThread { project_group_index: extra % project_group_count, }, + 20..=21 if project_group_count > 0 => Operation::AddWorktreeToProject { + project_group_index: extra % project_group_count, + }, + 20..=21 => Operation::SaveThread { + project_group_index: extra % project_group_count, + }, + 22..=23 if 
project_group_count > 0 => Operation::RemoveWorktreeFromProject { + project_group_index: extra % project_group_count, + }, + 22..=23 => Operation::SaveThread { + project_group_index: extra % project_group_count, + }, _ => unreachable!(), } } @@ -6376,8 +7378,8 @@ mod property_test { title, updated_at, created_at: None, - folder_paths: path_list, - main_worktree_paths, + worktree_paths: ThreadWorktreePaths::from_path_lists(main_worktree_paths, path_list) + .unwrap(), archived: false, remote_connection: None, }; @@ -6612,6 +7614,57 @@ mod property_test { main_workspace_path: main_path.clone(), }); } + Operation::AddWorktreeToProject { + project_group_index, + } => { + let workspace = multi_workspace.read_with(cx, |mw, cx| { + let key = mw.project_group_keys().nth(project_group_index).unwrap(); + mw.workspaces_for_project_group(key, cx).next().cloned() + }); + let Some(workspace) = workspace else { return }; + let project = workspace.read_with(cx, |ws, _| ws.project().clone()); + + let new_path = state.next_workspace_path(); + state + .fs + .insert_tree(&new_path, serde_json::json!({ ".git": {}, "src": {} })) + .await; + + let result = project + .update(cx, |project, cx| { + project.find_or_create_worktree(&new_path, true, cx) + }) + .await; + if result.is_err() { + return; + } + cx.run_until_parked(); + } + Operation::RemoveWorktreeFromProject { + project_group_index, + } => { + let workspace = multi_workspace.read_with(cx, |mw, cx| { + let key = mw.project_group_keys().nth(project_group_index).unwrap(); + mw.workspaces_for_project_group(key, cx).next().cloned() + }); + let Some(workspace) = workspace else { return }; + let project = workspace.read_with(cx, |ws, _| ws.project().clone()); + + let worktree_count = project.read_with(cx, |p, cx| p.visible_worktrees(cx).count()); + if worktree_count <= 1 { + return; + } + + let worktree_id = project.read_with(cx, |p, cx| { + p.visible_worktrees(cx).last().map(|wt| wt.read(cx).id()) + }); + if let Some(worktree_id) = 
worktree_id { + project.update(cx, |project, cx| { + project.remove_worktree(worktree_id, cx); + }); + cx.run_until_parked(); + } + } } } @@ -6636,9 +7689,35 @@ mod property_test { fn validate_sidebar_properties(sidebar: &Sidebar, cx: &App) -> anyhow::Result<()> { verify_every_group_in_multiworkspace_is_shown(sidebar, cx)?; + verify_no_duplicate_threads(sidebar)?; verify_all_threads_are_shown(sidebar, cx)?; verify_active_state_matches_current_workspace(sidebar, cx)?; verify_all_workspaces_are_reachable(sidebar, cx)?; + verify_workspace_group_key_integrity(sidebar, cx)?; + Ok(()) + } + + fn verify_no_duplicate_threads(sidebar: &Sidebar) -> anyhow::Result<()> { + let mut seen: HashSet = HashSet::default(); + let mut duplicates: Vec<(acp::SessionId, String)> = Vec::new(); + + for entry in &sidebar.contents.entries { + if let Some(session_id) = entry.session_id() { + if !seen.insert(session_id.clone()) { + let title = match entry { + ListEntry::Thread(thread) => thread.metadata.title.to_string(), + _ => "".to_string(), + }; + duplicates.push((session_id.clone(), title)); + } + } + } + + anyhow::ensure!( + duplicates.is_empty(), + "threads appear more than once in sidebar: {:?}", + duplicates, + ); Ok(()) } @@ -6890,6 +7969,15 @@ mod property_test { Ok(()) } + fn verify_workspace_group_key_integrity(sidebar: &Sidebar, cx: &App) -> anyhow::Result<()> { + let Some(multi_workspace) = sidebar.multi_workspace.upgrade() else { + anyhow::bail!("sidebar should still have an associated multi-workspace"); + }; + multi_workspace + .read(cx) + .assert_project_group_key_integrity(cx) + } + #[gpui::property_test(config = ProptestConfig { cases: 50, ..Default::default() @@ -7110,8 +8198,11 @@ async fn test_remote_project_integration_does_not_briefly_render_as_separate_pro title: "Worktree Thread".into(), updated_at: chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 1).unwrap(), created_at: None, - folder_paths: PathList::new(&[PathBuf::from("/project-wt-1")]), - 
main_worktree_paths, + worktree_paths: ThreadWorktreePaths::from_path_lists( + main_worktree_paths, + PathList::new(&[PathBuf::from("/project-wt-1")]), + ) + .unwrap(), archived: false, remote_connection: None, }; diff --git a/crates/workspace/src/multi_workspace.rs b/crates/workspace/src/multi_workspace.rs index f4e8b47399e1420a4b01d380ad4a6532a0934a2d..9ef81194639e625b4944c48be41b7518fee0bbe3 100644 --- a/crates/workspace/src/multi_workspace.rs +++ b/crates/workspace/src/multi_workspace.rs @@ -101,6 +101,14 @@ pub enum MultiWorkspaceEvent { ActiveWorkspaceChanged, WorkspaceAdded(Entity), WorkspaceRemoved(EntityId), + WorktreePathAdded { + old_main_paths: PathList, + added_path: PathBuf, + }, + WorktreePathRemoved { + old_main_paths: PathList, + removed_path: PathBuf, + }, } pub enum SidebarEvent { @@ -302,7 +310,7 @@ pub struct MultiWorkspace { workspaces: Vec>, active_workspace: ActiveWorkspace, project_group_keys: Vec, - provisional_project_group_keys: HashMap, + workspace_group_keys: HashMap, sidebar: Option>, sidebar_open: bool, sidebar_overlay: Option, @@ -355,7 +363,7 @@ impl MultiWorkspace { Self { window_id: window.window_handle().window_id(), project_group_keys: Vec::new(), - provisional_project_group_keys: HashMap::default(), + workspace_group_keys: HashMap::default(), workspaces: Vec::new(), active_workspace: ActiveWorkspace::Transient(workspace), sidebar: None, @@ -559,19 +567,11 @@ impl MultiWorkspace { cx.subscribe_in(&project, window, { let workspace = workspace.downgrade(); move |this, _project, event, _window, cx| match event { - project::Event::WorktreeAdded(_) | project::Event::WorktreeRemoved(_) => { + project::Event::WorktreeAdded(_) + | project::Event::WorktreeRemoved(_) + | project::Event::WorktreeUpdatedRootRepoCommonDir(_) => { if let Some(workspace) = workspace.upgrade() { - this.add_project_group_key(workspace.read(cx).project_group_key(cx)); - } - } - project::Event::WorktreeUpdatedRootRepoCommonDir(_) => { - if let Some(workspace) = 
workspace.upgrade() { - this.maybe_clear_provisional_project_group_key(&workspace, cx); - this.add_project_group_key( - this.project_group_key_for_workspace(&workspace, cx), - ); - this.remove_stale_project_group_keys(cx); - cx.notify(); + this.handle_workspace_key_change(&workspace, cx); } } _ => {} @@ -587,7 +587,124 @@ impl MultiWorkspace { .detach(); } - pub fn add_project_group_key(&mut self, project_group_key: ProjectGroupKey) { + fn handle_workspace_key_change( + &mut self, + workspace: &Entity, + cx: &mut Context, + ) { + let workspace_id = workspace.entity_id(); + let old_key = self.project_group_key_for_workspace(workspace, cx); + let new_key = workspace.read(cx).project_group_key(cx); + + if new_key.path_list().paths().is_empty() || old_key == new_key { + return; + } + + let active_workspace = self.workspace().clone(); + + self.set_workspace_group_key(workspace, new_key.clone()); + + let changed_root_paths = workspace.read(cx).root_paths(cx); + let old_paths = old_key.path_list().paths(); + let new_paths = new_key.path_list().paths(); + + // Remove workspaces that already had the new key and have the same + // root paths (true duplicates that this workspace is replacing). + // + // NOTE: These are dropped without prompting for unsaved changes because + // the user explicitly added a folder that makes this workspace + // identical to the duplicate — they are intentionally overwriting it. + let duplicate_workspaces: Vec> = self + .workspaces + .iter() + .filter(|ws| { + ws.entity_id() != workspace_id + && self.project_group_key_for_workspace(ws, cx) == new_key + && ws.read(cx).root_paths(cx) == changed_root_paths + }) + .cloned() + .collect(); + + if duplicate_workspaces.contains(&active_workspace) { + // The active workspace is among the duplicates — drop the + // incoming workspace instead so the user stays where they are. 
+ self.detach_workspace(workspace, cx); + self.workspaces.retain(|w| w != workspace); + } else { + for ws in &duplicate_workspaces { + self.detach_workspace(ws, cx); + self.workspaces.retain(|w| w != ws); + } + } + + // Propagate folder adds/removes to linked worktree siblings + // (different root paths, same old key) so they stay in the group. + let group_workspaces: Vec> = self + .workspaces + .iter() + .filter(|ws| { + ws.entity_id() != workspace_id + && self.project_group_key_for_workspace(ws, cx) == old_key + }) + .cloned() + .collect(); + + for workspace in &group_workspaces { + // Pre-set this to stop later WorktreeAdded events from triggering + self.set_workspace_group_key(&workspace, new_key.clone()); + + let project = workspace.read(cx).project().clone(); + + for added_path in new_paths.iter().filter(|p| !old_paths.contains(p)) { + project + .update(cx, |project, cx| { + project.find_or_create_worktree(added_path, true, cx) + }) + .detach_and_log_err(cx); + } + + for removed_path in old_paths.iter().filter(|p| !new_paths.contains(p)) { + project.update(cx, |project, cx| { + project.remove_worktree_for_main_worktree_path(removed_path, cx); + }); + } + } + + // Restore the active workspace after removals may have shifted + // the index. If the previously active workspace was removed, + // fall back to the workspace whose key just changed. 
+ if let ActiveWorkspace::Persistent(_) = &self.active_workspace { + let target = if self.workspaces.contains(&active_workspace) { + &active_workspace + } else { + workspace + }; + if let Some(new_index) = self.workspaces.iter().position(|ws| ws == target) { + self.active_workspace = ActiveWorkspace::Persistent(new_index); + } + } + + self.remove_stale_project_group_keys(cx); + + let old_main_paths = old_key.path_list().clone(); + for added_path in new_paths.iter().filter(|p| !old_paths.contains(p)) { + cx.emit(MultiWorkspaceEvent::WorktreePathAdded { + old_main_paths: old_main_paths.clone(), + added_path: added_path.clone(), + }); + } + for removed_path in old_paths.iter().filter(|p| !new_paths.contains(p)) { + cx.emit(MultiWorkspaceEvent::WorktreePathRemoved { + old_main_paths: old_main_paths.clone(), + removed_path: removed_path.clone(), + }); + } + + self.serialize(cx); + cx.notify(); + } + + fn add_project_group_key(&mut self, project_group_key: ProjectGroupKey) { if project_group_key.path_list().paths().is_empty() { return; } @@ -598,12 +715,12 @@ impl MultiWorkspace { self.project_group_keys.insert(0, project_group_key); } - pub fn set_provisional_project_group_key( + pub(crate) fn set_workspace_group_key( &mut self, workspace: &Entity, project_group_key: ProjectGroupKey, ) { - self.provisional_project_group_keys + self.workspace_group_keys .insert(workspace.entity_id(), project_group_key.clone()); self.add_project_group_key(project_group_key); } @@ -613,28 +730,12 @@ impl MultiWorkspace { workspace: &Entity, cx: &App, ) -> ProjectGroupKey { - self.provisional_project_group_keys + self.workspace_group_keys .get(&workspace.entity_id()) .cloned() .unwrap_or_else(|| workspace.read(cx).project_group_key(cx)) } - fn maybe_clear_provisional_project_group_key( - &mut self, - workspace: &Entity, - cx: &App, - ) { - let live_key = workspace.read(cx).project_group_key(cx); - if self - .provisional_project_group_keys - .get(&workspace.entity_id()) - .is_some_and(|key| 
*key == live_key) - { - self.provisional_project_group_keys - .remove(&workspace.entity_id()); - } - } - fn remove_stale_project_group_keys(&mut self, cx: &App) { let workspace_keys: HashSet = self .workspaces @@ -1045,7 +1146,6 @@ impl MultiWorkspace { self.promote_transient(old, cx); } else { self.detach_workspace(&old, cx); - cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(old.entity_id())); } } } else { @@ -1056,7 +1156,6 @@ impl MultiWorkspace { }); if let Some(old) = self.active_workspace.set_transient(workspace) { self.detach_workspace(&old, cx); - cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(old.entity_id())); } } @@ -1083,7 +1182,7 @@ impl MultiWorkspace { /// Returns the index of the newly inserted workspace. fn promote_transient(&mut self, workspace: Entity, cx: &mut Context) -> usize { let project_group_key = self.project_group_key_for_workspace(&workspace, cx); - self.add_project_group_key(project_group_key); + self.set_workspace_group_key(&workspace, project_group_key); self.workspaces.push(workspace.clone()); cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace)); self.workspaces.len() - 1 @@ -1099,10 +1198,10 @@ impl MultiWorkspace { for workspace in std::mem::take(&mut self.workspaces) { if workspace != active { self.detach_workspace(&workspace, cx); - cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(workspace.entity_id())); } } self.project_group_keys.clear(); + self.workspace_group_keys.clear(); self.active_workspace = ActiveWorkspace::Transient(active); cx.notify(); } @@ -1128,7 +1227,7 @@ impl MultiWorkspace { workspace.set_multi_workspace(weak_self, cx); }); - self.add_project_group_key(project_group_key); + self.set_workspace_group_key(&workspace, project_group_key); self.workspaces.push(workspace.clone()); cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace)); cx.notify(); @@ -1136,10 +1235,12 @@ impl MultiWorkspace { } } - /// Clears session state and DB binding for a workspace that is being - /// removed or replaced. 
The DB row is preserved so the workspace still - /// appears in the recent-projects list. + /// Detaches a workspace: clears session state, DB binding, cached + /// group key, and emits `WorkspaceRemoved`. The DB row is preserved + /// so the workspace still appears in the recent-projects list. fn detach_workspace(&mut self, workspace: &Entity, cx: &mut Context) { + self.workspace_group_keys.remove(&workspace.entity_id()); + cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(workspace.entity_id())); workspace.update(cx, |workspace, _cx| { workspace.session_id.take(); workspace._schedule_serialize_workspace.take(); @@ -1313,6 +1414,46 @@ impl MultiWorkspace { tasks } + #[cfg(any(test, feature = "test-support"))] + pub fn assert_project_group_key_integrity(&self, cx: &App) -> anyhow::Result<()> { + let stored_keys: HashSet<&ProjectGroupKey> = self.project_group_keys().collect(); + + let workspace_group_keys: HashSet<&ProjectGroupKey> = + self.workspace_group_keys.values().collect(); + let extra_keys = &workspace_group_keys - &stored_keys; + anyhow::ensure!( + extra_keys.is_empty(), + "workspace_group_keys values not in project_group_keys: {:?}", + extra_keys, + ); + + let cached_ids: HashSet = self.workspace_group_keys.keys().copied().collect(); + let workspace_ids: HashSet = + self.workspaces.iter().map(|ws| ws.entity_id()).collect(); + anyhow::ensure!( + cached_ids == workspace_ids, + "workspace_group_keys entity IDs don't match workspaces.\n\ + only in cache: {:?}\n\ + only in workspaces: {:?}", + &cached_ids - &workspace_ids, + &workspace_ids - &cached_ids, + ); + + for workspace in self.workspaces() { + let live_key = workspace.read(cx).project_group_key(cx); + let cached_key = &self.workspace_group_keys[&workspace.entity_id()]; + anyhow::ensure!( + *cached_key == live_key, + "workspace {:?} has live key {:?} but cached key {:?}", + workspace.entity_id(), + live_key, + cached_key, + ); + } + + Ok(()) + } + #[cfg(any(test, feature = "test-support"))] pub fn 
set_random_database_id(&mut self, cx: &mut Context) { self.workspace().update(cx, |workspace, _cx| { @@ -1471,7 +1612,6 @@ impl MultiWorkspace { for workspace in &removed_workspaces { this.detach_workspace(workspace, cx); - cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(workspace.entity_id())); } let removed_any = !removed_workspaces.is_empty(); diff --git a/crates/workspace/src/multi_workspace_tests.rs b/crates/workspace/src/multi_workspace_tests.rs index 259346fe097826b3dcc19fb8fad0b8f07ddd0488..9cab28c0ca4ab34b2189985e898285dd82dd4f32 100644 --- a/crates/workspace/src/multi_workspace_tests.rs +++ b/crates/workspace/src/multi_workspace_tests.rs @@ -185,157 +185,3 @@ async fn test_project_group_keys_duplicate_not_added(cx: &mut TestAppContext) { ); }); } - -#[gpui::test] -async fn test_project_group_keys_on_worktree_added(cx: &mut TestAppContext) { - init_test(cx); - let fs = FakeFs::new(cx.executor()); - fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; - fs.insert_tree("/root_b", json!({ "file.txt": "" })).await; - let project = Project::test(fs, ["/root_a".as_ref()], cx).await; - - let initial_key = project.read_with(cx, |p, cx| p.project_group_key(cx)); - - let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); - - multi_workspace.update(cx, |mw, cx| { - mw.open_sidebar(cx); - }); - - // Add a second worktree to the same project. 
- let (worktree, _) = project - .update(cx, |project, cx| { - project.find_or_create_worktree("/root_b", true, cx) - }) - .await - .unwrap(); - worktree - .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete()) - .await; - cx.run_until_parked(); - - let updated_key = project.read_with(cx, |p, cx| p.project_group_key(cx)); - assert_ne!( - initial_key, updated_key, - "key should change after adding a worktree" - ); - - multi_workspace.read_with(cx, |mw, _cx| { - let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); - assert_eq!( - keys.len(), - 2, - "should have both the original and updated key" - ); - assert_eq!(*keys[0], updated_key); - assert_eq!(*keys[1], initial_key); - }); -} - -#[gpui::test] -async fn test_project_group_keys_on_worktree_removed(cx: &mut TestAppContext) { - init_test(cx); - let fs = FakeFs::new(cx.executor()); - fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; - fs.insert_tree("/root_b", json!({ "file.txt": "" })).await; - let project = Project::test(fs, ["/root_a".as_ref(), "/root_b".as_ref()], cx).await; - - let initial_key = project.read_with(cx, |p, cx| p.project_group_key(cx)); - - let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); - - multi_workspace.update(cx, |mw, cx| { - mw.open_sidebar(cx); - }); - - // Remove one worktree. 
- let worktree_b_id = project.read_with(cx, |project, cx| { - project - .worktrees(cx) - .find(|wt| wt.read(cx).root_name().as_unix_str() == "root_b") - .unwrap() - .read(cx) - .id() - }); - project.update(cx, |project, cx| { - project.remove_worktree(worktree_b_id, cx); - }); - cx.run_until_parked(); - - let updated_key = project.read_with(cx, |p, cx| p.project_group_key(cx)); - assert_ne!( - initial_key, updated_key, - "key should change after removing a worktree" - ); - - multi_workspace.read_with(cx, |mw, _cx| { - let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); - assert_eq!( - keys.len(), - 2, - "should accumulate both the original and post-removal key" - ); - assert_eq!(*keys[0], updated_key); - assert_eq!(*keys[1], initial_key); - }); -} - -#[gpui::test] -async fn test_project_group_keys_across_multiple_workspaces_and_worktree_changes( - cx: &mut TestAppContext, -) { - init_test(cx); - let fs = FakeFs::new(cx.executor()); - fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; - fs.insert_tree("/root_b", json!({ "file.txt": "" })).await; - fs.insert_tree("/root_c", json!({ "file.txt": "" })).await; - let project_a = Project::test(fs.clone(), ["/root_a".as_ref()], cx).await; - let project_b = Project::test(fs.clone(), ["/root_b".as_ref()], cx).await; - - let key_a = project_a.read_with(cx, |p, cx| p.project_group_key(cx)); - let key_b = project_b.read_with(cx, |p, cx| p.project_group_key(cx)); - - let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); - - multi_workspace.update(cx, |mw, cx| { - mw.open_sidebar(cx); - }); - - multi_workspace.update_in(cx, |mw, window, cx| { - mw.test_add_workspace(project_b, window, cx); - }); - - multi_workspace.read_with(cx, |mw, _cx| { - assert_eq!(mw.project_group_keys().count(), 2); - }); - - // Now add a worktree to project_a. This should produce a third key. 
- let (worktree, _) = project_a - .update(cx, |project, cx| { - project.find_or_create_worktree("/root_c", true, cx) - }) - .await - .unwrap(); - worktree - .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete()) - .await; - cx.run_until_parked(); - - let key_a_updated = project_a.read_with(cx, |p, cx| p.project_group_key(cx)); - assert_ne!(key_a, key_a_updated); - - multi_workspace.read_with(cx, |mw, _cx| { - let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); - assert_eq!( - keys.len(), - 3, - "should have key_a, key_b, and the updated key_a with root_c" - ); - assert_eq!(*keys[0], key_a_updated); - assert_eq!(*keys[1], key_b); - assert_eq!(*keys[2], key_a); - }); -} diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 81224c0e2db520a278bfb21429e211ba9a4f09ae..d40b7abae0c036a5cdd227ec8a547bd3c10b262c 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -9886,7 +9886,7 @@ async fn open_remote_project_inner( }); if let Some(project_group_key) = provisional_project_group_key.clone() { - multi_workspace.set_provisional_project_group_key(&new_workspace, project_group_key); + multi_workspace.set_workspace_group_key(&new_workspace, project_group_key); } multi_workspace.activate(new_workspace.clone(), window, cx); new_workspace From 60fac25960c95c20032f6a60a3a3beb620ff666d Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Fri, 10 Apr 2026 09:00:42 +0100 Subject: [PATCH 33/67] agent: Skip serializing empty fields in streaming edit file tool (#53510) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A or 
Added/Fixed/Improved ... --- crates/agent/src/tools/streaming_edit_file_tool.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/agent/src/tools/streaming_edit_file_tool.rs b/crates/agent/src/tools/streaming_edit_file_tool.rs index 47da35bbf25ad188f3f6b98e843b2955910bb7ac..c988fede454ff6e8b4dc327c81132224a8b87a49 100644 --- a/crates/agent/src/tools/streaming_edit_file_tool.rs +++ b/crates/agent/src/tools/streaming_edit_file_tool.rs @@ -189,9 +189,9 @@ pub enum StreamingEditFileToolOutput { }, Error { error: String, - #[serde(default)] + #[serde(default, skip_serializing_if = "Option::is_none")] input_path: Option, - #[serde(default)] + #[serde(default, skip_serializing_if = "String::is_empty")] diff: String, }, } From 23830d59469e906153c5ce9edbb89ff30bd87629 Mon Sep 17 00:00:00 2001 From: CanWang Date: Fri, 10 Apr 2026 16:05:39 +0800 Subject: [PATCH 34/67] Fix crash on startup when the X11 server supports XInput < 2.4 (#53582) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary - Fix crash on startup when the X11 server supports XInput < 2.4 (e.g. XInput 2.3) - Gesture event mask bits (pinch begin/update/end) are now only requested when the server advertises XInput >= 2.4 - Zed previously failed to open any window on affected systems, printing `Zed failed to open a window: X11 XiSelectEvents failed` ## Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable ## Problem On X11 systems where the XInput extension version is older than 2.4, Zed crashes immediately on startup with: ``` Zed failed to open a window: X11 XiSelectEvents failed. 
Caused by: X11 error X11Error { error_kind: Value, error_code: 2, sequence: 277, bad_value: 27, minor_opcode: 46, major_opcode: 131, extension_name: Some("XInputExtension"), request_name: Some("XISelectEvents") } ``` This makes Zed completely unusable on any X11 display server that only supports XInput 2.3 or earlier, which includes many current Ubuntu 20.04/22.04 systems, remote X11 sessions, and VNC/Xvfb setups. ### Root cause During window creation, `X11WindowState::new` calls `XISelectEvents` with an event mask that unconditionally includes gesture event bits (`GESTURE_PINCH_BEGIN`, `GESTURE_PINCH_UPDATE`, `GESTURE_PINCH_END`). These gesture events were introduced in **XInput 2.4**. When the X server only supports XInput 2.3 (or older), it does not recognize these mask bits and rejects the entire `XISelectEvents` request with a `BadValue` error. This is fatal because the error is propagated up and prevents the window from being created. A comment in the original code stated: > If the server only supports an older version, gesture events simply won't be delivered. This is incorrect. The X11 protocol does **not** silently ignore unknown mask bits in `XISelectEvents` — it rejects the whole request. ### How XInput version negotiation works The client calls `XIQueryVersion(2, 4)` to announce the highest version it supports. The server responds with the highest version **it** supports (e.g. `2.3`). The client is then responsible for not using features beyond the negotiated version. The existing code ignored the server's response and used 2.4 features unconditionally. ## Fix ### Approach Check the XInput version returned by the server. Only include gesture event mask bits in `XISelectEvents` when the negotiated version is >= 2.4. On older servers, basic input events (motion, button press/release, enter, leave) still work normally — only touchpad pinch gestures are unavailable. ### Changed files **`crates/gpui_linux/src/linux/x11/client.rs`** 1. 
Added `supports_xinput_gestures: bool` field to `X11ClientState`. 2. After the existing `xinput_xi_query_version(2, 4)` call, compute whether the server version is >= 2.4: ```rust let supports_xinput_gestures = xinput_version.major_version > 2 || (xinput_version.major_version == 2 && xinput_version.minor_version >= 4); ``` 3. Added an `info!` log line reporting the detected XInput version and gesture support status. 4. Pass `supports_xinput_gestures` through `open_window` into `X11Window::new`. **`crates/gpui_linux/src/linux/x11/window.rs`** 1. Added `supports_xinput_gestures: bool` parameter to both `X11Window::new` and `X11WindowState::new`. 2. The `XISelectEvents` call now builds the event mask conditionally: - Always includes: `MOTION`, `BUTTON_PRESS`, `BUTTON_RELEASE`, `ENTER`, `LEAVE` - Only when `supports_xinput_gestures` is true: `GESTURE_PINCH_BEGIN`, `GESTURE_PINCH_UPDATE`, `GESTURE_PINCH_END` ### What is NOT changed - The gesture event **handlers** in `client.rs` (`XinputGesturePinchBegin`, `XinputGesturePinchUpdate`, `XinputGesturePinchEnd`) are left as-is. They simply won't be triggered on servers without gesture support, since the events are never registered. - No behavioral change on systems with XInput >= 2.4 — gesture events continue to work exactly as before. ## Testing | Test | Before fix | After fix | |------|-----------|-----------| | `./target/release/zed .` on XInput 2.3 | Immediate crash (exit code 1) | Window opens successfully (runs until killed) | | XInput version detection | Version queried but response ignored | Version checked and logged | Verified on an X11 system with XInput 2.3 (X.Org 1.20.13, Ubuntu 20.04). 
## Test plan - [x] Build succeeds (`cargo build --release`) - [x] Zed launches and opens a window on XInput 2.3 system - [x] No regression on the basic input event path (motion, clicks, enter/leave still registered) - [ ] Verify gesture pinch events still work on a system with XInput >= 2.4 Release Notes: - Fixed Zed failing to start on X11 systems with XInput version older than 2.4, which includes many Linux distributions and remote desktop setups. --- crates/gpui_linux/src/linux/x11/client.rs | 17 ++++++++++++- crates/gpui_linux/src/linux/x11/window.rs | 31 +++++++++++++---------- 2 files changed, 34 insertions(+), 14 deletions(-) diff --git a/crates/gpui_linux/src/linux/x11/client.rs b/crates/gpui_linux/src/linux/x11/client.rs index 57871e6ef32b937a7a47662f8022293a57bc3fe2..bc7c9594734d4fae9a8dda4056bc02d515fbab48 100644 --- a/crates/gpui_linux/src/linux/x11/client.rs +++ b/crates/gpui_linux/src/linux/x11/client.rs @@ -214,6 +214,8 @@ pub struct X11ClientState { pointer_device_states: BTreeMap, + pub(crate) supports_xinput_gestures: bool, + pub(crate) common: LinuxCommon, pub(crate) clipboard: Clipboard, pub(crate) clipboard_item: Option, @@ -345,7 +347,8 @@ impl X11Client { // Announce to X server that XInput up to 2.4 is supported. // Version 2.4 is needed for gesture events (GesturePinchBegin/Update/End). - // If the server only supports an older version, gesture events simply won't be delivered. + // The server responds with the highest version it supports; if < 2.4, + // we must not request gesture event masks in XISelectEvents. let xinput_version = get_reply( || "XInput XiQueryVersion failed", xcb_connection.xinput_xi_query_version(2, 4), @@ -354,6 +357,14 @@ impl X11Client { xinput_version.major_version >= 2, "XInput version >= 2 required." 
); + let supports_xinput_gestures = xinput_version.major_version > 2 + || (xinput_version.major_version == 2 && xinput_version.minor_version >= 4); + log::info!( + "XInput version: {}.{}, gesture support: {}", + xinput_version.major_version, + xinput_version.minor_version, + supports_xinput_gestures, + ); let pointer_device_states = current_pointer_device_states(&xcb_connection, &BTreeMap::new()).unwrap_or_default(); @@ -535,6 +546,8 @@ impl X11Client { pointer_device_states, + supports_xinput_gestures, + clipboard, clipboard_item: None, xdnd_state: Xdnd::default(), @@ -1593,6 +1606,7 @@ impl LinuxClient for X11Client { let scale_factor = state.scale_factor; let appearance = state.common.appearance; let compositor_gpu = state.compositor_gpu.take(); + let supports_xinput_gestures = state.supports_xinput_gestures; let window = X11Window::new( handle, X11ClientStatePtr(Rc::downgrade(&self.0)), @@ -1608,6 +1622,7 @@ impl LinuxClient for X11Client { scale_factor, appearance, parent_window, + supports_xinput_gestures, )?; check_reply( || "Failed to set XdndAware property", diff --git a/crates/gpui_linux/src/linux/x11/window.rs b/crates/gpui_linux/src/linux/x11/window.rs index 1974cc0bb28f62da4d7dcb3e9fca92b6324470bb..f29ba49fb2498dd49a5f025aad4dc2584a8a8a42 100644 --- a/crates/gpui_linux/src/linux/x11/window.rs +++ b/crates/gpui_linux/src/linux/x11/window.rs @@ -423,6 +423,7 @@ impl X11WindowState { scale_factor: f32, appearance: WindowAppearance, parent_window: Option, + supports_xinput_gestures: bool, ) -> anyhow::Result { let x_screen_index = params .display_id @@ -660,25 +661,27 @@ impl X11WindowState { ), )?; + let mut xi_event_mask = xinput::XIEventMask::MOTION + | xinput::XIEventMask::BUTTON_PRESS + | xinput::XIEventMask::BUTTON_RELEASE + | xinput::XIEventMask::ENTER + | xinput::XIEventMask::LEAVE; + if supports_xinput_gestures { + // x11rb 0.13 doesn't define XIEventMask constants for gesture + // events, so we construct them from the event opcodes (each + // 
XInput event type N maps to mask bit N). + xi_event_mask |= + xinput::XIEventMask::from(1u32 << xinput::GESTURE_PINCH_BEGIN_EVENT) + | xinput::XIEventMask::from(1u32 << xinput::GESTURE_PINCH_UPDATE_EVENT) + | xinput::XIEventMask::from(1u32 << xinput::GESTURE_PINCH_END_EVENT); + } check_reply( || "X11 XiSelectEvents failed.", xcb.xinput_xi_select_events( x_window, &[xinput::EventMask { deviceid: XINPUT_ALL_DEVICE_GROUPS, - mask: vec![ - xinput::XIEventMask::MOTION - | xinput::XIEventMask::BUTTON_PRESS - | xinput::XIEventMask::BUTTON_RELEASE - | xinput::XIEventMask::ENTER - | xinput::XIEventMask::LEAVE - // x11rb 0.13 doesn't define XIEventMask constants for gesture - // events, so we construct them from the event opcodes (each - // XInput event type N maps to mask bit N). - | xinput::XIEventMask::from(1u32 << xinput::GESTURE_PINCH_BEGIN_EVENT) - | xinput::XIEventMask::from(1u32 << xinput::GESTURE_PINCH_UPDATE_EVENT) - | xinput::XIEventMask::from(1u32 << xinput::GESTURE_PINCH_END_EVENT), - ], + mask: vec![xi_event_mask], }], ), )?; @@ -855,6 +858,7 @@ impl X11Window { scale_factor: f32, appearance: WindowAppearance, parent_window: Option, + supports_xinput_gestures: bool, ) -> anyhow::Result { let ptr = X11WindowStatePtr { state: Rc::new(RefCell::new(X11WindowState::new( @@ -872,6 +876,7 @@ impl X11Window { scale_factor, appearance, parent_window, + supports_xinput_gestures, )?)), callbacks: Rc::new(RefCell::new(Callbacks::default())), xcb: xcb.clone(), From 5f10547cc3755a44251546dcbdd5f3c4e939861e Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Fri, 10 Apr 2026 01:36:15 -0700 Subject: [PATCH 35/67] Dismiss the context menu on interaction (#53599) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed 
behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --- crates/sidebar/src/sidebar.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs index 9c126929a4705de4d3ffc9e6472332e86a07c2e8..4d88ddeffdd6625768dd0207176c0984e9833a29 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -1759,6 +1759,7 @@ impl Sidebar { let menu = ContextMenu::build_persistent(window, cx, move |menu, _window, menu_cx| { + let weak_menu = menu_cx.weak_entity(); let mut menu = menu .header("Project Folders") .end_slot_action(Box::new(menu::EndSlot)); @@ -1771,6 +1772,7 @@ impl Sidebar { let path = path.clone(); let project_group_key = project_group_key.clone(); let multi_workspace = multi_workspace.clone(); + let weak_menu = weak_menu.clone(); menu = menu.entry_with_end_slot_on_hover( name.clone(), None, @@ -1787,6 +1789,7 @@ impl Sidebar { ); }) .ok(); + weak_menu.update(cx, |_, cx| cx.emit(DismissEvent)).ok(); }, ); } @@ -1797,6 +1800,7 @@ impl Sidebar { { let project_group_key = project_group_key.clone(); let multi_workspace = multi_workspace.clone(); + let weak_menu = weak_menu.clone(); move |window, cx| { multi_workspace .update(cx, |multi_workspace, cx| { @@ -1807,13 +1811,13 @@ impl Sidebar { ); }) .ok(); + weak_menu.update(cx, |_, cx| cx.emit(DismissEvent)).ok(); } }, ); let project_group_key = project_group_key.clone(); let multi_workspace = multi_workspace.clone(); - let weak_menu = menu_cx.weak_entity(); menu.separator() .entry("Remove Project", None, move |window, cx| { multi_workspace From 2635ef55c6df198cc11324fdb1bbff690411465b Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 10 Apr 2026 12:20:38 +0300 Subject: [PATCH 36/67] Restrict mouse wheel zoom for certain editors (#53598) Follow-up of https://github.com/zed-industries/zed/pull/53452 * disables mouse wheel zooming in agent, debugger, keymap editor, dev inspector 
and repl-related editors * adjusts the code to call for theme changes directly instead of sending the events, so that agent following does not capture the events and changes its font size Release Notes: - N/A --- crates/agent_ui/src/agent_diff.rs | 1 + .../src/conversation_view/thread_view.rs | 1 + crates/agent_ui/src/inline_assistant.rs | 1 + crates/agent_ui/src/message_editor.rs | 1 + .../src/session/running/console.rs | 1 + crates/editor/src/editor.rs | 13 +++++++++++-- crates/editor/src/element.rs | 17 +++++------------ crates/inspector_ui/src/div_inspector.rs | 1 + crates/keymap_editor/src/keymap_editor.rs | 1 + crates/repl/src/notebook/cell.rs | 3 +++ crates/theme_settings/src/settings.rs | 3 ++- crates/theme_settings/src/theme_settings.rs | 19 ++++++++++++++++--- crates/zed/src/zed.rs | 4 ++-- 13 files changed, 46 insertions(+), 20 deletions(-) diff --git a/crates/agent_ui/src/agent_diff.rs b/crates/agent_ui/src/agent_diff.rs index 7b70740dd1ac462614a9d08d9e48d7d13ac2ed32..567595143a41e71a25237e3b1bdcf2301880bccb 100644 --- a/crates/agent_ui/src/agent_diff.rs +++ b/crates/agent_ui/src/agent_diff.rs @@ -98,6 +98,7 @@ impl AgentDiffPane { editor .set_render_diff_hunk_controls(diff_hunk_controls(&thread, workspace.clone()), cx); editor.register_addon(AgentDiffAddon); + editor.disable_mouse_wheel_zoom(); editor }); diff --git a/crates/agent_ui/src/conversation_view/thread_view.rs b/crates/agent_ui/src/conversation_view/thread_view.rs index 412778e054cab1596b2e9555a9cd4a12c3edb6ec..32fe52480e2c347cc482b2296a107ee8731fb672 100644 --- a/crates/agent_ui/src/conversation_view/thread_view.rs +++ b/crates/agent_ui/src/conversation_view/thread_view.rs @@ -5170,6 +5170,7 @@ impl ThreadView { let mut editor = Editor::for_multibuffer(buffer, Some(project.clone()), window, cx); editor.set_breadcrumb_header(thread_title); + editor.disable_mouse_wheel_zoom(); editor })), None, diff --git a/crates/agent_ui/src/inline_assistant.rs b/crates/agent_ui/src/inline_assistant.rs index 
f2beb719cc7e5638cfc36f339419bda405a8e773..ce74b7f78cda0ea14a79593f83e5666795f80e5e 100644 --- a/crates/agent_ui/src/inline_assistant.rs +++ b/crates/agent_ui/src/inline_assistant.rs @@ -1483,6 +1483,7 @@ impl InlineAssistant { editor.set_show_wrap_guides(false, cx); editor.set_show_gutter(false, cx); editor.set_offset_content(false, cx); + editor.disable_mouse_wheel_zoom(); editor.scroll_manager.set_forbid_vertical_scroll(true); editor.set_read_only(true); editor.set_show_edit_predictions(Some(false), window, cx); diff --git a/crates/agent_ui/src/message_editor.rs b/crates/agent_ui/src/message_editor.rs index 0f59441ab27b5074a710c46a683e72d003a8d5d7..3b93439b62305f63596abcaebe562e7b3f2a65f3 100644 --- a/crates/agent_ui/src/message_editor.rs +++ b/crates/agent_ui/src/message_editor.rs @@ -422,6 +422,7 @@ impl MessageEditor { editor.set_show_indent_guides(false, cx); editor.set_show_completions_on_input(Some(true)); editor.set_soft_wrap(); + editor.disable_mouse_wheel_zoom(); editor.set_use_modal_editing(true); editor.set_context_menu_options(ContextMenuOptions { min_entries_visible: 12, diff --git a/crates/debugger_ui/src/session/running/console.rs b/crates/debugger_ui/src/session/running/console.rs index 65bc949b2b6ddb1a707abf2e001ffde151fb70b8..c541257b6d219b56a611f8a3711da287109ef48d 100644 --- a/crates/debugger_ui/src/session/running/console.rs +++ b/crates/debugger_ui/src/session/running/console.rs @@ -84,6 +84,7 @@ impl Console { editor.set_show_indent_guides(false, cx); editor.set_show_edit_predictions(Some(false), window, cx); editor.set_use_modal_editing(false); + editor.disable_mouse_wheel_zoom(); editor.set_soft_wrap_mode(language::language_settings::SoftWrap::EditorWidth, cx); editor }); diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index c9c2688f80edc14e879ae50adb654d3cf2c9ae8a..09fc8ece435c8aff22bbf380709669282bd28dcd 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -1183,6 +1183,7 @@ pub struct 
Editor { delegate_open_excerpts: bool, enable_lsp_data: bool, enable_runnables: bool, + enable_mouse_wheel_zoom: bool, show_line_numbers: Option, use_relative_line_numbers: Option, show_git_diff_gutter: Option, @@ -1972,6 +1973,9 @@ impl Editor { clone.read_only = self.read_only; clone.buffers_with_disabled_indent_guides = self.buffers_with_disabled_indent_guides.clone(); + clone.enable_mouse_wheel_zoom = self.enable_mouse_wheel_zoom; + clone.enable_lsp_data = self.enable_lsp_data; + clone.enable_runnables = self.enable_runnables; clone } @@ -2419,8 +2423,9 @@ impl Editor { delegate_expand_excerpts: false, delegate_stage_and_restore: false, delegate_open_excerpts: false, - enable_lsp_data: true, - enable_runnables: true, + enable_lsp_data: full_mode, + enable_runnables: full_mode, + enable_mouse_wheel_zoom: full_mode, show_git_diff_gutter: None, show_code_actions: None, show_runnables: None, @@ -26082,6 +26087,10 @@ impl Editor { self.enable_runnables = false; } + pub fn disable_mouse_wheel_zoom(&mut self) { + self.enable_mouse_wheel_zoom = false; + } + fn update_data_on_scroll(&mut self, window: &mut Window, cx: &mut Context<'_, Self>) { self.register_visible_buffers(cx); self.colorize_brackets(false, cx); diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 12a79fa6695ddff8371fa1b648056f43bec0cb98..24b9606a83a5bcf2a675e3632f4bc2bad41aa591 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -7673,22 +7673,18 @@ impl EditorElement { move |event: &ScrollWheelEvent, phase, window, cx| { if phase == DispatchPhase::Bubble && hitbox.should_handle_scroll(window) { - if event.modifiers.secondary() { + delta = delta.coalesce(event.delta); + + if event.modifiers.secondary() && editor.read(cx).enable_mouse_wheel_zoom { let delta_y = match event.delta { ScrollDelta::Pixels(pixels) => pixels.y.into(), ScrollDelta::Lines(lines) => lines.y, }; if delta_y > 0.0 { - window.dispatch_action( - 
Box::new(zed_actions::IncreaseBufferFontSize { persist: false }), - cx, - ); + theme_settings::increase_buffer_font_size(cx); } else if delta_y < 0.0 { - window.dispatch_action( - Box::new(zed_actions::DecreaseBufferFontSize { persist: false }), - cx, - ); + theme_settings::decrease_buffer_font_size(cx); } cx.stop_propagation(); @@ -7701,10 +7697,7 @@ impl EditorElement { } }; - delta = delta.coalesce(event.delta); editor.update(cx, |editor, cx| { - let position_map: &PositionMap = &position_map; - let line_height = position_map.line_height; let glyph_width = position_map.em_layout_width; let (delta, axis) = match delta { diff --git a/crates/inspector_ui/src/div_inspector.rs b/crates/inspector_ui/src/div_inspector.rs index 7ec2d7ba8303e899331d3f38642a9a51f4c14d4c..135c8f22116498fbc0db43c88928a365e5607ce5 100644 --- a/crates/inspector_ui/src/div_inspector.rs +++ b/crates/inspector_ui/src/div_inspector.rs @@ -498,6 +498,7 @@ impl DivInspector { editor.set_show_breakpoints(false, cx); editor.set_show_git_diff_gutter(false, cx); editor.set_show_runnables(false, cx); + editor.disable_mouse_wheel_zoom(); editor.set_show_edit_predictions(Some(false), window, cx); editor.set_minimap_visibility(MinimapVisibility::Disabled, window, cx); editor diff --git a/crates/keymap_editor/src/keymap_editor.rs b/crates/keymap_editor/src/keymap_editor.rs index ee9f6a11c2b51f7993b17c01352cfb97b535049a..c4833620cf4ec0a6dc965aa9e23c2690a44773fd 100644 --- a/crates/keymap_editor/src/keymap_editor.rs +++ b/crates/keymap_editor/src/keymap_editor.rs @@ -3318,6 +3318,7 @@ impl ActionArgumentsEditor { window, cx, ); + editor.disable_mouse_wheel_zoom(); editor.set_searchable(false); editor.disable_scrollbars_and_minimap(window, cx); editor.set_show_edit_predictions(Some(false), window, cx); diff --git a/crates/repl/src/notebook/cell.rs b/crates/repl/src/notebook/cell.rs index ba70e50f8cbccc32bef5de5c1864a3d8db46aa89..cb8f1d51103fca83cb92718e51a80c42f1e6be62 100644 --- 
a/crates/repl/src/notebook/cell.rs +++ b/crates/repl/src/notebook/cell.rs @@ -378,6 +378,7 @@ impl MarkdownCell { editor.set_show_gutter(false, cx); editor.set_text_style_refinement(refinement); editor.set_use_modal_editing(true); + editor.disable_mouse_wheel_zoom(); editor }); @@ -641,6 +642,7 @@ impl CodeCell { ..Default::default() }; + editor.disable_mouse_wheel_zoom(); editor.set_show_gutter(false, cx); editor.set_text_style_refinement(refinement); editor.set_use_modal_editing(true); @@ -718,6 +720,7 @@ impl CodeCell { ..Default::default() }; + editor.disable_mouse_wheel_zoom(); editor.set_text(source.clone(), window, cx); editor.set_show_gutter(false, cx); editor.set_text_style_refinement(refinement); diff --git a/crates/theme_settings/src/settings.rs b/crates/theme_settings/src/settings.rs index cda63ab9c8aa10d0f006f3bf371aab6491dff6de..7b8261d27b6ef1c04677d74f868f85e6356daba7 100644 --- a/crates/theme_settings/src/settings.rs +++ b/crates/theme_settings/src/settings.rs @@ -490,7 +490,8 @@ pub fn adjusted_font_size(size: Pixels, cx: &App) -> Pixels { clamp_font_size(adjusted_font_size) } -/// Adjusts the buffer font size. +/// Adjusts the buffer font size, without persisting the result in the settings. +/// This will be effective until the app is restarted. 
pub fn adjust_buffer_font_size(cx: &mut App, f: impl FnOnce(Pixels) -> Pixels) { let buffer_font_size = ThemeSettings::get_global(cx).buffer_font_size; let adjusted_size = cx diff --git a/crates/theme_settings/src/theme_settings.rs b/crates/theme_settings/src/theme_settings.rs index f5bc96ba02a63088b6311055899b39de65ea9de2..39ffe8327460431ede9c2d1c9a012d0de503fdb2 100644 --- a/crates/theme_settings/src/theme_settings.rs +++ b/crates/theme_settings/src/theme_settings.rs @@ -12,7 +12,7 @@ use std::sync::Arc; use ::settings::{IntoGpui, Settings, SettingsStore}; use anyhow::{Context as _, Result}; -use gpui::{App, Font, HighlightStyle, Pixels, Refineable}; +use gpui::{App, Font, HighlightStyle, Pixels, Refineable, px}; use gpui_util::ResultExt; use theme::{ AccentColors, Appearance, AppearanceContent, DEFAULT_DARK_THEME, DEFAULT_ICON_THEME_NAME, @@ -26,11 +26,12 @@ pub use crate::schema::{ ThemeColorsContent, ThemeContent, ThemeFamilyContent, ThemeStyleContent, WindowBackgroundContent, status_colors_refinement, syntax_overrides, theme_colors_refinement, }; +use crate::settings::adjust_buffer_font_size; pub use crate::settings::{ AgentFontSize, BufferLineHeight, FontFamilyName, IconThemeName, IconThemeSelection, ThemeAppearanceMode, ThemeName, ThemeSelection, ThemeSettings, adjust_agent_buffer_font_size, - adjust_agent_ui_font_size, adjust_buffer_font_size, adjust_ui_font_size, adjusted_font_size, - appearance_to_mode, clamp_font_size, default_theme, observe_buffer_font_size_adjustment, + adjust_agent_ui_font_size, adjust_ui_font_size, adjusted_font_size, appearance_to_mode, + clamp_font_size, default_theme, observe_buffer_font_size_adjustment, reset_agent_buffer_font_size, reset_agent_ui_font_size, reset_buffer_font_size, reset_ui_font_size, set_icon_theme, set_mode, set_theme, setup_ui_font, }; @@ -410,3 +411,15 @@ pub fn merge_accent_colors( accent_colors.0 = Arc::from(colors); } } + +/// Increases the buffer font size by 1 pixel, without persisting the result in the 
settings. +/// This will be effective until the app is restarted. +pub fn increase_buffer_font_size(cx: &mut App) { + adjust_buffer_font_size(cx, |size| size + px(1.0)); +} + +/// Decreases the buffer font size by 1 pixel, without persisting the result in the settings. +/// This will be effective until the app is restarted. +pub fn decrease_buffer_font_size(cx: &mut App) { + adjust_buffer_font_size(cx, |size| size - px(1.0)); +} diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 3d4ada8a1b90020090eb74a8a6ea752fa7a44ab3..e3cb1b90d46ed8f758ef0334f82fd07b34c93ea9 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -928,7 +928,7 @@ fn register_actions( .insert(f32::from(theme_settings::clamp_font_size(buffer_font_size)).into()); }); } else { - theme_settings::adjust_buffer_font_size(cx, |size| size + px(1.0)); + theme_settings::increase_buffer_font_size(cx); } } }) @@ -945,7 +945,7 @@ fn register_actions( .insert(f32::from(theme_settings::clamp_font_size(buffer_font_size)).into()); }); } else { - theme_settings::adjust_buffer_font_size(cx, |size| size - px(1.0)); + theme_settings::decrease_buffer_font_size(cx); } } }) From 2d650da0dcbc136fd0752ff7b92327e1d5a9a82e Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 10 Apr 2026 11:01:12 +0100 Subject: [PATCH 37/67] ci: Do not install cargo machete by building it (#53607) Re-building the tool on CI every time is pointless when can just install the binary itself Release Notes: - N/A or Added/Fixed/Improved ... 
--- .github/workflows/autofix_pr.yml | 5 ++--- .github/workflows/run_tests.yml | 9 +++------ .../xtask/src/tasks/workflows/autofix_pr.rs | 9 ++++----- tooling/xtask/src/tasks/workflows/run_tests.rs | 18 ++++++------------ 4 files changed, 15 insertions(+), 26 deletions(-) diff --git a/.github/workflows/autofix_pr.yml b/.github/workflows/autofix_pr.yml index 717c5e2fa5e3c35f3ff33d176f73022e7a0c95d4..4c0b4ac378c81f0ab9ee88eee6fa274fa2ed6356 100644 --- a/.github/workflows/autofix_pr.yml +++ b/.github/workflows/autofix_pr.yml @@ -45,10 +45,9 @@ jobs: version: '9' - name: autofix_pr::run_autofix::install_cargo_machete if: ${{ inputs.run_clippy }} - uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386 + uses: taiki-e/install-action@02cc5f8ca9f2301050c0c099055816a41ee05507 with: - command: install - args: cargo-machete@0.7.0 + tool: cargo-machete@0.7.0 - name: autofix_pr::run_autofix::run_cargo_fix if: ${{ inputs.run_clippy }} run: cargo fix --workspace --release --all-targets --all-features --allow-dirty --allow-staged diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index a1e15e7beb8a7fe3f03536bf8a4fb41519aa4e0a..c9e83554959b5e3281a0094c284b5a45ff121d16 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -618,14 +618,11 @@ jobs: cache: rust path: ~/.rustup - name: run_tests::check_dependencies::install_cargo_machete - uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386 + uses: taiki-e/install-action@02cc5f8ca9f2301050c0c099055816a41ee05507 with: - command: install - args: cargo-machete@0.7.0 + tool: cargo-machete@0.7.0 - name: run_tests::check_dependencies::run_cargo_machete - uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386 - with: - command: machete + run: cargo machete - name: run_tests::check_dependencies::check_cargo_lock run: cargo update --locked --workspace - name: run_tests::check_dependencies::check_vulnerable_dependencies diff --git 
a/tooling/xtask/src/tasks/workflows/autofix_pr.rs b/tooling/xtask/src/tasks/workflows/autofix_pr.rs index 6fa7743275f36eda1746e7afdd4caabc429fec3c..400103b55e78ba32bfcd641802876be536a25af1 100644 --- a/tooling/xtask/src/tasks/workflows/autofix_pr.rs +++ b/tooling/xtask/src/tasks/workflows/autofix_pr.rs @@ -62,12 +62,11 @@ fn run_autofix(pr_number: &WorkflowInput, run_clippy: &WorkflowInput) -> NamedJo fn install_cargo_machete() -> Step { named::uses( - "clechasseur", - "rs-cargo", - "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2 + "taiki-e", + "install-action", + "02cc5f8ca9f2301050c0c099055816a41ee05507", ) - .add_with(("command", "install")) - .add_with(("args", "cargo-machete@0.7.0")) + .add_with(("tool", "cargo-machete@0.7.0")) } fn run_cargo_fmt() -> Step { diff --git a/tooling/xtask/src/tasks/workflows/run_tests.rs b/tooling/xtask/src/tasks/workflows/run_tests.rs index b8d6e0636078289b80184edfea29a516774c1fd7..f51b21b961ddbeabf30c5e757bdf6815833ab3ca 100644 --- a/tooling/xtask/src/tasks/workflows/run_tests.rs +++ b/tooling/xtask/src/tasks/workflows/run_tests.rs @@ -408,21 +408,15 @@ fn check_style() -> NamedJob { fn check_dependencies() -> NamedJob { fn install_cargo_machete() -> Step { named::uses( - "clechasseur", - "rs-cargo", - "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2 + "taiki-e", + "install-action", + "02cc5f8ca9f2301050c0c099055816a41ee05507", ) - .add_with(("command", "install")) - .add_with(("args", "cargo-machete@0.7.0")) + .add_with(("tool", "cargo-machete@0.7.0")) } - fn run_cargo_machete() -> Step { - named::uses( - "clechasseur", - "rs-cargo", - "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2 - ) - .add_with(("command", "machete")) + fn run_cargo_machete() -> Step { + named::bash("cargo machete") } fn check_cargo_lock() -> Step { From 00c771af0a3eefbd5b0c4d4496b78adbd9e351bd Mon Sep 17 00:00:00 2001 From: Andre Roelofs Date: Fri, 10 Apr 2026 13:16:51 +0200 Subject: [PATCH 38/67] terminal: Properly apply focus when switching terminal 
via tabbing hotkey (#53127) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #53056. Release Notes: - Fixed terminal tabs losing keyboard focus after switching tabs on Linux X11 --- crates/terminal_view/src/terminal_view.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index acccd6129f75ee2f5213fa359203220a7fee08c0..2f6a984798f35c87e39f51978ad84bfdfa435187 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -1356,7 +1356,9 @@ impl Item for TerminalView { h_flex() .gap_1() .group("term-tab-icon") - .track_focus(&self.focus_handle) + .when(!params.selected, |this| { + this.track_focus(&self.focus_handle) + }) .on_action(move |action: &RenameTerminal, window, cx| { self_handle .update(cx, |this, cx| this.rename_terminal(action, window, cx)) From 10122be9cb4d0ba35e79aa62f1f978eb56b892c5 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 10 Apr 2026 13:08:13 +0100 Subject: [PATCH 39/67] gpui: Fix background window freezes on wayland (#53597) Release Notes: - N/A or Added/Fixed/Improved ... 
--- crates/gpui/src/window.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 5778d6ac7372f4b13f14d4fa7d0ebca54a03fd1d..dc357bda80f4329a1ae5b9894ea329c44e483475 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -1232,6 +1232,13 @@ impl Window { if let Some(last_frame) = last_frame_time.get() && now.duration_since(last_frame) < min_interval { + // Must still complete the frame on platforms that require it. + // On Wayland, `surface.frame()` was already called to request the + // next frame callback, so we must call `surface.commit()` (via + // `complete_frame`) or the compositor won't send another callback. + handle + .update(&mut cx, |_, window, _| window.complete_frame()) + .log_err(); return; } } From 3c4e235d6704a37830ef80404c06e66f0b66e938 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 10 Apr 2026 15:34:50 +0300 Subject: [PATCH 40/67] Add settings and disable mouse wheel zoom by default (#53622) Follow-up of https://github.com/zed-industries/zed/pull/53452 Release Notes: - N/A --- assets/settings/default.json | 3 ++ crates/editor/src/editor_settings.rs | 2 + crates/editor/src/element.rs | 5 ++- crates/settings/src/vscode_import.rs | 1 + crates/settings_content/src/editor.rs | 5 +++ crates/settings_ui/src/page_data.rs | 15 ++++++- crates/zed/src/zed.rs | 60 +++++++++++++++++++++++++++ docs/src/reference/all-settings.md | 10 +++++ 8 files changed, 99 insertions(+), 2 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 799a34d6a6f4dea367cc2c5cc4ce774ff0ad312e..8e8c93c5088338af63a2daed8c87fe031d500727 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -636,6 +636,9 @@ // Scroll sensitivity multiplier. This multiplier is applied // to both the horizontal and vertical delta values while scrolling. 
"scroll_sensitivity": 1.0, + // Whether to zoom the editor font size with the mouse wheel + // while holding the primary modifier key (Cmd on macOS, Ctrl on other platforms). + "mouse_wheel_zoom": false, // Scroll sensitivity multiplier for fast scrolling. This multiplier is applied // to both the horizontal and vertical delta values while scrolling. Fast scrolling // happens when a user holds the alt or option key while scrolling. diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index e4a20476419578ff646952c84b399e2333f0a411..67b56a161f4d92985339d725b553c4baeec05bca 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -33,6 +33,7 @@ pub struct EditorSettings { pub autoscroll_on_clicks: bool, pub horizontal_scroll_margin: f32, pub scroll_sensitivity: f32, + pub mouse_wheel_zoom: bool, pub fast_scroll_sensitivity: f32, pub sticky_scroll: StickyScroll, pub relative_line_numbers: RelativeLineNumbers, @@ -251,6 +252,7 @@ impl Settings for EditorSettings { autoscroll_on_clicks: editor.autoscroll_on_clicks.unwrap(), horizontal_scroll_margin: editor.horizontal_scroll_margin.unwrap(), scroll_sensitivity: editor.scroll_sensitivity.unwrap(), + mouse_wheel_zoom: editor.mouse_wheel_zoom.unwrap(), fast_scroll_sensitivity: editor.fast_scroll_sensitivity.unwrap(), sticky_scroll: StickyScroll { enabled: sticky_scroll.enabled.unwrap(), diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 24b9606a83a5bcf2a675e3632f4bc2bad41aa591..fa6b9d30b5b7123e8775ba1d8b65a79461e26ca1 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -7675,7 +7675,10 @@ impl EditorElement { if phase == DispatchPhase::Bubble && hitbox.should_handle_scroll(window) { delta = delta.coalesce(event.delta); - if event.modifiers.secondary() && editor.read(cx).enable_mouse_wheel_zoom { + if event.modifiers.secondary() + && editor.read(cx).enable_mouse_wheel_zoom + && 
EditorSettings::get_global(cx).mouse_wheel_zoom + { let delta_y = match event.delta { ScrollDelta::Pixels(pixels) => pixels.y.into(), ScrollDelta::Lines(lines) => lines.y, diff --git a/crates/settings/src/vscode_import.rs b/crates/settings/src/vscode_import.rs index 5ebf0ba6abd1749ef13b9d8fcd26ac8caa608e51..042b3a7c71c77d8aaa02cec559a943608ee87859 100644 --- a/crates/settings/src/vscode_import.rs +++ b/crates/settings/src/vscode_import.rs @@ -286,6 +286,7 @@ impl VsCodeSettings { }), rounded_selection: self.read_bool("editor.roundedSelection"), scroll_beyond_last_line: None, + mouse_wheel_zoom: self.read_bool("editor.mouseWheelZoom"), scroll_sensitivity: self.read_f32("editor.mouseWheelScrollSensitivity"), scrollbar: self.scrollbar_content(), search: self.search_content(), diff --git a/crates/settings_content/src/editor.rs b/crates/settings_content/src/editor.rs index b37192882694f999a5e7f3180e5a7899a8732393..00a0549d6b8b1ded71069a5ece36ded5d1a69d0e 100644 --- a/crates/settings_content/src/editor.rs +++ b/crates/settings_content/src/editor.rs @@ -89,6 +89,11 @@ pub struct EditorSettingsContent { /// Default: 1.0 #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub scroll_sensitivity: Option, + /// Whether to zoom the editor font size with the mouse wheel + /// while holding the primary modifier key (Cmd on macOS, Ctrl on other platforms). + /// + /// Default: false + pub mouse_wheel_zoom: Option, /// Scroll sensitivity multiplier for fast scrolling. This multiplier is applied /// to both the horizontal and vertical delta values while scrolling. Fast scrolling /// happens when a user holds the alt or option key while scrolling. 
diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index 1bab4984a1515627ef26042fa7937a328877df0a..cef65431a459126ac14054dee5bc5ffe68e2419c 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -1571,7 +1571,7 @@ fn editor_page() -> SettingsPage { ] } - fn scrolling_section() -> [SettingsPageItem; 8] { + fn scrolling_section() -> [SettingsPageItem; 9] { [ SettingsPageItem::SectionHeader("Scrolling"), SettingsPageItem::SettingItem(SettingItem { @@ -1632,6 +1632,19 @@ fn editor_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Mouse Wheel Zoom", + description: "Whether to zoom the editor font size with the mouse wheel while holding the primary modifier key.", + field: Box::new(SettingField { + json_path: Some("mouse_wheel_zoom"), + pick: |settings_content| settings_content.editor.mouse_wheel_zoom.as_ref(), + write: |settings_content, value| { + settings_content.editor.mouse_wheel_zoom = value; + }, + }), + metadata: None, + files: USER, + }), SettingsPageItem::SettingItem(SettingItem { title: "Fast Scroll Sensitivity", description: "Fast scroll sensitivity multiplier for both horizontal and vertical scrolling.", diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index e3cb1b90d46ed8f758ef0334f82fd07b34c93ea9..63e86a0b7c7980f6591dc248a4313577e8d46bea 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -4144,6 +4144,7 @@ mod tests { window.draw(cx).clear(); }); + // mouse_wheel_zoom is disabled by default — zoom should not work. 
let initial_font_size = cx.update(|_, cx| ThemeSettings::get_global(cx).buffer_font_size(cx).as_f32()); @@ -4154,6 +4155,34 @@ mod tests { ..Default::default() }); + let font_size_after_disabled_zoom = + cx.update(|_, cx| ThemeSettings::get_global(cx).buffer_font_size(cx).as_f32()); + + assert_eq!( + initial_font_size, font_size_after_disabled_zoom, + "Editor buffer font-size should not change when mouse_wheel_zoom is disabled" + ); + + // Enable mouse_wheel_zoom and verify zoom works. + cx.update(|_, cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + settings.editor.mouse_wheel_zoom = Some(true); + }); + }); + }); + + cx.update(|window, cx| { + window.draw(cx).clear(); + }); + + cx.simulate_event(gpui::ScrollWheelEvent { + position: mouse_position, + delta: gpui::ScrollDelta::Pixels(point(px(0.), px(1.))), + modifiers: event_modifiers, + ..Default::default() + }); + let increased_font_size = cx.update(|_, cx| ThemeSettings::get_global(cx).buffer_font_size(cx).as_f32()); @@ -4180,6 +4209,37 @@ mod tests { decreased_font_size < increased_font_size, "Editor buffer font-size should have decreased from scroll-zoom" ); + + // Disable mouse_wheel_zoom again and verify zoom stops working. 
+ cx.update(|_, cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + settings.editor.mouse_wheel_zoom = Some(false); + }); + }); + }); + + let font_size_before = + cx.update(|_, cx| ThemeSettings::get_global(cx).buffer_font_size(cx).as_f32()); + + cx.update(|window, cx| { + window.draw(cx).clear(); + }); + + cx.simulate_event(gpui::ScrollWheelEvent { + position: mouse_position, + delta: gpui::ScrollDelta::Pixels(point(px(0.), px(1.))), + modifiers: event_modifiers, + ..Default::default() + }); + + let font_size_after = + cx.update(|_, cx| ThemeSettings::get_global(cx).buffer_font_size(cx).as_f32()); + + assert_eq!( + font_size_before, font_size_after, + "Editor buffer font-size should not change when mouse_wheel_zoom is re-disabled" + ); } #[gpui::test] diff --git a/docs/src/reference/all-settings.md b/docs/src/reference/all-settings.md index cb731de2e11888393ab00aef32b0722765a1ede7..b2b5a76a3a21411b1444268c592e24186ad29797 100644 --- a/docs/src/reference/all-settings.md +++ b/docs/src/reference/all-settings.md @@ -3396,6 +3396,16 @@ List of strings containing any combination of: Positive `float` values +### Mouse Wheel Zoom + +- Description: Whether to zoom the editor font size with the mouse wheel while holding the primary modifier key (Cmd on macOS, Ctrl on other platforms). +- Setting: `mouse_wheel_zoom` +- Default: `false` + +**Options** + +`boolean` values + ### Fast Scroll Sensitivity - Description: Scroll sensitivity multiplier for fast scrolling. This multiplier is applied to both the horizontal and vertical delta values while scrolling. Fast scrolling happens when a user holds the alt or option key while scrolling. From e6196e2216bf72e4235955e889464330c380530d Mon Sep 17 00:00:00 2001 From: Neel Date: Fri, 10 Apr 2026 14:16:43 +0100 Subject: [PATCH 41/67] git: Fix spurious file creation when staging (#53621) Related to https://github.com/zed-industries/zed/pull/53484. 
This PR fixes a `--` file being created when staging a hunk. Release Notes: - N/A --- crates/git/src/repository.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index b1b9af106d93e02ee61fdc436dce7d95f9a7c107..faf973505af6cde1b2e736a0bfb630fa18c3647c 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -1462,7 +1462,7 @@ impl GitRepository for RealGitRepository { log::debug!("indexing SHA: {sha}, path {path:?}"); let output = git - .build_command(&["update-index", "--add", "--cacheinfo", mode, sha, "--"]) + .build_command(&["update-index", "--add", "--cacheinfo", mode, sha]) .envs(env.iter()) .arg(path.as_unix_str()) .output() From a151def86f06d1ec551d4599d7a9ed7d616147b3 Mon Sep 17 00:00:00 2001 From: Dong Date: Fri, 10 Apr 2026 21:41:27 +0800 Subject: [PATCH 42/67] markdown_preview: Add footnotes support (#53086) ## What does this PR changed Adds footnote rendering and navigation to Zed's markdown preview. 
- **Footnote references**: (`[^1]`) render inline as `[1]` with link styling (color + underline) - **Footnote definitions**: (`[^1]: ...`) render at the bottom with a horizontal separator, smaller text (85% size), and a label prefix - **Click-to-navigate**: clicking a footnote reference scrolls to its definition https://github.com/user-attachments/assets/a79a0136-f22d-40ac-8b53-cfefa8573d21 ## OOS/ Need discussion - **Display style**: Since currently the gpui crate does not provide a superscript style, in this PR we publish the feature with using [`[1]`]() instead of aligning to the GFM styled[^1] - **Footnote definition placement**: GFM renders the footnote at the bottom of the content no matter where the user place the footnote definition, but the `pulldown_cmark` renders the footnote just at where user place it, for this PR I'll keep the footnote where `pulldown_cmark` renders it, and we may have some more discuss on if we need to move them to the bottom of the markdown preview [^1]: GitHub-flavoured markdown ## What to test - [ ] Open a markdown file with footnotes (e.g. `Text[^1]\n\n[^1]: Definition`) - [ ] Verify reference renders as `[1]` with link color - [ ] Verify definition renders below a separator with smaller text - [ ] Verify pointer cursor appears on hover over `[1]` - [ ] Verify clicking `[1]` scrolls to the definition - [ ] Verify normal links still work as before - [ ] `cargo test -p markdown` passes (46 tests) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [ ] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #13603 Release Notes: - Added support for footnotes in Markdown Preview. 
--------- Co-authored-by: Smit Barmase --- crates/markdown/src/markdown.rs | 167 +++++++++++++++++++++++++++----- crates/markdown/src/parser.rs | 80 ++++++++++++++- 2 files changed, 217 insertions(+), 30 deletions(-) diff --git a/crates/markdown/src/markdown.rs b/crates/markdown/src/markdown.rs index e873a458cdaf981635f14c4e3ab18456e700f048..a0f91cb43698be042207e2f51a5fc8cab16e67a7 100644 --- a/crates/markdown/src/markdown.rs +++ b/crates/markdown/src/markdown.rs @@ -249,6 +249,7 @@ pub struct Markdown { source: SharedString, selection: Selection, pressed_link: Option, + pressed_footnote_ref: Option, autoscroll_request: Option, active_root_block: Option, parsed_markdown: ParsedMarkdown, @@ -419,6 +420,7 @@ impl Markdown { source, selection: Selection::default(), pressed_link: None, + pressed_footnote_ref: None, autoscroll_request: None, active_root_block: None, should_reparse: false, @@ -532,6 +534,13 @@ impl Markdown { cx.refresh_windows(); } + fn footnote_definition_content_start(&self, label: &SharedString) -> Option { + self.parsed_markdown + .footnote_definitions + .get(label) + .copied() + } + pub fn set_active_root_for_source_index( &mut self, source_index: Option, @@ -696,6 +705,7 @@ impl Markdown { html_blocks: BTreeMap::default(), mermaid_diagrams: BTreeMap::default(), heading_slugs: HashMap::default(), + footnote_definitions: HashMap::default(), }, Default::default(), ); @@ -709,6 +719,7 @@ impl Markdown { let root_block_starts = parsed.root_block_starts; let html_blocks = parsed.html_blocks; let heading_slugs = parsed.heading_slugs; + let footnote_definitions = parsed.footnote_definitions; let mermaid_diagrams = if should_render_mermaid_diagrams { extract_mermaid_diagrams(&source, &events) } else { @@ -776,6 +787,7 @@ impl Markdown { html_blocks, mermaid_diagrams, heading_slugs, + footnote_definitions, }, images_by_source_offset, ) @@ -900,6 +912,7 @@ pub struct ParsedMarkdown { pub(crate) html_blocks: BTreeMap, pub(crate) mermaid_diagrams: BTreeMap, 
pub heading_slugs: HashMap, + pub footnote_definitions: HashMap, } impl ParsedMarkdown { @@ -1300,18 +1313,22 @@ impl MarkdownElement { return; } - let is_hovering_link = hitbox.is_hovered(window) + let is_hovering_clickable = hitbox.is_hovered(window) && !self.markdown.read(cx).selection.pending && rendered_text - .link_for_position(window.mouse_position()) - .is_some(); - - if !self.style.prevent_mouse_interaction { - if is_hovering_link { - window.set_cursor_style(CursorStyle::PointingHand, hitbox); - } else { - window.set_cursor_style(CursorStyle::IBeam, hitbox); - } + .source_index_for_position(window.mouse_position()) + .ok() + .is_some_and(|source_index| { + rendered_text.link_for_source_index(source_index).is_some() + || rendered_text + .footnote_ref_for_source_index(source_index) + .is_some() + }); + + if is_hovering_clickable { + window.set_cursor_style(CursorStyle::PointingHand, hitbox); + } else { + window.set_cursor_style(CursorStyle::IBeam, hitbox); } let on_open_url = self.on_url_click.take(); @@ -1336,13 +1353,27 @@ impl MarkdownElement { move |markdown, event: &MouseDownEvent, phase, window, cx| { if hitbox.is_hovered(window) { if phase.bubble() { - if let Some(link) = rendered_text.link_for_position(event.position) { - markdown.pressed_link = Some(link.clone()); - } else { - let source_index = - match rendered_text.source_index_for_position(event.position) { - Ok(ix) | Err(ix) => ix, - }; + let position_result = + rendered_text.source_index_for_position(event.position); + + if let Ok(source_index) = position_result { + if let Some(footnote_ref) = + rendered_text.footnote_ref_for_source_index(source_index) + { + markdown.pressed_footnote_ref = Some(footnote_ref.clone()); + } else if let Some(link) = + rendered_text.link_for_source_index(source_index) + { + markdown.pressed_link = Some(link.clone()); + } + } + + if markdown.pressed_footnote_ref.is_none() + && markdown.pressed_link.is_none() + { + let source_index = match position_result { + Ok(ix) | 
Err(ix) => ix, + }; if let Some(handler) = on_source_click.as_ref() { let blocked = handler(source_index, event.click_count, window, cx); if blocked { @@ -1398,7 +1429,7 @@ impl MarkdownElement { self.on_mouse_event(window, cx, { let rendered_text = rendered_text.clone(); let hitbox = hitbox.clone(); - let was_hovering_link = is_hovering_link; + let was_hovering_clickable = is_hovering_clickable; move |markdown, event: &MouseMoveEvent, phase, window, cx| { if phase.capture() { return; @@ -1414,9 +1445,17 @@ impl MarkdownElement { markdown.autoscroll_request = Some(source_index); cx.notify(); } else { - let is_hovering_link = hitbox.is_hovered(window) - && rendered_text.link_for_position(event.position).is_some(); - if is_hovering_link != was_hovering_link { + let is_hovering_clickable = hitbox.is_hovered(window) + && rendered_text + .source_index_for_position(event.position) + .ok() + .is_some_and(|source_index| { + rendered_text.link_for_source_index(source_index).is_some() + || rendered_text + .footnote_ref_for_source_index(source_index) + .is_some() + }); + if is_hovering_clickable != was_hovering_clickable { cx.notify(); } } @@ -1426,8 +1465,21 @@ impl MarkdownElement { let rendered_text = rendered_text.clone(); move |markdown, event: &MouseUpEvent, phase, window, cx| { if phase.bubble() { - if let Some(pressed_link) = markdown.pressed_link.take() - && Some(&pressed_link) == rendered_text.link_for_position(event.position) + let source_index = rendered_text.source_index_for_position(event.position).ok(); + if let Some(pressed_footnote_ref) = markdown.pressed_footnote_ref.take() + && source_index + .and_then(|ix| rendered_text.footnote_ref_for_source_index(ix)) + == Some(&pressed_footnote_ref) + { + if let Some(source_index) = + markdown.footnote_definition_content_start(&pressed_footnote_ref.label) + { + markdown.autoscroll_request = Some(source_index); + cx.notify(); + } + } else if let Some(pressed_link) = markdown.pressed_link.take() + && 
source_index.and_then(|ix| rendered_text.link_for_source_index(ix)) + == Some(&pressed_link) { if let Some(open_url) = on_open_url.as_ref() { open_url(pressed_link.destination_url, window, cx); @@ -1818,6 +1870,36 @@ impl Element for MarkdownElement { builder.push_text_style(style) } } + MarkdownTag::FootnoteDefinition(label) => { + if !builder.rendered_footnote_separator { + builder.rendered_footnote_separator = true; + builder.push_div( + div() + .border_t_1() + .mt_2() + .border_color(self.style.rule_color), + range, + markdown_end, + ); + builder.pop_div(); + } + builder.push_div( + div() + .pt_1() + .mb_1() + .line_height(rems(1.3)) + .text_size(rems(0.85)) + .h_flex() + .items_start() + .gap_2() + .child( + div().text_size(rems(0.85)).child(format!("{}.", label)), + ), + range, + markdown_end, + ); + builder.push_div(div().flex_1().w_0(), range, markdown_end); + } MarkdownTag::MetadataBlock(_) => {} MarkdownTag::Table(alignments) => { builder.table.start(alignments.clone()); @@ -1973,6 +2055,10 @@ impl Element for MarkdownElement { builder.pop_div(); builder.table.end_cell(); } + MarkdownTagEnd::FootnoteDefinition => { + builder.pop_div(); + builder.pop_div(); + } _ => log::debug!("unsupported markdown tag end: {:?}", tag), }, MarkdownEvent::Text => { @@ -2028,7 +2114,12 @@ impl Element for MarkdownElement { MarkdownEvent::TaskListMarker(_) => { // handled inside the `MarkdownTag::Item` case } - _ => log::debug!("unsupported markdown event {:?}", event), + MarkdownEvent::FootnoteReference(label) => { + builder.push_footnote_ref(label.clone(), range.clone()); + builder.push_text_style(self.style.link.clone()); + builder.push_text(&format!("[{label}]"), range.clone()); + builder.pop_text_style(); + } } } if self.style.code_block_overflow_x_scroll { @@ -2270,8 +2361,10 @@ struct MarkdownElementBuilder { rendered_lines: Vec, pending_line: PendingLine, rendered_links: Vec, + rendered_footnote_refs: Vec, current_source_index: usize, html_comment: bool, + 
rendered_footnote_separator: bool, base_text_style: TextStyle, text_style_stack: Vec, code_block_stack: Vec>>, @@ -2306,8 +2399,10 @@ impl MarkdownElementBuilder { rendered_lines: Vec::new(), pending_line: PendingLine::default(), rendered_links: Vec::new(), + rendered_footnote_refs: Vec::new(), current_source_index: 0, html_comment: false, + rendered_footnote_separator: false, base_text_style, text_style_stack: Vec::new(), code_block_stack: Vec::new(), @@ -2459,6 +2554,13 @@ impl MarkdownElementBuilder { }); } + fn push_footnote_ref(&mut self, label: SharedString, source_range: Range) { + self.rendered_footnote_refs.push(RenderedFootnoteRef { + source_range, + label, + }); + } + fn push_text(&mut self, text: &str, source_range: Range) { self.pending_line.source_mappings.push(SourceMapping { rendered_index: self.pending_line.text.len(), @@ -2576,6 +2678,7 @@ impl MarkdownElementBuilder { text: RenderedText { lines: self.rendered_lines.into(), links: self.rendered_links.into(), + footnote_refs: self.rendered_footnote_refs.into(), }, } } @@ -2690,6 +2793,7 @@ pub struct RenderedMarkdown { struct RenderedText { lines: Rc<[RenderedLine]>, links: Rc<[RenderedLink]>, + footnote_refs: Rc<[RenderedFootnoteRef]>, } #[derive(Debug, Clone, Eq, PartialEq)] @@ -2698,6 +2802,12 @@ struct RenderedLink { destination_url: SharedString, } +#[derive(Debug, Clone, Eq, PartialEq)] +struct RenderedFootnoteRef { + source_range: Range, + label: SharedString, +} + impl RenderedText { fn source_index_for_position(&self, position: Point) -> Result { let mut lines = self.lines.iter().peekable(); @@ -2844,12 +2954,17 @@ impl RenderedText { accumulator } - fn link_for_position(&self, position: Point) -> Option<&RenderedLink> { - let source_index = self.source_index_for_position(position).ok()?; + fn link_for_source_index(&self, source_index: usize) -> Option<&RenderedLink> { self.links .iter() .find(|link| link.source_range.contains(&source_index)) } + + fn footnote_ref_for_source_index(&self, 
source_index: usize) -> Option<&RenderedFootnoteRef> { + self.footnote_refs + .iter() + .find(|fref| fref.source_range.contains(&source_index)) + } } #[cfg(test)] diff --git a/crates/markdown/src/parser.rs b/crates/markdown/src/parser.rs index c6c988083fddeac357b92d0b6604e0bbd564308f..641b43a1399773d2d4df2ec13e2873c816a6d49a 100644 --- a/crates/markdown/src/parser.rs +++ b/crates/markdown/src/parser.rs @@ -38,6 +38,7 @@ pub(crate) struct ParsedMarkdownData { pub root_block_starts: Vec, pub html_blocks: BTreeMap, pub heading_slugs: HashMap, + pub footnote_definitions: HashMap, } impl ParseState { @@ -499,9 +500,10 @@ pub(crate) fn parse_markdown_with_options( pulldown_cmark::Event::InlineHtml(_) => { state.push_event(range, MarkdownEvent::InlineHtml) } - pulldown_cmark::Event::FootnoteReference(_) => { - state.push_event(range, MarkdownEvent::FootnoteReference) - } + pulldown_cmark::Event::FootnoteReference(label) => state.push_event( + range, + MarkdownEvent::FootnoteReference(SharedString::from(label.to_string())), + ), pulldown_cmark::Event::SoftBreak => state.push_event(range, MarkdownEvent::SoftBreak), pulldown_cmark::Event::HardBreak => state.push_event(range, MarkdownEvent::HardBreak), pulldown_cmark::Event::Rule => state.push_event(range, MarkdownEvent::Rule), @@ -517,6 +519,7 @@ pub(crate) fn parse_markdown_with_options( } else { HashMap::default() }; + let footnote_definitions = build_footnote_definitions(&state.events); ParsedMarkdownData { events: state.events, @@ -525,7 +528,34 @@ pub(crate) fn parse_markdown_with_options( root_block_starts: state.root_block_starts, html_blocks, heading_slugs, + footnote_definitions, + } +} + +fn build_footnote_definitions( + events: &[(Range, MarkdownEvent)], +) -> HashMap { + let mut definitions = HashMap::default(); + let mut current_label: Option = None; + + for (range, event) in events { + match event { + MarkdownEvent::Start(MarkdownTag::FootnoteDefinition(label)) => { + current_label = Some(label.clone()); + } + 
MarkdownEvent::End(MarkdownTagEnd::FootnoteDefinition) => { + current_label = None; + } + MarkdownEvent::Text if current_label.is_some() => { + if let Some(label) = current_label.take() { + definitions.entry(label).or_insert(range.start); + } + } + _ => {} + } } + + definitions } pub fn parse_links_only(text: &str) -> Vec<(Range, MarkdownEvent)> { @@ -589,7 +619,7 @@ pub enum MarkdownEvent { /// A reference to a footnote with given label, which may or may not be defined /// by an event with a `Tag::FootnoteDefinition` tag. Definitions and references to them may /// occur in any order. - FootnoteReference, + FootnoteReference(SharedString), /// A soft line break. SoftBreak, /// A hard line break. @@ -1111,6 +1141,48 @@ mod tests { assert_eq!(extract_code_block_content_range(input), 3..3); } + #[test] + fn test_footnotes() { + let parsed = parse_markdown_with_options( + "Text with a footnote[^1] and some more text.\n\n[^1]: This is the footnote content.", + false, + false, + ); + assert_eq!( + parsed.events, + vec![ + (0..45, RootStart), + (0..45, Start(Paragraph)), + (0..20, Text), + (20..24, FootnoteReference("1".into())), + (24..44, Text), + (0..45, End(MarkdownTagEnd::Paragraph)), + (0..45, RootEnd(0)), + (46..81, RootStart), + (46..81, Start(FootnoteDefinition("1".into()))), + (52..81, Start(Paragraph)), + (52..81, Text), + (52..81, End(MarkdownTagEnd::Paragraph)), + (46..81, End(MarkdownTagEnd::FootnoteDefinition)), + (46..81, RootEnd(1)), + ] + ); + assert_eq!(parsed.footnote_definitions.len(), 1); + assert_eq!(parsed.footnote_definitions.get("1").copied(), Some(52)); + } + + #[test] + fn test_footnote_definitions_multiple() { + let parsed = parse_markdown_with_options( + "Text[^a] and[^b].\n\n[^a]: First.\n\n[^b]: Second.", + false, + false, + ); + assert_eq!(parsed.footnote_definitions.len(), 2); + assert!(parsed.footnote_definitions.contains_key("a")); + assert!(parsed.footnote_definitions.contains_key("b")); + } + #[test] fn 
test_links_split_across_fragments() { // This test verifies that links split across multiple text fragments due to escaping or other issues From 5a9f82583bdaf9a41676b23cf2d00d878beb4b09 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 10 Apr 2026 10:15:28 -0400 Subject: [PATCH 43/67] collab_ui: Disable Collab panel based on organization configuration (#53567) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR makes it so the Collab panel can be disabled by the organization's configuration: Screenshot 2026-04-09 at 2 38 35 PM Depends on https://github.com/zed-industries/cloud/pull/2247. Closes CLO-638. Release Notes: - N/A --- crates/cloud_api_types/src/cloud_api_types.rs | 1 + crates/collab_ui/src/collab_panel.rs | 22 ++++++++++++++++++- 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/crates/cloud_api_types/src/cloud_api_types.rs b/crates/cloud_api_types/src/cloud_api_types.rs index b4435f5bf0d53d6df3df54ef28bd99124b622421..439ed5b2e822382aebcc7dfc18f5887d7a389038 100644 --- a/crates/cloud_api_types/src/cloud_api_types.rs +++ b/crates/cloud_api_types/src/cloud_api_types.rs @@ -56,6 +56,7 @@ pub struct Organization { pub struct OrganizationConfiguration { pub is_zed_model_provider_enabled: bool, pub is_agent_thread_feedback_enabled: bool, + pub is_collaboration_enabled: bool, pub edit_prediction: OrganizationEditPredictionConfiguration, } diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index c17cdc3d6e529a311c308aa4f13cd4acd66dd84b..a80d5682eb56526d9060fd1014d29f1deac4d7d2 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -2620,6 +2620,18 @@ impl CollabPanel { cx.write_to_clipboard(item) } + fn render_disabled_by_organization(&mut self, _cx: &mut Context) -> Div { + v_flex() + .p_4() + .gap_4() + .size_full() + .text_center() + .justify_center() + .child(Label::new( + "Collaboration is disabled for this 
organization.", + )) + } + fn render_signed_out(&mut self, cx: &mut Context) -> Div { let collab_blurb = "Work with your team in realtime with collaborative editing, voice, shared notes and more."; @@ -3645,6 +3657,12 @@ impl Render for CollabPanel { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let status = *self.client.status().borrow(); + let is_collaboration_disabled = self + .user_store + .read(cx) + .current_organization_configuration() + .is_some_and(|config| !config.is_collaboration_enabled); + v_flex() .key_context(self.dispatch_context(window, cx)) .on_action(cx.listener(CollabPanel::cancel)) @@ -3664,7 +3682,9 @@ impl Render for CollabPanel { .on_action(cx.listener(CollabPanel::move_channel_down)) .track_focus(&self.focus_handle) .size_full() - .child(if !status.is_or_was_connected() || status.is_signing_in() { + .child(if is_collaboration_disabled { + self.render_disabled_by_organization(cx) + } else if !status.is_or_was_connected() || status.is_signing_in() { self.render_signed_out(cx) } else { self.render_signed_in(window, cx) From 2d3f49e4b265c6d074b0d3316806a454fccabeb8 Mon Sep 17 00:00:00 2001 From: Sandro Meier Date: Fri, 10 Apr 2026 17:28:54 +0200 Subject: [PATCH 44/67] dev_container: Handle devcontainer.metadata label as JSON object or array (#53557) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable ## Details - The [devcontainer CLI writes the `devcontainer.metadata` label as a bare JSON object](https://github.com/devcontainers/cli/issues/1054) when there is only one metadata entry (e.g. 
docker-compose devcontainer with a Dockerfile and no features) - Zed's `deserialize_metadata` only accepted a JSON array, causing deserialization to fail with `invalid type: map, expected a sequence` - This made it impossible to attach to existing docker-compose devcontainers created by the devcontainer CLI or VS Code The fix tries parsing as an array first, then falls back to parsing as a single object wrapped in a vec. This mirrors how the [devcontainer CLI itself reads the label](https://github.com/devcontainers/cli/blob/main/src/spec-node/imageMetadata.ts#L476-L493). An upstream fix has also been submitted: https://github.com/devcontainers/cli/pull/1199 ## Reproduction 1. Create a docker-compose devcontainer with a Dockerfile and no features: `.devcontainer/devcontainer.json`: ```json { "name": "repro", "dockerComposeFile": "docker-compose.yml", "service": "app", "remoteUser": "root" } ``` `.devcontainer/docker-compose.yml`: ```yaml services: app: build: context: . dockerfile: Dockerfile command: sleep infinity volumes: - ..:/workspace ``` `.devcontainer/Dockerfile`: ```dockerfile FROM ubuntu:24.04 ``` 2. `devcontainer up --workspace-folder .` 3. Open the folder in Zed, fails with metadata deserialization error Release Notes: - Fixed attaching to a devcontainer that has a single metadata element which was started with `devcontainer-cli` --- crates/dev_container/src/docker.rs | 38 +++++++++++++++++++++++++++--- 1 file changed, 35 insertions(+), 3 deletions(-) diff --git a/crates/dev_container/src/docker.rs b/crates/dev_container/src/docker.rs index 99ce7422eee36d56e2bc53fd31d150fe2f41b16d..7931923b4219e33fa56e8fb2fb6b97c1ea89a750 100644 --- a/crates/dev_container/src/docker.rs +++ b/crates/dev_container/src/docker.rs @@ -487,10 +487,18 @@ where let s: Option = Option::deserialize(deserializer)?; match s { Some(json_string) => { + // The devcontainer metadata label can be either a JSON array (e.g. from + // image-based devcontainers) or a single JSON object (e.g. 
from + // docker-compose-based devcontainers created by the devcontainer CLI). + // Handle both formats. let parsed: Vec> = - serde_json_lenient::from_str(&json_string).map_err(|e| { - log::error!("Error deserializing metadata: {e}"); - serde::de::Error::custom(e) + serde_json_lenient::from_str(&json_string).or_else(|_| { + let single: HashMap = + serde_json_lenient::from_str(&json_string).map_err(|e| { + log::error!("Error deserializing metadata: {e}"); + serde::de::Error::custom(e) + })?; + Ok(vec![single]) })?; Ok(Some(parsed)) } @@ -936,6 +944,30 @@ mod test { assert_eq!(target_dir.unwrap(), "/workspaces/cli/".to_string()); } + #[test] + fn should_deserialize_object_metadata_from_docker_compose_container() { + // The devcontainer CLI writes metadata as a bare JSON object (not an array) + // when there is only one metadata entry (e.g. docker-compose with no features). + // See https://github.com/devcontainers/cli/issues/1054 + let given_config = r#" + { + "Id": "dc4e7b8ff4bf", + "Config": { + "Labels": { + "devcontainer.metadata": "{\"remoteUser\":\"ubuntu\"}" + } + } + } + "#; + let config = serde_json_lenient::from_str::(given_config).unwrap(); + + assert!(config.config.labels.metadata.is_some()); + let metadata = config.config.labels.metadata.unwrap(); + assert_eq!(metadata.len(), 1); + assert!(metadata[0].contains_key("remoteUser")); + assert_eq!(metadata[0]["remoteUser"], "ubuntu"); + } + #[test] fn should_deserialize_docker_compose_config() { let given_config = r#" From c788dc52174aa1d54cf987c0c61f9ad455dfda40 Mon Sep 17 00:00:00 2001 From: Oleksiy Syvokon Date: Fri, 10 Apr 2026 19:29:52 +0300 Subject: [PATCH 45/67] ep: Add LCS-based recall (#53509) This PR adds the `correctly_deleted_chars` field and updates `kept_rate` to account for it, not just inserted chars. It also adds `recall_rate` to measure coverage of reference insertions/deletions. Finally, it renames "final" to "reference" and "prediction" to "candidate". 
Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --- crates/edit_prediction/src/edit_prediction.rs | 8 +- .../edit_prediction/src/metrics/kept_rate.rs | 245 ++++++++++++------ crates/edit_prediction_cli/src/example.rs | 8 + crates/edit_prediction_cli/src/score.rs | 124 ++++++++- 4 files changed, 293 insertions(+), 92 deletions(-) diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index 9148a0bb62462a6ab32ce4837312c5de701d21f2..6bca0a1639d47d09a94b650bc59ad790dbdcbf46 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -1690,12 +1690,16 @@ impl EditPredictionStore { settled_editable_region, ts_error_count_before_prediction, ts_error_count_after_prediction, - edit_bytes_predicted_new = kept_rate_result.predicted_new_chars, - edit_bytes_final_new = kept_rate_result.final_new_chars, + edit_bytes_candidate_new = kept_rate_result.candidate_new_chars, + edit_bytes_reference_new = kept_rate_result.reference_new_chars, + edit_bytes_candidate_deleted = kept_rate_result.candidate_deleted_chars, + edit_bytes_reference_deleted = kept_rate_result.reference_deleted_chars, edit_bytes_kept = kept_rate_result.kept_chars, + edit_bytes_correctly_deleted = kept_rate_result.correctly_deleted_chars, edit_bytes_discarded = kept_rate_result.discarded_chars, edit_bytes_context = kept_rate_result.context_chars, edit_bytes_kept_rate = kept_rate_result.kept_rate, + edit_bytes_recall_rate = kept_rate_result.recall_rate, example, e2e_latency = e2e_latency.as_millis(), ); diff --git 
a/crates/edit_prediction/src/metrics/kept_rate.rs b/crates/edit_prediction/src/metrics/kept_rate.rs index 4843c4465251756f47b9f1e82726c70bba6940c4..599280f5d9aea7964b9d99ab318356e9f4acfb49 100644 --- a/crates/edit_prediction/src/metrics/kept_rate.rs +++ b/crates/edit_prediction/src/metrics/kept_rate.rs @@ -13,12 +13,33 @@ pub enum TokenAnnotation { #[allow(dead_code)] #[derive(Debug, Clone)] pub struct KeptRateResult { - pub predicted_new_chars: usize, - pub final_new_chars: usize, + /// Characters newly introduced by the candidate + pub candidate_new_chars: usize, + /// Characters newly introduced by the reference + pub reference_new_chars: usize, + /// Characters from `base` that are deleted by the candidate. + pub candidate_deleted_chars: usize, + /// Characters from `base` that are deleted by the reference. + pub reference_deleted_chars: usize, + /// Candidate new characters that are also present in the reference. pub kept_chars: usize, + /// Base characters deleted by both the candidate and the reference. + pub correctly_deleted_chars: usize, + /// Candidate new characters that are not kept in the reference. pub discarded_chars: usize, + /// Candidate characters treated as unchanged context pub context_chars: usize, + /// Fraction of candidate edit characters that match the reference edit. + /// + /// This includes both kept newly introduced characters and correctly + /// deleted base characters. pub kept_rate: f64, + /// Fraction of reference edit characters covered by the candidate edit. + /// + /// This includes both kept newly introduced characters and correctly + /// deleted base characters. + pub recall_rate: f64, + /// Per-token classification for candidate tokens used by tests. 
#[cfg(test)] pub token_annotations: Vec, } @@ -188,89 +209,127 @@ fn analyze_masked_tokens<'a>(tokens: &[&'a str], mask: &[bool]) -> (Vec<&'a str> (unmasked_tokens, unmasked_chars, masked_chars) } -fn should_bail_for_dirty_final(base: &str, predicted: &str, final_text: &str) -> bool { - let predicted_delta_chars = predicted.len().abs_diff(base.len()); - let final_delta_chars = final_text.len().abs_diff(base.len()); - predicted_delta_chars.abs_diff(final_delta_chars) > MAX_DIRTY_LENGTH_DELTA_CHARS +fn count_unmasked_chars(tokens: &[&str], mask: &[bool]) -> usize { + tokens + .iter() + .zip(mask.iter()) + .filter_map(|(&token, &is_masked)| (!is_masked).then_some(token.len())) + .sum() +} + +fn should_bail_for_dirty_final(base: &str, candidate: &str, reference: &str) -> bool { + let candidate_delta_chars = candidate.len().abs_diff(base.len()); + let reference_delta_chars = reference.len().abs_diff(base.len()); + candidate_delta_chars.abs_diff(reference_delta_chars) > MAX_DIRTY_LENGTH_DELTA_CHARS } -pub fn compute_kept_rate(base: &str, predicted: &str, final_text: &str) -> KeptRateResult { - if base == predicted && predicted == final_text { - let predicted_tokens = tokenize(predicted); - let context_chars = predicted_tokens.iter().map(|token| token.len()).sum(); +pub fn compute_kept_rate(base: &str, candidate: &str, reference: &str) -> KeptRateResult { + if base == candidate && candidate == reference { + let candidate_tokens = tokenize(candidate); + let context_chars = candidate_tokens.iter().map(|token| token.len()).sum(); return KeptRateResult { - predicted_new_chars: 0, - final_new_chars: 0, + candidate_new_chars: 0, + reference_new_chars: 0, + candidate_deleted_chars: 0, + reference_deleted_chars: 0, kept_chars: 0, + correctly_deleted_chars: 0, discarded_chars: 0, context_chars, kept_rate: 1.0, + recall_rate: 1.0, #[cfg(test)] - token_annotations: vec![TokenAnnotation::Context; predicted_tokens.len()], + token_annotations: vec![TokenAnnotation::Context; 
candidate_tokens.len()], }; } - if should_bail_for_dirty_final(base, predicted, final_text) { - let predicted_new_chars = predicted.len().abs_diff(base.len()); - let final_new_chars = final_text.len().abs_diff(base.len()); + if should_bail_for_dirty_final(base, candidate, reference) { + let candidate_new_chars = candidate.len().abs_diff(base.len()); + let reference_new_chars = reference.len().abs_diff(base.len()); return KeptRateResult { - predicted_new_chars, - final_new_chars, + candidate_new_chars, + reference_new_chars, + candidate_deleted_chars: 0, + reference_deleted_chars: 0, kept_chars: 0, - discarded_chars: predicted_new_chars, + correctly_deleted_chars: 0, + discarded_chars: candidate_new_chars, context_chars: 0, kept_rate: 0.0, + recall_rate: 0.0, #[cfg(test)] - token_annotations: vec![TokenAnnotation::Discarded; tokenize(predicted).len()], + token_annotations: vec![TokenAnnotation::Discarded; tokenize(candidate).len()], }; } let base_tokens = tokenize(base); - let predicted_tokens = tokenize(predicted); - let final_tokens = tokenize(final_text); - - let pred_base_mask = lcs_keep_mask(&predicted_tokens, &base_tokens); - let (pred_final_mask, final_pred_mask) = lcs_keep_masks(&predicted_tokens, &final_tokens); - let context_mask: Vec = pred_base_mask + let candidate_tokens = tokenize(candidate); + let reference_tokens = tokenize(reference); + + let (candidate_base_mask, base_candidate_mask) = + lcs_keep_masks(&candidate_tokens, &base_tokens); + let (candidate_reference_mask, reference_candidate_mask) = + lcs_keep_masks(&candidate_tokens, &reference_tokens); + let context_mask: Vec = candidate_base_mask .iter() - .zip(pred_final_mask.iter()) - .map(|(&in_base, &in_final)| in_base && in_final) + .zip(candidate_reference_mask.iter()) + .map(|(&in_base, &in_reference)| in_base && in_reference) .collect(); - let (stripped_predicted, predicted_new_chars, context_chars) = - analyze_masked_tokens(&predicted_tokens, &context_mask); + let (stripped_candidate, 
candidate_new_chars, context_chars) = + analyze_masked_tokens(&candidate_tokens, &context_mask); - let final_base_mask = lcs_keep_mask(&final_tokens, &base_tokens); - let final_context_mask: Vec = final_base_mask + let (reference_base_mask, base_reference_mask) = + lcs_keep_masks(&reference_tokens, &base_tokens); + let reference_context_mask: Vec = reference_base_mask .iter() - .zip(final_pred_mask.iter()) - .map(|(&in_base, &in_predicted)| in_base && in_predicted) + .zip(reference_candidate_mask.iter()) + .map(|(&in_base, &in_candidate)| in_base && in_candidate) .collect(); - let (stripped_final, final_new_chars, _) = - analyze_masked_tokens(&final_tokens, &final_context_mask); + let (stripped_reference, reference_new_chars, _) = + analyze_masked_tokens(&reference_tokens, &reference_context_mask); - let keep_mask = lcs_keep_mask(&stripped_predicted, &stripped_final); + let keep_mask = lcs_keep_mask(&stripped_candidate, &stripped_reference); - let kept_chars: usize = stripped_predicted + let kept_chars: usize = stripped_candidate .iter() .zip(keep_mask.iter()) .filter_map(|(&token, &is_kept)| is_kept.then_some(token.len())) .sum(); - let discarded_chars = predicted_new_chars - kept_chars; + let candidate_deleted_chars = count_unmasked_chars(&base_tokens, &base_candidate_mask); + let reference_deleted_chars = count_unmasked_chars(&base_tokens, &base_reference_mask); + let correctly_deleted_chars: usize = base_tokens + .iter() + .zip(base_candidate_mask.iter().zip(base_reference_mask.iter())) + .filter_map(|(&token, (&in_candidate, &in_reference))| { + (!in_candidate && !in_reference).then_some(token.len()) + }) + .sum(); + + let discarded_chars = candidate_new_chars - kept_chars; + let matched_edit_chars = kept_chars + correctly_deleted_chars; + let candidate_edit_chars = candidate_new_chars + candidate_deleted_chars; + let reference_edit_chars = reference_new_chars + reference_deleted_chars; - let kept_rate = if predicted_new_chars == 0 { - if final_new_chars == 0 
{ 1.0 } else { 0.0 } + let kept_rate = if candidate_edit_chars == 0 { + if reference_edit_chars == 0 { 1.0 } else { 0.0 } } else { - kept_chars as f64 / predicted_new_chars as f64 + matched_edit_chars as f64 / candidate_edit_chars as f64 + }; + + let recall_rate = if reference_edit_chars == 0 { + if candidate_edit_chars == 0 { 1.0 } else { 0.0 } + } else { + matched_edit_chars as f64 / reference_edit_chars as f64 }; #[cfg(test)] let token_annotations = { - let mut token_annotations = Vec::with_capacity(predicted_tokens.len()); + let mut token_annotations = Vec::with_capacity(candidate_tokens.len()); let mut new_index = 0; - for (token_index, _token) in predicted_tokens.iter().enumerate() { + for (token_index, _token) in candidate_tokens.iter().enumerate() { if context_mask[token_index] { token_annotations.push(TokenAnnotation::Context); } else { @@ -288,12 +347,16 @@ pub fn compute_kept_rate(base: &str, predicted: &str, final_text: &str) -> KeptR }; KeptRateResult { - predicted_new_chars, - final_new_chars, + candidate_new_chars, + reference_new_chars, + candidate_deleted_chars, + reference_deleted_chars, kept_chars, + correctly_deleted_chars, discarded_chars, context_chars, kept_rate, + recall_rate, #[cfg(test)] token_annotations, } @@ -327,7 +390,8 @@ mod test_kept_rate { fn test_rate_extremes() { let no_change = compute_kept_rate("foo bar", "foo bar", "foo bar"); assert!((no_change.kept_rate - 1.0).abs() < 1e-6); - assert_eq!(no_change.predicted_new_chars, 0); + assert!((no_change.recall_rate - 1.0).abs() < 1e-6); + assert_eq!(no_change.candidate_new_chars, 0); assert!( no_change .token_annotations @@ -337,15 +401,17 @@ mod test_kept_rate { let accepted = compute_kept_rate("old", "new", "new"); assert!((accepted.kept_rate - 1.0).abs() < 1e-6); + assert!((accepted.recall_rate - 1.0).abs() < 1e-6); let discarded = compute_kept_rate("old", "old", "new"); assert!((discarded.kept_rate - 0.0).abs() < 1e-6); + assert!((discarded.recall_rate - 0.0).abs() < 1e-6); } 
#[test] fn test_pure_addition() { let kept = compute_kept_rate("", "brand new line\n", "brand new line\n"); - assert_eq!(kept.kept_chars, kept.predicted_new_chars); + assert_eq!(kept.kept_chars, kept.candidate_new_chars); assert!( kept.token_annotations .iter() @@ -354,26 +420,28 @@ mod test_kept_rate { let discarded = compute_kept_rate("", "brand new line\n", "something completely different\n"); - assert!(discarded.kept_chars < discarded.predicted_new_chars); + assert!(discarded.kept_chars < discarded.candidate_new_chars); } #[test] fn test_decoy_when_base_excluded() { let base = " decoy.when(mock_sync_hardware_api.sp()).then_return(SpeedStatus.IDLE)\n"; - let predicted = " decoy.when(mock_sync_module_hardware.speed_status).then_return(SpeedStatus.IDLE)\n"; - let final_text = " decoy.when(mock_sync_module_hardware.speed_status).then_return(SpeedStatus.IDLE)\n"; - let result = compute_kept_rate(base, predicted, final_text); + let candidate = " decoy.when(mock_sync_module_hardware.speed_status).then_return(SpeedStatus.IDLE)\n"; + let reference = " decoy.when(mock_sync_module_hardware.speed_status).then_return(SpeedStatus.IDLE)\n"; + let result = compute_kept_rate(base, candidate, reference); let expected_new = "mock_sync_module_hardware".len() + "speed_status".len(); - assert_eq!(result.predicted_new_chars, expected_new); + assert_eq!(result.candidate_new_chars, expected_new); + assert!(result.correctly_deleted_chars > 0); assert!((result.kept_rate - 1.0).abs() < 1e-6); + assert!((result.recall_rate - 1.0).abs() < 1e-6); } #[test] fn test_missing_deletion() { let base = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) {\n epr\n"; - let predicted = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) {\n epr\neprintln!(\"\");\n"; - let final_text = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) {\n eprintln!(\"\");\n"; - let result = compute_kept_rate(base, predicted, 
final_text); + let candidate = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) {\n epr\neprintln!(\"\");\n"; + let reference = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) {\n eprintln!(\"\");\n"; + let result = compute_kept_rate(base, candidate, reference); assert!( result.kept_rate < 0.85, "expected kept_rate < 0.85, got {}", @@ -385,7 +453,12 @@ mod test_kept_rate { #[test] fn test_empty_prediction() { let result = compute_kept_rate("old line\n", "", "new line\n"); - assert!((result.kept_rate - 0.0).abs() < 1e-6); + assert_eq!(result.candidate_new_chars, 0); + assert!(result.candidate_deleted_chars > 0); + assert!(result.correctly_deleted_chars > 0); + assert!(result.correctly_deleted_chars < result.candidate_deleted_chars); + assert!(result.kept_rate > 0.0 && result.kept_rate < 1.0); + assert!(result.recall_rate > 0.0 && result.recall_rate < 1.0); } #[test] @@ -399,24 +472,25 @@ mod test_kept_rate { #[test] fn test_bails_for_dirty_final() { let base = "fn example() {\n work();\n}\n"; - let predicted = "fn example() {\n work();\n predicted();\n}\n"; - let final_text = format!( + let candidate = "fn example() {\n work();\n predicted();\n}\n"; + let reference = format!( "fn example() {{\n work();\n {}\n}}\n", "settled();\n ".repeat(MAX_DIRTY_LENGTH_DELTA_CHARS / 8 + 64) ); - let result = compute_kept_rate(base, predicted, &final_text); + let result = compute_kept_rate(base, candidate, &reference); assert_eq!(result.kept_rate, 0.0); + assert_eq!(result.recall_rate, 0.0); assert_eq!(result.kept_chars, 0); - assert_eq!(result.discarded_chars, result.predicted_new_chars); + assert_eq!(result.discarded_chars, result.candidate_new_chars); } #[test] fn test_eprintln_token_alignment() { let base = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) {\n epr\n"; - let predicted = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) {\n 
eprintln!(\"hello world!\");\n"; - let final_text = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) {\n eprintln!(\"\");\n"; - let result = compute_kept_rate(base, predicted, final_text); + let candidate = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) {\n eprintln!(\"hello world!\");\n"; + let reference = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) {\n eprintln!(\"\");\n"; + let result = compute_kept_rate(base, candidate, reference); assert!(result.discarded_chars > 0); assert!(result.kept_chars > 0); assert!(result.kept_rate > 0.0 && result.kept_rate < 1.0); @@ -427,14 +501,18 @@ mod test_kept_rate { #[test] fn test_annotations_rename() { let base = " foo(old_name)\n"; - let predicted = " foo(new_name)\n"; - let final_text = " foo(new_name)\n"; - let result = compute_kept_rate(base, predicted, final_text); - - assert_eq!(result.predicted_new_chars, "new_name".len()); - assert_eq!(result.token_annotations.len(), tokenize(predicted).len()); - - for (&token, &annotation) in tokenize(predicted).iter().zip(&result.token_annotations) { + let candidate = " foo(new_name)\n"; + let reference = " foo(new_name)\n"; + let result = compute_kept_rate(base, candidate, reference); + + assert_eq!(result.candidate_new_chars, "new_name".len()); + assert_eq!(result.candidate_deleted_chars, "old_name".len()); + assert_eq!(result.reference_deleted_chars, "old_name".len()); + assert_eq!(result.correctly_deleted_chars, "old_name".len()); + assert!((result.recall_rate - 1.0).abs() < 1e-6); + assert_eq!(result.token_annotations.len(), tokenize(candidate).len()); + + for (&token, &annotation) in tokenize(candidate).iter().zip(&result.token_annotations) { if token == "new_name" { assert_eq!(annotation, TokenAnnotation::Kept); } else { @@ -446,12 +524,12 @@ mod test_kept_rate { #[test] fn test_annotations_eprintln_coloring() { let base = " fn select_next_edit(&mut self, _: &NextEdit, _: 
&mut Window, cx: &mut Context) {\n epr\n"; - let predicted = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) {\n eprintln!(\"hello world!\");\n"; - let final_text = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) {\n eprintln!(\"\");\n"; - let result = compute_kept_rate(base, predicted, final_text); - let predicted_tokens = tokenize(predicted); + let candidate = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) {\n eprintln!(\"hello world!\");\n"; + let reference = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) {\n eprintln!(\"\");\n"; + let result = compute_kept_rate(base, candidate, reference); + let candidate_tokens = tokenize(candidate); - let eprintln_index = predicted_tokens + let eprintln_index = candidate_tokens .iter() .position(|&token| token == "eprintln") .expect("eprintln token not found"); @@ -485,12 +563,15 @@ mod test_kept_rate { #[test] fn test_repetitive_tokens_remain_discarded() { let base = "foo + foo + foo + foo + foo\n".repeat(16); - let predicted = "foo + foo + prediction_token + foo + foo\n".repeat(16); - let final_text = "foo + foo + kept_token + foo + foo\n".repeat(16); - let result = compute_kept_rate(&base, &predicted, &final_text); + let candidate = "foo + foo + prediction_token + foo + foo\n".repeat(16); + let reference = "foo + foo + kept_token + foo + foo\n".repeat(16); + let result = compute_kept_rate(&base, &candidate, &reference); assert_eq!(result.kept_chars, 0); - assert_eq!(result.discarded_chars, result.predicted_new_chars); - assert_eq!(result.predicted_new_chars, "prediction_token".len() * 16); + assert_eq!(result.correctly_deleted_chars, "foo".len() * 16); + assert_eq!(result.discarded_chars, result.candidate_new_chars); + assert_eq!(result.candidate_new_chars, "prediction_token".len() * 16); + assert!(result.kept_rate > 0.0); + assert!(result.recall_rate > 0.0); } } diff --git 
a/crates/edit_prediction_cli/src/example.rs b/crates/edit_prediction_cli/src/example.rs index 3795a375d380e12557f1989a2b81dc77e1826c03..516f77ce2cbe61cde8daee07068277d5988737d4 100644 --- a/crates/edit_prediction_cli/src/example.rs +++ b/crates/edit_prediction_cli/src/example.rs @@ -187,6 +187,14 @@ pub struct ExampleScore { #[serde(default, skip_serializing_if = "Option::is_none")] pub kept_rate: Option, #[serde(default, skip_serializing_if = "Option::is_none")] + pub recall_rate: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub kept_chars: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub correctly_deleted_chars: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub discarded_chars: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] pub cumulative_logprob: Option, #[serde(default, skip_serializing_if = "Option::is_none")] pub avg_logprob: Option, diff --git a/crates/edit_prediction_cli/src/score.rs b/crates/edit_prediction_cli/src/score.rs index 1dace832d4998362610e860b386f4db49f965144..38329c8c3329fa3f26f5795b6a9bdcd02997b59f 100644 --- a/crates/edit_prediction_cli/src/score.rs +++ b/crates/edit_prediction_cli/src/score.rs @@ -85,6 +85,10 @@ pub async fn run_scoring( inserted_tokens: 0, deleted_tokens: 0, kept_rate: None, + recall_rate: None, + kept_chars: None, + correctly_deleted_chars: None, + discarded_chars: None, cumulative_logprob: None, avg_logprob: None, }; @@ -187,9 +191,20 @@ pub async fn run_scoring( prediction.actual_cursor.as_ref(), ); - let kept_rate = best_expected_text.map(|final_text| { - metrics::compute_kept_rate(original_text, &actual_text, final_text).kept_rate - }); + let (kept_rate, recall_rate, kept_chars, correctly_deleted_chars, discarded_chars) = + best_expected_text + .map(|reference_text| { + let result = + metrics::compute_kept_rate(original_text, &actual_text, reference_text); + ( + Some(result.kept_rate), + Some(result.recall_rate), + 
Some(result.kept_chars), + Some(result.correctly_deleted_chars), + Some(result.discarded_chars), + ) + }) + .unwrap_or((None, None, None, None, None)); scores.push(ExampleScore { delta_chr_f: best_delta_chr_f_metrics.score as f32, @@ -211,6 +226,10 @@ pub async fn run_scoring( inserted_tokens: token_changes.inserted_tokens, deleted_tokens: token_changes.deleted_tokens, kept_rate, + recall_rate, + kept_chars, + correctly_deleted_chars, + discarded_chars, cumulative_logprob: prediction.cumulative_logprob, avg_logprob: prediction.avg_logprob, }); @@ -277,6 +296,11 @@ pub fn print_report(examples: &[Example], verbose: bool) { let mut isolated_whitespace_count: usize = 0; let mut kept_rate_sum: f64 = 0.0; let mut kept_rate_count: usize = 0; + let mut kept_chars_total: usize = 0; + let mut correctly_deleted_chars_total: usize = 0; + let mut discarded_chars_total: usize = 0; + let mut recall_rate_sum: f64 = 0.0; + let mut recall_rate_count: usize = 0; let mut patch_inserted_tokens: Vec = Vec::new(); let mut patch_deleted_tokens: Vec = Vec::new(); let mut predictions_with_patch: usize = 0; @@ -369,11 +393,24 @@ pub fn print_report(examples: &[Example], verbose: bool) { isolated_whitespace_count += 1; } - // Accumulate kept rate metrics + // Accumulate kept and recall rate metrics if let Some(kr) = score.kept_rate { kept_rate_sum += kr; kept_rate_count += 1; } + if let Some(kept_chars) = score.kept_chars { + kept_chars_total += kept_chars; + } + if let Some(correctly_deleted_chars) = score.correctly_deleted_chars { + correctly_deleted_chars_total += correctly_deleted_chars; + } + if let Some(discarded_chars) = score.discarded_chars { + discarded_chars_total += discarded_chars; + } + if let Some(rr) = score.recall_rate { + recall_rate_sum += rr; + recall_rate_count += 1; + } // Accumulate token change metrics (only for predictions that produced a patch) let has_patch = example @@ -504,13 +541,24 @@ pub fn print_report(examples: &[Example], verbose: bool) { println!("Isolated 
whitespace changes: {}", isolated_ws_str); } - // Print kept rate metrics + // Print kept and recall rate metrics if kept_rate_count > 0 { let avg_kept_rate = kept_rate_sum / kept_rate_count as f64; println!( - "Kept rate: {:.1}% avg ({} evaluated)", + "Kept rate: {:.1}% avg ({} evaluated, kept chars: {}, correctly deleted chars: {}, discarded chars: {})", avg_kept_rate * 100.0, - kept_rate_count + kept_rate_count, + kept_chars_total, + correctly_deleted_chars_total, + discarded_chars_total + ); + } + if recall_rate_count > 0 { + let avg_recall_rate = recall_rate_sum / recall_rate_count as f64; + println!( + "Recall rate: {:.1}% avg ({} evaluated)", + avg_recall_rate * 100.0, + recall_rate_count ); } @@ -618,6 +666,14 @@ pub struct SummaryJson { pub isolated_whitespace_rate: Option, #[serde(skip_serializing_if = "Option::is_none")] pub avg_kept_rate: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub avg_recall_rate: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub total_kept_chars: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub total_correctly_deleted_chars: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub total_discarded_chars: Option, } pub fn compute_summary(examples: &[Example]) -> SummaryJson { @@ -645,6 +701,14 @@ pub fn compute_summary(examples: &[Example]) -> SummaryJson { let mut isolated_whitespace_count: usize = 0; let mut kept_rate_sum: f64 = 0.0; let mut kept_rate_count: usize = 0; + let mut kept_chars_total: usize = 0; + let mut kept_chars_count: usize = 0; + let mut correctly_deleted_chars_total: usize = 0; + let mut correctly_deleted_chars_count: usize = 0; + let mut discarded_chars_total: usize = 0; + let mut discarded_chars_count: usize = 0; + let mut recall_rate_sum: f64 = 0.0; + let mut recall_rate_count: usize = 0; for example in examples { for (score_idx, score) in example.score.iter().enumerate() { @@ -685,11 +749,27 @@ pub fn compute_summary(examples: &[Example]) -> 
SummaryJson { isolated_whitespace_count += 1; } - // Accumulate kept rate metrics + // Accumulate kept and recall rate metrics if let Some(kr) = score.kept_rate { kept_rate_sum += kr; kept_rate_count += 1; } + if let Some(kept_chars) = score.kept_chars { + kept_chars_total += kept_chars; + kept_chars_count += 1; + } + if let Some(correctly_deleted_chars) = score.correctly_deleted_chars { + correctly_deleted_chars_total += correctly_deleted_chars; + correctly_deleted_chars_count += 1; + } + if let Some(discarded_chars) = score.discarded_chars { + discarded_chars_total += discarded_chars; + discarded_chars_count += 1; + } + if let Some(rr) = score.recall_rate { + recall_rate_sum += rr; + recall_rate_count += 1; + } // Accumulate cursor metrics if let Some(exact_match) = score.cursor_exact_match { @@ -771,6 +851,30 @@ pub fn compute_summary(examples: &[Example]) -> SummaryJson { None }; + let avg_recall_rate = if recall_rate_count > 0 { + Some(recall_rate_sum / recall_rate_count as f64) + } else { + None + }; + + let total_kept_chars = if kept_chars_count > 0 { + Some(kept_chars_total) + } else { + None + }; + + let total_correctly_deleted_chars = if correctly_deleted_chars_count > 0 { + Some(correctly_deleted_chars_total) + } else { + None + }; + + let total_discarded_chars = if discarded_chars_count > 0 { + Some(discarded_chars_total) + } else { + None + }; + SummaryJson { total_examples: total_scores, avg_delta_chr_f, @@ -804,6 +908,10 @@ pub fn compute_summary(examples: &[Example]) -> SummaryJson { wrong_editable_region_rate, isolated_whitespace_rate, avg_kept_rate, + avg_recall_rate, + total_kept_chars, + total_correctly_deleted_chars, + total_discarded_chars, } } From 4c63fb1a101e2dc11617c729663659c66658b49c Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Fri, 10 Apr 2026 18:39:35 +0200 Subject: [PATCH 46/67] compliance: Reduce noisiness when checks were successful (#53515) This will ensure we do not post a second Slack message in case the first check was 
successful. We still _run_ the second check, but do not notify Release Notes: - N/A --- .github/workflows/compliance_check.yml | 5 +- .github/workflows/release.yml | 12 +- .../src/tasks/workflows/compliance_check.rs | 43 ++---- tooling/xtask/src/tasks/workflows/release.rs | 139 +++++++++++++----- tooling/xtask/src/tasks/workflows/vars.rs | 2 +- 5 files changed, 125 insertions(+), 76 deletions(-) diff --git a/.github/workflows/compliance_check.yml b/.github/workflows/compliance_check.yml index 7eb53f082dd6aa22e60248acac1fd18529db3b26..e74c38ec5d3701b936448a128ea8076932d83e91 100644 --- a/.github/workflows/compliance_check.yml +++ b/.github/workflows/compliance_check.yml @@ -34,13 +34,14 @@ jobs: echo "Checking compliance for $TAG" echo "tag=$TAG" >> "$GITHUB_OUTPUT" - id: run-compliance-check - name: compliance_check::scheduled_compliance_check::run_compliance_check + name: release::add_compliance_steps::run_compliance_check run: | cargo xtask compliance "$LATEST_TAG" --branch main --report-path "compliance-report-${GITHUB_REF_NAME}.md" env: - LATEST_TAG: ${{ steps.determine-version.outputs.tag }} GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }} GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} + LATEST_TAG: ${{ steps.determine-version.outputs.tag }} + continue-on-error: true - name: '@actions/upload-artifact compliance-report-${GITHUB_REF_NAME}.md' if: always() uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e40c9f68b5f79c19238fd08da0b73919734f8fa4..17178ab3054a7cddf1dccd2cd9bfa415a56755bd 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -307,7 +307,7 @@ jobs: cache: rust path: ~/.rustup - id: run-compliance-check - name: release::run_compliance_check + name: release::add_compliance_steps::run_compliance_check run: | cargo xtask compliance "$GITHUB_REF_NAME" --report-path "compliance-report-${GITHUB_REF_NAME}.md" env: @@ 
-328,7 +328,7 @@ jobs: STATUS="✅ Compliance check passed for $COMPLIANCE_TAG" MESSAGE=$(printf "%s\n\nReport: %s" "$STATUS" "$ARTIFACT_URL") else - STATUS="❌ Compliance check failed for $COMPLIANCE_TAG" + STATUS="❌ Preliminary compliance check failed (but this can still be fixed while the builds are running!) for $COMPLIANCE_TAG" MESSAGE=$(printf "%s\n\nReport: %s\nPRs needing review: %s" "$STATUS" "$ARTIFACT_URL" "https://github.com/zed-industries/zed/pulls?q=is%3Apr+is%3Aclosed+label%3A%22PR+state%3Aneeds+review%22") fi @@ -340,6 +340,8 @@ jobs: COMPLIANCE_OUTCOME: ${{ steps.run-compliance-check.outcome }} COMPLIANCE_TAG: ${{ github.ref_name }} ARTIFACT_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}#artifacts + outputs: + outcome: ${{ steps.run-compliance-check.outputs.outcome }} timeout-minutes: 60 bundle_linux_aarch64: needs: @@ -641,6 +643,7 @@ jobs: validate_release_assets: needs: - upload_release_assets + - compliance_check runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: release::validate_release_assets @@ -673,13 +676,12 @@ jobs: cache: rust path: ~/.rustup - id: run-compliance-check - name: release::run_compliance_check + name: release::add_compliance_steps::run_compliance_check run: | cargo xtask compliance "$GITHUB_REF_NAME" --report-path "compliance-report-${GITHUB_REF_NAME}.md" env: GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }} GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} - continue-on-error: true - name: '@actions/upload-artifact compliance-report-${GITHUB_REF_NAME}.md' if: always() uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 @@ -689,7 +691,7 @@ jobs: if-no-files-found: error overwrite: true - name: send_compliance_slack_notification - if: always() + if: failure() || needs.compliance_check.outputs.outcome != 'success' run: | if [ "$COMPLIANCE_OUTCOME" == "success" ]; then STATUS="✅ Compliance check passed for $COMPLIANCE_TAG" diff --git 
a/tooling/xtask/src/tasks/workflows/compliance_check.rs b/tooling/xtask/src/tasks/workflows/compliance_check.rs index 941e993403a45c37477f88048376faa8807d2d4f..5918bc476772ae1ffe4c0878bccce1e092a6ac7e 100644 --- a/tooling/xtask/src/tasks/workflows/compliance_check.rs +++ b/tooling/xtask/src/tasks/workflows/compliance_check.rs @@ -1,14 +1,10 @@ -use gh_workflow::{Event, Job, Run, Schedule, Step, Workflow, WorkflowDispatch}; -use indoc::formatdoc; +use gh_workflow::{Event, Job, Schedule, Workflow, WorkflowDispatch}; use crate::tasks::workflows::{ - release::{ - COMPLIANCE_REPORT_PATH, COMPLIANCE_STEP_ID, ComplianceContext, - add_compliance_notification_steps, - }, + release::{ComplianceContext, add_compliance_steps}, runners, steps::{self, CommonJobConditions, named}, - vars::{self, StepOutput}, + vars::StepOutput, }; pub fn compliance_check() -> Workflow { @@ -37,31 +33,20 @@ fn scheduled_compliance_check() -> steps::NamedJob { let tag_output = StepOutput::new(&determine_version_step, "tag"); - fn run_compliance_check(tag: &StepOutput) -> Step { - named::bash( - formatdoc! 
{r#" - cargo xtask compliance "$LATEST_TAG" --branch main --report-path "{COMPLIANCE_REPORT_PATH}" - "#, - } - ) - .id(COMPLIANCE_STEP_ID) - .add_env(("LATEST_TAG", tag.to_string())) - .add_env(("GITHUB_APP_ID", vars::ZED_ZIPPY_APP_ID)) - .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY)) - } - let job = Job::default() .with_repository_owner_guard() .runs_on(runners::LINUX_SMALL) .add_step(steps::checkout_repo().with_full_history()) .add_step(steps::cache_rust_dependencies_namespace()) - .add_step(determine_version_step) - .add_step(run_compliance_check(&tag_output)); - - named::job(add_compliance_notification_steps( - job, - ComplianceContext::Scheduled { - tag_source: tag_output, - }, - )) + .add_step(determine_version_step); + + named::job( + add_compliance_steps( + job, + ComplianceContext::Scheduled { + tag_source: tag_output, + }, + ) + .0, + ) } diff --git a/tooling/xtask/src/tasks/workflows/release.rs b/tooling/xtask/src/tasks/workflows/release.rs index 5a33cc911b5d940b2d52e93568c5f4e5c53a0898..d3ad064a6653c963fdc78d147ffbb3147c009c8d 100644 --- a/tooling/xtask/src/tasks/workflows/release.rs +++ b/tooling/xtask/src/tasks/workflows/release.rs @@ -6,7 +6,7 @@ use crate::tasks::workflows::{ run_tests, runners::{self, Arch, Platform}, steps::{self, FluentBuilder, NamedJob, dependant_job, named, release_job}, - vars::{self, StepOutput, assets}, + vars::{self, JobOutput, StepOutput, assets}, }; const CURRENT_ACTION_RUN_URL: &str = @@ -22,7 +22,7 @@ pub(crate) fn release() -> Workflow { let check_scripts = run_tests::check_scripts(); let create_draft_release = create_draft_release(); - let compliance = compliance_check(); + let (non_blocking_compliance_run, job_output) = compliance_check(); let bundle = ReleaseBundleJobs { linux_aarch64: bundle_linux( @@ -58,7 +58,10 @@ pub(crate) fn release() -> Workflow { }; let upload_release_assets = upload_release_assets(&[&create_draft_release], &bundle); - let validate_release_assets = 
validate_release_assets(&[&upload_release_assets]); + let validate_release_assets = validate_release_assets( + &[&upload_release_assets, &non_blocking_compliance_run], + job_output, + ); let auto_release_preview = auto_release_preview(&[&validate_release_assets]); @@ -93,7 +96,10 @@ pub(crate) fn release() -> Workflow { .add_job(windows_clippy.name, windows_clippy.job) .add_job(check_scripts.name, check_scripts.job) .add_job(create_draft_release.name, create_draft_release.job) - .add_job(compliance.name, compliance.job) + .add_job( + non_blocking_compliance_run.name, + non_blocking_compliance_run.job, + ) .map(|mut workflow| { for job in bundle.into_jobs() { workflow = workflow.add_job(job.name, job.job); @@ -156,25 +162,65 @@ pub(crate) const COMPLIANCE_STEP_ID: &str = "run-compliance-check"; const NEEDS_REVIEW_PULLS_URL: &str = "https://github.com/zed-industries/zed/pulls?q=is%3Apr+is%3Aclosed+label%3A%22PR+state%3Aneeds+review%22"; pub(crate) enum ComplianceContext { - Release, + Release { non_blocking_outcome: JobOutput }, ReleaseNonBlocking, Scheduled { tag_source: StepOutput }, } -pub(crate) fn add_compliance_notification_steps( +impl ComplianceContext { + fn tag_source(&self) -> Option<&StepOutput> { + match self { + ComplianceContext::Scheduled { tag_source } => Some(tag_source), + _ => None, + } + } +} + +pub(crate) fn add_compliance_steps( job: gh_workflow::Job, context: ComplianceContext, -) -> gh_workflow::Job { +) -> (gh_workflow::Job, StepOutput) { + fn run_compliance_check(context: &ComplianceContext) -> (Step, StepOutput) { + let job = named::bash( + formatdoc! 
{r#" + cargo xtask compliance {target} --report-path "{COMPLIANCE_REPORT_PATH}" + "#, + target = if context.tag_source().is_some() { r#""$LATEST_TAG" --branch main"# } else { r#""$GITHUB_REF_NAME""# }, + } + ) + .id(COMPLIANCE_STEP_ID) + .add_env(("GITHUB_APP_ID", vars::ZED_ZIPPY_APP_ID)) + .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY)) + .when_some(context.tag_source(), |step, tag_source| { + step.add_env(("LATEST_TAG", tag_source.to_string())) + }) + .when( + matches!( + context, + ComplianceContext::Scheduled { .. } | ComplianceContext::ReleaseNonBlocking + ), + |step| step.continue_on_error(true), + ); + + let result = StepOutput::new_unchecked(&job, "outcome"); + (job, result) + } + let upload_step = upload_artifact(COMPLIANCE_REPORT_PATH) .if_condition(Expression::new("always()")) - .when(matches!(context, ComplianceContext::Release), |step| { - step.add_with(("overwrite", true)) - }); + .when( + matches!(context, ComplianceContext::Release { .. }), + |step| step.add_with(("overwrite", true)), + ); let (success_prefix, failure_prefix) = match context { - ComplianceContext::Release | ComplianceContext::ReleaseNonBlocking => { + ComplianceContext::Release { .. } => { ("✅ Compliance check passed", "❌ Compliance check failed") } + ComplianceContext::ReleaseNonBlocking => ( + "✅ Compliance check passed", + "❌ Preliminary compliance check failed (but this can still be fixed while the builds are running!)", + ), ComplianceContext::Scheduled { .. 
} => ( "✅ Scheduled compliance check passed", "⚠️ Scheduled compliance check failed", @@ -198,7 +244,17 @@ pub(crate) fn add_compliance_notification_steps( let notification_step = Step::new("send_compliance_slack_notification") .run(&script) - .if_condition(Expression::new("always()")) + .if_condition(match &context { + ComplianceContext::Release { + non_blocking_outcome, + } => Expression::new(format!( + "failure() || {prior_outcome} != 'success'", + prior_outcome = non_blocking_outcome.expr() + )), + ComplianceContext::Scheduled { .. } | ComplianceContext::ReleaseNonBlocking => { + Expression::new("always()") + } + }) .add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES)) .add_env(( "COMPLIANCE_OUTCOME", @@ -206,8 +262,8 @@ pub(crate) fn add_compliance_notification_steps( )) .add_env(( "COMPLIANCE_TAG", - match context { - ComplianceContext::Release | ComplianceContext::ReleaseNonBlocking => { + match &context { + ComplianceContext::Release { .. } | ComplianceContext::ReleaseNonBlocking => { Context::github().ref_name().to_string() } ComplianceContext::Scheduled { tag_source } => tag_source.to_string(), @@ -218,21 +274,21 @@ pub(crate) fn add_compliance_notification_steps( format!("{CURRENT_ACTION_RUN_URL}#artifacts"), )); - job.add_step(upload_step).add_step(notification_step) -} + let (compliance_step, check_result) = run_compliance_check(&context); -fn run_compliance_check() -> Step { - named::bash(formatdoc! 
{r#" - cargo xtask compliance "$GITHUB_REF_NAME" --report-path "{COMPLIANCE_REPORT_PATH}" - "#, - }) - .id(COMPLIANCE_STEP_ID) - .add_env(("GITHUB_APP_ID", vars::ZED_ZIPPY_APP_ID)) - .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY)) - .continue_on_error(true) + ( + job.add_step(compliance_step) + .add_step(upload_step) + .add_step(notification_step) + .when( + matches!(context, ComplianceContext::ReleaseNonBlocking), + |step| step.outputs([("outcome".to_string(), check_result.to_string())]), + ), + check_result, + ) } -fn compliance_check() -> NamedJob { +fn compliance_check() -> (NamedJob, JobOutput) { let job = release_job(&[]) .runs_on(runners::LINUX_SMALL) .add_step( @@ -240,16 +296,17 @@ fn compliance_check() -> NamedJob { .with_full_history() .with_ref(Context::github().ref_()), ) - .add_step(steps::cache_rust_dependencies_namespace()) - .add_step(run_compliance_check()); + .add_step(steps::cache_rust_dependencies_namespace()); + + let (compliance_job, check_result) = + add_compliance_steps(job, ComplianceContext::ReleaseNonBlocking); + let compliance_job = named::job(compliance_job); + let check_result = check_result.as_job_output(&compliance_job); - named::job(add_compliance_notification_steps( - job, - ComplianceContext::ReleaseNonBlocking, - )) + (compliance_job, check_result) } -fn validate_release_assets(deps: &[&NamedJob]) -> NamedJob { +fn validate_release_assets(deps: &[&NamedJob], context_check_result: JobOutput) -> NamedJob { let expected_assets: Vec = assets::all().iter().map(|a| format!("\"{a}\"")).collect(); let expected_assets_json = format!("[{}]", expected_assets.join(", ")); @@ -279,13 +336,17 @@ fn validate_release_assets(deps: &[&NamedJob]) -> NamedJob { .with_full_history() .with_ref(Context::github().ref_()), ) - .add_step(steps::cache_rust_dependencies_namespace()) - .add_step(run_compliance_check()); + .add_step(steps::cache_rust_dependencies_namespace()); - named::job(add_compliance_notification_steps( - job, - 
ComplianceContext::Release, - )) + named::job( + add_compliance_steps( + job, + ComplianceContext::Release { + non_blocking_outcome: context_check_result, + }, + ) + .0, + ) } fn auto_release_preview(deps: &[&NamedJob]) -> NamedJob { diff --git a/tooling/xtask/src/tasks/workflows/vars.rs b/tooling/xtask/src/tasks/workflows/vars.rs index b3f8bdf56e9bb0f93f81992fbc61dab2b9754e63..8afcad7461f936c081111eeb35097709aa0eb13f 100644 --- a/tooling/xtask/src/tasks/workflows/vars.rs +++ b/tooling/xtask/src/tasks/workflows/vars.rs @@ -167,7 +167,7 @@ impl StepOutput { .run .as_ref() .is_none_or(|run_command| run_command.contains(name)), - "Step Output name {name} must occur at least once in run command with ID {step_id}!" + "Step output with name '{name}' must occur at least once in run command with ID {step_id}!" ); Self { name, step_id } From 7f73e38452353b3ac2a9b6e52ba7ca517e6a5d97 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Fri, 10 Apr 2026 19:14:06 +0200 Subject: [PATCH 47/67] ci: Use Zed Zippy configuration for creating cherry-pick branch (#53640) We hope this fixes an issue with permissions Release Notes: - N/A --- .github/workflows/cherry_pick.yml | 6 ++++-- tooling/xtask/src/tasks/workflows/cherry_pick.rs | 12 ++++++++++-- 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/.github/workflows/cherry_pick.yml b/.github/workflows/cherry_pick.yml index ed0800dc5bbf1ec59182e9d24753e9b5112c4d13..b24f8a133be8f3c2be0191f57d8d587d53ea21d0 100644 --- a/.github/workflows/cherry_pick.yml +++ b/.github/workflows/cherry_pick.yml @@ -44,8 +44,10 @@ jobs: BRANCH: ${{ inputs.branch }} COMMIT: ${{ inputs.commit }} CHANNEL: ${{ inputs.channel }} - GIT_COMMITTER_NAME: Zed Zippy - GIT_COMMITTER_EMAIL: hi@zed.dev + GIT_AUTHOR_NAME: zed-zippy[bot] + GIT_AUTHOR_EMAIL: <234243425+zed-zippy[bot]@users.noreply.github.com> + GIT_COMMITTER_NAME: zed-zippy[bot] + GIT_COMMITTER_EMAIL: <234243425+zed-zippy[bot]@users.noreply.github.com> GITHUB_TOKEN: ${{ 
steps.generate-token.outputs.token }} defaults: run: diff --git a/tooling/xtask/src/tasks/workflows/cherry_pick.rs b/tooling/xtask/src/tasks/workflows/cherry_pick.rs index 46fb41094eb9fcea3cf40c4a289217f16855483b..fe5a966156d16ba0598e0af3671a504d70d61df7 100644 --- a/tooling/xtask/src/tasks/workflows/cherry_pick.rs +++ b/tooling/xtask/src/tasks/workflows/cherry_pick.rs @@ -39,8 +39,16 @@ fn run_cherry_pick( .add_env(("BRANCH", branch.to_string())) .add_env(("COMMIT", commit.to_string())) .add_env(("CHANNEL", channel.to_string())) - .add_env(("GIT_COMMITTER_NAME", "Zed Zippy")) - .add_env(("GIT_COMMITTER_EMAIL", "hi@zed.dev")) + .add_env(("GIT_AUTHOR_NAME", "zed-zippy[bot]")) + .add_env(( + "GIT_AUTHOR_EMAIL", + "<234243425+zed-zippy[bot]@users.noreply.github.com>", + )) + .add_env(("GIT_COMMITTER_NAME", "zed-zippy[bot]")) + .add_env(( + "GIT_COMMITTER_EMAIL", + "<234243425+zed-zippy[bot]@users.noreply.github.com>", + )) .add_env(("GITHUB_TOKEN", token)) } From eb76db4050b411829276ddaf52a886c466457d3c Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 10 Apr 2026 13:27:04 -0400 Subject: [PATCH 48/67] edit_prediction: Disable training data collection based on organization configuration (#53639) This PR makes it so we disable training data collection for Edit Prediction based on the organization's configuration. Closes CLO-641. 
Release Notes: - N/A --- .../src/zed_edit_prediction_delegate.rs | 42 +++++++++++++++++++ .../src/edit_prediction_types.rs | 9 ++++ .../src/edit_prediction_button.rs | 2 +- 3 files changed, 52 insertions(+), 1 deletion(-) diff --git a/crates/edit_prediction/src/zed_edit_prediction_delegate.rs b/crates/edit_prediction/src/zed_edit_prediction_delegate.rs index 1a574e9389715ce888f8b8c5ec8be921ceab4a38..f0fa37c4d6f1a97aa4b1b96b3a6885ed60cc801a 100644 --- a/crates/edit_prediction/src/zed_edit_prediction_delegate.rs +++ b/crates/edit_prediction/src/zed_edit_prediction_delegate.rs @@ -6,6 +6,7 @@ use edit_prediction_types::{ DataCollectionState, EditPredictionDelegate, EditPredictionDiscardReason, EditPredictionIconSet, SuggestionDisplayType, }; +use feature_flags::FeatureFlagAppExt; use gpui::{App, Entity, prelude::*}; use language::{Buffer, ToPoint as _}; use project::Project; @@ -73,6 +74,24 @@ impl EditPredictionDelegate for ZedEditPredictionDelegate { self.store .read(cx) .is_file_open_source(&self.project, file, cx); + + if let Some(organization_configuration) = self + .store + .read(cx) + .user_store + .read(cx) + .current_organization_configuration() + { + if !organization_configuration + .edit_prediction + .is_feedback_enabled + { + return DataCollectionState::Disabled { + is_project_open_source, + }; + } + } + if self.store.read(cx).data_collection_choice.is_enabled(cx) { DataCollectionState::Enabled { is_project_open_source, @@ -89,6 +108,29 @@ impl EditPredictionDelegate for ZedEditPredictionDelegate { } } + fn can_toggle_data_collection(&self, cx: &App) -> bool { + if cx.is_staff() { + return false; + } + + if let Some(organization_configuration) = self + .store + .read(cx) + .user_store + .read(cx) + .current_organization_configuration() + { + if !organization_configuration + .edit_prediction + .is_feedback_enabled + { + return false; + } + } + + true + } + fn toggle_data_collection(&mut self, cx: &mut App) { self.store.update(cx, |store, cx| { 
store.toggle_data_collection_choice(cx); diff --git a/crates/edit_prediction_types/src/edit_prediction_types.rs b/crates/edit_prediction_types/src/edit_prediction_types.rs index eb4b7b2292e907dc5d630873dbb4d1abb9edbeb5..31caf628544ade8709553e4255dc6d5f2e6a8f00 100644 --- a/crates/edit_prediction_types/src/edit_prediction_types.rs +++ b/crates/edit_prediction_types/src/edit_prediction_types.rs @@ -168,6 +168,10 @@ pub trait EditPredictionDelegate: 'static + Sized { None } + fn can_toggle_data_collection(&self, _cx: &App) -> bool { + true + } + fn toggle_data_collection(&mut self, _cx: &mut App) {} fn is_enabled( &self, @@ -209,6 +213,7 @@ pub trait EditPredictionDelegateHandle { fn icons(&self, cx: &App) -> EditPredictionIconSet; fn data_collection_state(&self, cx: &App) -> DataCollectionState; fn usage(&self, cx: &App) -> Option; + fn can_toggle_data_collection(&self, cx: &App) -> bool; fn toggle_data_collection(&self, cx: &mut App); fn is_refreshing(&self, cx: &App) -> bool; fn refresh( @@ -265,6 +270,10 @@ where self.read(cx).usage(cx) } + fn can_toggle_data_collection(&self, cx: &App) -> bool { + self.read(cx).can_toggle_data_collection(cx) + } + fn toggle_data_collection(&self, cx: &mut App) { self.update(cx, |this, cx| this.toggle_data_collection(cx)) } diff --git a/crates/edit_prediction_ui/src/edit_prediction_button.rs b/crates/edit_prediction_ui/src/edit_prediction_button.rs index e7aff1271f0505d9c87899cc8b555e377ca3fbd0..bf915409480f1ab56ef7b2c002c467c02c1095d3 100644 --- a/crates/edit_prediction_ui/src/edit_prediction_button.rs +++ b/crates/edit_prediction_ui/src/edit_prediction_button.rs @@ -790,7 +790,7 @@ impl EditPredictionButton { .toggleable(IconPosition::Start, data_collection.is_enabled()) .icon(icon_name) .icon_color(icon_color) - .disabled(cx.is_staff()) + .disabled(!provider.can_toggle_data_collection(cx)) .documentation_aside(DocumentationSide::Left, move |cx| { let (msg, label_color, icon_name, icon_color) = match (is_open_source, 
is_collecting) { (true, true) => ( From 6143af13a3a4eb4ea84f545af34ea4d3cf54207b Mon Sep 17 00:00:00 2001 From: KyleBarton Date: Fri, 10 Apr 2026 10:56:48 -0700 Subject: [PATCH 49/67] Use stored home_dir in Docker struct, not literal $HOME (#53642) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #ISSUE Release Notes: - N/A --- crates/remote/src/transport/docker.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/remote/src/transport/docker.rs b/crates/remote/src/transport/docker.rs index 6322cd9193d383cfcd3e9ff5cb93670bcd136023..872d1d460ec82cb3eab2233a0af3161bbf2ec1b2 100644 --- a/crates/remote/src/transport/docker.rs +++ b/crates/remote/src/transport/docker.rs @@ -761,7 +761,8 @@ impl RemoteConnection for DockerExecConnection { const TILDE_PREFIX: &'static str = "~/"; if working_dir.starts_with(TILDE_PREFIX) { let working_dir = working_dir.trim_start_matches("~").trim_start_matches("/"); - parsed_working_dir = Some(format!("$HOME/{working_dir}")); + parsed_working_dir = + Some(format!("{}/{}", self.remote_dir_for_server, working_dir)); } else { parsed_working_dir = Some(working_dir); } From b15969086e30765465dab6f76c327baa4940e9a3 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 10 Apr 2026 12:12:42 -0700 Subject: [PATCH 50/67] Set active repository when picking a root folder in recent project menu (#53645) This changes the behavior of the recent project picker in multi-folder projects, so that when you select a folder that is a repository, it sets the active repository in the Git panel as well. 
Release Notes: - N/A --- crates/git_ui/src/branch_picker.rs | 4 +- crates/git_ui/src/git_picker.rs | 2 +- crates/git_ui/src/git_ui.rs | 27 --------- crates/git_ui/src/project_diff.rs | 19 ++++--- crates/project/src/git_store.rs | 43 ++++++++++++-- crates/recent_projects/src/recent_projects.rs | 27 ++++----- crates/title_bar/src/title_bar.rs | 56 ++----------------- crates/workspace/src/workspace.rs | 23 -------- 8 files changed, 72 insertions(+), 129 deletions(-) diff --git a/crates/git_ui/src/branch_picker.rs b/crates/git_ui/src/branch_picker.rs index f46eb08ef9caf35b3e8fab1ce65c449f76ea2ed4..7269a14ab3c0931e71feb83673172b301c6f1087 100644 --- a/crates/git_ui/src/branch_picker.rs +++ b/crates/git_ui/src/branch_picker.rs @@ -22,7 +22,7 @@ use util::ResultExt; use workspace::notifications::DetachAndPromptErr; use workspace::{ModalView, Workspace}; -use crate::{branch_picker, git_panel::show_error_toast, resolve_active_repository}; +use crate::{branch_picker, git_panel::show_error_toast}; actions!( branch_picker, @@ -59,7 +59,7 @@ pub fn open( cx: &mut Context, ) { let workspace_handle = workspace.weak_handle(); - let repository = resolve_active_repository(workspace, cx); + let repository = workspace.project().read(cx).active_repository(cx); workspace.toggle_modal(window, cx, |window, cx| { BranchList::new( diff --git a/crates/git_ui/src/git_picker.rs b/crates/git_ui/src/git_picker.rs index bf9d122a7ec16b11c56fc45f59ff8c5f85f7fded..1a1ea84aaa16ba0a015d3079e4ff647e4d05c917 100644 --- a/crates/git_ui/src/git_picker.rs +++ b/crates/git_ui/src/git_picker.rs @@ -582,7 +582,7 @@ fn open_with_tab( cx: &mut Context, ) { let workspace_handle = workspace.weak_handle(); - let repository = crate::resolve_active_repository(workspace, cx); + let repository = workspace.project().read(cx).active_repository(cx); workspace.toggle_modal(window, cx, |window, cx| { GitPicker::new(workspace_handle, repository, tab, rems(34.), window, cx) diff --git a/crates/git_ui/src/git_ui.rs 
b/crates/git_ui/src/git_ui.rs index 7d73760e34d1b2923a247f71b04fc8b5218f380b..1e7391178d2473a173a1503b4f2c724191c06a60 100644 --- a/crates/git_ui/src/git_ui.rs +++ b/crates/git_ui/src/git_ui.rs @@ -281,33 +281,6 @@ fn open_modified_files( } } -/// Resolves the repository for git operations, respecting the workspace's -/// active worktree override from the project dropdown. -pub fn resolve_active_repository(workspace: &Workspace, cx: &App) -> Option> { - let project = workspace.project().read(cx); - workspace - .active_worktree_override() - .and_then(|override_id| { - project - .worktree_for_id(override_id, cx) - .and_then(|worktree| { - let worktree_abs_path = worktree.read(cx).abs_path(); - let git_store = project.git_store().read(cx); - git_store - .repositories() - .values() - .filter(|repo| { - let repo_path = &repo.read(cx).work_directory_abs_path; - *repo_path == worktree_abs_path - || worktree_abs_path.starts_with(repo_path.as_ref()) - }) - .max_by_key(|repo| repo.read(cx).work_directory_abs_path.as_os_str().len()) - .cloned() - }) - }) - .or_else(|| project.active_repository(cx)) -} - pub fn git_status_icon(status: FileStatus) -> impl IntoElement { GitStatusIcon::new(status) } diff --git a/crates/git_ui/src/project_diff.rs b/crates/git_ui/src/project_diff.rs index 8fa4680593a7565c84efd7503f6cf9d188d3be35..a0708cae36cafd733c711df5bbab93af508510c1 100644 --- a/crates/git_ui/src/project_diff.rs +++ b/crates/git_ui/src/project_diff.rs @@ -2,7 +2,6 @@ use crate::{ conflict_view::ConflictAddon, git_panel::{GitPanel, GitPanelAddon, GitStatusEntry}, git_panel_settings::GitPanelSettings, - resolve_active_repository, }; use agent_settings::AgentSettings; use anyhow::{Context as _, Result, anyhow}; @@ -205,7 +204,7 @@ impl ProjectDiff { "Action" } ); - let intended_repo = resolve_active_repository(workspace, cx); + let intended_repo = workspace.project().read(cx).active_repository(cx); let existing = workspace .items_of_type::(cx) @@ -2708,7 +2707,7 @@ mod tests { } 
#[gpui::test] - async fn test_deploy_at_respects_worktree_override(cx: &mut TestAppContext) { + async fn test_deploy_at_respects_active_repository_selection(cx: &mut TestAppContext) { init_test(cx); let fs = FakeFs::new(cx.executor()); @@ -2759,9 +2758,12 @@ mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); cx.run_until_parked(); - // Select project A via the dropdown override and open the diff. + // Select project A explicitly and open the diff. workspace.update(cx, |workspace, cx| { - workspace.set_active_worktree_override(Some(worktree_a_id), cx); + let git_store = workspace.project().read(cx).git_store().clone(); + git_store.update(cx, |git_store, cx| { + git_store.set_active_repo_for_worktree(worktree_a_id, cx); + }); }); cx.focus(&workspace); cx.update(|window, cx| { @@ -2776,9 +2778,12 @@ mod tests { assert_eq!(paths_a.len(), 1); assert_eq!(*paths_a[0], *"a.txt"); - // Switch the override to project B and re-run the diff action. + // Switch the explicit active repository to project B and re-run the diff action. workspace.update(cx, |workspace, cx| { - workspace.set_active_worktree_override(Some(worktree_b_id), cx); + let git_store = workspace.project().read(cx).git_store().clone(); + git_store.update(cx, |git_store, cx| { + git_store.set_active_repo_for_worktree(worktree_b_id, cx); + }); }); cx.focus(&workspace); cx.update(|window, cx| { diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 7f24282dda619399701a740d335ece7c76b63683..a00e2dcfc860d468795e15ad9d78df87d75f15e8 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -594,16 +594,49 @@ impl GitStore { pub fn is_local(&self) -> bool { matches!(self.state, GitStoreState::Local { .. 
}) } + + fn set_active_repo_id(&mut self, repo_id: RepositoryId, cx: &mut Context) { + if self.active_repo_id != Some(repo_id) { + self.active_repo_id = Some(repo_id); + cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id))); + } + } + pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context) { if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) { - let id = repo.read(cx).id; - if self.active_repo_id != Some(id) { - self.active_repo_id = Some(id); - cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id))); - } + self.set_active_repo_id(repo.read(cx).id, cx); } } + pub fn set_active_repo_for_worktree( + &mut self, + worktree_id: WorktreeId, + cx: &mut Context, + ) { + let Some(worktree) = self + .worktree_store + .read(cx) + .worktree_for_id(worktree_id, cx) + else { + return; + }; + let worktree_abs_path = worktree.read(cx).abs_path(); + let Some(repo_id) = self + .repositories + .values() + .filter(|repo| { + let repo_path = &repo.read(cx).work_directory_abs_path; + *repo_path == worktree_abs_path || worktree_abs_path.starts_with(repo_path.as_ref()) + }) + .max_by_key(|repo| repo.read(cx).work_directory_abs_path.as_os_str().len()) + .map(|repo| repo.read(cx).id) + else { + return; + }; + + self.set_active_repo_id(repo_id, cx); + } + pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context) { match &mut self.state { GitStoreState::Remote { diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index 9a6015ba843b06dfe678fee1b5de2fac38295849..c90f2f69154f171dd5023697fbbf757c013f9b84 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -168,22 +168,20 @@ fn get_open_folders(workspace: &Workspace, cx: &App) -> Vec { return Vec::new(); } - let active_worktree_id = workspace.active_worktree_override().or_else(|| { - if let Some(repo) = 
project.active_repository(cx) { - let repo = repo.read(cx); - let repo_path = &repo.work_directory_abs_path; - for worktree in project.visible_worktrees(cx) { - let worktree_path = worktree.read(cx).abs_path(); - if worktree_path == *repo_path || worktree_path.starts_with(repo_path.as_ref()) { - return Some(worktree.read(cx).id()); - } - } - } + let active_worktree_id = if let Some(repo) = project.active_repository(cx) { + let repo = repo.read(cx); + let repo_path = &repo.work_directory_abs_path; + project.visible_worktrees(cx).find_map(|worktree| { + let worktree_path = worktree.read(cx).abs_path(); + (worktree_path == *repo_path || worktree_path.starts_with(repo_path.as_ref())) + .then(|| worktree.read(cx).id()) + }) + } else { project .visible_worktrees(cx) .next() .map(|wt| wt.read(cx).id()) - }); + }; let mut all_paths: Vec = visible_worktrees .iter() @@ -1118,7 +1116,10 @@ impl PickerDelegate for RecentProjectsDelegate { let worktree_id = folder.worktree_id; if let Some(workspace) = self.workspace.upgrade() { workspace.update(cx, |workspace, cx| { - workspace.set_active_worktree_override(Some(worktree_id), cx); + let git_store = workspace.project().read(cx).git_store().clone(); + git_store.update(cx, |git_store, cx| { + git_store.set_active_repo_for_worktree(worktree_id, cx); + }); }); } cx.emit(DismissEvent); diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index 34cad6f9540b1f8ba17aca08176b6950cdc7febe..f47817b24509e7c99ee98fd1877e48361204e7cc 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -33,7 +33,6 @@ use onboarding_banner::OnboardingBanner; use project::{Project, git_store::GitStoreEvent, trusted_worktrees::TrustedWorktrees}; use remote::RemoteConnectionOptions; use settings::Settings; -use settings::WorktreeId; use std::sync::Arc; use std::time::Duration; @@ -377,27 +376,13 @@ impl TitleBar { cx.notify() }), ); - subscriptions.push( - cx.subscribe(&project, |this, _, event: 
&project::Event, cx| { - if let project::Event::BufferEdited = event { - // Clear override when user types in any editor, - // so the title bar reflects the project they're actually working in - this.clear_active_worktree_override(cx); - cx.notify(); - } - }), - ); + subscriptions.push(cx.observe(&active_call, |this, _, cx| this.active_call_changed(cx))); subscriptions.push(cx.observe_window_activation(window, Self::window_activation_changed)); subscriptions.push( - cx.subscribe(&git_store, move |this, _, event, cx| match event { - GitStoreEvent::ActiveRepositoryChanged(_) => { - // Clear override when focus-derived active repo changes - // (meaning the user focused a file from a different project) - this.clear_active_worktree_override(cx); - cx.notify(); - } - GitStoreEvent::RepositoryUpdated(_, _, true) => { + cx.subscribe(&git_store, move |_, _, event, cx| match event { + GitStoreEvent::ActiveRepositoryChanged(_) + | GitStoreEvent::RepositoryUpdated(_, _, true) => { cx.notify(); } _ => {} @@ -451,20 +436,11 @@ impl TitleBar { } /// Returns the worktree to display in the title bar. 
- /// - If there's an override set on the workspace, use that (if still valid) - /// - Otherwise, derive from the active repository + /// - Prefer the worktree owning the project's active repository /// - Fall back to the first visible worktree pub fn effective_active_worktree(&self, cx: &App) -> Option> { let project = self.project.read(cx); - if let Some(workspace) = self.workspace.upgrade() { - if let Some(override_id) = workspace.read(cx).active_worktree_override() { - if let Some(worktree) = project.worktree_for_id(override_id, cx) { - return Some(worktree); - } - } - } - if let Some(repo) = project.active_repository(cx) { let repo = repo.read(cx); let repo_path = &repo.work_directory_abs_path; @@ -480,28 +456,6 @@ impl TitleBar { project.visible_worktrees(cx).next() } - pub fn set_active_worktree_override( - &mut self, - worktree_id: WorktreeId, - cx: &mut Context, - ) { - if let Some(workspace) = self.workspace.upgrade() { - workspace.update(cx, |workspace, cx| { - workspace.set_active_worktree_override(Some(worktree_id), cx); - }); - } - cx.notify(); - } - - fn clear_active_worktree_override(&mut self, cx: &mut Context) { - if let Some(workspace) = self.workspace.upgrade() { - workspace.update(cx, |workspace, cx| { - workspace.clear_active_worktree_override(cx); - }); - } - cx.notify(); - } - fn get_repository_for_worktree( &self, worktree: &Entity, diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index d40b7abae0c036a5cdd227ec8a547bd3c10b262c..b25e9c4128b7ecfa428f328c59d3344ed634b293 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -1325,7 +1325,6 @@ pub struct Workspace { bottom_dock: Entity, right_dock: Entity, panes: Vec>, - active_worktree_override: Option, panes_by_item: HashMap>, active_pane: Entity, last_active_center_pane: Option>, @@ -1758,7 +1757,6 @@ impl Workspace { modal_layer, toast_layer, titlebar_item: None, - active_worktree_override: None, notifications: 
Notifications::default(), suppressed_notifications: HashSet::default(), left_dock, @@ -2951,27 +2949,6 @@ impl Workspace { self.titlebar_item.clone() } - /// Returns the worktree override set by the user (e.g., via the project dropdown). - /// When set, git-related operations should use this worktree instead of deriving - /// the active worktree from the focused file. - pub fn active_worktree_override(&self) -> Option { - self.active_worktree_override - } - - pub fn set_active_worktree_override( - &mut self, - worktree_id: Option, - cx: &mut Context, - ) { - self.active_worktree_override = worktree_id; - cx.notify(); - } - - pub fn clear_active_worktree_override(&mut self, cx: &mut Context) { - self.active_worktree_override = None; - cx.notify(); - } - /// Call the given callback with a workspace whose project is local or remote via WSL (allowing host access). /// /// If the given workspace has a local project, then it will be passed From 4e0022cbc0596678837ce10d66f67404a592e68c Mon Sep 17 00:00:00 2001 From: Katie Geer Date: Fri, 10 Apr 2026 14:17:28 -0700 Subject: [PATCH 51/67] agent_ui: Replace raw error messages with user-friendly copy in the agent panel (#53099) Replaces raw provider error strings in the agent panel with specific, user-friendly error callouts. Each error now has a clear title, actionable copy, and appropriate buttons instead of the generic "An Error Happened" fallback. Error variants added: Variant | Title | Body | Trigger | |---|---|---|---| | `RateLimitExceeded` | Rate Limit Reached | {Provider}'s rate limit was reached. Zed will retry automatically. You can also wait a moment and try again. | Provider rate limit exhausted after retries | | `ServerOverloaded` | Provider Unavailable | {Provider}'s servers are temporarily unavailable. Zed will retry automatically. If the problem persists, check the provider's status page. 
| Provider server overloaded or internal server error | | `PromptTooLarge` | Context Too Large | This conversation is too long for the model's context window. Start a new thread or remove some attached files to continue. | Conversation exceeds model's context window | | `NoApiKey` | API Key Missing | No API key is configured for {Provider}. Add your key via the Agent Panel settings to continue. | No API key configured for a direct provider | | `StreamError` | Connection Interrupted | The connection to {Provider}'s API was interrupted. Zed will retry automatically. If the problem persists, check your network connection. | Stream dropped or I/O error during generation | | `InvalidApiKey` | Invalid API Key | The API key for {Provider} is invalid or has expired. Update your key via the Agent Panel settings to continue. | API key present but invalid or expired | | `PermissionDenied` | Permission Denied | {Provider}'s API rejected the request due to insufficient permissions. Check that your API key has access to this model. | API key lacks access to the requested model | | `RequestFailed` | Request Failed | The request could not be completed after multiple attempts. Try again in a moment. | Upstream provider unreachable after retries | | `MaxOutputTokens` | Output Limit Reached | The model stopped because it reached its maximum output length. You can ask it to continue where it left off. | Model hit its maximum output token budget | | `NoModelSelected` | No Model Selected | Select a model from the model picker below to get started. | No model configured when a message is sent | | `ApiError` | API Error | {Provider}'s API returned an unexpected error. If the problem persists, try switching models or restarting Zed. 
## Approach - Added typed errors (`NoModelConfiguredError`, `MaxOutputTokensError`) where previously raw strings were used, so they can be reliably downcast - Extended `From for ThreadError` to downcast `LanguageModelCompletionError` variants before falling through to the generic `Other` case - Each variant has a dedicated `render_*` function with appropriate buttons (retry icon, New Thread, or none) - Telemetry events updated with specific `kind` labels for each new variant Release Notes: - Improved error messages in the agent panel to show specific, actionable copy instead of raw provider error strings Self-Review Checklist: - [ x] I've reviewed my own diff for quality, security, and reliability - [n/a] Unsafe blocks (if any) have justifying comments - [ x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [n/a ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: Improved error messages in the agent panel to show specific, actionable copy instead of raw provider error strings --- crates/acp_thread/src/acp_thread.rs | 20 +- crates/agent/src/thread.rs | 25 +- crates/agent_ui/src/conversation_view.rs | 103 ++++++-- .../src/conversation_view/thread_view.rs | 225 ++++++++++++++++++ 4 files changed, 346 insertions(+), 27 deletions(-) diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index 7fb48c132f971fd3449d116b22bd4437c1ebf611..2f3973fbcc94e2d06bdc08a91d61c53809a951ed 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -36,6 +36,18 @@ use util::path_list::PathList; use util::{ResultExt, get_default_system_shell_preferring_bash, paths::PathStyle}; use uuid::Uuid; +/// Returned when the model stops because it exhausted its output token budget. 
+#[derive(Debug)] +pub struct MaxOutputTokensError; + +impl std::fmt::Display for MaxOutputTokensError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "output token limit reached") + } +} + +impl std::error::Error for MaxOutputTokensError {} + /// Key used in ACP ToolCall meta to store the tool's programmatic name. /// This is a workaround since ACP's ToolCall doesn't have a dedicated name field. pub const TOOL_NAME_META_KEY: &str = "tool_name"; @@ -2272,17 +2284,15 @@ impl AcpThread { .is_some_and(|max| u.output_tokens >= max) }); - let message = if exceeded_max_output_tokens { + if exceeded_max_output_tokens { log::error!( "Max output tokens reached. Usage: {:?}", this.token_usage ); - "Maximum output tokens reached" } else { log::error!("Max tokens reached. Usage: {:?}", this.token_usage); - "Maximum tokens reached" - }; - return Err(anyhow!(message)); + } + return Err(anyhow!(MaxOutputTokensError)); } let canceled = matches!(r.stop_reason, acp::StopReason::Cancelled); diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index e3a075ada62b6108c489779d5261c1c89afec8aa..bd9ef285169bf98ce196990156a269e830ccd738 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -64,6 +64,18 @@ const TOOL_CANCELED_MESSAGE: &str = "Tool canceled by user"; pub const MAX_TOOL_NAME_LENGTH: usize = 64; pub const MAX_SUBAGENT_DEPTH: u8 = 1; +/// Returned when a turn is attempted but no language model has been selected. 
+#[derive(Debug)] +pub struct NoModelConfiguredError; + +impl std::fmt::Display for NoModelConfiguredError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "no language model configured") + } +} + +impl std::error::Error for NoModelConfiguredError {} + /// Context passed to a subagent thread for lifecycle management #[derive(Clone, Debug, Serialize, Deserialize)] pub struct SubagentContext { @@ -1772,7 +1784,9 @@ impl Thread { &mut self, cx: &mut Context, ) -> Result>> { - let model = self.model().context("No language model configured")?; + let model = self + .model() + .ok_or_else(|| anyhow!(NoModelConfiguredError))?; log::info!("Thread::send called with model: {}", model.name().0); self.advance_prompt_id(); @@ -1896,7 +1910,10 @@ impl Thread { // mid-turn changes (e.g. the user switches model, toggles tools, // or changes profile) take effect between tool-call rounds. let (model, request) = this.update(cx, |this, cx| { - let model = this.model.clone().context("No language model configured")?; + let model = this + .model + .clone() + .ok_or_else(|| anyhow!(NoModelConfiguredError))?; this.refresh_turn_tools(cx); let request = this.build_completion_request(intent, cx)?; anyhow::Ok((model, request)) @@ -2742,7 +2759,9 @@ impl Thread { completion_intent }; - let model = self.model().context("No language model configured")?; + let model = self + .model() + .ok_or_else(|| anyhow!(NoModelConfiguredError))?; let tools = if let Some(turn) = self.running_turn.as_ref() { turn.tools .iter() diff --git a/crates/agent_ui/src/conversation_view.rs b/crates/agent_ui/src/conversation_view.rs index d38e1344701fc8681b0feaf2fa7843611750532d..528e38333144524c4a4dffa63a7a8b107c829e41 100644 --- a/crates/agent_ui/src/conversation_view.rs +++ b/crates/agent_ui/src/conversation_view.rs @@ -1,12 +1,15 @@ use acp_thread::{ AcpThread, AcpThreadEvent, AgentSessionInfo, AgentThreadEntry, AssistantMessage, - AssistantMessageChunk, AuthRequired, LoadError, 
MentionUri, PermissionOptionChoice, - PermissionOptions, PermissionPattern, RetryStatus, SelectedPermissionOutcome, ThreadStatus, - ToolCall, ToolCallContent, ToolCallStatus, UserMessageId, + AssistantMessageChunk, AuthRequired, LoadError, MaxOutputTokensError, MentionUri, + PermissionOptionChoice, PermissionOptions, PermissionPattern, RetryStatus, + SelectedPermissionOutcome, ThreadStatus, ToolCall, ToolCallContent, ToolCallStatus, + UserMessageId, }; use acp_thread::{AgentConnection, Plan}; use action_log::{ActionLog, ActionLogTelemetry, DiffStats}; -use agent::{NativeAgentServer, NativeAgentSessionList, SharedThread, ThreadStore}; +use agent::{ + NativeAgentServer, NativeAgentSessionList, NoModelConfiguredError, SharedThread, ThreadStore, +}; use agent_client_protocol as acp; #[cfg(test)] use agent_servers::AgentServerDelegate; @@ -34,7 +37,7 @@ use gpui::{ list, point, pulsating_between, }; use language::Buffer; -use language_model::LanguageModelRegistry; +use language_model::{LanguageModelCompletionError, LanguageModelRegistry}; use markdown::{Markdown, MarkdownElement, MarkdownFont, MarkdownStyle}; use parking_lot::RwLock; use project::{AgentId, AgentServerStore, Project, ProjectEntryId}; @@ -113,6 +116,31 @@ pub(crate) enum ThreadError { PaymentRequired, Refusal, AuthenticationRequired(SharedString), + RateLimitExceeded { + provider: SharedString, + }, + ServerOverloaded { + provider: SharedString, + }, + PromptTooLarge, + NoApiKey { + provider: SharedString, + }, + StreamError { + provider: SharedString, + }, + InvalidApiKey { + provider: SharedString, + }, + PermissionDenied { + provider: SharedString, + }, + RequestFailed, + MaxOutputTokens, + NoModelSelected, + ApiError { + provider: SharedString, + }, Other { message: SharedString, acp_error_code: Option, @@ -121,12 +149,57 @@ pub(crate) enum ThreadError { impl From for ThreadError { fn from(error: anyhow::Error) -> Self { - if error.is::() { + if error.is::() { + Self::MaxOutputTokens + } else if 
error.is::() { + Self::NoModelSelected + } else if error.is::() { Self::PaymentRequired } else if let Some(acp_error) = error.downcast_ref::() && acp_error.code == acp::ErrorCode::AuthRequired { Self::AuthenticationRequired(acp_error.message.clone().into()) + } else if let Some(lm_error) = error.downcast_ref::() { + use LanguageModelCompletionError::*; + match lm_error { + RateLimitExceeded { provider, .. } => Self::RateLimitExceeded { + provider: provider.to_string().into(), + }, + ServerOverloaded { provider, .. } | ApiInternalServerError { provider, .. } => { + Self::ServerOverloaded { + provider: provider.to_string().into(), + } + } + PromptTooLarge { .. } => Self::PromptTooLarge, + NoApiKey { provider } => Self::NoApiKey { + provider: provider.to_string().into(), + }, + StreamEndedUnexpectedly { provider } + | ApiReadResponseError { provider, .. } + | DeserializeResponse { provider, .. } + | HttpSend { provider, .. } => Self::StreamError { + provider: provider.to_string().into(), + }, + AuthenticationError { provider, .. } => Self::InvalidApiKey { + provider: provider.to_string().into(), + }, + PermissionError { provider, .. } => Self::PermissionDenied { + provider: provider.to_string().into(), + }, + UpstreamProviderError { .. } => Self::RequestFailed, + BadRequestFormat { provider, .. } + | HttpResponseError { provider, .. } + | ApiEndpointNotFound { provider } => Self::ApiError { + provider: provider.to_string().into(), + }, + _ => { + let message: SharedString = format!("{:#}", error).into(); + Self::Other { + message, + acp_error_code: None, + } + } + } } else { let message: SharedString = format!("{:#}", error).into(); @@ -6625,19 +6698,11 @@ pub(crate) mod tests { conversation_view.read_with(cx, |conversation_view, cx| { let state = conversation_view.active_thread().unwrap(); let error = &state.read(cx).thread_error; - match error { - Some(ThreadError::Other { message, .. 
}) => { - assert!( - message.contains("Maximum tokens reached"), - "Expected 'Maximum tokens reached' error, got: {}", - message - ); - } - other => panic!( - "Expected ThreadError::Other with 'Maximum tokens reached', got: {:?}", - other.is_some() - ), - } + assert!( + matches!(error, Some(ThreadError::MaxOutputTokens)), + "Expected ThreadError::MaxOutputTokens, got: {:?}", + error.is_some() + ); }); } diff --git a/crates/agent_ui/src/conversation_view/thread_view.rs b/crates/agent_ui/src/conversation_view/thread_view.rs index 32fe52480e2c347cc482b2296a107ee8731fb672..6fa5f5999c84c5190163be5904828cbbd3ebf053 100644 --- a/crates/agent_ui/src/conversation_view/thread_view.rs +++ b/crates/agent_ui/src/conversation_view/thread_view.rs @@ -1259,6 +1259,62 @@ impl ThreadView { ThreadError::AuthenticationRequired(message) => { ("authentication_required", None, message.clone()) } + ThreadError::RateLimitExceeded { provider } => ( + "rate_limit_exceeded", + None, + format!("{provider}'s rate limit was reached.").into(), + ), + ThreadError::ServerOverloaded { provider } => ( + "server_overloaded", + None, + format!("{provider}'s servers are temporarily unavailable.").into(), + ), + ThreadError::PromptTooLarge => ( + "prompt_too_large", + None, + "Context too large for the model's context window.".into(), + ), + ThreadError::NoApiKey { provider } => ( + "no_api_key", + None, + format!("No API key configured for {provider}.").into(), + ), + ThreadError::StreamError { provider } => ( + "stream_error", + None, + format!("Connection to {provider}'s API was interrupted.").into(), + ), + ThreadError::InvalidApiKey { provider } => ( + "invalid_api_key", + None, + format!("Invalid or expired API key for {provider}.").into(), + ), + ThreadError::PermissionDenied { provider } => ( + "permission_denied", + None, + format!( + "{provider}'s API rejected the request due to insufficient permissions." 
+ ) + .into(), + ), + ThreadError::RequestFailed => ( + "request_failed", + None, + "Request could not be completed after multiple attempts.".into(), + ), + ThreadError::MaxOutputTokens => ( + "max_output_tokens", + None, + "Model reached its maximum output length.".into(), + ), + ThreadError::NoModelSelected => { + ("no_model_selected", None, "No model selected.".into()) + } + ThreadError::ApiError { provider } => ( + "api_error", + None, + format!("{provider}'s API returned an unexpected error.").into(), + ), ThreadError::Other { acp_error_code, message, @@ -8088,6 +8144,109 @@ impl ThreadView { self.render_authentication_required_error(error.clone(), cx) } ThreadError::PaymentRequired => self.render_payment_required_error(cx), + ThreadError::RateLimitExceeded { provider } => self.render_error_callout( + "Rate Limit Reached", + format!( + "{provider}'s rate limit was reached. Zed will retry automatically. \ + You can also wait a moment and try again." + ) + .into(), + true, + true, + cx, + ), + ThreadError::ServerOverloaded { provider } => self.render_error_callout( + "Provider Unavailable", + format!( + "{provider}'s servers are temporarily unavailable. Zed will retry \ + automatically. If the problem persists, check the provider's status page." + ) + .into(), + true, + true, + cx, + ), + ThreadError::PromptTooLarge => self.render_prompt_too_large_error(cx), + ThreadError::NoApiKey { provider } => self.render_error_callout( + "API Key Missing", + format!( + "No API key is configured for {provider}. \ + Add your key via the Agent Panel settings to continue." + ) + .into(), + false, + true, + cx, + ), + ThreadError::StreamError { provider } => self.render_error_callout( + "Connection Interrupted", + format!( + "The connection to {provider}'s API was interrupted. Zed will retry \ + automatically. If the problem persists, check your network connection." 
+ ) + .into(), + true, + true, + cx, + ), + ThreadError::InvalidApiKey { provider } => self.render_error_callout( + "Invalid API Key", + format!( + "The API key for {provider} is invalid or has expired. \ + Update your key via the Agent Panel settings to continue." + ) + .into(), + false, + false, + cx, + ), + ThreadError::PermissionDenied { provider } => self.render_error_callout( + "Permission Denied", + format!( + "{provider}'s API rejected the request due to insufficient permissions. \ + Check that your API key has access to this model." + ) + .into(), + false, + false, + cx, + ), + ThreadError::RequestFailed => self.render_error_callout( + "Request Failed", + "The request could not be completed after multiple attempts. \ + Try again in a moment." + .into(), + true, + false, + cx, + ), + ThreadError::MaxOutputTokens => self.render_error_callout( + "Output Limit Reached", + "The model stopped because it reached its maximum output length. \ + You can ask it to continue where it left off." + .into(), + false, + false, + cx, + ), + ThreadError::NoModelSelected => self.render_error_callout( + "No Model Selected", + "Select a model from the model picker below to get started.".into(), + false, + false, + cx, + ), + ThreadError::ApiError { provider } => self.render_error_callout( + "API Error", + format!( + "{provider}'s API returned an unexpected error. \ + If the problem persists, try switching models or restarting Zed." 
+ ) + .into(), + true, + true, + cx, + ), }; Some(div().child(content)) @@ -8148,6 +8307,72 @@ impl ThreadView { .dismiss_action(self.dismiss_error_button(cx)) } + fn render_error_callout( + &self, + title: &'static str, + message: SharedString, + show_retry: bool, + show_copy: bool, + cx: &mut Context, + ) -> Callout { + let can_resume = show_retry && self.thread.read(cx).can_retry(cx); + let show_actions = can_resume || show_copy; + + Callout::new() + .severity(Severity::Error) + .icon(IconName::XCircle) + .title(title) + .description(message.clone()) + .when(show_actions, |callout| { + callout.actions_slot( + h_flex() + .gap_0p5() + .when(can_resume, |this| this.child(self.retry_button(cx))) + .when(show_copy, |this| { + this.child(self.create_copy_button(message.clone())) + }), + ) + }) + .dismiss_action(self.dismiss_error_button(cx)) + } + + fn render_prompt_too_large_error(&self, cx: &mut Context) -> Callout { + const MESSAGE: &str = "This conversation is too long for the model's context window. 
\ + Start a new thread or remove some attached files to continue."; + + Callout::new() + .severity(Severity::Error) + .icon(IconName::XCircle) + .title("Context Too Large") + .description(MESSAGE) + .actions_slot( + h_flex() + .gap_0p5() + .child(self.new_thread_button(cx)) + .child(self.create_copy_button(MESSAGE)), + ) + .dismiss_action(self.dismiss_error_button(cx)) + } + + fn retry_button(&self, cx: &mut Context) -> impl IntoElement { + Button::new("retry", "Retry") + .label_size(LabelSize::Small) + .style(ButtonStyle::Filled) + .on_click(cx.listener(|this, _, _, cx| { + this.retry_generation(cx); + })) + } + + fn new_thread_button(&self, cx: &mut Context) -> impl IntoElement { + Button::new("new_thread", "New Thread") + .label_size(LabelSize::Small) + .style(ButtonStyle::Filled) + .on_click(cx.listener(|this, _, window, cx| { + this.clear_thread_error(cx); + window.dispatch_action(NewThread.boxed_clone(), cx); + })) + } + fn upgrade_button(&self, cx: &mut Context) -> impl IntoElement { Button::new("upgrade", "Upgrade") .label_size(LabelSize::Small) From cca6552bc4228b0ab33a57ede4a5f4a46571035b Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 10 Apr 2026 14:29:03 -0700 Subject: [PATCH 52/67] Open new remote projects in the same window via sidebar (#53654) Previously, when opening a remote project via the sidebar, and picking a new project path via the path picker, it would incorrectly open in a new window. 
Release Notes: - N/A --- crates/recent_projects/src/remote_servers.rs | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/crates/recent_projects/src/remote_servers.rs b/crates/recent_projects/src/remote_servers.rs index d360ba4233d036191617c89ffb92b9decced5002..0e15abf296e491185f24718cddf72e2532e9e6aa 100644 --- a/crates/recent_projects/src/remote_servers.rs +++ b/crates/recent_projects/src/remote_servers.rs @@ -488,18 +488,21 @@ impl ProjectPicker { }) .log_err(); - let options = cx - .update(|_, cx| (app_state.build_window_options)(None, cx)) - .log_err()?; - let window = cx - .open_window(options, |window, cx| { + let window = if create_new_window { + let options = cx + .update(|_, cx| (app_state.build_window_options)(None, cx)) + .log_err()?; + cx.open_window(options, |window, cx| { let workspace = cx.new(|cx| { telemetry::event!("SSH Project Created"); Workspace::new(None, project.clone(), app_state.clone(), window, cx) }); cx.new(|cx| MultiWorkspace::new(workspace, window, cx)) }) - .log_err()?; + .log_err() + } else { + cx.window_handle().downcast::() + }?; let items = open_remote_project_with_existing_connection( connection, project, paths, app_state, window, None, cx, From ed2f21aa056f854b2fa1021f6062eb3204db36b9 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 10 Apr 2026 19:19:00 -0300 Subject: [PATCH 53/67] agent_ui: Fix scroll area in zoomed-in panel (#53657) When the agent panel is zoomed-in, or if it's wide enough to the point you see a lot of horizontal padding due to the max-width, it wasn't previously possible to scroll the thread in that padding while your mouse was resting in that area. Release Notes: - Agent: Fixed scroll behavior when the agent panel is zoomed-in. 
--- .../src/conversation_view/thread_view.rs | 23 +++++++++++-------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/crates/agent_ui/src/conversation_view/thread_view.rs b/crates/agent_ui/src/conversation_view/thread_view.rs index 6fa5f5999c84c5190163be5904828cbbd3ebf053..c856c7c69a48c1e5a27e9ec48cd2adb3fa0ee702 100644 --- a/crates/agent_ui/src/conversation_view/thread_view.rs +++ b/crates/agent_ui/src/conversation_view/thread_view.rs @@ -4387,17 +4387,27 @@ impl Render for TokenUsageTooltip { impl ThreadView { fn render_entries(&mut self, cx: &mut Context) -> List { + let max_content_width = AgentSettings::get_global(cx).max_content_width; + let centered_container = move |content: AnyElement| { + h_flex() + .w_full() + .justify_center() + .child(div().max_w(max_content_width).w_full().child(content)) + }; + list( self.list_state.clone(), cx.processor(move |this, index: usize, window, cx| { let entries = this.thread.read(cx).entries(); if let Some(entry) = entries.get(index) { - this.render_entry(index, entries.len(), entry, window, cx) + let rendered = this.render_entry(index, entries.len(), entry, window, cx); + centered_container(rendered.into_any_element()).into_any_element() } else if this.generating_indicator_in_list { let confirmation = entries .last() .is_some_and(|entry| Self::is_waiting_for_confirmation(entry)); - this.render_generating(confirmation, cx).into_any_element() + let rendered = this.render_generating(confirmation, cx); + centered_container(rendered.into_any_element()).into_any_element() } else { Empty.into_any() } @@ -8783,7 +8793,6 @@ impl ThreadView { impl Render for ThreadView { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let has_messages = self.list_state.item_count() > 0; - let max_content_width = AgentSettings::get_global(cx).max_content_width; let list_state = self.list_state.clone(); let conversation = v_flex() @@ -8794,13 +8803,7 @@ impl Render for ThreadView { if has_messages { 
this.flex_1() .size_full() - .child( - v_flex() - .mx_auto() - .max_w(max_content_width) - .size_full() - .child(self.render_entries(cx)), - ) + .child(self.render_entries(cx)) .vertical_scrollbar_for(&list_state, window, cx) .into_any() } else { From b5e1aea0e155a0a9399e4d92ae6ef2c6860332bd Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 10 Apr 2026 20:04:53 -0300 Subject: [PATCH 54/67] Add callout communicating about multi-root setups in ACP agents (#53660) This PR adds a callout in the agent panel communicating about the lack of support for multi-root workspaces within ACP agents. Release Notes: - N/A --- .../src/conversation_view/thread_view.rs | 50 +++++++++++++++++++ 1 file changed, 50 insertions(+) diff --git a/crates/agent_ui/src/conversation_view/thread_view.rs b/crates/agent_ui/src/conversation_view/thread_view.rs index c856c7c69a48c1e5a27e9ec48cd2adb3fa0ee702..66a211b2f4cf048301da1e7c154d50db191ec15a 100644 --- a/crates/agent_ui/src/conversation_view/thread_view.rs +++ b/crates/agent_ui/src/conversation_view/thread_view.rs @@ -330,6 +330,7 @@ pub struct ThreadView { pub hovered_recent_history_item: Option, pub show_external_source_prompt_warning: bool, pub show_codex_windows_warning: bool, + pub multi_root_callout_dismissed: bool, pub generating_indicator_in_list: bool, pub history: Option>, pub _history_subscription: Option, @@ -573,6 +574,7 @@ impl ThreadView { history, _history_subscription: history_subscription, show_codex_windows_warning, + multi_root_callout_dismissed: false, generating_indicator_in_list: false, }; @@ -8585,6 +8587,53 @@ impl ThreadView { ) } + fn render_multi_root_callout(&self, cx: &mut Context) -> Option { + if self.multi_root_callout_dismissed { + return None; + } + + if self.as_native_connection(cx).is_some() { + return None; + } + + let project = self.project.upgrade()?; + let worktree_count = project.read(cx).visible_worktrees(cx).count(); + if worktree_count <= 1 { + 
return None; + } + + let work_dirs = self.thread.read(cx).work_dirs()?; + let active_dir = work_dirs + .ordered_paths() + .next() + .and_then(|p| p.file_name()) + .map(|name| name.to_string_lossy().to_string()) + .unwrap_or_else(|| "one folder".to_string()); + + let description = format!( + "This agent only operates on \"{}\". Other folders in this workspace are not accessible to it.", + active_dir + ); + + Some( + Callout::new() + .severity(Severity::Warning) + .icon(IconName::Warning) + .title("External Agents currently don't support multi-root workspaces") + .description(description) + .border_position(ui::BorderPosition::Bottom) + .dismiss_action( + IconButton::new("dismiss-multi-root-callout", IconName::Close) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Dismiss")) + .on_click(cx.listener(|this, _, _, cx| { + this.multi_root_callout_dismissed = true; + cx.notify(); + })), + ), + ) + } + fn render_new_version_callout(&self, version: &SharedString, cx: &mut Context) -> Div { let server_view = self.server_view.clone(); let has_version = !version.is_empty(); @@ -8988,6 +9037,7 @@ impl Render for ThreadView { .size_full() .children(self.render_subagent_titlebar(cx)) .child(conversation) + .children(self.render_multi_root_callout(cx)) .children(self.render_activity_bar(window, cx)) .when(self.show_external_source_prompt_warning, |this| { this.child(self.render_external_source_prompt_warning(cx)) From f4477249f281f0e1ba4e05048c750f3f85980290 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 10 Apr 2026 16:06:15 -0700 Subject: [PATCH 55/67] Do not include non-existent folders in recent projects, while retaining them for a one week grace period (#53662) Fixes a regression introduced in https://github.com/zed-industries/zed/issues/49603 Supersedes https://github.com/zed-industries/zed/pull/52638 We want to leave these non-existent workspaces in the database for up to 7 days, in case they are on external drives that are restored later, but we do *not* 
want to show them in the UI if they don't exist. Release Notes: - Fixed an issue where deleted folders appeared in the recent project picker --------- Co-authored-by: ojpro --- crates/workspace/src/persistence.rs | 54 +++++++++++++++-------------- 1 file changed, 28 insertions(+), 26 deletions(-) diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index 67383740a8b3287bb237748776b0c7ab2654d7ba..9ae44ef3db2e6c18979694440744043a6abc055e 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -1804,16 +1804,12 @@ impl WorkspaceDb { } } - async fn all_paths_exist_with_a_directory( - paths: &[PathBuf], - fs: &dyn Fs, - timestamp: Option>, - ) -> bool { + async fn all_paths_exist_with_a_directory(paths: &[PathBuf], fs: &dyn Fs) -> bool { let mut any_dir = false; for path in paths { match fs.metadata(path).await.ok().flatten() { None => { - return timestamp.is_some_and(|t| Utc::now() - t < chrono::Duration::days(7)); + return false; } Some(meta) => { if meta.is_dir { @@ -1839,9 +1835,9 @@ impl WorkspaceDb { )>, > { let mut result = Vec::new(); - let mut delete_tasks = Vec::new(); + let mut workspaces_to_delete = Vec::new(); let remote_connections = self.remote_connections()?; - + let now = Utc::now(); for (id, paths, remote_connection_id, timestamp) in self.recent_workspaces()? { if let Some(remote_connection_id) = remote_connection_id { if let Some(connection_options) = remote_connections.get(&remote_connection_id) { @@ -1852,34 +1848,40 @@ impl WorkspaceDb { timestamp, )); } else { - delete_tasks.push(self.delete_workspace_by_id(id)); + workspaces_to_delete.push(id); } continue; } - let has_wsl_path = if cfg!(windows) { - paths + // Delete the workspace if any of the paths are WSL paths. If a + // local workspace points to WSL, attempting to read its metadata + // will wait for the WSL VM and file server to boot up. This can + // block for many seconds. 
Supported scenarios use remote + // workspaces. + if cfg!(windows) { + let has_wsl_path = paths .paths() .iter() - .any(|path| util::paths::WslPath::from_path(path).is_some()) - } else { - false - }; + .any(|path| util::paths::WslPath::from_path(path).is_some()); + if has_wsl_path { + workspaces_to_delete.push(id); + continue; + } + } - // Delete the workspace if any of the paths are WSL paths. - // If a local workspace points to WSL, this check will cause us to wait for the - // WSL VM and file server to boot up. This can block for many seconds. - // Supported scenarios use remote workspaces. - if !has_wsl_path - && Self::all_paths_exist_with_a_directory(paths.paths(), fs, Some(timestamp)).await - { + if Self::all_paths_exist_with_a_directory(paths.paths(), fs).await { result.push((id, SerializedWorkspaceLocation::Local, paths, timestamp)); - } else { - delete_tasks.push(self.delete_workspace_by_id(id)); + } else if now - timestamp >= chrono::Duration::days(7) { + workspaces_to_delete.push(id); } } - futures::future::join_all(delete_tasks).await; + futures::future::join_all( + workspaces_to_delete + .into_iter() + .map(|id| self.delete_workspace_by_id(id)), + ) + .await; Ok(result) } @@ -1932,7 +1934,7 @@ impl WorkspaceDb { window_id, }); } else { - if Self::all_paths_exist_with_a_directory(paths.paths(), fs, None).await { + if Self::all_paths_exist_with_a_directory(paths.paths(), fs).await { workspaces.push(SessionWorkspace { workspace_id, location: SerializedWorkspaceLocation::Local, From 86f55495c21260adf0da5b7afebebd2d5ca0fa4a Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 10 Apr 2026 20:19:55 -0300 Subject: [PATCH 56/67] sidebar: Add a new thread draft system (#53574) Release Notes: - N/A --------- Co-authored-by: Mikayla Maki Co-authored-by: Nathan Sobo --- crates/agent_ui/src/agent_panel.rs | 178 +++- crates/agent_ui/src/agent_ui.rs | 4 +- crates/agent_ui/src/thread_import.rs | 4 +- 
.../agent_ui/src/thread_worktree_archive.rs | 21 +- crates/agent_ui/src/threads_archive_view.rs | 4 +- crates/sidebar/src/sidebar.rs | 894 ++++++++++-------- crates/sidebar/src/sidebar_tests.rs | 673 ++++++++++--- 7 files changed, 1227 insertions(+), 551 deletions(-) diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 2ff4cd18a78fd53c5d540e66670d6e6c9e51aa47..8005445034d0b9339d36cb2d48da516f9c2a9207 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -56,7 +56,7 @@ use extension_host::ExtensionStore; use fs::Fs; use gpui::{ Action, Animation, AnimationExt, AnyElement, App, AsyncWindowContext, ClipboardItem, Corner, - DismissEvent, Entity, EntityId, EventEmitter, ExternalPaths, FocusHandle, Focusable, + DismissEvent, Entity, EntityId, EventEmitter, ExternalPaths, FocusHandle, Focusable, Global, KeyContext, Pixels, Subscription, Task, UpdateGlobal, WeakEntity, prelude::*, pulsating_between, }; @@ -204,21 +204,12 @@ pub fn init(cx: &mut App) { panel.update(cx, |panel, cx| panel.open_configuration(window, cx)); } }) - .register_action(|workspace, action: &NewExternalAgentThread, window, cx| { + .register_action(|workspace, _action: &NewExternalAgentThread, window, cx| { if let Some(panel) = workspace.panel::(cx) { workspace.focus_panel::(window, cx); panel.update(cx, |panel, cx| { - let initial_content = panel.take_active_draft_initial_content(cx); - panel.external_thread( - action.agent.clone(), - None, - None, - None, - initial_content, - true, - window, - cx, - ) + let id = panel.create_draft(window, cx); + panel.activate_draft(id, true, window, cx); }); } }) @@ -602,6 +593,25 @@ fn build_conflicted_files_resolution_prompt( content } +/// Unique identifier for a sidebar draft thread. Not persisted across restarts. +/// IDs are globally unique across all AgentPanel instances within the same app. 
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub struct DraftId(pub usize); + +#[derive(Default)] +struct DraftIdCounter(usize); + +impl Global for DraftIdCounter {} + +impl DraftId { + fn next(cx: &mut App) -> Self { + let counter = cx.default_global::(); + let id = counter.0; + counter.0 += 1; + Self(id) + } +} + enum ActiveView { Uninitialized, AgentThread { @@ -803,6 +813,7 @@ pub struct AgentPanel { active_view: ActiveView, previous_view: Option, background_threads: HashMap>, + draft_threads: HashMap>, new_thread_menu_handle: PopoverMenuHandle, start_thread_in_menu_handle: PopoverMenuHandle, thread_branch_menu_handle: PopoverMenuHandle, @@ -1181,6 +1192,7 @@ impl AgentPanel { context_server_registry, previous_view: None, background_threads: HashMap::default(), + draft_threads: HashMap::default(), new_thread_menu_handle: PopoverMenuHandle::default(), start_thread_in_menu_handle: PopoverMenuHandle::default(), thread_branch_menu_handle: PopoverMenuHandle::default(), @@ -1306,9 +1318,96 @@ impl AgentPanel { } pub fn new_thread(&mut self, _action: &NewThread, window: &mut Window, cx: &mut Context) { - self.reset_start_thread_in_to_default(cx); - let initial_content = self.take_active_draft_initial_content(cx); - self.external_thread(None, None, None, None, initial_content, true, window, cx); + let id = self.create_draft(window, cx); + self.activate_draft(id, true, window, cx); + } + + /// Creates a new empty draft thread and stores it. Returns the DraftId. + /// The draft is NOT activated — call `activate_draft` to show it. 
+ pub fn create_draft(&mut self, window: &mut Window, cx: &mut Context) -> DraftId { + let id = DraftId::next(cx); + let workspace = self.workspace.clone(); + let project = self.project.clone(); + let fs = self.fs.clone(); + let thread_store = self.thread_store.clone(); + let agent = if self.project.read(cx).is_via_collab() { + Agent::NativeAgent + } else { + self.selected_agent.clone() + }; + let server = agent.server(fs, thread_store); + let conversation_view = self.create_agent_thread( + server, None, None, None, None, workspace, project, agent, window, cx, + ); + self.draft_threads.insert(id, conversation_view); + id + } + + pub fn activate_draft( + &mut self, + id: DraftId, + focus: bool, + window: &mut Window, + cx: &mut Context, + ) { + let Some(conversation_view) = self.draft_threads.get(&id).cloned() else { + return; + }; + self.set_active_view( + ActiveView::AgentThread { conversation_view }, + focus, + window, + cx, + ); + } + + /// Removes a draft thread. If it's currently active, does nothing to + /// the active view — the caller should activate something else first. + pub fn remove_draft(&mut self, id: DraftId) { + self.draft_threads.remove(&id); + } + + /// Returns the DraftId of the currently active draft, if the active + /// view is a draft thread tracked in `draft_threads`. + pub fn active_draft_id(&self) -> Option { + let active_cv = self.active_conversation_view()?; + self.draft_threads + .iter() + .find_map(|(id, cv)| (cv.entity_id() == active_cv.entity_id()).then_some(*id)) + } + + /// Returns all draft IDs, sorted newest-first. + pub fn draft_ids(&self) -> Vec { + let mut ids: Vec = self.draft_threads.keys().copied().collect(); + ids.sort_by_key(|id| std::cmp::Reverse(id.0)); + ids + } + + /// Returns the text from a draft's message editor, or `None` if the + /// draft doesn't exist or has no text. 
+ pub fn draft_editor_text(&self, id: DraftId, cx: &App) -> Option { + let cv = self.draft_threads.get(&id)?; + let tv = cv.read(cx).active_thread()?; + let text = tv.read(cx).message_editor.read(cx).text(cx); + if text.trim().is_empty() { + None + } else { + Some(text) + } + } + + /// Clears the message editor text of a tracked draft. + pub fn clear_draft_editor(&self, id: DraftId, window: &mut Window, cx: &mut Context) { + let Some(cv) = self.draft_threads.get(&id) else { + return; + }; + let Some(tv) = cv.read(cx).active_thread() else { + return; + }; + let editor = tv.read(cx).message_editor.clone(); + editor.update(cx, |editor, cx| { + editor.clear(window, cx); + }); } fn take_active_draft_initial_content( @@ -1410,7 +1509,7 @@ impl AgentPanel { }); let server = agent.server(fs, thread_store); - self.create_agent_thread( + let conversation_view = self.create_agent_thread( server, resume_session_id, work_dirs, @@ -1419,6 +1518,11 @@ impl AgentPanel { workspace, project, agent, + window, + cx, + ); + self.set_active_view( + ActiveView::AgentThread { conversation_view }, focus, window, cx, @@ -1982,6 +2086,16 @@ impl AgentPanel { return; }; + // If this ConversationView is a tracked draft, it's already + // stored in `draft_threads` — don't drop it. + let is_tracked_draft = self + .draft_threads + .values() + .any(|cv| cv.entity_id() == conversation_view.entity_id()); + if is_tracked_draft { + return; + } + let Some(thread_view) = conversation_view.read(cx).root_thread(cx) else { return; }; @@ -2188,6 +2302,12 @@ impl AgentPanel { this.handle_first_send_requested(view.clone(), content.clone(), window, cx); } AcpThreadViewEvent::MessageSentOrQueued => { + // When a draft sends its first message it becomes a + // real thread. Remove it from `draft_threads` so the + // sidebar stops showing a stale draft entry. 
+ if let Some(draft_id) = this.active_draft_id() { + this.draft_threads.remove(&draft_id); + } let session_id = view.read(cx).thread.read(cx).session_id().clone(); cx.emit(AgentPanelEvent::MessageSentOrQueued { session_id }); } @@ -2528,10 +2648,9 @@ impl AgentPanel { workspace: WeakEntity, project: Entity, agent: Agent, - focus: bool, window: &mut Window, cx: &mut Context, - ) { + ) -> Entity { if self.selected_agent != agent { self.selected_agent = agent.clone(); self.serialize(cx); @@ -2586,12 +2705,7 @@ impl AgentPanel { }) .detach(); - self.set_active_view( - ActiveView::AgentThread { conversation_view }, - focus, - window, - cx, - ); + conversation_view } fn active_thread_has_messages(&self, cx: &App) -> bool { @@ -3457,8 +3571,8 @@ impl Panel for AgentPanel { Some((_, WorktreeCreationStatus::Creating)) ) { - let selected_agent = self.selected_agent.clone(); - self.new_agent_thread_inner(selected_agent, false, window, cx); + let id = self.create_draft(window, cx); + self.activate_draft(id, false, window, cx); } } @@ -4745,8 +4859,14 @@ impl AgentPanel { id: server.agent_id(), }; - self.create_agent_thread( - server, None, None, None, None, workspace, project, ext_agent, true, window, cx, + let conversation_view = self.create_agent_thread( + server, None, None, None, None, workspace, project, ext_agent, window, cx, + ); + self.set_active_view( + ActiveView::AgentThread { conversation_view }, + true, + window, + cx, ); } diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index 2cf4218719a0412534d9832c3cb54587f4c45a73..2e2e4018e3cc9521c7b2e106a87281a4f37b0796 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -65,11 +65,11 @@ use std::any::TypeId; use workspace::Workspace; use crate::agent_configuration::{ConfigureContextServerModal, ManageProfilesModal}; -pub use crate::agent_panel::{AgentPanel, AgentPanelEvent, WorktreeCreationStatus}; +pub use crate::agent_panel::{AgentPanel, AgentPanelEvent, 
DraftId, WorktreeCreationStatus}; use crate::agent_registry_ui::AgentRegistryPage; pub use crate::inline_assistant::InlineAssistant; pub use agent_diff::{AgentDiffPane, AgentDiffToolbar}; -pub(crate) use conversation_view::ConversationView; +pub use conversation_view::ConversationView; pub use external_source_prompt::ExternalSourcePrompt; pub(crate) use mode_selector::ModeSelector; pub(crate) use model_selector::ModelSelector; diff --git a/crates/agent_ui/src/thread_import.rs b/crates/agent_ui/src/thread_import.rs index 686ca5d6cd4fdfede7eb4a5ed70c90074972fdf4..78de6fd34e2cab57377c640d27bd21c26f8a7339 100644 --- a/crates/agent_ui/src/thread_import.rs +++ b/crates/agent_ui/src/thread_import.rs @@ -342,9 +342,9 @@ impl Render for ThreadImportModal { Modal::new("import-threads", None) .header( ModalHeader::new() - .headline("Import ACP Threads") + .headline("Import External Agent Threads") .description( - "Import threads from your ACP agents — whether started in Zed or another client. \ + "Import threads from agents like Claude Agent, Codex, and more, whether started in Zed or another client. \ Choose which agents to include, and their threads will appear in your archive." 
) .show_dismiss_button(true), diff --git a/crates/agent_ui/src/thread_worktree_archive.rs b/crates/agent_ui/src/thread_worktree_archive.rs index 4398a2154d4abd550535b247ab1a9e518f84b39d..723e6779a44eecb9b4ef05049f392785f84041b1 100644 --- a/crates/agent_ui/src/thread_worktree_archive.rs +++ b/crates/agent_ui/src/thread_worktree_archive.rs @@ -139,16 +139,6 @@ pub fn build_root_plan( .then_some((snapshot, repo)) }); - let matching_worktree_snapshot = workspaces.iter().find_map(|workspace| { - workspace - .read(cx) - .project() - .read(cx) - .visible_worktrees(cx) - .find(|worktree| worktree.read(cx).abs_path().as_ref() == path.as_path()) - .map(|worktree| worktree.read(cx).snapshot()) - }); - let (main_repo_path, worktree_repo, branch_name) = if let Some((linked_snapshot, repo)) = linked_repo { ( @@ -160,12 +150,11 @@ pub fn build_root_plan( .map(|branch| branch.name().to_string()), ) } else { - let main_repo_path = matching_worktree_snapshot - .as_ref()? - .root_repo_common_dir() - .and_then(|dir| dir.parent())? - .to_path_buf(); - (main_repo_path, None, None) + // Not a linked worktree — nothing to archive from disk. + // `remove_root` would try to remove the main worktree from + // the project and then run `git worktree remove`, both of + // which fail for main working trees. 
+ return None; }; Some(RootPlan { diff --git a/crates/agent_ui/src/threads_archive_view.rs b/crates/agent_ui/src/threads_archive_view.rs index 6e73584ef87f11810e4c860cc6ff4c8d8ff015a9..817a9deb3ae9223c0fe2bd169563bb9c5c6cb5a9 100644 --- a/crates/agent_ui/src/threads_archive_view.rs +++ b/crates/agent_ui/src/threads_archive_view.rs @@ -553,7 +553,6 @@ impl ThreadsArchiveView { base.status(AgentThreadStatus::Running) .action_slot( IconButton::new("cancel-restore", IconName::Close) - .style(ButtonStyle::Filled) .icon_size(IconSize::Small) .icon_color(Color::Muted) .tooltip(Tooltip::text("Cancel Restore")) @@ -568,12 +567,11 @@ impl ThreadsArchiveView { }) }), ) - .tooltip(Tooltip::text("Restoring\u{2026}")) + .tooltip(Tooltip::text("Restoring…")) .into_any_element() } else { base.action_slot( IconButton::new("delete-thread", IconName::Trash) - .style(ButtonStyle::Filled) .icon_size(IconSize::Small) .icon_color(Color::Muted) .tooltip({ diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs index 4d88ddeffdd6625768dd0207176c0984e9833a29..f69e5206a6e236bd602e5d1922d33b36afb6b520 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -9,9 +9,9 @@ use agent_ui::thread_worktree_archive; use agent_ui::threads_archive_view::{ ThreadsArchiveView, ThreadsArchiveViewEvent, format_history_entry_timestamp, }; -use agent_ui::{AcpThreadImportOnboarding, ThreadImportModal}; use agent_ui::{ - Agent, AgentPanel, AgentPanelEvent, DEFAULT_THREAD_TITLE, NewThread, RemoveSelectedThread, + AcpThreadImportOnboarding, Agent, AgentPanel, AgentPanelEvent, DEFAULT_THREAD_TITLE, DraftId, + NewThread, RemoveSelectedThread, ThreadImportModal, }; use chrono::{DateTime, Utc}; use editor::Editor; @@ -38,9 +38,9 @@ use std::path::PathBuf; use std::rc::Rc; use theme::ActiveTheme; use ui::{ - AgentThreadStatus, CommonAnimationExt, ContextMenu, Divider, HighlightedLabel, KeyBinding, - PopoverMenu, PopoverMenuHandle, Tab, ThreadItem, ThreadItemWorktreeInfo, 
TintColor, Tooltip, - WithScrollbar, prelude::*, + AgentThreadStatus, CommonAnimationExt, ContextMenu, Divider, GradientFade, HighlightedLabel, + KeyBinding, PopoverMenu, PopoverMenuHandle, Tab, ThreadItem, ThreadItemWorktreeInfo, TintColor, + Tooltip, WithScrollbar, prelude::*, }; use util::ResultExt as _; use util::path_list::PathList; @@ -121,14 +121,17 @@ enum ActiveEntry { session_id: acp::SessionId, workspace: Entity, }, - Draft(Entity), + Draft { + id: DraftId, + workspace: Entity, + }, } impl ActiveEntry { fn workspace(&self) -> &Entity { match self { ActiveEntry::Thread { workspace, .. } => workspace, - ActiveEntry::Draft(workspace) => workspace, + ActiveEntry::Draft { workspace, .. } => workspace, } } @@ -136,17 +139,22 @@ impl ActiveEntry { matches!(self, ActiveEntry::Thread { session_id: id, .. } if id == session_id) } + fn is_active_draft(&self, draft_id: DraftId) -> bool { + matches!(self, ActiveEntry::Draft { id, .. } if *id == draft_id) + } + fn matches_entry(&self, entry: &ListEntry) -> bool { match (self, entry) { (ActiveEntry::Thread { session_id, .. }, ListEntry::Thread(thread)) => { thread.metadata.session_id == *session_id } ( - ActiveEntry::Draft(_), + ActiveEntry::Draft { id, .. }, ListEntry::DraftThread { - workspace: None, .. + draft_id: Some(entry_id), + .. }, - ) => true, + ) => *id == *entry_id, _ => false, } } @@ -245,9 +253,10 @@ enum ListEntry { key: ProjectGroupKey, is_fully_expanded: bool, }, - /// The user's active draft thread. Shows a prefix of the currently-typed - /// prompt, or "Untitled Thread" if the prompt is empty. DraftThread { + /// `None` for placeholder entries in empty groups with no open + /// workspace. `Some` for drafts backed by an AgentPanel. + draft_id: Option, key: project::ProjectGroupKey, workspace: Option>, worktrees: Vec, @@ -273,15 +282,7 @@ impl ListEntry { ThreadEntryWorkspace::Open(ws) => vec![ws.clone()], ThreadEntryWorkspace::Closed { .. } => Vec::new(), }, - ListEntry::DraftThread { workspace, .. 
} => { - if let Some(ws) = workspace { - vec![ws.clone()] - } else { - // workspace: None means this is the active draft, - // which always lives on the current workspace. - vec![multi_workspace.workspace().clone()] - } - } + ListEntry::DraftThread { workspace, .. } => workspace.iter().cloned().collect(), ListEntry::ProjectHeader { key, .. } => multi_workspace .workspaces_for_project_group(key, cx) .cloned() @@ -595,10 +596,6 @@ impl Sidebar { cx.emit(workspace::SidebarEvent::SerializeNeeded); } - fn active_entry_workspace(&self) -> Option<&Entity> { - self.active_entry.as_ref().map(|entry| entry.workspace()) - } - fn is_active_workspace(&self, workspace: &Entity, cx: &App) -> bool { self.multi_workspace .upgrade() @@ -648,10 +645,10 @@ impl Sidebar { cx.subscribe_in( workspace, window, - |this, _workspace, event: &workspace::Event, window, cx| { + |this, _workspace, event: &workspace::Event, _window, cx| { if let workspace::Event::PanelAdded(view) = event { if let Ok(agent_panel) = view.clone().downcast::() { - this.subscribe_to_agent_panel(&agent_panel, window, cx); + this.subscribe_to_agent_panel(&agent_panel, _window, cx); } } }, @@ -675,21 +672,8 @@ impl Sidebar { cx.subscribe_in( agent_panel, window, - |this, agent_panel, event: &AgentPanelEvent, _window, cx| match event { + |this, _agent_panel, event: &AgentPanelEvent, _window, cx| match event { AgentPanelEvent::ActiveViewChanged => { - let is_new_draft = agent_panel - .read(cx) - .active_conversation_view() - .is_some_and(|cv| cv.read(cx).parent_id(cx).is_none()); - if is_new_draft { - if let Some(active_workspace) = this - .multi_workspace - .upgrade() - .map(|mw| mw.read(cx).workspace().clone()) - { - this.active_entry = Some(ActiveEntry::Draft(active_workspace)); - } - } this.observe_draft_editor(cx); this.update_entries(cx); } @@ -749,26 +733,6 @@ impl Sidebar { }); } - fn active_draft_text(&self, cx: &App) -> Option { - let mw = self.multi_workspace.upgrade()?; - let workspace = 
mw.read(cx).workspace(); - let panel = workspace.read(cx).panel::(cx)?; - let conversation_view = panel.read(cx).active_conversation_view()?; - let thread_view = conversation_view.read(cx).active_thread()?; - let raw = thread_view.read(cx).message_editor.read(cx).text(cx); - let cleaned = Self::clean_mention_links(&raw); - let mut text: String = cleaned.split_whitespace().collect::>().join(" "); - if text.is_empty() { - None - } else { - const MAX_CHARS: usize = 250; - if let Some((truncate_at, _)) = text.char_indices().nth(MAX_CHARS) { - text.truncate(truncate_at); - } - Some(text.into()) - } - } - fn clean_mention_links(input: &str) -> String { let mut result = String::with_capacity(input.len()); let mut remaining = input; @@ -829,6 +793,42 @@ impl Sidebar { .detach_and_log_err(cx); } + fn open_workspace_and_create_draft( + &mut self, + project_group_key: &ProjectGroupKey, + window: &mut Window, + cx: &mut Context, + ) { + let Some(multi_workspace) = self.multi_workspace.upgrade() else { + return; + }; + + let path_list = project_group_key.path_list().clone(); + let host = project_group_key.host(); + let provisional_key = Some(project_group_key.clone()); + let active_workspace = multi_workspace.read(cx).workspace().clone(); + + let task = multi_workspace.update(cx, |this, cx| { + this.find_or_create_workspace( + path_list, + host, + provisional_key, + |options, window, cx| connect_remote(active_workspace, options, window, cx), + window, + cx, + ) + }); + + cx.spawn_in(window, async move |this, cx| { + let workspace = task.await?; + this.update_in(cx, |this, window, cx| { + this.create_new_thread(&workspace, window, cx); + })?; + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + /// Rebuilds the sidebar contents from current workspace and thread state. 
/// /// Iterates [`MultiWorkspace::project_group_keys`] to determine project @@ -859,43 +859,21 @@ impl Sidebar { let query = self.filter_editor.read(cx).text(cx); // Derive active_entry from the active workspace's agent panel. - // Draft is checked first because a conversation can have a session_id - // before any messages are sent. However, a thread that's still loading - // also appears as a "draft" (no messages yet). + // A tracked draft (in `draft_threads`) is checked first via + // `active_draft_id`. Then we check for a thread with a session_id. + // If a thread is mid-load with no session_id yet, we fall back to + // `pending_remote_thread_activation` or keep the previous value. if let Some(active_ws) = &active_workspace { if let Some(panel) = active_ws.read(cx).panel::(cx) { - let active_thread_is_draft = panel.read(cx).active_thread_is_draft(cx); - let active_conversation_view = panel.read(cx).active_conversation_view(); - - if active_thread_is_draft || active_conversation_view.is_none() { - if active_conversation_view.is_none() - && let Some(session_id) = self.pending_remote_thread_activation.clone() - { - self.active_entry = Some(ActiveEntry::Thread { - session_id, - workspace: active_ws.clone(), - }); - } else { - let conversation_parent_id = - active_conversation_view.and_then(|cv| cv.read(cx).parent_id(cx)); - let preserving_thread = if let Some(ActiveEntry::Thread { - session_id, - .. 
- }) = &self.active_entry - { - self.active_entry_workspace() == Some(active_ws) - && conversation_parent_id - .as_ref() - .is_some_and(|id| id == session_id) - } else { - false - }; - if !preserving_thread { - self.active_entry = Some(ActiveEntry::Draft(active_ws.clone())); - } - } - } else if let Some(session_id) = - active_conversation_view.and_then(|cv| cv.read(cx).parent_id(cx)) + let panel = panel.read(cx); + if let Some(draft_id) = panel.active_draft_id() { + self.active_entry = Some(ActiveEntry::Draft { + id: draft_id, + workspace: active_ws.clone(), + }); + } else if let Some(session_id) = panel + .active_conversation_view() + .and_then(|cv| cv.read(cx).parent_id(cx)) { if self.pending_remote_thread_activation.as_ref() == Some(&session_id) { self.pending_remote_thread_activation = None; @@ -904,9 +882,14 @@ impl Sidebar { session_id, workspace: active_ws.clone(), }); + } else if let Some(session_id) = self.pending_remote_thread_activation.clone() { + self.active_entry = Some(ActiveEntry::Thread { + session_id, + workspace: active_ws.clone(), + }); } - // else: conversation exists, not a draft, but no session_id - // yet — thread is mid-load. Keep previous value. + // else: conversation is mid-load or panel is + // uninitialized — keep previous active_entry. } } @@ -1221,9 +1204,6 @@ impl Sidebar { entries.push(thread.into()); } } else { - let is_draft_for_group = is_active - && matches!(&self.active_entry, Some(ActiveEntry::Draft(ws)) if group_workspaces.contains(ws)); - project_header_indices.push(entries.len()); entries.push(ListEntry::ProjectHeader { key: group_key.clone(), @@ -1239,66 +1219,43 @@ impl Sidebar { continue; } - // Emit a DraftThread entry when the active draft belongs to this group. 
- if is_draft_for_group { - if let Some(ActiveEntry::Draft(draft_ws)) = &self.active_entry { - let ws_worktree_paths = ThreadWorktreePaths::from_project( - draft_ws.read(cx).project().read(cx), - cx, - ); - let worktrees = worktree_info_from_thread_paths(&ws_worktree_paths); - entries.push(ListEntry::DraftThread { - key: group_key.clone(), - workspace: None, - worktrees, - }); - } - } - - // Emit a DraftThread for each open linked worktree workspace - // that has no threads. Skip the specific workspace that is - // showing the active draft (it already has a DraftThread entry - // from the block above). + // Emit DraftThread entries by reading draft IDs from + // each workspace's AgentPanel in this group. { - let draft_ws_id = if is_draft_for_group { - self.active_entry.as_ref().and_then(|e| match e { - ActiveEntry::Draft(ws) => Some(ws.entity_id()), - _ => None, - }) - } else { - None - }; - let thread_store = ThreadMetadataStore::global(cx); + let mut group_draft_ids: Vec<(DraftId, Entity)> = Vec::new(); for ws in group_workspaces { - if Some(ws.entity_id()) == draft_ws_id { - continue; - } - let ws_worktree_paths = - ThreadWorktreePaths::from_project(ws.read(cx).project().read(cx), cx); - let has_linked_worktrees = - worktree_info_from_thread_paths(&ws_worktree_paths) - .iter() - .any(|wt| wt.kind == ui::WorktreeKind::Linked); - if !has_linked_worktrees { - continue; - } - let ws_path_list = workspace_path_list(ws, cx); - let store = thread_store.read(cx); - let has_threads = store.entries_for_path(&ws_path_list).next().is_some() - || store - .entries_for_main_worktree_path(&ws_path_list) - .next() - .is_some(); - if has_threads { - continue; + if let Some(panel) = ws.read(cx).panel::(cx) { + let ids = panel.read(cx).draft_ids(); + + for draft_id in ids { + group_draft_ids.push((draft_id, ws.clone())); + } } - let worktrees = worktree_info_from_thread_paths(&ws_worktree_paths); + } + // For empty groups with no drafts, emit a + // placeholder DraftThread. 
+ if !has_threads && group_draft_ids.is_empty() { entries.push(ListEntry::DraftThread { + draft_id: None, key: group_key.clone(), - workspace: Some(ws.clone()), - worktrees, + workspace: group_workspaces.first().cloned(), + worktrees: Vec::new(), }); + } else { + for (draft_id, ws) in &group_draft_ids { + let ws_worktree_paths = ThreadWorktreePaths::from_project( + ws.read(cx).project().read(cx), + cx, + ); + let worktrees = worktree_info_from_thread_paths(&ws_worktree_paths); + entries.push(ListEntry::DraftThread { + draft_id: Some(*draft_id), + key: group_key.clone(), + workspace: Some(ws.clone()), + worktrees, + }); + } } } @@ -1457,15 +1414,34 @@ impl Sidebar { is_fully_expanded, } => self.render_view_more(ix, key, *is_fully_expanded, is_selected, cx), ListEntry::DraftThread { + draft_id, key, workspace, worktrees, } => { - if workspace.is_some() { - self.render_new_thread(ix, key, worktrees, workspace.as_ref(), is_selected, cx) - } else { - self.render_draft_thread(ix, is_active, worktrees, is_selected, cx) - } + let group_has_threads = self + .contents + .entries + .iter() + .any(|e| matches!(e, ListEntry::ProjectHeader { key: hk, has_threads: true, .. } if hk == key)); + // Count drafts in the AgentPanel for this group's workspaces. + let sibling_draft_count = workspace + .as_ref() + .and_then(|ws| ws.read(cx).panel::(cx)) + .map(|p| p.read(cx).draft_ids().len()) + .unwrap_or(0); + let can_dismiss = group_has_threads || sibling_draft_count > 1; + self.render_draft_thread( + ix, + *draft_id, + key, + workspace.as_ref(), + is_active, + worktrees, + is_selected, + can_dismiss, + cx, + ) } }; @@ -1533,17 +1509,6 @@ impl Sidebar { (IconName::ChevronDown, "Collapse Project") }; - let has_new_thread_entry = self - .contents - .entries - .get(ix + 1) - .is_some_and(|entry| matches!(entry, ListEntry::DraftThread { .. 
})); - let show_new_thread_button = !has_new_thread_entry && !self.has_filter_query(cx); - let workspace = self.multi_workspace.upgrade().and_then(|mw| { - mw.read(cx) - .workspace_for_paths(key.path_list(), key.host().as_ref(), cx) - }); - let key_for_toggle = key.clone(); let key_for_collapse = key.clone(); let view_more_expanded = self.expanded_groups.contains_key(key); @@ -1559,9 +1524,26 @@ impl Sidebar { }; let color = cx.theme().colors(); - let hover_color = color + let sidebar_base_bg = color + .title_bar_background + .blend(color.panel_background.opacity(0.25)); + + let base_bg = color.background.blend(sidebar_base_bg); + + let hover_base = color .element_active .blend(color.element_background.opacity(0.2)); + let hover_solid = base_bg.blend(hover_base); + let real_hover_color = if is_active { base_bg } else { hover_solid }; + + let group_name_for_gradient = group_name.clone(); + let gradient_overlay = move || { + GradientFade::new(base_bg, real_hover_color, real_hover_color) + .width(px(64.0)) + .right(px(-2.0)) + .gradient_stop(0.75) + .group_name(group_name_for_gradient.clone()) + }; let is_ellipsis_menu_open = self.project_header_menu_ix == Some(ix); @@ -1569,9 +1551,11 @@ impl Sidebar { .id(id) .group(&group_name) .h(Tab::content_height(cx)) + .relative() .w_full() .pl(px(5.)) .pr_1p5() + .justify_between() .border_1() .map(|this| { if is_focused { @@ -1580,7 +1564,6 @@ impl Sidebar { this.border_color(gpui::transparent_black()) } }) - .justify_between() .child( h_flex() .relative() @@ -1633,11 +1616,13 @@ impl Sidebar { }) }), ) + .child(gradient_overlay()) .child( h_flex() .when(!is_ellipsis_menu_open, |this| { this.visible_on_hover(&group_name) }) + .child(gradient_overlay()) .on_mouse_down(gpui::MouseButton::Left, |_, _, cx| { cx.stop_propagation(); }) @@ -1663,37 +1648,54 @@ impl Sidebar { })), ) }) - .when_some( - workspace.filter(|_| show_new_thread_button), - |this, workspace| { - let key = key.clone(); - let focus_handle = 
self.focus_handle.clone(); - this.child( - IconButton::new( - SharedString::from(format!( - "{id_prefix}project-header-new-thread-{ix}", - )), - IconName::Plus, - ) - .icon_size(IconSize::Small) - .tooltip(move |_, cx| { - Tooltip::for_action_in( - "New Thread", - &NewThread, - &focus_handle, - cx, - ) - }) - .on_click(cx.listener( - move |this, _, window, cx| { - this.collapsed_groups.remove(&key); - this.selection = None; - this.create_new_thread(&workspace, window, cx); - }, - )), + .child({ + let key = key.clone(); + let focus_handle = self.focus_handle.clone(); + + IconButton::new( + SharedString::from(format!( + "{id_prefix}project-header-new-thread-{ix}", + )), + IconName::Plus, + ) + .icon_size(IconSize::Small) + .tooltip(move |_, cx| { + Tooltip::for_action_in( + "Start New Agent Thread", + &NewThread, + &focus_handle, + cx, ) - }, - ), + }) + .on_click(cx.listener( + move |this, _, window, cx| { + this.collapsed_groups.remove(&key); + this.selection = None; + // If the active workspace belongs to this + // group, use it (preserves linked worktree + // context). Otherwise resolve from the key. 
+ let workspace = this.multi_workspace.upgrade().and_then(|mw| { + let mw = mw.read(cx); + let active = mw.workspace().clone(); + let active_key = active.read(cx).project_group_key(cx); + if active_key == key { + Some(active) + } else { + mw.workspace_for_paths( + key.path_list(), + key.host().as_ref(), + cx, + ) + } + }); + if let Some(workspace) = workspace { + this.create_new_thread(&workspace, window, cx); + } else { + this.open_workspace_and_create_draft(&key, window, cx); + } + }, + )) + }), ) .map(|this| { if !has_threads && is_active { @@ -1701,7 +1703,7 @@ impl Sidebar { } else { let key = key.clone(); this.cursor_pointer() - .when(!is_active, |this| this.hover(|s| s.bg(hover_color))) + .when(!is_active, |this| this.hover(|s| s.bg(hover_solid))) .tooltip(Tooltip::text("Open Workspace")) .on_click(cx.listener(move |this, _, window, cx| { if let Some(workspace) = this.multi_workspace.upgrade().and_then(|mw| { @@ -1711,12 +1713,11 @@ impl Sidebar { cx, ) }) { - this.active_entry = Some(ActiveEntry::Draft(workspace.clone())); - if let Some(multi_workspace) = this.multi_workspace.upgrade() { - multi_workspace.update(cx, |multi_workspace, cx| { - multi_workspace.activate(workspace.clone(), window, cx); - }); - } + // Just activate the workspace. The + // AgentPanel remembers what was last + // shown, so the user returns to whatever + // thread/draft they were looking at. + this.activate_workspace(&workspace, window, cx); if AgentPanel::is_visible(&workspace, cx) { workspace.update(cx, |workspace, cx| { workspace.focus_panel::(window, cx); @@ -2165,16 +2166,21 @@ impl Sidebar { self.expand_thread_group(&key, cx); } } - ListEntry::DraftThread { key, workspace, .. } => { + ListEntry::DraftThread { + draft_id, + key, + workspace, + .. 
+ } => { + let draft_id = *draft_id; let key = key.clone(); let workspace = workspace.clone(); - if let Some(workspace) = workspace.or_else(|| { - self.multi_workspace.upgrade().and_then(|mw| { - mw.read(cx) - .workspace_for_paths(key.path_list(), key.host().as_ref(), cx) - }) - }) { - self.create_new_thread(&workspace, window, cx); + if let Some(draft_id) = draft_id { + if let Some(workspace) = workspace { + self.activate_draft(draft_id, &workspace, window, cx); + } + } else if let Some(workspace) = workspace { + self.activate_workspace(&workspace, window, cx); } else { self.open_workspace_for_group(&key, window, cx); } @@ -2352,10 +2358,10 @@ impl Sidebar { }; let pending_session_id = metadata.session_id.clone(); - let is_remote = project_group_key.host().is_some(); - if is_remote { - self.pending_remote_thread_activation = Some(pending_session_id.clone()); - } + // Mark the pending thread activation so rebuild_contents + // preserves the Thread active_entry during loading (prevents + // spurious draft flash). + self.pending_remote_thread_activation = Some(pending_session_id.clone()); let host = project_group_key.host(); let provisional_key = Some(project_group_key.clone()); @@ -2379,7 +2385,7 @@ impl Sidebar { // failures or cancellations do not leave a stale connection modal behind. remote_connection::dismiss_connection_modal(&modal_workspace, cx); - if result.is_err() || is_remote { + if result.is_err() { this.update(cx, |this, _cx| { if this.pending_remote_thread_activation.as_ref() == Some(&pending_session_id) { this.pending_remote_thread_activation = None; @@ -2813,22 +2819,20 @@ impl Sidebar { .entries_for_path(folder_paths) .filter(|t| t.session_id != *session_id) .count(); + if remaining > 0 { return None; } let multi_workspace = self.multi_workspace.upgrade()?; - // Thread metadata doesn't carry host info yet, so we pass - // `None` here. This may match a local workspace with the same - // paths instead of the intended remote one. 
let workspace = multi_workspace .read(cx) .workspace_for_paths(folder_paths, None, cx)?; - // Don't remove the main worktree workspace — the project - // header always provides access to it. let group_key = workspace.read(cx).project_group_key(cx); - (group_key.path_list() != folder_paths).then_some(workspace) + let is_linked_worktree = group_key.path_list() != folder_paths; + + is_linked_worktree.then_some(workspace) }); if let Some(workspace_to_remove) = workspace_to_remove { @@ -2881,7 +2885,6 @@ impl Sidebar { }) .detach_and_log_err(cx); } else { - // Simple case: no workspace removal needed. let neighbor_metadata = neighbor.map(|(metadata, _)| metadata); let in_flight = self.start_archive_worktree_task(session_id, roots_to_archive, cx); self.archive_and_activate( @@ -2947,7 +2950,11 @@ impl Sidebar { .is_some_and(|id| id == *session_id); if panel_shows_archived { panel.update(cx, |panel, cx| { - panel.clear_active_thread(window, cx); + // Replace the archived thread with a + // tracked draft so the panel isn't left + // in Uninitialized state. + let id = panel.create_draft(window, cx); + panel.activate_draft(id, false, window, cx); }); } } @@ -2960,6 +2967,7 @@ impl Sidebar { // tell the panel to load it and activate that workspace. // `rebuild_contents` will reconcile `active_entry` once the thread // finishes loading. + if let Some(metadata) = neighbor { if let Some(workspace) = self.multi_workspace.upgrade().and_then(|mw| { mw.read(cx) @@ -2974,26 +2982,24 @@ impl Sidebar { // No neighbor or its workspace isn't open — fall back to a new // draft. Use the group workspace (main project) rather than the // active entry workspace, which may be a linked worktree that is - // about to be cleaned up. + // about to be cleaned up or already removed. 
let fallback_workspace = thread_folder_paths .and_then(|folder_paths| { let mw = self.multi_workspace.upgrade()?; let mw = mw.read(cx); - // Find the group's main workspace (whose root paths match - // the project group key, not the thread's folder paths). let thread_workspace = mw.workspace_for_paths(folder_paths, None, cx)?; let group_key = thread_workspace.read(cx).project_group_key(cx); mw.workspace_for_paths(group_key.path_list(), None, cx) }) - .or_else(|| self.active_entry_workspace().cloned()); + .or_else(|| { + self.multi_workspace + .upgrade() + .map(|mw| mw.read(cx).workspace().clone()) + }); if let Some(workspace) = fallback_workspace { self.activate_workspace(&workspace, window, cx); - if let Some(panel) = workspace.read(cx).panel::(cx) { - panel.update(cx, |panel, cx| { - panel.new_thread(&NewThread, window, cx); - }); - } + self.create_new_thread(&workspace, window, cx); } } @@ -3120,35 +3126,18 @@ impl Sidebar { self.archive_thread(&session_id, window, cx); } Some(ListEntry::DraftThread { + draft_id: Some(draft_id), workspace: Some(workspace), .. }) => { - self.remove_worktree_workspace(workspace.clone(), window, cx); + let draft_id = *draft_id; + let workspace = workspace.clone(); + self.remove_draft(draft_id, &workspace, window, cx); } _ => {} } } - fn remove_worktree_workspace( - &mut self, - workspace: Entity, - window: &mut Window, - cx: &mut Context, - ) { - if let Some(multi_workspace) = self.multi_workspace.upgrade() { - multi_workspace - .update(cx, |mw, cx| { - mw.remove( - [workspace], - |this, _window, _cx| gpui::Task::ready(Ok(this.workspace().clone())), - window, - cx, - ) - }) - .detach_and_log_err(cx); - } - } - fn record_thread_access(&mut self, session_id: &acp::SessionId) { self.thread_last_accessed .insert(session_id.clone(), Utc::now()); @@ -3687,30 +3676,13 @@ impl Sidebar { // If there is a keyboard selection, walk backwards through // `project_header_indices` to find the header that owns the selected // row. 
Otherwise fall back to the active workspace. - let workspace = if let Some(selected_ix) = self.selection { - self.contents - .project_header_indices - .iter() - .rev() - .find(|&&header_ix| header_ix <= selected_ix) - .and_then(|&header_ix| match &self.contents.entries[header_ix] { - ListEntry::ProjectHeader { key, .. } => { - self.multi_workspace.upgrade().and_then(|mw| { - mw.read(cx).workspace_for_paths( - key.path_list(), - key.host().as_ref(), - cx, - ) - }) - } - _ => None, - }) - } else { - // Use the currently active workspace. - self.multi_workspace - .upgrade() - .map(|mw| mw.read(cx).workspace().clone()) - }; + // Always use the currently active workspace so that drafts + // are created in the linked worktree the user is focused on, + // not the main worktree resolved from the project header. + let workspace = self + .multi_workspace + .upgrade() + .map(|mw| mw.read(cx).workspace().clone()); let Some(workspace) = workspace else { return; @@ -3729,20 +3701,166 @@ impl Sidebar { return; }; - self.active_entry = Some(ActiveEntry::Draft(workspace.clone())); - multi_workspace.update(cx, |multi_workspace, cx| { multi_workspace.activate(workspace.clone(), window, cx); }); - workspace.update(cx, |workspace, cx| { - if let Some(agent_panel) = workspace.panel::(cx) { - agent_panel.update(cx, |panel, cx| { - panel.new_thread(&NewThread, window, cx); + let draft_id = workspace.update(cx, |workspace, cx| { + let panel = workspace.panel::(cx)?; + let draft_id = panel.update(cx, |panel, cx| { + let id = panel.create_draft(window, cx); + panel.activate_draft(id, true, window, cx); + id + }); + workspace.focus_panel::(window, cx); + Some(draft_id) + }); + + if let Some(draft_id) = draft_id { + self.active_entry = Some(ActiveEntry::Draft { + id: draft_id, + workspace: workspace.clone(), + }); + } + } + + fn activate_draft( + &mut self, + draft_id: DraftId, + workspace: &Entity, + window: &mut Window, + cx: &mut Context, + ) { + if let Some(multi_workspace) = 
self.multi_workspace.upgrade() { + multi_workspace.update(cx, |mw, cx| { + mw.activate(workspace.clone(), window, cx); + }); + } + + workspace.update(cx, |ws, cx| { + if let Some(panel) = ws.panel::(cx) { + panel.update(cx, |panel, cx| { + panel.activate_draft(draft_id, true, window, cx); + }); + } + ws.focus_panel::(window, cx); + }); + + self.active_entry = Some(ActiveEntry::Draft { + id: draft_id, + workspace: workspace.clone(), + }); + + self.observe_draft_editor(cx); + } + + fn remove_draft( + &mut self, + draft_id: DraftId, + workspace: &Entity, + window: &mut Window, + cx: &mut Context, + ) { + workspace.update(cx, |ws, cx| { + if let Some(panel) = ws.panel::(cx) { + panel.update(cx, |panel, _cx| { + panel.remove_draft(draft_id); + }); + } + }); + + let was_active = self + .active_entry + .as_ref() + .is_some_and(|e| e.is_active_draft(draft_id)); + + if was_active { + let mut switched = false; + let group_key = workspace.read(cx).project_group_key(cx); + + // Try the next draft below in the sidebar (smaller ID + // since the list is newest-first). Fall back to the one + // above (larger ID) if the deleted draft was last. + if let Some(panel) = workspace.read(cx).panel::(cx) { + let ids = panel.read(cx).draft_ids(); + let sibling = ids + .iter() + .find(|id| id.0 < draft_id.0) + .or_else(|| ids.first()); + if let Some(&sibling_id) = sibling { + self.activate_draft(sibling_id, workspace, window, cx); + switched = true; + } + } + + // No sibling draft — try the first thread in the group. 
+ if !switched { + let first_thread = self.contents.entries.iter().find_map(|entry| { + if let ListEntry::Thread(thread) = entry { + if let ThreadEntryWorkspace::Open(ws) = &thread.workspace { + if ws.read(cx).project_group_key(cx) == group_key { + return Some((thread.metadata.clone(), ws.clone())); + } + } + } + None + }); + if let Some((metadata, ws)) = first_thread { + self.activate_thread(metadata, &ws, false, window, cx); + switched = true; + } + } + + if !switched { + self.active_entry = None; + } + } + + self.update_entries(cx); + } + + fn clear_draft( + &mut self, + draft_id: DraftId, + workspace: &Entity, + window: &mut Window, + cx: &mut Context, + ) { + workspace.update(cx, |ws, cx| { + if let Some(panel) = ws.panel::(cx) { + panel.update(cx, |panel, cx| { + panel.clear_draft_editor(draft_id, window, cx); }); } - workspace.focus_panel::(window, cx); }); + self.update_entries(cx); + } + + /// Cleans, collapses whitespace, and truncates raw editor text + /// for display as a draft label in the sidebar. + fn truncate_draft_label(raw: &str) -> Option { + let cleaned = Self::clean_mention_links(raw); + let mut text: String = cleaned.split_whitespace().collect::>().join(" "); + if text.is_empty() { + return None; + } + const MAX_CHARS: usize = 250; + if let Some((truncate_at, _)) = text.char_indices().nth(MAX_CHARS) { + text.truncate(truncate_at); + } + Some(text.into()) + } + + /// Reads a draft's prompt text from its ConversationView in the AgentPanel. 
+ fn read_draft_text( + &self, + draft_id: DraftId, + workspace: &Entity, + cx: &App, + ) -> Option { + let panel = workspace.read(cx).panel::(cx)?; + let raw = panel.read(cx).draft_editor_text(draft_id, cx)?; + Self::truncate_draft_label(&raw) } fn active_project_group_key(&self, cx: &App) -> Option { @@ -3978,111 +4096,122 @@ impl Sidebar { fn render_draft_thread( &self, ix: usize, + draft_id: Option, + key: &ProjectGroupKey, + workspace: Option<&Entity>, is_active: bool, worktrees: &[WorktreeInfo], is_selected: bool, + can_dismiss: bool, cx: &mut Context, ) -> AnyElement { - let label: SharedString = if is_active { - self.active_draft_text(cx) - .unwrap_or_else(|| "New Thread".into()) - } else { - "New Thread".into() - }; + let label: SharedString = draft_id + .and_then(|id| workspace.and_then(|ws| self.read_draft_text(id, ws, cx))) + .unwrap_or_else(|| "New Agent Thread".into()); let id = SharedString::from(format!("draft-thread-btn-{}", ix)); - let thread_item = ThreadItem::new(id, label) - .icon(IconName::Plus) - .icon_color(Color::Custom(cx.theme().colors().icon_muted.opacity(0.8))) - .worktrees( - worktrees - .iter() - .map(|wt| ThreadItemWorktreeInfo { - name: wt.name.clone(), - full_path: wt.full_path.clone(), - highlight_positions: wt.highlight_positions.clone(), - kind: wt.kind, - }) - .collect(), - ) - .selected(true) - .focused(is_selected) - .on_click(cx.listener(|this, _, window, cx| { - if let Some(workspace) = this.active_workspace(cx) { - if !AgentPanel::is_visible(&workspace, cx) { - workspace.update(cx, |workspace, cx| { - workspace.focus_panel::(window, cx); - }); - } - } - })); - - div() - .on_mouse_down(gpui::MouseButton::Left, |_, _, cx| { - cx.stop_propagation(); + let worktrees = worktrees + .iter() + .map(|worktree| ThreadItemWorktreeInfo { + name: worktree.name.clone(), + full_path: worktree.full_path.clone(), + highlight_positions: worktree.highlight_positions.clone(), + kind: worktree.kind, }) - .child(thread_item) - 
.into_any_element() - } + .collect(); - fn render_new_thread( - &self, - ix: usize, - key: &ProjectGroupKey, - worktrees: &[WorktreeInfo], - workspace: Option<&Entity>, - is_selected: bool, - cx: &mut Context, - ) -> AnyElement { - let label: SharedString = DEFAULT_THREAD_TITLE.into(); - let key = key.clone(); + let is_hovered = self.hovered_thread_index == Some(ix); - let id = SharedString::from(format!("new-thread-btn-{}", ix)); + let key = key.clone(); + let workspace_for_click = workspace.cloned(); + let workspace_for_remove = workspace.cloned(); + let workspace_for_clear = workspace.cloned(); - let mut thread_item = ThreadItem::new(id, label) - .icon(IconName::Plus) - .icon_color(Color::Custom(cx.theme().colors().icon_muted.opacity(0.8))) - .worktrees( - worktrees - .iter() - .map(|wt| ThreadItemWorktreeInfo { - name: wt.name.clone(), - full_path: wt.full_path.clone(), - highlight_positions: wt.highlight_positions.clone(), - kind: wt.kind, - }) - .collect(), - ) - .selected(false) + ThreadItem::new(id, label) + .icon(IconName::Pencil) + .icon_color(Color::Custom(cx.theme().colors().icon_muted.opacity(0.4))) + .worktrees(worktrees) + .selected(is_active) .focused(is_selected) + .hovered(is_hovered) + .on_hover(cx.listener(move |this, is_hovered: &bool, _window, cx| { + if *is_hovered { + this.hovered_thread_index = Some(ix); + } else if this.hovered_thread_index == Some(ix) { + this.hovered_thread_index = None; + } + cx.notify(); + })) .on_click(cx.listener(move |this, _, window, cx| { - this.selection = None; - if let Some(workspace) = this.multi_workspace.upgrade().and_then(|mw| { - mw.read(cx) - .workspace_for_paths(key.path_list(), key.host().as_ref(), cx) - }) { - this.create_new_thread(&workspace, window, cx); + if let Some(draft_id) = draft_id { + if let Some(workspace) = &workspace_for_click { + this.activate_draft(draft_id, workspace, window, cx); + } + } else if let Some(workspace) = &workspace_for_click { + // Placeholder with an open workspace — 
just + // activate it. The panel remembers its last view. + this.activate_workspace(workspace, window, cx); + if AgentPanel::is_visible(workspace, cx) { + workspace.update(cx, |ws, cx| { + ws.focus_panel::(window, cx); + }); + } } else { + // No workspace at all — just open one. The + // panel's load fallback will create a draft. this.open_workspace_for_group(&key, window, cx); } - })); - - // Linked worktree DraftThread entries can be dismissed, which removes - // the workspace from the multi-workspace. - if let Some(workspace) = workspace.cloned() { - thread_item = thread_item.action_slot( - IconButton::new("close-worktree-workspace", IconName::Close) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) - .tooltip(Tooltip::text("Close Workspace")) - .on_click(cx.listener(move |this, _, window, cx| { - this.remove_worktree_workspace(workspace.clone(), window, cx); - })), - ); - } - - thread_item.into_any_element() + })) + .when_some(draft_id.filter(|_| can_dismiss), |this, draft_id| { + this.action_slot( + div() + .on_mouse_down(gpui::MouseButton::Left, |_, _, cx| { + cx.stop_propagation(); + }) + .child( + IconButton::new( + SharedString::from(format!("close-draft-{}", ix)), + IconName::Close, + ) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .tooltip(Tooltip::text("Remove Draft")) + .on_click(cx.listener( + move |this, _, window, cx| { + if let Some(workspace) = &workspace_for_remove { + this.remove_draft(draft_id, workspace, window, cx); + } + }, + )), + ), + ) + }) + .when_some(draft_id.filter(|_| !can_dismiss), |this, draft_id| { + this.action_slot( + div() + .on_mouse_down(gpui::MouseButton::Left, |_, _, cx| { + cx.stop_propagation(); + }) + .child( + IconButton::new( + SharedString::from(format!("clear-draft-{}", ix)), + IconName::Close, + ) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .tooltip(Tooltip::text("Clear Draft")) + .on_click(cx.listener( + move |this, _, window, cx| { + if let Some(workspace) = 
&workspace_for_clear { + this.clear_draft(draft_id, workspace, window, cx); + } + }, + )), + ), + ) + }) + .into_any_element() } fn render_no_results(&self, cx: &mut Context) -> impl IntoElement { @@ -4389,8 +4518,7 @@ impl Sidebar { } fn render_acp_import_onboarding(&mut self, cx: &mut Context) -> impl IntoElement { - let description = - "Import threads from your ACP agents — whether started in Zed or another client."; + let description = "Import threads from agents like Claude Agent, Codex, and more, whether started in Zed or another client."; let bg = cx.theme().colors().text_accent; @@ -4411,7 +4539,7 @@ impl Sidebar { .w_full() .gap_1() .justify_between() - .child(Label::new("Looking for ACP threads?")) + .child(Label::new("Looking for threads from external agents?")) .child( IconButton::new("close-onboarding", IconName::Close) .icon_size(IconSize::Small) @@ -4420,7 +4548,7 @@ impl Sidebar { ) .child(Label::new(description).color(Color::Muted).mb_2()) .child( - Button::new("import-acp", "Import ACP Threads") + Button::new("import-acp", "Import Threads") .full_width() .style(ButtonStyle::OutlinedCustom(cx.theme().colors().border)) .label_size(LabelSize::Small) diff --git a/crates/sidebar/src/sidebar_tests.rs b/crates/sidebar/src/sidebar_tests.rs index ea4ec36674878ca958a2f73af0adf749a40157f6..0b197b5fd278bbdf19b4c30fe27e1d591ad29696 100644 --- a/crates/sidebar/src/sidebar_tests.rs +++ b/crates/sidebar/src/sidebar_tests.rs @@ -45,7 +45,7 @@ fn assert_active_thread(sidebar: &Sidebar, session_id: &acp::SessionId, msg: &st #[track_caller] fn assert_active_draft(sidebar: &Sidebar, workspace: &Entity, msg: &str) { assert!( - matches!(&sidebar.active_entry, Some(ActiveEntry::Draft(ws)) if ws == workspace), + matches!(&sidebar.active_entry, Some(ActiveEntry::Draft { workspace: ws, .. 
}) if ws == workspace), "{msg}: expected active_entry to be Draft for workspace {:?}, got {:?}", workspace.entity_id(), sidebar.active_entry, @@ -340,11 +340,6 @@ fn visible_entries_as_strings( } else { "" }; - let is_active = sidebar - .active_entry - .as_ref() - .is_some_and(|active| active.matches_entry(entry)); - let active_indicator = if is_active { " (active)" } else { "" }; match entry { ListEntry::ProjectHeader { label, @@ -377,7 +372,7 @@ fn visible_entries_as_strings( "" }; let worktree = format_linked_worktree_chips(&thread.worktrees); - format!(" {title}{worktree}{live}{status_str}{notified}{active_indicator}{selected}") + format!(" {title}{worktree}{live}{status_str}{notified}{selected}") } ListEntry::ViewMore { is_fully_expanded, .. @@ -388,17 +383,14 @@ fn visible_entries_as_strings( format!(" + View More{}", selected) } } - ListEntry::DraftThread { - workspace, - worktrees, - .. - } => { + ListEntry::DraftThread { worktrees, .. } => { let worktree = format_linked_worktree_chips(worktrees); - if workspace.is_some() { - format!(" [+ New Thread{}]{}", worktree, selected) - } else { - format!(" [~ Draft{}]{}{}", worktree, active_indicator, selected) - } + let is_active = sidebar + .active_entry + .as_ref() + .is_some_and(|e| e.matches_entry(entry)); + let active_marker = if is_active { " *" } else { "" }; + format!(" [~ Draft{}]{}{}", worktree, active_marker, selected) } } }) @@ -566,10 +558,7 @@ async fn test_single_workspace_no_threads(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [my-project]", - ] + vec!["v [my-project]", " [~ Draft]"] ); } @@ -1329,13 +1318,10 @@ async fn test_keyboard_navigation_on_empty_list(cx: &mut TestAppContext) { cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - // An empty project has only the header. + // An empty project has the header and an auto-created draft. 
assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [empty-project]", - ] + vec!["v [empty-project]", " [~ Draft]"] ); // Focus sidebar — focus_in does not set a selection @@ -1346,7 +1332,11 @@ async fn test_keyboard_navigation_on_empty_list(cx: &mut TestAppContext) { cx.dispatch_action(SelectNext); assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); - // At the end (only one entry), wraps back to first entry + // SelectNext advances to index 1 (draft entry) + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1)); + + // At the end (two entries), wraps back to first entry cx.dispatch_action(SelectNext); assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); @@ -1470,7 +1460,7 @@ async fn test_parallel_threads_shown_with_live_status(cx: &mut TestAppContext) { vec![ // "v [my-project]", - " Hello * (active)", + " Hello *", " Hello * (running)", ] ); @@ -1568,7 +1558,7 @@ async fn test_background_thread_completion_triggers_notification(cx: &mut TestAp vec![ // "v [project-a]", - " Hello * (running) (active)", + " Hello * (running)", ] ); @@ -1582,7 +1572,7 @@ async fn test_background_thread_completion_triggers_notification(cx: &mut TestAp vec![ // "v [project-a]", - " Hello * (!) 
(active)", + " Hello * (!)", ] ); } @@ -2274,7 +2264,7 @@ async fn test_thread_title_update_propagates_to_sidebar(cx: &mut TestAppContext) vec![ // "v [my-project]", - " Hello * (active)", + " Hello *", ] ); @@ -2300,7 +2290,7 @@ async fn test_thread_title_update_propagates_to_sidebar(cx: &mut TestAppContext) vec![ // "v [my-project]", - " Friendly Greeting with AI * (active)", + " Friendly Greeting with AI *", ] ); } @@ -2558,7 +2548,7 @@ async fn test_new_thread_button_works_after_adding_folder(cx: &mut TestAppContex vec![ // "v [project-a]", - " Hello * (active)", + " Hello *", ] ); @@ -2591,9 +2581,8 @@ async fn test_new_thread_button_works_after_adding_folder(cx: &mut TestAppContex assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - // - "v [project-a, project-b]", - " Hello * (active)", + "v [project-a, project-b]", // + " Hello *", ] ); @@ -3126,7 +3115,6 @@ async fn test_worktree_collision_keeps_active_workspace(cx: &mut TestAppContext) vec![ // "v [project-a, project-b]", - " [~ Draft] (active)", " Thread B", "v [project-a]", " Thread A", @@ -3207,7 +3195,6 @@ async fn test_worktree_collision_keeps_active_workspace(cx: &mut TestAppContext) vec![ // "v [project-a, project-b]", - " [~ Draft] (active)", " Thread A", " Worktree Thread {project-a:wt-feature}", " Thread B", @@ -3327,7 +3314,6 @@ async fn test_worktree_add_syncs_linked_worktree_sibling(cx: &mut TestAppContext vec![ // "v [project]", - " [~ Draft {wt-feature}] (active)", " Worktree Thread {wt-feature}", " Main Thread", ] @@ -3386,7 +3372,6 @@ async fn test_worktree_add_syncs_linked_worktree_sibling(cx: &mut TestAppContext vec![ // "v [other-project, project]", - " [~ Draft {project:wt-feature}] (active)", " Worktree Thread {project:wt-feature}", " Main Thread", ] @@ -3421,7 +3406,7 @@ async fn test_cmd_n_shows_new_thread_entry(cx: &mut TestAppContext) { vec![ // "v [my-project]", - " Hello * (active)", + " Hello *", ] ); @@ -3437,12 +3422,7 @@ async fn 
test_cmd_n_shows_new_thread_entry(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [my-project]", - " [~ Draft] (active)", - " Hello *", - ], + vec!["v [my-project]", " [~ Draft] *", " Hello *"], "After Cmd-N the sidebar should show a highlighted Draft entry" ); @@ -3478,25 +3458,20 @@ async fn test_draft_with_server_session_shows_as_draft(cx: &mut TestAppContext) vec![ // "v [my-project]", - " Hello * (active)", + " Hello *", ] ); - // Open a new draft thread via a server connection. This gives the - // conversation a parent_id (session assigned by the server) but - // no messages have been sent, so active_thread_is_draft() is true. - let draft_connection = StubAgentConnection::new(); - open_thread_with_connection(&panel, draft_connection, cx); + // Create a new draft via Cmd-N. Since new_thread() now creates a + // tracked draft in the AgentPanel, it appears in the sidebar. + panel.update_in(cx, |panel, window, cx| { + panel.new_thread(&NewThread, window, cx); + }); cx.run_until_parked(); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [my-project]", - " [~ Draft] (active)", - " Hello *", - ], + vec!["v [my-project]", " [~ Draft] *", " Hello *"], ); let workspace = multi_workspace.read_with(cx, |mw, _cx| mw.workspace().clone()); @@ -3509,6 +3484,80 @@ async fn test_draft_with_server_session_shows_as_draft(cx: &mut TestAppContext) }); } +#[gpui::test] +async fn test_sending_message_from_draft_removes_draft(cx: &mut TestAppContext) { + // When the user sends a message from a draft thread, the draft + // should be removed from the sidebar and the active_entry should + // transition to a Thread pointing at the new session. 
+ let project = init_test_project_with_agent_panel("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); + + // Create a saved thread so the group isn't empty. + let connection = StubAgentConnection::new(); + connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Done".into()), + )]); + open_thread_with_connection(&panel, connection, cx); + send_message(&panel, cx); + let existing_session_id = active_session_id(&panel, cx); + save_test_thread_metadata(&existing_session_id, &project, cx).await; + cx.run_until_parked(); + + // Create a draft via Cmd-N. + panel.update_in(cx, |panel, window, cx| { + panel.new_thread(&NewThread, window, cx); + }); + cx.run_until_parked(); + + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " [~ Draft] *", " Hello *"], + "draft should be visible before sending", + ); + sidebar.read_with(cx, |sidebar, _| { + assert_active_draft(sidebar, &workspace, "should be on draft before sending"); + }); + + // Simulate what happens when a draft sends its first message: + // the AgentPanel's MessageSentOrQueued handler removes the draft + // from `draft_threads`, then the sidebar rebuilds. We can't use + // the NativeAgentServer in tests, so replicate the key steps: + // remove the draft, open a real thread with a stub connection, + // and send. 
+ let draft_id = panel.read_with(cx, |panel, _| panel.active_draft_id().unwrap()); + panel.update_in(cx, |panel, _window, _cx| { + panel.remove_draft(draft_id); + }); + let draft_connection = StubAgentConnection::new(); + draft_connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("World".into()), + )]); + open_thread_with_connection(&panel, draft_connection, cx); + send_message(&panel, cx); + let new_session_id = active_session_id(&panel, cx); + save_test_thread_metadata(&new_session_id, &project, cx).await; + cx.run_until_parked(); + + // The draft should be gone and the new thread should be active. + let entries = visible_entries_as_strings(&sidebar, cx); + let draft_count = entries.iter().filter(|e| e.contains("Draft")).count(); + assert_eq!( + draft_count, 0, + "draft should be removed after sending a message" + ); + + sidebar.read_with(cx, |sidebar, _| { + assert_active_thread( + sidebar, + &new_session_id, + "active_entry should transition to the new thread after sending", + ); + }); +} + #[gpui::test] async fn test_cmd_n_shows_new_thread_entry_in_absorbed_worktree(cx: &mut TestAppContext) { // When the active workspace is an absorbed git worktree, cmd-n @@ -3593,7 +3642,7 @@ async fn test_cmd_n_shows_new_thread_entry_in_absorbed_worktree(cx: &mut TestApp vec![ // "v [project]", - " Hello {wt-feature-a} * (active)", + " Hello {wt-feature-a} *", ] ); @@ -3611,8 +3660,8 @@ async fn test_cmd_n_shows_new_thread_entry_in_absorbed_worktree(cx: &mut TestApp vec![ // "v [project]", - " [~ Draft {wt-feature-a}] (active)", - " Hello {wt-feature-a} *", + " [~ Draft {wt-feature-a}] *", + " Hello {wt-feature-a} *" ], "After Cmd-N in an absorbed worktree, the sidebar should show \ a highlighted Draft entry under the main repo header" @@ -3729,11 +3778,7 @@ async fn test_git_worktree_added_live_updates_sidebar(cx: &mut TestAppContext) { // The chip name is derived from the path even before git discovery. 
assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project]", - " Worktree Thread {rosewood}", - ] + vec!["v [project]", " Worktree Thread {rosewood}"] ); // Now add the worktree to the git state and trigger a rescan. @@ -3925,12 +3970,7 @@ async fn test_threadless_workspace_shows_new_thread_with_worktree_chip(cx: &mut // appears as a "New Thread" button with its worktree chip. assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project]", - " [+ New Thread {wt-feature-b}]", - " Thread A {wt-feature-a}", - ] + vec!["v [project]", " Thread A {wt-feature-a}",] ); } @@ -4184,12 +4224,7 @@ async fn test_absorbed_worktree_running_thread_shows_live_status(cx: &mut TestAp let entries = visible_entries_as_strings(&sidebar, cx); assert_eq!( entries, - vec![ - // - "v [project]", - " [~ Draft] (active)", - " Hello {wt-feature-a} * (running)", - ] + vec!["v [project]", " Hello {wt-feature-a} * (running)",] ); } @@ -4272,12 +4307,7 @@ async fn test_absorbed_worktree_completion_triggers_notification(cx: &mut TestAp assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project]", - " [~ Draft] (active)", - " Hello {wt-feature-a} * (running)", - ] + vec!["v [project]", " Hello {wt-feature-a} * (running)",] ); connection.end_turn(session_id, acp::StopReason::EndTurn); @@ -4285,12 +4315,7 @@ async fn test_absorbed_worktree_completion_triggers_notification(cx: &mut TestAp assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - // - "v [project]", - " [~ Draft] (active)", - " Hello {wt-feature-a} * (!)", - ] + vec!["v [project]", " Hello {wt-feature-a} * (!)",] ); } @@ -5498,6 +5523,7 @@ async fn test_linked_worktree_threads_not_duplicated_across_groups(cx: &mut Test vec![ // "v [other, project]", + " [~ Draft]", "v [project]", " Worktree Thread {wt-feature-a}", ] @@ -5931,6 +5957,12 @@ async fn test_archive_thread_active_entry_management(cx: &mut TestAppContext) { let panel_b = add_agent_panel(&workspace_b, cx); 
cx.run_until_parked(); + // Explicitly create a draft on workspace_b so the sidebar tracks one. + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.create_new_thread(&workspace_b, window, cx); + }); + cx.run_until_parked(); + // --- Scenario 1: archive a thread in the non-active workspace --- // Create a thread in project-a (non-active — project-b is active). @@ -5951,7 +5983,7 @@ async fn test_archive_thread_active_entry_management(cx: &mut TestAppContext) { // active_entry should still be a draft on workspace_b (the active one). sidebar.read_with(cx, |sidebar, _| { assert!( - matches!(&sidebar.active_entry, Some(ActiveEntry::Draft(ws)) if ws == &workspace_b), + matches!(&sidebar.active_entry, Some(ActiveEntry::Draft { workspace: ws, .. }) if ws == &workspace_b), "expected Draft(workspace_b) after archiving non-active thread, got: {:?}", sidebar.active_entry, ); @@ -5986,7 +6018,7 @@ async fn test_archive_thread_active_entry_management(cx: &mut TestAppContext) { // Should fall back to a draft on the same workspace. sidebar.read_with(cx, |sidebar, _| { assert!( - matches!(&sidebar.active_entry, Some(ActiveEntry::Draft(ws)) if ws == &workspace_b), + matches!(&sidebar.active_entry, Some(ActiveEntry::Draft { workspace: ws, .. }) if ws == &workspace_b), "expected Draft(workspace_b) after archiving active thread, got: {:?}", sidebar.active_entry, ); @@ -5996,9 +6028,8 @@ async fn test_archive_thread_active_entry_management(cx: &mut TestAppContext) { #[gpui::test] async fn test_switch_to_workspace_with_archived_thread_shows_draft(cx: &mut TestAppContext) { // When a thread is archived while the user is in a different workspace, - // the archiving code clears the thread from its panel (via - // `clear_active_thread`). Switching back to that workspace should show - // a draft, not the archived thread. + // the archiving code replaces the thread with a tracked draft in its + // panel. Switching back to that workspace should show the draft. 
agent_ui::test_support::init_test(cx); cx.update(|cx| { ThreadStore::init_global(cx); @@ -6059,7 +6090,7 @@ async fn test_switch_to_workspace_with_archived_thread_shows_draft(cx: &mut Test sidebar.read_with(cx, |sidebar, _| { assert!( - matches!(&sidebar.active_entry, Some(ActiveEntry::Draft(ws)) if ws == &workspace_a), + matches!(&sidebar.active_entry, Some(ActiveEntry::Draft { workspace: ws, .. }) if ws == &workspace_a), "expected Draft(workspace_a) after switching to workspace with archived thread, got: {:?}", sidebar.active_entry, ); @@ -6561,9 +6592,10 @@ async fn test_archive_thread_on_linked_worktree_selects_sibling_thread(cx: &mut #[gpui::test] async fn test_linked_worktree_workspace_reachable_and_dismissable(cx: &mut TestAppContext) { // When a linked worktree is opened as its own workspace and the user - // switches away, the workspace must still be reachable from a DraftThread - // sidebar entry. Pressing RemoveSelectedThread (shift-backspace) on that - // entry should remove the workspace. + // creates a draft thread from it, then switches away, the workspace must + // still be reachable from that DraftThread sidebar entry. Pressing + // RemoveSelectedThread (shift-backspace) on that entry should remove the + // workspace. init_test(cx); let fs = FakeFs::new(cx.executor()); @@ -6627,6 +6659,14 @@ async fn test_linked_worktree_workspace_reachable_and_dismissable(cx: &mut TestA add_agent_panel(&worktree_workspace, cx); cx.run_until_parked(); + // Explicitly create a draft thread from the linked worktree workspace. + // Auto-created drafts use the group's first workspace (the main one), + // so a user-created draft is needed to make the linked worktree reachable. + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.create_new_thread(&worktree_workspace, window, cx); + }); + cx.run_until_parked(); + // Switch back to the main workspace. 
multi_workspace.update_in(cx, |mw, window, cx| { let main_ws = mw.workspaces().next().unwrap().clone(); @@ -6656,7 +6696,7 @@ async fn test_linked_worktree_workspace_reachable_and_dismissable(cx: &mut TestA "linked worktree workspace should be reachable, but reachable are: {reachable:?}" ); - // Find the DraftThread entry for the linked worktree and dismiss it. + // Find the DraftThread entry whose workspace is the linked worktree. let new_thread_ix = sidebar.read_with(cx, |sidebar, _| { sidebar .contents @@ -6666,9 +6706,9 @@ async fn test_linked_worktree_workspace_reachable_and_dismissable(cx: &mut TestA matches!( entry, ListEntry::DraftThread { - workspace: Some(_), + workspace: Some(ws), .. - } + } if ws.entity_id() == worktree_ws_id ) }) .expect("expected a DraftThread entry for the linked worktree") @@ -6687,8 +6727,25 @@ async fn test_linked_worktree_workspace_reachable_and_dismissable(cx: &mut TestA assert_eq!( multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), - 1, - "linked worktree workspace should be removed after dismissing DraftThread entry" + 2, + "dismissing a draft no longer removes the linked worktree workspace" + ); + + let has_draft_for_worktree = sidebar.read_with(cx, |sidebar, _| { + sidebar.contents.entries.iter().any(|entry| { + matches!( + entry, + ListEntry::DraftThread { + draft_id: Some(_), + workspace: Some(ws), + .. + } if ws.entity_id() == worktree_ws_id + ) + }) + }); + assert!( + !has_draft_for_worktree, + "DraftThread entry for the linked worktree should be removed after dismiss" ); } @@ -7226,6 +7283,372 @@ async fn test_linked_worktree_workspace_reachable_after_adding_unrelated_project ); } +#[gpui::test] +async fn test_startup_failed_restoration_shows_draft(cx: &mut TestAppContext) { + // Rule 4: When the app starts and the AgentPanel fails to restore its + // last thread (no metadata), a draft should appear in the sidebar. 
+ let project = init_test_project_with_agent_panel("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let (sidebar, _panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); + + // In tests, AgentPanel::test_new doesn't call `load`, so no + // fallback draft is created. The empty group shows a placeholder. + // Simulate the startup fallback by creating a draft explicitly. + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.create_new_thread(&workspace, window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " [~ Draft] *"] + ); + + sidebar.read_with(cx, |sidebar, _| { + assert_active_draft(sidebar, &workspace, "should show active draft"); + }); +} + +#[gpui::test] +async fn test_startup_successful_restoration_no_spurious_draft(cx: &mut TestAppContext) { + // Rule 5: When the app starts and the AgentPanel successfully loads + // a thread, no spurious draft should appear. + let project = init_test_project_with_agent_panel("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); + + // Create and send a message to make a real thread. + let connection = StubAgentConnection::new(); + connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Done".into()), + )]); + open_thread_with_connection(&panel, connection, cx); + send_message(&panel, cx); + let session_id = active_session_id(&panel, cx); + save_test_thread_metadata(&session_id, &project, cx).await; + cx.run_until_parked(); + + // Should show the thread, NOT a spurious draft. 
+ let entries = visible_entries_as_strings(&sidebar, cx); + assert_eq!(entries, vec!["v [my-project]", " Hello *"]); + + // active_entry should be Thread, not Draft. + sidebar.read_with(cx, |sidebar, _| { + assert_active_thread(sidebar, &session_id, "should be on the thread, not a draft"); + }); +} + +#[gpui::test] +async fn test_delete_last_draft_in_empty_group_shows_placeholder(cx: &mut TestAppContext) { + // Rule 8: Deleting the last draft in a threadless group should + // leave a placeholder draft entry (not an empty group). + let project = init_test_project_with_agent_panel("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let (sidebar, _panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); + + // Create two drafts explicitly (test_new doesn't call load). + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.create_new_thread(&workspace, window, cx); + }); + cx.run_until_parked(); + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.create_new_thread(&workspace, window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " [~ Draft] *", " [~ Draft]"] + ); + + // Delete the active (first) draft. The second should become active. + let active_draft_id = sidebar.read_with(cx, |_sidebar, cx| { + workspace + .read(cx) + .panel::(cx) + .unwrap() + .read(cx) + .active_draft_id() + .unwrap() + }); + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.remove_draft(active_draft_id, &workspace, window, cx); + }); + cx.run_until_parked(); + + // Should still have 1 draft (the remaining one), now active. 
+ let entries = visible_entries_as_strings(&sidebar, cx); + let draft_count = entries.iter().filter(|e| e.contains("Draft")).count(); + assert_eq!(draft_count, 1, "one draft should remain after deleting one"); + + // Delete the last remaining draft. + let last_draft_id = sidebar.read_with(cx, |_sidebar, cx| { + workspace + .read(cx) + .panel::(cx) + .unwrap() + .read(cx) + .active_draft_id() + .unwrap() + }); + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.remove_draft(last_draft_id, &workspace, window, cx); + }); + cx.run_until_parked(); + + // The group has no threads and no tracked drafts, so a + // placeholder draft should appear. + let entries = visible_entries_as_strings(&sidebar, cx); + let draft_count = entries.iter().filter(|e| e.contains("Draft")).count(); + assert_eq!( + draft_count, 1, + "placeholder draft should appear after deleting all tracked drafts" + ); +} + +#[gpui::test] +async fn test_project_header_click_restores_last_viewed(cx: &mut TestAppContext) { + // Rule 9: Clicking a project header should restore whatever the + // user was last looking at in that group, not create new drafts + // or jump to the first entry. + let project_a = init_test_project_with_agent_panel("/project-a", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + let (sidebar, panel_a) = setup_sidebar_with_agent_panel(&multi_workspace, cx); + + // Create two threads in project-a. 
+ let conn1 = StubAgentConnection::new(); + conn1.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Done".into()), + )]); + open_thread_with_connection(&panel_a, conn1, cx); + send_message(&panel_a, cx); + let thread_a1 = active_session_id(&panel_a, cx); + save_test_thread_metadata(&thread_a1, &project_a, cx).await; + + let conn2 = StubAgentConnection::new(); + conn2.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Done".into()), + )]); + open_thread_with_connection(&panel_a, conn2, cx); + send_message(&panel_a, cx); + let thread_a2 = active_session_id(&panel_a, cx); + save_test_thread_metadata(&thread_a2, &project_a, cx).await; + cx.run_until_parked(); + + // The user is now looking at thread_a2. + sidebar.read_with(cx, |sidebar, _| { + assert_active_thread(sidebar, &thread_a2, "should be on thread_a2"); + }); + + // Add project-b and switch to it. + let fs = cx.update(|_window, cx| ::global(cx)); + fs.as_fake() + .insert_tree("/project-b", serde_json::json!({ "src": {} })) + .await; + let project_b = + project::Project::test(fs.clone() as Arc, ["/project-b".as_ref()], cx).await; + let workspace_b = multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_b.clone(), window, cx) + }); + let _panel_b = add_agent_panel(&workspace_b, cx); + cx.run_until_parked(); + + // Now switch BACK to project-a by activating its workspace. 
+ let workspace_a = multi_workspace.read_with(cx, |mw, cx| { + mw.workspaces() + .find(|ws| { + ws.read(cx) + .project() + .read(cx) + .visible_worktrees(cx) + .any(|wt| { + wt.read(cx) + .abs_path() + .to_string_lossy() + .contains("project-a") + }) + }) + .unwrap() + .clone() + }); + multi_workspace.update_in(cx, |mw, window, cx| { + mw.activate(workspace_a.clone(), window, cx); + }); + cx.run_until_parked(); + + // The panel should still show thread_a2 (the last thing the user + // was viewing in project-a), not a draft or thread_a1. + sidebar.read_with(cx, |sidebar, _| { + assert_active_thread( + sidebar, + &thread_a2, + "switching back to project-a should restore thread_a2", + ); + }); + + // No spurious draft entries should have been created in + // project-a's group (project-b may have a placeholder). + let entries = visible_entries_as_strings(&sidebar, cx); + // Find project-a's section and check it has no drafts. + let project_a_start = entries + .iter() + .position(|e| e.contains("project-a")) + .unwrap(); + let project_a_end = entries[project_a_start + 1..] + .iter() + .position(|e| e.starts_with("v ")) + .map(|i| i + project_a_start + 1) + .unwrap_or(entries.len()); + let project_a_drafts = entries[project_a_start..project_a_end] + .iter() + .filter(|e| e.contains("Draft")) + .count(); + assert_eq!( + project_a_drafts, 0, + "switching back to project-a should not create drafts in its group" + ); +} + +#[gpui::test] +async fn test_plus_button_always_creates_new_draft(cx: &mut TestAppContext) { + // Rule 3: Clicking the + button on a group should always create + // a new draft, even starting from a placeholder (no tracked drafts). 
+ let project = init_test_project_with_agent_panel("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let (sidebar, _panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); + + // Start: panel has no tracked drafts, sidebar shows a placeholder. + let entries = visible_entries_as_strings(&sidebar, cx); + let draft_count = entries.iter().filter(|e| e.contains("Draft")).count(); + assert_eq!(draft_count, 1, "should start with 1 placeholder"); + + // Simulate what the + button handler does: create exactly one + // new draft per click. + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + let simulate_plus_button = + |sidebar: &mut Sidebar, window: &mut Window, cx: &mut Context| { + sidebar.create_new_thread(&workspace, window, cx); + }; + + // First + click: placeholder -> 1 tracked draft. + sidebar.update_in(cx, |sidebar, window, cx| { + simulate_plus_button(sidebar, window, cx); + }); + cx.run_until_parked(); + + let entries = visible_entries_as_strings(&sidebar, cx); + let draft_count = entries.iter().filter(|e| e.contains("Draft")).count(); + assert_eq!( + draft_count, 1, + "first + click on placeholder should produce 1 tracked draft" + ); + + // Second + click: 1 -> 2 drafts. + sidebar.update_in(cx, |sidebar, window, cx| { + simulate_plus_button(sidebar, window, cx); + }); + cx.run_until_parked(); + + let entries = visible_entries_as_strings(&sidebar, cx); + let draft_count = entries.iter().filter(|e| e.contains("Draft")).count(); + assert_eq!(draft_count, 2, "second + click should add 1 more draft"); + + // Third + click: 2 -> 3 drafts. 
+ sidebar.update_in(cx, |sidebar, window, cx| { + simulate_plus_button(sidebar, window, cx); + }); + cx.run_until_parked(); + + let entries = visible_entries_as_strings(&sidebar, cx); + let draft_count = entries.iter().filter(|e| e.contains("Draft")).count(); + assert_eq!(draft_count, 3, "third + click should add 1 more draft"); + + // The most recently created draft should be active (first in list). + assert_eq!(entries[1], " [~ Draft] *"); +} + +#[gpui::test] +async fn test_activating_workspace_with_draft_does_not_create_extras(cx: &mut TestAppContext) { + // When a workspace has a draft (from the panel's load fallback) + // and the user activates it (e.g. by clicking the placeholder or + // the project header), no extra drafts should be created. + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/project-a", serde_json::json!({ ".git": {}, "src": {} })) + .await; + fs.insert_tree("/project-b", serde_json::json!({ ".git": {}, "src": {} })) + .await; + cx.update(|cx| ::set_global(fs.clone(), cx)); + + let project_a = + project::Project::test(fs.clone() as Arc, ["/project-a".as_ref()], cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + let workspace_a = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + let _panel_a = add_agent_panel(&workspace_a, cx); + cx.run_until_parked(); + + // Add project-b with its own workspace and agent panel. + let project_b = + project::Project::test(fs.clone() as Arc, ["/project-b".as_ref()], cx).await; + let workspace_b = multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_b.clone(), window, cx) + }); + let _panel_b = add_agent_panel(&workspace_b, cx); + cx.run_until_parked(); + + // Explicitly create a draft on workspace_b so the sidebar tracks one. 
+ sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.create_new_thread(&workspace_b, window, cx); + }); + cx.run_until_parked(); + + // Count project-b's drafts. + let count_b_drafts = |cx: &mut gpui::VisualTestContext| { + let entries = visible_entries_as_strings(&sidebar, cx); + entries + .iter() + .skip_while(|e| !e.contains("project-b")) + .take_while(|e| !e.starts_with("v ") || e.contains("project-b")) + .filter(|e| e.contains("Draft")) + .count() + }; + let drafts_before = count_b_drafts(cx); + + // Switch away from project-b, then back. + multi_workspace.update_in(cx, |mw, window, cx| { + mw.activate(workspace_a.clone(), window, cx); + }); + cx.run_until_parked(); + multi_workspace.update_in(cx, |mw, window, cx| { + mw.activate(workspace_b.clone(), window, cx); + }); + cx.run_until_parked(); + + let drafts_after = count_b_drafts(cx); + assert_eq!( + drafts_before, drafts_after, + "activating workspace should not create extra drafts" + ); + + // The draft should be highlighted as active after switching back. + sidebar.read_with(cx, |sidebar, _| { + assert_active_draft( + sidebar, + &workspace_b, + "draft should be active after switching back to its workspace", + ); + }); +} + mod property_test { use super::*; use gpui::proptest::prelude::*; @@ -7462,8 +7885,9 @@ mod property_test { let panel = workspace.read_with(cx, |workspace, cx| workspace.panel::(cx)); if let Some(panel) = panel { - let connection = StubAgentConnection::new(); - open_thread_with_connection(&panel, connection, cx); + panel.update_in(cx, |panel, window, cx| { + panel.new_thread(&NewThread, window, cx); + }); cx.run_until_parked(); } workspace.update_in(cx, |workspace, window, cx| { @@ -7880,11 +8304,29 @@ mod property_test { let active_workspace = multi_workspace.read(cx).workspace(); - // 1. active_entry must always be Some after rebuild_contents. - let entry = sidebar - .active_entry - .as_ref() - .ok_or_else(|| anyhow::anyhow!("active_entry must always be Some"))?; + // 1. 
active_entry should be Some when the panel has content. + // It may be None when the panel is uninitialized (no drafts, + // no threads), which is fine. + // It may also temporarily point at a different workspace + // when the workspace just changed and the new panel has no + // content yet. + let panel = active_workspace.read(cx).panel::(cx).unwrap(); + let panel_has_content = panel.read(cx).active_draft_id().is_some() + || panel.read(cx).active_conversation_view().is_some(); + + let Some(entry) = sidebar.active_entry.as_ref() else { + if panel_has_content { + anyhow::bail!("active_entry is None but panel has content (draft or thread)"); + } + return Ok(()); + }; + + // If the entry workspace doesn't match the active workspace + // and the panel has no content, this is a transient state that + // will resolve when the panel gets content. + if entry.workspace().entity_id() != active_workspace.entity_id() && !panel_has_content { + return Ok(()); + } // 2. The entry's workspace must agree with the multi-workspace's // active workspace. @@ -7896,11 +8338,10 @@ mod property_test { ); // 3. The entry must match the agent panel's current state. - let panel = active_workspace.read(cx).panel::(cx).unwrap(); - if panel.read(cx).active_thread_is_draft(cx) { + if panel.read(cx).active_draft_id().is_some() { anyhow::ensure!( - matches!(entry, ActiveEntry::Draft(_)), - "panel shows a draft but active_entry is {:?}", + matches!(entry, ActiveEntry::Draft { .. }), + "panel shows a tracked draft but active_entry is {:?}", entry, ); } else if let Some(session_id) = panel From 93e36aa55ef9b43efc59ebe7a3f036149f88db47 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Fri, 10 Apr 2026 18:30:10 -0600 Subject: [PATCH 57/67] Always open agent panel when selecting a thread or draft (#53664) When clicking a draft placeholder ("New Agent Thread" tab) or confirming one via keyboard, the agent panel now always opens. Previously it only focused the panel if it was already visible. 
Regular threads and drafts with IDs already opened the panel through `activate_draft` and `load_agent_thread_in_workspace`. The project header click continues to activate the workspace without forcing the panel open. cc @danilo-leal Release Notes: - N/A --- crates/sidebar/src/sidebar.rs | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs index f69e5206a6e236bd602e5d1922d33b36afb6b520..8864d9e7faa245de5ded1e38f2567d8ba2008d76 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -1718,11 +1718,6 @@ impl Sidebar { // shown, so the user returns to whatever // thread/draft they were looking at. this.activate_workspace(&workspace, window, cx); - if AgentPanel::is_visible(&workspace, cx) { - workspace.update(cx, |workspace, cx| { - workspace.focus_panel::(window, cx); - }); - } } else { this.open_workspace_for_group(&key, window, cx); } @@ -2181,6 +2176,9 @@ impl Sidebar { } } else if let Some(workspace) = workspace { self.activate_workspace(&workspace, window, cx); + workspace.update(cx, |ws, cx| { + ws.focus_panel::(window, cx); + }); } else { self.open_workspace_for_group(&key, window, cx); } @@ -4152,11 +4150,9 @@ impl Sidebar { // Placeholder with an open workspace — just // activate it. The panel remembers its last view. this.activate_workspace(workspace, window, cx); - if AgentPanel::is_visible(workspace, cx) { - workspace.update(cx, |ws, cx| { - ws.focus_panel::(window, cx); - }); - } + workspace.update(cx, |ws, cx| { + ws.focus_panel::(window, cx); + }); } else { // No workspace at all — just open one. The // panel's load fallback will create a draft. 
From 45c0ced8b2ccee4229a6fb9c42a48bcdc6f83514 Mon Sep 17 00:00:00 2001 From: Eric Holk Date: Fri, 10 Apr 2026 21:37:43 -0700 Subject: [PATCH 58/67] cli: Add first-run prompt for default open behavior and abstract IPC transport (#53663) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR adds the `cli_default_open_behavior` setting and a first-run TUI prompt that appears when `zed ` is invoked without flags while existing windows are open and the setting hasn't been configured yet. ## What it does ### Setting and prompt - Adds a new `cli_default_open_behavior` workspace setting with two values: `existing_window` (default) and `new_window`. - When the user runs `zed ` for the first time with existing Zed windows open, a `dialoguer::Select` prompt in the CLI asks them to choose their preferred behavior. The choice is persisted to `settings.json`. - The prompt is skipped when: - An explicit flag (`-n`, `-e`, `-a`) is given - No existing Zed windows are open - The setting is already configured in `settings.json` - The paths being opened are already contained in an existing workspace ### IPC transport abstraction - Introduces a `CliResponseSink` trait in the `cli` crate that abstracts `IpcSender`, with an implementation for the real IPC sender. - Replaces `IpcSender` with `Box` / `&dyn CliResponseSink` across all signatures in `open_listener.rs`: `OpenRequestKind::CliConnection`, `handle_cli_connection`, `maybe_prompt_open_behavior`, `open_workspaces`, `open_local_workspace`. - Extracts the inline CLI response loop from `main.rs` into a testable `cli::run_cli_response_loop` function. - Switches the request channel from bounded `mpsc::channel(16)` to `mpsc::unbounded()`, eliminating `smol::block_on` in the bridge thread. 
### End-to-end tests Seven new tests exercise both the CLI-side response loop and the Zed-side handler connected through in-memory channels, using `allow_parking()` so the real `cli::run_cli_response_loop` runs on an OS thread while the GPUI executor drives the Zed handler: - No flags, no windows → no prompt, opens new window - No flags, existing windows, user picks "existing window" → prompt, setting persisted - No flags, existing windows, user picks "new window" → prompt, setting persisted - Setting already configured → no prompt - Paths already in existing workspace → no prompt - Explicit `-e` flag → no prompt - Explicit `-n` flag → no prompt Existing tests that previously used `ipc::channel()` now use a `DiscardResponseSink`, removing OS-level IPC from all tests. Release Notes: - Added a first-run prompt when using `zed ` to choose between opening in an existing window or a new window. The choice is saved to settings and can be changed later via the `cli_default_open_behavior` setting. 
--------- Co-authored-by: Nathan Sobo --- Cargo.lock | 54 +- Cargo.toml | 2 + assets/settings/default.json | 9 + crates/cli/Cargo.toml | 2 + crates/cli/src/cli.rs | 28 + crates/cli/src/main.rs | 58 +- crates/gpui/src/elements/text.rs | 56 +- crates/settings/src/vscode_import.rs | 1 + crates/settings_content/src/workspace.rs | 30 + crates/settings_ui/src/page_data.rs | 23 +- crates/settings_ui/src/settings_ui.rs | 4 +- crates/ui/src/components/label/label.rs | 113 +++- crates/workspace/src/workspace.rs | 56 +- crates/workspace/src/workspace_settings.rs | 2 + crates/zed/src/zed/open_listener.rs | 613 ++++++++++++++++++-- crates/zed/src/zed/windows_only_instance.rs | 6 + 16 files changed, 972 insertions(+), 85 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5280fc72d074c22414b603a9b7092f2005f07a85..ca2e15a8ecf4d648e2e48e6c2e7e7feddd4c8fbd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2876,8 +2876,10 @@ dependencies = [ "askpass", "clap", "collections", + "console", "core-foundation 0.10.0", "core-services", + "dialoguer", "exec", "fork", "ipc-channel", @@ -3418,6 +3420,18 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "console" +version = "0.16.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d64e8af5551369d19cf50138de61f1c42074ab970f74e99be916646777f8fc87" +dependencies = [ + "encode_unicode", + "libc", + "unicode-width", + "windows-sys 0.61.2", +] + [[package]] name = "console_error_panic_hook" version = "0.1.7" @@ -4839,6 +4853,16 @@ dependencies = [ "zlog", ] +[[package]] +name = "dialoguer" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25f104b501bf2364e78d0d3974cbc774f738f5865306ed128e1e0d7499c0ad96" +dependencies = [ + "console", + "shell-words", +] + [[package]] name = "diff" version = "0.1.13" @@ -4925,7 +4949,7 @@ dependencies = [ "libc", "option-ext", "redox_users 0.5.2", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -5520,6 +5544,12 
@@ dependencies = [ "phf 0.11.3", ] +[[package]] +name = "encode_unicode" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" + [[package]] name = "encoding_rs" version = "0.8.35" @@ -5691,7 +5721,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] @@ -7116,7 +7146,7 @@ dependencies = [ "gobject-sys", "libc", "system-deps 7.0.7", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] @@ -8445,7 +8475,7 @@ dependencies = [ "js-sys", "log", "wasm-bindgen", - "windows-core 0.57.0", + "windows-core 0.62.2", ] [[package]] @@ -11089,7 +11119,7 @@ version = "0.50.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -13535,7 +13565,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4" dependencies = [ "bytes 1.11.1", - "heck 0.4.1", + "heck 0.5.0", "itertools 0.12.1", "log", "multimap", @@ -13841,7 +13871,7 @@ dependencies = [ "once_cell", "socket2 0.6.1", "tracing", - "windows-sys 0.52.0", + "windows-sys 0.60.2", ] [[package]] @@ -15033,7 +15063,7 @@ dependencies = [ "errno 0.3.14", "libc", "linux-raw-sys 0.11.0", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] @@ -16286,7 +16316,7 @@ version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1c97747dbf44bb1ca44a561ece23508e99cb592e862f22222dcf42f51d1e451" dependencies = [ - "heck 0.4.1", + "heck 0.5.0", "proc-macro2", "quote", "syn 2.0.117", @@ -17500,7 +17530,7 @@ dependencies = [ "getrandom 0.3.4", 
"once_cell", "rustix 1.1.2", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] @@ -18419,7 +18449,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2fb391ac70462b3097a755618fbf9c8f95ecc1eb379a414f7b46f202ed10db1f" dependencies = [ "cc", - "windows-targets 0.48.5", + "windows-targets 0.52.6", ] [[package]] @@ -20531,7 +20561,7 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.61.2", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 9825c8319a7bb3440782b155d9952619096bdfd5..fea8ba56761ea76d2ec37381e498a2711efa0d43 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -295,6 +295,7 @@ command_palette_hooks = { path = "crates/command_palette_hooks" } compliance = { path = "tooling/compliance" } component = { path = "crates/component" } component_preview = { path = "crates/component_preview" } +console = "0.16" context_server = { path = "crates/context_server" } copilot = { path = "crates/copilot" } copilot_chat = { path = "crates/copilot_chat" } @@ -313,6 +314,7 @@ deepseek = { path = "crates/deepseek" } derive_refineable = { path = "crates/refineable/derive_refineable" } dev_container = { path = "crates/dev_container" } diagnostics = { path = "crates/diagnostics" } +dialoguer = { version = "0.12", default-features = false } editor = { path = "crates/editor" } encoding_selector = { path = "crates/encoding_selector" } env_var = { path = "crates/env_var" } diff --git a/assets/settings/default.json b/assets/settings/default.json index 8e8c93c5088338af63a2daed8c87fe031d500727..2fd6120ba0d79add35903117e17a43caa02ef619 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -134,6 +134,15 @@ // 3. 
Do not restore previous workspaces // "restore_on_startup": "none", "restore_on_startup": "last_session", + // The default behavior when opening paths from the CLI without + // an explicit `-e` (existing window) or `-n` (new window) flag. + // + // May take 2 values: + // 1. Add to the existing Zed window + // "cli_default_open_behavior": "existing_window" + // 2. Open a new Zed window + // "cli_default_open_behavior": "new_window" + "cli_default_open_behavior": "existing_window", // Whether to attempt to restore previous file's state when opening it again. // The state is stored per pane. // When disabled, defaults are applied instead of the state restoration. diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml index c7a71f036a350b5ab4b8a7eb49fd1ba0aa7d7272..cfd807c0356aa2a11e018c60db033b42471bf876 100644 --- a/crates/cli/Cargo.toml +++ b/crates/cli/Cargo.toml @@ -25,6 +25,8 @@ anyhow.workspace = true askpass.workspace = true clap.workspace = true collections.workspace = true +console.workspace = true +dialoguer.workspace = true ipc-channel = "0.19" parking_lot.workspace = true paths.workspace = true diff --git a/crates/cli/src/cli.rs b/crates/cli/src/cli.rs index d8da78c53210230597dab49ce297d9fa694e62f1..ea7e42beb4e22d7743bc5caade972f8f9f889925 100644 --- a/crates/cli/src/cli.rs +++ b/crates/cli/src/cli.rs @@ -1,3 +1,4 @@ +use anyhow::Result; use collections::HashMap; pub use ipc_channel::ipc; use serde::{Deserialize, Serialize}; @@ -8,6 +9,13 @@ pub struct IpcHandshake { pub responses: ipc::IpcReceiver, } +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum CliOpenBehavior { + ExistingWindow, + NewWindow, +} + #[derive(Debug, Serialize, Deserialize)] pub enum CliRequest { Open { @@ -18,11 +26,16 @@ pub enum CliRequest { wsl: Option, wait: bool, open_new_workspace: Option, + #[serde(default)] + force_existing_window: bool, reuse: bool, env: Option>, user_data_dir: Option, dev_container: bool, 
}, + SetOpenBehavior { + behavior: CliOpenBehavior, + }, } #[derive(Debug, Serialize, Deserialize)] @@ -31,6 +44,7 @@ pub enum CliResponse { Stdout { message: String }, Stderr { message: String }, Exit { status: i32 }, + PromptOpenBehavior, } /// When Zed started not as an *.app but as a binary (e.g. local development), @@ -39,3 +53,17 @@ pub enum CliResponse { /// Note that in the main zed binary, this variable is unset after it's read for the first time, /// therefore it should always be accessed through the `FORCE_CLI_MODE` static. pub const FORCE_CLI_MODE_ENV_VAR_NAME: &str = "ZED_FORCE_CLI_MODE"; + +/// Abstracts the transport for sending CLI responses (Zed → CLI). +/// +/// Production code uses `IpcSender`. Tests can provide in-memory +/// implementations to avoid OS-level IPC. +pub trait CliResponseSink: Send + 'static { + fn send(&self, response: CliResponse) -> Result<()>; +} + +impl CliResponseSink for ipc::IpcSender { + fn send(&self, response: CliResponse) -> Result<()> { + ipc::IpcSender::send(self, response).map_err(|error| anyhow::anyhow!("{error}")) + } +} diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs index 41f2d14c1908ac18e7ea297eef19d8d9bd1cf8b5..49129532603625b48ac86ba506ad3ff1014f30d3 100644 --- a/crates/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -25,7 +25,6 @@ use tempfile::{NamedTempFile, TempDir}; use util::paths::PathWithPosition; use walkdir::WalkDir; -#[cfg(any(target_os = "linux", target_os = "freebsd"))] use std::io::IsTerminal; const URL_PREFIX: [&'static str; 5] = ["zed://", "http://", "https://", "file://", "ssh://"]; @@ -68,14 +67,17 @@ struct Args { #[arg(short, long)] wait: bool, /// Add files to the currently open workspace - #[arg(short, long, overrides_with_all = ["new", "reuse"])] + #[arg(short, long, overrides_with_all = ["new", "reuse", "existing"])] add: bool, /// Create a new workspace - #[arg(short, long, overrides_with_all = ["add", "reuse"])] + #[arg(short, long, overrides_with_all = ["add", "reuse", 
"existing"])] new: bool, /// Reuse an existing window, replacing its workspace - #[arg(short, long, overrides_with_all = ["add", "new"])] + #[arg(short, long, overrides_with_all = ["add", "new", "existing"])] reuse: bool, + /// Open in existing Zed window + #[arg(short = 'e', long = "existing", overrides_with_all = ["add", "new", "reuse"])] + existing: bool, /// Sets a custom directory for all user data (e.g., database, extensions, logs). /// This overrides the default platform-specific data directory location: #[cfg_attr(target_os = "macos", doc = "`~/Library/Application Support/Zed`.")] @@ -544,6 +546,8 @@ fn main() -> Result<()> { None }; + let force_existing_window = args.existing; + let env = { #[cfg(any(target_os = "linux", target_os = "freebsd"))] { @@ -665,7 +669,7 @@ fn main() -> Result<()> { #[cfg(not(target_os = "windows"))] let wsl = None; - tx.send(CliRequest::Open { + let open_request = CliRequest::Open { paths, urls, diff_paths, @@ -673,11 +677,14 @@ fn main() -> Result<()> { wsl, wait: args.wait, open_new_workspace, + force_existing_window, reuse: args.reuse, env, user_data_dir: user_data_dir_for_thread, dev_container: args.dev_container, - })?; + }; + + tx.send(open_request)?; while let Ok(response) = rx.recv() { match response { @@ -688,6 +695,11 @@ fn main() -> Result<()> { exit_status.lock().replace(status); return Ok(()); } + CliResponse::PromptOpenBehavior => { + let behavior = prompt_open_behavior() + .unwrap_or(cli::CliOpenBehavior::ExistingWindow); + tx.send(CliRequest::SetOpenBehavior { behavior })?; + } } } @@ -781,6 +793,40 @@ fn anonymous_fd(path: &str) -> Option { } } +/// Shows an interactive prompt asking the user to choose the default open +/// behavior for `zed `. Returns `None` if the prompt cannot be shown +/// (e.g. stdin is not a terminal) or the user cancels. 
+fn prompt_open_behavior() -> Option { + if !std::io::stdin().is_terminal() { + return None; + } + + let blue = console::Style::new().blue(); + let items = [ + format!("Add to existing Zed window ({})", blue.apply_to("zed -e")), + format!("Open a new window ({})", blue.apply_to("zed -n")), + ]; + + let prompt = format!( + "Configure default behavior for {}\n{}", + blue.apply_to("zed "), + console::style("You can change this later in Zed settings"), + ); + + let selection = dialoguer::Select::new() + .with_prompt(&prompt) + .items(&items) + .default(0) + .interact() + .ok()?; + + Some(if selection == 0 { + cli::CliOpenBehavior::ExistingWindow + } else { + cli::CliOpenBehavior::NewWindow + }) +} + #[cfg(any(target_os = "linux", target_os = "freebsd"))] mod linux { use std::{ diff --git a/crates/gpui/src/elements/text.rs b/crates/gpui/src/elements/text.rs index 49036abfec1cb3145ce72d2aabe7683e308f1ed0..847acb6e287b0165faa75889ae01e688b504295e 100644 --- a/crates/gpui/src/elements/text.rs +++ b/crates/gpui/src/elements/text.rs @@ -159,6 +159,7 @@ pub struct StyledText { text: SharedString, runs: Option>, delayed_highlights: Option, HighlightStyle)>>, + delayed_font_family_overrides: Option, SharedString)>>, layout: TextLayout, } @@ -169,6 +170,7 @@ impl StyledText { text: text.into(), runs: None, delayed_highlights: None, + delayed_font_family_overrides: None, layout: TextLayout::default(), } } @@ -242,6 +244,51 @@ impl StyledText { runs } + /// Override the font family for specific byte ranges of the text. + /// + /// This is resolved lazily at layout time, so the overrides are applied + /// on top of the inherited text style from the parent element. + /// Can be combined with [`with_highlights`](Self::with_highlights). + /// + /// The overrides must be sorted by range start and non-overlapping. + /// Each override range must fall on character boundaries. 
+ pub fn with_font_family_overrides( + mut self, + overrides: impl IntoIterator, SharedString)>, + ) -> Self { + self.delayed_font_family_overrides = Some( + overrides + .into_iter() + .inspect(|(range, _)| { + debug_assert!(self.text.is_char_boundary(range.start)); + debug_assert!(self.text.is_char_boundary(range.end)); + }) + .collect(), + ); + self + } + + fn apply_font_family_overrides( + runs: &mut [TextRun], + overrides: &[(Range, SharedString)], + ) { + let mut byte_offset = 0; + let mut override_idx = 0; + for run in runs.iter_mut() { + let run_end = byte_offset + run.len; + while override_idx < overrides.len() && overrides[override_idx].0.end <= byte_offset { + override_idx += 1; + } + if override_idx < overrides.len() { + let (ref range, ref family) = overrides[override_idx]; + if byte_offset >= range.start && run_end <= range.end { + run.font.family = family.clone(); + } + } + byte_offset = run_end; + } + } + /// Set the text runs for this piece of text. pub fn with_runs(mut self, runs: Vec) -> Self { let mut text = &**self.text; @@ -278,12 +325,19 @@ impl Element for StyledText { window: &mut Window, cx: &mut App, ) -> (LayoutId, Self::RequestLayoutState) { - let runs = self.runs.take().or_else(|| { + let font_family_overrides = self.delayed_font_family_overrides.take(); + let mut runs = self.runs.take().or_else(|| { self.delayed_highlights.take().map(|delayed_highlights| { Self::compute_runs(&self.text, &window.text_style(), delayed_highlights) }) }); + if let Some(ref overrides) = font_family_overrides { + let runs = + runs.get_or_insert_with(|| vec![window.text_style().to_run(self.text.len())]); + Self::apply_font_family_overrides(runs, overrides); + } + let layout_id = self.layout.layout(self.text.clone(), runs, window, cx); (layout_id, ()) } diff --git a/crates/settings/src/vscode_import.rs b/crates/settings/src/vscode_import.rs index 042b3a7c71c77d8aaa02cec559a943608ee87859..40565fc4616d3b71c61729743d36b8479c3e590f 100644 --- 
a/crates/settings/src/vscode_import.rs +++ b/crates/settings/src/vscode_import.rs @@ -975,6 +975,7 @@ impl VsCodeSettings { }), bottom_dock_layout: None, centered_layout: None, + cli_default_open_behavior: None, close_on_file_delete: None, close_panel_on_toggle: None, command_aliases: Default::default(), diff --git a/crates/settings_content/src/workspace.rs b/crates/settings_content/src/workspace.rs index 02ec229cb37bfa39aded1764f0f1c5235e081ba6..cebc73550f268f0be5385b5eb41928898db67585 100644 --- a/crates/settings_content/src/workspace.rs +++ b/crates/settings_content/src/workspace.rs @@ -49,6 +49,11 @@ pub struct WorkspaceSettingsContent { /// Values: empty_tab, last_workspace, last_session, launchpad /// Default: last_session pub restore_on_startup: Option, + /// The default behavior when opening paths from the CLI without + /// an explicit `-e` or `-n` flag. + /// + /// Default: existing_window + pub cli_default_open_behavior: Option, /// Whether to attempt to restore previous file's state when opening it again. /// The state is stored per pane. /// When disabled, defaults are applied instead of the state restoration. @@ -379,6 +384,31 @@ impl CloseWindowWhenNoItems { } } +#[derive( + Copy, + Clone, + PartialEq, + Eq, + Default, + Serialize, + Deserialize, + JsonSchema, + MergeFrom, + Debug, + strum::VariantArray, + strum::VariantNames, +)] +#[serde(rename_all = "snake_case")] +pub enum CliDefaultOpenBehavior { + /// Add to the existing Zed window as a new workspace. + #[default] + #[strum(serialize = "Add to Existing Window")] + ExistingWindow, + /// Open a new Zed window. 
+ #[strum(serialize = "Open a New Window")] + NewWindow, +} + #[derive( Copy, Clone, diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index cef65431a459126ac14054dee5bc5ffe68e2419c..a14a831452a423baf5f75ec2698ee86c34ae042d 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -80,7 +80,7 @@ pub(crate) fn settings_data(cx: &App) -> Vec { } fn general_page() -> SettingsPage { - fn general_settings_section() -> [SettingsPageItem; 8] { + fn general_settings_section() -> [SettingsPageItem; 9] { [ SettingsPageItem::SectionHeader("General Settings"), SettingsPageItem::SettingItem(SettingItem { @@ -140,6 +140,27 @@ fn general_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "CLI Default Open Behavior", + description: "How `zed ` opens directories when no `-e` or `-n` flag is specified.", + field: Box::new(SettingField { + json_path: Some("cli_default_open_behavior"), + pick: |settings_content| { + settings_content + .workspace + .cli_default_open_behavior + .as_ref() + }, + write: |settings_content, value| { + settings_content.workspace.cli_default_open_behavior = value; + }, + }), + metadata: Some(Box::new(SettingsFieldMetadata { + should_do_titlecase: Some(false), + ..Default::default() + })), + files: USER, + }), SettingsPageItem::SettingItem(SettingItem { title: "Use System Path Prompts", description: "Use native OS dialogs for 'Open' and 'Save As'.", diff --git a/crates/settings_ui/src/settings_ui.rs b/crates/settings_ui/src/settings_ui.rs index 9a5a7dafea4708205569b53e7aa460510a9fbb1e..c5df1910a4d6a2d0b660c73cb31b936f67a9d76b 100644 --- a/crates/settings_ui/src/settings_ui.rs +++ b/crates/settings_ui/src/settings_ui.rs @@ -458,6 +458,7 @@ fn init_renderers(cx: &mut App) { .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) 
.add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_font_picker) @@ -1213,7 +1214,8 @@ fn render_settings_item( .child( Label::new(SharedString::new_static(setting_item.description)) .size(LabelSize::Small) - .color(Color::Muted), + .color(Color::Muted) + .render_code_spans(), ), ) .child(control) diff --git a/crates/ui/src/components/label/label.rs b/crates/ui/src/components/label/label.rs index 405948ea06c7e86fcb3dec217186596bdaaf0aeb..871f53fbe4d00828850017c6d90f395762262631 100644 --- a/crates/ui/src/components/label/label.rs +++ b/crates/ui/src/components/label/label.rs @@ -1,5 +1,7 @@ +use std::ops::Range; + use crate::{LabelLike, prelude::*}; -use gpui::StyleRefinement; +use gpui::{HighlightStyle, StyleRefinement, StyledText}; /// A struct representing a label element in the UI. /// @@ -33,6 +35,7 @@ use gpui::StyleRefinement; pub struct Label { base: LabelLike, label: SharedString, + render_code_spans: bool, } impl Label { @@ -49,9 +52,17 @@ impl Label { Self { base: LabelLike::new(), label: label.into(), + render_code_spans: false, } } + /// When enabled, text wrapped in backticks (e.g. `` `code` ``) will be + /// rendered in the buffer (monospace) font. + pub fn render_code_spans(mut self) -> Self { + self.render_code_spans = true; + self + } + /// Sets the text of the [`Label`]. 
pub fn set_text(&mut self, text: impl Into) { self.label = text.into(); @@ -233,11 +244,109 @@ impl LabelCommon for Label { } impl RenderOnce for Label { - fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement { + fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { + if self.render_code_spans { + if let Some((stripped, code_ranges)) = parse_backtick_spans(&self.label) { + let buffer_font_family = theme::theme_settings(cx).buffer_font(cx).family.clone(); + let background_color = cx.theme().colors().element_background; + + let highlights = code_ranges.iter().map(|range| { + ( + range.clone(), + HighlightStyle { + background_color: Some(background_color), + ..Default::default() + }, + ) + }); + + let font_overrides = code_ranges + .iter() + .map(|range| (range.clone(), buffer_font_family.clone())); + + return self.base.child( + StyledText::new(stripped) + .with_highlights(highlights) + .with_font_family_overrides(font_overrides), + ); + } + } self.base.child(self.label) } } +/// Parses backtick-delimited code spans from a string. +/// +/// Returns `None` if there are no matched backtick pairs. +/// Otherwise returns the text with backticks stripped and the byte ranges +/// of the code spans in the stripped string. 
+fn parse_backtick_spans(text: &str) -> Option<(SharedString, Vec>)> { + if !text.contains('`') { + return None; + } + + let mut stripped = String::with_capacity(text.len()); + let mut code_ranges = Vec::new(); + let mut in_code = false; + let mut code_start = 0; + + for ch in text.chars() { + if ch == '`' { + if in_code { + code_ranges.push(code_start..stripped.len()); + } else { + code_start = stripped.len(); + } + in_code = !in_code; + } else { + stripped.push(ch); + } + } + + if code_ranges.is_empty() { + return None; + } + + Some((SharedString::from(stripped), code_ranges)) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_parse_backtick_spans_no_backticks() { + assert_eq!(parse_backtick_spans("plain text"), None); + } + + #[test] + fn test_parse_backtick_spans_single_span() { + let (text, ranges) = parse_backtick_spans("use `zed` to open").unwrap(); + assert_eq!(text.as_ref(), "use zed to open"); + assert_eq!(ranges, vec![4..7]); + } + + #[test] + fn test_parse_backtick_spans_multiple_spans() { + let (text, ranges) = parse_backtick_spans("flags `-e` or `-n`").unwrap(); + assert_eq!(text.as_ref(), "flags -e or -n"); + assert_eq!(ranges, vec![6..8, 12..14]); + } + + #[test] + fn test_parse_backtick_spans_unmatched_backtick() { + // A trailing unmatched backtick should not produce a code range + assert_eq!(parse_backtick_spans("trailing `backtick"), None); + } + + #[test] + fn test_parse_backtick_spans_empty_span() { + let (text, ranges) = parse_backtick_spans("empty `` span").unwrap(); + assert_eq!(text.as_ref(), "empty span"); + assert_eq!(ranges, vec![6..6]); + } +} + impl Component for Label { fn scope() -> ComponentScope { ComponentScope::Typography diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index b25e9c4128b7ecfa428f328c59d3344ed634b293..857db0795bbac8cfe5ee3040971d071c05ed38cd 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -9317,6 +9317,7 @@ pub struct 
OpenOptions { pub visible: Option, pub focus: Option, pub open_new_workspace: Option, + pub force_existing_window: bool, pub wait: bool, pub requesting_window: Option>, pub open_mode: OpenMode, @@ -9501,31 +9502,42 @@ pub fn open_paths( } // Fallback for directories: when no flag is specified and no existing - // workspace matched, add the directory as a new workspace in the - // active window's MultiWorkspace (instead of opening a new window). + // workspace matched, check the user's setting to decide whether to add + // the directory as a new workspace in the active window's MultiWorkspace + // or open a new window. if open_options.open_new_workspace.is_none() && existing.is_none() { - let target_window = cx.update(|cx| { - let windows = workspace_windows_for_location( - &SerializedWorkspaceLocation::Local, - cx, - ); - let window = cx - .active_window() - .and_then(|window| window.downcast::()) - .filter(|window| windows.contains(window)) - .or_else(|| windows.into_iter().next()); - window.filter(|window| { - window.read(cx).is_ok_and(|mw| mw.multi_workspace_enabled(cx)) - }) - }); + let use_existing_window = open_options.force_existing_window + || cx.update(|cx| { + WorkspaceSettings::get_global(cx).cli_default_open_behavior + == settings::CliDefaultOpenBehavior::ExistingWindow + }); - if let Some(window) = target_window { - open_options.requesting_window = Some(window); - window - .update(cx, |multi_workspace, _, cx| { - multi_workspace.open_sidebar(cx); + if use_existing_window { + let target_window = cx.update(|cx| { + let windows = workspace_windows_for_location( + &SerializedWorkspaceLocation::Local, + cx, + ); + let window = cx + .active_window() + .and_then(|window| window.downcast::()) + .filter(|window| windows.contains(window)) + .or_else(|| windows.into_iter().next()); + window.filter(|window| { + window + .read(cx) + .is_ok_and(|mw| mw.multi_workspace_enabled(cx)) }) - .log_err(); + }); + + if let Some(window) = target_window { + 
open_options.requesting_window = Some(window); + window + .update(cx, |multi_workspace, _, cx| { + multi_workspace.open_sidebar(cx); + }) + .log_err(); + } } } diff --git a/crates/workspace/src/workspace_settings.rs b/crates/workspace/src/workspace_settings.rs index ee0e80336d744cadaecdf0201525deddb8d5eec9..f097f381d16a51f32e3079968334fa65e264498d 100644 --- a/crates/workspace/src/workspace_settings.rs +++ b/crates/workspace/src/workspace_settings.rs @@ -20,6 +20,7 @@ pub struct WorkspaceSettings { pub show_call_status_icon: bool, pub autosave: AutosaveSetting, pub restore_on_startup: settings::RestoreOnStartupBehavior, + pub cli_default_open_behavior: settings::CliDefaultOpenBehavior, pub restore_on_file_reopen: bool, pub drop_target_size: f32, pub use_system_path_prompts: bool, @@ -99,6 +100,7 @@ impl Settings for WorkspaceSettings { show_call_status_icon: workspace.show_call_status_icon.unwrap(), autosave: workspace.autosave.unwrap(), restore_on_startup: workspace.restore_on_startup.unwrap(), + cli_default_open_behavior: workspace.cli_default_open_behavior.unwrap(), restore_on_file_reopen: workspace.restore_on_file_reopen.unwrap(), drop_target_size: workspace.drop_target_size.unwrap(), use_system_path_prompts: workspace.use_system_path_prompts.unwrap(), diff --git a/crates/zed/src/zed/open_listener.rs b/crates/zed/src/zed/open_listener.rs index 16d220c56093a2645db9f5bdc3114e16814138ac..7094a6a6a7addcfdb5c373258a95b2f2b02d5c2b 100644 --- a/crates/zed/src/zed/open_listener.rs +++ b/crates/zed/src/zed/open_listener.rs @@ -2,7 +2,7 @@ use crate::handle_open_request; use crate::restore_or_create_workspace; use agent_ui::ExternalSourcePrompt; use anyhow::{Context as _, Result, anyhow}; -use cli::{CliRequest, CliResponse, ipc::IpcSender}; +use cli::{CliRequest, CliResponse, CliResponseSink}; use cli::{IpcHandshake, ipc}; use client::{ZedLink, parse_zed_link}; use db::kvp::KeyValueStore; @@ -12,7 +12,7 @@ use futures::channel::mpsc::{UnboundedReceiver, UnboundedSender}; 
use futures::channel::{mpsc, oneshot}; use futures::future; -use futures::{FutureExt, SinkExt, StreamExt}; +use futures::{FutureExt, StreamExt}; use git_ui::{file_diff_view::FileDiffView, multi_diff_view::MultiDiffView}; use gpui::{App, AsyncApp, Global, WindowHandle}; use onboarding::FIRST_OPEN; @@ -26,6 +26,7 @@ use std::thread; use std::time::Duration; use ui::SharedString; use util::ResultExt; +use util::debug_panic; use util::paths::PathWithPosition; use workspace::PathList; use workspace::item::ItemHandle; @@ -43,9 +44,13 @@ pub struct OpenRequest { pub remote_connection: Option, } -#[derive(Debug)] pub enum OpenRequestKind { - CliConnection((mpsc::Receiver, IpcSender)), + CliConnection( + ( + mpsc::UnboundedReceiver, + Box, + ), + ), Extension { extension_id: String, }, @@ -73,6 +78,45 @@ pub enum OpenRequestKind { }, } +impl std::fmt::Debug for OpenRequestKind { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::CliConnection(_) => write!(f, "CliConnection(..)"), + Self::Extension { extension_id } => f + .debug_struct("Extension") + .field("extension_id", extension_id) + .finish(), + Self::AgentPanel { + external_source_prompt, + } => f + .debug_struct("AgentPanel") + .field("external_source_prompt", external_source_prompt) + .finish(), + Self::SharedAgentThread { session_id } => f + .debug_struct("SharedAgentThread") + .field("session_id", session_id) + .finish(), + Self::DockMenuAction { index } => f + .debug_struct("DockMenuAction") + .field("index", index) + .finish(), + Self::BuiltinJsonSchema { schema_path } => f + .debug_struct("BuiltinJsonSchema") + .field("schema_path", schema_path) + .finish(), + Self::Setting { setting_path } => f + .debug_struct("Setting") + .field("setting_path", setting_path) + .finish(), + Self::GitClone { repo_url } => f + .debug_struct("GitClone") + .field("repo_url", repo_url) + .finish(), + Self::GitCommit { sha } => f.debug_struct("GitCommit").field("sha", sha).finish(), + } + } 
+} + impl OpenRequest { pub fn parse(request: RawOpenRequest, cx: &App) -> Result { let mut this = Self::default(); @@ -305,8 +349,11 @@ pub fn listen_for_cli_connections(opener: OpenListener) -> Result<()> { fn connect_to_cli( server_name: &str, -) -> Result<(mpsc::Receiver, IpcSender)> { - let handshake_tx = cli::ipc::IpcSender::::connect(server_name.to_string()) +) -> Result<( + mpsc::UnboundedReceiver, + Box, +)> { + let handshake_tx = ipc::IpcSender::::connect(server_name.to_string()) .context("error connecting to cli")?; let (request_tx, request_rx) = ipc::channel::()?; let (response_tx, response_rx) = ipc::channel::()?; @@ -318,18 +365,17 @@ fn connect_to_cli( }) .context("error sending ipc handshake")?; - let (mut async_request_tx, async_request_rx) = - futures::channel::mpsc::channel::(16); + let (async_request_tx, async_request_rx) = futures::channel::mpsc::unbounded::(); thread::spawn(move || { while let Ok(cli_request) = request_rx.recv() { - if smol::block_on(async_request_tx.send(cli_request)).is_err() { + if async_request_tx.unbounded_send(cli_request).is_err() { break; } } anyhow::Ok(()) }); - Ok((async_request_rx, response_tx)) + Ok((async_request_rx, Box::new(response_tx))) } pub async fn open_paths_with_positions( @@ -399,7 +445,10 @@ pub async fn open_paths_with_positions( } pub async fn handle_cli_connection( - (mut requests, responses): (mpsc::Receiver, IpcSender), + (mut requests, responses): ( + mpsc::UnboundedReceiver, + Box, + ), app_state: Arc, cx: &mut AsyncApp, ) { @@ -412,7 +461,8 @@ pub async fn handle_cli_connection( diff_all, wait, wsl, - open_new_workspace, + mut open_new_workspace, + mut force_existing_window, reuse, env, user_data_dir: _, @@ -447,13 +497,36 @@ pub async fn handle_cli_connection( return; } + if let Some(behavior) = maybe_prompt_open_behavior( + open_new_workspace, + force_existing_window, + reuse, + &paths, + &app_state, + responses.as_ref(), + &mut requests, + cx, + ) + .await + { + match behavior { + 
settings::CliDefaultOpenBehavior::ExistingWindow => { + force_existing_window = true; + } + settings::CliDefaultOpenBehavior::NewWindow => { + open_new_workspace = Some(true); + } + } + } + let open_workspace_result = open_workspaces( paths, diff_paths, diff_all, open_new_workspace, + force_existing_window, reuse, - &responses, + responses.as_ref(), wait, dev_container, app_state.clone(), @@ -465,8 +538,117 @@ pub async fn handle_cli_connection( let status = if open_workspace_result.is_err() { 1 } else { 0 }; responses.send(CliResponse::Exit { status }).log_err(); } + CliRequest::SetOpenBehavior { .. } => { + // We handle this case in a situation-specific way in + // maybe_prompt_open_behavior + debug_panic!("unexpected SetOpenBehavior message"); + } + } + } +} + +/// Checks whether the CLI user should be prompted to configure their default +/// open behavior. Sends `CliResponse::PromptOpenBehavior` and waits for the +/// CLI's response if all of these are true: +/// - No explicit flag was given (`-n`, `-e`, `-a`) +/// - There is at least one existing Zed window +/// - The user has not yet configured `cli_default_open_behavior` in settings +/// +/// Returns the user's choice, or `None` if no prompt was needed or the CLI +/// didn't respond. 
+async fn maybe_prompt_open_behavior( + open_new_workspace: Option, + force_existing_window: bool, + reuse: bool, + paths: &[String], + app_state: &Arc, + responses: &dyn CliResponseSink, + requests: &mut mpsc::UnboundedReceiver, + cx: &mut AsyncApp, +) -> Option { + if open_new_workspace.is_some() || force_existing_window || reuse { + return None; + } + + let has_existing_windows = cx.update(|cx| { + cx.windows() + .iter() + .any(|window| window.downcast::().is_some()) + }); + + if !has_existing_windows { + return None; + } + + if !paths.is_empty() { + let paths_as_pathbufs: Vec = paths.iter().map(PathBuf::from).collect(); + let paths_in_existing_workspace = cx.update(|cx| { + for window in cx.windows() { + if let Some(multi_workspace) = window.downcast::() { + if let Ok(multi_workspace) = multi_workspace.read(cx) { + for workspace in multi_workspace.workspaces() { + let project = workspace.read(cx).project().read(cx); + if project + .visibility_for_paths(&paths_as_pathbufs, false, cx) + .is_some() + { + return true; + } + } + } + } + } + false + }); + + if paths_in_existing_workspace { + return None; + } + } + + if !paths.is_empty() { + let has_directory = + futures::future::join_all(paths.iter().map(|p| app_state.fs.is_dir(Path::new(p)))) + .await + .into_iter() + .any(|is_dir| is_dir); + + if !has_directory { + return None; } } + + let settings_text = app_state + .fs + .load(paths::settings_file()) + .await + .unwrap_or_default(); + + if settings_text.contains("cli_default_open_behavior") { + return None; + } + + responses.send(CliResponse::PromptOpenBehavior).log_err()?; + + if let Some(CliRequest::SetOpenBehavior { behavior }) = requests.next().await { + let behavior = match behavior { + cli::CliOpenBehavior::ExistingWindow => { + settings::CliDefaultOpenBehavior::ExistingWindow + } + cli::CliOpenBehavior::NewWindow => settings::CliDefaultOpenBehavior::NewWindow, + }; + + let fs = app_state.fs.clone(); + cx.update(|cx| { + settings::update_settings_file(fs, 
cx, move |content, _cx| { + content.workspace.cli_default_open_behavior = Some(behavior); + }); + }); + + return Some(behavior); + } + + None } async fn open_workspaces( @@ -474,8 +656,9 @@ async fn open_workspaces( diff_paths: Vec<[String; 2]>, diff_all: bool, open_new_workspace: Option, + force_existing_window: bool, reuse: bool, - responses: &IpcSender, + responses: &dyn CliResponseSink, wait: bool, dev_container: bool, app_state: Arc, @@ -536,6 +719,7 @@ async fn open_workspaces( }; let open_options = workspace::OpenOptions { open_new_workspace, + force_existing_window, requesting_window: replace_window, wait, env: env.clone(), @@ -600,7 +784,7 @@ async fn open_local_workspace( diff_paths: Vec<[String; 2]>, diff_all: bool, open_options: workspace::OpenOptions, - responses: &IpcSender, + responses: &dyn CliResponseSink, app_state: &Arc, cx: &mut AsyncApp, ) -> bool { @@ -742,10 +926,7 @@ pub async fn derive_paths_with_position( mod tests { use super::*; use crate::zed::{open_listener::open_local_workspace, tests::init_test}; - use cli::{ - CliResponse, - ipc::{self}, - }; + use cli::CliResponse; use editor::Editor; use futures::poll; use gpui::{AppContext as _, TestAppContext}; @@ -757,6 +938,24 @@ mod tests { use util::path; use workspace::{AppState, MultiWorkspace}; + struct DiscardResponseSink; + + impl CliResponseSink for DiscardResponseSink { + fn send(&self, _response: CliResponse) -> anyhow::Result<()> { + Ok(()) + } + } + + struct SyncResponseSender(std::sync::mpsc::Sender); + + impl CliResponseSink for SyncResponseSender { + fn send(&self, response: CliResponse) -> anyhow::Result<()> { + self.0 + .send(response) + .map_err(|error| anyhow::anyhow!("{error}")) + } + } + #[gpui::test] fn test_parse_ssh_url(cx: &mut TestAppContext) { let _app_state = init_test(cx); @@ -1072,7 +1271,7 @@ mod tests { ) .await; - let (response_tx, _) = ipc::channel::().unwrap(); + let response_sink = DiscardResponseSink; let workspace_paths = 
vec![path!("/root/dir1").to_owned()]; let (done_tx, mut done_rx) = futures::channel::oneshot::channel(); @@ -1087,7 +1286,7 @@ mod tests { wait: true, ..Default::default() }, - &response_tx, + &response_sink, &app_state, &mut cx, ) @@ -1171,7 +1370,7 @@ mod tests { app_state: Arc, cx: &TestAppContext, ) { - let (response_tx, _) = ipc::channel::().unwrap(); + let response_sink = DiscardResponseSink; let workspace_paths = vec![path.to_owned()]; @@ -1185,7 +1384,7 @@ mod tests { open_new_workspace, ..Default::default() }, - &response_tx, + &response_sink, &app_state, &mut cx, ) @@ -1243,20 +1442,19 @@ mod tests { .unwrap(); // First, open a workspace normally - let (response_tx, _response_rx) = ipc::channel::().unwrap(); + let response_sink = DiscardResponseSink; let workspace_paths = vec![file1_path.to_string()]; let _errored = cx .spawn({ let app_state = app_state.clone(); - let response_tx = response_tx.clone(); |mut cx| async move { open_local_workspace( workspace_paths, vec![], false, workspace::OpenOptions::default(), - &response_tx, + &response_sink, &app_state, &mut cx, ) @@ -1282,8 +1480,8 @@ mod tests { let errored_reuse = cx .spawn({ let app_state = app_state.clone(); - let response_tx = response_tx.clone(); |mut cx| async move { + let response_sink = DiscardResponseSink; open_local_workspace( workspace_paths_reuse, vec![], @@ -1292,7 +1490,7 @@ mod tests { requesting_window: Some(window_to_replace), ..Default::default() }, - &response_tx, + &response_sink, &app_state, &mut cx, ) @@ -1426,21 +1624,19 @@ mod tests { .await .unwrap(); - let (response_tx, _response_rx) = ipc::channel::().unwrap(); - // Open first workspace let workspace_paths_1 = vec![file1_path.to_string()]; let _errored = cx .spawn({ let app_state = app_state.clone(); - let response_tx = response_tx.clone(); |mut cx| async move { + let response_sink = DiscardResponseSink; open_local_workspace( workspace_paths_1, Vec::new(), false, workspace::OpenOptions::default(), - &response_tx, + 
&response_sink, &app_state, &mut cx, ) @@ -1457,8 +1653,8 @@ mod tests { let _errored = cx .spawn({ let app_state = app_state.clone(); - let response_tx = response_tx.clone(); |mut cx| async move { + let response_sink = DiscardResponseSink; open_local_workspace( workspace_paths_2, Vec::new(), @@ -1467,7 +1663,7 @@ mod tests { open_new_workspace: Some(true), // Force new window ..Default::default() }, - &response_tx, + &response_sink, &app_state, &mut cx, ) @@ -1503,8 +1699,8 @@ mod tests { let _errored = cx .spawn({ let app_state = app_state.clone(); - let response_tx = response_tx.clone(); |mut cx| async move { + let response_sink = DiscardResponseSink; open_local_workspace( workspace_paths_add, Vec::new(), @@ -1513,7 +1709,7 @@ mod tests { open_new_workspace: Some(false), // --add flag ..Default::default() }, - &response_tx, + &response_sink, &app_state, &mut cx, ) @@ -1564,11 +1760,11 @@ mod tests { ) .await; - let (response_tx, _) = ipc::channel::().unwrap(); let errored = cx .spawn({ let app_state = app_state.clone(); |mut cx| async move { + let response_sink = DiscardResponseSink; open_local_workspace( vec![path!("/project").to_owned()], vec![], @@ -1577,7 +1773,7 @@ mod tests { open_in_dev_container: true, ..Default::default() }, - &response_tx, + &response_sink, &app_state, &mut cx, ) @@ -1618,11 +1814,11 @@ mod tests { ) .await; - let (response_tx, _) = ipc::channel::().unwrap(); let errored = cx .spawn({ let app_state = app_state.clone(); |mut cx| async move { + let response_sink = DiscardResponseSink; open_local_workspace( vec![path!("/project").to_owned()], vec![], @@ -1631,7 +1827,7 @@ mod tests { open_in_dev_container: true, ..Default::default() }, - &response_tx, + &response_sink, &app_state, &mut cx, ) @@ -1661,4 +1857,341 @@ mod tests { }) .unwrap(); } + + fn make_cli_open_request( + paths: Vec, + open_new_workspace: Option, + force_existing_window: bool, + ) -> CliRequest { + CliRequest::Open { + paths, + urls: vec![], + diff_paths: vec![], + 
diff_all: false, + wsl: None, + wait: false, + open_new_workspace, + force_existing_window, + reuse: false, + env: None, + user_data_dir: None, + dev_container: false, + } + } + + /// Runs the real [`cli::run_cli_response_loop`] on an OS thread against + /// the Zed-side `handle_cli_connection` on the GPUI foreground executor, + /// using `allow_parking` so the test scheduler tolerates cross-thread + /// wakeups. + /// + /// Returns `(exit_status, prompt_was_shown)`. + fn run_cli_with_zed_handler( + cx: &mut TestAppContext, + app_state: Arc, + open_request: CliRequest, + prompt_response: Option, + ) -> (i32, bool) { + cx.executor().allow_parking(); + + let (request_tx, request_rx) = mpsc::unbounded::(); + let (response_tx, response_rx) = std::sync::mpsc::channel::(); + let response_sink: Box = Box::new(SyncResponseSender(response_tx)); + + cx.spawn(|mut cx| async move { + handle_cli_connection((request_rx, response_sink), app_state, &mut cx).await; + }) + .detach(); + + let prompt_called = Arc::new(std::sync::atomic::AtomicBool::new(false)); + let prompt_called_for_thread = prompt_called.clone(); + + let cli_thread = std::thread::spawn(move || -> anyhow::Result { + request_tx + .unbounded_send(open_request) + .map_err(|error| anyhow::anyhow!("{error}"))?; + + while let Ok(response) = response_rx.recv() { + match response { + CliResponse::Ping => {} + CliResponse::Stdout { .. } | CliResponse::Stderr { .. 
} => {} + CliResponse::Exit { status } => return Ok(status), + CliResponse::PromptOpenBehavior => { + prompt_called_for_thread.store(true, std::sync::atomic::Ordering::SeqCst); + let behavior = + prompt_response.unwrap_or(cli::CliOpenBehavior::ExistingWindow); + request_tx + .unbounded_send(CliRequest::SetOpenBehavior { behavior }) + .map_err(|error| anyhow::anyhow!("{error}"))?; + } + } + } + + anyhow::bail!("CLI response channel closed without Exit") + }); + + while !cli_thread.is_finished() { + cx.run_until_parked(); + std::thread::sleep(std::time::Duration::from_millis(1)); + } + + let exit_status = cli_thread.join().unwrap().expect("CLI loop failed"); + let prompt_shown = prompt_called.load(std::sync::atomic::Ordering::SeqCst); + + // Flush any remaining async work (e.g. settings file writes). + cx.run_until_parked(); + + (exit_status, prompt_shown) + } + + #[gpui::test] + async fn test_e2e_no_flags_no_windows_no_prompt(cx: &mut TestAppContext) { + let app_state = init_test(cx); + + app_state + .fs + .as_fake() + .insert_tree(path!("/project"), json!({ "file.txt": "content" })) + .await; + + assert_eq!(cx.windows().len(), 0); + + let (status, prompt_shown) = run_cli_with_zed_handler( + cx, + app_state, + make_cli_open_request(vec![path!("/project/file.txt").to_string()], None, false), + None, + ); + + assert_eq!(status, 0); + assert!( + !prompt_shown, + "no prompt should be shown when no windows exist" + ); + assert_eq!(cx.windows().len(), 1); + } + + #[gpui::test] + async fn test_e2e_prompt_user_picks_existing_window(cx: &mut TestAppContext) { + let app_state = init_test(cx); + + app_state + .fs + .as_fake() + .insert_tree(path!("/project_a"), json!({ "file.txt": "content" })) + .await; + app_state + .fs + .as_fake() + .insert_tree(path!("/project_b"), json!({ "file.txt": "content" })) + .await; + + // Create an existing window so the prompt triggers + open_workspace_file(path!("/project_a"), None, app_state.clone(), cx).await; + 
assert_eq!(cx.windows().len(), 1); + + let (status, prompt_shown) = run_cli_with_zed_handler( + cx, + app_state.clone(), + make_cli_open_request(vec![path!("/project_b").to_string()], None, false), + Some(cli::CliOpenBehavior::ExistingWindow), + ); + + assert_eq!(status, 0); + assert!(prompt_shown, "prompt should be shown"); + assert_eq!(cx.windows().len(), 1); + + let settings_text = app_state + .fs + .load(paths::settings_file()) + .await + .unwrap_or_default(); + assert!( + settings_text.contains("existing_window"), + "settings should contain 'existing_window', got: {settings_text}" + ); + } + + #[gpui::test] + async fn test_e2e_prompt_user_picks_new_window(cx: &mut TestAppContext) { + let app_state = init_test(cx); + + app_state + .fs + .as_fake() + .insert_tree(path!("/project_a"), json!({ "file.txt": "content" })) + .await; + app_state + .fs + .as_fake() + .insert_tree(path!("/project_b"), json!({ "file.txt": "content" })) + .await; + + // Create an existing window with project_a + open_workspace_file(path!("/project_a"), None, app_state.clone(), cx).await; + assert_eq!(cx.windows().len(), 1); + + let (status, prompt_shown) = run_cli_with_zed_handler( + cx, + app_state.clone(), + make_cli_open_request(vec![path!("/project_b").to_string()], None, false), + Some(cli::CliOpenBehavior::NewWindow), + ); + + assert_eq!(status, 0); + assert!(prompt_shown, "prompt should be shown"); + assert_eq!(cx.windows().len(), 2); + + let settings_text = app_state + .fs + .load(paths::settings_file()) + .await + .unwrap_or_default(); + assert!( + settings_text.contains("new_window"), + "settings should contain 'new_window', got: {settings_text}" + ); + } + + #[gpui::test] + async fn test_e2e_setting_already_configured_no_prompt(cx: &mut TestAppContext) { + let app_state = init_test(cx); + + app_state + .fs + .as_fake() + .insert_tree(path!("/project"), json!({ "file.txt": "content" })) + .await; + + // Pre-configure the setting in settings.json + app_state + .fs + .as_fake() + 
.insert_tree( + paths::config_dir(), + json!({ + "settings.json": r#"{"cli_default_open_behavior": "existing_window"}"# + }), + ) + .await; + + // Create an existing window + open_workspace_file(path!("/project"), None, app_state.clone(), cx).await; + assert_eq!(cx.windows().len(), 1); + + let (status, prompt_shown) = run_cli_with_zed_handler( + cx, + app_state, + make_cli_open_request(vec![path!("/project/file.txt").to_string()], None, false), + None, + ); + + assert_eq!(status, 0); + assert!( + !prompt_shown, + "no prompt should be shown when setting already configured" + ); + } + + #[gpui::test] + async fn test_e2e_explicit_existing_flag_no_prompt(cx: &mut TestAppContext) { + let app_state = init_test(cx); + + app_state + .fs + .as_fake() + .insert_tree(path!("/project"), json!({ "file.txt": "content" })) + .await; + + // Create an existing window + open_workspace_file(path!("/project"), None, app_state.clone(), cx).await; + assert_eq!(cx.windows().len(), 1); + + let (status, prompt_shown) = run_cli_with_zed_handler( + cx, + app_state, + make_cli_open_request( + vec![path!("/project/file.txt").to_string()], + None, + true, // -e flag: force existing window + ), + None, + ); + + assert_eq!(status, 0); + assert!(!prompt_shown, "no prompt should be shown with -e flag"); + assert_eq!(cx.windows().len(), 1); + } + + #[gpui::test] + async fn test_e2e_explicit_new_flag_no_prompt(cx: &mut TestAppContext) { + let app_state = init_test(cx); + + app_state + .fs + .as_fake() + .insert_tree(path!("/project_a"), json!({ "file.txt": "content" })) + .await; + app_state + .fs + .as_fake() + .insert_tree(path!("/project_b"), json!({ "file.txt": "content" })) + .await; + + // Create an existing window + open_workspace_file(path!("/project_a"), None, app_state.clone(), cx).await; + assert_eq!(cx.windows().len(), 1); + + let (status, prompt_shown) = run_cli_with_zed_handler( + cx, + app_state, + make_cli_open_request( + vec![path!("/project_b/file.txt").to_string()], + Some(true), 
// -n flag: force new window + false, + ), + None, + ); + + assert_eq!(status, 0); + assert!(!prompt_shown, "no prompt should be shown with -n flag"); + assert_eq!(cx.windows().len(), 2); + } + + #[gpui::test] + async fn test_e2e_paths_in_existing_workspace_no_prompt(cx: &mut TestAppContext) { + let app_state = init_test(cx); + + app_state + .fs + .as_fake() + .insert_tree( + path!("/project"), + json!({ + "src": { + "main.rs": "fn main() {}", + } + }), + ) + .await; + + // Open the project directory as a workspace + open_workspace_file(path!("/project"), None, app_state.clone(), cx).await; + assert_eq!(cx.windows().len(), 1); + + // Opening a file inside the already-open workspace should not prompt + let (status, prompt_shown) = run_cli_with_zed_handler( + cx, + app_state, + make_cli_open_request(vec![path!("/project/src/main.rs").to_string()], None, false), + None, + ); + + assert_eq!(status, 0); + assert!( + !prompt_shown, + "no prompt should be shown when paths are in an existing workspace" + ); + // File opened in existing window + assert_eq!(cx.windows().len(), 1); + } } diff --git a/crates/zed/src/zed/windows_only_instance.rs b/crates/zed/src/zed/windows_only_instance.rs index f22f49e26a982cb8cb68e21645033819e059de36..efc0e9e999d05d4d2dfe4969f82679e909f3ea06 100644 --- a/crates/zed/src/zed/windows_only_instance.rs +++ b/crates/zed/src/zed/windows_only_instance.rs @@ -159,6 +159,7 @@ fn send_args_to_instance(args: &Args) -> anyhow::Result<()> { wait: false, wsl: args.wsl.clone(), open_new_workspace: None, + force_existing_window: false, reuse: false, env: None, user_data_dir: args.user_data_dir.clone(), @@ -186,6 +187,11 @@ fn send_args_to_instance(args: &Args) -> anyhow::Result<()> { exit_status.lock().replace(status); return Ok(()); } + CliResponse::PromptOpenBehavior => { + tx.send(CliRequest::SetOpenBehavior { + behavior: cli::CliOpenBehavior::ExistingWindow, + })?; + } } } Ok(()) From 6213f06e1b01f48ee73ccfee9e3c6cddcd959317 Mon Sep 17 00:00:00 2001 
From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Sat, 11 Apr 2026 13:30:04 -0300 Subject: [PATCH 59/67] agent_ui: Add some general UI fixes (#53696) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit A note-worthy thing I'm adding in this PR to fix an agent panel problem is a trait method allowing to set a min-width in panels. I think some of them—if not all—could benefit from it, because there is a certain width these panels can get in that we just can't ensure they will work; things will break. So we either accept that and let the user's common sense understand that, or we don't allow them to reach that state... Surely, a small enough _window_ size can still break things, but then it's out of the realms of solution... Release Notes: - Agent: Fixed multi-line queued messages getting cut-off when the agent panel is in full screen. - Agent: Fixed agent panel getting auto-closed after submitting a queued message when the panel is in full screen. - Agent: Added a min-width to the agent panel, preventing it from reaching a small enough width where it would be essentially unusable. 
--- crates/agent_ui/src/agent_panel.rs | 8 ++++++++ crates/agent_ui/src/conversation_view/thread_view.rs | 11 ++++++++--- crates/workspace/src/dock.rs | 8 ++++++++ crates/workspace/src/workspace.rs | 4 ++++ 4 files changed, 28 insertions(+), 3 deletions(-) diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 8005445034d0b9339d36cb2d48da516f9c2a9207..9452f02732ba2ed05375d37b7659f2200843b5dd 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -90,6 +90,7 @@ use zed_actions::{ }; const AGENT_PANEL_KEY: &str = "agent_panel"; +const MIN_PANEL_WIDTH: Pixels = px(300.); const RECENTLY_UPDATED_MENU_LIMIT: usize = 6; const LAST_USED_AGENT_KEY: &str = "agent_panel__last_used_external_agent"; @@ -3546,6 +3547,13 @@ impl Panel for AgentPanel { } } + fn min_size(&self, window: &Window, cx: &App) -> Option { + match self.position(window, cx) { + DockPosition::Left | DockPosition::Right => Some(MIN_PANEL_WIDTH), + DockPosition::Bottom => None, + } + } + fn supports_flexible_size(&self) -> bool { true } diff --git a/crates/agent_ui/src/conversation_view/thread_view.rs b/crates/agent_ui/src/conversation_view/thread_view.rs index 66a211b2f4cf048301da1e7c154d50db191ec15a..a3e36cdafbfbbb82661f69784f779ece8f7fcc8b 100644 --- a/crates/agent_ui/src/conversation_view/thread_view.rs +++ b/crates/agent_ui/src/conversation_view/thread_view.rs @@ -1510,6 +1510,9 @@ impl ThreadView { let Some(queued) = self.remove_from_queue(index, cx) else { return; }; + + self.message_editor.focus_handle(cx).focus(window, cx); + let content = queued.content; let tracked_buffers = queued.tracked_buffers; @@ -2257,12 +2260,14 @@ impl ThreadView { let max_content_width = AgentSettings::get_global(cx).max_content_width; - div() + h_flex() .w_full() - .max_w(max_content_width) - .mx_auto() + .justify_center() .child( v_flex() + .flex_basis(max_content_width) + .flex_shrink() + .flex_grow_0() .mx_2() .bg(self.activity_bar_bg(cx)) 
.border_1() diff --git a/crates/workspace/src/dock.rs b/crates/workspace/src/dock.rs index e58b4b59100c05085c93993370b85a788fc159ca..81e46b513b4b902aeebb1a912261826c0c4f30dc 100644 --- a/crates/workspace/src/dock.rs +++ b/crates/workspace/src/dock.rs @@ -39,6 +39,9 @@ pub trait Panel: Focusable + EventEmitter + Render + Sized { fn position_is_valid(&self, position: DockPosition) -> bool; fn set_position(&mut self, position: DockPosition, window: &mut Window, cx: &mut Context); fn default_size(&self, window: &Window, cx: &App) -> Pixels; + fn min_size(&self, _window: &Window, _cx: &App) -> Option { + None + } fn initial_size_state(&self, _window: &Window, _cx: &App) -> PanelSizeState { PanelSizeState::default() } @@ -98,6 +101,7 @@ pub trait PanelHandle: Send + Sync { fn remote_id(&self) -> Option; fn pane(&self, cx: &App) -> Option>; fn default_size(&self, window: &Window, cx: &App) -> Pixels; + fn min_size(&self, window: &Window, cx: &App) -> Option; fn initial_size_state(&self, window: &Window, cx: &App) -> PanelSizeState; fn size_state_changed(&self, window: &mut Window, cx: &mut App); fn supports_flexible_size(&self, cx: &App) -> bool; @@ -181,6 +185,10 @@ where self.read(cx).default_size(window, cx) } + fn min_size(&self, window: &Window, cx: &App) -> Option { + self.read(cx).min_size(window, cx) + } + fn initial_size_state(&self, window: &Window, cx: &App) -> PanelSizeState { self.read(cx).initial_size_state(window, cx) } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 857db0795bbac8cfe5ee3040971d071c05ed38cd..f9330f07ee3d057f33aa660fd491d2e61642d658 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -7457,6 +7457,7 @@ impl Workspace { let dock = dock.read(cx); if let Some(panel) = dock.visible_panel() { let size_state = dock.stored_panel_size_state(panel.as_ref()); + let min_size = panel.min_size(window, cx); if position.axis() == Axis::Horizontal { let use_flexible = 
panel.has_flexible_size(window, cx); let flex_grow = if use_flexible { @@ -7478,6 +7479,9 @@ impl Workspace { .unwrap_or_else(|| panel.default_size(window, cx)); container = container.w(size); } + if let Some(min) = min_size { + container = container.min_w(min); + } } else { let size = size_state .and_then(|state| state.size) From 6af5ca3f8036b937e9d84bdf2283e89986caca67 Mon Sep 17 00:00:00 2001 From: Smit Chaudhary Date: Sat, 11 Apr 2026 18:52:27 +0200 Subject: [PATCH 60/67] editor: Fix breadcrumb syntax colors not updating when theme changes (#53185) --- crates/editor/src/document_symbols.rs | 132 +++++++++++++++++++++++++- crates/editor/src/editor.rs | 1 + 2 files changed, 132 insertions(+), 1 deletion(-) diff --git a/crates/editor/src/document_symbols.rs b/crates/editor/src/document_symbols.rs index ef9159788a7a5c2b2c317015219090fdae6a4944..074e5a5ed27b104946ab7537c2e772cf0c9c5572 100644 --- a/crates/editor/src/document_symbols.rs +++ b/crates/editor/src/document_symbols.rs @@ -331,7 +331,7 @@ mod tests { use futures::StreamExt as _; use gpui::TestAppContext; - use settings::DocumentSymbols; + use settings::{DocumentSymbols, SettingsStore}; use util::path; use zed_actions::editor::{MoveDown, MoveUp}; @@ -875,4 +875,134 @@ mod tests { "Should not have made any LSP document symbol requests when setting is off" ); } + + #[gpui::test] + async fn test_breadcrumb_highlights_update_on_theme_change(cx: &mut TestAppContext) { + use collections::IndexMap; + use gpui::{Hsla, Rgba, UpdateGlobal as _}; + use theme_settings::{HighlightStyleContent, ThemeStyleContent}; + use ui::ActiveTheme as _; + + init_test(cx, |_| {}); + + let mut cx = EditorLspTestContext::new_rust(lsp::ServerCapabilities::default(), cx).await; + + // Set the initial theme with a red keyword color and sync it to the + // language registry so tree-sitter highlight maps are up to date. 
+ let red_color: Hsla = Rgba { + r: 1.0, + g: 0.0, + b: 0.0, + a: 1.0, + } + .into(); + cx.update(|_, cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + settings.theme.experimental_theme_overrides = Some(ThemeStyleContent { + syntax: IndexMap::from_iter([( + "keyword".to_string(), + HighlightStyleContent { + color: Some("#ff0000".to_string()), + background_color: None, + font_style: None, + font_weight: None, + }, + )]), + ..ThemeStyleContent::default() + }); + }); + }); + }); + cx.update_editor(|editor, _window, cx| { + editor + .project + .as_ref() + .expect("editor should have a project") + .read(cx) + .languages() + .set_theme(cx.theme().clone()); + }); + cx.set_state("fn maˇin() {}"); + cx.run_until_parked(); + + cx.update_editor(|editor, _window, cx| { + let breadcrumbs = editor + .breadcrumbs_inner(cx) + .expect("Should have breadcrumbs"); + let symbol_segment = breadcrumbs + .iter() + .find(|b| b.text.as_ref() == "fn main") + .expect("Should have 'fn main' breadcrumb"); + let keyword_highlight = symbol_segment + .highlights + .iter() + .find(|(range, _)| &symbol_segment.text[range.clone()] == "fn") + .expect("Should have a highlight for the 'fn' keyword"); + assert_eq!( + keyword_highlight.1.color, + Some(red_color), + "The 'fn' keyword should have red color" + ); + }); + + // Change the theme to use a blue keyword color. This simulates a user + // switching themes. The language registry set_theme call mirrors what + // the application does in main.rs on theme change. 
+ let blue_color: Hsla = Rgba { + r: 0.0, + g: 0.0, + b: 1.0, + a: 1.0, + } + .into(); + cx.update(|_, cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + settings.theme.experimental_theme_overrides = Some(ThemeStyleContent { + syntax: IndexMap::from_iter([( + "keyword".to_string(), + HighlightStyleContent { + color: Some("#0000ff".to_string()), + background_color: None, + font_style: None, + font_weight: None, + }, + )]), + ..ThemeStyleContent::default() + }); + }); + }); + }); + cx.update_editor(|editor, _window, cx| { + editor + .project + .as_ref() + .expect("editor should have a project") + .read(cx) + .languages() + .set_theme(cx.theme().clone()); + }); + cx.run_until_parked(); + + cx.update_editor(|editor, _window, cx| { + let breadcrumbs = editor + .breadcrumbs_inner(cx) + .expect("Should have breadcrumbs after theme change"); + let symbol_segment = breadcrumbs + .iter() + .find(|b| b.text.as_ref() == "fn main") + .expect("Should have 'fn main' breadcrumb after theme change"); + let keyword_highlight = symbol_segment + .highlights + .iter() + .find(|(range, _)| &symbol_segment.text[range.clone()] == "fn") + .expect("Should have a highlight for the 'fn' keyword after theme change"); + assert_eq!( + keyword_highlight.1.color, + Some(blue_color), + "The 'fn' keyword should have blue color after theme change" + ); + }); + } } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 09fc8ece435c8aff22bbf380709669282bd28dcd..77ddd5e95d159d20ac627ec1d0406823d201ff4c 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -24815,6 +24815,7 @@ impl Editor { self.invalidate_semantic_tokens(None); self.refresh_semantic_tokens(None, None, cx); + self.refresh_outline_symbols_at_cursor(cx); } pub fn set_searchable(&mut self, searchable: bool) { From 9f507655c85d498d3eadb88349729e35ae35d684 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> 
Date: Sat, 11 Apr 2026 16:07:36 -0300 Subject: [PATCH 61/67] agent_ui: Fix creating a thread from the toolbar (#53706) Follow up to https://github.com/zed-industries/zed/pull/53574 Closes https://github.com/zed-industries/zed/issues/53705 In the draft feature PR, we didn't update the new thread creation from the agent panel's toolbar to _also_ create a corresponding draft item in the sidebar. Additionally, the draft feature exposed a previously-existing bug where the toolbar wouldn't get synced with the currently selected agent; and as you navigated between them, we would fall back to the native agent, as well as when you hit cmd-n. cc @mikayla-maki just a quick FYI here for the revamp! Will push this forward in the meantime given it's a bit of a bad bug. Release Notes: - N/A --- crates/agent_ui/src/agent_panel.rs | 113 ++++++++++++++++++++--- crates/agent_ui/src/conversation_view.rs | 4 + 2 files changed, 105 insertions(+), 12 deletions(-) diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 9452f02732ba2ed05375d37b7659f2200843b5dd..aa355b07b1399d931e9bc805d1ed17240aba61fb 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -2184,6 +2184,15 @@ impl AgentPanel { self.retain_running_thread(old_view, cx); } + // Keep the toolbar's selected agent in sync with the active thread's agent. + if let ActiveView::AgentThread { conversation_view } = &self.active_view { + let thread_agent = conversation_view.read(cx).agent_key().clone(); + if self.selected_agent != thread_agent { + self.selected_agent = thread_agent; + self.serialize(cx); + } + } + // Subscribe to the active ThreadView's events (e.g. FirstSendRequested) // so the panel can intercept the first send for worktree creation. 
// Re-subscribe whenever the ConnectionView changes, since the inner @@ -4012,11 +4021,10 @@ impl AgentPanel { workspace.panel::(cx) { panel.update(cx, |panel, cx| { - panel.new_agent_thread( - Agent::NativeAgent, - window, - cx, - ); + panel.selected_agent = Agent::NativeAgent; + panel.reset_start_thread_in_to_default(cx); + let id = panel.create_draft(window, cx); + panel.activate_draft(id, true, window, cx); }); } }); @@ -4097,13 +4105,16 @@ impl AgentPanel { workspace.panel::(cx) { panel.update(cx, |panel, cx| { - panel.new_agent_thread( - Agent::Custom { - id: agent_id.clone(), - }, - window, + panel.selected_agent = Agent::Custom { + id: agent_id.clone(), + }; + panel.reset_start_thread_in_to_default( cx, ); + let id = panel.create_draft(window, cx); + panel.activate_draft( + id, true, window, cx, + ); }); } }); @@ -4145,8 +4156,11 @@ impl AgentPanel { let selected_agent = div() .id("selected_agent_icon") .when_some(selected_agent_custom_icon, |this, icon_path| { - this.px_1() - .child(Icon::from_external_svg(icon_path).color(Color::Muted)) + this.px_1().child( + Icon::from_external_svg(icon_path) + .color(Color::Muted) + .size(IconSize::Small), + ) }) .when(!has_custom_icon, |this| { this.when_some(selected_agent_builtin_icon, |this, icon| { @@ -7357,4 +7371,79 @@ mod tests { }) .unwrap(); } + + #[gpui::test] + async fn test_selected_agent_syncs_when_navigating_between_threads(cx: &mut TestAppContext) { + let (panel, mut cx) = setup_panel(cx).await; + + let custom_agent = Agent::Custom { + id: "my-custom-agent".into(), + }; + + // Create a draft thread with the custom agent. 
+ panel.update(&mut cx, |panel, _cx| { + panel.selected_agent = custom_agent.clone(); + }); + panel.update_in(&mut cx, |panel, window, cx| { + panel.new_thread(&NewThread, window, cx); + }); + let draft_id = panel.read_with(&cx, |panel, _cx| { + assert_eq!(panel.selected_agent, custom_agent); + panel + .active_draft_id() + .expect("should have an active draft") + }); + + // Open a different thread (stub agent) — this navigates away from the draft. + let connection = StubAgentConnection::new(); + let stub_agent = Agent::Custom { id: "Test".into() }; + open_thread_with_connection(&panel, connection.clone(), &mut cx); + let other_session_id = active_session_id(&panel, &cx); + + // Send a message so the thread is retained when we navigate away. + connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("response".into()), + )]); + send_message(&panel, &mut cx); + cx.run_until_parked(); + + panel.read_with(&cx, |panel, _cx| { + assert_ne!( + panel.selected_agent, custom_agent, + "selected_agent should have changed to the stub agent" + ); + }); + + // Navigate back to the draft thread. + panel.update_in(&mut cx, |panel, window, cx| { + panel.activate_draft(draft_id, true, window, cx); + }); + + panel.read_with(&cx, |panel, _cx| { + assert_eq!( + panel.selected_agent, custom_agent, + "selected_agent should sync back to the draft's agent" + ); + }); + + // Navigate to the other thread via load_agent_thread (simulating history click). 
+ panel.update_in(&mut cx, |panel, window, cx| { + panel.load_agent_thread( + stub_agent.clone(), + other_session_id, + None, + None, + true, + window, + cx, + ); + }); + + panel.read_with(&cx, |panel, _cx| { + assert_eq!( + panel.selected_agent, stub_agent, + "selected_agent should sync to the loaded thread's agent" + ); + }); + } } diff --git a/crates/agent_ui/src/conversation_view.rs b/crates/agent_ui/src/conversation_view.rs index 528e38333144524c4a4dffa63a7a8b107c829e41..9861a4086949f964bcac99adec48a2f0af31a100 100644 --- a/crates/agent_ui/src/conversation_view.rs +++ b/crates/agent_ui/src/conversation_view.rs @@ -1229,6 +1229,10 @@ impl ConversationView { &self.workspace } + pub fn agent_key(&self) -> &Agent { + &self.connection_key + } + pub fn title(&self, cx: &App) -> SharedString { match &self.server_state { ServerState::Connected(view) => view From 5a47e9825babb293a4e4aea3359fb860085ab63f Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Sat, 11 Apr 2026 13:32:39 -0600 Subject: [PATCH 62/67] Skip flakey prop tests (#53677) Closes #ISSUE Release Notes: - N/A --- crates/sidebar/src/sidebar_tests.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/sidebar/src/sidebar_tests.rs b/crates/sidebar/src/sidebar_tests.rs index 0b197b5fd278bbdf19b4c30fe27e1d591ad29696..ec9c709fab0c153e1f3f035f847fbc8bdb48654c 100644 --- a/crates/sidebar/src/sidebar_tests.rs +++ b/crates/sidebar/src/sidebar_tests.rs @@ -8419,6 +8419,7 @@ mod property_test { .assert_project_group_key_integrity(cx) } + #[ignore] #[gpui::property_test(config = ProptestConfig { cases: 50, ..Default::default() From a86de48c70e4bcc8c8da53eb5317726c74be7005 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Soares?= <37777652+Dnreikronos@users.noreply.github.com> Date: Sun, 12 Apr 2026 16:00:40 -0300 Subject: [PATCH 63/67] editor: Fix semantic tokens missing when opening buffer from multibuffer (#53712) Summary Semantic token highlighting was missing when opening a file from multibuffer search 
results (Ctrl+Shift+F). Which file got hit depended on window size and scroll offset. ## Root cause Two async tasks race to write `post_scroll_update`: 1. `set_visible_line_count` (scroll.rs:682) fires on first render and spawns a task that calls `register_visible_buffers` + `update_lsp_data` (requests semantic tokens). 2. `open_buffers_in_workspace` (editor.rs:25049) calls `change_selections` with autoscroll right after creating the editor. This emits `ScrollPositionChanged`, whose handler (editor.rs:2655) replaces `post_scroll_update` with a task calling `update_data_on_scroll`. 3. `update_data_on_scroll` (editor.rs:26099) has a singleton guard: `if !self.buffer().read(cx).is_singleton()` that skips `update_lsp_data` for single-file buffers. This is a scroll optimization, singleton buffers don't change their visible buffer set on scroll. 4. The initial task gets dropped, the replacement skips `update_lsp_data`, semantic tokens are never requested. ## Fix Added a `needs_initial_lsp_data` flag to the Editor struct, set to `true` on creation. `update_data_on_scroll` checks this flag alongside the singleton guard, so `update_lsp_data` runs at least once even for singletons. The flag flips to `false` right after, so subsequent scrolls behave exactly as before. No perf impact after the first render. 
## Self-review checklist - [x] I've reviewed my own diff for quality, security, and reliability - [ ] Unsafe blocks (if any) have justifying comments - [ ] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #53051 ## Demo Before: https://github.com/user-attachments/assets/77d07d95-cb4a-44ff-842d-1f7a46653ca9 After: https://github.com/user-attachments/assets/2c942f52-4ec3-459f-a97b-93919e4bfb3d ## Release notes - Fixed semantic token highlighting missing when opening a buffer from multibuffer search results --- crates/editor/src/editor.rs | 44 +++-- crates/editor/src/scroll.rs | 13 +- crates/editor/src/semantic_tokens.rs | 257 +++++++++++++++++++++++++++ 3 files changed, 290 insertions(+), 24 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 77ddd5e95d159d20ac627ec1d0406823d201ff4c..bc343ca0d4c8fbba8ddd6622a20c217385c2b919 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -1182,6 +1182,7 @@ pub struct Editor { delegate_stage_and_restore: bool, delegate_open_excerpts: bool, enable_lsp_data: bool, + needs_initial_data_update: bool, enable_runnables: bool, enable_mouse_wheel_zoom: bool, show_line_numbers: Option, @@ -1975,6 +1976,7 @@ impl Editor { self.buffers_with_disabled_indent_guides.clone(); clone.enable_mouse_wheel_zoom = self.enable_mouse_wheel_zoom; clone.enable_lsp_data = self.enable_lsp_data; + clone.needs_initial_data_update = self.enable_lsp_data; clone.enable_runnables = self.enable_runnables; clone } @@ -2424,6 +2426,7 @@ impl Editor { delegate_stage_and_restore: false, delegate_open_excerpts: false, enable_lsp_data: full_mode, + needs_initial_data_update: full_mode, enable_runnables: full_mode, enable_mouse_wheel_zoom: full_mode, show_git_diff_gutter: None, @@ -2652,16 +2655,7 @@ impl Editor { 
); }); - editor.post_scroll_update = cx.spawn_in(window, async move |editor, cx| { - cx.background_executor() - .timer(Duration::from_millis(50)) - .await; - editor - .update_in(cx, |editor, window, cx| { - editor.update_data_on_scroll(window, cx) - }) - .ok(); - }); + editor.update_data_on_scroll(true, window, cx); } editor.refresh_sticky_headers(&editor.snapshot(window, cx), cx); } @@ -20860,7 +20854,7 @@ impl Editor { cx.notify(); self.scrollbar_marker_state.dirty = true; - self.update_data_on_scroll(window, cx); + self.update_data_on_scroll(false, window, cx); self.folds_did_change(cx); } @@ -26092,11 +26086,35 @@ impl Editor { self.enable_mouse_wheel_zoom = false; } - fn update_data_on_scroll(&mut self, window: &mut Window, cx: &mut Context<'_, Self>) { + fn update_data_on_scroll( + &mut self, + debounce: bool, + window: &mut Window, + cx: &mut Context<'_, Self>, + ) { + if debounce { + self.post_scroll_update = cx.spawn_in(window, async move |editor, cx| { + cx.background_executor() + .timer(Duration::from_millis(50)) + .await; + editor + .update_in(cx, |editor, window, cx| { + editor.do_update_data_on_scroll(window, cx); + }) + .ok(); + }); + } else { + self.post_scroll_update = Task::ready(()); + self.do_update_data_on_scroll(window, cx); + } + } + + fn do_update_data_on_scroll(&mut self, window: &mut Window, cx: &mut Context<'_, Self>) { self.register_visible_buffers(cx); self.colorize_brackets(false, cx); self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); - if !self.buffer().read(cx).is_singleton() { + if !self.buffer().read(cx).is_singleton() || self.needs_initial_data_update { + self.needs_initial_data_update = false; self.update_lsp_data(None, window, cx); self.refresh_runnables(None, window, cx); } diff --git a/crates/editor/src/scroll.rs b/crates/editor/src/scroll.rs index 42b865b17ca4e241b8f0728488cacd42d52d257c..0735ae5170d453e8b29dd033752b1cd2c114d457 100644 --- a/crates/editor/src/scroll.rs +++ b/crates/editor/src/scroll.rs @@ 
-5,7 +5,7 @@ pub(crate) mod scroll_amount; use crate::editor_settings::ScrollBeyondLastLine; use crate::{ Anchor, DisplayPoint, DisplayRow, Editor, EditorEvent, EditorMode, EditorSettings, - InlayHintRefreshReason, MultiBufferSnapshot, RowExt, SizingBehavior, ToPoint, + MultiBufferSnapshot, RowExt, SizingBehavior, ToPoint, display_map::{DisplaySnapshot, ToDisplayPoint}, hover_popover::hide_hover, persistence::EditorDb, @@ -680,16 +680,7 @@ impl Editor { let opened_first_time = self.scroll_manager.visible_line_count.is_none(); self.scroll_manager.visible_line_count = Some(lines); if opened_first_time { - self.post_scroll_update = cx.spawn_in(window, async move |editor, cx| { - editor - .update_in(cx, |editor, window, cx| { - editor.register_visible_buffers(cx); - editor.colorize_brackets(false, cx); - editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); - editor.update_lsp_data(None, window, cx); - }) - .ok(); - }); + self.update_data_on_scroll(false, window, cx); } } diff --git a/crates/editor/src/semantic_tokens.rs b/crates/editor/src/semantic_tokens.rs index d485cfa70237fed542a240f202a8dc47b07467c4..eaadbbb0e2ee9a49e53cc645487ea489572b1241 100644 --- a/crates/editor/src/semantic_tokens.rs +++ b/crates/editor/src/semantic_tokens.rs @@ -1267,6 +1267,263 @@ mod tests { ); } + #[gpui::test] + async fn lsp_semantic_tokens_singleton_opened_from_multibuffer(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + update_test_language_settings(cx, &|language_settings| { + language_settings.languages.0.insert( + "Rust".into(), + LanguageSettingsContent { + semantic_tokens: Some(SemanticTokens::Full), + ..LanguageSettingsContent::default() + }, + ); + }); + + let rust_language = Arc::new(Language::new( + LanguageConfig { + name: "Rust".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["rs".into()], + ..LanguageMatcher::default() + }, + ..LanguageConfig::default() + }, + None, + )); + + let rust_legend = lsp::SemanticTokensLegend { + token_types: 
vec!["function".into()], + token_modifiers: Vec::new(), + }; + + let app_state = cx.update(workspace::AppState::test); + cx.update(|cx| { + assets::Assets.load_test_fonts(cx); + crate::init(cx); + workspace::init(app_state.clone(), cx); + }); + + let project = Project::test(app_state.fs.clone(), [], cx).await; + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + + let mut rust_server = language_registry.register_fake_lsp( + rust_language.name(), + FakeLspAdapter { + name: "rust", + capabilities: lsp::ServerCapabilities { + semantic_tokens_provider: Some( + lsp::SemanticTokensServerCapabilities::SemanticTokensOptions( + lsp::SemanticTokensOptions { + legend: rust_legend, + full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }), + ..lsp::SemanticTokensOptions::default() + }, + ), + ), + ..lsp::ServerCapabilities::default() + }, + initializer: Some(Box::new(move |fake_server| { + fake_server + .set_request_handler::( + move |_, _| async move { + Ok(Some(lsp::SemanticTokensResult::Tokens( + lsp::SemanticTokens { + data: vec![0, 3, 4, 0, 0], + result_id: None, + }, + ))) + }, + ); + })), + ..FakeLspAdapter::default() + }, + ); + language_registry.add(rust_language.clone()); + + // foo.rs must be long enough that autoscroll triggers an actual scroll + // position change when opening from the multibuffer with cursor near + // the end. This reproduces the race: set_visible_line_count spawns a + // task, then autoscroll fires ScrollPositionChanged whose handler + // replaces post_scroll_update with a debounced task that skips + // update_lsp_data for singletons. 
+ let mut foo_content = String::from("fn test() {}\n"); + for i in 0..100 { + foo_content.push_str(&format!("fn func_{i}() {{}}\n")); + } + + app_state + .fs + .as_fake() + .insert_tree( + EditorLspTestContext::root_path(), + json!({ + ".git": {}, + "bar.rs": "fn main() {}\n", + "foo.rs": foo_content, + }), + ) + .await; + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + project + .update(cx, |project, cx| { + project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx) + }) + .await + .unwrap(); + cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx)) + .await; + + // Open bar.rs as an editor to start the LSP server. + let bar_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone()); + let bar_item = workspace + .update_in(cx, |workspace, window, cx| { + workspace.open_path(bar_file, None, true, window, cx) + }) + .await + .expect("Could not open bar.rs"); + let bar_editor = cx.update(|_, cx| { + bar_item + .act_as::(cx) + .expect("Opened test file wasn't an editor") + }); + let bar_buffer = cx.read(|cx| { + bar_editor + .read(cx) + .buffer() + .read(cx) + .as_singleton() + .unwrap() + }); + + let _rust_server = rust_server.next().await.unwrap(); + + cx.executor().advance_clock(Duration::from_millis(200)); + let task = bar_editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task()); + cx.run_until_parked(); + task.await; + cx.run_until_parked(); + + assert!( + !extract_semantic_highlights(&bar_editor, &cx).is_empty(), + "bar.rs should have semantic tokens after initial open" + ); + + // Get foo.rs buffer directly from the project. No editor has ever + // fetched semantic tokens for this buffer. 
+ let foo_file = cx.read(|cx| workspace.file_project_paths(cx)[1].clone()); + let foo_buffer = project + .update(cx, |project, cx| project.open_buffer(foo_file, cx)) + .await + .expect("Could not open foo.rs buffer"); + + // Build a multibuffer with both files. The foo.rs excerpt covers a + // range near the end of the file so that opening the singleton will + // autoscroll to a position that requires changing scroll_position. + let multibuffer = cx.new(|cx| { + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), + bar_buffer.clone(), + [Point::new(0, 0)..Point::new(0, 12)], + 0, + cx, + ); + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), + foo_buffer.clone(), + [Point::new(95, 0)..Point::new(100, 0)], + 0, + cx, + ); + multibuffer + }); + + let mb_editor = workspace.update_in(cx, |workspace, window, cx| { + let editor = + cx.new(|cx| build_editor_with_project(project.clone(), multibuffer, window, cx)); + workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx); + editor + }); + mb_editor.update_in(cx, |editor, window, cx| { + let nav_history = workspace + .read(cx) + .active_pane() + .read(cx) + .nav_history_for_item(&cx.entity()); + editor.set_nav_history(Some(nav_history)); + window.focus(&editor.focus_handle(cx), cx) + }); + + // Close bar.rs tab so only the multibuffer remains. + workspace + .update_in(cx, |workspace, window, cx| { + let pane = workspace.active_pane().clone(); + pane.update(cx, |pane, cx| { + pane.close_item_by_id( + bar_editor.entity_id(), + workspace::SaveIntent::Skip, + window, + cx, + ) + }) + }) + .await + .ok(); + + cx.run_until_parked(); + + // Position cursor in the foo.rs excerpt (near line 95+). 
+ mb_editor.update_in(cx, |editor, window, cx| { + let snapshot = editor.display_snapshot(cx); + let end = snapshot.buffer_snapshot().len(); + editor.change_selections(None.into(), window, cx, |s| { + s.select_ranges([end..end]); + }); + }); + + // Open the singleton from the multibuffer. open_buffers_in_workspace + // creates the editor and calls change_selections with autoscroll. + // During render, set_visible_line_count fires first (spawning a task), + // then autoscroll_vertically scrolls to line ~95 which emits + // ScrollPositionChanged, whose handler replaces post_scroll_update. + mb_editor.update_in(cx, |editor, window, cx| { + editor.open_excerpts(&crate::actions::OpenExcerpts, window, cx); + }); + + cx.run_until_parked(); + cx.executor().advance_clock(Duration::from_millis(200)); + cx.run_until_parked(); + + let active_editor = workspace.read_with(cx, |workspace, cx| { + workspace + .active_item(cx) + .and_then(|item| item.act_as::(cx)) + .expect("Active item should be an editor") + }); + + assert!( + active_editor.read_with(cx, |editor, cx| editor.buffer().read(cx).is_singleton()), + "Active editor should be a singleton buffer" + ); + + // Wait for semantic tokens on the singleton. 
+ cx.executor().advance_clock(Duration::from_millis(200)); + let task = active_editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task()); + task.await; + cx.run_until_parked(); + + let highlights = extract_semantic_highlights(&active_editor, &cx); + assert!( + !highlights.is_empty(), + "Singleton editor opened from multibuffer should have semantic tokens" + ); + } + fn extract_semantic_highlights( editor: &Entity, cx: &TestAppContext, From 60a8b6f0034b5732efc36a7d836474a55c8e1419 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Sun, 12 Apr 2026 23:54:22 +0200 Subject: [PATCH 64/67] ci: Use GitHub context for artifact name (#53559) Missed this yesterday in https://github.com/zed-industries/zed/pull/53433, because in the `with` level, we need to use the proper context syntax. Release Notes: - N/A --- .github/workflows/compliance_check.yml | 6 +++--- .github/workflows/release.yml | 12 ++++++------ tooling/xtask/src/tasks/workflows/release.rs | 4 +++- 3 files changed, 12 insertions(+), 10 deletions(-) diff --git a/.github/workflows/compliance_check.yml b/.github/workflows/compliance_check.yml index e74c38ec5d3701b936448a128ea8076932d83e91..144185f95ba95ba902d1239649cdcd8dc8828ef1 100644 --- a/.github/workflows/compliance_check.yml +++ b/.github/workflows/compliance_check.yml @@ -42,12 +42,12 @@ jobs: GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} LATEST_TAG: ${{ steps.determine-version.outputs.tag }} continue-on-error: true - - name: '@actions/upload-artifact compliance-report-${GITHUB_REF_NAME}.md' + - name: '@actions/upload-artifact compliance-report-${{ github.ref_name }}.md' if: always() uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: - name: compliance-report-${GITHUB_REF_NAME}.md - path: compliance-report-${GITHUB_REF_NAME}.md + name: compliance-report-${{ github.ref_name }}.md + path: compliance-report-${{ github.ref_name }}.md if-no-files-found: error - name: send_compliance_slack_notification if: always() 
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 17178ab3054a7cddf1dccd2cd9bfa415a56755bd..8081955920823edad55bcaef371a6f2f15f7b386 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -314,12 +314,12 @@ jobs: GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }} GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} continue-on-error: true - - name: '@actions/upload-artifact compliance-report-${GITHUB_REF_NAME}.md' + - name: '@actions/upload-artifact compliance-report-${{ github.ref_name }}.md' if: always() uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: - name: compliance-report-${GITHUB_REF_NAME}.md - path: compliance-report-${GITHUB_REF_NAME}.md + name: compliance-report-${{ github.ref_name }}.md + path: compliance-report-${{ github.ref_name }}.md if-no-files-found: error - name: send_compliance_slack_notification if: always() @@ -682,12 +682,12 @@ jobs: env: GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }} GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} - - name: '@actions/upload-artifact compliance-report-${GITHUB_REF_NAME}.md' + - name: '@actions/upload-artifact compliance-report-${{ github.ref_name }}.md' if: always() uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: - name: compliance-report-${GITHUB_REF_NAME}.md - path: compliance-report-${GITHUB_REF_NAME}.md + name: compliance-report-${{ github.ref_name }}.md + path: compliance-report-${{ github.ref_name }}.md if-no-files-found: error overwrite: true - name: send_compliance_slack_notification diff --git a/tooling/xtask/src/tasks/workflows/release.rs b/tooling/xtask/src/tasks/workflows/release.rs index d3ad064a6653c963fdc78d147ffbb3147c009c8d..e3e0fb78a86208fa09d5b1b2a6697d3b2abc125d 100644 --- a/tooling/xtask/src/tasks/workflows/release.rs +++ b/tooling/xtask/src/tasks/workflows/release.rs @@ -158,6 +158,8 @@ pub(crate) fn create_sentry_release() -> Step { } pub(crate) const 
COMPLIANCE_REPORT_PATH: &str = "compliance-report-${GITHUB_REF_NAME}.md"; +pub(crate) const COMPLIANCE_REPORT_ARTIFACT_PATH: &str = + "compliance-report-${{ github.ref_name }}.md"; pub(crate) const COMPLIANCE_STEP_ID: &str = "run-compliance-check"; const NEEDS_REVIEW_PULLS_URL: &str = "https://github.com/zed-industries/zed/pulls?q=is%3Apr+is%3Aclosed+label%3A%22PR+state%3Aneeds+review%22"; @@ -206,7 +208,7 @@ pub(crate) fn add_compliance_steps( (job, result) } - let upload_step = upload_artifact(COMPLIANCE_REPORT_PATH) + let upload_step = upload_artifact(COMPLIANCE_REPORT_ARTIFACT_PATH) .if_condition(Expression::new("always()")) .when( matches!(context, ComplianceContext::Release { .. }), From c512c72b634d9e3f7c907846ee111f68cf41dbbb Mon Sep 17 00:00:00 2001 From: Erin van der Veen Date: Mon, 13 Apr 2026 03:44:23 +0200 Subject: [PATCH 65/67] git_ui: Toggle directory expansion with Enter key in tree view (#52965) Semantics and code was mostly taken from the existing behaviour of the Left/Right arrow keys. 
Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #51986 Release Notes: - Added ability to toggle tree collapse in git panel --- crates/git_ui/src/git_panel.rs | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index a6f1e097cfe1cc0c012ff77987011571760b3ef0..6d4c73dd57b1ebc78d6525abb416741c783c7d67 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -1272,6 +1272,14 @@ impl GitPanel { } fn open_diff(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context) { + if let Some(GitListEntry::Directory(dir_entry)) = self + .selected_entry + .and_then(|i| self.entries.get(i)) + .cloned() + { + self.toggle_directory(&dir_entry.key, window, cx); + return; + } maybe!({ let entry = self.entries.get(self.selected_entry?)?.status_entry()?; let workspace = self.workspace.upgrade()?; From 8e0005c0cd84aa6bd2b272ccea00da6460fc1da2 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Sun, 12 Apr 2026 23:04:46 -0700 Subject: [PATCH 66/67] Fix a bug where worktree creation would apply to nested git repos (#53771) Saw this while manually testing other stuff. 
Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --- crates/agent_ui/src/agent_panel.rs | 102 ++++++++++++++++++++++++++++- 1 file changed, 99 insertions(+), 3 deletions(-) diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index aa355b07b1399d931e9bc805d1ed17240aba61fb..a9debf3ff0f75150822c814478617e12ed0ee9bf 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -2800,9 +2800,7 @@ impl AgentPanel { .iter() .filter_map(|(id, repo)| { let work_dir = repo.read(cx).work_directory_abs_path.clone(); - if wt_path.starts_with(work_dir.as_ref()) - || work_dir.starts_with(wt_path.as_ref()) - { + if wt_path.starts_with(work_dir.as_ref()) { Some((*id, repo.clone(), work_dir.as_ref().components().count())) } else { None @@ -7446,4 +7444,102 @@ mod tests { ); }); } + + #[gpui::test] + async fn test_classify_worktrees_skips_non_git_root_with_nested_repo(cx: &mut TestAppContext) { + init_test(cx); + cx.update(|cx| { + agent::ThreadStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + }); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/repo_a", + json!({ + ".git": {}, + "src": { "main.rs": "" } + }), + ) + .await; + fs.insert_tree( + "/repo_b", + json!({ + ".git": {}, + "src": { "lib.rs": "" } + }), + ) + .await; + // `plain_dir` is NOT a git repo, but contains a nested git repo. 
+ fs.insert_tree( + "/plain_dir", + json!({ + "nested_repo": { + ".git": {}, + "src": { "lib.rs": "" } + } + }), + ) + .await; + + let project = Project::test( + fs.clone(), + [ + Path::new("/repo_a"), + Path::new("/repo_b"), + Path::new("/plain_dir"), + ], + cx, + ) + .await; + + // Let the worktree scanner discover all `.git` directories. + cx.executor().run_until_parked(); + + let multi_workspace = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + let workspace = multi_workspace + .read_with(cx, |mw, _cx| mw.workspace().clone()) + .unwrap(); + + let cx = &mut VisualTestContext::from_window(multi_workspace.into(), cx); + + let panel = workspace.update_in(cx, |workspace, window, cx| { + cx.new(|cx| AgentPanel::new(workspace, None, window, cx)) + }); + + cx.run_until_parked(); + + panel.read_with(cx, |panel, cx| { + let (git_repos, non_git_paths) = panel.classify_worktrees(cx); + + let git_work_dirs: Vec = git_repos + .iter() + .map(|repo| repo.read(cx).work_directory_abs_path.to_path_buf()) + .collect(); + + assert_eq!( + git_repos.len(), + 2, + "only repo_a and repo_b should be classified as git repos, \ + but got: {git_work_dirs:?}" + ); + assert!( + git_work_dirs.contains(&PathBuf::from("/repo_a")), + "repo_a should be in git_repos: {git_work_dirs:?}" + ); + assert!( + git_work_dirs.contains(&PathBuf::from("/repo_b")), + "repo_b should be in git_repos: {git_work_dirs:?}" + ); + + assert_eq!( + non_git_paths, + vec![PathBuf::from("/plain_dir")], + "plain_dir should be classified as a non-git path \ + (not matched to nested_repo inside it)" + ); + }); + } } From 497b6de85f11a6afeaa3593ce26d22e49e141b91 Mon Sep 17 00:00:00 2001 From: daydalek <90121301+daydalek@users.noreply.github.com> Date: Mon, 13 Apr 2026 14:46:19 +0800 Subject: [PATCH 67/67] editor: Add configurable hover delay (#53504) follow up #47471 As described in #47471, we introduced a direction-aware strategy to improve user experience when interacting with hover 
popovers. In this follow-up, we are adding `hover_popover_sticky` and `hover_popover_hiding_delay` to control whether the feature introduced in 47471 enabled, and to let users configure the delay to balance responsiveness . Also `hover_popover_sticky` can now be imported from `editor.hover.sticky`, as well as `hover_popover_hiding_delay` from `editor.hover.hidingDelay` in VSCode. Also this PR adds several tests: - `test_hover_popover_cancel_hide_on_rehover`: when the cursor returns to the hover after leaving once within the hiding delay, the hover should persist while canceling the existing hiding timer. - `test_hover_popover_enabled_false_ignores_sticky` : when `hover_popover_enabled` is false, the `hover_popover_sticky` and `hover_popover_hiding_delay` have no effect(since no hover is shown). - `test_hover_popover_sticky_delay_restarts_when_mouse_gets_closer`: when mouse gets closer to hover popover, we expect the timer to reset and the hover remains visible. - `test_hover_popover_hiding_delay`: check if the delay(in test, that's 500ms) works. - `test_hover_popover_sticky_disabled`: when hover_popover_sticky is false, the hover popover disappears immediately after the cursor leaving the codes. - VSCode import test in `settings_store.rs` Release Notes: - Added `hover_popover_sticky` and `hover_popover_hiding_delay` settings to balance responsiveness of hover popovers. 
--------- Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- assets/settings/default.json | 7 + crates/editor/src/editor_settings.rs | 4 + crates/editor/src/hover_popover.rs | 447 +++++++++++++++++++++++++- crates/settings/src/settings_store.rs | 22 ++ crates/settings/src/vscode_import.rs | 2 + crates/settings_content/src/editor.rs | 11 + crates/settings_ui/src/page_data.rs | 31 +- docs/src/migrate/vs-code.md | 2 + docs/src/reference/all-settings.md | 20 ++ 9 files changed, 543 insertions(+), 3 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 2fd6120ba0d79add35903117e17a43caa02ef619..68a9f2324912db7c1724c81cefd60ecbc41bf4b1 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -123,6 +123,13 @@ // Time to wait in milliseconds before showing the informational hover box. // This delay also applies to auto signature help when `auto_signature_help` is enabled. "hover_popover_delay": 300, + // Whether the hover popover sticks when the mouse moves toward it, + // allowing interaction with its contents before it disappears. + "hover_popover_sticky": true, + // Time to wait in milliseconds before hiding the hover popover + // after the mouse moves away from the hover target. + // Only applies when `hover_popover_sticky` is enabled. + "hover_popover_hiding_delay": 300, // Whether to confirm before quitting Zed. 
"confirm_quit": false, // Whether to restore last closed project when fresh Zed instance is opened diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index 67b56a161f4d92985339d725b553c4baeec05bca..548053da7d794de83d99afdfddb098e4cfb2b18e 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -24,6 +24,8 @@ pub struct EditorSettings { pub lsp_highlight_debounce: DelayMs, pub hover_popover_enabled: bool, pub hover_popover_delay: DelayMs, + pub hover_popover_sticky: bool, + pub hover_popover_hiding_delay: DelayMs, pub toolbar: Toolbar, pub scrollbar: Scrollbar, pub minimap: Minimap, @@ -205,6 +207,8 @@ impl Settings for EditorSettings { lsp_highlight_debounce: editor.lsp_highlight_debounce.unwrap(), hover_popover_enabled: editor.hover_popover_enabled.unwrap(), hover_popover_delay: editor.hover_popover_delay.unwrap(), + hover_popover_sticky: editor.hover_popover_sticky.unwrap(), + hover_popover_hiding_delay: editor.hover_popover_hiding_delay.unwrap(), toolbar: Toolbar { breadcrumbs: toolbar.breadcrumbs.unwrap(), quick_actions: toolbar.quick_actions.unwrap(), diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index 55350a9c679a10ea8597ae8c923c33af34d71360..730adec4f9b6b13ea14fc00c447b37bf77156b94 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -63,6 +63,12 @@ pub fn hover_at( editor.hover_state.closest_mouse_distance = None; show_hover(editor, anchor, false, window, cx); } else { + let settings = EditorSettings::get_global(cx); + if !settings.hover_popover_sticky { + hide_hover(editor, cx); + return; + } + let mut getting_closer = false; if let Some(mouse_position) = mouse_position { getting_closer = editor.hover_state.is_mouse_getting_closer(mouse_position); @@ -73,8 +79,8 @@ pub fn hover_at( return; } - // If we are moving closer, or if no timer is running at all, start/restart the 300ms timer. 
- let delay = Duration::from_millis(300u64); + // If we are moving closer, or if no timer is running at all, start/restart the timer. + let delay = Duration::from_millis(settings.hover_popover_hiding_delay.0); let task = cx.spawn(async move |this, cx| { cx.background_executor().timer(delay).await; this.update(cx, |editor, cx| { @@ -1201,6 +1207,7 @@ mod tests { use markdown::parser::MarkdownEvent; use project::InlayId; use settings::InlayHintSettingsContent; + use settings::{DelayMs, SettingsStore}; use smol::stream::StreamExt; use std::sync::atomic; use std::sync::atomic::AtomicUsize; @@ -2149,4 +2156,440 @@ mod tests { InlayOffset(MultiBufferOffset(104))..InlayOffset(MultiBufferOffset(108)) ); } + + #[gpui::test] + async fn test_hover_popover_hiding_delay(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let custom_delay_ms = 500u64; + cx.update(|cx| { + cx.update_global::(|settings, cx| { + settings.update_user_settings(cx, |settings| { + settings.editor.hover_popover_sticky = Some(true); + settings.editor.hover_popover_hiding_delay = Some(DelayMs(custom_delay_ms)); + }); + }); + }); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + hover_provider: Some(lsp::HoverProviderCapability::Simple(true)), + ..Default::default() + }, + cx, + ) + .await; + + cx.set_state(indoc! {" + fn ˇtest() { println!(); } + "}); + + // Trigger hover on a symbol + let hover_point = cx.display_point(indoc! {" + fn test() { printˇln!(); } + "}); + let symbol_range = cx.lsp_range(indoc! 
{" + fn test() { «println!»(); } + "}); + let mut requests = + cx.set_request_handler::(move |_, _, _| async move { + Ok(Some(lsp::Hover { + contents: lsp::HoverContents::Markup(lsp::MarkupContent { + kind: lsp::MarkupKind::Markdown, + value: "some basic docs".to_string(), + }), + range: Some(symbol_range), + })) + }); + cx.update_editor(|editor, window, cx| { + let snapshot = editor.snapshot(window, cx); + let anchor = snapshot + .buffer_snapshot() + .anchor_before(hover_point.to_offset(&snapshot, Bias::Left)); + hover_at(editor, Some(anchor), None, window, cx) + }); + cx.background_executor + .advance_clock(Duration::from_millis(get_hover_popover_delay(&cx) + 100)); + requests.next().await; + + // Hover should be visible + cx.editor(|editor, _, _| { + assert!(editor.hover_state.visible()); + }); + + // Move mouse away (hover_at with None anchor triggers the hiding delay) + cx.update_editor(|editor, window, cx| hover_at(editor, None, None, window, cx)); + + // Popover should still be visible before the custom hiding delay expires + cx.background_executor + .advance_clock(Duration::from_millis(custom_delay_ms - 100)); + cx.editor(|editor, _, _| { + assert!( + editor.hover_state.visible(), + "Popover should remain visible before the hiding delay expires" + ); + }); + + // After the full custom delay, the popover should be hidden + cx.background_executor + .advance_clock(Duration::from_millis(200)); + cx.editor(|editor, _, _| { + assert!( + !editor.hover_state.visible(), + "Popover should be hidden after the hiding delay expires" + ); + }); + } + + #[gpui::test] + async fn test_hover_popover_sticky_disabled(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + cx.update(|cx| { + cx.update_global::(|settings, cx| { + settings.update_user_settings(cx, |settings| { + settings.editor.hover_popover_sticky = Some(false); + }); + }); + }); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + hover_provider: 
Some(lsp::HoverProviderCapability::Simple(true)), + ..Default::default() + }, + cx, + ) + .await; + + cx.set_state(indoc! {" + fn ˇtest() { println!(); } + "}); + + // Trigger hover on a symbol + let hover_point = cx.display_point(indoc! {" + fn test() { printˇln!(); } + "}); + let symbol_range = cx.lsp_range(indoc! {" + fn test() { «println!»(); } + "}); + let mut requests = + cx.set_request_handler::(move |_, _, _| async move { + Ok(Some(lsp::Hover { + contents: lsp::HoverContents::Markup(lsp::MarkupContent { + kind: lsp::MarkupKind::Markdown, + value: "some basic docs".to_string(), + }), + range: Some(symbol_range), + })) + }); + cx.update_editor(|editor, window, cx| { + let snapshot = editor.snapshot(window, cx); + let anchor = snapshot + .buffer_snapshot() + .anchor_before(hover_point.to_offset(&snapshot, Bias::Left)); + hover_at(editor, Some(anchor), None, window, cx) + }); + cx.background_executor + .advance_clock(Duration::from_millis(get_hover_popover_delay(&cx) + 100)); + requests.next().await; + + // Hover should be visible + cx.editor(|editor, _, _| { + assert!(editor.hover_state.visible()); + }); + + // Move mouse away — with sticky disabled, hide immediately + cx.update_editor(|editor, window, cx| hover_at(editor, None, None, window, cx)); + + // Popover should be hidden immediately without any delay + cx.editor(|editor, _, _| { + assert!( + !editor.hover_state.visible(), + "Popover should be hidden immediately when sticky is disabled" + ); + }); + } + + #[gpui::test] + async fn test_hover_popover_hiding_delay_restarts_when_mouse_gets_closer( + cx: &mut gpui::TestAppContext, + ) { + init_test(cx, |_| {}); + + let custom_delay_ms = 600u64; + cx.update(|cx| { + cx.update_global::(|settings, cx| { + settings.update_user_settings(cx, |settings| { + settings.editor.hover_popover_sticky = Some(true); + settings.editor.hover_popover_hiding_delay = Some(DelayMs(custom_delay_ms)); + }); + }); + }); + + let mut cx = EditorLspTestContext::new_rust( + 
lsp::ServerCapabilities { + hover_provider: Some(lsp::HoverProviderCapability::Simple(true)), + ..Default::default() + }, + cx, + ) + .await; + + cx.set_state(indoc! {" + fn ˇtest() { println!(); } + "}); + + let hover_point = cx.display_point(indoc! {" + fn test() { printˇln!(); } + "}); + let symbol_range = cx.lsp_range(indoc! {" + fn test() { «println!»(); } + "}); + let mut requests = + cx.set_request_handler::(move |_, _, _| async move { + Ok(Some(lsp::Hover { + contents: lsp::HoverContents::Markup(lsp::MarkupContent { + kind: lsp::MarkupKind::Markdown, + value: "some basic docs".to_string(), + }), + range: Some(symbol_range), + })) + }); + cx.update_editor(|editor, window, cx| { + let snapshot = editor.snapshot(window, cx); + let anchor = snapshot + .buffer_snapshot() + .anchor_before(hover_point.to_offset(&snapshot, Bias::Left)); + hover_at(editor, Some(anchor), None, window, cx) + }); + cx.background_executor + .advance_clock(Duration::from_millis(get_hover_popover_delay(&cx) + 100)); + requests.next().await; + + cx.editor(|editor, _, _| { + assert!(editor.hover_state.visible()); + }); + + cx.update_editor(|editor, _, _| { + let popover = editor.hover_state.info_popovers.first().unwrap(); + popover.last_bounds.set(Some(Bounds { + origin: gpui::Point { + x: px(100.0), + y: px(100.0), + }, + size: Size { + width: px(100.0), + height: px(60.0), + }, + })); + }); + + let far_point = gpui::Point { + x: px(260.0), + y: px(130.0), + }; + cx.update_editor(|editor, window, cx| hover_at(editor, None, Some(far_point), window, cx)); + + cx.background_executor + .advance_clock(Duration::from_millis(400)); + cx.background_executor.run_until_parked(); + + let closer_point = gpui::Point { + x: px(220.0), + y: px(130.0), + }; + cx.update_editor(|editor, window, cx| { + hover_at(editor, None, Some(closer_point), window, cx) + }); + + cx.background_executor + .advance_clock(Duration::from_millis(250)); + cx.background_executor.run_until_parked(); + + cx.editor(|editor, _, _| 
{ + assert!( + editor.hover_state.visible(), + "Popover should remain visible because moving closer restarts the hiding timer" + ); + }); + + cx.background_executor + .advance_clock(Duration::from_millis(350)); + cx.background_executor.run_until_parked(); + + cx.editor(|editor, _, _| { + assert!( + !editor.hover_state.visible(), + "Popover should hide after the restarted hiding timer expires" + ); + }); + } + + #[gpui::test] + async fn test_hover_popover_cancel_hide_on_rehover(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let custom_delay_ms = 500u64; + cx.update(|cx| { + cx.update_global::(|settings, cx| { + settings.update_user_settings(cx, |settings| { + settings.editor.hover_popover_sticky = Some(true); + settings.editor.hover_popover_hiding_delay = Some(DelayMs(custom_delay_ms)); + }); + }); + }); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + hover_provider: Some(lsp::HoverProviderCapability::Simple(true)), + ..Default::default() + }, + cx, + ) + .await; + + cx.set_state(indoc! {" + fn ˇtest() { println!(); } + "}); + + let hover_point = cx.display_point(indoc! {" + fn test() { printˇln!(); } + "}); + let symbol_range = cx.lsp_range(indoc! 
{" + fn test() { «println!»(); } + "}); + let mut requests = + cx.set_request_handler::(move |_, _, _| async move { + Ok(Some(lsp::Hover { + contents: lsp::HoverContents::Markup(lsp::MarkupContent { + kind: lsp::MarkupKind::Markdown, + value: "some basic docs".to_string(), + }), + range: Some(symbol_range), + })) + }); + cx.update_editor(|editor, window, cx| { + let snapshot = editor.snapshot(window, cx); + let anchor = snapshot + .buffer_snapshot() + .anchor_before(hover_point.to_offset(&snapshot, Bias::Left)); + hover_at(editor, Some(anchor), None, window, cx) + }); + cx.background_executor + .advance_clock(Duration::from_millis(get_hover_popover_delay(&cx) + 100)); + requests.next().await; + + cx.editor(|editor, _, _| { + assert!(editor.hover_state.visible()); + }); + + // Move mouse away — starts the 500ms hide timer + cx.update_editor(|editor, window, cx| hover_at(editor, None, None, window, cx)); + + cx.background_executor + .advance_clock(Duration::from_millis(300)); + cx.background_executor.run_until_parked(); + cx.editor(|editor, _, _| { + assert!( + editor.hover_state.visible(), + "Popover should still be visible before hiding delay expires" + ); + }); + + // Move back to the symbol — should cancel the hiding timer + cx.update_editor(|editor, window, cx| { + let snapshot = editor.snapshot(window, cx); + let anchor = snapshot + .buffer_snapshot() + .anchor_before(hover_point.to_offset(&snapshot, Bias::Left)); + hover_at(editor, Some(anchor), None, window, cx) + }); + + // Advance past the original deadline — popover should still be visible + // because re-hovering cleared the hiding_delay_task + cx.background_executor + .advance_clock(Duration::from_millis(300)); + cx.background_executor.run_until_parked(); + cx.editor(|editor, _, _| { + assert!( + editor.hover_state.visible(), + "Popover should remain visible after re-hovering the symbol" + ); + assert!( + editor.hover_state.hiding_delay_task.is_none(), + "Hiding delay task should have been cleared by 
re-hover" + ); + }); + + // Move away again — starts a fresh 500ms timer + cx.update_editor(|editor, window, cx| hover_at(editor, None, None, window, cx)); + + cx.background_executor + .advance_clock(Duration::from_millis(custom_delay_ms + 100)); + cx.background_executor.run_until_parked(); + cx.editor(|editor, _, _| { + assert!( + !editor.hover_state.visible(), + "Popover should hide after the new hiding timer expires" + ); + }); + } + + #[gpui::test] + async fn test_hover_popover_enabled_false_ignores_sticky(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + cx.update(|cx| { + cx.update_global::(|settings, cx| { + settings.update_user_settings(cx, |settings| { + settings.editor.hover_popover_enabled = Some(false); + settings.editor.hover_popover_sticky = Some(true); + settings.editor.hover_popover_hiding_delay = Some(DelayMs(500)); + }); + }); + }); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + hover_provider: Some(lsp::HoverProviderCapability::Simple(true)), + ..Default::default() + }, + cx, + ) + .await; + + cx.set_state(indoc! {" + fn ˇtest() { println!(); } + "}); + + let hover_point = cx.display_point(indoc! 
{" + fn test() { printˇln!(); } + "}); + + // Trigger hover_at — should be gated by hover_popover_enabled=false + cx.update_editor(|editor, window, cx| { + let snapshot = editor.snapshot(window, cx); + let anchor = snapshot + .buffer_snapshot() + .anchor_before(hover_point.to_offset(&snapshot, Bias::Left)); + hover_at(editor, Some(anchor), None, window, cx) + }); + + // No need to advance clock or wait for LSP — the gate should prevent any work + cx.editor(|editor, _, _| { + assert!( + !editor.hover_state.visible(), + "Popover should not appear when hover_popover_enabled is false" + ); + assert!( + editor.hover_state.info_task.is_none(), + "No hover info task should be scheduled when hover is disabled" + ); + assert!( + editor.hover_state.triggered_from.is_none(), + "No hover trigger should be recorded when hover is disabled" + ); + }); + } } diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index 6e37c22afe0087c54c5574e17275218d8468ae05..a1b3b8c0ae23f58bdbe915a151e0825ab085a866 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -2071,6 +2071,28 @@ mod tests { .unindent(), cx, ); + + // hover sticky settings + check_vscode_import( + &mut store, + r#"{ + } + "# + .unindent(), + r#"{ + "editor.hover.sticky": false, + "editor.hover.hidingDelay": 500 + }"# + .to_owned(), + r#"{ + "base_keymap": "VSCode", + "hover_popover_hiding_delay": 500, + "hover_popover_sticky": false + } + "# + .unindent(), + cx, + ); } #[track_caller] diff --git a/crates/settings/src/vscode_import.rs b/crates/settings/src/vscode_import.rs index 40565fc4616d3b71c61729743d36b8479c3e590f..c83e56577373aa9834f76b3c32488a069844d249 100644 --- a/crates/settings/src/vscode_import.rs +++ b/crates/settings/src/vscode_import.rs @@ -267,6 +267,8 @@ impl VsCodeSettings { horizontal_scroll_margin: None, hover_popover_delay: self.read_u64("editor.hover.delay").map(Into::into), hover_popover_enabled: 
self.read_bool("editor.hover.enabled"), + hover_popover_sticky: self.read_bool("editor.hover.sticky"), + hover_popover_hiding_delay: self.read_u64("editor.hover.hidingDelay").map(Into::into), inline_code_actions: None, jupyter: None, lsp_document_colors: None, diff --git a/crates/settings_content/src/editor.rs b/crates/settings_content/src/editor.rs index 00a0549d6b8b1ded71069a5ece36ded5d1a69d0e..60c2686c084ba428992dfc82a9c18b6c24860a66 100644 --- a/crates/settings_content/src/editor.rs +++ b/crates/settings_content/src/editor.rs @@ -57,6 +57,17 @@ pub struct EditorSettingsContent { /// /// Default: 300 pub hover_popover_delay: Option, + /// Whether the hover popover sticks when the mouse moves toward it, + /// allowing interaction with its contents before it disappears. + /// + /// Default: true + pub hover_popover_sticky: Option, + /// Time to wait in milliseconds before hiding the hover popover + /// after the mouse moves away from the hover target. + /// Only applies when `hover_popover_sticky` is enabled. 
+ /// + /// Default: 300 + pub hover_popover_hiding_delay: Option, /// Toolbar related settings pub toolbar: Option, /// Scrollbar related settings diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index a14a831452a423baf5f75ec2698ee86c34ae042d..f3f44a754035f1b4531f8a0b987e26981c8963df 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -1770,7 +1770,7 @@ fn editor_page() -> SettingsPage { ] } - fn hover_popover_section() -> [SettingsPageItem; 3] { + fn hover_popover_section() -> [SettingsPageItem; 5] { [ SettingsPageItem::SectionHeader("Hover Popover"), SettingsPageItem::SettingItem(SettingItem { @@ -1800,6 +1800,35 @@ fn editor_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Sticky", + description: "Whether the hover popover sticks when the mouse moves toward it, allowing interaction with its contents.", + field: Box::new(SettingField { + json_path: Some("hover_popover_sticky"), + pick: |settings_content| settings_content.editor.hover_popover_sticky.as_ref(), + write: |settings_content, value| { + settings_content.editor.hover_popover_sticky = value; + }, + }), + metadata: None, + files: USER, + }), + // todo(settings ui): add units to this number input + SettingsPageItem::SettingItem(SettingItem { + title: "Hiding Delay", + description: "Time to wait in milliseconds before hiding the hover popover after the mouse moves away.", + field: Box::new(SettingField { + json_path: Some("hover_popover_hiding_delay"), + pick: |settings_content| { + settings_content.editor.hover_popover_hiding_delay.as_ref() + }, + write: |settings_content, value| { + settings_content.editor.hover_popover_hiding_delay = value; + }, + }), + metadata: None, + files: USER, + }), ] } diff --git a/docs/src/migrate/vs-code.md b/docs/src/migrate/vs-code.md index 820158c73ffc1ec2f869ad88e34fea4697e4fbec..b2f3049fce10b0dc0593e9f477c89e674b8f566d 100644 --- 
a/docs/src/migrate/vs-code.md +++ b/docs/src/migrate/vs-code.md @@ -59,6 +59,8 @@ The following VS Code settings are automatically imported when you use **Import | `editor.cursorSurroundingLines` | `vertical_scroll_margin` | | `editor.hover.enabled` | `hover_popover_enabled` | | `editor.hover.delay` | `hover_popover_delay` | +| `editor.hover.sticky` | `hover_popover_sticky` | +| `editor.hover.hidingDelay` | `hover_popover_hiding_delay` | | `editor.parameterHints.enabled` | `auto_signature_help` | | `editor.multiCursorModifier` | `multi_cursor_modifier` | | `editor.selectionHighlight` | `selection_highlight` | diff --git a/docs/src/reference/all-settings.md b/docs/src/reference/all-settings.md index b2b5a76a3a21411b1444268c592e24186ad29797..6eada231df3eafe44b86242aa75ba00a286a7be4 100644 --- a/docs/src/reference/all-settings.md +++ b/docs/src/reference/all-settings.md @@ -2465,6 +2465,26 @@ Example: `integer` values representing milliseconds +## Hover Popover Sticky + +- Description: Whether the hover popover sticks when the mouse moves toward it, allowing interaction with its contents before it disappears. +- Setting: `hover_popover_sticky` +- Default: `true` + +**Options** + +`boolean` values + +## Hover Popover Hiding Delay + +- Description: Time to wait in milliseconds before hiding the hover popover after the mouse moves away from the hover target. Only applies when `hover_popover_sticky` is enabled. +- Setting: `hover_popover_hiding_delay` +- Default: `300` + +**Options** + +`integer` values representing milliseconds + ## Icon Theme - Description: The icon theme setting can be specified in two forms - either as the name of an icon theme or as an object containing the `mode`, `dark`, and `light` icon themes for files/folders inside Zed.