From f92b498eb193e6e9f75f824e838ce11cb55a7438 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Thu, 2 Apr 2026 13:33:55 +0200 Subject: [PATCH 01/63] docs: Remove incomplete section in extension publishing steps (#52980) Release Notes: - N/A --- docs/src/extensions/developing-extensions.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/docs/src/extensions/developing-extensions.md b/docs/src/extensions/developing-extensions.md index 46bed8e223721be81806a3662752d3a4533ab173..01c16dc62be8b9be7e576bc1be10f20437acc993 100644 --- a/docs/src/extensions/developing-extensions.md +++ b/docs/src/extensions/developing-extensions.md @@ -173,8 +173,6 @@ git submodule add https://github.com/your-username/foobar-zed.git extensions/my- git add extensions/my-extension ``` -> **Note:** Your extension must live under te - > All extension submodules must use HTTPS URLs and not SSH URLS (`git@github.com`). 2. Add a new entry to the top-level `extensions.toml` file containing your extension: From 4deb4008b8d6a8a68d22b607bd58ddeb3f35b153 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Thu, 2 Apr 2026 14:52:42 +0200 Subject: [PATCH 02/63] language_core: Introduce fallback highlights (#52575) Self-Review Checklist: - [X] I've reviewed my own diff for quality, security, and reliability - [ ] Unsafe blocks (if any) have justifying comments - [ ] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [ ] Performance impact has been considered and is acceptable Release Notes: - Added the option for highlights from languages to specify fallbacks. That means that if you have a pattern with the captures `@second.capture @first.capture`, Zed will first try resolving a highlight from your theme for the code fragment using the first capture, then look for the second capture if no match for the first capture could be found. 
--------- Co-authored-by: Kirill Bulatov --- crates/agent_ui/src/completion_provider.rs | 2 +- crates/editor/src/editor_tests.rs | 110 ++++++++++++------ crates/language/src/buffer.rs | 18 +-- crates/language/src/language.rs | 29 +++-- crates/language_core/src/grammar.rs | 7 +- crates/language_core/src/highlight_map.rs | 50 +++----- .../src/extension_lsp_adapter.rs | 6 +- .../src/highlights_tree_view.rs | 5 +- crates/languages/src/rust.rs | 38 +++--- crates/project/tests/integration/lsp_store.rs | 4 +- docs/src/extensions/languages.md | 15 +++ 11 files changed, 171 insertions(+), 113 deletions(-) diff --git a/crates/agent_ui/src/completion_provider.rs b/crates/agent_ui/src/completion_provider.rs index a72b352375ef9b219729172f0d19854287e0e7fc..47fd7b0295adbcd2ecea768c3bd9e321a5f551b9 100644 --- a/crates/agent_ui/src/completion_provider.rs +++ b/crates/agent_ui/src/completion_provider.rs @@ -2144,7 +2144,7 @@ fn build_code_label_for_path( .theme() .syntax() .highlight_id("variable") - .map(HighlightId); + .map(HighlightId::new); let mut label = CodeLabelBuilder::default(); label.push_str(file, None); diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 2afd724f5e4a7332b713e14f1e4da5ad32517f13..c29df272d35af5a69ba07c76cb7da3866786bd2b 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -52,7 +52,7 @@ use settings::{ ProjectSettingsContent, ScrollBeyondLastLine, SearchSettingsContent, SettingsContent, SettingsStore, }; -use std::borrow::Cow; +use std::{borrow::Cow, sync::Arc}; use std::{cell::RefCell, future::Future, rc::Rc, sync::atomic::AtomicBool, time::Instant}; use std::{ iter, @@ -19112,7 +19112,7 @@ async fn test_copy_highlight_json(cx: &mut TestAppContext) { let x = 1;ˇ } "}); - setup_rust_syntax_highlighting(&mut cx); + setup_syntax_highlighting(rust_lang(), &mut cx); cx.update_editor(|editor, window, cx| { editor.copy_highlight_json(&CopyHighlightJson, window, cx); @@ -19160,7 +19160,7 
@@ async fn test_copy_highlight_json_selected_range(cx: &mut TestAppContext) { let yˇ» = 2; } "}); - setup_rust_syntax_highlighting(&mut cx); + setup_syntax_highlighting(rust_lang(), &mut cx); cx.update_editor(|editor, window, cx| { editor.copy_highlight_json(&CopyHighlightJson, window, cx); @@ -19203,7 +19203,7 @@ async fn test_copy_highlight_json_selected_line_range(cx: &mut TestAppContext) { let yˇ» = 2; } "}); - setup_rust_syntax_highlighting(&mut cx); + setup_syntax_highlighting(rust_lang(), &mut cx); cx.update_editor(|editor, window, cx| { editor.selections.set_line_mode(true); @@ -19253,7 +19253,7 @@ async fn test_copy_highlight_json_single_line(cx: &mut TestAppContext) { let y = 2; } "}); - setup_rust_syntax_highlighting(&mut cx); + setup_syntax_highlighting(rust_lang(), &mut cx); cx.update_editor(|editor, window, cx| { editor.selections.set_line_mode(true); @@ -19280,34 +19280,6 @@ async fn test_copy_highlight_json_single_line(cx: &mut TestAppContext) { ); } -fn setup_rust_syntax_highlighting(cx: &mut EditorTestContext) { - let syntax = SyntaxTheme::new_test(vec![ - ("keyword", Hsla::red()), - ("function", Hsla::blue()), - ("variable", Hsla::green()), - ("number", Hsla::default()), - ("operator", Hsla::default()), - ("punctuation.bracket", Hsla::default()), - ("punctuation.delimiter", Hsla::default()), - ]); - - let language = rust_lang(); - language.set_theme(&syntax); - - cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); - cx.executor().run_until_parked(); - cx.update_editor(|editor, window, cx| { - editor.set_style( - EditorStyle { - syntax: Arc::new(syntax), - ..Default::default() - }, - window, - cx, - ); - }); -} - #[gpui::test] async fn test_following(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -35729,3 +35701,75 @@ async fn test_align_selections_multicolumn(cx: &mut TestAppContext) { cx.update_editor(|e, window, cx| e.align_selections(&AlignSelections, window, cx)); cx.assert_editor_state(after); } + +#[gpui::test] 
+async fn test_custom_fallback_highlights(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + cx.set_state(indoc! {"fn main(self, variable: TType) {ˇ}"}); + + let variable_color = Hsla::green(); + let function_color = Hsla::blue(); + + let test_cases = [ + ("@variable", Some(variable_color)), + ("@type", None), + ("@type @variable", Some(variable_color)), + ("@variable @type", Some(variable_color)), + ("@variable @function", Some(function_color)), + ("@function @variable", Some(variable_color)), + ]; + + for (test_case, expected) in test_cases { + let custom_rust_lang = Arc::into_inner(rust_lang()) + .unwrap() + .with_highlights_query(format! {r#"(type_identifier) {test_case}"#}.as_str()) + .unwrap(); + let theme = setup_syntax_highlighting(Arc::new(custom_rust_lang), &mut cx); + let expected = expected.map_or_else(Vec::new, |expected_color| { + vec![(24..29, HighlightStyle::color(expected_color))] + }); + + cx.update_editor(|editor, window, cx| { + let snapshot = editor.snapshot(window, cx); + assert_eq!( + expected, + snapshot.combined_highlights(MultiBufferOffset(0)..snapshot.buffer().len(), &theme), + "Test case with '{test_case}' highlights query did not pass", + ); + }); + } +} + +fn setup_syntax_highlighting( + language: Arc, + cx: &mut EditorTestContext, +) -> Arc { + let syntax = Arc::new(SyntaxTheme::new_test(vec![ + ("keyword", Hsla::red()), + ("function", Hsla::blue()), + ("variable", Hsla::green()), + ("number", Hsla::default()), + ("operator", Hsla::default()), + ("punctuation.bracket", Hsla::default()), + ("punctuation.delimiter", Hsla::default()), + ])); + + language.set_theme(&syntax); + + cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); + cx.executor().run_until_parked(); + cx.update_editor(|editor, window, cx| { + editor.set_style( + EditorStyle { + syntax: syntax.clone(), + ..EditorStyle::default() + }, + window, + cx, + ); + }); + + syntax +} diff --git 
a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index b2ab420312249f809599d06315e706627b76570b..a467cd789555d39a32ad4e1d7b21da7b14df9c25 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -5549,11 +5549,11 @@ impl<'a> BufferChunks<'a> { && range.start >= capture.node.start_byte() { let next_capture_end = capture.node.end_byte(); - if range.start < next_capture_end { - highlights.stack.push(( - next_capture_end, - highlights.highlight_maps[capture.grammar_index].get(capture.index), - )); + if range.start < next_capture_end + && let Some(capture_id) = + highlights.highlight_maps[capture.grammar_index].get(capture.index) + { + highlights.stack.push((next_capture_end, capture_id)); } highlights.next_capture.take(); } @@ -5688,9 +5688,11 @@ impl<'a> Iterator for BufferChunks<'a> { } else { let highlight_id = highlights.highlight_maps[capture.grammar_index].get(capture.index); - highlights - .stack - .push((capture.node.end_byte(), highlight_id)); + if let Some(highlight_id) = highlight_id { + highlights + .stack + .push((capture.node.end_byte(), highlight_id)); + } highlights.next_capture = highlights.captures.next(); } } diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 035cb3a2009241cc4ff97a7adf4c82de73166a76..43bbe7a08c73e476a41aec8af015464aa3af853d 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -1023,9 +1023,7 @@ impl Language { BufferChunks::new(text, range, Some((captures, highlight_maps)), false, None) { let end_offset = offset + chunk.text.len(); - if let Some(highlight_id) = chunk.syntax_highlight_id - && !highlight_id.is_default() - { + if let Some(highlight_id) = chunk.syntax_highlight_id { result.push((offset..end_offset, highlight_id)); } offset = end_offset; @@ -1077,11 +1075,11 @@ impl Language { #[inline] pub fn build_highlight_map(capture_names: &[&str], theme: &SyntaxTheme) -> HighlightMap { - 
HighlightMap::from_ids(capture_names.iter().map(|capture_name| { - theme - .highlight_id(capture_name) - .map_or(HighlightId::default(), HighlightId) - })) + HighlightMap::from_ids( + capture_names + .iter() + .map(|capture_name| theme.highlight_id(capture_name).map(HighlightId::new)), + ) } impl LanguageScope { @@ -1645,9 +1643,18 @@ mod tests { ]; let map = build_highlight_map(capture_names, &theme); - assert_eq!(theme.get_capture_name(map.get(0)), Some("function")); - assert_eq!(theme.get_capture_name(map.get(1)), Some("function.async")); - assert_eq!(theme.get_capture_name(map.get(2)), Some("variable.builtin")); + assert_eq!( + theme.get_capture_name(map.get(0).unwrap()), + Some("function") + ); + assert_eq!( + theme.get_capture_name(map.get(1).unwrap()), + Some("function.async") + ); + assert_eq!( + theme.get_capture_name(map.get(2).unwrap()), + Some("variable.builtin") + ); } #[gpui::test(iterations = 10)] diff --git a/crates/language_core/src/grammar.rs b/crates/language_core/src/grammar.rs index 77e3805e52415a20f5d343bff98682744a50fdc2..54e9a3f1b3309718436b206874802779925a9d04 100644 --- a/crates/language_core/src/grammar.rs +++ b/crates/language_core/src/grammar.rs @@ -275,12 +275,11 @@ impl Grammar { } pub fn highlight_id_for_name(&self, name: &str) -> Option { - let capture_id = self - .highlights_config + self.highlights_config .as_ref()? 
.query - .capture_index_for_name(name)?; - Some(self.highlight_map.lock().get(capture_id)) + .capture_index_for_name(name) + .and_then(|capture_id| self.highlight_map.lock().get(capture_id)) } pub fn debug_variables_config(&self) -> Option<&DebugVariablesConfig> { diff --git a/crates/language_core/src/highlight_map.rs b/crates/language_core/src/highlight_map.rs index 1235c7d62c72950f57de0cdad1363f49d8fbbd96..cba5cda6f7501a04966d5ce512e2fed700724d1a 100644 --- a/crates/language_core/src/highlight_map.rs +++ b/crates/language_core/src/highlight_map.rs @@ -1,35 +1,35 @@ -use std::sync::Arc; +use std::{num::NonZeroU32, sync::Arc}; #[derive(Clone, Debug)] -pub struct HighlightMap(Arc<[HighlightId]>); +pub struct HighlightMap(Arc<[Option]>); #[derive(Clone, Copy, Debug, PartialEq, Eq)] -pub struct HighlightId(pub u32); +pub struct HighlightId(NonZeroU32); -const DEFAULT_SYNTAX_HIGHLIGHT_ID: HighlightId = HighlightId(u32::MAX); +impl HighlightId { + pub const TABSTOP_INSERT_ID: HighlightId = HighlightId(NonZeroU32::new(u32::MAX - 1).unwrap()); + pub const TABSTOP_REPLACE_ID: HighlightId = HighlightId(NonZeroU32::new(u32::MAX - 2).unwrap()); -impl HighlightMap { - #[inline] - pub fn from_ids(highlight_ids: impl IntoIterator) -> Self { - Self(highlight_ids.into_iter().collect()) + pub fn new(capture_id: u32) -> Self { + Self(NonZeroU32::new(capture_id + 1).unwrap_or(NonZeroU32::MAX)) } +} - #[inline] - pub fn get(&self, capture_id: u32) -> HighlightId { - self.0 - .get(capture_id as usize) - .copied() - .unwrap_or(DEFAULT_SYNTAX_HIGHLIGHT_ID) +impl From for usize { + fn from(value: HighlightId) -> Self { + value.0.get() as usize - 1 } } -impl HighlightId { - pub const TABSTOP_INSERT_ID: HighlightId = HighlightId(u32::MAX - 1); - pub const TABSTOP_REPLACE_ID: HighlightId = HighlightId(u32::MAX - 2); +impl HighlightMap { + #[inline] + pub fn from_ids(highlight_ids: impl IntoIterator>) -> Self { + Self(highlight_ids.into_iter().collect()) + } #[inline] - pub fn 
is_default(&self) -> bool { - *self == DEFAULT_SYNTAX_HIGHLIGHT_ID + pub fn get(&self, capture_id: u32) -> Option { + self.0.get(capture_id as usize).copied().flatten() } } @@ -38,15 +38,3 @@ impl Default for HighlightMap { Self(Arc::new([])) } } - -impl Default for HighlightId { - fn default() -> Self { - DEFAULT_SYNTAX_HIGHLIGHT_ID - } -} - -impl From for usize { - fn from(value: HighlightId) -> Self { - value.0 as usize - } -} diff --git a/crates/language_extension/src/extension_lsp_adapter.rs b/crates/language_extension/src/extension_lsp_adapter.rs index 13899f11c30556db189da48ed1fcb4b5d12b2f20..3c28e07e6b306ea3a0ce644ac688f9fab8d6125f 100644 --- a/crates/language_extension/src/extension_lsp_adapter.rs +++ b/crates/language_extension/src/extension_lsp_adapter.rs @@ -684,7 +684,7 @@ fn test_build_code_label() { ); let code_runs = code_ranges .into_iter() - .map(|range| (range, HighlightId(0))) + .map(|range| (range, HighlightId::new(0))) .collect::>(); let label = build_code_label( @@ -707,7 +707,7 @@ fn test_build_code_label() { marked_text_ranges("pqrs.tuv: «fn»(«Bcd»(«Efgh»)) -> «Ijklm»", false); let label_runs = label_ranges .into_iter() - .map(|range| (range, HighlightId(0))) + .map(|range| (range, HighlightId::new(0))) .collect::>(); assert_eq!( @@ -723,7 +723,7 @@ fn test_build_code_label_with_invalid_ranges() { let (code, code_ranges) = marked_text_ranges("const «a»: «B» = '🏀'", false); let code_runs = code_ranges .into_iter() - .map(|range| (range, HighlightId(0))) + .map(|range| (range, HighlightId::new(0))) .collect::>(); // A span uses a code range that is invalid because it starts inside of diff --git a/crates/language_tools/src/highlights_tree_view.rs b/crates/language_tools/src/highlights_tree_view.rs index aec0cad5b1cf4be043ca21298995b08ceb93f3f2..763cdf76dab46a7fc1c233eda84cfb4ab50e6975 100644 --- a/crates/language_tools/src/highlights_tree_view.rs +++ b/crates/language_tools/src/highlights_tree_view.rs @@ -420,7 +420,10 @@ impl 
HighlightsTreeView { let highlight_maps: Vec<_> = grammars.iter().map(|g| g.highlight_map()).collect(); for capture in captures { - let highlight_id = highlight_maps[capture.grammar_index].get(capture.index); + let Some(highlight_id) = highlight_maps[capture.grammar_index].get(capture.index) + else { + continue; + }; let Some(style) = syntax_theme.get(highlight_id).cloned() else { continue; }; diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index 3bb8826d555308145847d47525cba9de84a6aa89..d92c1392c128ed72b6e2972bc54dcf7dfc152b1e 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -1542,10 +1542,10 @@ mod tests { "await.as_deref_mut(&mut self) -> IterMut<'_, T>".to_string(), 6..18, vec![ - (6..18, HighlightId(2)), - (20..23, HighlightId(1)), - (33..40, HighlightId(0)), - (45..46, HighlightId(0)) + (6..18, HighlightId::new(2)), + (20..23, HighlightId::new(1)), + (33..40, HighlightId::new(0)), + (45..46, HighlightId::new(0)) ], )) ); @@ -1572,12 +1572,12 @@ mod tests { "pub fn as_deref_mut(&mut self) -> IterMut<'_, T>".to_string(), 7..19, vec![ - (0..3, HighlightId(1)), - (4..6, HighlightId(1)), - (7..19, HighlightId(2)), - (21..24, HighlightId(1)), - (34..41, HighlightId(0)), - (46..47, HighlightId(0)) + (0..3, HighlightId::new(1)), + (4..6, HighlightId::new(1)), + (7..19, HighlightId::new(2)), + (21..24, HighlightId::new(1)), + (34..41, HighlightId::new(0)), + (46..47, HighlightId::new(0)) ], )) ); @@ -1598,7 +1598,7 @@ mod tests { Some(CodeLabel::new( "inner_value: String".to_string(), 6..11, - vec![(0..11, HighlightId(3)), (13..19, HighlightId(0))], + vec![(0..11, HighlightId::new(3)), (13..19, HighlightId::new(0))], )) ); @@ -1625,8 +1625,8 @@ mod tests { vec![ (10..13, HighlightId::TABSTOP_INSERT_ID), (16..19, HighlightId::TABSTOP_INSERT_ID), - (0..7, HighlightId(2)), - (7..8, HighlightId(2)), + (0..7, HighlightId::new(2)), + (7..8, HighlightId::new(2)), ], )) ); @@ -1653,8 +1653,8 @@ mod tests { 0..4, vec![ 
(5..9, HighlightId::TABSTOP_REPLACE_ID), - (0..3, HighlightId(2)), - (3..4, HighlightId(2)), + (0..3, HighlightId::new(2)), + (3..4, HighlightId::new(2)), ], )) ); @@ -1682,8 +1682,8 @@ mod tests { vec![ (7..10, HighlightId::TABSTOP_REPLACE_ID), (13..16, HighlightId::TABSTOP_INSERT_ID), - (0..2, HighlightId(1)), - (3..6, HighlightId(1)), + (0..2, HighlightId::new(1)), + (3..6, HighlightId::new(1)), ], )) ); @@ -1711,8 +1711,8 @@ mod tests { vec![ (4..8, HighlightId::TABSTOP_REPLACE_ID), (12..16, HighlightId::TABSTOP_REPLACE_ID), - (0..3, HighlightId(1)), - (9..11, HighlightId(1)), + (0..3, HighlightId::new(1)), + (9..11, HighlightId::new(1)), ], )) ); diff --git a/crates/project/tests/integration/lsp_store.rs b/crates/project/tests/integration/lsp_store.rs index 91d5ca1697255a07c0bc9bb37869d87773792297..7d266ff1365485032458d6de033b57f106602869 100644 --- a/crates/project/tests/integration/lsp_store.rs +++ b/crates/project/tests/integration/lsp_store.rs @@ -43,7 +43,7 @@ fn test_multi_len_chars_normalization() { let mut label = CodeLabel::new( "myElˇ (parameter) myElˇ: {\n foo: string;\n}".to_string(), 0..6, - vec![(0..6, HighlightId(1))], + vec![(0..6, HighlightId::new(1))], ); ensure_uniform_list_compatible_label(&mut label); assert_eq!( @@ -51,7 +51,7 @@ fn test_multi_len_chars_normalization() { CodeLabel::new( "myElˇ (parameter) myElˇ: { foo: string; }".to_string(), 0..6, - vec![(0..6, HighlightId(1))], + vec![(0..6, HighlightId::new(1))], ) ); } diff --git a/docs/src/extensions/languages.md b/docs/src/extensions/languages.md index c8e6958db683a5a3e2c9903c590f564b0ef4cb93..121357306e73552140f938197ffc466c0e489484 100644 --- a/docs/src/extensions/languages.md +++ b/docs/src/extensions/languages.md @@ -143,6 +143,21 @@ This query marks strings, object keys, and numbers for highlighting. 
The followi | @variable.parameter | Captures function/method parameters | | @variant | Captures variants | +#### Fallback captures + +A single Tree-sitter pattern can specify multiple captures on the same node to define fallback highlights. +Zed resolves them right-to-left: It first tries the rightmost capture, and if the current theme has no style for it, falls back to the next capture to the left, and so on. + +For example: + +```scheme +(type_identifier) @type @variable +``` + +Here Zed will first try to resolve `@variable` from the theme. If the theme defines a style for `@variable`, that style is used. Otherwise, Zed falls back to `@type`. + +This is useful when a language wants to provide a preferred highlight that not all themes may support, while still falling back to a more common capture that most themes define. + ### Bracket matching The `brackets.scm` file defines matching brackets. From a7248e8c006af22263d97e4eb234cb613a9bf605 Mon Sep 17 00:00:00 2001 From: Justin Su Date: Thu, 2 Apr 2026 09:09:29 -0400 Subject: [PATCH 03/63] Don't save buffers by default when running tasks (#52976) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [ ] Unsafe blocks (if any) have justifying comments - [ ] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [ ] Performance impact has been considered and is acceptable Closes #52926 Follow-up to #48861 cc @SomeoneToIgnore Release Notes: - Edited buffers are no longer saved by default before running a task, but you can still configure this using the "save" field in `tasks.json`. 
--------- Co-authored-by: Kirill Bulatov --- assets/settings/initial_tasks.json | 4 ++-- crates/task/src/task_template.rs | 2 +- docs/src/tasks.md | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/assets/settings/initial_tasks.json b/assets/settings/initial_tasks.json index 0d6f4471320e443f3c4a483f53f6901c76e7dc72..bb6c9c04ae14db8f2d01adabd8d1494caa7d7407 100644 --- a/assets/settings/initial_tasks.json +++ b/assets/settings/initial_tasks.json @@ -50,9 +50,9 @@ "show_command": true, // Which edited buffers to save before running the task: // * `all` — save all edited buffers - // * `current` — save current buffer only + // * `current` — save currently active buffer only // * `none` — don't save any buffers - "save": "all", + "save": "none", // Represents the tags for inline runnable indicators, or spawning multiple tasks at once. // "tags": [] }, diff --git a/crates/task/src/task_template.rs b/crates/task/src/task_template.rs index 2f74d84e500e5151014aa2a71686cd68ac3a87a5..25fde261f106d57eef94c4d2ef7cad57b3a7ecd0 100644 --- a/crates/task/src/task_template.rs +++ b/crates/task/src/task_template.rs @@ -127,11 +127,11 @@ pub enum HideStrategy { #[derive(Default, Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum SaveStrategy { - #[default] /// Save all edited buffers. All, /// Save the current buffer. Current, + #[default] /// Don't save any buffers. 
None, } diff --git a/docs/src/tasks.md b/docs/src/tasks.md index b4c9ba8a2abf5ce03e4a9a43fe7fc7e55f9240a4..3bbef85e9760ad036b75d50f26d3536b2e5b20f1 100644 --- a/docs/src/tasks.md +++ b/docs/src/tasks.md @@ -53,9 +53,9 @@ Zed supports ways to spawn (and rerun) commands using its integrated [terminal]( "show_command": true, // Which edited buffers to save before running the task: // * `all` — save all edited buffers - // * `current` — save current buffer only + // * `current` — save currently active buffer only // * `none` — don't save any buffers - "save": "all" + "save": "none" // Represents the tags for inline runnable indicators, or spawning multiple tasks at once. // "tags": [] } From e0b9c3afd01d30d37732467ed7d885152da1da95 Mon Sep 17 00:00:00 2001 From: Bret Comnes <166301+bcomnes@users.noreply.github.com> Date: Thu, 2 Apr 2026 06:22:35 -0700 Subject: [PATCH 04/63] Add support for `diff.plus` and `diff.minus` highlight captures (#45459) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Update diff in highlights.scm to use semantic @diff.plus and @diff.minus tokens - Add diff.plus, diff.minus tokens to fallback theme - Add diff syntax tokens to all official themes (One, Gruvbox, Ayu) This fixes the issue where diff files showed inverted colors (additions in purple/blue, deletions in green) by using proper semantic tokens instead of @string and @keyword. Related discussion in https://github.com/zed-industries/zed/issues/34057 (though there might be a second issue in that issue) Release Notes: - Added support for diff.plus, diff.minus and diff.delta theme selectors in the diff syntax grammar. Previously diff.plus was mapped to @string and diff.minus was mapped to @keyword. 
*Before:* Screenshot 2025-12-20 at 10 58 34 AM *After:* Screenshot 2025-12-20 at 10 58 30 AM *Before:* Screenshot 2025-12-20 at 10 58 16 AM *After:* Screenshot 2025-12-20 at 10 58 21 AM *Before:* Screenshot 2025-12-20 at 10 58 06 AM *After:* Screenshot 2025-12-20 at 10 56 00 AM --------- Co-authored-by: Finn Evers Co-authored-by: MrSubidubi --- assets/themes/ayu/ayu.json | 30 +++++++++++++ assets/themes/gruvbox/gruvbox.json | 60 +++++++++++++++++++++++++ assets/themes/one/one.json | 20 +++++++++ crates/grammars/src/diff/highlights.scm | 6 +-- crates/theme/src/fallback_themes.rs | 2 + 5 files changed, 114 insertions(+), 4 deletions(-) diff --git a/assets/themes/ayu/ayu.json b/assets/themes/ayu/ayu.json index 3450e35bf62d780bdaf0cff2c6bc9f8bdfea7c1e..c62f9eb11238e21bbc8c04c8fda4bbe7bfc72a81 100644 --- a/assets/themes/ayu/ayu.json +++ b/assets/themes/ayu/ayu.json @@ -391,6 +391,16 @@ "color": "#5ac1feff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#aad94cff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#f07178ff", + "font_style": null, + "font_weight": null } } } @@ -783,6 +793,16 @@ "color": "#3b9ee5ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#6cbf43ff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#ff6666ff", + "font_style": null, + "font_weight": null } } } @@ -1175,6 +1195,16 @@ "color": "#72cffeff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#aad94cff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#f07178ff", + "font_style": null, + "font_weight": null } } } diff --git a/assets/themes/gruvbox/gruvbox.json b/assets/themes/gruvbox/gruvbox.json index 16ae188712f7a800ab4fb8a81a2d24cac99da56b..a4fc1b561b36421b353187ef4a2639dae88e901d 100644 --- a/assets/themes/gruvbox/gruvbox.json +++ b/assets/themes/gruvbox/gruvbox.json @@ -406,6 +406,16 @@ "color": "#83a598ff", 
"font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#b8bb26ff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#fb4934ff", + "font_style": null, + "font_weight": null } } } @@ -813,6 +823,16 @@ "color": "#83a598ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#b8bb26ff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#fb4934ff", + "font_style": null, + "font_weight": null } } } @@ -1220,6 +1240,16 @@ "color": "#83a598ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#b8bb26ff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#fb4934ff", + "font_style": null, + "font_weight": null } } } @@ -1627,6 +1657,16 @@ "color": "#0b6678ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#79740eff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#9d0006ff", + "font_style": null, + "font_weight": null } } } @@ -2034,6 +2074,16 @@ "color": "#0b6678ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#79740eff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#9d0006ff", + "font_style": null, + "font_weight": null } } } @@ -2441,6 +2491,16 @@ "color": "#0b6678ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#79740eff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#9d0006ff", + "font_style": null, + "font_weight": null } } } diff --git a/assets/themes/one/one.json b/assets/themes/one/one.json index 05af3f5cfeec7d4a24c4fe6d684fb21d04e2d81c..409a5c705c518e83f08dcbaafc1b29c8120beae9 100644 --- a/assets/themes/one/one.json +++ b/assets/themes/one/one.json @@ -403,6 +403,16 @@ "color": "#73ade9ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#98c379ff", + "font_style": null, + "font_weight": null + }, + 
"diff.minus": { + "color": "#e06c75ff", + "font_style": null, + "font_weight": null } } } @@ -805,6 +815,16 @@ "color": "#5b79e3ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#50a14fff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#e45649ff", + "font_style": null, + "font_weight": null } } } diff --git a/crates/grammars/src/diff/highlights.scm b/crates/grammars/src/diff/highlights.scm index a2e33190f154d6a210572dbb066000dca6f30455..3c9abbe147b6554d6894d5d8d3c8bcf5d93e2edd 100644 --- a/crates/grammars/src/diff/highlights.scm +++ b/crates/grammars/src/diff/highlights.scm @@ -3,15 +3,13 @@ [ (addition) (new_file) -] @string +] @string @diff.plus -; TODO: This should eventually be `@diff.plus` with a fallback of `@string` [ (deletion) (old_file) -] @keyword +] @keyword @diff.minus -; TODO: This should eventually be `@diff.minus` with a fallback of `@keyword` (commit) @constant (location) @attribute diff --git a/crates/theme/src/fallback_themes.rs b/crates/theme/src/fallback_themes.rs index ba7f600fb05cc160f8d2668cf549853c8ae39ebe..a3831cafff68a3858a76b8e29071c88579090bcc 100644 --- a/crates/theme/src/fallback_themes.rs +++ b/crates/theme/src/fallback_themes.rs @@ -377,6 +377,8 @@ pub(crate) fn zed_default_dark() -> Theme { ("variable".into(), HighlightStyle::default()), ("variable.special".into(), red.into()), ("variant".into(), HighlightStyle::default()), + ("diff.plus".into(), green.into()), + ("diff.minus".into(), red.into()), ])), }, } From c4d700446eaa1626ec68870933a1486556b691a1 Mon Sep 17 00:00:00 2001 From: Wuji Chen Date: Thu, 2 Apr 2026 21:24:15 +0800 Subject: [PATCH 05/63] languages: Fix C/C++ preprocessor directive highlight regression (#49031) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary - PR #48109 changed the capture name for C/C++ preprocessor directives from `@keyword.directive` to `@preproc`. 
While semantically correct, the builtin themes had `preproc` defined with colors nearly indistinguishable from plain text (e.g. One Dark `#dce0e5ff`, Ayu Dark `#bfbdb6ff`), making `#include`, `#define`, etc. appear unhighlighted. - This PR updates the `preproc` color in all builtin themes (and the fallback theme) to match their respective `keyword` color, restoring visible highlighting for preprocessor directives. Fixes #49024 ## Side effects - Go uses `@preproc` for `//go:` and `// +build` compiler directives. These will also change from the previous muted gray to the keyword color. This is arguably an improvement — compiler directives are special constructs that benefit from visible highlighting, consistent with how other editors (CLion, VS Code) handle them. ## Test plan - [x] `cargo test -p language highlight_map` passes - [x] Open a C/C++ file and verify `#include`, `#define`, `#ifdef`, etc. are highlighted with the keyword color - [x] Verify across multiple builtin themes (One Dark, Ayu Dark, Gruvbox Dark, etc.) - [x] Open a Go file and verify `//go:` directives are highlighted reasonably 🤖 Generated with [Claude Code](https://claude.com/claude-code) Release Notes: - Fixed C/C++ preprocessor directives (`#include`, `#define`, etc.) appearing unhighlighted in builtin themes. 
--------- Co-authored-by: Claude Co-authored-by: MrSubidubi --- assets/themes/ayu/ayu.json | 6 +++--- assets/themes/gruvbox/gruvbox.json | 12 ++++++------ assets/themes/one/one.json | 4 ++-- crates/grammars/src/c/highlights.scm | 2 +- crates/grammars/src/cpp/highlights.scm | 2 +- crates/theme/src/fallback_themes.rs | 2 +- 6 files changed, 14 insertions(+), 14 deletions(-) diff --git a/assets/themes/ayu/ayu.json b/assets/themes/ayu/ayu.json index c62f9eb11238e21bbc8c04c8fda4bbe7bfc72a81..f27566c4f72cac3938a752c64d95d0500c595306 100644 --- a/assets/themes/ayu/ayu.json +++ b/assets/themes/ayu/ayu.json @@ -283,7 +283,7 @@ "font_weight": null }, "preproc": { - "color": "#bfbdb6ff", + "color": "#ff8f3fff", "font_style": null, "font_weight": null }, @@ -685,7 +685,7 @@ "font_weight": null }, "preproc": { - "color": "#5c6166ff", + "color": "#fa8d3eff", "font_style": null, "font_weight": null }, @@ -1087,7 +1087,7 @@ "font_weight": null }, "preproc": { - "color": "#cccac2ff", + "color": "#ffad65ff", "font_style": null, "font_weight": null }, diff --git a/assets/themes/gruvbox/gruvbox.json b/assets/themes/gruvbox/gruvbox.json index a4fc1b561b36421b353187ef4a2639dae88e901d..4330df54fccae55e7ca077c0da9a891ee71ebe3a 100644 --- a/assets/themes/gruvbox/gruvbox.json +++ b/assets/themes/gruvbox/gruvbox.json @@ -293,7 +293,7 @@ "font_weight": null }, "preproc": { - "color": "#fbf1c7ff", + "color": "#fb4833ff", "font_style": null, "font_weight": null }, @@ -710,7 +710,7 @@ "font_weight": null }, "preproc": { - "color": "#fbf1c7ff", + "color": "#fb4833ff", "font_style": null, "font_weight": null }, @@ -1127,7 +1127,7 @@ "font_weight": null }, "preproc": { - "color": "#fbf1c7ff", + "color": "#fb4833ff", "font_style": null, "font_weight": null }, @@ -1544,7 +1544,7 @@ "font_weight": null }, "preproc": { - "color": "#282828ff", + "color": "#9d0006ff", "font_style": null, "font_weight": null }, @@ -1961,7 +1961,7 @@ "font_weight": null }, "preproc": { - "color": "#282828ff", + "color": 
"#9d0006ff", "font_style": null, "font_weight": null }, @@ -2378,7 +2378,7 @@ "font_weight": null }, "preproc": { - "color": "#282828ff", + "color": "#9d0006ff", "font_style": null, "font_weight": null }, diff --git a/assets/themes/one/one.json b/assets/themes/one/one.json index 409a5c705c518e83f08dcbaafc1b29c8120beae9..e60b6314b9595ac02bd6a43be4580ba9331ae769 100644 --- a/assets/themes/one/one.json +++ b/assets/themes/one/one.json @@ -290,7 +290,7 @@ "font_weight": null }, "preproc": { - "color": "#dce0e5ff", + "color": "#b477cfff", "font_style": null, "font_weight": null }, @@ -702,7 +702,7 @@ "font_weight": null }, "preproc": { - "color": "#242529ff", + "color": "#a449abff", "font_style": null, "font_weight": null }, diff --git a/crates/grammars/src/c/highlights.scm b/crates/grammars/src/c/highlights.scm index dc5a3bd99937eb3cd1a3af6efb7124aebc4008f1..b73c8e80b8acb61cc0cf47ed6585202eb73f4a7b 100644 --- a/crates/grammars/src/c/highlights.scm +++ b/crates/grammars/src/c/highlights.scm @@ -38,7 +38,7 @@ "#ifndef" "#include" (preproc_directive) -] @preproc +] @keyword.preproc @preproc [ "=" diff --git a/crates/grammars/src/cpp/highlights.scm b/crates/grammars/src/cpp/highlights.scm index e074707d05dec638a1be9ea840c31f47537c438a..281da4215c8269172816c6f37a5e6e866c04a140 100644 --- a/crates/grammars/src/cpp/highlights.scm +++ b/crates/grammars/src/cpp/highlights.scm @@ -196,7 +196,7 @@ type: (primitive_type) @type.builtin "#ifndef" "#include" (preproc_directive) -] @preproc +] @keyword.preproc @preproc (comment) @comment diff --git a/crates/theme/src/fallback_themes.rs b/crates/theme/src/fallback_themes.rs index a3831cafff68a3858a76b8e29071c88579090bcc..a739df3213d297ce8230cfb62a08c91928bd62df 100644 --- a/crates/theme/src/fallback_themes.rs +++ b/crates/theme/src/fallback_themes.rs @@ -357,7 +357,7 @@ pub(crate) fn zed_default_dark() -> Theme { ("number".into(), orange.into()), ("operator".into(), HighlightStyle::default()), ("predictive".into(), 
HighlightStyle::default()), - ("preproc".into(), HighlightStyle::default()), + ("preproc".into(), purple.into()), ("primary".into(), HighlightStyle::default()), ("property".into(), red.into()), ("punctuation".into(), HighlightStyle::default()), From f8d646794d64577c7f75279a06491f7bc2b7d918 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Thu, 2 Apr 2026 16:53:37 +0300 Subject: [PATCH 06/63] Fixup and test edit prediction mocks (#52991) Follow-up of https://github.com/zed-industries/zed/pull/40185 Release Notes: - N/A --- crates/editor/src/edit_prediction_tests.rs | 40 ++++++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/crates/editor/src/edit_prediction_tests.rs b/crates/editor/src/edit_prediction_tests.rs index d1e326bc93b8052f3ae089c211e65eb3ef020fdf..987801471e5602f256ce2dd65edd57873c878027 100644 --- a/crates/editor/src/edit_prediction_tests.rs +++ b/crates/editor/src/edit_prediction_tests.rs @@ -1081,6 +1081,44 @@ async fn test_cancel_clears_stale_edit_prediction_in_menu(cx: &mut gpui::TestApp }); } +#[gpui::test] +async fn test_discard_clears_delegate_completion(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + load_default_keymap(cx); + + let mut cx = EditorTestContext::new(cx).await; + let provider = cx.new(|_| FakeEditPredictionDelegate::default()); + assign_editor_completion_provider(provider.clone(), &mut cx); + cx.set_state("let x = ˇ;"); + + propose_edits(&provider, vec![(8..8, "42")], &mut cx); + cx.update_editor(|editor, window, cx| editor.update_visible_edit_prediction(window, cx)); + + cx.update_editor(|editor, _window, _cx| { + assert!(editor.active_edit_prediction.is_some()); + }); + + // Dismiss the prediction — this must call discard() on the delegate, + // which should clear self.completion. 
+ cx.simulate_keystroke("escape"); + cx.run_until_parked(); + + cx.update_editor(|editor, _window, _cx| { + assert!(editor.active_edit_prediction.is_none()); + }); + + // update_visible_edit_prediction must NOT bring the prediction back, + // because discard() cleared self.completion in the delegate. + cx.update_editor(|editor, window, cx| editor.update_visible_edit_prediction(window, cx)); + + cx.update_editor(|editor, _window, _cx| { + assert!( + editor.active_edit_prediction.is_none(), + "prediction must not resurface after discard()" + ); + }); +} + fn accept_completion(cx: &mut EditorTestContext) { cx.update_editor(|editor, window, cx| { editor.accept_edit_prediction(&crate::AcceptEditPrediction, window, cx) @@ -1350,6 +1388,7 @@ impl EditPredictionDelegate for FakeEditPredictionDelegate { _reason: edit_prediction_types::EditPredictionDiscardReason, _cx: &mut gpui::Context, ) { + self.completion.take(); } fn suggest<'a>( @@ -1426,6 +1465,7 @@ impl EditPredictionDelegate for FakeNonZedEditPredictionDelegate { _reason: edit_prediction_types::EditPredictionDiscardReason, _cx: &mut gpui::Context, ) { + self.completion.take(); } fn suggest<'a>( From 66ea4b89af5ea8112b94d0c95bf751009eceded3 Mon Sep 17 00:00:00 2001 From: Oleksiy Syvokon Date: Thu, 2 Apr 2026 17:00:15 +0300 Subject: [PATCH 07/63] ep: Move pure diffing functions to zeta_prompt:udiff (#52959) This PR mostly moves some code around. It also adds a high-level `format_expected_output` function that routes patch formatting to specific prompt formats. This way `zeta_prompt` can format `expected_output` for training. Keeping everything prompt-related in a "pure" module (with no heavy dependencies) makes it easier to write bindings. 
Release Notes: - N/A --- crates/edit_prediction/src/udiff.rs | 1151 +--------------- .../edit_prediction_cli/src/format_prompt.rs | 2 +- .../src/reversal_tracking.rs | 4 +- crates/edit_prediction_cli/src/score.rs | 2 +- crates/zeta_prompt/src/udiff.rs | 1206 +++++++++++++++++ crates/zeta_prompt/src/zeta_prompt.rs | 108 ++ 6 files changed, 1327 insertions(+), 1146 deletions(-) create mode 100644 crates/zeta_prompt/src/udiff.rs diff --git a/crates/edit_prediction/src/udiff.rs b/crates/edit_prediction/src/udiff.rs index 407dc4fc7239fb1974ef8bc5be4b3a99cd31f187..b2468755a8979f28635aa5e91cacf1490dc1ccd8 100644 --- a/crates/edit_prediction/src/udiff.rs +++ b/crates/edit_prediction/src/udiff.rs @@ -1,11 +1,4 @@ -use std::{ - borrow::Cow, - fmt::{Debug, Display, Write}, - mem, - ops::Range, - path::{Path, PathBuf}, - sync::Arc, -}; +use std::{mem, ops::Range, path::Path, path::PathBuf, sync::Arc}; use anyhow::{Context as _, Result, anyhow}; use collections::{HashMap, hash_map::Entry}; @@ -15,6 +8,14 @@ use postage::stream::Stream as _; use project::Project; use util::{paths::PathStyle, rel_path::RelPath}; use worktree::Worktree; +use zeta_prompt::udiff::{ + DiffEvent, DiffParser, FileStatus, Hunk, disambiguate_by_line_number, find_context_candidates, +}; + +pub use zeta_prompt::udiff::{ + DiffLine, HunkLocation, apply_diff_to_string, apply_diff_to_string_with_hunk_offset, + strip_diff_metadata, strip_diff_path_prefix, +}; #[derive(Clone, Debug)] pub struct OpenedBuffers(HashMap>); @@ -189,209 +190,6 @@ pub async fn refresh_worktree_entries( Ok(()) } -pub fn strip_diff_path_prefix<'a>(diff: &'a str, prefix: &str) -> Cow<'a, str> { - if prefix.is_empty() { - return Cow::Borrowed(diff); - } - - let prefix_with_slash = format!("{}/", prefix); - let mut needs_rewrite = false; - - for line in diff.lines() { - match DiffLine::parse(line) { - DiffLine::OldPath { path } | DiffLine::NewPath { path } => { - if path.starts_with(&prefix_with_slash) { - needs_rewrite = true; - break; - 
} - } - _ => {} - } - } - - if !needs_rewrite { - return Cow::Borrowed(diff); - } - - let mut result = String::with_capacity(diff.len()); - for line in diff.lines() { - match DiffLine::parse(line) { - DiffLine::OldPath { path } => { - let stripped = path - .strip_prefix(&prefix_with_slash) - .unwrap_or(path.as_ref()); - result.push_str(&format!("--- a/{}\n", stripped)); - } - DiffLine::NewPath { path } => { - let stripped = path - .strip_prefix(&prefix_with_slash) - .unwrap_or(path.as_ref()); - result.push_str(&format!("+++ b/{}\n", stripped)); - } - _ => { - result.push_str(line); - result.push('\n'); - } - } - } - - Cow::Owned(result) -} -/// Strip unnecessary git metadata lines from a diff, keeping only the lines -/// needed for patch application: path headers (--- and +++), hunk headers (@@), -/// and content lines (+, -, space). -pub fn strip_diff_metadata(diff: &str) -> String { - let mut result = String::new(); - - for line in diff.lines() { - let dominated = DiffLine::parse(line); - match dominated { - // Keep path headers, hunk headers, and content lines - DiffLine::OldPath { .. } - | DiffLine::NewPath { .. } - | DiffLine::HunkHeader(_) - | DiffLine::Context(_) - | DiffLine::Deletion(_) - | DiffLine::Addition(_) - | DiffLine::NoNewlineAtEOF => { - result.push_str(line); - result.push('\n'); - } - // Skip garbage lines (diff --git, index, etc.) - DiffLine::Garbage(_) => {} - } - } - - result -} - -/// Find all byte offsets where `hunk.context` occurs as a substring of `text`. -/// -/// If no exact matches are found and the context ends with `'\n'` but `text` -/// does not, retries without the trailing newline, accepting only a match at -/// the very end of `text`. When this fallback fires, the hunk's context is -/// trimmed and its edit ranges are clamped so that downstream code doesn't -/// index past the end of the matched region. 
This handles diffs that are -/// missing a `\ No newline at end of file` marker: the parser always appends -/// `'\n'` via `writeln!`, so the context can have a trailing newline that -/// doesn't exist in the source text. -fn find_context_candidates(text: &str, hunk: &mut Hunk) -> Vec { - let candidates: Vec = text - .match_indices(&hunk.context) - .map(|(offset, _)| offset) - .collect(); - - if !candidates.is_empty() { - return candidates; - } - - if hunk.context.ends_with('\n') && !hunk.context.is_empty() { - let old_len = hunk.context.len(); - hunk.context.pop(); - let new_len = hunk.context.len(); - - if !hunk.context.is_empty() { - let candidates: Vec = text - .match_indices(&hunk.context) - .filter(|(offset, _)| offset + new_len == text.len()) - .map(|(offset, _)| offset) - .collect(); - - if !candidates.is_empty() { - for edit in &mut hunk.edits { - let touched_phantom = edit.range.end > new_len; - edit.range.start = edit.range.start.min(new_len); - edit.range.end = edit.range.end.min(new_len); - if touched_phantom { - // The replacement text was also written with a - // trailing '\n' that corresponds to the phantom - // newline we just removed from the context. - if edit.text.ends_with('\n') { - edit.text.pop(); - } - } - } - return candidates; - } - - // Restore if fallback didn't help either. - hunk.context.push('\n'); - debug_assert_eq!(hunk.context.len(), old_len); - } else { - hunk.context.push('\n'); - } - } - - Vec::new() -} - -/// Given multiple candidate offsets where context matches, use line numbers to disambiguate. -/// Returns the offset that matches the expected line, or None if no match or no line number available. 
-fn disambiguate_by_line_number( - candidates: &[usize], - expected_line: Option, - offset_to_line: &dyn Fn(usize) -> u32, -) -> Option { - match candidates.len() { - 0 => None, - 1 => Some(candidates[0]), - _ => { - let expected = expected_line?; - candidates - .iter() - .copied() - .find(|&offset| offset_to_line(offset) == expected) - } - } -} - -pub fn apply_diff_to_string(diff_str: &str, text: &str) -> Result { - apply_diff_to_string_with_hunk_offset(diff_str, text).map(|(text, _)| text) -} - -/// Applies a diff to a string and returns the result along with the offset where -/// the first hunk's context matched in the original text. This offset can be used -/// to adjust cursor positions that are relative to the hunk's content. -pub fn apply_diff_to_string_with_hunk_offset( - diff_str: &str, - text: &str, -) -> Result<(String, Option)> { - let mut diff = DiffParser::new(diff_str); - - let mut text = text.to_string(); - let mut first_hunk_offset = None; - - while let Some(event) = diff.next().context("Failed to parse diff")? { - match event { - DiffEvent::Hunk { - mut hunk, - path: _, - status: _, - } => { - let candidates = find_context_candidates(&text, &mut hunk); - - let hunk_offset = - disambiguate_by_line_number(&candidates, hunk.start_line, &|offset| { - text[..offset].matches('\n').count() as u32 - }) - .ok_or_else(|| anyhow!("couldn't resolve hunk"))?; - - if first_hunk_offset.is_none() { - first_hunk_offset = Some(hunk_offset); - } - - for edit in hunk.edits.iter().rev() { - let range = (hunk_offset + edit.range.start)..(hunk_offset + edit.range.end); - text.replace_range(range, &edit.text); - } - } - DiffEvent::FileEnd { .. } => {} - } - } - - Ok((text, first_hunk_offset)) -} - /// Returns the individual edits that would be applied by a diff to the given content. /// Each edit is a tuple of (byte_range_in_content, replacement_text). /// Uses sub-line diffing to find the precise character positions of changes. 
@@ -440,227 +238,6 @@ pub fn edits_for_diff(content: &str, diff_str: &str) -> Result Ok(result) } -struct PatchFile<'a> { - old_path: Cow<'a, str>, - new_path: Cow<'a, str>, -} - -struct DiffParser<'a> { - current_file: Option>, - current_line: Option<(&'a str, DiffLine<'a>)>, - hunk: Hunk, - diff: std::str::Lines<'a>, - pending_start_line: Option, - processed_no_newline: bool, - last_diff_op: LastDiffOp, -} - -#[derive(Clone, Copy, Default)] -enum LastDiffOp { - #[default] - None, - Context, - Deletion, - Addition, -} - -#[derive(Debug, PartialEq)] -enum DiffEvent<'a> { - Hunk { - path: Cow<'a, str>, - hunk: Hunk, - status: FileStatus, - }, - FileEnd { - renamed_to: Option>, - }, -} - -#[derive(Debug, Clone, Copy, PartialEq)] -enum FileStatus { - Created, - Modified, - Deleted, -} - -#[derive(Debug, Default, PartialEq)] -struct Hunk { - context: String, - edits: Vec, - start_line: Option, -} - -impl Hunk { - fn is_empty(&self) -> bool { - self.context.is_empty() && self.edits.is_empty() - } -} - -#[derive(Debug, PartialEq)] -struct Edit { - range: Range, - text: String, -} - -impl<'a> DiffParser<'a> { - fn new(diff: &'a str) -> Self { - let mut diff = diff.lines(); - let current_line = diff.next().map(|line| (line, DiffLine::parse(line))); - DiffParser { - current_file: None, - hunk: Hunk::default(), - current_line, - diff, - pending_start_line: None, - processed_no_newline: false, - last_diff_op: LastDiffOp::None, - } - } - - fn next(&mut self) -> Result>> { - loop { - let (hunk_done, file_done) = match self.current_line.as_ref().map(|e| &e.1) { - Some(DiffLine::OldPath { .. 
}) | Some(DiffLine::Garbage(_)) | None => (true, true), - Some(DiffLine::HunkHeader(_)) => (true, false), - _ => (false, false), - }; - - if hunk_done { - if let Some(file) = &self.current_file - && !self.hunk.is_empty() - { - let status = if file.old_path == "/dev/null" { - FileStatus::Created - } else if file.new_path == "/dev/null" { - FileStatus::Deleted - } else { - FileStatus::Modified - }; - let path = if status == FileStatus::Created { - file.new_path.clone() - } else { - file.old_path.clone() - }; - let mut hunk = mem::take(&mut self.hunk); - hunk.start_line = self.pending_start_line.take(); - self.processed_no_newline = false; - self.last_diff_op = LastDiffOp::None; - return Ok(Some(DiffEvent::Hunk { path, hunk, status })); - } - } - - if file_done { - if let Some(PatchFile { old_path, new_path }) = self.current_file.take() { - return Ok(Some(DiffEvent::FileEnd { - renamed_to: if old_path != new_path && old_path != "/dev/null" { - Some(new_path) - } else { - None - }, - })); - } - } - - let Some((line, parsed_line)) = self.current_line.take() else { - break; - }; - - util::maybe!({ - match parsed_line { - DiffLine::OldPath { path } => { - self.current_file = Some(PatchFile { - old_path: path, - new_path: "".into(), - }); - } - DiffLine::NewPath { path } => { - if let Some(current_file) = &mut self.current_file { - current_file.new_path = path - } - } - DiffLine::HunkHeader(location) => { - if let Some(loc) = location { - self.pending_start_line = Some(loc.start_line_old); - } - } - DiffLine::Context(ctx) => { - if self.current_file.is_some() { - writeln!(&mut self.hunk.context, "{ctx}")?; - self.last_diff_op = LastDiffOp::Context; - } - } - DiffLine::Deletion(del) => { - if self.current_file.is_some() { - let range = self.hunk.context.len() - ..self.hunk.context.len() + del.len() + '\n'.len_utf8(); - if let Some(last_edit) = self.hunk.edits.last_mut() - && last_edit.range.end == range.start - { - last_edit.range.end = range.end; - } else { - 
self.hunk.edits.push(Edit { - range, - text: String::new(), - }); - } - writeln!(&mut self.hunk.context, "{del}")?; - self.last_diff_op = LastDiffOp::Deletion; - } - } - DiffLine::Addition(add) => { - if self.current_file.is_some() { - let range = self.hunk.context.len()..self.hunk.context.len(); - if let Some(last_edit) = self.hunk.edits.last_mut() - && last_edit.range.end == range.start - { - writeln!(&mut last_edit.text, "{add}").unwrap(); - } else { - self.hunk.edits.push(Edit { - range, - text: format!("{add}\n"), - }); - } - self.last_diff_op = LastDiffOp::Addition; - } - } - DiffLine::NoNewlineAtEOF => { - if !self.processed_no_newline { - self.processed_no_newline = true; - match self.last_diff_op { - LastDiffOp::Addition => { - // Remove trailing newline from the last addition - if let Some(last_edit) = self.hunk.edits.last_mut() { - last_edit.text.pop(); - } - } - LastDiffOp::Deletion => { - // Remove trailing newline from context (which includes the deletion) - self.hunk.context.pop(); - if let Some(last_edit) = self.hunk.edits.last_mut() { - last_edit.range.end -= 1; - } - } - LastDiffOp::Context | LastDiffOp::None => { - // Remove trailing newline from context - self.hunk.context.pop(); - } - } - } - } - DiffLine::Garbage(_) => {} - } - - anyhow::Ok(()) - }) - .with_context(|| format!("on line:\n\n```\n{}```", line))?; - - self.current_line = self.diff.next().map(|line| (line, DiffLine::parse(line))); - } - - anyhow::Ok(None) - } -} - fn resolve_hunk_edits_in_buffer( mut hunk: Hunk, buffer: &TextBufferSnapshot, @@ -713,144 +290,6 @@ fn resolve_hunk_edits_in_buffer( Ok(iter) } -#[derive(Debug, PartialEq)] -pub enum DiffLine<'a> { - OldPath { path: Cow<'a, str> }, - NewPath { path: Cow<'a, str> }, - HunkHeader(Option), - Context(&'a str), - Deletion(&'a str), - Addition(&'a str), - NoNewlineAtEOF, - Garbage(&'a str), -} - -#[derive(Debug, PartialEq)] -pub struct HunkLocation { - pub start_line_old: u32, - count_old: u32, - pub start_line_new: u32, - 
count_new: u32, -} - -impl<'a> DiffLine<'a> { - pub fn parse(line: &'a str) -> Self { - Self::try_parse(line).unwrap_or(Self::Garbage(line)) - } - - fn try_parse(line: &'a str) -> Option { - if line.starts_with("\\ No newline") { - return Some(Self::NoNewlineAtEOF); - } - if let Some(header) = line.strip_prefix("---").and_then(eat_required_whitespace) { - let path = parse_header_path("a/", header); - Some(Self::OldPath { path }) - } else if let Some(header) = line.strip_prefix("+++").and_then(eat_required_whitespace) { - Some(Self::NewPath { - path: parse_header_path("b/", header), - }) - } else if let Some(header) = line.strip_prefix("@@").and_then(eat_required_whitespace) { - if header.starts_with("...") { - return Some(Self::HunkHeader(None)); - } - - let mut tokens = header.split_whitespace(); - let old_range = tokens.next()?.strip_prefix('-')?; - let new_range = tokens.next()?.strip_prefix('+')?; - - let (start_line_old, count_old) = old_range.split_once(',').unwrap_or((old_range, "1")); - let (start_line_new, count_new) = new_range.split_once(',').unwrap_or((new_range, "1")); - - Some(Self::HunkHeader(Some(HunkLocation { - start_line_old: start_line_old.parse::().ok()?.saturating_sub(1), - count_old: count_old.parse().ok()?, - start_line_new: start_line_new.parse::().ok()?.saturating_sub(1), - count_new: count_new.parse().ok()?, - }))) - } else if let Some(deleted_header) = line.strip_prefix("-") { - Some(Self::Deletion(deleted_header)) - } else if line.is_empty() { - Some(Self::Context("")) - } else if let Some(context) = line.strip_prefix(" ") { - Some(Self::Context(context)) - } else { - Some(Self::Addition(line.strip_prefix("+")?)) - } - } -} - -impl<'a> Display for DiffLine<'a> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - DiffLine::OldPath { path } => write!(f, "--- {path}"), - DiffLine::NewPath { path } => write!(f, "+++ {path}"), - DiffLine::HunkHeader(Some(hunk_location)) => { - write!( - f, - "@@ -{},{} 
+{},{} @@", - hunk_location.start_line_old + 1, - hunk_location.count_old, - hunk_location.start_line_new + 1, - hunk_location.count_new - ) - } - DiffLine::HunkHeader(None) => write!(f, "@@ ... @@"), - DiffLine::Context(content) => write!(f, " {content}"), - DiffLine::Deletion(content) => write!(f, "-{content}"), - DiffLine::Addition(content) => write!(f, "+{content}"), - DiffLine::NoNewlineAtEOF => write!(f, "\\ No newline at end of file"), - DiffLine::Garbage(line) => write!(f, "{line}"), - } - } -} - -fn parse_header_path<'a>(strip_prefix: &'static str, header: &'a str) -> Cow<'a, str> { - if !header.contains(['"', '\\']) { - let path = header.split_ascii_whitespace().next().unwrap_or(header); - return Cow::Borrowed(path.strip_prefix(strip_prefix).unwrap_or(path)); - } - - let mut path = String::with_capacity(header.len()); - let mut in_quote = false; - let mut chars = header.chars().peekable(); - let mut strip_prefix = Some(strip_prefix); - - while let Some(char) = chars.next() { - if char == '"' { - in_quote = !in_quote; - } else if char == '\\' { - let Some(&next_char) = chars.peek() else { - break; - }; - chars.next(); - path.push(next_char); - } else if char.is_ascii_whitespace() && !in_quote { - break; - } else { - path.push(char); - } - - if let Some(prefix) = strip_prefix - && path == prefix - { - strip_prefix.take(); - path.clear(); - } - } - - Cow::Owned(path) -} - -fn eat_required_whitespace(header: &str) -> Option<&str> { - let trimmed = header.trim_ascii_start(); - - if trimmed.len() == header.len() { - None - } else { - Some(trimmed) - } -} - #[cfg(test)] mod tests { use super::*; @@ -862,387 +301,6 @@ mod tests { use settings::SettingsStore; use util::path; - #[test] - fn parse_lines_simple() { - let input = indoc! 
{" - diff --git a/text.txt b/text.txt - index 86c770d..a1fd855 100644 - --- a/file.txt - +++ b/file.txt - @@ -1,2 +1,3 @@ - context - -deleted - +inserted - garbage - - --- b/file.txt - +++ a/file.txt - "}; - - let lines = input.lines().map(DiffLine::parse).collect::>(); - - pretty_assertions::assert_eq!( - lines, - &[ - DiffLine::Garbage("diff --git a/text.txt b/text.txt"), - DiffLine::Garbage("index 86c770d..a1fd855 100644"), - DiffLine::OldPath { - path: "file.txt".into() - }, - DiffLine::NewPath { - path: "file.txt".into() - }, - DiffLine::HunkHeader(Some(HunkLocation { - start_line_old: 0, - count_old: 2, - start_line_new: 0, - count_new: 3 - })), - DiffLine::Context("context"), - DiffLine::Deletion("deleted"), - DiffLine::Addition("inserted"), - DiffLine::Garbage("garbage"), - DiffLine::Context(""), - DiffLine::OldPath { - path: "b/file.txt".into() - }, - DiffLine::NewPath { - path: "a/file.txt".into() - }, - ] - ); - } - - #[test] - fn file_header_extra_space() { - let options = ["--- file", "--- file", "---\tfile"]; - - for option in options { - pretty_assertions::assert_eq!( - DiffLine::parse(option), - DiffLine::OldPath { - path: "file".into() - }, - "{option}", - ); - } - } - - #[test] - fn hunk_header_extra_space() { - let options = [ - "@@ -1,2 +1,3 @@", - "@@ -1,2 +1,3 @@", - "@@\t-1,2\t+1,3\t@@", - "@@ -1,2 +1,3 @@", - "@@ -1,2 +1,3 @@", - "@@ -1,2 +1,3 @@", - "@@ -1,2 +1,3 @@ garbage", - ]; - - for option in options { - pretty_assertions::assert_eq!( - DiffLine::parse(option), - DiffLine::HunkHeader(Some(HunkLocation { - start_line_old: 0, - count_old: 2, - start_line_new: 0, - count_new: 3 - })), - "{option}", - ); - } - } - - #[test] - fn hunk_header_without_location() { - pretty_assertions::assert_eq!(DiffLine::parse("@@ ... 
@@"), DiffLine::HunkHeader(None)); - } - - #[test] - fn test_parse_path() { - assert_eq!(parse_header_path("a/", "foo.txt"), "foo.txt"); - assert_eq!( - parse_header_path("a/", "foo/bar/baz.txt"), - "foo/bar/baz.txt" - ); - assert_eq!(parse_header_path("a/", "a/foo.txt"), "foo.txt"); - assert_eq!( - parse_header_path("a/", "a/foo/bar/baz.txt"), - "foo/bar/baz.txt" - ); - - // Extra - assert_eq!( - parse_header_path("a/", "a/foo/bar/baz.txt 2025"), - "foo/bar/baz.txt" - ); - assert_eq!( - parse_header_path("a/", "a/foo/bar/baz.txt\t2025"), - "foo/bar/baz.txt" - ); - assert_eq!( - parse_header_path("a/", "a/foo/bar/baz.txt \""), - "foo/bar/baz.txt" - ); - - // Quoted - assert_eq!( - parse_header_path("a/", "a/foo/bar/\"baz quox.txt\""), - "foo/bar/baz quox.txt" - ); - assert_eq!( - parse_header_path("a/", "\"a/foo/bar/baz quox.txt\""), - "foo/bar/baz quox.txt" - ); - assert_eq!( - parse_header_path("a/", "\"foo/bar/baz quox.txt\""), - "foo/bar/baz quox.txt" - ); - assert_eq!(parse_header_path("a/", "\"whatever 🤷\""), "whatever 🤷"); - assert_eq!( - parse_header_path("a/", "\"foo/bar/baz quox.txt\" 2025"), - "foo/bar/baz quox.txt" - ); - // unescaped quotes are dropped - assert_eq!(parse_header_path("a/", "foo/\"bar\""), "foo/bar"); - - // Escaped - assert_eq!( - parse_header_path("a/", "\"foo/\\\"bar\\\"/baz.txt\""), - "foo/\"bar\"/baz.txt" - ); - assert_eq!( - parse_header_path("a/", "\"C:\\\\Projects\\\\My App\\\\old file.txt\""), - "C:\\Projects\\My App\\old file.txt" - ); - } - - #[test] - fn test_parse_diff_with_leading_and_trailing_garbage() { - let diff = indoc! {" - I need to make some changes. - - I'll change the following things: - - one - - two - - three - - ``` - --- a/file.txt - +++ b/file.txt - one - +AND - two - ``` - - Summary of what I did: - - one - - two - - three - - That's about it. 
- "}; - - let mut events = Vec::new(); - let mut parser = DiffParser::new(diff); - while let Some(event) = parser.next().unwrap() { - events.push(event); - } - - assert_eq!( - events, - &[ - DiffEvent::Hunk { - path: "file.txt".into(), - hunk: Hunk { - context: "one\ntwo\n".into(), - edits: vec![Edit { - range: 4..4, - text: "AND\n".into() - }], - start_line: None, - }, - status: FileStatus::Modified, - }, - DiffEvent::FileEnd { renamed_to: None } - ], - ) - } - - #[test] - fn test_no_newline_at_eof() { - let diff = indoc! {" - --- a/file.py - +++ b/file.py - @@ -55,7 +55,3 @@ class CustomDataset(Dataset): - torch.set_rng_state(state) - mask = self.transform(mask) - - - if self.mode == 'Training': - - return (img, mask, name) - - else: - - return (img, mask, name) - \\ No newline at end of file - "}; - - let mut events = Vec::new(); - let mut parser = DiffParser::new(diff); - while let Some(event) = parser.next().unwrap() { - events.push(event); - } - - assert_eq!( - events, - &[ - DiffEvent::Hunk { - path: "file.py".into(), - hunk: Hunk { - context: concat!( - " torch.set_rng_state(state)\n", - " mask = self.transform(mask)\n", - "\n", - " if self.mode == 'Training':\n", - " return (img, mask, name)\n", - " else:\n", - " return (img, mask, name)", - ) - .into(), - edits: vec![Edit { - range: 80..203, - text: "".into() - }], - start_line: Some(54), // @@ -55,7 -> line 54 (0-indexed) - }, - status: FileStatus::Modified, - }, - DiffEvent::FileEnd { renamed_to: None } - ], - ); - } - - #[test] - fn test_no_newline_at_eof_addition() { - let diff = indoc! 
{" - --- a/file.txt - +++ b/file.txt - @@ -1,2 +1,3 @@ - context - -deleted - +added line - \\ No newline at end of file - "}; - - let mut events = Vec::new(); - let mut parser = DiffParser::new(diff); - while let Some(event) = parser.next().unwrap() { - events.push(event); - } - - assert_eq!( - events, - &[ - DiffEvent::Hunk { - path: "file.txt".into(), - hunk: Hunk { - context: "context\ndeleted\n".into(), - edits: vec![Edit { - range: 8..16, - text: "added line".into() - }], - start_line: Some(0), // @@ -1,2 -> line 0 (0-indexed) - }, - status: FileStatus::Modified, - }, - DiffEvent::FileEnd { renamed_to: None } - ], - ); - } - - #[test] - fn test_double_no_newline_at_eof() { - // Two consecutive "no newline" markers - the second should be ignored - let diff = indoc! {" - --- a/file.txt - +++ b/file.txt - @@ -1,3 +1,3 @@ - line1 - -old - +new - line3 - \\ No newline at end of file - \\ No newline at end of file - "}; - - let mut events = Vec::new(); - let mut parser = DiffParser::new(diff); - while let Some(event) = parser.next().unwrap() { - events.push(event); - } - - assert_eq!( - events, - &[ - DiffEvent::Hunk { - path: "file.txt".into(), - hunk: Hunk { - context: "line1\nold\nline3".into(), // Only one newline removed - edits: vec![Edit { - range: 6..10, // "old\n" is 4 bytes - text: "new\n".into() - }], - start_line: Some(0), - }, - status: FileStatus::Modified, - }, - DiffEvent::FileEnd { renamed_to: None } - ], - ); - } - - #[test] - fn test_no_newline_after_context_not_addition() { - // "No newline" after context lines should remove newline from context, - // not from an earlier addition - let diff = indoc! 
{" - --- a/file.txt - +++ b/file.txt - @@ -1,4 +1,4 @@ - line1 - -old - +new - line3 - line4 - \\ No newline at end of file - "}; - - let mut events = Vec::new(); - let mut parser = DiffParser::new(diff); - while let Some(event) = parser.next().unwrap() { - events.push(event); - } - - assert_eq!( - events, - &[ - DiffEvent::Hunk { - path: "file.txt".into(), - hunk: Hunk { - // newline removed from line4 (context), not from "new" (addition) - context: "line1\nold\nline3\nline4".into(), - edits: vec![Edit { - range: 6..10, // "old\n" is 4 bytes - text: "new\n".into() // Still has newline - }], - start_line: Some(0), - }, - status: FileStatus::Modified, - }, - DiffEvent::FileEnd { renamed_to: None } - ], - ); - } - #[test] fn test_line_number_disambiguation() { // Test that line numbers from hunk headers are used to disambiguate @@ -1535,197 +593,6 @@ mod tests { assert_eq!(cursor_column, " let x = ".len()); } - #[test] - fn test_strip_diff_metadata() { - let diff_with_metadata = indoc! {r#" - diff --git a/file.txt b/file.txt - index 1234567..abcdefg 100644 - --- a/file.txt - +++ b/file.txt - @@ -1,3 +1,4 @@ - context line - -removed line - +added line - more context - "#}; - - let stripped = strip_diff_metadata(diff_with_metadata); - - assert_eq!( - stripped, - indoc! {r#" - --- a/file.txt - +++ b/file.txt - @@ -1,3 +1,4 @@ - context line - -removed line - +added line - more context - "#} - ); - } - - #[test] - fn test_apply_diff_to_string_no_trailing_newline() { - // Text without trailing newline; diff generated without - // `\ No newline at end of file` marker. - let text = "line1\nline2\nline3"; - let diff = indoc! 
{" - --- a/file.txt - +++ b/file.txt - @@ -1,3 +1,3 @@ - line1 - -line2 - +replaced - line3 - "}; - - let result = apply_diff_to_string(diff, text).unwrap(); - assert_eq!(result, "line1\nreplaced\nline3"); - } - - #[test] - fn test_apply_diff_to_string_trailing_newline_present() { - // When text has a trailing newline, exact matching still works and - // the fallback is never needed. - let text = "line1\nline2\nline3\n"; - let diff = indoc! {" - --- a/file.txt - +++ b/file.txt - @@ -1,3 +1,3 @@ - line1 - -line2 - +replaced - line3 - "}; - - let result = apply_diff_to_string(diff, text).unwrap(); - assert_eq!(result, "line1\nreplaced\nline3\n"); - } - - #[test] - fn test_apply_diff_to_string_deletion_at_end_no_trailing_newline() { - // Deletion of the last line when text has no trailing newline. - // The edit range must be clamped so it doesn't index past the - // end of the text. - let text = "line1\nline2\nline3"; - let diff = indoc! {" - --- a/file.txt - +++ b/file.txt - @@ -1,3 +1,2 @@ - line1 - line2 - -line3 - "}; - - let result = apply_diff_to_string(diff, text).unwrap(); - assert_eq!(result, "line1\nline2\n"); - } - - #[test] - fn test_apply_diff_to_string_replace_last_line_no_trailing_newline() { - // Replace the last line when text has no trailing newline. - let text = "aaa\nbbb\nccc"; - let diff = indoc! {" - --- a/file.txt - +++ b/file.txt - @@ -1,3 +1,3 @@ - aaa - bbb - -ccc - +ddd - "}; - - let result = apply_diff_to_string(diff, text).unwrap(); - assert_eq!(result, "aaa\nbbb\nddd"); - } - - #[test] - fn test_apply_diff_to_string_multibyte_no_trailing_newline() { - // Multi-byte UTF-8 characters near the end; ensures char boundary - // safety when the fallback clamps edit ranges. - let text = "hello\n세계"; - let diff = indoc! 
{" - --- a/file.txt - +++ b/file.txt - @@ -1,2 +1,2 @@ - hello - -세계 - +world - "}; - - let result = apply_diff_to_string(diff, text).unwrap(); - assert_eq!(result, "hello\nworld"); - } - - #[test] - fn test_find_context_candidates_no_false_positive_mid_text() { - // The stripped fallback must only match at the end of text, not in - // the middle where a real newline exists. - let text = "aaa\nbbb\nccc\n"; - let mut hunk = Hunk { - context: "bbb\n".into(), - edits: vec![], - start_line: None, - }; - - let candidates = find_context_candidates(text, &mut hunk); - // Exact match at offset 4 — the fallback is not used. - assert_eq!(candidates, vec![4]); - } - - #[test] - fn test_find_context_candidates_fallback_at_end() { - let text = "aaa\nbbb"; - let mut hunk = Hunk { - context: "bbb\n".into(), - edits: vec![], - start_line: None, - }; - - let candidates = find_context_candidates(text, &mut hunk); - assert_eq!(candidates, vec![4]); - // Context should be stripped. - assert_eq!(hunk.context, "bbb"); - } - - #[test] - fn test_find_context_candidates_no_fallback_mid_text() { - // "bbb" appears mid-text followed by a newline, so the exact - // match succeeds. Verify the stripped fallback doesn't produce a - // second, spurious candidate. - let text = "aaa\nbbb\nccc"; - let mut hunk = Hunk { - context: "bbb\nccc\n".into(), - edits: vec![], - start_line: None, - }; - - let candidates = find_context_candidates(text, &mut hunk); - // No exact match (text ends without newline after "ccc"), but the - // stripped context "bbb\nccc" matches at offset 4, which is the end. 
- assert_eq!(candidates, vec![4]); - assert_eq!(hunk.context, "bbb\nccc"); - } - - #[test] - fn test_find_context_candidates_clamps_edit_ranges() { - let text = "aaa\nbbb"; - let mut hunk = Hunk { - context: "aaa\nbbb\n".into(), - edits: vec![Edit { - range: 4..8, // "bbb\n" — end points at the trailing \n - text: "ccc\n".into(), - }], - start_line: None, - }; - - let candidates = find_context_candidates(text, &mut hunk); - assert_eq!(candidates, vec![0]); - // Edit range end should be clamped to 7 (new context length). - assert_eq!(hunk.edits[0].range, 4..7); - } - #[test] fn test_edits_for_diff_no_trailing_newline() { let content = "foo\nbar\nbaz"; diff --git a/crates/edit_prediction_cli/src/format_prompt.rs b/crates/edit_prediction_cli/src/format_prompt.rs index 2a1b49007bd19e721a6d95ebddda3758c86aaaef..24a6f1acd470fb8ee77e87d993079298f45b390c 100644 --- a/crates/edit_prediction_cli/src/format_prompt.rs +++ b/crates/edit_prediction_cli/src/format_prompt.rs @@ -6,11 +6,11 @@ use crate::{ retrieve_context::run_context_retrieval, }; use anyhow::{Context as _, Result, anyhow}; -use edit_prediction::udiff; use gpui::AsyncApp; use similar::DiffableStr; use std::ops::Range; use std::sync::Arc; +use zeta_prompt::udiff; use zeta_prompt::{ ZetaFormat, encode_patch_as_output_for_format, excerpt_range_for_format, format_zeta_prompt, multi_region, output_end_marker_for_format, resolve_cursor_region, diff --git a/crates/edit_prediction_cli/src/reversal_tracking.rs b/crates/edit_prediction_cli/src/reversal_tracking.rs index 60661cea04beae4aba4713ac86b51fab42c91979..34ddfd5f5ec0edca2b5de64a6f033a6463dcc133 100644 --- a/crates/edit_prediction_cli/src/reversal_tracking.rs +++ b/crates/edit_prediction_cli/src/reversal_tracking.rs @@ -2,8 +2,8 @@ use std::ops::Range; use std::path::Path; use std::sync::Arc; -use edit_prediction::udiff::apply_diff_to_string; use language::{char_diff, text_diff}; +use zeta_prompt::udiff::apply_diff_to_string; use zeta_prompt::ZetaPromptInput; @@ 
-653,9 +653,9 @@ pub fn compute_prediction_reversal_ratio( #[cfg(test)] mod tests { use super::*; - use edit_prediction::udiff::apply_diff_to_string; use indoc::indoc; use zeta_prompt::ExcerptRanges; + use zeta_prompt::udiff::apply_diff_to_string; fn make_test_prompt_inputs( content: &str, diff --git a/crates/edit_prediction_cli/src/score.rs b/crates/edit_prediction_cli/src/score.rs index cb1bd472c3e4268fe0e1037e331ed8cbd0b51cfb..1dace832d4998362610e860b386f4db49f965144 100644 --- a/crates/edit_prediction_cli/src/score.rs +++ b/crates/edit_prediction_cli/src/score.rs @@ -10,13 +10,13 @@ use crate::{ reversal_tracking, }; use anyhow::Context as _; -use edit_prediction::udiff::{apply_diff_to_string, apply_diff_to_string_with_hunk_offset}; use gpui::AsyncApp; use serde::Serialize; use std::fs::File; use std::io::BufWriter; use std::path::Path; use std::sync::Arc; +use zeta_prompt::udiff::{apply_diff_to_string, apply_diff_to_string_with_hunk_offset}; pub async fn run_scoring( example: &mut Example, diff --git a/crates/zeta_prompt/src/udiff.rs b/crates/zeta_prompt/src/udiff.rs new file mode 100644 index 0000000000000000000000000000000000000000..2658da5893ee923dc0f5798554276f5735abb51a --- /dev/null +++ b/crates/zeta_prompt/src/udiff.rs @@ -0,0 +1,1206 @@ +use std::{ + borrow::Cow, + fmt::{Display, Write}, + mem, + ops::Range, +}; + +use anyhow::{Context as _, Result, anyhow}; + +pub fn strip_diff_path_prefix<'a>(diff: &'a str, prefix: &str) -> Cow<'a, str> { + if prefix.is_empty() { + return Cow::Borrowed(diff); + } + + let prefix_with_slash = format!("{}/", prefix); + let mut needs_rewrite = false; + + for line in diff.lines() { + match DiffLine::parse(line) { + DiffLine::OldPath { path } | DiffLine::NewPath { path } => { + if path.starts_with(&prefix_with_slash) { + needs_rewrite = true; + break; + } + } + _ => {} + } + } + + if !needs_rewrite { + return Cow::Borrowed(diff); + } + + let mut result = String::with_capacity(diff.len()); + for line in diff.lines() { + 
match DiffLine::parse(line) { + DiffLine::OldPath { path } => { + let stripped = path + .strip_prefix(&prefix_with_slash) + .unwrap_or(path.as_ref()); + result.push_str(&format!("--- a/{}\n", stripped)); + } + DiffLine::NewPath { path } => { + let stripped = path + .strip_prefix(&prefix_with_slash) + .unwrap_or(path.as_ref()); + result.push_str(&format!("+++ b/{}\n", stripped)); + } + _ => { + result.push_str(line); + result.push('\n'); + } + } + } + + Cow::Owned(result) +} + +/// Strip unnecessary git metadata lines from a diff, keeping only the lines +/// needed for patch application: path headers (--- and +++), hunk headers (@@), +/// and content lines (+, -, space). +pub fn strip_diff_metadata(diff: &str) -> String { + let mut result = String::new(); + + for line in diff.lines() { + let dominated = DiffLine::parse(line); + match dominated { + // Keep path headers, hunk headers, and content lines + DiffLine::OldPath { .. } + | DiffLine::NewPath { .. } + | DiffLine::HunkHeader(_) + | DiffLine::Context(_) + | DiffLine::Deletion(_) + | DiffLine::Addition(_) + | DiffLine::NoNewlineAtEOF => { + result.push_str(line); + result.push('\n'); + } + // Skip garbage lines (diff --git, index, etc.) + DiffLine::Garbage(_) => {} + } + } + + result +} + +/// Marker used to encode cursor position in patch comment lines. +pub const CURSOR_POSITION_MARKER: &str = "[CURSOR_POSITION]"; + +/// Extract cursor offset from a patch and return `(clean_patch, cursor_offset)`. +/// +/// Cursor position is encoded as a comment line (starting with `#`) containing +/// `[CURSOR_POSITION]`. A `^` in the line indicates the cursor column; a `<` +/// indicates column 0. The offset is computed relative to addition (`+`) and +/// context (` `) lines accumulated so far in the hunk, which represent the +/// cursor position within the new text contributed by the hunk. 
+pub fn extract_cursor_from_patch(patch: &str) -> (String, Option) { + let mut clean_patch = String::new(); + let mut cursor_offset: Option = None; + let mut line_start_offset = 0usize; + let mut prev_line_start_offset = 0usize; + + for line in patch.lines() { + let diff_line = DiffLine::parse(line); + + match &diff_line { + DiffLine::Garbage(content) + if content.starts_with('#') && content.contains(CURSOR_POSITION_MARKER) => + { + let caret_column = if let Some(caret_pos) = content.find('^') { + caret_pos + } else if content.find('<').is_some() { + 0 + } else { + continue; + }; + let cursor_column = caret_column.saturating_sub('#'.len_utf8()); + cursor_offset = Some(prev_line_start_offset + cursor_column); + } + _ => { + if !clean_patch.is_empty() { + clean_patch.push('\n'); + } + clean_patch.push_str(line); + + match diff_line { + DiffLine::Addition(content) | DiffLine::Context(content) => { + prev_line_start_offset = line_start_offset; + line_start_offset += content.len() + 1; + } + _ => {} + } + } + } + } + + if patch.ends_with('\n') && !clean_patch.is_empty() { + clean_patch.push('\n'); + } + + (clean_patch, cursor_offset) +} + +/// Find all byte offsets where `hunk.context` occurs as a substring of `text`. +/// +/// If no exact matches are found and the context ends with `'\n'` but `text` +/// does not, retries without the trailing newline, accepting only a match at +/// the very end of `text`. When this fallback fires, the hunk's context is +/// trimmed and its edit ranges are clamped so that downstream code doesn't +/// index past the end of the matched region. This handles diffs that are +/// missing a `\ No newline at end of file` marker: the parser always appends +/// `'\n'` via `writeln!`, so the context can have a trailing newline that +/// doesn't exist in the source text. 
+pub fn find_context_candidates(text: &str, hunk: &mut Hunk) -> Vec { + let candidates: Vec = text + .match_indices(&hunk.context) + .map(|(offset, _)| offset) + .collect(); + + if !candidates.is_empty() { + return candidates; + } + + if hunk.context.ends_with('\n') && !hunk.context.is_empty() { + let old_len = hunk.context.len(); + hunk.context.pop(); + let new_len = hunk.context.len(); + + if !hunk.context.is_empty() { + let candidates: Vec = text + .match_indices(&hunk.context) + .filter(|(offset, _)| offset + new_len == text.len()) + .map(|(offset, _)| offset) + .collect(); + + if !candidates.is_empty() { + for edit in &mut hunk.edits { + let touched_phantom = edit.range.end > new_len; + edit.range.start = edit.range.start.min(new_len); + edit.range.end = edit.range.end.min(new_len); + if touched_phantom { + // The replacement text was also written with a + // trailing '\n' that corresponds to the phantom + // newline we just removed from the context. + if edit.text.ends_with('\n') { + edit.text.pop(); + } + } + } + return candidates; + } + + // Restore if fallback didn't help either. + hunk.context.push('\n'); + debug_assert_eq!(hunk.context.len(), old_len); + } else { + hunk.context.push('\n'); + } + } + + Vec::new() +} + +/// Given multiple candidate offsets where context matches, use line numbers to disambiguate. +/// Returns the offset that matches the expected line, or None if no match or no line number available. 
+pub fn disambiguate_by_line_number( + candidates: &[usize], + expected_line: Option, + offset_to_line: &dyn Fn(usize) -> u32, +) -> Option { + match candidates.len() { + 0 => None, + 1 => Some(candidates[0]), + _ => { + let expected = expected_line?; + candidates + .iter() + .copied() + .find(|&offset| offset_to_line(offset) == expected) + } + } +} + +pub fn apply_diff_to_string(diff_str: &str, text: &str) -> Result { + apply_diff_to_string_with_hunk_offset(diff_str, text).map(|(text, _)| text) +} + +/// Applies a diff to a string and returns the result along with the offset where +/// the first hunk's context matched in the original text. This offset can be used +/// to adjust cursor positions that are relative to the hunk's content. +pub fn apply_diff_to_string_with_hunk_offset( + diff_str: &str, + text: &str, +) -> Result<(String, Option)> { + let mut diff = DiffParser::new(diff_str); + + let mut text = text.to_string(); + let mut first_hunk_offset = None; + + while let Some(event) = diff.next().context("Failed to parse diff")? { + match event { + DiffEvent::Hunk { + mut hunk, + path: _, + status: _, + } => { + let candidates = find_context_candidates(&text, &mut hunk); + + let hunk_offset = + disambiguate_by_line_number(&candidates, hunk.start_line, &|offset| { + text[..offset].matches('\n').count() as u32 + }) + .ok_or_else(|| anyhow!("couldn't resolve hunk"))?; + + if first_hunk_offset.is_none() { + first_hunk_offset = Some(hunk_offset); + } + + for edit in hunk.edits.iter().rev() { + let range = (hunk_offset + edit.range.start)..(hunk_offset + edit.range.end); + text.replace_range(range, &edit.text); + } + } + DiffEvent::FileEnd { .. 
} => {} + } + } + + Ok((text, first_hunk_offset)) +} + +struct PatchFile<'a> { + old_path: Cow<'a, str>, + new_path: Cow<'a, str>, +} + +pub struct DiffParser<'a> { + current_file: Option>, + current_line: Option<(&'a str, DiffLine<'a>)>, + hunk: Hunk, + diff: std::str::Lines<'a>, + pending_start_line: Option, + processed_no_newline: bool, + last_diff_op: LastDiffOp, +} + +#[derive(Clone, Copy, Default)] +enum LastDiffOp { + #[default] + None, + Context, + Deletion, + Addition, +} + +#[derive(Debug, PartialEq)] +pub enum DiffEvent<'a> { + Hunk { + path: Cow<'a, str>, + hunk: Hunk, + status: FileStatus, + }, + FileEnd { + renamed_to: Option>, + }, +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum FileStatus { + Created, + Modified, + Deleted, +} + +#[derive(Debug, Default, PartialEq)] +pub struct Hunk { + pub context: String, + pub edits: Vec, + pub start_line: Option, +} + +impl Hunk { + pub fn is_empty(&self) -> bool { + self.context.is_empty() && self.edits.is_empty() + } +} + +#[derive(Debug, PartialEq)] +pub struct Edit { + pub range: Range, + pub text: String, +} + +impl<'a> DiffParser<'a> { + pub fn new(diff: &'a str) -> Self { + let mut diff = diff.lines(); + let current_line = diff.next().map(|line| (line, DiffLine::parse(line))); + DiffParser { + current_file: None, + hunk: Hunk::default(), + current_line, + diff, + pending_start_line: None, + processed_no_newline: false, + last_diff_op: LastDiffOp::None, + } + } + + pub fn next(&mut self) -> Result>> { + loop { + let (hunk_done, file_done) = match self.current_line.as_ref().map(|e| &e.1) { + Some(DiffLine::OldPath { .. 
}) | Some(DiffLine::Garbage(_)) | None => (true, true), + Some(DiffLine::HunkHeader(_)) => (true, false), + _ => (false, false), + }; + + if hunk_done { + if let Some(file) = &self.current_file + && !self.hunk.is_empty() + { + let status = if file.old_path == "/dev/null" { + FileStatus::Created + } else if file.new_path == "/dev/null" { + FileStatus::Deleted + } else { + FileStatus::Modified + }; + let path = if status == FileStatus::Created { + file.new_path.clone() + } else { + file.old_path.clone() + }; + let mut hunk = mem::take(&mut self.hunk); + hunk.start_line = self.pending_start_line.take(); + self.processed_no_newline = false; + self.last_diff_op = LastDiffOp::None; + return Ok(Some(DiffEvent::Hunk { path, hunk, status })); + } + } + + if file_done { + if let Some(PatchFile { old_path, new_path }) = self.current_file.take() { + return Ok(Some(DiffEvent::FileEnd { + renamed_to: if old_path != new_path && old_path != "/dev/null" { + Some(new_path) + } else { + None + }, + })); + } + } + + let Some((line, parsed_line)) = self.current_line.take() else { + break; + }; + + (|| { + match parsed_line { + DiffLine::OldPath { path } => { + self.current_file = Some(PatchFile { + old_path: path, + new_path: "".into(), + }); + } + DiffLine::NewPath { path } => { + if let Some(current_file) = &mut self.current_file { + current_file.new_path = path + } + } + DiffLine::HunkHeader(location) => { + if let Some(loc) = location { + self.pending_start_line = Some(loc.start_line_old); + } + } + DiffLine::Context(ctx) => { + if self.current_file.is_some() { + writeln!(&mut self.hunk.context, "{ctx}")?; + self.last_diff_op = LastDiffOp::Context; + } + } + DiffLine::Deletion(del) => { + if self.current_file.is_some() { + let range = self.hunk.context.len() + ..self.hunk.context.len() + del.len() + '\n'.len_utf8(); + if let Some(last_edit) = self.hunk.edits.last_mut() + && last_edit.range.end == range.start + { + last_edit.range.end = range.end; + } else { + 
self.hunk.edits.push(Edit { + range, + text: String::new(), + }); + } + writeln!(&mut self.hunk.context, "{del}")?; + self.last_diff_op = LastDiffOp::Deletion; + } + } + DiffLine::Addition(add) => { + if self.current_file.is_some() { + let range = self.hunk.context.len()..self.hunk.context.len(); + if let Some(last_edit) = self.hunk.edits.last_mut() + && last_edit.range.end == range.start + { + writeln!(&mut last_edit.text, "{add}").unwrap(); + } else { + self.hunk.edits.push(Edit { + range, + text: format!("{add}\n"), + }); + } + self.last_diff_op = LastDiffOp::Addition; + } + } + DiffLine::NoNewlineAtEOF => { + if !self.processed_no_newline { + self.processed_no_newline = true; + match self.last_diff_op { + LastDiffOp::Addition => { + // Remove trailing newline from the last addition + if let Some(last_edit) = self.hunk.edits.last_mut() { + last_edit.text.pop(); + } + } + LastDiffOp::Deletion => { + // Remove trailing newline from context (which includes the deletion) + self.hunk.context.pop(); + if let Some(last_edit) = self.hunk.edits.last_mut() { + last_edit.range.end -= 1; + } + } + LastDiffOp::Context | LastDiffOp::None => { + // Remove trailing newline from context + self.hunk.context.pop(); + } + } + } + } + DiffLine::Garbage(_) => {} + } + + anyhow::Ok(()) + })() + .with_context(|| format!("on line:\n\n```\n{}```", line))?; + + self.current_line = self.diff.next().map(|line| (line, DiffLine::parse(line))); + } + + anyhow::Ok(None) + } +} + +#[derive(Debug, PartialEq)] +pub enum DiffLine<'a> { + OldPath { path: Cow<'a, str> }, + NewPath { path: Cow<'a, str> }, + HunkHeader(Option), + Context(&'a str), + Deletion(&'a str), + Addition(&'a str), + NoNewlineAtEOF, + Garbage(&'a str), +} + +#[derive(Debug, PartialEq)] +pub struct HunkLocation { + pub start_line_old: u32, + pub count_old: u32, + pub start_line_new: u32, + pub count_new: u32, +} + +impl<'a> DiffLine<'a> { + pub fn parse(line: &'a str) -> Self { + 
Self::try_parse(line).unwrap_or(Self::Garbage(line)) + } + + fn try_parse(line: &'a str) -> Option { + if line.starts_with("\\ No newline") { + return Some(Self::NoNewlineAtEOF); + } + if let Some(header) = line.strip_prefix("---").and_then(eat_required_whitespace) { + let path = parse_header_path("a/", header); + Some(Self::OldPath { path }) + } else if let Some(header) = line.strip_prefix("+++").and_then(eat_required_whitespace) { + Some(Self::NewPath { + path: parse_header_path("b/", header), + }) + } else if let Some(header) = line.strip_prefix("@@").and_then(eat_required_whitespace) { + if header.starts_with("...") { + return Some(Self::HunkHeader(None)); + } + + let mut tokens = header.split_whitespace(); + let old_range = tokens.next()?.strip_prefix('-')?; + let new_range = tokens.next()?.strip_prefix('+')?; + + let (start_line_old, count_old) = old_range.split_once(',').unwrap_or((old_range, "1")); + let (start_line_new, count_new) = new_range.split_once(',').unwrap_or((new_range, "1")); + + Some(Self::HunkHeader(Some(HunkLocation { + start_line_old: start_line_old.parse::().ok()?.saturating_sub(1), + count_old: count_old.parse().ok()?, + start_line_new: start_line_new.parse::().ok()?.saturating_sub(1), + count_new: count_new.parse().ok()?, + }))) + } else if let Some(deleted_header) = line.strip_prefix("-") { + Some(Self::Deletion(deleted_header)) + } else if line.is_empty() { + Some(Self::Context("")) + } else if let Some(context) = line.strip_prefix(" ") { + Some(Self::Context(context)) + } else { + Some(Self::Addition(line.strip_prefix("+")?)) + } + } +} + +impl<'a> Display for DiffLine<'a> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + DiffLine::OldPath { path } => write!(f, "--- {path}"), + DiffLine::NewPath { path } => write!(f, "+++ {path}"), + DiffLine::HunkHeader(Some(hunk_location)) => { + write!( + f, + "@@ -{},{} +{},{} @@", + hunk_location.start_line_old + 1, + hunk_location.count_old, + 
hunk_location.start_line_new + 1, + hunk_location.count_new + ) + } + DiffLine::HunkHeader(None) => write!(f, "@@ ... @@"), + DiffLine::Context(content) => write!(f, " {content}"), + DiffLine::Deletion(content) => write!(f, "-{content}"), + DiffLine::Addition(content) => write!(f, "+{content}"), + DiffLine::NoNewlineAtEOF => write!(f, "\\ No newline at end of file"), + DiffLine::Garbage(line) => write!(f, "{line}"), + } + } +} + +fn parse_header_path<'a>(strip_prefix: &'static str, header: &'a str) -> Cow<'a, str> { + if !header.contains(['"', '\\']) { + let path = header.split_ascii_whitespace().next().unwrap_or(header); + return Cow::Borrowed(path.strip_prefix(strip_prefix).unwrap_or(path)); + } + + let mut path = String::with_capacity(header.len()); + let mut in_quote = false; + let mut chars = header.chars().peekable(); + let mut strip_prefix = Some(strip_prefix); + + while let Some(char) = chars.next() { + if char == '"' { + in_quote = !in_quote; + } else if char == '\\' { + let Some(&next_char) = chars.peek() else { + break; + }; + chars.next(); + path.push(next_char); + } else if char.is_ascii_whitespace() && !in_quote { + break; + } else { + path.push(char); + } + + if let Some(prefix) = strip_prefix + && path == prefix + { + strip_prefix.take(); + path.clear(); + } + } + + Cow::Owned(path) +} + +fn eat_required_whitespace(header: &str) -> Option<&str> { + let trimmed = header.trim_ascii_start(); + + if trimmed.len() == header.len() { + None + } else { + Some(trimmed) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use indoc::indoc; + + #[test] + fn parse_lines_simple() { + let input = indoc! 
{" + diff --git a/text.txt b/text.txt + index 86c770d..a1fd855 100644 + --- a/file.txt + +++ b/file.txt + @@ -1,2 +1,3 @@ + context + -deleted + +inserted + garbage + + --- b/file.txt + +++ a/file.txt + "}; + + let lines = input.lines().map(DiffLine::parse).collect::>(); + + assert_eq!( + lines, + &[ + DiffLine::Garbage("diff --git a/text.txt b/text.txt"), + DiffLine::Garbage("index 86c770d..a1fd855 100644"), + DiffLine::OldPath { + path: "file.txt".into() + }, + DiffLine::NewPath { + path: "file.txt".into() + }, + DiffLine::HunkHeader(Some(HunkLocation { + start_line_old: 0, + count_old: 2, + start_line_new: 0, + count_new: 3 + })), + DiffLine::Context("context"), + DiffLine::Deletion("deleted"), + DiffLine::Addition("inserted"), + DiffLine::Garbage("garbage"), + DiffLine::Context(""), + DiffLine::OldPath { + path: "b/file.txt".into() + }, + DiffLine::NewPath { + path: "a/file.txt".into() + }, + ] + ); + } + + #[test] + fn file_header_extra_space() { + let options = ["--- file", "--- file", "---\tfile"]; + + for option in options { + assert_eq!( + DiffLine::parse(option), + DiffLine::OldPath { + path: "file".into() + }, + "{option}", + ); + } + } + + #[test] + fn hunk_header_extra_space() { + let options = [ + "@@ -1,2 +1,3 @@", + "@@ -1,2 +1,3 @@", + "@@\t-1,2\t+1,3\t@@", + "@@ -1,2 +1,3 @@", + "@@ -1,2 +1,3 @@", + "@@ -1,2 +1,3 @@", + "@@ -1,2 +1,3 @@ garbage", + ]; + + for option in options { + assert_eq!( + DiffLine::parse(option), + DiffLine::HunkHeader(Some(HunkLocation { + start_line_old: 0, + count_old: 2, + start_line_new: 0, + count_new: 3 + })), + "{option}", + ); + } + } + + #[test] + fn hunk_header_without_location() { + assert_eq!(DiffLine::parse("@@ ... 
@@"), DiffLine::HunkHeader(None)); + } + + #[test] + fn test_parse_path() { + assert_eq!(parse_header_path("a/", "foo.txt"), "foo.txt"); + assert_eq!( + parse_header_path("a/", "foo/bar/baz.txt"), + "foo/bar/baz.txt" + ); + assert_eq!(parse_header_path("a/", "a/foo.txt"), "foo.txt"); + assert_eq!( + parse_header_path("a/", "a/foo/bar/baz.txt"), + "foo/bar/baz.txt" + ); + + // Extra + assert_eq!( + parse_header_path("a/", "a/foo/bar/baz.txt 2025"), + "foo/bar/baz.txt" + ); + assert_eq!( + parse_header_path("a/", "a/foo/bar/baz.txt\t2025"), + "foo/bar/baz.txt" + ); + assert_eq!( + parse_header_path("a/", "a/foo/bar/baz.txt \""), + "foo/bar/baz.txt" + ); + + // Quoted + assert_eq!( + parse_header_path("a/", "a/foo/bar/\"baz quox.txt\""), + "foo/bar/baz quox.txt" + ); + assert_eq!( + parse_header_path("a/", "\"a/foo/bar/baz quox.txt\""), + "foo/bar/baz quox.txt" + ); + assert_eq!( + parse_header_path("a/", "\"foo/bar/baz quox.txt\""), + "foo/bar/baz quox.txt" + ); + assert_eq!(parse_header_path("a/", "\"whatever 🤷\""), "whatever 🤷"); + assert_eq!( + parse_header_path("a/", "\"foo/bar/baz quox.txt\" 2025"), + "foo/bar/baz quox.txt" + ); + // unescaped quotes are dropped + assert_eq!(parse_header_path("a/", "foo/\"bar\""), "foo/bar"); + + // Escaped + assert_eq!( + parse_header_path("a/", "\"foo/\\\"bar\\\"/baz.txt\""), + "foo/\"bar\"/baz.txt" + ); + assert_eq!( + parse_header_path("a/", "\"C:\\\\Projects\\\\My App\\\\old file.txt\""), + "C:\\Projects\\My App\\old file.txt" + ); + } + + #[test] + fn test_parse_diff_with_leading_and_trailing_garbage() { + let diff = indoc! {" + I need to make some changes. + + I'll change the following things: + - one + - two + - three + + ``` + --- a/file.txt + +++ b/file.txt + one + +AND + two + ``` + + Summary of what I did: + - one + - two + - three + + That's about it. 
+ "}; + + let mut events = Vec::new(); + let mut parser = DiffParser::new(diff); + while let Some(event) = parser.next().unwrap() { + events.push(event); + } + + assert_eq!( + events, + &[ + DiffEvent::Hunk { + path: "file.txt".into(), + hunk: Hunk { + context: "one\ntwo\n".into(), + edits: vec![Edit { + range: 4..4, + text: "AND\n".into() + }], + start_line: None, + }, + status: FileStatus::Modified, + }, + DiffEvent::FileEnd { renamed_to: None } + ], + ) + } + + #[test] + fn test_no_newline_at_eof() { + let diff = indoc! {" + --- a/file.py + +++ b/file.py + @@ -55,7 +55,3 @@ class CustomDataset(Dataset): + torch.set_rng_state(state) + mask = self.transform(mask) + + - if self.mode == 'Training': + - return (img, mask, name) + - else: + - return (img, mask, name) + \\ No newline at end of file + "}; + + let mut events = Vec::new(); + let mut parser = DiffParser::new(diff); + while let Some(event) = parser.next().unwrap() { + events.push(event); + } + + assert_eq!( + events, + &[ + DiffEvent::Hunk { + path: "file.py".into(), + hunk: Hunk { + context: concat!( + " torch.set_rng_state(state)\n", + " mask = self.transform(mask)\n", + "\n", + " if self.mode == 'Training':\n", + " return (img, mask, name)\n", + " else:\n", + " return (img, mask, name)", + ) + .into(), + edits: vec![Edit { + range: 80..203, + text: "".into() + }], + start_line: Some(54), // @@ -55,7 -> line 54 (0-indexed) + }, + status: FileStatus::Modified, + }, + DiffEvent::FileEnd { renamed_to: None } + ], + ); + } + + #[test] + fn test_no_newline_at_eof_addition() { + let diff = indoc! 
{" + --- a/file.txt + +++ b/file.txt + @@ -1,2 +1,3 @@ + context + -deleted + +added line + \\ No newline at end of file + "}; + + let mut events = Vec::new(); + let mut parser = DiffParser::new(diff); + while let Some(event) = parser.next().unwrap() { + events.push(event); + } + + assert_eq!( + events, + &[ + DiffEvent::Hunk { + path: "file.txt".into(), + hunk: Hunk { + context: "context\ndeleted\n".into(), + edits: vec![Edit { + range: 8..16, + text: "added line".into() + }], + start_line: Some(0), // @@ -1,2 -> line 0 (0-indexed) + }, + status: FileStatus::Modified, + }, + DiffEvent::FileEnd { renamed_to: None } + ], + ); + } + + #[test] + fn test_double_no_newline_at_eof() { + // Two consecutive "no newline" markers - the second should be ignored + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,3 @@ + line1 + -old + +new + line3 + \\ No newline at end of file + \\ No newline at end of file + "}; + + let mut events = Vec::new(); + let mut parser = DiffParser::new(diff); + while let Some(event) = parser.next().unwrap() { + events.push(event); + } + + assert_eq!( + events, + &[ + DiffEvent::Hunk { + path: "file.txt".into(), + hunk: Hunk { + context: "line1\nold\nline3".into(), // Only one newline removed + edits: vec![Edit { + range: 6..10, // "old\n" is 4 bytes + text: "new\n".into() + }], + start_line: Some(0), + }, + status: FileStatus::Modified, + }, + DiffEvent::FileEnd { renamed_to: None } + ], + ); + } + + #[test] + fn test_no_newline_after_context_not_addition() { + // "No newline" after context lines should remove newline from context, + // not from an earlier addition + let diff = indoc! 
{" + --- a/file.txt + +++ b/file.txt + @@ -1,4 +1,4 @@ + line1 + -old + +new + line3 + line4 + \\ No newline at end of file + "}; + + let mut events = Vec::new(); + let mut parser = DiffParser::new(diff); + while let Some(event) = parser.next().unwrap() { + events.push(event); + } + + assert_eq!( + events, + &[ + DiffEvent::Hunk { + path: "file.txt".into(), + hunk: Hunk { + // newline removed from line4 (context), not from "new" (addition) + context: "line1\nold\nline3\nline4".into(), + edits: vec![Edit { + range: 6..10, // "old\n" is 4 bytes + text: "new\n".into() // Still has newline + }], + start_line: Some(0), + }, + status: FileStatus::Modified, + }, + DiffEvent::FileEnd { renamed_to: None } + ], + ); + } + + #[test] + fn test_strip_diff_metadata() { + let diff_with_metadata = indoc! {r#" + diff --git a/file.txt b/file.txt + index 1234567..abcdefg 100644 + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,4 @@ + context line + -removed line + +added line + more context + "#}; + + let stripped = strip_diff_metadata(diff_with_metadata); + + assert_eq!( + stripped, + indoc! {r#" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,4 @@ + context line + -removed line + +added line + more context + "#} + ); + } + + #[test] + fn test_apply_diff_to_string_no_trailing_newline() { + // Text without trailing newline; diff generated without + // `\ No newline at end of file` marker. + let text = "line1\nline2\nline3"; + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,3 @@ + line1 + -line2 + +replaced + line3 + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "line1\nreplaced\nline3"); + } + + #[test] + fn test_apply_diff_to_string_trailing_newline_present() { + // When text has a trailing newline, exact matching still works and + // the fallback is never needed. + let text = "line1\nline2\nline3\n"; + let diff = indoc! 
{" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,3 @@ + line1 + -line2 + +replaced + line3 + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "line1\nreplaced\nline3\n"); + } + + #[test] + fn test_apply_diff_to_string_deletion_at_end_no_trailing_newline() { + // Deletion of the last line when text has no trailing newline. + // The edit range must be clamped so it doesn't index past the + // end of the text. + let text = "line1\nline2\nline3"; + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,2 @@ + line1 + line2 + -line3 + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "line1\nline2\n"); + } + + #[test] + fn test_apply_diff_to_string_replace_last_line_no_trailing_newline() { + // Replace the last line when text has no trailing newline. + let text = "aaa\nbbb\nccc"; + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,3 @@ + aaa + bbb + -ccc + +ddd + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "aaa\nbbb\nddd"); + } + + #[test] + fn test_apply_diff_to_string_multibyte_no_trailing_newline() { + // Multi-byte UTF-8 characters near the end; ensures char boundary + // safety when the fallback clamps edit ranges. + let text = "hello\n세계"; + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,2 +1,2 @@ + hello + -세계 + +world + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "hello\nworld"); + } + + #[test] + fn test_find_context_candidates_no_false_positive_mid_text() { + // The stripped fallback must only match at the end of text, not in + // the middle where a real newline exists. + let text = "aaa\nbbb\nccc\n"; + let mut hunk = Hunk { + context: "bbb\n".into(), + edits: vec![], + start_line: None, + }; + + let candidates = find_context_candidates(text, &mut hunk); + // Exact match at offset 4 — the fallback is not used. 
+ assert_eq!(candidates, vec![4]); + } + + #[test] + fn test_find_context_candidates_fallback_at_end() { + let text = "aaa\nbbb"; + let mut hunk = Hunk { + context: "bbb\n".into(), + edits: vec![], + start_line: None, + }; + + let candidates = find_context_candidates(text, &mut hunk); + assert_eq!(candidates, vec![4]); + // Context should be stripped. + assert_eq!(hunk.context, "bbb"); + } + + #[test] + fn test_find_context_candidates_no_fallback_mid_text() { + // "bbb" appears mid-text followed by a newline, so the exact + // match succeeds. Verify the stripped fallback doesn't produce a + // second, spurious candidate. + let text = "aaa\nbbb\nccc"; + let mut hunk = Hunk { + context: "bbb\nccc\n".into(), + edits: vec![], + start_line: None, + }; + + let candidates = find_context_candidates(text, &mut hunk); + // No exact match (text ends without newline after "ccc"), but the + // stripped context "bbb\nccc" matches at offset 4, which is the end. + assert_eq!(candidates, vec![4]); + assert_eq!(hunk.context, "bbb\nccc"); + } + + #[test] + fn test_find_context_candidates_clamps_edit_ranges() { + let text = "aaa\nbbb"; + let mut hunk = Hunk { + context: "aaa\nbbb\n".into(), + edits: vec![Edit { + range: 4..8, // "bbb\n" — end points at the trailing \n + text: "ccc\n".into(), + }], + start_line: None, + }; + + let candidates = find_context_candidates(text, &mut hunk); + assert_eq!(candidates, vec![0]); + // Edit range end should be clamped to 7 (new context length). 
+ assert_eq!(hunk.edits[0].range, 4..7); + } +} diff --git a/crates/zeta_prompt/src/zeta_prompt.rs b/crates/zeta_prompt/src/zeta_prompt.rs index e3aced7ed81d8bf3835a3e711e472651764a314e..0d72d6cd7a46782aa4b572a4ef564d5fe3dec417 100644 --- a/crates/zeta_prompt/src/zeta_prompt.rs +++ b/crates/zeta_prompt/src/zeta_prompt.rs @@ -1,5 +1,6 @@ pub mod excerpt_ranges; pub mod multi_region; +pub mod udiff; use anyhow::{Result, anyhow}; use serde::{Deserialize, Serialize}; @@ -819,6 +820,113 @@ pub fn encode_patch_as_output_for_format( } } +/// Given a `ZetaPromptInput`, a format, and a patch (with cursor already +/// extracted), produce the expected model output string for training. +pub fn format_expected_output( + input: &ZetaPromptInput, + format: ZetaFormat, + patch: &str, + cursor_offset: Option, +) -> Result { + let (context, editable_range, _, _) = resolve_cursor_region(input, format); + let mut old_editable = context[editable_range].to_string(); + if !old_editable.is_empty() && !old_editable.ends_with('\n') { + old_editable.push('\n'); + } + + // Formats with their own output encoding (hashline, variable-edit, + // multi-region empty patches) are handled here. + if let Some(output) = + encode_patch_as_output_for_format(format, &old_editable, patch, cursor_offset)? + { + return Ok(output); + } + + let empty_patch = patch.lines().count() <= 3; + + match format { + // Multi-region formats: non-empty patches need diff application + // then marker-span encoding. 
+ ZetaFormat::V0316SeedMultiRegions => { + let (new_editable, first_hunk_offset) = + udiff::apply_diff_to_string_with_hunk_offset(patch, &old_editable)?; + let cursor_in_new = cursor_in_new_text(cursor_offset, first_hunk_offset, &new_editable); + multi_region::encode_from_old_and_new_v0316( + &old_editable, + &new_editable, + cursor_in_new, + CURSOR_MARKER, + multi_region::V0316_END_MARKER, + ) + } + ZetaFormat::V0318SeedMultiRegions => { + let (new_editable, first_hunk_offset) = + udiff::apply_diff_to_string_with_hunk_offset(patch, &old_editable)?; + let cursor_in_new = cursor_in_new_text(cursor_offset, first_hunk_offset, &new_editable); + multi_region::encode_from_old_and_new_v0318( + &old_editable, + &new_editable, + cursor_in_new, + CURSOR_MARKER, + multi_region::V0318_END_MARKER, + ) + } + ZetaFormat::V0317SeedMultiRegions => { + let (new_editable, first_hunk_offset) = + udiff::apply_diff_to_string_with_hunk_offset(patch, &old_editable)?; + let cursor_in_new = cursor_in_new_text(cursor_offset, first_hunk_offset, &new_editable); + multi_region::encode_from_old_and_new_v0317( + &old_editable, + &new_editable, + cursor_in_new, + CURSOR_MARKER, + multi_region::V0317_END_MARKER, + ) + } + // V0131-style formats and fallback: produce new editable text with + // cursor marker inserted, followed by the end marker. + _ => { + let (mut result, first_hunk_offset) = if empty_patch { + (old_editable.clone(), None) + } else { + udiff::apply_diff_to_string_with_hunk_offset(patch, &old_editable)? 
+ }; + + if let Some(cursor) = cursor_offset { + let hunk_start = if !empty_patch { + first_hunk_offset.unwrap_or(0) + } else { + 0 + }; + let offset = (hunk_start + cursor).min(result.len()); + result.insert_str(offset, CURSOR_MARKER); + } + + if !result.is_empty() && !result.ends_with('\n') { + result.push('\n'); + } + + if let Some(end_marker) = output_end_marker_for_format(format) { + result.push_str(end_marker); + } + + Ok(result) + } + } +} + +/// Compute the cursor position within the new text after diff application. +fn cursor_in_new_text( + cursor_offset: Option, + first_hunk_offset: Option, + new_text: &str, +) -> Option { + cursor_offset.map(|cursor| { + let hunk_start = first_hunk_offset.unwrap_or(0); + (hunk_start + cursor).min(new_text.len()) + }) +} + pub struct ParsedOutput { /// Text that should replace the editable region pub new_editable_region: String, From d3435b1444d3eb0381ce2d724fcf3eccd39bf83f Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Thu, 2 Apr 2026 10:30:40 -0400 Subject: [PATCH 08/63] Update links to troubleshooting images (#52851) Now that I've started migrating blog content into the `zeddotdev-images` bucket, I've added a bit of structure. `/blog` `/docs` This PR just updates the links so we can stay organized going forward. Shouldn't be seeing much thrash like this after this PR. Release Notes: - N/A --- docs/src/troubleshooting.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/src/troubleshooting.md b/docs/src/troubleshooting.md index a852ce779cdb0b719a56e3b12d68ee9b2baab6b7..0ec95cd55e0d127e82430670de9290ec793deb5d 100644 --- a/docs/src/troubleshooting.md +++ b/docs/src/troubleshooting.md @@ -45,13 +45,13 @@ Xcode Instruments (which comes bundled with your [Xcode](https://apps.apple.com/ 1. With Zed running, open Instruments 1. 
Select `Time Profiler` as the profiling template - ![Instruments template picker with Time Profiler selected](https://images.zed.dev/troubleshooting/instruments-template-picker.webp) + ![Instruments template picker with Time Profiler selected](https://images.zed.dev/docs/troubleshooting/instruments-template-picker.webp) 1. In the `Time Profiler` configuration, set the target to the running Zed process 1. Start recording - ![Time Profiler configuration showing the target dropdown and record button](https://images.zed.dev/troubleshooting/instruments-target-and-record.webp) + ![Time Profiler configuration showing the target dropdown and record button](https://images.zed.dev/docs/troubleshooting/instruments-target-and-record.webp) 1. Perform the action in Zed that causes performance issues 1. Stop recording - ![A completed Time Profiler recording in Instruments](https://images.zed.dev/troubleshooting/instruments-recording.webp) + ![A completed Time Profiler recording in Instruments](https://images.zed.dev/docs/troubleshooting/instruments-recording.webp) 1. Save the trace file 1. Compress the trace file into a zip archive 1. File a [GitHub issue](https://github.com/zed-industries/zed/issues/new/choose) with the trace zip attached From 73cd7ec6248293c6acd8ed6e5fa1cc24b9ef37cf Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Thu, 2 Apr 2026 10:45:29 -0400 Subject: [PATCH 09/63] git_graph: Make the graph canvas resizable (#52953) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### Summary This PR integrates the git graph canvas with the `Table` component’s `RedistributableColumnsState`, making the graph column resizable while preserving the table’s existing resize behavior. In particular, column resizing continues to use the same cascading redistribution behavior as the table. This is also the last PR needed to remove the feature flag on the git graph! 
### Table API changes I pulled the redistributable column logic out of `Table` into reusable UI helpers so layouts outside of `Table` can participate in the same column resizing behavior. This adds a shared `RedistributableColumnsState` API, along with helpers for binding drag/drop behavior, rendering resize handles, and constructing header resize metadata. I also added `ColumnWidthConfig::explicit` and `TableRenderContext::for_column_widths` so callers can render table like headers and content with externally managed column widths. The reason for this change is that the git graph now renders a custom split layout: a graph canvas on the left and table content on the right. By reusing the same column state and resize machinery, the graph column can resize together with the table columns while preserving the existing table behavior, including cascading column redistribution and double click reset to default sizing. I also adjusted the resize handle interaction styling so the divider stays in its hovered/highlighted state while a drag is active, which makes the drag target feel more stable and visually consistent during resizing. ### Preview https://github.com/user-attachments/assets/347eed71-0cc1-4db4-9dee-a86ee5ab6f91 Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A or Added/Fixed/Improved ... 
--- crates/git_graph/src/git_graph.rs | 402 ++++++++----- crates/ui/src/components.rs | 2 + crates/ui/src/components/data_table.rs | 532 ++---------------- crates/ui/src/components/data_table/tests.rs | 3 +- .../src/components/redistributable_columns.rs | 485 ++++++++++++++++ 5 files changed, 824 insertions(+), 600 deletions(-) create mode 100644 crates/ui/src/components/redistributable_columns.rs diff --git a/crates/git_graph/src/git_graph.rs b/crates/git_graph/src/git_graph.rs index d473fbbec618c6e7b309ab2ff9dc9eb5787ddc43..bb1566aa29eeae016d31ac549434e7b92d50eb4d 100644 --- a/crates/git_graph/src/git_graph.rs +++ b/crates/git_graph/src/git_graph.rs @@ -42,8 +42,10 @@ use theme_settings::ThemeSettings; use time::{OffsetDateTime, UtcOffset, format_description::BorrowedFormatItem}; use ui::{ ButtonLike, Chip, ColumnWidthConfig, CommonAnimationExt as _, ContextMenu, DiffStat, Divider, - HighlightedLabel, RedistributableColumnsState, ScrollableHandle, Table, TableInteractionState, - TableResizeBehavior, Tooltip, WithScrollbar, prelude::*, + HeaderResizeInfo, HighlightedLabel, RedistributableColumnsState, ScrollableHandle, Table, + TableInteractionState, TableRenderContext, TableResizeBehavior, Tooltip, WithScrollbar, + bind_redistributable_columns, prelude::*, render_redistributable_columns_resize_handles, + render_table_header, table_row::TableRow, }; use workspace::{ Workspace, @@ -901,9 +903,8 @@ pub struct GitGraph { context_menu: Option<(Entity, Point, Subscription)>, row_height: Pixels, table_interaction_state: Entity, - table_column_widths: Entity, + column_widths: Entity, horizontal_scroll_offset: Pixels, - graph_viewport_width: Pixels, selected_entry_idx: Option, hovered_entry_idx: Option, graph_canvas_bounds: Rc>>>, @@ -933,8 +934,60 @@ impl GitGraph { font_size + px(12.0) } - fn graph_content_width(&self) -> Pixels { - (LANE_WIDTH * self.graph_data.max_lanes.min(8) as f32) + LEFT_PADDING * 2.0 + fn graph_canvas_content_width(&self) -> Pixels { + 
(LANE_WIDTH * self.graph_data.max_lanes.max(6) as f32) + LEFT_PADDING * 2.0 + } + + fn preview_column_fractions(&self, window: &Window, cx: &App) -> [f32; 5] { + let fractions = self + .column_widths + .read(cx) + .preview_fractions(window.rem_size()); + [ + fractions[0], + fractions[1], + fractions[2], + fractions[3], + fractions[4], + ] + } + + fn table_column_width_config(&self, window: &Window, cx: &App) -> ColumnWidthConfig { + let [_, description, date, author, commit] = self.preview_column_fractions(window, cx); + let table_total = description + date + author + commit; + + let widths = if table_total > 0.0 { + vec![ + DefiniteLength::Fraction(description / table_total), + DefiniteLength::Fraction(date / table_total), + DefiniteLength::Fraction(author / table_total), + DefiniteLength::Fraction(commit / table_total), + ] + } else { + vec![ + DefiniteLength::Fraction(0.25), + DefiniteLength::Fraction(0.25), + DefiniteLength::Fraction(0.25), + DefiniteLength::Fraction(0.25), + ] + }; + + ColumnWidthConfig::explicit(widths) + } + + fn graph_viewport_width(&self, window: &Window, cx: &App) -> Pixels { + self.column_widths + .read(cx) + .preview_column_width(0, window) + .unwrap_or_else(|| self.graph_canvas_content_width()) + } + + fn clamp_horizontal_scroll_offset(&mut self, graph_viewport_width: Pixels) { + let max_horizontal_scroll = + (self.graph_canvas_content_width() - graph_viewport_width).max(px(0.)); + self.horizontal_scroll_offset = self + .horizontal_scroll_offset + .clamp(px(0.), max_horizontal_scroll); } pub fn new( @@ -972,20 +1025,22 @@ impl GitGraph { }); let table_interaction_state = cx.new(|cx| TableInteractionState::new(cx)); - let table_column_widths = cx.new(|_cx| { + let column_widths = cx.new(|_cx| { RedistributableColumnsState::new( - 4, + 5, vec![ - DefiniteLength::Fraction(0.72), - DefiniteLength::Fraction(0.12), - DefiniteLength::Fraction(0.10), - DefiniteLength::Fraction(0.06), + DefiniteLength::Fraction(0.14), + 
DefiniteLength::Fraction(0.6192), + DefiniteLength::Fraction(0.1032), + DefiniteLength::Fraction(0.086), + DefiniteLength::Fraction(0.0516), ], vec![ TableResizeBehavior::Resizable, TableResizeBehavior::Resizable, TableResizeBehavior::Resizable, TableResizeBehavior::Resizable, + TableResizeBehavior::Resizable, ], ) }); @@ -1020,9 +1075,8 @@ impl GitGraph { context_menu: None, row_height, table_interaction_state, - table_column_widths, + column_widths, horizontal_scroll_offset: px(0.), - graph_viewport_width: px(88.), selected_entry_idx: None, hovered_entry_idx: None, graph_canvas_bounds: Rc::new(Cell::new(None)), @@ -2089,8 +2143,12 @@ impl GitGraph { let vertical_scroll_offset = scroll_offset_y - (first_visible_row as f32 * row_height); let horizontal_scroll_offset = self.horizontal_scroll_offset; - let max_lanes = self.graph_data.max_lanes.max(6); - let graph_width = LANE_WIDTH * max_lanes as f32 + LEFT_PADDING * 2.0; + let graph_viewport_width = self.graph_viewport_width(window, cx); + let graph_width = if self.graph_canvas_content_width() > graph_viewport_width { + self.graph_canvas_content_width() + } else { + graph_viewport_width + }; let last_visible_row = first_visible_row + (viewport_height / row_height).ceil() as usize + 1; @@ -2414,9 +2472,9 @@ impl GitGraph { let new_y = (current_offset.y + delta.y).clamp(max_vertical_scroll, px(0.)); let new_offset = Point::new(current_offset.x, new_y); - let max_lanes = self.graph_data.max_lanes.max(1); - let graph_content_width = LANE_WIDTH * max_lanes as f32 + LEFT_PADDING * 2.0; - let max_horizontal_scroll = (graph_content_width - self.graph_viewport_width).max(px(0.)); + let graph_viewport_width = self.graph_viewport_width(window, cx); + let max_horizontal_scroll = + (self.graph_canvas_content_width() - graph_viewport_width).max(px(0.)); let new_horizontal_offset = (self.horizontal_scroll_offset - delta.x).clamp(px(0.), max_horizontal_scroll); @@ -2497,6 +2555,8 @@ impl Render for GitGraph { cx, ); 
self.graph_data.add_commits(&commits); + let graph_viewport_width = self.graph_viewport_width(window, cx); + self.clamp_horizontal_scroll_offset(graph_viewport_width); (commits.len(), is_loading) }) } else { @@ -2527,118 +2587,202 @@ impl Render for GitGraph { this.child(self.render_loading_spinner(cx)) }) } else { - div() + let header_resize_info = HeaderResizeInfo::from_state(&self.column_widths, cx); + let header_context = TableRenderContext::for_column_widths( + Some(self.column_widths.read(cx).widths_to_render()), + true, + ); + let [ + graph_fraction, + description_fraction, + date_fraction, + author_fraction, + commit_fraction, + ] = self.preview_column_fractions(window, cx); + let table_fraction = + description_fraction + date_fraction + author_fraction + commit_fraction; + let table_width_config = self.table_column_width_config(window, cx); + let graph_viewport_width = self.graph_viewport_width(window, cx); + self.clamp_horizontal_scroll_offset(graph_viewport_width); + + h_flex() .size_full() - .flex() - .flex_row() .child( div() - .w(self.graph_content_width()) - .h_full() + .flex_1() + .min_w_0() + .size_full() .flex() .flex_col() - .child( - div() - .flex() - .items_center() - .px_1() - .py_0p5() - .border_b_1() - .whitespace_nowrap() - .border_color(cx.theme().colors().border) - .child(Label::new("Graph").color(Color::Muted)), - ) - .child( - div() - .id("graph-canvas") - .flex_1() - .overflow_hidden() - .child(self.render_graph(window, cx)) - .on_scroll_wheel(cx.listener(Self::handle_graph_scroll)) - .on_mouse_move(cx.listener(Self::handle_graph_mouse_move)) - .on_click(cx.listener(Self::handle_graph_click)) - .on_hover(cx.listener(|this, &is_hovered: &bool, _, cx| { - if !is_hovered && this.hovered_entry_idx.is_some() { - this.hovered_entry_idx = None; - cx.notify(); - } - })), - ), - ) - .child({ - let row_height = self.row_height; - let selected_entry_idx = self.selected_entry_idx; - let hovered_entry_idx = self.hovered_entry_idx; - let weak_self = 
cx.weak_entity(); - let focus_handle = self.focus_handle.clone(); - div().flex_1().size_full().child( - Table::new(4) - .interactable(&self.table_interaction_state) - .hide_row_borders() - .hide_row_hover() - .header(vec![ - Label::new("Description") - .color(Color::Muted) - .into_any_element(), - Label::new("Date").color(Color::Muted).into_any_element(), - Label::new("Author").color(Color::Muted).into_any_element(), - Label::new("Commit").color(Color::Muted).into_any_element(), - ]) - .width_config(ColumnWidthConfig::redistributable( - self.table_column_widths.clone(), - )) - .map_row(move |(index, row), window, cx| { - let is_selected = selected_entry_idx == Some(index); - let is_hovered = hovered_entry_idx == Some(index); - let is_focused = focus_handle.is_focused(window); - let weak = weak_self.clone(); - let weak_for_hover = weak.clone(); - - let hover_bg = cx.theme().colors().element_hover.opacity(0.6); - let selected_bg = if is_focused { - cx.theme().colors().element_selected - } else { - cx.theme().colors().element_hover - }; - - row.h(row_height) - .when(is_selected, |row| row.bg(selected_bg)) - .when(is_hovered && !is_selected, |row| row.bg(hover_bg)) - .on_hover(move |&is_hovered, _, cx| { - weak_for_hover - .update(cx, |this, cx| { - if is_hovered { - if this.hovered_entry_idx != Some(index) { - this.hovered_entry_idx = Some(index); - cx.notify(); - } - } else if this.hovered_entry_idx == Some(index) { - // Only clear if this row was the hovered one - this.hovered_entry_idx = None; - cx.notify(); - } - }) - .ok(); - }) - .on_click(move |event, window, cx| { - let click_count = event.click_count(); - weak.update(cx, |this, cx| { - this.select_entry(index, ScrollStrategy::Center, cx); - if click_count >= 2 { - this.open_commit_view(index, window, cx); - } - }) - .ok(); - }) - .into_any_element() - }) - .uniform_list( - "git-graph-commits", - commit_count, - cx.processor(Self::render_table_rows), + .child(render_table_header( + TableRow::from_vec( + vec![ 
+ Label::new("Graph") + .color(Color::Muted) + .truncate() + .into_any_element(), + Label::new("Description") + .color(Color::Muted) + .into_any_element(), + Label::new("Date").color(Color::Muted).into_any_element(), + Label::new("Author").color(Color::Muted).into_any_element(), + Label::new("Commit").color(Color::Muted).into_any_element(), + ], + 5, ), - ) - }) + header_context, + Some(header_resize_info), + Some(self.column_widths.entity_id()), + cx, + )) + .child({ + let row_height = self.row_height; + let selected_entry_idx = self.selected_entry_idx; + let hovered_entry_idx = self.hovered_entry_idx; + let weak_self = cx.weak_entity(); + let focus_handle = self.focus_handle.clone(); + + bind_redistributable_columns( + div() + .relative() + .flex_1() + .w_full() + .overflow_hidden() + .child( + h_flex() + .size_full() + .child( + div() + .w(DefiniteLength::Fraction(graph_fraction)) + .h_full() + .min_w_0() + .overflow_hidden() + .child( + div() + .id("graph-canvas") + .size_full() + .overflow_hidden() + .child( + div() + .size_full() + .child(self.render_graph(window, cx)), + ) + .on_scroll_wheel( + cx.listener(Self::handle_graph_scroll), + ) + .on_mouse_move( + cx.listener(Self::handle_graph_mouse_move), + ) + .on_click(cx.listener(Self::handle_graph_click)) + .on_hover(cx.listener( + |this, &is_hovered: &bool, _, cx| { + if !is_hovered + && this.hovered_entry_idx.is_some() + { + this.hovered_entry_idx = None; + cx.notify(); + } + }, + )), + ), + ) + .child( + div() + .w(DefiniteLength::Fraction(table_fraction)) + .h_full() + .min_w_0() + .child( + Table::new(4) + .interactable(&self.table_interaction_state) + .hide_row_borders() + .hide_row_hover() + .width_config(table_width_config) + .map_row(move |(index, row), window, cx| { + let is_selected = + selected_entry_idx == Some(index); + let is_hovered = + hovered_entry_idx == Some(index); + let is_focused = + focus_handle.is_focused(window); + let weak = weak_self.clone(); + let weak_for_hover = weak.clone(); + 
+ let hover_bg = cx + .theme() + .colors() + .element_hover + .opacity(0.6); + let selected_bg = if is_focused { + cx.theme().colors().element_selected + } else { + cx.theme().colors().element_hover + }; + + row.h(row_height) + .when(is_selected, |row| row.bg(selected_bg)) + .when( + is_hovered && !is_selected, + |row| row.bg(hover_bg), + ) + .on_hover(move |&is_hovered, _, cx| { + weak_for_hover + .update(cx, |this, cx| { + if is_hovered { + if this.hovered_entry_idx + != Some(index) + { + this.hovered_entry_idx = + Some(index); + cx.notify(); + } + } else if this + .hovered_entry_idx + == Some(index) + { + this.hovered_entry_idx = + None; + cx.notify(); + } + }) + .ok(); + }) + .on_click(move |event, window, cx| { + let click_count = event.click_count(); + weak.update(cx, |this, cx| { + this.select_entry( + index, + ScrollStrategy::Center, + cx, + ); + if click_count >= 2 { + this.open_commit_view( + index, + window, + cx, + ); + } + }) + .ok(); + }) + .into_any_element() + }) + .uniform_list( + "git-graph-commits", + commit_count, + cx.processor(Self::render_table_rows), + ), + ), + ), + ) + .child(render_redistributable_columns_resize_handles( + &self.column_widths, + window, + cx, + )), + self.column_widths.clone(), + ) + }), + ) .on_drag_move::(cx.listener(|this, event, window, cx| { this.commit_details_split_state.update(cx, |state, cx| { state.on_drag_move(event, window, cx); @@ -3734,9 +3878,11 @@ mod tests { }); cx.run_until_parked(); - git_graph.update_in(&mut *cx, |this, window, cx| { - this.render(window, cx); - }); + cx.draw( + point(px(0.), px(0.)), + gpui::size(px(1200.), px(800.)), + |_, _| git_graph.clone().into_any_element(), + ); cx.run_until_parked(); let commit_count_after_switch_back = diff --git a/crates/ui/src/components.rs b/crates/ui/src/components.rs index 68b1ff9beb7a8918ee3f5e1857e3cc68e15a3fc1..367d80d79c9af8722091e36c8e04bafb7ef0d8b5 100644 --- a/crates/ui/src/components.rs +++ b/crates/ui/src/components.rs @@ -29,6 +29,7 @@ mod 
notification; mod popover; mod popover_menu; mod progress; +mod redistributable_columns; mod right_click_menu; mod scrollbar; mod stack; @@ -73,6 +74,7 @@ pub use notification::*; pub use popover::*; pub use popover_menu::*; pub use progress::*; +pub use redistributable_columns::*; pub use right_click_menu::*; pub use scrollbar::*; pub use stack::*; diff --git a/crates/ui/src/components/data_table.rs b/crates/ui/src/components/data_table.rs index 2012defc47d9cccea87849fa41470ad1183b552f..e5a14a3ddabc0d918bfe6d6bcb077e32adeb6eb4 100644 --- a/crates/ui/src/components/data_table.rs +++ b/crates/ui/src/components/data_table.rs @@ -1,19 +1,19 @@ use std::{ops::Range, rc::Rc}; use gpui::{ - AbsoluteLength, AppContext as _, DefiniteLength, DragMoveEvent, Entity, EntityId, FocusHandle, - Length, ListHorizontalSizingBehavior, ListSizingBehavior, ListState, Point, Stateful, - UniformListScrollHandle, WeakEntity, list, transparent_black, uniform_list, + DefiniteLength, Entity, EntityId, FocusHandle, Length, ListHorizontalSizingBehavior, + ListSizingBehavior, ListState, Point, Stateful, UniformListScrollHandle, WeakEntity, list, + transparent_black, uniform_list, }; -use itertools::intersperse_with; use crate::{ ActiveTheme as _, AnyElement, App, Button, ButtonCommon as _, ButtonStyle, Color, Component, - ComponentScope, Context, Div, ElementId, FixedWidth as _, FluentBuilder as _, Indicator, - InteractiveElement, IntoElement, ParentElement, Pixels, RegisterComponent, RenderOnce, - ScrollAxes, ScrollableHandle, Scrollbars, SharedString, StatefulInteractiveElement, Styled, - StyledExt as _, StyledTypography, Window, WithScrollbar, div, example_group_with_title, h_flex, - px, single_example, + ComponentScope, Context, Div, ElementId, FixedWidth as _, FluentBuilder as _, HeaderResizeInfo, + Indicator, InteractiveElement, IntoElement, ParentElement, Pixels, RedistributableColumnsState, + RegisterComponent, RenderOnce, ScrollAxes, ScrollableHandle, Scrollbars, SharedString, + 
StatefulInteractiveElement, Styled, StyledExt as _, StyledTypography, Window, WithScrollbar, + bind_redistributable_columns, div, example_group_with_title, h_flex, px, + render_redistributable_columns_resize_handles, single_example, table_row::{IntoTableRow as _, TableRow}, v_flex, }; @@ -22,16 +22,10 @@ pub mod table_row; #[cfg(test)] mod tests; -const RESIZE_COLUMN_WIDTH: f32 = 8.0; -const RESIZE_DIVIDER_WIDTH: f32 = 1.0; - /// Represents an unchecked table row, which is a vector of elements. /// Will be converted into `TableRow` internally pub type UncheckedTableRow = Vec; -#[derive(Debug)] -pub(crate) struct DraggedColumn(pub(crate) usize); - struct UniformListData { render_list_of_rows_fn: Box, &mut Window, &mut App) -> Vec>>, @@ -113,124 +107,6 @@ impl TableInteractionState { } } -/// Renders invisible resize handles overlaid on top of table content. -/// -/// - Spacer: invisible element that matches the width of table column content -/// - Divider: contains the actual resize handle that users can drag to resize columns -/// -/// Structure: [spacer] [divider] [spacer] [divider] [spacer] -/// -/// Business logic: -/// 1. Creates spacers matching each column width -/// 2. Intersperses (inserts) resize handles between spacers (interactive only for resizable columns) -/// 3. Each handle supports hover highlighting, double-click to reset, and drag to resize -/// 4. 
Returns an absolute-positioned overlay that sits on top of table content -fn render_resize_handles( - column_widths: &TableRow, - resizable_columns: &TableRow, - initial_sizes: &TableRow, - columns: Option>, - window: &mut Window, - cx: &mut App, -) -> AnyElement { - let spacers = column_widths - .as_slice() - .iter() - .map(|width| base_cell_style(Some(*width)).into_any_element()); - - let mut column_ix = 0; - let resizable_columns_shared = Rc::new(resizable_columns.clone()); - let initial_sizes_shared = Rc::new(initial_sizes.clone()); - let mut resizable_columns_iter = resizable_columns.as_slice().iter(); - - let dividers = intersperse_with(spacers, || { - let resizable_columns = Rc::clone(&resizable_columns_shared); - let initial_sizes = Rc::clone(&initial_sizes_shared); - window.with_id(column_ix, |window| { - let mut resize_divider = div() - .id(column_ix) - .relative() - .top_0() - .w(px(RESIZE_DIVIDER_WIDTH)) - .h_full() - .bg(cx.theme().colors().border.opacity(0.8)); - - let mut resize_handle = div() - .id("column-resize-handle") - .absolute() - .left_neg_0p5() - .w(px(RESIZE_COLUMN_WIDTH)) - .h_full(); - - if resizable_columns_iter - .next() - .is_some_and(TableResizeBehavior::is_resizable) - { - let hovered = window.use_state(cx, |_window, _cx| false); - - resize_divider = resize_divider.when(*hovered.read(cx), |div| { - div.bg(cx.theme().colors().border_focused) - }); - - resize_handle = resize_handle - .on_hover(move |&was_hovered, _, cx| hovered.write(cx, was_hovered)) - .cursor_col_resize() - .when_some(columns.clone(), |this, columns| { - this.on_click(move |event, window, cx| { - if event.click_count() >= 2 { - columns.update(cx, |columns, _| { - columns.on_double_click( - column_ix, - &initial_sizes, - &resizable_columns, - window, - ); - }) - } - - cx.stop_propagation(); - }) - }) - .on_drag(DraggedColumn(column_ix), |_, _offset, _window, cx| { - cx.new(|_cx| gpui::Empty) - }) - } - - column_ix += 1; - 
resize_divider.child(resize_handle).into_any_element() - }) - }); - - h_flex() - .id("resize-handles") - .absolute() - .inset_0() - .w_full() - .children(dividers) - .into_any_element() -} - -#[derive(Debug, Copy, Clone, PartialEq)] -pub enum TableResizeBehavior { - None, - Resizable, - MinSize(f32), -} - -impl TableResizeBehavior { - pub fn is_resizable(&self) -> bool { - *self != TableResizeBehavior::None - } - - pub fn min_size(&self) -> Option { - match self { - TableResizeBehavior::None => None, - TableResizeBehavior::Resizable => Some(0.05), - TableResizeBehavior::MinSize(min_size) => Some(*min_size), - } - } -} - pub enum ColumnWidthConfig { /// Static column widths (no resize handles). Static { @@ -278,6 +154,21 @@ impl ColumnWidthConfig { } } + /// Explicit column widths with no fixed table width. + pub fn explicit>(widths: Vec) -> Self { + let cols = widths.len(); + ColumnWidthConfig::Static { + widths: StaticColumnWidths::Explicit( + widths + .into_iter() + .map(Into::into) + .collect::>() + .into_table_row(cols), + ), + table_width: None, + } + } + /// Column widths for rendering. pub fn widths_to_render(&self, cx: &App) -> Option> { match self { @@ -292,10 +183,7 @@ impl ColumnWidthConfig { ColumnWidthConfig::Redistributable { columns_state: entity, .. - } => { - let state = entity.read(cx); - Some(state.preview_widths.map_cloned(Length::Definite)) - } + } => Some(entity.read(cx).widths_to_render()), } } @@ -316,296 +204,6 @@ impl ColumnWidthConfig { None => ListHorizontalSizingBehavior::FitList, } } - - /// Render resize handles overlay if applicable. - pub fn render_resize_handles(&self, window: &mut Window, cx: &mut App) -> Option { - match self { - ColumnWidthConfig::Redistributable { - columns_state: entity, - .. 
- } => { - let (column_widths, resize_behavior, initial_widths) = { - let state = entity.read(cx); - ( - state.preview_widths.map_cloned(Length::Definite), - state.resize_behavior.clone(), - state.initial_widths.clone(), - ) - }; - Some(render_resize_handles( - &column_widths, - &resize_behavior, - &initial_widths, - Some(entity.clone()), - window, - cx, - )) - } - _ => None, - } - } - - /// Returns info needed for header double-click-to-reset, if applicable. - pub fn header_resize_info(&self, cx: &App) -> Option { - match self { - ColumnWidthConfig::Redistributable { columns_state, .. } => { - let state = columns_state.read(cx); - Some(HeaderResizeInfo { - columns_state: columns_state.downgrade(), - resize_behavior: state.resize_behavior.clone(), - initial_widths: state.initial_widths.clone(), - }) - } - _ => None, - } - } -} - -#[derive(Clone)] -pub struct HeaderResizeInfo { - pub columns_state: WeakEntity, - pub resize_behavior: TableRow, - pub initial_widths: TableRow, -} - -pub struct RedistributableColumnsState { - pub(crate) initial_widths: TableRow, - pub(crate) committed_widths: TableRow, - pub(crate) preview_widths: TableRow, - pub(crate) resize_behavior: TableRow, - pub(crate) cached_table_width: Pixels, -} - -impl RedistributableColumnsState { - pub fn new( - cols: usize, - initial_widths: UncheckedTableRow>, - resize_behavior: UncheckedTableRow, - ) -> Self { - let widths: TableRow = initial_widths - .into_iter() - .map(Into::into) - .collect::>() - .into_table_row(cols); - Self { - initial_widths: widths.clone(), - committed_widths: widths.clone(), - preview_widths: widths, - resize_behavior: resize_behavior.into_table_row(cols), - cached_table_width: Default::default(), - } - } - - pub fn cols(&self) -> usize { - self.committed_widths.cols() - } - - pub fn initial_widths(&self) -> &TableRow { - &self.initial_widths - } - - pub fn resize_behavior(&self) -> &TableRow { - &self.resize_behavior - } - - fn get_fraction(length: &DefiniteLength, 
bounds_width: Pixels, rem_size: Pixels) -> f32 { - match length { - DefiniteLength::Absolute(AbsoluteLength::Pixels(pixels)) => *pixels / bounds_width, - DefiniteLength::Absolute(AbsoluteLength::Rems(rems_width)) => { - rems_width.to_pixels(rem_size) / bounds_width - } - DefiniteLength::Fraction(fraction) => *fraction, - } - } - - pub(crate) fn on_double_click( - &mut self, - double_click_position: usize, - initial_sizes: &TableRow, - resize_behavior: &TableRow, - window: &mut Window, - ) { - let bounds_width = self.cached_table_width; - let rem_size = window.rem_size(); - let initial_sizes = - initial_sizes.map_ref(|length| Self::get_fraction(length, bounds_width, rem_size)); - let widths = self - .committed_widths - .map_ref(|length| Self::get_fraction(length, bounds_width, rem_size)); - - let updated_widths = Self::reset_to_initial_size( - double_click_position, - widths, - initial_sizes, - resize_behavior, - ); - self.committed_widths = updated_widths.map(DefiniteLength::Fraction); - self.preview_widths = self.committed_widths.clone(); - } - - pub(crate) fn reset_to_initial_size( - col_idx: usize, - mut widths: TableRow, - initial_sizes: TableRow, - resize_behavior: &TableRow, - ) -> TableRow { - let diff = initial_sizes[col_idx] - widths[col_idx]; - - let left_diff = - initial_sizes[..col_idx].iter().sum::() - widths[..col_idx].iter().sum::(); - let right_diff = initial_sizes[col_idx + 1..].iter().sum::() - - widths[col_idx + 1..].iter().sum::(); - - let go_left_first = if diff < 0.0 { - left_diff > right_diff - } else { - left_diff < right_diff - }; - - if !go_left_first { - let diff_remaining = - Self::propagate_resize_diff(diff, col_idx, &mut widths, resize_behavior, 1); - - if diff_remaining != 0.0 && col_idx > 0 { - Self::propagate_resize_diff( - diff_remaining, - col_idx, - &mut widths, - resize_behavior, - -1, - ); - } - } else { - let diff_remaining = - Self::propagate_resize_diff(diff, col_idx, &mut widths, resize_behavior, -1); - - if diff_remaining 
!= 0.0 { - Self::propagate_resize_diff( - diff_remaining, - col_idx, - &mut widths, - resize_behavior, - 1, - ); - } - } - - widths - } - - pub(crate) fn on_drag_move( - &mut self, - drag_event: &DragMoveEvent, - window: &mut Window, - cx: &mut Context, - ) { - let drag_position = drag_event.event.position; - let bounds = drag_event.bounds; - - let mut col_position = 0.0; - let rem_size = window.rem_size(); - let bounds_width = bounds.right() - bounds.left(); - let col_idx = drag_event.drag(cx).0; - - let divider_width = Self::get_fraction( - &DefiniteLength::Absolute(AbsoluteLength::Pixels(px(RESIZE_DIVIDER_WIDTH))), - bounds_width, - rem_size, - ); - - let mut widths = self - .committed_widths - .map_ref(|length| Self::get_fraction(length, bounds_width, rem_size)); - - for length in widths[0..=col_idx].iter() { - col_position += length + divider_width; - } - - let mut total_length_ratio = col_position; - for length in widths[col_idx + 1..].iter() { - total_length_ratio += length; - } - let cols = self.resize_behavior.cols(); - total_length_ratio += (cols - 1 - col_idx) as f32 * divider_width; - - let drag_fraction = (drag_position.x - bounds.left()) / bounds_width; - let drag_fraction = drag_fraction * total_length_ratio; - let diff = drag_fraction - col_position - divider_width / 2.0; - - Self::drag_column_handle(diff, col_idx, &mut widths, &self.resize_behavior); - - self.preview_widths = widths.map(DefiniteLength::Fraction); - } - - pub(crate) fn drag_column_handle( - diff: f32, - col_idx: usize, - widths: &mut TableRow, - resize_behavior: &TableRow, - ) { - if diff > 0.0 { - Self::propagate_resize_diff(diff, col_idx, widths, resize_behavior, 1); - } else { - Self::propagate_resize_diff(-diff, col_idx + 1, widths, resize_behavior, -1); - } - } - - pub(crate) fn propagate_resize_diff( - diff: f32, - col_idx: usize, - widths: &mut TableRow, - resize_behavior: &TableRow, - direction: i8, - ) -> f32 { - let mut diff_remaining = diff; - if 
resize_behavior[col_idx].min_size().is_none() { - return diff; - } - - let step_right; - let step_left; - if direction < 0 { - step_right = 0; - step_left = 1; - } else { - step_right = 1; - step_left = 0; - } - if col_idx == 0 && direction < 0 { - return diff; - } - let mut curr_column = col_idx + step_right - step_left; - - while diff_remaining != 0.0 && curr_column < widths.cols() { - let Some(min_size) = resize_behavior[curr_column].min_size() else { - if curr_column == 0 { - break; - } - curr_column -= step_left; - curr_column += step_right; - continue; - }; - - let curr_width = widths[curr_column] - diff_remaining; - widths[curr_column] = curr_width; - - if min_size > curr_width { - diff_remaining = min_size - curr_width; - widths[curr_column] = min_size; - } else { - diff_remaining = 0.0; - break; - } - if curr_column == 0 { - break; - } - curr_column -= step_left; - curr_column += step_right; - } - widths[col_idx] = widths[col_idx] + (diff - diff_remaining); - - diff_remaining - } } /// A table component @@ -919,11 +517,8 @@ pub fn render_table_header( if event.click_count() > 1 { info.columns_state .update(cx, |column, _| { - column.on_double_click( - header_idx, - &info.initial_widths, - &info.resize_behavior, - window, + column.reset_column_to_initial_width( + header_idx, window, ); }) .ok(); @@ -962,6 +557,19 @@ impl TableRenderContext { disable_base_cell_style: table.disable_base_cell_style, } } + + pub fn for_column_widths(column_widths: Option>, use_ui_font: bool) -> Self { + Self { + striped: false, + show_row_borders: true, + show_row_hover: true, + total_row_count: 0, + column_widths, + map_row: None, + use_ui_font, + disable_base_cell_style: false, + } + } } impl RenderOnce for Table { @@ -969,9 +577,15 @@ impl RenderOnce for Table { let table_context = TableRenderContext::new(&self, cx); let interaction_state = self.interaction_state.and_then(|state| state.upgrade()); - let header_resize_info = interaction_state - .as_ref() - .and_then(|_| 
self.column_width_config.header_resize_info(cx)); + let header_resize_info = + interaction_state + .as_ref() + .and_then(|_| match &self.column_width_config { + ColumnWidthConfig::Redistributable { columns_state, .. } => { + Some(HeaderResizeInfo::from_state(columns_state, cx)) + } + _ => None, + }); let table_width = self.column_width_config.table_width(); let horizontal_sizing = self.column_width_config.list_horizontal_sizing(); @@ -985,13 +599,19 @@ impl RenderOnce for Table { ColumnWidthConfig::Redistributable { columns_state: entity, .. - } => Some(entity.downgrade()), + } => Some(entity.clone()), _ => None, }); - let resize_handles = interaction_state - .as_ref() - .and_then(|_| self.column_width_config.render_resize_handles(window, cx)); + let resize_handles = + interaction_state + .as_ref() + .and_then(|_| match &self.column_width_config { + ColumnWidthConfig::Redistributable { columns_state, .. } => Some( + render_redistributable_columns_resize_handles(columns_state, window, cx), + ), + _ => None, + }); let table = div() .when_some(table_width, |this, width| this.w(width)) @@ -1006,38 +626,8 @@ impl RenderOnce for Table { cx, )) }) - .when_some(redistributable_entity, { - |this, widths| { - this.on_drag_move::({ - let widths = widths.clone(); - move |e, window, cx| { - widths - .update(cx, |widths, cx| { - widths.on_drag_move(e, window, cx); - }) - .ok(); - } - }) - .on_children_prepainted({ - let widths = widths.clone(); - move |bounds, _, cx| { - widths - .update(cx, |widths, _| { - // This works because all children x axis bounds are the same - widths.cached_table_width = - bounds[0].right() - bounds[0].left(); - }) - .ok(); - } - }) - .on_drop::(move |_, _, cx| { - widths - .update(cx, |widths, _| { - widths.committed_widths = widths.preview_widths.clone(); - }) - .ok(); - }) - } + .when_some(redistributable_entity, |this, widths| { + bind_redistributable_columns(this, widths) }) .child({ let content = div() diff --git 
a/crates/ui/src/components/data_table/tests.rs b/crates/ui/src/components/data_table/tests.rs index 0936cd3088cc50bc08bf0a0a09d9a6fa7a2cdaf0..604e8b7cd1aabee85b406ec99d458c949eda599b 100644 --- a/crates/ui/src/components/data_table/tests.rs +++ b/crates/ui/src/components/data_table/tests.rs @@ -1,4 +1,5 @@ -use super::*; +use super::table_row::TableRow; +use crate::{RedistributableColumnsState, TableResizeBehavior}; fn is_almost_eq(a: &[f32], b: &[f32]) -> bool { a.len() == b.len() && a.iter().zip(b).all(|(x, y)| (x - y).abs() < 1e-6) diff --git a/crates/ui/src/components/redistributable_columns.rs b/crates/ui/src/components/redistributable_columns.rs new file mode 100644 index 0000000000000000000000000000000000000000..cd22c31e19736e72e5d88676178053b49a3e65fd --- /dev/null +++ b/crates/ui/src/components/redistributable_columns.rs @@ -0,0 +1,485 @@ +use std::rc::Rc; + +use gpui::{ + AbsoluteLength, AppContext as _, Bounds, DefiniteLength, DragMoveEvent, Empty, Entity, Length, + WeakEntity, +}; +use itertools::intersperse_with; + +use super::data_table::table_row::{IntoTableRow as _, TableRow}; +use crate::{ + ActiveTheme as _, AnyElement, App, Context, Div, FluentBuilder as _, InteractiveElement, + IntoElement, ParentElement, Pixels, StatefulInteractiveElement, Styled, Window, div, h_flex, + px, +}; + +const RESIZE_COLUMN_WIDTH: f32 = 8.0; +const RESIZE_DIVIDER_WIDTH: f32 = 1.0; + +#[derive(Debug)] +struct DraggedColumn(usize); + +#[derive(Debug, Copy, Clone, PartialEq)] +pub enum TableResizeBehavior { + None, + Resizable, + MinSize(f32), +} + +impl TableResizeBehavior { + pub fn is_resizable(&self) -> bool { + *self != TableResizeBehavior::None + } + + pub fn min_size(&self) -> Option { + match self { + TableResizeBehavior::None => None, + TableResizeBehavior::Resizable => Some(0.05), + TableResizeBehavior::MinSize(min_size) => Some(*min_size), + } + } +} + +#[derive(Clone)] +pub struct HeaderResizeInfo { + pub columns_state: WeakEntity, + pub resize_behavior: 
TableRow, +} + +impl HeaderResizeInfo { + pub fn from_state(columns_state: &Entity, cx: &App) -> Self { + let resize_behavior = columns_state.read(cx).resize_behavior().clone(); + Self { + columns_state: columns_state.downgrade(), + resize_behavior, + } + } +} + +pub struct RedistributableColumnsState { + pub(crate) initial_widths: TableRow, + pub(crate) committed_widths: TableRow, + pub(crate) preview_widths: TableRow, + pub(crate) resize_behavior: TableRow, + pub(crate) cached_container_width: Pixels, +} + +impl RedistributableColumnsState { + pub fn new( + cols: usize, + initial_widths: Vec>, + resize_behavior: Vec, + ) -> Self { + let widths: TableRow = initial_widths + .into_iter() + .map(Into::into) + .collect::>() + .into_table_row(cols); + Self { + initial_widths: widths.clone(), + committed_widths: widths.clone(), + preview_widths: widths, + resize_behavior: resize_behavior.into_table_row(cols), + cached_container_width: Default::default(), + } + } + + pub fn cols(&self) -> usize { + self.committed_widths.cols() + } + + pub fn initial_widths(&self) -> &TableRow { + &self.initial_widths + } + + pub fn preview_widths(&self) -> &TableRow { + &self.preview_widths + } + + pub fn resize_behavior(&self) -> &TableRow { + &self.resize_behavior + } + + pub fn widths_to_render(&self) -> TableRow { + self.preview_widths.map_cloned(Length::Definite) + } + + pub fn preview_fractions(&self, rem_size: Pixels) -> TableRow { + if self.cached_container_width > px(0.) 
{ + self.preview_widths + .map_ref(|length| Self::get_fraction(length, self.cached_container_width, rem_size)) + } else { + self.preview_widths.map_ref(|length| match length { + DefiniteLength::Fraction(fraction) => *fraction, + DefiniteLength::Absolute(_) => 0.0, + }) + } + } + + pub fn preview_column_width(&self, column_index: usize, window: &Window) -> Option { + let width = self.preview_widths().as_slice().get(column_index)?; + match width { + DefiniteLength::Fraction(fraction) if self.cached_container_width > px(0.) => { + Some(self.cached_container_width * *fraction) + } + DefiniteLength::Fraction(_) => None, + DefiniteLength::Absolute(AbsoluteLength::Pixels(pixels)) => Some(*pixels), + DefiniteLength::Absolute(AbsoluteLength::Rems(rems_width)) => { + Some(rems_width.to_pixels(window.rem_size())) + } + } + } + + pub fn cached_container_width(&self) -> Pixels { + self.cached_container_width + } + + pub fn set_cached_container_width(&mut self, width: Pixels) { + self.cached_container_width = width; + } + + pub fn commit_preview(&mut self) { + self.committed_widths = self.preview_widths.clone(); + } + + pub fn reset_column_to_initial_width(&mut self, column_index: usize, window: &Window) { + let bounds_width = self.cached_container_width; + if bounds_width <= px(0.) 
{ + return; + } + + let rem_size = window.rem_size(); + let initial_sizes = self + .initial_widths + .map_ref(|length| Self::get_fraction(length, bounds_width, rem_size)); + let widths = self + .committed_widths + .map_ref(|length| Self::get_fraction(length, bounds_width, rem_size)); + + let updated_widths = + Self::reset_to_initial_size(column_index, widths, initial_sizes, &self.resize_behavior); + self.committed_widths = updated_widths.map(DefiniteLength::Fraction); + self.preview_widths = self.committed_widths.clone(); + } + + fn get_fraction(length: &DefiniteLength, bounds_width: Pixels, rem_size: Pixels) -> f32 { + match length { + DefiniteLength::Absolute(AbsoluteLength::Pixels(pixels)) => *pixels / bounds_width, + DefiniteLength::Absolute(AbsoluteLength::Rems(rems_width)) => { + rems_width.to_pixels(rem_size) / bounds_width + } + DefiniteLength::Fraction(fraction) => *fraction, + } + } + + pub(crate) fn reset_to_initial_size( + col_idx: usize, + mut widths: TableRow, + initial_sizes: TableRow, + resize_behavior: &TableRow, + ) -> TableRow { + let diff = initial_sizes[col_idx] - widths[col_idx]; + + let left_diff = + initial_sizes[..col_idx].iter().sum::() - widths[..col_idx].iter().sum::(); + let right_diff = initial_sizes[col_idx + 1..].iter().sum::() + - widths[col_idx + 1..].iter().sum::(); + + let go_left_first = if diff < 0.0 { + left_diff > right_diff + } else { + left_diff < right_diff + }; + + if !go_left_first { + let diff_remaining = + Self::propagate_resize_diff(diff, col_idx, &mut widths, resize_behavior, 1); + + if diff_remaining != 0.0 && col_idx > 0 { + Self::propagate_resize_diff( + diff_remaining, + col_idx, + &mut widths, + resize_behavior, + -1, + ); + } + } else { + let diff_remaining = + Self::propagate_resize_diff(diff, col_idx, &mut widths, resize_behavior, -1); + + if diff_remaining != 0.0 { + Self::propagate_resize_diff( + diff_remaining, + col_idx, + &mut widths, + resize_behavior, + 1, + ); + } + } + + widths + } + + fn 
on_drag_move( + &mut self, + drag_event: &DragMoveEvent, + window: &mut Window, + cx: &mut Context, + ) { + let drag_position = drag_event.event.position; + let bounds = drag_event.bounds; + let bounds_width = bounds.right() - bounds.left(); + if bounds_width <= px(0.) { + return; + } + + let mut col_position = 0.0; + let rem_size = window.rem_size(); + let col_idx = drag_event.drag(cx).0; + + let divider_width = Self::get_fraction( + &DefiniteLength::Absolute(AbsoluteLength::Pixels(px(RESIZE_DIVIDER_WIDTH))), + bounds_width, + rem_size, + ); + + let mut widths = self + .committed_widths + .map_ref(|length| Self::get_fraction(length, bounds_width, rem_size)); + + for length in widths[0..=col_idx].iter() { + col_position += length + divider_width; + } + + let mut total_length_ratio = col_position; + for length in widths[col_idx + 1..].iter() { + total_length_ratio += length; + } + let cols = self.resize_behavior.cols(); + total_length_ratio += (cols - 1 - col_idx) as f32 * divider_width; + + let drag_fraction = (drag_position.x - bounds.left()) / bounds_width; + let drag_fraction = drag_fraction * total_length_ratio; + let diff = drag_fraction - col_position - divider_width / 2.0; + + Self::drag_column_handle(diff, col_idx, &mut widths, &self.resize_behavior); + + self.preview_widths = widths.map(DefiniteLength::Fraction); + } + + pub(crate) fn drag_column_handle( + diff: f32, + col_idx: usize, + widths: &mut TableRow, + resize_behavior: &TableRow, + ) { + if diff > 0.0 { + Self::propagate_resize_diff(diff, col_idx, widths, resize_behavior, 1); + } else { + Self::propagate_resize_diff(-diff, col_idx + 1, widths, resize_behavior, -1); + } + } + + pub(crate) fn propagate_resize_diff( + diff: f32, + col_idx: usize, + widths: &mut TableRow, + resize_behavior: &TableRow, + direction: i8, + ) -> f32 { + let mut diff_remaining = diff; + if resize_behavior[col_idx].min_size().is_none() { + return diff; + } + + let step_right; + let step_left; + if direction < 0 { + 
step_right = 0; + step_left = 1; + } else { + step_right = 1; + step_left = 0; + } + if col_idx == 0 && direction < 0 { + return diff; + } + let mut curr_column = col_idx + step_right - step_left; + + while diff_remaining != 0.0 && curr_column < widths.cols() { + let Some(min_size) = resize_behavior[curr_column].min_size() else { + if curr_column == 0 { + break; + } + curr_column -= step_left; + curr_column += step_right; + continue; + }; + + let curr_width = widths[curr_column] - diff_remaining; + widths[curr_column] = curr_width; + + if min_size > curr_width { + diff_remaining = min_size - curr_width; + widths[curr_column] = min_size; + } else { + diff_remaining = 0.0; + break; + } + if curr_column == 0 { + break; + } + curr_column -= step_left; + curr_column += step_right; + } + widths[col_idx] = widths[col_idx] + (diff - diff_remaining); + + diff_remaining + } +} + +pub fn bind_redistributable_columns( + container: Div, + columns_state: Entity, +) -> Div { + container + .on_drag_move::({ + let columns_state = columns_state.clone(); + move |event, window, cx| { + columns_state.update(cx, |columns, cx| { + columns.on_drag_move(event, window, cx); + }); + } + }) + .on_children_prepainted({ + let columns_state = columns_state.clone(); + move |bounds, _, cx| { + if let Some(width) = child_bounds_width(&bounds) { + columns_state.update(cx, |columns, _| { + columns.set_cached_container_width(width); + }); + } + } + }) + .on_drop::(move |_, _, cx| { + columns_state.update(cx, |columns, _| { + columns.commit_preview(); + }); + }) +} + +pub fn render_redistributable_columns_resize_handles( + columns_state: &Entity, + window: &mut Window, + cx: &mut App, +) -> AnyElement { + let (column_widths, resize_behavior) = { + let state = columns_state.read(cx); + (state.widths_to_render(), state.resize_behavior().clone()) + }; + + let mut column_ix = 0; + let resize_behavior = Rc::new(resize_behavior); + let dividers = intersperse_with( + column_widths + .as_slice() + .iter() + 
.copied() + .map(|width| resize_spacer(width).into_any_element()), + || { + let current_column_ix = column_ix; + let resize_behavior = Rc::clone(&resize_behavior); + let columns_state = columns_state.clone(); + column_ix += 1; + + window.with_id(current_column_ix, |window| { + let mut resize_divider = div() + .id(current_column_ix) + .relative() + .top_0() + .w(px(RESIZE_DIVIDER_WIDTH)) + .h_full() + .bg(cx.theme().colors().border.opacity(0.8)); + + let mut resize_handle = div() + .id("column-resize-handle") + .absolute() + .left_neg_0p5() + .w(px(RESIZE_COLUMN_WIDTH)) + .h_full(); + + if resize_behavior[current_column_ix].is_resizable() { + let is_highlighted = window.use_state(cx, |_window, _cx| false); + + resize_divider = resize_divider.when(*is_highlighted.read(cx), |div| { + div.bg(cx.theme().colors().border_focused) + }); + + resize_handle = resize_handle + .on_hover({ + let is_highlighted = is_highlighted.clone(); + move |&was_hovered, _, cx| is_highlighted.write(cx, was_hovered) + }) + .cursor_col_resize() + .on_click({ + let columns_state = columns_state.clone(); + move |event, window, cx| { + if event.click_count() >= 2 { + columns_state.update(cx, |columns, _| { + columns.reset_column_to_initial_width( + current_column_ix, + window, + ); + }); + } + + cx.stop_propagation(); + } + }) + .on_drag(DraggedColumn(current_column_ix), { + let is_highlighted = is_highlighted.clone(); + move |_, _offset, _window, cx| { + is_highlighted.write(cx, true); + cx.new(|_cx| Empty) + } + }) + .on_drop::(move |_, _, cx| { + is_highlighted.write(cx, false); + columns_state.update(cx, |state, _| { + state.commit_preview(); + }); + }); + } + + resize_divider.child(resize_handle).into_any_element() + }) + }, + ); + + h_flex() + .id("resize-handles") + .absolute() + .inset_0() + .w_full() + .children(dividers) + .into_any_element() +} + +fn resize_spacer(width: Length) -> Div { + div().w(width).h_full() +} + +fn child_bounds_width(bounds: &[Bounds]) -> Option { + let 
first_bounds = bounds.first()?; + let mut left = first_bounds.left(); + let mut right = first_bounds.right(); + + for bound in bounds.iter().skip(1) { + left = left.min(bound.left()); + right = right.max(bound.right()); + } + + Some(right - left) +} From c02ea54130f50627dd11d9cd917ae2512f56669a Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 2 Apr 2026 11:47:22 -0300 Subject: [PATCH 10/63] docs: Update typefaces and some other styles (#52992) Update the heading typeface to use IBM Plex Serif, the code typeface to use Lilex (IBM Plex Mono), and pull them from the zed.dev CDN. Also added some stray design adjustments here and there. Release Notes: - N/A --- docs/theme/css/general.css | 33 +++++++++++++----- docs/theme/css/variables.css | 13 +++---- docs/theme/fonts/Lora.var.woff2 | Bin 84124 -> 0 bytes docs/theme/fonts/fonts.css | 32 ++++++++++++++--- .../fonts/iAWriterQuattroS-Regular.woff2 | Bin 44416 -> 0 bytes docs/theme/page-toc.css | 2 +- docs/theme/plugins.css | 6 ++-- 7 files changed, 63 insertions(+), 23 deletions(-) delete mode 100644 docs/theme/fonts/Lora.var.woff2 delete mode 100644 docs/theme/fonts/iAWriterQuattroS-Regular.woff2 diff --git a/docs/theme/css/general.css b/docs/theme/css/general.css index f63fd24d1379aa3f325ba53a92784ba256a0dd97..9c8077bad525da1b7c15572d6fc154b66602e987 100644 --- a/docs/theme/css/general.css +++ b/docs/theme/css/general.css @@ -70,10 +70,21 @@ h5, h6 { position: relative; font-family: var(--title-font); - font-weight: 480; + font-weight: 400; +} + +h1 { color: var(--title-color); } +h2, +h3, +h4, +h5, +h6 { + color: var(--full-contrast); +} + /* Don't change font size in headers. 
*/ h1 code, h2 code, @@ -213,7 +224,7 @@ hr { } .content { - padding: 48px 32px 0 32px; + padding: 32px 32px 0 32px; display: flex; justify-content: space-between; gap: 36px; @@ -272,10 +283,14 @@ hr { border-radius: 8px; overflow: clip; } -.content .header:link, -.content .header:visited { +.content h1 .header:link, +.content h1 .header:visited { color: var(--title-color); } +.content :is(h2, h3, h4, h5, h6) .header:link, +.content :is(h2, h3, h4, h5, h6) .header:visited { + color: var(--full-contrast); +} .content .header:link, .content .header:visited:hover { text-decoration: none; @@ -383,15 +398,17 @@ blockquote .warning:before { } kbd { - background-color: rgba(8, 76, 207, 0.1); + background-color: var(--keybinding-bg); + padding: 4px 4px 6px 4px; border-radius: 4px; + font-family: var(--mono-font); + display: inline-block; + margin: 0 2px; border: solid 1px var(--popover-border); box-shadow: inset 0 -1px 0 var(--theme-hover); - display: inline-block; font-size: var(--code-font-size); - font-family: var(--mono-font); + color: var(--full-contrast); line-height: 10px; - padding: 4px 5px; vertical-align: middle; } diff --git a/docs/theme/css/variables.css b/docs/theme/css/variables.css index 46ea739daf8643db5ad57a239091e557df2a3d0c..ca43e6feb4a17d67ce0a6140ba1459569bb6e33f 100644 --- a/docs/theme/css/variables.css +++ b/docs/theme/css/variables.css @@ -11,11 +11,12 @@ --page-padding: 15px; --content-max-width: 690px; --menu-bar-height: 64px; - --font: "IA Writer Quattro S", sans-serif; - --title-font: "Lora", "Helvetica Neue", Helvetica, Arial, sans-serif; + --font: "iA Writer Quattro S", sans-serif; + --title-font: + "IBM Plex Serif", "Helvetica Neue", Helvetica, Arial, sans-serif; --mono-font: - ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, Liberation Mono, - Courier New, monospace; + "Lilex", ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, + Liberation Mono, Courier New, monospace; --code-font-size: 0.875em /* please adjust the ace font size 
accordingly in editor.js */; @@ -151,7 +152,7 @@ --inline-code-color: hsl(40, 100%, 80%); --code-text: hsl(220, 13%, 95%); --code-bg: hsl(220, 93%, 50%, 0.2); - --keybinding-bg: hsl(0, 0%, 12%); + --keybinding-bg: hsl(220, 20%, 10%); --pre-bg: hsl(220, 13%, 5%); --pre-border: hsla(220, 93%, 70%, 0.3); @@ -162,7 +163,7 @@ --popover-shadow: 0 10px 15px -3px hsl(0, 0%, 0%, 0.1), 0 4px 6px -4px hsl(0, 0%, 0%, 0.1); - --theme-hover: hsl(220, 13%, 25%); + --theme-hover: hsl(220, 13%, 20%); --hover-section-title: hsl(220, 13%, 11%); --quote-bg: hsl(220, 13%, 25%, 0.4); diff --git a/docs/theme/fonts/Lora.var.woff2 b/docs/theme/fonts/Lora.var.woff2 deleted file mode 100644 index e2d8990a7ee9fe1f2b02c5d9c23b1e8e13e14de9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 84124 zcmZs>W0Wvmvn<%QZQHh=wr$(CZQHhur)}G|ZFkRm&WC$v*3_TEs#TRccVuKl?Q)Y7 zWdZ;M0000K&jvvLXAjI({?9S>|6cp&^#8kIh45nsEg|8g^YM$T3Mz)Pf$8%rDGMq) z0wjRY3nM~61$Qz8jSEA^I0G;QbvOdD0g(V_8G{gjMp%M(!oq-SxxXiZ{5nPNfFCRc zw5q&P8Yb3+0G;_35*NZ~T->R*v&EI(RDz87O+Y?5n-KJFjsWACH0xzH{{GK~PMU<# zWLlU*(g^`k?!TeAT;;aAEAX%&2F> za50*adBfzH!%2xTf=Ui$S?RfsGO5L#*~@IXRfc^fH>7T$>=2OM z!7|9TWO47wCeOykr<8&&A={^SmsBQUVgvnGi1PKjds_3G>v^Kn8v>NyJK!~7rqVXr zWAcqAE)a;&+f^(c`o$e=Xakf}?M**zG#FnO9ZD7=WLw53I&ah^tKoa|yhC>ifjBk-5Y>2UbhNwHL3i$iwqs8ibSOtWV7p5BT*KdIw&Raq3N_>vg>VZNMhs|WS+ zWJQXEjCX&@iWDi+YYqhSjy`tWmv-#eeVdZb_mJ2R8=k3G0502Zc>v-g!ZJdxAQB2@ z^sJzEl;_0RywlT{(=D%EuPDQ63|9Roh>93kw#H#m;heSfkYx;wUgRc*N`?ZwonH8# zs#QK__`a|qae*W06WqWSkKb7(_}^J_`~>g2akT;duSHU##0`CYloQjJDbAwx>={hv zRt?V6iHgPZ3D`ZhQGvRy=lPdizKt*i55GfGLzYO-VPE%d_o=@2KnY5SwK1h%Ef+jp zK;y=1b!pZB#OC+g0g<>d2hKme;e&m3mWmBsSjkv%T;o!uJoiK77K$;+%KjZI$dyYN zJc5zI0a!30l8FHl-s!xvG+w`TS(=Rfnrhk6m!(L7#Vc51!ita;Eo{u{mz*o6KOOy= zgv5d*;qHRMBuB6p z)xSJVeeeiXzrVO@YpFXvVIm$P;C+)p64~h{5@_m;eJ6b+$RB0=Z`U4e85n$Q8c6Da zc4TnPQ{JF}pzT}SK03JwW_kvfGQ5oQ zDVm6Z(DwBI0QMAmxSYUWuJNOv^w{=!UjZ+W(y%-9kwDWx+i1M6{w}}Y{yLrJ!WnhC 
zsCUkTG}p{ru=BEkuC*GA<1kF3G!LpN5<+yo!o%AOxJqQOCDm@U*9F7h6;o=x*hy_H zKLD^wze*1x&`D9B?{&i)A5F#jbvWLRCmX{tfg>tFje!>@DFXzI_9|J8=z1fwf~y;r zb%qT;#X^hS38W@&K)qy=if7@Uzn-hLT2J=3&?@b_=Cp$Z5Q`gCpKWbfjh7QJ*U9!4 zU)yo^#Q-dK$D7D6JdhtG=x^AmTWHvJx^X|o$$BZ}!kPtmBmzp66TqzXG;!~F z)|4(W(eQhnTuHTBE0MWWG)tzj&R)SYh^r!_Bb3hIw%P}rA>t%n+v?koxmPU8>w#H% zWDsGRqolU>UGu3CDtpU-LQMVkX1M?ga+dI(1;mS{!(A1rkfIk40Am|~!!fu`pjfBi zLpijj9AiY58{>&9I`#~-7!hvOt~c8DMsarbjSKzw zlN#zqxMSZ|OXo<;n+#1~Y}>F$*qQIa3hD3XNQMUsfG)P3KNcO9EtD2@eD%Tcob1#|@_7$T$40H7F`iN|3T zu-GCZl|v?CH3ujbh+h!LvDph&u~#l-p=9kV01bpAV0}=G#o{a$j)3Fb_1+wGfEUB} z**e6V(em2Zm~@#9>&AWMJ8g4)vjTWgJb3W8P@gujSnL}<{^@@0NC!Z6N|@jJ@RaIQ z)Y6LR%lhfp*IxvjVT}MohGCeaDQRc;)SYhm-97Bu0}hK+{%h$?R2*4ZH1zS~=`>bm z1}s#p&O+$onP@_4NTGRVsHd2G$HQ8cSowgsjB@_8jtn{$${Y=apj0mTZu#9E!nNV{ zPV*D;Pw!!!( zV1G7nvVv`Ru_+_5Hu=UU4xESA2>OX%bTAkrp<8ebn?;EcTWP+SE~Id)a@(=0aIN*|)}H_}K9N6XgezvmJ+om+-IB#B`+hwXIT*V{D{I@h)539|Y(E6s)?3ig#lY3K zxT?*0$>fX*^J9F;$O#OmSeDpjd`>Z|bd;p#T8l%|hRL4A(Rn=tZZM=M1OI#T%)e=t z|7o&k{hAXSSx~(+gh2}aUdNGINc9f&Iecd!mL3O0&nsv8sJ8_Fe8A8-l7ZiWssQI% z=n_fpl>&PHs4sUPtz-es2*X?UsiG@m`%IE~BsdOI^ewnfxqP?&_g3XI&E*AwBpiej z1o9_3SQ;T=%5{Ht9lK|C*)XLr1!4fhAewPd>OAR-trs?+?7AtltQS2|Pj`btMhQlQ zSn;2}k+kCW8N5-ZRrlAVHUcI$7(pl=A#iGWcfotMFzv-2^MxU-<)uzRMFAD{o<{-y zXEN2G*EYB-_E()O8hWty&3=Y4tXcysFTwdgozS?Vxj{t+6J625sQ(=SSu|aKBx`mj^d;a(MaV01WAWpouNfy8 z(uJ2_pGQK?KG0BIcL_Tdo0qv8Kr6WUJuLGE0m=C>?g1tj281(W6zWVrCq$M5p49_{_a4Rb2KK4;4}%U$x*e9fBb>Uc zoW71@;PV*#&$zC{AXM&PLP-Aznu(BvY4D1CtkrXg$wQ62K5Def&Sv!r9qfzcO9fk) zGl%4;FJ*t;^`jnTY|6Q@+170r`myJ#AD=7vZ=k){knj#NslsC5DQE;c%8&hUK0;U~ zYPT^API^fZ4L0mB2F0MX7S7YqGC^Q8fif>-X`(q7h1%N{CE9@IkMHlF{V;_auLqJ` zO%$MbiUtgyWwle3_&d5s1@!dY)!e55Fkk>$1WJ#D%42h`hr}z z@6)tASNTh|d+KR9uesGpFV(y5IR;vuJluDR_%b$1eP#J5I&-L8G7meaC~)X9?Tz(Tr)_F1J-(N`%t! 
zAFtQ)pgSO>FTgQ|j*r-BjB%&+@HRw0;3{Hr7TH(0(NGqMZ9Vj}Jd3KbvuHnyGsRHZ zChTq)d^?QXN<(2fO=6)OrYs&-&&zZVtCpKyOBq)hY8wt)ay~~>H@>W4Ub)0v{wcs#f~XgnD5U6o&Qo3AqV@j2nl8!GNuS|Rvb zU8vyu4mO?mKOR^P z0wp*o30A!Fynl!VGcfo0diMrmS$!-3yuFqakeD=v?zY3QooJ}Q6=fNW5-2nn9}|JM z+U#g?@WmQ+bD@t-1-oJdT{R*TC?xLiVWMBNizkg%Z5P2-@(a`uo23hZ%sN89|C0O> zm?5z?`|{B$-0l6wLma_dQV=c3VO4@M68;R-*k^6u)o|S!DqaJq5oN6L{zdvTi16rr zG!(orJ){f3@woO(a3Nfn)suOAORDN`VzgYp16>C>eZaBTsvToLF*W%en$?ig)n$l` zb0twlfR6Y3jiw`QwgCCWf<*Rnzv$O@4hI0xY-gyrgQ|5kF)#b4;U3|=dXqypP2HlG z-!QmdsdvAu?&o~+V+61d5W1OdT-trpd^3IzOf-DSHj}e$H9!AXPa;YCOn@1Od$t z%ze+_#oV>TgE_hniwc^1Ok_Z~j|iNA20jxNff8_o6f0C}D+i0V?fwWXDuo=K6nr6^ zaOq|EC{$;v4+4hD!a4{C31SXlv=Pk$fH1*dk7*yld0q%OMWSI-n;F&+LSvWoHR*~F ztsauMSw8wxoa#W`{b@S0Z1GJmn`Rg^L;q|pr#ZZ8&ICotmGXJ$3^}L)J@nkMp?UI6 zf5c06#j~2w_ZSL8j|{Q>(t40yjta!;?)N*mXHK+9Wm$YTybC92NSb5ncKVK;CqkJ& zqW#tfXMA5DzAD`*A^`=s*Vr4?P$vBtQIABs4~{T22Md8|2jjv^w9B;41%>hL@6m1pPPo!SsgR9{}u_L~GzaO;R9IHl#@xxr&3E@uuwF_dj>Ji#Fh zg7zc}macusCEwhU*wz#D7LiELn2`OdUSH`+Lxh@wKxPO&HksN!h2=Q*0IELZ0_L6u z{g}p#C8cO|*+W*2;?GYqH#6Dle>6(Uxa)2CyW2pZSlnrdAvim+5Di-gE3ClsA`vPyf1)@Z>J}Zh)Q7mg(1Y1>1ckE+X z4>z;lIq=iZ+5uq=-owHK_Q9a$1k?VjL!sX!504IMSy`xB4iw7r(vB@&;4S;f9U6{G z%f1wQ{Q;d^W@D>KRb$TawdPc4od#bt$7s#EgR?xP+83$RC%+<7m%orquuY?kGWxu* z5WEL=e2O>rgCAUF^5~BKd3MwNH~PjU!(UICu8h@u@yEa}QQ^aOTLBwbtxI`@@YOi| zXL#RyNzK05zR*yI-G(K($w64NxB=&V-@Sem0FE$xqO*V3=+4rPQHFL{9sxc#jaoj> z-ghxT0784g{eeAqVI?rSJ<71sH4lf3Uor&Ym>Pd zEz*md=`K(~_P#C~_y3M1nB`)%XLy@vmLNWxsI$pa#^ZH2Fd}LTJs6SS74kiH>2)6A zv-zIT@HXCgmj>Ef)MM=r+cDK%FSH7$Ttls3Jm4l1pK~r~C2A<;-J$WRzo7Qq+meTQ zYkiAVNY%T90`@8tLM!04dw} z6vX_%kf!m(0)FI%a@~HO_(aYu8b63+ltD?zA;lN6(lt}vS)iDT@#X`KaOg@AK1ISG znhu8Vh|wchh>)Ey)h<64^%5YjxH3;P>r_RnS&w|MsV$~>VTI0|bzi(LK{nV;{1!Qp z7H*uo@5uddi*_{1Xr!+zsVc)bvxFo1$z2E2kC{g`fn*$wLdr4S{sk{)>cO}x;SuY$ zzwGO8mG(7(a42%vYFV4}TI7emHQ^-@2tOgU=KJGSTRG@Q+|Q#=w>PhW%%gpE)Vj(k zxT}c84{Ek0wPoNzf~aJj>w;>lkV+C|q}`SQW!m+>K%gO3JkqW4wk*m1bOg#)nl5UE{6TMM?cO zXH??fe@#!$yh(0A@^834{GHJ`qZeAzo8W_yKThSOOZS(ryx6U!n#i~<`>r?H 
z1B;w~Vud>Bk8uyv=b6cu+aVT%G@dzb<@C*Kc{;6#09(y-S?4A~_Vq2DzDkjvjVgKW&?QATfm-lWalreZ@fi9!kvIc3 zd?Q}U{5G<-6ggxu_(+Jv9ANHWa`mZ?r~aNMIAmmYldWTY!b7?RFNo^{c7YdyzNZE` zNUzspeTfA9g!m4DeH+4Vd)j6sddQQb0jzHFNaoKfxkxA+7{!{*595X~-|=|XG#@jM z9royw?=SrPx6qyn^w$%IeL9$qYo7!FFu8qiMA%g}T+x&pa_1TluuxQ$fAB%@Q#!BPEL$+V)@GGwE>z{tlD0zH66S_5~c%Zvkf@6KILd{;FXNcShn(L4!`ieoUwqXJd;R2Qk92q8AEw zY>to_;z~TdR_S>7R$oKpcDhU7#0eS+3kZBmDD@gn44%&Ls*I0UDnd?KKOGVV712vf zZo!{pG5FfvRC(}0760poXE^dv)rK4l;-|oKYJAMEfT#w1uTscz;IqY72|_0gqrDUS zLqFnt&BF%fMJ0!m5InBocd*$shTRzJfRvJF2c2UwWBfS&yss~U)_JW%%ZX8jqRFDBHQQ1Ro*zbATksdpe_e*=cZvhUU<1o$Q^+wO_{hrTG7(1mzLB)W6YX5{h5s8rQkU;)^Z%tbwrB~ zN>PvcRG1xzN)$YRT48QNCFSAS0-j^)ECYlO9WwCXt!RS5Xrce6d2<)u@1MatW2_G}#I%nx)$yI{oL=Iewr zIDnb30~pVK`q(7Kd*-^zwK^{AILD`D-8zHRBn^Rf2R><9�?IK+@GkKE@5@K>^n z7|?&%`^l=>e7HRhC7;!PU00$j$&n6qf^-B|{>8Del^HddlozMaY6N@F&A@rW^=_pl zyrdS{wi8W2*QO9XhqfRUu*PbB3akYr$Ma~6uw2vZ;(P^aOwhwlkI}xx{@nJHE;tS6 z{yjU6ljp!0o*BOkkRy``|2PK-QYYd^@6$iu zmv%Q6qaTps2oXqs;8>p+2I1JhAc$j452;7ES{@o(5J>FivX zrY)xRcVK>1sP8IDDpZ8UnV_8$*Vv+uO_i z6nv9`;KEiNc+>H69wk)G-b0JcQPkQCsn5EhYtNu+p%zzvG&xmkS|wX8BOF)*fb8;D!tF;AoTF#oq=wQ4(qhYc-58 z4A>meLh$Bwgi(q6LYRnfMwtQTC*Q98ik6>RZhGjyyT^499eh#9^5hgvwvR|6d(Uwb zefq*y0*)}iCdc>hct~eCF`X6rk0{Hk3)%!srmda8YGCC{L6ST4gf!B=5M|&Dj^MZw zM?h7qL~=e)bvb|hrPt=W80m(2(uaAbflWMHrZcF2qJnwkfy9%6Qg7%H20uI$f9vhC zKawaLH|#y{_l7{veF>*NwC%fGhoYVW#&4_IKYWLkb$Wl7y2jzzdc7serr9&+pj@_n zz#MEiQet^BzHDr@e}BzyBDZZW6fTJ#92a_8P^E^}sif02UIi?<2eqQ=a08re-oSaB z?eJhV{o<#>x1Kqm91{3S^Kp*NQp%n?Dt_wOV#n)w4ZXi-YhO{tjB2OnFGi6Ut+e1RETX?}TL??>%-*uxwR?uUM{ygiko%`t z738mSB<)esjkjDS2@)il+L%E}r znGNXRwX@}n%GWDP12+NM;m{NzftL$7oXsg2oZ_f7uI|J-H|U_GchG<4 z*c3RakCA$jwe(kNb>cxy^e2(sul6nZwARe3hyuYLnP|P%=gA?M*a)5Q*wn7|;6t9k zGWXyk_u6e~_b24!G1Mk0U@ZNVIaN6zoSSLI>8hlDxf}n~-7eHc>@8v7pGC-*@v9Zt zbc$8X;DxJ?aI7mSD9-$vd^288kyJD&8JVqGyX_CDLc7D%h0%K|T^Zfn_j$EC?y z<#Z16P%HuTvi_4}#3p}e1?A+K7YE;yUqSUGygwus(+5F3 z@0T)_0109#3x7Hf$Jm980LEBId@eKVIM;zWFk70hB2Ac_RpmsG4&6z=imL&rF2XNXwwn${J%+5}tg{@M`Wn 
z$@O2c5%FOwiyx__S&w3@aF6G5yqbZWEldBwRpKcMPq~sCP*ER#RCV>WD|elRTjIE- zVa9Z{F1gV1DOWBYsR+@Q%NYQQ%y+R=N%<^8P!oeL4;xAh}n;AI2?dUC#{rhoHInX zSEn7HAruifAg+w)bZ1DwJ;og{rgcr~TmPh=6%E$Br5Z_XYN%Z=(4j^(vYU!`Dro~f z6Q3R!$p{v|6RnUjOZOzBdDks4W@#hUfy`(;Oju`91AR9X&Yeu$!wv8@qHXNyp68iS zMSxG!*%WOFl20DRDKr;g#HN0yyANvF8t7eeLWVh{24DVlIS0PuBx7OP6VwMDF9QdV zkbn>p(tc|$d9Qf_*JAL)v^T1doMltLEt*C*q;EQM2uDi^!qgDTzwSgc#*O>om3_O` z-k~JO`>Z7jT;&t%6d3E(&c9=I-HRemu4^OC zhg?pjF&(MkL|v%c%L~Pd`WcTC;N75amQ)gXEP%j>?J?(y;i8^_4CZK2A}%Kii)n{^>C*Cy=b9CXmC#cV(3k6F(d*?y7Xz;miZ{pek; zIQHH`?{qB`t_`?Q%e9x!E`TV42fVuk{yc@o1`FLd>;t+8#^lA zT3KWk#+5@=o{2|}Wo+zb68&g30aKedUO>`zNnnWT;kSZU?2y$K?VeNLqYBVJJ*0Ug zVc^OESqc?d^x@d@VB!TaLa=fNS~sG z63vY%$fsoJ-|k#k^vgIp^95nyALY`3j?9JMYs zA!3iW*PsoR@*S|IcjOm>kmww*{TkM-D%A})|d z7?>!pusF9c&&=>}GMI*+1P~CI4hoG*rWvhzKw9r4ew<7v)vEmtg+irTDIKyx)v_$> zh3l+bqKVFmmNLEn0OD!L>~->j>wW`@6*NpvKte=DSd?E}SXxkGrhjlicG3d~(&Ul4 z;vF!BJd4BWda#t?gsjAz%c)0zA_agMNhw|;8FK&v%b^=z%>cnOMi^(9TnK|&F06uS zFc1=l&@d&QP$V3M$Lu?hT%lMdkwxbd8lAypHm)h#nye(jJKek>OTPWYG{$gP-89bD znR}Y3wexl!JASWe0;4ycPso3Rd5g-B0JVCTjIHtil;JPWQpV` z_l#2h6N`laCW=u~vRg8O2`@88r((y%ay+Vvb=pKZ1xvv~g4kB*Zh@KLA~7`2EF?S& zf%uFFTd63iU#L`PsxG7w=ex8+2X{xrm#h?H7wd>fBI)N+7D~^X#a#E{(~AEP0$> zJ#|7kse72fe$5EBKpaZx{}8Pcc7;+7YSU~RNj$#P#a%5K(a0NWQc|7Tn(F$uhR)N1 zZTt+M>pmC`W@?Y&&6fhEj}Z_TO^)h2j`+!R+Vs$#0|##OcJFt|mB*)!f`a1So#&^I zKW6_lVFc+1Tqy%))InQnvd^C&JjngSD;I-S9Dg8!3+1QT7Siu!7ml&H&bM4n8T3c2 z9|9X36qNQt;E$$&s54|dB0IQnvp>oq8pE(NjI?qSr8kr)5CUQWxi_5^P}Cc&_deeq zbWf}r#-_h0iZNaV?1rVQPUB_bB-~Xy(@w=EvTQt%B?0tE>bd9F_!9R{&*3l&coIo* zXP3>FjoRr$SBYH`s2b$E?ewpA6vy=e!)>N#(i^tK?w!{Tf>KJZ=LvT4$%3TyDTH%- z8;9*?Zy2rpQIwuy^GEVh z$$jzxe+Gp+>3S`M1E1;5{>S@rls!ECv_`1&GRj9tDTo$JlquAs*+lrG7JbW$$R%5< zR!u``kM(zTXT7D#)27kt92I6Gpu|xSNP$3LHD+*no&N84q%?*xd(p{9muZJ{YueT2 zX@1St?Asgs@W@MzirA|&^EuL1eKpRHh3y$}7%aoct`t~g{9(Ht$Od|RQ|+%qn%Uml z$Xt$V0spLT@R@eMu(Tw<#{~j2XKV0j`(vkaQaC%rpk1$KK(qEPT6O52n;qA>Z=JCG zuRJUg(SBV(MI6kn*J&-H^+vaab 
zM`~T)8|cB+rbEQtulBA*nA7i6jsM*1f<1WhzQ+N801_%-U=M;6GNSL0sp$@sAY3Z{ zdz#yu zy=!@!d*`>e_wha?p&yhMrzaq;F05R=9o_W=1`dbIIWx{t#z8%e41|D?fMz*>1^W+v zX{_f09v$Haa{l>O4M>RS;4GZb5C!@1Hw+1&9ph({fM(_MK|G9!G*&nZCT&Mv@~<%; zIsNGU*Q+UlILbftxer;#33=xy;~w{2k{c#=yJ`7?UVS)v+`?8=dkjDT0TVE=2SEZA z(|^FyatA^bAzg4Wk136oHHi)(Vi=)ZD_^;jMo0k)8+LRZ7+4R42y&k!|noaq8ozkz_ZRY1l!>xDJuH@y5%!9D$k`u>=-R1oS zBs4%&X!x16KO|+%$$8@Mo)n*)n3}MCa%V)i#`caPudJx5pi;#x4(ED;AQrosbS|gD z;i8(U7m)vct78O0;O4C(3>{&{^yW;Df@b;fpIPr)=Zuf4^6Lt}!{P%ZM7$?T=pj-Z z8>6JCDDP*{vJubc*q%|ny-I>&WME>nqy3wg5}wlU`ne?FH3WF7ShLF=F7@V-jE%Rm zQuZhjvh)BA0R{O-SXgUmFFj)Dg>jw{GHvPijC#4)Byqd<=ncoaZYR3?FXq2W>bzF<7cve+K?oIN0Hy&ThGUfVl9L`OhEL!@ocJsI6m zWBnvJ7Z;b5m>iQ;etan(Ju1mBFRm`Ed>PHL_mrFFcB1$vamYS?s+|8M&S4|zrLNs>;#@djYDq~z;M9Rizw70q2xz0*-?@cbohzU2~2vlcE5fIp?7f5SC6 zhbKHnIOQ-5G&Oc7;h<==@TjVhK`e&kXZBqty)+$CPDw~UL)2}+8}ntn2I zzh;?(pYkcn7#ZQAI1R{UeT9=P)#9k8^0Bdt3x6v-9XEmN3n)lv5FUOu1kC&QOuEY? zb}Oap#qHn7L7D>u;ozFW3O;zR-{r$2!$%8yuCsAZhnIP7X-q$f578(5)e0s{IFC}3g)3g$3u>}e-xH|^MPGPFDa;zUa2 zo3gBBLg$T@msAQ_N%` z|HkEG9oO$|+=5lBsX9tgdU}<}$i&D@e}^*lt1KNky<^AzzKu^TywCD~DmYJJ*M|S( zbTUnZ;bG37gr1#J;$P4_G);V1SITZCjHh4$iyFLzzFQ6a1G)Z#BF}1xrOD+9Hqwf( zkHU_i6p4wZ%I5kfGzR0?d{ijba-g`P|6N1o*7g{XxNs(gy`}@sq3{L%FRm`1FCd`- zqC$h~Yl7>Yt5Jb2*t(x~-c-X&S z_M)96Bq}Q?Ap%?aE>?pAP?pygBpNF6570VVFz}t58XKJKRndp12RA?RzbUn8d;Rm{ z8H9zOf+)J>hK7GE3gC&Guk~MXhZD?`(7L~hk5N%ll2w$KL${rNQ+S^j-gqFb{!77b-KKKO^|7k2W)2&! 
z!rw|I?UIGFICzBHpo!Ulfqe)RFtGxKa~L$TO3-oyiDM|0csc#X4ISPA@+AKo!bRc_ zJ0R)MA_pofEG;hYKdi+3U#e&}I_WfKFj!FLwqe|6G+N2_fKH{+YWU!j?b~iUaefWl z9JkFVL)>)nG;isj9Hl29p&_DLu{G#q{&(z~*^XFkxZ_3tOQV{W=n4I}T7eV^#o~m- zW&b!SutnwW#=i)*c)ENoUqFFE0|BpjN@8c!=ys$Hf8a{b?^`lV|NC-< z8IE~EsFi$!q)kV|V~YrmNEp1I3&M$yGI7L}PaQh+OC#SEUhJkspxBKZ9T69R?HJI- ze`66D?3A<9@Os>a9%L_|y}9R!4BaEiB5wT*=KcXyKykz&4dq{5`-iSi-`oyh^rmFS z`!_T?nuk$3ugS}FDwEl+KhcC*^>#*C>sVvH;WKl?Wsh6Mg3LAiT2;J&;Qx)ymL*vZ zNXmaxoT+$j)675Mc^q{6P4CVT{2zir3nO>d#;^V4zYlMB#v4oL%x1KxESsB~Tl6dc zt;bY~xZyG~^$C4b!i>wO_L+kD7piy+dhQu5GXL`gZA@mhoCvX2e%3!-$$%0X>WYg0 z;y3pJ($~p80%yh)3w(e)IsBUGwC~7_ znwsQsvSSG14n*-fjTh(i71))!{*n8!iubB{4kEEvQ#JcJc3Yx%{B!D!>t*(Fd#)&# zycVty>q_IS zBAw2%ENf+0@P9pDEb;qRKZH+R;YX-9>&_Bh$dUTFUsy!tJhbJMxxi7a{X` zayZ;BGFjH`rwgZ(wz-F$VsrE6n&`-G8PE2gfG5{wT;gB>{(maRa+8tq-eoZe5BMLM zzU_Hc8WSgw4$&Y?rADE0jk#LH@n2s5xn@Muxyc!t_@4rw zN4iYPJ=7~!0XM^il%TZ0T#uOeo5f2;4=O@xf~wMD@Va%pskA!QkH|Tsz{w{f@@KSf zXaH=V25F@@K0!%dQ6b2pFiZC-&~w)9EAmIAl0L{_d>2acV#OlO@jS6(4P!2)dN1ZR zFZ?sIv2d3AP$rbbOUeDzeX(Zh>9Y-&dt1}h(PfU>gQHHBS5?0A2ZQbuUK)t-Tt~a4hA8Nlp&N;{um=f-i?C$ zKfESeYYKcT8!u1vuZ$#FBA;{9J;B1nM9WB%SK_Wd>AzyD>~Vaku|nD49;7h3XYT)7 zeW7+bMtfDl8~-O2m0wkONoj$FiH(t!8^sp`%3?ai;9^GzV#jYNsIdPluY#Mz)&I-t z5w;(-3uzs0wCYNudC58lV6_U8Put%$*;`xM}BLR zC&a3&#PD0xi(FH)@fB|1CS?$Z27^mkDXEM?QUcr82p$N8!1|>WvYUUawKe{4CRCUg zqwwmKTJ#Nn4E;{!GH=t;CTnQ8_m6SbuZ|gF-5$CQl1aFJD`jNfAZpSHeG<3fTb;Ec zYef%C7eot6JvcQ|mD8I_O>t4+zV6ltYzi=+yW2LAGJRXAQTP)y-Osxeh+10sjJRlID1aI$!T0I?M4rbeX z9DKYKOWph;CQGgAwHc=RWQCeO2-JSMoo6qzXtK zCif11Nj$=av@kZ(0qxc~+I{or56*i7Dd#cLq#J2#AzfcT60quCv+7mE?$Old+3Du4 zwiEQ~nfLnX4{rlrU~fFLKgf4b#D}&>c^(g(xbOjs!D3BP^k#`8u~VWwlH!>%%ldCK za3(qyETMGGaxpfQ&T6(3NII!HVafitNXZAYwVqyj)@|#&q~B=h#VmX9HOdv;uDzj5 z7YjWhz~W~aaYAPkS5io<*{G?}{=@WtMf&w}f!#uFMAgBcCC%G}_U-+RTiyw9pi|Ka zI6J|^t_{Pvg%ig${;k`MF}4XCTcBVE{W|TV&@auDt*d0}Ja5E&4XnSg!?j6d-56G| zb+MWav23yMEzGgY4yOC(S4o(C4+K_hn@RmIr^d{!^jDWAY?COh_02Er2{F;4ZrZ*3 zxn`?w0i!YHURu<6;%4zvV-x0c@{^uH>c&6QG1F1i>$EgxEB$DLG!-OTr&i|vm?{Y$ 
zjq0=-o`0JoC42=0E3Av?V~pbs=Nb)|Cl~A^RAyI}Pj)IVp_p|H)QuJXjOCQjy^qP5 z%LX)YThw3NjhaSf4Yhb}W}0XrT6A$8$ShT(TffMxjB%~^Wu)Hio8ofrh?Fq(Ho2kE0HRl=veTR@n!09OLi&(2 z6+VRFtK9j=VTb1x(0_4`$QlQIxm4R~vWE||@k9@;UPK)@cLFAf;K3yJ6BWR1Wb2*BI^d9loHbSJAyPT#NH(r5L=8vtv65TY+! zqbM+K*ZHtG4l(9k%(duz7vklFbnhd2718&a>@3hDw4x$VwjJ?f_iBH+_kF4NbvhG% z*OA`n>$`L3?h*jRVGWv%6QKTt;F4|P1XAoj?PadT+#C;O)3Exy=1+tY5<2!Gz#jAt zgD<--ou#}rNj$b^u_*A`UQ2!2iKGstIjVsVOc6ZG!otfb7H<=MaS3#|g zOzmp@<%|}M@s|Oy80?1rCu>yL`RnDVjmYL=VLen#nX1NXk()>#S%poII9f<6;#ia=9DG34#Yp@h8SlcG|PkP9QGhA%MX-t>~n%;!(}6? z{P4AtO>+Q2bAJa!qMV#4QkfZWE(daDl`B+8op@ass8ou(yKIb{@$gbPS@780n2e&8 z#%Qosuh~LePZ0_F{^*E6hS=siIbJ!v@cj0);JN!ur&Q`(12^OP@~>zWHt+$q`orp@ zmsw7-y_Q;QYeM^1amE2G^(u(~*2uICMSNa!tjV<|qsiB-S5rpgtgtX(o7kMT}XI9}Ug#XQr?xy@ zHS|l;`x$s~(D%GNh~va8d_yxsQ)6=j z?kCK7(=oy9{vSf#sm;=^+15?lc4nn*+qP|0+N!i|+qP}nwrz9Q`(f^Ru&%ZK!FYP~ zh!%0TKH`Z=x+~S-fBIGJFCPg<*p)52g{qUf!y;+@dTHkitRZ+au$3z^_g>sP{!Kmb z2XIzlJ((ku+b_xO-4jw#5D!7f1o8LZiP#)y2l#9Uiw0T`&%!|Ve*wW?#>9IZS!9*K z{hyO~!PRB0Oo*;G5&hfjl>BXW{vx`&@i-+je5+!QE2z$V%`kv#?L|rXWAN-Iw7DKzUD^J%_y(ole!~oe@%^01mlqmdM59JK7!n1+b zL{_9vEZ3U!mo4?2cNyWyjB=4I!w}YY`zbKbZV#0tozT$P6@e~G-^H$D}IZm zMN;w}t$Oo!@`ZD%KH>6r>-76|o^`{FQ_vdNiyB#5F(Kcc)$LNY>9Vl%yNCMX3=3`PGdjL#OO3mwPp*f#A;(L)q;GO5f#7+GhK zO3x-iq$B`ej`^|bz>iCRpxmMLxS$bk=6n>zQHsTBLex7rMRc())sUkMNd5(1$j+H9 zZ*+M(fEgfv~GRsB0 zf-#{Z%^+2(;%a^vX}r+kWw|pf*aU3_5xl!4f$AgF8wI}rltL;(xO^j+{|iT4HTXIb z`I}G>5{s`!h2Zb;!b<@ixF?R-Z~Z<^3vF$xnroogc2e-^eHj$ll{tz(I3TknDAfxG zEFCbq_937*N^(p#&0@g%K3%x@+f;ta>D4McHRkGgZIGH!^gikusiPRbRcQT~9l_sRRJ}JqIu;SulX>_$E^fillODpUrOPco6iW{$opKjYfWJfs z_eMCl+YVi}y<%`l#UXM()YiDNlL>3VEp1>B^lrBd)~S&`HbBZzvU6Ev6TVs<5^CPDd$?ZS@RL0pRqm$qSz3 z{V>82A7v5%Q~amgqC?)!%@_a55Y;Zg{6APC6c&03SUwsV5_BRV|1ccxZhmqksX~V! 
z#ieYOgs!nRXfI$PA|vF5F-*m7RffpVB zVxnjvIjbF7lEBUjpvm9>Ab!8(F>wCHOfrknviXVQC>GcEE{gw4NX=9-PyQjK)N$je zl1qHuW}F};nGv{~HdzDo*nb7W7FhZ>L7*LBXt>G4n%xFcgiR>XKX^~6-mA`u8uot+ z1*YY{Ct{5e(KYV9Qu9-cRP|MYOp*R{6h;RqNlFWP8K;E^UHP1e=%PNWfABF)`Y{vN z@N?Z^55$T>&4tNOhH!>4el;P*?_Z=!`RLvc4)PwX$4#VEuc0C@qKivhV6A7c*PAFo zq-@sAj?5F@l7dDU>JrtEI%8+=(TmiQQ~PBT61qW$)jCkl6lfssyMkW)psG`dw-OW> zAO;YECV`t`qk;U0GMSlW$br8g0&pVhkMWzSlJ_2p8M80 z{8%MX(F_fxbdBIIGj{FT>zxdsBSp{uV-uzQqq3{8n5>401ut)ea&L=oN_asSm5zO6eW8}pasl>nYamUZM>mP}5nDJ1|v#4?f;uR99K zRtoNa%SBiy%l{CRY@V6P$GiM7{YZws$-n-}Km6B{{glZ64(15%pe!M0U@~hJIb{gtzrI@aa$<)Tutmjs2^LY&Or1W-P#NKgeM}ms*88 z9^>sL|1pdF5o<4698W18tK-Vx$`n2OtvXM?bO!221OvrO&Vyue9|!j`3Cp+@)LKyL zr~n!!NE%R*7M$YxK%P!#_SL#0L8NIvS_De;(R7A%?@_P$52u?z%~ z%J1aBd+IPjf*FrdN#Wv5s4k}a8r~j>00WXA8 zVzCJS@!t1tJKN4yB{;6s*rFgmBmY}j!z7If$7)~<)iN*DK&o6rXZRM9@1eiFlwjK~ z&C55os&aGxrkex(^si`~7BWjD%#vxz2NZsOVT&;n@VdCacG!w}?!S~DY}z*d#h#>k z6gbB>Hq@*Be;UVCr*&yWg>e3<}rLS3efgACHIV9mNz(+>$CI| zU4lyz6J1Cuwp(Z)ZD2J04ek!px!@6&JTRs@t{IN&0|^rpXsRGL%hzUP=;fEiCPYhS zET?HDjZ@>d9P^Boc*a`l_OV;Z1qYAMjGVvLba7M|J5~Lx^Ln}cI8IVwH!gRiszE1V zAREBaF+xsnrEqEV0)u|i8CfnzEXZ@R;1VeK(!t1c3}N%WRem;q%0p9wMN+dtwJ?G} zCDAT>aW@;-cR2h>!QZ<8cZ_ib%rJX@!7)EI*asWCH;t(sr~0X^hD3{*zZPKge$1&T zTfD7mpwJpapELo4lhyJc(4KQnnU&jI-=Hug50Xa$Bm_}rUH&0h7Q2ztuS4=j2!B#)F;oW%mxPo(Wb_#o9F@7(}kZi3yB&Hkkk$cRFFd4XM&`@$}9F8B28RrZT# zFr#v-GfwTE z@Oc=pGVy|4ejKZx_l_BFAYmWmR8~XF=-ZYh(M6CxIgFcaw@-IBEp&5*C{@=ms{l%4 zK)gAhU#MoUHWTX-Y%QO+dWcRN;f;NOPB-u_`@xm06E++WKXSCfs6!s$G(ccAM9e&I zg;u&ec^?|MPYKZx8}{eSL_uiwQuyNwp6=2o!zkdPB}{#!*KxN@6oYPgo*F2^cI~#) z#Co*436ZaU#k$Ynz3^#T0By~2rm(FeL#{ciZ4(%*ZTM|$YSgr z@+y!d@jikqG+qM15a^o+bY>q9ItOyw7ighUi^IuWX|OM!m`uO!I(Cg1IQm+ic2!N z8I_`nd`6*gd#4s)S6)Q*d7^(h6;uBYplTgfj)7^~MW*m(o)!FaAr=S8QXW~$r-}Fd ze%Ty4+CUamwz!WNm)&8LWNf1?`uw3|QOCVr?(2~*uNxVH-x$?dYgpjZ1rR%>nYK17XGX9`@cve{nt%k$W%1>C*m)fKL11--WVhm zY7ZG9XTozjCEF>HAfCDO5@2~ip{xHG43;t^tQOmOK8~?1c&D?NjfJaNvVQeHVdwC3 z34FQPmOI}NsP_7gsZmj2N}*$Kz` 
z4kr(1m!<%9l>BqDIbV02govF!qPcrV1J|IZ>bH;O?s>s&)m8G_Yj_+JrDb%XdFWu{ z*no?>I}5M*<4aBT4(Wn_>JFQ`+m`1t1cpIfpTTM0fW7$CHKgrb?W^|Zwssh1P9FkI zfe*$AV`z1NmX@9#c%$tMudy_iIy7HLYx7|lH5C-iCtGTM!zH#G27uFpA(ni}laVkW zbF`q|J@?_Z@BzkAg$;_4kDJj)jzdW_j*~P8bHKF=5l4k=GDL z0mX%wra4~}q`3cx@)kX0VmZpv*N-#$a^ESl5mJPzU_X~R%5X1?kFPzFQzWdGad~P-S2hICL0CqtDFnA6jGA(}9A-Q-1EY+HsFax0 z$Y@4z%&AXF$kG_ltnfz~RjT7iLDrF5eOxG9OO`wN< zqHaBkv;cDfxk5Ew%h^*DT3%?_(dhyJ4LLrKfDMI`tu5OS-Q~%w_m9sjiUkKtN_g=m zSr6$B=?$5Ww14`-j;FT6aNfs{d;<8}Ci>>b7GL*p7EwnJTT=V1VQxLH(EG&V`QO6ILl0hBivM+K zy`R7ot8@Z%>qT!1_Q}TL;TVmhu(!s(TNj(&b)4@5k@H7uZ*8?}eb^&PYbBqM^u#rO zk56m+X}d&gBRxSeVu^PIz5B67wRZh#k?!^BmF-Zohp+bHZh5jDHsgAgvxll9t>#l- zA7NDu@L!=+5Qsn&2q*Id>K8^xnBJKC`!5;N5?o;Qj{=-&D-s`Z5(3zB+W|_ zI*FYsW%Wf4)g5?C=uc|cwZV!57!2zIRD9RB?e>GVYo0M?S{Ax~6*h=EDWV>0mi!K~ zPR_GK$LH{Jj06vh?`^%_R+X0I)jN=}Ev_9+`5cLwg)?_SbYB%o_7zUpJ^y^Pa`N539&>KEVIdi9eb% z1Lpf2E%^)C6op?>6pNL!+tI1h8w=@7ZgCDW4>hy2+=tlCnKvW=`p!?F^74umrxxR( zfXk{{Y*O?T^6ro}&I@TJx675+6ggIOV$LJpD`3@y$_yegu5y#`UEQ|W_jizdFvmsf#Zk{vZK$7H8j_bNSK0%Piihf$BqIj35pGgb~T zXQ<}-o9_#!^UeBfNt~@lnUl<9=n+-a7E77~9aEg#ToMrpW)g8oV2{QmhO&%Gz)?=p!9mF99=Kc#r!FL}dT>n*ju&%BKba%aUYK zoUDu?Zr$bFaXo?Om-7rS+IvV#EF7^811uCAt4u`QTVnWaCrOZ8_Y>9vWN9 zV55YCqDlSA(Q{Ag#{#rsOZLlOFUDtq&&T0S$g|r_gXx0Rs@>?~vwDF^OX{u9S3p1K z|4)7OhE>OTzkNTsSOh}DdH0UHw*;b$R9Di1frg2Mg|DtedA|h=>vwxFhbjA$Alt>V zdbz)sq9SNk9y#4oA_$bi!>9?UDyj{a^W%PS#5up>kVYa&g=3|v#v9XI{O=>Rq?_At z`$0W#Q+23_7gT75(`qiV*1%VYGBlq+RtSTBkx(Moplu=reJsW>niqiL+`7*ocY zuDF{je<&&KT1j5=r>UtzxZ08Lw_JZm-aB!mlr2Y}IiEck3{&ICv|7U~*fp9f$V*4R znAf;obojg}!0^t}Kdk?6nW?YN&Z{d+i>kt%9vgakboH8KsN+~$w1a&-k~ zWPB+JF<7*2{O5EL=jEaMWAiI4LElZd42MWmSd*Bx-eLYCt;PZWHZdg2#sK*vJ)qGJ z-{%Qy_Z)y*XkcdZ~Vg1f+751R{E)pw@L z8Y<%V<7;3!`;#I?JI;~;3#XQZYm{N9)c$(Ir$281)nx3ry?7sC2~KM~FPIFb{oyRw zw4Q@0fx0x&mo*f8vYo|oe-<<#{&Gas@pDNB5?yz}G)eX>FDkX_iBYPXuhS&DwI@5k zo?lvyFe8KYimE zd|Tfa;dF%oZpOCKk0(u5!y1OBva~hU{UmRrx)?iuO%5E;lR$3mUZStx`?}rd4%1qV zi|RDECwVUGX;IwZy^P?eCDVZKIuE|ybA$e|0Q&P4ojeOQe9(ZZbQIH~)Q(&>0EWxb 
z<0PmrCZk&;i_dkqEmmMBy0^qPN`p@tDyYCq=MeC4{J%1f$H)w2Y6|1%&g{yOI)ah0 zXJQ5w1gilIttB9wN92X-weHtJdSvc3lRG8#hvi@A@8`V6q_!1>R7@6&Nj?f{g8;1l zT2T|azCB*lXwa{aRz5;_kOby3!E1&YMj3QqE<US;ly9Oj&OPA>5Go$m0`u{zrQ`SG#mpt--WqyENY6-G%ZmPUO^CESBYel zu?4F%g?=}mugN^HjAxJ5Y^l(iP|&##S#HP9XhD%5;;J~!$-s9#-i0~4?DmNf$QK4G zj+2YITO^a|`3Z`ICy}cZYX?cH{300R1sH8UP95iA1`+TEfe+X2FBmWAyc?xz4sb71 z#@-R@yba`Y;R_R|d!y3lKcYMHzs|jiIEAA;f%B+eZR>+QUQDgGq$_9V}=iH|N<>p^!@#%RP_ zb9kAw=c!@&5tR!@I6aQO4`l!B;H}dDOdqM0_n9<98FIdYM0mA5$7P2j4r}LubzS>% zAFbyZ)yfLixv{H4-~stmtM#zC47B^fG7Ac<0Sgaa;xHer3qyRTP;uf)&gTGDP<$Z8 z4f)}HBFy3yQB?bQv>-Z1AR}7-?tYL;degOI=s}XpxEeI}>$fDoyA8$RRqAhV%HKJ! zP7jKbmM!J4d@qP){d^|5!D_WQ50zibYN_EP*@+@LD+0XiI#t4%@iNRw)R5%9Y6ui6 zz6^*6^8A2+Wf~=)?53tza)!5MD&S-+DExo|uzhdCaq-=gfX?Z zYa(KcNrGs@dKX|+y3~banm!lF@UZYWxF8!26Ez%zM8ax69l>~=l!+V337oNTo|v7_ zRlh_s(8mejVF!K_)P`KM=1g6R;N6qC&kulT5)$QRJqd;)XwQ288T#q}C>7=rE6m-iS-BIE@ecfq4RPMVO}P z9y5DCX5kU?;BfyQ)qj00XDQ*@wu{=x&WnA__V5Qov^HEJzAB8}JCA3~CkG)?#!%J; zC4;ZNO=Vj59gBmZN0`1Pj2uPTa~32CV7oKyDLTy^m78FL)7`WpqXB7ub;5a~w4e;zzb>5%nlK%;dE) z?c*@1iCez@8Sy8X^|}Xlr2*IiRz)jO zbuk{yph_KEJimH=;$qG{U~%e?Y8o5Kn%-A8nGDatgKPBD$$OHd#Qq1YaP%gkQK4!E zA|GMZR{Et?bogu8(f~rNvKMK%1SH?!(evF#x~asaZq+6(olyX(W``IWa>_5G(4!cg zB-Bs(NpT4$R2@e{9h=NL)^@c@L^T&Mn`B4;{(!(cfK;Gl{#gX-)AlnkbiOXf8+bJM zxK(4DcR+d(gmCtzvBU6;(!Rfo0dPVV4yJ5Z>}+(s1v*O$8_U~^yUW_T8vaC{alASX z@33oiFxng3ukG69-i9C#x1gsiUc;>rv7s(p_=0mh==AsDAO0|%F8tWy>hmqGnSrQ- z0o>|^_)I-epjhO!fX8B=t7?@r>bZ&6e`(6n5&u*_Jl~3u26cIYnnp;Q5#v$W)b!+gtA@j1JrO7H# zr$KH>pw+EB#%=>WWq}2S14_AL7zWkQ29qqzG(Fsj8)s(l3k05j+DRlU@)U|xb_)_$ zSe9#6)d>V;E+i*EpqDQIC{861G6IMNKW?v|%Pb&7eXDi?Y8zcfeL-a1$?C&eL}K~8 zGG}<173ECg$@=bm|H~1pKO}$yeV9qh_Vu+{H{8Pe1OB+wR zJEyPHc=g=`bP2ANJ5TfdL(eq|IA6!H<4k%*4K&# zCq-|_4xxEh%M)BJmeR%Bvfg*9W{p+W-|dUfHnpZ}EG6D}B`JiFv>ezadkmU4h$>ST0r@oqjWOfWya^yXfl? 
z(20_oj$O{1RLGZQO_#StF$r{0m|Ch+UM!qnFf~pyOxJliYkS?aB2WLmpLD$OltwWr z6}ApEKO3KpC2{41R`QyD!LxoS$RXNjKDNHxVQuLOlF2#Aps+hQInBoD`Qi>~{#`V3 zC4RN18S_&MYHc_)aQUTKiiqSv$qCl235iCkd8cUD5#Yq6pH~|(eA>fP)i&vW)YDCT z$;zt|<$W7c@a;IrP_pGXrqUA7Z$<;rx;hp(XH*>KGtW?M-i~RUD&=ZfY!C57R5j3< zb$9~fWA?^QqVL`}G5m*M(1f9ywDC_+JK@uM-T*YMEqhn;Q2@-;_y%YZq(C;^I8v}+ z&LJ^S)dsBf2KW_Mu8|?9lYGgbinDl1^!Yj@3of0~i!A1vbCU$d9E(whwILMg%WNTD z>ben=A}bHcf+8~=jpG6{8El4!p%*J!v6+#}vWc}&bMukDT=nxiDgMsU$%dyAQ)z^% zvE-#|#C*ze6$)L`Z@N`(mi7$OVngqB#-r| zf7^l~upRD2SZjagiD`yv%JQ6bqHvk*Ei1j$7(;$E?F?mm7i%NRL!kW8R*5?|WFfJT zT1pa-38aRhi4NRA`mL3S`*egw$*gKoKCcbSCM6ZMtZDtkJGnfU+kn^&nWv!^ajnV& z?ScLzf>0pNVC*B6OA1>EQ#{9bsv*>-G@?E{X@m+>gF%5sgGq&5lUkZyX|N(wwa@Hn z_1<{=&`yWl-R!x6ve$D)!(_d~rEjm4s70u)h6EE1lvnKt z<|0s2_$VQ|WBeo~ds7H|9iOMeK)?3}s+<4SvZJjfd7LW_{#J3vY2M`;+W-L4pmlDPZ`?yl*#!`*;gX(G; z+XAEOAO~a{>!2gVSDyZJVsZb$ruq$idgq0_egZ1rxzFSQQ~NxU*OCQphedpyxvTg> zX*Zmh=)>cGbhs3=moZD}sBSyK+8Sbb2v#=Uj#SmG#L;yt!+dWnCGCQkW9U&siSRZS zjRHCtuhPjIB{;^ab$fJvG$-mT%DbErK{4OO&q;>m?Xd$qLMi%1GnX$;bbnE55ugb@G;n0fhJGD^2J7Q!k_j7@fmYDR zth0xdW`G03`CZ_NJ+ZB2bopB_R!mEwcUf~%|RXQC3;JN@vn*$@4=n|-Zm zl=PtX=vG&vB)W7)>sl|O`{7rcA^HLvZRZpTX4dw(L2yZYqahTd-E7cg*8k_Ad_#^5 zXf;}t_=An+BAEmc(bKCpS&DII;2UQ;iSVe2VUS6GTdzvI9?Az2@su}8_*`FHtdD$i zmQJC5Yd&A#C!DI)&Hb0$2$2L*eR0U{;O<$dd+7HSI*dYl7+22_Kyhn87By`lnSi(B zTO>+sbQ%}zj6Y1u2K=<5cvs^zn{(DCE+u{k$%JEkO6&$Vw6}GlzsxeV-0wa1kMlDc z*R*s0os|?1UqK7|XL3{6V=6E%L)wVTOHYpd-f%E-$7LITqUvFu? zO!hq;o$c*Zq341aU>w{7N)3LAeX+XNhH}wzo26mnh*;-T8UB(gXx*w9%b6{RUanl<>0P zOsTldPe9S&?3TCAC7czgq$IIFR9(S>AXxd{z@ePanm16F_8cwY~KAS|Y8 z(Gy(np@4IE2)4>9i?&QX1OQ25g&|5Z2J>PCGBIlxzPok7?z(i4TN@{%7p@4Rjm>Y? 
z`d5e-tO1v^(6n)IoSSIRl^T_>WBI#_l!duIQVWawx2|_T!$4Jw_Sdk8dtwhSz!081 zP1f`l6C12h=+9p`nC$I=jqsA#42SfV0N4>bPV@xLKd}=PYCs6g@H4IhU48sMPA{GE=();2Cm&}j!lJTUEYXfCXOh9rIe4VD zGVjIUaBCd9F1M7h*a&_rYOnNo^b^P1R7ZX>u`YRLMvW~ z;J+9n7UpPCmO*9r12ZPEE=MOd7i{w*thi6x@SW3SfWCYk*XQ$$Z5&mGD2{)cB-wCv z5DamCoNZnOuua5vyfmm!)*iwf>6H^~o9duZL|j7)b#gW+iD8S!-5)#a04N@Kl3kN- z$CRG?3MW*49%|`w`(3|P-BNOay^(vLzZ%kUrO5bo3)@b z|0JXk#k3RyU2Fw9+pjF4*#Eo#4upjVQ=0`6piwF)Hc4%t(XAklt>#c>Y`)sp_rVs* zx+X}#sq^onh-;B-Kb;l3+2?B6u^~w4o;yBw_8>|ibPU0i0TH_i%YDrm<1cT8^44gW z+t%0Dm4$$EIOPc@&y`3!wB!LV+-?YtkxsnndZZ-IVEz1H(UO^!#uqbzG1m1Z+=iQA zG?K-mf!{%XYfB8^pGv0;Ykp}ft_GE6oHjA%WtF-tK1z|~3b;O1Ek(ORqy<&>-Q%G> znOCg|lafH9q}qT+e`y>t{v}qKCYSI)I;8%TU~CZ~MyS@GtzW(C=2?0PV|vIAMi|#@ zYQz~A2M+wzjXQZAM895du2+YMB`P+gg=*xvuIszfwoPnYzc18D0`@UF{71?|NW#Lw z?OnO+BW!`xb$($jflLBvGHw38JhVKO^9k8eH@ru{-VdMH&;~wL>5&igQf2!sY6{FC zo}$|v_Y7x;epnwvycegZ-;%C9QY3dG+DcpJxWsd7#z~@Kz772%*gzpzr`E|A8&311t$Ag^q;F8*m<8P=rJTp}zbiJbBBbZk0kwOncvWg1T zw0K)z_|&Iqb@q<>&XSB)@Kzj8h)wQQ1An+`8v@TykRShA}%VXYw6ETZ}SH z!@Bi&m888<89zgRuY@hbIpvRa&CWdr80v_-hpf&s{yjZA*4IP}*AcmBH_E&DuPv!! 
z;lHaI!g&H$QXL*p!7`Q(R_a`0L7Ykq3rZX)aBNL{A`EGO<2uIzZr_@o`&;8RbmN@u zisRu2Z16yJLTU3)qQmuQ5LD$xK5ztpi&L1_ixATzNKbbI%A23AHMZTpQ{NFs3VntK zY2Br5G}?|auMlOCS5E|X;86WcxUuq%CwS8&Y@^34e6ukQ&+e9n=)L>~GmL(Oc%WwLl2uWjr@B7U@RQWcm z%)*NsKz^B8;!kPK_a660XI5%~n7y!vgv=oCUOzr<=U5Jix$T`xFa0gDr4CSQxz>?@ zHZK5(vDP#aTC%>?od=v{;>_6^A5dOqrx_Edn5`7YGp{i$$7Ur)BH$&{Fo4E{>hf}I zEkyUE2qCbQS*Y#i%4{Ru5G%8oNMfjt%YO9$hM4X~7jw`#0v}AvuUoF$Td!QqSyTrM z6I?A(*I!}Od#MNK@fB$4>ISM7=rY=k4)et(a;F5U2n*#j7?<({R#2KU>HgG9r6NL=jYfqQ z*G6c=SayZZeLB|%MXS}aH*XPFN!?g+0nHE`8&G4SZF8^jbkaK68{b#y>6lD7Hf+%I zNtx9?c1#uqTvy=qiF+fzYseh#mNaH@@lhrSlxpj;8bR?+LRUeRAx36|b7y4jH@SkTGT+j>#`v5C3`X0e*)>rq@&OeVqj3fn;YEV9w%So|7%q}0{h zS;57j)^HnCNnlAaFRWgx(Hdsmxh2v{N1}Edo=lkwU@E?@3L5cPbQ6|2|Jl4oLSa~1 z`K_y(lb##-dRM~ZUy*+3=jwMJ+|I9E9CnvuzansOxJ23K^OjB){OAk2st`p{KLjiL z;-uPsRhFy-!QB(uF_TWbB&EapO#Ov^4>46EKlIyp<7WkOhPevb7#xe z`qc~eLCprLvfWluze?4jM(IK)R_|U9mPv0imNwh^TsKNdU&vMC zQ(`)~rbBC+7$jU4&X~&_CAcAcsnIwjX+DbDI~u1o*`d3XudfUfN!E20QP(WzKGC<$_)5UBK zKTslFjcS&qQZ8)%4x_7(`ScoY)SKe}=>6c;wbed2x8x)7y?4bq$@df19fR)GW0o33 zUSVqI4CC$S7^<=T;;VXV+CsV2sl7z19;PRK0s?ia_;y7zCUNCZa?3NezCTVYc@=37 zl%MM^7psFop7_ap5}H-qScgBH*qE`eS9E{_N}F{{7TM!f&?q83T9UGu$dGDsgvjwe z_&EBshwzK#_Qq{qpJ%Cjn#B2$ceLFYr9iLQNF5M5)K{KFfEeDad&(`&Red>!XaR9) zqh?;18&C9zAr5xxPch&zT~#niGsI}fr_E%FEpXX~J|r^ez?^1}h2~fbowT(qGhel<9KRxB` z#`@fUsKMWMLGMiBX;PTkdV*4amlo3M%8~VaW01mhImCo-oqAGv<*x-B2Uu{=*XB@LW>vD&AF`L%1bH4Gt;=s&g}Smu$im95p?M>tv}m7(tGlE8Dfti;V{R(o zcof0dn;V*w>FK7#f&Ba+u;ED+91#b_P#PqG0&uYbSP|VMlr)nkl(TM4*%unSTypwr zqO4}!oE-160@0_H0Eq#bbEUYH$P{)IX*SY3m8{iG$gdD^UVWN|W>xXL76e%6=M5d0 zyK_{x9VPMkekcThIp%^nv2y(3I8Vp9mvb{Lir;FVKm7j1e913eyGHrl58jVet&gsi z?efOc$WGv<7vu3!27W|ITn7Ot)x=ZH4+-5>2V;~Jd%XfV_;P+j4A&24R+O&4Sz?lj zz>%v^JlRQKf+M`W>;;DstipY%bo65}dZWD8xa*xa8`~5&m;h*I-J0}D4z+PhL<>N+ z4%A)v2bC3;mayHK7L)m@z8SZ6LBlqKr^fUnxJ{@Xmv!O9&by zc~UM$F+pm|tlg>#${R$iKyY!kt!66_pI+xp$X+5_y72nDKXZA=DDY+k&Or9y_>P)nxGg#Ri_3Bl1np?TQPi9{{7 z&JNh*VbtRgP;7T#zQJjy4ecc}h){6wAv-cl7#Ikp(;A}cocbz)+r(H4E~npT8Gc6N 
z<=2vC-e(sW4FE%pu#^milm+8i@kwshH%Dyd6pcw`6RL5dGi`5B7i25 z>flq+v?sBYs@JQU)#>o6GX-fN7bNT+d*&^xTjltBkTd;7{u{=0|2|}U1DSP7te=S&I3S<)*tP@BJJ z+0~VYZRYa>C8IJ4+LN>FK{1yVt#G5g_4qsM?KPLY7T~sUIp0J=TR{(M?$wvRlA)>S&&>< z#*olkxU=n|NF7T4!4wD4$(OwlUk9spp+VSmo5GgWy0sP=B1S5lG`3S(}sC;Ij zLMtk0Iy9_G{|N!8Bh|aaNdHVVU&rjDtErQHZuv-IBF;Nf=`pu}Z#vLPZ)gg~yqpE$ z0{ar9`SxWSqs`9e8xRY3X$1 zE#Q{7AJ3?gh1sBf;1k!nYdRYy+nBWx0!kO^2Fnu=)2sgDPv(i72=+KW!kbwb2w<8< zIF>Y7(G5#^*i)#A`QmHx`lTKld#o_#Bjy?tGL2T^`A`^ zx%Xq&Kmk4m!DB`j8ZzXc{p^%|V4|f3z`a+v<>;VO{Qxk)r9B}tGf}R6# z!u34|`Z?g+Ye2*jzEL)j`Y4a1R;O-MM?c`sCMBP=&w(65(v~{tka)$eW&hJR)nKp=PJevQIZ;bWSvn}*gA`X!xK0lvk zR*c`Cn5NkbpB8Wt5%OyF)DYg$aZl~I`@H|&<~&sLE_3BJJ&6~DN9uX?oHWU4*GayF zyUC@NR?sc0YOsDUPmk#P*A_nL12<4F;9t)RoDLDYT~9*v9LA1$6xZbLH02EaB!4b| z<xt^fj8cEi#8@#j&*u zrLRc_LA%u7Z6Mg==dgnjcSd@!Lz{4XKiMvdj=a`fKaK^F= zVD9qPa9Qe?8_E^dvPca|F=gU(86Pt@pqXQ$MktuW-6|+Ok>%bYoh3xBdO|0>(K!PK3au*?b zwrpnXR;&>)q-K*{DkIP53w%mA8nqCx*dINhbiiCa1?Kmk++luasUqFjHp@}$mz zVd%=>E--t)*AH*lI11U4gG&QOcszrn2ntE#M^0`CRTbdvc$|Rm6~JeOXqfvMytlq9 zMx;OZ`1?aTOrzIb%(pE_0j>H)e9QxbGe5Tt=u-)n{9(Dw`1dB9u^&tZ6!vuuPM*pB zqGz0ZDkHz-6*oAP16S@5<#7P_BqsIo&)mwAVaeZ1J*A!h9BFJ8SDt^2OocbOyJ6}NQ(_Dg?#qk z)vA4bQ;%30{n-PiB->(UCZVNo4^E5)&;w=cOT$?*zunHitKJpJCC2Z{rjp=rTGApj zprR9q>h?Mmu5p$WCC*2jY)naZpi+c+++%`Nif8mOWj$iFw3`myf;bVjP03lOlWgfC zp6-khdbU@=!1{8bL3-6B`)XkR;fql;=#lmJrDZ(Cp{nysyB%rY8%B$4Ox+7caHZP4 zoaXz?RVDOGNN&cD|6qtdSg#M<9OjKGegSlMyk0cWn+)lsA|9@HG~GA&lD-o;6*^4< zNO0Absy7C^-6UKh4l*%QfCJc8&v6QNAH^9y!c!8zuJ5)l|DQ_^Le5H5?fEM?x8t5+ z04Dw|Ge)DVbwYPA!<1I7H?#d-Ewv2W(ywg#p8Z5il@t(?-yBv*@fm%Lg^swwj?#3f z9N$=gj6r0%oVhPS{z=;r3(b|g{c(b`SwV;$^itMOB)^oiUxiwy8*LxF$w{#uv1w|Z zb1>xze?a2sR+F()bmr(a44RA`P#@lg*1^ zdp3-R=2m`HTm5Fun*i_qw)a4oMgL!uUs7h?&+f+B2_OSVe_%x>m({QblWlJ@A=`=F1#;nXuw$t9NC$cBH;xck02$SZrDp!6kkQi4?KhTg~o zdRAnwkhh6KNJFd8(&~0#?w%5b6r1vV$`)w*fcmh-Qq&@M%@({oEl3Pkhk_P`Gfa`w z!RW^nIPm)*{dOtUnE=0+iDj`~E$4x%yD1fQO-0&rsk=bX6R7kq7@HTcPc-%|*-@8M zOArMoXiKY={XwsIdVIu`oTBVH+I_d~d6P00c~+LMj-KRm_Tq5Jk$jEn7f`xX)K+Mm 
zR_azvtg4=$U7579hXSXrJIzS2_fiC6!cJ$EYxcZQJMR*Y3oyCjUU5{`xN#8_=ll;h zeGRllMjzIvorJXkn#imiz2Pjeae=IDRmT8)JjGNZO!?uiMyJeAJHV+!&p z+qVpj%T#3o6_7q=QSTlmq%NnR$6J1hri1Gf5~`r>A}qUUoc<%0?He9tF-y#`n1naG{c6!S2N7Ll9ToLo_*jlr1oka0hb#YkfRcOa3#yC zuY6(IOOz7&SwqrP7Axfws%u`bcf60mO~DCgzNpH1$apBa!CGX;A0*VQJZ&b7S`tN$ z(6Yv*BC2eC>xJcHaBmRK2wpRHQQ*|0(1&T^VmAEp-Ll2+`u-kl&5w&q&@Sl&S6WA2 zhF%t&k%atgQ#{Fz|D!K?2=v0XCq+_Ma#q~%fNaaOpGLr?l(=apb`rQ$&9pP6r&tfa z8k>YU2TLLM53v|Kx>4XEi;aGn0n)zSx2FeAL%{g~a)*zf zQ+O^!QELMXCmC}eYrv6Vj|i9%%6@qol*6-P#DiLJQVb3%4pe$72p@X(fGk8+P0xw7 z3I^;sqW;1WQ|!g%)9rkHy8EPANO-g7j)0m6tZF(zfti7(5l*}FNfUO?4XJVmgsVgWoEo$DN(6Hnyt>78 zCH~TAw(FSXkZHDp7Jk1CYwua~%no~fozICRZM%Iz@ek=|6l>yZ4BGYT59D=pLKaqo zUa>wmJCPoVfehD)}-@wHb?83M8|Uud@fF;?(GUHrZWl&Z{@&z>T zZ$lG`29)Lr?9?Wq?4O`yW5mxp&2=itl(V^P3%?RK&zN-=PT8NR{{7^78A=wRe^`>U zpDMER_`&RGd87u`FCN$>>KZ-`4S>6$1hk}L5|R3ObjBnd7l@RE5{;)==zu~0Yv?(LH=N_`4D0WEGU~;Ma2D*We?Iq+a0J z0ygOnD_9p|$bVl{G=~@DtDL@4r??M9LkIYJ!nAk*$a_ zz2X8H>fzJ_{IIdNMuk2T*i#;R=F@pss8^)VAsC%%oMhylK564V=!Q}M&{_+-Xwg-( zu9q>RO6djGzMpuw$af^4biC`Bayp=-?8hdU-Sw0|>L=F7f6(cI7U^AT@CDSi<XwpPxKdXT^idnb%F1_e7Y%b2ep5$lN;p`84BhQAE9VmaljBU6Rmdz+kln>B9 z+BXX_a|~q6y#25@NG%|9@NAjJ`l}5+aKu#|ZmX2CI!{xtNMWpf%IB~RG)HAa)$GM` z`+v5DHtCnFKgRzX?pT}SD%Ja*Cdw19_+62{W!3afIn^GneK1w&n@(<-Yag&l!HuPV zB8=XJnK51clKxyuSaMT;PTs_MmfXG2&Bcpi>-;}cmoqz*(u*wUZ!rvO&p8ajA_lCp z+G~548ylY;kjVA9yZ;`4=KEiHA7JZ^;X*e2@^eltv+CRbUPJ9roEu}qFIMpSLj4V2 z&Lq(?a{1Db-=&skJ4(f^IJA{>qHpi%b9eh(DQV7rd^wKID`6O7dBvcL7X$%5nt?^L z(BhBYpbL<{)j)A)7^2H}Qprd-2cM!`#dft^?B%XNIDE z$a`I&xKn^}g`4K+BM`jHOb5XZblMPJx}`~T8foHVA-dewXfch>jr%9`I9=>W<{uL{ zMXR)#ak|p2`FCO@Fg_V9kP8tf zqueVj2d@l4K?BCA3qh>Y~ztrPC{hzjv%3#)P@JVG4Vv0mM^{yI%nZp>NEgUu62 zNkN%FXN;JY6rT^^DkU@l@l#)!_!NyeDT2=JNl9&;ztBq)I^D7G=+yy+(%N7(b%tZE z@!c-3*o7w{_+zL*;kVS(sdx(3r=~=qi!4#PwY+M(joak0fHvBcE_808F22s@VU-GS z^s#Q>;oe#=7pTf_TTd+twSi7>WJb<)Ak``<5hQb)(-RHXdy1wqU2w_uRg#h6Q39q7 zr}>|ljCN}+3vw0S8v?0l)f?9X4xQ1i^S>T_jp7g7qiRmT*log6u(m@g@cHbZ)6DM-igX)*59=eckxy^_nl+dW(`2gI|A 
zxW`dfTB;H=xs5XLq~=56HN{_kJ>xrDy;x4br5;&Bnx6Nd(HBM}KAk3w@$B>Ji&1SKSETH6P zyWt_<(JpJYA|^{(JSsmuYHPFhK`yn|6GeKmEhR6Mj?VwzlcU~As?u92?G`sq^#dH^ z*v^kyLxBBZtb0NpKLo)>dI}lbZeWUybtX%DIPRI;<#BP=t=%#^50RaqbQ!sg4jsML zVbQZ6dJ>SqelWgj%0_@hB-|i0%#Ghgz5WgWHE`=^ zG3p#SOJ69gow5Pc!#D)XRtK5FKwH#sAz`q-=;jd%o}fyRA3_mB0=Cl_bl7v3jlrd^ zW+g9+U?M}j{@@ifc>BXM)lu*sr4Ez()nfQgMyAMS=kUpB427Cd_UO(L&UHTiRk@v- zMX_f*$BXwcF%(;=L~fxon(xV~yu6z-wc4Ib2ag#92WqXL{-l2&8Kw(*SeW2CzpgCi7wMmZ*drMOGiG+Uhaa4 zHqo~h`__fZX6@Krw&8QpFg?@bZyqS;mN0CXN~G(8Tciw^tdy-%S%I=WNv{V zn#22n2-RdrAb1|G85}96$bSudt${W zx8G;sw!AC7ah|Mj-h-S+=UD63e}AZE-H9V`cJ}D$I~9lbM$dg;vtBVCp9->3 z+FC};rd%?k&uUZW^VGp?<>Y+k!!QuX`)y*flOOO}GzBH)gi7vxo%_tVU-|YbktUds zTHZN|gt=LLSC38-@$1UTF8$%2W!|{|rYMh|H~k7`?aK(kf`*KVtbJz>$QF&dfJKnK zFoypmfbcQSx*MO$$9J4VKt)gASO*Tcwv!X70?Qc-NC9w=Tz>Q4mWSrnCBY1z%wd=L?ioP(v=&3X-Vmr z(AhPrV>oRj-`%%)plf=23fMCI^IyrV#A!#?G*!EOm{hCjoA~Uas2TFttJ(U!AplbfxC=K&efj96^g;&~ zfnMMidJqlQ(-R_=IJx#saFvmP zc2sIayf1}n9xHyldSJDLPHAh_DN#7B2-G}fK5aZL@BKn15nr6TGjr9X@6gw%Qo>!{ zx!jYEtyaGL=Oh2@@<+Y_Bkk6&`wJ>;18@UgdJQO1qAEgF&Wa^Xy+(V{xLo^!DnGZ!8LN;Q3d<@| zE4bog22)AW#3vY|b;5D>Jk^}?mPw{lsPCfi27Ch$zJ`k3ol>yhPW-n%@bFLhyA+a& z4>xGKQtlQZ#cefB)h<7jybdq;+`@b_JJ}gj4d%8!cND(rcA#P-&3TQ235+4=4Ni(- z+OuzwN!piN+i462eQrV<(dvpRj|^&Gc2hhD5Li@KYHm@NnAYn>SJlT&%2=H=h+ru% zYk3qF@xnU$(l@IZl*85qI=}nx@sEG_X!~q`Q?zL{;Tdv2TkO%RC1Jk=WTOc0W~>nr z#QH*L8^%k6L8dc(#V?dZ>SF0NyLlkQw!SkdK1VOvTdJ0n=fuQK9tCI^-j3O9uIV@K zY}-YB7X^hq$~%x8Wq=1?hn^xJRmOgF)zk9{1wlyd-z0ib>jbvNS}d#f;?erm!hv0j z>s9Eh$K0+-S0y3H;L|dx`1uG77bxfSW&T>TQjfD6`dz= zj$*MIk`p`mm9IZLm~Xqsz)%58Ks|IBYuwz!kq>8njjVrJ`ZgsNkTI-U8=kju=={i1 z_~L(#^-@>o72laZSBMgZHcHC0qakwjZ8E z&~J}o5w^pI%*O>+?#D0O6)o3I?H3-*EnfHIp8U=K0Pe^nDnYU;B4YX{ZnyNvKb*j@ z)=+R#sz}L?B2Bgf2~MqmKiO6bnuLvzO24b4Zy9Cv1EYV-D*GRpdna|<9#>{f$?WSz zA3^GonSf}^o+K5#h_nbM=eR{v3tOjHi$#T4Y*yLbX&He&k7sCzgh3V;%$pTqMh%7LRa@g7cT-T!E z+C{;)z?pE~5m(;Y(lq^Li1GW&#GdC#)+Al8F>9~sC{Qzds$lhM9Rl3sz;8Oh#0qhS zY@lr@{okiY8>9p(sZV{m`~K#CP#!SE!^C6%E#}r%;WV=GfSCw<3S`JDEv9TEm^p^C 
zegf;^SJNaF?9_@y9<@W};YHvDy`inG$)&N2 zYU6HRgy{)NCsz2l?e1`mz#=aqgp>ykA8oSM6Z|7RWwal0pofW>@EMYnh+_X_6{Zpc zl6>$UCIw%y={1?Oqm8$^->%GOevsOX6E%Kyz2tN+-PE`{NW4I!@ia$tPSk$y9nD{2@- ze-8>@$j%Kj_FZ#8&h!^-qtusYHH)4Wi394r;?i$6HtK9?()Qni^u0rgY@;Sa<%}cM zqzM~kR2G0`gK|m!@S_HH96r5-Wy`UU&N7%_HZpg#JYFoFkc&u^!~uvibd)DjEioA? zXT4yAG~XXFLW-txSm-gJm|fH|#*`<}#$=WPcDv#>EZH=|94k}<+sCm{y6g&*q(4oy zFp%NqoZVzHOdM^Bw8HwuOLyrThW~=CGv)@++iybuy~lE{1yjOOv=G!7S@m-l{WV$w z!58X_AdN*6_dJzulwtwf3GmIze8pP`gB94Squx_Fwx3Q~a1fcfMM3<2%o?b67T5gC zuMq14aS-!JPYaU+krdBvPL#4bBP!1F;(RKzZ*0kN!<^bk3DQPk)53#&eD$Hp_x_mH@!*aaH!n{F^rI&- zH(<-U%#y+cIoJJqM!T{=L<_sO%FNFIpY>-UYkv z9gmE*%X$T!V~Rg!MeNsFX3h(b%_q`_<-+a9)?M}~xiBd(%yesAoro~OQCK1AbB4uJ zqAJo5<#A-sBOb#u=4mhFK#D)*=H! zdzpD!Y~}Rz9ce&&i8h~%|7RC6^{;ZqjQZ^NcVtlrtAlZ35737m`0L>84x{Rs5{6VO2;pJb!K*xwXkY~ARPsEA@fLtZ(0jlGfc;j25&U(trOzP9Pt$7u zp5JjbiT|EB5EMW$kxCuSt>cVoqdSWYlDZrdvsoYU$S6h+?6| zuLveN8lJg=uVHj%DeP`YKC?}`Z*hRGb(JF0dY3#gIU$dfS+5-0k2;3_Bk&TJYm?|vp%PgMiYi?>8b7+_gpxZ)TuU@ z{A#s~C*ohQF=W>zqH%N^hPg+Kf9}a(`l?+fiOyj;5O8J7)Q(af`0G2yBpnRA@4oZQV1paU3TR1VnH5tpS3B0E|Kftd{+B#5SR+cF7dkxXwIOhn2lPLjtx zoVcC9L4O}T$o2QM=;vv0!;eLn3b!gnbJNFpFUO7`L=WPwVHlI zFP(a-NT!gAY!K1c^oL)y@xjMk!d}*(Y>*%36HljAJ06MNf{#n`?-RJ>rP1f}uliD^ zaZhvW1EatI15!|i93q3fS!Qv+eGtZ26@myDs(ckBOs_DU!-p+9ny`$Ilqsy$_jKBa zg2JL?#0x~EgsC&HHvaG;RpD8sFlW*TkCXgtkz$FFqqt3Pa0oNPxtoN1n?O&!6X%{~ zFPf@oo+gy8pfx%~8KK-wBI@oc*7q@)vCd>}565#Rw|anfB4ZiOeov^3RafB1w9#D< zdlE|Bo{i)6fEnu|11l>7&OUW|#L3!>103US^L7088ar2NG-dui=wjI2IYMv>`*(Z_ z0@PGt9&x75Mi4pL5x;g|)>w1PWpm$4UJHPo9#@H-%&-8y&53d+%62;k*rTx#+Q3a67+ z8C@XnDdF6q`PY}GDc_-*zojtMgYvBJ~u(|wXks`2dTYO?B(Q((`RR$gZLq&ClCQO~q z)T5>7W82`Z4i5rzaQ@EON?TYGUsnZY3g>n&Q%$T%Zl0F?`pC}->*}5@mVm_Xgp(ts z{gi9fgSl|Q+>|l*>DZ}TLy*PP6AzVCOK4K8_KN-&>C2rT{b0)Sq-SiIzw*Sb-_Bfw zkmxbO4_{SkV+%88k;sfiqT_T*yN+A-Y9K3b>Y+c00m)U<_5`=zNT|*{wD{&*;C+E) zP5S;ImbBsa+rcNx2-TK%?|6rFjQs`wbK6S^awqmHexVzVFGvu&e0W{)#+;3BUcH-L z_GB-E8z#!m^KAuB5AiW78l*ru6n@>@|FZ8HF9TU@Aia(LDS-=p09T*eye@S!XQ?Li 
zdJ^~H5qX6oEZ$HtWks5n&`(DUjaR{FrH%OF>t^)6yXFZIZ6eIOtS-x%84&@#Xhu2 z()JQr-Tz{WMI~KcC6v&YIh|Qxtb{|`2laSN8p#c|{#jk`cOZP#6G>~NT zN9>=(-1Azy!sSYgCkx9=?(C5bHZX|g^lNq1d%dIHORU!n>7|S~B@XeL8Uxx2rwx7L zAbKkbCcn6sk6DW*Qa$v^YlO<%zBxOS0+h~cy8!$@ibn8O{Sv!;zyHdxRM126>^#36 zeCf7mB#L~FvY^7C64@U#U$F41R+-|5Y!Oe!noj2_Y$}r|#7R1|)FpLcfR1MSv6k_f znx$n;`5>Bsq(w_UmNn;gLbhwj@jaDmZfu&aFJW#W%-`e*Y1aC~M z6Qnc#8g^<#vg{v#X#!yc3cVdX0B}?%clcvIzyij#Os~*J#qy9=DUG^RQMv;=j}y#x zW!E=qJ$%_{a(!c^q(?2qneMFk4$B%fXFo1LbI&W6Jfih;z4bghv}NFG)+^vSar!OyFIt1GXr&(C|+2lKtOBx_F}bXb-5>Cp=` z@Y8Pa~tPosMI>)esJX%8gkIx3;T2UCJ8^Zc4_;Giw4uWMtO))+RKmF8cC5`eH zm>{86P*5Jte6=jTUIdz8E#lJEKd%BhRH(a6-%6OAlS3V_wADB~{%U)6D{*p;l`>#z zt9JY;8OAJ0Z?js`#|AX4mP$5rN>C$h;-m^qDaT(10xVYW5gU4AM9BR45F1otVKm!9 zn-s{k%&6Ywu{muPLyL-!^HeBB?{_WlMWjrv!bBz$@QXrWszw;V9AUe%NPfA)A1HI` z3u|llCV&9s69FAQ2Yf*JI*Y&I_w62<$cx`k0#ZtDQ00RptJcX7xK!;alAS8T*)($5 zhcUD25Xn<48Tbl3RpKaBx^-nNeJF?V!lS)bnbOuH*X1ehlz?SzIG9`+VT%4?2^hv_ zJhoERa*Op%Juo$tPx!_38#D*}hq84L|KIZODDw1cMpi@iQiEb@vh(o`ZAKDG@*^Y% zX4uW9z^8^2?~V#(NUeo@osE<$3b^V=sR18V(yKtaqLrIg8_;Ynt6UBYxFfsQXknM*dhI04NJ(M;Y_d~wlqfJJeFXd7>VLM92g z%uPX=rBtHey6yyU{H$WG&Z`k{?Vmf&2OYAoB*z{Z!8>FF9=lQFt1aUBvn_b@y)gT`9lA^I;1U2CU!)dK`9*Yh2>t%X&Y0AZ6%un32l_(v#f$^1&{=e7bE&@~*>4=EZxJlg7 z??vpeYSu00HzR?my=ns7lnezc$SJutd=dnuR0f&x>dm%KganEZIO1v2X*Mvc#3;}G zMP+zq5}lNk<1{5D{|o7l<+x*$g^Wfz5Bm?noY@vHfY2lzL=vn5b}APhW+318pua=F z9U68mK~T@gIAq}@AnTbhI1Lp~NhR%$cAS$tO^M0>Li!UqW=B|%^;Ak)vO;4PsY5QX zPDy)+qo!`BpT&|vT8B9s!^5n@4Y6zH?;N`g5N&ThlD-)VUY~V!2t;kNt3%gk1>X!w z!B`ZLOQC3qBshhl0X_RS?&^ABsokH5-HE5B6F3?^3FBqDz~~?Nt4Nm_%-tmZD$HB( zt(udrpuMkEk0xm{9%1R`x~iNADWR;jf(pM98 zuK&N|P#Tw*te(36{b9rf4V{{a!4V&4k{*FS^e3S#>CUs|kzwU!w6_l#q?AS0myxhV zVJkIr#W!c@!;5Ex!?xM8ro+9`M}t|-JCKZ@Z*47} z`!73l=T5fs-wDNA!E%&zc*{SVbkDZ`vsH4`AwK#YQ*MR)yfy3D)KwohN;t<2fA%4}-O>m%fW&v#pDRl_)r2NQn=KpB zs;lEvDvO>E+Vh&7(?!b*3Ph`&v+_m&G4drk4`~$iKMqwMT7sK7jaj3!i5g<|#4JHd z%;IH~h9xd4n>88lF%a4-GDUyN|An40y%+1@LEf)7;KMykDi9#4RAmD`Y#_q@d{8fb 
zulxr3ne|VRVPqx2fcK12v8nEmgi-31ro=+C=IMzVqJAKYoZw_`Uk{@yqyV`efcWba z^9d*@=sp8A1LEuWhk?F#8+Ucxlh^J~#I3|r|04#v_+*TS>HZSGFS}SO$J(cON)n3Mw5;L^X}38~*5K58FjMiE{+1BEG#FKgbL@US zt;Q5Ak{F1e4XTpeg1v7hi3!CzyO3Bn0y?sI{-7y8@!OhgXDO#MC#vX)hMKsenfZu< zFyWX#r)gJwMadI4kh(Poo?tlvwhNsvP#*(Gl%)XYkP8A@d#uh&Ur7F&0p6xqBrCw3 zFXS5}w%e_~Yo(63;Zg{bFWaGlPTHa<9IdjWFJZRq)*@# zef5~_Ey=y?eMuF9&r~Ht#->*N!omXMP)qxewXkT&+T3m!D#$l4XdAh}lBWvx=?iOQ z6Wnh3q@ud9*#ZBU?0B7glG`nv5U&}l&uehE*>c)lQhJHSUnVv4ocKjq>TSX#Tb@?K z_lRWqR=u<+Po-hy3-(<*=aYKT334qrDk?B6j9Q9{iwLGHdtpHVw9Tdtj?u@f!9%JY zlN}87mDS27dCalu|C~Dp*|wb=I8qR6#QhG46Z@+nMa3FI%=&AWbGT^pNhG`IiDC5h ztzBz|cpbGF$;mHxd@`_W9FJw_dhA@uZ9Cloj)+Hn2!1>}l!Q%6x+udDq3n6oD?B8u zBWqNp!!`;Biog4)@M77K<%arqJ*?B;p^zsf2r&k2rf#U@Cr?Kt=@65W955;kN=dh! zSRkWAr;dP4rGxvgOKAu~(p59~>Wac8;&~+!vD+&W{{rqDUXN(1NG=wsZ2`0PU#}eG zy^>cVd_EwG1e&BExRO9&KeV?H)|cRM>fiQpA7p-OxNaR^bg6PLfm<40; zqcUE({OR7Ygoz$hO7K2)=#JDP*`eek@N!5( zs`WKU4I-wPd5u^3$qN|P3o=XEZM zMOn}U43NqFfE%%LC2r*5X(`Erk%>i}>t3g1u>|DkT);>R*162R{(z;Y-5JOm;WqR@ zypwi*?WPKQ938L;eeK`y2{X4xqqD&K>Ai=lZTgV`tEQ*A;O|yDXbHhebLE6d{kiIq zgDf->Oqlw=>-5gih9TvAi#&3UbR&ne zIMNBBM;tRVv1`jKsMLz`wb;y=jtF@3F_@VVB<}U`f}l@g)K#jTHAqh>WUCn@$P=*O&~JcP zJe&AOH@^(8RDiG+d62k}w!F{hQycp_uGSog-=xR%J_H8%gm0I>u)Kr~DZ*eGsmEY8m%^WQ_*@0!0;90nfq(e|&VE0jM> z7BCW5cx*98kYRUwkc8_(^PIN~8&@<0iBe*iY>@*W8JUY+kVkfJJ6Wd7ZfY*>`K4{}m< z3>1?=4hOCU3!YlS^v<8U=b7-WZ=hdMeg3NZp3+%6>bXBwG60ZMfMg1QOA)94^#G=z z!o!`xK4hp9xZqCYXT~oSpgp19Zf~w4N|^w(nDe|mZqP2#sN!l%O`0bGvXu#l38|ma zb`#*Mdphz=BXr&+Wh0@3u7>XYwYzoef>0R3d6kz+EILW`2ohztA`wcZa12K$KC2l~~6 z@z+jneVBb2rgNt?NziT7&{v zY4&=xJ#)HAvs18kulO%!KjZP|Zkat|W6oc#bqQs;|9>*_V2*Yd*tH5eX&S_O0&)aw zIFRxO`FlkcrbI;mV+K{#MosikVZDxcOq;e7Sw8K3RUT=yH1qN!dd^E6@Xx1}dXL%$#Q<=|dE zbnkC$w~N>p0Z>C%PQLsdr4PzE(~z*U<&#BN*TOwTDp4|2ndtzR8M2T{QR6GO54QS; z_i{Dr%qmT=RN{fK%91A7LtU)pB@Kc(u2?be2Nsd##Z{%Wr=X=Qj?*9=8L2SVflamc z;SP;CB3fZgsSp)K3`19=T0vZ%r7FqSKjF?rJD8a>YVv5DH5pD8e!9K-@{}u^oUpe> zRa~IUGUsWA4>L-+s5sBi$(e+9F$jb9QFP8qw2evnxONn9M$peqETV=&9b%!qiq2gH 
z&-hiY(ldEk@ibmMUyX@yY}n?mvqY&)!4648tcYE!kUPZOfI`8Kx>R-4_ZMljG$+t0 zDfj)b)oFK2H$U$qA^gA`?jWC?NIu?69_~x3BMTS{`LKmNaKdJ~yP5Fo6J2m%L z0In8yl|*`sCG778Up@9q9Mo3hU*%a)Jn``rf`I+bQ7x?SMl~z@;P(ZCz~?;ZPn;hP z>NC`fLl`hzD;hKPo?}gO;+dr{%7^unk7eKL?($y-UeBa9Xn(`pHEW6qJL#m6w8Qjd0J*MS^ zoumiogDxt%BNXWKNSq7QFC$BKL5V2bPTegAbwoBfdc&8&$fDC{{cJ~qWl-hEGD(iN zMk6On&T3c;HwlBfgMvSgb;Vt)Gg2>vhGC0GGVOa!B6bhm|Ft@Eo#Xm>vw$U0QQZ5#F(XQbZQW08K!$ zzYfifZY|ngk~2I65MAN)zDo~RzOgg;d^_{a&WHD!z~L29E*sI?;UAt$LAafe)OW$J z1%C!wQ{&{`|JY{T*HkSM3xyT=P_!}_j1>-6K4D163#-pEc?dxMW9-f9&j_>w<~JA} zN`gVyk{@1KLc!xGjXwc3EWY?;f)ItDtNlqy9c?7XZZ@Xzo9Xo94!SD?qWn>Q~VAuD@VX!Q2(|po>N*FVgY%{p>QE%RW8Z36gi@Jv*3%<_+H9;YmOCdiiMO z_nNnV+u3s@H~B)6$lCrNzH#qaCCIs_co#43{9k015$L}3S?wBCVPW!>{4pD1qi@v< zuhsuZ>H>)nd8prCNoef?Sa|cB_kL8}@0TJv3ZOgI8=7d2n+R zWv!_XX{?5O-7sj$N4`=D-NRrJJ?&_?Z`IzL4X)a{GQ5A^q7JKV&GY^Cqe1y2NZ$zE z`xo&tHqW%n4ry~|tspmNByi6P?wv=P9vFoS`Wi+KfdlV_zw0Ff&f~rCKD-ys%i;Z@ zZin}4`&R)L)o3;}PM*Oa-V5`1Z}w2FV3(Y8&ya8nUE&iV=gr*HtZrxcs~ozKRz4eR z)+}`L5)}589QY=_GLi$_*S0`E5(GRYw(!^9fqg6p{a*P1ub0!HdUu(f!XF7o?KT-@ zj$Ycg0i|IzkNfr9-@?06?dZyPd(3-yl51Qa>XaWzKAi$M`G$=U`LKA#bqUl^%-rmUHO}|u5v|t{G=Y+7* zv_c+DXMPG!C58GIiI&l=8BX<9);IS)T#1N^}T zI0X$ibFzXPR#J0kt_5IA&D|rqbeC7HQBjR}R}|**uDk*??t*^Y1(~=D9PBP#sE@yn zP#jB@9}74N-=DNSBBLYam>~a9^=#YZ)!G++!QrxW6du|vS}ABccFXPdH;Q*J%Br&V zRjFhZI!gBx%VuvS9uNzB{bkX*O-I_oz;%KO492*G6fP$C&K+>#m@QiYIC0W(ai(-< zhuCg!B;^m=#DnI!`JM02{gST@J3mK&G7D~BNJUf z!#>^S6a1u&@Tp{*-#b#??dbl}cT0RH!yATxewK6>!>6KEs3Yx(*SbDrbmaX0W6qZ| zu(%d_spz$jU*n{yx`mb9@BS2|H9PLPThF>3SX5pz2^<=DTK^HKpI6;IX$h3!p9y&) zyQh>bgeN>_-{bqbe$4gMciugsWt#xBAmvt_X6l?qadEY@ z_N#u@!C>jIT^Bf(s%OMO>VkG=x(|T6lzmh6vo5y*UDyeMQp%m^a9rMQ4-&6corc7pg zCEN3Z1Qmr{iAkrF;ukjwW-&J{a*O+*F$l~_!eiKL2ZBDB(kQP?JoUS|=WuHL5|=pO z4T81TX#Z+&3pYTzU6cW4OS%&?CaX+6cG)_tv#VrU>If5H%K^#HYL4CSgT8CnvS_)f z_qMNJPrNt)mc~E4u)+N$B`Jsd?-+e2H_3++W$bbBlK4|1<6aY~`}R-njk-Tk4)@&{ zKJ5gM)%g53R5j!G%Or`rZN+1y`|+1U)K5GBAdSTQ6+>6uqCmqIDQY_5f|P0*!S&@x_)zp)8&%E%n(gf=(&kznL- z-qsnJJ1(u%rLx%`N}Gz0%m)WZaC0p+4DV 
zaBNRtxM*YdfW*qKIH6hh0Vg;Wn=x$ieBLD}hv&lly{l-U)mWIP!FAnkY0I&A-X05l!dbeEjR`2KPhxYL+7WRaE_?2gBzD2M4f-seq#MIU0@XM>~ zD4975e56cOdL^^RzISa~cdaC{)>Dy8<;3F{>LW%y56O@Wt6960l{K}7IsK6tkUOtZ z;v|WcO)~p4*C(InSV}=ECztFlFLJ2j!?mHX0`On!zt!7)5-c?Z?~d>P|C^>1AsBDW z{lC+P8G`w6-~SU&*3-%!^zBQ(`sJU!NB;QVz-mB5>6K6ac(i!*>0=1*83KTu=guFS zfKm5xU_bhYNZ%udt9U4c)<0Z8_)z+PJpf>Y+y84B1M=~VTnpyPuqaQ4Ru8oC&Q>KL z)^fKX7R5cGsyi{tlETDd9O4{g`Tze?Sn+5^LyP5hO!R>K>>(Bha`SWx z=mFSTt=zX}NOg#NLREJV6=SR=@M0lufZVnBKg&qY1{2T|w(vM(ZF(Y#w|`imLkC-F zs6v;y?b(vuvr#rElS8@%ovsRwbi2;n~TZ@^_ zr_`?ov#kl5eEHEVuBW5ViWWH*FL#*HyR64}*gsSeuC`$@m79+%=XiSmT zK~xZ9i$SExDBTV{(&Zq!V|0JFqsJj7M{qU;4b(pdy>|{FbOn6SFkn>XBNl)J#0r$7 z1~D$~ydqT0@P8IV+bAWy#TH8-i=A`SoBRHxmfzhJD@4h=)Sr?`sk_!dg2yQ^g~Rno zj>maI5%;r~E-49u!rd{Y3u^~Pp$5vhm(Fv-@D}8vFWP$gDccr=p_I3GozKNic_%j6 zis<+z0=F(W;EgZ~SDl@R`CkO`rmKQ3e3QFMjgoQ=&LKkW5WLmna899x2?1Odc<7$> zUcYVlL%H)`5QLyo?(^uaDil*KM#pZ(2(n94i}`LGpEr}u!ukYZ1hqU0hjJd&?2=wFk$;X(-IGDGgxaS!u`K_W9G5V`YV?q=a3yZ*Uv_$R8mAczOon z<*vL~LpQ*iBJZKnbvr7|*4IXwM zOO+eWQEIL%IB^<=gX^x-amKs(53Vnue`OhQsoSGWq z?Pme7FF&z|w2)E>UegA?))xC%pLro+08b-G1r@W)f$?LQsLpcfs{61OQ1ffX3t)&< zEYC@aWz(R~!C9CA0rV_~uCi5#_&*AM^3#v6@kzmHkSvg`%uvNvX5^M7LPO~%2Y!zJ zX8i2>uAwIKE}bE9d=`k1tudHrretX5I9mt;K2m&U6Dw{M#}FzIuW|Q@K%u+ z!4Ra7q>!SJHsOzw82{-U1@ixl`gS?*@J%9%s6H3fT5pQ)sBUHtADL%eylvDdE>-f& zsL8|EJpdsi%uk5FR-_dPo}cthG;~zFa}xm)HS&1X@CUXAP&?pzYTz%oXRJ;lYS2>+ zT!9|ta{QMP_lWVDpJwd4gI>95lrq^jgZa!H74;HF`oqc8 zRn5EX$UxM6Re*?e#Fe?)Gt(D9!ZE!8U>;QK_EP?8GCheEwrX+A^6xWzjK-Y0B9|{Z zHOU`4k#U82fXm@*tO=4Ay?P0E&|4=~my)(U#3I1r1{qsC9*wB5zjVCA3&ptN*o+vjE06>ciB09>U|9waT05WhIBiI z<1V=2p_lmEK$M`ml(DHt%ure9CW-c??<}*< zRtYxd8G4aHSQ_)O2rFjQteI_3_?6SZ&2Sgj#C33M;alN*;D_L+;ODq+xEtI9?nmx- zo|4E(tOMV_6u1!nIm73v^oeLpC%?yq2FV;45|^1t%CIL5%}elvAV$&FPdj^pZZ(28z^Bru}q+{<0fv!!Fq)t7n2` z?3A^vpRh7)D|QI`F?Jnx8+I@D2=;UA1?+X~9qbeAFW5h@A8{0iz+rI|oC_Dk72sy# zUgDbZfKSGw@tOEa{3`r?{EzsT_-2BUz$S1fsOo#>XLbQn<@ho{M`8?T6=<-l@B$J8!OlH!_ZaFNc<)6B)ChB9gsM1ufVkln$ 
z#gtLKDm2la4z<=pz0f;-qpNh2mYQ_XTbii@4RxU_^Nsn{beSPDWtL33$uotf%aF~P z@zameAJd0)D4p9^tgv@>%wAijh5tWR9%~Y7I%^(lDI3bRvZt_Tvlp>nv70%-F>{=p z04K_s$yvZz##zmI!1%(*$1% zGla#$rNY}HttjLB*no-;Ha;>|&4=bYbIdwpU9uW%5!sRUOPg%3*xdJy&I_j-f{J5d z5*$E>&;{gy0#Fy?eRzvgFpHcf8nKB>Vsf8^lX#L!@PtU{WQ+pwKWhPPP(XExD5H-k z$vx^Ga5LPOZlOEno$}6k7;nH^@-O?;f9;cfhCd!03JwQhL3~gVFoNG-Y}tJl#^P8C zTM4g(cSA%N8!q$fJme2~9be=dUzd>;(dbBIAGt(*(Lf|P_K#o0!^vLDkx6zkke*1d zroJgQol2+Eg>>WlMm!ZAL!@+!ZLOoBe8RrSu~D(8d~p8TL#dv262^MxV;99W(zW09 z#GU3&bB}Q!aliAZJtIBco`ar0yjfnO*WsPx{o0r6EAS2aj{DB|F8Z$f?)a1aDu1S* z=`Zuw`A7J>{A>MN{d@d}{HOfq{NMO*1WYl9C$^iXkVUg&aI z8=e%N9-bFo8eSDXn+weq=c;o3xr4d)^7MI4c}w%|M4*wjkrj~*ksl(zM*fO|s5ROd zJrR8p{V!jiKP`V-{*wYxL9k$M!L@>01utWwm@-xnD~VOd8e=14-Ld}IjM)6x(%A0U zm1nY&f5~aNBA3+fsZ9Oo_w_$|KDnL5 ziAhGu=K3G`tK>f!k}9RCmtLpS>34dOeRZ~&QS$flhrB=k$VvH0?$KT9Q9>DwX+ayh zpnK||A)2LRdPNwjq&6avM}!92m@RWKkEX$F8M3`cm+G0feDv&7$CSmB zd*+-km6QD}w%k`f$g3)<`nLC1&Tkjf#mckG&qwQI>moOIdw1^&x9qyzlE3P;f6w>( z75~Opd$K?A%wLAB@NqC&%4_hW8-o-#UT9+4a*h&v>jkz@C8S!tF3v8#7lVvGw??uLqcpf$UKf2}-$Q{C5P z5d=%UQCt{`ey=FoZoTgR+c@Xdhz&(tHh%-%emvhL&2vIKjI9fTW2=?1^?OYerD?=4 zJ_bDo9r%(BRHg(l^S@|IK&>VmS64fiVni2%{&2ThUR~_2zR2^~COUmxZuezWv$sY< zNl&uc>2G(^K3zLM?kBtJN1hXKj*%XU>CF7Qm!X2tbsoVbMPi1@g~)xZ`p>wKGUoHL z73j+H>pYxF3AW6-lBsUwMm?Yc2I~HUXD*W)c99$3E5XEw6vimI5|w@zxj;$UaWt#q zByKRy^0w!ij5$=SF*s&$m@1|*q$UTxi{QNRSmnF>a8q-$SIN}(+V4;Mal?ChdIH|? 
zknTU}Hl{T^>j@nqe{|R;Sos6Vth>*5+imR2f}bA)lgn2RU&byuftKqbpDn{bc&PA; zxX(^c;{W@8scAc%-K`(79C9c@tcTgq-(aV}pcKZE8J|udUk0%ljaF#v5 zdxsC;=a6-VP>Q5BW(n#3bbc<-gE;P?yHx<~RMnfwfvneIglNmaGybSmG~9dvbyLer zMor(X*QHyo@*F@$o+;_ZZpNH4N^i5Eq4>U*S|kK21Vy;kR0Eh0HV5=6-N0oCtBo5E zTFCyRs9EM8YZNEvi^VPstUw6SrLq=-ttuxvpa@Q;DwgkgFP8{1q-~}eVKKfjeqbv~ z8N1GI(~EsHtZWB}_=Z31UvyTP3P|@75=ol^;_;V#pxn;f*Pex_42jp|FE74~LxO+G z!ipr%KjLi_n8cW$4a~UK^H(`1itA@}{R021V9R;K09y-@UUaQq^muK(MXOS}JPJv* zBBKpP!hMpiM&X^-xnqjf$M!MsYt|4RdfF-DH77EaG()hEGIlO&k+ zSy%b)V4O~yz84Ym+_t|}t_o{E2L!hgL1a^`OV_6h>He3_&^?MYzuJ(7*&Hi-#ESjJ z(>f6P7LpMDLe9OEsZb#i`HcGlFR2cFw(2Gl4bo*M_t<0`Ax49>+ zHSjGw(~`67Yj8af0)p-2yR3goa6Dej^daU#oM6B&nqg9iKAax(L=gOaKx)%;oDItn zAy%V&yS`6h0J$RPb-|09eHXBrp#PwDH^51m{D1>eI;X*fY2XB_jWCoMdheP5TwwrPy_hVT%EN^s7F+yh%i{N&_cfA$yu(*LusR`;35pYYEG>CT>{_cB>n z^Ufc}lo5lZBEz1IobxEqbA1blL4LauLLS1*aJt8DdBdmwIt&xQCvc0_e*WsN8nj4J zB-?&$AAzal3m>Z{)%x_$s8*`fQEOBbui7%ug9yxW!`|@Lk9_7c&0|PlO=%kW?@n>p z4E%&=cvVj!>SJE-b2QpK?Yt*^_2^Fm*$1(zHBSpu3UKx%(wW-ObO+u_$X-+7b_GLS zApi&jp4VEvY@7Viep0@(ODY*BRRP5wdJpV*#{d7%UGu#wpl7)PG@|`qrshKiuXUZQCUU(Q4IE)qaZIK4z@Pl-I_E7YjUEAIjxlm$!swhXjl`r+Q6Ry98$P$8O2Q%KrHR|50H!%u+ z437~MSx1|wH+b1FkfQ|xG6r+jll}_4zuBxeZ7HIgAE^{}+r>w~pUfga^}*;+H@jV} zKs-@nmpr?x5~p3XI)&7J*ygAKEWg)jIS;fdjNU*97yL<~$Ek+xfbAnLie1)o*-m+B&nLRf)ava2@NN)^-PYX%VHlJM8+HUAja39p4Yhwy@9kaA%N-1petc*S8- z=kUxV6i|?grR)bF%(MgP1k$`E^+r{~cZbckU`W}!uYW5Cq!8~PR;#6n{qmMyX#nT; zs*Co?Q{PfJnwM)S-GC)|yC0`q2!Raa5tJtXIEf*b_3bZTsll`o+ltK5!&`N2tp4kk z)r?!ik-ve`aq6hDF&EE;;GAZR{S#e8MZrIeA^d1}3^2{jSB=sook$S1 zU=z(@WA|#^I#OME5e?j84)m>HZX@aqAT<#h%-WvUCDOx>N)Grlx{kY1dq~ zBGUy#Fhxt4rr_~fh6Sc4s$ z$PD7L}x?Y|B+tbrWvAd3f+(NPcH(xFA+mEk2{uovh27BMEQB*4Q zG^32A*O6~Ui}`g|GU&NPBndfP#4oP*P0^GLd7fi~|K^NRY1?H(`#iBt97e_TbC2nSd?YcuLuA3^yqY6$8C3>WKNoa z7KHrPpJzojeRh>U7kWK8ll$IkR<@!#FxeamL+RV8@ZP>eStbEZ8gkmr)Zu`u;-q`6 z%3l*QN*`Z-mr)@YA(h(NQi6!_MH+X@>iS}N@fEt1I+SVp|Fkno@PiC7&YHvSvTebV zksSt(^X9MrcYXriT+ZMY&ZV{Wai{Sn%-(ypQHxebK`!-(a3RGpJd+GlcK83-3}R2o 
z=p{rH9joF|F14zc#`{9b>&=tvDvP+rjgjThI~Os4t}>L^qFHZOUDvOM>bh!Q*FZ2b z0krY1y(Ok#nbKS&z~Jhuw$zOOX&S+&PCB{$lFTz;Gs|<-n+q{l-(_1D(tlSZqp1*SMuhS)afj*nSIqsF4t6dk+7P zOvqf?jh9uda|$juMK=f-ZjLH(Pko!a+$n zYxZ4enzlE`ClDlT5JEN>=cb1=YexuAR=bNtMM{FRKeZ|on&)|7nNyaWPWIEgP6WQ? zROpXP$6jghFC89TOb$L(LjREd7@H%=%gp5EUeC$&tYM>bnm4c&%>(*B>+3IdR6EP2 zZXNo+M9U;|cghwuEnu5;sAwmvs2PRWDz@QeUr%23i5RxlvW7R?Mh{6P9(+gcjW>IuG z@um>MkXRTx+H9r>rX?&Eyj<{*sbK*YsK0m;#&MGDLc%#L`3-~_8N-;Wyq$Cf4?{eg z-GqUku`va0JO|1vHXx)7$(PY(G9#Z)(z3d;^?hM{0@-!_(in73PE(9IMdOYWYy;9% z)LkbwIf7|*m_%tDQF^vFUcBw(og%vV_}FK3M@rqXOSKQ7S)t;vvO z7aHS#^ZmsnUD5yBC}K8+wGI1`GSf`7QU z!3#y1CjJ2C=ep=c^vl#z@VfuyU->x>_nZG*ZcmEeV(jsQ$j9+~My|jp$)zt&PLH0S zOz_%2Bw0@F2%Jmaq9$4!87tp!+*Imq`l~K;Brl{mdK2|34o>fJ7X(~QKnGM zyssnVFaTdYxwsIu$$Gw|1?p2D#CM*j6NZOD5EY!}S!AP@SL37jzxbCAa}`DE$PNM) z|95pr3q4y9k96vx+>AwZs@hpj7#T@v+37NxS>7|S zblXFz?2j1mIj$>IwVS3)h5;0)Yt3j9PRKJp+8IePy6`jLFi&=j5eOd%(I;4^wpK)j zF;j7v^92I4ho1>83eLuUk~gf=cDufRr^Wom^o1-n#tk#KX{tP;Lfj1%gkd7Yzc@8w zw+F~ccy8kD=hs|7fo9h1gcQePLGX(;KeIDAP)Slli0!Gp{tblBhm3w3-ulkVDw;G+ zqdX|Iv^;?Do0=9eqOe6b!a85B>)q}iT}NKXbZXS055++`OvmQ|^T80u9nhq#obLZK zy2i8zCTkGA)`73D!S{Z{Udea?bk*6OKGrJrVtoxn5%tYM9y{lKR3RYOBCFM^i9k-? z-X*d*i5xh35ISlAWjKVH6hy02U2$oQ)42wex?t1I=wPHiJ?O}%IH)ArAR z3-8xMo+0NMLTYW|BmJo6wcP&up?%5o@Xqhw#}$|a^hvSXAozN#jp&rWXJ~JF4FMIP zC9nBt{~D0Z{6>%qDtirVcs0hmvIZpgZ?7M5pmVYC?jRJyDCw22NB8ouTa`ssq(VmS zc&h3&eXCk!BP+w!h%M#@`EQ)HcWi~pFxo3nDPfFE7M}>*Lxq>txmBJo9$-3 zdp6Wjo~Vd8Nt-PDAFFg#+3Fzl+hvBTFmX{JG;pw-u94Q_or*VN3dxBdguH7g=Arc? 
zpwFk}Cps0#)PZ!*m;%U^L>6oacO|bWU02t_3zASzvE$W~a`4avlzc1c0K?7B`BC^b zX0B7ek4GC|gl!AFpahMOVjHm_T1cYRB7s+{`E&AlqK2%lzE6q7^W9tOKLYcbi$uX&%L&LHz z&>sSbqX-s-xDNv#k!OE8dB;Dz(2_(TSjAOAnUq`sCTiDJWo(sIt*CE~AO*2jODe}q zxo(=rSSQGAHHf0?cEJqn%$sopL;%}A&&NdYxZ!zuXguo2<|KQuox}i6GP$0fot2B4 zoh)(%aSlW?oy2geIMIa4YP7-Fd}S83zPg?y3tO%L$DVk4ncm0S%FVM}877_dtB3}g zu&sQvUa#^6uD08I^a#$v4##$%q%*>*X|@=F@r8{xjf_Ogqrj1m6Oz z!my9u*%v4cBp!-|650r9M)@sfgm3`vL}y+xyVbOEYpxq%o`3+49B$lsGHwYY6OC+I zQR*vOn*_%Y$~=x{?%>G|Doz>%n-fk~Qr0(u8V?^3lFrjDOm`r#Hy0<)aiSt`(1WVp z5D6TwAaKAuS2YWXLA?=;^DN zdcND^(9Ds@Yn?7vsuX&-H`@lriCv7d-QIXc|0j0aC;imuum&Hlg7+l_VhoceW!{Vt zbRz{cZf~2?PPl3p%nIhJeeG7cJ?S#SsT`ETIS&4vLFV?oi77@0`7yX$6cHtJlNGTy z!Qccrg@}s(3|)xv#QsmT-X2C#grWrlf;nzuU$)~1^RK(83aooH<7{1Q_g!0G1caHS z81TJpU&AZ=43L@vf@h;=alUTUQMw!CO?P0|Di6j*Eu=!`GDr9gHW0*yl92y2Mr%FX z`E$br{d zPRr$bf9PYkhyVVU(8odO%XiCyURQ*85ofHPr1!m_DL1tv6BHtT(z?*~>L-Y;78#bL z9SjYP+I#2vePI1f0`pO2NSY+fZ4be@c^=CkzHxP%)|vxijLen{t=D3nJCH<_?p z4nGY}7HsR8R*>!jT}iPi-9Mm;KN}StcPjem(Yl}5&Kq5j!<+44RI=8*paGPRpX&ds zq&XplZCRzB|6EcY2E^8!XYD5dXCe?C@h6(jDTw9%S+g?R-7ot-59wTFRc9Km^-aI< z8%J+ObE>yH9L%wS^MKSd5DS&7+S-&nix8&AM@MIA9QbE(tna1W43@rPn;R4c{`7HB zz4@;^oi6~9CvrU&)|k*C*li+5Ro9!gbu(`yV}{UsH)-2p6`>_vLJ^;89e7&8LN@q+ zam1xf%5Ah3rb@5-RX5<0KtMs|khTM7aaxG_EVh*nLC->;B}RQL&_1%HzP@>cFau)G zgiH=3ZO)m|QfiCWWL?U2hROuB#14A(3^-{hA6W<~y`mojmNegU7o1-#i*lLU@HVSN z`XNe7xRlCX7i48b@C4!vntk{+Ihv#Ygb~7(fjCQ&Gm?>l%&Zq1r7yG!nDfpkhs01ysdZMzbrq%CWup z>--Y498Qa-&=yjXFz$hF#$Hz9%rgvAy^2^lPLO{HjvEWrO~qZ4g|P8uK~3BAz2vzx zz35Q?li3;in?yc@XPiPuMdt@>-Q-cW$D`I4+f7x*v2I zKfbi}RU%y8(RUqtL)wJIbVVU!Y09QP`Bm2D-b4;~+^*LLU8Zf{V0s2&%#y1BF1#9$ zQuLTlZ|?*%xW7_??_>=I)qwrg7@luWs;hbz90>7RvgwsG`}sB-h*+jIf-v}#r(Q|S z=NuwpFQQ35qT1pxlRHoT`A;$cU75l6@k)+hbsk!@)f<^$_K#ra#K$XA z%lxfo?Sc!14E*VPp#L$C?E#D^DP(Rs3n@LNj1LUpzh+gglLUM zyb_|ir&lu_f|p%^eKBqM5Ao3BlEC0^cTR4uu2wC13dKM~-r=lhLScE~$Q#z#>aNk? 
zS4XgY4&)#!Bn8PG@-8NU&?oHpsr=o-GVnA>}{OMaWGm3JHcQ`V-O& z&rtWWX6jGxij#-cK9R`&fzz#F(1T+-Wt+v~A94Vxvi)qmTJLrrtWeIV-dp?m?&@-5 z{wKyxsIBIO;ho)r5YgR8xlQB-j6u`19LvKmiIzPGaCXqZHRnp5s@isI z0Drh!&@TCddQNx?hNF?uB9adX@tCWsC+nTiG|4ck?jS$x7D|Beo@$DcP+SGw%n=GQ zPllPiyx4V7WNaSh_RwE?Nppek^CFH4^XRYti10tr(^bH>ZM+N5GvGnd^h&hq0)`k? zgZ!^W7z=IpiznCH{Am=XHV{x0xB%cAUs%=PqG1)Vof#E`%Tai}0K(ab?Q5Fi1QTH# zI!=9h;WcoR4Z3#NM7zV?%`K*Mb+FDYAF}B_R`@{FV z;a5%!Ko@sbT?;^7CxKzfOPRQHpzC2v3wRLpBfM`3QcPIY< z#6gQ8s=?N5USwc%Oul8g*IC~x}m3S(~FOmoAt7Tl;ep!ttDeBa;WjEY`6Y5 zgC$m73s8(=w*=CviKcS1!dZMmQl`4xPLp{=2&TS2o!H=+%^k|}svFX|?Mo6uRfGK1 zs3ahi{f{Z1Erc`$_+tMhENR1SHeVuv3j{Hmfw>QP!7^{uy09I~F)pe|t?^{dr925? zeap>!4(rmU>nyf?%DD(jN-9c2b`QZIm(CVZGHJB>`mT;zcXAP{+dj4jRRjG7lstG8 zGml3mOR7GccvL!@!zN~mei;$z%sgmVaV6#y78RFOnHMQ7tt<;wj%{>8rG-%u{Jj+6>=?(~frBoDUu z2~c~9LL425?aV|dbMggTtBECK79}Ck;MleATvPWH+o~v*fu0Tj-#ckXH0m9&^O|FU z0Ig`->d=|K{d^T_qN0I5kh^b~6{m!fOhUCww^Z~^uKI-9ngZ7S2LHPMfn`-}Hb`Gs zvVmCm+OQ*VGQv(stgaduvOJPjxmTB`u>|jsJa@oH>BYp_50w@Pc4bAD2@Ov?>LjEv z=3Mh)R3e@;N6r%THzLABvdFj`b~u4tg2};3`}^$cgNILlcxuIxkWMB{@9dDk6SFe} z0&*%+W&RqgH?Pm_F>7^3i$N%?tsOnG+K8&g=9+R!X<>}ZC6M^}8tV&-VlE|zT~(S} zuO6NnECU=eukT*ZdLlCH_~YHPmW1_W@o6Ii8hfhgq2M;>M5-s+sJl)c|Kd88di?m`K_5Jz zB+jPF9Yl$P~R$1JXEuHUfzN9uqP1iN)sJ3C;^%3@&k)qA# znVTof-g=i`gb(2RkEUxGa=mn{%XNw7E7Fs-(^n7#6 zzVGUqk-TEX)@{E%+*}{_@Q!l5I`say5Vcn;)|X#9Wfue+0&=?|a=qcTEW5rw>AvN$ zF#^BAsvQ!KP$U$!4ex(j_vzzDe3oSDQXskQqv zBmoeNT4%E8n%6GGzVF7EBk&dW{g#?((IDGKuk~_ijYWaVUjiA;17_oorv-@8!YmuT zTDLU7l97uen+mJC&Hsf*e95Vw^SIxwcI#F;n*s!kqjE2@Q1Gs~EDnypuaydpXUz~{ zD0CTvX?T-NnOQc5I4Q6Wwp)_wQ&IfJua2BrZCctuT3nR!gK%(JC5q*=fxsXB=_Tr` zbWU&IQ5EFpUB?{O`bzsX1S|$aMOZG>uyVe}3!N{$x7! 
z<}s;5rRx-O@9fHaY%~I`kaWd#-8f!M%itv~12-iz`&%tzqh_T4wWRI*tf&+CLkhej zgmk2eRd6{((7!0?4f>87#UqMhRh26g1f_O4eImLgl-Jb~_$wPW7NF?Qwie;j)!M-Q zH=|c?3d|#+Tq*Iw*|@zQ-hqXc*;G#{&@k_uMO}^G>mGY>`ArJE{jJ~ni`mrkV)6ig zBfrm^GS#Q&Aj_`4h^%7A8J^wcWcq)P{{7efpGv}a>YSoYdnHR_1L9CQ%`6n7D+zOh zE>^@9;GRY;0L_S25J9#Ro}yt9dqF%WVGR!rj+);0WSP+8xErfAuqOdp!W(uBjP$O) zoYHQS;Yc|z61kG;(ZmZGWd~NF)gINX>kL!5_oOlbw?$EwS^wo7m6RCf+@2B#$6>Fn z>uR~|mZt~{R!k}Q`+qU&`@9aR=PqJtd(HzrG_0X9toj&3JMdfcG^elxUO-V%^kz?= zwseWD8OnrS5Um^l=?s@3z-Zo`4k@CJY0<;nv+!-;VB@oCU)(R?6dpz6E4~ z-1I^?yY9NSwPc!GbfY`kTMWB?gT=XW`3ba4w8lzWcvan&=wkdygr8UESlh$YX?{6Pb9w|Q$OL#J`w;&AR%e;<~Ad{5_s6h$KpRJY>7Z?#n`rFO)gs6jJJ;K~oRF@yg^264qp_JD_YR z(-kc=#!@EsWWG@zW^WiYlM@s9oTqk~BqP99=9j~bP1Wt)srpI-;L)Qkxn~GaOC^g_ z$g2uOpw-jqmM_HMt zBHwyr0=cw9|52TJ6zv&>r+X1-vjJeY63Up#`@oaN zcEorZI~2`T{=xi0-r*bB+{%y(5i(?Xl%f%qu?hLUYT#;0Q`H~;$G-a?LSz}3@EnKDl zPgkl-%088yH#cBi6VB4)OjsvXJ0w=wjnX;jWFo@m`O3q-WkXSHH@R9h{Z?<)fAmM0 zu<7pGOh(qCROJHcVcX{2`5nS|YfDrSu5+$mlo8zwo-_>5B7DLsvIc86dpc)Y!y~07 z%@I6(lE1j$D%-X(7a9YCT5l(=Mx4-L<;Y|jDXV%%*AE{6c>D1-5MN(q$|FK2^O|;X zLuV{CkjF}Z4lifEC+(XT&c|UqZB`QSr{5>C+Bd`Njr2iGg4@1nf=|uZ(TM9z8H0`u ziFK6D1Y;0%UBms*%70E0cjo*LVneC-5O%w1IuK7TS#To zr0Z5|pP#~c>@nH-<0P^s=?BCIWadZ^+$`F$e}@Ss-N@wtw2PwqSdt#svTjQru(IoG4Cs?hAyl zSc1%sV(g;TGX#;d)B672zfq>H;RihAD%3=AQSC&vW$T-(!4j}78ZUKiTK``zcau8O zzkPc0m1=&tEgF{EZBt9<=r6sdp8esR`bmVHFb$G zErk$g-*(0NuR*q7*PI_|oBr4p5Fx15+-Qb;)znlKP3U;6)jvc43gysbNAzE>{a<$M z5jdR*$9qo6D)*PMwELysqbuD@{PI0}pCwI#FHzg&zy|2zbe67bzeL+s1s?Nq)`x30 zJU!~9h9%L}*O6}6#NwY=WT;PYn%(JT1<*y^YaRQ=qM@9*^u7|Q9}IJf5r7ng#_g7e znar6w`O#|o$(6X?=vJhlp@!RnGh&$m8Qc$D7iE&tXL9-@qwCNeHf@rITGrt0zmo+* zzEh-P`VS@6Psv+E#{|AFAwPmE=Ry?MUU}g|?8Ssxrc;(0>6O`Lv*{G1^zUEU@3sXt zOubgg*~Z;XB=Ci@(s0D%1%Lnu$|06{hmZ|YZoBcN3-vR~iV)%F2cdGl;U>vk!Pe7F zrsAYPP((uG*FZL78GyRJsnl*Klt4T26tJh+#hBX0o0Id+fYcbFJS&D*Ky-JFKCd~3$O9D5d+BpJ)dLt@90s2#RXNEaTeq+u3MTZ*VCf-5mdU^N`s*}o z@Nb-EJ^_%yj6DIM;MiQdQy?S{OXZ)z9L1gmjFLS+=|F<|)7$MTD$mK#j_W^nBP*>X zPlEA!l2BKM0^7HFq8 
zw6xYAvibL`cm)qUEst1t)Dp=1!!S|`e(%X$Q=j~xJr3{p%F!a_`HM|jgvi$6Uq!?v zVRn#}wMr;w>i1|RXn=GMZeoNzWWW~~_EJdByQhnXi}2MJc+dg;fDn%vjVZNG=gOtU zJcI7`HQWYWi*c9|cS`I)2|zy~U~hDD86*iD-9pm1u?}w~k)Lt@h~)yogn%nDzoWa9 zE)?TEXJOkE07Gdom!Zs_uD8`j01^=pQWQfK;gSGCK)t{DpjW?%EWja3ZF`{V>3uIf z-30w;y(sa;XA}^0o3Q)@`Q+zUPa~&YpFtANnXEx-pLKoR#M1GwF9I&e`I6M;wvo24 z&=dlXr&Ge(tyWU)hBnfzqUUp=OfS1}N~s1Ah^VB{KNo>BuQmu~VmBEk@)Lwd5)?L56`6AlZh)tB__4=}0I5AxI>y7SmgOM@SI>k;rVjc6U)wUYx=W|*3 zsN(l!<$Z~kHdQI_zMxnM;bDoYe~RoG52i|k->NM*V>Ltwk5+33sR${?1*&L5cs-rl zD7xD1_q+YK@Td1`%K)9fei#Cf0#`rpW)QPe3i?w&ZGUTg6{s$KzC0YW)FMr zFS6r4Ugh^n3ge-BkMax=6N$Iiox9A|6)`nyT({puLc|weWstPyct%nGkIu&esSMit z*OHkD)nV%oP5&EZ5WyAa61}7Zq}fBnfLC|?WR9IHuN6Xf#w8Elde8Taq*2NQk=e;# zQAA6HV6GvA=ly$jFbMJ0gi)it)rMJ;sPv_bz&Gm68_L~;h`^HukLF=qi)!>yTXzix#J0#ZrZqA>gzo&?v~~{9s;kB%WE0c8DZYiyUAiP* zf<8&pg+kL6S(0Qcll;q%iG*!8{dhD#*hjz-dhE{zt{VgbMbjkCDzc&|D$7#XAL5`< zK2O4|F7v!p;G0KwTcvDLgLA%=E0DSR|DqVvm(9xH)vu1D^uS^iv>zygkdWOp-G^Qn z@P?2@>R2tA(MsdN=sZA(9CrEkLUNyE(b$qbs!lpC`81*AdH${$ME|Cg-sd^uT zM&1Xga)apZH5GRIIW~2kB{6dx+cEXP9lfvFVR4diILEjGmV&5MYvvs4dSR)|_(rvE zW>7nQw_IwHjq9`S>a38BPfO)7JU?F7Pq;TQnBvlHMG?72sN zhpE+TiBw0j9hrk;SHHv%1c~>WD>)sHnrseWz(ZerXoP`s$%fFhY%O&Va%RAEEWy{o zC!}x9yR28F5fLfv+x5-3^o^Cfj0UQDIG>kkMMCbaFh-bCOG2iIrr9(uhRoyg{xCG! 
zOes0!j$)R>o)^csO?;ha=xRdX2VAT56iz5tVZomJj`P93qQ(-CgQ74-uLGUdkV{Ss z(vShht}3j0C-Ldgn{kA6#yad(9{#Dcwnn10@5Uxgj0+p4_1{}Eu3TKjl92zzc7?hw zMFtj_eg5Bnz6pn2>mj@)g{2io|D)_ZiaToc#MLvGFl*uzBx&={ASIA9vXIZNTB{fB zj7i>Q9IA47cvPS(S;ErXDg!9&4S&I)xfyGyk{0n*iXt%XHiy z1lGURXVIAqs7x)l>_rqj6v16Ec#*S*8?Ft9tmPj5k7P!j3O3tHEoc9)HrD> z2x1)a(w5SbGYdct; zXF0BX+s&7I_5h}S8rdP)gQY$wJYaMZzX9=jjRTbm9u8CGJry<^1~7`H`#)*ltT@Xo zp?t||8h#qq5h4U43Alfh=ddhM=gY?~UH)Qj)OA5f$Dp}?&G{QN-!7dq=N;5+2mep9 z(5Sn{`H!F>a;i6ke!S(})M< z5+;I$GNbXR7=>~Hr2&ryd1ch0uJ@&srcqpKlcdr2qv+9eZ*Owu(UaM(?bY-7985A` zu%KKUr1X{%Z(5oxs2>yFWLb)$QzHV$iBgtTt8-c{s#W-yunV*HfglT^fR$>!$M_lr z@;P5L@zNvuX~VW`2SI<(ky`0+l{I|)h)yFX8pqd_T2j4SsY|um^_^}fjH2EG{a=H@ z-mDQ%*qcDX)PN#=4H67>xp^rX9`s~IHK)5lr1gAoutG>(XflmZbm_-`KFjZj2r0PC zp_CJwhgkbBQ*&5I8cdlQ5#m5dN3_;tNF?`fsdSXu2L1y9Bb(mLuj`~i+`gb=+tW`h z%Y@N-Uh6NJ>2$gc8~;_c%yI@0hehgFI&V^J{N^A+Zn=ASRRN6jU`XvWmQO3hw4z=Pq982kM?v!w?#pjUMXiB^o1#BeFDp0IUpC%*rKQ_CRW2rG)h1^~-&6 zc8Bm02k*1(BQAV5B&UVdkN^_R!hc+g>%%jKVQqGYnMw* z&vm68a#X(9^@r|&WS5s6$E;aaO|58EC%IBY|r|?*4gfdhQGb8b9*L( z*5l)Nj?<%}ahYs~)w$_!N}vC1RLop5s?rxTPX`ZRJE&Q=o{hg8KPI%aqbY+BBWNwy zk~I`1Pai&cJad+(Yyn74JLzuh5Ejls#8}vVVE8OsQ|N(n5!>eL+Ipxif3!SYZ+hP+ z=p$q;l(048OhRV3cTQ()`#DH8Z5F_G_k+b46Av<%+I}5>R|Kw0??rGQ1dpFQd7kpB zW-Knz6>@esf5Ym#1&S!sIIhsyKnf#l+;$qaeIM*eYnN_93dPPQJ5(9jU~?S+xv|6e z1C=xyt_uxaNH4=URKtkWhcNXVLp5w0jY_3wYZ}8&&oDk`r~@Tr>??+)Qu-P~m|K+@ zmXy;pS>`I1VcOWEPIJeFi^ZI>h=oE^B>{^J<71?y#;xo5D(qu6KNKwqkhZAW)N3tA02$uUGTbkRbz9Y1BKn75XFTQ|*9 z{~Wuz`!@We3S4J7zGk~I{_4UkfKGOnmY~u@JAgmQv=j2AR^>RuKKphzH{fOr!bzQj zIkxmjjYAS>CD5MSlzO9uZM!bUQzBGOnN1;+&`(#$xmvnZDM_SPG8jmF zKjO^ZnD*g7a5^8m!d3^AdaU`l>kg^(JM|V^J$VD;?oH-#3{&@*6xUFj5}Hw0caiH? zS8bk>(&N$FO>qo7V5eHq-CdzdP^FGe@+#>n@CyRH3}_nUpS-DhQBItUExEVK@=0FU z(6%kByIoI*O%-FC1xPi9? 
zpQYQ;9nQYH?huK$G0=_Xoeu(>&rR%$l%&FUk&fn#1L$St6DzQy7GVbJp|vjzwz zd6CFcKscE5I%yp`mUSj9_0DJ$GHMn*zg*FfUT+o~|1aWsZJZw=H z1tZN5-_s^oleiP4hb}YUkwyaKC5*{rY^74{Oc|O`k?di**+0$dpt>+_wNg}BkVEE_ zc}EYGeC*1o{F!Vjyw>)K^)a|e@hm8~8W)o5wymnJ{}4+WE;&N0{m(1bfAKaLD|^DcueDNrl**d4*Xe?iQPUT*VE5YjPD)}~ z0_iRHEM77FDiu7cZnuX1LT(#qX8MiQQ=)c8VMs=V_PGt>XP~7TAsxkBh#UH~*am!+UXrZN^TLk8DY%D&D zwyT*(P60+Q`K6*cMMapC8j%pX_A7qq>N_%~rfFKXJ*B6MuZx0#QJzOBi=*iDdEfWR zGIe&#%G@VX?>@AAi@9O0zxHP~g}1K+ep;4_}D`!f>s4CN+OW)qS+2AGIIapU}D zAs7YldM3mv*^QsVZNfw@eQtxC9ZGF3B5q`;+0a7n=l_oW*YV8rrjx2*F(j0=tkTcl zCL0qxuSMBp(4GN_Oq+Oy7pVMy^Ig9;`S3UA^i(HFf)DIZ9I`Qlj+U;pm*CvY0r*L9 z`OwDOnRU@`vx<4h`<4oxMFz3Rp%vf~#SIQ=ys82WQ93rE2Ox^ey_G=! z?~W)$+|PXp4__v6PihF^#pCh$VaI4D3gIlV?7$_XwhcbPd1Z&avPihF>>U(|9^f^2pJMC zePd=KYYL=$&7l$mnHe7tQUkyLbCq0@uMdR{&t&N5uz+Zb3erMVJ8VMdwKzhaWziu2 zEQ0A6nmVR)WO{H24wFZPR1Ex+^E8hJ%6F}MT`}1x@J;r$RuPJg|0@BJdTMvQ@Z95!J_UApXR|GGT z!M-0wBEzyYS=MAtN|F^*QJ9T)eMBh5A!0v?xeg-#+%^*CIGhn(#7WeHCGpC>=@){T zEH7;@_ZN7Z1|PoZZNwW89R=zOeUfE83vj53}^ zGX?Jm{0oh*1f7?_^HrF@m3(E@#9T1;p9&+P?@EeLW$jBc;z0lyBeRvPD*NB1S|1dP zIeJNd27lo*A zC<_3-*9yK2rfHJGJ($o>B(ue51pH1q$Xg4O{HM`M?`GVPQ|!BKXjx?3qJ2(NRe=hi z;QVJSo>`9@e3JYJo_XoIijzXg?u6;BNvKtU%#T9MzBvxs)k|wv<5b%(3Tcdvge#;h z95QAP6zPTfxS5P2^zTZQ#n7(6hJX@sGbNdjQd(Y1yHE?p%|qd+J-m`S&HV&ELC&3_ zsX~lFl~<@TGzoQQ+B3fvHkTzR3y@Fj!kGn$3KuLd)@a?8rJ_WR?6%5P@r<2w5zDCb zwstVEB{$9s8CaVb=XC`xq2=sRju6zr~@wgncYkwYtfx>Ui5}k! 
zUB`t8L)X%`%f>dD(hO!=gn?Hsitwuh+`JdWh)@R4YE@_pAzDfuxsk4|8pdg-<*pRA z$x2AQ*Cb2bhxF0}Y_KI^-R<$yr}yFi zKcDZf)lIB}^@^6M2m?IX!4S-ctRSq*!Lu`tl27V5SL2y5k4mElzW;EdoAJ=v3J!ko zrXlp{AG>4lSWS@myqa)&2zdKr-{oz!{Qs@jHhGUZ1=|oagmel`Q=L!wk@HWK!6@`$ zod{1wQ4~o?DH!G>+o6mLr3BH0V0i4PiA)r7sg&T1(qsG*_g#BDU*&mD%OOtMLKY!B zRpvycl>AW;5)IG)X9D*-x>2&=Te}hU1436r*V1*JhrZ?Wz(a*+60w~B9e<@ViZ#tU zxLuV*Rhcu0p9?meB#jomm}(l$$TR}QnTKLi&z)9Ak7J0*x#=r0*L(96RYjDohKd;@ zs1VRNTf9>nN0xOdRq6%oVd^>%Nb<|mEbtPfyNPEH{kb=p+CBj=2cq`~%;ZJc9;3)# zXk?s>nnm{+^5e;KVYsMUYo7Oopg9NC79+~xYQ8jrF)RvXM~iGUKkvqauWF z&%yruJ$S&q$(SwH$R1gaGt4Zw;ZpWWgsp{AwzxRzTEa!2Bt%~rpNs~C|L|{1mp#)8 zq80E^yO!n4a1tg?@oOE@W6{LKCp=wvMKLa>grz-(P{*@uH@)O?oTC>X|f@g$vIXNI>)a$&BgvfQZdmNbuUxvdrL|GDhh{ z2Wdk=Eb+L;6s6`XoRey4<2gzp%cuA)9cx+;;PeFZ_vK*Wh~H*33ZdQUsLd_On0)Tt zTgag~`@Hk$`ms8SGZBmzAjH{`@7b@#AB20qGy%h6$#vGql{+;Lx$%)MfzNm&Y}dZ8 z>0F4bG%O7~rI~#(&g+O2HWHHr+)QP7f2!VeWoOmE>&YE364$~-P;P7|C__5 z@fK63#A@B|P!C z7cq%mxXd2i$x8-m^zwE<_~}B zCLsg_S8LVRu18Z<2z&;tj)H^`LM(A7?Qietfv7LrK|&@F!7Pjd!1yidT&LIlWzFDA ztxTa>;iJz~F$gq+I@`~XXe(C?UE!kU9Q1#sk^`mt-rDCT64=J zHYLp(#xm1t22-WOq#!T{v#-@dj>Wnc$5qxq6dY+JtF+35wu_ihvFRv5Y3R)ZRvO+9 zku2z*0AG66I;YwW;dSF@2pUIhi?(ql1(p$$qh|wWls48BmyRv1t;BZXCtB*;^V)Bi z639D}N$_lnEsT7lz9MN6B+R8y3fQ%=AB9r&gggx%8xwf}y$nuxh1?PO{Ef>c^XF7?sAfYPIZAf`l;4HZ&S(T`IT= zUFah#aK=h0Z92FBU2L zYrthjbSbXU7s;RCzBY(1!{huW?E!b3tuc7{DiEKxz2W!W?zHI5TwiPhxrk@?7Anto z5z}&OzTuQI#>dXYA*3n?9!#Q7rl{`yv+%8Yg6o1XB}n)kKbbw|@7UAZTxd_uXZ>VI z2nPxQ0wdiwQ$w1PDe?tbQB-?t3jfPZ49TB2Of9imS*C z!q1t=d7q3BkMnclbZ06$H*W(6C__ubYli$jneL3q2}X68Foxl<)2@03{_%WHd%@tz z9{izjJZF|cKecqLPEnQhGGELTll^5=)s5r9tSW%yO>`k_^f`GG`rpL5j1M;Z(s-QF z)Bia+g6j&^%JQcZ5F9MDYex`D5IC$V=L3;iPHMYm`4X~L75K^NAZcRgO6eWx>iXy; ztY^&cl=}@I<>)v`{+KGTh4+^e0P*O|N%ciiXD0!C#&{;j`o@0oYBgc0Hn>34Iy)J! 
zpVE?48C|f+Wrib56O3UbJy83{MOo%-4hqxDu>E(W^% z@=OReZlDSH2-l)V>bLM)Pn|6AZvg}#w{!F1I=6ZAdyGkoAw5n zARkVkiO@JRKkl0Hz_B0%7PPqQF&BFYixLkezw!5^wz#+u zI?*n^*qD2N8=88e#DBygx2dDi3XBc~^V_aiFj-xuiMualu*Mezd4!1I*l4&=+;7$0 zIq}JN_=u{|v2@Fd0eJBl6VduI#=h@cpybf2>xu+~GgxTJX&yLE!GpV#g}E-0Gz=m9 za`Mh~nV*W?b^^^$6-8>k+3hP=4e<4T zvbc_b+8`C##+WQNaU9Dbuf2V1&Nk!lr;EK^FB)?~8Yh}nvAz?cPLVIE3^nUKa&PBwf+My6B!-@%&Hx)ukR3JKAKQ_|?S1!()!N@uTATMpl z-i$xbqiRVFlT%l__3!hMjo*-4j$#OY=Jo8<&a03cGaEZf>GL1A2VQO?RTChZeF_ez6JO!?nBWNQ-Hvt77`AqtmEZ!?Ki3=&8=39_! zLx zJ*XHiYRW23+YN>~((*oIL&g!r_@uf5&zi)?J+yVf&Y*kC6mzBBU07byIPDa8k-Hl2 z45X?R+Ia%BXbap9K9>u0ANv18?56Ig!`9a_Qc&2$4x__L3Ooe>Uj~=K?dEtX%`*?c zYeBp=J+j|l1D)lxRBMH}|Ml#f$pV^#5v-6g)Xv2!oON|8`2stYfDrN_0omwzj;{BG zP7%V4gZQ_y&dB#fJdrWQW`G~mhmbwQ3QH$jPM!x4P zC@e%>N?BYuL_DnFlMjBP)r!FBV%qKh&Wr1lJpKme5e6=o*$MjnOr=e98zuv8+tBu z4STxcaci;8g{j7v^AM82r+9L)w#yKLZp(hyq<%o4G1@w-RA;m)S6fwgJ%P|RG*(*K zatbk`{V>shWRH9pLm&(pVI(80W-77a_Da-$;&mT?lYuy?S5QtYmaL#!aV;j43q)~R z$)S*L{S>^WkWp6PmyWlROf9zmn36KKr2W(cG!mJ${;r;_m= z`IM`)@(qJxf4EF8$a{GH_~uGMmxY^FV$jF9Z6Uhq5N;aISH^}}zZ4Lj5~^ttVKHNt zRC=4Clj`!Ro-j&u4i|rZMZ{Une^_Jlkpzs>)8$yw3A{`wW4Tu;Z3t z7~mzF6erz|Kc*&rh)>W7kS2faRwwi|&KW5cp<)RU9!6RtwCkQC1O%977#pWJ(AZh&Po^2z zUtZ}F*2~XWK-p*1izH|J{`24o^leGwc6rrxh^@B@!`rML07eof#2^i8kno6rb37*$ zCe)zi$BjRUd?ig(A{2G&1`pAKkX>Vn~eIbiQ@>XTBq#dhI|<=x*`b9yUUbCh?-d>pC84!{eo}2r;}`HVP?jO3$besE79l| zmo=ICk|x=&9WI-HKYRL6RcmC2|MN!=k3BEXJr^!m0AGX}X4g&XnYCC!Re6PJy zE$r1Y&x_%uWtxJBQG6!sj|*{~uiXim{{rfV-=toEXMWc`pUM7>As+4o-?<*UCk|2? 
z^4u2PtBTp4mE4A2t)FnbrHvdH6nOUnE4XM}qeZzduBaX}AZ;@`Zc)M_Z4jKUOu}{J zbaT!f&RkJr&$K?xUZ3V{&Yn%Wmsv*m7okhQ|VK1N09xjORe!g26C0uBlees(;3y@Km3Ht+NXurgIqCj z%bl}syP-TX3xM*zA&wq^CJSfmoR8F8oxjB?YndT_aDEQR`3-2vPnQC5hqv@adJ7-u z!Jr-vyYuz6w@}t2=MUF_@cm;|j;r`rm*;G%XZ+;k-ioXl)B3CFp1B;`yZZ*{&H1<$ zfTKBhc03Yi672RZ^kbNpgHcjd%*B5D!Fu)CWF$`$Ak;v9I;f#K$iCoPNytB&P3gy0 z6S!;YdJrVJt3{$Av$AR0x|qA@e=Kp8EX$h=a|={xZ6v#|Y;G~HBQhEwffykQ5rhD9 zhQU|CT;I9jZAZ@oCLw~sOKPw4Ua@RniHh->&_C`ULybUuPBakv5~MjsR#5*vu|iwf ztosdqN9P48O5uDmIezqSzx9XzjKr!7EG^LNx$>Vyq(_Bs(BEOT?qdGp{uSd2x;Q%+ zH=W1i2`RElFT+gXw?lCMF^IhL<00VYb{ zHrt_N96wz(G@A1Z9Q6_W*hRPgU*=>O`9)cUkgy=B$~-(RU7RIec62fKpdcui&gSe2 zA_#|(I#vXpWU$?{Gaf7wBnd)K4)R~6r&CDW%v2lU`dv3#t=|_gdnxH9Cy0v8Ln$Rk z7~@OnIU_QCijmaM#Z!u9Z%r{F+YuvfLH7 z@1)*?yubR{O=+xiPaiV^_gAq%nrcOD13GrXqF#l)q_T3>H5faEod2?l%wc>s?|67Cs?OAWCKwp3B4 z_Wc9NF{v)|Ei8_)f>&zM8tT1;=9JmE(%OcEt~{;D)iN_RRghC|L*&||h0Q#|NUrJ5 zDogQM(3WfVTkA{#DLkO(m@cF7Cr=!y|Cw`J&O4Q}%b>7~Eat06KD zv#TlN$nHj@C~qprBAlr;v`$YM@Ffa@>|ek4Yw;$o7H&p4B8o0YBqm zhQ01sD9sY7bXaFyN28M+nA*+wSTT}$1c4wX2)7j)fvrWt2yzpPh^ku&1+CJKZirR* zl7#2p%wyIP>yU?j9S7;w`KrgtC;Gr9eCL*agiAabvnqVTYxLTU$3Of3eQ|(lY&IE< zTZ0IN-sK8}nP(ed#3&)mr4|JL5aBHeDyT82jD9a`}mXQ#PS<;z1^DFKE?7)#vF#v%W63FV) zYMd47{Gr~Q;E#1~OAU?bS>-3%twqwtz5?)lcQsMj zW2hTPC=E@$p=&8FD;r}XT;NiIA9LX>N5ntm^K5sp32p%}Dzh5|&Ss2(L3q2wv9pc9 zt782X=w-m8xV>bl%aty35K1-!r?E69Xb_4mff(zvEN*Vu*1Hgl^As`>x|1a(YN|6i z2~|3e{Mw!axvX`$qf1Ipv7f`At2YqwG^v9G#|)J3&kfH6nN2`@jCk9g*BQ2wz3-Tn zq8l(HY-NX`Fr%OeGT?ABR2_YDJ9U zq|tg^)Z?7Arm7ARC0qk$;G;&{2SpnYp+^CjF`=2%*1$XPTO4{m0O`^&c7QKq9&&%e zP>d_k40*8D)q5Uk_OQBDEd!gUiMuWymDEDO5)()Q&rD-&d{J^LUV8=2Hh+yzoND}j#|?$$E;cxW3;SWDl%v;vMkSp z8n4_lZkXnA_ciHEq*h!&<{Dxg`Z$GiLNH}wN`bx~1hZQ)P#J`J<`XcGffJa5EjZs{ z%+e?>m?ZFuy(f^_oKUrD#Z_4>FBc!KP6_9~UWzSeL~UDD1UV?(Jj-;O+vq<-pOj@z z1`t?zaR6 ztaY2xLVaELhQg679uOwP!AQuCi$c+(flO&zvb@NXI8DPiA&{}hkYQ>BCTxZ2#O7l$ zD#HBaV11TbYxRj(iK({2c1^F@jve&6l2)@@rm9OUcT~DMM+`jMF5MhaYQXCKlbh== zlsDrA4Q6OoLO^tk5JQM1t9IZJt}u-!S<(pi#iY1~q_=e0vRh^eanJ2!=1VG9WujA| 
zn(~y1^@i!oVLYkI$)5>=SmQ<6cWuCdA|3$Va}(5FLv9tSW*pn zgqyZ3zFcrft$8C}@7K%QNP{VjA(1%?Wxa1@rplXt@l??|^;9LifiH_zpEq?~U*~=2HbyZN?PR@L?#j2vZ7s;DBe&gEg)uf&GwEVBz{_Re zt7#?(nP#JvFdd6k6?u(YG3&Q%J}8QGkaH*ZrHPS}aG#sg0}fUU%F!^h!(wvjn=TtS zaO9rPpvT#Yg6y|~`Iv1@(b9cJrw!tS7k3}W|yEx=wTo#6d!vNW?F;OH+ zOW^A!mO1dK2&T)6{qD(^Hm7LbRV4m^Mc@H*! z?+>RLQd~)4I9f|8%}{Kn9K#$HGNsi*smEo#BX{jHs5{9axX9pHwqRAXUllVR7=(8? zd^vDLp3Qg{=$17GDl0ow+;oI>I~)0k~yJG8Y%2G?|Jk_$A&7zlbbd37we z9<`{f(hsscOYba1d;!-U5UgXQO@Yv@d47Oz54OaG-JP4CCc(B77LpY96RO)0Fx2$I$WC0ZA7x(e=nSc*LCp4~|Tj z24gsK06JOe2gyP6;=_LvP ze*?SF3{m<~JLt2l{x=EJl%JLxnGfrZiB>azVnv>8*oLNa94NTfAk59>QvD@{%1#`^;wTC*PTSGs;p8XHMU90ue{ph^pul}&}ZmaX1@YlYX#2bABdqr8o{4i?pp6wi=yRzKt1^;(G0_Zp1>=mcI^4r_aVb06X z=e|xolTOONXFdO6Z%PlmW;LV6#N985&onyWS5y=GXO5efG($UfC$i>780X+C`;IKg zvrVyvU&=xfEgHdcMY2f0BGNN;C*fnnw#^2Mu$s`!*qilawE4_QZYloS*&;dK+s9V4 zPtwsWB{bo2rifjfTzlvqnSphkXGeVTP^BL(N*wr{W06yFbosr)+X~OM_0<} z#5xt-evVWbE#u{XO56@H_g290Le5B*)6Q~9xmqelv#92aa!;vOD(k%20bm?f&}_o5 z!L}Tnt!8a8k0^DFcfa4cOFNZ}n@!BP{FoLT=2LZd6%2=7Ac8z7gc7JS`aF5SXVdX~ zmQ(1BOiaWZbfk;O6d3v<$!$J*DQASjGH%wHa>QoJd?-0Lk^c!IeHNT8VgBjnI|PJo zwpv)6UN`!B!q@lP%C((6`jHtx93TavArhjzdEmjVd_f%>R@zdA2Ux-8P)&i%^bAR# zwLqb-ifVL%#TPJoD_2emWcdcK(}QI|o5^hnvc_6jf)xLW_DQ5qqM9zR%x4CV)IFlN zw@PW5pz5ZZ3uX%Y2jr1`+SzB9%R=;jpo725yG}hKn1=iPpO=kKP8Sj=yu<+T_^kH| zB-;H7Lb0X_Dtez(ForFu;21Z~&1c5i3PH;F6(X`3E5zE@S0QC?O}Y#f@&b*mP)L%K z=4ERw1dy>K4K3Cp$&Aqz6jb~w7>MZH+}nJ(*$~k^=mpX zz1e!!XYXhbb7n6p+L(gw&u>k*vTOw^O8RQs;iH?@0j^;0PJUBm_f1bT>sXF$d6_j@ zeY5+7v}S>+V+W6%7h8k%HcF8{$MWLxe+`B*NN})pe$a#g`V0rwbZ(zqwrSmljmTAo z7SwkuMsu)}m^N*Om zpG);>G0)aR=UXxHtyOdh(O%aQN}N9WQP}KY;1B%x$9wBJM2?L$~(9 z9vjC7-R=Ez8Z5+TEx?^vSfSyxIEgoySh3^8jVD}$=FTQa zm?*KxBuSI8BzL5%jymQ;*BtM9>ax;^N-H{DdNHnremBo@(iVzpbrR=9N>1_?Ub|ZA zdZyvDs-s7L&oc`$vd@0;o=A{bG?Ppv6?5Ci%(KXnwPN=|JvL{u>xr`(-rl*M_AG}S za}qFgyL#H)^N`Hmh`jR7heL`-`T7~da7d|jyIZgdkvkWS`oUD1h^u7+d9r}H5 z%I|Fj6)G&A&5cpyyrC5vZUi+%oniBo?Pa34%Bqx9U5(OetE;{Sx0ETbv8I~C8Ci=8 zqiQv#w%QFML}S$&t);8(dW>ql{c;r8h8Ku&4ruMEN^PP$4kn^e 
zd*cl$to0`DI-&6xSxJ%wz=k8(78qTTJncKt+BS{ z0_c;XtV6d5_6NDvOzB(wKaJ-Xu`wh>Fv2aFs-j{9aWT#+iAu3)8g$Euu|rpybQXF3cu@Hu~Fiay&IxW3)=} z(vo>dhkPhhzVh`>$si?Dt%g&YfBfqk-}=t?cG&4RzvD=3-RVOYL~Mp>W}9V>$&)xt zUEH6PN&cGj;T-Panc`PMm9%c^$}Qc>>XUUeM|*T_(obRv&ph`7llcPpTVk9I8gdW5RJK-ndM4IQaOmkgxVNq_l zn)cVROnEZ!FQzIGNi%yRaSyd*fGboe(vb5B zLVEW8`<`K}Q{``NUDN`HdX~<3$wA$tQOL~7PXpF&4AC{jej^NCz0k3bDu*g3Ha7Of z&Kx`~72j4krwFfrhpV8NH*DgEZ$EA(D@2#ZqyJE#C+ppxcA4BJ+u0@3JI>zSHoE3=seotdhI5Ywc>i|(cd}gL z{Ur!_r$dmWlE`wq;y>ks5FRgwh@i`z0539#W1INKu=`8P=5pKbnwvpF{id!kjwjbu z<*c+>Eah3zJ2~KWu3Ymp0d^_mjpCTgpva_((OKT9&69rVP(^HpE5(Ca{f%9vhlsuA zSPpMWU*(<}-eeyo52`7ik3sFJ3Fu{R(|_I8-L=fCLW~7W)HWA+Tosg_veU5DaF#6x#Ot^40)qo zC3=a(APMXr2*w99=_#nmD6F05@EW4YlO3%pPyTf_`Q_N4Mv2YssOi{`#lvwK@juYF zyYcw(Rooh3`qOnZ8I4TPRNYOnyk0&`)nbg8R{qq~w3@~=X2Y}9S5Ob zl$hjs0Jp256li0|Nw@&aH7lE8!iqAUtCq9YDpxU6vPg0fWuPc${9WA`)AoLD*ayqN zrJ{^ZhQTS6OC3Aq`h|e^KXqQ4smr`oZqu36u~RLGDKD924$=o%RK0Kj5hhejngucC zQpZkvK}@;SvC~-)bE&I6$YDYG=$?`|cxs9c>3+Z2+`Hi#`<#dYB^cFsx@}k2JEvc7 z3Fjxy+|R$}D5Zsl0Wj!ap{L-XQ8Oo)6d*(hLI|x$6e0*AgfPaKV2m-w1Y=AHA(#-- zkmh`Kz9L8&M3_*<0XqaeYDDB}P`m{80(nL|vu)MW!egb8IFfCv-HH~XPfXq3@Q>!WpczP6Llr1G&mijOe$BXlk447^8_s|6`k|2Vhmnr0G^&s z*aK4RK#@lF3Xkmc5eDc!-Rco^c#?QAftnKO1X3 z%~|*AX?~<19XQK5sX#=R&LzZHyeroLl`P$*SM>8qxt+Zdj{DY{O+pB|hj0B(ZB=fhA3LBpG!iQdvKB8iJMJxx7an%d9zW(da4o8!z z(n@{GjsKwXR&cl4-lKy)x93T^920x|ec@1J|FO_c1=~VIY}6zl|D1BB1f8}i4eO>6zkoZ3kxIh5_7S+;( diff --git a/docs/theme/fonts/fonts.css b/docs/theme/fonts/fonts.css index f55cb6ee898ad7c346d7e1774323a70e1fda001f..49a3bd666476efc571f483c5170e882e7e2c436c 100644 --- a/docs/theme/fonts/fonts.css +++ b/docs/theme/fonts/fonts.css @@ -3,15 +3,37 @@ /* open-sans-300 - latin_vietnamese_latin-ext_greek-ext_greek_cyrillic-ext_cyrillic */ @font-face { - font-family: "IA Writer Quattro S"; + font-family: "iA Writer Quattro S"; + src: url("https://cdn.zed.dev/fonts/iAWriterQuattroV.woff2") + format("woff2-variations"); + font-weight: 100 900; font-style: normal; - font-weight: 400; - src: 
url("iAWriterQuattroS-Regular.woff2") format("woff2"); + font-display: swap; } @font-face { - font-family: "Lora"; - src: url("Lora.var.woff2") format("woff2-variations"); + font-family: "iA Writer Quattro S"; + src: url("https://cdn.zed.dev/fonts/iAWriterQuattroV-Italic.woff2") + format("woff2-variations"); font-weight: 100 900; + font-style: italic; + font-display: swap; +} + +@font-face { + font-family: "IBM Plex Serif"; + src: url("https://cdn.zed.dev/fonts/IBMPlexSerif-Var.woff2") + format("woff2-variations"); + font-weight: 400 700; + font-style: normal; + font-display: swap; +} + +@font-face { + font-family: "Lilex"; + src: url("https://cdn.zed.dev/fonts/Lilex-Regular.woff2") + format("woff2-variations"); + font-weight: 400; font-style: normal; + font-display: swap; } diff --git a/docs/theme/fonts/iAWriterQuattroS-Regular.woff2 b/docs/theme/fonts/iAWriterQuattroS-Regular.woff2 deleted file mode 100644 index a25cdbcdd3f2127e7c2f6d0fe2832a83ae2fc6e5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 44416 zcmV(^K-Ir@Pew8T0RR910Ih%k5dZ)H0n@|)0IeJV0ssI200000000000000000000 z0000QgaR9iP8^a}24Db`PzZrO37i!X2nvO_D1_&00X7081EgRJl0E>O=-pl%OU|Hq0GGA5gXRQ-Bx9(``)AH%qObPfv|rELax=HQdOO%|Kq-P4v8Pd3l;C$Jet^(y8hVG`U}KkJ5&jZGzO^U40G8>KH-BO_k3q& z?uYa~Hj0c=bZEOJDr6|3ts9i+J3r6Q?azICqc%syh%v?pNA-ve989VnkrGOj7=(O< zk%ECyfHsOni&%j5!A4YU>MLTQV)4cKUu9~&_x}HxSyc^BW4q~O2|3KHvuq&P&5VFN z0_zF|N>M1CI9hX@z#8;vZwDSl5vmZ@&5MzrJGRTd|;L`rZO2 zpaKN^|Bp?3532XVtP)~XrRK!#3o_!mZMJm}6?Xz7B4%kD9tO?m0eP50Ks2Ny7li zrxXBJ^W2b9v$GbidIK%UkvV|A$VpCm94oaEL1r>k=-(`}~8 z7U+K0oGfm@Odtqi=~qz5%n%Fai4Wny z2(&M_Z=k;X!JC8$$5&?m*zC>&qDYeGCHU(5*tc1|fSUu)qfX_cmW!J?$Qo^k)Lt;Rc~!sq&=CxTxHedMNMTy?;NwKM@&O z9}KusX!Sx0QkOF0%5g_nCEU7CQM*lR`2YXw$1~HlYzqXS@4VCh&a5sF|`bD8(w&!(~3(pn%3yQu^P8WU>`*Z==%&E}rn?9zlOU8&t} zEC-{=Oa32{`8V5|qHJjjx=LfnR7xufiS5Fos&X6@5@AptTr&pYB$hjpwnzf)<5pk9 zfwUE1XkXf86iGs!|2Zx3-M3FEuoeIV*!wGDsG0B(yq5p&&BRt}Wxb)3g}&CrVLp}u 
z$Po}0N`i6V|G%YrxaPVKQ&1T3nwdq6MZ5F9>vR7prJy*hGnu7S1ar6nOaMUI5-*kg z6P>U@??(Up?-Zwd@7q1Um31U*BpGXDtRzX^AlvE&qFy9YN)0w*EEq6CJr6z;0noFV zs7oAV&Zt4?S*_RJ8XXEjBEtldkTDtU|5jC5ybA4YY&R)lL+k*e`Bv{#cdpw!8Zu8= z6wwGFgfT*R8)KHq@fk9V&FI*?sMrmorcNGHc3T+4j6T<4uB5XDgJ1!76Jcv7Qh_7%+gcwMiHIRat3n@|x5h;gM&GL~N4Ui_+ zAvfKFJn#_m#2d);tSmBP7V_2qkcG&Al0+0hEkd$@l1~9jDGrqBtWf6JK{@0I#a$$n zO9457fRIq4swe>S)KHTJu14)CG0D4(e7O zs5{+-x<4z_aF3xzdk*z-kx*leLrwJtYPye5Uls}VtvRS4{e=46JT!P7KqHAs0~+lW zgGP5)&=`y{G@LPm#$s%tu^(q>T*nO>ejPL(d7u%D5*lG1XuRhrH2rnZT&;uVdL1;k z>Y#a$2bxDmBy^+#*0jd&{1JrKba_Y+U?sjnq$fVvCj^vURgjkfj5jCbX8{Kepke~o z5XECiZ+S&tIw&a}L*hiDKx%q>oD2mTn48Err2O+MS>WJ9NB+?Tq0&eMD6+zbQ&#Q~hK zM!9DJ%6$STKH!NFbeu6m0~>sBhYFiD+}^4K&`iv<2pbh3z-}_TiOG;DYmVGLLjfP2 zwQ;maDGplW62w{wF!hJt`Wv8Wn0ETxd4?g-9ryFTd;Ed4Q&g{URI17*+wFD0F{hM% ztak1B?SomDT{}g1X~=z#y)a?gCv(ubZ{9?eAwXw{iWE#Np8LMkwzv-guMPYbibFL9 zc%h61wpiK@HHS3TY79O4;6#dtV+vE-sHO(+p$apcRRc)C7@g_Ze5KM^58qX#Ts9-% ze?c*IZS0)3z}w2^7YtgLbpwbE3_X5cZ1x?lw)R*P26~?!?IMGql)SvXn7DB7&e|d9iF6Rd4c~0Lt4{y z0^?J=8Ev<2BRa$pMx9@Fl{rh1v4$d8@2BTNdSaYu<}44>OLaAK+sK9Bf;~)#xr>37 zc1~X-h3ic+?Epf8G%9>vfig98+H|oPFpl|J1Sd(xB61X|P^U=;lRiU>m-oStNui*T zr$m(oEev|tjBuE|A|WFmMOYGK#;cZP=Ae9NlK1MY>`VjzOYLj(5|$woHtBxrs?0p( zqpscZw!C)@jRz)IyWd5vbASbM*}GO`rJxn8ksIH&(%b@_qCHU0mgsWV3~hz(_t=ZA z(L~ceZG+~R?fD=KR=y`^B^E%ajQU`5UN)9M+S79Y2Ixq1s6%14NNQ)YvxGD5EEggj zbhJPy#}S(3V+i%Q&yc(eS#IT9*U#9$$t+(BCnTl1~7w_9x0?bf@gWslqt=c>`h;;}aD)v%4X>*mI& zQ~5>XiM+D7BIuPj@g_mS$48gm0@!d0--zIm!Q6|ztKr(`nS@;rv=YC9Ee=?fRY8k^ z1O=U5rI}^XvzO$wbNp8_fX>X!cTpW;dNM%3{x=4V(yFha#+qubx|UjNYi#Y+G_LVY zXkxWZs-w;(H)ZH!IaWXcniSiB=1bhj$~gdyEeg;Anjl^X*(ynP+t?5{=>6X{@Dr@@HHG|rkVZG3(agU_FLF-)CF}MFZ)(ESG zg1HdRWjksoZfKC0%xZ1=w-c?kRlIjPSLxf>K|2HKsx+C6CV;`#;Sxv>9+!dXNO>T3#2jTHuWBA-7<5D*l>q%pz>stp1hZ(o>gW!W z8Y~O(bA*ypnIk}mLp7%d(Tvt<@!E8~oft5bn5yq|be$&A;SPka- z&KwN4bT>1C!M$Gyu-t&wojR2DQ&3s(-=Gr&5gZttwS%;-K=9m8g^V=uEkY(WHA4U{ 
zKYV&A?(F1diT4No9CqPzf$<@DyWIGUa6fRvxYi`9n1!aAI!dDdYaCHUWpXUA^;9^I4Mu@`n=pYH2@x8Jw57cy5K~glsawKaxdsT0C10sn`p_z>zZd|=TBtwyv3XAT1NjuZNa3OP6 z?0)EyJ~UqWAD11$Mv2Z!Twj{Pp;w@xk%tuG+lvt0Z3hdkS|HLv*42hq&Jl-_QXdtyQR4 zg(Fo}bZt8|-8+J`_mGbKfW3%xMt9frA^q7O%KXg7n08?!mbi@NOthNq-m$an?wQjH z*+XT!o&A08Hsd^#2L|PgS}Y~k;kqchLCakX59k`E?H-m# z^gU+d4MR^^m|*KIBhNVb$k7MRKI8ht%~!6z@Fj*f7I^r<$6uc2@cj`G4uT>e2?!|& z!U}=Vf+0L25QRci;Sf`l=pvI7TeSQNBo=RF`PN3ZE{1<%DT!%AwnRB7%U)@AYI9SQ zi|U;1FIYnXHs`LXAp7#ySfIm&J5h`lttDwMMO(=-%d#UORsYg(%j$}h=V)Wnyfxtn zK!r-D)gv?uD?57|v%$z>TW$ZOTE}`vL1~=olP-N|#%>JC++SZG`}p%uKKtU+FKtEJ z!_3Vrt*qsyHY5T^3<5)-2qXrJ!xOnY0beMR$|M%HcK(|3!7rhtHh~&m<<xmVc#n~PWKd++_U&L=(Rj|-pm$`|k}=xg{Jq!L;gtAbZ0s*%;H8gx2SldZ+o z=H1Tth;^k*xt@}x*4MK221Y}(k=59~*vWC5c$@l~`TK?DjuzoiVU9$QH$c%f6c;UW z8O;^4`cPd$)=g3mDd$P+CE*-PldS#6&a}l|F!741*UXIJcxkEsdHF4^bQxtzDp5+w zQcJPCRLLdDDqBt@x#h|$Pj)%#6WE@Yoh`LHAA9n(H$S`bcB*(Mi*vdJXG-kRYv^4w z0Lsd&$^luZXoN&GeEQJGKKH5F-uJuM;)*V&_$?E;gcWZ!TGnJ;R%d(7k6TnwQJKNQXS~|JHCF|4g zqRXzhH2vk-?u7}j47=sAanFo+eH z<0b7wEDo`c;iU{*rXP98JR?SlFm9wws}3@b6PN3>6u()B@M+k1kJH5zwU<0x^Y-V2 z80z}Jmigr}%=fo47hdq(vQqg45+#;0yizI(TYldyo!!?XvL!9bb)MoeFN3Etp9tf2fZyKY^Ad4 zy6v1S`z-pKbR#&Kyx|Qx-U=AN;%GpoiDmlNk!S+4)4^IRs0~GoX260NmCqAIqTHMG zF(;48ME}gjV2wG&G;a7#^N52^l)5GSL}q{pEt>XqYS3Cu50Pt(L*_a9+BySQ>n6ot z$ICjP$#D-kzRumiCpX7_dLs_USFo-#pErCUKxLUd{*xww5Kxo|_$G7J-I16H19lx@ zTS1QqYh+vhgsZ6Z-_R`T|4~Luv_OQnyQzxKBwxqt+By*9?N)lb6EkrBIun;CsF>nj zJGwtZDWWX{!Zg-kXW>iGoC&U*sn*>#LSx^$AJmDk=?zMU)TI%9#Pm*C2+dj79CATJ zU8VWHsBnW!AcBowver_6OL3$k!|x^{iJIa|#1mAEB8hxap!By`IEqMj%EgCLm?SLl zU)N-y<5$aF%Yh6Pvlhrv$wA=^rE`p2VC)hTSD3ninVYEGT6C~R7^7l+pD;DhTc3Fv z*hwg4+0V+|!YcdqOmiF|Fm5Sk_BXfZM6-pJZAnM9F;aunBCU|tNE@Urq;0gd2ep8d z7W>BwMby~*F>Y5>UW)8>+^q*--}alP890PASCe|JduWq;>ZyVg+;r5=-s<=5b-Pj= zHmmfr({wWnn0Z_`DW&^PpnFWWp5h1?VXwM!_kj^Po%alFUK`d)NHzil88C0_{U`u{Z;xY$|vXUE)Y(Ra*QTy 
zs;{w+&}Ie2AsZ}$?ATO5uq+M9Zkgl}?A$>pW5=f4wk1%~3lDa_K^IHgt2fO6CFS}I@`{b5Mo9yqSp-m7%R6WX(x;A^=x&pNnI4!eIn@*dj|LVi8zr>il3BAmiSVO+d+=bVVpVH@!bQX}=0z%c@Y54;-8 z2%0h~;qfX}YFQwOZIoRmmHLiyDtE_K+4o0+4*YQ)? zsk~KZ>HDru;cve3v7qLb1uGd{`Kqxpj_rG<`oHHY`+ujmp} zrYJJrS}CE6>}XAqmI?);wu~0Z+D4s}okoHjd@Qx)&~BY^o1Cof!bRkp(_=~ zIEFF@>B@Cy8I%uo&Jravn5yCwIa5x?57u%Bt~G>};pydppkFj2LS#t%+vO|@Ja2Fz z=Vc$lOfY1e@9JffSk`}E;NM0`_j;xc3(v{t(V=}ZnM`Vj@yvo{K0>LD9RVQKc2HDO zLzV2cIWuVGU6oSPf)PjZX!KuD8$z1Xntd$-2uv|{(=2Y*>cCuPNd8tHV<|`2E-oc^ zH~+M8XO8mO%q(aFyTXq+$w8vyrh)(^se!`GDECSDF^8~@Ruy<15GXzsQeqOtWk_)< z%4kUrp-e&oXBHEXu%;GPRU2UeonPJLvgp>z;)r&u1|&j$q8EChcQTbmDmeq!Dudcx)(Np3+k{`_5cr=!?X(_RS z6}$G+G4K~kma~v{x7XP9PDxgzhmM|_7N6B`yia_y!ShY+OP#tAE)U)N<0tuAxn(?* zb+`WesQ0f0*d=Wzi_*=GRRzSDu(=Q$_cC{fmZxFVmi_ULW47P0376ByB+!F1BGd>g zI?py%uxXvk>aJefu1vnVv6e`#wF;IlzFIvad>9@sg=`C=bGu@9D7jF=KbT~wbQqV@ zpq!wrg%-{8R=qf7IuxT~%_MY)1%gu5>TVewIn~m2_R6Y;p!N{ZN-3m4{aWUaI1;H< zd)FA*gf~F?oTTT(=DS@*N?Dc(be|9#$D!PIA`S8+GplY&Snp3EXD4uIi>eSAOvVI( z!69=I))%an-Tc7{V+2=H;^5my!9x{%KMo~>GP;#4&cMgVE>9)XssJjQ1guv?PIqrf_-KX7VX^QO3CDpBBNUUSP=-7b61@+v zxgZqUxR}glus}T_Z*__^A=k33rhX*3-G@aM2eupII}NBZ!3j8_O(El2)9-3Au00SI zo`f}4wzGXg#|yu};5uKLuK*m4O|v8M&3EEd>?sF`H`4wNCFYNl!L4b=L)t8B-B0#j%sU$y{PVv*Xd_a-?E(=O;bA%a;bwE zBm4HfM_bSGm}(7U@O6OW9|*a8#xcCZG0<}PvyHi0(m3!F&jV3N4UkLFHN zIeAas8MMpVQ`u{Y8!!^s(`ZO>CsI#aKh>LR)@8fe6u0!VNeaMs_nh+hCSRJTL?v@4 z%3`4wSi=$HpQn9j_#|z*y6$={9LC-@CONCSHF6;}B6RR;6uzk9`qhB(^<3NCsPPyb zVgIq{A(N`@N?5Z}7Xt+7PQ}$(U)Vyo7|U`6sBMVR4GVr@th-|k76ic;#$O9d<5R*C zqy~JqqSP!SWCB~_`6vs`Xq~&jhvtN@OyU%DIZ`r-06N5ssY2LU;#`ZycYC9Ci(Bkg z)J>^kqNNrkZg)y)sZq1+{|MO}6xrp$A3_p!P^{UeBQTvG;Tmm|s2>&V=joij;?n7< zEXDO|hou2{HXx)6{?yJyyE>)<4BEDBUR&ignl0>2A?3kBr??JFx9PJgIsh&vq%A^% z(J;t>@{^PvYY$IG1slwQm6Z6I1&x<@_!^QR!_Dh^(gS_(hd1dl%c$^)e!-##eUe5ijra)8ph-3Rx%AXec*uh zWt;ZF)#%tP8FfWj=>?GX;yediFPxfYOx5EhLQaMt#MM$Lln3Dv@$LzV9Mg`#jr~&}8s*E&nkC720 zAmF*8ra#czh(-BJvNtp8B(o+NP+A5l0@n3&CMx)+tehWR}?SwR7DZaJ(>p*8vxp%#&P0csjaBs-#&}RQHl(XZL 
zQ7z&|WP3k{a^8#gJ!A|GEZK+4@^!$1Gzc7dSGuDUv;PZ9-%3R(hK!HA@a= z06vr%uOeY%2W6McWH%c+0^Tz^O|`jzBPgzXLM*$+K(6eZJCpP3+IE1n+QyZHem%9~ zp0?}M@Tf5se~o6*sGP?$;p!pulIXEJH7)5}6$PkYDjM|iuP4CVf47A4bkqDI&e-ssX3e;AEJ7!xl zv1wD`gxP;=TOu@tFG)V|cEK+>+ObPqs-(3CV`%CqC(M%F!@dJ936-^o2 zLtzuMWB+9sthmLQ(Ou#yzw{pk*n}D7i$T#t2c_6W9QW|Hw|Bcy2}pU6L9ikuATR8s zCS<^cWD}Y#>yO_fXDP@a4XUE*lve$?F`tp_&K-?bv<#fcc!^n4a>)03M$WiDkOn#@ z;B7D#zG|#Yx7Az_b_e6itz~eQ+m~OtRe6MNkXL{LB-5wD7?!P%!g{wt70bL8C(l1Tvb)M!4jkfdxo zJQKmTMeaU~$^$u^G2ya{R5K>d2Jykp$=qq72Mo2S9N?#c#%QiW9ljNVOn{6En`2xm zY|&X-x!tY&-I+_OkUXL$)!#PNjkfG@@$A_rPhw@W>wDb-WBwXO7|?NioyvNT~N;o z{J#*Yq8d8e0!E_yyv*<~!Nt@B-8;=R7li48h;1odDuM}tF?IK$?>ePM5e5-0rLdw zycWrPrd$oacn-x&_565A@~8(F^GKWa(+#{j z+C1kmo$5zn@96l-nStDdXF++Y>frYvsQE=$;vhmiE#t9og^sF;X5Z-t53fwJaG4auus!Xc} zo`w1cw?V=9jg%WhpgGCcQZG+^={a@MHU1TgfwDxTsk9;Y5DDrqV zM#9dh(=V?Sgz#W9i1d3@NdUYfZZ=5D2+-Rmp$61-j{L>V-WWxPgUTZ(*rgB% zU+yOnaowTkLllN!5|zS9Ovr>NJ}#5m<>28N5t`PU-B5XOW&@6dAK9RjKZV&6M%lY4 z^4rIt1%xlWxB=sgyvdSFd+4BzzPOo%fqzK~-7p}&>!cUHLV?i(1zOCp2`TD%ZHITA z*0&qm_JTm2dPVR!T}h#+Vygupzgqu88Co;zHIWI$Srg-;v-S7>q_?GSv z=6&iR>lQp_%6h+#{S6tuPUuS&3K{qEfGMq+JpjkLmDP zf}RuqEn7G!I*yb{i+nGfq9qH5s}U~4ie<8`l&mSKR+yyK5~U9S0#UuBnN zt|HF{Si4A>sqk-rT*N#Xd*<76K&FWK)`$s_$qHIrsr4#ZfqT02shj1fC|7CA3tUi? z&9C(3CPjEh`)3wBd9xfs{Q}dJ@59h`UbCI=UJ%-O_Nn<#-*gJvH|GdM>20;9%Os51)lHdbL<-*V? 
z+g1`zP*jx#s7;9Ra8txL+6A78)KHv@W@IqHn1zha9`JRkmX4U92@K1f!Y?y`!O3CY z5iZGF5Rs65c^B~_k{@z|9kINj0$qYmiOj!qbOKR%Moej>3S>-g^f=-!HI7_~ly~=g z+GlZ9D%twJ+va$pDH(rkVFc8VP!7Tbx8cZk#Xzd4+EKZg*-69+aGe6i7d7pwiVXDj z;qe8h%PpU>2U9q1)%O7wBe!Fk#=9jA+=aHvJf6=-Sla>T+{&B!+t-ve4LM@tjC2H} znp1XbmBEXO>iS}J@}*?1ZD`nXd`a1n*Vk`8mebd+2+?dh7nImMv2KN&Id;UbB$zo7 z#wD}=;_M=4KqWfT{}ERqT)5C6zIh8qN=#C`b_#kH!(>l4E7Oixx(jX$pK%p7Rpa=j7ef@EMj>|3dNP6K- z&^keWa{;_ddP`S32}ux9PBvTn)Wk2*n6PnL6_o6FBQ5%jf%JULT#p-m`~0V4sPk9O z<+P|8y!{ScL19me%V3qzLMYfvu|K`;=b?O54UGq0S`C6)DF59x*A z>d<{hBYv-=;ox~bD1t-^4Q0(4Znvea$Su#sKiBqU*NA-WZi^TX<-oCk!06;t>tH?u`o6wbGZ ztQe&PQz}ti6;&+0(D5DwwwSDUv}Dy#DTQVg1fr)TnZGwhR}DLx*wLW#@ASK?P5!vh zgv|51aM_w=1^)W(Jn815n1F+fCBBlvH4*l=Uag@;kQ=>5)F~{tfBb3UOs)s1{4MsD| zcd&LSb}96vw4H%Q@!vxPM`nqOA~MvN!VyEnj#(UmSgsiSAS@2GRE)bx_;ea@j6Ac}>OyqqMDUVtsqP^R<1Z zL=Jz7ud4NUuxRhr{MOCv^>h|}J$rLe=`glM1w+7)Kxlpi(kxSM-QyioGBUl)wKB+e8m?nb!BBOjb#(W*rFyNBbW2pj>XlA z#_G0Tr?p~-I&0$=W;W|4*_Z}i0t#isS;v=_c8@=_v+jkTN;9!3gcs6OCx?nHEv;DV zJ$0@+(_%m|bPyQ)CotQ+e+B{Sz|eqkjCHY8x_IR?4qyvgGrC=Jmuu*IlGcTpDMa5D z_$mb%A6bf%v#5lbEMY#f?qB!FqlC!5uxRS9q^vp1qgiRCPDOfm%R=t|=ZEZrJ_3!_ z{S7ZOVaSdc_;`N+Ut;iL1^|Z>;`9dVo*`GML;4we|Jj*xWlBA!KF8Pi*sn|aoVO~j zSv=@$>pXnuJo9Ylbu7a_>glHViv8Fsd~b;RJktu<`EN9Rqz?5p&0JD{?h|8(nUL+7 zVDy;PU;Wj@QTi!>#gN+IH?a~j|Dk00F}_%6r5$+tB1>0|SH8P9Y3@+6FT*+qP4 zk*hsLMOdFx5I@f1a^FH(c)F+2Q@3F1RL$aPqaS3Kp0YSFp9tzPK+HRTIu~qVMb+d&wQ}JkBP#5umqXe5{oFe( zAuqPk70a8fVo^DhSdPaM7(q?LSPP8&c;@p4kJW%ROqii@CmG_vcz)j>G*&OzdmJrz z6Hz~5QhhrJ-zl*ySz+aWtGj+)M~6hWl2}#_bG>RigUBxu)T!KjPo2qI?b4uPSPY5S z2!mFI=9POT1>^;!HsY?-yuzTMMjVPqqkp(vH~F=uCI6=1IxC5-7uJGx{tQ%crWU@@ zudKMsgHP0yQT*4YVS7WXDd<>+7ZbpMWhK_!LNDX(*tY)itEV6HD^3w@=wUP`>4Udq z1A$Ehd=UQUXmot2o#gfoMJ4T^n#d_z|-Al?V!>pbfJr5&Ppa43cs zJ?C=FOqTygy+C5H2@(r&AORC%P^-A=WP^OBa_?AJj>k;K@0(%{aWaN^h+USObXVw> z-823n{z=&!Q5VAZpE&c}SbvLHBjZgnnRH-{Or<)^1cC;W5iuG}5ad1^#mQrRVriTy zzfOVcED~x)&xtDpS)%X!2b$^129$$Yc1g4+f^y$;w7X#3yq_imz{Q7olxAX?w40Dv 
zDV0Jsauw7|-{%67mkGxQU}a*3!YAdlsdL9BfTj|dt|B87zpRo%-2jM~B{EY97?}Fq zV}r?n7;IN4Tv_X`cdfp79Vuvn|AduvYD*{Hk!GTNch|~d!A6LE-og0K8g|p)Cr|5O{?BLQ)i;FfqF4ekWUK#GJqJTPC!`N zahQ)1aYFtI<4+FO+Y0EXZp2*v)9$`g&8j?xfC!3P43nPx!tR3S>ugUj(57twbu)86pgyqh`06|Ix<}149k&B!&i# zZUI{kpa)ihD+@wT+OOtJZ~E;LD{$67$=_ny_uLh>sC^7vgmdVhdhq(Ka}beCQAGEijEJA^H*TDEp7JEvi2_b+4$gT7|Dg3u z*K~I@15%wiHuS0mDR>y4W0^QCI%+Q#3D*ZRu4Z`P_Uy&R2vu5cisHw!3300n+RB_N z*}0sJ)KuyQf#6HKoUa5&Znx}Z(MkP6y;56P_)S&QB;n|VzLoXwMdpPYw%@D?Xe)PC z&u!|EgYr@<04K_w{Hh3W=dCfv1$!=6eso1e`vEscN|@KNyu0k9#HackkskMZd_?^3 znVEJ-*?t&=>nK{&3eKEvz9Swze*ZYyQGX2f!cK5OXd~Ev9Od6SI-eu*c3;MsMEvQ} zBKDVLmX4`br|5N=?+^znsp*HCH}!pi(2 zw((&n8)J7iKa9JX$wIPHhs?YFc*G`*BY%!Qd@3ay0aA~1JO3{dW8%&!|9dV<1rYN| z;&|Dd6Q&Y62S(+hShh-{ELS^w+G6P-58!I@W{pC198G;|xdm1k4-t5QCLV z)G&m3!%mY^3ee#dpBDX2#_C+kw9kzG;%6qocRm5x^baXTEls;jsQRKz_|7C`+R~O=P1XVm9?&yy&=V2jkj}m zrb%bFtd5%dW9c;+f$Ub&d`u6vAL3ogZ{E9=6S213w>>%&If)w&@5_aS{p>#f z`3d+5#n1|X{H$~!zs;ru6cr>%Z`9+MY~!ts)+roXujwsASYb%zkG<~;vK>Q0nXhJs zBBjUt%Py9NT06}hv-E%IsbQMB!B!HhYjy_D^A-M^`CY&e$~D}_N?{{{Fz9a`^Q4%U z@ZfnXhrhVd==YN9PU9Wvc zl1ryuOL9M`lwizfCh_}aAmu%4Fvx{H3btV~k`L6i-o)hOpt{hy1M3#ep5{@59t@iA zTlhufMQD#5z?Z*ZzpMZ?f@7X{>G}~^=N)HvvhigHkxo^|06Gt&4$vljN?*L#;vz( zD2>gQcj#j|mFVAgRou{}of)EB*Tlpd=PfX&<2>yBG4s^=}7 zr|!Hkd8f~jGV2cgYiNv+)g|4CFVa)%1z|LpDn1W>&QWyK8Q&&eozddM=2OI4M#N6Kz7dia<&MeAr_EJNr^RN6BAo(+ zR5^}S#nx3;spJlKJU+LXu4%W{NZmLzFmMKr^?BpnbDdU)LvJ+oT{lnpK6sfDp0R%v zna&vv$_E8W4r=+frcsofr~pP&dpnkv;L+nlMt=l}e3Fr#sl+vn4+!AED!fkN?+92b zNMk@d6uJxobKH10isI4HchhL57xz_`La+MJWZbdK;8}3Q%kvf6Aj`30Gkbe=Y}Oo^ z0RTxrw!ilVeM1V*@(KNf!~U>~M^~C{X4`pyE4bpy?OOhjaKST`jadzb1L#g#qe!ey zPd{IwgY>MQ81)QFJUsh4Ze(BtHT&Y*@gx@>^~a4^e^JjPdvR>NgYX+`)$=xGH{sC3 zYxEAAR%f>wo=qvd+Cq)@%M*obj(r8*IlKgV{;g|$drr{)!X0kjKDsw}6&}T(xr7wv zKBs29H)U?W_>chPV;lGUx8UkQgdJ4Q2>nU~}?UL@qJ^A_s#prwY`CAm< zbTn}kk~yMV1l@GM88%PUDLhBI85G){2^17uWKLMplw}a&1KKwi+RIj_6?W5RGI8Rx zfV^>|C?35rA-Hb*L=RXg00kR`tBLmE?YACFDjDFOQ1{ZCIa*|rVzc^bp{j$JUYbR 
z{PhNjzdN|*9XKI5@i&?$pMeFIlqn!dn^Rx;VDQzrGCboJONnQK+h9{t*jIbZRY&b} zyt;ebaY~;{-zI3k53!P<7qbR*3x(&)i$3(7T=>3KO4p+fV_j4;|G3LZD3dc@GCqIi zb@PPp-tJ)G89$y<%`Oe4^u4&rS*4ru$4SJBx6lpmwCm>f6F08s;KwZhXQUQL?oydo zdq+{Gd9W$ZMmxJ_o^$kBf92Vcp9}ZjAb|Uiqoeyr(Y(Z97qNOl>n!7}22{MMTV06% zo@7*?HvEkzu=STIPw7-ZDo)%Z>51c!9errQE5TbmxARTl#O47ePFDQP`G{~qkjm+w z%Oom6k zuI?Pau%u^JwN%bD<&bOvN`i^R(L2qsrWtLhqH#|PX#Z8dk8<-AdKJ7afXFfA&;c*N zYMoUXn(=Na`&Dagfc6we4bgg*yYmQ&NG)s(J)$6D8C33MM@PFe2z~lky^hGqKJ97@pmvLX4{f8 zGnUVs`8s((b%VpX1>52*zB=&8W-gyO<7bQ%^y_+KbPnLB$@b&({A2>I3<&5^dq3Zg z!j6uB+EuD7;{bn$+IkbI$Y{baCXNte6R)hD`hMm+@kY{X`&gEM89(9|rLZsmeC|$k zeRzv(Y;e$1;X+RSxm-NiQwz9djOj?P!fqMB(Nd*&3>^_(TUy9VKVGQgaTR(qY=ixz zD(+%I^$nd>rS%?@(qOgW;h=JdL&~i5@?{Jp+l>O#(oyLT6`d3b?RN z4JLqBWwHb@UmpcvZdUIa*^iX<1nc!~Ki|VcZm#UOeh80WkXvCuXPeD}N%7%BV699r zUWW-Ll!*m|MeAHvIDJC2ilsClLCyvz$2gPs-OCsy_*gD!T3cCTfNHzq@Rg^iMKrlH zSAx{DNBWt%Jg#tgYG|g4+@a{;)Z~Tc-It6nLV2yX+C|_0AqI;ZnPYe|i7~n>5FBk6R_sJ+a13uuTbvpH#3wKL${i>k{fH-hv;N5+v20wskkiLek}gli zsP&0d^(}oIiD^AKz?nK%zv&AcIO(S(r0$EZ*PF z-*B@OZF@?lk|9`>qq

j5OEScBpA$90YThpFBbuNQF{{VKZbOq z^xh2%RYm1KN>Yee-&8Y{HR<-*6K2;=7%PMoXYgkP3gOt=yX)6QAT41mQIg^jm*BO} zL<@qRk_}x)P(;KF)|wb};_WL7GlHX0K^q|^XOJ${=q{&)fcKvAZEL&H=!T1L)8c3n z$?l&wu|<7~8MyN>zUS18)op~s|}ze zNTO3{%_^{?*M^YqbUdtqYI>dL!nWaNFIsrfA%XjOB$n92WP9Zxucn@_MyoQen8_LTxe2g>fR2Oryb@UKA%Z%Mmc%Mho9W?ZoRF z=rL1Uh?#TOxq=K~w#$b~ee(-e5UyNaym;i;VjZJiReWTwdZ0k|P11SeV8+yLP)p zpkx$h9gf36k8z~)v4ARup0IrVA1X0yd*1(IYT+z zQwrDsB|m+_RebE%S!9UK6T{L0;rz>{zo|4TAa2=;%iTh`O2`IJjD{qq(UWnBl9a4I zW$2On%r!yzgoCQ7)RQzOBohgq5)?|2T-}&a0@0Mn4s<|7`G(4Z0Ko-GGBH__nrgpr z)h0~`xa}`(rz8OI6M~#wlu;9eKdqmVcaz7vnU_L3jc>4fk(@0`IXM~|7(1Gp3e2>x zlj4KplhV^C@{|gysAAc&p(>m_>Z?yraOB&_j6Lg<<(&5gMDSKiAD{o!rl!plibnPppQdd ztBX|I%B)Q}_1!aa_C&v)4Q(-sgy7)h(PYBU#Lu=+D`5_1y}xdWJrv|Kv@;X@_*#Dn z&I8KFaAiCpACX=`l4F!D_EJx4m9?a~+7MQmXhxQ0Q?sZv|5d?bAb zv$u20PqG-ouRDEqzH&MxB)x_Va_ozTRq9o7y%=bVsng{zl4=+5oC5&tJDlHUBIInvChg=*Yc zIdz|2qVH2joW*4)%K?y1n#*( zlpE$O{WhHeH*#z;Cvpq{xu6fFlzx4v_NjgAo0ZRoD^E_6G@^eQ+Yd}oEy zKVUQ*3LMInv<7`BYV^349NgsGNozgfBm~Ag1 z@Z#mj{6R&_+nkT#nW6}?P;-;f1Ygo zoIT>@BCQA#LpQYwj)GAzAs=MY#Gr%&aX^Xq@(FMscp+`ZmY8MPnzKV8?!)m*qFWjc z5 zU~W|b&syu$<@{MDTL!0czQH#Y3FJ~?p)O})R#yvMYB+=`Rgj8;Q8-t~p6lcr(50Jj z)$cH`FzG_AJSkY~lU2w+`N{a(vLrlDElasb-TLs1rjWaEVaA-lWM?XdqxQ=VOggrf zuwJ<0RU#Q8aacY6!Vd1oAP@&w#8npNom_7!5%i8VnCA=Lvt%>o#X9VD)i1$|eGb$= z;6JQE94HCBrPpm=u)$>E%D8C$rO&oAmndZXz(BrC@t$?r>I!0%!O=fvEF>KJcAn!b zNJQeR#Uv0 z;4k_lZ)SY*AC@dolgj`AK4*ApCm4ARH(zf7#(Wn|y@GzcBY<tuxnS{fllji_$wnxr1x8_7%aLpw)6pFHzhm|C$*X-`fE}@6L^uG57Eb_A06Szii@W%sYL7)|19z!bSHYivxJ<{ zv?CTg>;AgbS^CqQWSydMSumqp#nAvU3j8r1SPW|)DgJT#-LJn#XOdPGaf()v=r+l` z_aj*|t~_66_wIqNJkw^$Vk>$><|OOJE^ix{G)d=t`B=%i7c#vb7i*q!XXxw8p-P<= zt}L(Di)K7sezBO0{M6L^W0U3sDhnP^LvwXQM~1Q1m{u$!>f{wo$XGnz441m0quGTCG$3@KoKcA>EY5c2T?4x|?X( zq|_D*6F%u@0+?5~slnn#8*)VkGXl!W*jwsKkmp~`hFTYA5WJ7TM+9%iVvF_Z-SaUi z0N6nQGNej_;WHPfl==gN4vwbb;-yk$ z1_9V7)A^nL5es@h!hefCLQkcib-pOv3OruI#`v|zY6(C8q#ZUF6E|n9_0ZN^1OA=K z)}z;6BM=I8c^=4j6>!WVk-l5_y@j>W+qGB;i6$_MTC=?#;$<5l3_N{e(@ORv!#>xE 
zK`@N)k$Bxd3U3j%Z<#!7yFvro6ZmM&6s*@jS3)~TyYi7pVeZE8`Z8XCd;P!%q_jGx z1Q0q|OplZZ5&@l20D)fRI=l<=4#cigf?i0VGXgkIP0I8*Swg@mq18zTz;{gM5IFV+ z{}wsN&{OYFEa=o54U;wST&LUKj})V`E$a|wvr8zJI01UA3KO;L9>`F&gBfDQaXr_=d* zfVCwxcDqhrN0XBi79JL+6#hq+k^d`9Yy21(4^tWLw-IJ4&BSDezQ-~chmg%0r7%Xg zGYQ=lyqG^^NqL+UEg+CJYjG##ifrcw`)7ba6ElgVUj4qi}iDcns7T9gV`LrJ>=`k<4w z-vi&@gl~ecnt27!iDsuhVfw!W^FUjEwax{DDvQxuy{*prhdMu2 zR8Na^$%H{CGnpH*BX6;b7d0$06wp;=a27DzvTBWSvQx3Xx?1`cjfa7Q?7VYp^tp8U zaevpGleXLT?`vq-x4&V(ZFnlfN%9Zyf^LDi3j`DmMUa$SX#dtNOs-zQzsRo&ewQl_ zIFgzX{eO@+`AB1i`9o$-T0%kClxzIz@g#y`lV48Nen{?<;b5CvlyI=PQFBC`!BBCr zEvl@5+d+^e8X(CinvvMp5e-E~iUuHylKj@%wBWDyC*!IfpBeRW%iE&7jBxbR?!u~F zAF>vl$6|JD@LaB&dGm(vs0xrRz97$2@xCbYWMcUC1<5-Lj9B05h;Q>rZPi2S%+i|B_jty0G50TaX%xb~xwk*m!U3f40VLXiiLtjf1-gQQhG0N1XpkDf5N+a z!dyRE9Bzy;8~3dVC~7&RjLWMfKWxi;MH6FZnkIVpoJtEl=-Y#h@c!MU;y2{-|I{C^%*hGaccBfN-r{Y-*T^dtq>>RdkJg{lzAu- zF%;f0%$%2zF>j{fj!>(I^8=$RpiFE_fOE$!LkPceA!1PG6U-g%$z$^B@c&acGjYr> zR5fd|<3iuW^A6&=48)UE*j8=%=z{@;MK{JS>~C*NT`{TzGVX|qmZqmKEfU?yPy*of=Bmia)-XP6w>3hMf9x$|{5^5z zuIX@$zb@WG3HsiluiDDFD*>^EMq)cB0} z5n$v;63b^MPva%~JaCLhvc7+SZ-o7113>KMOwgO`);OkJpwq0&d3&~A@ol($>J3r( zZc1AS-^=&$Mhwke0K>VXj&K->TRw~exO=BoJ$+w*9?~2I+*M&PhOC9%GkxfoQS1Nj z*sD6HF$jZAh!^apg{_9-Ty*x((|jK`))3QUSdXd}FGjhej!31b)NR*9987o`+Ym!cR-;G`#5kro z%AAWw&c^B3^f+8RkuX?|P17DHSJ^t?C@az^;*J3ibihSVDMcfwXj!3){0oiyXW6po zOuIn+_0f5IR-}13R!qGDN?v0V@LFESH=}3nf})T+XzyILufx(czYjfnfQ9{2maiPJ za7Qk6Gld=~ATN4S;Sai_hY@&rZkpO^C}w0Mh_xPi8aLMv)1z09YU&lDxkD;?lVAIO z9l2lcP*5?sl)T#6mi>3ZDVA;q>vc*;E)3?3w-2)XULhWL=YrCczD-?h$8%v+aOh2< zxOeLZp2vF52M7VMupV@}R|*K<=bC#fnaxZ7X&Gbi30}?{;Zes_?LXt?@eTDvd&EEd zz<7Atkr|ZyI^}3Zqq`CKV!6c8HT0UrBAC~Dybd?lI!3t*3QGsgSr$#I>M*Wep#V(56Z>iQMkQD8j1 zAlfKV3(?X+3n_D9JPK_>?SGwK>d+f~$uER9wb3pc7jVx~sH1L0GzyHT=dN#~C|nkS zj=EL7j8YtWkXM@q)ADpSUWX1pc=?wqjf~(o}La^R15G1{Fit(>OZ~cd8gElp%~5!_jaEv z;F16q0q|_1hRrDj%G2Dx3b5!9Mazy^Z~e#e%qEFv2Q}X4P!40zJHsYy?40NPz$yWY zwg++-aUF`_GQ+uoFS}l5`3MMydPH&zli6Oo1F&ce;N{;Iz`eVFch&v4)VrbR>zpU_ 
zJ}`dbrYC!G;*mmDi>aBp(Et}|PuvDz-}>a?4gl;649yyFJ~sNoAxbfOQuO$NwHHkX zau@TbPcQZO8 zonr_Sr~pHe!!?y*zN(yx$4X;;wYYx(EXE!XVS>k%K*`bb0>Sn8;CqqqENV{WdruK~ z8j`Xmv>8#lp!B6fiSIcrnUo!Zm&`^m?uxnJO`rRwnX`!svvsbj6ueU@dbbjUiFgxI zqzTn77$(oNQ^G;U$xWCi_hFG8B$1sY`wV6G=D9Mr?-HBv6nDksxarf-&76zff`(0t z8oml;qQFz!6;t0$pSo`5WaZg4c`j~2{hgu)Z0^((4vw)s*!4`2lJdz|Ns;lA$e2ln zvs!oH$L1drz0CoGDTg}5vJpLH|;|D5N#+=6NxDk}X4<3avm z6yYc;5*Nx-{4OIr5uON7M4sIs&&3}=gl9ptCJo(DL#;#^drC88nr7HE&CqF@;nNIf zcUqCl|2GunHu0|uUk3;tTfZcD#)@z~zHEH}NCJ-~;fIQ+_r$L}`xb!l*a_XErhA#+ zU8-FFvU`yq+5I;c@AdT$78VpZDL8{;!FX|Uh1h_|-1femeB zW1HC2W;W;5KBw{x%s>0roF3ti@nF^e`r!<6jU^H3{0#_Gmgr&UN3KZBN6-tY^xpo z*d@OhYnZ!{`u-ob=sdn$DH)HOrez~zC$d>CQ#Z#Jy|!X56h?}`iccL;0pA|nwaS3b zXD;#k&)EylmEgc|vTryQ`*M4lJ;R=5&*8<7v019`A42b&*6Zj!7+LdkNHwU`&#c4O5& zn-%^CHiNrV`VjdHJI+-NQ)Fr+r@qyYSH;sGs_uUi&)4HRa8U=DmI@0`PJD z6#$Fl=J&;Z^94ijpZKxRQNWR7R#E|<=5A!LApXnVByk*|DL=jl2VekHU3!@In8zOm zWRs};G-3}_spSX^Ds*}cS)jZ4GyiWgJf9wYc&8GOOLD|VjYn*H$zKvx>f7OxxF_Ic zpAXV<$0Q*XNjR+l@Fv_?V-o?3P5u@&PT_ml7pnXn{S1GDHZV8wv7JH!a8K!cAn>_z z`kKafoRVN}%ERHSOA=4n=n)g09_Kqk9(&i_%>~7_a}y_fnW$3{<~%&ZdC9(U<#nG; zUYvYdLt$Un>#M>g1G(2-{{PAvR0qRP^{2>J)39sDDYDTot+~H%g%}S)cjm=s0~`nY zVuSAcZ=Z@6&Umdmwf=eke_+Pxm2)Nf`&qp3tG9ojxBK4t<%Pcf)ag~n@N9MAYw^p^ zLndF0Km)kSXQDqL1%k)GGW*Z(7vlaG-sSU;3!f+d1^zE-En!Rm@NDm{S9x3?0DpJ{ z0gmsAH%TD-e*YhUAM69{m@cbmsUBt%)chTyl2t*sr5VY}s|HbNSjiTPJrlXNCtG7n zx=8n_j1x++CWx8@Z4-3P8diC>vbk6Zaex`_CGG)A{z&{RC0Ht3xSX!J)@E2K4Ne6U zAW`^PeHeOdINK`~n^?j`Sa0jf=F0T#OR?_@%aB@xZl(w}>_*yJ0$LNFfgf+EmlZM7 zv#Z}hvcsAlPa@VJ#gyU4dz4?%V9u!*eSo*E3qF4Qt{IufrOc%F({wH=WohR$mM%h@ z2`$-tE?tG6RfV;zqa~i=xf<$CYt%^8Md2(jrBXSyr-&iS(Bd`UZa%DHb2@4$5}`2k z+0!ZZ=%dKAQY~WrwC2nDF-DjvQHfHg?4J&`S8POFIs<`HKJ)RCt#dCX+R;uIILn#~ zgoAtob-=ERxFFbnZwbXhAb8Rx8oRqe?N6M!}XhMqo-*;-yYOI_|%+6m=jO z=|Xc_oZBN0*b_7tikH;n=9z$cfyaJyxy zMD5Dh-}Z7!vfHkesS-bR`;?h z+9v%#!H1C+*f0xr;2u?)vUj?2u)Uqn)rj>-4P>Za!R{W#7r)pcjiX41bGuJ0&w7k_ z0mJVFFLjR+il0fM#3!Et8-d#w`mOM>(UPQ+RIad4q9}lsyUZ#W$gSCEy;OU+#yO}) 
zF%6d-ay>@sC$`bW+}u&z%dy4T<|1dvv5`uXkvj(02MPSFvs(n1a+UYL+Nt>J8*I56 z6N4*sCh~A3Q@&9nTFSeLxl%Zcv&%9qXr%J$;U#kiFju|2vQ1=jOsV(IR=<;^Ej`8A zN~K3ddcOF|RUS10r5lQG?}VE|B=3-;5;$rVU45giHB4NI!1kxW1Nd3}taZx~S9@!s zqSqz2a@&A2fs#IZ920pu+Dv3kp&Kzeg1GfwNhX3#lyeHsGnyOC)^GXYh*6?OarB>w z!y*IM;HR0D3%>xohvC)X#!SbI;mE7>jaGw&Cj2|s8?O!vCUOhW+K5>NgQ7(hf1PoM zDn8kR8NeGM%kMKUu9KRYt^#I81u`HbJ>C)aEgHk1cRBhcVbmeiy9&*op}mh7M5W2{ zO@2jv$G@liE&(p0DZP&R2cOZq1gds#j}uK7~NSDCqo)J z6H0rdmccf(*hDu1_E1}GlTxX8^pHkQpb?b^XHw)EgWd@AX9Gs1q2Ack{F}=n+y9=h zV6b(_isd!=J_pG<%}^f?R?>TZ)}7HqKyNM%D|R+!J?k@PGv;i?ob?#kwR5paJnT`_yQkB!J@R&A zHQdrGGvym$gb_v<*rK?a7w5H!@aOxttWHsWR#)$PjQ~Gw7^$lS$^dO^>Lex)hsY6kkr#0~p}I zX3!bc3lvbeh`X~~i8ceaEbZwBRbtdr4=Ti>w<4_^`{P0Pu=-!iCVizHY8kA!rcHtK zRp|zg_PI2nv)qsM_xytnG48HNTXn5oF&9H%`VjK$>0lpd-vVC1p@EcE$<7x5?% z@_`;U_?6BS*pt7kaGIA76^{#_cC`mjOOr~Jw{lu8N>a-f`7EA(gbL39y?Mgn_GzWM zB|H}7LCXHWIHy@=ZT2OBE)^;Bw9?V41a_gHGbK~aDAp6QAC+i6Ez6%S(I*`Xhjkq_Eh-9da5hk94zzkV6swf^MB_;-#E0 z8SzK^v(^g_D!e9SI{3+i*4mBsuxoP?n?2fmuVbG3d(+;akpgT$mZy5F!LWO@Cu%_v zG1WZsDWk|9FV`gx2h~w1GsV27q=b}EZy`5~3U@pDQOgiitAI;u5EZ@~3o0C|ff%v> z!9SJqwe9DCsI%MQ5!aTB=G*QYP5{BvxM1k#vU3^ zfB4ds&ifkA;(ixGNPq$(?&^_V)nO`fsUmRNrXVBlX<0O4!F{|Iy`!k=o)NwwV4iGb z1bTp{3hv7aqz0tdI%dl+tjCLf+=Z>dnCYt!!WAQCIU+~g*+cTQ_QnK^%57mY0g}kE zS@yjyM0v6*9uKlYVgUH4AlYD(0dmcFF}uXVn1DyY)%M zQoR>!`}TFy%s47iDKZmI)E_JR-&$kt)?>CbaoXQ6M8fIr*?(>n;8j29LJS6T*$7$Z z^F}WsmM#J~==PrUP!E5!5ia{ajeI~6Rx_Sl?#aBYFd_{^dM)vI<8vPkUiQ2TZicV{ z_odaH78)#JPFD;uY$*gjlKP8KltwBf+`>nsdZSiTnqd&YCu?kxkxUGCq$Qe()y}fA z(k{l>^do%MR8fdg%3>-wK!(whzB15ILV1$$yeZ3A8$;F>sWXk84lsv|!u?gnFpY~P z3)W6H0$>5W>qQq5t3+q*T~3p0!*yTFCy#uB9gaeTt(~jG&9*i(nySMfJH-v@0~$#P z2+=ww$$HCd)rAOAJUkwZ>;w&D)yP-12Y6=n36NI@?koSP50IaiKyQCnN zKoSN;l6WEqtHPRVm}|08%xVaLDF9NWu`;ylcLZRXD*OXxH10RKRxwn7dE2}V4rHWf zs_%BnI-8LEoddAJkfwbkr6F%pAj*S~-yjIA;2!naoUc$}m(5f_^77VOs`L()nUq!- zfu4ZFx2IY(JK{~EJp*m^wDfDtMY56bbk69`lgTKW)eLKmtGb$R93zbDTnmmg!u6Fg zoWA6Cha!4<2@?X5JjRes*6!~Xuay!8zsa$}Lbq_wc5{T!-m-ZUCxBrf@*$Sk`!d%a 
zRGDPhy9`+GscG7oh03NUXqo!ThSwIHw%JI4SRWjeam)iE61 zXquxtabo~a<-d2$ki?9eIejPs>Q~%tz$}HR6elSF7?(lLuYLCB3!n%lT*DP=Xb#{O zH?^$YDeOwd-yL~nVr*QDopwVOJo}`>6qEc#EL3egg828 z-lNkD#f(AX-Xp>wMuqaY#$tKvYM9;3XjJtU#jW|i>11YZW%yS^*#mldo00wD)*_e!09z z4Zj(zTT@=|pF;{K5&OshUaEv!Q|l0`R@JD@rRR5m*|JdLm`wvsnpHrOG*4$e6_mcQ z1hXiRj#>^&qu(zrQ9MvCJeZt#N#dsqHW~JWJltuyPp%026+jJMO4Y{#pmu~`?Ec!` zUR)uI>tYx<4snnw%z?dsCb0YAGSNuT;<@yEL!zaw(?FAE6$zV*fm&TIHBLCAQBb?& zrn*O3zx&=_szPG*Jvs4JRC+yL0KuI03q->4O$|++h7V=mXU_XGeSXfOoQOdzl}OnR z9#^mw$TxQcG=x+V!eAXHE#?M^o7^@83Sc+sBORU!A)$)#xj@JpVXA^aF(`u8%&zGS z4)6s{DdI|6qG%e8_Ty^nNY$>{Sg5>S3!E8Ck|s-VYE;05c%q+omk$ zwjCUlbM*AXj=Bo5Y|E_VGE-H73gk8OYSMRCnZ*2;-##YCCp6q^L@!y^h*6zU-ALqf z*9?JrJ=cX@gx#~#b+?Xi?t1yIHi~$6us?=wX<-E*Mk!$6QQD<_k z0Z5%zV8EFZ$5P@Tx|IdXU?%c#lp=*F{nBnghJ+#28$c+`OFz*3K9CU(gr8K%(UV>! z4B>j}ksWmXK=`?+r2QA;O-C!eP_QhwD-L;G2g!tl~S@4pZI^p%X!j z62X>P=eV#+xC{XpV_ZgnN9Vfd&aLJBkJSH#-p_v^Q_}rfAu1q;vG!j8bph0;v@!BH zXoI>SERkDN@0%B~8Bi9Cz`;o_>e+J&7;MjRpJK7Q-tX?ll&)nF$ON_yFiYFrb?MX& z8haiD@GH=IJ)-^Vgn{lF+RC#@)(Ng%izRi#gND;t3JDRejXbe!+(X1Hma(!O7pOX^ zj77C^9TNK2ORLc2aI|c_y@NUC7mN@v^{!NfQk{-r$)ujO05KJLC7QkG46hB%IPv8#X~TQ60q z-ZtofbW9()XplyN0Ab=dJvVu)8#KzNz1K12UuoxwTEYiSk#Ctyw}@sn!FowtS-2vk z3mmGFq;6}UR7u(U*u|+`Sx~0u%h=phNtkCF8-TeCTq4pPL&~Mb%eZ`iaBzF`@%(r> z9ecDwYlu3%+X?|KQy=ay_o&QnsmAJ6rhe{M6J7&m=NOtat4MC@OHEUgyAkS-TIXom z+bVISvVAqL5RJh^pb4{c3{9Fb2aHCka0OJf2E z@KbPb+7_LE;+i4E1aOITK(cm_xsGn+Ci>K+r(vPTb9+gL2d;}@@c!>`TNh>=ZS`BR z3LPelY;-Zb^9P9-;X@F^V22%ad2pijY&sXP@%;$RTD z*10_zGrL4Bh#UNg9+IqgvX-@1*(nh#Q($W+@H-Pfce-Cl$O zM8ecrZq%5UQazWN3yvpP?`%s_Mv+!|q(sS<2^!28)=A`cpb?kHP+GiJnj~BT%DH;$ zbelRpw(i%<*g?s3EiR)Kh)j?q!1;9EAuO+hUX9fELvc{hwV#p^?A3y)e4Xh6i5IQm zB!VkY%;k(*1c4{0P)yqc3r)mKnz^PbVOYgyz6cm(*TILX1@of8ysAzSMVjTcg&>^| zjENRXaYwl!&B?GJHAiXLW%3-Mv*#UFZ2BAJM zm!U&TEcB71UnQ7#QHeP0BTt@7R*5)0mPIFIup}Fk43F%SODw#&n5FayHD{f4%K~78 zq_}0n=aGIs>xnW~L*mihpBK-f^8W7k*XPUVZB-UBVQQ0iyKqkTWSM~$5;$T=#nMD( zZ^E}2FtxM74RjF=-UF^9rz~M$);aG2`L<;_TxAZx-8eK;U}vyW1A9)tf!|}?JJvsj 
zB;u*@nxiTyks=l!(krm_2CZf zpj+Fj5MiYtZ1YZMsJ4fYA%gq7R2=z-j1`GaqA)oxy}l;l`EhWgAq5N(3e-#0)RX{L zLzG~E4=1)T`@n-f#t_`pdvOv-^=J=1m_V@!M0Ch!#yn*ymVxr|4YP)Swee;EP+Y0g&Bc#?DeJAl9&MmgwQzU+nRIn(5V zqP`>2zR#!%BkQOJ5%_>@Y!g&i3Bl93?JBj#>e!4etQ|d4gJkk-JRZh_0Be}EYMQyV zRQTJQ*O%v~$A{a?c~o^(tZ&K7;Be0Hcl1w?cjf`_M_DX;PLxTYRRluBH}z}^Kn^0} zxttt|*lw+IPAn^uT?VNuGF58S+`Z7Od-on%46;ImF-8ps^Z2E3pNmyuH2?|Gx)64T z9VNJbsCigP!Q)vy89W}P-f?cfQBzvQ0Lm(W&m1-9>v&3XAKVoXb;CKt#*l?6GflZx z28{C`C!I0aM#%W)@zHOp|jIe!z?eV3lJA)C#))hCnGwarDD!yi&^lD2Arlo42R@v>G6c2E9Am2yD&{#+i2#&QHlGEwDtjwS;H_}PAsmP3|X!k)SEe}rvhQ#jU90FNR;bZ5l;B#6NC6C%H<`OF>wl|5z$S52;Ddj;F`5q4w+TE4x z*w_tI;qM>aeTZ>S-?d)+0$~o5Vok{@9RHv`=b(}GAwC5QIA!(&B;qZ5y7)RE$dO1m zg-b({k z4ruq_lwKj$n~M*@Ormc#+1q&j)+McVE7Bo{iUkE$<`A#co`Er57$U4-Bv|@DJ1db9 zL7LsER6?{20s%`F?JVNg<5vs}?vtA5Anb`Ac;zg|qE;cT z$A4n&>OCG*ZrmJe5E31nyx8VUF&iWHbB02k`zsosnbZxlUR^hLRtXV)dVl5erHdEN zpP_gk{(tdvxLAldb?!YlyqJ$v%N!*mW$kLz0XKRf@*zPS0oIhvrkro7ukFk_@M}&H zi71^X@6zdcd(39}X-Jh)dOVODbKe?DpslUBw_@cEygom}ll6c5czu3)=tXNa)7gQ` z|KtA*l#n*`@|s?}(bTXI4%fkoIJLEx!4HC_-E8J<4($P&nVeiDKApzMsT~nmf6jV zhN1UdP*3F(O`~v$FU_MOHJR73i22LQm#5Ql=(fC}>OIRsCLjB+=nwi?ja$<{pr`oVLn#|N5&ah{ue(V9Djd5iBeN!2_w^ZC^2Zja@aPq075lI8(Isl z%!cCH)HP%42I|fXeOv3AqncUOd}S{9`o_Kv4m2pw&FJng@%`x~XiTR&q^nI8T*R(> zN$uddH0NzSJ*oZHJ+XVU;1)(4QU+byBGbgR$}AK&L#I2ebg995u4%fSJxev@o5GWbyg>$3=*s~z9~yfLg)eTrtkr&WS~%PR4K4E0lZbj;#)Sugtk{a;hXjn0 zQLY&2g|U!L-BQ60P1>WcLT?3G>3%)5X4zp!=>;Y0Ofed>yE{po<8>#3H%nT3UF&fgEC ztFhsd@ma%(@>}flRZxZkJSqM|aU*%O7Ay72)?YeN?-a{`TW(W%)I7Ub#HH2)^R`)z z!4fNr$ygUO)Han`)5Q98^YNr^%+UbSmu%^>FvUi_DD%I|-(WVyUe=(IpATU6C0~86 zU|J6??Mxk*p%t^2@SW({@fBD z;rBb#Ku{9+@Gf^$B4uWUkzgGnm~eIX)OKkdn&7zZcGu*>n>~2JR(B0W9JZYuPP-`< zzgd4j6dQ?jXBd+8kwSOMnl<_-eYky!F#|A=QLA}d#@w;yusjfSg$>hS&e%;$zgd##%FIdp$-#`3hpC5nj$6s}5E!*gAXz#tk;(UVGpWMKI+zzFR5^PBwA0n@zwO9is5lL9IH zO~sxgj*9IRL$h8|Yi~fbf?3K#y4HX>5ibJ=y$B9yL)lu~q|$^&KfqHkd7DGAh(y+H zAtqWuh=$mzS$jMt( z61ksUl;UO9%(**?o8+7JQHG%PC?{4{LRX|LECPu~dBrgQ5Fv~T($EHJ=(b%zn*H~k 
z<9loGsI5bVtGzO?yYygWI0zI_!R;k#I79&nmr_H- zSk7M>%dhzEjni&UdhZz>NMWxJ07$D~)tI+&lL*dwaJn&HZks#B#2IxuCVfK%op8uJXW3Q`Z`C2f5y#qv?OBH@e@Y}Ixz@VE=3+)ee zYIORVufX#07NRiBW3nL7G-{U5US-PyvMQcAxF-9Zdz!!^x#{wGO&}iTtTa~BqqL_bs&rnL4 zuzKX2y7pAeH2{`+cDpDLw$F#B+e^_Gol$6v(awPq8^zesaS+a_u+mD8Twdop;p@Do zQl)XLJlP*^DqBfhaPv?G3IXe~j(y8Z^8zqN5)^CrC+f%Z`e@2%Or+q;EA!Hu2h6-s zdc*H@-@|=rOkn1RM+ z-FM9Q7y4T;InRb@DY*`(3p;60un9>z`*#%^2j#E{U>-m=zqO0`&}-Cc_=ib;)KNC# zz?Ls2SQoW@SIaILFlw!n|j?V7o_b@7!B`AjuKsxI-7F)r-L!jw4mU+@{lv zL>ea}c824W_)6wH1lfx9=x67Py&WdEiJ5DjE_8Ft7IMyk+Q^l_bwBY<G~6pK@+R zD&Sfn*yl8>B%<$Zi>?5KnLk`PX{7O9W|%fW#Vc}fhFjer;$2fwAqiA@I{ID&2sgh> z-VlXEbXEp3%i{n53W+5z%z>G0LWw`NwZBC8C!G-IN&w8D34v4>O`OBp92c@K5H~pv zFt0J&oFPr8=hn{8vsTCgPTGt{O>U~Yzh@fFDy^qBUjPK@)lgcUROzxTYIcS>MoWWf z+`-ULz)2gBU&H#}O|1i6J`E24EXsl=XvEflCWa(Xi^d?KxlG_@KA<2i#pfA@W66L$ z{2IY3l{V5L>Nh}`jdyWVMFNeD>hqi~y~yD&_<}5f5PAQkiUec30!_WICSbtt(8WZO zEi987X5}`Z0|xu-mdRe=wg_R8j8P5mY+`I<)P_-OMKjb7XA!k3hPi_>yyDcVn03WA ziK>fgEE*>F%v)gyN7{Fjlf_9mMhsO&>0)WtE~%QMQ^VWW6?;Xfx~O$W8#X6`CQ}>U z*~HkUQb9SRno+M{X%*B31cPQh?6@9Qm=`df-w=DFD?}S29A_*M1HAsgS8^rNYG7;D~_J z4@2676b;_lJ`8V#t)k~rI3Svoobt-3pHWA53cO7QRiJ1Avuea?g(LFUILeZxz?ZcS zEHxYsVN7>3kv*BxG>OrdI|gr;0j1sfKs+Tx2OXtPo1I_i{qM^7z%nBNI@5a*PRmv1 z{2sdz0SF=J069tZ0)4Z}qAOpp#AR1#lQgd&iaARA4 z2vLfW2?=h1mtm$T9zeJm$^U8}3|Sd=pW}$&=`exk{4FSga6W25xHulGT7nmX@A^YT z1b~qDDS?3@j*LVER7x$!HeiGS4i{UP^RXF_XHsv|1x_2H2!DFo$clRLXY|W*3&amE zI8cuur}{X|?QKveUL1h0isWLBgP}}1%fxQE#g8Kvs3T4+O!uh3WiDa|!YT;VQ@>NBy~rLiTY?hY38X@o*^^;ANR z4fE1S?d#TW1CMS^n7T&-+teWJ3;m}4yK|AV3O1kn@g$Dmtm~nY-D$mc zT3CI6YKnX~d7Cv13Sj=yh3UY?4rrDKgw}9^hmk0{nZNT0&>tvtZRtj~rspHCG+m|M zN}hA^h@U|CSSlJAGSP0l3N!5i~;^sBqh*wzGw*)U( zB0hlDr^lHVDL)$o^0b+=ql`K%gaec)i<6}VHi3BPi@m|uRW4I8w;A-I=B=#JJ9B^Wy-B;7eWn`LHZ*k>|rg9{Rhf^ey)44<6bQKBUN2omm1rE;#I|oW299(xn-!6LLv&&XTcqvojY6dy2rU62G+hBJoY()Tz1Z2l`VQD1h$E@Ii2v0Z4YcnofJkF zb%qIDRT}VoQ+P-exVEyYpnmbov}^G@r!)3+ffhUM$Ub8YqO!<)Y*|&#YHxg{ayE4t zFYtgzl*iDBtwm2fit^MIq1O0r|M8ZIj&D}1$ 
z@Rb8+z&W<}Tn4$*T`LHq=pg4&k^O>__|91xuoubgxfBz$clL07zq@@wpZXl)udA#c$c zv@G13m?ZD6e`GflyArdJ+`)FQL}@a)*iql5rmtY{`m)^G*f~urgq|38-V{Y+8mTha zx^`8S#|-P2m=xa(ui}-vYu^ZlBTyb)!c3-#;#)YQxBMT!b#!gTXk@2%-XEp^G>O)#PHZy(VYD$gVt2Jm< z{1952RM*6HWGf2nW;313K*Y+v0T^S1qd?3X;lM6~69-As52zfS!xW-sr5k0y@kK4QRD81Bnd7TnP7Y5VCQ3zOlc*c^X|VDP zx6$6xFe|FHbf`SgZz2uWF3zoSb_#s6Oy$ly2kz87fobrb!~q$#YULSS@}b&gErzPx zib?V|xKZT_NVR?REPNV4Ik_KWmahS-2MnB}<3>;@Oy!p>TC!0h$BLs*^P^5QjSXGt z#BQVt9`(Su96%710T_7*ssJ2RYeimM@Z zx7l};_A{U`nPzO6+Fxd3o9uG*^eFF|&(4NoT^e2mBX0sFyTqZHoW(0*xKn+7B?v2} zyS@3iyQ>(-&C6ahFzo34?(O+eA$8Z|P)T%R35!orjWJR2b8xK^-gYs!)1{`ivIBIpzppN(spj-j)opT$#2Cg%e^g*W7> zk=6Nxw5A>@Qd*Su619R*UIpuXE6>JIr~KIy>a!+b$doH6w;6ag zazn#joZnB|@#=?^62V7gP#s6Ht;8l8J?zCdHTy$IVS4Kk!Bi{spfYM2 z^4!;T0v#HLk+8FUhQKb@45qFl`o3BKqzmE>19)C5up)S7Jj2>c1Y^b^=c?8;Vuim8 zjGD_c(d;^TtU)zAoZa5lYA~xy#{G(#O6own?6p8LJOKmf(nrif8!AgWS00OizB!ik zAOVJ}COw!9AHnd=@wfx=TTb}1gict=;K2ve^;ikq1(V~{se?BN<61YPeFZC&c^}If(TjlnMu+7a?C-sD?5Yv;**Gd3@A7)jn?g% zP4Ig&9+B9C|0)tk!#cjuEjDV%{R-SxGO8r~LUlyJTWd4HRfVk)Mad^|Ae3rSJymG; zh?5W|Jo^0R%ptw;Xs%OYuUq;C{`O~^)Sd(2z)2Z%MfmHF{rkBy+l$*v-&!N$Fv0PD z%Q{dsUf&N&KN<~48981Xu|EIzjma8M!et6i1AhC$Rye0+G zjBgmCf(2U=y?;U2zqmZ3J?y5CyK&W~q2b^%==#L;e|TBVD+681z~5`rDmu|W+}UsV zJZM4Cs;gvf;m8~FxGiOF6^1(G-sa*sOe6T>w6he}xhJA!wv?yHNIQSJ#5e>zDWkC% zBP?(9`;!1GuJc&oCKGx*l7r=Hg zB3s)q)7|U1Uj?Q_DL{0&FxXZC03P)V$5nUFnVJiX|^KXQZXpi(JxGgcbHxlsu5IX^4`o!+SqeAyli{p&6!|aUSGCpjGn$Z{Y+<=0waFMZZ(pLB z#*lqIFOcF%Capg`@4-W{V5HtPgt;JJuh7JvRN}Gd_6I;uI)R<}P=DT~*4W_J7(9aL zW%`H7wKyIOS576YD0$lWFiD;NK?f?D`Vgu+9~a6twBDrZ)X0L>hCz2qQHUzS*zgf{wc3 zer^O|M6k}v6dvUJv@xQcltq{3KNv3Ioa5AKB;K(E6&n6Ue>R=}Bja;SArKsPFo0aM za@ce*v5S$&vqcg}o3@*;qE7Ny@ogKCIL{BA&_?H7N_OUFxP{ z;ZOB`jCA(rk!^cl!c{($E`QE?^$hbfrOgPkDqNf4V~kOK zP`e(o+RzTB3-=`y5BAD12EdaWE`nJ_YWwYf}=ES3G8ApEy7-jCx<|MsEVZCOo; zf^UApKQ7LG&9I-4{q6tYK{0T`5XoX9kUixIhl*=T%B%ck>aEgD&pSP4d)_{Bg>VTV zd=8Y~X#Jb$6K>x+d&c-Lp6)+{_ue^5#HUQpU&?3iK9l*UPVa;9-Fq*q`qN+8$+vVd 
z7NB_sl0GoIFe|&7QtV%7rjkwgB3fZlHH+a-v#^=}qQ(coq0y@aJbzA!kZ;9vMI3Im zRN^lZi$x{R`cDs!9kAiVoGXJy9)A)7F0NLySs1#ms){1}?l-%IL{ShN4!hlEGh(gt zobxz(-(y5)cgOAOStAAEb&P^5m%E&ghy8A~t=p_;%gZh1(>zPVq%Mx!;BvmVRE}+# zs!^wFqEaVItWd{uBx9kee@5F@0>?mvIv5&G>c*q)pswF*Uh6LB-LdYlKa|)nr%O#} ze5>wwU|a7gIS!wr!dPmwBPjB(4VYJ=mZ(5EL(CBII8<`Mj^8dPe1gNlic`(shf~Hz zgoz1ji|!L?yy1|TA(Xr_o&ISwLb@#O2o=|?HbwH=I(5Rob9&+1AA@HpIN>3aiMiJ@ zTD=G2dK(OnA0gK+Xe~MYF()cgJV6v7=k}jO?OKawV9mbXBl921QQU0m=Jni~sAAUb z5q>UTR>0$%-3Vc{B^NYoPt7c+_PA(VYXJ4FaMH*4sr$ivn%W5}%7Jjhfs622GD$_Z zJ!imWPIGRnYYLK3Ca__OK0@Xa)?z*z(}ZWV8r$H?7iL{Ik|Ob%d!`lNamXCtq~M~0 zJpR*4CK4Q+W#e!|LvuGFgjr~WAu>T5(c6b90}=`O4^eW~%EYK1S;4+B?&Zvz>G(=V z6y@V+N!I&34R86;D5$XSDulZ4zdQ!3$3envqO8i_&6m%7C7iwiBbwtOAeyF02Gyc{ zE{~YSFr)H6KJuU$tzef;0`l$FnV;0{HYK$Ngz_$)kG2 z;deY`V6Wmx2KxISV6=`|G5@$NqQCq{j(QVhXaF!l`nD#bwI;B21%@XdXSJ4654n^` zXFQ-mh^`ff3|Mxpu%<6pLP7$TK_7ha`dI1F$0k6;wqPR1nPw_KB5FbD3a{4u{BdDZ zpTvs@%_B%X##EuyTP=}a1%2!=CnD6>njon=ksZ-`O5U(|_C$mmYBg)Yph!u6)~b+t z3QKW@G;ltui9q>sris2=)oNKQ9LBBh9uqj&X%K0gt=(@MO;EdiojAlU_?y7ZbVS7W z6v_KNHC>T6GJifX^g7EN#OPsJqycg^q}FP6gh+u!kf&Py6%#lgvLY3IS!)7uci0_S zdRo+oWKZhRu*#UL=)*jwj{;jblc9(rGc08~B1rSChLR^l;4;qy<{H9r5)|unMEIS1 ziq_2M2^roaohAq&iAb}uWBhy078E8BMz|xxBrJ}_6PV6fo~qwG(*(8}ei@Of9`x@+ z>rr&HFlHtTA`{qJtNj(OG&O!T-oAAV=Zm*MG`3phXG12aH$jh-#m!R4&_=EqY*5?u2pZ@RoPr*>I>usIB7f`?TnpbA>L$X#J@|hSh zNWKkukpw1%@gr~Zc2Oi7_F*p$Vi&gYK#C)1KqHQ$1*h4ZXFJ*&b<$Ikp4Ak#O7hQo zXk`)1gtu+z+H=q3jLU%39?=XOk8v}!;XdmYJ{)LoEtQ2I@-%?l-n6x3zmgH?up(BeZ zS%^vt5QnpU<&tt9AZ5b7_AieJ-e_Gfy2fFu(FJ3m-&;Gz6zs1cdR)31t&RAMbxzmN zc#OcZubC@V#`FVqIDbAW%N&{xxQK@UW%ugTZfX*&9;{ib}|TYQ{5 zT#(t`c{(zjpRF{(SfBV&f&KGoL)b0wlPfw9m)utp6Kql2e8kvf7*lJ4g8oBs&WiZrAK=}BZU#in~%3g6M%W$AfsVK9`|`MqM401cP0EWhxBex1PF|;xrJ#t z37F2<>VW=tb7drC!2Uk$Tt0P91&4+xZWLH!l7B<(U7;r!1OwgbV2MSx21}`F>MTe9 z3pp=@6}&tE!7B2pv(>)wGXyr~#DZ^YKygt}fat(QUiX4VWk~KqX#gItUkecS$OahN z*aRekQ3N>KsSe0A=1L&Je4YkUY!B%`+LpsLpkN;N7}fvs30unHH zL-i|$gY}-4In?&#xwaARZf>6;AIxE>zP$pJ3OIvYO!zYA0HII-Pl0?#)fN0SzCB0s 
zuud4GSJ-Z-P7eLu)y)*%|KP*!ch1t;)eE+l$ude{#C?$utO6yZbU!GUz4`_tk|*=V zrt^j}NNKrqJ`!YQ$#T4a6($EA3RGk~Geil?OjW}B%WgC@=;)%Q<8DzKVq-}{Bo-TE z`*wZZPrxVFPp*VwsED_4;HGs1PJ8kO0|6SZ*TL06mMW0cl!gD=nY^|RS@a2X)O3aNmf8{nYh>;@52eH*a6Jm-JF2eo z#NRk^gm%L^q0VBYo5psdZL5=8&-Hg#H_r#Z7t`;BCrasA_gSxKd-*n0>Vi%Dg*7*@ ziewBMZ)?>;PVrufaf4$@TbOxc%OE2)BUL&4b3SSihcBNUEJ6ChwZd{;(6tB;8!Qk% zTf+#(*vKR*hsAI1NMM0?0PBvW+ejUNY!hY)rjd{vRtsA-G`4To&$=GBsOw59Nh!4o z`EbWK09MO`#j6G@BqJ>gA8#payw&p z+%?}~_u^!dX=Y3^&w^=|%(BWln`}+Id0%$fo82E;3QPBM@*r-`x#XIg)MOsUPeBwu z`*6xssY7hd+gYuwd}PfgpLFTj=5NaX1!T;WnH`Wg###w`BYlD*EYI$-l=vOz%#}M& z-pVSkB43ppIF?d%HPv#$^}aM0sMYt#xomIB;bDcg8jK=6D9tjkN+Vg9kkvGq4zxi_ zytKaoJW_XMJk2BuftMT|d@!$`st~~zD;ubj+X6`N(5=-ITI#nZ*uuMKa~B^^JD^}U+X%|-xwT!tY(t}1Tus9mML}0T0sgLRKyT2)mmHaircFY zBuu*1!QV8_*V~0II@l%avko#b@dOuqy!m3eT5q;HWbj!zoiEp0uRj=$#uE^N5fl># zmm(>eVL4rJyeP?v>J5hG;Nw}g<9dD&p6?ZDmQQE%#d5XYYR?y@5I?)f~G}DKb%{$DW)w3c>ZV<-H(Nrn=|Cf1#wj1L zI6Q$!B2%a|`r^%=J)6Vj@dZMWSR$3l6-t#_LsLte+5O$Mr*B|rWNcz;W^Q3=Wo=_? zXYb(XBQd^8=2tBTnWUc$ODs zRX1%X`eB^rW!?7Uyl#jvq0Hk#-UU(m^vqZvtWgumhq|GH4Dqi>vQlX~2{(Z0kgF;! z;+U7`iXxOd-i4c7?AIP0UGz2Ab?VaXoD*7Hb_H3;zCk-uDhcE%5S1r6IaV>9$>#Ef zVyRqFUb|Me6U`H|;A7ZKmyR8_+ooB<5xja0b(=W*dmOd99w|SyJKbJ?FdU61)7gBn zT&*`-rZd_2v;Vxfe{jCb6Ft)AsIXCuT4my8tLXkzbpKXhL!a&&uBau z8F<7Y*>ju)PsX&?AY(K`{%Eswz#!J-cjKZoI@MaIrDLl6v4(xAk0*?X=za`|?j}Y2 z((R)J7k{Mx{E~l?ydG_~PNzQQrTh6IB#%-p7#Db|-~tSI|4C>A+cA?!t?EK@NF%Oh zt8BP;X^$NSP+XHqovW0eCvjj(I8$E7{Y3hvMf4o{an zPQ}ZMWW^k9>@1ZV)sAgjyS5x#LT!2#+Pc|SyE+~syR2?_!_>ei#B9GLU@aAL36?UQz0;Hb5esg1_s(ulB+^rFw@bPObreoRMiI5j@aJ?_Ko>a}Jb_96 zsOW07w?6~pafG$Qjdc}!W+xI8d?boXtLsDq5B5EP$tg`F(kPWw`qlk>U;4FA6UA|& z8aKyz{_J$%oqF^3hi4dA?~ULmCv=V@69#p_$@KQHBMp>>CD zRk|zBD3{Mu_Bf?OjR;`6D zhIfSi{n2SK%R}u&4#%g*Oc2x=7`eg*_Lc`JYLE(OgW-u6YZBp(uaPWyRNxwf8Nr8m>D$Xi-epb!FvsXl_^q8CAK! 
zTVwa666uQqAl=dMMr=z{nawv_AZ|vFi7P^n8b?6C!jMQ$(Ik@hd)?GnI6<SlC1j(jKyS+4>UK+7)lm|**F=zNEB*L)nsB<`arii>qy8LIV z=f3^wtOkbt`0#85`%#djG$N~!AKlAGXl=J zxBvi6z({zp*b5#(C@6}eC=A0e3`;YO-ed&2aM!w9xm%<&gCoeoLm{nRN!knQ7mSDB z{VSn%lltWMq2Y;ulQuOlaxj8o4Ce8=tjJG^v42~0fJ%-X5_&Ef@D*>$l|%aLlo;#p#&9hLEnI@adOtTTjwoz z1cdWn`e;;Hqpd*??&nsiS>sJWLci_%=GZ|DZVMW9Zy3`{!v)%Tz!Zq*T`I%07;qWy z>IuEGU&p53T;nb{d{m(xm*?%~LYtei?kIduFZSqR{nNXBYzth64d|8~bDi{wCWA|5LB4^;Lmza;}0 z!}ET*F1}+=mY4q(CqqdzhbQ-P_E&=Z>r$Qwrs)EsxXZmPco^d^%~bkFrN)()HJ1ukxj~|S$)Lcp&_Px*aHkT5wQgbMZt8V&m-8W-~ z;%k8Sule`+QJX@WUeK!ke)m^;dmECE{}ph>qpVpHU>s;XmS+F3hx5oT;IGjI^z#xj zF_G(|w+>UEiO1rMc@A_fdi|R zf}b2ms-8!*KCapi7Ltmm+dTn{S#ZG%=xw7n7w8S^p>_NG$B Date: Thu, 2 Apr 2026 08:06:20 -0700 Subject: [PATCH 11/63] Expose flexible panel settings in settings UI (#52946) Release Notes: - Added controls to the settings UI for whether the terminal and agent panels use flexible width. --- crates/settings_ui/src/page_data.rs | 30 +++++++++++++++++++++++++++-- 1 file changed, 28 insertions(+), 2 deletions(-) diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index 8496620f9b4db94f93b2ea65952423b73512e724..f0cf87c403b340dacd33e2c04b043ab8085a461a 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -4990,7 +4990,7 @@ fn panels_page() -> SettingsPage { ] } - fn terminal_panel_section() -> [SettingsPageItem; 3] { + fn terminal_panel_section() -> [SettingsPageItem; 4] { [ SettingsPageItem::SectionHeader("Terminal Panel"), SettingsPageItem::SettingItem(SettingItem { @@ -5006,6 +5006,19 @@ fn panels_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Terminal Panel Flexible Sizing", + description: "Whether the terminal panel should use flexible (proportional) sizing when docked to the left or right.", + field: Box::new(SettingField { + json_path: Some("terminal.flexible"), + pick: |settings_content| 
settings_content.terminal.as_ref()?.flexible.as_ref(), + write: |settings_content, value| { + settings_content.terminal.get_or_insert_default().flexible = value; + }, + }), + metadata: None, + files: USER, + }), SettingsPageItem::SettingItem(SettingItem { title: "Show Count Badge", description: "Show a badge on the terminal panel icon with the count of open terminals.", @@ -5666,7 +5679,7 @@ fn panels_page() -> SettingsPage { ] } - fn agent_panel_section() -> [SettingsPageItem; 5] { + fn agent_panel_section() -> [SettingsPageItem; 6] { [ SettingsPageItem::SectionHeader("Agent Panel"), SettingsPageItem::SettingItem(SettingItem { @@ -5695,6 +5708,19 @@ fn panels_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Agent Panel Flexible Sizing", + description: "Whether the agent panel should use flexible (proportional) sizing when docked to the left or right.", + field: Box::new(SettingField { + json_path: Some("agent.flexible"), + pick: |settings_content| settings_content.agent.as_ref()?.flexible.as_ref(), + write: |settings_content, value| { + settings_content.agent.get_or_insert_default().flexible = value; + }, + }), + metadata: None, + files: USER, + }), SettingsPageItem::SettingItem(SettingItem { title: "Agent Panel Default Width", description: "Default width when the agent panel is docked to the left or right.", From 7892b932795911516f26f3c1c1c72249ed181ba8 Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Thu, 2 Apr 2026 11:26:37 -0400 Subject: [PATCH 12/63] git_graph: Remove feature flag (#52972) After #52953 gets merged the git graph will be ready for it's preview release, so we can finally remove the feature flag! 
AKA this PR releases the git graph Self-Review Checklist: - [ ] I've reviewed my own diff for quality, security, and reliability - [ ] Unsafe blocks (if any) have justifying comments - [ ] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [ ] Performance impact has been considered and is acceptable Closes #ISSUE Release Notes: - Add Git Graph. Can be accessed through the button on the bottom of the git panel or the `git graph: Open` action --- Cargo.lock | 2 -- crates/feature_flags/src/flags.rs | 6 ------ crates/git_graph/Cargo.toml | 1 - crates/git_graph/src/git_graph.rs | 4 +--- crates/git_ui/Cargo.toml | 1 - crates/git_ui/src/commit_view.rs | 29 +++++++++++++---------------- crates/git_ui/src/git_panel.rs | 24 ++++++++++-------------- script/docs-suggest-publish | 16 ++++++++-------- 8 files changed, 32 insertions(+), 51 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8b428dbcd537e33088f40fdde5e3251a6148672a..ce645cae5bf4bbf76dac037880e9e7038df67df9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7132,7 +7132,6 @@ dependencies = [ "collections", "db", "editor", - "feature_flags", "fs", "git", "git_ui", @@ -7190,7 +7189,6 @@ dependencies = [ "ctor", "db", "editor", - "feature_flags", "file_icons", "futures 0.3.31", "fuzzy", diff --git a/crates/feature_flags/src/flags.rs b/crates/feature_flags/src/flags.rs index 4d477aa4b393ee8b04829833324cd9092c2a04cd..54dc96ad37f8e51a1074a0a32976f8236cb1a0ed 100644 --- a/crates/feature_flags/src/flags.rs +++ b/crates/feature_flags/src/flags.rs @@ -47,12 +47,6 @@ impl FeatureFlag for DiffReviewFeatureFlag { } } -pub struct GitGraphFeatureFlag; - -impl FeatureFlag for GitGraphFeatureFlag { - const NAME: &'static str = "git-graph"; -} - pub struct StreamingEditFileToolFeatureFlag; impl FeatureFlag for StreamingEditFileToolFeatureFlag { diff --git a/crates/git_graph/Cargo.toml 
b/crates/git_graph/Cargo.toml index cc3374a85932435d010daabdfe0e4b4eef628de6..e9e31a8361e367275c994e125ae6e04cbd652fc3 100644 --- a/crates/git_graph/Cargo.toml +++ b/crates/git_graph/Cargo.toml @@ -24,7 +24,6 @@ anyhow.workspace = true collections.workspace = true db.workspace = true editor.workspace = true -feature_flags.workspace = true git.workspace = true git_ui.workspace = true gpui.workspace = true diff --git a/crates/git_graph/src/git_graph.rs b/crates/git_graph/src/git_graph.rs index bb1566aa29eeae016d31ac549434e7b92d50eb4d..c56fb051b896f32ac364cd15e73ae8708498ca5a 100644 --- a/crates/git_graph/src/git_graph.rs +++ b/crates/git_graph/src/git_graph.rs @@ -1,6 +1,5 @@ use collections::{BTreeMap, HashMap, IndexSet}; use editor::Editor; -use feature_flags::{FeatureFlagAppExt as _, GitGraphFeatureFlag}; use git::{ BuildCommitPermalinkParams, GitHostingProviderRegistry, GitRemote, Oid, ParsedGitRemote, parse_git_remote_url, @@ -732,8 +731,7 @@ pub fn init(cx: &mut App) { cx.observe_new(|workspace: &mut workspace::Workspace, _, _| { workspace.register_action_renderer(|div, workspace, _, cx| { div.when( - workspace.project().read(cx).active_repository(cx).is_some() - && cx.has_flag::(), + workspace.project().read(cx).active_repository(cx).is_some(), |div| { let workspace = workspace.weak_handle(); diff --git a/crates/git_ui/Cargo.toml b/crates/git_ui/Cargo.toml index d95e25fbc7821d42fac4386b522c4effb9462715..e06d16708697f721d9377365223dc444ba7b08ae 100644 --- a/crates/git_ui/Cargo.toml +++ b/crates/git_ui/Cargo.toml @@ -27,7 +27,6 @@ db.workspace = true editor.workspace = true file_icons.workspace = true futures.workspace = true -feature_flags.workspace = true fuzzy.workspace = true git.workspace = true gpui.workspace = true diff --git a/crates/git_ui/src/commit_view.rs b/crates/git_ui/src/commit_view.rs index a298380336515aad24e9c55d637d392fa6898b35..aac44c7f9c6eaf6f18c72bea390c0a0b7ad1a4bd 100644 --- a/crates/git_ui/src/commit_view.rs +++ 
b/crates/git_ui/src/commit_view.rs @@ -3,7 +3,6 @@ use buffer_diff::BufferDiff; use collections::HashMap; use editor::display_map::{BlockPlacement, BlockProperties, BlockStyle}; use editor::{Addon, Editor, EditorEvent, ExcerptRange, MultiBuffer, multibuffer_context_lines}; -use feature_flags::{FeatureFlagAppExt as _, GitGraphFeatureFlag}; use git::repository::{CommitDetails, CommitDiff, RepoPath, is_binary_content}; use git::status::{FileStatus, StatusCode, TrackedStatus}; use git::{ @@ -1045,21 +1044,19 @@ impl Render for CommitViewToolbar { }), ) .when(!is_stash, |this| { - this.when(cx.has_flag::(), |this| { - this.child( - IconButton::new("show-in-git-graph", IconName::GitGraph) - .icon_size(IconSize::Small) - .tooltip(Tooltip::text("Show in Git Graph")) - .on_click(move |_, window, cx| { - window.dispatch_action( - Box::new(crate::git_panel::OpenAtCommit { - sha: sha_for_graph.clone(), - }), - cx, - ); - }), - ) - }) + this.child( + IconButton::new("show-in-git-graph", IconName::GitGraph) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Show in Git Graph")) + .on_click(move |_, window, cx| { + window.dispatch_action( + Box::new(crate::git_panel::OpenAtCommit { + sha: sha_for_graph.clone(), + }), + cx, + ); + }), + ) .children(remote_info.map(|(provider_name, url)| { let icon = match provider_name.as_str() { "GitHub" => IconName::Github, diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 00a3b4041b91454d0587a1503b66dc3fa8629917..5b40c4bffc3a492f0113a8c5e45b2cfc1763d380 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -20,7 +20,6 @@ use editor::{ actions::ExpandAllDiffHunks, }; use editor::{EditorStyle, RewrapOptions}; -use feature_flags::{FeatureFlagAppExt as _, GitGraphFeatureFlag}; use file_icons::FileIcons; use futures::StreamExt as _; use git::commit::ParsedCommitMessage; @@ -4529,7 +4528,6 @@ impl GitPanel { let commit = branch.most_recent_commit.as_ref()?.clone(); let workspace = 
self.workspace.clone(); let this = cx.entity(); - let can_open_git_graph = cx.has_flag::(); Some( h_flex() @@ -4607,18 +4605,16 @@ impl GitPanel { ), ) }) - .when(can_open_git_graph, |this| { - this.child( - panel_icon_button("git-graph-button", IconName::GitGraph) - .icon_size(IconSize::Small) - .tooltip(|_window, cx| { - Tooltip::for_action("Open Git Graph", &Open, cx) - }) - .on_click(|_, window, cx| { - window.dispatch_action(Open.boxed_clone(), cx) - }), - ) - }), + .child( + panel_icon_button("git-graph-button", IconName::GitGraph) + .icon_size(IconSize::Small) + .tooltip(|_window, cx| { + Tooltip::for_action("Open Git Graph", &Open, cx) + }) + .on_click(|_, window, cx| { + window.dispatch_action(Open.boxed_clone(), cx) + }), + ), ), ) } diff --git a/script/docs-suggest-publish b/script/docs-suggest-publish index 23578785159b5fd720e84d3658f7f76dddf3ada9..fc420f3fbc774df0dbd7667a5cd6dd76682e9548 100755 --- a/script/docs-suggest-publish +++ b/script/docs-suggest-publish @@ -131,14 +131,14 @@ if [[ "$DRY_RUN" == "true" ]]; then echo "Would auto-apply suggestions to docs via Droid and create a draft PR." echo "Model: $MODEL" echo "" - + # Show each suggestion file for file in $(echo "$MANIFEST" | jq -r '.suggestions[].file'); do echo "--- $file ---" git show "origin/$SUGGESTIONS_BRANCH:$file" 2>/dev/null || echo "(file not found)" echo "" done - + echo -e "${YELLOW}=== END DRY RUN ===${NC}" echo "" echo "Run without --dry-run to create the PR." @@ -213,7 +213,7 @@ fi FLAGGED_PRS=() FLAGS_FILE="$REPO_ROOT/crates/feature_flags/src/flags.rs" if [[ -f "$FLAGS_FILE" ]]; then - # Extract feature flag struct names (e.g. SubagentsFeatureFlag, GitGraphFeatureFlag) + # Extract feature flag struct names (e.g. 
SubagentsFeatureFlag) FLAG_NAMES=$(grep -oE 'pub struct \w+FeatureFlag' "$FLAGS_FILE" | awk '{print $3}') if [[ -n "$FLAG_NAMES" ]]; then FLAG_PATTERN=$(echo "$FLAG_NAMES" | tr '\n' '|' | sed 's/|$//') @@ -538,10 +538,10 @@ echo -e "${GREEN}PR created:${NC} $PR_URL" if [[ "$KEEP_QUEUE" != "true" ]]; then echo "" echo "Resetting suggestions queue..." - + git checkout --orphan "${SUGGESTIONS_BRANCH}-reset" git rm -rf . > /dev/null 2>&1 || true - + cat > README.md << 'EOF' # Documentation Suggestions Queue @@ -562,19 +562,19 @@ run `script/docs-suggest-publish` to create a documentation PR from these sugges 3. At preview release, suggestions are collected into a docs PR 4. After docs PR is created, this branch is reset EOF - + mkdir -p suggestions echo '{"suggestions":[]}' > manifest.json git add README.md suggestions manifest.json git commit -m "Reset documentation suggestions queue Previous suggestions published in: $PR_URL" - + # Force push required: replacing the orphan suggestions branch with a clean slate git push -f origin "${SUGGESTIONS_BRANCH}-reset:$SUGGESTIONS_BRANCH" git checkout "$ORIGINAL_BRANCH" git branch -D "${SUGGESTIONS_BRANCH}-reset" - + echo "Suggestions queue reset." else git checkout "$ORIGINAL_BRANCH" From efc53c2173a787245c1a932764c12fac78e57cc5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Raz=20Guzm=C3=A1n=20Macedo?= Date: Thu, 2 Apr 2026 10:24:43 -0600 Subject: [PATCH 13/63] docs: Center and re-flow perf images (#53004) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #ISSUE Release Notes: - N/A or Added/Fixed/Improved ... Co-authored-by: Joseph T. 
Lyons --- docs/src/performance.md | 32 ++++++++++++++++++++++---------- 1 file changed, 22 insertions(+), 10 deletions(-) diff --git a/docs/src/performance.md b/docs/src/performance.md index b8f76179e16fcf1f1b886a5c3ef00bcc85aa9ed4..d25ac246f3dbc03ba4286f8e130c566657bbf196 100644 --- a/docs/src/performance.md +++ b/docs/src/performance.md @@ -15,7 +15,7 @@ See [samply](https://github.com/mstange/samply)'s README on how to install and r The profile.json does not contain any symbols. Firefox profiler can add the local symbols to the profile for for. To do that hit the upload local profile button in the top right corner. -image +image # In depth CPU profiling (Tracing) @@ -53,20 +53,40 @@ Download the profiler: Open the profiler (tracy-profiler), you should see zed in the list of `Discovered clients` click it. -image +image Tracy is an incredibly powerful profiler which can do a lot however it's UI is not that friendly. This is not the place for an in depth guide to Tracy, I do however want to highlight one particular workflow that is helpful when figuring out why a piece of code is _sometimes_ slow. Here are the steps: 1. Click the flamechart button at the top. + +Click flamechart + 2. Click on a function that takes a lot of time. + +Click snapshot + 3. Expand the list of function calls by clicking on main thread. + +Click main thread + 4. Filter that list to the slower calls then click on one of the slow calls in the list + +Select the tail calls in the histogram to filter down the list of calls then click on one call + 5. Click zoom to zone to go to that specific function call in the timeline + +Click zoom to zone + 6. Scroll to zoom in and see more detail about the callers + +Scroll to zoom in + 7. Click on a caller to to get statistics on _it_. +Click on any of the zones to get statistics + While normally the blue bars in the Tracy timeline correspond to function calls they can time any part of a codebase. 
In the example below we have added an extra span "for block in edits" and added metadata to it: the block_height. You can do that like this: ```rust @@ -74,14 +94,6 @@ let span = ztracing::debug_span!("for block in edits", block_height = block.heig let _enter = span.enter(); // span guard, when this is dropped the span ends (and its duration is recorded) ``` -Click flamechart -Click snapshot -Click main thread -Select the tail calls in the histogram to filter down the list of calls then click on one call -Click zoom to zone -Scroll to zoom in -Click on any of the zones to get statistics - # Task/Async profiling Get a profile of the zed foreground executor and background executors. Check if From bd6dadaa0b2e950c80ab3c9610dc1678078b67a9 Mon Sep 17 00:00:00 2001 From: finico Date: Thu, 2 Apr 2026 19:26:08 +0300 Subject: [PATCH 14/63] languages: Change syntax highlighting for JSX elements (#49881) Syntax highlighting and its customization are very important to many developers, including me. I've looked through a number of issues and discussions on this topic and haven't found any active PRs. Currently, there's no way to customize highlighting for custom JSX tags, as they use `@type`. Since TSX has a particularly complex syntax and can often contain types/aliases/generics/tags in a dense sequence, they all blends into a single color and makes it difficult to "parse" by eyes. To avoid proposing something arbitrary, I looked into how this is done elsewhere. - VS Code `support.class.component.tsx` [TypeScriptReact.tmLanguage.json](https://github.com/microsoft/vscode/blob/724656efa2c26ab6e7eb2023426dcf2658dc3203/extensions/typescript-basics/syntaxes/TypeScriptReact.tmLanguage.json#L5802) But it relies on both legacy [tmLanguage naming conventions](https://macromates.com/manual/en/language_grammars#:~:text=rarely%20be%20used\).-,support,-%E2%80%94%20things%20provided%20by) and the outdated assumption that React components are always classes. 
- ReScript `@tag` [rescript-zed](https://github.com/rescript-lang/rescript-zed/blob/b3930c1754ab2762938244546ea2c7fb97d01cb3/languages/rescript/highlights.scm#L277) It's not entirely correct to just use a `@tag` - it's better to distinguish JSX Intrinsic Elements from custom ones. - Vue `@tag @tag.component.type.constructor` [zed-extensions/vue](https://github.com/zed-extensions/vue/blob/2697588c5cde11375d47f53f9449af8e32600d81/languages/vue/highlights.scm#L9C21-L9C52) - Svelte `@tag @tag.component.type.constructor` [zed-extensions/svelte](https://github.com/zed-extensions/svelte/blob/ae381a1217d14c26cbedfaf84b0a2f5ae508f40c/languages/svelte/highlights.scm#L46C21-L46C52) The similarity between Vue and Svelte implementations (perhaps one borrowed from the other) didn't seem coincidental and the approach felt appropriate. **I decided to adopt the same one to maintain consistency for theme creators.** So, how it looks: **Release (0.224.9) version** zed-one-release **Local version with changes** - no breaking changes for builtin themes - uses `type` color as before and can be changed in themes separately if needed zed-one-local **Local version with changes and theme overrides** zed-one-with-overrides With these changes in the config: ```jsonc "theme_overrides": { "One Light": { "syntax": { // "tag.component" also matches "type.component": { "color": "#d3604fff", }, }, }, }, ``` I'm pretty sure this will help many developers enjoy Zed even more. Release Notes: - Improved syntax highlighting for custom jsx elements in TSX and JavaScript languages. 
Theme authors and users can now highlight these in their theme/theme overrides using `tag.component.jsx` --- crates/grammars/src/javascript/highlights.scm | 18 +++++++++--------- crates/grammars/src/tsx/highlights.scm | 18 +++++++++--------- 2 files changed, 18 insertions(+), 18 deletions(-) diff --git a/crates/grammars/src/javascript/highlights.scm b/crates/grammars/src/javascript/highlights.scm index 4af87cc578e3060e72d1e1374f4904d8c7629ddf..f6354dd3a016f544e5be1616c3dfb12144855775 100644 --- a/crates/grammars/src/javascript/highlights.scm +++ b/crates/grammars/src/javascript/highlights.scm @@ -328,26 +328,26 @@ ; JSX elements (jsx_opening_element [ - (identifier) @type + (identifier) @type @tag.component.jsx (member_expression - object: (identifier) @type - property: (property_identifier) @type) + object: (identifier) @type @tag.component.jsx + property: (property_identifier) @type @tag.component.jsx) ]) (jsx_closing_element [ - (identifier) @type + (identifier) @type @tag.component.jsx (member_expression - object: (identifier) @type - property: (property_identifier) @type) + object: (identifier) @type @tag.component.jsx + property: (property_identifier) @type @tag.component.jsx) ]) (jsx_self_closing_element [ - (identifier) @type + (identifier) @type @tag.component.jsx (member_expression - object: (identifier) @type - property: (property_identifier) @type) + object: (identifier) @type @tag.component.jsx + property: (property_identifier) @type @tag.component.jsx) ]) (jsx_opening_element diff --git a/crates/grammars/src/tsx/highlights.scm b/crates/grammars/src/tsx/highlights.scm index 482bba7f081a44b78a2f2d72c3435d8a6419b874..0f203e7112cf14268d0edfed39b5624375d1a859 100644 --- a/crates/grammars/src/tsx/highlights.scm +++ b/crates/grammars/src/tsx/highlights.scm @@ -389,26 +389,26 @@ (jsx_opening_element [ - (identifier) @type + (identifier) @type @tag.component.jsx (member_expression - object: (identifier) @type - property: (property_identifier) @type) + object: 
(identifier) @type @tag.component.jsx + property: (property_identifier) @type @tag.component.jsx) ]) (jsx_closing_element [ - (identifier) @type + (identifier) @type @tag.component.jsx (member_expression - object: (identifier) @type - property: (property_identifier) @type) + object: (identifier) @type @tag.component.jsx + property: (property_identifier) @type @tag.component.jsx) ]) (jsx_self_closing_element [ - (identifier) @type + (identifier) @type @tag.component.jsx (member_expression - object: (identifier) @type - property: (property_identifier) @type) + object: (identifier) @type @tag.component.jsx + property: (property_identifier) @type @tag.component.jsx) ]) (jsx_opening_element From fbdeb934519e955fd33653ff00a9ad29752fed5f Mon Sep 17 00:00:00 2001 From: Oliver Azevedo Barnes Date: Thu, 2 Apr 2026 17:43:42 +0100 Subject: [PATCH 15/63] devcontainer: Implement remote support for git checkpoint operations (#48896) Closes #47907 Implements the four git checkpoint operations (`create`, `restore`, `compare`, `diff`) that had been stubbed out for remote repositories, and related test infrastructure. Testing steps: 1. Open a project with a `.devcontainer` configuration and connect to the Dev Container 2. Open an Agent thread and ask the agent to make a code change 3. After the agent completes, verify the "Restore from checkpoint" button appears (previously missing in Dev Container sessions) 4. Click "Restore from checkpoint" and confirm the file reverts to its prior state Release Notes: - Added support for git checkpoint operations in remote/Dev Container sessions, restoring the "Restore from checkpoint" button in Agent threads. 
--------- Co-authored-by: KyleBarton --- crates/fs/src/fake_git_repo.rs | 84 +++++++++- crates/fs/tests/integration/fake_git_repo.rs | 23 ++- crates/project/src/git_store.rs | 144 ++++++++++++++++- crates/proto/proto/git.proto | 37 +++++ crates/proto/proto/zed.proto | 9 +- crates/proto/src/proto.rs | 15 ++ .../remote_server/src/remote_editing_tests.rs | 147 ++++++++++++++++++ 7 files changed, 449 insertions(+), 10 deletions(-) diff --git a/crates/fs/src/fake_git_repo.rs b/crates/fs/src/fake_git_repo.rs index fc66e27fc9a32c2a8897eb5c9faee917c21177c5..a00061452e4dbd2051b961fdde9e33dc05fba0b1 100644 --- a/crates/fs/src/fake_git_repo.rs +++ b/crates/fs/src/fake_git_repo.rs @@ -1053,10 +1053,88 @@ impl GitRepository for FakeGitRepository { fn diff_checkpoints( &self, - _base_checkpoint: GitRepositoryCheckpoint, - _target_checkpoint: GitRepositoryCheckpoint, + base_checkpoint: GitRepositoryCheckpoint, + target_checkpoint: GitRepositoryCheckpoint, ) -> BoxFuture<'_, Result> { - unimplemented!() + let executor = self.executor.clone(); + let checkpoints = self.checkpoints.clone(); + async move { + executor.simulate_random_delay().await; + let checkpoints = checkpoints.lock(); + let base = checkpoints + .get(&base_checkpoint.commit_sha) + .context(format!( + "invalid base checkpoint: {}", + base_checkpoint.commit_sha + ))?; + let target = checkpoints + .get(&target_checkpoint.commit_sha) + .context(format!( + "invalid target checkpoint: {}", + target_checkpoint.commit_sha + ))?; + + fn collect_files( + entry: &FakeFsEntry, + prefix: String, + out: &mut std::collections::BTreeMap, + ) { + match entry { + FakeFsEntry::File { content, .. } => { + out.insert(prefix, String::from_utf8_lossy(content).into_owned()); + } + FakeFsEntry::Dir { entries, .. } => { + for (name, child) in entries { + let path = if prefix.is_empty() { + name.clone() + } else { + format!("{prefix}/{name}") + }; + collect_files(child, path, out); + } + } + FakeFsEntry::Symlink { .. 
} => {} + } + } + + let mut base_files = std::collections::BTreeMap::new(); + let mut target_files = std::collections::BTreeMap::new(); + collect_files(base, String::new(), &mut base_files); + collect_files(target, String::new(), &mut target_files); + + let all_paths: std::collections::BTreeSet<&String> = + base_files.keys().chain(target_files.keys()).collect(); + + let mut diff = String::new(); + for path in all_paths { + match (base_files.get(path), target_files.get(path)) { + (Some(base_content), Some(target_content)) + if base_content != target_content => + { + diff.push_str(&format!("diff --git a/{path} b/{path}\n")); + diff.push_str(&format!("--- a/{path}\n")); + diff.push_str(&format!("+++ b/{path}\n")); + for line in base_content.lines() { + diff.push_str(&format!("-{line}\n")); + } + for line in target_content.lines() { + diff.push_str(&format!("+{line}\n")); + } + } + (Some(_), None) => { + diff.push_str(&format!("diff --git a/{path} /dev/null\n")); + diff.push_str("deleted file\n"); + } + (None, Some(_)) => { + diff.push_str(&format!("diff --git /dev/null b/{path}\n")); + diff.push_str("new file\n"); + } + _ => {} + } + } + Ok(diff) + } + .boxed() } fn default_branch( diff --git a/crates/fs/tests/integration/fake_git_repo.rs b/crates/fs/tests/integration/fake_git_repo.rs index e327f92e996bfa0e89cc60a0a9c0d919bec8bc47..6428083c161235001ef29daf3583520e7f7d25a2 100644 --- a/crates/fs/tests/integration/fake_git_repo.rs +++ b/crates/fs/tests/integration/fake_git_repo.rs @@ -155,7 +155,10 @@ async fn test_checkpoints(executor: BackgroundExecutor) { .unwrap() ); - repository.restore_checkpoint(checkpoint_1).await.unwrap(); + repository + .restore_checkpoint(checkpoint_1.clone()) + .await + .unwrap(); assert_eq!( fs.files_with_contents(Path::new("")), [ @@ -164,4 +167,22 @@ async fn test_checkpoints(executor: BackgroundExecutor) { (Path::new(path!("/foo/b")).into(), b"ipsum".into()) ] ); + + // diff_checkpoints: identical checkpoints produce empty diff + let 
diff = repository + .diff_checkpoints(checkpoint_2.clone(), checkpoint_3.clone()) + .await + .unwrap(); + assert!( + diff.is_empty(), + "identical checkpoints should produce empty diff" + ); + + // diff_checkpoints: different checkpoints produce non-empty diff + let diff = repository + .diff_checkpoints(checkpoint_1.clone(), checkpoint_2.clone()) + .await + .unwrap(); + assert!(diff.contains("b"), "diff should mention changed file 'b'"); + assert!(diff.contains("c"), "diff should mention added file 'c'"); } diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 6f838f02768a38d1c84935f5a7e7a303e682847d..e22d13b5fe5fd0bc64b6d95c52432437a41569f1 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -560,6 +560,10 @@ impl GitStore { client.add_entity_request_handler(Self::handle_run_hook); client.add_entity_request_handler(Self::handle_reset); client.add_entity_request_handler(Self::handle_show); + client.add_entity_request_handler(Self::handle_create_checkpoint); + client.add_entity_request_handler(Self::handle_restore_checkpoint); + client.add_entity_request_handler(Self::handle_compare_checkpoints); + client.add_entity_request_handler(Self::handle_diff_checkpoints); client.add_entity_request_handler(Self::handle_load_commit_diff); client.add_entity_request_handler(Self::handle_file_history); client.add_entity_request_handler(Self::handle_checkout_files); @@ -2619,6 +2623,92 @@ impl GitStore { }) } + async fn handle_create_checkpoint( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; + + let checkpoint = repository_handle + .update(&mut cx, |repository, _| repository.checkpoint()) + .await??; + + Ok(proto::GitCreateCheckpointResponse { + commit_sha: checkpoint.commit_sha.as_bytes().to_vec(), + }) + } + + async fn 
handle_restore_checkpoint( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; + + let checkpoint = GitRepositoryCheckpoint { + commit_sha: Oid::from_bytes(&envelope.payload.commit_sha)?, + }; + + repository_handle + .update(&mut cx, |repository, _| { + repository.restore_checkpoint(checkpoint) + }) + .await??; + + Ok(proto::Ack {}) + } + + async fn handle_compare_checkpoints( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; + + let left = GitRepositoryCheckpoint { + commit_sha: Oid::from_bytes(&envelope.payload.left_commit_sha)?, + }; + let right = GitRepositoryCheckpoint { + commit_sha: Oid::from_bytes(&envelope.payload.right_commit_sha)?, + }; + + let equal = repository_handle + .update(&mut cx, |repository, _| { + repository.compare_checkpoints(left, right) + }) + .await??; + + Ok(proto::GitCompareCheckpointsResponse { equal }) + } + + async fn handle_diff_checkpoints( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; + + let base = GitRepositoryCheckpoint { + commit_sha: Oid::from_bytes(&envelope.payload.base_commit_sha)?, + }; + let target = GitRepositoryCheckpoint { + commit_sha: Oid::from_bytes(&envelope.payload.target_commit_sha)?, + }; + + let diff = repository_handle + .update(&mut cx, |repository, _| { + repository.diff_checkpoints(base, target) + }) + .await??; + + Ok(proto::GitDiffCheckpointsResponse { diff }) + } + async fn handle_load_commit_diff( this: Entity, envelope: 
TypedEnvelope, @@ -6229,12 +6319,24 @@ impl Repository { } pub fn checkpoint(&mut self) -> oneshot::Receiver> { - self.send_job(None, |repo, _cx| async move { + let id = self.id; + self.send_job(None, move |repo, _cx| async move { match repo { RepositoryState::Local(LocalRepositoryState { backend, .. }) => { backend.checkpoint().await } - RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"), + RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => { + let response = client + .request(proto::GitCreateCheckpoint { + project_id: project_id.0, + repository_id: id.to_proto(), + }) + .await?; + + Ok(GitRepositoryCheckpoint { + commit_sha: Oid::from_bytes(&response.commit_sha)?, + }) + } } }) } @@ -6243,12 +6345,22 @@ impl Repository { &mut self, checkpoint: GitRepositoryCheckpoint, ) -> oneshot::Receiver> { + let id = self.id; self.send_job(None, move |repo, _cx| async move { match repo { RepositoryState::Local(LocalRepositoryState { backend, .. }) => { backend.restore_checkpoint(checkpoint).await } - RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"), + RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => { + client + .request(proto::GitRestoreCheckpoint { + project_id: project_id.0, + repository_id: id.to_proto(), + commit_sha: checkpoint.commit_sha.as_bytes().to_vec(), + }) + .await?; + Ok(()) + } } }) } @@ -6342,12 +6454,23 @@ impl Repository { left: GitRepositoryCheckpoint, right: GitRepositoryCheckpoint, ) -> oneshot::Receiver> { + let id = self.id; self.send_job(None, move |repo, _cx| async move { match repo { RepositoryState::Local(LocalRepositoryState { backend, .. }) => { backend.compare_checkpoints(left, right).await } - RepositoryState::Remote { .. 
} => anyhow::bail!("not implemented yet"), + RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => { + let response = client + .request(proto::GitCompareCheckpoints { + project_id: project_id.0, + repository_id: id.to_proto(), + left_commit_sha: left.commit_sha.as_bytes().to_vec(), + right_commit_sha: right.commit_sha.as_bytes().to_vec(), + }) + .await?; + Ok(response.equal) + } } }) } @@ -6357,6 +6480,7 @@ impl Repository { base_checkpoint: GitRepositoryCheckpoint, target_checkpoint: GitRepositoryCheckpoint, ) -> oneshot::Receiver> { + let id = self.id; self.send_job(None, move |repo, _cx| async move { match repo { RepositoryState::Local(LocalRepositoryState { backend, .. }) => { @@ -6364,7 +6488,17 @@ impl Repository { .diff_checkpoints(base_checkpoint, target_checkpoint) .await } - RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"), + RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => { + let response = client + .request(proto::GitDiffCheckpoints { + project_id: project_id.0, + repository_id: id.to_proto(), + base_commit_sha: base_checkpoint.commit_sha.as_bytes().to_vec(), + target_commit_sha: target_checkpoint.commit_sha.as_bytes().to_vec(), + }) + .await?; + Ok(response.diff) + } } }) } diff --git a/crates/proto/proto/git.proto b/crates/proto/proto/git.proto index cb878cade726002e7e09670cf7c190880d8e66cb..0cbb635d78dddc81aa7c75340f2fbebe83a474e3 100644 --- a/crates/proto/proto/git.proto +++ b/crates/proto/proto/git.proto @@ -586,6 +586,43 @@ message GitCreateWorktree { optional string commit = 5; } +message GitCreateCheckpoint { + uint64 project_id = 1; + uint64 repository_id = 2; +} + +message GitCreateCheckpointResponse { + bytes commit_sha = 1; +} + +message GitRestoreCheckpoint { + uint64 project_id = 1; + uint64 repository_id = 2; + bytes commit_sha = 3; +} + +message GitCompareCheckpoints { + uint64 project_id = 1; + uint64 repository_id = 2; + bytes left_commit_sha = 3; + bytes 
right_commit_sha = 4; +} + +message GitCompareCheckpointsResponse { + bool equal = 1; +} + +message GitDiffCheckpoints { + uint64 project_id = 1; + uint64 repository_id = 2; + bytes base_commit_sha = 3; + bytes target_commit_sha = 4; +} + +message GitDiffCheckpointsResponse { + string diff = 1; +} + message GitRemoveWorktree { uint64 project_id = 1; uint64 repository_id = 2; diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index d165bcb9529a41294d2bc25572f454c425f8c3f0..24e7c5372f2679eab1726487e1967edcef6024ed 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -467,7 +467,14 @@ message Envelope { SpawnKernelResponse spawn_kernel_response = 427; KillKernel kill_kernel = 428; GitRemoveWorktree git_remove_worktree = 431; - GitRenameWorktree git_rename_worktree = 432; // current max + GitRenameWorktree git_rename_worktree = 432; + GitCreateCheckpoint git_create_checkpoint = 433; + GitCreateCheckpointResponse git_create_checkpoint_response = 434; + GitRestoreCheckpoint git_restore_checkpoint = 435; + GitCompareCheckpoints git_compare_checkpoints = 436; + GitCompareCheckpointsResponse git_compare_checkpoints_response = 437; + GitDiffCheckpoints git_diff_checkpoints = 438; + GitDiffCheckpointsResponse git_diff_checkpoints_response = 439; // current max } reserved 87 to 88; diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 8c72fa08c57755dc45b9658db441a037d0a9fe2e..c21934338f97cc8ed3e04b917c7db84fccecd031 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -294,6 +294,13 @@ messages!( (GitCommitDetails, Background), (GitFileHistory, Background), (GitFileHistoryResponse, Background), + (GitCreateCheckpoint, Background), + (GitCreateCheckpointResponse, Background), + (GitRestoreCheckpoint, Background), + (GitCompareCheckpoints, Background), + (GitCompareCheckpointsResponse, Background), + (GitDiffCheckpoints, Background), + (GitDiffCheckpointsResponse, Background), (SetIndexText, 
Background), (Push, Background), (Fetch, Background), @@ -514,6 +521,10 @@ request_messages!( (RegisterBufferWithLanguageServers, Ack), (GitShow, GitCommitDetails), (GitFileHistory, GitFileHistoryResponse), + (GitCreateCheckpoint, GitCreateCheckpointResponse), + (GitRestoreCheckpoint, Ack), + (GitCompareCheckpoints, GitCompareCheckpointsResponse), + (GitDiffCheckpoints, GitDiffCheckpointsResponse), (GitReset, Ack), (GitDeleteBranch, Ack), (GitCheckoutFiles, Ack), @@ -696,6 +707,10 @@ entity_messages!( RegisterBufferWithLanguageServers, GitShow, GitFileHistory, + GitCreateCheckpoint, + GitRestoreCheckpoint, + GitCompareCheckpoints, + GitDiffCheckpoints, GitReset, GitDeleteBranch, GitCheckoutFiles, diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 86b7f93eb2c737cac55dbf2882f91ec277e4e174..90546773df234767489df96ee37d50e3fcaeea3b 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -1917,6 +1917,153 @@ async fn test_remote_git_branches(cx: &mut TestAppContext, server_cx: &mut TestA assert_eq!(server_branch.name(), "totally-new-branch"); } +#[gpui::test] +async fn test_remote_git_checkpoints(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + path!("/code"), + json!({ + "project1": { + ".git": {}, + "file.txt": "original content", + }, + }), + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; + + let (_worktree, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree(path!("/code/project1"), true, cx) + }) + .await + .unwrap(); + cx.run_until_parked(); + + let repository = project.update(cx, |project, cx| project.active_repository(cx).unwrap()); + + // 1. 
Create a checkpoint of the original state + let checkpoint_1 = repository + .update(cx, |repository, _| repository.checkpoint()) + .await + .unwrap() + .unwrap(); + + // 2. Modify a file on the server-side fs + fs.write( + Path::new(path!("/code/project1/file.txt")), + b"modified content", + ) + .await + .unwrap(); + + // 3. Create a second checkpoint with the modified state + let checkpoint_2 = repository + .update(cx, |repository, _| repository.checkpoint()) + .await + .unwrap() + .unwrap(); + + // 4. compare_checkpoints: same checkpoint with itself => equal + let equal = repository + .update(cx, |repository, _| { + repository.compare_checkpoints(checkpoint_1.clone(), checkpoint_1.clone()) + }) + .await + .unwrap() + .unwrap(); + assert!(equal, "a checkpoint compared with itself should be equal"); + + // 5. compare_checkpoints: different states => not equal + let equal = repository + .update(cx, |repository, _| { + repository.compare_checkpoints(checkpoint_1.clone(), checkpoint_2.clone()) + }) + .await + .unwrap() + .unwrap(); + assert!( + !equal, + "checkpoints of different states should not be equal" + ); + + // 6. diff_checkpoints: same checkpoint => empty diff + let diff = repository + .update(cx, |repository, _| { + repository.diff_checkpoints(checkpoint_1.clone(), checkpoint_1.clone()) + }) + .await + .unwrap() + .unwrap(); + assert!( + diff.is_empty(), + "diff of identical checkpoints should be empty" + ); + + // 7. 
diff_checkpoints: different checkpoints => non-empty diff mentioning the changed file + let diff = repository + .update(cx, |repository, _| { + repository.diff_checkpoints(checkpoint_1.clone(), checkpoint_2.clone()) + }) + .await + .unwrap() + .unwrap(); + assert!( + !diff.is_empty(), + "diff of different checkpoints should be non-empty" + ); + assert!( + diff.contains("file.txt"), + "diff should mention the changed file" + ); + assert!( + diff.contains("original content"), + "diff should contain removed content" + ); + assert!( + diff.contains("modified content"), + "diff should contain added content" + ); + + // 8. restore_checkpoint: restore to original state + repository + .update(cx, |repository, _| { + repository.restore_checkpoint(checkpoint_1.clone()) + }) + .await + .unwrap() + .unwrap(); + cx.run_until_parked(); + + // 9. Create a checkpoint after restore + let checkpoint_3 = repository + .update(cx, |repository, _| repository.checkpoint()) + .await + .unwrap() + .unwrap(); + + // 10. compare_checkpoints: restored state matches original + let equal = repository + .update(cx, |repository, _| { + repository.compare_checkpoints(checkpoint_1.clone(), checkpoint_3.clone()) + }) + .await + .unwrap() + .unwrap(); + assert!(equal, "restored state should match original checkpoint"); + + // 11. 
diff_checkpoints: restored state vs original => empty diff + let diff = repository + .update(cx, |repository, _| { + repository.diff_checkpoints(checkpoint_1.clone(), checkpoint_3.clone()) + }) + .await + .unwrap() + .unwrap(); + assert!(diff.is_empty(), "diff after restore should be empty"); +} + #[gpui::test] async fn test_remote_agent_fs_tool_calls(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { let fs = FakeFs::new(server_cx.executor()); From dc3f5b9972d2623330714af36cd127b3b1f791e5 Mon Sep 17 00:00:00 2001 From: Toni Alatalo Date: Thu, 2 Apr 2026 20:20:49 +0300 Subject: [PATCH 16/63] cli: Add --dev-container flag to open project in dev container (#51175) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) ## Summary Adds a `--dev-container` CLI flag that automatically triggers "Reopen in Dev Container" when a `.devcontainer/` configuration is found in the project directory. ```sh zed --dev-container /path/to/project ``` ## Motivation This enables fully scripted dev container workflows — for example, creating a git worktree and immediately opening it in a dev container without any manual UI interaction: ```sh git worktree add ../feature-branch zed --dev-container ../feature-branch ``` The dev container modal fires automatically once the workspace finishes initializing, so the environment is ready by the time you look at the window. This is useful for automation scripts that prepare environments and kick off agent runs for tasks like bug report triage. 
Here's an [example script](https://github.com/antont/todo-rs-ts/blob/main/scripts/devcontainer-new.sh) that creates a worktree and opens it as a dev container in one step. Related: #48682 requests a `devcontainer://` protocol for connecting to already-running dev containers — a complementary but different use case. This PR covers the "open project and trigger dev container setup" path. ## How it works - The `--dev-container` flag flows through the CLI IPC protocol to the workspace as an `open_in_dev_container` option. - On the first worktree scan completion, if devcontainer configs are detected, the dev container modal opens automatically. - If no `.devcontainer/` config is found, the flag is cleared and a warning is logged. ## Notable changes - **`Workspace::worktree_scans_complete`** promoted from `#[cfg(test)]` to production. It was only test-gated because it had no production callers — it's a pure read-only future with no side effects. - **`suggest_on_worktree_updated`** now takes `&mut Workspace` to read and clear the CLI flag. - Extracted **`open_dev_container_modal`** helper shared between the CLI code path and the suggest notification. ## Test plan - [x] `cargo test -p zed open_listener` — includes `test_dev_container_flag_opens_modal` and `test_dev_container_flag_cleared_without_config` - [x] `cargo test -p recent_projects` — existing suggest tests still pass - [x] Manual: `cargo run -- --dev-container /path/to/project-with-devcontainer` opens the modal Release Notes: - Added `--dev-container` CLI flag to automatically open a project in a dev container when `.devcontainer/` configuration is present. 
🤖 Generated with [Claude Code](https://claude.com/claude-code) --------- Co-authored-by: Claude Opus 4.6 --- crates/cli/src/cli.rs | 1 + crates/cli/src/main.rs | 7 + .../src/dev_container_suggest.rs | 35 ++++- crates/recent_projects/src/recent_projects.rs | 3 +- crates/workspace/src/workspace.rs | 32 ++++- crates/zed/src/main.rs | 14 +- crates/zed/src/zed.rs | 1 + crates/zed/src/zed/open_listener.rs | 127 ++++++++++++++++++ crates/zed/src/zed/windows_only_instance.rs | 1 + 9 files changed, 215 insertions(+), 6 deletions(-) diff --git a/crates/cli/src/cli.rs b/crates/cli/src/cli.rs index 1a3ce059b8116ac7438f3eb0330b47660cc863de..d8da78c53210230597dab49ce297d9fa694e62f1 100644 --- a/crates/cli/src/cli.rs +++ b/crates/cli/src/cli.rs @@ -21,6 +21,7 @@ pub enum CliRequest { reuse: bool, env: Option>, user_data_dir: Option, + dev_container: bool, }, } diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs index b8af5896285d3080ca3320a5909b3f58f72de643..41f2d14c1908ac18e7ea297eef19d8d9bd1cf8b5 100644 --- a/crates/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -118,6 +118,12 @@ struct Args { /// Will attempt to give the correct command to run #[arg(long)] system_specs: bool, + /// Open the project in a dev container. + /// + /// Automatically triggers "Reopen in Dev Container" if a `.devcontainer/` + /// configuration is found in the project directory. + #[arg(long)] + dev_container: bool, /// Pairs of file paths to diff. Can be specified multiple times. /// When directories are provided, recurses into them and shows all changed files in a single multi-diff view. 
#[arg(long, action = clap::ArgAction::Append, num_args = 2, value_names = ["OLD_PATH", "NEW_PATH"])] @@ -670,6 +676,7 @@ fn main() -> Result<()> { reuse: args.reuse, env, user_data_dir: user_data_dir_for_thread, + dev_container: args.dev_container, })?; while let Ok(response) = rx.recv() { diff --git a/crates/recent_projects/src/dev_container_suggest.rs b/crates/recent_projects/src/dev_container_suggest.rs index b134833688fa081c288e5b90a371bc3c462401f0..759eef2ba32074a964979ee670c0b4ec216f404b 100644 --- a/crates/recent_projects/src/dev_container_suggest.rs +++ b/crates/recent_projects/src/dev_container_suggest.rs @@ -30,17 +30,20 @@ fn project_devcontainer_key(project_path: &str) -> String { } pub fn suggest_on_worktree_updated( + workspace: &mut Workspace, worktree_id: WorktreeId, updated_entries: &UpdatedEntriesSet, project: &gpui::Entity, window: &mut Window, cx: &mut Context, ) { + let cli_auto_open = workspace.open_in_dev_container(); + let devcontainer_updated = updated_entries.iter().any(|(path, _, _)| { path.as_ref() == devcontainer_dir_path() || path.as_ref() == devcontainer_json_path() }); - if !devcontainer_updated { + if !devcontainer_updated && !cli_auto_open { return; } @@ -54,7 +57,35 @@ pub fn suggest_on_worktree_updated( return; } - if find_configs_in_snapshot(worktree).is_empty() { + let has_configs = !find_configs_in_snapshot(worktree).is_empty(); + + if cli_auto_open { + workspace.set_open_in_dev_container(false); + let task = cx.spawn_in(window, async move |workspace, cx| { + let scans_complete = + workspace.update(cx, |workspace, cx| workspace.worktree_scans_complete(cx))?; + scans_complete.await; + + workspace.update_in(cx, |workspace, window, cx| { + let has_configs = workspace + .project() + .read(cx) + .worktrees(cx) + .any(|wt| !find_configs_in_snapshot(wt.read(cx)).is_empty()); + if has_configs { + cx.on_next_frame(window, move |_workspace, window, cx| { + window.dispatch_action(Box::new(zed_actions::OpenDevContainer), cx); + }); + } 
else { + log::warn!("--dev-container: no devcontainer configuration found in project"); + } + }) + }); + workspace.set_dev_container_task(task); + return; + } + + if !has_configs { return; } diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index e1a0cb0609a9883bfe73048eda64cc8d1b299c2e..b3f918e204c5600193cd01a0f7569888d333edd9 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -475,11 +475,12 @@ pub fn init(cx: &mut App) { cx.subscribe_in( workspace.project(), window, - move |_, project, event, window, cx| { + move |workspace, project, event, window, cx| { if let project::Event::WorktreeUpdatedEntries(worktree_id, updated_entries) = event { dev_container_suggest::suggest_on_worktree_updated( + workspace, *worktree_id, updated_entries, project, diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index aa692ab39a6084126c9b15b07856549364b13842..ecc03806f7eeffbb62ad1340022e0ea475fe9531 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -1344,6 +1344,8 @@ pub struct Workspace { scheduled_tasks: Vec>, last_open_dock_positions: Vec, removing: bool, + open_in_dev_container: bool, + _dev_container_task: Option>>, _panels_task: Option>>, sidebar_focus_handle: Option, multi_workspace: Option>, @@ -1778,6 +1780,8 @@ impl Workspace { removing: false, sidebar_focus_handle: None, multi_workspace, + open_in_dev_container: false, + _dev_container_task: None, } } @@ -2800,6 +2804,18 @@ impl Workspace { self.debugger_provider = Some(Arc::new(provider)); } + pub fn set_open_in_dev_container(&mut self, value: bool) { + self.open_in_dev_container = value; + } + + pub fn open_in_dev_container(&self) -> bool { + self.open_in_dev_container + } + + pub fn set_dev_container_task(&mut self, task: Task>) { + self._dev_container_task = Some(task); + } + pub fn debugger_provider(&self) -> Option> { 
self.debugger_provider.clone() } @@ -3026,7 +3042,6 @@ impl Workspace { self.project.read(cx).visible_worktrees(cx) } - #[cfg(any(test, feature = "test-support"))] pub fn worktree_scans_complete(&self, cx: &App) -> impl Future + 'static + use<> { let futures = self .worktrees(cx) @@ -9214,6 +9229,7 @@ pub struct OpenOptions { pub requesting_window: Option>, pub open_mode: OpenMode, pub env: Option>, + pub open_in_dev_container: bool, } /// The result of opening a workspace via [`open_paths`], [`Workspace::new_local`], @@ -9393,12 +9409,17 @@ pub fn open_paths( } } + let open_in_dev_container = open_options.open_in_dev_container; + let result = if let Some((existing, target_workspace)) = existing { let open_task = existing .update(cx, |multi_workspace, window, cx| { window.activate_window(); multi_workspace.activate(target_workspace.clone(), window, cx); target_workspace.update(cx, |workspace, cx| { + if open_in_dev_container { + workspace.set_open_in_dev_container(true); + } workspace.open_paths( abs_paths, OpenOptions { @@ -9426,6 +9447,13 @@ pub fn open_paths( Ok(OpenResult { window: existing, workspace: target_workspace, opened_items: open_task }) } else { + let init = if open_in_dev_container { + Some(Box::new(|workspace: &mut Workspace, _window: &mut Window, _cx: &mut Context| { + workspace.set_open_in_dev_container(true); + }) as Box) + Send>) + } else { + None + }; let result = cx .update(move |cx| { Workspace::new_local( @@ -9433,7 +9461,7 @@ pub fn open_paths( app_state.clone(), open_options.requesting_window, open_options.env, - None, + init, open_options.open_mode, cx, ) diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 303f21b8ffa62f9d9f380d9c18beecd77775df20..0e1cbc96ff1521626bfe8bcf62091404324132a0 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -857,6 +857,7 @@ fn main() { diff_paths, wsl, diff_all: diff_all_mode, + dev_container: args.dev_container, }) } @@ -1208,6 +1209,7 @@ fn handle_open_request(request: 
OpenRequest, app_state: Arc, cx: &mut } let mut task = None; + let dev_container = request.dev_container; if !request.open_paths.is_empty() || !request.diff_paths.is_empty() { let app_state = app_state.clone(); task = Some(cx.spawn(async move |cx| { @@ -1218,7 +1220,10 @@ fn handle_open_request(request: OpenRequest, app_state: Arc, cx: &mut &request.diff_paths, request.diff_all, app_state, - workspace::OpenOptions::default(), + workspace::OpenOptions { + open_in_dev_container: dev_container, + ..Default::default() + }, cx, ) .await?; @@ -1636,6 +1641,13 @@ struct Args { #[arg(long, value_name = "USER@DISTRO")] wsl: Option, + /// Open the project in a dev container. + /// + /// Automatically triggers "Reopen in Dev Container" if a `.devcontainer/` + /// configuration is found in the project directory. + #[arg(long)] + dev_container: bool, + /// Instructs zed to run as a dev server on this machine. (not implemented) #[arg(long)] dev_server_token: Option, diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 75fe04feff794f21ff3fdd0e763084e4887a040b..fbebb37985c2ebd76a63db5b4b807a8a7e0203ce 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -5167,6 +5167,7 @@ mod tests { app_state.languages.add(markdown_lang()); gpui_tokio::init(cx); + AppState::set_global(app_state.clone(), cx); theme_settings::init(theme::LoadThemes::JustBase, cx); audio::init(cx); channel::init(&app_state.client, app_state.user_store.clone(), cx); diff --git a/crates/zed/src/zed/open_listener.rs b/crates/zed/src/zed/open_listener.rs index 7645eae88d69f777f650ac9f86724bfef0f10bc5..0a302291cacc8caa9e0618da00b8d7c6370ccf0e 100644 --- a/crates/zed/src/zed/open_listener.rs +++ b/crates/zed/src/zed/open_listener.rs @@ -37,6 +37,7 @@ pub struct OpenRequest { pub open_paths: Vec, pub diff_paths: Vec<[String; 2]>, pub diff_all: bool, + pub dev_container: bool, pub open_channel_notes: Vec<(u64, Option)>, pub join_channel: Option, pub remote_connection: Option, @@ -78,6 +79,7 @@ impl 
OpenRequest { this.diff_paths = request.diff_paths; this.diff_all = request.diff_all; + this.dev_container = request.dev_container; if let Some(wsl) = request.wsl { let (user, distro_name) = if let Some((user, distro)) = wsl.split_once('@') { if user.is_empty() { @@ -256,6 +258,7 @@ pub struct RawOpenRequest { pub urls: Vec, pub diff_paths: Vec<[String; 2]>, pub diff_all: bool, + pub dev_container: bool, pub wsl: Option, } @@ -413,6 +416,7 @@ pub async fn handle_cli_connection( reuse, env, user_data_dir: _, + dev_container, } => { if !urls.is_empty() { cx.update(|cx| { @@ -421,6 +425,7 @@ pub async fn handle_cli_connection( urls, diff_paths, diff_all, + dev_container, wsl, }, cx, @@ -450,6 +455,7 @@ pub async fn handle_cli_connection( reuse, &responses, wait, + dev_container, app_state.clone(), env, cx, @@ -471,6 +477,7 @@ async fn open_workspaces( reuse: bool, responses: &IpcSender, wait: bool, + dev_container: bool, app_state: Arc, env: Option>, cx: &mut AsyncApp, @@ -532,6 +539,7 @@ async fn open_workspaces( requesting_window: replace_window, wait, env: env.clone(), + open_in_dev_container: dev_container, ..Default::default() }; @@ -1545,4 +1553,123 @@ mod tests { }) .unwrap(); } + + #[gpui::test] + async fn test_dev_container_flag_opens_modal(cx: &mut TestAppContext) { + let app_state = init_test(cx); + cx.update(|cx| recent_projects::init(cx)); + + app_state + .fs + .as_fake() + .insert_tree( + path!("/project"), + json!({ + ".devcontainer": { + "devcontainer.json": "{}" + }, + "src": { + "main.rs": "fn main() {}" + } + }), + ) + .await; + + let (response_tx, _) = ipc::channel::().unwrap(); + let errored = cx + .spawn({ + let app_state = app_state.clone(); + |mut cx| async move { + open_local_workspace( + vec![path!("/project").to_owned()], + vec![], + false, + workspace::OpenOptions { + open_in_dev_container: true, + ..Default::default() + }, + &response_tx, + &app_state, + &mut cx, + ) + .await + } + }) + .await; + + assert!(!errored); + + let 
multi_workspace = cx.update(|cx| cx.windows()[0].downcast::().unwrap()); + multi_workspace + .update(cx, |multi_workspace, _, cx| { + let flag = multi_workspace.workspace().read(cx).open_in_dev_container(); + assert!( + !flag, + "open_in_dev_container flag should be consumed by suggest_on_worktree_updated" + ); + }) + .unwrap(); + } + + #[gpui::test] + async fn test_dev_container_flag_cleared_without_config(cx: &mut TestAppContext) { + let app_state = init_test(cx); + cx.update(|cx| recent_projects::init(cx)); + + app_state + .fs + .as_fake() + .insert_tree( + path!("/project"), + json!({ + "src": { + "main.rs": "fn main() {}" + } + }), + ) + .await; + + let (response_tx, _) = ipc::channel::().unwrap(); + let errored = cx + .spawn({ + let app_state = app_state.clone(); + |mut cx| async move { + open_local_workspace( + vec![path!("/project").to_owned()], + vec![], + false, + workspace::OpenOptions { + open_in_dev_container: true, + ..Default::default() + }, + &response_tx, + &app_state, + &mut cx, + ) + .await + } + }) + .await; + + assert!(!errored); + + // Let any pending worktree scan events and updates settle. + cx.run_until_parked(); + + // With no .devcontainer config, the flag should be cleared once the + // worktree scan completes, rather than persisting on the workspace. 
+ let multi_workspace = cx.update(|cx| cx.windows()[0].downcast::().unwrap()); + multi_workspace + .update(cx, |multi_workspace, _, cx| { + let flag = multi_workspace + .workspace() + .read(cx) + .open_in_dev_container(); + assert!( + !flag, + "open_in_dev_container flag should be cleared when no devcontainer config exists" + ); + }) + .unwrap(); + } } diff --git a/crates/zed/src/zed/windows_only_instance.rs b/crates/zed/src/zed/windows_only_instance.rs index 5790715bc13bdcc68d180519d9176873bd81bc50..f22f49e26a982cb8cb68e21645033819e059de36 100644 --- a/crates/zed/src/zed/windows_only_instance.rs +++ b/crates/zed/src/zed/windows_only_instance.rs @@ -162,6 +162,7 @@ fn send_args_to_instance(args: &Args) -> anyhow::Result<()> { reuse: false, env: None, user_data_dir: args.user_data_dir.clone(), + dev_container: args.dev_container, } }; From 05c749c3d7807dc5b655c8f0467fab32305e6bac Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 2 Apr 2026 15:06:35 -0300 Subject: [PATCH 17/63] settings_ui: Make all number fields editable (#52986) Taking advantage that we do have this capability now within the number field component. I initially thought that some wouldn't make sense to be editable but upon further reflection, why not? The buttons continue to work, but if you want to type a more precise value, it should be possible, too! 
Release Notes: - N/A --- crates/settings_ui/src/settings_ui.rs | 59 ++++++--------------------- 1 file changed, 12 insertions(+), 47 deletions(-) diff --git a/crates/settings_ui/src/settings_ui.rs b/crates/settings_ui/src/settings_ui.rs index 70aaaa15412793aae54c7c29fe8a2613854c8adb..634db0e247fdc370c479df0ed4f6d1f84a5284f6 100644 --- a/crates/settings_ui/src/settings_ui.rs +++ b/crates/settings_ui/src/settings_ui.rs @@ -500,18 +500,18 @@ fn init_renderers(cx: &mut App) { .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::>(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::>(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) @@ -4051,41 +4051,6 @@ fn render_toggle_button + From + Copy>( .into_any_element() } -fn 
render_number_field( - field: SettingField, - file: SettingsUiFile, - _metadata: Option<&SettingsFieldMetadata>, - window: &mut Window, - cx: &mut App, -) -> AnyElement { - let (_, value) = SettingsStore::global(cx).get_value_from_file(file.to_settings(), field.pick); - let value = value.copied().unwrap_or_else(T::min_value); - - let id = field - .json_path - .map(|p| format!("numeric_stepper_{}", p)) - .unwrap_or_else(|| "numeric_stepper".to_string()); - - NumberField::new(id, value, window, cx) - .tab_index(0_isize) - .on_change({ - move |value, window, cx| { - let value = *value; - update_settings_file( - file.clone(), - field.json_path, - window, - cx, - move |settings, _cx| { - (field.write)(settings, Some(value)); - }, - ) - .log_err(); // todo(settings_ui) don't log err - } - }) - .into_any_element() -} - fn render_editable_number_field( field: SettingField, file: SettingsUiFile, From 34f51c1b0d44e8611dbed12485446c127ee48618 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 2 Apr 2026 15:15:09 -0300 Subject: [PATCH 18/63] agent_ui: Remeasure changed entries in the thread view list (#53017) This PR fixes an issue where, sometimes, you couldn't scroll all the way to the bottom of the thread's content. The scrollbar would show up at the bottom of the scrollable container but the content was visibly cut off. Turns out that's a consequence of the top-down thread generation introduced in https://github.com/zed-industries/zed/pull/52440, where changing the list alignment to `Top` made it visible that sometimes, the maximum scroll area would get underestimated because the items in the thread view's list would have a stale height measurement. So, the way this PR fixes the issue is by calling `splice_focusable` in the `EntryUpdated` event, too, so that the height of the items in the overdraw area get marked as unmeasured, triggering a list re-render and re-measuring. 
We started by writing a test at the list level that would reproduce the regression but then later figured out that this is not an inherent list problem; it was rather a problem with its use within the thread view layer. Then, we explored writing a test that documented the regression, but it turned out to be very hard to simulate this sort of set up in which certain elements would have its height changed during streaming, which would be how you'd get to a mismatched height situation. Therefore, given `AcpThreadEvent::NewEntry` already called `splice_focusable` and don't have a test for it, we figure it'd be safe to move forward without one, too. We then introduced a helper that's now shared between `AcpThreadEvent::NewEntry` and `AcpThreadEvent::EntryUpdated`. Release Notes: - Agent: Fixed an issue where sometimes you couldn't scroll all the way to the bottom of the thread even though there's visibly more content below the fold. Co-authored-by: Eric Holk --- crates/agent_ui/src/conversation_view.rs | 57 +++++++++++++++++++----- 1 file changed, 45 insertions(+), 12 deletions(-) diff --git a/crates/agent_ui/src/conversation_view.rs b/crates/agent_ui/src/conversation_view.rs index 83a0c158a11c54be1ff54f553ce4b427da2cabc2..924f59437e51b02217289a5570f9560948c23ca2 100644 --- a/crates/agent_ui/src/conversation_view.rs +++ b/crates/agent_ui/src/conversation_view.rs @@ -1240,15 +1240,15 @@ impl ConversationView { if let Some(active) = self.thread_view(&thread_id) { let entry_view_state = active.read(cx).entry_view_state.clone(); let list_state = active.read(cx).list_state.clone(); - entry_view_state.update(cx, |view_state, cx| { - view_state.sync_entry(index, thread, window, cx); - list_state.splice_focusable( - index..index, - [view_state - .entry(index) - .and_then(|entry| entry.focus_handle(cx))], - ); - }); + notify_entry_changed( + &entry_view_state, + &list_state, + index..index, + index, + thread, + window, + cx, + ); active.update(cx, |active, cx| { 
active.sync_editor_mode_for_empty_state(cx); }); @@ -1257,9 +1257,16 @@ impl ConversationView { AcpThreadEvent::EntryUpdated(index) => { if let Some(active) = self.thread_view(&thread_id) { let entry_view_state = active.read(cx).entry_view_state.clone(); - entry_view_state.update(cx, |view_state, cx| { - view_state.sync_entry(*index, thread, window, cx) - }); + let list_state = active.read(cx).list_state.clone(); + notify_entry_changed( + &entry_view_state, + &list_state, + *index..*index + 1, + *index, + thread, + window, + cx, + ); active.update(cx, |active, cx| { active.auto_expand_streaming_thought(cx); }); @@ -2598,6 +2605,32 @@ impl ConversationView { } } +/// Syncs an entry's view state with the latest thread data and splices +/// the list item so the list knows to re-measure it on the next paint. +/// +/// Used by both `NewEntry` (splice range `index..index` to insert) and +/// `EntryUpdated` (splice range `index..index+1` to replace), which is +/// why the caller provides the splice range. 
+fn notify_entry_changed( + entry_view_state: &Entity, + list_state: &ListState, + splice_range: std::ops::Range, + index: usize, + thread: &Entity, + window: &mut Window, + cx: &mut App, +) { + entry_view_state.update(cx, |view_state, cx| { + view_state.sync_entry(index, thread, window, cx); + list_state.splice_focusable( + splice_range, + [view_state + .entry(index) + .and_then(|entry| entry.focus_handle(cx))], + ); + }); +} + fn loading_contents_spinner(size: IconSize) -> AnyElement { Icon::new(IconName::LoadCircle) .size(size) From cb99ab4ac7ba4c8e60c437491ff2891e039fde26 Mon Sep 17 00:00:00 2001 From: Aleksei Gusev Date: Thu, 2 Apr 2026 21:26:18 +0300 Subject: [PATCH 19/63] Add PageUp/PageDown scrolling in agent view (#52657) Fixes #52656 Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #52656 Release Notes: - Added keybindings for scrolling in agent view --------- Co-authored-by: Oleksiy Syvokon --- assets/keymaps/default-linux.json | 24 ++++ assets/keymaps/default-macos.json | 24 ++++ assets/keymaps/default-windows.json | 24 ++++ crates/agent_ui/src/agent_ui.rs | 16 +++ crates/agent_ui/src/conversation_view.rs | 7 +- .../src/conversation_view/thread_view.rs | 131 ++++++++++++++++-- crates/gpui/src/elements/list.rs | 7 + crates/vim/src/test/vim_test_context.rs | 12 +- docs/src/ai/agent-panel.md | 4 +- 9 files changed, 231 insertions(+), 18 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 98053432c5a186ecc886318f2d677f73a62295a2..4930fbea84b2b449f3b5c35fee2a390525cb3551 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ 
-284,12 +284,36 @@ "context": "AcpThread", "bindings": { "ctrl--": "pane::GoBack", + "pageup": "agent::ScrollOutputPageUp", + "pagedown": "agent::ScrollOutputPageDown", + "home": "agent::ScrollOutputToTop", + "end": "agent::ScrollOutputToBottom", + "up": "agent::ScrollOutputLineUp", + "down": "agent::ScrollOutputLineDown", + "shift-pageup": "agent::ScrollOutputToPreviousMessage", + "shift-pagedown": "agent::ScrollOutputToNextMessage", + "ctrl-alt-pageup": "agent::ScrollOutputPageUp", + "ctrl-alt-pagedown": "agent::ScrollOutputPageDown", + "ctrl-alt-home": "agent::ScrollOutputToTop", + "ctrl-alt-end": "agent::ScrollOutputToBottom", + "ctrl-alt-up": "agent::ScrollOutputLineUp", + "ctrl-alt-down": "agent::ScrollOutputLineDown", + "ctrl-alt-shift-pageup": "agent::ScrollOutputToPreviousMessage", + "ctrl-alt-shift-pagedown": "agent::ScrollOutputToNextMessage", }, }, { "context": "AcpThread > Editor", "use_key_equivalents": true, "bindings": { + "ctrl-alt-pageup": "agent::ScrollOutputPageUp", + "ctrl-alt-pagedown": "agent::ScrollOutputPageDown", + "ctrl-alt-home": "agent::ScrollOutputToTop", + "ctrl-alt-end": "agent::ScrollOutputToBottom", + "ctrl-alt-up": "agent::ScrollOutputLineUp", + "ctrl-alt-down": "agent::ScrollOutputLineDown", + "ctrl-alt-shift-pageup": "agent::ScrollOutputToPreviousMessage", + "ctrl-alt-shift-pagedown": "agent::ScrollOutputToNextMessage", "ctrl-shift-r": "agent::OpenAgentDiff", "ctrl-shift-d": "git::Diff", "shift-alt-y": "agent::KeepAll", diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index f0835a139a39602547d9d8da1cba93eaa7ee82a9..85c01bb33b54c30a55b5d046d03eb391d8c058c1 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -327,12 +327,36 @@ "context": "AcpThread", "bindings": { "ctrl--": "pane::GoBack", + "pageup": "agent::ScrollOutputPageUp", + "pagedown": "agent::ScrollOutputPageDown", + "home": "agent::ScrollOutputToTop", + "end": "agent::ScrollOutputToBottom", + "up": 
"agent::ScrollOutputLineUp", + "down": "agent::ScrollOutputLineDown", + "shift-pageup": "agent::ScrollOutputToPreviousMessage", + "shift-pagedown": "agent::ScrollOutputToNextMessage", + "ctrl-pageup": "agent::ScrollOutputPageUp", + "ctrl-pagedown": "agent::ScrollOutputPageDown", + "ctrl-home": "agent::ScrollOutputToTop", + "ctrl-end": "agent::ScrollOutputToBottom", + "ctrl-alt-up": "agent::ScrollOutputLineUp", + "ctrl-alt-down": "agent::ScrollOutputLineDown", + "ctrl-alt-pageup": "agent::ScrollOutputToPreviousMessage", + "ctrl-alt-pagedown": "agent::ScrollOutputToNextMessage", }, }, { "context": "AcpThread > Editor", "use_key_equivalents": true, "bindings": { + "ctrl-pageup": "agent::ScrollOutputPageUp", + "ctrl-pagedown": "agent::ScrollOutputPageDown", + "ctrl-home": "agent::ScrollOutputToTop", + "ctrl-end": "agent::ScrollOutputToBottom", + "ctrl-alt-up": "agent::ScrollOutputLineUp", + "ctrl-alt-down": "agent::ScrollOutputLineDown", + "ctrl-alt-pageup": "agent::ScrollOutputToPreviousMessage", + "ctrl-alt-pagedown": "agent::ScrollOutputToNextMessage", "shift-ctrl-r": "agent::OpenAgentDiff", "shift-ctrl-d": "git::Diff", "shift-alt-y": "agent::KeepAll", diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index 41f36638e1dec40890ddecc6a808c669672e9317..0705717062ab5015de20cc3b93f651f867b5116d 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -285,12 +285,36 @@ "context": "AcpThread", "bindings": { "ctrl--": "pane::GoBack", + "pageup": "agent::ScrollOutputPageUp", + "pagedown": "agent::ScrollOutputPageDown", + "home": "agent::ScrollOutputToTop", + "end": "agent::ScrollOutputToBottom", + "up": "agent::ScrollOutputLineUp", + "down": "agent::ScrollOutputLineDown", + "shift-pageup": "agent::ScrollOutputToPreviousMessage", + "shift-pagedown": "agent::ScrollOutputToNextMessage", + "ctrl-alt-pageup": "agent::ScrollOutputPageUp", + "ctrl-alt-pagedown": "agent::ScrollOutputPageDown", + 
"ctrl-alt-home": "agent::ScrollOutputToTop", + "ctrl-alt-end": "agent::ScrollOutputToBottom", + "ctrl-alt-up": "agent::ScrollOutputLineUp", + "ctrl-alt-down": "agent::ScrollOutputLineDown", + "ctrl-alt-shift-pageup": "agent::ScrollOutputToPreviousMessage", + "ctrl-alt-shift-pagedown": "agent::ScrollOutputToNextMessage", }, }, { "context": "AcpThread > Editor", "use_key_equivalents": true, "bindings": { + "ctrl-alt-pageup": "agent::ScrollOutputPageUp", + "ctrl-alt-pagedown": "agent::ScrollOutputPageDown", + "ctrl-alt-home": "agent::ScrollOutputToTop", + "ctrl-alt-end": "agent::ScrollOutputToBottom", + "ctrl-alt-up": "agent::ScrollOutputLineUp", + "ctrl-alt-down": "agent::ScrollOutputLineDown", + "ctrl-alt-shift-pageup": "agent::ScrollOutputToPreviousMessage", + "ctrl-alt-shift-pagedown": "agent::ScrollOutputToNextMessage", "ctrl-shift-r": "agent::OpenAgentDiff", "ctrl-shift-d": "git::Diff", "shift-alt-y": "agent::KeepAll", diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index 185a54825d3af18f16f2eb30188ea866c099bf32..e58c7eb3526cc1a53d7b8e6d449e968a5923425a 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -173,6 +173,22 @@ actions!( ToggleThinkingEffortMenu, /// Toggles fast mode for models that support it. ToggleFastMode, + /// Scroll the output by one page up. + ScrollOutputPageUp, + /// Scroll the output by one page down. + ScrollOutputPageDown, + /// Scroll the output up by three lines. + ScrollOutputLineUp, + /// Scroll the output down by three lines. + ScrollOutputLineDown, + /// Scroll the output to the top. + ScrollOutputToTop, + /// Scroll the output to the bottom. + ScrollOutputToBottom, + /// Scroll the output to the previous user message. + ScrollOutputToPreviousMessage, + /// Scroll the output to the next user message. 
+ ScrollOutputToNextMessage, ] ); diff --git a/crates/agent_ui/src/conversation_view.rs b/crates/agent_ui/src/conversation_view.rs index 924f59437e51b02217289a5570f9560948c23ca2..1b9d364e9ce03702b47c63e8a856f0ba4b8aba87 100644 --- a/crates/agent_ui/src/conversation_view.rs +++ b/crates/agent_ui/src/conversation_view.rs @@ -85,8 +85,11 @@ use crate::{ AuthorizeToolCall, ClearMessageQueue, CycleFavoriteModels, CycleModeSelector, CycleThinkingEffort, EditFirstQueuedMessage, ExpandMessageEditor, Follow, KeepAll, NewThread, OpenAddContextMenu, OpenAgentDiff, OpenHistory, RejectAll, RejectOnce, - RemoveFirstQueuedMessage, SendImmediately, SendNextQueuedMessage, ToggleFastMode, - ToggleProfileSelector, ToggleThinkingEffortMenu, ToggleThinkingMode, UndoLastReject, + RemoveFirstQueuedMessage, ScrollOutputLineDown, ScrollOutputLineUp, ScrollOutputPageDown, + ScrollOutputPageUp, ScrollOutputToBottom, ScrollOutputToNextMessage, + ScrollOutputToPreviousMessage, ScrollOutputToTop, SendImmediately, SendNextQueuedMessage, + ToggleFastMode, ToggleProfileSelector, ToggleThinkingEffortMenu, ToggleThinkingMode, + UndoLastReject, }; const STOPWATCH_THRESHOLD: Duration = Duration::from_secs(30); diff --git a/crates/agent_ui/src/conversation_view/thread_view.rs b/crates/agent_ui/src/conversation_view/thread_view.rs index c065c3de3d83c0eb5b68bf9a3610ff925762c952..c113eb0b768ee143eb69b5e705c15c91e367e6c2 100644 --- a/crates/agent_ui/src/conversation_view/thread_view.rs +++ b/crates/agent_ui/src/conversation_view/thread_view.rs @@ -552,17 +552,10 @@ impl ThreadView { let scroll_top = list_state.logical_scroll_top(); let _ = thread_view.update(cx, |this, cx| { if !is_following_tail { - let is_at_bottom = { - let current_offset = - list_state.scroll_px_offset_for_scrollbar().y.abs(); - let max_offset = list_state.max_offset_for_scrollbar().y; - current_offset >= max_offset - px(1.0) - }; - let is_generating = matches!(this.thread.read(cx).status(), ThreadStatus::Generating); - if is_at_bottom 
&& is_generating { + if list_state.is_at_bottom() && is_generating { list_state.set_follow_tail(true); } } @@ -4952,7 +4945,7 @@ impl ThreadView { } pub fn scroll_to_end(&mut self, cx: &mut Context) { - self.list_state.scroll_to_end(); + self.list_state.set_follow_tail(true); cx.notify(); } @@ -4974,10 +4967,122 @@ impl ThreadView { } pub(crate) fn scroll_to_top(&mut self, cx: &mut Context) { + self.list_state.set_follow_tail(false); self.list_state.scroll_to(ListOffset::default()); cx.notify(); } + fn scroll_output_page_up( + &mut self, + _: &ScrollOutputPageUp, + _window: &mut Window, + cx: &mut Context, + ) { + let page_height = self.list_state.viewport_bounds().size.height; + self.list_state.set_follow_tail(false); + self.list_state.scroll_by(-page_height * 0.9); + cx.notify(); + } + + fn scroll_output_page_down( + &mut self, + _: &ScrollOutputPageDown, + _window: &mut Window, + cx: &mut Context, + ) { + let page_height = self.list_state.viewport_bounds().size.height; + self.list_state.set_follow_tail(false); + self.list_state.scroll_by(page_height * 0.9); + if self.list_state.is_at_bottom() { + self.list_state.set_follow_tail(true); + } + cx.notify(); + } + + fn scroll_output_line_up( + &mut self, + _: &ScrollOutputLineUp, + window: &mut Window, + cx: &mut Context, + ) { + self.list_state.set_follow_tail(false); + self.list_state.scroll_by(-window.line_height() * 3.); + cx.notify(); + } + + fn scroll_output_line_down( + &mut self, + _: &ScrollOutputLineDown, + window: &mut Window, + cx: &mut Context, + ) { + self.list_state.set_follow_tail(false); + self.list_state.scroll_by(window.line_height() * 3.); + if self.list_state.is_at_bottom() { + self.list_state.set_follow_tail(true); + } + cx.notify(); + } + + fn scroll_output_to_top( + &mut self, + _: &ScrollOutputToTop, + _window: &mut Window, + cx: &mut Context, + ) { + self.scroll_to_top(cx); + } + + fn scroll_output_to_bottom( + &mut self, + _: &ScrollOutputToBottom, + _window: &mut Window, + cx: &mut 
Context, + ) { + self.scroll_to_end(cx); + } + + fn scroll_output_to_previous_message( + &mut self, + _: &ScrollOutputToPreviousMessage, + _window: &mut Window, + cx: &mut Context, + ) { + let entries = self.thread.read(cx).entries(); + let current_ix = self.list_state.logical_scroll_top().item_ix; + if let Some(target_ix) = (0..current_ix) + .rev() + .find(|&i| matches!(entries.get(i), Some(AgentThreadEntry::UserMessage(_)))) + { + self.list_state.set_follow_tail(false); + self.list_state.scroll_to(ListOffset { + item_ix: target_ix, + offset_in_item: px(0.), + }); + cx.notify(); + } + } + + fn scroll_output_to_next_message( + &mut self, + _: &ScrollOutputToNextMessage, + _window: &mut Window, + cx: &mut Context, + ) { + let entries = self.thread.read(cx).entries(); + let current_ix = self.list_state.logical_scroll_top().item_ix; + if let Some(target_ix) = (current_ix + 1..entries.len()) + .find(|&i| matches!(entries.get(i), Some(AgentThreadEntry::UserMessage(_)))) + { + self.list_state.set_follow_tail(false); + self.list_state.scroll_to(ListOffset { + item_ix: target_ix, + offset_in_item: px(0.), + }); + cx.notify(); + } + } + pub fn open_thread_as_markdown( &self, workspace: Entity, @@ -8541,6 +8646,14 @@ impl Render for ThreadView { .on_action(cx.listener(Self::handle_toggle_command_pattern)) .on_action(cx.listener(Self::open_permission_dropdown)) .on_action(cx.listener(Self::open_add_context_menu)) + .on_action(cx.listener(Self::scroll_output_page_up)) + .on_action(cx.listener(Self::scroll_output_page_down)) + .on_action(cx.listener(Self::scroll_output_line_up)) + .on_action(cx.listener(Self::scroll_output_line_down)) + .on_action(cx.listener(Self::scroll_output_to_top)) + .on_action(cx.listener(Self::scroll_output_to_bottom)) + .on_action(cx.listener(Self::scroll_output_to_previous_message)) + .on_action(cx.listener(Self::scroll_output_to_next_message)) .on_action(cx.listener(|this, _: &ToggleFastMode, _window, cx| { this.toggle_fast_mode(cx); })) diff --git 
a/crates/gpui/src/elements/list.rs b/crates/gpui/src/elements/list.rs index ed441e3b40534690d02b31109e719c60dd5802e0..b4c8e7ca9015190fb8bb1698f79f1b025bfa4829 100644 --- a/crates/gpui/src/elements/list.rs +++ b/crates/gpui/src/elements/list.rs @@ -427,6 +427,13 @@ impl ListState { self.0.borrow().follow_tail } + /// Returns whether the list is scrolled to the bottom (within 1px). + pub fn is_at_bottom(&self) -> bool { + let current_offset = self.scroll_px_offset_for_scrollbar().y.abs(); + let max_offset = self.max_offset_for_scrollbar().y; + current_offset >= max_offset - px(1.0) + } + /// Scroll the list to the given offset pub fn scroll_to(&self, mut scroll_top: ListOffset) { let state = &mut *self.0.borrow_mut(); diff --git a/crates/vim/src/test/vim_test_context.rs b/crates/vim/src/test/vim_test_context.rs index 510d218df050455d0df0f9c2b7b782a651694cd7..6f15450aa3f70593c6877c293fecb765978e065d 100644 --- a/crates/vim/src/test/vim_test_context.rs +++ b/crates/vim/src/test/vim_test_context.rs @@ -109,12 +109,12 @@ impl VimTestContext { } cx.bind_keys(default_key_bindings); if enabled { - let vim_key_bindings = settings::KeymapFile::load_asset( - "keymaps/vim.json", - Some(settings::KeybindSource::Vim), - cx, - ) - .unwrap(); + let mut vim_key_bindings = + settings::KeymapFile::load_asset_allow_partial_failure("keymaps/vim.json", cx) + .unwrap(); + for key_binding in &mut vim_key_bindings { + key_binding.set_meta(settings::KeybindSource::Vim.meta()); + } cx.bind_keys(vim_key_bindings); } } diff --git a/docs/src/ai/agent-panel.md b/docs/src/ai/agent-panel.md index 2da2f37a67edea48e0c34b14cab1ec0fc81a522b..89b0126c55a12b08d4f21a01fea38758c4d509b7 100644 --- a/docs/src/ai/agent-panel.md +++ b/docs/src/ai/agent-panel.md @@ -67,7 +67,9 @@ Right-click on any agent response in the thread view to access a context menu wi ### Navigating the Thread {#navigating-the-thread} -In long conversations, use the scroll arrow buttons at the bottom of the panel to jump to your most 
recent prompt or to the very beginning of the thread. +In long conversations, use the scroll arrow buttons at the bottom of the panel to jump to your most recent prompt or to the very beginning of the thread. You can also scroll the thread using arrow keys, Page Up/Down, Home/End, and Shift+Page Up/Down to jump between messages, when the thread pane is focused. + +When focus is in the message editor, you can also use {#kb agent::ScrollOutputPageUp}, {#kb agent::ScrollOutputPageDown}, {#kb agent::ScrollOutputToTop}, {#kb agent::ScrollOutputToBottom}, {#kb agent::ScrollOutputLineUp}, and {#kb agent::ScrollOutputLineDown} to navigate the thread, or {#kb agent::ScrollOutputToPreviousMessage} and {#kb agent::ScrollOutputToNextMessage} to jump between your prompts. ### Navigating History {#navigating-history} From 34c77a0eb9fd35bc19a19788ecdbf19c64f0b582 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 2 Apr 2026 15:27:35 -0300 Subject: [PATCH 20/63] collab_panel: Add small design adjustments (#52994) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Some tiny tweaks so that things look just a bit tidier in the collab panel. | Before | After | |--------|--------| | Screenshot 2026-04-02 at 11  39@2x | Screenshot 2026-04-02 at 11 
34@2x | Release Notes: - N/A --- crates/collab_ui/src/collab_panel.rs | 113 +++++++++++++++++---------- 1 file changed, 71 insertions(+), 42 deletions(-) diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index 91385b298dc661c4a79e4fb52d5be0f38672bff5..d16db59ea4ae2d766018dfc03c245839e4862cb4 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -13,12 +13,13 @@ use db::kvp::KeyValueStore; use editor::{Editor, EditorElement, EditorStyle}; use fuzzy::{StringMatch, StringMatchCandidate, match_strings}; use gpui::{ - AnyElement, App, AsyncWindowContext, Bounds, ClickEvent, ClipboardItem, Context, DismissEvent, - Div, Entity, EventEmitter, FocusHandle, Focusable, FontStyle, InteractiveElement, IntoElement, - KeyContext, ListOffset, ListState, MouseDownEvent, ParentElement, Pixels, Point, PromptLevel, - Render, SharedString, Styled, Subscription, Task, TextStyle, WeakEntity, Window, actions, - anchored, canvas, deferred, div, fill, list, point, prelude::*, px, + AnyElement, App, AsyncWindowContext, Bounds, ClickEvent, ClipboardItem, DismissEvent, Div, + Empty, Entity, EventEmitter, FocusHandle, Focusable, FontStyle, KeyContext, ListOffset, + ListState, MouseDownEvent, Pixels, Point, PromptLevel, SharedString, Subscription, Task, + TextStyle, WeakEntity, Window, actions, anchored, canvas, deferred, div, fill, list, point, + prelude::*, px, }; + use menu::{Cancel, Confirm, SecondaryConfirm, SelectNext, SelectPrevious}; use project::{Fs, Project}; use rpc::{ @@ -1091,27 +1092,30 @@ impl CollabPanel { room.read(cx).local_participant().role == proto::ChannelRole::Admin }); + let end_slot = if is_pending { + Label::new("Calling").color(Color::Muted).into_any_element() + } else if is_current_user { + IconButton::new("leave-call", IconName::Exit) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Leave Call")) + .on_click(move |_, window, cx| Self::leave_call(window, cx)) + .into_any_element() 
+ } else if role == proto::ChannelRole::Guest { + Label::new("Guest").color(Color::Muted).into_any_element() + } else if role == proto::ChannelRole::Talker { + Label::new("Mic only") + .color(Color::Muted) + .into_any_element() + } else { + Empty.into_any_element() + }; + ListItem::new(user.github_login.clone()) .start_slot(Avatar::new(user.avatar_uri.clone())) .child(render_participant_name_and_handle(user)) .toggle_state(is_selected) - .end_slot(if is_pending { - Label::new("Calling").color(Color::Muted).into_any_element() - } else if is_current_user { - IconButton::new("leave-call", IconName::Exit) - .style(ButtonStyle::Subtle) - .on_click(move |_, window, cx| Self::leave_call(window, cx)) - .tooltip(Tooltip::text("Leave Call")) - .into_any_element() - } else if role == proto::ChannelRole::Guest { - Label::new("Guest").color(Color::Muted).into_any_element() - } else if role == proto::ChannelRole::Talker { - Label::new("Mic only") - .color(Color::Muted) - .into_any_element() - } else { - div().into_any_element() - }) + .end_slot(end_slot) + .tooltip(Tooltip::text("Click to Follow")) .when_some(peer_id, |el, peer_id| { if role == proto::ChannelRole::Guest { return el; @@ -1156,6 +1160,7 @@ impl CollabPanel { .into(); ListItem::new(project_id as usize) + .height(px(24.)) .toggle_state(is_selected) .on_click(cx.listener(move |this, _, window, cx| { this.workspace @@ -1173,9 +1178,13 @@ impl CollabPanel { })) .start_slot( h_flex() - .gap_1() + .gap_1p5() .child(render_tree_branch(is_last, false, window, cx)) - .child(IconButton::new(0, IconName::Folder)), + .child( + Icon::new(IconName::Folder) + .size(IconSize::Small) + .color(Color::Muted), + ), ) .child(Label::new(project_name.clone())) .tooltip(Tooltip::text(format!("Open {}", project_name))) @@ -1192,12 +1201,17 @@ impl CollabPanel { let id = peer_id.map_or(usize::MAX, |id| id.as_u64() as usize); ListItem::new(("screen", id)) + .height(px(24.)) .toggle_state(is_selected) .start_slot( h_flex() - .gap_1() + 
.gap_1p5() .child(render_tree_branch(is_last, false, window, cx)) - .child(IconButton::new(0, IconName::Screen)), + .child( + Icon::new(IconName::Screen) + .size(IconSize::Small) + .color(Color::Muted), + ), ) .child(Label::new("Screen")) .when_some(peer_id, |this, _| { @@ -1208,7 +1222,7 @@ impl CollabPanel { }) .ok(); })) - .tooltip(Tooltip::text("Open shared screen")) + .tooltip(Tooltip::text("Open Shared Screen")) }) } @@ -1232,7 +1246,9 @@ impl CollabPanel { ) -> impl IntoElement { let channel_store = self.channel_store.read(cx); let has_channel_buffer_changed = channel_store.has_channel_buffer_changed(channel_id); + ListItem::new("channel-notes") + .height(px(24.)) .toggle_state(is_selected) .on_click(cx.listener(move |this, _, window, cx| { this.open_channel_notes(channel_id, window, cx); @@ -1240,17 +1256,25 @@ impl CollabPanel { .start_slot( h_flex() .relative() - .gap_1() + .gap_1p5() .child(render_tree_branch(false, true, window, cx)) - .child(IconButton::new(0, IconName::File)) - .children(has_channel_buffer_changed.then(|| { - div() - .w_1p5() - .absolute() - .right(px(2.)) - .top(px(2.)) - .child(Indicator::dot().color(Color::Info)) - })), + .child( + h_flex() + .child( + Icon::new(IconName::Reader) + .size(IconSize::Small) + .color(Color::Muted), + ) + .when(has_channel_buffer_changed, |this| { + this.child( + div() + .absolute() + .top_neg_0p5() + .right_0() + .child(Indicator::dot().color(Color::Info)), + ) + }), + ), ) .child(Label::new("notes")) .tooltip(Tooltip::text("Open Channel Notes")) @@ -3144,10 +3168,14 @@ impl CollabPanel { (IconName::Star, Color::Default, "Add to Favorites") }; + let height = px(24.); + h_flex() .id(ix) .group("") + .h(height) .w_full() + .overflow_hidden() .when(!channel.is_root_channel(), |el| { el.on_drag(channel.clone(), move |channel, _, _, cx| { cx.new(|_| DraggedChannelView { @@ -3175,6 +3203,7 @@ impl CollabPanel { ) .child( ListItem::new(ix) + .height(height) // Add one level of depth for the disclosure arrow. 
.indent_level(depth + 1) .indent_step_size(px(20.)) @@ -3256,12 +3285,13 @@ impl CollabPanel { .child( h_flex() .visible_on_hover("") + .h_full() .absolute() .right_0() .px_1() .gap_px() - .bg(cx.theme().colors().background) .rounded_l_md() + .bg(cx.theme().colors().background) .child({ let focus_handle = self.focus_handle.clone(); IconButton::new("channel_favorite", favorite_icon) @@ -3335,9 +3365,8 @@ fn render_tree_branch( ) -> impl IntoElement { let rem_size = window.rem_size(); let line_height = window.text_style().line_height_in_pixels(rem_size); - let width = rem_size * 1.5; let thickness = px(1.); - let color = cx.theme().colors().text; + let color = cx.theme().colors().icon_disabled; canvas( |_, _, _| {}, @@ -3367,8 +3396,8 @@ fn render_tree_branch( )); }, ) - .w(width) - .h(line_height) + .w(rem_size) + .h(line_height - px(2.)) } fn render_participant_name_and_handle(user: &User) -> impl IntoElement { From 29609d3599c10fbec40a3756cfeac41d5e04e57e Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Thu, 2 Apr 2026 22:06:57 +0200 Subject: [PATCH 21/63] language_model: Decouple from Zed-specific implementation details (#52913) This PR decouples `language_model`'s dependence on Zed-specific implementation details. 
In particular * `credentials_provider` is split into a generic `credentials_provider` crate that provides a trait, and `zed_credentials_provider` that implements the said trait for Zed-specific providers and has functions that can populate a global state with them * `zed_env_vars` is split into a generic `env_var` crate that provides generic tooling for managing env vars, and `zed_env_vars` that contains Zed-specific statics * `client` is now dependent on `language_model` and not vice versa Release Notes: - N/A --- Cargo.lock | 40 +++- Cargo.toml | 4 + crates/agent/src/edit_agent/evals.rs | 5 +- crates/agent/src/tests/mod.rs | 8 +- .../src/tools/evals/streaming_edit_file.rs | 5 +- crates/agent_servers/Cargo.toml | 2 +- crates/agent_servers/src/custom.rs | 3 +- crates/agent_servers/src/e2e_tests.rs | 4 +- .../add_llm_provider_modal.rs | 2 +- crates/agent_ui/src/agent_diff.rs | 4 +- crates/agent_ui/src/inline_assistant.rs | 5 +- crates/client/Cargo.toml | 3 + crates/client/src/client.rs | 72 ++++++- crates/client/src/llm_token.rs | 116 +++++++++++ crates/codestral/Cargo.toml | 1 + crates/codestral/src/codestral.rs | 3 +- crates/credentials_provider/Cargo.toml | 4 - .../src/credentials_provider.rs | 167 +--------------- crates/edit_prediction/Cargo.toml | 2 + crates/edit_prediction/src/capture_example.rs | 4 +- crates/edit_prediction/src/edit_prediction.rs | 38 +++- .../src/edit_prediction_tests.rs | 9 +- crates/edit_prediction/src/mercury.rs | 7 +- .../edit_prediction/src/open_ai_compatible.rs | 3 +- crates/edit_prediction_cli/src/headless.rs | 5 +- crates/env_var/Cargo.toml | 15 ++ crates/env_var/LICENSE-GPL | 1 + crates/env_var/src/env_var.rs | 40 ++++ crates/eval_cli/src/headless.rs | 5 +- crates/language_model/Cargo.toml | 3 +- crates/language_model/src/api_key.rs | 22 +-- crates/language_model/src/language_model.rs | 13 +- .../language_model/src/model/cloud_model.rs | 158 ++------------- crates/language_models/src/language_models.rs | 78 ++++++-- 
.../language_models/src/provider/anthropic.rs | 37 +++- .../language_models/src/provider/bedrock.rs | 14 +- crates/language_models/src/provider/cloud.rs | 29 +-- .../language_models/src/provider/deepseek.rs | 37 +++- crates/language_models/src/provider/google.rs | 37 +++- .../language_models/src/provider/lmstudio.rs | 45 ++++- .../language_models/src/provider/mistral.rs | 37 +++- crates/language_models/src/provider/ollama.rs | 38 +++- .../language_models/src/provider/open_ai.rs | 37 +++- .../src/provider/open_ai_compatible.rs | 31 ++- .../src/provider/open_router.rs | 29 ++- .../language_models/src/provider/opencode.rs | 37 +++- crates/language_models/src/provider/vercel.rs | 37 +++- .../src/provider/vercel_ai_gateway.rs | 29 ++- crates/language_models/src/provider/x_ai.rs | 37 +++- crates/project/Cargo.toml | 1 + crates/project/src/context_server_store.rs | 11 +- crates/settings_ui/Cargo.toml | 1 + .../pages/edit_prediction_provider_setup.rs | 17 +- crates/web_search_providers/src/cloud.rs | 14 +- crates/zed/src/main.rs | 9 +- crates/zed/src/visual_test_runner.rs | 7 +- crates/zed/src/zed.rs | 7 +- .../zed/src/zed/edit_prediction_registry.rs | 7 +- crates/zed_credentials_provider/Cargo.toml | 22 +++ crates/zed_credentials_provider/LICENSE-GPL | 1 + .../src/zed_credentials_provider.rs | 181 ++++++++++++++++++ crates/zed_env_vars/Cargo.toml | 2 +- crates/zed_env_vars/src/zed_env_vars.rs | 41 +--- 63 files changed, 1122 insertions(+), 561 deletions(-) create mode 100644 crates/client/src/llm_token.rs create mode 100644 crates/env_var/Cargo.toml create mode 120000 crates/env_var/LICENSE-GPL create mode 100644 crates/env_var/src/env_var.rs create mode 100644 crates/zed_credentials_provider/Cargo.toml create mode 120000 crates/zed_credentials_provider/LICENSE-GPL create mode 100644 crates/zed_credentials_provider/src/zed_credentials_provider.rs diff --git a/Cargo.lock b/Cargo.lock index ce645cae5bf4bbf76dac037880e9e7038df67df9..aae7afecc5ea6f6ba3d63453321c829b677e1c58 
100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -260,7 +260,6 @@ dependencies = [ "chrono", "client", "collections", - "credentials_provider", "env_logger 0.11.8", "feature_flags", "fs", @@ -289,6 +288,7 @@ dependencies = [ "util", "uuid", "watch", + "zed_credentials_provider", ] [[package]] @@ -2856,6 +2856,7 @@ dependencies = [ "chrono", "clock", "cloud_api_client", + "cloud_api_types", "cloud_llm_client", "collections", "credentials_provider", @@ -2869,6 +2870,7 @@ dependencies = [ "http_client", "http_client_tls", "httparse", + "language_model", "log", "objc2-foundation", "parking_lot", @@ -2900,6 +2902,7 @@ dependencies = [ "util", "windows 0.61.3", "worktree", + "zed_credentials_provider", ] [[package]] @@ -3059,6 +3062,7 @@ dependencies = [ "serde", "serde_json", "text", + "zed_credentials_provider", "zeta_prompt", ] @@ -4035,12 +4039,8 @@ name = "credentials_provider" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", "gpui", - "paths", - "release_channel", "serde", - "serde_json", ] [[package]] @@ -5115,6 +5115,7 @@ dependencies = [ "collections", "copilot", "copilot_ui", + "credentials_provider", "ctor", "db", "edit_prediction_context", @@ -5157,6 +5158,7 @@ dependencies = [ "workspace", "worktree", "zed_actions", + "zed_credentials_provider", "zeta_prompt", "zlog", "zstd", @@ -5583,6 +5585,13 @@ dependencies = [ "log", ] +[[package]] +name = "env_var" +version = "0.1.0" +dependencies = [ + "gpui", +] + [[package]] name = "envy" version = "0.4.2" @@ -9315,12 +9324,12 @@ dependencies = [ "anthropic", "anyhow", "base64 0.22.1", - "client", "cloud_api_client", "cloud_api_types", "cloud_llm_client", "collections", "credentials_provider", + "env_var", "futures 0.3.31", "gpui", "http_client", @@ -9336,7 +9345,6 @@ dependencies = [ "smol", "thiserror 2.0.17", "util", - "zed_env_vars", ] [[package]] @@ -13137,6 +13145,7 @@ dependencies = [ "wax", "which 6.0.3", "worktree", + "zed_credentials_provider", "zeroize", "zlog", "ztracing", @@ -15746,6 +15755,7 
@@ dependencies = [ "util", "workspace", "zed_actions", + "zed_credentials_provider", ] [[package]] @@ -22180,10 +22190,24 @@ dependencies = [ ] [[package]] -name = "zed_env_vars" +name = "zed_credentials_provider" version = "0.1.0" dependencies = [ + "anyhow", + "credentials_provider", + "futures 0.3.31", "gpui", + "paths", + "release_channel", + "serde", + "serde_json", +] + +[[package]] +name = "zed_env_vars" +version = "0.1.0" +dependencies = [ + "env_var", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 3a393237ab9f5a5a8cd4b02517f6d22382ff51ff..81bbb1176ddddcc117fc9082586cbc08dbb95d61 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -61,6 +61,7 @@ members = [ "crates/edit_prediction_ui", "crates/editor", "crates/encoding_selector", + "crates/env_var", "crates/etw_tracing", "crates/eval_cli", "crates/eval_utils", @@ -220,6 +221,7 @@ members = [ "crates/x_ai", "crates/zed", "crates/zed_actions", + "crates/zed_credentials_provider", "crates/zed_env_vars", "crates/zeta_prompt", "crates/zlog", @@ -309,6 +311,7 @@ dev_container = { path = "crates/dev_container" } diagnostics = { path = "crates/diagnostics" } editor = { path = "crates/editor" } encoding_selector = { path = "crates/encoding_selector" } +env_var = { path = "crates/env_var" } etw_tracing = { path = "crates/etw_tracing" } eval_utils = { path = "crates/eval_utils" } extension = { path = "crates/extension" } @@ -465,6 +468,7 @@ worktree = { path = "crates/worktree" } x_ai = { path = "crates/x_ai" } zed = { path = "crates/zed" } zed_actions = { path = "crates/zed_actions" } +zed_credentials_provider = { path = "crates/zed_credentials_provider" } zed_env_vars = { path = "crates/zed_env_vars" } edit_prediction = { path = "crates/edit_prediction" } zeta_prompt = { path = "crates/zeta_prompt" } diff --git a/crates/agent/src/edit_agent/evals.rs b/crates/agent/src/edit_agent/evals.rs index e7b67e37bf4a8b71664a78b99b757c6985794ec6..ba8b7ed867ea26bcdcdee7f8bf20390c2f9592b3 100644 --- 
a/crates/agent/src/edit_agent/evals.rs +++ b/crates/agent/src/edit_agent/evals.rs @@ -4,7 +4,7 @@ use crate::{ ListDirectoryTool, ListDirectoryToolInput, ReadFileTool, ReadFileToolInput, }; use Role::*; -use client::{Client, UserStore}; +use client::{Client, RefreshLlmTokenListener, UserStore}; use eval_utils::{EvalOutput, EvalOutputProcessor, OutcomeKind}; use fs::FakeFs; use futures::{FutureExt, future::LocalBoxFuture}; @@ -1423,7 +1423,8 @@ impl EditAgentTest { let client = Client::production(cx); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); settings::init(cx); - language_model::init(user_store.clone(), client.clone(), cx); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); language_models::init(user_store, client.clone(), cx); }); diff --git a/crates/agent/src/tests/mod.rs b/crates/agent/src/tests/mod.rs index 036a6f1030c43b16d51f864a1d0176891e90b772..9808b95dd0812f9a857da8a9c39e78fde40af1f9 100644 --- a/crates/agent/src/tests/mod.rs +++ b/crates/agent/src/tests/mod.rs @@ -6,7 +6,7 @@ use acp_thread::{ use agent_client_protocol::{self as acp}; use agent_settings::AgentProfileId; use anyhow::Result; -use client::{Client, UserStore}; +use client::{Client, RefreshLlmTokenListener, UserStore}; use collections::IndexMap; use context_server::{ContextServer, ContextServerCommand, ContextServerId}; use feature_flags::FeatureFlagAppExt as _; @@ -3253,7 +3253,8 @@ async fn test_agent_connection(cx: &mut TestAppContext) { let clock = Arc::new(clock::FakeSystemClock::new()); let client = Client::new(clock, http_client, cx); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); - language_model::init(user_store.clone(), client.clone(), cx); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); language_models::init(user_store, client.clone(), cx); LanguageModelRegistry::test(cx); }); @@ -3982,7 +3983,8 @@ async fn setup(cx: &mut 
TestAppContext, model: TestModel) -> ThreadTest { cx.set_http_client(Arc::new(http_client)); let client = Client::production(cx); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); - language_model::init(user_store.clone(), client.clone(), cx); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); language_models::init(user_store, client.clone(), cx); } }; diff --git a/crates/agent/src/tools/evals/streaming_edit_file.rs b/crates/agent/src/tools/evals/streaming_edit_file.rs index 6a55517037e54ae4166cd22427201d9325ef0f76..0c6290ec098f9c37a0f6a077daf0a041c013d8ff 100644 --- a/crates/agent/src/tools/evals/streaming_edit_file.rs +++ b/crates/agent/src/tools/evals/streaming_edit_file.rs @@ -6,7 +6,7 @@ use crate::{ }; use Role::*; use anyhow::{Context as _, Result}; -use client::{Client, UserStore}; +use client::{Client, RefreshLlmTokenListener, UserStore}; use fs::FakeFs; use futures::{FutureExt, StreamExt, future::LocalBoxFuture}; use gpui::{AppContext as _, AsyncApp, Entity, TestAppContext, UpdateGlobal as _}; @@ -274,7 +274,8 @@ impl StreamingEditToolTest { cx.set_http_client(http_client); let client = Client::production(cx); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); - language_model::init(user_store.clone(), client.clone(), cx); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); language_models::init(user_store, client, cx); }); diff --git a/crates/agent_servers/Cargo.toml b/crates/agent_servers/Cargo.toml index 1542466be35bbce80983a73a3fc2e0998799160c..7151f0084b1cb7d9b206f57551ce715ef67483f7 100644 --- a/crates/agent_servers/Cargo.toml +++ b/crates/agent_servers/Cargo.toml @@ -32,7 +32,6 @@ futures.workspace = true gpui.workspace = true feature_flags.workspace = true gpui_tokio = { workspace = true, optional = true } -credentials_provider.workspace = true google_ai.workspace = true http_client.workspace = true 
indoc.workspace = true @@ -53,6 +52,7 @@ terminal.workspace = true uuid.workspace = true util.workspace = true watch.workspace = true +zed_credentials_provider.workspace = true [target.'cfg(unix)'.dependencies] libc.workspace = true diff --git a/crates/agent_servers/src/custom.rs b/crates/agent_servers/src/custom.rs index 0dcd2240d6ecf6dc052cdd55953cff8ec1442eae..fb8d0a515244576d2cf02e4989cbd71beca448c7 100644 --- a/crates/agent_servers/src/custom.rs +++ b/crates/agent_servers/src/custom.rs @@ -3,7 +3,6 @@ use acp_thread::AgentConnection; use agent_client_protocol as acp; use anyhow::{Context as _, Result}; use collections::HashSet; -use credentials_provider::CredentialsProvider; use fs::Fs; use gpui::{App, AppContext as _, Entity, Task}; use language_model::{ApiKey, EnvVar}; @@ -392,7 +391,7 @@ fn api_key_for_gemini_cli(cx: &mut App) -> Task> { if let Some(key) = env_var.value { return Task::ready(Ok(key)); } - let credentials_provider = ::global(cx); + let credentials_provider = zed_credentials_provider::global(cx); let api_url = google_ai::API_URL.to_string(); cx.spawn(async move |cx| { Ok( diff --git a/crates/agent_servers/src/e2e_tests.rs b/crates/agent_servers/src/e2e_tests.rs index 956d106df2a260bd2eb31c14f4f1f1705bf74cd6..aa29a0c230c13949b15f2b39a245ae41ead4884d 100644 --- a/crates/agent_servers/src/e2e_tests.rs +++ b/crates/agent_servers/src/e2e_tests.rs @@ -1,6 +1,7 @@ use crate::{AgentServer, AgentServerDelegate}; use acp_thread::{AcpThread, AgentThreadEntry, ToolCall, ToolCallStatus}; use agent_client_protocol as acp; +use client::RefreshLlmTokenListener; use futures::{FutureExt, StreamExt, channel::mpsc, select}; use gpui::AppContext; use gpui::{Entity, TestAppContext}; @@ -413,7 +414,8 @@ pub async fn init_test(cx: &mut TestAppContext) -> Arc { cx.set_http_client(Arc::new(http_client)); let client = client::Client::production(cx); let user_store = cx.new(|cx| client::UserStore::new(client.clone(), cx)); - language_model::init(user_store, client, cx); 
+ language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store, cx); #[cfg(test)] project::agent_server_store::AllAgentServersSettings::override_global( diff --git a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs index 4e3dd63b0337f9be54b550f4f4a6a5ca2e7cdd42..b97583377a00d28ea1a8aae6a1380cff3b69e6a0 100644 --- a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs +++ b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs @@ -815,7 +815,7 @@ mod tests { cx.set_global(store); theme_settings::init(theme::LoadThemes::JustBase, cx); - language_model::init_settings(cx); + language_model::init(cx); editor::init(cx); }); diff --git a/crates/agent_ui/src/agent_diff.rs b/crates/agent_ui/src/agent_diff.rs index d5cf63f6cdde9a85a54daaa29f8fc2c6833bdd77..7b70740dd1ac462614a9d08d9e48d7d13ac2ed32 100644 --- a/crates/agent_ui/src/agent_diff.rs +++ b/crates/agent_ui/src/agent_diff.rs @@ -1809,7 +1809,7 @@ mod tests { cx.set_global(settings_store); prompt_store::init(cx); theme_settings::init(theme::LoadThemes::JustBase, cx); - language_model::init_settings(cx); + language_model::init(cx); }); let fs = FakeFs::new(cx.executor()); @@ -1966,7 +1966,7 @@ mod tests { cx.set_global(settings_store); prompt_store::init(cx); theme_settings::init(theme::LoadThemes::JustBase, cx); - language_model::init_settings(cx); + language_model::init(cx); workspace::register_project_item::(cx); }); diff --git a/crates/agent_ui/src/inline_assistant.rs b/crates/agent_ui/src/inline_assistant.rs index 20e0b702978b7e72a8526b03570854965335310c..39d70790e0d4a18554b2a1c11510e529d921cd1b 100644 --- a/crates/agent_ui/src/inline_assistant.rs +++ b/crates/agent_ui/src/inline_assistant.rs @@ -2025,7 +2025,7 @@ fn merge_ranges(ranges: &mut Vec>, buffer: &MultiBufferSnapshot) { pub mod evals { use crate::InlineAssistant; use agent::ThreadStore; - use client::{Client, 
UserStore}; + use client::{Client, RefreshLlmTokenListener, UserStore}; use editor::{Editor, MultiBuffer, MultiBufferOffset}; use eval_utils::{EvalOutput, NoProcessor}; use fs::FakeFs; @@ -2091,7 +2091,8 @@ pub mod evals { client::init(&client, cx); workspace::init(app_state.clone(), cx); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); - language_model::init(user_store.clone(), client.clone(), cx); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); language_models::init(user_store, client.clone(), cx); cx.set_global(inline_assistant); diff --git a/crates/client/Cargo.toml b/crates/client/Cargo.toml index 1edbb3399e4332e2ebd23f812c66697bda72d587..7bbaccb22e0e6c7508240186103e216f83be2f0c 100644 --- a/crates/client/Cargo.toml +++ b/crates/client/Cargo.toml @@ -22,6 +22,7 @@ base64.workspace = true chrono = { workspace = true, features = ["serde"] } clock.workspace = true cloud_api_client.workspace = true +cloud_api_types.workspace = true cloud_llm_client.workspace = true collections.workspace = true credentials_provider.workspace = true @@ -35,6 +36,7 @@ gpui_tokio.workspace = true http_client.workspace = true http_client_tls.workspace = true httparse = "1.10" +language_model.workspace = true log.workspace = true parking_lot.workspace = true paths.workspace = true @@ -60,6 +62,7 @@ tokio.workspace = true url.workspace = true util.workspace = true worktree.workspace = true +zed_credentials_provider.workspace = true [dev-dependencies] clock = { workspace = true, features = ["test-support"] } diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 6a11a6b924eed3dfd79ff379638ed4085e2b7bcb..dfd9963a0ee52d167f8d4edb0b850f4debed7fd4 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -1,6 +1,7 @@ #[cfg(any(test, feature = "test-support"))] pub mod test; +mod llm_token; mod proxy; pub mod telemetry; pub mod user; @@ -13,8 +14,9 @@ use 
async_tungstenite::tungstenite::{ http::{HeaderValue, Request, StatusCode}, }; use clock::SystemClock; -use cloud_api_client::CloudApiClient; use cloud_api_client::websocket_protocol::MessageToClient; +use cloud_api_client::{ClientApiError, CloudApiClient}; +use cloud_api_types::OrganizationId; use credentials_provider::CredentialsProvider; use feature_flags::FeatureFlagAppExt as _; use futures::{ @@ -24,6 +26,7 @@ use futures::{ }; use gpui::{App, AsyncApp, Entity, Global, Task, WeakEntity, actions}; use http_client::{HttpClient, HttpClientWithUrl, http, read_proxy_from_env}; +use language_model::LlmApiToken; use parking_lot::{Mutex, RwLock}; use postage::watch; use proxy::connect_proxy_stream; @@ -51,6 +54,7 @@ use tokio::net::TcpStream; use url::Url; use util::{ConnectionResult, ResultExt}; +pub use llm_token::*; pub use rpc::*; pub use telemetry_events::Event; pub use user::*; @@ -339,7 +343,7 @@ pub struct ClientCredentialsProvider { impl ClientCredentialsProvider { pub fn new(cx: &App) -> Self { Self { - provider: ::global(cx), + provider: zed_credentials_provider::global(cx), } } @@ -568,6 +572,10 @@ impl Client { self.http.clone() } + pub fn credentials_provider(&self) -> Arc { + self.credentials_provider.provider.clone() + } + pub fn cloud_client(&self) -> Arc { self.cloud_client.clone() } @@ -1513,6 +1521,66 @@ impl Client { }) } + pub async fn acquire_llm_token( + &self, + llm_token: &LlmApiToken, + organization_id: Option, + ) -> Result { + let system_id = self.telemetry().system_id().map(|x| x.to_string()); + let cloud_client = self.cloud_client(); + match llm_token + .acquire(&cloud_client, system_id, organization_id) + .await + { + Ok(token) => Ok(token), + Err(ClientApiError::Unauthorized) => { + self.request_sign_out(); + Err(ClientApiError::Unauthorized).context("Failed to create LLM token") + } + Err(err) => Err(anyhow::Error::from(err)), + } + } + + pub async fn refresh_llm_token( + &self, + llm_token: &LlmApiToken, + organization_id: Option, + 
) -> Result { + let system_id = self.telemetry().system_id().map(|x| x.to_string()); + let cloud_client = self.cloud_client(); + match llm_token + .refresh(&cloud_client, system_id, organization_id) + .await + { + Ok(token) => Ok(token), + Err(ClientApiError::Unauthorized) => { + self.request_sign_out(); + return Err(ClientApiError::Unauthorized).context("Failed to create LLM token"); + } + Err(err) => return Err(anyhow::Error::from(err)), + } + } + + pub async fn clear_and_refresh_llm_token( + &self, + llm_token: &LlmApiToken, + organization_id: Option, + ) -> Result { + let system_id = self.telemetry().system_id().map(|x| x.to_string()); + let cloud_client = self.cloud_client(); + match llm_token + .clear_and_refresh(&cloud_client, system_id, organization_id) + .await + { + Ok(token) => Ok(token), + Err(ClientApiError::Unauthorized) => { + self.request_sign_out(); + return Err(ClientApiError::Unauthorized).context("Failed to create LLM token"); + } + Err(err) => return Err(anyhow::Error::from(err)), + } + } + pub async fn sign_out(self: &Arc, cx: &AsyncApp) { self.state.write().credentials = None; self.cloud_client.clear_credentials(); diff --git a/crates/client/src/llm_token.rs b/crates/client/src/llm_token.rs new file mode 100644 index 0000000000000000000000000000000000000000..f62aa6dd4dc3462bc3a0f6f46c35f0e4e5499816 --- /dev/null +++ b/crates/client/src/llm_token.rs @@ -0,0 +1,116 @@ +use super::{Client, UserStore}; +use cloud_api_types::websocket_protocol::MessageToClient; +use cloud_llm_client::{EXPIRED_LLM_TOKEN_HEADER_NAME, OUTDATED_LLM_TOKEN_HEADER_NAME}; +use gpui::{ + App, AppContext as _, Context, Entity, EventEmitter, Global, ReadGlobal as _, Subscription, +}; +use language_model::LlmApiToken; +use std::sync::Arc; + +pub trait NeedsLlmTokenRefresh { + /// Returns whether the LLM token needs to be refreshed. 
+ fn needs_llm_token_refresh(&self) -> bool; +} + +impl NeedsLlmTokenRefresh for http_client::Response { + fn needs_llm_token_refresh(&self) -> bool { + self.headers().get(EXPIRED_LLM_TOKEN_HEADER_NAME).is_some() + || self.headers().get(OUTDATED_LLM_TOKEN_HEADER_NAME).is_some() + } +} + +enum TokenRefreshMode { + Refresh, + ClearAndRefresh, +} + +pub fn global_llm_token(cx: &App) -> LlmApiToken { + RefreshLlmTokenListener::global(cx) + .read(cx) + .llm_api_token + .clone() +} + +struct GlobalRefreshLlmTokenListener(Entity); + +impl Global for GlobalRefreshLlmTokenListener {} + +pub struct LlmTokenRefreshedEvent; + +pub struct RefreshLlmTokenListener { + client: Arc, + user_store: Entity, + llm_api_token: LlmApiToken, + _subscription: Subscription, +} + +impl EventEmitter for RefreshLlmTokenListener {} + +impl RefreshLlmTokenListener { + pub fn register(client: Arc, user_store: Entity, cx: &mut App) { + let listener = cx.new(|cx| RefreshLlmTokenListener::new(client, user_store, cx)); + cx.set_global(GlobalRefreshLlmTokenListener(listener)); + } + + pub fn global(cx: &App) -> Entity { + GlobalRefreshLlmTokenListener::global(cx).0.clone() + } + + fn new(client: Arc, user_store: Entity, cx: &mut Context) -> Self { + client.add_message_to_client_handler({ + let this = cx.weak_entity(); + move |message, cx| { + if let Some(this) = this.upgrade() { + Self::handle_refresh_llm_token(this, message, cx); + } + } + }); + + let subscription = cx.subscribe(&user_store, |this, _user_store, event, cx| { + if matches!(event, super::user::Event::OrganizationChanged) { + this.refresh(TokenRefreshMode::ClearAndRefresh, cx); + } + }); + + Self { + client, + user_store, + llm_api_token: LlmApiToken::default(), + _subscription: subscription, + } + } + + fn refresh(&self, mode: TokenRefreshMode, cx: &mut Context) { + let client = self.client.clone(); + let llm_api_token = self.llm_api_token.clone(); + let organization_id = self + .user_store + .read(cx) + .current_organization() + 
.map(|organization| organization.id.clone()); + cx.spawn(async move |this, cx| { + match mode { + TokenRefreshMode::Refresh => { + client + .refresh_llm_token(&llm_api_token, organization_id) + .await?; + } + TokenRefreshMode::ClearAndRefresh => { + client + .clear_and_refresh_llm_token(&llm_api_token, organization_id) + .await?; + } + } + this.update(cx, |_this, cx| cx.emit(LlmTokenRefreshedEvent)) + }) + .detach_and_log_err(cx); + } + + fn handle_refresh_llm_token(this: Entity, message: &MessageToClient, cx: &mut App) { + match message { + MessageToClient::UserUpdated => { + this.update(cx, |this, cx| this.refresh(TokenRefreshMode::Refresh, cx)); + } + } + } +} diff --git a/crates/codestral/Cargo.toml b/crates/codestral/Cargo.toml index 0daaee8fb1420c76757ca898655e8dd1a5244d7e..801221d3128b8aa2d25175e086a741d5d85da626 100644 --- a/crates/codestral/Cargo.toml +++ b/crates/codestral/Cargo.toml @@ -22,6 +22,7 @@ log.workspace = true serde.workspace = true serde_json.workspace = true text.workspace = true +zed_credentials_provider.workspace = true zeta_prompt.workspace = true [dev-dependencies] diff --git a/crates/codestral/src/codestral.rs b/crates/codestral/src/codestral.rs index 3930e2e873a91618bfae456bc188bbd90ffa64b9..7685fa8f5b1eae9e98a621484602e199c2b76f96 100644 --- a/crates/codestral/src/codestral.rs +++ b/crates/codestral/src/codestral.rs @@ -48,9 +48,10 @@ pub fn codestral_api_key(cx: &App) -> Option> { } pub fn load_codestral_api_key(cx: &mut App) -> Task> { + let credentials_provider = zed_credentials_provider::global(cx); let api_url = codestral_api_url(cx); codestral_api_key_state(cx).update(cx, |key_state, cx| { - key_state.load_if_needed(api_url, |s| s, cx) + key_state.load_if_needed(api_url, |s| s, credentials_provider, cx) }) } diff --git a/crates/credentials_provider/Cargo.toml b/crates/credentials_provider/Cargo.toml index bf47bb24b12b90d54bc04f766efe06489c730b43..da83c0cd79a1b71bbb84746b3e893f33094783d6 100644 --- 
a/crates/credentials_provider/Cargo.toml +++ b/crates/credentials_provider/Cargo.toml @@ -13,9 +13,5 @@ path = "src/credentials_provider.rs" [dependencies] anyhow.workspace = true -futures.workspace = true gpui.workspace = true -paths.workspace = true -release_channel.workspace = true serde.workspace = true -serde_json.workspace = true diff --git a/crates/credentials_provider/src/credentials_provider.rs b/crates/credentials_provider/src/credentials_provider.rs index 249b8333e114223aa558cd33637fd103294a8f8d..b98e97673cc11272826af24c76e8a0a6a38b9211 100644 --- a/crates/credentials_provider/src/credentials_provider.rs +++ b/crates/credentials_provider/src/credentials_provider.rs @@ -1,26 +1,8 @@ -use std::collections::HashMap; use std::future::Future; -use std::path::PathBuf; use std::pin::Pin; -use std::sync::{Arc, LazyLock}; use anyhow::Result; -use futures::FutureExt as _; -use gpui::{App, AsyncApp}; -use release_channel::ReleaseChannel; - -/// An environment variable whose presence indicates that the system keychain -/// should be used in development. -/// -/// By default, running Zed in development uses the development credentials -/// provider. Setting this environment variable allows you to interact with the -/// system keychain (for instance, if you need to test something). -/// -/// Only works in development. Setting this environment variable in other -/// release channels is a no-op. -static ZED_DEVELOPMENT_USE_KEYCHAIN: LazyLock = LazyLock::new(|| { - std::env::var("ZED_DEVELOPMENT_USE_KEYCHAIN").is_ok_and(|value| !value.is_empty()) -}); +use gpui::AsyncApp; /// A provider for credentials. /// @@ -50,150 +32,3 @@ pub trait CredentialsProvider: Send + Sync { cx: &'a AsyncApp, ) -> Pin> + 'a>>; } - -impl dyn CredentialsProvider { - /// Returns the global [`CredentialsProvider`]. - pub fn global(cx: &App) -> Arc { - // The `CredentialsProvider` trait has `Send + Sync` bounds on it, so it - // seems like this is a false positive from Clippy. 
- #[allow(clippy::arc_with_non_send_sync)] - Self::new(cx) - } - - fn new(cx: &App) -> Arc { - let use_development_provider = match ReleaseChannel::try_global(cx) { - Some(ReleaseChannel::Dev) => { - // In development we default to using the development - // credentials provider to avoid getting spammed by relentless - // keychain access prompts. - // - // However, if the `ZED_DEVELOPMENT_USE_KEYCHAIN` environment - // variable is set, we will use the actual keychain. - !*ZED_DEVELOPMENT_USE_KEYCHAIN - } - Some(ReleaseChannel::Nightly | ReleaseChannel::Preview | ReleaseChannel::Stable) - | None => false, - }; - - if use_development_provider { - Arc::new(DevelopmentCredentialsProvider::new()) - } else { - Arc::new(KeychainCredentialsProvider) - } - } -} - -/// A credentials provider that stores credentials in the system keychain. -struct KeychainCredentialsProvider; - -impl CredentialsProvider for KeychainCredentialsProvider { - fn read_credentials<'a>( - &'a self, - url: &'a str, - cx: &'a AsyncApp, - ) -> Pin)>>> + 'a>> { - async move { cx.update(|cx| cx.read_credentials(url)).await }.boxed_local() - } - - fn write_credentials<'a>( - &'a self, - url: &'a str, - username: &'a str, - password: &'a [u8], - cx: &'a AsyncApp, - ) -> Pin> + 'a>> { - async move { - cx.update(move |cx| cx.write_credentials(url, username, password)) - .await - } - .boxed_local() - } - - fn delete_credentials<'a>( - &'a self, - url: &'a str, - cx: &'a AsyncApp, - ) -> Pin> + 'a>> { - async move { cx.update(move |cx| cx.delete_credentials(url)).await }.boxed_local() - } -} - -/// A credentials provider that stores credentials in a local file. -/// -/// This MUST only be used in development, as this is not a secure way of storing -/// credentials on user machines. -/// -/// Its existence is purely to work around the annoyance of having to constantly -/// re-allow access to the system keychain when developing Zed. 
-struct DevelopmentCredentialsProvider { - path: PathBuf, -} - -impl DevelopmentCredentialsProvider { - fn new() -> Self { - let path = paths::config_dir().join("development_credentials"); - - Self { path } - } - - fn load_credentials(&self) -> Result)>> { - let json = std::fs::read(&self.path)?; - let credentials: HashMap)> = serde_json::from_slice(&json)?; - - Ok(credentials) - } - - fn save_credentials(&self, credentials: &HashMap)>) -> Result<()> { - let json = serde_json::to_string(credentials)?; - std::fs::write(&self.path, json)?; - - Ok(()) - } -} - -impl CredentialsProvider for DevelopmentCredentialsProvider { - fn read_credentials<'a>( - &'a self, - url: &'a str, - _cx: &'a AsyncApp, - ) -> Pin)>>> + 'a>> { - async move { - Ok(self - .load_credentials() - .unwrap_or_default() - .get(url) - .cloned()) - } - .boxed_local() - } - - fn write_credentials<'a>( - &'a self, - url: &'a str, - username: &'a str, - password: &'a [u8], - _cx: &'a AsyncApp, - ) -> Pin> + 'a>> { - async move { - let mut credentials = self.load_credentials().unwrap_or_default(); - credentials.insert(url.to_string(), (username.to_string(), password.to_vec())); - - self.save_credentials(&credentials) - } - .boxed_local() - } - - fn delete_credentials<'a>( - &'a self, - url: &'a str, - _cx: &'a AsyncApp, - ) -> Pin> + 'a>> { - async move { - let mut credentials = self.load_credentials()?; - credentials.remove(url); - - self.save_credentials(&credentials) - } - .boxed_local() - } -} diff --git a/crates/edit_prediction/Cargo.toml b/crates/edit_prediction/Cargo.toml index 75a589dea8f9c7fefe7bf13400cbdde54bf90bf1..eabb1641fd4fbec7b2f8ef0ba399a8fe9600dfa3 100644 --- a/crates/edit_prediction/Cargo.toml +++ b/crates/edit_prediction/Cargo.toml @@ -26,6 +26,7 @@ cloud_llm_client.workspace = true collections.workspace = true copilot.workspace = true copilot_ui.workspace = true +credentials_provider.workspace = true db.workspace = true edit_prediction_types.workspace = true 
edit_prediction_context.workspace = true @@ -65,6 +66,7 @@ uuid.workspace = true workspace.workspace = true worktree.workspace = true zed_actions.workspace = true +zed_credentials_provider.workspace = true zeta_prompt.workspace = true zstd.workspace = true diff --git a/crates/edit_prediction/src/capture_example.rs b/crates/edit_prediction/src/capture_example.rs index 5eb422246775c4409f7f15e3a672a2d407386acc..9463456132ce391b54aca8327cb6f900d81481d6 100644 --- a/crates/edit_prediction/src/capture_example.rs +++ b/crates/edit_prediction/src/capture_example.rs @@ -258,6 +258,7 @@ fn generate_timestamp_name() -> String { mod tests { use super::*; use crate::EditPredictionStore; + use client::RefreshLlmTokenListener; use client::{Client, UserStore}; use clock::FakeSystemClock; use gpui::{AppContext as _, TestAppContext, http_client::FakeHttpClient}; @@ -548,7 +549,8 @@ mod tests { let http_client = FakeHttpClient::with_404_response(); let client = Client::new(Arc::new(FakeSystemClock::new()), http_client, cx); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); - language_model::init(user_store.clone(), client.clone(), cx); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); EditPredictionStore::global(&client, &user_store, cx); }) } diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index 61690c470829ca4bb16a6af9f1df2ea6e7cc6023..280427df006b510e1854ffb40cd7f995fcd9fdc6 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -1,5 +1,5 @@ use anyhow::Result; -use client::{Client, EditPredictionUsage, UserStore}; +use client::{Client, EditPredictionUsage, NeedsLlmTokenRefresh, UserStore, global_llm_token}; use cloud_api_types::{OrganizationId, SubmitEditPredictionFeedbackBody}; use cloud_llm_client::predict_edits_v3::{ PredictEditsV3Request, PredictEditsV3Response, RawCompletionRequest, 
RawCompletionResponse, @@ -11,6 +11,7 @@ use cloud_llm_client::{ }; use collections::{HashMap, HashSet}; use copilot::{Copilot, Reinstall, SignIn, SignOut}; +use credentials_provider::CredentialsProvider; use db::kvp::{Dismissable, KeyValueStore}; use edit_prediction_context::{RelatedExcerptStore, RelatedExcerptStoreEvent, RelatedFile}; use feature_flags::{FeatureFlag, FeatureFlagAppExt as _}; @@ -30,7 +31,7 @@ use heapless::Vec as ArrayVec; use language::language_settings::all_language_settings; use language::{Anchor, Buffer, File, Point, TextBufferSnapshot, ToOffset, ToPoint}; use language::{BufferSnapshot, OffsetRangeExt}; -use language_model::{LlmApiToken, NeedsLlmTokenRefresh}; +use language_model::LlmApiToken; use project::{DisableAiSettings, Project, ProjectPath, WorktreeId}; use release_channel::AppVersion; use semver::Version; @@ -150,6 +151,7 @@ pub struct EditPredictionStore { rated_predictions: HashSet, #[cfg(test)] settled_event_callback: Option>, + credentials_provider: Arc, } pub(crate) struct EditPredictionRejectionPayload { @@ -746,7 +748,7 @@ impl EditPredictionStore { pub fn new(client: Arc, user_store: Entity, cx: &mut Context) -> Self { let data_collection_choice = Self::load_data_collection_choice(cx); - let llm_token = LlmApiToken::global(cx); + let llm_token = global_llm_token(cx); let (reject_tx, reject_rx) = mpsc::unbounded(); cx.background_spawn({ @@ -787,6 +789,8 @@ impl EditPredictionStore { .log_err(); }); + let credentials_provider = zed_credentials_provider::global(cx); + let this = Self { projects: HashMap::default(), client, @@ -807,6 +811,8 @@ impl EditPredictionStore { shown_predictions: Default::default(), #[cfg(test)] settled_event_callback: None, + + credentials_provider, }; this @@ -871,7 +877,9 @@ impl EditPredictionStore { let experiments = cx .background_spawn(async move { let http_client = client.http_client(); - let token = llm_token.acquire(&client, organization_id).await?; + let token = client + 
.acquire_llm_token(&llm_token, organization_id.clone()) + .await?; let url = http_client.build_zed_llm_url("/edit_prediction_experiments", &[])?; let request = http_client::Request::builder() .method(Method::GET) @@ -2315,7 +2323,10 @@ impl EditPredictionStore { zeta::request_prediction_with_zeta(self, inputs, capture_data, cx) } EditPredictionModel::Fim { format } => fim::request_prediction(inputs, format, cx), - EditPredictionModel::Mercury => self.mercury.request_prediction(inputs, cx), + EditPredictionModel::Mercury => { + self.mercury + .request_prediction(inputs, self.credentials_provider.clone(), cx) + } }; cx.spawn(async move |this, cx| { @@ -2536,12 +2547,15 @@ impl EditPredictionStore { Res: DeserializeOwned, { let http_client = client.http_client(); - let mut token = if require_auth { - Some(llm_token.acquire(&client, organization_id.clone()).await?) + Some( + client + .acquire_llm_token(&llm_token, organization_id.clone()) + .await?, + ) } else { - llm_token - .acquire(&client, organization_id.clone()) + client + .acquire_llm_token(&llm_token, organization_id.clone()) .await .ok() }; @@ -2585,7 +2599,11 @@ impl EditPredictionStore { return Ok((serde_json::from_slice(&body)?, usage)); } else if !did_retry && token.is_some() && response.needs_llm_token_refresh() { did_retry = true; - token = Some(llm_token.refresh(&client, organization_id.clone()).await?); + token = Some( + client + .refresh_llm_token(&llm_token, organization_id.clone()) + .await?, + ); } else { let mut body = String::new(); response.body_mut().read_to_string(&mut body).await?; diff --git a/crates/edit_prediction/src/edit_prediction_tests.rs b/crates/edit_prediction/src/edit_prediction_tests.rs index 6fe61338e764a40aec9cf6f3191f1191bafe9200..1ba8b27aa785024a47a09c3299a1f3786a028ccf 100644 --- a/crates/edit_prediction/src/edit_prediction_tests.rs +++ b/crates/edit_prediction/src/edit_prediction_tests.rs @@ -1,6 +1,6 @@ use super::*; use crate::udiff::apply_diff_to_string; -use 
client::{UserStore, test::FakeServer}; +use client::{RefreshLlmTokenListener, UserStore, test::FakeServer}; use clock::FakeSystemClock; use clock::ReplicaId; use cloud_api_types::{CreateLlmTokenResponse, LlmToken}; @@ -23,7 +23,7 @@ use language::{ Anchor, Buffer, Capability, CursorShape, Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSeverity, Operation, Point, Selection, SelectionGoal, }; -use language_model::RefreshLlmTokenListener; + use lsp::LanguageServerId; use parking_lot::Mutex; use pretty_assertions::{assert_eq, assert_matches}; @@ -2439,7 +2439,8 @@ fn init_test_with_fake_client( client.cloud_client().set_credentials(1, "test".into()); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); - language_model::init(user_store.clone(), client.clone(), cx); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); let ep_store = EditPredictionStore::global(&client, &user_store, cx); ( @@ -2891,7 +2892,7 @@ async fn test_unauthenticated_without_custom_url_blocks_prediction_impl(cx: &mut cx.update(|cx| client::Client::new(Arc::new(FakeSystemClock::new()), http_client, cx)); let user_store = cx.update(|cx| cx.new(|cx| client::UserStore::new(client.clone(), cx))); cx.update(|cx| { - language_model::RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); }); let ep_store = cx.new(|cx| EditPredictionStore::new(client, project.read(cx).user_store(), cx)); diff --git a/crates/edit_prediction/src/mercury.rs b/crates/edit_prediction/src/mercury.rs index df47a38062344512a784c6d2feb563e9848afb27..155fd449904687081da0a9eae3d4731863f02254 100644 --- a/crates/edit_prediction/src/mercury.rs +++ b/crates/edit_prediction/src/mercury.rs @@ -5,6 +5,7 @@ use crate::{ }; use anyhow::{Context as _, Result}; use cloud_llm_client::EditPredictionRejectReason; +use credentials_provider::CredentialsProvider; use 
futures::AsyncReadExt as _; use gpui::{ App, AppContext as _, Context, Entity, Global, SharedString, Task, @@ -51,10 +52,11 @@ impl Mercury { debug_tx, .. }: EditPredictionModelInput, + credentials_provider: Arc, cx: &mut Context, ) -> Task>> { self.api_token.update(cx, |key_state, cx| { - _ = key_state.load_if_needed(MERCURY_CREDENTIALS_URL, |s| s, cx); + _ = key_state.load_if_needed(MERCURY_CREDENTIALS_URL, |s| s, credentials_provider, cx); }); let Some(api_token) = self.api_token.read(cx).key(&MERCURY_CREDENTIALS_URL) else { return Task::ready(Ok(None)); @@ -387,8 +389,9 @@ pub fn mercury_api_token(cx: &mut App) -> Entity { } pub fn load_mercury_api_token(cx: &mut App) -> Task> { + let credentials_provider = zed_credentials_provider::global(cx); mercury_api_token(cx).update(cx, |key_state, cx| { - key_state.load_if_needed(MERCURY_CREDENTIALS_URL, |s| s, cx) + key_state.load_if_needed(MERCURY_CREDENTIALS_URL, |s| s, credentials_provider, cx) }) } diff --git a/crates/edit_prediction/src/open_ai_compatible.rs b/crates/edit_prediction/src/open_ai_compatible.rs index ca378ba1fd0bc9bdbb3e85c7610e1b94c1be388f..9a11164822857d78c2fe0d9245faeb5d4f7400a0 100644 --- a/crates/edit_prediction/src/open_ai_compatible.rs +++ b/crates/edit_prediction/src/open_ai_compatible.rs @@ -42,9 +42,10 @@ pub fn open_ai_compatible_api_token(cx: &mut App) -> Entity { pub fn load_open_ai_compatible_api_token( cx: &mut App, ) -> Task> { + let credentials_provider = zed_credentials_provider::global(cx); let api_url = open_ai_compatible_api_url(cx); open_ai_compatible_api_token(cx).update(cx, |key_state, cx| { - key_state.load_if_needed(api_url, |s| s, cx) + key_state.load_if_needed(api_url, |s| s, credentials_provider, cx) }) } diff --git a/crates/edit_prediction_cli/src/headless.rs b/crates/edit_prediction_cli/src/headless.rs index 3a204a7052f8a41d6e7c2c49860b62f588358644..48b7381020f48d868d9f6413ef343b30718e5be6 100644 --- a/crates/edit_prediction_cli/src/headless.rs +++ 
b/crates/edit_prediction_cli/src/headless.rs @@ -1,4 +1,4 @@ -use client::{Client, ProxySettings, UserStore}; +use client::{Client, ProxySettings, RefreshLlmTokenListener, UserStore}; use db::AppDatabase; use extension::ExtensionHostProxy; use fs::RealFs; @@ -109,7 +109,8 @@ pub fn init(cx: &mut App) -> EpAppState { debug_adapter_extension::init(extension_host_proxy.clone(), cx); language_extension::init(LspAccess::Noop, extension_host_proxy, languages.clone()); - language_model::init(user_store.clone(), client.clone(), cx); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); language_models::init(user_store.clone(), client.clone(), cx); languages::init(languages.clone(), fs.clone(), node_runtime.clone(), cx); prompt_store::init(cx); diff --git a/crates/env_var/Cargo.toml b/crates/env_var/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..2cbbd08c7833d3e57a09766d42ffffe35c620a93 --- /dev/null +++ b/crates/env_var/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "env_var" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/env_var.rs" + +[dependencies] +gpui.workspace = true diff --git a/crates/env_var/LICENSE-GPL b/crates/env_var/LICENSE-GPL new file mode 120000 index 0000000000000000000000000000000000000000..89e542f750cd3860a0598eff0dc34b56d7336dc4 --- /dev/null +++ b/crates/env_var/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/env_var/src/env_var.rs b/crates/env_var/src/env_var.rs new file mode 100644 index 0000000000000000000000000000000000000000..79f671e0147ebfaad4ab76a123cc477dc7e55cb7 --- /dev/null +++ b/crates/env_var/src/env_var.rs @@ -0,0 +1,40 @@ +use gpui::SharedString; + +#[derive(Clone)] +pub struct EnvVar { + pub name: SharedString, + /// Value of the environment variable. Also `None` when set to an empty string. 
+ pub value: Option, +} + +impl EnvVar { + pub fn new(name: SharedString) -> Self { + let value = std::env::var(name.as_str()).ok(); + if value.as_ref().is_some_and(|v| v.is_empty()) { + Self { name, value: None } + } else { + Self { name, value } + } + } + + pub fn or(self, other: EnvVar) -> EnvVar { + if self.value.is_some() { self } else { other } + } +} + +/// Creates a `LazyLock` expression for use in a `static` declaration. +#[macro_export] +macro_rules! env_var { + ($name:expr) => { + ::std::sync::LazyLock::new(|| $crate::EnvVar::new(($name).into())) + }; +} + +/// Generates a `LazyLock` expression for use in a `static` declaration. Checks if the +/// environment variable exists and is non-empty. +#[macro_export] +macro_rules! bool_env_var { + ($name:expr) => { + ::std::sync::LazyLock::new(|| $crate::EnvVar::new(($name).into()).value.is_some()) + }; +} diff --git a/crates/eval_cli/src/headless.rs b/crates/eval_cli/src/headless.rs index 72feaacbae270224240f1da9e6e6c1008ba97c84..0ddd99e8f8abd9dbd73e1d7461526f3e7cb24f11 100644 --- a/crates/eval_cli/src/headless.rs +++ b/crates/eval_cli/src/headless.rs @@ -1,7 +1,7 @@ use std::path::PathBuf; use std::sync::Arc; -use client::{Client, ProxySettings, UserStore}; +use client::{Client, ProxySettings, RefreshLlmTokenListener, UserStore}; use db::AppDatabase; use extension::ExtensionHostProxy; use fs::RealFs; @@ -108,7 +108,8 @@ pub fn init(cx: &mut App) -> Arc { let extension_host_proxy = ExtensionHostProxy::global(cx); debug_adapter_extension::init(extension_host_proxy.clone(), cx); language_extension::init(LspAccess::Noop, extension_host_proxy, languages.clone()); - language_model::init(user_store.clone(), client.clone(), cx); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); language_models::init(user_store.clone(), client.clone(), cx); languages::init(languages.clone(), fs.clone(), node_runtime.clone(), cx); prompt_store::init(cx); diff --git 
a/crates/language_model/Cargo.toml b/crates/language_model/Cargo.toml index 911100fc25b498ba5471c85d6177052495974665..4712d86dff6c44f9cdd8576a08349ccfa7d0ecca 100644 --- a/crates/language_model/Cargo.toml +++ b/crates/language_model/Cargo.toml @@ -20,11 +20,11 @@ anthropic = { workspace = true, features = ["schemars"] } anyhow.workspace = true credentials_provider.workspace = true base64.workspace = true -client.workspace = true cloud_api_client.workspace = true cloud_api_types.workspace = true cloud_llm_client.workspace = true collections.workspace = true +env_var.workspace = true futures.workspace = true gpui.workspace = true http_client.workspace = true @@ -40,7 +40,6 @@ serde_json.workspace = true smol.workspace = true thiserror.workspace = true util.workspace = true -zed_env_vars.workspace = true [dev-dependencies] gpui = { workspace = true, features = ["test-support"] } diff --git a/crates/language_model/src/api_key.rs b/crates/language_model/src/api_key.rs index 754fde069295d8799820020bef286b1a1a3c590c..4be5a64d3db6231c98b830a524d5e299faace457 100644 --- a/crates/language_model/src/api_key.rs +++ b/crates/language_model/src/api_key.rs @@ -1,5 +1,6 @@ use anyhow::{Result, anyhow}; use credentials_provider::CredentialsProvider; +use env_var::EnvVar; use futures::{FutureExt, future}; use gpui::{AsyncApp, Context, SharedString, Task}; use std::{ @@ -7,7 +8,6 @@ use std::{ sync::Arc, }; use util::ResultExt as _; -use zed_env_vars::EnvVar; use crate::AuthenticateError; @@ -101,6 +101,7 @@ impl ApiKeyState { url: SharedString, key: Option, get_this: impl Fn(&mut Ent) -> &mut Self + 'static, + provider: Arc, cx: &Context, ) -> Task> { if self.is_from_env_var() { @@ -108,18 +109,14 @@ impl ApiKeyState { "bug: attempted to store API key in system keychain when API key is from env var", ))); } - let credentials_provider = ::global(cx); cx.spawn(async move |ent, cx| { if let Some(key) = &key { - credentials_provider + provider .write_credentials(&url, "Bearer", 
key.as_bytes(), cx) .await .log_err(); } else { - credentials_provider - .delete_credentials(&url, cx) - .await - .log_err(); + provider.delete_credentials(&url, cx).await.log_err(); } ent.update(cx, |ent, cx| { let this = get_this(ent); @@ -144,12 +141,13 @@ impl ApiKeyState { &mut self, url: SharedString, get_this: impl Fn(&mut Ent) -> &mut Self + Clone + 'static, + provider: Arc, cx: &mut Context, ) { if url != self.url { if !self.is_from_env_var() { // loading will continue even though this result task is dropped - let _task = self.load_if_needed(url, get_this, cx); + let _task = self.load_if_needed(url, get_this, provider, cx); } } } @@ -163,6 +161,7 @@ impl ApiKeyState { &mut self, url: SharedString, get_this: impl Fn(&mut Ent) -> &mut Self + Clone + 'static, + provider: Arc, cx: &mut Context, ) -> Task> { if let LoadStatus::Loaded { .. } = &self.load_status @@ -185,7 +184,7 @@ impl ApiKeyState { let task = if let Some(load_task) = &self.load_task { load_task.clone() } else { - let load_task = Self::load(url.clone(), get_this.clone(), cx).shared(); + let load_task = Self::load(url.clone(), get_this.clone(), provider, cx).shared(); self.url = url; self.load_status = LoadStatus::NotPresent; self.load_task = Some(load_task.clone()); @@ -206,14 +205,13 @@ impl ApiKeyState { fn load( url: SharedString, get_this: impl Fn(&mut Ent) -> &mut Self + 'static, + provider: Arc, cx: &Context, ) -> Task<()> { - let credentials_provider = ::global(cx); cx.spawn({ async move |ent, cx| { let load_status = - ApiKey::load_from_system_keychain_impl(&url, credentials_provider.as_ref(), cx) - .await; + ApiKey::load_from_system_keychain_impl(&url, provider.as_ref(), cx).await; ent.update(cx, |ent, cx| { let this = get_this(ent); this.url = url; diff --git a/crates/language_model/src/language_model.rs b/crates/language_model/src/language_model.rs index ce71cee6bcaf4f7ea1e210cc3756bd3162715f55..3f309b7b1d4152c54324efaaf0ad3bdb7035eea4 100644 --- 
a/crates/language_model/src/language_model.rs +++ b/crates/language_model/src/language_model.rs @@ -11,12 +11,10 @@ pub mod tool_schema; pub mod fake_provider; use anyhow::{Result, anyhow}; -use client::Client; -use client::UserStore; use cloud_llm_client::CompletionRequestStatus; use futures::FutureExt; use futures::{StreamExt, future::BoxFuture, stream::BoxStream}; -use gpui::{AnyView, App, AsyncApp, Entity, SharedString, Task, Window}; +use gpui::{AnyView, App, AsyncApp, SharedString, Task, Window}; use http_client::{StatusCode, http}; use icons::IconName; use parking_lot::Mutex; @@ -36,15 +34,10 @@ pub use crate::registry::*; pub use crate::request::*; pub use crate::role::*; pub use crate::tool_schema::LanguageModelToolSchemaFormat; +pub use env_var::{EnvVar, env_var}; pub use provider::*; -pub use zed_env_vars::{EnvVar, env_var}; -pub fn init(user_store: Entity, client: Arc, cx: &mut App) { - init_settings(cx); - RefreshLlmTokenListener::register(client, user_store, cx); -} - -pub fn init_settings(cx: &mut App) { +pub fn init(cx: &mut App) { registry::init(cx); } diff --git a/crates/language_model/src/model/cloud_model.rs b/crates/language_model/src/model/cloud_model.rs index a1362d78292082522f4e883efe42b2ca1e0a0300..db926aab1f70a46a4e70b1b67c2c9e4c4f465c2c 100644 --- a/crates/language_model/src/model/cloud_model.rs +++ b/crates/language_model/src/model/cloud_model.rs @@ -1,16 +1,9 @@ use std::fmt; use std::sync::Arc; -use anyhow::{Context as _, Result}; -use client::Client; -use client::UserStore; use cloud_api_client::ClientApiError; +use cloud_api_client::CloudApiClient; use cloud_api_types::OrganizationId; -use cloud_api_types::websocket_protocol::MessageToClient; -use cloud_llm_client::{EXPIRED_LLM_TOKEN_HEADER_NAME, OUTDATED_LLM_TOKEN_HEADER_NAME}; -use gpui::{ - App, AppContext as _, Context, Entity, EventEmitter, Global, ReadGlobal as _, Subscription, -}; use smol::lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard}; use thiserror::Error; @@ 
-30,18 +23,12 @@ impl fmt::Display for PaymentRequiredError { pub struct LlmApiToken(Arc>>); impl LlmApiToken { - pub fn global(cx: &App) -> Self { - RefreshLlmTokenListener::global(cx) - .read(cx) - .llm_api_token - .clone() - } - pub async fn acquire( &self, - client: &Arc, + client: &CloudApiClient, + system_id: Option, organization_id: Option, - ) -> Result { + ) -> Result { let lock = self.0.upgradable_read().await; if let Some(token) = lock.as_ref() { Ok(token.to_string()) @@ -49,6 +36,7 @@ impl LlmApiToken { Self::fetch( RwLockUpgradableReadGuard::upgrade(lock).await, client, + system_id, organization_id, ) .await @@ -57,10 +45,11 @@ impl LlmApiToken { pub async fn refresh( &self, - client: &Arc, + client: &CloudApiClient, + system_id: Option, organization_id: Option, - ) -> Result { - Self::fetch(self.0.write().await, client, organization_id).await + ) -> Result { + Self::fetch(self.0.write().await, client, system_id, organization_id).await } /// Clears the existing token before attempting to fetch a new one. @@ -69,28 +58,22 @@ impl LlmApiToken { /// leave a token for the wrong organization. 
pub async fn clear_and_refresh( &self, - client: &Arc, + client: &CloudApiClient, + system_id: Option, organization_id: Option, - ) -> Result { + ) -> Result { let mut lock = self.0.write().await; *lock = None; - Self::fetch(lock, client, organization_id).await + Self::fetch(lock, client, system_id, organization_id).await } async fn fetch( mut lock: RwLockWriteGuard<'_, Option>, - client: &Arc, + client: &CloudApiClient, + system_id: Option, organization_id: Option, - ) -> Result { - let system_id = client - .telemetry() - .system_id() - .map(|system_id| system_id.to_string()); - - let result = client - .cloud_client() - .create_llm_token(system_id, organization_id) - .await; + ) -> Result { + let result = client.create_llm_token(system_id, organization_id).await; match result { Ok(response) => { *lock = Some(response.token.0.clone()); @@ -98,112 +81,7 @@ impl LlmApiToken { } Err(err) => { *lock = None; - match err { - ClientApiError::Unauthorized => { - client.request_sign_out(); - Err(err).context("Failed to create LLM token") - } - ClientApiError::Other(err) => Err(err), - } - } - } - } -} - -pub trait NeedsLlmTokenRefresh { - /// Returns whether the LLM token needs to be refreshed. 
- fn needs_llm_token_refresh(&self) -> bool; -} - -impl NeedsLlmTokenRefresh for http_client::Response { - fn needs_llm_token_refresh(&self) -> bool { - self.headers().get(EXPIRED_LLM_TOKEN_HEADER_NAME).is_some() - || self.headers().get(OUTDATED_LLM_TOKEN_HEADER_NAME).is_some() - } -} - -enum TokenRefreshMode { - Refresh, - ClearAndRefresh, -} - -struct GlobalRefreshLlmTokenListener(Entity); - -impl Global for GlobalRefreshLlmTokenListener {} - -pub struct LlmTokenRefreshedEvent; - -pub struct RefreshLlmTokenListener { - client: Arc, - user_store: Entity, - llm_api_token: LlmApiToken, - _subscription: Subscription, -} - -impl EventEmitter for RefreshLlmTokenListener {} - -impl RefreshLlmTokenListener { - pub fn register(client: Arc, user_store: Entity, cx: &mut App) { - let listener = cx.new(|cx| RefreshLlmTokenListener::new(client, user_store, cx)); - cx.set_global(GlobalRefreshLlmTokenListener(listener)); - } - - pub fn global(cx: &App) -> Entity { - GlobalRefreshLlmTokenListener::global(cx).0.clone() - } - - fn new(client: Arc, user_store: Entity, cx: &mut Context) -> Self { - client.add_message_to_client_handler({ - let this = cx.weak_entity(); - move |message, cx| { - if let Some(this) = this.upgrade() { - Self::handle_refresh_llm_token(this, message, cx); - } - } - }); - - let subscription = cx.subscribe(&user_store, |this, _user_store, event, cx| { - if matches!(event, client::user::Event::OrganizationChanged) { - this.refresh(TokenRefreshMode::ClearAndRefresh, cx); - } - }); - - Self { - client, - user_store, - llm_api_token: LlmApiToken::default(), - _subscription: subscription, - } - } - - fn refresh(&self, mode: TokenRefreshMode, cx: &mut Context) { - let client = self.client.clone(); - let llm_api_token = self.llm_api_token.clone(); - let organization_id = self - .user_store - .read(cx) - .current_organization() - .map(|organization| organization.id.clone()); - cx.spawn(async move |this, cx| { - match mode { - TokenRefreshMode::Refresh => { - 
llm_api_token.refresh(&client, organization_id).await?; - } - TokenRefreshMode::ClearAndRefresh => { - llm_api_token - .clear_and_refresh(&client, organization_id) - .await?; - } - } - this.update(cx, |_this, cx| cx.emit(LlmTokenRefreshedEvent)) - }) - .detach_and_log_err(cx); - } - - fn handle_refresh_llm_token(this: Entity, message: &MessageToClient, cx: &mut App) { - match message { - MessageToClient::UserUpdated => { - this.update(cx, |this, cx| this.refresh(TokenRefreshMode::Refresh, cx)); + Err(err) } } } diff --git a/crates/language_models/src/language_models.rs b/crates/language_models/src/language_models.rs index 4db1db8fa6ce5afb9d77a6685bfc0861d0fb8885..3154db91a43d1381f5b3f122a724be249adeb79b 100644 --- a/crates/language_models/src/language_models.rs +++ b/crates/language_models/src/language_models.rs @@ -3,6 +3,7 @@ use std::sync::Arc; use ::settings::{Settings, SettingsStore}; use client::{Client, UserStore}; use collections::HashSet; +use credentials_provider::CredentialsProvider; use gpui::{App, Context, Entity}; use language_model::{LanguageModelProviderId, LanguageModelRegistry}; use provider::deepseek::DeepSeekLanguageModelProvider; @@ -31,9 +32,16 @@ use crate::provider::x_ai::XAiLanguageModelProvider; pub use crate::settings::*; pub fn init(user_store: Entity, client: Arc, cx: &mut App) { + let credentials_provider = client.credentials_provider(); let registry = LanguageModelRegistry::global(cx); registry.update(cx, |registry, cx| { - register_language_model_providers(registry, user_store, client.clone(), cx); + register_language_model_providers( + registry, + user_store, + client.clone(), + credentials_provider.clone(), + cx, + ); }); // Subscribe to extension store events to track LLM extension installations @@ -104,6 +112,7 @@ pub fn init(user_store: Entity, client: Arc, cx: &mut App) { &HashSet::default(), &openai_compatible_providers, client.clone(), + credentials_provider.clone(), cx, ); }); @@ -124,6 +133,7 @@ pub fn init(user_store: 
Entity, client: Arc, cx: &mut App) { &openai_compatible_providers, &openai_compatible_providers_new, client.clone(), + credentials_provider.clone(), cx, ); }); @@ -138,6 +148,7 @@ fn register_openai_compatible_providers( old: &HashSet>, new: &HashSet>, client: Arc, + credentials_provider: Arc, cx: &mut Context, ) { for provider_id in old { @@ -152,6 +163,7 @@ fn register_openai_compatible_providers( Arc::new(OpenAiCompatibleLanguageModelProvider::new( provider_id.clone(), client.http_client(), + credentials_provider.clone(), cx, )), cx, @@ -164,6 +176,7 @@ fn register_language_model_providers( registry: &mut LanguageModelRegistry, user_store: Entity, client: Arc, + credentials_provider: Arc, cx: &mut Context, ) { registry.register_provider( @@ -177,62 +190,105 @@ fn register_language_model_providers( registry.register_provider( Arc::new(AnthropicLanguageModelProvider::new( client.http_client(), + credentials_provider.clone(), cx, )), cx, ); registry.register_provider( - Arc::new(OpenAiLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(OpenAiLanguageModelProvider::new( + client.http_client(), + credentials_provider.clone(), + cx, + )), cx, ); registry.register_provider( - Arc::new(OllamaLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(OllamaLanguageModelProvider::new( + client.http_client(), + credentials_provider.clone(), + cx, + )), cx, ); registry.register_provider( - Arc::new(LmStudioLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(LmStudioLanguageModelProvider::new( + client.http_client(), + credentials_provider.clone(), + cx, + )), cx, ); registry.register_provider( - Arc::new(DeepSeekLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(DeepSeekLanguageModelProvider::new( + client.http_client(), + credentials_provider.clone(), + cx, + )), cx, ); registry.register_provider( - Arc::new(GoogleLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(GoogleLanguageModelProvider::new( + 
client.http_client(), + credentials_provider.clone(), + cx, + )), cx, ); registry.register_provider( - MistralLanguageModelProvider::global(client.http_client(), cx), + MistralLanguageModelProvider::global( + client.http_client(), + credentials_provider.clone(), + cx, + ), cx, ); registry.register_provider( - Arc::new(BedrockLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(BedrockLanguageModelProvider::new( + client.http_client(), + credentials_provider.clone(), + cx, + )), cx, ); registry.register_provider( Arc::new(OpenRouterLanguageModelProvider::new( client.http_client(), + credentials_provider.clone(), cx, )), cx, ); registry.register_provider( - Arc::new(VercelLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(VercelLanguageModelProvider::new( + client.http_client(), + credentials_provider.clone(), + cx, + )), cx, ); registry.register_provider( Arc::new(VercelAiGatewayLanguageModelProvider::new( client.http_client(), + credentials_provider.clone(), cx, )), cx, ); registry.register_provider( - Arc::new(XAiLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(XAiLanguageModelProvider::new( + client.http_client(), + credentials_provider.clone(), + cx, + )), cx, ); registry.register_provider( - Arc::new(OpenCodeLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(OpenCodeLanguageModelProvider::new( + client.http_client(), + credentials_provider, + cx, + )), cx, ); registry.register_provider(Arc::new(CopilotChatLanguageModelProvider::new(cx)), cx); diff --git a/crates/language_models/src/provider/anthropic.rs b/crates/language_models/src/provider/anthropic.rs index a98a0ce142dfdbaaaddc056ab378455a45147830..c1b8bc1a3bb1b602b67ae5563d8acc3b05a94d47 100644 --- a/crates/language_models/src/provider/anthropic.rs +++ b/crates/language_models/src/provider/anthropic.rs @@ -6,6 +6,7 @@ use anthropic::{ }; use anyhow::Result; use collections::{BTreeMap, HashMap}; +use credentials_provider::CredentialsProvider; use 
futures::{FutureExt, Stream, StreamExt, future::BoxFuture, stream::BoxStream}; use gpui::{AnyView, App, AsyncApp, Context, Entity, Task}; use http_client::HttpClient; @@ -51,6 +52,7 @@ static API_KEY_ENV_VAR: LazyLock = env_var!(API_KEY_ENV_VAR_NAME); pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, } impl State { @@ -59,30 +61,51 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = AnthropicLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = AnthropicLanguageModelProvider::api_url(cx); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } impl AnthropicLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::(|this: &mut State, cx| { + let credentials_provider = this.credentials_provider.clone(); let api_url = Self::api_url(cx); - this.api_key_state - .handle_url_change(api_url, |this| &mut this.api_key_state, cx); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.notify(); }) .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }); diff --git a/crates/language_models/src/provider/bedrock.rs b/crates/language_models/src/provider/bedrock.rs index 
f53f145dbd387aa948b977d854ba77f1cbe49ded..4320763e2c5c6de7f3fe9238d7a4991565c3bfcd 100644 --- a/crates/language_models/src/provider/bedrock.rs +++ b/crates/language_models/src/provider/bedrock.rs @@ -195,12 +195,13 @@ pub struct State { settings: Option, /// Whether credentials came from environment variables (only relevant for static credentials) credentials_from_env: bool, + credentials_provider: Arc, _subscription: Subscription, } impl State { fn reset_auth(&self, cx: &mut Context) -> Task> { - let credentials_provider = ::global(cx); + let credentials_provider = self.credentials_provider.clone(); cx.spawn(async move |this, cx| { credentials_provider .delete_credentials(AMAZON_AWS_URL, cx) @@ -220,7 +221,7 @@ impl State { cx: &mut Context, ) -> Task> { let auth = credentials.clone().into_auth(); - let credentials_provider = ::global(cx); + let credentials_provider = self.credentials_provider.clone(); cx.spawn(async move |this, cx| { credentials_provider .write_credentials( @@ -287,7 +288,7 @@ impl State { &self, cx: &mut Context, ) -> Task> { - let credentials_provider = ::global(cx); + let credentials_provider = self.credentials_provider.clone(); cx.spawn(async move |this, cx| { // Try environment variables first let (auth, from_env) = if let Some(bearer_token) = &ZED_BEDROCK_BEARER_TOKEN_VAR.value { @@ -400,11 +401,16 @@ pub struct BedrockLanguageModelProvider { } impl BedrockLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| State { auth: None, settings: Some(AllLanguageModelSettings::get_global(cx).bedrock.clone()), credentials_from_env: false, + credentials_provider, _subscription: cx.observe_global::(|_, cx| { cx.notify(); }), diff --git a/crates/language_models/src/provider/cloud.rs b/crates/language_models/src/provider/cloud.rs index 
f9372a4d7ea9c078c58f633cc58bd5597ef49212..29623cc998ad0fe933e9a29c45c651f7be010b07 100644 --- a/crates/language_models/src/provider/cloud.rs +++ b/crates/language_models/src/provider/cloud.rs @@ -1,7 +1,9 @@ use ai_onboarding::YoungAccountBanner; use anthropic::AnthropicModelMode; use anyhow::{Context as _, Result, anyhow}; -use client::{Client, UserStore, zed_urls}; +use client::{ + Client, NeedsLlmTokenRefresh, RefreshLlmTokenListener, UserStore, global_llm_token, zed_urls, +}; use cloud_api_types::{OrganizationId, Plan}; use cloud_llm_client::{ CLIENT_SUPPORTS_STATUS_MESSAGES_HEADER_NAME, CLIENT_SUPPORTS_STATUS_STREAM_ENDED_HEADER_NAME, @@ -24,10 +26,9 @@ use language_model::{ LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelEffortLevel, LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest, - LanguageModelToolChoice, LanguageModelToolSchemaFormat, LlmApiToken, NeedsLlmTokenRefresh, - OPEN_AI_PROVIDER_ID, OPEN_AI_PROVIDER_NAME, PaymentRequiredError, RateLimiter, - RefreshLlmTokenListener, X_AI_PROVIDER_ID, X_AI_PROVIDER_NAME, ZED_CLOUD_PROVIDER_ID, - ZED_CLOUD_PROVIDER_NAME, + LanguageModelToolChoice, LanguageModelToolSchemaFormat, LlmApiToken, OPEN_AI_PROVIDER_ID, + OPEN_AI_PROVIDER_NAME, PaymentRequiredError, RateLimiter, X_AI_PROVIDER_ID, X_AI_PROVIDER_NAME, + ZED_CLOUD_PROVIDER_ID, ZED_CLOUD_PROVIDER_NAME, }; use release_channel::AppVersion; use schemars::JsonSchema; @@ -111,7 +112,7 @@ impl State { cx: &mut Context, ) -> Self { let refresh_llm_token_listener = RefreshLlmTokenListener::global(cx); - let llm_api_token = LlmApiToken::global(cx); + let llm_api_token = global_llm_token(cx); Self { client: client.clone(), llm_api_token, @@ -226,7 +227,9 @@ impl State { organization_id: Option, ) -> Result { let http_client = &client.http_client(); - let token = llm_api_token.acquire(&client, organization_id).await?; + let token = 
client + .acquire_llm_token(&llm_api_token, organization_id) + .await?; let request = http_client::Request::builder() .method(Method::GET) @@ -414,8 +417,8 @@ impl CloudLanguageModel { ) -> Result { let http_client = &client.http_client(); - let mut token = llm_api_token - .acquire(&client, organization_id.clone()) + let mut token = client + .acquire_llm_token(&llm_api_token, organization_id.clone()) .await?; let mut refreshed_token = false; @@ -447,8 +450,8 @@ impl CloudLanguageModel { } if !refreshed_token && response.needs_llm_token_refresh() { - token = llm_api_token - .refresh(&client, organization_id.clone()) + token = client + .refresh_llm_token(&llm_api_token, organization_id.clone()) .await?; refreshed_token = true; continue; @@ -713,7 +716,9 @@ impl LanguageModel for CloudLanguageModel { into_google(request, model_id.clone(), GoogleModelMode::Default); async move { let http_client = &client.http_client(); - let token = llm_api_token.acquire(&client, organization_id).await?; + let token = client + .acquire_llm_token(&llm_api_token, organization_id) + .await?; let request_body = CountTokensBody { provider: cloud_llm_client::LanguageModelProvider::Google, diff --git a/crates/language_models/src/provider/deepseek.rs b/crates/language_models/src/provider/deepseek.rs index bd2469d865fd8421d6ad31208e6a4be413c0fe14..0cfb1af425c7cb0279d98fa124a589437f1bb1a1 100644 --- a/crates/language_models/src/provider/deepseek.rs +++ b/crates/language_models/src/provider/deepseek.rs @@ -1,5 +1,6 @@ use anyhow::{Result, anyhow}; use collections::{BTreeMap, HashMap}; +use credentials_provider::CredentialsProvider; use deepseek::DEEPSEEK_API_URL; use futures::Stream; @@ -49,6 +50,7 @@ pub struct DeepSeekLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, } impl State { @@ -57,30 +59,51 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = 
self.credentials_provider.clone(); let api_url = DeepSeekLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = DeepSeekLanguageModelProvider::api_url(cx); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } impl DeepSeekLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::(|this: &mut State, cx| { + let credentials_provider = this.credentials_provider.clone(); let api_url = Self::api_url(cx); - this.api_key_state - .handle_url_change(api_url, |this| &mut this.api_key_state, cx); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.notify(); }) .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }); diff --git a/crates/language_models/src/provider/google.rs b/crates/language_models/src/provider/google.rs index 8fdfb514ac6e872bd24968d33f2c1169401d5a9c..244f7835a85ff67f0c4826321910ea13516371cb 100644 --- a/crates/language_models/src/provider/google.rs +++ b/crates/language_models/src/provider/google.rs @@ -1,5 +1,6 @@ use anyhow::{Context as _, Result}; use collections::BTreeMap; +use credentials_provider::CredentialsProvider; use futures::{FutureExt, Stream, StreamExt, future::BoxFuture}; use google_ai::{ FunctionDeclaration, GenerateContentResponse, GoogleModelMode, Part, SystemInstruction, @@ -60,6 
+61,7 @@ pub struct GoogleLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, } const GEMINI_API_KEY_VAR_NAME: &str = "GEMINI_API_KEY"; @@ -76,30 +78,51 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = GoogleLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = GoogleLanguageModelProvider::api_url(cx); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } impl GoogleLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::(|this: &mut State, cx| { + let credentials_provider = this.credentials_provider.clone(); let api_url = Self::api_url(cx); - this.api_key_state - .handle_url_change(api_url, |this| &mut this.api_key_state, cx); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.notify(); }) .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }); diff --git a/crates/language_models/src/provider/lmstudio.rs b/crates/language_models/src/provider/lmstudio.rs index 6c8d3c6e1c50185a4b09e9afc80c688f4c8d1381..0d60fef16791087e35bac7d846b2ec99821d5470 100644 --- a/crates/language_models/src/provider/lmstudio.rs +++ 
b/crates/language_models/src/provider/lmstudio.rs @@ -1,5 +1,6 @@ use anyhow::{Result, anyhow}; use collections::HashMap; +use credentials_provider::CredentialsProvider; use fs::Fs; use futures::Stream; use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream}; @@ -52,6 +53,7 @@ pub struct LmStudioLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, http_client: Arc, available_models: Vec, fetch_model_task: Option>>, @@ -64,10 +66,15 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = LmStudioLanguageModelProvider::api_url(cx).into(); - let task = self - .api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx); + let task = self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); self.restart_fetch_models_task(cx); task } @@ -114,10 +121,14 @@ impl State { } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = LmStudioLanguageModelProvider::api_url(cx).into(); - let _task = self - .api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx); + let _task = self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); if self.is_authenticated() { return Task::ready(Ok(())); @@ -152,16 +163,29 @@ impl State { } impl LmStudioLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let this = Self { http_client: http_client.clone(), state: cx.new(|cx| { let subscription = cx.observe_global::({ let mut settings = AllLanguageModelSettings::get_global(cx).lmstudio.clone(); move |this: &mut State, cx| { - let new_settings = 
&AllLanguageModelSettings::get_global(cx).lmstudio; - if &settings != new_settings { - settings = new_settings.clone(); + let new_settings = + AllLanguageModelSettings::get_global(cx).lmstudio.clone(); + if settings != new_settings { + let credentials_provider = this.credentials_provider.clone(); + let api_url = Self::api_url(cx).into(); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); + settings = new_settings; this.restart_fetch_models_task(cx); cx.notify(); } @@ -173,6 +197,7 @@ impl LmStudioLanguageModelProvider { Self::api_url(cx).into(), (*API_KEY_ENV_VAR).clone(), ), + credentials_provider, http_client, available_models: Default::default(), fetch_model_task: None, diff --git a/crates/language_models/src/provider/mistral.rs b/crates/language_models/src/provider/mistral.rs index 72f0cae2993da4efb3e19cb19ec42b186290920d..4cd1375fe50cd792a3a7bc8c85ba7b5b5af9520a 100644 --- a/crates/language_models/src/provider/mistral.rs +++ b/crates/language_models/src/provider/mistral.rs @@ -1,5 +1,6 @@ use anyhow::{Result, anyhow}; use collections::BTreeMap; +use credentials_provider::CredentialsProvider; use futures::{FutureExt, Stream, StreamExt, future::BoxFuture, stream::BoxStream}; use gpui::{AnyView, App, AsyncApp, Context, Entity, Global, SharedString, Task, Window}; @@ -43,6 +44,7 @@ pub struct MistralLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, } impl State { @@ -51,15 +53,26 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = MistralLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let 
credentials_provider = self.credentials_provider.clone(); let api_url = MistralLanguageModelProvider::api_url(cx); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } @@ -73,20 +86,30 @@ impl MistralLanguageModelProvider { .map(|this| &this.0) } - pub fn global(http_client: Arc, cx: &mut App) -> Arc { + pub fn global( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Arc { if let Some(this) = cx.try_global::() { return this.0.clone(); } let state = cx.new(|cx| { cx.observe_global::(|this: &mut State, cx| { + let credentials_provider = this.credentials_provider.clone(); let api_url = Self::api_url(cx); - this.api_key_state - .handle_url_change(api_url, |this| &mut this.api_key_state, cx); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.notify(); }) .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }); diff --git a/crates/language_models/src/provider/ollama.rs b/crates/language_models/src/provider/ollama.rs index 551fcd55358c11bdf64bf2f27b32fa9a7f702252..49c326683a225bf73f604a584307ea1316a710c4 100644 --- a/crates/language_models/src/provider/ollama.rs +++ b/crates/language_models/src/provider/ollama.rs @@ -1,4 +1,5 @@ use anyhow::{Result, anyhow}; +use credentials_provider::CredentialsProvider; use fs::Fs; use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream}; use futures::{Stream, TryFutureExt, stream}; @@ -54,6 +55,7 @@ pub struct OllamaLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, http_client: Arc, fetched_models: Vec, fetch_model_task: Option>>, @@ -65,10 +67,15 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let 
credentials_provider = self.credentials_provider.clone(); let api_url = OllamaLanguageModelProvider::api_url(cx); - let task = self - .api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx); + let task = self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); self.fetched_models.clear(); cx.spawn(async move |this, cx| { @@ -80,10 +87,14 @@ impl State { } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = OllamaLanguageModelProvider::api_url(cx); - let task = self - .api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx); + let task = self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); // Always try to fetch models - if no API key is needed (local Ollama), it will work // If API key is needed and provided, it will work @@ -157,7 +168,11 @@ impl State { } impl OllamaLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let this = Self { http_client: http_client.clone(), state: cx.new(|cx| { @@ -170,6 +185,14 @@ impl OllamaLanguageModelProvider { let url_changed = last_settings.api_url != current_settings.api_url; last_settings = current_settings.clone(); if url_changed { + let credentials_provider = this.credentials_provider.clone(); + let api_url = Self::api_url(cx); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); this.fetched_models.clear(); this.authenticate(cx).detach(); } @@ -184,6 +207,7 @@ impl OllamaLanguageModelProvider { fetched_models: Default::default(), fetch_model_task: None, api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }), }; diff --git 
a/crates/language_models/src/provider/open_ai.rs b/crates/language_models/src/provider/open_ai.rs index 9289c66b2a4c9213826d2d027555511c9746d00e..6a2313487f4a1922cdc2aa20d23ede01c4b7d158 100644 --- a/crates/language_models/src/provider/open_ai.rs +++ b/crates/language_models/src/provider/open_ai.rs @@ -1,5 +1,6 @@ use anyhow::{Result, anyhow}; use collections::{BTreeMap, HashMap}; +use credentials_provider::CredentialsProvider; use futures::Stream; use futures::{FutureExt, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; @@ -55,6 +56,7 @@ pub struct OpenAiLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, } impl State { @@ -63,30 +65,51 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = OpenAiLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = OpenAiLanguageModelProvider::api_url(cx); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } impl OpenAiLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::(|this: &mut State, cx| { + let credentials_provider = this.credentials_provider.clone(); let api_url = Self::api_url(cx); - this.api_key_state - .handle_url_change(api_url, |this| &mut this.api_key_state, cx); + 
this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.notify(); }) .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }); diff --git a/crates/language_models/src/provider/open_ai_compatible.rs b/crates/language_models/src/provider/open_ai_compatible.rs index 87a08097782198238a5d2467af32cc66b3183664..9f63a1e1a039998c275637f3831b51474c8049ac 100644 --- a/crates/language_models/src/provider/open_ai_compatible.rs +++ b/crates/language_models/src/provider/open_ai_compatible.rs @@ -1,5 +1,6 @@ use anyhow::Result; use convert_case::{Case, Casing}; +use credentials_provider::CredentialsProvider; use futures::{FutureExt, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; use http_client::HttpClient; @@ -44,6 +45,7 @@ pub struct State { id: Arc, api_key_state: ApiKeyState, settings: OpenAiCompatibleSettings, + credentials_provider: Arc, } impl State { @@ -52,20 +54,36 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = SharedString::new(self.settings.api_url.as_str()); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = SharedString::new(self.settings.api_url.clone()); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } impl OpenAiCompatibleLanguageModelProvider { - pub fn new(id: Arc, http_client: Arc, cx: &mut App) -> Self { + pub 
fn new( + id: Arc, + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { fn resolve_settings<'a>(id: &'a str, cx: &'a App) -> Option<&'a OpenAiCompatibleSettings> { crate::AllLanguageModelSettings::get_global(cx) .openai_compatible @@ -79,10 +97,12 @@ impl OpenAiCompatibleLanguageModelProvider { return; }; if &this.settings != &settings { + let credentials_provider = this.credentials_provider.clone(); let api_url = SharedString::new(settings.api_url.as_str()); this.api_key_state.handle_url_change( api_url, |this| &mut this.api_key_state, + credentials_provider, cx, ); this.settings = settings; @@ -98,6 +118,7 @@ impl OpenAiCompatibleLanguageModelProvider { EnvVar::new(api_key_env_var_name), ), settings, + credentials_provider, } }); diff --git a/crates/language_models/src/provider/open_router.rs b/crates/language_models/src/provider/open_router.rs index a4a679be73c0276351a6524ad7e8fc40e2c26860..09c8eb768d12c61ed1dc86a1251ad52114be6162 100644 --- a/crates/language_models/src/provider/open_router.rs +++ b/crates/language_models/src/provider/open_router.rs @@ -1,5 +1,6 @@ use anyhow::Result; use collections::HashMap; +use credentials_provider::CredentialsProvider; use futures::{FutureExt, Stream, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task}; use http_client::HttpClient; @@ -42,6 +43,7 @@ pub struct OpenRouterLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, http_client: Arc, available_models: Vec, fetch_models_task: Option>>, @@ -53,16 +55,26 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = OpenRouterLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + 
credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = OpenRouterLanguageModelProvider::api_url(cx); - let task = self - .api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx); + let task = self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.spawn(async move |this, cx| { let result = task.await; @@ -114,7 +126,11 @@ impl State { } impl OpenRouterLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::({ let mut last_settings = OpenRouterLanguageModelProvider::settings(cx).clone(); @@ -131,6 +147,7 @@ impl OpenRouterLanguageModelProvider { .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, http_client: http_client.clone(), available_models: Vec::new(), fetch_models_task: None, diff --git a/crates/language_models/src/provider/opencode.rs b/crates/language_models/src/provider/opencode.rs index f3953f3cafa4a1f59ff86004628c0a4022f6257e..aae3a552544ebf2cc59255da954d84cf7b78c7da 100644 --- a/crates/language_models/src/provider/opencode.rs +++ b/crates/language_models/src/provider/opencode.rs @@ -1,5 +1,6 @@ use anyhow::Result; use collections::BTreeMap; +use credentials_provider::CredentialsProvider; use futures::{FutureExt, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; use http_client::HttpClient; @@ -43,6 +44,7 @@ pub struct OpenCodeLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, } impl State { @@ -51,30 +53,51 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = 
self.credentials_provider.clone(); let api_url = OpenCodeLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = OpenCodeLanguageModelProvider::api_url(cx); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } impl OpenCodeLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::(|this: &mut State, cx| { + let credentials_provider = this.credentials_provider.clone(); let api_url = Self::api_url(cx); - this.api_key_state - .handle_url_change(api_url, |this| &mut this.api_key_state, cx); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.notify(); }) .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }); diff --git a/crates/language_models/src/provider/vercel.rs b/crates/language_models/src/provider/vercel.rs index b71da5b7db05710ee30115ab54379c9ee4e4c750..cedbc9c3cb988375b90864ceb23a3b14fc50abdd 100644 --- a/crates/language_models/src/provider/vercel.rs +++ b/crates/language_models/src/provider/vercel.rs @@ -1,5 +1,6 @@ use anyhow::Result; use collections::BTreeMap; +use credentials_provider::CredentialsProvider; use futures::{FutureExt, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; use http_client::HttpClient; @@ -38,6 +39,7 @@ pub struct 
VercelLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, } impl State { @@ -46,30 +48,51 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = VercelLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = VercelLanguageModelProvider::api_url(cx); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } impl VercelLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::(|this: &mut State, cx| { + let credentials_provider = this.credentials_provider.clone(); let api_url = Self::api_url(cx); - this.api_key_state - .handle_url_change(api_url, |this| &mut this.api_key_state, cx); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.notify(); }) .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }); diff --git a/crates/language_models/src/provider/vercel_ai_gateway.rs b/crates/language_models/src/provider/vercel_ai_gateway.rs index 78f900de0c94fd3bbbff3962e92d1a8cb9f3e118..66767edd809531b4b020263654922d742a1a04be 100644 --- a/crates/language_models/src/provider/vercel_ai_gateway.rs +++ b/crates/language_models/src/provider/vercel_ai_gateway.rs @@ -1,5 +1,6 
@@ use anyhow::Result; use collections::BTreeMap; +use credentials_provider::CredentialsProvider; use futures::{AsyncReadExt, FutureExt, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest, http}; @@ -41,6 +42,7 @@ pub struct VercelAiGatewayLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, http_client: Arc, available_models: Vec, fetch_models_task: Option>>, @@ -52,16 +54,26 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = VercelAiGatewayLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = VercelAiGatewayLanguageModelProvider::api_url(cx); - let task = self - .api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx); + let task = self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.spawn(async move |this, cx| { let result = task.await; @@ -100,7 +112,11 @@ impl State { } impl VercelAiGatewayLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::({ let mut last_settings = VercelAiGatewayLanguageModelProvider::settings(cx).clone(); @@ -116,6 +132,7 @@ impl VercelAiGatewayLanguageModelProvider { .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, 
http_client: http_client.clone(), available_models: Vec::new(), fetch_models_task: None, diff --git a/crates/language_models/src/provider/x_ai.rs b/crates/language_models/src/provider/x_ai.rs index c00637bce7e67b624f5cdcae9aebe43fb43971f8..88189864c7b4b650a24afb2b872c1d6105cf9782 100644 --- a/crates/language_models/src/provider/x_ai.rs +++ b/crates/language_models/src/provider/x_ai.rs @@ -1,5 +1,6 @@ use anyhow::Result; use collections::BTreeMap; +use credentials_provider::CredentialsProvider; use futures::{FutureExt, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, Task, Window}; use http_client::HttpClient; @@ -39,6 +40,7 @@ pub struct XAiLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, } impl State { @@ -47,30 +49,51 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = XAiLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = XAiLanguageModelProvider::api_url(cx); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } impl XAiLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::(|this: &mut State, cx| { + let credentials_provider = this.credentials_provider.clone(); let api_url = Self::api_url(cx); - this.api_key_state - 
.handle_url_change(api_url, |this| &mut this.api_key_state, cx); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.notify(); }) .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }); diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index ccffbd29f4bd03b0d4bb0a070f4229a517597468..cd037786a399eb979fd5d9053c57efe3100dd473 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -98,6 +98,7 @@ watch.workspace = true wax.workspace = true which.workspace = true worktree.workspace = true +zed_credentials_provider.workspace = true zeroize.workspace = true zlog.workspace = true ztracing.workspace = true diff --git a/crates/project/src/context_server_store.rs b/crates/project/src/context_server_store.rs index 395056384a79d39c978e14643166148685ea0b90..7b9fc16f10022805ea62df2f8b3df279fc96ae3d 100644 --- a/crates/project/src/context_server_store.rs +++ b/crates/project/src/context_server_store.rs @@ -684,7 +684,7 @@ impl ContextServerStore { let server_url = url.clone(); let id = id.clone(); cx.spawn(async move |_this, cx| { - let credentials_provider = cx.update(|cx| ::global(cx)); + let credentials_provider = cx.update(|cx| zed_credentials_provider::global(cx)); if let Err(err) = Self::clear_session(&credentials_provider, &server_url, &cx).await { log::warn!("{} failed to clear OAuth session on removal: {}", id, err); @@ -797,8 +797,7 @@ impl ContextServerStore { if configuration.has_static_auth_header() { None } else { - let credentials_provider = - cx.update(|cx| ::global(cx)); + let credentials_provider = cx.update(|cx| zed_credentials_provider::global(cx)); let http_client = cx.update(|cx| cx.http_client()); match Self::load_session(&credentials_provider, url, &cx).await { @@ -1070,7 +1069,7 @@ impl ContextServerStore { .context("Failed to start OAuth callback server")?; let http_client = 
cx.update(|cx| cx.http_client()); - let credentials_provider = cx.update(|cx| ::global(cx)); + let credentials_provider = cx.update(|cx| zed_credentials_provider::global(cx)); let server_url = match configuration.as_ref() { ContextServerConfiguration::Http { url, .. } => url.clone(), _ => anyhow::bail!("OAuth authentication only supported for HTTP servers"), @@ -1233,7 +1232,7 @@ impl ContextServerStore { self.stop_server(&id, cx)?; cx.spawn(async move |this, cx| { - let credentials_provider = cx.update(|cx| ::global(cx)); + let credentials_provider = cx.update(|cx| zed_credentials_provider::global(cx)); if let Err(err) = Self::clear_session(&credentials_provider, &server_url, &cx).await { log::error!("{} failed to clear OAuth session: {}", id, err); } @@ -1451,7 +1450,7 @@ async fn resolve_start_failure( // (e.g. timeout because the server rejected the token silently). Clear it // so the next start attempt can get a clean 401 and trigger the auth flow. if www_authenticate.is_none() { - let credentials_provider = cx.update(|cx| ::global(cx)); + let credentials_provider = cx.update(|cx| zed_credentials_provider::global(cx)); match ContextServerStore::load_session(&credentials_provider, &server_url, cx).await { Ok(Some(_)) => { log::info!("{id} start failed with a cached OAuth session present; clearing it"); diff --git a/crates/settings_ui/Cargo.toml b/crates/settings_ui/Cargo.toml index 9d79481596f4b4259760ff6c2f19f8f5cf709d1e..0228f6886fc741505ffbe02fe82242d5f3e1dfd4 100644 --- a/crates/settings_ui/Cargo.toml +++ b/crates/settings_ui/Cargo.toml @@ -59,6 +59,7 @@ ui.workspace = true util.workspace = true workspace.workspace = true zed_actions.workspace = true +zed_credentials_provider.workspace = true [dev-dependencies] fs = { workspace = true, features = ["test-support"] } diff --git a/crates/settings_ui/src/pages/edit_prediction_provider_setup.rs b/crates/settings_ui/src/pages/edit_prediction_provider_setup.rs index 
193be67aad4760763637f116fad23066438b5b61..a2a457d33eb0788ff0bed981ce5666423890f05a 100644 --- a/crates/settings_ui/src/pages/edit_prediction_provider_setup.rs +++ b/crates/settings_ui/src/pages/edit_prediction_provider_setup.rs @@ -185,9 +185,15 @@ fn render_api_key_provider( cx: &mut Context, ) -> impl IntoElement { let weak_page = cx.weak_entity(); + let credentials_provider = zed_credentials_provider::global(cx); _ = window.use_keyed_state(current_url(cx), cx, |_, cx| { let task = api_key_state.update(cx, |key_state, cx| { - key_state.load_if_needed(current_url(cx), |state| state, cx) + key_state.load_if_needed( + current_url(cx), + |state| state, + credentials_provider.clone(), + cx, + ) }); cx.spawn(async move |_, cx| { task.await.ok(); @@ -208,10 +214,17 @@ fn render_api_key_provider( }); let write_key = move |api_key: Option, cx: &mut App| { + let credentials_provider = zed_credentials_provider::global(cx); api_key_state .update(cx, |key_state, cx| { let url = current_url(cx); - key_state.store(url, api_key, |key_state| key_state, cx) + key_state.store( + url, + api_key, + |key_state| key_state, + credentials_provider, + cx, + ) }) .detach_and_log_err(cx); }; diff --git a/crates/web_search_providers/src/cloud.rs b/crates/web_search_providers/src/cloud.rs index 17addd24d445a666138a1b37fef872beedd07aed..11227d8fb5c7152dc5b7e03b95fadea6cb714717 100644 --- a/crates/web_search_providers/src/cloud.rs +++ b/crates/web_search_providers/src/cloud.rs @@ -1,13 +1,13 @@ use std::sync::Arc; use anyhow::{Context as _, Result}; -use client::{Client, UserStore}; +use client::{Client, NeedsLlmTokenRefresh, UserStore, global_llm_token}; use cloud_api_types::OrganizationId; use cloud_llm_client::{WebSearchBody, WebSearchResponse}; use futures::AsyncReadExt as _; use gpui::{App, AppContext, Context, Entity, Task}; use http_client::{HttpClient, Method}; -use language_model::{LlmApiToken, NeedsLlmTokenRefresh}; +use language_model::LlmApiToken; use web_search::{WebSearchProvider, 
WebSearchProviderId}; pub struct CloudWebSearchProvider { @@ -30,7 +30,7 @@ pub struct State { impl State { pub fn new(client: Arc, user_store: Entity, cx: &mut Context) -> Self { - let llm_api_token = LlmApiToken::global(cx); + let llm_api_token = global_llm_token(cx); Self { client, @@ -73,8 +73,8 @@ async fn perform_web_search( let http_client = &client.http_client(); let mut retries_remaining = MAX_RETRIES; - let mut token = llm_api_token - .acquire(&client, organization_id.clone()) + let mut token = client + .acquire_llm_token(&llm_api_token, organization_id.clone()) .await?; loop { @@ -100,8 +100,8 @@ async fn perform_web_search( response.body_mut().read_to_string(&mut body).await?; return Ok(serde_json::from_str(&body)?); } else if response.needs_llm_token_refresh() { - token = llm_api_token - .refresh(&client, organization_id.clone()) + token = client + .refresh_llm_token(&llm_api_token, organization_id.clone()) .await?; retries_remaining -= 1; } else { diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 0e1cbc96ff1521626bfe8bcf62091404324132a0..902d147084ce42b34a34477593ecc755bc6aa7cc 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -10,7 +10,7 @@ use agent_ui::AgentPanel; use anyhow::{Context as _, Error, Result}; use clap::Parser; use cli::FORCE_CLI_MODE_ENV_VAR_NAME; -use client::{Client, ProxySettings, UserStore, parse_zed_link}; +use client::{Client, ProxySettings, RefreshLlmTokenListener, UserStore, parse_zed_link}; use collab_ui::channel_view::ChannelView; use collections::HashMap; use crashes::InitCrashHandler; @@ -664,7 +664,12 @@ fn main() { ); copilot_ui::init(&app_state, cx); - language_model::init(app_state.user_store.clone(), app_state.client.clone(), cx); + language_model::init(cx); + RefreshLlmTokenListener::register( + app_state.client.clone(), + app_state.user_store.clone(), + cx, + ); language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx); acp_tools::init(cx); 
zed::telemetry_log::init(cx); diff --git a/crates/zed/src/visual_test_runner.rs b/crates/zed/src/visual_test_runner.rs index 7e081c15a564cb996f176345ee3330f00ee6b6f3..ad44ba4128b436597a74621694ae47c661f57bd1 100644 --- a/crates/zed/src/visual_test_runner.rs +++ b/crates/zed/src/visual_test_runner.rs @@ -201,7 +201,12 @@ fn run_visual_tests(project_path: PathBuf, update_baseline: bool) -> Result<()> }); prompt_store::init(cx); let prompt_builder = prompt_store::PromptBuilder::load(app_state.fs.clone(), false, cx); - language_model::init(app_state.user_store.clone(), app_state.client.clone(), cx); + language_model::init(cx); + client::RefreshLlmTokenListener::register( + app_state.client.clone(), + app_state.user_store.clone(), + cx, + ); language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx); git_ui::init(cx); project::AgentRegistryStore::init_global( diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index fbebb37985c2ebd76a63db5b4b807a8a7e0203ce..8d7759948fcabba7388a5c63e0bfa6710aa21f74 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -5189,7 +5189,12 @@ mod tests { cx, ); image_viewer::init(cx); - language_model::init(app_state.user_store.clone(), app_state.client.clone(), cx); + language_model::init(cx); + client::RefreshLlmTokenListener::register( + app_state.client.clone(), + app_state.user_store.clone(), + cx, + ); language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx); web_search::init(cx); git_graph::init(cx); diff --git a/crates/zed/src/zed/edit_prediction_registry.rs b/crates/zed/src/zed/edit_prediction_registry.rs index 8c9e74a42e6c3ddb2b340ac58da39752009825f0..d09dc07af839a681cea96d43217c4217927864d5 100644 --- a/crates/zed/src/zed/edit_prediction_registry.rs +++ b/crates/zed/src/zed/edit_prediction_registry.rs @@ -313,7 +313,12 @@ mod tests { let app_state = cx.update(|cx| { let app_state = AppState::test(cx); client::init(&app_state.client, cx); - 
language_model::init(app_state.user_store.clone(), app_state.client.clone(), cx); + language_model::init(cx); + client::RefreshLlmTokenListener::register( + app_state.client.clone(), + app_state.user_store.clone(), + cx, + ); editor::init(cx); app_state }); diff --git a/crates/zed_credentials_provider/Cargo.toml b/crates/zed_credentials_provider/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..9f64801d4664111bceb0fb7b9ee8c007977b6389 --- /dev/null +++ b/crates/zed_credentials_provider/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "zed_credentials_provider" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/zed_credentials_provider.rs" + +[dependencies] +anyhow.workspace = true +credentials_provider.workspace = true +futures.workspace = true +gpui.workspace = true +paths.workspace = true +release_channel.workspace = true +serde.workspace = true +serde_json.workspace = true diff --git a/crates/zed_credentials_provider/LICENSE-GPL b/crates/zed_credentials_provider/LICENSE-GPL new file mode 120000 index 0000000000000000000000000000000000000000..89e542f750cd3860a0598eff0dc34b56d7336dc4 --- /dev/null +++ b/crates/zed_credentials_provider/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/zed_credentials_provider/src/zed_credentials_provider.rs b/crates/zed_credentials_provider/src/zed_credentials_provider.rs new file mode 100644 index 0000000000000000000000000000000000000000..6705e58d400b1a66990f2451d318b5950ea08dde --- /dev/null +++ b/crates/zed_credentials_provider/src/zed_credentials_provider.rs @@ -0,0 +1,181 @@ +use std::collections::HashMap; +use std::future::Future; +use std::path::PathBuf; +use std::pin::Pin; +use std::sync::{Arc, LazyLock}; + +use anyhow::Result; +use credentials_provider::CredentialsProvider; +use futures::FutureExt as _; +use gpui::{App, AsyncApp, Global}; +use 
release_channel::ReleaseChannel; + +/// An environment variable whose presence indicates that the system keychain +/// should be used in development. +/// +/// By default, running Zed in development uses the development credentials +/// provider. Setting this environment variable allows you to interact with the +/// system keychain (for instance, if you need to test something). +/// +/// Only works in development. Setting this environment variable in other +/// release channels is a no-op. +static ZED_DEVELOPMENT_USE_KEYCHAIN: LazyLock = LazyLock::new(|| { + std::env::var("ZED_DEVELOPMENT_USE_KEYCHAIN").is_ok_and(|value| !value.is_empty()) +}); + +pub struct ZedCredentialsProvider(pub Arc); + +impl Global for ZedCredentialsProvider {} + +/// Returns the global [`CredentialsProvider`]. +pub fn init_global(cx: &mut App) { + // The `CredentialsProvider` trait has `Send + Sync` bounds on it, so it + // seems like this is a false positive from Clippy. + #[allow(clippy::arc_with_non_send_sync)] + let provider = new(cx); + cx.set_global(ZedCredentialsProvider(provider)); +} + +pub fn global(cx: &App) -> Arc { + cx.try_global::() + .map(|provider| provider.0.clone()) + .unwrap_or_else(|| new(cx)) +} + +fn new(cx: &App) -> Arc { + let use_development_provider = match ReleaseChannel::try_global(cx) { + Some(ReleaseChannel::Dev) => { + // In development we default to using the development + // credentials provider to avoid getting spammed by relentless + // keychain access prompts. + // + // However, if the `ZED_DEVELOPMENT_USE_KEYCHAIN` environment + // variable is set, we will use the actual keychain. + !*ZED_DEVELOPMENT_USE_KEYCHAIN + } + Some(ReleaseChannel::Nightly | ReleaseChannel::Preview | ReleaseChannel::Stable) | None => { + false + } + }; + + if use_development_provider { + Arc::new(DevelopmentCredentialsProvider::new()) + } else { + Arc::new(KeychainCredentialsProvider) + } +} + +/// A credentials provider that stores credentials in the system keychain. 
+struct KeychainCredentialsProvider; + +impl CredentialsProvider for KeychainCredentialsProvider { + fn read_credentials<'a>( + &'a self, + url: &'a str, + cx: &'a AsyncApp, + ) -> Pin)>>> + 'a>> { + async move { cx.update(|cx| cx.read_credentials(url)).await }.boxed_local() + } + + fn write_credentials<'a>( + &'a self, + url: &'a str, + username: &'a str, + password: &'a [u8], + cx: &'a AsyncApp, + ) -> Pin> + 'a>> { + async move { + cx.update(move |cx| cx.write_credentials(url, username, password)) + .await + } + .boxed_local() + } + + fn delete_credentials<'a>( + &'a self, + url: &'a str, + cx: &'a AsyncApp, + ) -> Pin> + 'a>> { + async move { cx.update(move |cx| cx.delete_credentials(url)).await }.boxed_local() + } +} + +/// A credentials provider that stores credentials in a local file. +/// +/// This MUST only be used in development, as this is not a secure way of storing +/// credentials on user machines. +/// +/// Its existence is purely to work around the annoyance of having to constantly +/// re-allow access to the system keychain when developing Zed. 
+struct DevelopmentCredentialsProvider { + path: PathBuf, +} + +impl DevelopmentCredentialsProvider { + fn new() -> Self { + let path = paths::config_dir().join("development_credentials"); + + Self { path } + } + + fn load_credentials(&self) -> Result)>> { + let json = std::fs::read(&self.path)?; + let credentials: HashMap)> = serde_json::from_slice(&json)?; + + Ok(credentials) + } + + fn save_credentials(&self, credentials: &HashMap)>) -> Result<()> { + let json = serde_json::to_string(credentials)?; + std::fs::write(&self.path, json)?; + + Ok(()) + } +} + +impl CredentialsProvider for DevelopmentCredentialsProvider { + fn read_credentials<'a>( + &'a self, + url: &'a str, + _cx: &'a AsyncApp, + ) -> Pin)>>> + 'a>> { + async move { + Ok(self + .load_credentials() + .unwrap_or_default() + .get(url) + .cloned()) + } + .boxed_local() + } + + fn write_credentials<'a>( + &'a self, + url: &'a str, + username: &'a str, + password: &'a [u8], + _cx: &'a AsyncApp, + ) -> Pin> + 'a>> { + async move { + let mut credentials = self.load_credentials().unwrap_or_default(); + credentials.insert(url.to_string(), (username.to_string(), password.to_vec())); + + self.save_credentials(&credentials) + } + .boxed_local() + } + + fn delete_credentials<'a>( + &'a self, + url: &'a str, + _cx: &'a AsyncApp, + ) -> Pin> + 'a>> { + async move { + let mut credentials = self.load_credentials()?; + credentials.remove(url); + + self.save_credentials(&credentials) + } + .boxed_local() + } +} diff --git a/crates/zed_env_vars/Cargo.toml b/crates/zed_env_vars/Cargo.toml index 1cf32174c351c28ec7eb16deab7b7986655d4a48..bf863b742568f3f607ba7cb54bc8fc267f045cc9 100644 --- a/crates/zed_env_vars/Cargo.toml +++ b/crates/zed_env_vars/Cargo.toml @@ -15,4 +15,4 @@ path = "src/zed_env_vars.rs" default = [] [dependencies] -gpui.workspace = true +env_var.workspace = true diff --git a/crates/zed_env_vars/src/zed_env_vars.rs b/crates/zed_env_vars/src/zed_env_vars.rs index 
e601cc9536602ac943bd76bf1bfd8b8ac8979dd9..13451911295735762074bcb1cf152470afa55c36 100644 --- a/crates/zed_env_vars/src/zed_env_vars.rs +++ b/crates/zed_env_vars/src/zed_env_vars.rs @@ -1,45 +1,6 @@ -use gpui::SharedString; +pub use env_var::{EnvVar, bool_env_var, env_var}; use std::sync::LazyLock; /// Whether Zed is running in stateless mode. /// When true, Zed will use in-memory databases instead of persistent storage. pub static ZED_STATELESS: LazyLock = bool_env_var!("ZED_STATELESS"); - -#[derive(Clone)] -pub struct EnvVar { - pub name: SharedString, - /// Value of the environment variable. Also `None` when set to an empty string. - pub value: Option, -} - -impl EnvVar { - pub fn new(name: SharedString) -> Self { - let value = std::env::var(name.as_str()).ok(); - if value.as_ref().is_some_and(|v| v.is_empty()) { - Self { name, value: None } - } else { - Self { name, value } - } - } - - pub fn or(self, other: EnvVar) -> EnvVar { - if self.value.is_some() { self } else { other } - } -} - -/// Creates a `LazyLock` expression for use in a `static` declaration. -#[macro_export] -macro_rules! env_var { - ($name:expr) => { - ::std::sync::LazyLock::new(|| $crate::EnvVar::new(($name).into())) - }; -} - -/// Generates a `LazyLock` expression for use in a `static` declaration. Checks if the -/// environment variable exists and is non-empty. -#[macro_export] -macro_rules! 
bool_env_var { - ($name:expr) => { - ::std::sync::LazyLock::new(|| $crate::EnvVar::new(($name).into()).value.is_some()) - }; -} From 9537861e458709537824b6d082d78e4eead73e3c Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 2 Apr 2026 19:17:29 -0300 Subject: [PATCH 22/63] Refine split diff icons (#53022) Follow-up to https://github.com/zed-industries/zed/pull/52781, adding some different icons to better express the state in which the split diff _is selected_ but _isn't active_, which happens when the editor is smaller than a given amount of defined columns. https://github.com/user-attachments/assets/2e7aaf6c-077f-4be5-9439-ce6c2050e63d Release Notes: - N/A --- assets/icons/diff_split.svg | 5 +- assets/icons/diff_split_auto.svg | 7 ++ assets/icons/diff_unified.svg | 4 +- crates/icons/src/icons.rs | 1 + crates/search/src/buffer_search.rs | 144 +++++++++++++++-------------- 5 files changed, 87 insertions(+), 74 deletions(-) create mode 100644 assets/icons/diff_split_auto.svg diff --git a/assets/icons/diff_split.svg b/assets/icons/diff_split.svg index de2056466f7ef1081ee00dabb8b4d5baa8fc9217..dcafeb8df5c28bcac1f1fe8cf5783eebd8d8cd8a 100644 --- a/assets/icons/diff_split.svg +++ b/assets/icons/diff_split.svg @@ -1,5 +1,4 @@ - - - + + diff --git a/assets/icons/diff_split_auto.svg b/assets/icons/diff_split_auto.svg new file mode 100644 index 0000000000000000000000000000000000000000..f9dd7076be75aaf3e90286140a60deece5016114 --- /dev/null +++ b/assets/icons/diff_split_auto.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/assets/icons/diff_unified.svg b/assets/icons/diff_unified.svg index b2d3895ae5466454e9cefc4e77e3c3f2a19cde8c..28735c16f682159b6b0a099176d6fc3b75cd248e 100644 --- a/assets/icons/diff_unified.svg +++ b/assets/icons/diff_unified.svg @@ -1,4 +1,4 @@ - - + + diff --git a/crates/icons/src/icons.rs b/crates/icons/src/icons.rs index 6929ae4e4ca8ca0ee00c9793c948892043dd6dd6..e29b7d3593025556771d62dc0124786672c540de 
100644 --- a/crates/icons/src/icons.rs +++ b/crates/icons/src/icons.rs @@ -95,6 +95,7 @@ pub enum IconName { DebugStepOver, Diff, DiffSplit, + DiffSplitAuto, DiffUnified, Disconnected, Download, diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index 93fbab59a6f1b9da0cb9faf0657fc4a1c5f679bd..2ea386b85df21a72262b70eb7016028a49c2b8c0 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -114,81 +114,23 @@ impl Render for BufferSearchBar { .map(|splittable_editor| { let editor_ref = splittable_editor.read(cx); let diff_view_style = editor_ref.diff_view_style(); - let is_split = editor_ref.is_split(); + + let is_split_set = diff_view_style == DiffViewStyle::Split; + let is_split_active = editor_ref.is_split(); let min_columns = EditorSettings::get_global(cx).minimum_split_diff_width as u32; - let mut split_button = IconButton::new("diff-split", IconName::DiffSplit) - .shape(IconButtonShape::Square) - .tooltip(Tooltip::element(move |_, cx| { - let message = if min_columns == 0 { - SharedString::from("Split") - } else { - format!("Split when wider than {} columns", min_columns).into() - }; - - v_flex() - .child(message) - .child( - h_flex() - .gap_0p5() - .text_ui_sm(cx) - .text_color(Color::Muted.color(cx)) - .children(render_modifiers( - &gpui::Modifiers::secondary_key(), - PlatformStyle::platform(), - None, - Some(TextSize::Small.rems(cx).into()), - false, - )) - .child("click to change min width"), - ) - .into_any() - })) - .on_click({ - let splittable_editor = splittable_editor.downgrade(); - move |_, window, cx| { - if window.modifiers().secondary() { - window.dispatch_action( - OpenSettingsAt { - path: "minimum_split_diff_width".to_string(), - } - .boxed_clone(), - cx, - ); - } else { - update_settings_file( - ::global(cx), - cx, - |settings, _| { - settings.editor.diff_view_style = - Some(DiffViewStyle::Split); - }, - ); - if diff_view_style == DiffViewStyle::Unified { - splittable_editor - 
.update(cx, |editor, cx| { - editor.toggle_split(&ToggleSplitDiff, window, cx); - }) - .ok(); - } - } - } - }); - - if diff_view_style == DiffViewStyle::Split { - if !is_split { - split_button = split_button.icon_color(Color::Disabled) - } else { - split_button = split_button.toggle_state(true) - } - } + let split_icon = if is_split_set && !is_split_active { + IconName::DiffSplitAuto + } else { + IconName::DiffSplit + }; h_flex() .gap_1() .child( IconButton::new("diff-unified", IconName::DiffUnified) - .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) .toggle_state(diff_view_style == DiffViewStyle::Unified) .tooltip(Tooltip::text("Unified")) .on_click({ @@ -216,7 +158,71 @@ impl Render for BufferSearchBar { } }), ) - .child(split_button) + .child( + IconButton::new("diff-split", split_icon) + .toggle_state(diff_view_style == DiffViewStyle::Split) + .icon_size(IconSize::Small) + .tooltip(Tooltip::element(move |_, cx| { + let message = if is_split_set && !is_split_active { + format!("Split when wider than {} columns", min_columns) + .into() + } else { + SharedString::from("Split") + }; + + v_flex() + .child(message) + .child( + h_flex() + .gap_0p5() + .text_ui_sm(cx) + .text_color(Color::Muted.color(cx)) + .children(render_modifiers( + &gpui::Modifiers::secondary_key(), + PlatformStyle::platform(), + None, + Some(TextSize::Small.rems(cx).into()), + false, + )) + .child("click to change min width"), + ) + .into_any() + })) + .on_click({ + let splittable_editor = splittable_editor.downgrade(); + move |_, window, cx| { + if window.modifiers().secondary() { + window.dispatch_action( + OpenSettingsAt { + path: "minimum_split_diff_width".to_string(), + } + .boxed_clone(), + cx, + ); + } else { + update_settings_file( + ::global(cx), + cx, + |settings, _| { + settings.editor.diff_view_style = + Some(DiffViewStyle::Split); + }, + ); + if diff_view_style == DiffViewStyle::Unified { + splittable_editor + .update(cx, |editor, cx| { + editor.toggle_split( + 
&ToggleSplitDiff, + window, + cx, + ); + }) + .ok(); + } + } + } + }), + ) }) } else { None @@ -240,7 +246,7 @@ impl Render for BufferSearchBar { let collapse_expand_icon_button = |id| { IconButton::new(id, icon) - .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) .tooltip(move |_, cx| { Tooltip::for_action_in( tooltip_label, From d430cc5945f371ec87dd295d1f01dd840cbed3d8 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 2 Apr 2026 20:08:22 -0300 Subject: [PATCH 23/63] sidebar: Add some design tweaks (#53026) - Make notification icons show up even for threads of the currently active workspace - When with a notification/any other status, replace thread item's agent icon a status icon for higher visbility - Remove hover state from currently active project/workspace's header - Make project/workspace label brighter if I'm inside of it - Adjust colors all around a bit (sidebar background and border, and icons within the project header) Release Notes: - N/A --- crates/sidebar/src/sidebar.rs | 40 +++++++------- crates/ui/src/components/ai/thread_item.rs | 62 ++++++++++------------ 2 files changed, 46 insertions(+), 56 deletions(-) diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs index a9664a048123253d617a08507cfe4288914d0e9e..7d7786fd59087f7d78088ae4517933ad089e8584 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -974,21 +974,21 @@ impl Sidebar { let session_id = &thread.metadata.session_id; - let is_thread_workspace_active = match &thread.workspace { - ThreadEntryWorkspace::Open(thread_workspace) => active_workspace - .as_ref() - .is_some_and(|active| active == thread_workspace), - ThreadEntryWorkspace::Closed(_) => false, - }; + let is_active_thread = self.active_entry.as_ref().is_some_and(|entry| { + entry.is_active_thread(session_id) + && active_workspace + .as_ref() + .is_some_and(|active| active == entry.workspace()) + }); if thread.status == 
AgentThreadStatus::Completed - && !is_thread_workspace_active + && !is_active_thread && old_statuses.get(session_id) == Some(&AgentThreadStatus::Running) { notified_threads.insert(session_id.clone()); } - if is_thread_workspace_active && !thread.is_background { + if is_active_thread && !thread.is_background { notified_threads.remove(session_id); } } @@ -1280,7 +1280,7 @@ impl Sidebar { v_flex() .w_full() .border_t_1() - .border_color(cx.theme().colors().border.opacity(0.5)) + .border_color(cx.theme().colors().border) .child(rendered) .into_any_element() } else { @@ -1327,7 +1327,7 @@ impl Sidebar { has_running_threads: bool, waiting_thread_count: usize, is_active: bool, - is_selected: bool, + is_focused: bool, cx: &mut Context, ) -> AnyElement { let id_prefix = if is_sticky { "sticky-" } else { "" }; @@ -1359,11 +1359,11 @@ impl Sidebar { let label = if highlight_positions.is_empty() { Label::new(label.clone()) - .color(Color::Muted) + .when(!is_active, |this| this.color(Color::Muted)) .into_any_element() } else { HighlightedLabel::new(label.clone(), highlight_positions.to_vec()) - .color(Color::Muted) + .when(!is_active, |this| this.color(Color::Muted)) .into_any_element() }; @@ -1381,14 +1381,13 @@ impl Sidebar { .pr_1p5() .border_1() .map(|this| { - if is_selected { + if is_focused { this.border_color(color.border_focused) } else { this.border_color(gpui::transparent_black()) } }) .justify_between() - .hover(|s| s.bg(hover_color)) .child( h_flex() .when(!is_active, |this| this.cursor_pointer()) @@ -1469,7 +1468,6 @@ impl Sidebar { IconName::ListCollapse, ) .icon_size(IconSize::Small) - .icon_color(Color::Muted) .tooltip(Tooltip::text("Collapse Displayed Threads")) .on_click(cx.listener({ let path_list_for_collapse = path_list_for_collapse.clone(); @@ -1491,7 +1489,6 @@ impl Sidebar { IconName::Plus, ) .icon_size(IconSize::Small) - .icon_color(Color::Muted) .tooltip(Tooltip::text("New Thread")) .on_click(cx.listener({ let workspace_for_new_thread = 
workspace_for_new_thread.clone(); @@ -1508,7 +1505,9 @@ impl Sidebar { }) }) .when(!is_active, |this| { - this.tooltip(Tooltip::text("Activate Workspace")) + this.cursor_pointer() + .hover(|s| s.bg(hover_color)) + .tooltip(Tooltip::text("Activate Workspace")) .on_click(cx.listener({ move |this, _, window, cx| { this.active_entry = @@ -1690,8 +1689,7 @@ impl Sidebar { IconName::Ellipsis, ) .selected_style(ButtonStyle::Tinted(TintColor::Accent)) - .icon_size(IconSize::Small) - .icon_color(Color::Muted), + .icon_size(IconSize::Small), ) .anchor(gpui::Corner::TopRight) .offset(gpui::Point { @@ -2825,7 +2823,7 @@ impl Sidebar { let color = cx.theme().colors(); let sidebar_bg = color .title_bar_background - .blend(color.panel_background.opacity(0.32)); + .blend(color.panel_background.opacity(0.25)); let timestamp = format_history_entry_timestamp( self.thread_last_message_sent_or_queued @@ -3682,7 +3680,7 @@ impl Render for Sidebar { let color = cx.theme().colors(); let bg = color .title_bar_background - .blend(color.panel_background.opacity(0.32)); + .blend(color.panel_background.opacity(0.25)); let no_open_projects = !self.contents.has_open_projects; let no_search_results = self.contents.entries.is_empty(); diff --git a/crates/ui/src/components/ai/thread_item.rs b/crates/ui/src/components/ai/thread_item.rs index d6b5f56e0abb33521ae69acc0b61b36b015cf987..7658946b6395d6314d90db52716020a922c85ccc 100644 --- a/crates/ui/src/components/ai/thread_item.rs +++ b/crates/ui/src/components/ai/thread_item.rs @@ -1,7 +1,4 @@ -use crate::{ - CommonAnimationExt, DecoratedIcon, DiffStat, GradientFade, HighlightedLabel, IconDecoration, - IconDecorationKind, Tooltip, prelude::*, -}; +use crate::{CommonAnimationExt, DiffStat, GradientFade, HighlightedLabel, Tooltip, prelude::*}; use gpui::{ Animation, AnimationExt, AnyView, ClickEvent, Hsla, MouseButton, SharedString, @@ -218,7 +215,7 @@ impl RenderOnce for ThreadItem { let color = cx.theme().colors(); let sidebar_base_bg = color 
.title_bar_background - .blend(color.panel_background.opacity(0.32)); + .blend(color.panel_background.opacity(0.25)); let raw_bg = self.base_bg.unwrap_or(sidebar_base_bg); let apparent_bg = color.background.blend(raw_bg); @@ -266,31 +263,31 @@ impl RenderOnce for ThreadItem { Icon::new(self.icon).color(icon_color).size(IconSize::Small) }; - let decoration = |icon: IconDecorationKind, color: Hsla| { - IconDecoration::new(icon, base_bg, cx) - .color(color) - .position(gpui::Point { - x: px(-2.), - y: px(-2.), - }) - }; - - let (decoration, icon_tooltip) = if self.status == AgentThreadStatus::Error { + let (status_icon, icon_tooltip) = if self.status == AgentThreadStatus::Error { ( - Some(decoration(IconDecorationKind::X, cx.theme().status().error)), + Some( + Icon::new(IconName::Close) + .size(IconSize::Small) + .color(Color::Error), + ), Some("Thread has an Error"), ) } else if self.status == AgentThreadStatus::WaitingForConfirmation { ( - Some(decoration( - IconDecorationKind::Triangle, - cx.theme().status().warning, - )), + Some( + Icon::new(IconName::Warning) + .size(IconSize::XSmall) + .color(Color::Warning), + ), Some("Thread is Waiting for Confirmation"), ) } else if self.notified { ( - Some(decoration(IconDecorationKind::Dot, color.text_accent)), + Some( + Icon::new(IconName::Circle) + .size(IconSize::Small) + .color(Color::Accent), + ), Some("Thread's Generation is Complete"), ) } else { @@ -306,9 +303,9 @@ impl RenderOnce for ThreadItem { .with_rotate_animation(2), ) .into_any_element() - } else if let Some(decoration) = decoration { + } else if let Some(status_icon) = status_icon { icon_container() - .child(DecoratedIcon::new(agent_icon, Some(decoration))) + .child(status_icon) .when_some(icon_tooltip, |icon, tooltip| { icon.tooltip(Tooltip::text(tooltip)) }) @@ -551,12 +548,17 @@ impl Component for ThreadItem { } fn preview(_window: &mut Window, cx: &mut App) -> Option { + let color = cx.theme().colors(); + let bg = color + .title_bar_background + 
.blend(color.panel_background.opacity(0.25)); + let container = || { v_flex() .w_72() .border_1() - .border_color(cx.theme().colors().border_variant) - .bg(cx.theme().colors().panel_background) + .border_color(color.border_variant) + .bg(bg) }; let thread_item_examples = vec![ @@ -570,16 +572,6 @@ impl Component for ThreadItem { ) .into_any_element(), ), - single_example( - "Timestamp Only (hours)", - container() - .child( - ThreadItem::new("ti-1b", "Thread with just a timestamp") - .icon(IconName::AiClaude) - .timestamp("3h"), - ) - .into_any_element(), - ), single_example( "Notified (weeks)", container() From 134dec8f95bf884db5e27270f9beabd00ef4132d Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Thu, 2 Apr 2026 23:06:45 -0400 Subject: [PATCH 24/63] Add persistence to the `Show Occupied Channels` collab toggle (#53029) This PR adds persistence to the toggle state for the `Show Occupied Channels` filter. Also, while driving by, I: - renamed a few variables to use `occupied` over `active`, which should've happened in https://github.com/zed-industries/zed/pull/52531. - extracted `"favorite_channels"` into a global Self-Review Checklist: - [X] I've reviewed my own diff for quality, security, and reliability - [X] Unsafe blocks (if any) have justifying comments - [X] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [X] Performance impact has been considered and is acceptable Closes #ISSUE Release Notes: - Added persistence to the `Show Occupied Channels` collab toggle. 
--- crates/collab_ui/src/collab_panel.rs | 74 ++++++++++++++++++++++------ 1 file changed, 58 insertions(+), 16 deletions(-) diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index d16db59ea4ae2d766018dfc03c245839e4862cb4..8d0cdf351163dadf0ac8cbf6a8dc04886f30f583 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -44,6 +44,9 @@ use workspace::{ notifications::{DetachAndPromptErr, NotifyResultExt}, }; +const FILTER_OCCUPIED_CHANNELS_KEY: &str = "filter_occupied_channels"; +const FAVORITE_CHANNELS_KEY: &str = "favorite_channels"; + actions!( collab_panel, [ @@ -244,7 +247,9 @@ pub struct CollabPanel { fs: Arc, focus_handle: FocusHandle, channel_clipboard: Option, - pending_serialization: Task>, + pending_panel_serialization: Task>, + pending_favorites_serialization: Task>, + pending_filter_serialization: Task>, context_menu: Option<(Entity, Point, Subscription)>, list_state: ListState, filter_editor: Entity, @@ -260,7 +265,7 @@ pub struct CollabPanel { subscriptions: Vec, collapsed_sections: Vec

, collapsed_channels: Vec, - filter_active_channels: bool, + filter_occupied_channels: bool, workspace: WeakEntity, } @@ -378,7 +383,9 @@ impl CollabPanel { focus_handle: cx.focus_handle(), channel_clipboard: None, fs: workspace.app_state().fs.clone(), - pending_serialization: Task::ready(None), + pending_panel_serialization: Task::ready(None), + pending_favorites_serialization: Task::ready(None), + pending_filter_serialization: Task::ready(None), context_menu: None, list_state: ListState::new(0, gpui::ListAlignment::Top, px(1000.)), channel_name_editor, @@ -393,7 +400,7 @@ impl CollabPanel { match_candidates: Vec::default(), collapsed_sections: vec![Section::Offline], collapsed_channels: Vec::default(), - filter_active_channels: false, + filter_occupied_channels: false, workspace: workspace.weak_handle(), client: workspace.app_state().client.clone(), }; @@ -474,8 +481,22 @@ impl CollabPanel { }); } + let filter_occupied_channels = KeyValueStore::global(cx) + .read_kvp(FILTER_OCCUPIED_CHANNELS_KEY) + .ok() + .flatten() + .is_some(); + + panel.update(cx, |panel, cx| { + panel.filter_occupied_channels = filter_occupied_channels; + + if filter_occupied_channels { + panel.update_entries(false, cx); + } + }); + let favorites: Vec = KeyValueStore::global(cx) - .read_kvp("favorite_channels") + .read_kvp(FAVORITE_CHANNELS_KEY) .ok() .flatten() .and_then(|json| serde_json::from_str::>(&json).ok()) @@ -520,7 +541,7 @@ impl CollabPanel { }; let kvp = KeyValueStore::global(cx); - self.pending_serialization = cx.background_spawn( + self.pending_panel_serialization = cx.background_spawn( async move { kvp.write_kvp( serialization_key, @@ -780,14 +801,14 @@ impl CollabPanel { channels.retain(|chan| channel_ids_of_matches_or_parents.contains(&chan.id)); - if self.filter_active_channels { - let active_channel_ids_or_ancestors: HashSet<_> = channel_store + if self.filter_occupied_channels { + let occupied_channel_ids_or_ancestors: HashSet<_> = channel_store .ordered_channels() 
.map(|(_, channel)| channel) .filter(|channel| !channel_store.channel_participants(channel.id).is_empty()) .flat_map(|channel| channel.parent_path.iter().copied().chain(Some(channel.id))) .collect(); - channels.retain(|channel| active_channel_ids_or_ancestors.contains(&channel.id)); + channels.retain(|channel| occupied_channel_ids_or_ancestors.contains(&channel.id)); } if let Some(state) = &self.channel_editing_state @@ -796,7 +817,7 @@ impl CollabPanel { self.entries.push(ListEntry::ChannelEditor { depth: 0 }); } - let should_respect_collapse = query.is_empty() && !self.filter_active_channels; + let should_respect_collapse = query.is_empty() && !self.filter_occupied_channels; let mut collapse_depth = None; for (idx, channel) in channels.into_iter().enumerate() { @@ -1970,6 +1991,26 @@ impl CollabPanel { self.channel_store.read(cx).is_channel_favorited(channel_id) } + fn persist_filter_occupied_channels(&mut self, cx: &mut Context) { + let is_enabled = self.filter_occupied_channels; + let kvp_store = KeyValueStore::global(cx); + self.pending_filter_serialization = cx.background_spawn( + async move { + if is_enabled { + kvp_store + .write_kvp(FILTER_OCCUPIED_CHANNELS_KEY.to_string(), "1".to_string()) + .await?; + } else { + kvp_store + .delete_kvp(FILTER_OCCUPIED_CHANNELS_KEY.to_string()) + .await?; + } + anyhow::Ok(()) + } + .log_err(), + ); + } + fn persist_favorites(&mut self, cx: &mut Context) { let favorite_ids: Vec = self .channel_store @@ -1979,11 +2020,11 @@ impl CollabPanel { .map(|id| id.0) .collect(); let kvp_store = KeyValueStore::global(cx); - self.pending_serialization = cx.background_spawn( + self.pending_favorites_serialization = cx.background_spawn( async move { let json = serde_json::to_string(&favorite_ids)?; kvp_store - .write_kvp("favorite_channels".to_string(), json) + .write_kvp(FAVORITE_CHANNELS_KEY.to_string(), json) .await?; anyhow::Ok(()) } @@ -2843,14 +2884,15 @@ impl CollabPanel { Some( h_flex() .child( - 
IconButton::new("filter-active-channels", IconName::ListFilter) + IconButton::new("filter-occupied-channels", IconName::ListFilter) .icon_size(IconSize::Small) - .toggle_state(self.filter_active_channels) + .toggle_state(self.filter_occupied_channels) .on_click(cx.listener(|this, _, _window, cx| { - this.filter_active_channels = !this.filter_active_channels; + this.filter_occupied_channels = !this.filter_occupied_channels; this.update_entries(true, cx); + this.persist_filter_occupied_channels(cx); })) - .tooltip(Tooltip::text(if self.filter_active_channels { + .tooltip(Tooltip::text(if self.filter_occupied_channels { "Show All Channels" } else { "Show Occupied Channels" From 20f730867763a6c9cd3af84a5eddd20d2c9575c8 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 2 Apr 2026 21:16:35 -0700 Subject: [PATCH 25/63] Maintain root repo common dir path as a field on Worktree (#53023) This enables us to always different git worktrees of the same repo together. Depends on https://github.com/zed-industries/cloud/pull/2220 Release Notes: - N/A --------- Co-authored-by: Eric Holk --- .../20221109000000_test_schema.sql | 1 + .../migrations/20251208000000_test_schema.sql | 3 +- crates/collab/src/db.rs | 2 + crates/collab/src/db/queries/projects.rs | 4 + crates/collab/src/db/queries/rooms.rs | 1 + crates/collab/src/db/tables/worktree.rs | 1 + crates/collab/src/rpc.rs | 2 + crates/collab/tests/integration/git_tests.rs | 57 +++++++++++- .../edit_prediction/src/license_detection.rs | 1 + crates/project/src/lsp_store.rs | 3 +- crates/project/src/manifest_tree.rs | 2 +- crates/project/src/worktree_store.rs | 1 + crates/proto/proto/call.proto | 1 + crates/proto/src/proto.rs | 1 + .../remote_server/src/remote_editing_tests.rs | 82 +++++++++++++++++ crates/worktree/src/worktree.rs | 48 ++++++++++ crates/worktree/tests/integration/main.rs | 91 +++++++++++++++++++ 17 files changed, 297 insertions(+), 4 deletions(-) diff --git 
a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index 2a87d617ebb19117ca87c00cc0887b07e416c8bd..75175372f24a83cfb50e8f87deae93e3f03e1a8a 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -65,6 +65,7 @@ CREATE TABLE "worktrees" ( "scan_id" INTEGER NOT NULL, "is_complete" BOOL NOT NULL DEFAULT FALSE, "completed_scan_id" INTEGER NOT NULL, + "root_repo_common_dir" VARCHAR, PRIMARY KEY (project_id, id) ); diff --git a/crates/collab/migrations/20251208000000_test_schema.sql b/crates/collab/migrations/20251208000000_test_schema.sql index 8a56b9ce982f9a39a14bfc55fe8a34870ddea1c6..0110dd149b1143a3edcf76a1e0b18fbf1a22287c 100644 --- a/crates/collab/migrations/20251208000000_test_schema.sql +++ b/crates/collab/migrations/20251208000000_test_schema.sql @@ -484,7 +484,8 @@ CREATE TABLE public.worktrees ( visible boolean NOT NULL, scan_id bigint NOT NULL, is_complete boolean DEFAULT false NOT NULL, - completed_scan_id bigint + completed_scan_id bigint, + root_repo_common_dir character varying ); ALTER TABLE ONLY public.breakpoints ALTER COLUMN id SET DEFAULT nextval('public.breakpoints_id_seq'::regclass); diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 3e4c36631b29d35871cac101542bcc6904fbb271..44abc37af66e3f169d3af1a7d5e29063e382c620 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -559,6 +559,7 @@ pub struct RejoinedWorktree { pub settings_files: Vec, pub scan_id: u64, pub completed_scan_id: u64, + pub root_repo_common_dir: Option, } pub struct LeftRoom { @@ -638,6 +639,7 @@ pub struct Worktree { pub settings_files: Vec, pub scan_id: u64, pub completed_scan_id: u64, + pub root_repo_common_dir: Option, } #[derive(Debug)] diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index 
3fc59f96332180d7d7bca4b6f71a345d9699e9e2..b1ea638072a30d6b881a711448223449aa9f53e2 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -87,6 +87,7 @@ impl Database { visible: ActiveValue::set(worktree.visible), scan_id: ActiveValue::set(0), completed_scan_id: ActiveValue::set(0), + root_repo_common_dir: ActiveValue::set(None), } })) .exec(&*tx) @@ -203,6 +204,7 @@ impl Database { visible: ActiveValue::set(worktree.visible), scan_id: ActiveValue::set(0), completed_scan_id: ActiveValue::set(0), + root_repo_common_dir: ActiveValue::set(None), })) .on_conflict( OnConflict::columns([worktree::Column::ProjectId, worktree::Column::Id]) @@ -266,6 +268,7 @@ impl Database { ActiveValue::default() }, abs_path: ActiveValue::set(update.abs_path.clone()), + root_repo_common_dir: ActiveValue::set(update.root_repo_common_dir.clone()), ..Default::default() }) .exec(&*tx) @@ -761,6 +764,7 @@ impl Database { settings_files: Default::default(), scan_id: db_worktree.scan_id as u64, completed_scan_id: db_worktree.completed_scan_id as u64, + root_repo_common_dir: db_worktree.root_repo_common_dir, legacy_repository_entries: Default::default(), }, ) diff --git a/crates/collab/src/db/queries/rooms.rs b/crates/collab/src/db/queries/rooms.rs index 3197d142cba7a1969e6fdb9423dc94497f6ca53c..94e003fd2d27c97a53f66606d11ed2e15609b728 100644 --- a/crates/collab/src/db/queries/rooms.rs +++ b/crates/collab/src/db/queries/rooms.rs @@ -629,6 +629,7 @@ impl Database { settings_files: Default::default(), scan_id: db_worktree.scan_id as u64, completed_scan_id: db_worktree.completed_scan_id as u64, + root_repo_common_dir: db_worktree.root_repo_common_dir, }; let rejoined_worktree = rejoined_project diff --git a/crates/collab/src/db/tables/worktree.rs b/crates/collab/src/db/tables/worktree.rs index 46d9877dff152cdc3b30531606febec65595fec1..f67a9749a48e51fce81f97ad2faf8609c50a0204 100644 --- a/crates/collab/src/db/tables/worktree.rs +++ 
b/crates/collab/src/db/tables/worktree.rs @@ -15,6 +15,7 @@ pub struct Model { pub scan_id: i64, /// The last scan that fully completed. pub completed_scan_id: i64, + pub root_repo_common_dir: Option, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index e05df1909db1e8afed0c06425d84799ff985f3c5..7ed488b0ba62c10326a0e2154f0d2ba895e20a4f 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -1485,6 +1485,7 @@ fn notify_rejoined_projects( worktree_id: worktree.id, abs_path: worktree.abs_path.clone(), root_name: worktree.root_name, + root_repo_common_dir: worktree.root_repo_common_dir, updated_entries: worktree.updated_entries, removed_entries: worktree.removed_entries, scan_id: worktree.scan_id, @@ -1943,6 +1944,7 @@ async fn join_project( worktree_id, abs_path: worktree.abs_path.clone(), root_name: worktree.root_name, + root_repo_common_dir: worktree.root_repo_common_dir, updated_entries: worktree.entries, removed_entries: Default::default(), scan_id: worktree.scan_id, diff --git a/crates/collab/tests/integration/git_tests.rs b/crates/collab/tests/integration/git_tests.rs index a64233caba014aa49bd64f98634b40abeef88e8e..fdaacd768444bd44d8414247f922f38afb7e81d5 100644 --- a/crates/collab/tests/integration/git_tests.rs +++ b/crates/collab/tests/integration/git_tests.rs @@ -1,4 +1,4 @@ -use std::path::{Path, PathBuf}; +use std::path::{self, Path, PathBuf}; use call::ActiveCall; use client::RECEIVE_TIMEOUT; @@ -17,6 +17,61 @@ use workspace::{MultiWorkspace, Workspace}; use crate::TestServer; +#[gpui::test] +async fn test_root_repo_common_dir_sync( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, 
cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + // Set up a project whose root IS a git repository. + client_a + .fs() + .insert_tree( + path!("/project"), + json!({ ".git": {}, "file.txt": "content" }), + ) + .await; + + let (project_a, _) = client_a.build_local_project(path!("/project"), cx_a).await; + executor.run_until_parked(); + + // Host should see root_repo_common_dir pointing to .git at the root. + let host_common_dir = project_a.read_with(cx_a, |project, cx| { + let worktree = project.worktrees(cx).next().unwrap(); + worktree.read(cx).snapshot().root_repo_common_dir().cloned() + }); + assert_eq!( + host_common_dir.as_deref(), + Some(path::Path::new(path!("/project/.git"))), + ); + + // Share the project and have client B join. + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.join_remote_project(project_id, cx_b).await; + executor.run_until_parked(); + + // Guest should see the same root_repo_common_dir as the host. 
+ let guest_common_dir = project_b.read_with(cx_b, |project, cx| { + let worktree = project.worktrees(cx).next().unwrap(); + worktree.read(cx).snapshot().root_repo_common_dir().cloned() + }); + assert_eq!( + guest_common_dir, host_common_dir, + "guest should see the same root_repo_common_dir as host", + ); +} + fn collect_diff_stats( panel: &gpui::Entity, cx: &C, diff --git a/crates/edit_prediction/src/license_detection.rs b/crates/edit_prediction/src/license_detection.rs index 55635bcfd04cb6288f44907da051fa1f33d41922..88edfc306ebca21076908b3c05f7cf2837b19209 100644 --- a/crates/edit_prediction/src/license_detection.rs +++ b/crates/edit_prediction/src/license_detection.rs @@ -319,6 +319,7 @@ impl LicenseDetectionWatcher { } worktree::Event::DeletedEntry(_) | worktree::Event::UpdatedGitRepositories(_) + | worktree::Event::UpdatedRootRepoCommonDir | worktree::Event::Deleted => {} }); diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 286d3a85f86173bff5d17d8d7c86d26464a04714..2f579f5a724db143bbd4b0f9853a217bd6b14655 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -4414,7 +4414,8 @@ impl LspStore { } worktree::Event::UpdatedGitRepositories(_) | worktree::Event::DeletedEntry(_) - | worktree::Event::Deleted => {} + | worktree::Event::Deleted + | worktree::Event::UpdatedRootRepoCommonDir => {} }) .detach() } diff --git a/crates/project/src/manifest_tree.rs b/crates/project/src/manifest_tree.rs index 1ae5b0e809f3803c3f8858afb065637ba0a0f256..fb1b7e96e4a20370493e0837360a28583ffbbfc0 100644 --- a/crates/project/src/manifest_tree.rs +++ b/crates/project/src/manifest_tree.rs @@ -59,7 +59,7 @@ impl WorktreeRoots { let path = TriePath::from(entry.path.as_ref()); this.roots.remove(&path); } - WorktreeEvent::Deleted => {} + WorktreeEvent::Deleted | WorktreeEvent::UpdatedRootRepoCommonDir => {} } }), }) diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index 
92f7db453a81c6224455002b7811f2e6945f2a82..ca448ce53118fd23fec0dfc920ee67f5d6d19c41 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -812,6 +812,7 @@ impl WorktreeStore { // The worktree root itself has been deleted (for single-file worktrees) // The worktree will be removed via the observe_release callback } + worktree::Event::UpdatedRootRepoCommonDir => {} } }) .detach(); diff --git a/crates/proto/proto/call.proto b/crates/proto/proto/call.proto index aa964c64cd04db71a71ac081e034be10cbf95048..71351fb74c5834fe0b1650f22e851c21cd752466 100644 --- a/crates/proto/proto/call.proto +++ b/crates/proto/proto/call.proto @@ -225,6 +225,7 @@ message UpdateWorktree { uint64 scan_id = 8; bool is_last_update = 9; string abs_path = 10; + optional string root_repo_common_dir = 11; } // deprecated diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index c21934338f97cc8ed3e04b917c7db84fccecd031..dd77d2a2da8d4dbc2c0f91f63cb59dd1591ee3f4 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -881,6 +881,7 @@ pub fn split_worktree_update(mut message: UpdateWorktree) -> impl Iterator> = Rc::new(Cell::new(0)); + tree.update(cx, { + let event_count = event_count.clone(); + |_, cx| { + cx.subscribe(&cx.entity(), move |_, _, event, _| { + if matches!(event, Event::UpdatedRootRepoCommonDir) { + event_count.set(event_count.get() + 1); + } + }) + .detach(); + } + }); + + // Remove .git — root_repo_common_dir should become None. 
+ fs.remove_file( + &PathBuf::from(path!("/linked_worktree/.git")), + Default::default(), + ) + .await + .unwrap(); + tree.flush_fs_events(cx).await; + + tree.read_with(cx, |tree, _| { + assert_eq!(tree.snapshot().root_repo_common_dir(), None); + }); + assert_eq!( + event_count.get(), + 1, + "should have emitted UpdatedRootRepoCommonDir on removal" + ); +} + fn init_test(cx: &mut gpui::TestAppContext) { zlog::init_test(); From 843615c35e4bc0d226eae951712f85ca63147eb0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Soares?= <37777652+Dnreikronos@users.noreply.github.com> Date: Fri, 3 Apr 2026 05:26:08 -0300 Subject: [PATCH 26/63] markdown: Fix visible escape characters in LSP diagnostics (#51766) Closes #51622 Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Fixed markdown escape characters being visible in LSP diagnostic messages when leading whitespace caused indented code blocks --------- Co-authored-by: Smit Barmase --- crates/markdown/src/markdown.rs | 222 ++++++++++++++++++++++++++++---- 1 file changed, 195 insertions(+), 27 deletions(-) diff --git a/crates/markdown/src/markdown.rs b/crates/markdown/src/markdown.rs index c31ca79e7581926e7696fa596aaccc9371512841..52c5fd38ac84b259ca2b39b97a53a11c6dc75d03 100644 --- a/crates/markdown/src/markdown.rs +++ b/crates/markdown/src/markdown.rs @@ -314,6 +314,78 @@ actions!( ] ); +enum EscapeAction { + PassThrough, + Nbsp(usize), + DoubleNewline, + PrefixBackslash, +} + +impl EscapeAction { + fn output_len(&self) -> usize { + match self { + Self::PassThrough => 1, + Self::Nbsp(count) => count * '\u{00A0}'.len_utf8(), + Self::DoubleNewline => 2, + Self::PrefixBackslash => 2, + } + } + + fn 
write_to(&self, c: char, output: &mut String) { + match self { + Self::PassThrough => output.push(c), + Self::Nbsp(count) => { + for _ in 0..*count { + output.push('\u{00A0}'); + } + } + Self::DoubleNewline => { + output.push('\n'); + output.push('\n'); + } + Self::PrefixBackslash => { + // '\\' is a single backslash in Rust, e.g. '|' -> '\|' + output.push('\\'); + output.push(c); + } + } + } +} + +// Valid to operate on raw bytes since multi-byte UTF-8 +// sequences never contain ASCII-range bytes. +struct MarkdownEscaper { + in_leading_whitespace: bool, +} + +impl MarkdownEscaper { + const TAB_SIZE: usize = 4; + + fn new() -> Self { + Self { + in_leading_whitespace: true, + } + } + + fn next(&mut self, byte: u8) -> EscapeAction { + let action = if self.in_leading_whitespace && byte == b'\t' { + EscapeAction::Nbsp(Self::TAB_SIZE) + } else if self.in_leading_whitespace && byte == b' ' { + EscapeAction::Nbsp(1) + } else if byte == b'\n' { + EscapeAction::DoubleNewline + } else if byte.is_ascii_punctuation() { + EscapeAction::PrefixBackslash + } else { + EscapeAction::PassThrough + }; + + self.in_leading_whitespace = + byte == b'\n' || (self.in_leading_whitespace && (byte == b' ' || byte == b'\t')); + action + } +} + impl Markdown { pub fn new( source: SharedString, @@ -477,30 +549,21 @@ impl Markdown { } pub fn escape(s: &str) -> Cow<'_, str> { - // Valid to use bytes since multi-byte UTF-8 doesn't use ASCII chars. 
- let count = s - .bytes() - .filter(|c| *c == b'\n' || c.is_ascii_punctuation()) - .count(); - if count > 0 { - let mut output = String::with_capacity(s.len() + count); - let mut is_newline = false; - for c in s.chars() { - if is_newline && c == ' ' { - continue; - } - is_newline = c == '\n'; - if c == '\n' { - output.push('\n') - } else if c.is_ascii_punctuation() { - output.push('\\') - } - output.push(c) - } - output.into() - } else { - s.into() + let output_len: usize = { + let mut escaper = MarkdownEscaper::new(); + s.bytes().map(|byte| escaper.next(byte).output_len()).sum() + }; + + if output_len == s.len() { + return s.into(); + } + + let mut escaper = MarkdownEscaper::new(); + let mut output = String::with_capacity(output_len); + for c in s.chars() { + escaper.next(c as u8).write_to(c, &mut output); } + output.into() } pub fn selected_text(&self) -> Option { @@ -3077,13 +3140,118 @@ mod tests { ); } + fn nbsp(n: usize) -> String { + "\u{00A0}".repeat(n) + } + + #[test] + fn test_escape_plain_text() { + assert_eq!(Markdown::escape("hello world"), "hello world"); + assert_eq!(Markdown::escape(""), ""); + assert_eq!(Markdown::escape("café ☕ naïve"), "café ☕ naïve"); + } + #[test] - fn test_escape() { - assert_eq!(Markdown::escape("hello `world`"), "hello \\`world\\`"); + fn test_escape_punctuation() { + assert_eq!(Markdown::escape("hello `world`"), r"hello \`world\`"); + assert_eq!(Markdown::escape("a|b"), r"a\|b"); + } + + #[test] + fn test_escape_leading_spaces() { + assert_eq!(Markdown::escape(" hello"), [ (4), "hello"].concat()); + assert_eq!( + Markdown::escape(" | { a: string }"), + [ (4), r"\| \{ a\: string \}"].concat() + ); assert_eq!( - Markdown::escape("hello\n cool world"), - "hello\n\ncool world" + Markdown::escape(" first\n second"), + [ (2), "first\n\n",  (2), "second"].concat() ); + assert_eq!(Markdown::escape("hello world"), "hello world"); + } + + #[test] + fn test_escape_leading_tabs() { + assert_eq!(Markdown::escape("\thello"), [ (4), 
"hello"].concat()); + assert_eq!( + Markdown::escape("hello\n\t\tindented"), + ["hello\n\n",  (8), "indented"].concat() + ); + assert_eq!( + Markdown::escape(" \t hello"), + [ (1 + 4 + 1), "hello"].concat() + ); + assert_eq!(Markdown::escape("hello\tworld"), "hello\tworld"); + } + + #[test] + fn test_escape_newlines() { + assert_eq!(Markdown::escape("a\nb"), "a\n\nb"); + assert_eq!(Markdown::escape("a\n\nb"), "a\n\n\n\nb"); + assert_eq!(Markdown::escape("\nhello"), "\n\nhello"); + } + + #[test] + fn test_escape_multiline_diagnostic() { + assert_eq!( + Markdown::escape(" | { a: string }\n | { b: number }"), + [ +  (4), + r"\| \{ a\: string \}", + "\n\n", +  (4), + r"\| \{ b\: number \}", + ] + .concat() + ); + } + + fn has_code_block(markdown: &str) -> bool { + let parsed_data = parse_markdown_with_options(markdown, false); + parsed_data + .events + .iter() + .any(|(_, event)| matches!(event, MarkdownEvent::Start(MarkdownTag::CodeBlock { .. }))) + } + + #[test] + fn test_escape_output_len_matches_precomputed() { + let cases = [ + "", + "hello world", + "hello `world`", + " hello", + " | { a: string }", + "\thello", + "hello\n\t\tindented", + " \t hello", + "hello\tworld", + "a\nb", + "a\n\nb", + "\nhello", + " | { a: string }\n | { b: number }", + "café ☕ naïve", + ]; + for input in cases { + let mut escaper = MarkdownEscaper::new(); + let precomputed: usize = input.bytes().map(|b| escaper.next(b).output_len()).sum(); + + let mut escaper = MarkdownEscaper::new(); + let mut output = String::new(); + for c in input.chars() { + escaper.next(c as u8).write_to(c, &mut output); + } + + assert_eq!(precomputed, output.len(), "length mismatch for {:?}", input); + } + } + + #[test] + fn test_escape_prevents_code_block() { + let diagnostic = " | { a: string }"; + assert!(has_code_block(diagnostic)); + assert!(!has_code_block(&Markdown::escape(diagnostic))); } #[track_caller] From 90fcf8539eacfefd604e1c95ffe242b149129f3d Mon Sep 17 00:00:00 2001 From: David Alecrim 
<35930364+davidalecrim1@users.noreply.github.com> Date: Fri, 3 Apr 2026 08:00:13 -0300 Subject: [PATCH 27/63] fuzzy: Fix crash with Unicode chars whose lowercase expands to multiple codepoints (#52989) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #52973 ## Problem The file picker crashes with `highlight index N is not a valid UTF-8 boundary` when file paths contain Unicode characters whose lowercase form expands to multiple codepoints. Turkish `İ` (U+0130) is the trigger here: Rust's `char::to_lowercase()` turns it into `i` + combining dot above (two codepoints). That expansion breaks the fuzzy matcher in two ways: 1. The `j_regular` index mapping mixes the expanded lowercase index space with the original character index space, so highlight positions land on invalid byte boundaries. 2. The scoring matrices are allocated with the expanded length but indexed with the original length as stride, so rows alias each other and corrupt stored values. Users with Turkish locale filenames were hitting this on v0.229.0 and v0.230.0 stable. ## Fix I went with simple 1:1 case mapping: a `simple_lowercase` helper in `char_bag.rs` that takes only the first codepoint from `to_lowercase()` and drops any trailing combining characters. For `İ` this gives `i`, which is what anyone would actually type in a search query. The same function is used in the matcher, the char bag pre-filter, and both query-lowercasing call sites (`paths.rs` and `strings.rs`). 
This gets rid of the `extra_lowercase_chars` BTreeMap, the `j_regular` adjustment, and the matrix sizing discrepancy. The matcher now works with a flat character array where `lowercase_candidate_chars.len() == candidate_chars.len()`, so there's no expanded-vs-original index space to get wrong. I also fixed `CharBag::insert`, which used `to_ascii_lowercase()` and silently ignored non-ASCII characters. A file like `aİbİcdef.txt` wouldn't show up when searching `ai` because `İ` was never registered as `i` in the bag. It now goes through `simple_lowercase` too. The alternative was keeping full case folding and fixing the index tracking with a `Vec` mapping expanded positions back to originals. That would work but keeps the dual-index-space complexity that caused these bugs, plus adds a per-candidate allocation for the mapping vector. ## Prior art fzf uses Go's `unicode.To(unicode.LowerCase, r)`, which is simple case mapping -- always one rune in, one rune out. `İ` maps to `i`, no expansion. VS Code's `String.toLowerCase()` does produce the expanded form, but the scorer compares UTF-16 code units independently and sidesteps the problem in practice. Neither tool maintains a mapping between expanded and original index spaces. ## Trade-off Searching for the combining dot above (U+0307) won't match `İ` in a path anymore. Nobody types combining characters in a file picker, and fzf doesn't support it either. ## Screenshot Screenshot 2026-04-02 at 09 56 34 Release Notes: - Fixed a crash and improved matching and highlighting in the file picker for paths with non-ASCII characters (e.g., Turkish İ, ß, fi). 
--------- Co-authored-by: Oleksiy Syvokon --- crates/fuzzy/src/char_bag.rs | 6 ++- crates/fuzzy/src/matcher.rs | 93 +++++++++++++++++++++++------------- crates/fuzzy/src/paths.rs | 9 ++-- crates/fuzzy/src/strings.rs | 3 +- 4 files changed, 73 insertions(+), 38 deletions(-) diff --git a/crates/fuzzy/src/char_bag.rs b/crates/fuzzy/src/char_bag.rs index 13b00816ed0141117fb6d5ac9265e4b82c7aa57d..1821a63793337862d9d6ad01a6a42072588d7be5 100644 --- a/crates/fuzzy/src/char_bag.rs +++ b/crates/fuzzy/src/char_bag.rs @@ -1,5 +1,9 @@ use std::iter::FromIterator; +pub fn simple_lowercase(c: char) -> char { + c.to_lowercase().next().unwrap_or(c) +} + #[derive(Copy, Clone, Debug, Default, PartialEq, Eq, Hash)] pub struct CharBag(u64); @@ -9,7 +13,7 @@ impl CharBag { } fn insert(&mut self, c: char) { - let c = c.to_ascii_lowercase(); + let c = simple_lowercase(c); if c.is_ascii_lowercase() { let mut count = self.0; let idx = c as u8 - b'a'; diff --git a/crates/fuzzy/src/matcher.rs b/crates/fuzzy/src/matcher.rs index 782c9caca832d81fb6e4bce8f49b4f310664b292..102708d2fad6b560b1a606c34246033587affdda 100644 --- a/crates/fuzzy/src/matcher.rs +++ b/crates/fuzzy/src/matcher.rs @@ -1,10 +1,9 @@ use std::{ borrow::Borrow, - collections::BTreeMap, sync::atomic::{self, AtomicBool}, }; -use crate::CharBag; +use crate::{CharBag, char_bag::simple_lowercase}; const BASE_DISTANCE_PENALTY: f64 = 0.6; const ADDITIONAL_DISTANCE_PENALTY: f64 = 0.05; @@ -69,7 +68,6 @@ impl<'a> Matcher<'a> { { let mut candidate_chars = Vec::new(); let mut lowercase_candidate_chars = Vec::new(); - let mut extra_lowercase_chars = BTreeMap::new(); for candidate in candidates { if !candidate.borrow().has_chars(self.query_char_bag) { @@ -82,14 +80,9 @@ impl<'a> Matcher<'a> { candidate_chars.clear(); lowercase_candidate_chars.clear(); - extra_lowercase_chars.clear(); - for (i, c) in candidate.borrow().candidate_chars().enumerate() { + for c in candidate.borrow().candidate_chars() { candidate_chars.push(c); - let mut 
char_lowercased = c.to_lowercase().collect::>(); - if char_lowercased.len() > 1 { - extra_lowercase_chars.insert(i, char_lowercased.len() - 1); - } - lowercase_candidate_chars.append(&mut char_lowercased); + lowercase_candidate_chars.push(simple_lowercase(c)); } if !self.find_last_positions(lowercase_prefix, &lowercase_candidate_chars) { @@ -108,7 +101,6 @@ impl<'a> Matcher<'a> { &lowercase_candidate_chars, prefix, lowercase_prefix, - &extra_lowercase_chars, ); if score > 0.0 { @@ -146,7 +138,6 @@ impl<'a> Matcher<'a> { path_lowercased: &[char], prefix: &[char], lowercase_prefix: &[char], - extra_lowercase_chars: &BTreeMap, ) -> f64 { let score = self.recursive_score_match( path, @@ -156,7 +147,6 @@ impl<'a> Matcher<'a> { 0, 0, self.query.len() as f64, - extra_lowercase_chars, ) * self.query.len() as f64; if score <= 0.0 { @@ -201,7 +191,6 @@ impl<'a> Matcher<'a> { query_idx: usize, path_idx: usize, cur_score: f64, - extra_lowercase_chars: &BTreeMap, ) -> f64 { if query_idx == self.query.len() { return 1.0; @@ -228,13 +217,6 @@ impl<'a> Matcher<'a> { let mut last_slash = 0; for j in path_idx..=safe_limit { - let extra_lowercase_chars_count = extra_lowercase_chars - .iter() - .take_while(|&(&i, _)| i < j) - .map(|(_, increment)| increment) - .sum::(); - let j_regular = j - extra_lowercase_chars_count; - let path_char = if j < prefix.len() { lowercase_prefix[j] } else { @@ -247,20 +229,20 @@ impl<'a> Matcher<'a> { let is_path_sep = path_char == '/'; if query_idx == 0 && is_path_sep { - last_slash = j_regular; + last_slash = j; } let need_to_score = query_char == path_char || (is_path_sep && query_char == '_'); if need_to_score { - let curr = match prefix.get(j_regular) { + let curr = match prefix.get(j) { Some(&curr) => curr, - None => path[j_regular - prefix.len()], + None => path[j - prefix.len()], }; let mut char_score = 1.0; if j > path_idx { - let last = match prefix.get(j_regular - 1) { + let last = match prefix.get(j - 1) { Some(&last) => last, - None => 
path[j_regular - 1 - prefix.len()], + None => path[j - 1 - prefix.len()], }; if last == '/' { @@ -316,12 +298,11 @@ impl<'a> Matcher<'a> { query_idx + 1, j + 1, next_score, - extra_lowercase_chars, ) * multiplier; if new_score > score { score = new_score; - best_position = j_regular; + best_position = j; // Optimization: can't score better than 1. if new_score == 1.0 { break; @@ -469,12 +450,12 @@ mod tests { assert_eq!( match_single_path_query("İo/oluş", false, &mixed_unicode_paths), - vec![("İolu/oluş", vec![0, 2, 4, 6, 8, 10, 12])] + vec![("İolu/oluş", vec![0, 2, 5, 6, 7, 8, 9])] ); assert_eq!( match_single_path_query("İst/code", false, &mixed_unicode_paths), - vec![("İstanbul/code", vec![0, 2, 4, 6, 8, 10, 12, 14])] + vec![("İstanbul/code", vec![0, 2, 3, 9, 10, 11, 12, 13])] ); assert_eq!( @@ -536,12 +517,60 @@ mod tests { ); } + #[test] + fn test_positions_are_valid_char_boundaries_with_expanding_lowercase() { + // İ (U+0130) lowercases to "i\u{307}" (2 chars) under full case folding. + // With simple case mapping (used by this matcher), İ → 'i' (1 char), + // so positions remain valid byte boundaries. + let paths = vec!["İstanbul/code.rs", "aİbİc/dİeİf.txt", "src/İmport/İndex.ts"]; + + for query in &["code", "İst", "dİe", "İndex", "İmport", "abcdef"] { + let results = match_single_path_query(query, false, &paths); + for (path, positions) in &results { + for &pos in positions { + assert!( + path.is_char_boundary(pos), + "Position {pos} is not a valid char boundary in path {path:?} \ + (query: {query:?}, all positions: {positions:?})" + ); + } + } + } + } + + #[test] + fn test_positions_valid_with_various_multibyte_chars() { + // German ß uppercases to SS but lowercases to itself — no expansion. + // Armenian ligatures and other characters that could expand under full + // case folding should still produce valid byte boundaries. 
+ let paths = vec![ + "straße/config.rs", + "Straße/München/file.txt", + "file/path.rs", // fi (U+FB01, fi ligature) + "ffoo/bar.txt", // ff (U+FB00, ff ligature) + "aÇbŞc/dÖeÜf.txt", // Turkish chars that don't expand + ]; + + for query in &["config", "Mün", "file", "bar", "abcdef", "straße", "ÇŞ"] { + let results = match_single_path_query(query, false, &paths); + for (path, positions) in &results { + for &pos in positions { + assert!( + path.is_char_boundary(pos), + "Position {pos} is not a valid char boundary in path {path:?} \ + (query: {query:?}, all positions: {positions:?})" + ); + } + } + } + } + fn match_single_path_query<'a>( query: &str, smart_case: bool, paths: &[&'a str], ) -> Vec<(&'a str, Vec)> { - let lowercase_query = query.to_lowercase().chars().collect::>(); + let lowercase_query = query.chars().map(simple_lowercase).collect::>(); let query = query.chars().collect::>(); let query_chars = CharBag::from(&lowercase_query[..]); @@ -551,7 +580,7 @@ mod tests { .collect::>(); let mut path_entries = Vec::new(); for (i, path) in paths.iter().enumerate() { - let lowercase_path = path.to_lowercase().chars().collect::>(); + let lowercase_path: Vec = path.chars().map(simple_lowercase).collect(); let char_bag = CharBag::from(lowercase_path.as_slice()); path_entries.push(PathMatchCandidate { is_dir: false, diff --git a/crates/fuzzy/src/paths.rs b/crates/fuzzy/src/paths.rs index cce0e082840c4cd05d6e2b21eac0073d3eb7700f..2f92f05b96a3be2da7053365d8a7c53722db6ab8 100644 --- a/crates/fuzzy/src/paths.rs +++ b/crates/fuzzy/src/paths.rs @@ -10,6 +10,7 @@ use util::{paths::PathStyle, rel_path::RelPath}; use crate::{ CharBag, + char_bag::simple_lowercase, matcher::{MatchCandidate, Matcher}, }; @@ -94,7 +95,7 @@ pub fn match_fixed_path_set( max_results: usize, path_style: PathStyle, ) -> Vec { - let lowercase_query = query.to_lowercase().chars().collect::>(); + let lowercase_query = query.chars().map(simple_lowercase).collect::>(); let query = query.chars().collect::>(); 
let query_char_bag = CharBag::from(&lowercase_query[..]); @@ -110,7 +111,7 @@ pub fn match_fixed_path_set( path_prefix_chars.extend(path_style.primary_separator().chars()); let lowercase_pfx = path_prefix_chars .iter() - .map(|c| c.to_ascii_lowercase()) + .map(|c| simple_lowercase(*c)) .collect::>(); (worktree_root_name, path_prefix_chars, lowercase_pfx) @@ -171,7 +172,7 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>( let lowercase_query = query .iter() - .map(|query| query.to_ascii_lowercase()) + .map(|query| simple_lowercase(*query)) .collect::>(); let query = &query; @@ -217,7 +218,7 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>( } let lowercase_prefix = prefix .iter() - .map(|c| c.to_ascii_lowercase()) + .map(|c| simple_lowercase(*c)) .collect::>(); matcher.match_candidates( &prefix, diff --git a/crates/fuzzy/src/strings.rs b/crates/fuzzy/src/strings.rs index 54539840cfb0ca251428d9f78d5d134f16afdf4c..fb191bd9dcadd81a5a9890032ef8b185cdf7342e 100644 --- a/crates/fuzzy/src/strings.rs +++ b/crates/fuzzy/src/strings.rs @@ -1,5 +1,6 @@ use crate::{ CharBag, + char_bag::simple_lowercase, matcher::{MatchCandidate, Matcher}, }; use gpui::BackgroundExecutor; @@ -141,7 +142,7 @@ where .collect(); } - let lowercase_query = query.to_lowercase().chars().collect::>(); + let lowercase_query = query.chars().map(simple_lowercase).collect::>(); let query = query.chars().collect::>(); let lowercase_query = &lowercase_query; From 0b984b5ade7604e3f1c618c0ef77879de800b868 Mon Sep 17 00:00:00 2001 From: Brandon Chinn Date: Fri, 3 Apr 2026 08:52:17 -0500 Subject: [PATCH 28/63] Ignore user config when checking remote git URL for dev extensions (#52538) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Context Fixes #48163 Also update the logic from `git remote -v` + manually parse => `git remote get-url origin` Not sure the best way to test this ## How to Review ## Self-Review Checklist - [x] I've 
reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - Fixed rebuilding dev extensions when user git config contains url rewriting rules --- crates/extension/src/extension_builder.rs | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/crates/extension/src/extension_builder.rs b/crates/extension/src/extension_builder.rs index 1c204398c34728cab6b05687050243b4a988902c..f0e789994127c9347c8eb6b8d16417ba7eaaf831 100644 --- a/crates/extension/src/extension_builder.rs +++ b/crates/extension/src/extension_builder.rs @@ -296,16 +296,12 @@ impl ExtensionBuilder { let remotes_output = util::command::new_command("git") .arg("--git-dir") .arg(&git_dir) - .args(["remote", "-v"]) + .args(["remote", "get-url", "origin"]) + .env("GIT_CONFIG_GLOBAL", "/dev/null") .output() .await?; let has_remote = remotes_output.status.success() - && String::from_utf8_lossy(&remotes_output.stdout) - .lines() - .any(|line| { - let mut parts = line.split(|c: char| c.is_whitespace()); - parts.next() == Some("origin") && parts.any(|part| part == url) - }); + && String::from_utf8_lossy(&remotes_output.stdout).trim() == url; if !has_remote { bail!( "grammar directory '{}' already exists, but is not a git clone of '{}'", From 45d6a9595ff1a29a558a86da2b02069f9dd8633f Mon Sep 17 00:00:00 2001 From: Eric Holk Date: Fri, 3 Apr 2026 09:23:52 -0700 Subject: [PATCH 29/63] Track project groups in MultiWorkspace (#53032) This PR adds tracking of project groups to the MultiWorkspace and serialization/restoration of them. This will later be used by the sidebar to provide reliable reloading of threads across Zed reloads. 
Release Notes: - N/A --------- Co-authored-by: Max Brunsfeld Co-authored-by: Mikayla Maki --- crates/git_ui/src/worktree_picker.rs | 4 +- crates/project/src/project.rs | 63 +++++ crates/project_panel/src/project_panel.rs | 2 +- crates/recent_projects/src/recent_projects.rs | 16 +- crates/recent_projects/src/wsl_picker.rs | 2 +- crates/remote/src/remote_client.rs | 2 +- crates/remote/src/transport/docker.rs | 13 +- crates/remote/src/transport/mock.rs | 2 +- crates/remote/src/transport/ssh.rs | 4 +- crates/remote/src/transport/wsl.rs | 4 +- crates/sidebar/src/project_group_builder.rs | 61 +---- crates/sidebar/src/sidebar.rs | 9 + crates/util/src/path_list.rs | 2 +- crates/workspace/src/multi_workspace.rs | 89 +++---- crates/workspace/src/multi_workspace_tests.rs | 247 ++++++++++++++---- crates/workspace/src/persistence.rs | 2 + crates/workspace/src/persistence/model.rs | 25 +- crates/workspace/src/welcome.rs | 2 +- crates/workspace/src/workspace.rs | 19 +- 19 files changed, 382 insertions(+), 186 deletions(-) diff --git a/crates/git_ui/src/worktree_picker.rs b/crates/git_ui/src/worktree_picker.rs index 2f600ae4c5620aa0d60cfc96b2d2c767b115f8aa..1b4497be1f4ea96bd4f0431c97bb538eda9faa57 100644 --- a/crates/git_ui/src/worktree_picker.rs +++ b/crates/git_ui/src/worktree_picker.rs @@ -364,7 +364,7 @@ impl WorktreeListDelegate { workspace .update_in(cx, |workspace, window, cx| { workspace.open_workspace_for_paths( - OpenMode::Replace, + OpenMode::Activate, vec![new_worktree_path], window, cx, @@ -418,7 +418,7 @@ impl WorktreeListDelegate { return; }; let open_mode = if replace_current_window { - OpenMode::Replace + OpenMode::Activate } else { OpenMode::NewWindow }; diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index bbfa7ffe208c198e76a9838695765c912977385d..41f57299835f37b001575b682118aa17a6516ad9 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -2349,6 +2349,22 @@ impl Project { .find(|tree| 
tree.read(cx).root_name() == root_name) } + pub fn project_group_key(&self, cx: &App) -> ProjectGroupKey { + let roots = self + .visible_worktrees(cx) + .map(|worktree| { + let snapshot = worktree.read(cx).snapshot(); + snapshot + .root_repo_common_dir() + .and_then(|dir| Some(dir.parent()?.to_path_buf())) + .unwrap_or(snapshot.abs_path().to_path_buf()) + }) + .collect::>(); + let host = self.remote_connection_options(cx); + let path_list = PathList::new(&roots); + ProjectGroupKey::new(host, path_list) + } + #[inline] pub fn worktree_root_names<'a>(&'a self, cx: &'a App) -> impl Iterator { self.visible_worktrees(cx) @@ -6018,6 +6034,53 @@ impl Project { } } +/// Identifies a project group by a set of paths the workspaces in this group +/// have. +/// +/// Paths are mapped to their main worktree path first so we can group +/// workspaces by main repos. +#[derive(PartialEq, Eq, Hash, Clone, Debug)] +pub struct ProjectGroupKey { + paths: PathList, + host: Option, +} + +impl ProjectGroupKey { + /// Creates a new `ProjectGroupKey` with the given path list. + /// + /// The path list should point to the git main worktree paths for a project. + /// + /// This should be used only in a few places to make sure we can ensure the + /// main worktree path invariant. Namely, this should only be called from + /// [`Workspace`]. + pub(crate) fn new(host: Option, paths: PathList) -> Self { + Self { paths, host } + } + + pub fn display_name(&self) -> SharedString { + let mut names = Vec::with_capacity(self.paths.paths().len()); + for abs_path in self.paths.paths() { + if let Some(name) = abs_path.file_name() { + names.push(name.to_string_lossy().to_string()); + } + } + if names.is_empty() { + // TODO: Can we do something better in this case? 
+ "Empty Workspace".into() + } else { + names.join(", ").into() + } + } + + pub fn path_list(&self) -> &PathList { + &self.paths + } + + pub fn host(&self) -> Option { + self.host.clone() + } +} + pub struct PathMatchCandidateSet { pub snapshot: Snapshot, pub include_ignored: bool, diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index e9062364fc73ed6e266e3f8904be51eaaf5b6535..c2f1bb7131ad31ea75aee84bad17b7971d489a09 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -7126,7 +7126,7 @@ impl Render for ProjectPanel { .workspace .update(cx, |workspace, cx| { workspace.open_workspace_for_paths( - OpenMode::Replace, + OpenMode::Activate, external_paths.paths().to_owned(), window, cx, diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index b3f918e204c5600193cd01a0f7569888d333edd9..dc952764056f6465840825d2a1f0fce886f401c0 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -1160,7 +1160,7 @@ impl PickerDelegate for RecentProjectsDelegate { .update(cx, |multi_workspace, window, cx| { multi_workspace.open_project( paths, - OpenMode::Replace, + OpenMode::Activate, window, cx, ) @@ -2122,14 +2122,12 @@ mod tests { cx.dispatch_action(*multi_workspace, menu::Confirm); cx.run_until_parked(); - // prepare_to_close triggers a save prompt for the dirty buffer. - // Choose "Don't Save" (index 2) to discard and continue replacing. + // In multi-workspace mode, the dirty workspace is kept and a new one is + // opened alongside it — no save prompt needed. 
assert!( - cx.has_pending_prompt(), - "Should prompt to save dirty buffer before replacing workspace" + !cx.has_pending_prompt(), + "Should not prompt in multi-workspace mode — dirty workspace is kept" ); - cx.simulate_prompt_answer("Don't Save"); - cx.run_until_parked(); multi_workspace .update(cx, |multi_workspace, _, cx| { @@ -2143,8 +2141,8 @@ mod tests { ); assert!( - !multi_workspace.workspaces().contains(&dirty_workspace), - "The original dirty workspace should have been replaced" + multi_workspace.workspaces().contains(&dirty_workspace), + "The dirty workspace should still be present in multi-workspace mode" ); assert!( diff --git a/crates/recent_projects/src/wsl_picker.rs b/crates/recent_projects/src/wsl_picker.rs index 9c08c4f5f4941a80afdd2d9cbb6f2c51ee8ec754..c53dd7c3fb68bc087216764536506f85117ffb36 100644 --- a/crates/recent_projects/src/wsl_picker.rs +++ b/crates/recent_projects/src/wsl_picker.rs @@ -246,7 +246,7 @@ impl WslOpenModal { false => !secondary, }; let open_mode = if replace_current_window { - workspace::OpenMode::Replace + workspace::OpenMode::Activate } else { workspace::OpenMode::NewWindow }; diff --git a/crates/remote/src/remote_client.rs b/crates/remote/src/remote_client.rs index f31fc9ebec028b6a42a7cbc0d61cf9574a4a0f3c..e746d82aac857d3174a4bab14c937a7538b2f1b4 100644 --- a/crates/remote/src/remote_client.rs +++ b/crates/remote/src/remote_client.rs @@ -1273,7 +1273,7 @@ impl ConnectionPool { } } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)] pub enum RemoteConnectionOptions { Ssh(SshConnectionOptions), Wsl(WslConnectionOptions), diff --git a/crates/remote/src/transport/docker.rs b/crates/remote/src/transport/docker.rs index eddfa1216927dffa88f63c00c2e373233b426e83..6322cd9193d383cfcd3e9ff5cb93670bcd136023 100644 --- a/crates/remote/src/transport/docker.rs +++ b/crates/remote/src/transport/docker.rs @@ -30,7 +30,18 @@ use crate::{ 
transport::parse_platform, }; -#[derive(Debug, Default, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] +#[derive( + Debug, + Default, + Clone, + PartialEq, + Eq, + Hash, + PartialOrd, + Ord, + serde::Serialize, + serde::Deserialize, +)] pub struct DockerConnectionOptions { pub name: String, pub container_id: String, diff --git a/crates/remote/src/transport/mock.rs b/crates/remote/src/transport/mock.rs index 06e13196583fef9743e3f337bfe9cd9acf0efbca..f567d24eb122f72b4dbb79cdeb2c98c744f02da4 100644 --- a/crates/remote/src/transport/mock.rs +++ b/crates/remote/src/transport/mock.rs @@ -56,7 +56,7 @@ use std::{ use util::paths::{PathStyle, RemotePathBuf}; /// Unique identifier for a mock connection. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)] pub struct MockConnectionOptions { pub id: u64, } diff --git a/crates/remote/src/transport/ssh.rs b/crates/remote/src/transport/ssh.rs index 42cfc8f86dc34712e6b2cd0e4b5d8f379e443834..1884ea43b6492efba91623eb1ab4c5a1ed4d3de1 100644 --- a/crates/remote/src/transport/ssh.rs +++ b/crates/remote/src/transport/ssh.rs @@ -45,7 +45,7 @@ pub(crate) struct SshRemoteConnection { _temp_dir: TempDir, } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)] pub enum SshConnectionHost { IpAddr(IpAddr), Hostname(String), @@ -102,7 +102,7 @@ fn bracket_ipv6(host: &str) -> String { } } -#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Default, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)] pub struct SshConnectionOptions { pub host: SshConnectionHost, pub username: Option, diff --git a/crates/remote/src/transport/wsl.rs b/crates/remote/src/transport/wsl.rs index 5a37e1c65bfe11221b60499779c57f0ce7dca364..1bbbaca2235c0bcf14c414a9419ab9dd92b4e814 100644 --- a/crates/remote/src/transport/wsl.rs +++ b/crates/remote/src/transport/wsl.rs @@ -28,7 +28,9 @@ 
use util::{ shell_builder::ShellBuilder, }; -#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Deserialize, schemars::JsonSchema)] +#[derive( + Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize, schemars::JsonSchema, +)] pub struct WslConnectionOptions { pub distro_name: String, pub user: Option, diff --git a/crates/sidebar/src/project_group_builder.rs b/crates/sidebar/src/project_group_builder.rs index 9d06c7d31f1e1b34676db84a4f8e50131897f94d..20919647c185ce7014f056a99bb9c85ae595c560 100644 --- a/crates/sidebar/src/project_group_builder.rs +++ b/crates/sidebar/src/project_group_builder.rs @@ -9,46 +9,14 @@ //! lookup and mapping. use collections::{HashMap, HashSet, vecmap::VecMap}; +use gpui::{App, Entity}; +use project::ProjectGroupKey; use std::{ path::{Path, PathBuf}, sync::Arc, }; - -use gpui::{App, Entity}; -use ui::SharedString; use workspace::{MultiWorkspace, PathList, Workspace}; -/// Identifies a project group by a set of paths the workspaces in this group -/// have. -/// -/// Paths are mapped to their main worktree path first so we can group -/// workspaces by main repos. -#[derive(PartialEq, Eq, Hash, Clone)] -pub struct ProjectGroupName { - path_list: PathList, -} - -impl ProjectGroupName { - pub fn display_name(&self) -> SharedString { - let mut names = Vec::with_capacity(self.path_list.paths().len()); - for abs_path in self.path_list.paths() { - if let Some(name) = abs_path.file_name() { - names.push(name.to_string_lossy().to_string()); - } - } - if names.is_empty() { - // TODO: Can we do something better in this case? 
- "Empty Workspace".into() - } else { - names.join(", ").into() - } - } - - pub fn path_list(&self) -> &PathList { - &self.path_list - } -} - #[derive(Default)] pub struct ProjectGroup { pub workspaces: Vec>, @@ -88,7 +56,7 @@ impl ProjectGroup { pub struct ProjectGroupBuilder { /// Maps git repositories' work_directory_abs_path to their original_repo_abs_path directory_mappings: HashMap, - project_groups: VecMap, + project_groups: VecMap, } impl ProjectGroupBuilder { @@ -111,7 +79,7 @@ impl ProjectGroupBuilder { // Second pass: group each workspace using canonical paths derived // from the full set of mappings. for workspace in mw.workspaces() { - let group_name = builder.canonical_workspace_paths(workspace, cx); + let group_name = workspace.read(cx).project_group_key(cx); builder .project_group_entry(&group_name) .add_workspace(workspace, cx); @@ -119,7 +87,7 @@ impl ProjectGroupBuilder { builder } - fn project_group_entry(&mut self, name: &ProjectGroupName) -> &mut ProjectGroup { + fn project_group_entry(&mut self, name: &ProjectGroupKey) -> &mut ProjectGroup { self.project_groups.entry_ref(name).or_insert_default() } @@ -150,23 +118,6 @@ impl ProjectGroupBuilder { } } - /// Derives the canonical group name for a workspace by canonicalizing - /// each of its root paths using the builder's directory mappings. 
- fn canonical_workspace_paths( - &self, - workspace: &Entity, - cx: &App, - ) -> ProjectGroupName { - let root_paths = workspace.read(cx).root_paths(cx); - let paths: Vec<_> = root_paths - .iter() - .map(|p| self.canonicalize_path(p).to_path_buf()) - .collect(); - ProjectGroupName { - path_list: PathList::new(&paths), - } - } - pub fn canonicalize_path<'a>(&'a self, path: &'a Path) -> &'a Path { self.directory_mappings .get(path) @@ -203,7 +154,7 @@ impl ProjectGroupBuilder { PathList::new(&paths) } - pub fn groups(&self) -> impl Iterator { + pub fn groups(&self) -> impl Iterator { self.project_groups.iter() } } diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs index 7d7786fd59087f7d78088ae4517933ad089e8584..6816898ffc55bbf81b2c17719b3bde6eb8b58e68 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -3829,6 +3829,15 @@ pub fn dump_workspace_info( .map(|mw| mw.read(cx).active_workspace_index()); writeln!(output, "MultiWorkspace: {} workspace(s)", workspaces.len()).ok(); + + if let Some(mw) = &multi_workspace { + let keys: Vec<_> = mw.read(cx).project_group_keys().cloned().collect(); + writeln!(output, "Project group keys ({}):", keys.len()).ok(); + for key in keys { + writeln!(output, " - {key:?}").ok(); + } + } + if let Some(index) = active_index { writeln!(output, "Active workspace index: {index}").ok(); } diff --git a/crates/util/src/path_list.rs b/crates/util/src/path_list.rs index 0ea8bce6face2c248239c92e43a14ed010fb0c6e..47ade219c6bd4a2217f7ac00ecccfd92fe64c199 100644 --- a/crates/util/src/path_list.rs +++ b/crates/util/src/path_list.rs @@ -38,7 +38,7 @@ impl Hash for PathList { } } -#[derive(Debug, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] pub struct SerializedPathList { pub paths: String, pub order: String, diff --git a/crates/workspace/src/multi_workspace.rs b/crates/workspace/src/multi_workspace.rs index 
10a5ce70ead2d5aea7cc21a9af53ee9f216859c3..6aa369774b63dd0d250ba67ba4a5b69a335a2de9 100644 --- a/crates/workspace/src/multi_workspace.rs +++ b/crates/workspace/src/multi_workspace.rs @@ -5,9 +5,9 @@ use gpui::{ ManagedView, MouseButton, Pixels, Render, Subscription, Task, Tiling, Window, WindowId, actions, deferred, px, }; -use project::DisableAiSettings; #[cfg(any(test, feature = "test-support"))] use project::Project; +use project::{DisableAiSettings, ProjectGroupKey}; use settings::Settings; pub use settings::SidebarSide; use std::future::Future; @@ -26,6 +26,7 @@ const SIDEBAR_RESIZE_HANDLE_SIZE: Pixels = px(6.0); use crate::{ CloseIntent, CloseWindow, DockPosition, Event as WorkspaceEvent, Item, ModalView, OpenMode, Panel, Workspace, WorkspaceId, client_side_decorations, + persistence::model::MultiWorkspaceState, }; actions!( @@ -222,6 +223,7 @@ pub struct MultiWorkspace { window_id: WindowId, workspaces: Vec>, active_workspace_index: usize, + project_group_keys: Vec, sidebar: Option>, sidebar_open: bool, sidebar_overlay: Option, @@ -269,6 +271,7 @@ impl MultiWorkspace { }); Self { window_id: window.window_handle().window_id(), + project_group_keys: vec![workspace.read(cx).project_group_key(cx)], workspaces: vec![workspace], active_workspace_index: 0, sidebar: None, @@ -438,6 +441,20 @@ impl MultiWorkspace { window: &Window, cx: &mut Context, ) { + let project = workspace.read(cx).project().clone(); + cx.subscribe_in(&project, window, { + let workspace = workspace.downgrade(); + move |this, _project, event, _window, cx| match event { + project::Event::WorktreeAdded(_) | project::Event::WorktreeRemoved(_) => { + if let Some(workspace) = workspace.upgrade() { + this.add_project_group_key(workspace.read(cx).project_group_key(cx)); + } + } + _ => {} + } + }) + .detach(); + cx.subscribe_in(workspace, window, |this, workspace, event, window, cx| { if let WorkspaceEvent::Activate = event { this.activate(workspace.clone(), window, cx); @@ -446,6 +463,17 @@ impl 
MultiWorkspace { .detach(); } + pub fn add_project_group_key(&mut self, project_group_key: ProjectGroupKey) { + if self.project_group_keys.contains(&project_group_key) { + return; + } + self.project_group_keys.push(project_group_key); + } + + pub fn project_group_keys(&self) -> impl Iterator { + self.project_group_keys.iter() + } + pub fn workspace(&self) -> &Entity { &self.workspaces[self.active_workspace_index] } @@ -492,48 +520,6 @@ impl MultiWorkspace { cx.notify(); } - /// Replaces the currently active workspace with a new one. If the - /// workspace is already in the list, this just switches to it. - pub fn replace( - &mut self, - workspace: Entity, - window: &Window, - cx: &mut Context, - ) { - if !self.multi_workspace_enabled(cx) { - self.set_single_workspace(workspace, cx); - return; - } - - if let Some(index) = self.workspaces.iter().position(|w| *w == workspace) { - let changed = self.active_workspace_index != index; - self.active_workspace_index = index; - if changed { - cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged); - self.serialize(cx); - } - cx.notify(); - return; - } - - let old_workspace = std::mem::replace( - &mut self.workspaces[self.active_workspace_index], - workspace.clone(), - ); - - let old_entity_id = old_workspace.entity_id(); - self.detach_workspace(&old_workspace, cx); - - Self::subscribe_to_workspace(&workspace, window, cx); - self.sync_sidebar_to_workspace(&workspace, cx); - - cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(old_entity_id)); - cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace)); - cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged); - self.serialize(cx); - cx.notify(); - } - fn set_single_workspace(&mut self, workspace: Entity, cx: &mut Context) { self.workspaces[0] = workspace; self.active_workspace_index = 0; @@ -553,12 +539,16 @@ impl MultiWorkspace { if let Some(index) = self.workspaces.iter().position(|w| *w == workspace) { index } else { + let project_group_key = 
workspace.read(cx).project().read(cx).project_group_key(cx); + Self::subscribe_to_workspace(&workspace, window, cx); self.sync_sidebar_to_workspace(&workspace, cx); let weak_self = cx.weak_entity(); workspace.update(cx, |workspace, cx| { workspace.set_multi_workspace(weak_self, cx); }); + + self.add_project_group_key(project_group_key); self.workspaces.push(workspace.clone()); cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace)); cx.notify(); @@ -625,8 +615,13 @@ impl MultiWorkspace { self._serialize_task = Some(cx.spawn(async move |this, cx| { let Some((window_id, state)) = this .read_with(cx, |this, cx| { - let state = crate::persistence::model::MultiWorkspaceState { + let state = MultiWorkspaceState { active_workspace_id: this.workspace().read(cx).database_id(), + project_group_keys: this + .project_group_keys() + .cloned() + .map(Into::into) + .collect::>(), sidebar_open: this.sidebar_open, sidebar_state: this.sidebar.as_ref().and_then(|s| s.serialized_state(cx)), }; @@ -894,6 +889,7 @@ impl MultiWorkspace { }); } + // TODO: Move group to a new window? 
fn move_active_workspace_to_new_window( &mut self, _: &MoveWorkspaceToNewWindow, @@ -913,12 +909,11 @@ impl MultiWorkspace { ) -> Task>> { let workspace = self.workspace().clone(); - let needs_close_prompt = - open_mode == OpenMode::Replace || !self.multi_workspace_enabled(cx); + let needs_close_prompt = !self.multi_workspace_enabled(cx); let open_mode = if self.multi_workspace_enabled(cx) { open_mode } else { - OpenMode::Replace + OpenMode::Activate }; if needs_close_prompt { diff --git a/crates/workspace/src/multi_workspace_tests.rs b/crates/workspace/src/multi_workspace_tests.rs index 50161121719ec7b2835fd11e389f24860e57d8f5..3083c23f6e3add91b0389a961567fc88e2043678 100644 --- a/crates/workspace/src/multi_workspace_tests.rs +++ b/crates/workspace/src/multi_workspace_tests.rs @@ -2,7 +2,8 @@ use super::*; use feature_flags::FeatureFlagAppExt; use fs::FakeFs; use gpui::TestAppContext; -use project::DisableAiSettings; +use project::{DisableAiSettings, ProjectGroupKey}; +use serde_json::json; use settings::SettingsStore; fn init_test(cx: &mut TestAppContext) { @@ -87,86 +88,232 @@ async fn test_sidebar_disabled_when_disable_ai_is_enabled(cx: &mut TestAppContex } #[gpui::test] -async fn test_replace(cx: &mut TestAppContext) { +async fn test_project_group_keys_initial(cx: &mut TestAppContext) { init_test(cx); let fs = FakeFs::new(cx.executor()); - let project_a = Project::test(fs.clone(), [], cx).await; - let project_b = Project::test(fs.clone(), [], cx).await; - let project_c = Project::test(fs.clone(), [], cx).await; - let project_d = Project::test(fs.clone(), [], cx).await; + fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; + let project = Project::test(fs, ["/root_a".as_ref()], cx).await; + + let expected_key = project.read_with(cx, |project, cx| project.project_group_key(cx)); let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + cx.add_window_view(|window, cx| 
MultiWorkspace::test_new(project, window, cx)); + + multi_workspace.read_with(cx, |mw, _cx| { + let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); + assert_eq!(keys.len(), 1, "should have exactly one key on creation"); + assert_eq!(*keys[0], expected_key); + }); +} - let workspace_a_id = multi_workspace.read_with(cx, |mw, _cx| mw.workspaces()[0].entity_id()); +#[gpui::test] +async fn test_project_group_keys_add_workspace(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; + fs.insert_tree("/root_b", json!({ "file.txt": "" })).await; + let project_a = Project::test(fs.clone(), ["/root_a".as_ref()], cx).await; + let project_b = Project::test(fs.clone(), ["/root_b".as_ref()], cx).await; - // Replace the only workspace (single-workspace case). - let workspace_b = multi_workspace.update_in(cx, |mw, window, cx| { - let workspace = cx.new(|cx| Workspace::test_new(project_b.clone(), window, cx)); - mw.replace(workspace.clone(), &*window, cx); - workspace + let key_a = project_a.read_with(cx, |p, cx| p.project_group_key(cx)); + let key_b = project_b.read_with(cx, |p, cx| p.project_group_key(cx)); + assert_ne!( + key_a, key_b, + "different roots should produce different keys" + ); + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx)); + + multi_workspace.read_with(cx, |mw, _cx| { + assert_eq!(mw.project_group_keys().count(), 1); + }); + + // Adding a workspace with a different project root adds a new key. 
+ multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_b, window, cx); }); multi_workspace.read_with(cx, |mw, _cx| { - assert_eq!(mw.workspaces().len(), 1); + let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); assert_eq!( - mw.workspaces()[0].entity_id(), - workspace_b.entity_id(), - "slot should now be project_b" - ); - assert_ne!( - mw.workspaces()[0].entity_id(), - workspace_a_id, - "project_a should be gone" + keys.len(), + 2, + "should have two keys after adding a second workspace" ); + assert_eq!(*keys[0], key_a); + assert_eq!(*keys[1], key_b); }); +} + +#[gpui::test] +async fn test_project_group_keys_duplicate_not_added(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; + let project_a = Project::test(fs.clone(), ["/root_a".as_ref()], cx).await; + // A second project entity pointing at the same path produces the same key. + let project_a2 = Project::test(fs.clone(), ["/root_a".as_ref()], cx).await; - // Add project_c as a second workspace, then replace it with project_d. 
- let workspace_c = multi_workspace.update_in(cx, |mw, window, cx| { - mw.test_add_workspace(project_c.clone(), window, cx) + let key_a = project_a.read_with(cx, |p, cx| p.project_group_key(cx)); + let key_a2 = project_a2.read_with(cx, |p, cx| p.project_group_key(cx)); + assert_eq!(key_a, key_a2, "same root path should produce the same key"); + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx)); + + multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_a2, window, cx); }); multi_workspace.read_with(cx, |mw, _cx| { - assert_eq!(mw.workspaces().len(), 2); - assert_eq!(mw.active_workspace_index(), 1); + let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); + assert_eq!( + keys.len(), + 1, + "duplicate key should not be added when a workspace with the same root is inserted" + ); }); +} - let workspace_d = multi_workspace.update_in(cx, |mw, window, cx| { - let workspace = cx.new(|cx| Workspace::test_new(project_d.clone(), window, cx)); - mw.replace(workspace.clone(), &*window, cx); - workspace - }); +#[gpui::test] +async fn test_project_group_keys_on_worktree_added(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; + fs.insert_tree("/root_b", json!({ "file.txt": "" })).await; + let project = Project::test(fs, ["/root_a".as_ref()], cx).await; + + let initial_key = project.read_with(cx, |p, cx| p.project_group_key(cx)); + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + // Add a second worktree to the same project. 
+ let (worktree, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/root_b", true, cx) + }) + .await + .unwrap(); + worktree + .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete()) + .await; + cx.run_until_parked(); + + let updated_key = project.read_with(cx, |p, cx| p.project_group_key(cx)); + assert_ne!( + initial_key, updated_key, + "key should change after adding a worktree" + ); multi_workspace.read_with(cx, |mw, _cx| { - assert_eq!(mw.workspaces().len(), 2, "should still have 2 workspaces"); - assert_eq!(mw.active_workspace_index(), 1); + let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); assert_eq!( - mw.workspaces()[1].entity_id(), - workspace_d.entity_id(), - "active slot should now be project_d" - ); - assert_ne!( - mw.workspaces()[1].entity_id(), - workspace_c.entity_id(), - "project_c should be gone" + keys.len(), + 2, + "should have both the original and updated key" ); + assert_eq!(*keys[0], initial_key); + assert_eq!(*keys[1], updated_key); }); +} - // Replace with workspace_b which is already in the list — should just switch. - multi_workspace.update_in(cx, |mw, window, cx| { - mw.replace(workspace_b.clone(), &*window, cx); +#[gpui::test] +async fn test_project_group_keys_on_worktree_removed(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; + fs.insert_tree("/root_b", json!({ "file.txt": "" })).await; + let project = Project::test(fs, ["/root_a".as_ref(), "/root_b".as_ref()], cx).await; + + let initial_key = project.read_with(cx, |p, cx| p.project_group_key(cx)); + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + // Remove one worktree. 
+ let worktree_b_id = project.read_with(cx, |project, cx| { + project + .worktrees(cx) + .find(|wt| wt.read(cx).root_name().as_unix_str() == "root_b") + .unwrap() + .read(cx) + .id() + }); + project.update(cx, |project, cx| { + project.remove_worktree(worktree_b_id, cx); }); + cx.run_until_parked(); + + let updated_key = project.read_with(cx, |p, cx| p.project_group_key(cx)); + assert_ne!( + initial_key, updated_key, + "key should change after removing a worktree" + ); multi_workspace.read_with(cx, |mw, _cx| { + let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); assert_eq!( - mw.workspaces().len(), + keys.len(), 2, - "no workspace should be added or removed" + "should accumulate both the original and post-removal key" ); + assert_eq!(*keys[0], initial_key); + assert_eq!(*keys[1], updated_key); + }); +} + +#[gpui::test] +async fn test_project_group_keys_across_multiple_workspaces_and_worktree_changes( + cx: &mut TestAppContext, +) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; + fs.insert_tree("/root_b", json!({ "file.txt": "" })).await; + fs.insert_tree("/root_c", json!({ "file.txt": "" })).await; + let project_a = Project::test(fs.clone(), ["/root_a".as_ref()], cx).await; + let project_b = Project::test(fs.clone(), ["/root_b".as_ref()], cx).await; + + let key_a = project_a.read_with(cx, |p, cx| p.project_group_key(cx)); + let key_b = project_b.read_with(cx, |p, cx| p.project_group_key(cx)); + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + + multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_b, window, cx); + }); + + multi_workspace.read_with(cx, |mw, _cx| { + assert_eq!(mw.project_group_keys().count(), 2); + }); + + // Now add a worktree to project_a. This should produce a third key. 
+ let (worktree, _) = project_a + .update(cx, |project, cx| { + project.find_or_create_worktree("/root_c", true, cx) + }) + .await + .unwrap(); + worktree + .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete()) + .await; + cx.run_until_parked(); + + let key_a_updated = project_a.read_with(cx, |p, cx| p.project_group_key(cx)); + assert_ne!(key_a, key_a_updated); + + multi_workspace.read_with(cx, |mw, _cx| { + let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); assert_eq!( - mw.active_workspace_index(), - 0, - "should have switched to workspace_b" + keys.len(), + 3, + "should have key_a, key_b, and the updated key_a with root_c" ); + assert_eq!(*keys[0], key_a); + assert_eq!(*keys[1], key_b); + assert_eq!(*keys[2], key_a_updated); }); } diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index d38602ea768e8edc4f3de1ec439e67f0ee432a63..d9e440eb151bf7e8fc24f328b6ba73dc416a7c12 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -3993,6 +3993,7 @@ mod tests { window_10, MultiWorkspaceState { active_workspace_id: Some(WorkspaceId(2)), + project_group_keys: vec![], sidebar_open: true, sidebar_state: None, }, @@ -4004,6 +4005,7 @@ mod tests { window_20, MultiWorkspaceState { active_workspace_id: Some(WorkspaceId(3)), + project_group_keys: vec![], sidebar_open: false, sidebar_state: None, }, diff --git a/crates/workspace/src/persistence/model.rs b/crates/workspace/src/persistence/model.rs index 6b55d09ebbc2375f8cce3f2b81bc4f1aa9620e76..61fe3bc4861d9ebb000681d8b4f887c3a45feebe 100644 --- a/crates/workspace/src/persistence/model.rs +++ b/crates/workspace/src/persistence/model.rs @@ -13,7 +13,7 @@ use db::sqlez::{ use gpui::{AsyncWindowContext, Entity, WeakEntity, WindowId}; use language::{Toolchain, ToolchainScope}; -use project::{Project, debugger::breakpoint_store::SourceBreakpoint}; +use project::{Project, ProjectGroupKey, 
debugger::breakpoint_store::SourceBreakpoint}; use remote::RemoteConnectionOptions; use serde::{Deserialize, Serialize}; use std::{ @@ -21,7 +21,7 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; -use util::ResultExt; +use util::{ResultExt, path_list::SerializedPathList}; use uuid::Uuid; #[derive( @@ -36,7 +36,7 @@ pub(crate) enum RemoteConnectionKind { Docker, } -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, serde::Serialize, serde::Deserialize)] pub enum SerializedWorkspaceLocation { Local, Remote(RemoteConnectionOptions), @@ -59,11 +59,30 @@ pub struct SessionWorkspace { pub window_id: Option, } +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct SerializedProjectGroupKey { + pub path_list: SerializedPathList, + pub(crate) location: SerializedWorkspaceLocation, +} + +impl From for SerializedProjectGroupKey { + fn from(value: ProjectGroupKey) -> Self { + SerializedProjectGroupKey { + path_list: value.path_list().serialize(), + location: match value.host() { + Some(host) => SerializedWorkspaceLocation::Remote(host), + None => SerializedWorkspaceLocation::Local, + }, + } + } +} + /// Per-window state for a MultiWorkspace, persisted to KVP. 
#[derive(Debug, Clone, Default, serde::Serialize, serde::Deserialize)] pub struct MultiWorkspaceState { pub active_workspace_id: Option, pub sidebar_open: bool, + pub project_group_keys: Vec, #[serde(default)] pub sidebar_state: Option, } diff --git a/crates/workspace/src/welcome.rs b/crates/workspace/src/welcome.rs index efd9b75a6802f888f43654e21006f202cc36c5a4..dceca3e85f4308952563e689c608c92e9f77144f 100644 --- a/crates/workspace/src/welcome.rs +++ b/crates/workspace/src/welcome.rs @@ -326,7 +326,7 @@ impl WelcomePage { self.workspace .update(cx, |workspace, cx| { workspace - .open_workspace_for_paths(OpenMode::Replace, paths, window, cx) + .open_workspace_for_paths(OpenMode::Activate, paths, window, cx) .detach_and_log_err(cx); }) .log_err(); diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index ecc03806f7eeffbb62ad1340022e0ea475fe9531..e5b927cbbbc571966d2483e82d98ce61adb06cda 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -90,8 +90,8 @@ pub use persistence::{ }; use postage::stream::Stream; use project::{ - DirectoryLister, Project, ProjectEntryId, ProjectPath, ResolvedPath, Worktree, WorktreeId, - WorktreeSettings, + DirectoryLister, Project, ProjectEntryId, ProjectGroupKey, ProjectPath, ResolvedPath, Worktree, + WorktreeId, WorktreeSettings, debugger::{breakpoint_store::BreakpointStoreEvent, session::ThreadStatus}, project_settings::ProjectSettings, toolchain_store::ToolchainStoreEvent, @@ -672,7 +672,7 @@ fn prompt_and_open_paths(app_state: Arc, options: PathPromptOptions, c None, None, None, - OpenMode::Replace, + OpenMode::Activate, cx, ); cx.spawn(async move |cx| { @@ -713,7 +713,7 @@ pub fn prompt_for_open_path_and_open( if let Some(handle) = multi_workspace_handle { if let Some(task) = handle .update(cx, |multi_workspace, window, cx| { - multi_workspace.open_project(paths, OpenMode::Replace, window, cx) + multi_workspace.open_project(paths, OpenMode::Activate, window, cx) }) 
.log_err() { @@ -1380,8 +1380,6 @@ pub enum OpenMode { /// Add to the window's multi workspace and activate it. #[default] Activate, - /// Replace the currently active workspace, and any of it's linked workspaces - Replace, } impl Workspace { @@ -1921,9 +1919,6 @@ impl Workspace { workspace }); match open_mode { - OpenMode::Replace => { - multi_workspace.replace(workspace.clone(), &*window, cx); - } OpenMode::Activate => { multi_workspace.activate(workspace.clone(), window, cx); } @@ -2056,6 +2051,10 @@ impl Workspace { }) } + pub fn project_group_key(&self, cx: &App) -> ProjectGroupKey { + self.project.read(cx).project_group_key(cx) + } + pub fn weak_handle(&self) -> WeakEntity { self.weak_self.clone() } @@ -3409,7 +3408,7 @@ impl Workspace { let workspace_is_empty = !is_remote && !has_worktree && !has_dirty_items; if workspace_is_empty { - open_mode = OpenMode::Replace; + open_mode = OpenMode::Activate; } let app_state = self.app_state.clone(); From bde6a010686b1c18737548a5523b0e6938876bef Mon Sep 17 00:00:00 2001 From: Om Chillure Date: Fri, 3 Apr 2026 22:01:18 +0530 Subject: [PATCH 30/63] Fix/devcontainer compose labels (#53057) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [ ] Unsafe blocks (if any) have justifying comments - [ ] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #53042 Release Notes: - Fixed dev container failing to open when Docker Compose file contains `labels` --------- Co-authored-by: KyleBarton --- .../src/devcontainer_manifest.rs | 47 ++-- crates/dev_container/src/docker.rs | 208 +++++++++++++++++- 2 files changed, 225 insertions(+), 30 deletions(-) diff --git a/crates/dev_container/src/devcontainer_manifest.rs b/crates/dev_container/src/devcontainer_manifest.rs index 
1c2863f96118b5bac006f3a590da8cf8980994e2..8529604be9b1f3728b9638c2ca6852ff741c6ce2 100644 --- a/crates/dev_container/src/devcontainer_manifest.rs +++ b/crates/dev_container/src/devcontainer_manifest.rs @@ -1052,7 +1052,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true network_mode_service: Option<&str>, resources: DockerBuildResources, ) -> Result { - let mut runtime_labels = vec![]; + let mut runtime_labels = HashMap::new(); if let Some(metadata) = &resources.image.config.labels.metadata { let serialized_metadata = serde_json_lenient::to_string(metadata).map_err(|e| { @@ -1060,14 +1060,11 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true DevContainerError::ContainerNotValid(resources.image.id.clone()) })?; - runtime_labels.push(format!( - "{}={}", - "devcontainer.metadata", serialized_metadata - )); + runtime_labels.insert("devcontainer.metadata".to_string(), serialized_metadata); } for (k, v) in self.identifying_labels() { - runtime_labels.push(format!("{}={}", k, v)); + runtime_labels.insert(k.to_string(), v.to_string()); } let config_volumes: HashMap = resources @@ -2292,23 +2289,21 @@ fn get_remote_user_from_config( { return Ok(user.clone()); } - let Some(metadata) = &docker_config.config.labels.metadata else { - log::error!("Could not locate metadata"); - return Err(DevContainerError::ContainerNotValid( - docker_config.id.clone(), - )); - }; - for metadatum in metadata { - if let Some(remote_user) = metadatum.get("remoteUser") { - if let Some(remote_user_str) = remote_user.as_str() { - return Ok(remote_user_str.to_string()); + if let Some(metadata) = &docker_config.config.labels.metadata { + for metadatum in metadata { + if let Some(remote_user) = metadatum.get("remoteUser") { + if let Some(remote_user_str) = remote_user.as_str() { + return Ok(remote_user_str.to_string()); + } } } } - log::error!("Could not locate the remote user"); - Err(DevContainerError::ContainerNotValid( - 
docker_config.id.clone(), - )) + if let Some(image_user) = &docker_config.config.image_user { + if !image_user.is_empty() { + return Ok(image_user.to_string()); + } + } + Ok("root".to_string()) } // This should come from spec - see the docs @@ -2332,7 +2327,7 @@ fn get_container_user_from_config( return Ok(image_user.to_string()); } - Err(DevContainerError::DevContainerParseFailed) + Ok("root".to_string()) } #[cfg(test)] @@ -3526,11 +3521,11 @@ ENV DOCKER_BUILDKIT=1 cap_add: Some(vec!["SYS_PTRACE".to_string()]), security_opt: Some(vec!["seccomp=unconfined".to_string()]), privileged: Some(true), - labels: Some(vec![ - "devcontainer.metadata=[{\"remoteUser\":\"vscode\"}]".to_string(), - "devcontainer.local_folder=/path/to/local/project".to_string(), - "devcontainer.config_file=/path/to/local/project/.devcontainer/devcontainer.json".to_string() - ]), + labels: Some(HashMap::from([ + ("devcontainer.metadata".to_string(), "[{\"remoteUser\":\"vscode\"}]".to_string()), + ("devcontainer.local_folder".to_string(), "/path/to/local/project".to_string()), + ("devcontainer.config_file".to_string(), "/path/to/local/project/.devcontainer/devcontainer.json".to_string()) + ])), volumes: vec![ MountDefinition { source: "dind-var-lib-docker-42dad4b4ca7b8ced".to_string(), diff --git a/crates/dev_container/src/docker.rs b/crates/dev_container/src/docker.rs index 9594eae3d0faf67669e7d1ad487925b77a54fc34..1658acfadc059327e2e7b43d393324e9f37d42db 100644 --- a/crates/dev_container/src/docker.rs +++ b/crates/dev_container/src/docker.rs @@ -1,7 +1,7 @@ use std::{collections::HashMap, path::PathBuf}; use async_trait::async_trait; -use serde::{Deserialize, Deserializer, Serialize}; +use serde::{Deserialize, Deserializer, Serialize, de}; use util::command::Command; use crate::{ @@ -31,9 +31,10 @@ pub(crate) struct DockerInspect { pub(crate) state: Option, } -#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq)] +#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq, Default)] 
pub(crate) struct DockerConfigLabels { #[serde( + default, rename = "devcontainer.metadata", deserialize_with = "deserialize_metadata" )] @@ -43,6 +44,7 @@ pub(crate) struct DockerConfigLabels { #[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq)] #[serde(rename_all = "PascalCase")] pub(crate) struct DockerInspectConfig { + #[serde(default, deserialize_with = "deserialize_nullable_labels")] pub(crate) labels: DockerConfigLabels, #[serde(rename = "User")] pub(crate) image_user: Option, @@ -93,8 +95,12 @@ pub(crate) struct DockerComposeService { pub(crate) cap_add: Option>, #[serde(skip_serializing_if = "Option::is_none")] pub(crate) security_opt: Option>, - #[serde(skip_serializing_if = "Option::is_none")] - pub(crate) labels: Option>, + #[serde( + skip_serializing_if = "Option::is_none", + default, + deserialize_with = "deserialize_labels" + )] + pub(crate) labels: Option>, #[serde(skip_serializing_if = "Option::is_none")] pub(crate) build: Option, #[serde(skip_serializing_if = "Option::is_none")] @@ -118,6 +124,7 @@ pub(crate) struct DockerComposeConfig { #[serde(skip_serializing_if = "Option::is_none")] pub(crate) name: Option, pub(crate) services: HashMap, + #[serde(default)] pub(crate) volumes: HashMap, } @@ -355,6 +362,73 @@ pub(crate) trait DockerClient { fn docker_cli(&self) -> String; } +fn deserialize_labels<'de, D>(deserializer: D) -> Result>, D::Error> +where + D: Deserializer<'de>, +{ + struct LabelsVisitor; + + impl<'de> de::Visitor<'de> for LabelsVisitor { + type Value = Option>; + + fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { + formatter.write_str("a sequence of strings or a map of string key-value pairs") + } + + fn visit_seq(self, seq: A) -> Result + where + A: de::SeqAccess<'de>, + { + let values = Vec::::deserialize(de::value::SeqAccessDeserializer::new(seq))?; + + Ok(Some( + values + .iter() + .filter_map(|v| { + let parts: Vec<&str> = v.split("=").collect(); + if parts.len() != 2 { + None + } else { 
+ Some((parts[0].to_string(), parts[1].to_string())) + } + }) + .collect(), + )) + } + + fn visit_map(self, map: M) -> Result + where + M: de::MapAccess<'de>, + { + HashMap::::deserialize(de::value::MapAccessDeserializer::new(map)) + .map(|v| Some(v)) + } + + fn visit_none(self) -> Result + where + E: de::Error, + { + Ok(None) + } + + fn visit_unit(self) -> Result + where + E: de::Error, + { + Ok(None) + } + } + + deserializer.deserialize_any(LabelsVisitor) +} + +fn deserialize_nullable_labels<'de, D>(deserializer: D) -> Result +where + D: Deserializer<'de>, +{ + Option::::deserialize(deserializer).map(|opt| opt.unwrap_or_default()) +} + fn deserialize_metadata<'de, D>( deserializer: D, ) -> Result>>, D::Error> @@ -895,4 +969,130 @@ mod test { assert_eq!(docker_compose_config, expected_config); } + + #[test] + fn should_deserialize_compose_labels_as_map() { + let given_config = r#" + { + "name": "devcontainer", + "services": { + "app": { + "image": "node:22-alpine", + "volumes": [], + "labels": { + "com.example.test": "value", + "another.label": "another-value" + } + } + } + } + "#; + + let config: DockerComposeConfig = serde_json_lenient::from_str(given_config).unwrap(); + let service = config.services.get("app").unwrap(); + let labels = service.labels.clone().unwrap(); + assert_eq!( + labels, + HashMap::from([ + ("another.label".to_string(), "another-value".to_string()), + ("com.example.test".to_string(), "value".to_string()) + ]) + ); + } + + #[test] + fn should_deserialize_compose_labels_as_array() { + let given_config = r#" + { + "name": "devcontainer", + "services": { + "app": { + "image": "node:22-alpine", + "volumes": [], + "labels": ["com.example.test=value"] + } + } + } + "#; + + let config: DockerComposeConfig = serde_json_lenient::from_str(given_config).unwrap(); + let service = config.services.get("app").unwrap(); + assert_eq!( + service.labels, + Some(HashMap::from([( + "com.example.test".to_string(), + "value".to_string() + )])) + ); + } + + #[test] 
+ fn should_deserialize_compose_without_volumes() { + let given_config = r#" + { + "name": "devcontainer", + "services": { + "app": { + "image": "node:22-alpine", + "volumes": [] + } + } + } + "#; + + let config: DockerComposeConfig = serde_json_lenient::from_str(given_config).unwrap(); + assert!(config.volumes.is_empty()); + } + + #[test] + fn should_deserialize_inspect_without_labels() { + let given_config = r#" + { + "Id": "sha256:abc123", + "Config": { + "Env": ["PATH=/usr/bin"], + "Cmd": ["node"], + "WorkingDir": "/" + } + } + "#; + + let inspect: DockerInspect = serde_json_lenient::from_str(given_config).unwrap(); + assert!(inspect.config.labels.metadata.is_none()); + assert!(inspect.config.image_user.is_none()); + } + + #[test] + fn should_deserialize_inspect_with_null_labels() { + let given_config = r#" + { + "Id": "sha256:abc123", + "Config": { + "Labels": null, + "Env": ["PATH=/usr/bin"] + } + } + "#; + + let inspect: DockerInspect = serde_json_lenient::from_str(given_config).unwrap(); + assert!(inspect.config.labels.metadata.is_none()); + } + + #[test] + fn should_deserialize_inspect_with_labels_but_no_metadata() { + let given_config = r#" + { + "Id": "sha256:abc123", + "Config": { + "Labels": { + "com.example.test": "value" + }, + "Env": ["PATH=/usr/bin"] + } + } + "#; + + let inspect: DockerInspect = serde_json_lenient::from_str(given_config).unwrap(); + assert!(inspect.config.labels.metadata.is_none()); + } } From e93beb05c4a3236aba13b111d01fbd29550170f1 Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Fri, 3 Apr 2026 13:33:50 -0400 Subject: [PATCH 31/63] git_graph: Remove horizontal scrolling from canvas (#53082) I also added a keybinding to focus the search bar. 
Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #ISSUE Release Notes: - N/A or Added/Fixed/Improved ... --- crates/git_graph/src/git_graph.rs | 63 +++++++++---------------------- 1 file changed, 18 insertions(+), 45 deletions(-) diff --git a/crates/git_graph/src/git_graph.rs b/crates/git_graph/src/git_graph.rs index c56fb051b896f32ac364cd15e73ae8708498ca5a..3439ca9fabd75d15a0a32fc09751b35c4e18b5a1 100644 --- a/crates/git_graph/src/git_graph.rs +++ b/crates/git_graph/src/git_graph.rs @@ -25,7 +25,7 @@ use project::git_store::{ }; use search::{ SearchOption, SearchOptions, SearchSource, SelectNextMatch, SelectPreviousMatch, - ToggleCaseSensitive, + ToggleCaseSensitive, buffer_search, }; use settings::Settings; use smallvec::{SmallVec, smallvec}; @@ -275,6 +275,8 @@ actions!( [ /// Opens the commit view for the selected commit. OpenCommitView, + /// Focuses the search field. 
+ FocusSearch, ] ); @@ -833,8 +835,8 @@ pub fn init(cx: &mut App) { .detach(); } -fn lane_center_x(bounds: Bounds, lane: f32, horizontal_scroll_offset: Pixels) -> Pixels { - bounds.origin.x + LEFT_PADDING + lane * LANE_WIDTH + LANE_WIDTH / 2.0 - horizontal_scroll_offset +fn lane_center_x(bounds: Bounds, lane: f32) -> Pixels { + bounds.origin.x + LEFT_PADDING + lane * LANE_WIDTH + LANE_WIDTH / 2.0 } fn to_row_center( @@ -902,7 +904,6 @@ pub struct GitGraph { row_height: Pixels, table_interaction_state: Entity, column_widths: Entity, - horizontal_scroll_offset: Pixels, selected_entry_idx: Option, hovered_entry_idx: Option, graph_canvas_bounds: Rc>>>, @@ -980,14 +981,6 @@ impl GitGraph { .unwrap_or_else(|| self.graph_canvas_content_width()) } - fn clamp_horizontal_scroll_offset(&mut self, graph_viewport_width: Pixels) { - let max_horizontal_scroll = - (self.graph_canvas_content_width() - graph_viewport_width).max(px(0.)); - self.horizontal_scroll_offset = self - .horizontal_scroll_offset - .clamp(px(0.), max_horizontal_scroll); - } - pub fn new( repo_id: RepositoryId, git_store: Entity, @@ -1074,7 +1067,6 @@ impl GitGraph { row_height, table_interaction_state, column_widths, - horizontal_scroll_offset: px(0.), selected_entry_idx: None, hovered_entry_idx: None, graph_canvas_bounds: Rc::new(Cell::new(None)), @@ -2139,7 +2131,6 @@ impl GitGraph { let first_visible_row = (scroll_offset_y / row_height).floor() as usize; let vertical_scroll_offset = scroll_offset_y - (first_visible_row as f32 * row_height); - let horizontal_scroll_offset = self.horizontal_scroll_offset; let graph_viewport_width = self.graph_viewport_width(window, cx); let graph_width = if self.graph_canvas_content_width() > graph_viewport_width { @@ -2214,8 +2205,7 @@ impl GitGraph { bounds.origin.y + row_idx as f32 * row_height + row_height / 2.0 - vertical_scroll_offset; - let commit_x = - lane_center_x(bounds, row.lane as f32, horizontal_scroll_offset); + let commit_x = lane_center_x(bounds, row.lane as 
f32); draw_commit_circle(commit_x, row_y_center, row_color, window); } @@ -2227,8 +2217,7 @@ impl GitGraph { continue; }; - let line_x = - lane_center_x(bounds, start_column as f32, horizontal_scroll_offset); + let line_x = lane_center_x(bounds, start_column as f32); let start_row = line.full_interval.start as i32 - first_visible_row as i32; @@ -2273,11 +2262,7 @@ impl GitGraph { on_row, curve_kind, } => { - let mut to_column = lane_center_x( - bounds, - *to_column as f32, - horizontal_scroll_offset, - ); + let mut to_column = lane_center_x(bounds, *to_column as f32); let mut to_row = to_row_center( *on_row - first_visible_row, @@ -2470,25 +2455,8 @@ impl GitGraph { let new_y = (current_offset.y + delta.y).clamp(max_vertical_scroll, px(0.)); let new_offset = Point::new(current_offset.x, new_y); - let graph_viewport_width = self.graph_viewport_width(window, cx); - let max_horizontal_scroll = - (self.graph_canvas_content_width() - graph_viewport_width).max(px(0.)); - - let new_horizontal_offset = - (self.horizontal_scroll_offset - delta.x).clamp(px(0.), max_horizontal_scroll); - - let vertical_changed = new_offset != current_offset; - let horizontal_changed = new_horizontal_offset != self.horizontal_scroll_offset; - - if vertical_changed { + if new_offset != current_offset { table_state.set_scroll_offset(new_offset); - } - - if horizontal_changed { - self.horizontal_scroll_offset = new_horizontal_offset; - } - - if vertical_changed || horizontal_changed { cx.notify(); } } @@ -2553,8 +2521,6 @@ impl Render for GitGraph { cx, ); self.graph_data.add_commits(&commits); - let graph_viewport_width = self.graph_viewport_width(window, cx); - self.clamp_horizontal_scroll_offset(graph_viewport_width); (commits.len(), is_loading) }) } else { @@ -2600,8 +2566,6 @@ impl Render for GitGraph { let table_fraction = description_fraction + date_fraction + author_fraction + commit_fraction; let table_width_config = self.table_column_width_config(window, cx); - let graph_viewport_width 
= self.graph_viewport_width(window, cx); - self.clamp_horizontal_scroll_offset(graph_viewport_width); h_flex() .size_full() @@ -2806,6 +2770,11 @@ impl Render for GitGraph { this.open_selected_commit_view(window, cx); })) .on_action(cx.listener(Self::cancel)) + .on_action(cx.listener(|this, _: &FocusSearch, window, cx| { + this.search_state + .editor + .update(cx, |editor, cx| editor.focus_handle(cx).focus(window, cx)); + })) .on_action(cx.listener(Self::select_first)) .on_action(cx.listener(Self::select_prev)) .on_action(cx.listener(Self::select_next)) @@ -2837,6 +2806,10 @@ impl Render for GitGraph { ) .with_priority(1) })) + .on_action(cx.listener(|_, _: &buffer_search::Deploy, window, cx| { + window.dispatch_action(Box::new(FocusSearch), cx); + cx.stop_propagation(); + })) } } From d1e84f9d0acca392ebbd0e3a331dc3091fc9906e Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Fri, 3 Apr 2026 13:48:13 -0400 Subject: [PATCH 32/63] Document generate-action-metadata step for local docs preview (#53038) I needed to run the docs locally and ran into this error when following the [docs README.md](https://github.com/zed-industries/zed/blob/main/docs/README.md). ``` Error: Found 27 errors in docs 2026-04-01 10:15:39 [ERROR] (mdbook::utils): Error: The "zed_docs_preprocessor" preprocessor exited unsuccessfully with exit status: 1 status ``` It turns out I needed to run `script/generate-action-metadata` first. This PR adds that step to the doc. 
Self-Review Checklist: - [X] I've reviewed my own diff for quality, security, and reliability - [X] Unsafe blocks (if any) have justifying comments - [X] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [X] Tests cover the new/changed behavior - [X] Performance impact has been considered and is acceptable Closes #ISSUE Release Notes: - N/A --- docs/README.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/README.md b/docs/README.md index f03f008223ba1102585c34f3b98bf93a985c1284..38be153de34b7e32e410fa67710297cca653d699 100644 --- a/docs/README.md +++ b/docs/README.md @@ -4,12 +4,15 @@ Welcome to Zed's documentation. This is built on push to `main` and published automatically to [https://zed.dev/docs](https://zed.dev/docs). -To preview the docs locally you will need to install [mdBook](https://rust-lang.github.io/mdBook/) (`cargo install mdbook@0.4.40`) and then run: +To preview the docs locally you will need to install [mdBook](https://rust-lang.github.io/mdBook/) (`cargo install mdbook@0.4.40`), generate the action metadata, and then serve: ```sh +script/generate-action-metadata mdbook serve docs ``` +The first command dumps an action manifest to `crates/docs_preprocessor/actions.json`. Without it, the preprocessor cannot validate keybinding and action references in the docs and will report errors. You only need to re-run it when actions change. + It's important to note the version number above. For an unknown reason, as of 2025-04-23, running 0.4.48 will cause odd URL behavior that breaks things. 
Before committing, verify that the docs are formatted in the way Prettier expects with: From 5edb40c7a8dc718ef6eb29a3f6d109eeacb3a496 Mon Sep 17 00:00:00 2001 From: KyleBarton Date: Fri, 3 Apr 2026 11:51:31 -0700 Subject: [PATCH 33/63] Use an object for docker compose ports rather than raw string (#53090) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #53048 Release Notes: - Fixed serialization error with Docker Compose for dev containers --- .../src/devcontainer_manifest.rs | 77 +++++++++++++++---- crates/dev_container/src/docker.rs | 76 +++++++++++++++++- 2 files changed, 137 insertions(+), 16 deletions(-) diff --git a/crates/dev_container/src/devcontainer_manifest.rs b/crates/dev_container/src/devcontainer_manifest.rs index 8529604be9b1f3728b9638c2ca6852ff741c6ce2..0ba7e8c82a036477103e18db0940f8950fb875d2 100644 --- a/crates/dev_container/src/devcontainer_manifest.rs +++ b/crates/dev_container/src/devcontainer_manifest.rs @@ -20,7 +20,8 @@ use crate::{ }, docker::{ Docker, DockerClient, DockerComposeConfig, DockerComposeService, DockerComposeServiceBuild, - DockerComposeVolume, DockerInspect, DockerPs, get_remote_dir_from_config, + DockerComposeServicePort, DockerComposeVolume, DockerInspect, DockerPs, + get_remote_dir_from_config, }, features::{DevContainerFeatureJson, FeatureManifest, parse_oci_feature_ref}, get_oci_token, @@ -1137,18 +1138,30 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true // If the main service uses a different service's network bridge, append to that service's ports instead if let Some(network_service_name) = network_mode_service { if let Some(service) = 
service_declarations.get_mut(network_service_name) { - service.ports.push(format!("{port}:{port}")); + service.ports.push(DockerComposeServicePort { + target: port.clone(), + published: port.clone(), + ..Default::default() + }); } else { service_declarations.insert( network_service_name.to_string(), DockerComposeService { - ports: vec![format!("{port}:{port}")], + ports: vec![DockerComposeServicePort { + target: port.clone(), + published: port.clone(), + ..Default::default() + }], ..Default::default() }, ); } } else { - main_service.ports.push(format!("{port}:{port}")); + main_service.ports.push(DockerComposeServicePort { + target: port.clone(), + published: port.clone(), + ..Default::default() + }); } } let other_service_ports: Vec<(&str, &str)> = forward_ports @@ -1171,12 +1184,20 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true .collect(); for (service_name, port) in other_service_ports { if let Some(service) = service_declarations.get_mut(service_name) { - service.ports.push(format!("{port}:{port}")); + service.ports.push(DockerComposeServicePort { + target: port.to_string(), + published: port.to_string(), + ..Default::default() + }); } else { service_declarations.insert( service_name.to_string(), DockerComposeService { - ports: vec![format!("{port}:{port}")], + ports: vec![DockerComposeServicePort { + target: port.to_string(), + published: port.to_string(), + ..Default::default() + }], ..Default::default() }, ); @@ -1186,18 +1207,30 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true if let Some(port) = &self.dev_container().app_port { if let Some(network_service_name) = network_mode_service { if let Some(service) = service_declarations.get_mut(network_service_name) { - service.ports.push(format!("{port}:{port}")); + service.ports.push(DockerComposeServicePort { + target: port.clone(), + published: port.clone(), + ..Default::default() + }); } else { service_declarations.insert( 
network_service_name.to_string(), DockerComposeService { - ports: vec![format!("{port}:{port}")], + ports: vec![DockerComposeServicePort { + target: port.clone(), + published: port.clone(), + ..Default::default() + }], ..Default::default() }, ); } } else { - main_service.ports.push(format!("{port}:{port}")); + main_service.ports.push(DockerComposeServicePort { + target: port.clone(), + published: port.clone(), + ..Default::default() + }); } } @@ -3278,6 +3311,8 @@ chmod +x ./install.sh #[cfg(not(target_os = "windows"))] #[gpui::test] async fn test_spawns_devcontainer_with_docker_compose(cx: &mut TestAppContext) { + use crate::docker::DockerComposeServicePort; + cx.executor().allow_parking(); env_logger::try_init().ok(); let given_devcontainer_contents = r#" @@ -3540,10 +3575,26 @@ ENV DOCKER_BUILDKIT=1 "db".to_string(), DockerComposeService { ports: vec![ - "8083:8083".to_string(), - "5432:5432".to_string(), - "1234:1234".to_string(), - "8084:8084".to_string() + DockerComposeServicePort { + target: "8083".to_string(), + published: "8083".to_string(), + ..Default::default() + }, + DockerComposeServicePort { + target: "5432".to_string(), + published: "5432".to_string(), + ..Default::default() + }, + DockerComposeServicePort { + target: "1234".to_string(), + published: "1234".to_string(), + ..Default::default() + }, + DockerComposeServicePort { + target: "8084".to_string(), + published: "8084".to_string(), + ..Default::default() + }, ], ..Default::default() }, diff --git a/crates/dev_container/src/docker.rs b/crates/dev_container/src/docker.rs index 1658acfadc059327e2e7b43d393324e9f37d42db..9320ec360968425cf85644e96b12c1d089c1f05f 100644 --- a/crates/dev_container/src/docker.rs +++ b/crates/dev_container/src/docker.rs @@ -86,6 +86,43 @@ pub(crate) struct DockerComposeServiceBuild { pub(crate) additional_contexts: Option>, } +#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq, Default)] +pub(crate) struct DockerComposeServicePort { + #[serde(deserialize_with 
= "deserialize_string_or_int")] + pub(crate) target: String, + #[serde(deserialize_with = "deserialize_string_or_int")] + pub(crate) published: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) mode: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) protocol: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) host_ip: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) app_protocol: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) name: Option, +} + +fn deserialize_string_or_int<'de, D>(deserializer: D) -> Result +where + D: serde::Deserializer<'de>, +{ + use serde::Deserialize; + + #[derive(Deserialize)] + #[serde(untagged)] + enum StringOrInt { + String(String), + Int(u32), + } + + match StringOrInt::deserialize(deserializer)? { + StringOrInt::String(s) => Ok(s), + StringOrInt::Int(b) => Ok(b.to_string()), + } +} + #[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq, Default)] pub(crate) struct DockerComposeService { pub(crate) image: Option, @@ -109,7 +146,7 @@ pub(crate) struct DockerComposeService { #[serde(skip_serializing_if = "Option::is_none")] pub(crate) env_file: Option>, #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub(crate) ports: Vec, + pub(crate) ports: Vec, #[serde(skip_serializing_if = "Option::is_none")] pub(crate) network_mode: Option, } @@ -491,8 +528,8 @@ mod test { command_json::deserialize_json_output, devcontainer_json::MountDefinition, docker::{ - Docker, DockerComposeConfig, DockerComposeService, DockerComposeVolume, DockerInspect, - DockerPs, get_remote_dir_from_config, + Docker, DockerComposeConfig, DockerComposeService, DockerComposeServicePort, + DockerComposeVolume, DockerInspect, DockerPs, get_remote_dir_from_config, }, }; @@ -879,6 +916,22 @@ mod test { "POSTGRES_PORT": "5432", "POSTGRES_USER": "postgres" }, + "ports": [ + { + "target": "5443", + "published": "5442" + }, + { + "name": "custom 
port", + "protocol": "udp", + "host_ip": "127.0.0.1", + "app_protocol": "http", + "mode": "host", + "target": "8081", + "published": "8083" + + } + ], "image": "mcr.microsoft.com/devcontainers/rust:2-1-bookworm", "network_mode": "service:db", "volumes": [ @@ -943,6 +996,23 @@ mod test { target: "/workspaces".to_string(), }], network_mode: Some("service:db".to_string()), + + ports: vec![ + DockerComposeServicePort { + target: "5443".to_string(), + published: "5442".to_string(), + ..Default::default() + }, + DockerComposeServicePort { + target: "8081".to_string(), + published: "8083".to_string(), + mode: Some("host".to_string()), + protocol: Some("udp".to_string()), + host_ip: Some("127.0.0.1".to_string()), + app_protocol: Some("http".to_string()), + name: Some("custom port".to_string()), + }, + ], ..Default::default() }, ), From e4ebd3aae5c243087940f48de5cf27c46a569525 Mon Sep 17 00:00:00 2001 From: Oleksiy Syvokon Date: Fri, 3 Apr 2026 22:31:03 +0300 Subject: [PATCH 34/63] Fix crash in WgpuAtlas when viewing a screen share (#53088) When atlas tiles are rapidly allocated and freed (e.g. watching a shared screen in Collab), a texture can become unreferenced and be removed while GPU uploads for it are still pending. On the next frame, `flush_uploads` indexes into the now-empty texture slot and panics: ``` thread 'main' panicked at crates/gpui_wgpu/src/wgpu_atlas.rs:231:40: texture must exist... #11 core::option::expect_failed #12 gpui_wgpu::wgpu_atlas::WgpuAtlas::before_frame #13 gpui_wgpu::wgpu_renderer::WgpuRenderer::draw ``` This change drains pending uploads for a texture when it becomes unreferenced in `remove`, and skips uploads for missing textures in `flush_uploads` as a safety net. 
Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - Fixed occasional crashes when viewing a screen share --- crates/gpui_wgpu/src/wgpu_atlas.rs | 82 +++++++++++++++++++++++++++++- 1 file changed, 81 insertions(+), 1 deletion(-) diff --git a/crates/gpui_wgpu/src/wgpu_atlas.rs b/crates/gpui_wgpu/src/wgpu_atlas.rs index 3eba5c533f80d727425cc87ae89b754afa8722b1..55f6edee21b9f2da02268c66c665c34d5b52066a 100644 --- a/crates/gpui_wgpu/src/wgpu_atlas.rs +++ b/crates/gpui_wgpu/src/wgpu_atlas.rs @@ -115,6 +115,8 @@ impl PlatformAtlas for WgpuAtlas { if let Some(mut texture) = texture_slot.take() { texture.decrement_ref_count(); if texture.is_unreferenced() { + lock.pending_uploads + .retain(|upload| upload.id != texture.id); lock.storage[id.kind] .free_list .push(texture.id.index as usize); @@ -228,7 +230,9 @@ impl WgpuAtlasState { fn flush_uploads(&mut self) { for upload in self.pending_uploads.drain(..) 
{ - let texture = &self.storage[upload.id]; + let Some(texture) = self.storage.get(upload.id) else { + continue; + }; let bytes_per_pixel = texture.bytes_per_pixel(); self.queue.write_texture( @@ -286,6 +290,15 @@ impl ops::IndexMut for WgpuAtlasStorage { } } +impl WgpuAtlasStorage { + fn get(&self, id: AtlasTextureId) -> Option<&WgpuAtlasTexture> { + self[id.kind] + .textures + .get(id.index as usize) + .and_then(|t| t.as_ref()) + } +} + impl ops::Index for WgpuAtlasStorage { type Output = WgpuAtlasTexture; fn index(&self, id: AtlasTextureId) -> &Self::Output { @@ -341,3 +354,70 @@ impl WgpuAtlasTexture { self.live_atlas_keys == 0 } } + +#[cfg(all(test, not(target_family = "wasm")))] +mod tests { + use super::*; + use gpui::{ImageId, RenderImageParams}; + use pollster::block_on; + use std::sync::Arc; + + fn test_device_and_queue() -> anyhow::Result<(Arc, Arc)> { + block_on(async { + let instance = wgpu::Instance::new(wgpu::InstanceDescriptor { + backends: wgpu::Backends::all(), + flags: wgpu::InstanceFlags::default(), + backend_options: wgpu::BackendOptions::default(), + memory_budget_thresholds: wgpu::MemoryBudgetThresholds::default(), + display: None, + }); + let adapter = instance + .request_adapter(&wgpu::RequestAdapterOptions { + power_preference: wgpu::PowerPreference::LowPower, + compatible_surface: None, + force_fallback_adapter: false, + }) + .await + .map_err(|error| anyhow::anyhow!("failed to request adapter: {error}"))?; + let (device, queue) = adapter + .request_device(&wgpu::DeviceDescriptor { + label: Some("wgpu_atlas_test_device"), + required_features: wgpu::Features::empty(), + required_limits: wgpu::Limits::downlevel_defaults() + .using_resolution(adapter.limits()) + .using_alignment(adapter.limits()), + memory_hints: wgpu::MemoryHints::MemoryUsage, + trace: wgpu::Trace::Off, + experimental_features: wgpu::ExperimentalFeatures::disabled(), + }) + .await + .map_err(|error| anyhow::anyhow!("failed to request device: {error}"))?; + 
Ok((Arc::new(device), Arc::new(queue))) + }) + } + + #[test] + fn before_frame_skips_uploads_for_removed_texture() -> anyhow::Result<()> { + let (device, queue) = test_device_and_queue()?; + + let atlas = WgpuAtlas::new(device, queue); + let key = AtlasKey::Image(RenderImageParams { + image_id: ImageId(1), + frame_index: 0, + }); + let size = Size { + width: DevicePixels(1), + height: DevicePixels(1), + }; + let mut build = || Ok(Some((size, Cow::Owned(vec![0, 0, 0, 255])))); + + // Regression test: before the fix, this panicked in flush_uploads + atlas + .get_or_insert_with(&key, &mut build)? + .expect("tile should be created"); + atlas.remove(&key); + atlas.before_frame(); + + Ok(()) + } +} From 203f48d25cfd1a923a70941bdb388109f06d6e13 Mon Sep 17 00:00:00 2001 From: Josh Robson Chase Date: Fri, 3 Apr 2026 15:42:00 -0400 Subject: [PATCH 35/63] workspace: Implement focus-follows-mouse for panes (#46740) Implements basic focus-follows-mouse behavior. Right now, it's only applied in the `workspace` crate for `Pane`s, so anything that lives outside of that container (panels and such for the most part) won't have this behavior applied. The core logic is implemented as an extension trait, and should be trivial to apply to other elements as it makes sense. 
https://github.com/user-attachments/assets/d338fa30-7f9c-439f-8b50-1720e3f509b1 Closes #8167 Release Notes: - Added "Focus Follows Mouse" for editor and terminal panes --------- Co-authored-by: Conrad Irwin --- assets/settings/default.json | 5 ++ crates/settings/src/vscode_import.rs | 1 + crates/settings_content/src/workspace.rs | 10 +++ crates/settings_ui/src/page_data.rs | 48 +++++++++++++- crates/workspace/src/dock.rs | 10 ++- crates/workspace/src/focus_follows_mouse.rs | 71 +++++++++++++++++++++ crates/workspace/src/pane.rs | 13 +++- crates/workspace/src/workspace.rs | 5 +- crates/workspace/src/workspace_settings.rs | 23 ++++++- 9 files changed, 178 insertions(+), 8 deletions(-) create mode 100644 crates/workspace/src/focus_follows_mouse.rs diff --git a/assets/settings/default.json b/assets/settings/default.json index 74a4e15a044fa5686441f2e8a587595936ea08fb..e9d21eb0dcc18ae939a41e3415b93eaeba1e4546 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -225,6 +225,11 @@ // 3. Hide on both typing and cursor movement: // "on_typing_and_movement" "hide_mouse": "on_typing_and_movement", + // Determines whether the focused panel follows the mouse location. + "focus_follows_mouse": { + "enabled": false, + "debounce_ms": 250, + }, // Determines how snippets are sorted relative to other completion items. // // 1. 
Place snippets at the top of the completion list: diff --git a/crates/settings/src/vscode_import.rs b/crates/settings/src/vscode_import.rs index c40b38c460a17f30b1fce26c50b40a893f7724a8..1211cbd8a4519ea295773eb0d979b48258908311 100644 --- a/crates/settings/src/vscode_import.rs +++ b/crates/settings/src/vscode_import.rs @@ -999,6 +999,7 @@ impl VsCodeSettings { } }), zoomed_padding: None, + focus_follows_mouse: None, } } diff --git a/crates/settings_content/src/workspace.rs b/crates/settings_content/src/workspace.rs index ef00a44790fd10b8c56278362a2f552a40f52cbb..0bae7c260f6607f2015f750e5bb9dec7cc26342d 100644 --- a/crates/settings_content/src/workspace.rs +++ b/crates/settings_content/src/workspace.rs @@ -122,6 +122,9 @@ pub struct WorkspaceSettingsContent { /// What draws window decorations/titlebar, the client application (Zed) or display server /// Default: client pub window_decorations: Option, + /// Whether the focused panel follows the mouse location + /// Default: false + pub focus_follows_mouse: Option, } #[with_fallible_options] @@ -928,3 +931,10 @@ impl DocumentSymbols { self == &Self::On } } + +#[with_fallible_options] +#[derive(Copy, Clone, PartialEq, Default, Serialize, Deserialize, JsonSchema, MergeFrom, Debug)] +pub struct FocusFollowsMouse { + pub enabled: Option, + pub debounce_ms: Option, +} diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index f0cf87c403b340dacd33e2c04b043ab8085a461a..828a574115c4664b3ab2f37f32ad4087363b3978 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -4159,7 +4159,7 @@ fn window_and_layout_page() -> SettingsPage { ] } - fn layout_section() -> [SettingsPageItem; 4] { + fn layout_section() -> [SettingsPageItem; 6] { [ SettingsPageItem::SectionHeader("Layout"), SettingsPageItem::SettingItem(SettingItem { @@ -4223,6 +4223,52 @@ fn window_and_layout_page() -> SettingsPage { }), metadata: None, }), + SettingsPageItem::SettingItem(SettingItem { + 
title: "Focus Follows Mouse", + description: "Whether to change focus to a pane when the mouse hovers over it.", + field: Box::new(SettingField { + json_path: Some("focus_follows_mouse.enabled"), + pick: |settings_content| { + settings_content + .workspace + .focus_follows_mouse + .as_ref() + .and_then(|s| s.enabled.as_ref()) + }, + write: |settings_content, value| { + settings_content + .workspace + .focus_follows_mouse + .get_or_insert_default() + .enabled = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Focus Follows Mouse Debounce ms", + description: "Amount of time to wait before changing focus.", + field: Box::new(SettingField { + json_path: Some("focus_follows_mouse.debounce_ms"), + pick: |settings_content| { + settings_content + .workspace + .focus_follows_mouse + .as_ref() + .and_then(|s| s.debounce_ms.as_ref()) + }, + write: |settings_content, value| { + settings_content + .workspace + .focus_follows_mouse + .get_or_insert_default() + .debounce_ms = value; + }, + }), + metadata: None, + files: USER, + }), ] } diff --git a/crates/workspace/src/dock.rs b/crates/workspace/src/dock.rs index e36b48f06fd3ca0983b13ddb564af08ddab9fba5..e58b4b59100c05085c93993370b85a788fc159ca 100644 --- a/crates/workspace/src/dock.rs +++ b/crates/workspace/src/dock.rs @@ -1,5 +1,6 @@ +use crate::focus_follows_mouse::FocusFollowsMouse as _; use crate::persistence::model::DockData; -use crate::{DraggedDock, Event, ModalLayer, Pane}; +use crate::{DraggedDock, Event, FocusFollowsMouse, ModalLayer, Pane, WorkspaceSettings}; use crate::{Workspace, status_bar::StatusItemView}; use anyhow::Context as _; use client::proto; @@ -12,7 +13,7 @@ use gpui::{ px, }; use serde::{Deserialize, Serialize}; -use settings::SettingsStore; +use settings::{Settings, SettingsStore}; use std::sync::Arc; use ui::{ ContextMenu, CountBadge, Divider, DividerColor, IconButton, Tooltip, prelude::*, @@ -252,6 +253,7 @@ pub struct Dock { is_open: 
bool, active_panel_index: Option, focus_handle: FocusHandle, + focus_follows_mouse: FocusFollowsMouse, pub(crate) serialized_dock: Option, zoom_layer_open: bool, modal_layer: Entity, @@ -376,6 +378,7 @@ impl Dock { active_panel_index: None, is_open: false, focus_handle: focus_handle.clone(), + focus_follows_mouse: WorkspaceSettings::get_global(cx).focus_follows_mouse, _subscriptions: [focus_subscription, zoom_subscription], serialized_dock: None, zoom_layer_open: false, @@ -1086,8 +1089,10 @@ impl Render for Dock { }; div() + .id("dock-panel") .key_context(dispatch_context) .track_focus(&self.focus_handle(cx)) + .focus_follows_mouse(self.focus_follows_mouse, cx) .flex() .bg(cx.theme().colors().panel_background) .border_color(cx.theme().colors().border) @@ -1121,6 +1126,7 @@ impl Render for Dock { }) } else { div() + .id("dock-panel") .key_context(dispatch_context) .track_focus(&self.focus_handle(cx)) } diff --git a/crates/workspace/src/focus_follows_mouse.rs b/crates/workspace/src/focus_follows_mouse.rs new file mode 100644 index 0000000000000000000000000000000000000000..da433cefcf059960181c190da83b06260651b063 --- /dev/null +++ b/crates/workspace/src/focus_follows_mouse.rs @@ -0,0 +1,71 @@ +use gpui::{ + AnyWindowHandle, AppContext as _, Context, FocusHandle, Focusable, Global, + StatefulInteractiveElement, Task, +}; + +use crate::workspace_settings; + +#[derive(Default)] +struct FfmState { + // The window and element to be focused + handles: Option<(AnyWindowHandle, FocusHandle)>, + // The debounced task which will do the focusing + _debounce_task: Option>, +} + +impl Global for FfmState {} + +pub trait FocusFollowsMouse: StatefulInteractiveElement { + fn focus_follows_mouse( + self, + settings: workspace_settings::FocusFollowsMouse, + cx: &Context, + ) -> Self { + if settings.enabled { + self.on_hover(cx.listener(move |this, enter, window, cx| { + if *enter { + let window_handle = window.window_handle(); + let focus_handle = this.focus_handle(cx); + + let state 
= cx.try_global::(); + + // Only replace the target if the new handle doesn't contain the existing one. + // This ensures that hovering over a parent (e.g., Dock) doesn't override + // a more specific child target (e.g., a Pane inside the Dock). + let should_replace = state + .and_then(|s| s.handles.as_ref()) + .map(|(_, existing)| !focus_handle.contains(existing, window)) + .unwrap_or(true); + + if !should_replace { + return; + } + + let debounce_task = cx.spawn(async move |_this, cx| { + cx.background_executor().timer(settings.debounce).await; + + cx.update(|cx| { + let state = cx.default_global::(); + let Some((window, focus)) = state.handles.take() else { + return; + }; + + let _ = cx.update_window(window, move |_view, window, cx| { + window.focus(&focus, cx); + }); + }); + }); + + cx.set_global(FfmState { + handles: Some((window_handle, focus_handle)), + _debounce_task: Some(debounce_task), + }); + } + })) + } else { + self + } + } +} + +impl FocusFollowsMouse for T {} diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index deb7e1efef37acff992d8f5be5825741e887b979..92f0781f82234ce79d47db08785b6592fb53f566 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -2,6 +2,7 @@ use crate::{ CloseWindow, NewFile, NewTerminal, OpenInTerminal, OpenOptions, OpenTerminal, OpenVisible, SplitDirection, ToggleFileFinder, ToggleProjectSymbols, ToggleZoom, Workspace, WorkspaceItemBuilder, ZoomIn, ZoomOut, + focus_follows_mouse::FocusFollowsMouse as _, invalid_item_view::InvalidItemView, item::{ ActivateOnClose, ClosePosition, Item, ItemBufferKind, ItemHandle, ItemSettings, @@ -11,7 +12,7 @@ use crate::{ move_item, notifications::NotifyResultExt, toolbar::Toolbar, - workspace_settings::{AutosaveSetting, TabBarSettings, WorkspaceSettings}, + workspace_settings::{AutosaveSetting, FocusFollowsMouse, TabBarSettings, WorkspaceSettings}, }; use anyhow::Result; use collections::{BTreeSet, HashMap, HashSet, VecDeque}; @@ -443,6 +444,7 @@ 
pub struct Pane { pinned_tab_count: usize, diagnostics: HashMap, zoom_out_on_close: bool, + focus_follows_mouse: FocusFollowsMouse, diagnostic_summary_update: Task<()>, /// If a certain project item wants to get recreated with specific data, it can persist its data before the recreation here. pub project_item_restoration_data: HashMap>, @@ -615,6 +617,7 @@ impl Pane { pinned_tab_count: 0, diagnostics: Default::default(), zoom_out_on_close: true, + focus_follows_mouse: WorkspaceSettings::get_global(cx).focus_follows_mouse, diagnostic_summary_update: Task::ready(()), project_item_restoration_data: HashMap::default(), welcome_page: None, @@ -782,7 +785,6 @@ impl Pane { fn settings_changed(&mut self, window: &mut Window, cx: &mut Context) { let tab_bar_settings = TabBarSettings::get_global(cx); - let new_max_tabs = WorkspaceSettings::get_global(cx).max_tabs; if let Some(display_nav_history_buttons) = self.display_nav_history_buttons.as_mut() { *display_nav_history_buttons = tab_bar_settings.show_nav_history_buttons; @@ -795,6 +797,12 @@ impl Pane { self.nav_history.0.lock().preview_item_id = None; } + let workspace_settings = WorkspaceSettings::get_global(cx); + + self.focus_follows_mouse = workspace_settings.focus_follows_mouse; + + let new_max_tabs = workspace_settings.max_tabs; + if self.use_max_tabs && new_max_tabs != self.max_tabs { self.max_tabs = new_max_tabs; self.close_items_on_settings_change(window, cx); @@ -4460,6 +4468,7 @@ impl Render for Pane { placeholder.child(self.welcome_page.clone().unwrap()) } } + .focus_follows_mouse(self.focus_follows_mouse, cx) }) .child( // drag target diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index e5b927cbbbc571966d2483e82d98ce61adb06cda..1bf0d2bc4a09a2c6417ce2b35e46372d274c6161 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -19,6 +19,7 @@ mod security_modal; pub mod shared_screen; use db::smol::future::yield_now; pub use 
shared_screen::SharedScreen; +pub mod focus_follows_mouse; mod status_bar; pub mod tasks; mod theme_preview; @@ -147,8 +148,8 @@ use util::{ }; use uuid::Uuid; pub use workspace_settings::{ - AutosaveSetting, BottomDockLayout, RestoreOnStartupBehavior, StatusBarSettings, TabBarSettings, - WorkspaceSettings, + AutosaveSetting, BottomDockLayout, FocusFollowsMouse, RestoreOnStartupBehavior, + StatusBarSettings, TabBarSettings, WorkspaceSettings, }; use zed_actions::{Spawn, feedback::FileBugReport, theme::ToggleMode}; diff --git a/crates/workspace/src/workspace_settings.rs b/crates/workspace/src/workspace_settings.rs index d78b233229800b571ccc37f87719d09125f1c4c3..ee0e80336d744cadaecdf0201525deddb8d5eec9 100644 --- a/crates/workspace/src/workspace_settings.rs +++ b/crates/workspace/src/workspace_settings.rs @@ -1,4 +1,4 @@ -use std::num::NonZeroUsize; +use std::{num::NonZeroUsize, time::Duration}; use crate::DockPosition; use collections::HashMap; @@ -35,6 +35,13 @@ pub struct WorkspaceSettings { pub use_system_window_tabs: bool, pub zoomed_padding: bool, pub window_decorations: settings::WindowDecorations, + pub focus_follows_mouse: FocusFollowsMouse, +} + +#[derive(Copy, Clone, Deserialize)] +pub struct FocusFollowsMouse { + pub enabled: bool, + pub debounce: Duration, } #[derive(Copy, Clone, PartialEq, Debug, Default)] @@ -113,6 +120,20 @@ impl Settings for WorkspaceSettings { use_system_window_tabs: workspace.use_system_window_tabs.unwrap(), zoomed_padding: workspace.zoomed_padding.unwrap(), window_decorations: workspace.window_decorations.unwrap(), + focus_follows_mouse: FocusFollowsMouse { + enabled: workspace + .focus_follows_mouse + .unwrap() + .enabled + .unwrap_or(false), + debounce: Duration::from_millis( + workspace + .focus_follows_mouse + .unwrap() + .debounce_ms + .unwrap_or(250), + ), + }, } } } From 2fbf83049ff924b8612365fa7c19b65c7d7e1e07 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 3 Apr 2026 
18:27:52 -0300 Subject: [PATCH 36/63] gpui: Refactor follow_tail implementation to fix scroll snapping bugs (#53101) Follow up to https://github.com/zed-industries/zed/pull/53017 This PR does some significant refactoring of the `follow_tail` feature in the GPUI list. That's only used by the agent panel's thread view and given to the height-changing nature of streaming agent responses, we were seeing some scroll snapping bugs upon scrolling while the thread is generating. In the process of fixing it, we introduced a `remeasure_items` method as an alternative to `splice` so that we could get the remeasurement fix without scroll position changes. We already had a `remeasure` method that did that for all of the indexes, but we needed something more scoped out for the agent panel case, so as to not remeasure the entire list's content on every new streamed token. Effectively, this ends up reverting what the PR linked above introduced, but it improved the API in the process. Release Notes: - N/A Co-authored-by: Mikayla Maki --- crates/agent_ui/src/conversation_view.rs | 63 +-- .../src/conversation_view/thread_view.rs | 28 +- crates/gpui/src/elements/list.rs | 426 +++++++++++++++--- 3 files changed, 390 insertions(+), 127 deletions(-) diff --git a/crates/agent_ui/src/conversation_view.rs b/crates/agent_ui/src/conversation_view.rs index 1b9d364e9ce03702b47c63e8a856f0ba4b8aba87..ce125a5d7c901ccb6fc89f405f482cbf52b94f5d 100644 --- a/crates/agent_ui/src/conversation_view.rs +++ b/crates/agent_ui/src/conversation_view.rs @@ -831,6 +831,8 @@ impl ConversationView { let count = thread.read(cx).entries().len(); let list_state = ListState::new(0, gpui::ListAlignment::Top, px(2048.0)); + list_state.set_follow_mode(gpui::FollowMode::Tail); + entry_view_state.update(cx, |view_state, cx| { for ix in 0..count { view_state.sync_entry(ix, &thread, window, cx); @@ -844,7 +846,7 @@ impl ConversationView { if let Some(scroll_position) = thread.read(cx).ui_scroll_position() { 
list_state.scroll_to(scroll_position); } else { - list_state.set_follow_tail(true); + list_state.scroll_to_end(); } AgentDiff::set_active_thread(&self.workspace, thread.clone(), window, cx); @@ -1243,15 +1245,15 @@ impl ConversationView { if let Some(active) = self.thread_view(&thread_id) { let entry_view_state = active.read(cx).entry_view_state.clone(); let list_state = active.read(cx).list_state.clone(); - notify_entry_changed( - &entry_view_state, - &list_state, - index..index, - index, - thread, - window, - cx, - ); + entry_view_state.update(cx, |view_state, cx| { + view_state.sync_entry(index, thread, window, cx); + list_state.splice_focusable( + index..index, + [view_state + .entry(index) + .and_then(|entry| entry.focus_handle(cx))], + ); + }); active.update(cx, |active, cx| { active.sync_editor_mode_for_empty_state(cx); }); @@ -1261,15 +1263,10 @@ impl ConversationView { if let Some(active) = self.thread_view(&thread_id) { let entry_view_state = active.read(cx).entry_view_state.clone(); let list_state = active.read(cx).list_state.clone(); - notify_entry_changed( - &entry_view_state, - &list_state, - *index..*index + 1, - *index, - thread, - window, - cx, - ); + entry_view_state.update(cx, |view_state, cx| { + view_state.sync_entry(*index, thread, window, cx); + }); + list_state.remeasure_items(*index..*index + 1); active.update(cx, |active, cx| { active.auto_expand_streaming_thought(cx); }); @@ -1313,7 +1310,6 @@ impl ConversationView { active.clear_auto_expand_tracking(); if active.list_state.is_following_tail() { active.list_state.scroll_to_end(); - active.list_state.set_follow_tail(false); } } active.sync_generating_indicator(cx); @@ -1391,7 +1387,6 @@ impl ConversationView { active.thread_retry_status.take(); if active.list_state.is_following_tail() { active.list_state.scroll_to_end(); - active.list_state.set_follow_tail(false); } } active.sync_generating_indicator(cx); @@ -2608,32 +2603,6 @@ impl ConversationView { } } -/// Syncs an entry's view state 
with the latest thread data and splices -/// the list item so the list knows to re-measure it on the next paint. -/// -/// Used by both `NewEntry` (splice range `index..index` to insert) and -/// `EntryUpdated` (splice range `index..index+1` to replace), which is -/// why the caller provides the splice range. -fn notify_entry_changed( - entry_view_state: &Entity, - list_state: &ListState, - splice_range: std::ops::Range, - index: usize, - thread: &Entity, - window: &mut Window, - cx: &mut App, -) { - entry_view_state.update(cx, |view_state, cx| { - view_state.sync_entry(index, thread, window, cx); - list_state.splice_focusable( - splice_range, - [view_state - .entry(index) - .and_then(|entry| entry.focus_handle(cx))], - ); - }); -} - fn loading_contents_spinner(size: IconSize) -> AnyElement { Icon::new(IconName::LoadCircle) .size(size) diff --git a/crates/agent_ui/src/conversation_view/thread_view.rs b/crates/agent_ui/src/conversation_view/thread_view.rs index c113eb0b768ee143eb69b5e705c15c91e367e6c2..53e63268c51aa1aa5537a87b6055dea62ecd630e 100644 --- a/crates/agent_ui/src/conversation_view/thread_view.rs +++ b/crates/agent_ui/src/conversation_view/thread_view.rs @@ -541,24 +541,15 @@ impl ThreadView { let thread_view = cx.entity().downgrade(); this.list_state - .set_scroll_handler(move |event, _window, cx| { + .set_scroll_handler(move |_event, _window, cx| { let list_state = list_state_for_scroll.clone(); let thread_view = thread_view.clone(); - let is_following_tail = event.is_following_tail; // N.B. We must defer because the scroll handler is called while the // ListState's RefCell is mutably borrowed. Reading logical_scroll_top() // directly would panic from a double borrow. 
cx.defer(move |cx| { let scroll_top = list_state.logical_scroll_top(); let _ = thread_view.update(cx, |this, cx| { - if !is_following_tail { - let is_generating = - matches!(this.thread.read(cx).status(), ThreadStatus::Generating); - - if list_state.is_at_bottom() && is_generating { - list_state.set_follow_tail(true); - } - } if let Some(thread) = this.as_native_thread(cx) { thread.update(cx, |thread, _cx| { thread.set_ui_scroll_position(Some(scroll_top)); @@ -1070,7 +1061,7 @@ impl ThreadView { })?; let _ = this.update(cx, |this, cx| { - this.list_state.set_follow_tail(true); + this.list_state.scroll_to_end(); cx.notify(); }); @@ -4945,7 +4936,7 @@ impl ThreadView { } pub fn scroll_to_end(&mut self, cx: &mut Context) { - self.list_state.set_follow_tail(true); + self.list_state.scroll_to_end(); cx.notify(); } @@ -4967,7 +4958,6 @@ impl ThreadView { } pub(crate) fn scroll_to_top(&mut self, cx: &mut Context) { - self.list_state.set_follow_tail(false); self.list_state.scroll_to(ListOffset::default()); cx.notify(); } @@ -4979,7 +4969,6 @@ impl ThreadView { cx: &mut Context, ) { let page_height = self.list_state.viewport_bounds().size.height; - self.list_state.set_follow_tail(false); self.list_state.scroll_by(-page_height * 0.9); cx.notify(); } @@ -4991,11 +4980,7 @@ impl ThreadView { cx: &mut Context, ) { let page_height = self.list_state.viewport_bounds().size.height; - self.list_state.set_follow_tail(false); self.list_state.scroll_by(page_height * 0.9); - if self.list_state.is_at_bottom() { - self.list_state.set_follow_tail(true); - } cx.notify(); } @@ -5005,7 +4990,6 @@ impl ThreadView { window: &mut Window, cx: &mut Context, ) { - self.list_state.set_follow_tail(false); self.list_state.scroll_by(-window.line_height() * 3.); cx.notify(); } @@ -5016,11 +5000,7 @@ impl ThreadView { window: &mut Window, cx: &mut Context, ) { - self.list_state.set_follow_tail(false); self.list_state.scroll_by(window.line_height() * 3.); - if self.list_state.is_at_bottom() { - 
self.list_state.set_follow_tail(true); - } cx.notify(); } @@ -5054,7 +5034,6 @@ impl ThreadView { .rev() .find(|&i| matches!(entries.get(i), Some(AgentThreadEntry::UserMessage(_)))) { - self.list_state.set_follow_tail(false); self.list_state.scroll_to(ListOffset { item_ix: target_ix, offset_in_item: px(0.), @@ -5074,7 +5053,6 @@ impl ThreadView { if let Some(target_ix) = (current_ix + 1..entries.len()) .find(|&i| matches!(entries.get(i), Some(AgentThreadEntry::UserMessage(_)))) { - self.list_state.set_follow_tail(false); self.list_state.scroll_to(ListOffset { item_ix: target_ix, offset_in_item: px(0.), diff --git a/crates/gpui/src/elements/list.rs b/crates/gpui/src/elements/list.rs index b4c8e7ca9015190fb8bb1698f79f1b025bfa4829..5525f5c17d2ad33e1ce9696afded1cea5447020c 100644 --- a/crates/gpui/src/elements/list.rs +++ b/crates/gpui/src/elements/list.rs @@ -72,7 +72,7 @@ struct StateInner { scrollbar_drag_start_height: Option, measuring_behavior: ListMeasuringBehavior, pending_scroll: Option, - follow_tail: bool, + follow_state: FollowState, } /// Keeps track of a fractional scroll position within an item for restoration @@ -84,6 +84,49 @@ struct PendingScrollFraction { fraction: f32, } +/// Controls whether the list automatically follows new content at the end. +#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)] +pub enum FollowMode { + /// Normal scrolling — no automatic following. + #[default] + Normal, + /// The list should auto-scroll along with the tail, when scrolled to bottom. 
+ Tail, +} + +#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)] +enum FollowState { + #[default] + Normal, + Tail { + is_following: bool, + }, +} + +impl FollowState { + fn is_following(&self) -> bool { + matches!(self, FollowState::Tail { is_following: true }) + } + + fn has_stopped_following(&self) -> bool { + matches!( + self, + FollowState::Tail { + is_following: false + } + ) + } + + fn start_following(&mut self) { + if let FollowState::Tail { + is_following: false, + } = self + { + *self = FollowState::Tail { is_following: true }; + } + } +} + /// Whether the list is scrolling from top to bottom or bottom to top. #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub enum ListAlignment { @@ -169,6 +212,7 @@ pub struct ListPrepaintState { #[derive(Clone)] enum ListItem { Unmeasured { + size_hint: Option>, focus_handle: Option, }, Measured { @@ -186,9 +230,16 @@ impl ListItem { } } + fn size_hint(&self) -> Option> { + match self { + ListItem::Measured { size, .. } => Some(*size), + ListItem::Unmeasured { size_hint, .. } => *size_hint, + } + } + fn focus_handle(&self) -> Option { match self { - ListItem::Unmeasured { focus_handle } | ListItem::Measured { focus_handle, .. } => { + ListItem::Unmeasured { focus_handle, .. } | ListItem::Measured { focus_handle, .. } => { focus_handle.clone() } } @@ -196,7 +247,7 @@ impl ListItem { fn contains_focused(&self, window: &Window, cx: &App) -> bool { match self { - ListItem::Unmeasured { focus_handle } | ListItem::Measured { focus_handle, .. } => { + ListItem::Unmeasured { focus_handle, .. } | ListItem::Measured { focus_handle, .. 
} => { focus_handle .as_ref() .is_some_and(|handle| handle.contains_focused(window, cx)) @@ -240,7 +291,7 @@ impl ListState { scrollbar_drag_start_height: None, measuring_behavior: ListMeasuringBehavior::default(), pending_scroll: None, - follow_tail: false, + follow_state: FollowState::default(), }))); this.splice(0..0, item_count); this @@ -275,37 +326,63 @@ impl ListState { /// Use this when item heights may have changed (e.g., font size changes) /// but the number and identity of items remains the same. pub fn remeasure(&self) { - let state = &mut *self.0.borrow_mut(); + let count = self.item_count(); + self.remeasure_items(0..count); + } - let new_items = state.items.iter().map(|item| ListItem::Unmeasured { - focus_handle: item.focus_handle(), - }); + /// Mark items in `range` as needing remeasurement while preserving + /// the current scroll position. Unlike [`Self::splice`], this does + /// not change the number of items or blow away `logical_scroll_top`. + /// + /// Use this when an item's content has changed and its rendered + /// height may be different (e.g., streaming text, tool results + /// loading), but the item itself still exists at the same index. + pub fn remeasure_items(&self, range: Range) { + let state = &mut *self.0.borrow_mut(); - // If there's a `logical_scroll_top`, we need to keep track of it as a - // `PendingScrollFraction`, so we can later preserve that scroll - // position proportionally to the item, in case the item's height - // changes. + // If the scroll-top item falls within the remeasured range, + // store a fractional offset so the layout can restore the + // proportional scroll position after the item is re-rendered + // at its new height. 
if let Some(scroll_top) = state.logical_scroll_top { - let mut cursor = state.items.cursor::(()); - cursor.seek(&Count(scroll_top.item_ix), Bias::Right); + if range.contains(&scroll_top.item_ix) { + let mut cursor = state.items.cursor::(()); + cursor.seek(&Count(scroll_top.item_ix), Bias::Right); - if let Some(item) = cursor.item() { - if let Some(size) = item.size() { - let fraction = if size.height.0 > 0.0 { - (scroll_top.offset_in_item.0 / size.height.0).clamp(0.0, 1.0) - } else { - 0.0 - }; - - state.pending_scroll = Some(PendingScrollFraction { - item_ix: scroll_top.item_ix, - fraction, - }); + if let Some(item) = cursor.item() { + if let Some(size) = item.size() { + let fraction = if size.height.0 > 0.0 { + (scroll_top.offset_in_item.0 / size.height.0).clamp(0.0, 1.0) + } else { + 0.0 + }; + + state.pending_scroll = Some(PendingScrollFraction { + item_ix: scroll_top.item_ix, + fraction, + }); + } } } } - state.items = SumTree::from_iter(new_items, ()); + // Rebuild the tree, replacing items in the range with + // Unmeasured copies that keep their focus handles. + let new_items = { + let mut cursor = state.items.cursor::(()); + let mut new_items = cursor.slice(&Count(range.start), Bias::Right); + let invalidated = cursor.slice(&Count(range.end), Bias::Right); + new_items.extend( + invalidated.iter().map(|item| ListItem::Unmeasured { + size_hint: item.size_hint(), + focus_handle: item.focus_handle(), + }), + (), + ); + new_items.append(cursor.suffix(), ()); + new_items + }; + state.items = new_items; state.measuring_behavior.reset(); } @@ -339,7 +416,10 @@ impl ListState { new_items.extend( focus_handles.into_iter().map(|focus_handle| { spliced_count += 1; - ListItem::Unmeasured { focus_handle } + ListItem::Unmeasured { + size_hint: None, + focus_handle, + } }), (), ); @@ -414,24 +494,37 @@ impl ListState { }); } - /// Set whether the list should automatically follow the tail (auto-scroll to the end). 
- pub fn set_follow_tail(&self, follow: bool) { - self.0.borrow_mut().follow_tail = follow; - if follow { - self.scroll_to_end(); + /// Set the follow mode for the list. In `Tail` mode, the list + /// will auto-scroll to the end and re-engage after the user + /// scrolls back to the bottom. In `Normal` mode, no automatic + /// following occurs. + pub fn set_follow_mode(&self, mode: FollowMode) { + let state = &mut *self.0.borrow_mut(); + + match mode { + FollowMode::Normal => { + state.follow_state = FollowState::Normal; + } + FollowMode::Tail => { + state.follow_state = FollowState::Tail { is_following: true }; + if matches!(mode, FollowMode::Tail) { + let item_count = state.items.summary().count; + state.logical_scroll_top = Some(ListOffset { + item_ix: item_count, + offset_in_item: px(0.), + }); + } + } } } - /// Returns whether the list is currently in follow-tail mode (auto-scrolling to the end). + /// Returns whether the list is currently actively following the + /// tail (snapping to the end on each layout). pub fn is_following_tail(&self) -> bool { - self.0.borrow().follow_tail - } - - /// Returns whether the list is scrolled to the bottom (within 1px). - pub fn is_at_bottom(&self) -> bool { - let current_offset = self.scroll_px_offset_for_scrollbar().y.abs(); - let max_offset = self.max_offset_for_scrollbar().y; - current_offset >= max_offset - px(1.0) + matches!( + self.0.borrow().follow_state, + FollowState::Tail { is_following: true } + ) } /// Scroll the list to the given offset @@ -599,6 +692,7 @@ impl StateInner { if self.reset { return; } + let padding = self.last_padding.unwrap_or_default(); let scroll_max = (self.items.summary().height + padding.top + padding.bottom - height).max(px(0.)); @@ -620,8 +714,10 @@ impl StateInner { }); } - if self.follow_tail && delta.y > px(0.) { - self.follow_tail = false; + if let FollowState::Tail { is_following } = &mut self.follow_state { + if delta.y > px(0.) 
{ + *is_following = false; + } } if let Some(handler) = self.scroll_handler.as_mut() { @@ -631,7 +727,10 @@ impl StateInner { visible_range, count: self.items.summary().count, is_scrolled: self.logical_scroll_top.is_some(), - is_following_tail: self.follow_tail, + is_following_tail: matches!( + self.follow_state, + FollowState::Tail { is_following: true } + ), }, window, cx, @@ -722,7 +821,7 @@ impl StateInner { let mut max_item_width = px(0.); let mut scroll_top = self.logical_scroll_top(); - if self.follow_tail { + if self.follow_state.is_following() { scroll_top = ListOffset { item_ix: self.items.summary().count, offset_in_item: px(0.), @@ -875,6 +974,18 @@ impl StateInner { new_items.append(cursor.suffix(), ()); self.items = new_items; + // If follow_tail mode is on but the user scrolled away + // (is_following is false), check whether the current scroll + // position has returned to the bottom. + if self.follow_state.has_stopped_following() { + let padding = self.last_padding.unwrap_or_default(); + let total_height = self.items.summary().height + padding.top + padding.bottom; + let scroll_offset = self.scroll_top(&scroll_top); + if scroll_offset + available_height >= total_height - px(1.0) { + self.follow_state.start_following(); + } + } + // If none of the visible items are focused, check if an off-screen item is focused // and include it to be rendered after the visible items so keyboard interaction continues // to work for it. 
@@ -1011,7 +1122,7 @@ impl StateInner { content_height - self.scrollbar_drag_start_height.unwrap_or(content_height); let new_scroll_top = (point.y - drag_offset).abs().max(px(0.)).min(scroll_max); - self.follow_tail = false; + self.follow_state = FollowState::Normal; if self.alignment == ListAlignment::Bottom && new_scroll_top == scroll_max { self.logical_scroll_top = None; @@ -1159,6 +1270,7 @@ impl Element for List { { let new_items = SumTree::from_iter( state.items.iter().map(|item| ListItem::Unmeasured { + size_hint: None, focus_handle: item.focus_handle(), }), (), @@ -1245,11 +1357,18 @@ impl sum_tree::Item for ListItem { fn summary(&self, _: ()) -> Self::Summary { match self { - ListItem::Unmeasured { focus_handle } => ListItemSummary { + ListItem::Unmeasured { + size_hint, + focus_handle, + } => ListItemSummary { count: 1, rendered_count: 0, unrendered_count: 1, - height: px(0.), + height: if let Some(size) = size_hint { + size.height + } else { + px(0.) + }, has_focus_handles: focus_handle.is_some(), }, ListItem::Measured { @@ -1319,8 +1438,8 @@ mod test { use std::rc::Rc; use crate::{ - self as gpui, AppContext, Context, Element, IntoElement, ListState, Render, Styled, - TestAppContext, Window, div, list, point, px, size, + self as gpui, AppContext, Context, Element, FollowMode, IntoElement, ListState, Render, + Styled, TestAppContext, Window, div, list, point, px, size, }; #[gpui::test] @@ -1545,7 +1664,7 @@ mod test { }) }); - state.set_follow_tail(true); + state.set_follow_mode(FollowMode::Tail); // First paint — items are 50px, total 500px, viewport 200px. // Follow-tail should anchor to the end. @@ -1599,7 +1718,7 @@ mod test { } } - state.set_follow_tail(true); + state.set_follow_mode(FollowMode::Tail); // Paint with follow-tail — scroll anchored to the bottom. 
cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, cx| { @@ -1641,7 +1760,7 @@ mod test { let view = cx.update(|_, cx| cx.new(|_| TestView(state.clone()))); - state.set_follow_tail(true); + state.set_follow_mode(FollowMode::Tail); // Paint with follow-tail — scroll anchored to the bottom. cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { @@ -1709,7 +1828,7 @@ mod test { // Enable follow-tail — this should immediately snap the scroll anchor // to the end, like the user just sent a prompt. - state.set_follow_tail(true); + state.set_follow_mode(FollowMode::Tail); cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { view.into_any_element() @@ -1764,4 +1883,201 @@ mod test { -scroll_offset.y, max_offset.y, ); } + + /// When the user scrolls away from the bottom during follow_tail, + /// follow_tail suspends. If they scroll back to the bottom, the + /// next paint should re-engage follow_tail using fresh measurements. + #[gpui::test] + fn test_follow_tail_reengages_when_scrolled_back_to_bottom(cx: &mut TestAppContext) { + let cx = cx.add_empty_window(); + + // 10 items × 50px = 500px total, 200px viewport. + let state = ListState::new(10, crate::ListAlignment::Top, px(0.)); + + struct TestView(ListState); + impl Render for TestView { + fn render(&mut self, _: &mut Window, _: &mut Context) -> impl IntoElement { + list(self.0.clone(), |_, _, _| { + div().h(px(50.)).w_full().into_any() + }) + .w_full() + .h_full() + } + } + + let view = cx.update(|_, cx| cx.new(|_| TestView(state.clone()))); + + state.set_follow_mode(FollowMode::Tail); + + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + assert!(state.is_following_tail()); + + // Scroll up — follow_tail should suspend (not fully disengage). 
+ cx.simulate_event(ScrollWheelEvent { + position: point(px(50.), px(100.)), + delta: ScrollDelta::Pixels(point(px(0.), px(50.))), + ..Default::default() + }); + assert!(!state.is_following_tail()); + + // Scroll back down to the bottom. + cx.simulate_event(ScrollWheelEvent { + position: point(px(50.), px(100.)), + delta: ScrollDelta::Pixels(point(px(0.), px(-10000.))), + ..Default::default() + }); + + // After a paint, follow_tail should re-engage because the + // layout confirmed we're at the true bottom. + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + assert!( + state.is_following_tail(), + "follow_tail should re-engage after scrolling back to the bottom" + ); + } + + /// When an item is spliced to unmeasured (0px) while follow_tail + /// is suspended, the re-engagement check should still work correctly + #[gpui::test] + fn test_follow_tail_reengagement_not_fooled_by_unmeasured_items(cx: &mut TestAppContext) { + let cx = cx.add_empty_window(); + + // 20 items × 50px = 1000px total, 200px viewport, 1000px + // overdraw so all items get measured during the follow_tail + // paint (matching realistic production settings). + let state = ListState::new(20, crate::ListAlignment::Top, px(1000.)); + + struct TestView(ListState); + impl Render for TestView { + fn render(&mut self, _: &mut Window, _: &mut Context) -> impl IntoElement { + list(self.0.clone(), |_, _, _| { + div().h(px(50.)).w_full().into_any() + }) + .w_full() + .h_full() + } + } + + let view = cx.update(|_, cx| cx.new(|_| TestView(state.clone()))); + + state.set_follow_mode(FollowMode::Tail); + + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + assert!(state.is_following_tail()); + + // Scroll up a meaningful amount — suspends follow_tail. + // 20 items × 50px = 1000px. viewport 200px. scroll_max = 800px. + // Scrolling up 200px puts us at 600px, clearly not at bottom. 
+ cx.simulate_event(ScrollWheelEvent { + position: point(px(50.), px(100.)), + delta: ScrollDelta::Pixels(point(px(0.), px(200.))), + ..Default::default() + }); + assert!(!state.is_following_tail()); + + // Invalidate the last item (simulates EntryUpdated calling + // remeasure_items). This makes items.summary().height + // temporarily wrong (0px for the invalidated item). + state.remeasure_items(19..20); + + // Paint — layout re-measures the invalidated item with its true + // height. The re-engagement check uses these fresh measurements. + // Since we scrolled 200px up from the 800px max, we're at + // ~600px — NOT at the bottom, so follow_tail should NOT + // re-engage. + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + assert!( + !state.is_following_tail(), + "follow_tail should not falsely re-engage due to an unmeasured item \ + reducing items.summary().height" + ); + } + + /// Calling `set_follow_mode(FollowState::Normal)` or dragging the scrollbar should + /// fully disengage follow_tail — clearing any suspended state so + /// follow_tail won’t auto-re-engage. + #[gpui::test] + fn test_follow_tail_suspended_state_cleared_by_explicit_actions(cx: &mut TestAppContext) { + let cx = cx.add_empty_window(); + + // 10 items × 50px = 500px total, 200px viewport. 
+ let state = ListState::new(10, crate::ListAlignment::Top, px(0.)).measure_all(); + + struct TestView(ListState); + impl Render for TestView { + fn render(&mut self, _: &mut Window, _: &mut Context) -> impl IntoElement { + list(self.0.clone(), |_, _, _| { + div().h(px(50.)).w_full().into_any() + }) + .w_full() + .h_full() + } + } + + let view = cx.update(|_, cx| cx.new(|_| TestView(state.clone()))); + + state.set_follow_mode(FollowMode::Tail); + // --- Part 1: set_follow_mode(FollowState::Normal) clears suspended state --- + + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + + // Scroll up — suspends follow_tail. + cx.simulate_event(ScrollWheelEvent { + position: point(px(50.), px(100.)), + delta: ScrollDelta::Pixels(point(px(0.), px(50.))), + ..Default::default() + }); + assert!(!state.is_following_tail()); + + // Scroll back to the bottom — should re-engage follow_tail. + cx.simulate_event(ScrollWheelEvent { + position: point(px(50.), px(100.)), + delta: ScrollDelta::Pixels(point(px(0.), px(-10000.))), + ..Default::default() + }); + + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + assert!( + state.is_following_tail(), + "follow_tail should re-engage after scrolling back to the bottom" + ); + + // --- Part 2: scrollbar drag clears suspended state --- + + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + + // Drag the scrollbar to the middle — should clear suspended state. + state.set_offset_from_scrollbar(point(px(0.), px(150.))); + + // Scroll to the bottom. + cx.simulate_event(ScrollWheelEvent { + position: point(px(50.), px(100.)), + delta: ScrollDelta::Pixels(point(px(0.), px(-10000.))), + ..Default::default() + }); + + // Paint — should NOT re-engage because the scrollbar drag + // cleared the suspended state. 
+ cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + assert!( + !state.is_following_tail(), + "follow_tail should not re-engage after scrollbar drag cleared the suspended state" + ); + } } From e9b280afe00815cced8c50ca6e97d7987e5782ec Mon Sep 17 00:00:00 2001 From: KyleBarton Date: Fri, 3 Apr 2026 14:55:12 -0700 Subject: [PATCH 37/63] Account for windows absolute paths in bind mounts (#53093) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Addresses an auxiliary windows bug found in #52924 - bind mounts are not working in Windows because MountDefinition is not accounting for absolute Windows paths. 
Release Notes: - Fixed windows bind mount issue with dev containers --- crates/dev_container/src/devcontainer_json.rs | 54 ++++++++++++++++++- 1 file changed, 53 insertions(+), 1 deletion(-) diff --git a/crates/dev_container/src/devcontainer_json.rs b/crates/dev_container/src/devcontainer_json.rs index 4429c63a37a87d1b54455b8169359ddf40511e24..f034026a8de4c4a6c3186c97870e25f3510ebc85 100644 --- a/crates/dev_container/src/devcontainer_json.rs +++ b/crates/dev_container/src/devcontainer_json.rs @@ -72,7 +72,11 @@ impl Display for MountDefinition { f, "type={},source={},target={},consistency=cached", self.mount_type.clone().unwrap_or_else(|| { - if self.source.starts_with('/') { + if self.source.starts_with('/') + || self.source.starts_with("\\\\") + || self.source.get(1..3) == Some(":\\") + || self.source.get(1..3) == Some(":/") + { "bind".to_string() } else { "volume".to_string() @@ -1355,4 +1359,52 @@ mod test { assert_eq!(devcontainer.build_type(), DevContainerBuildType::Dockerfile); } + + #[test] + fn mount_definition_should_use_bind_type_for_unix_absolute_paths() { + let mount = MountDefinition { + source: "/home/user/project".to_string(), + target: "/workspaces/project".to_string(), + mount_type: None, + }; + + let rendered = mount.to_string(); + + assert!( + rendered.starts_with("type=bind,"), + "Expected mount type 'bind' for Unix absolute path, but got: {rendered}" + ); + } + + #[test] + fn mount_definition_should_use_bind_type_for_windows_unc_paths() { + let mount = MountDefinition { + source: "\\\\server\\share\\project".to_string(), + target: "/workspaces/project".to_string(), + mount_type: None, + }; + + let rendered = mount.to_string(); + + assert!( + rendered.starts_with("type=bind,"), + "Expected mount type 'bind' for Windows UNC path, but got: {rendered}" + ); + } + + #[test] + fn mount_definition_should_use_bind_type_for_windows_absolute_paths() { + let mount = MountDefinition { + source: "C:\\Users\\mrg\\cli".to_string(), + target: 
"/workspaces/cli".to_string(), + mount_type: None, + }; + + let rendered = mount.to_string(); + + assert!( + rendered.starts_with("type=bind,"), + "Expected mount type 'bind' for Windows absolute path, but got: {rendered}" + ); + } } From eeb87cb17768c4a1372047ad67a1e9a1d7dd507b Mon Sep 17 00:00:00 2001 From: Saketh <126517689+SAKETH11111@users.noreply.github.com> Date: Fri, 3 Apr 2026 17:55:40 -0500 Subject: [PATCH 38/63] remote: Use SSH nicknames in display names (#53103) Closes #52943 ## Summary - Prefer SSH nicknames over raw hosts in remote connection display names - Add regression tests for nickname and host fallback behavior ## Why The `nickname` field is documented as the user-facing label for SSH connections, but `RemoteConnectionOptions::display_name()` always returned the raw host. That meant recent-projects UI surfaces kept ignoring nicknames even when they were configured. ## Validation - `cargo test -p remote ssh_display_name` - `cargo test -p remote` Release Notes: - Fixed SSH recent-project labels to show configured nicknames instead of raw hosts when available. 
--- crates/remote/src/remote_client.rs | 31 +++++++++++++++++++++++++++++- 1 file changed, 30 insertions(+), 1 deletion(-) diff --git a/crates/remote/src/remote_client.rs b/crates/remote/src/remote_client.rs index e746d82aac857d3174a4bab14c937a7538b2f1b4..c04d3630f92bcc27afb01a619176d3ae79d3fac7 100644 --- a/crates/remote/src/remote_client.rs +++ b/crates/remote/src/remote_client.rs @@ -1285,7 +1285,10 @@ pub enum RemoteConnectionOptions { impl RemoteConnectionOptions { pub fn display_name(&self) -> String { match self { - RemoteConnectionOptions::Ssh(opts) => opts.host.to_string(), + RemoteConnectionOptions::Ssh(opts) => opts + .nickname + .clone() + .unwrap_or_else(|| opts.host.to_string()), RemoteConnectionOptions::Wsl(opts) => opts.distro_name.clone(), RemoteConnectionOptions::Docker(opts) => { if opts.use_podman { @@ -1300,6 +1303,32 @@ impl RemoteConnectionOptions { } } +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_ssh_display_name_prefers_nickname() { + let options = RemoteConnectionOptions::Ssh(SshConnectionOptions { + host: "1.2.3.4".into(), + nickname: Some("My Cool Project".to_string()), + ..Default::default() + }); + + assert_eq!(options.display_name(), "My Cool Project"); + } + + #[test] + fn test_ssh_display_name_falls_back_to_host() { + let options = RemoteConnectionOptions::Ssh(SshConnectionOptions { + host: "1.2.3.4".into(), + ..Default::default() + }); + + assert_eq!(options.display_name(), "1.2.3.4"); + } +} + impl From for RemoteConnectionOptions { fn from(opts: SshConnectionOptions) -> Self { RemoteConnectionOptions::Ssh(opts) From 5ae174fa5f811c154ed8f05de1c75c5ad5160790 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 3 Apr 2026 16:02:55 -0700 Subject: [PATCH 39/63] Rework sidebar rendering to use MultiWorkspace's project groups (#53096) Release Notes: * [x] It's possible to get into a state where agent panel shows a thread that is archived - N/A --------- Co-authored-by: Eric Holk Co-authored-by: Mikayla Maki --- 
Cargo.lock | 1 - crates/agent_ui/src/agent_panel.rs | 4 + crates/project/src/project.rs | 6 +- crates/sidebar/Cargo.toml | 1 - crates/sidebar/src/project_group_builder.rs | 282 -------- crates/sidebar/src/sidebar.rs | 739 ++++++++++---------- crates/sidebar/src/sidebar_tests.rs | 582 +++++++-------- crates/workspace/src/multi_workspace.rs | 20 + 8 files changed, 713 insertions(+), 922 deletions(-) delete mode 100644 crates/sidebar/src/project_group_builder.rs diff --git a/Cargo.lock b/Cargo.lock index aae7afecc5ea6f6ba3d63453321c829b677e1c58..906c5e65456c604e5123bfde9ac1c39e261eedfd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -15871,7 +15871,6 @@ dependencies = [ "agent_ui", "anyhow", "chrono", - "collections", "editor", "feature_flags", "fs", diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 118f0dce6cb53c4e7851c79513cf936d6023a711..5fd39509df4ec2263e47c7e87b3e4b7852eaf154 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -2076,6 +2076,10 @@ impl AgentPanel { window: &mut Window, cx: &mut Context, ) { + if let Some(store) = ThreadMetadataStore::try_global(cx) { + store.update(cx, |store, cx| store.unarchive(&session_id, cx)); + } + if let Some(conversation_view) = self.background_threads.remove(&session_id) { self.set_active_view( ActiveView::AgentThread { conversation_view }, diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 41f57299835f37b001575b682118aa17a6516ad9..c5b1f982ceacc59a60ff1303faffc972a3ce505d 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -6049,11 +6049,7 @@ impl ProjectGroupKey { /// Creates a new `ProjectGroupKey` with the given path list. /// /// The path list should point to the git main worktree paths for a project. - /// - /// This should be used only in a few places to make sure we can ensure the - /// main worktree path invariant. Namely, this should only be called from - /// [`Workspace`]. 
- pub(crate) fn new(host: Option, paths: PathList) -> Self { + pub fn new(host: Option, paths: PathList) -> Self { Self { paths, host } } diff --git a/crates/sidebar/Cargo.toml b/crates/sidebar/Cargo.toml index 04ed8808a14d4c6853b08669523d55a2ebba4482..d76fd139557dd10438d7cf98f9168d87dcae9804 100644 --- a/crates/sidebar/Cargo.toml +++ b/crates/sidebar/Cargo.toml @@ -23,7 +23,6 @@ agent_settings.workspace = true agent_ui = { workspace = true, features = ["audio"] } anyhow.workspace = true chrono.workspace = true -collections.workspace = true editor.workspace = true feature_flags.workspace = true fs.workspace = true diff --git a/crates/sidebar/src/project_group_builder.rs b/crates/sidebar/src/project_group_builder.rs deleted file mode 100644 index 20919647c185ce7014f056a99bb9c85ae595c560..0000000000000000000000000000000000000000 --- a/crates/sidebar/src/project_group_builder.rs +++ /dev/null @@ -1,282 +0,0 @@ -//! The sidebar groups threads by a canonical path list. -//! -//! Threads have a path list associated with them, but this is the absolute path -//! of whatever worktrees they were associated with. In the sidebar, we want to -//! group all threads by their main worktree, and then we add a worktree chip to -//! the sidebar entry when that thread is in another worktree. -//! -//! This module is provides the functions and structures necessary to do this -//! lookup and mapping. - -use collections::{HashMap, HashSet, vecmap::VecMap}; -use gpui::{App, Entity}; -use project::ProjectGroupKey; -use std::{ - path::{Path, PathBuf}, - sync::Arc, -}; -use workspace::{MultiWorkspace, PathList, Workspace}; - -#[derive(Default)] -pub struct ProjectGroup { - pub workspaces: Vec>, - /// Root paths of all open workspaces in this group. Used to skip - /// redundant thread-store queries for linked worktrees that already - /// have an open workspace. 
- covered_paths: HashSet>, -} - -impl ProjectGroup { - fn add_workspace(&mut self, workspace: &Entity, cx: &App) { - if !self.workspaces.contains(workspace) { - self.workspaces.push(workspace.clone()); - } - for path in workspace.read(cx).root_paths(cx) { - self.covered_paths.insert(path); - } - } - - pub fn first_workspace(&self) -> &Entity { - self.workspaces - .first() - .expect("groups always have at least one workspace") - } - - pub fn main_workspace(&self, cx: &App) -> &Entity { - self.workspaces - .iter() - .find(|ws| { - !crate::root_repository_snapshots(ws, cx) - .any(|snapshot| snapshot.is_linked_worktree()) - }) - .unwrap_or_else(|| self.first_workspace()) - } -} - -pub struct ProjectGroupBuilder { - /// Maps git repositories' work_directory_abs_path to their original_repo_abs_path - directory_mappings: HashMap, - project_groups: VecMap, -} - -impl ProjectGroupBuilder { - fn new() -> Self { - Self { - directory_mappings: HashMap::default(), - project_groups: VecMap::new(), - } - } - - pub fn from_multiworkspace(mw: &MultiWorkspace, cx: &App) -> Self { - let mut builder = Self::new(); - // First pass: collect all directory mappings from every workspace - // so we know how to canonicalize any path (including linked - // worktree paths discovered by the main repo's workspace). - for workspace in mw.workspaces() { - builder.add_workspace_mappings(workspace.read(cx), cx); - } - - // Second pass: group each workspace using canonical paths derived - // from the full set of mappings. 
- for workspace in mw.workspaces() { - let group_name = workspace.read(cx).project_group_key(cx); - builder - .project_group_entry(&group_name) - .add_workspace(workspace, cx); - } - builder - } - - fn project_group_entry(&mut self, name: &ProjectGroupKey) -> &mut ProjectGroup { - self.project_groups.entry_ref(name).or_insert_default() - } - - fn add_mapping(&mut self, work_directory: &Path, original_repo: &Path) { - let old = self - .directory_mappings - .insert(PathBuf::from(work_directory), PathBuf::from(original_repo)); - if let Some(old) = old { - debug_assert_eq!( - &old, original_repo, - "all worktrees should map to the same main worktree" - ); - } - } - - pub fn add_workspace_mappings(&mut self, workspace: &Workspace, cx: &App) { - for repo in workspace.project().read(cx).repositories(cx).values() { - let snapshot = repo.read(cx).snapshot(); - - self.add_mapping( - &snapshot.work_directory_abs_path, - &snapshot.original_repo_abs_path, - ); - - for worktree in snapshot.linked_worktrees.iter() { - self.add_mapping(&worktree.path, &snapshot.original_repo_abs_path); - } - } - } - - pub fn canonicalize_path<'a>(&'a self, path: &'a Path) -> &'a Path { - self.directory_mappings - .get(path) - .map(AsRef::as_ref) - .unwrap_or(path) - } - - /// Whether the given group should load threads for a linked worktree - /// at `worktree_path`. Returns `false` if the worktree already has an - /// open workspace in the group (its threads are loaded via the - /// workspace loop) or if the worktree's canonical path list doesn't - /// match `group_path_list`. - pub fn group_owns_worktree( - &self, - group: &ProjectGroup, - group_path_list: &PathList, - worktree_path: &Path, - ) -> bool { - if group.covered_paths.contains(worktree_path) { - return false; - } - let canonical = self.canonicalize_path_list(&PathList::new(&[worktree_path])); - canonical == *group_path_list - } - - /// Canonicalizes every path in a [`PathList`] using the builder's - /// directory mappings. 
- fn canonicalize_path_list(&self, path_list: &PathList) -> PathList { - let paths: Vec<_> = path_list - .paths() - .iter() - .map(|p| self.canonicalize_path(p).to_path_buf()) - .collect(); - PathList::new(&paths) - } - - pub fn groups(&self) -> impl Iterator { - self.project_groups.iter() - } -} - -#[cfg(test)] -mod tests { - use std::sync::Arc; - - use super::*; - use fs::FakeFs; - use gpui::TestAppContext; - use settings::SettingsStore; - - fn init_test(cx: &mut TestAppContext) { - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - theme_settings::init(theme::LoadThemes::JustBase, cx); - }); - } - - async fn create_fs_with_main_and_worktree(cx: &mut TestAppContext) -> Arc { - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - "/project", - serde_json::json!({ - ".git": { - "worktrees": { - "feature-a": { - "commondir": "../../", - "HEAD": "ref: refs/heads/feature-a", - }, - }, - }, - "src": {}, - }), - ) - .await; - fs.insert_tree( - "/wt/feature-a", - serde_json::json!({ - ".git": "gitdir: /project/.git/worktrees/feature-a", - "src": {}, - }), - ) - .await; - fs.add_linked_worktree_for_repo( - std::path::Path::new("/project/.git"), - false, - git::repository::Worktree { - path: std::path::PathBuf::from("/wt/feature-a"), - ref_name: Some("refs/heads/feature-a".into()), - sha: "abc".into(), - is_main: false, - }, - ) - .await; - fs - } - - #[gpui::test] - async fn test_main_repo_maps_to_itself(cx: &mut TestAppContext) { - init_test(cx); - let fs = create_fs_with_main_and_worktree(cx).await; - cx.update(|cx| ::set_global(fs.clone(), cx)); - - let project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await; - project - .update(cx, |project, cx| project.git_scans_complete(cx)) - .await; - - let (multi_workspace, cx) = cx.add_window_view(|window, cx| { - workspace::MultiWorkspace::test_new(project.clone(), window, cx) - }); - - multi_workspace.read_with(cx, |mw, cx| { - let mut canonicalizer = 
ProjectGroupBuilder::new(); - for workspace in mw.workspaces() { - canonicalizer.add_workspace_mappings(workspace.read(cx), cx); - } - - // The main repo path should canonicalize to itself. - assert_eq!( - canonicalizer.canonicalize_path(Path::new("/project")), - Path::new("/project"), - ); - - // An unknown path returns None. - assert_eq!( - canonicalizer.canonicalize_path(Path::new("/something/else")), - Path::new("/something/else"), - ); - }); - } - - #[gpui::test] - async fn test_worktree_checkout_canonicalizes_to_main_repo(cx: &mut TestAppContext) { - init_test(cx); - let fs = create_fs_with_main_and_worktree(cx).await; - cx.update(|cx| ::set_global(fs.clone(), cx)); - - // Open the worktree checkout as its own project. - let project = project::Project::test(fs.clone(), ["/wt/feature-a".as_ref()], cx).await; - project - .update(cx, |project, cx| project.git_scans_complete(cx)) - .await; - - let (multi_workspace, cx) = cx.add_window_view(|window, cx| { - workspace::MultiWorkspace::test_new(project.clone(), window, cx) - }); - - multi_workspace.read_with(cx, |mw, cx| { - let mut canonicalizer = ProjectGroupBuilder::new(); - for workspace in mw.workspaces() { - canonicalizer.add_workspace_mappings(workspace.read(cx), cx); - } - - // The worktree checkout path should canonicalize to the main repo. 
- assert_eq!( - canonicalizer.canonicalize_path(Path::new("/wt/feature-a")), - Path::new("/project"), - ); - }); - } -} diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs index 6816898ffc55bbf81b2c17719b3bde6eb8b58e68..25a2b7ecb75ae11a551caa221609e8c5bfa1751e 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -23,7 +23,9 @@ use gpui::{ use menu::{ Cancel, Confirm, SelectChild, SelectFirst, SelectLast, SelectNext, SelectParent, SelectPrevious, }; -use project::{AgentId, AgentRegistryStore, Event as ProjectEvent, linked_worktree_short_name}; +use project::{ + AgentId, AgentRegistryStore, Event as ProjectEvent, ProjectGroupKey, linked_worktree_short_name, +}; use recent_projects::sidebar_recent_projects::SidebarRecentProjects; use remote::RemoteConnectionOptions; use ui::utils::platform_title_bar_height; @@ -54,10 +56,6 @@ use zed_actions::agents_sidebar::{FocusSidebarFilter, ToggleThreadSwitcher}; use crate::thread_switcher::{ThreadSwitcher, ThreadSwitcherEntry, ThreadSwitcherEvent}; -use crate::project_group_builder::ProjectGroupBuilder; - -mod project_group_builder; - #[cfg(test)] mod sidebar_tests; @@ -136,13 +134,7 @@ impl ActiveEntry { (ActiveEntry::Thread { session_id, .. }, ListEntry::Thread(thread)) => { thread.metadata.session_id == *session_id } - ( - ActiveEntry::Draft(workspace), - ListEntry::NewThread { - workspace: entry_workspace, - .. - }, - ) => workspace == entry_workspace, + (ActiveEntry::Draft(_workspace), ListEntry::DraftThread { .. 
}) => true, _ => false, } } @@ -209,9 +201,8 @@ impl ThreadEntry { #[derive(Clone)] enum ListEntry { ProjectHeader { - path_list: PathList, + key: ProjectGroupKey, label: SharedString, - workspace: Entity, highlight_positions: Vec, has_running_threads: bool, waiting_thread_count: usize, @@ -219,30 +210,25 @@ enum ListEntry { }, Thread(ThreadEntry), ViewMore { - path_list: PathList, + key: ProjectGroupKey, is_fully_expanded: bool, }, + /// The user's active draft thread. Shows a prefix of the currently-typed + /// prompt, or "Untitled Thread" if the prompt is empty. + DraftThread { + worktrees: Vec, + }, + /// A convenience row for starting a new thread. Shown when a project group + /// has no threads, or when the active workspace contains linked worktrees + /// with no threads for that specific worktree set. NewThread { - path_list: PathList, - workspace: Entity, + key: project::ProjectGroupKey, worktrees: Vec, }, } #[cfg(test)] impl ListEntry { - fn workspace(&self) -> Option> { - match self { - ListEntry::ProjectHeader { workspace, .. } => Some(workspace.clone()), - ListEntry::Thread(thread_entry) => match &thread_entry.workspace { - ThreadEntryWorkspace::Open(workspace) => Some(workspace.clone()), - ThreadEntryWorkspace::Closed(_) => None, - }, - ListEntry::ViewMore { .. } => None, - ListEntry::NewThread { workspace, .. } => Some(workspace.clone()), - } - } - fn session_id(&self) -> Option<&acp::SessionId> { match self { ListEntry::Thread(thread_entry) => Some(&thread_entry.metadata.session_id), @@ -321,27 +307,32 @@ fn workspace_path_list(workspace: &Entity, cx: &App) -> PathList { /// Derives worktree display info from a thread's stored path list. /// -/// For each path in the thread's `folder_paths` that canonicalizes to a -/// different path (i.e. it's a git worktree), produces a [`WorktreeInfo`] -/// with the short worktree name and full path. +/// For each path in the thread's `folder_paths` that is not one of the +/// group's main paths (i.e. 
it's a git linked worktree), produces a +/// [`WorktreeInfo`] with the short worktree name and full path. fn worktree_info_from_thread_paths( folder_paths: &PathList, - project_groups: &ProjectGroupBuilder, + group_key: &project::ProjectGroupKey, ) -> Vec { + let main_paths = group_key.path_list().paths(); folder_paths .paths() .iter() .filter_map(|path| { - let canonical = project_groups.canonicalize_path(path); - if canonical != path.as_path() { - Some(WorktreeInfo { - name: linked_worktree_short_name(canonical, path).unwrap_or_default(), - full_path: SharedString::from(path.display().to_string()), - highlight_positions: Vec::new(), - }) - } else { - None + if main_paths.iter().any(|mp| mp.as_path() == path.as_path()) { + return None; } + // Find the main path whose file name matches this linked + // worktree's file name, falling back to the first main path. + let main_path = main_paths + .iter() + .find(|mp| mp.file_name() == path.file_name()) + .or(main_paths.first())?; + Some(WorktreeInfo { + name: linked_worktree_short_name(main_path, path).unwrap_or_default(), + full_path: SharedString::from(path.display().to_string()), + highlight_positions: Vec::new(), + }) }) .collect() } @@ -677,10 +668,41 @@ impl Sidebar { result } + /// Finds an open workspace whose project group key matches the given path list. + fn workspace_for_group(&self, path_list: &PathList, cx: &App) -> Option> { + let mw = self.multi_workspace.upgrade()?; + let mw = mw.read(cx); + mw.workspaces() + .iter() + .find(|ws| ws.read(cx).project_group_key(cx).path_list() == path_list) + .cloned() + } + + /// Opens a new workspace for a group that has no open workspaces. 
+ fn open_workspace_for_group( + &mut self, + path_list: &PathList, + window: &mut Window, + cx: &mut Context, + ) { + let Some(multi_workspace) = self.multi_workspace.upgrade() else { + return; + }; + + let paths: Vec = + path_list.paths().iter().map(|p| p.to_path_buf()).collect(); + + multi_workspace + .update(cx, |mw, cx| { + mw.open_project(paths, workspace::OpenMode::Activate, window, cx) + }) + .detach_and_log_err(cx); + } + /// Rebuilds the sidebar contents from current workspace and thread state. /// - /// Uses [`ProjectGroupBuilder`] to group workspaces by their main git - /// repository, then populates thread entries from the metadata store and + /// Iterates [`MultiWorkspace::project_group_keys`] to determine project + /// groups, then populates thread entries from the metadata store and /// merges live thread info from active agent panels. /// /// Aim for a single forward pass over workspaces and threads plus an @@ -764,11 +786,6 @@ impl Sidebar { let mut current_session_ids: HashSet = HashSet::new(); let mut project_header_indices: Vec = Vec::new(); - // Use ProjectGroupBuilder to canonically group workspaces by their - // main git repository. This replaces the manual absorbed-workspace - // detection that was here before. 
- let project_groups = ProjectGroupBuilder::from_multiworkspace(mw, cx); - let has_open_projects = workspaces .iter() .any(|ws| !workspace_path_list(ws, cx).paths().is_empty()); @@ -785,38 +802,28 @@ impl Sidebar { (icon, icon_from_external_svg) }; - for (group_name, group) in project_groups.groups() { - let path_list = group_name.path_list().clone(); + for (group_key, group_workspaces) in mw.project_groups(cx) { + let path_list = group_key.path_list().clone(); if path_list.paths().is_empty() { continue; } - let label = group_name.display_name(); + let label = group_key.display_name(); let is_collapsed = self.collapsed_groups.contains(&path_list); let should_load_threads = !is_collapsed || !query.is_empty(); let is_active = active_workspace .as_ref() - .is_some_and(|active| group.workspaces.contains(active)); - - // Pick a representative workspace for the group: prefer the active - // workspace if it belongs to this group, otherwise use the main - // repo workspace (not a linked worktree). - let representative_workspace = active_workspace - .as_ref() - .filter(|_| is_active) - .unwrap_or_else(|| group.main_workspace(cx)); + .is_some_and(|active| group_workspaces.contains(active)); // Collect live thread infos from all workspaces in this group. - let live_infos: Vec<_> = group - .workspaces + let live_infos: Vec<_> = group_workspaces .iter() .flat_map(|ws| all_thread_infos_for_workspace(ws, cx)) .collect(); let mut threads: Vec = Vec::new(); - let mut threadless_workspaces: Vec<(Entity, Vec)> = Vec::new(); let mut has_running_threads = false; let mut waiting_thread_count: usize = 0; @@ -824,61 +831,88 @@ impl Sidebar { let mut seen_session_ids: HashSet = HashSet::new(); let thread_store = ThreadMetadataStore::global(cx); - // Load threads from each workspace in the group. 
- for workspace in &group.workspaces { - let ws_path_list = workspace_path_list(workspace, cx); - let mut workspace_rows = thread_store - .read(cx) - .entries_for_path(&ws_path_list) - .cloned() - .peekable(); - if workspace_rows.peek().is_none() { - let worktrees = - worktree_info_from_thread_paths(&ws_path_list, &project_groups); - threadless_workspaces.push((workspace.clone(), worktrees)); + // Build a lookup from workspace root paths to their workspace + // entity, used to assign ThreadEntryWorkspace::Open for threads + // whose folder_paths match an open workspace. + let workspace_by_path_list: HashMap> = + group_workspaces + .iter() + .map(|ws| (workspace_path_list(ws, cx), ws)) + .collect(); + + // Resolve a ThreadEntryWorkspace for a thread row. If any open + // workspace's root paths match the thread's folder_paths, use + // Open; otherwise use Closed. + let resolve_workspace = |row: &ThreadMetadata| -> ThreadEntryWorkspace { + workspace_by_path_list + .get(&row.folder_paths) + .map(|ws| ThreadEntryWorkspace::Open((*ws).clone())) + .unwrap_or_else(|| ThreadEntryWorkspace::Closed(row.folder_paths.clone())) + }; + + // Build a ThreadEntry from a metadata row. 
+ let make_thread_entry = |row: ThreadMetadata, + workspace: ThreadEntryWorkspace| + -> ThreadEntry { + let (icon, icon_from_external_svg) = resolve_agent_icon(&row.agent_id); + let worktrees = worktree_info_from_thread_paths(&row.folder_paths, &group_key); + ThreadEntry { + metadata: row, + icon, + icon_from_external_svg, + status: AgentThreadStatus::default(), + workspace, + is_live: false, + is_background: false, + is_title_generating: false, + highlight_positions: Vec::new(), + worktrees, + diff_stats: DiffStats::default(), } - for row in workspace_rows { - if !seen_session_ids.insert(row.session_id.clone()) { - continue; - } - let (icon, icon_from_external_svg) = resolve_agent_icon(&row.agent_id); - let worktrees = - worktree_info_from_thread_paths(&row.folder_paths, &project_groups); - threads.push(ThreadEntry { - metadata: row, - icon, - icon_from_external_svg, - status: AgentThreadStatus::default(), - workspace: ThreadEntryWorkspace::Open(workspace.clone()), - is_live: false, - is_background: false, - is_title_generating: false, - highlight_positions: Vec::new(), - worktrees, - diff_stats: DiffStats::default(), - }); + }; + + // === Main code path: one query per group via main_worktree_paths === + // The main_worktree_paths column is set on all new threads and + // points to the group's canonical paths regardless of which + // linked worktree the thread was opened in. + for row in thread_store + .read(cx) + .entries_for_main_worktree_path(&path_list) + .cloned() + { + if !seen_session_ids.insert(row.session_id.clone()) { + continue; } + let workspace = resolve_workspace(&row); + threads.push(make_thread_entry(row, workspace)); } - // Load threads from linked git worktrees whose - // canonical paths belong to this group. 
- let linked_worktree_queries = group - .workspaces - .iter() - .flat_map(|ws| root_repository_snapshots(ws, cx)) - .filter(|snapshot| !snapshot.is_linked_worktree()) - .flat_map(|snapshot| { - snapshot - .linked_worktrees() - .iter() - .filter(|wt| { - project_groups.group_owns_worktree(group, &path_list, &wt.path) - }) - .map(|wt| PathList::new(std::slice::from_ref(&wt.path))) - .collect::>() - }); + // Legacy threads did not have `main_worktree_paths` populated, so they + // must be queried by their `folder_paths`. + + // Load any legacy threads for the main worktrees of this project group. + for row in thread_store.read(cx).entries_for_path(&path_list).cloned() { + if !seen_session_ids.insert(row.session_id.clone()) { + continue; + } + let workspace = resolve_workspace(&row); + threads.push(make_thread_entry(row, workspace)); + } - for worktree_path_list in linked_worktree_queries { + // Load any legacy threads for any single linked wortree of this project group. + let mut linked_worktree_paths = HashSet::new(); + for workspace in &group_workspaces { + if workspace.read(cx).visible_worktrees(cx).count() != 1 { + continue; + } + for snapshot in root_repository_snapshots(workspace, cx) { + for linked_worktree in snapshot.linked_worktrees() { + linked_worktree_paths.insert(linked_worktree.path.clone()); + } + } + } + for path in linked_worktree_paths { + let worktree_path_list = PathList::new(std::slice::from_ref(&path)); for row in thread_store .read(cx) .entries_for_path(&worktree_path_list) @@ -887,67 +921,10 @@ impl Sidebar { if !seen_session_ids.insert(row.session_id.clone()) { continue; } - let (icon, icon_from_external_svg) = resolve_agent_icon(&row.agent_id); - let worktrees = - worktree_info_from_thread_paths(&row.folder_paths, &project_groups); - threads.push(ThreadEntry { - metadata: row, - icon, - icon_from_external_svg, - status: AgentThreadStatus::default(), - workspace: ThreadEntryWorkspace::Closed(worktree_path_list.clone()), - is_live: false, - 
is_background: false, - is_title_generating: false, - highlight_positions: Vec::new(), - worktrees, - diff_stats: DiffStats::default(), - }); - } - } - - // Load threads from main worktrees when a workspace in this - // group is itself a linked worktree checkout. - let main_repo_queries: Vec = group - .workspaces - .iter() - .flat_map(|ws| root_repository_snapshots(ws, cx)) - .filter(|snapshot| snapshot.is_linked_worktree()) - .map(|snapshot| { - PathList::new(std::slice::from_ref(&snapshot.original_repo_abs_path)) - }) - .collect(); - - for main_repo_path_list in main_repo_queries { - let folder_path_matches = thread_store - .read(cx) - .entries_for_path(&main_repo_path_list) - .cloned(); - let main_worktree_path_matches = thread_store - .read(cx) - .entries_for_main_worktree_path(&main_repo_path_list) - .cloned(); - - for row in folder_path_matches.chain(main_worktree_path_matches) { - if !seen_session_ids.insert(row.session_id.clone()) { - continue; - } - let (icon, icon_from_external_svg) = resolve_agent_icon(&row.agent_id); - let worktrees = - worktree_info_from_thread_paths(&row.folder_paths, &project_groups); - threads.push(ThreadEntry { - metadata: row, - icon, - icon_from_external_svg, - status: AgentThreadStatus::default(), - workspace: ThreadEntryWorkspace::Closed(main_repo_path_list.clone()), - is_live: false, - is_background: false, - is_title_generating: false, - highlight_positions: Vec::new(), - worktrees, - diff_stats: DiffStats::default(), - }); + threads.push(make_thread_entry( + row, + ThreadEntryWorkspace::Closed(worktree_path_list.clone()), + )); } } @@ -1051,9 +1028,8 @@ impl Sidebar { project_header_indices.push(entries.len()); entries.push(ListEntry::ProjectHeader { - path_list: path_list.clone(), + key: group_key.clone(), label, - workspace: representative_workspace.clone(), highlight_positions: workspace_highlight_positions, has_running_threads, waiting_thread_count, @@ -1065,15 +1041,13 @@ impl Sidebar { entries.push(thread.into()); } } 
else { - let is_draft_for_workspace = is_active - && matches!(&self.active_entry, Some(ActiveEntry::Draft(_))) - && self.active_entry_workspace() == Some(representative_workspace); + let is_draft_for_group = is_active + && matches!(&self.active_entry, Some(ActiveEntry::Draft(ws)) if group_workspaces.contains(ws)); project_header_indices.push(entries.len()); entries.push(ListEntry::ProjectHeader { - path_list: path_list.clone(), + key: group_key.clone(), label, - workspace: representative_workspace.clone(), highlight_positions: Vec::new(), has_running_threads, waiting_thread_count, @@ -1084,25 +1058,61 @@ impl Sidebar { continue; } - // Emit "New Thread" entries for threadless workspaces - // and active drafts, right after the header. - for (workspace, worktrees) in &threadless_workspaces { - entries.push(ListEntry::NewThread { - path_list: path_list.clone(), - workspace: workspace.clone(), - worktrees: worktrees.clone(), - }); + // Emit a DraftThread entry when the active draft belongs to this group. + if is_draft_for_group { + if let Some(ActiveEntry::Draft(draft_ws)) = &self.active_entry { + let ws_path_list = workspace_path_list(draft_ws, cx); + let worktrees = worktree_info_from_thread_paths(&ws_path_list, &group_key); + entries.push(ListEntry::DraftThread { worktrees }); + } } - if is_draft_for_workspace - && !threadless_workspaces - .iter() - .any(|(ws, _)| ws == representative_workspace) + + // Emit a NewThread entry when: + // 1. The group has zero threads (convenient affordance). + // 2. The active workspace has linked worktrees but no threads + // for the active workspace's specific set of worktrees. 
+ let group_has_no_threads = threads.is_empty() && !group_workspaces.is_empty(); + let active_ws_has_threadless_linked_worktrees = is_active + && !is_draft_for_group + && active_workspace.as_ref().is_some_and(|active_ws| { + let ws_path_list = workspace_path_list(active_ws, cx); + let has_linked_worktrees = + !worktree_info_from_thread_paths(&ws_path_list, &group_key).is_empty(); + if !has_linked_worktrees { + return false; + } + let thread_store = ThreadMetadataStore::global(cx); + let has_threads_for_ws = thread_store + .read(cx) + .entries_for_path(&ws_path_list) + .next() + .is_some() + || thread_store + .read(cx) + .entries_for_main_worktree_path(&ws_path_list) + .next() + .is_some(); + !has_threads_for_ws + }); + + if !is_draft_for_group + && (group_has_no_threads || active_ws_has_threadless_linked_worktrees) { - let ws_path_list = workspace_path_list(representative_workspace, cx); - let worktrees = worktree_info_from_thread_paths(&ws_path_list, &project_groups); + let worktrees = if active_ws_has_threadless_linked_worktrees { + active_workspace + .as_ref() + .map(|ws| { + worktree_info_from_thread_paths( + &workspace_path_list(ws, cx), + &group_key, + ) + }) + .unwrap_or_default() + } else { + Vec::new() + }; entries.push(ListEntry::NewThread { - path_list: path_list.clone(), - workspace: representative_workspace.clone(), + key: group_key.clone(), worktrees, }); } @@ -1148,7 +1158,7 @@ impl Sidebar { if total > DEFAULT_THREADS_SHOWN { entries.push(ListEntry::ViewMore { - path_list: path_list.clone(), + key: group_key.clone(), is_fully_expanded, }); } @@ -1236,9 +1246,8 @@ impl Sidebar { let rendered = match entry { ListEntry::ProjectHeader { - path_list, + key, label, - workspace, highlight_positions, has_running_threads, waiting_thread_count, @@ -1246,9 +1255,8 @@ impl Sidebar { } => self.render_project_header( ix, false, - path_list, + key, label, - workspace, highlight_positions, *has_running_threads, *waiting_thread_count, @@ -1258,22 +1266,15 @@ impl 
Sidebar { ), ListEntry::Thread(thread) => self.render_thread(ix, thread, is_active, is_selected, cx), ListEntry::ViewMore { - path_list, + key, is_fully_expanded, - } => self.render_view_more(ix, path_list, *is_fully_expanded, is_selected, cx), - ListEntry::NewThread { - path_list, - workspace, - worktrees, - } => self.render_new_thread( - ix, - path_list, - workspace, - is_active, - worktrees, - is_selected, - cx, - ), + } => self.render_view_more(ix, key.path_list(), *is_fully_expanded, is_selected, cx), + ListEntry::DraftThread { worktrees, .. } => { + self.render_draft_thread(ix, is_active, worktrees, is_selected, cx) + } + ListEntry::NewThread { key, worktrees, .. } => { + self.render_new_thread(ix, key, worktrees, is_selected, cx) + } }; if is_group_header_after_first { @@ -1291,13 +1292,9 @@ impl Sidebar { fn render_remote_project_icon( &self, ix: usize, - workspace: &Entity, - cx: &mut Context, + host: Option<&RemoteConnectionOptions>, ) -> Option { - let project = workspace.read(cx).project().read(cx); - let remote_connection_options = project.remote_connection_options(cx)?; - - let remote_icon_per_type = match remote_connection_options { + let remote_icon_per_type = match host? 
{ RemoteConnectionOptions::Wsl(_) => IconName::Linux, RemoteConnectionOptions::Docker(_) => IconName::Box, _ => IconName::Server, @@ -1320,9 +1317,8 @@ impl Sidebar { &self, ix: usize, is_sticky: bool, - path_list: &PathList, + key: &ProjectGroupKey, label: &SharedString, - workspace: &Entity, highlight_positions: &[usize], has_running_threads: bool, waiting_thread_count: usize, @@ -1330,6 +1326,9 @@ impl Sidebar { is_focused: bool, cx: &mut Context, ) -> AnyElement { + let path_list = key.path_list(); + let host = key.host(); + let id_prefix = if is_sticky { "sticky-" } else { "" }; let id = SharedString::from(format!("{id_prefix}project-header-{ix}")); let disclosure_id = SharedString::from(format!("disclosure-{ix}")); @@ -1342,16 +1341,15 @@ impl Sidebar { (IconName::ChevronDown, "Collapse Project") }; - let has_new_thread_entry = self - .contents - .entries - .get(ix + 1) - .is_some_and(|entry| matches!(entry, ListEntry::NewThread { .. })); + let has_new_thread_entry = self.contents.entries.get(ix + 1).is_some_and(|entry| { + matches!( + entry, + ListEntry::NewThread { .. } | ListEntry::DraftThread { .. 
} + ) + }); let show_new_thread_button = !has_new_thread_entry && !self.has_filter_query(cx); - let workspace_for_remove = workspace.clone(); - let workspace_for_menu = workspace.clone(); - let workspace_for_open = workspace.clone(); + let workspace = self.workspace_for_group(path_list, cx); let path_list_for_toggle = path_list.clone(); let path_list_for_collapse = path_list.clone(); @@ -1408,7 +1406,7 @@ impl Sidebar { ) .child(label) .when_some( - self.render_remote_project_icon(ix, workspace, cx), + self.render_remote_project_icon(ix, host.as_ref()), |this, icon| this.child(icon), ) .when(is_collapsed, |this| { @@ -1452,13 +1450,13 @@ impl Sidebar { .on_mouse_down(gpui::MouseButton::Left, |_, _, cx| { cx.stop_propagation(); }) - .child(self.render_project_header_menu( - ix, - id_prefix, - &workspace_for_menu, - &workspace_for_remove, - cx, - )) + .when_some(workspace, |this, workspace| { + this.child( + self.render_project_header_menu( + ix, id_prefix, &workspace, &workspace, cx, + ), + ) + }) .when(view_more_expanded && !is_collapsed, |this| { this.child( IconButton::new( @@ -1480,52 +1478,56 @@ impl Sidebar { })), ) }) - .when(show_new_thread_button, |this| { - this.child( - IconButton::new( - SharedString::from(format!( - "{id_prefix}project-header-new-thread-{ix}", + .when( + show_new_thread_button && workspace_for_new_thread.is_some(), + |this| { + let workspace_for_new_thread = + workspace_for_new_thread.clone().unwrap(); + let path_list_for_new_thread = path_list_for_new_thread.clone(); + this.child( + IconButton::new( + SharedString::from(format!( + "{id_prefix}project-header-new-thread-{ix}", + )), + IconName::Plus, + ) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("New Thread")) + .on_click(cx.listener( + move |this, _, window, cx| { + this.collapsed_groups.remove(&path_list_for_new_thread); + this.selection = None; + this.create_new_thread( + &workspace_for_new_thread, + window, + cx, + ); + }, )), - IconName::Plus, ) - 
.icon_size(IconSize::Small) - .tooltip(Tooltip::text("New Thread")) - .on_click(cx.listener({ - let workspace_for_new_thread = workspace_for_new_thread.clone(); - let path_list_for_new_thread = path_list_for_new_thread.clone(); - move |this, _, window, cx| { - // Uncollapse the group if collapsed so - // the new-thread entry becomes visible. - this.collapsed_groups.remove(&path_list_for_new_thread); - this.selection = None; - this.create_new_thread(&workspace_for_new_thread, window, cx); - } - })), - ) - }) + }, + ) }) .when(!is_active, |this| { + let path_list_for_open = path_list.clone(); this.cursor_pointer() .hover(|s| s.bg(hover_color)) - .tooltip(Tooltip::text("Activate Workspace")) - .on_click(cx.listener({ - move |this, _, window, cx| { - this.active_entry = - Some(ActiveEntry::Draft(workspace_for_open.clone())); + .tooltip(Tooltip::text("Open Workspace")) + .on_click(cx.listener(move |this, _, window, cx| { + if let Some(workspace) = this.workspace_for_group(&path_list_for_open, cx) { + this.active_entry = Some(ActiveEntry::Draft(workspace.clone())); if let Some(multi_workspace) = this.multi_workspace.upgrade() { multi_workspace.update(cx, |multi_workspace, cx| { - multi_workspace.activate( - workspace_for_open.clone(), - window, - cx, - ); + multi_workspace.activate(workspace.clone(), window, cx); }); } - if AgentPanel::is_visible(&workspace_for_open, cx) { - workspace_for_open.update(cx, |workspace, cx| { + if AgentPanel::is_visible(&workspace, cx) { + workspace.update(cx, |workspace, cx| { workspace.focus_panel::(window, cx); }); } + } else { + this.open_workspace_for_group(&path_list_for_open, window, cx); } })) }) @@ -1720,9 +1722,8 @@ impl Sidebar { } let ListEntry::ProjectHeader { - path_list, + key, label, - workspace, highlight_positions, has_running_threads, waiting_thread_count, @@ -1738,9 +1739,8 @@ impl Sidebar { let header_element = self.render_project_header( header_idx, true, - &path_list, + key, &label, - workspace, &highlight_positions, 
*has_running_threads, *waiting_thread_count, @@ -1961,8 +1961,8 @@ impl Sidebar { }; match entry { - ListEntry::ProjectHeader { path_list, .. } => { - let path_list = path_list.clone(); + ListEntry::ProjectHeader { key, .. } => { + let path_list = key.path_list().clone(); self.toggle_collapse(&path_list, window, cx); } ListEntry::Thread(thread) => { @@ -1983,11 +1983,11 @@ impl Sidebar { } } ListEntry::ViewMore { - path_list, + key, is_fully_expanded, .. } => { - let path_list = path_list.clone(); + let path_list = key.path_list().clone(); if *is_fully_expanded { self.expanded_groups.remove(&path_list); } else { @@ -1997,9 +1997,16 @@ impl Sidebar { self.serialize(cx); self.update_entries(cx); } - ListEntry::NewThread { workspace, .. } => { - let workspace = workspace.clone(); - self.create_new_thread(&workspace, window, cx); + ListEntry::DraftThread { .. } => { + // Already active — nothing to do. + } + ListEntry::NewThread { key, .. } => { + let path_list = key.path_list().clone(); + if let Some(workspace) = self.workspace_for_group(&path_list, cx) { + self.create_new_thread(&workspace, window, cx); + } else { + self.open_workspace_for_group(&path_list, window, cx); + } } } } @@ -2251,9 +2258,9 @@ impl Sidebar { let Some(ix) = self.selection else { return }; match self.contents.entries.get(ix) { - Some(ListEntry::ProjectHeader { path_list, .. }) => { - if self.collapsed_groups.contains(path_list) { - let path_list = path_list.clone(); + Some(ListEntry::ProjectHeader { key, .. }) => { + if self.collapsed_groups.contains(key.path_list()) { + let path_list = key.path_list().clone(); self.collapsed_groups.remove(&path_list); self.update_entries(cx); } else if ix + 1 < self.contents.entries.len() { @@ -2275,23 +2282,23 @@ impl Sidebar { let Some(ix) = self.selection else { return }; match self.contents.entries.get(ix) { - Some(ListEntry::ProjectHeader { path_list, .. 
}) => { - if !self.collapsed_groups.contains(path_list) { - let path_list = path_list.clone(); - self.collapsed_groups.insert(path_list); + Some(ListEntry::ProjectHeader { key, .. }) => { + if !self.collapsed_groups.contains(key.path_list()) { + self.collapsed_groups.insert(key.path_list().clone()); self.update_entries(cx); } } Some( - ListEntry::Thread(_) | ListEntry::ViewMore { .. } | ListEntry::NewThread { .. }, + ListEntry::Thread(_) + | ListEntry::ViewMore { .. } + | ListEntry::NewThread { .. } + | ListEntry::DraftThread { .. }, ) => { for i in (0..ix).rev() { - if let Some(ListEntry::ProjectHeader { path_list, .. }) = - self.contents.entries.get(i) + if let Some(ListEntry::ProjectHeader { key, .. }) = self.contents.entries.get(i) { - let path_list = path_list.clone(); self.selection = Some(i); - self.collapsed_groups.insert(path_list); + self.collapsed_groups.insert(key.path_list().clone()); self.update_entries(cx); break; } @@ -2313,7 +2320,10 @@ impl Sidebar { let header_ix = match self.contents.entries.get(ix) { Some(ListEntry::ProjectHeader { .. }) => Some(ix), Some( - ListEntry::Thread(_) | ListEntry::ViewMore { .. } | ListEntry::NewThread { .. }, + ListEntry::Thread(_) + | ListEntry::ViewMore { .. } + | ListEntry::NewThread { .. } + | ListEntry::DraftThread { .. }, ) => (0..ix).rev().find(|&i| { matches!( self.contents.entries.get(i), @@ -2324,15 +2334,14 @@ impl Sidebar { }; if let Some(header_ix) = header_ix { - if let Some(ListEntry::ProjectHeader { path_list, .. }) = - self.contents.entries.get(header_ix) + if let Some(ListEntry::ProjectHeader { key, .. 
}) = self.contents.entries.get(header_ix) { - let path_list = path_list.clone(); - if self.collapsed_groups.contains(&path_list) { - self.collapsed_groups.remove(&path_list); + let path_list = key.path_list(); + if self.collapsed_groups.contains(path_list) { + self.collapsed_groups.remove(path_list); } else { self.selection = Some(header_ix); - self.collapsed_groups.insert(path_list); + self.collapsed_groups.insert(path_list.clone()); } self.update_entries(cx); } @@ -2346,8 +2355,8 @@ impl Sidebar { cx: &mut Context, ) { for entry in &self.contents.entries { - if let ListEntry::ProjectHeader { path_list, .. } = entry { - self.collapsed_groups.insert(path_list.clone()); + if let ListEntry::ProjectHeader { key, .. } = entry { + self.collapsed_groups.insert(key.path_list().clone()); } } self.update_entries(cx); @@ -2402,17 +2411,18 @@ impl Sidebar { }); // Find the workspace that owns this thread's project group by - // walking backwards to the nearest ProjectHeader. We must use - // *this* workspace (not the active workspace) because the user - // might be archiving a thread in a non-active group. + // walking backwards to the nearest ProjectHeader and looking up + // an open workspace for that group's path_list. let group_workspace = current_pos.and_then(|pos| { - self.contents.entries[..pos] - .iter() - .rev() - .find_map(|e| match e { - ListEntry::ProjectHeader { workspace, .. } => Some(workspace.clone()), - _ => None, - }) + let path_list = + self.contents.entries[..pos] + .iter() + .rev() + .find_map(|e| match e { + ListEntry::ProjectHeader { key, .. 
} => Some(key.path_list()), + _ => None, + })?; + self.workspace_for_group(path_list, cx) }); let next_thread = current_pos.and_then(|pos| { @@ -2527,28 +2537,26 @@ impl Sidebar { .insert(session_id.clone(), Utc::now()); } - fn mru_threads_for_switcher(&self, _cx: &App) -> Vec { + fn mru_threads_for_switcher(&self, cx: &App) -> Vec { let mut current_header_label: Option = None; - let mut current_header_workspace: Option> = None; + let mut current_header_path_list: Option = None; let mut entries: Vec = self .contents .entries .iter() .filter_map(|entry| match entry { - ListEntry::ProjectHeader { - label, workspace, .. - } => { + ListEntry::ProjectHeader { label, key, .. } => { current_header_label = Some(label.clone()); - current_header_workspace = Some(workspace.clone()); + current_header_path_list = Some(key.path_list().clone()); None } ListEntry::Thread(thread) => { let workspace = match &thread.workspace { - ThreadEntryWorkspace::Open(workspace) => workspace.clone(), - ThreadEntryWorkspace::Closed(_) => { - current_header_workspace.as_ref()?.clone() - } - }; + ThreadEntryWorkspace::Open(workspace) => Some(workspace.clone()), + ThreadEntryWorkspace::Closed(_) => current_header_path_list + .as_ref() + .and_then(|pl| self.workspace_for_group(pl, cx)), + }?; let notified = self .contents .is_thread_notified(&thread.metadata.session_id); @@ -3055,7 +3063,9 @@ impl Sidebar { .rev() .find(|&&header_ix| header_ix <= selected_ix) .and_then(|&header_ix| match &self.contents.entries[header_ix] { - ListEntry::ProjectHeader { workspace, .. } => Some(workspace.clone()), + ListEntry::ProjectHeader { key, .. 
} => { + self.workspace_for_group(key.path_list(), cx) + } _ => None, }) } else { @@ -3098,11 +3108,9 @@ impl Sidebar { }); } - fn render_new_thread( + fn render_draft_thread( &self, ix: usize, - _path_list: &PathList, - workspace: &Entity, is_active: bool, worktrees: &[WorktreeInfo], is_selected: bool, @@ -3110,12 +3118,48 @@ impl Sidebar { ) -> AnyElement { let label: SharedString = if is_active { self.active_draft_text(cx) - .unwrap_or_else(|| DEFAULT_THREAD_TITLE.into()) + .unwrap_or_else(|| "Untitled Thread".into()) } else { - DEFAULT_THREAD_TITLE.into() + "Untitled Thread".into() }; - let workspace = workspace.clone(); + let id = SharedString::from(format!("draft-thread-btn-{}", ix)); + + let thread_item = ThreadItem::new(id, label) + .icon(IconName::Plus) + .icon_color(Color::Custom(cx.theme().colors().icon_muted.opacity(0.8))) + .worktrees( + worktrees + .iter() + .map(|wt| ThreadItemWorktreeInfo { + name: wt.name.clone(), + full_path: wt.full_path.clone(), + highlight_positions: wt.highlight_positions.clone(), + }) + .collect(), + ) + .selected(true) + .focused(is_selected); + + div() + .on_mouse_down(gpui::MouseButton::Left, |_, _, cx| { + cx.stop_propagation(); + }) + .child(thread_item) + .into_any_element() + } + + fn render_new_thread( + &self, + ix: usize, + key: &ProjectGroupKey, + worktrees: &[WorktreeInfo], + is_selected: bool, + cx: &mut Context, + ) -> AnyElement { + let label: SharedString = DEFAULT_THREAD_TITLE.into(); + let path_list = key.path_list().clone(); + let id = SharedString::from(format!("new-thread-btn-{}", ix)); let thread_item = ThreadItem::new(id, label) @@ -3131,25 +3175,18 @@ impl Sidebar { }) .collect(), ) - .selected(is_active) + .selected(false) .focused(is_selected) - .when(!is_active, |this| { - this.on_click(cx.listener(move |this, _, window, cx| { - this.selection = None; + .on_click(cx.listener(move |this, _, window, cx| { + this.selection = None; + if let Some(workspace) = this.workspace_for_group(&path_list, cx) { 
this.create_new_thread(&workspace, window, cx); - })) - }); + } else { + this.open_workspace_for_group(&path_list, window, cx); + } + })); - if is_active { - div() - .on_mouse_down(gpui::MouseButton::Left, |_, _, cx| { - cx.stop_propagation(); - }) - .child(thread_item) - .into_any_element() - } else { - thread_item.into_any_element() - } + thread_item.into_any_element() } fn render_no_results(&self, cx: &mut Context) -> impl IntoElement { diff --git a/crates/sidebar/src/sidebar_tests.rs b/crates/sidebar/src/sidebar_tests.rs index 1499fc48a9fd094b07d181701866ab941c5968f3..cf1ee8a0f524d9d94edf83c24ecea900f3261fb8 100644 --- a/crates/sidebar/src/sidebar_tests.rs +++ b/crates/sidebar/src/sidebar_tests.rs @@ -88,14 +88,18 @@ fn setup_sidebar( sidebar } -async fn save_n_test_threads(count: u32, path_list: &PathList, cx: &mut gpui::VisualTestContext) { +async fn save_n_test_threads( + count: u32, + project: &Entity, + cx: &mut gpui::VisualTestContext, +) { for i in 0..count { save_thread_metadata( acp::SessionId::new(Arc::from(format!("thread-{}", i))), format!("Thread {}", i + 1).into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, i).unwrap(), None, - path_list.clone(), + project, cx, ) } @@ -104,7 +108,7 @@ async fn save_n_test_threads(count: u32, path_list: &PathList, cx: &mut gpui::Vi async fn save_test_thread_metadata( session_id: &acp::SessionId, - path_list: PathList, + project: &Entity, cx: &mut TestAppContext, ) { save_thread_metadata( @@ -112,7 +116,7 @@ async fn save_test_thread_metadata( "Test".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), None, - path_list, + project, cx, ) } @@ -120,7 +124,7 @@ async fn save_test_thread_metadata( async fn save_named_thread_metadata( session_id: &str, title: &str, - path_list: &PathList, + project: &Entity, cx: &mut gpui::VisualTestContext, ) { save_thread_metadata( @@ -128,7 +132,7 @@ async fn save_named_thread_metadata( SharedString::from(title.to_string()), 
chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), None, - path_list.clone(), + project, cx, ); cx.run_until_parked(); @@ -139,21 +143,31 @@ fn save_thread_metadata( title: SharedString, updated_at: DateTime, created_at: Option>, - path_list: PathList, + project: &Entity, cx: &mut TestAppContext, ) { - let metadata = ThreadMetadata { - session_id, - agent_id: agent::ZED_AGENT_ID.clone(), - title, - updated_at, - created_at, - folder_paths: path_list, - main_worktree_paths: PathList::default(), - archived: false, - }; cx.update(|cx| { - ThreadMetadataStore::global(cx).update(cx, |store, cx| store.save_manually(metadata, cx)) + let (folder_paths, main_worktree_paths) = { + let project_ref = project.read(cx); + let paths: Vec> = project_ref + .visible_worktrees(cx) + .map(|worktree| worktree.read(cx).abs_path()) + .collect(); + let folder_paths = PathList::new(&paths); + let main_worktree_paths = project_ref.project_group_key(cx).path_list().clone(); + (folder_paths, main_worktree_paths) + }; + let metadata = ThreadMetadata { + session_id, + agent_id: agent::ZED_AGENT_ID.clone(), + title, + updated_at, + created_at, + folder_paths, + main_worktree_paths, + archived: false, + }; + ThreadMetadataStore::global(cx).update(cx, |store, cx| store.save_manually(metadata, cx)); }); cx.run_until_parked(); } @@ -193,11 +207,11 @@ fn visible_entries_as_strings( match entry { ListEntry::ProjectHeader { label, - path_list, + key, highlight_positions: _, .. } => { - let icon = if sidebar.collapsed_groups.contains(path_list) { + let icon = if sidebar.collapsed_groups.contains(key.path_list()) { ">" } else { "v" @@ -248,6 +262,22 @@ fn visible_entries_as_strings( format!(" + View More{}", selected) } } + ListEntry::DraftThread { worktrees, .. 
} => { + let worktree = if worktrees.is_empty() { + String::new() + } else { + let mut seen = Vec::new(); + let mut chips = Vec::new(); + for wt in worktrees { + if !seen.contains(&wt.name) { + seen.push(wt.name.clone()); + chips.push(format!("{{{}}}", wt.name)); + } + } + format!(" {}", chips.join(", ")) + }; + format!(" [~ Draft{}]{}", worktree, selected) + } ListEntry::NewThread { worktrees, .. } => { let worktree = if worktrees.is_empty() { String::new() @@ -274,11 +304,14 @@ fn visible_entries_as_strings( async fn test_serialization_round_trip(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_n_test_threads(3, &path_list, cx).await; + save_n_test_threads(3, &project, cx).await; + + let path_list = project.read_with(cx, |project, cx| { + project.project_group_key(cx).path_list().clone() + }); // Set a custom width, collapse the group, and expand "View More". 
sidebar.update_in(cx, |sidebar, window, cx| { @@ -437,17 +470,15 @@ async fn test_single_workspace_no_threads(cx: &mut TestAppContext) { async fn test_single_workspace_with_saved_threads(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_thread_metadata( acp::SessionId::new(Arc::from("thread-1")), "Fix crash in project panel".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 3, 0, 0, 0).unwrap(), None, - path_list.clone(), + &project, cx, ); @@ -456,7 +487,7 @@ async fn test_single_workspace_with_saved_threads(cx: &mut TestAppContext) { "Add inline diff view".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap(), None, - path_list, + &project, cx, ); cx.run_until_parked(); @@ -478,18 +509,16 @@ async fn test_single_workspace_with_saved_threads(cx: &mut TestAppContext) { async fn test_workspace_lifecycle(cx: &mut TestAppContext) { let project = init_test_project("/project-a", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); // Single workspace with a thread - let path_list = PathList::new(&[std::path::PathBuf::from("/project-a")]); - save_thread_metadata( acp::SessionId::new(Arc::from("thread-a1")), "Thread A1".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), None, - path_list, + &project, cx, ); cx.run_until_parked(); @@ -530,11 +559,10 @@ async fn test_workspace_lifecycle(cx: &mut TestAppContext) { async fn 
test_view_more_pagination(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_n_test_threads(12, &path_list, cx).await; + save_n_test_threads(12, &project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -557,12 +585,15 @@ async fn test_view_more_pagination(cx: &mut TestAppContext) { async fn test_view_more_batched_expansion(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); // Create 17 threads: initially shows 5, then 10, then 15, then all 17 with Collapse - save_n_test_threads(17, &path_list, cx).await; + save_n_test_threads(17, &project, cx).await; + + let path_list = project.read_with(cx, |project, cx| { + project.project_group_key(cx).path_list().clone() + }); multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -629,11 +660,14 @@ async fn test_view_more_batched_expansion(cx: &mut TestAppContext) { async fn test_collapse_and_expand_group(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = 
setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_n_test_threads(1, &path_list, cx).await; + save_n_test_threads(1, &project, cx).await; + + let path_list = project.read_with(cx, |project, cx| { + project.project_group_key(cx).path_list().clone() + }); multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -685,9 +719,8 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { s.contents.entries = vec![ // Expanded project header ListEntry::ProjectHeader { - path_list: expanded_path.clone(), + key: project::ProjectGroupKey::new(None, expanded_path.clone()), label: "expanded-project".into(), - workspace: workspace.clone(), highlight_positions: Vec::new(), has_running_threads: false, waiting_thread_count: 0, @@ -809,14 +842,13 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { }), // View More entry ListEntry::ViewMore { - path_list: expanded_path.clone(), + key: project::ProjectGroupKey::new(None, expanded_path.clone()), is_fully_expanded: false, }, // Collapsed project header ListEntry::ProjectHeader { - path_list: collapsed_path.clone(), + key: project::ProjectGroupKey::new(None, collapsed_path.clone()), label: "collapsed-project".into(), - workspace: workspace.clone(), highlight_positions: Vec::new(), has_running_threads: false, waiting_thread_count: 0, @@ -872,11 +904,10 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { async fn test_keyboard_select_next_and_previous(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_n_test_threads(3, &path_list, 
cx).await; + save_n_test_threads(3, &project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -932,11 +963,10 @@ async fn test_keyboard_select_next_and_previous(cx: &mut TestAppContext) { async fn test_keyboard_select_first_and_last(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_n_test_threads(3, &path_list, cx).await; + save_n_test_threads(3, &project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -987,11 +1017,10 @@ async fn test_keyboard_focus_in_does_not_set_selection(cx: &mut TestAppContext) async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_n_test_threads(1, &path_list, cx).await; + save_n_test_threads(1, &project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -1029,11 +1058,10 @@ async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestA async fn test_keyboard_confirm_on_view_more_expands(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + 
cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_n_test_threads(8, &path_list, cx).await; + save_n_test_threads(8, &project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -1064,11 +1092,10 @@ async fn test_keyboard_confirm_on_view_more_expands(cx: &mut TestAppContext) { async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_n_test_threads(1, &path_list, cx).await; + save_n_test_threads(1, &project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -1109,11 +1136,10 @@ async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContex async fn test_keyboard_collapse_from_child_selects_parent(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_n_test_threads(1, &path_list, cx).await; + save_n_test_threads(1, &project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -1177,11 +1203,10 @@ async fn test_keyboard_navigation_on_empty_list(cx: &mut TestAppContext) { 
async fn test_selection_clamps_after_entry_removal(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_n_test_threads(1, &path_list, cx).await; + save_n_test_threads(1, &project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -1254,15 +1279,13 @@ async fn test_parallel_threads_shown_with_live_status(cx: &mut TestAppContext) { cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - // Open thread A and keep it generating. 
let connection = StubAgentConnection::new(); open_thread_with_connection(&panel, connection.clone(), cx); send_message(&panel, cx); let session_id_a = active_session_id(&panel, cx); - save_test_thread_metadata(&session_id_a, path_list.clone(), cx).await; + save_test_thread_metadata(&session_id_a, &project, cx).await; cx.update(|_, cx| { connection.send_update( @@ -1281,7 +1304,7 @@ async fn test_parallel_threads_shown_with_live_status(cx: &mut TestAppContext) { send_message(&panel, cx); let session_id_b = active_session_id(&panel, cx); - save_test_thread_metadata(&session_id_b, path_list.clone(), cx).await; + save_test_thread_metadata(&session_id_b, &project, cx).await; cx.run_until_parked(); @@ -1300,15 +1323,13 @@ async fn test_background_thread_completion_triggers_notification(cx: &mut TestAp cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); let (sidebar, panel_a) = setup_sidebar_with_agent_panel(&multi_workspace, cx); - let path_list_a = PathList::new(&[std::path::PathBuf::from("/project-a")]); - // Open thread on workspace A and keep it generating. 
let connection_a = StubAgentConnection::new(); open_thread_with_connection(&panel_a, connection_a.clone(), cx); send_message(&panel_a, cx); let session_id_a = active_session_id(&panel_a, cx); - save_test_thread_metadata(&session_id_a, path_list_a.clone(), cx).await; + save_test_thread_metadata(&session_id_a, &project_a, cx).await; cx.update(|_, cx| { connection_a.send_update( @@ -1358,11 +1379,9 @@ fn type_in_search(sidebar: &Entity, query: &str, cx: &mut gpui::VisualT async fn test_search_narrows_visible_threads_to_matches(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - for (id, title, hour) in [ ("t-1", "Fix crash in project panel", 3), ("t-2", "Add inline diff view", 2), @@ -1373,7 +1392,7 @@ async fn test_search_narrows_visible_threads_to_matches(cx: &mut TestAppContext) title.into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), None, - path_list.clone(), + &project, cx, ); } @@ -1411,17 +1430,15 @@ async fn test_search_matches_regardless_of_case(cx: &mut TestAppContext) { // Search should match case-insensitively so they can still find it. 
let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_thread_metadata( acp::SessionId::new(Arc::from("thread-1")), "Fix Crash In Project Panel".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), None, - path_list, + &project, cx, ); cx.run_until_parked(); @@ -1453,18 +1470,16 @@ async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContex // to dismiss the filter and see the full list again. let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - for (id, title, hour) in [("t-1", "Alpha thread", 2), ("t-2", "Beta thread", 1)] { save_thread_metadata( acp::SessionId::new(Arc::from(id)), title.into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), None, - path_list.clone(), + &project, cx, ) } @@ -1502,11 +1517,9 @@ async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContex async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppContext) { let project_a = init_test_project("/project-a", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list_a = 
PathList::new(&[std::path::PathBuf::from("/project-a")]); - for (id, title, hour) in [ ("a1", "Fix bug in sidebar", 2), ("a2", "Add tests for editor", 1), @@ -1516,7 +1529,7 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC title.into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), None, - path_list_a.clone(), + &project_a, cx, ) } @@ -1527,7 +1540,8 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC }); cx.run_until_parked(); - let path_list_b = PathList::new::(&[]); + let project_b = + multi_workspace.read_with(cx, |mw, cx| mw.workspaces()[1].read(cx).project().clone()); for (id, title, hour) in [ ("b1", "Refactor sidebar layout", 3), @@ -1538,7 +1552,7 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC title.into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), None, - path_list_b.clone(), + &project_b, cx, ) } @@ -1584,11 +1598,9 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { let project_a = init_test_project("/alpha-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list_a = PathList::new(&[std::path::PathBuf::from("/alpha-project")]); - for (id, title, hour) in [ ("a1", "Fix bug in sidebar", 2), ("a2", "Add tests for editor", 1), @@ -1598,7 +1610,7 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { title.into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), None, - path_list_a.clone(), + &project_a, cx, ) } @@ -1609,7 +1621,8 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { }); 
cx.run_until_parked(); - let path_list_b = PathList::new::(&[]); + let project_b = + multi_workspace.read_with(cx, |mw, cx| mw.workspaces()[1].read(cx).project().clone()); for (id, title, hour) in [ ("b1", "Refactor sidebar layout", 3), @@ -1620,7 +1633,7 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { title.into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), None, - path_list_b.clone(), + &project_b, cx, ) } @@ -1686,11 +1699,9 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { async fn test_search_finds_threads_hidden_behind_view_more(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - // Create 8 threads. The oldest one has a unique name and will be // behind View More (only 5 shown by default). 
for i in 0..8u32 { @@ -1704,7 +1715,7 @@ async fn test_search_finds_threads_hidden_behind_view_more(cx: &mut TestAppConte title.into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, i).unwrap(), None, - path_list.clone(), + &project, cx, ) } @@ -1738,17 +1749,15 @@ async fn test_search_finds_threads_hidden_behind_view_more(cx: &mut TestAppConte async fn test_search_finds_threads_inside_collapsed_groups(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_thread_metadata( acp::SessionId::new(Arc::from("thread-1")), "Important thread".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), None, - path_list, + &project, cx, ); cx.run_until_parked(); @@ -1779,11 +1788,9 @@ async fn test_search_finds_threads_inside_collapsed_groups(cx: &mut TestAppConte async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - for (id, title, hour) in [ ("t-1", "Fix crash in panel", 3), ("t-2", "Fix lint warnings", 2), @@ -1794,7 +1801,7 @@ async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext) title.into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), None, - path_list.clone(), + &project, cx, ) } @@ -1841,7 +1848,7 @@ async fn 
test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext) async fn test_confirm_on_historical_thread_activates_workspace(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); multi_workspace.update_in(cx, |mw, window, cx| { @@ -1849,14 +1856,12 @@ async fn test_confirm_on_historical_thread_activates_workspace(cx: &mut TestAppC }); cx.run_until_parked(); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_thread_metadata( acp::SessionId::new(Arc::from("hist-1")), "Historical Thread".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 6, 1, 0, 0, 0).unwrap(), None, - path_list, + &project, cx, ); cx.run_until_parked(); @@ -1899,17 +1904,15 @@ async fn test_confirm_on_historical_thread_activates_workspace(cx: &mut TestAppC async fn test_click_clears_selection_and_focus_in_restores_it(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_thread_metadata( acp::SessionId::new(Arc::from("t-1")), "Thread A".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap(), None, - path_list.clone(), + &project, cx, ); @@ -1918,7 +1921,7 @@ async fn test_click_clears_selection_and_focus_in_restores_it(cx: &mut TestAppCo "Thread B".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), None, - path_list, + &project, cx, ); @@ -1966,8 +1969,6 @@ async fn 
test_thread_title_update_propagates_to_sidebar(cx: &mut TestAppContext) cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - let connection = StubAgentConnection::new(); connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( acp::ContentChunk::new("Hi there!".into()), @@ -1976,7 +1977,7 @@ async fn test_thread_title_update_propagates_to_sidebar(cx: &mut TestAppContext) send_message(&panel, cx); let session_id = active_session_id(&panel, cx); - save_test_thread_metadata(&session_id, path_list.clone(), cx).await; + save_test_thread_metadata(&session_id, &project, cx).await; cx.run_until_parked(); assert_eq!( @@ -2014,8 +2015,6 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) { cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); let (sidebar, panel_a) = setup_sidebar_with_agent_panel(&multi_workspace, cx); - let path_list_a = PathList::new(&[std::path::PathBuf::from("/project-a")]); - // Save a thread so it appears in the list. let connection_a = StubAgentConnection::new(); connection_a.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( @@ -2024,7 +2023,7 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) { open_thread_with_connection(&panel_a, connection_a, cx); send_message(&panel_a, cx); let session_id_a = active_session_id(&panel_a, cx); - save_test_thread_metadata(&session_id_a, path_list_a.clone(), cx).await; + save_test_thread_metadata(&session_id_a, &project_a, cx).await; // Add a second workspace with its own agent panel. 
let fs = cx.update(|_, cx| ::global(cx)); @@ -2099,8 +2098,7 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) { open_thread_with_connection(&panel_b, connection_b, cx); send_message(&panel_b, cx); let session_id_b = active_session_id(&panel_b, cx); - let path_list_b = PathList::new(&[std::path::PathBuf::from("/project-b")]); - save_test_thread_metadata(&session_id_b, path_list_b.clone(), cx).await; + save_test_thread_metadata(&session_id_b, &project_b, cx).await; cx.run_until_parked(); // Workspace A is currently active. Click a thread in workspace B, @@ -2161,7 +2159,7 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) { open_thread_with_connection(&panel_b, connection_b2, cx); send_message(&panel_b, cx); let session_id_b2 = active_session_id(&panel_b, cx); - save_test_thread_metadata(&session_id_b2, path_list_b.clone(), cx).await; + save_test_thread_metadata(&session_id_b2, &project_b, cx).await; cx.run_until_parked(); // Panel B is not the active workspace's panel (workspace A is @@ -2243,8 +2241,6 @@ async fn test_new_thread_button_works_after_adding_folder(cx: &mut TestAppContex cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); - let path_list_a = PathList::new(&[std::path::PathBuf::from("/project-a")]); - // Start a thread and send a message so it has history. 
let connection = StubAgentConnection::new(); connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( @@ -2253,7 +2249,7 @@ async fn test_new_thread_button_works_after_adding_folder(cx: &mut TestAppContex open_thread_with_connection(&panel, connection, cx); send_message(&panel, cx); let session_id = active_session_id(&panel, cx); - save_test_thread_metadata(&session_id, path_list_a.clone(), cx).await; + save_test_thread_metadata(&session_id, &project, cx).await; cx.run_until_parked(); // Verify the thread appears in the sidebar. @@ -2287,9 +2283,15 @@ async fn test_new_thread_button_works_after_adding_folder(cx: &mut TestAppContex // The workspace path_list is now [project-a, project-b]. The active // thread's metadata was re-saved with the new paths by the agent panel's // project subscription, so it stays visible under the updated group. + // The old [project-a] group persists in the sidebar (empty) because + // project_group_keys is append-only. assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project-a, project-b]", " Hello *",] + vec![ + "v [project-a, project-b]", // + " Hello *", + "v [project-a]", + ] ); // The "New Thread" button must still be clickable (not stuck in @@ -2334,8 +2336,6 @@ async fn test_cmd_n_shows_new_thread_entry(cx: &mut TestAppContext) { cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - // Create a non-empty thread (has messages). 
let connection = StubAgentConnection::new(); connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( @@ -2345,7 +2345,7 @@ async fn test_cmd_n_shows_new_thread_entry(cx: &mut TestAppContext) { send_message(&panel, cx); let session_id = active_session_id(&panel, cx); - save_test_thread_metadata(&session_id, path_list.clone(), cx).await; + save_test_thread_metadata(&session_id, &project, cx).await; cx.run_until_parked(); assert_eq!( @@ -2365,8 +2365,8 @@ async fn test_cmd_n_shows_new_thread_entry(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " [+ New Thread]", " Hello *"], - "After Cmd-N the sidebar should show a highlighted New Thread entry" + vec!["v [my-project]", " [~ Draft]", " Hello *"], + "After Cmd-N the sidebar should show a highlighted Draft entry" ); sidebar.read_with(cx, |sidebar, _cx| { @@ -2385,8 +2385,6 @@ async fn test_draft_with_server_session_shows_as_draft(cx: &mut TestAppContext) cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - // Create a saved thread so the workspace has history. 
let connection = StubAgentConnection::new(); connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( @@ -2395,7 +2393,7 @@ async fn test_draft_with_server_session_shows_as_draft(cx: &mut TestAppContext) open_thread_with_connection(&panel, connection, cx); send_message(&panel, cx); let saved_session_id = active_session_id(&panel, cx); - save_test_thread_metadata(&saved_session_id, path_list.clone(), cx).await; + save_test_thread_metadata(&saved_session_id, &project, cx).await; cx.run_until_parked(); assert_eq!( @@ -2412,8 +2410,7 @@ async fn test_draft_with_server_session_shows_as_draft(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " [+ New Thread]", " Hello *"], - "Draft with a server session should still show as [+ New Thread]" + vec!["v [my-project]", " [~ Draft]", " Hello *"], ); let workspace = multi_workspace.read_with(cx, |mw, _cx| mw.workspace().clone()); @@ -2503,17 +2500,12 @@ async fn test_cmd_n_shows_new_thread_entry_in_absorbed_worktree(cx: &mut TestApp send_message(&worktree_panel, cx); let session_id = active_session_id(&worktree_panel, cx); - let wt_path_list = PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]); - save_test_thread_metadata(&session_id, wt_path_list, cx).await; + save_test_thread_metadata(&session_id, &worktree_project, cx).await; cx.run_until_parked(); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - "v [project]", - " [+ New Thread]", - " Hello {wt-feature-a} *" - ] + vec!["v [project]", " Hello {wt-feature-a} *"] ); // Simulate Cmd-N in the worktree workspace. 
@@ -2529,12 +2521,11 @@ async fn test_cmd_n_shows_new_thread_entry_in_absorbed_worktree(cx: &mut TestApp visible_entries_as_strings(&sidebar, cx), vec![ "v [project]", - " [+ New Thread]", - " [+ New Thread {wt-feature-a}]", + " [~ Draft {wt-feature-a}]", " Hello {wt-feature-a} *" ], "After Cmd-N in an absorbed worktree, the sidebar should show \ - a highlighted New Thread entry under the main repo header" + a highlighted Draft entry under the main repo header" ); sidebar.read_with(cx, |sidebar, _cx| { @@ -2586,14 +2577,17 @@ async fn test_search_matches_worktree_name(cx: &mut TestAppContext) { .update(cx, |project, cx| project.git_scans_complete(cx)) .await; + let worktree_project = project::Project::test(fs.clone(), ["/wt/rosewood".as_ref()], cx).await; + worktree_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let main_paths = PathList::new(&[std::path::PathBuf::from("/project")]); - let wt_paths = PathList::new(&[std::path::PathBuf::from("/wt/rosewood")]); - save_named_thread_metadata("main-t", "Unrelated Thread", &main_paths, cx).await; - save_named_thread_metadata("wt-t", "Fix Bug", &wt_paths, cx).await; + save_named_thread_metadata("main-t", "Unrelated Thread", &project, cx).await; + save_named_thread_metadata("wt-t", "Fix Bug", &worktree_project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -2615,13 +2609,17 @@ async fn test_git_worktree_added_live_updates_sidebar(cx: &mut TestAppContext) { .update(cx, |project, cx| project.git_scans_complete(cx)) .await; + let worktree_project = project::Project::test(fs.clone(), ["/wt/rosewood".as_ref()], cx).await; + worktree_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + let (multi_workspace, cx) = cx.add_window_view(|window, cx| 
MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); // Save a thread against a worktree path that doesn't exist yet. - let wt_paths = PathList::new(&[std::path::PathBuf::from("/wt/rosewood")]); - save_named_thread_metadata("wt-thread", "Worktree Thread", &wt_paths, cx).await; + save_named_thread_metadata("wt-thread", "Worktree Thread", &worktree_project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -2650,11 +2648,7 @@ async fn test_git_worktree_added_live_updates_sidebar(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - "v [project]", - " [+ New Thread]", - " Worktree Thread {rosewood}", - ] + vec!["v [project]", " Worktree Thread {rosewood}",] ); } @@ -2714,10 +2708,8 @@ async fn test_two_worktree_workspaces_absorbed_when_main_added(cx: &mut TestAppC }); let sidebar = setup_sidebar(&multi_workspace, cx); - let paths_a = PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]); - let paths_b = PathList::new(&[std::path::PathBuf::from("/wt-feature-b")]); - save_named_thread_metadata("thread-a", "Thread A", &paths_a, cx).await; - save_named_thread_metadata("thread-b", "Thread B", &paths_b, cx).await; + save_named_thread_metadata("thread-a", "Thread A", &project_a, cx).await; + save_named_thread_metadata("thread-b", "Thread B", &project_b, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -2748,7 +2740,6 @@ async fn test_two_worktree_workspaces_absorbed_when_main_added(cx: &mut TestAppC visible_entries_as_strings(&sidebar, cx), vec![ "v [project]", - " [+ New Thread]", " Thread A {wt-feature-a}", " Thread B {wt-feature-b}", ] @@ -2813,8 +2804,7 @@ async fn test_threadless_workspace_shows_new_thread_with_worktree_chip(cx: &mut let sidebar = setup_sidebar(&multi_workspace, cx); // Only save a thread for workspace A. 
- let paths_a = PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]); - save_named_thread_metadata("thread-a", "Thread A", &paths_a, cx).await; + save_named_thread_metadata("thread-a", "Thread A", &project_a, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -2894,11 +2884,7 @@ async fn test_multi_worktree_thread_shows_multiple_chips(cx: &mut TestAppContext let sidebar = setup_sidebar(&multi_workspace, cx); // Save a thread under the same paths as the workspace roots. - let thread_paths = PathList::new(&[ - std::path::PathBuf::from("/worktrees/project_a/olivetti/project_a"), - std::path::PathBuf::from("/worktrees/project_b/selectric/project_b"), - ]); - save_named_thread_metadata("wt-thread", "Cross Worktree Thread", &thread_paths, cx).await; + save_named_thread_metadata("wt-thread", "Cross Worktree Thread", &project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -2971,11 +2957,7 @@ async fn test_same_named_worktree_chips_are_deduplicated(cx: &mut TestAppContext let sidebar = setup_sidebar(&multi_workspace, cx); // Thread with roots in both repos' "olivetti" worktrees. - let thread_paths = PathList::new(&[ - std::path::PathBuf::from("/worktrees/project_a/olivetti/project_a"), - std::path::PathBuf::from("/worktrees/project_b/olivetti/project_b"), - ]); - save_named_thread_metadata("wt-thread", "Same Branch Thread", &thread_paths, cx).await; + save_named_thread_metadata("wt-thread", "Same Branch Thread", &project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -3070,8 +3052,7 @@ async fn test_absorbed_worktree_running_thread_shows_live_status(cx: &mut TestAp let session_id = active_session_id(&worktree_panel, cx); // Save metadata so the sidebar knows about this thread. 
- let wt_paths = PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]); - save_test_thread_metadata(&session_id, wt_paths, cx).await; + save_test_thread_metadata(&session_id, &worktree_project, cx).await; // Keep the thread generating by sending a chunk without ending // the turn. @@ -3091,7 +3072,7 @@ async fn test_absorbed_worktree_running_thread_shows_live_status(cx: &mut TestAp entries, vec![ "v [project]", - " [+ New Thread]", + " [~ Draft]", " Hello {wt-feature-a} * (running)", ] ); @@ -3164,8 +3145,7 @@ async fn test_absorbed_worktree_completion_triggers_notification(cx: &mut TestAp send_message(&worktree_panel, cx); let session_id = active_session_id(&worktree_panel, cx); - let wt_paths = PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]); - save_test_thread_metadata(&session_id, wt_paths, cx).await; + save_test_thread_metadata(&session_id, &worktree_project, cx).await; cx.update(|_, cx| { connection.send_update( @@ -3180,7 +3160,7 @@ async fn test_absorbed_worktree_completion_triggers_notification(cx: &mut TestAp visible_entries_as_strings(&sidebar, cx), vec![ "v [project]", - " [+ New Thread]", + " [~ Draft]", " Hello {wt-feature-a} * (running)", ] ); @@ -3190,11 +3170,7 @@ async fn test_absorbed_worktree_completion_triggers_notification(cx: &mut TestAp assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - "v [project]", - " [+ New Thread]", - " Hello {wt-feature-a} * (!)", - ] + vec!["v [project]", " [~ Draft]", " Hello {wt-feature-a} * (!)",] ); } @@ -3232,13 +3208,17 @@ async fn test_clicking_worktree_thread_opens_workspace_when_none_exists(cx: &mut .update(cx, |p, cx| p.git_scans_complete(cx)) .await; + let worktree_project = project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await; + worktree_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(main_project.clone(), window, cx)); let sidebar = 
setup_sidebar(&multi_workspace, cx); // Save a thread for the worktree path (no workspace for it). - let paths_wt = PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]); - save_named_thread_metadata("thread-wt", "WT Thread", &paths_wt, cx).await; + save_named_thread_metadata("thread-wt", "WT Thread", &worktree_project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -3246,11 +3226,7 @@ async fn test_clicking_worktree_thread_opens_workspace_when_none_exists(cx: &mut // Thread should appear under the main repo with a worktree chip. assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - "v [project]", - " [+ New Thread]", - " WT Thread {wt-feature-a}" - ], + vec!["v [project]", " WT Thread {wt-feature-a}"], ); // Only 1 workspace should exist. @@ -3262,7 +3238,7 @@ async fn test_clicking_worktree_thread_opens_workspace_when_none_exists(cx: &mut // Focus the sidebar and select the worktree thread. open_and_focus_sidebar(&sidebar, cx); sidebar.update_in(cx, |sidebar, _window, _cx| { - sidebar.selection = Some(2); // index 0 is header, 1 is new thread, 2 is the thread + sidebar.selection = Some(1); // index 0 is header, 1 is the thread }); // Confirm to open the worktree thread. 
@@ -3323,28 +3299,28 @@ async fn test_clicking_worktree_thread_does_not_briefly_render_as_separate_proje .update(cx, |p, cx| p.git_scans_complete(cx)) .await; + let worktree_project = project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await; + worktree_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(main_project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let paths_wt = PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]); - save_named_thread_metadata("thread-wt", "WT Thread", &paths_wt, cx).await; + save_named_thread_metadata("thread-wt", "WT Thread", &worktree_project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - "v [project]", - " [+ New Thread]", - " WT Thread {wt-feature-a}" - ], + vec!["v [project]", " WT Thread {wt-feature-a}"], ); open_and_focus_sidebar(&sidebar, cx); sidebar.update_in(cx, |sidebar, _window, _cx| { - sidebar.selection = Some(2); + sidebar.selection = Some(1); // index 0 is header, 1 is the thread }); let assert_sidebar_state = |sidebar: &mut Sidebar, _cx: &mut Context| { @@ -3400,7 +3376,7 @@ async fn test_clicking_worktree_thread_does_not_briefly_render_as_separate_proje ListEntry::ViewMore { .. } => { panic!("unexpected `View More` entry while opening linked worktree thread"); } - ListEntry::NewThread { .. } => {} + ListEntry::DraftThread { .. } | ListEntry::NewThread { .. 
} => {} } } @@ -3480,10 +3456,8 @@ async fn test_clicking_absorbed_worktree_thread_activates_worktree_workspace( let sidebar = setup_sidebar(&multi_workspace, cx); - let paths_main = PathList::new(&[std::path::PathBuf::from("/project")]); - let paths_wt = PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]); - save_named_thread_metadata("thread-main", "Main Thread", &paths_main, cx).await; - save_named_thread_metadata("thread-wt", "WT Thread", &paths_wt, cx).await; + save_named_thread_metadata("thread-main", "Main Thread", &main_project, cx).await; + save_named_thread_metadata("thread-wt", "WT Thread", &worktree_project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -3544,18 +3518,17 @@ async fn test_activate_archived_thread_with_saved_paths_activates_matching_works let project_b = project::Project::test(fs.clone(), ["/project-b".as_ref()], cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); multi_workspace.update_in(cx, |mw, window, cx| { - mw.test_add_workspace(project_b, window, cx); + mw.test_add_workspace(project_b.clone(), window, cx); }); let sidebar = setup_sidebar(&multi_workspace, cx); // Save a thread with path_list pointing to project-b. - let path_list_b = PathList::new(&[std::path::PathBuf::from("/project-b")]); let session_id = acp::SessionId::new(Arc::from("archived-1")); - save_test_thread_metadata(&session_id, path_list_b.clone(), cx).await; + save_test_thread_metadata(&session_id, &project_b, cx).await; // Ensure workspace A is active. 
multi_workspace.update_in(cx, |mw, window, cx| { @@ -4093,7 +4066,7 @@ async fn test_archive_thread_uses_next_threads_own_workspace(cx: &mut TestAppCon "Thread 2".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap(), None, - PathList::new(&[std::path::PathBuf::from("/project")]), + &main_project, cx, ); @@ -4105,7 +4078,7 @@ async fn test_archive_thread_uses_next_threads_own_workspace(cx: &mut TestAppCon "Thread 1".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), None, - PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]), + &worktree_project, cx, ); @@ -4215,6 +4188,11 @@ async fn test_linked_worktree_threads_not_duplicated_across_groups(cx: &mut Test .update(cx, |p, cx| p.git_scans_complete(cx)) .await; + let worktree_project = project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await; + worktree_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_only.clone(), window, cx)); multi_workspace.update_in(cx, |mw, window, cx| { @@ -4223,8 +4201,7 @@ async fn test_linked_worktree_threads_not_duplicated_across_groups(cx: &mut Test let sidebar = setup_sidebar(&multi_workspace, cx); // Save a thread under the linked worktree path. 
- let wt_paths = PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]); - save_named_thread_metadata("wt-thread", "Worktree Thread", &wt_paths, cx).await; + save_named_thread_metadata("wt-thread", "Worktree Thread", &worktree_project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -4234,11 +4211,10 @@ async fn test_linked_worktree_threads_not_duplicated_across_groups(cx: &mut Test assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - "v [project]", - " [+ New Thread]", - " Worktree Thread {wt-feature-a}", "v [other, project]", " [+ New Thread]", + "v [project]", + " Worktree Thread {wt-feature-a}", ] ); } @@ -4250,8 +4226,6 @@ async fn test_thread_switcher_ordering(cx: &mut TestAppContext) { cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - let switcher_ids = |sidebar: &Entity, cx: &mut gpui::VisualTestContext| -> Vec { sidebar.read_with(cx, |sidebar, cx| { @@ -4298,7 +4272,7 @@ async fn test_thread_switcher_ordering(cx: &mut TestAppContext) { "Thread C".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), Some(chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap()), - path_list.clone(), + &project, cx, ); @@ -4314,7 +4288,7 @@ async fn test_thread_switcher_ordering(cx: &mut TestAppContext) { "Thread B".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap(), Some(chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap()), - path_list.clone(), + &project, cx, ); @@ -4330,7 +4304,7 @@ async fn test_thread_switcher_ordering(cx: &mut TestAppContext) { "Thread A".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 3, 0, 0, 0).unwrap(), Some(chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 3, 0, 0, 0).unwrap()), - 
path_list.clone(), + &project, cx, ); @@ -4516,7 +4490,7 @@ async fn test_thread_switcher_ordering(cx: &mut TestAppContext) { "Historical Thread".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 6, 1, 0, 0, 0).unwrap(), Some(chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 6, 1, 0, 0, 0).unwrap()), - path_list.clone(), + &project, cx, ); @@ -4557,7 +4531,7 @@ async fn test_thread_switcher_ordering(cx: &mut TestAppContext) { "Old Historical Thread".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2023, 6, 1, 0, 0, 0).unwrap(), Some(chrono::TimeZone::with_ymd_and_hms(&Utc, 2023, 6, 1, 0, 0, 0).unwrap()), - path_list, + &project, cx, ); @@ -4591,17 +4565,15 @@ async fn test_thread_switcher_ordering(cx: &mut TestAppContext) { async fn test_archive_thread_keeps_metadata_but_hides_from_sidebar(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_thread_metadata( acp::SessionId::new(Arc::from("thread-to-archive")), "Thread To Archive".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), None, - path_list, + &project, cx, ); cx.run_until_parked(); @@ -4643,17 +4615,15 @@ async fn test_archive_thread_keeps_metadata_but_hides_from_sidebar(cx: &mut Test async fn test_archived_threads_excluded_from_sidebar_entries(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = 
PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_thread_metadata( acp::SessionId::new(Arc::from("visible-thread")), "Visible Thread".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap(), None, - path_list.clone(), + &project, cx, ); @@ -4663,7 +4633,7 @@ async fn test_archived_threads_excluded_from_sidebar_entries(cx: &mut TestAppCon "Archived Thread".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), None, - path_list, + &project, cx, ); @@ -4756,18 +4726,21 @@ async fn test_linked_worktree_workspace_shows_main_worktree_threads(cx: &mut Tes .update(cx, |p, cx| p.git_scans_complete(cx)) .await; + let main_project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await; + main_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + let (multi_workspace, cx) = cx.add_window_view(|window, cx| { MultiWorkspace::test_new(worktree_project.clone(), window, cx) }); let sidebar = setup_sidebar(&multi_workspace, cx); // Save a thread against the MAIN repo path. - let main_paths = PathList::new(&[std::path::PathBuf::from("/project")]); - save_named_thread_metadata("main-thread", "Main Repo Thread", &main_paths, cx).await; + save_named_thread_metadata("main-thread", "Main Repo Thread", &main_project, cx).await; // Save a thread against the linked worktree path. 
- let wt_paths = PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]); - save_named_thread_metadata("wt-thread", "Worktree Thread", &wt_paths, cx).await; + save_named_thread_metadata("wt-thread", "Worktree Thread", &worktree_project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -4788,7 +4761,6 @@ async fn test_linked_worktree_workspace_shows_main_worktree_threads(cx: &mut Tes mod property_test { use super::*; - use gpui::EntityId; struct UnopenedWorktree { path: String, @@ -4922,7 +4894,7 @@ mod property_test { fn save_thread_to_path( state: &mut TestState, - path_list: PathList, + project: &Entity, cx: &mut gpui::VisualTestContext, ) { let session_id = state.next_thread_id(); @@ -4930,7 +4902,7 @@ mod property_test { let updated_at = chrono::TimeZone::with_ymd_and_hms(&chrono::Utc, 2024, 1, 1, 0, 0, 0) .unwrap() + chrono::Duration::seconds(state.thread_counter as i64); - save_thread_metadata(session_id, title, updated_at, None, path_list, cx); + save_thread_metadata(session_id, title, updated_at, None, project, cx); } fn save_thread_to_path_with_main( @@ -4970,11 +4942,10 @@ mod property_test { ) { match operation { Operation::SaveThread { workspace_index } => { - let workspace = - multi_workspace.read_with(cx, |mw, _| mw.workspaces()[workspace_index].clone()); - let path_list = workspace - .read_with(cx, |workspace, cx| PathList::new(&workspace.root_paths(cx))); - save_thread_to_path(state, path_list, cx); + let project = multi_workspace.read_with(cx, |mw, cx| { + mw.workspaces()[workspace_index].read(cx).project().clone() + }); + save_thread_to_path(state, &project, cx); } Operation::SaveWorktreeThread { worktree_index } => { let worktree = &state.unopened_worktrees[worktree_index]; @@ -5147,7 +5118,7 @@ mod property_test { .entries .iter() .filter_map(|entry| match entry { - ListEntry::ProjectHeader { path_list, .. } => Some(path_list.clone()), + ListEntry::ProjectHeader { key, .. 
} => Some(key.path_list().clone()), _ => None, }) .collect(); @@ -5173,31 +5144,32 @@ mod property_test { anyhow::bail!("sidebar should still have an associated multi-workspace"); }; - let workspaces = multi_workspace.read(cx).workspaces().to_vec(); + let mw = multi_workspace.read(cx); - // Workspaces with no root paths are not shown because the - // sidebar skips empty path lists. All other workspaces should - // appear — either via a Thread entry or a NewThread entry for - // threadless workspaces. - let expected_workspaces: HashSet = workspaces - .iter() - .filter(|ws| !workspace_path_list(ws, cx).paths().is_empty()) - .map(|ws| ws.entity_id()) + // Every project group key in the multi-workspace that has a + // non-empty path list should appear as a ProjectHeader in the + // sidebar. + let expected_keys: HashSet<&project::ProjectGroupKey> = mw + .project_group_keys() + .filter(|k| !k.path_list().paths().is_empty()) .collect(); - let sidebar_workspaces: HashSet = sidebar + let sidebar_keys: HashSet<&project::ProjectGroupKey> = sidebar .contents .entries .iter() - .filter_map(|entry| entry.workspace().map(|ws| ws.entity_id())) + .filter_map(|entry| match entry { + ListEntry::ProjectHeader { key, .. } => Some(key), + _ => None, + }) .collect(); - let missing = &expected_workspaces - &sidebar_workspaces; - let stray = &sidebar_workspaces - &expected_workspaces; + let missing = &expected_keys - &sidebar_keys; + let stray = &sidebar_keys - &expected_keys; anyhow::ensure!( missing.is_empty() && stray.is_empty(), - "sidebar workspaces don't match multi-workspace.\n\ + "sidebar project groups don't match multi-workspace.\n\ Only in multi-workspace (missing): {:?}\n\ Only in sidebar (stray): {:?}", missing, @@ -5222,33 +5194,79 @@ mod property_test { .collect(); let mut metadata_thread_ids: HashSet = HashSet::default(); + + // Query using the same approach as the sidebar: iterate project + // group keys, then do main + legacy queries per group. 
+ let mw = multi_workspace.read(cx); + let mut workspaces_by_group: HashMap>> = + HashMap::default(); for workspace in &workspaces { - let path_list = workspace_path_list(workspace, cx); + let key = workspace.read(cx).project_group_key(cx); + workspaces_by_group + .entry(key) + .or_default() + .push(workspace.clone()); + } + + for group_key in mw.project_group_keys() { + let path_list = group_key.path_list().clone(); if path_list.paths().is_empty() { continue; } + + let group_workspaces = workspaces_by_group + .get(group_key) + .map(|ws| ws.as_slice()) + .unwrap_or_default(); + + // Main code path queries (run for all groups, even without workspaces). + for metadata in thread_store + .read(cx) + .entries_for_main_worktree_path(&path_list) + { + metadata_thread_ids.insert(metadata.session_id.clone()); + } for metadata in thread_store.read(cx).entries_for_path(&path_list) { metadata_thread_ids.insert(metadata.session_id.clone()); } - for snapshot in root_repository_snapshots(workspace, cx) { - for linked_worktree in snapshot.linked_worktrees() { - let worktree_path_list = - PathList::new(std::slice::from_ref(&linked_worktree.path)); - for metadata in thread_store.read(cx).entries_for_path(&worktree_path_list) { + + // Legacy: per-workspace queries for different root paths. 
+ let covered_paths: HashSet = group_workspaces + .iter() + .flat_map(|ws| { + ws.read(cx) + .root_paths(cx) + .into_iter() + .map(|p| p.to_path_buf()) + }) + .collect(); + + for workspace in group_workspaces { + let ws_path_list = workspace_path_list(workspace, cx); + if ws_path_list != path_list { + for metadata in thread_store.read(cx).entries_for_path(&ws_path_list) { metadata_thread_ids.insert(metadata.session_id.clone()); } } - if snapshot.is_linked_worktree() { - let main_path_list = - PathList::new(std::slice::from_ref(&snapshot.original_repo_abs_path)); - for metadata in thread_store.read(cx).entries_for_path(&main_path_list) { - metadata_thread_ids.insert(metadata.session_id.clone()); + } + + for workspace in group_workspaces { + for snapshot in root_repository_snapshots(workspace, cx) { + let repo_path_list = + PathList::new(&[snapshot.original_repo_abs_path.to_path_buf()]); + if repo_path_list != path_list { + continue; } - for metadata in thread_store - .read(cx) - .entries_for_main_worktree_path(&main_path_list) - { - metadata_thread_ids.insert(metadata.session_id.clone()); + for linked_worktree in snapshot.linked_worktrees() { + if covered_paths.contains(&*linked_worktree.path) { + continue; + } + let worktree_path_list = + PathList::new(std::slice::from_ref(&linked_worktree.path)); + for metadata in thread_store.read(cx).entries_for_path(&worktree_path_list) + { + metadata_thread_ids.insert(metadata.session_id.clone()); + } } } } diff --git a/crates/workspace/src/multi_workspace.rs b/crates/workspace/src/multi_workspace.rs index 6aa369774b63dd0d250ba67ba4a5b69a335a2de9..d1bfcf2652d4d7c77d1f83ca2bc9d9603e3a2eed 100644 --- a/crates/workspace/src/multi_workspace.rs +++ b/crates/workspace/src/multi_workspace.rs @@ -474,6 +474,26 @@ impl MultiWorkspace { self.project_group_keys.iter() } + /// Returns the project groups, ordered by most recently added. 
+ pub fn project_groups( + &self, + cx: &App, + ) -> impl Iterator>)> { + let mut groups = self + .project_group_keys + .iter() + .rev() + .map(|key| (key.clone(), Vec::new())) + .collect::>(); + for workspace in &self.workspaces { + let key = workspace.read(cx).project_group_key(cx); + if let Some((_, workspaces)) = groups.iter_mut().find(|(k, _)| k == &key) { + workspaces.push(workspace.clone()); + } + } + groups.into_iter() + } + pub fn workspace(&self) -> &Entity { &self.workspaces[self.active_workspace_index] } From 68452a3daea53843b9b388ec53a2cfd7673baac0 Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Fri, 3 Apr 2026 21:55:25 -0400 Subject: [PATCH 40/63] Fix crash on non-ASCII thread titles in archive search (#53114) The archive view's `fuzzy_match_positions` used `chars().enumerate()` which produces **character indices**, not **byte indices**. When thread titles contain multi-byte UTF-8 characters (emoji, CJK, accented characters, etc.), these character indices don't correspond to valid byte boundaries, causing a panic in `HighlightedLabel::new` which asserts that highlight indices are valid UTF-8 boundaries. The fix switches to `char_indices()` and `eq_ignore_ascii_case()` to produce correct byte positions, matching the approach already used by the sidebar's version of the same function. Release Notes: - Fixed a crash when searching archived threads whose titles contain emoji or other non-ASCII characters. 
--- crates/agent_ui/src/threads_archive_view.rs | 70 +++++++++++++++++++-- 1 file changed, 64 insertions(+), 6 deletions(-) diff --git a/crates/agent_ui/src/threads_archive_view.rs b/crates/agent_ui/src/threads_archive_view.rs index 9aca31e1edbe729fccecfc0dd8f0530d2aed2564..f0c02eefc34a03c5c45730ac4b53645c5b15a2e1 100644 --- a/crates/agent_ui/src/threads_archive_view.rs +++ b/crates/agent_ui/src/threads_archive_view.rs @@ -91,14 +91,16 @@ impl TimeBucket { } fn fuzzy_match_positions(query: &str, text: &str) -> Option> { - let query = query.to_lowercase(); - let text_lower = text.to_lowercase(); let mut positions = Vec::new(); let mut query_chars = query.chars().peekable(); - for (i, c) in text_lower.chars().enumerate() { - if query_chars.peek() == Some(&c) { - positions.push(i); - query_chars.next(); + for (byte_idx, candidate_char) in text.char_indices() { + if let Some(&query_char) = query_chars.peek() { + if candidate_char.eq_ignore_ascii_case(&query_char) { + positions.push(byte_idx); + query_chars.next(); + } + } else { + break; } } if query_chars.peek().is_none() { @@ -1283,3 +1285,59 @@ impl PickerDelegate for ProjectPickerDelegate { ) } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_fuzzy_match_positions_returns_byte_indices() { + // "🔥abc" — the fire emoji is 4 bytes, so 'a' starts at byte 4, 'b' at 5, 'c' at 6. + let text = "🔥abc"; + let positions = fuzzy_match_positions("ab", text).expect("should match"); + assert_eq!(positions, vec![4, 5]); + + // Verify positions are valid char boundaries (this is the assertion that + // panicked before the fix). 
+ for &pos in &positions { + assert!( + text.is_char_boundary(pos), + "position {pos} is not a valid UTF-8 boundary in {text:?}" + ); + } + } + + #[test] + fn test_fuzzy_match_positions_ascii_still_works() { + let positions = fuzzy_match_positions("he", "hello").expect("should match"); + assert_eq!(positions, vec![0, 1]); + } + + #[test] + fn test_fuzzy_match_positions_case_insensitive() { + let positions = fuzzy_match_positions("HE", "hello").expect("should match"); + assert_eq!(positions, vec![0, 1]); + } + + #[test] + fn test_fuzzy_match_positions_no_match() { + assert!(fuzzy_match_positions("xyz", "hello").is_none()); + } + + #[test] + fn test_fuzzy_match_positions_multi_byte_interior() { + // "café" — 'é' is 2 bytes (0xC3 0xA9), so 'f' starts at byte 2 and 'é' at byte 3. + let text = "café"; + let positions = fuzzy_match_positions("fé", text).expect("should match"); + // Byte layout: 'c', 'a', 'f' are 1 byte each; 'é' occupies bytes 3..=4. + // Matching "fé" therefore records the starting byte of 'f' and of 'é', + // i.e. byte positions 2 and 3. + assert_eq!(positions, vec![2, 3]); + for &pos in &positions { + assert!( + text.is_char_boundary(pos), + "position {pos} is not a valid UTF-8 boundary in {text:?}" + ); + } + } +} From 3b9c38a32039a2cee6261121789d549df360a18c Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Sat, 4 Apr 2026 00:16:11 -0400 Subject: [PATCH 41/63] Fix resolution of multibuffer anchors that lie outside excerpt boundaries (#53118) It's possible to create a multibuffer anchor that points into a specific excerpted buffer (so not min/max), but whose main buffer `text::Anchor` isn't contained in any of the excerpts for that buffer. When summarizing such an anchor, we map it to the multibuffer position of the start of the next excerpt after where the anchor "should" appear. Or at least, that's the intention, but it turned out we had some bugs in `summary_for_anchor` and `summaries_for_anchors` that caused them to return bizarre summaries for these anchors. 
This PR fixes that and also updates `test_random_multibuffer` to actually test `MultiBufferSnapshot::summary_for_anchor` against a reference implementation, including for out-of-bounds anchors. Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --------- Co-authored-by: Anthony Co-authored-by: Max Co-authored-by: Anthony Eid --- crates/editor/src/display_map/block_map.rs | 3 +- crates/editor/src/editor.rs | 3 +- crates/multi_buffer/src/multi_buffer.rs | 24 +- crates/multi_buffer/src/multi_buffer_tests.rs | 349 +++++++++++++++--- 4 files changed, 306 insertions(+), 73 deletions(-) diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 25874457a8e3d4787de22e3e8c0e2c61a49708f8..67318e3300e73085fe40c2e22edfcd06778902c8 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -2043,6 +2043,7 @@ impl BlockMapWriter<'_> { multi_buffer: &MultiBuffer, cx: &App, ) { + let multi_buffer_snapshot = multi_buffer.snapshot(cx); let mut ranges = Vec::new(); let mut companion_buffer_ids = HashSet::default(); for buffer_id in buffer_ids { @@ -2051,7 +2052,7 @@ impl BlockMapWriter<'_> { } else { self.block_map.folded_buffers.remove(&buffer_id); } - ranges.extend(multi_buffer.range_for_buffer(buffer_id, cx)); + ranges.extend(multi_buffer_snapshot.range_for_buffer(buffer_id)); if let Some(companion) = &self.companion && companion.inverse.is_some() { diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index e4cccf3fc5607937a2a82b2ab1089e00bbda6fa7..6550d79c9f73799d37ccf6433db38f2719636ee6 100644 --- 
a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -11741,10 +11741,9 @@ impl Editor { buffer_ids.extend(snapshot.buffer_ids_for_range(selection.range())) } - let buffer = self.buffer().read(cx); let ranges = buffer_ids .into_iter() - .flat_map(|buffer_id| buffer.range_for_buffer(buffer_id, cx)) + .flat_map(|buffer_id| snapshot.range_for_buffer(buffer_id)) .collect::>(); self.restore_hunks_in_ranges(ranges, window, cx); diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 8e98a5ad93bdbec4aceb68ba9fff95688777d863..a54ff64af028f44adced1758933f794e9a002c5a 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -1834,14 +1834,6 @@ impl MultiBuffer { cx.notify(); } - pub fn range_for_buffer(&self, buffer_id: BufferId, cx: &App) -> Option> { - let snapshot = self.read(cx); - let path_key = snapshot.path_key_index_for_buffer(buffer_id)?; - let start = Anchor::in_buffer(path_key, text::Anchor::min_for_buffer(buffer_id)); - let end = Anchor::in_buffer(path_key, text::Anchor::max_for_buffer(buffer_id)); - Some((start..end).to_point(&snapshot)) - } - // If point is at the end of the buffer, the last excerpt is returned pub fn point_to_buffer_offset( &self, @@ -4792,10 +4784,10 @@ impl MultiBufferSnapshot { let mut diff_transforms_cursor = self .diff_transforms .cursor::, OutputDimension>>(()); - diff_transforms_cursor.next(); if let Some(excerpt) = item { if !excerpt.contains(anchor, self) { + diff_transforms_cursor.seek(&excerpt_start_position, Bias::Left); return self.summary_for_excerpt_position_without_hunks( Bias::Left, excerpt_start_position, @@ -4822,9 +4814,7 @@ impl MultiBufferSnapshot { position += summary - excerpt_buffer_start; } - if diff_transforms_cursor.start().0 < position { - diff_transforms_cursor.seek_forward(&position, Bias::Left); - } + diff_transforms_cursor.seek(&position, Bias::Left); self.summary_for_anchor_with_excerpt_position( *anchor, 
position, @@ -4832,7 +4822,7 @@ impl MultiBufferSnapshot { &buffer_snapshot, ) } else { - diff_transforms_cursor.seek_forward(&excerpt_start_position, Bias::Left); + diff_transforms_cursor.seek(&excerpt_start_position, Bias::Left); self.summary_for_excerpt_position_without_hunks( Bias::Right, excerpt_start_position, @@ -5040,6 +5030,7 @@ impl MultiBufferSnapshot { if let Some(excerpt) = cursor.item() { let buffer_snapshot = excerpt.buffer_snapshot(self); if !excerpt.contains(&excerpt_anchor, self) { + diff_transforms_cursor.seek_forward(&excerpt_start_position, Bias::Left); let position = self.summary_for_excerpt_position_without_hunks( Bias::Left, excerpt_start_position, @@ -6740,6 +6731,13 @@ impl MultiBufferSnapshot { .graphemes(true) .count() } + + pub fn range_for_buffer(&self, buffer_id: BufferId) -> Option> { + let path_key = self.path_key_index_for_buffer(buffer_id)?; + let start = Anchor::in_buffer(path_key, text::Anchor::min_for_buffer(buffer_id)); + let end = Anchor::in_buffer(path_key, text::Anchor::max_for_buffer(buffer_id)); + Some((start..end).to_point(self)) + } } #[cfg(any(test, feature = "test-support"))] diff --git a/crates/multi_buffer/src/multi_buffer_tests.rs b/crates/multi_buffer/src/multi_buffer_tests.rs index b0e541ed11d1e9200b22ce682cf3175fae30e8cf..bc904d1a05488ee365ebddf36c3b30accdfb9301 100644 --- a/crates/multi_buffer/src/multi_buffer_tests.rs +++ b/crates/multi_buffer/src/multi_buffer_tests.rs @@ -2898,10 +2898,11 @@ struct ReferenceExcerpt { struct ReferenceRegion { buffer_id: Option, range: Range, - buffer_range: Option>, + buffer_range: Range, + // if this is a deleted hunk, the main buffer anchor to which the deleted content is attached + deleted_hunk_anchor: Option, status: Option, - excerpt_range: Option>, - excerpt_path_key_index: Option, + excerpt: Option, } impl ReferenceMultibuffer { @@ -3055,7 +3056,15 @@ impl ReferenceMultibuffer { } } - fn expected_content(&self, cx: &App) -> (String, Vec, HashSet) { + fn 
expected_content( + &self, + cx: &App, + ) -> ( + String, + Vec, + HashSet, + Vec, + ) { use util::maybe; let mut text = String::new(); @@ -3093,12 +3102,10 @@ impl ReferenceMultibuffer { regions.push(ReferenceRegion { buffer_id: Some(buffer_id), range: len..text.len(), - buffer_range: Some( - (offset..hunk_base_range.start).to_point(&buffer), - ), + buffer_range: (offset..hunk_base_range.start).to_point(&buffer), status: None, - excerpt_range: Some(excerpt.range.clone()), - excerpt_path_key_index: Some(excerpt.path_key_index), + excerpt: Some(excerpt.clone()), + deleted_hunk_anchor: None, }); } } @@ -3110,10 +3117,10 @@ impl ReferenceMultibuffer { regions.push(ReferenceRegion { buffer_id: Some(buffer_id), range: len..text.len(), - buffer_range: Some(hunk_base_range.to_point(&buffer)), + buffer_range: hunk_base_range.to_point(&buffer), status: Some(DiffHunkStatus::deleted(hunk.secondary_status)), - excerpt_range: Some(excerpt.range.clone()), - excerpt_path_key_index: Some(excerpt.path_key_index), + excerpt: Some(excerpt.clone()), + deleted_hunk_anchor: None, }); } @@ -3127,10 +3134,10 @@ impl ReferenceMultibuffer { regions.push(ReferenceRegion { buffer_id: Some(buffer_id), range: len..text.len(), - buffer_range: Some((offset..buffer_range.end).to_point(&buffer)), + buffer_range: (offset..buffer_range.end).to_point(&buffer), status: None, - excerpt_range: Some(excerpt.range.clone()), - excerpt_path_key_index: Some(excerpt.path_key_index), + excerpt: Some(excerpt.clone()), + deleted_hunk_anchor: None, }); } else { let diff = self.diffs.get(&buffer_id).unwrap().read(cx).snapshot(cx); @@ -3181,10 +3188,10 @@ impl ReferenceMultibuffer { regions.push(ReferenceRegion { buffer_id: Some(buffer_id), range: len..text.len(), - buffer_range: Some((offset..hunk_range.start).to_point(&buffer)), + buffer_range: (offset..hunk_range.start).to_point(&buffer), status: None, - excerpt_range: Some(excerpt.range.clone()), - excerpt_path_key_index: Some(excerpt.path_key_index), + excerpt: 
Some(excerpt.clone()), + deleted_hunk_anchor: None, }); } @@ -3201,12 +3208,10 @@ impl ReferenceMultibuffer { regions.push(ReferenceRegion { buffer_id: Some(base_buffer.remote_id()), range: len..text.len(), - buffer_range: Some( - hunk.diff_base_byte_range.to_point(&base_buffer), - ), + buffer_range: hunk.diff_base_byte_range.to_point(&base_buffer), status: Some(DiffHunkStatus::deleted(hunk.secondary_status)), - excerpt_range: Some(excerpt.range.clone()), - excerpt_path_key_index: Some(excerpt.path_key_index), + excerpt: Some(excerpt.clone()), + deleted_hunk_anchor: Some(hunk.buffer_range.start), }); } @@ -3221,10 +3226,10 @@ impl ReferenceMultibuffer { let region = ReferenceRegion { buffer_id: Some(buffer_id), range, - buffer_range: Some((offset..hunk_range.end).to_point(&buffer)), + buffer_range: (offset..hunk_range.end).to_point(&buffer), status: Some(DiffHunkStatus::added(hunk.secondary_status)), - excerpt_range: Some(excerpt.range.clone()), - excerpt_path_key_index: Some(excerpt.path_key_index), + excerpt: Some(excerpt.clone()), + deleted_hunk_anchor: None, }; offset = hunk_range.end; regions.push(region); @@ -3238,10 +3243,10 @@ impl ReferenceMultibuffer { regions.push(ReferenceRegion { buffer_id: Some(buffer_id), range: len..text.len(), - buffer_range: Some((offset..buffer_range.end).to_point(&buffer)), + buffer_range: (offset..buffer_range.end).to_point(&buffer), status: None, - excerpt_range: Some(excerpt.range.clone()), - excerpt_path_key_index: Some(excerpt.path_key_index), + excerpt: Some(excerpt.clone()), + deleted_hunk_anchor: None, }); } } @@ -3251,13 +3256,16 @@ impl ReferenceMultibuffer { regions.push(ReferenceRegion { buffer_id: None, range: 0..1, - buffer_range: Some(Point::new(0, 0)..Point::new(0, 1)), + buffer_range: Point::new(0, 0)..Point::new(0, 1), status: None, - excerpt_range: None, - excerpt_path_key_index: None, + excerpt: None, + deleted_hunk_anchor: None, }); } else { text.pop(); + let region = regions.last_mut().unwrap(); + 
assert!(region.deleted_hunk_anchor.is_none()); + region.range.end -= 1; } // Retrieve the row info using the region that contains @@ -3268,37 +3276,38 @@ impl ReferenceMultibuffer { .map(|line| { let row_info = regions .iter() - .position(|region| region.range.contains(&ix)) + .rposition(|region| { + region.range.contains(&ix) || (ix == text.len() && ix == region.range.end) + }) .map_or(RowInfo::default(), |region_ix| { let region = regions[region_ix].clone(); - let buffer_row = region.buffer_range.as_ref().map(|buffer_range| { - buffer_range.start.row - + text[region.range.start..ix].matches('\n').count() as u32 - }); - let main_buffer = self - .excerpts - .iter() - .find(|e| e.range == region.excerpt_range.clone().unwrap()) - .map(|e| e.buffer.clone()); + let buffer_row = region.buffer_range.start.row + + text[region.range.start..ix].matches('\n').count() as u32; + let main_buffer = region.excerpt.as_ref().map(|e| e.buffer.clone()); + let excerpt_range = region.excerpt.as_ref().map(|e| &e.range); let is_excerpt_start = region_ix == 0 - || ®ions[region_ix - 1].excerpt_range != ®ion.excerpt_range + || regions[region_ix - 1].excerpt.as_ref().map(|e| &e.range) + != excerpt_range || regions[region_ix - 1].range.is_empty(); let mut is_excerpt_end = region_ix == regions.len() - 1 - || ®ions[region_ix + 1].excerpt_range != ®ion.excerpt_range; + || regions[region_ix + 1].excerpt.as_ref().map(|e| &e.range) + != excerpt_range; let is_start = !text[region.range.start..ix].contains('\n'); + let is_last_region = region_ix == regions.len() - 1; let mut is_end = if region.range.end > text.len() { !text[ix..].contains('\n') } else { - text[ix..region.range.end.min(text.len())] + let remaining_newlines = text[ix..region.range.end.min(text.len())] .matches('\n') - .count() - == 1 + .count(); + remaining_newlines == if is_last_region { 0 } else { 1 } }; if region_ix < regions.len() - 1 && !text[ix..].contains("\n") && (region.status == Some(DiffHunkStatus::added_none()) || 
region.status.is_some_and(|s| s.is_deleted())) - && regions[region_ix + 1].excerpt_range == region.excerpt_range + && regions[region_ix + 1].excerpt.as_ref().map(|e| &e.range) + == excerpt_range && regions[region_ix + 1].range.start == text.len() { is_end = true; @@ -3308,7 +3317,6 @@ impl ReferenceMultibuffer { MultiBufferRow(text[..ix].matches('\n').count() as u32); let mut expand_direction = None; if let Some(buffer) = &main_buffer { - let buffer_row = buffer_row.unwrap(); let needs_expand_up = is_excerpt_start && is_start && buffer_row > 0; let needs_expand_down = is_excerpt_end && is_end @@ -3326,19 +3334,18 @@ impl ReferenceMultibuffer { RowInfo { buffer_id: region.buffer_id, diff_status: region.status, - buffer_row, + buffer_row: Some(buffer_row), wrapped_buffer_row: None, multibuffer_row: Some(multibuffer_row), expand_info: maybe!({ let direction = expand_direction?; - let excerpt_range = region.excerpt_range?; - let path_key_index = region.excerpt_path_key_index?; + let excerpt = region.excerpt.as_ref()?; Some(ExpandInfo { direction, start_anchor: Anchor::in_buffer( - path_key_index, - excerpt_range.start, + excerpt.path_key_index, + excerpt.range.start, ), }) }), @@ -3349,7 +3356,7 @@ impl ReferenceMultibuffer { }) .collect(); - (text, row_infos, excerpt_boundary_rows) + (text, row_infos, excerpt_boundary_rows, regions) } fn diffs_updated(&mut self, cx: &mut App) { @@ -3414,6 +3421,95 @@ impl ReferenceMultibuffer { }) }); } + + fn anchor_to_offset(&self, anchor: &Anchor, cx: &App) -> Option { + if anchor.diff_base_anchor().is_some() { + panic!("reference multibuffer cannot yet resolve anchors inside deleted hunks"); + } + let (anchor, snapshot, path_key) = self.anchor_to_buffer_anchor(anchor, cx)?; + // TODO(cole) can maybe make this and expected content call a common function instead + let (text, _, _, regions) = self.expected_content(cx); + + // Locate the first region that contains or is past the putative location of the buffer anchor + let ix = 
regions.partition_point(|region| { + let excerpt = region + .excerpt + .as_ref() + .expect("should have no buffers in empty reference multibuffer"); + excerpt + .path_key + .cmp(&path_key) + .then_with(|| { + if excerpt.range.end.cmp(&anchor, &snapshot).is_lt() { + Ordering::Less + } else if excerpt.range.start.cmp(&anchor, &snapshot).is_gt() { + Ordering::Greater + } else { + Ordering::Equal + } + }) + .then_with(|| { + if let Some(deleted_hunk_anchor) = region.deleted_hunk_anchor { + deleted_hunk_anchor.cmp(&anchor, &snapshot) + } else { + let point = anchor.to_point(&snapshot); + assert_eq!(region.buffer_id, Some(snapshot.remote_id())); + if region.buffer_range.end < point { + Ordering::Less + } else if region.buffer_range.start > point { + Ordering::Greater + } else { + Ordering::Equal + } + } + }) + .is_lt() + }); + + let Some(region) = regions.get(ix) else { + return Some(MultiBufferOffset(text.len())); + }; + + let offset = if region.buffer_id == Some(snapshot.remote_id()) { + let buffer_offset = anchor.to_offset(&snapshot); + let buffer_range = region.buffer_range.to_offset(&snapshot); + assert!(buffer_offset <= buffer_range.end); + let overshoot = buffer_offset.saturating_sub(buffer_range.start); + region.range.start + overshoot + } else { + region.range.start + }; + Some(MultiBufferOffset(offset)) + } + + fn anchor_to_buffer_anchor( + &self, + anchor: &Anchor, + cx: &App, + ) -> Option<(text::Anchor, BufferSnapshot, PathKey)> { + let (excerpt, anchor) = match anchor { + Anchor::Min => { + let excerpt = self.excerpts.first()?; + (excerpt, excerpt.range.start) + } + Anchor::Excerpt(excerpt_anchor) => ( + self.excerpts.iter().find(|excerpt| { + excerpt.buffer.read(cx).remote_id() == excerpt_anchor.buffer_id() + })?, + excerpt_anchor.text_anchor, + ), + Anchor::Max => { + let excerpt = self.excerpts.last()?; + (excerpt, excerpt.range.end) + } + }; + + Some(( + anchor, + excerpt.buffer.read(cx).snapshot(), + excerpt.path_key.clone(), + )) + } } 
#[gpui::test(iterations = 100)] @@ -3791,12 +3887,13 @@ fn mutate_excerpt_ranges( _ => { let end_row = rng.random_range(0..=buffer.max_point().row); let start_row = rng.random_range(0..=end_row); + let end_col = buffer.line_len(end_row); log::info!( "Inserting excerpt for buffer {:?}, row range {:?}", buffer.remote_id(), start_row..end_row ); - ranges_to_add.push(Point::new(start_row, 0)..Point::new(end_row, 0)); + ranges_to_add.push(Point::new(start_row, 0)..Point::new(end_row, end_col)); } } } @@ -3820,8 +3917,36 @@ fn check_multibuffer( .collect::>(); let actual_row_infos = snapshot.row_infos(MultiBufferRow(0)).collect::>(); - let (expected_text, expected_row_infos, expected_boundary_rows) = + let anchors_to_check = anchors + .iter() + .filter_map(|anchor| { + snapshot + .anchor_to_buffer_anchor(*anchor) + .map(|(anchor, _)| anchor) + }) + // Intentionally mix in some anchors that are (in general) not contained in any excerpt + .chain( + reference + .excerpts + .iter() + .map(|excerpt| excerpt.buffer.read(cx).remote_id()) + .dedup() + .flat_map(|buffer_id| { + [ + text::Anchor::min_for_buffer(buffer_id), + text::Anchor::max_for_buffer(buffer_id), + ] + }), + ) + .map(|anchor| snapshot.anchor_in_buffer(anchor).unwrap()) + .collect::>(); + + let (expected_text, expected_row_infos, expected_boundary_rows, _) = reference.expected_content(cx); + let expected_anchor_offsets = anchors_to_check + .iter() + .map(|anchor| reference.anchor_to_offset(anchor, cx).unwrap()) + .collect::>(); let has_diff = actual_row_infos .iter() @@ -3949,6 +4074,15 @@ fn check_multibuffer( ); } + let actual_anchor_offsets = anchors_to_check + .into_iter() + .map(|anchor| anchor.to_offset(&snapshot)) + .collect::>(); + assert_eq!( + actual_anchor_offsets, expected_anchor_offsets, + "buffer anchor resolves to wrong offset" + ); + for _ in 0..10 { let end_ix = text_rope.clip_offset(rng.random_range(0..=text_rope.len()), Bias::Right); assert_eq!( @@ -5911,3 +6045,104 @@ fn 
test_cannot_seek_backward_after_excerpt_replacement(cx: &mut TestAppContext) snapshot.summaries_for_anchors::(&[anchor_in_e_b2, anchor_in_e_b3]); }); } + +#[gpui::test] +fn test_resolving_max_anchor_for_buffer(cx: &mut TestAppContext) { + let dock_base_text = indoc! {" + 0 + 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + "}; + + let dock_text = indoc! {" + 0 + 4 + 5 + 6 + 10 + 11 + 12 + "}; + + let dock_buffer = cx.new(|cx| Buffer::local(dock_text, cx)); + let diff = cx.new(|cx| { + BufferDiff::new_with_base_text(dock_base_text, &dock_buffer.read(cx).snapshot(), cx) + }); + + let workspace_text = "second buffer\n"; + let workspace_buffer = cx.new(|cx| Buffer::local(workspace_text, cx)); + + let dock_path = PathKey::with_sort_prefix(0, rel_path("").into_arc()); + let workspace_path = PathKey::with_sort_prefix(1, rel_path("").into_arc()); + + let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpt_ranges_for_path( + dock_path, + dock_buffer.clone(), + &dock_buffer.read(cx).snapshot(), + vec![ + ExcerptRange::new(Point::zero()..Point::new(1, 1)), + ExcerptRange::new(Point::new(3, 0)..Point::new(4, 2)), + ], + cx, + ); + multibuffer.set_excerpt_ranges_for_path( + workspace_path, + workspace_buffer.clone(), + &workspace_buffer.read(cx).snapshot(), + vec![ExcerptRange::new( + Point::zero()..workspace_buffer.read(cx).max_point(), + )], + cx, + ); + multibuffer.add_diff(diff, cx); + multibuffer.set_all_diff_hunks_expanded(cx); + }); + + let snapshot = multibuffer.update(cx, |multibuffer, cx| multibuffer.snapshot(cx)); + let diff = format_diff( + &snapshot.text(), + &snapshot.row_infos(MultiBufferRow(0)).collect::>(), + &Default::default(), + None, + ); + assert_eq!( + diff, + indoc! 
{" + 0 + - 1 + - 2 + - 3 + 4 [↓] + 6 [↑] + - 7 + - 8 + - 9 + 10 [↓] + second buffer + "} + ); + + multibuffer.update(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + let point = snapshot + .anchor_in_buffer(text::Anchor::max_for_buffer( + dock_buffer.read(cx).remote_id(), + )) + .unwrap() + .to_point(&snapshot); + assert_eq!(point, Point::new(10, 0)); + }) +} From 49ebe4bd6197003cc5dec97c1989a4b2a70601c8 Mon Sep 17 00:00:00 2001 From: Vimsucks Date: Sun, 5 Apr 2026 03:22:04 +0800 Subject: [PATCH 42/63] Add reasoning_effort field to OpenAI compatible model configuration (#50582) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Some model like glm-5、kimi-k2.5 support reasoning, but require reasoning_effort parameter This pr add support for setting reasoing_effort for openai compatible models Tested using the following config: ```json { "language_models": { "openai_compatible": { "My LiteLLM": { "available_models": [ { "name": "glm-5", "display_name": "glm-5", "max_tokens": 73728, "reasoning_effort": "low" }, { "name": "kimi-k2.5", "display_name": "kimi-k2.5", "max_tokens": 262144, "reasoning_effort": "low" } ] } } } } ``` Release Notes: - Added a setting to control `reasoning_effort` in custom OpenAI-compatible models --- .../src/agent_configuration/add_llm_provider_modal.rs | 1 + crates/language_models/src/provider/open_ai_compatible.rs | 4 ++-- crates/settings_content/src/language_model.rs | 1 + 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs index b97583377a00d28ea1a8aae6a1380cff3b69e6a0..e0df79ba4dfe226652818b120b7bfcc493c73b1e 100644 --- a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs +++ b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs @@ -202,6 +202,7 @@ impl ModelInput { .text(cx) .parse::() .map_err(|_| 
SharedString::from("Max Tokens must be a number"))?, + reasoning_effort: None, capabilities: ModelCapabilities { tools: self.capabilities.supports_tools.selected(), images: self.capabilities.supports_images.selected(), diff --git a/crates/language_models/src/provider/open_ai_compatible.rs b/crates/language_models/src/provider/open_ai_compatible.rs index 9f63a1e1a039998c275637f3831b51474c8049ac..1c3268749c3340826cd2f50d29e80eecfa1826d4 100644 --- a/crates/language_models/src/provider/open_ai_compatible.rs +++ b/crates/language_models/src/provider/open_ai_compatible.rs @@ -402,7 +402,7 @@ impl LanguageModel for OpenAiCompatibleLanguageModel { self.model.capabilities.parallel_tool_calls, self.model.capabilities.prompt_cache_key, self.max_output_tokens(), - None, + self.model.reasoning_effort.clone(), ); let completions = self.stream_completion(request, cx); async move { @@ -417,7 +417,7 @@ impl LanguageModel for OpenAiCompatibleLanguageModel { self.model.capabilities.parallel_tool_calls, self.model.capabilities.prompt_cache_key, self.max_output_tokens(), - None, + self.model.reasoning_effort.clone(), ); let completions = self.stream_response(request, cx); async move { diff --git a/crates/settings_content/src/language_model.rs b/crates/settings_content/src/language_model.rs index fef92dc8f43d52c160c1e8c8a2fb7aeb0533e2c0..4b72c2ad3f47d834dfa38555d80a8646e3940f51 100644 --- a/crates/settings_content/src/language_model.rs +++ b/crates/settings_content/src/language_model.rs @@ -278,6 +278,7 @@ pub struct OpenAiCompatibleAvailableModel { pub max_tokens: u64, pub max_output_tokens: Option, pub max_completion_tokens: Option, + pub reasoning_effort: Option, #[serde(default)] pub capabilities: OpenAiCompatibleModelCapabilities, } From 5375ca0ae27bf11713cf319358ac289db16cc749 Mon Sep 17 00:00:00 2001 From: Bowen Xu <40262910+bowenxuuu@users.noreply.github.com> Date: Sun, 5 Apr 2026 04:10:22 +0800 Subject: [PATCH 43/63] gpui: Add `display_handle` implementation for Windows, 
update it for macOS (#52867) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --- crates/gpui_macos/src/window.rs | 7 +------ crates/gpui_windows/src/window.rs | 3 +-- 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/crates/gpui_macos/src/window.rs b/crates/gpui_macos/src/window.rs index ace36d695401ce76949129197dcd05135508f7d3..8811a4159a0f539d2bae2c62242a3d5f490686ef 100644 --- a/crates/gpui_macos/src/window.rs +++ b/crates/gpui_macos/src/window.rs @@ -1701,12 +1701,7 @@ impl rwh::HasWindowHandle for MacWindow { impl rwh::HasDisplayHandle for MacWindow { fn display_handle(&self) -> Result, rwh::HandleError> { - // SAFETY: This is a no-op on macOS - unsafe { - Ok(rwh::DisplayHandle::borrow_raw( - rwh::AppKitDisplayHandle::new().into(), - )) - } + Ok(rwh::DisplayHandle::appkit()) } } diff --git a/crates/gpui_windows/src/window.rs b/crates/gpui_windows/src/window.rs index 92255f93fd95969931c6b1ae8cb465ff628f82cb..f655c1989e2c69743032703532f91b3b517084b6 100644 --- a/crates/gpui_windows/src/window.rs +++ b/crates/gpui_windows/src/window.rs @@ -540,10 +540,9 @@ impl rwh::HasWindowHandle for WindowsWindow { } } -// todo(windows) impl rwh::HasDisplayHandle for WindowsWindow { fn display_handle(&self) -> std::result::Result, rwh::HandleError> { - unimplemented!() + Ok(rwh::DisplayHandle::windows()) } } From 77ee72e665da02d33ca661d97fc58b1c97d81ca2 Mon Sep 17 00:00:00 2001 From: K4YT3X Date: Sun, 5 Apr 2026 07:06:46 +0000 Subject: [PATCH 44/63] agent_ui: Fix profile selector not repainting after cycling with Shift+Tab (#53126) Currently, when pressing Shift+Tab to change Zed Agent's profile, the UI 
isn't immediately updated. This PR fixes this issue so the `Change Profile` button updates immediately after pressing Shift+Tab. The current behavior. Observe that the `Change Profile` button doesn't update right after Shift+Tab changes the active profile: https://github.com/user-attachments/assets/fa1e6488-0dc3-4cc9-a4f3-7f62da48cc19 After this fix, the button text is update immediately on profile change: https://github.com/user-attachments/assets/93261b11-037a-42c9-b1b8-0ca1e1adb851 --- Release Notes: - Fixed Zed Agent profile selector button not visually updating when cycled with Shift+Tab. Signed-off-by: k4yt3x --- crates/agent_ui/src/profile_selector.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/agent_ui/src/profile_selector.rs b/crates/agent_ui/src/profile_selector.rs index 1bad3c45e4dece2397a2e026d659fd0fad043a24..963e32af55fda90f49edb0787f7327190c92681f 100644 --- a/crates/agent_ui/src/profile_selector.rs +++ b/crates/agent_ui/src/profile_selector.rs @@ -90,6 +90,7 @@ impl ProfileSelector { if let Some((next_profile_id, _)) = profiles.get_index(next_index) { self.provider.set_profile(next_profile_id.clone(), cx); + cx.notify(); } } From 1ebcde8164933e6a729fc5f76a23b7d2e7b7f422 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Sun, 5 Apr 2026 11:12:02 -0700 Subject: [PATCH 45/63] Update more sidebar interactions to use the MultiWorkspace's explicit project groups (#53174) * Don't require a workspace to be loaded in order to render the group header menu. * When adding or removing root folders, do it to *every* workspace in the group. * When activating a thread, never open a different window, and never open it in a workspace that's part of a different groupw with a superset of the thread's worktrees. Find or create a workspace with the exact right group of root folders. 
Release Notes: - N/A --- crates/project/src/project.rs | 13 ++ crates/project/src/worktree_store.rs | 15 ++ crates/sidebar/src/sidebar.rs | 184 +++++++----------- crates/util/src/path_list.rs | 10 + crates/workspace/src/multi_workspace.rs | 247 ++++++++++++++++++++++-- 5 files changed, 342 insertions(+), 127 deletions(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index c5b1f982ceacc59a60ff1303faffc972a3ce505d..0ec3366ca8f9f6c6e4e3cbd411e1894de4d0f2b8 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -4763,6 +4763,19 @@ impl Project { }); } + pub fn remove_worktree_for_main_worktree_path( + &mut self, + path: impl AsRef, + cx: &mut Context, + ) { + let path = path.as_ref(); + self.worktree_store.update(cx, |worktree_store, cx| { + if let Some(worktree) = worktree_store.worktree_for_main_worktree_path(path, cx) { + worktree_store.remove_worktree(worktree.read(cx).id(), cx); + } + }); + } + fn add_worktree(&mut self, worktree: &Entity, cx: &mut Context) { self.worktree_store.update(cx, |worktree_store, cx| { worktree_store.add(worktree, cx); diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index ca448ce53118fd23fec0dfc920ee67f5d6d19c41..7ca721ddb50c3f216ed630665e547b60ce4d52bf 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -850,6 +850,21 @@ impl WorktreeStore { self.send_project_updates(cx); } + pub fn worktree_for_main_worktree_path( + &self, + path: &Path, + cx: &App, + ) -> Option> { + self.visible_worktrees(cx).find(|worktree| { + let worktree = worktree.read(cx); + if let Some(common_dir) = worktree.root_repo_common_dir() { + common_dir.parent() == Some(path) + } else { + worktree.abs_path().as_ref() == path + } + }) + } + pub fn set_worktrees_reordered(&mut self, worktrees_reordered: bool) { self.worktrees_reordered = worktrees_reordered; } diff --git a/crates/sidebar/src/sidebar.rs 
b/crates/sidebar/src/sidebar.rs index 25a2b7ecb75ae11a551caa221609e8c5bfa1751e..53ae57d1a7c55f66e40e1d704859d689d41045e4 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -689,12 +689,9 @@ impl Sidebar { return; }; - let paths: Vec = - path_list.paths().iter().map(|p| p.to_path_buf()).collect(); - multi_workspace - .update(cx, |mw, cx| { - mw.open_project(paths, workspace::OpenMode::Activate, window, cx) + .update(cx, |this, cx| { + this.find_or_create_local_workspace(path_list.clone(), window, cx) }) .detach_and_log_err(cx); } @@ -1439,10 +1436,7 @@ impl Sidebar { }) }), ) - .child({ - let workspace_for_new_thread = workspace.clone(); - let path_list_for_new_thread = path_list.clone(); - + .child( h_flex() .when(self.project_header_menu_ix != Some(ix), |this| { this.visible_on_hover(group_name) @@ -1450,13 +1444,7 @@ impl Sidebar { .on_mouse_down(gpui::MouseButton::Left, |_, _, cx| { cx.stop_propagation(); }) - .when_some(workspace, |this, workspace| { - this.child( - self.render_project_header_menu( - ix, id_prefix, &workspace, &workspace, cx, - ), - ) - }) + .child(self.render_project_header_menu(ix, id_prefix, key, cx)) .when(view_more_expanded && !is_collapsed, |this| { this.child( IconButton::new( @@ -1478,12 +1466,10 @@ impl Sidebar { })), ) }) - .when( - show_new_thread_button && workspace_for_new_thread.is_some(), - |this| { - let workspace_for_new_thread = - workspace_for_new_thread.clone().unwrap(); - let path_list_for_new_thread = path_list_for_new_thread.clone(); + .when_some( + workspace.filter(|_| show_new_thread_button), + |this, workspace| { + let path_list = path_list.clone(); this.child( IconButton::new( SharedString::from(format!( @@ -1495,26 +1481,22 @@ impl Sidebar { .tooltip(Tooltip::text("New Thread")) .on_click(cx.listener( move |this, _, window, cx| { - this.collapsed_groups.remove(&path_list_for_new_thread); + this.collapsed_groups.remove(&path_list); this.selection = None; - this.create_new_thread( - 
&workspace_for_new_thread, - window, - cx, - ); + this.create_new_thread(&workspace, window, cx); }, )), ) }, - ) - }) + ), + ) .when(!is_active, |this| { - let path_list_for_open = path_list.clone(); + let path_list = path_list.clone(); this.cursor_pointer() .hover(|s| s.bg(hover_color)) .tooltip(Tooltip::text("Open Workspace")) .on_click(cx.listener(move |this, _, window, cx| { - if let Some(workspace) = this.workspace_for_group(&path_list_for_open, cx) { + if let Some(workspace) = this.workspace_for_group(&path_list, cx) { this.active_entry = Some(ActiveEntry::Draft(workspace.clone())); if let Some(multi_workspace) = this.multi_workspace.upgrade() { multi_workspace.update(cx, |multi_workspace, cx| { @@ -1527,7 +1509,7 @@ impl Sidebar { }); } } else { - this.open_workspace_for_group(&path_list_for_open, window, cx); + this.open_workspace_for_group(&path_list, window, cx); } })) }) @@ -1538,14 +1520,12 @@ impl Sidebar { &self, ix: usize, id_prefix: &str, - workspace: &Entity, - workspace_for_remove: &Entity, + project_group_key: &ProjectGroupKey, cx: &mut Context, ) -> impl IntoElement { - let workspace_for_menu = workspace.clone(); - let workspace_for_remove = workspace_for_remove.clone(); let multi_workspace = self.multi_workspace.clone(); let this = cx.weak_entity(); + let project_group_key = project_group_key.clone(); PopoverMenu::new(format!("{id_prefix}project-header-menu-{ix}")) .on_open(Rc::new({ @@ -1559,116 +1539,102 @@ impl Sidebar { } })) .menu(move |window, cx| { - let workspace = workspace_for_menu.clone(); - let workspace_for_remove = workspace_for_remove.clone(); let multi_workspace = multi_workspace.clone(); + let project_group_key = project_group_key.clone(); let menu = ContextMenu::build_persistent(window, cx, move |menu, _window, cx| { - let worktrees: Vec<_> = workspace - .read(cx) - .visible_worktrees(cx) - .map(|worktree| { - let worktree_read = worktree.read(cx); - let id = worktree_read.id(); - let name: SharedString = - 
worktree_read.root_name().as_unix_str().to_string().into(); - (id, name) - }) - .collect(); - - let worktree_count = worktrees.len(); - let mut menu = menu .header("Project Folders") .end_slot_action(Box::new(menu::EndSlot)); - for (worktree_id, name) in &worktrees { - let worktree_id = *worktree_id; - let workspace_for_worktree = workspace.clone(); - let workspace_for_remove_worktree = workspace_for_remove.clone(); - let multi_workspace_for_worktree = multi_workspace.clone(); - - let remove_handler = move |window: &mut Window, cx: &mut App| { - if worktree_count <= 1 { - if let Some(mw) = multi_workspace_for_worktree.upgrade() { - let ws = workspace_for_remove_worktree.clone(); - mw.update(cx, |multi_workspace, cx| { - multi_workspace.remove(&ws, window, cx); - }); - } - } else { - workspace_for_worktree.update(cx, |workspace, cx| { - workspace.project().update(cx, |project, cx| { - project.remove_worktree(worktree_id, cx); - }); - }); - } + for path in project_group_key.path_list().paths() { + let Some(name) = path.file_name() else { + continue; }; - + let name: SharedString = name.to_string_lossy().into_owned().into(); + let path = path.clone(); + let project_group_key = project_group_key.clone(); + let multi_workspace = multi_workspace.clone(); menu = menu.entry_with_end_slot_on_hover( name.clone(), None, |_, _| {}, IconName::Close, "Remove Folder".into(), - remove_handler, + move |_window, cx| { + multi_workspace + .update(cx, |multi_workspace, cx| { + multi_workspace.remove_folder_from_project_group( + &project_group_key, + &path, + cx, + ); + }) + .ok(); + }, ); } - let workspace_for_add = workspace.clone(); - let multi_workspace_for_add = multi_workspace.clone(); let menu = menu.separator().entry( "Add Folder to Project", Some(Box::new(AddFolderToProject)), - move |window, cx| { - if let Some(mw) = multi_workspace_for_add.upgrade() { - mw.update(cx, |mw, cx| { - mw.activate(workspace_for_add.clone(), window, cx); - }); + { + let project_group_key = 
project_group_key.clone(); + let multi_workspace = multi_workspace.clone(); + move |window, cx| { + multi_workspace + .update(cx, |multi_workspace, cx| { + multi_workspace.prompt_to_add_folders_to_project_group( + &project_group_key, + window, + cx, + ); + }) + .ok(); } - workspace_for_add.update(cx, |workspace, cx| { - workspace.add_folder_to_project(&AddFolderToProject, window, cx); - }); }, ); - let workspace_count = multi_workspace + let group_count = multi_workspace .upgrade() - .map_or(0, |mw| mw.read(cx).workspaces().len()); - let menu = if workspace_count > 1 { - let workspace_for_move = workspace.clone(); - let multi_workspace_for_move = multi_workspace.clone(); + .map_or(0, |mw| mw.read(cx).project_group_keys().count()); + let menu = if group_count > 1 { + let project_group_key = project_group_key.clone(); + let multi_workspace = multi_workspace.clone(); menu.entry( "Move to New Window", Some(Box::new( zed_actions::agents_sidebar::MoveWorkspaceToNewWindow, )), move |window, cx| { - if let Some(mw) = multi_workspace_for_move.upgrade() { - mw.update(cx, |multi_workspace, cx| { - multi_workspace.move_workspace_to_new_window( - &workspace_for_move, + multi_workspace + .update(cx, |multi_workspace, cx| { + multi_workspace.move_project_group_to_new_window( + &project_group_key, window, cx, ); - }); - } + }) + .ok(); }, ) } else { menu }; - let workspace_for_remove = workspace_for_remove.clone(); - let multi_workspace_for_remove = multi_workspace.clone(); + let project_group_key = project_group_key.clone(); + let multi_workspace = multi_workspace.clone(); menu.separator() .entry("Remove Project", None, move |window, cx| { - if let Some(mw) = multi_workspace_for_remove.upgrade() { - let ws = workspace_for_remove.clone(); - mw.update(cx, |multi_workspace, cx| { - multi_workspace.remove(&ws, window, cx); - }); - } + multi_workspace + .update(cx, |multi_workspace, cx| { + multi_workspace.remove_project_group( + &project_group_key, + window, + cx, + ); + }) + .ok(); 
}) }); @@ -2176,16 +2142,12 @@ impl Sidebar { return; }; - let paths: Vec = - path_list.paths().iter().map(|p| p.to_path_buf()).collect(); - - let open_task = multi_workspace.update(cx, |mw, cx| { - mw.open_project(paths, workspace::OpenMode::Activate, window, cx) + let open_task = multi_workspace.update(cx, |this, cx| { + this.find_or_create_local_workspace(path_list, window, cx) }); cx.spawn_in(window, async move |this, cx| { let workspace = open_task.await?; - this.update_in(cx, |this, window, cx| { this.activate_thread(metadata, &workspace, window, cx); })?; diff --git a/crates/util/src/path_list.rs b/crates/util/src/path_list.rs index 47ade219c6bd4a2217f7ac00ecccfd92fe64c199..af99f4c6570b35b004179afb87b737d3a4356489 100644 --- a/crates/util/src/path_list.rs +++ b/crates/util/src/path_list.rs @@ -65,6 +65,16 @@ impl PathList { self.paths.is_empty() } + /// Returns a new `PathList` with the given path removed. + pub fn without_path(&self, path_to_remove: &Path) -> PathList { + let paths: Vec = self + .ordered_paths() + .filter(|p| p.as_path() != path_to_remove) + .cloned() + .collect(); + PathList::new(&paths) + } + /// Get the paths in lexicographic order. 
pub fn paths(&self) -> &[PathBuf] { self.paths.as_ref() diff --git a/crates/workspace/src/multi_workspace.rs b/crates/workspace/src/multi_workspace.rs index d1bfcf2652d4d7c77d1f83ca2bc9d9603e3a2eed..cb2640142442b458b60759547c783d87d9de8a10 100644 --- a/crates/workspace/src/multi_workspace.rs +++ b/crates/workspace/src/multi_workspace.rs @@ -1,5 +1,6 @@ use anyhow::Result; use feature_flags::{AgentV2FeatureFlag, FeatureFlagAppExt}; +use gpui::PathPromptOptions; use gpui::{ AnyView, App, Context, DragMoveEvent, Entity, EntityId, EventEmitter, FocusHandle, Focusable, ManagedView, MouseButton, Pixels, Render, Subscription, Task, Tiling, Window, WindowId, @@ -7,14 +8,16 @@ use gpui::{ }; #[cfg(any(test, feature = "test-support"))] use project::Project; -use project::{DisableAiSettings, ProjectGroupKey}; +use project::{DirectoryLister, DisableAiSettings, ProjectGroupKey}; use settings::Settings; pub use settings::SidebarSide; use std::future::Future; +use std::path::Path; use std::path::PathBuf; use std::sync::Arc; use ui::prelude::*; use util::ResultExt; +use util::path_list::PathList; use zed_actions::agents_sidebar::{MoveWorkspaceToNewWindow, ToggleThreadSwitcher}; use agent_settings::AgentSettings; @@ -23,6 +26,7 @@ use ui::{ContextMenu, right_click_menu}; const SIDEBAR_RESIZE_HANDLE_SIZE: Pixels = px(6.0); +use crate::AppState; use crate::{ CloseIntent, CloseWindow, DockPosition, Event as WorkspaceEvent, Item, ModalView, OpenMode, Panel, Workspace, WorkspaceId, client_side_decorations, @@ -494,6 +498,176 @@ impl MultiWorkspace { groups.into_iter() } + pub fn workspaces_for_project_group( + &self, + project_group_key: &ProjectGroupKey, + cx: &App, + ) -> impl Iterator> { + self.workspaces + .iter() + .filter(move |ws| ws.read(cx).project_group_key(cx) == *project_group_key) + } + + pub fn remove_folder_from_project_group( + &mut self, + project_group_key: &ProjectGroupKey, + path: &Path, + cx: &mut Context, + ) { + let new_path_list = 
project_group_key.path_list().without_path(path); + if new_path_list.is_empty() { + return; + } + + let new_key = ProjectGroupKey::new(project_group_key.host(), new_path_list); + + let workspaces: Vec<_> = self + .workspaces_for_project_group(project_group_key, cx) + .cloned() + .collect(); + + self.add_project_group_key(new_key); + + for workspace in workspaces { + let project = workspace.read(cx).project().clone(); + project.update(cx, |project, cx| { + project.remove_worktree_for_main_worktree_path(path, cx); + }); + } + + self.serialize(cx); + cx.notify(); + } + + pub fn prompt_to_add_folders_to_project_group( + &mut self, + key: &ProjectGroupKey, + window: &mut Window, + cx: &mut Context, + ) { + let paths = self.workspace().update(cx, |workspace, cx| { + workspace.prompt_for_open_path( + PathPromptOptions { + files: false, + directories: true, + multiple: true, + prompt: None, + }, + DirectoryLister::Project(workspace.project().clone()), + window, + cx, + ) + }); + + let key = key.clone(); + cx.spawn_in(window, async move |this, cx| { + if let Some(new_paths) = paths.await.ok().flatten() { + if !new_paths.is_empty() { + this.update(cx, |multi_workspace, cx| { + multi_workspace.add_folders_to_project_group(&key, new_paths, cx); + })?; + } + } + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + + pub fn add_folders_to_project_group( + &mut self, + project_group_key: &ProjectGroupKey, + new_paths: Vec, + cx: &mut Context, + ) { + let mut all_paths: Vec = project_group_key.path_list().paths().to_vec(); + all_paths.extend(new_paths.iter().cloned()); + let new_path_list = PathList::new(&all_paths); + let new_key = ProjectGroupKey::new(project_group_key.host(), new_path_list); + + let workspaces: Vec<_> = self + .workspaces_for_project_group(project_group_key, cx) + .cloned() + .collect(); + + self.add_project_group_key(new_key); + + for workspace in workspaces { + let project = workspace.read(cx).project().clone(); + for path in &new_paths { + project + 
.update(cx, |project, cx| { + project.find_or_create_worktree(path, true, cx) + }) + .detach_and_log_err(cx); + } + } + + self.serialize(cx); + cx.notify(); + } + + pub fn remove_project_group( + &mut self, + key: &ProjectGroupKey, + window: &mut Window, + cx: &mut Context, + ) { + self.project_group_keys.retain(|k| k != key); + + let workspaces: Vec<_> = self + .workspaces_for_project_group(key, cx) + .cloned() + .collect(); + for workspace in workspaces { + self.remove(&workspace, window, cx); + } + + self.serialize(cx); + cx.notify(); + } + + /// Finds an existing workspace in this multi-workspace whose paths match, + /// or creates a new one (deserializing its saved state from the database). + /// Never searches other windows or matches workspaces with a superset of + /// the requested paths. + pub fn find_or_create_local_workspace( + &mut self, + path_list: PathList, + window: &mut Window, + cx: &mut Context, + ) -> Task>> { + if let Some(workspace) = self + .workspaces + .iter() + .find(|ws| ws.read(cx).project_group_key(cx).path_list() == &path_list) + .cloned() + { + self.activate(workspace.clone(), window, cx); + return Task::ready(Ok(workspace)); + } + + let paths = path_list.paths().to_vec(); + let app_state = self.workspace().read(cx).app_state().clone(); + let requesting_window = window.window_handle().downcast::(); + + cx.spawn(async move |_this, cx| { + let result = cx + .update(|cx| { + Workspace::new_local( + paths, + app_state, + requesting_window, + None, + None, + OpenMode::Activate, + cx, + ) + }) + .await?; + Ok(result.workspace) + }) + } + pub fn workspace(&self) -> &Entity { &self.workspaces[self.active_workspace_index] } @@ -892,7 +1066,7 @@ impl MultiWorkspace { return; } - let app_state: Arc = workspace.read(cx).app_state().clone(); + let app_state: Arc = workspace.read(cx).app_state().clone(); cx.defer(move |cx| { let options = (app_state.build_window_options)(None, cx); @@ -909,7 +1083,58 @@ impl MultiWorkspace { }); } - // TODO: Move 
group to a new window? + pub fn move_project_group_to_new_window( + &mut self, + key: &ProjectGroupKey, + window: &mut Window, + cx: &mut Context, + ) { + let workspaces: Vec<_> = self + .workspaces_for_project_group(key, cx) + .cloned() + .collect(); + if workspaces.is_empty() { + return; + } + + self.project_group_keys.retain(|k| k != key); + + let mut removed = Vec::new(); + for workspace in &workspaces { + if self.remove(workspace, window, cx) { + removed.push(workspace.clone()); + } + } + + if removed.is_empty() { + return; + } + + let app_state = removed[0].read(cx).app_state().clone(); + + cx.defer(move |cx| { + let options = (app_state.build_window_options)(None, cx); + + let first = removed[0].clone(); + let rest = removed[1..].to_vec(); + + let Ok(new_window) = cx.open_window(options, |window, cx| { + cx.new(|cx| MultiWorkspace::new(first, window, cx)) + }) else { + return; + }; + + new_window + .update(cx, |mw, window, cx| { + for workspace in rest { + mw.activate(workspace, window, cx); + } + window.activate_window(); + }) + .log_err(); + }); + } + fn move_active_workspace_to_new_window( &mut self, _: &MoveWorkspaceToNewWindow, @@ -927,16 +1152,10 @@ impl MultiWorkspace { window: &mut Window, cx: &mut Context, ) -> Task>> { - let workspace = self.workspace().clone(); - - let needs_close_prompt = !self.multi_workspace_enabled(cx); - let open_mode = if self.multi_workspace_enabled(cx) { - open_mode + if self.multi_workspace_enabled(cx) { + self.find_or_create_local_workspace(PathList::new(&paths), window, cx) } else { - OpenMode::Activate - }; - - if needs_close_prompt { + let workspace = self.workspace().clone(); cx.spawn_in(window, async move |_this, cx| { let should_continue = workspace .update_in(cx, |workspace, window, cx| { @@ -953,10 +1172,6 @@ impl MultiWorkspace { Ok(workspace) } }) - } else { - workspace.update(cx, |workspace, cx| { - workspace.open_workspace_for_paths(open_mode, paths, window, cx) - }) } } } From 
c0f01c42788ff13774853aa9dad2c18907a0ec3c Mon Sep 17 00:00:00 2001 From: Dino Date: Sun, 5 Apr 2026 23:05:35 +0100 Subject: [PATCH 46/63] Update futures to 0.3.32 (#52910) As part of the work that is being developed for the Project Panel's Undo & Redo system, in https://github.com/zed-industries/zed/tree/5039-create-redo , we're implementing an asynchronous task queue which simply receives a message with the operation/change that is meant to be carried out, in order to ensure these run in a sequential fashion. While trying to use `futures_channel::mpsc::Receiver`, it was noted that `recv` method was not available so this Pull Request updates the `futures` crate to `0.3.32`, where it is available. This version also deprecates `try_next` in favor of `try_recv` so this Pull Request updates existing callers of `try_next` to use `try_recv`, which was mostly updating the expected return type from `Result>` to `Result`. Co-authored-by: Yara --- Cargo.lock | 256 +++++++++--------- crates/agent/src/edit_agent.rs | 2 +- crates/agent/src/tests/mod.rs | 8 +- crates/agent/src/tools/copy_path_tool.rs | 8 +- .../agent/src/tools/create_directory_tool.rs | 8 +- crates/agent/src/tools/delete_path_tool.rs | 8 +- crates/agent/src/tools/edit_file_tool.rs | 18 +- crates/agent/src/tools/list_directory_tool.rs | 18 +- crates/agent/src/tools/move_path_tool.rs | 8 +- crates/agent/src/tools/read_file_tool.rs | 6 +- .../src/tools/restore_file_from_disk_tool.rs | 8 +- crates/agent/src/tools/save_file_tool.rs | 8 +- .../src/tools/streaming_edit_file_tool.rs | 18 +- crates/agent/src/tools/terminal_tool.rs | 18 +- crates/context_server/src/oauth.rs | 10 +- .../src/copilot_edit_prediction_delegate.rs | 4 +- crates/editor/src/hover_links.rs | 2 +- crates/project/src/git_store.rs | 4 +- .../tests/integration/project_tests.rs | 2 +- crates/search/src/buffer_search.rs | 42 +-- crates/workspace/src/item.rs | 6 +- crates/zed/src/main.rs | 3 +- crates/zed/src/visual_test_runner.rs | 2 +- 23 files changed, 
220 insertions(+), 247 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 906c5e65456c604e5123bfde9ac1c39e261eedfd..fd3ddfc882edafa29722cee7b3dbf329caecdad4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -15,7 +15,7 @@ dependencies = [ "collections", "env_logger 0.11.8", "file_icons", - "futures 0.3.31", + "futures 0.3.32", "gpui", "image", "indoc", @@ -75,7 +75,7 @@ dependencies = [ "collections", "ctor", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "language", "log", @@ -100,7 +100,7 @@ dependencies = [ "editor", "extension_host", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "language", "project", @@ -163,7 +163,7 @@ dependencies = [ "eval_utils", "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "git", "gpui", "gpui_tokio", @@ -227,7 +227,7 @@ dependencies = [ "async-broadcast", "async-trait", "derive_more", - "futures 0.3.31", + "futures 0.3.32", "log", "serde", "serde_json", @@ -263,7 +263,7 @@ dependencies = [ "env_logger 0.11.8", "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "google_ai", "gpui", "gpui_tokio", @@ -344,7 +344,7 @@ dependencies = [ "feature_flags", "file_icons", "fs", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "git", "gpui", @@ -629,7 +629,7 @@ version = "0.1.0" dependencies = [ "anyhow", "chrono", - "futures 0.3.31", + "futures 0.3.32", "http_client", "schemars", "serde", @@ -750,7 +750,7 @@ name = "askpass" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "gpui", "log", "net", @@ -945,7 +945,7 @@ name = "async-pipe" version = "0.1.3" source = "git+https://github.com/zed-industries/async-pipe-rs?rev=82d00a04211cf4e1236029aa03e6b6ce2a74c553#82d00a04211cf4e1236029aa03e6b6ce2a74c553" dependencies = [ - "futures 0.3.31", + "futures 0.3.32", "log", ] @@ -1183,7 +1183,7 @@ dependencies = [ "clock", "ctor", "db", - "futures 0.3.31", + "futures 0.3.32", "futures-lite 1.13.0", "gpui", "http_client", @@ -1862,7 +1862,7 @@ dependencies = [ "anyhow", 
"aws-sdk-bedrockruntime", "aws-smithy-types", - "futures 0.3.31", + "futures 0.3.32", "schemars", "serde", "serde_json", @@ -2151,7 +2151,7 @@ version = "0.1.0" dependencies = [ "clock", "ctor", - "futures 0.3.31", + "futures 0.3.32", "git2", "gpui", "language", @@ -2348,7 +2348,7 @@ dependencies = [ "collections", "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_tokio", "language", @@ -2669,7 +2669,7 @@ dependencies = [ "client", "clock", "collections", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http_client", "language", @@ -2864,7 +2864,7 @@ dependencies = [ "derive_more", "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_tokio", "http_client", @@ -2920,7 +2920,7 @@ version = "0.1.0" dependencies = [ "anyhow", "cloud_api_types", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_tokio", "http_client", @@ -3052,7 +3052,7 @@ dependencies = [ "anyhow", "edit_prediction", "edit_prediction_types", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http_client", "icons", @@ -3099,7 +3099,7 @@ dependencies = [ "extension", "file_finder", "fs", - "futures 0.3.31", + "futures 0.3.32", "git", "git_hosting_providers", "git_ui", @@ -3176,7 +3176,7 @@ dependencies = [ "collections", "db", "editor", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "livekit_client", @@ -3437,7 +3437,7 @@ dependencies = [ "async-trait", "base64 0.22.1", "collections", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http_client", "log", @@ -3498,7 +3498,7 @@ dependencies = [ "edit_prediction_types", "editor", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "icons", "indoc", @@ -3532,7 +3532,7 @@ dependencies = [ "collections", "dirs 4.0.0", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http_client", "log", @@ -3982,7 +3982,7 @@ version = "0.1.0" dependencies = [ "cfg-if", "crash-handler", - "futures 0.3.31", + "futures 0.3.32", "log", "mach2 0.5.0", "minidumper", @@ -4318,7 +4318,7 @@ dependencies = [ 
"collections", "dap-types", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http_client", "language", @@ -4360,7 +4360,7 @@ dependencies = [ "dap", "dotenvy", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http_client", "json_dotpath", @@ -4531,7 +4531,7 @@ dependencies = [ "anyhow", "dap", "editor", - "futures 0.3.31", + "futures 0.3.32", "gpui", "project", "serde_json", @@ -4558,7 +4558,7 @@ dependencies = [ "editor", "feature_flags", "file_icons", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "hex", @@ -4613,7 +4613,7 @@ name = "deepseek" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "http_client", "schemars", "serde", @@ -4733,7 +4733,7 @@ dependencies = [ "async-trait", "env_logger 0.11.8", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http 1.3.1", "http_client", @@ -5122,7 +5122,7 @@ dependencies = [ "edit_prediction_types", "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "heapless", "indoc", @@ -5183,7 +5183,7 @@ dependencies = [ "extension", "flate2", "fs", - "futures 0.3.31", + "futures 0.3.32", "gaoya", "gpui", "gpui_platform", @@ -5235,7 +5235,7 @@ dependencies = [ "clock", "collections", "env_logger 0.11.8", - "futures 0.3.31", + "futures 0.3.32", "gpui", "indoc", "language", @@ -5284,7 +5284,7 @@ dependencies = [ "editor", "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "indoc", "language", @@ -5329,7 +5329,7 @@ dependencies = [ "feature_flags", "file_icons", "fs", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "git", "gpui", @@ -5743,7 +5743,7 @@ dependencies = [ "extension", "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_platform", "gpui_tokio", @@ -5853,7 +5853,7 @@ dependencies = [ "collections", "dap", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "heck 0.5.0", "http_client", @@ -5921,7 +5921,7 @@ dependencies = [ "dap", "extension", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", 
"gpui_tokio", "http_client", @@ -6129,7 +6129,7 @@ dependencies = [ "ctor", "editor", "file_icons", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "menu", @@ -6431,7 +6431,7 @@ dependencies = [ "collections", "dunce", "fs", - "futures 0.3.31", + "futures 0.3.32", "git", "gpui", "ignore", @@ -6529,9 +6529,9 @@ checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" [[package]] name = "futures" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" dependencies = [ "futures-channel", "futures-core", @@ -6544,9 +6544,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" dependencies = [ "futures-core", "futures-sink", @@ -6567,15 +6567,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" [[package]] name = "futures-executor" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" dependencies = [ "futures-core", "futures-task", @@ -6595,9 +6595,9 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" [[package]] name = "futures-lite" @@ -6629,9 +6629,9 @@ dependencies = [ [[package]] name = "futures-macro" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" dependencies = [ "proc-macro2", "quote", @@ -6640,21 +6640,21 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" [[package]] name = "futures-task" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" [[package]] name = "futures-util" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" dependencies = [ "futures 0.1.31", "futures-channel", @@ -6663,9 +6663,9 @@ dependencies = [ "futures-macro", "futures-sink", "futures-task", + "libc", "memchr", "pin-project-lite", - "pin-utils", "slab", "tokio-io", ] @@ -7092,7 +7092,7 @@ dependencies = [ "async-trait", "collections", "derive_more", - "futures 0.3.31", + "futures 0.3.32", "git2", "gpui", "http_client", @@ -7168,7 +7168,7 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", - "futures 0.3.31", + "futures 0.3.32", 
"git", "gpui", "http_client", @@ -7199,7 +7199,7 @@ dependencies = [ "db", "editor", "file_icons", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "git", "gpui", @@ -7404,7 +7404,7 @@ name = "google_ai" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "http_client", "schemars", "serde", @@ -7474,7 +7474,7 @@ dependencies = [ "env_logger 0.11.8", "etagere", "foreign-types 0.5.0", - "futures 0.3.31", + "futures 0.3.32", "futures-concurrency", "getrandom 0.3.4", "gpui_macros", @@ -7549,7 +7549,7 @@ dependencies = [ "calloop-wayland-source", "collections", "filedescriptor", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_wgpu", "http_client", @@ -7603,7 +7603,7 @@ dependencies = [ "dispatch2", "etagere", "foreign-types 0.5.0", - "futures 0.3.31", + "futures 0.3.32", "gpui", "image", "itertools 0.14.0", @@ -7672,7 +7672,7 @@ version = "0.1.0" dependencies = [ "anyhow", "console_error_panic_hook", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_wgpu", "http_client", @@ -7723,7 +7723,7 @@ dependencies = [ "anyhow", "collections", "etagere", - "futures 0.3.31", + "futures 0.3.32", "gpui", "image", "itertools 0.14.0", @@ -8207,7 +8207,7 @@ dependencies = [ "async-tar", "bytes 1.11.1", "derive_more", - "futures 0.3.31", + "futures 0.3.32", "http 1.3.1", "http-body 1.0.1", "log", @@ -9090,7 +9090,7 @@ dependencies = [ "async-trait", "bytes 1.11.1", "chrono", - "futures 0.3.31", + "futures 0.3.32", "serde", "serde_json", "thiserror 2.0.17", @@ -9106,7 +9106,7 @@ dependencies = [ "anyhow", "async-trait", "async-tungstenite", - "futures 0.3.31", + "futures 0.3.32", "jupyter-protocol", "serde", "serde_json", @@ -9224,7 +9224,7 @@ dependencies = [ "ec4rs", "encoding_rs", "fs", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "globset", "gpui", @@ -9304,7 +9304,7 @@ dependencies = [ "collections", "extension", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "language", "log", @@ -9330,7 +9330,7 @@ dependencies = [ 
"collections", "credentials_provider", "env_var", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http_client", "icons", @@ -9373,7 +9373,7 @@ dependencies = [ "extension", "extension_host", "fs", - "futures 0.3.31", + "futures 0.3.32", "google_ai", "gpui", "gpui_tokio", @@ -9450,7 +9450,7 @@ dependencies = [ "command_palette_hooks", "edit_prediction", "editor", - "futures 0.3.31", + "futures 0.3.32", "gpui", "itertools 0.14.0", "language", @@ -9486,7 +9486,7 @@ dependencies = [ "chrono", "collections", "fs", - "futures 0.3.31", + "futures 0.3.32", "globset", "gpui", "grammars", @@ -9873,7 +9873,7 @@ dependencies = [ "core-video", "coreaudio-rs 0.12.1", "cpal", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_platform", "gpui_tokio", @@ -9917,7 +9917,7 @@ name = "lmstudio" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "http_client", "schemars", "serde", @@ -9988,7 +9988,7 @@ dependencies = [ "async-pipe", "collections", "ctor", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_util", "log", @@ -10128,7 +10128,7 @@ dependencies = [ "collections", "env_logger 0.11.8", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_platform", "html5ever 0.27.0", @@ -10575,7 +10575,7 @@ name = "mistral" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "http_client", "schemars", "serde", @@ -10764,7 +10764,7 @@ name = "nc" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "net", "smol", ] @@ -10860,7 +10860,7 @@ dependencies = [ "async-std", "async-tar", "async-trait", - "futures 0.3.31", + "futures 0.3.32", "http_client", "log", "paths", @@ -11184,7 +11184,7 @@ version = "0.9.2" source = "git+https://github.com/KillTheMule/nvim-rs?rev=764dd270c642f77f10f3e19d05cc178a6cbe69f3#764dd270c642f77f10f3e19d05cc178a6cbe69f3" dependencies = [ "async-trait", - "futures 0.3.31", + "futures 0.3.32", "log", "rmp", "rmpv", @@ -11384,7 +11384,7 @@ name = "ollama" version 
= "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "http_client", "schemars", "serde", @@ -11491,7 +11491,7 @@ name = "open_ai" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "http_client", "log", "rand 0.9.2", @@ -11509,7 +11509,7 @@ version = "0.1.0" dependencies = [ "editor", "file_icons", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "picker", @@ -11530,7 +11530,7 @@ name = "open_router" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "http_client", "schemars", "serde", @@ -11545,7 +11545,7 @@ name = "opencode" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "google_ai", "http_client", "schemars", @@ -12859,7 +12859,7 @@ checksum = "af3fb618632874fb76937c2361a7f22afd393c982a2165595407edc75b06d3c1" dependencies = [ "atomic", "crossbeam-queue", - "futures 0.3.31", + "futures 0.3.32", "log", "parking_lot", "pin-project", @@ -13092,7 +13092,7 @@ dependencies = [ "extension", "fancy-regex 0.17.0", "fs", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "git", "git2", @@ -13159,7 +13159,7 @@ dependencies = [ "askpass", "clap", "client", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_platform", "http_client", @@ -13220,7 +13220,7 @@ version = "0.1.0" dependencies = [ "anyhow", "editor", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "language", @@ -13262,7 +13262,7 @@ dependencies = [ "chrono", "collections", "fs", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "handlebars 4.5.0", @@ -14017,7 +14017,7 @@ dependencies = [ "extension", "extension_host", "fs", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "http_client", @@ -14201,7 +14201,7 @@ dependencies = [ "base64 0.22.1", "collections", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "log", "parking_lot", @@ -14229,7 +14229,7 @@ dependencies = [ "anyhow", "askpass", "auto_update", - "futures 0.3.31", + "futures 0.3.32", "gpui", 
"log", "markdown", @@ -14267,7 +14267,7 @@ dependencies = [ "extension_host", "fork", "fs", - "futures 0.3.31", + "futures 0.3.32", "git", "git2", "git_hosting_providers", @@ -14349,7 +14349,7 @@ dependencies = [ "editor", "feature_flags", "file_icons", - "futures 0.3.31", + "futures 0.3.32", "gpui", "html_to_markdown", "http_client", @@ -14474,7 +14474,7 @@ version = "0.1.0" dependencies = [ "anyhow", "bytes 1.11.1", - "futures 0.3.31", + "futures 0.3.32", "gpui_util", "http_client", "http_client_tls", @@ -14646,7 +14646,7 @@ dependencies = [ "async-tungstenite", "base64 0.22.1", "collections", - "futures 0.3.31", + "futures 0.3.32", "gpui", "parking_lot", "proto", @@ -14739,7 +14739,7 @@ dependencies = [ "chrono", "data-encoding", "dirs 6.0.0", - "futures 0.3.31", + "futures 0.3.32", "glob", "jupyter-protocol", "serde", @@ -15113,7 +15113,7 @@ dependencies = [ "backtrace", "chrono", "flume", - "futures 0.3.31", + "futures 0.3.32", "parking_lot", "rand 0.9.2", "web-time", @@ -15341,7 +15341,7 @@ dependencies = [ "collections", "editor", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "itertools 0.14.0", "language", @@ -15620,7 +15620,7 @@ dependencies = [ "collections", "ec4rs", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "indoc", "inventory", @@ -15724,7 +15724,7 @@ dependencies = [ "editor", "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "heck 0.5.0", @@ -16100,7 +16100,7 @@ dependencies = [ "collections", "extension", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "indoc", "parking_lot", @@ -16212,7 +16212,7 @@ version = "0.1.0" dependencies = [ "anyhow", "collections", - "futures 0.3.31", + "futures 0.3.32", "indoc", "libsqlite3-sys", "log", @@ -17258,7 +17258,7 @@ version = "0.1.0" dependencies = [ "anyhow", "collections", - "futures 0.3.31", + "futures 0.3.32", "gpui", "hex", "log", @@ -17305,7 +17305,7 @@ dependencies = [ name = "telemetry" version = "0.1.0" dependencies = [ - "futures 0.3.31", + 
"futures 0.3.32", "serde", "serde_json", "telemetry_events", @@ -17360,7 +17360,7 @@ dependencies = [ "alacritty_terminal", "anyhow", "collections", - "futures 0.3.31", + "futures 0.3.32", "gpui", "itertools 0.14.0", "libc", @@ -17406,7 +17406,7 @@ dependencies = [ "db", "dirs 4.0.0", "editor", - "futures 0.3.31", + "futures 0.3.32", "gpui", "itertools 0.14.0", "language", @@ -18042,7 +18042,7 @@ dependencies = [ "anyhow", "convert_case 0.8.0", "editor", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "language", @@ -18918,7 +18918,7 @@ dependencies = [ "command-fds", "dirs 4.0.0", "dunce", - "futures 0.3.31", + "futures 0.3.32", "futures-lite 1.13.0", "git2", "globset", @@ -19075,7 +19075,7 @@ dependencies = [ "db", "editor", "env_logger 0.11.8", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "git_ui", "gpui", @@ -19439,7 +19439,7 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b7516db7f32decdadb1c3b8deb1b7d78b9df7606c5cc2f6241737c2ab3a0258e" dependencies = [ - "futures 0.3.31", + "futures 0.3.32", "js-sys", "wasm-bindgen", "web-sys", @@ -19795,7 +19795,7 @@ dependencies = [ "cap-std", "cap-time-ext", "fs-set-times", - "futures 0.3.31", + "futures 0.3.32", "io-extras", "io-lifetimes", "rustix 1.1.2", @@ -19819,7 +19819,7 @@ dependencies = [ "anyhow", "async-trait", "bytes 1.11.1", - "futures 0.3.31", + "futures 0.3.32", "wasmtime", ] @@ -19837,7 +19837,7 @@ name = "watch" version = "0.1.0" dependencies = [ "ctor", - "futures 0.3.31", + "futures 0.3.32", "gpui", "parking_lot", "zlog", @@ -20007,7 +20007,7 @@ dependencies = [ "client", "cloud_api_types", "cloud_llm_client", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http_client", "language_model", @@ -21201,7 +21201,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4db52a11d4dfb0a59f194c064055794ee6564eb1ced88c25da2cf76e50c5621" dependencies = [ "bitflags 2.10.0", - "futures 0.3.31", + "futures 0.3.32", "once_cell", ] 
@@ -21452,7 +21452,7 @@ dependencies = [ "db", "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "git", "gpui", "http_client", @@ -21500,7 +21500,7 @@ dependencies = [ "collections", "encoding_rs", "fs", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "git", "gpui", @@ -21752,7 +21752,7 @@ dependencies = [ "base64 0.22.1", "bytes 1.11.1", "flate2", - "futures 0.3.31", + "futures 0.3.32", "http-body-util", "hyper 1.7.0", "hyper-util", @@ -21957,7 +21957,7 @@ dependencies = [ "feedback", "file_finder", "fs", - "futures 0.3.31", + "futures 0.3.32", "git", "git_graph", "git_hosting_providers", @@ -22194,7 +22194,7 @@ version = "0.1.0" dependencies = [ "anyhow", "credentials_provider", - "futures 0.3.31", + "futures 0.3.32", "gpui", "paths", "release_channel", @@ -22347,7 +22347,7 @@ dependencies = [ "asynchronous-codec", "bytes 1.11.1", "crossbeam-queue", - "futures 0.3.31", + "futures 0.3.32", "log", "num-traits", "once_cell", diff --git a/crates/agent/src/edit_agent.rs b/crates/agent/src/edit_agent.rs index f0dae2a7b39dcad0fea280a2354f2f3c5c61600b..afaa124de066d92e5a1d1a1670f762017f086d01 100644 --- a/crates/agent/src/edit_agent.rs +++ b/crates/agent/src/edit_agent.rs @@ -1519,7 +1519,7 @@ mod tests { stream: &mut UnboundedReceiver, ) -> Vec { let mut events = Vec::new(); - while let Ok(Some(event)) = stream.try_next() { + while let Ok(event) = stream.try_recv() { events.push(event); } events diff --git a/crates/agent/src/tests/mod.rs b/crates/agent/src/tests/mod.rs index 9808b95dd0812f9a857da8a9c39e78fde40af1f9..f7b52b2573144e4c2fd378cfb19c9ee2473a37db 100644 --- a/crates/agent/src/tests/mod.rs +++ b/crates/agent/src/tests/mod.rs @@ -6208,9 +6208,9 @@ async fn test_edit_file_tool_allow_rule_skips_confirmation(cx: &mut TestAppConte cx.run_until_parked(); - let event = rx.try_next(); + let event = rx.try_recv(); assert!( - !matches!(event, Ok(Some(Ok(ThreadEvent::ToolCallAuthorization(_))))), + !matches!(event, 
Ok(Ok(ThreadEvent::ToolCallAuthorization(_)))), "expected no authorization request for allowed .md file" ); } @@ -6352,9 +6352,9 @@ async fn test_fetch_tool_allow_rule_skips_confirmation(cx: &mut TestAppContext) cx.run_until_parked(); - let event = rx.try_next(); + let event = rx.try_recv(); assert!( - !matches!(event, Ok(Some(Ok(ThreadEvent::ToolCallAuthorization(_))))), + !matches!(event, Ok(Ok(ThreadEvent::ToolCallAuthorization(_)))), "expected no authorization request for allowed docs.rs URL" ); } diff --git a/crates/agent/src/tools/copy_path_tool.rs b/crates/agent/src/tools/copy_path_tool.rs index 95688f27dcd8ca04aef72358ce52144f95138e17..06600f64874851c8d703513ea006d7f0327a0952 100644 --- a/crates/agent/src/tools/copy_path_tool.rs +++ b/crates/agent/src/tools/copy_path_tool.rs @@ -383,8 +383,8 @@ mod tests { assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Expected a single authorization prompt", ); @@ -450,8 +450,8 @@ mod tests { assert!(result.is_err(), "Tool should fail when policy denies"); assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Deny policy should not emit symlink authorization prompt", ); diff --git a/crates/agent/src/tools/create_directory_tool.rs b/crates/agent/src/tools/create_directory_tool.rs index d6c59bcce30ab26991edba0fa7181ec45d10e1b0..60bb44e39ee5ab76168d909c08889cbbbc63f9f4 100644 --- a/crates/agent/src/tools/create_directory_tool.rs +++ b/crates/agent/src/tools/create_directory_tool.rs @@ -370,8 +370,8 @@ mod tests { assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Expected a single authorization prompt", ); @@ -440,8 +440,8 @@ 
mod tests { assert!(result.is_err(), "Tool should fail when policy denies"); assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Deny policy should not emit symlink authorization prompt", ); diff --git a/crates/agent/src/tools/delete_path_tool.rs b/crates/agent/src/tools/delete_path_tool.rs index 7433975c7b782a145dd3e5a80ee59cd92945a989..21b4674425d9169e7740dd35c929302814006684 100644 --- a/crates/agent/src/tools/delete_path_tool.rs +++ b/crates/agent/src/tools/delete_path_tool.rs @@ -439,8 +439,8 @@ mod tests { assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Expected a single authorization prompt", ); @@ -513,8 +513,8 @@ mod tests { assert!(result.is_err(), "Tool should fail when policy denies"); assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Deny policy should not emit symlink authorization prompt", ); diff --git a/crates/agent/src/tools/edit_file_tool.rs b/crates/agent/src/tools/edit_file_tool.rs index 763efd6724a719b90af93843f203ef8c1c3976bb..9bcf164096b99675febd3d7ae1bde8341f7c5ff8 100644 --- a/crates/agent/src/tools/edit_file_tool.rs +++ b/crates/agent/src/tools/edit_file_tool.rs @@ -1188,7 +1188,7 @@ mod tests { }) .await .unwrap(); - assert!(stream_rx.try_next().is_err()); + assert!(stream_rx.try_recv().is_err()); // Test 4: Path with .zed in the middle should require confirmation let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); @@ -1251,7 +1251,7 @@ mod tests { }) .await .unwrap(); - assert!(stream_rx.try_next().is_err()); + assert!(stream_rx.try_recv().is_err()); // 5.3: Normal in-project path with allow — no confirmation needed let 
(stream_tx, mut stream_rx) = ToolCallEventStream::test(); @@ -1268,7 +1268,7 @@ mod tests { }) .await .unwrap(); - assert!(stream_rx.try_next().is_err()); + assert!(stream_rx.try_recv().is_err()); // 5.4: With Confirm default, non-project paths still prompt cx.update(|cx| { @@ -1586,8 +1586,8 @@ mod tests { assert!(result.is_err(), "Tool should fail when policy denies"); assert!( !matches!( - stream_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + stream_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Deny policy should not emit symlink authorization prompt", ); @@ -1658,7 +1658,7 @@ mod tests { } else { auth.await.unwrap(); assert!( - stream_rx.try_next().is_err(), + stream_rx.try_recv().is_err(), "Failed for case: {} - path: {} - expected no confirmation but got one", description, path @@ -1769,7 +1769,7 @@ mod tests { } else { auth.await.unwrap(); assert!( - stream_rx.try_next().is_err(), + stream_rx.try_recv().is_err(), "Failed for case: {} - path: {} - expected no confirmation but got one", description, path @@ -1862,7 +1862,7 @@ mod tests { stream_rx.expect_authorization().await; } else { assert!( - stream_rx.try_next().is_err(), + stream_rx.try_recv().is_err(), "Failed for case: {} - path: {} - expected no confirmation but got one", description, path @@ -1963,7 +1963,7 @@ mod tests { }) .await .unwrap(); - assert!(stream_rx.try_next().is_err()); + assert!(stream_rx.try_recv().is_err()); } } diff --git a/crates/agent/src/tools/list_directory_tool.rs b/crates/agent/src/tools/list_directory_tool.rs index 7abbe1ed4c488210b9079e59765dddc8d5208bed..c88492bba40ee4fdfa928f153e49a302ad60be8b 100644 --- a/crates/agent/src/tools/list_directory_tool.rs +++ b/crates/agent/src/tools/list_directory_tool.rs @@ -982,13 +982,11 @@ mod tests { "Expected private path validation error, got: {error}" ); - let event = event_rx.try_next(); + let event = event_rx.try_recv(); assert!( !matches!( event, - 
Ok(Some(Ok(crate::thread::ThreadEvent::ToolCallAuthorization( - _ - )))) + Ok(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(_))) ), "No authorization should be requested when validation fails before listing", ); @@ -1030,13 +1028,11 @@ mod tests { "Normal path should succeed without authorization" ); - let event = event_rx.try_next(); + let event = event_rx.try_recv(); assert!( !matches!( event, - Ok(Some(Ok(crate::thread::ThreadEvent::ToolCallAuthorization( - _ - )))) + Ok(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(_))) ), "No authorization should be requested for normal paths", ); @@ -1087,13 +1083,11 @@ mod tests { "Intra-project symlink should succeed without authorization: {result:?}", ); - let event = event_rx.try_next(); + let event = event_rx.try_recv(); assert!( !matches!( event, - Ok(Some(Ok(crate::thread::ThreadEvent::ToolCallAuthorization( - _ - )))) + Ok(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(_))) ), "No authorization should be requested for intra-project symlinks", ); diff --git a/crates/agent/src/tools/move_path_tool.rs b/crates/agent/src/tools/move_path_tool.rs index 147947bb67ec646c38b51f37dd75779ed78ec85b..eaea204d84d96ab841f2e075a42a1a42b827374d 100644 --- a/crates/agent/src/tools/move_path_tool.rs +++ b/crates/agent/src/tools/move_path_tool.rs @@ -390,8 +390,8 @@ mod tests { assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Expected a single authorization prompt", ); @@ -457,8 +457,8 @@ mod tests { assert!(result.is_err(), "Tool should fail when policy denies"); assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Deny policy should not emit symlink authorization prompt", ); diff --git a/crates/agent/src/tools/read_file_tool.rs 
b/crates/agent/src/tools/read_file_tool.rs index 093a8580892cfc4cec0a061bcc10717b28c608f2..0086a82f4e79c9924502202873ceb2b25d2e66fb 100644 --- a/crates/agent/src/tools/read_file_tool.rs +++ b/crates/agent/src/tools/read_file_tool.rs @@ -1317,13 +1317,11 @@ mod test { "Expected private-files validation error, got: {error}" ); - let event = event_rx.try_next(); + let event = event_rx.try_recv(); assert!( !matches!( event, - Ok(Some(Ok(crate::thread::ThreadEvent::ToolCallAuthorization( - _ - )))) + Ok(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(_))) ), "No authorization should be requested when validation fails before read", ); diff --git a/crates/agent/src/tools/restore_file_from_disk_tool.rs b/crates/agent/src/tools/restore_file_from_disk_tool.rs index 9273ea5b8bb041e0ea53f3ea72b94b46e5a7e294..b808a966cf983c92a5e93c19599ff5333ed70860 100644 --- a/crates/agent/src/tools/restore_file_from_disk_tool.rs +++ b/crates/agent/src/tools/restore_file_from_disk_tool.rs @@ -589,8 +589,8 @@ mod tests { assert!(result.is_err(), "Tool should fail when policy denies"); assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Deny policy should not emit symlink authorization prompt", ); @@ -662,8 +662,8 @@ mod tests { assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Expected a single authorization prompt", ); diff --git a/crates/agent/src/tools/save_file_tool.rs b/crates/agent/src/tools/save_file_tool.rs index c6a1cd79db65127164fe66f966029b58a366da7f..0cf9666a415f8174e9036ebadf8368589294c885 100644 --- a/crates/agent/src/tools/save_file_tool.rs +++ b/crates/agent/src/tools/save_file_tool.rs @@ -584,8 +584,8 @@ mod tests { assert!(result.is_err(), "Tool should fail when policy denies"); assert!( !matches!( - 
event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Deny policy should not emit symlink authorization prompt", ); @@ -657,8 +657,8 @@ mod tests { assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Expected a single authorization prompt", ); diff --git a/crates/agent/src/tools/streaming_edit_file_tool.rs b/crates/agent/src/tools/streaming_edit_file_tool.rs index c0c29bfc43d9c58ac011b3170edf81210ba8ee66..bc99515e499696e3df11101be8b813afa027c8f4 100644 --- a/crates/agent/src/tools/streaming_edit_file_tool.rs +++ b/crates/agent/src/tools/streaming_edit_file_tool.rs @@ -2493,7 +2493,7 @@ mod tests { }) .await .unwrap(); - assert!(stream_rx.try_next().is_err()); + assert!(stream_rx.try_recv().is_err()); // Test 4: Path with .zed in the middle should require confirmation let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); @@ -2540,7 +2540,7 @@ mod tests { cx.update(|cx| tool.authorize(&PathBuf::from("/etc/hosts"), "test 5.2", &stream_tx, cx)) .await .unwrap(); - assert!(stream_rx.try_next().is_err()); + assert!(stream_rx.try_recv().is_err()); // 5.3: Normal in-project path with allow — no confirmation needed let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); @@ -2554,7 +2554,7 @@ mod tests { }) .await .unwrap(); - assert!(stream_rx.try_next().is_err()); + assert!(stream_rx.try_recv().is_err()); // 5.4: With Confirm default, non-project paths still prompt cx.update(|cx| { @@ -2767,8 +2767,8 @@ mod tests { assert!(result.is_err(), "Tool should fail when policy denies"); assert!( !matches!( - stream_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + stream_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Deny policy should not emit symlink authorization 
prompt", ); @@ -2810,7 +2810,7 @@ mod tests { } else { auth.await.unwrap(); assert!( - stream_rx.try_next().is_err(), + stream_rx.try_recv().is_err(), "Failed for case: {} - path: {} - expected no confirmation but got one", description, path @@ -2887,7 +2887,7 @@ mod tests { } else { auth.await.unwrap(); assert!( - stream_rx.try_next().is_err(), + stream_rx.try_recv().is_err(), "Failed for case: {} - path: {} - expected no confirmation but got one", description, path @@ -2947,7 +2947,7 @@ mod tests { stream_rx.expect_authorization().await; } else { assert!( - stream_rx.try_next().is_err(), + stream_rx.try_recv().is_err(), "Failed for case: {} - path: {} - expected no confirmation but got one", description, path @@ -3015,7 +3015,7 @@ mod tests { }) .await .unwrap(); - assert!(stream_rx.try_next().is_err()); + assert!(stream_rx.try_recv().is_err()); } } diff --git a/crates/agent/src/tools/terminal_tool.rs b/crates/agent/src/tools/terminal_tool.rs index 82bf9a06480bb7d6db3611516281f42452ec5137..f36bd0fe3d3fb00931a7dc272d76eb042f6570f6 100644 --- a/crates/agent/src/tools/terminal_tool.rs +++ b/crates/agent/src/tools/terminal_tool.rs @@ -681,17 +681,17 @@ mod tests { ); assert!( !matches!( - rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "invalid command should not request authorization" ); assert!( !matches!( - rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallUpdate( + rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallUpdate( acp_thread::ToolCallUpdate::UpdateFields(_) - )))) + ))) ), "invalid command should not emit a terminal card update" ); @@ -810,8 +810,8 @@ mod tests { ); assert!( !matches!( - rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "hardcoded denial should not request authorization" ); @@ -1058,8 +1058,8 @@ mod tests { ); assert!( !matches!( - 
rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "rejected command {command:?} should not request authorization" ); diff --git a/crates/context_server/src/oauth.rs b/crates/context_server/src/oauth.rs index 8fa94b775bd270809e5b26aa7fe8478ad6378170..1a314de2fca9b9987336decb15b208ffd7759dea 100644 --- a/crates/context_server/src/oauth.rs +++ b/crates/context_server/src/oauth.rs @@ -2733,10 +2733,7 @@ mod tests { assert!(refreshed); assert_eq!(provider.access_token().as_deref(), Some("new-access")); - let notified_session = rx - .try_next() - .unwrap() - .expect("channel should have a session"); + let notified_session = rx.try_recv().expect("channel should have a session"); assert_eq!(notified_session.tokens.access_token, "new-access"); assert_eq!( notified_session.tokens.refresh_token.as_deref(), @@ -2768,10 +2765,7 @@ mod tests { let refreshed = provider.try_refresh().await.unwrap(); assert!(refreshed); - let notified_session = rx - .try_next() - .unwrap() - .expect("channel should have a session"); + let notified_session = rx.try_recv().expect("channel should have a session"); assert_eq!(notified_session.tokens.access_token, "new-access"); assert_eq!( notified_session.tokens.refresh_token.as_deref(), diff --git a/crates/copilot/src/copilot_edit_prediction_delegate.rs b/crates/copilot/src/copilot_edit_prediction_delegate.rs index 6f69bc6bc7bea4ec31aa59262a4abc5640999a2e..e789a89df65daf45dd02a16d954b299307e0c62d 100644 --- a/crates/copilot/src/copilot_edit_prediction_delegate.rs +++ b/crates/copilot/src/copilot_edit_prediction_delegate.rs @@ -1045,7 +1045,7 @@ mod tests { }); executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); - assert!(copilot_requests.try_next().is_err()); + assert!(copilot_requests.try_recv().is_err()); _ = editor.update(cx, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { @@ -1055,7 +1055,7 @@ mod 
tests { }); executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); - assert!(copilot_requests.try_next().is_ok()); + assert!(copilot_requests.try_recv().is_ok()); } fn handle_copilot_completion_request( diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs index 7f05f4355bfaa218dbc26aab77d949b2146816d7..e00fd20ed5abdcd49dbe87510bfd8de54b60fce2 100644 --- a/crates/editor/src/hover_links.rs +++ b/crates/editor/src/hover_links.rs @@ -1166,7 +1166,7 @@ mod tests { }); cx.simulate_mouse_move(hover_point, None, Modifiers::secondary_key()); cx.background_executor.run_until_parked(); - assert!(requests.try_next().is_err()); + assert!(requests.try_recv().is_err()); cx.assert_editor_text_highlights( HighlightKey::HoveredLinkState, indoc! {" diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index e22d13b5fe5fd0bc64b6d95c52432437a41569f1..20e04a19a7891c5b8800b270a1c8d55720ce90ff 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -6581,7 +6581,7 @@ impl Repository { let state = RepositoryState::Local(state); let mut jobs = VecDeque::new(); loop { - while let Ok(Some(next_job)) = job_rx.try_next() { + while let Ok(next_job) = job_rx.try_recv() { jobs.push_back(next_job); } @@ -6617,7 +6617,7 @@ impl Repository { let state = RepositoryState::Remote(state); let mut jobs = VecDeque::new(); loop { - while let Ok(Some(next_job)) = job_rx.try_next() { + while let Ok(next_job) = job_rx.try_recv() { jobs.push_back(next_job); } diff --git a/crates/project/tests/integration/project_tests.rs b/crates/project/tests/integration/project_tests.rs index 6601b0744aa770917390e03b16ae93d3bc7f637f..ee5af024776839fde8965f875bf3d12630c1dad2 100644 --- a/crates/project/tests/integration/project_tests.rs +++ b/crates/project/tests/integration/project_tests.rs @@ -4448,7 +4448,7 @@ async fn test_definition(cx: &mut gpui::TestAppContext) { // Assert no new language server started cx.executor().run_until_parked(); - 
assert!(fake_servers.try_next().is_err()); + assert!(fake_servers.try_recv().is_err()); assert_eq!(definitions.len(), 1); let definition = definitions.pop().unwrap(); diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index 2ea386b85df21a72262b70eb7016028a49c2b8c0..1328805b50fe077e36d38b3290cb7936f24301f2 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -3406,17 +3406,15 @@ mod tests { assert_eq!(initial_location, ToolbarItemLocation::Secondary); - let mut events = cx.events(&search_bar); + let mut events = cx.events::(&search_bar); search_bar.update_in(cx, |search_bar, window, cx| { search_bar.dismiss(&Dismiss, window, cx); }); assert_eq!( - events.try_next().unwrap(), - Some(ToolbarItemEvent::ChangeLocation( - ToolbarItemLocation::Hidden - )) + events.try_recv().unwrap(), + (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::Hidden)) ); search_bar.update_in(cx, |search_bar, window, cx| { @@ -3424,10 +3422,8 @@ mod tests { }); assert_eq!( - events.try_next().unwrap(), - Some(ToolbarItemEvent::ChangeLocation( - ToolbarItemLocation::Secondary - )) + events.try_recv().unwrap(), + (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::Secondary)) ); } @@ -3442,17 +3438,15 @@ mod tests { assert_eq!(initial_location, ToolbarItemLocation::PrimaryLeft); - let mut events = cx.events(&search_bar); + let mut events = cx.events::(&search_bar); search_bar.update_in(cx, |search_bar, window, cx| { search_bar.dismiss(&Dismiss, window, cx); }); assert_eq!( - events.try_next().unwrap(), - Some(ToolbarItemEvent::ChangeLocation( - ToolbarItemLocation::PrimaryLeft - )) + events.try_recv().unwrap(), + (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::PrimaryLeft)) ); search_bar.update_in(cx, |search_bar, window, cx| { @@ -3460,10 +3454,8 @@ mod tests { }); assert_eq!( - events.try_next().unwrap(), - Some(ToolbarItemEvent::ChangeLocation( - ToolbarItemLocation::PrimaryLeft - )) + 
events.try_recv().unwrap(), + (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::PrimaryLeft)) ); } @@ -3482,17 +3474,15 @@ mod tests { assert_eq!(initial_location, ToolbarItemLocation::Hidden); - let mut events = cx.events(&search_bar); + let mut events = cx.events::(&search_bar); search_bar.update_in(cx, |search_bar, window, cx| { search_bar.dismiss(&Dismiss, window, cx); }); assert_eq!( - events.try_next().unwrap(), - Some(ToolbarItemEvent::ChangeLocation( - ToolbarItemLocation::Hidden - )) + events.try_recv().unwrap(), + (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::Hidden)) ); search_bar.update_in(cx, |search_bar, window, cx| { @@ -3500,10 +3490,8 @@ mod tests { }); assert_eq!( - events.try_next().unwrap(), - Some(ToolbarItemEvent::ChangeLocation( - ToolbarItemLocation::Secondary - )) + events.try_recv().unwrap(), + (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::Secondary)) ); } diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index ed104a534eba7707a04a60775ae08820c4f258b8..64647419e300357e360e3ac3f535d8bbcd076711 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -9,7 +9,7 @@ use crate::{ }; use anyhow::Result; use client::{Client, proto}; -use futures::{StreamExt, channel::mpsc}; +use futures::channel::mpsc; use gpui::{ Action, AnyElement, AnyEntity, AnyView, App, AppContext, Context, Entity, EntityId, EventEmitter, FocusHandle, Focusable, Font, Pixels, Point, Render, SharedString, Task, @@ -777,8 +777,8 @@ impl ItemHandle for Entity { send_follower_updates = Some(cx.spawn_in(window, { let pending_update = pending_update.clone(); async move |workspace, cx| { - while let Some(mut leader_id) = pending_update_rx.next().await { - while let Ok(Some(id)) = pending_update_rx.try_next() { + while let Ok(mut leader_id) = pending_update_rx.recv().await { + while let Ok(id) = pending_update_rx.try_recv() { leader_id = id; } diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 
902d147084ce42b34a34477593ecc755bc6aa7cc..b68f485d88b9ae183de494fb394d972231f21eed 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -867,9 +867,8 @@ fn main() { } match open_rx - .try_next() + .try_recv() .ok() - .flatten() .and_then(|request| OpenRequest::parse(request, cx).log_err()) { Some(request) => { diff --git a/crates/zed/src/visual_test_runner.rs b/crates/zed/src/visual_test_runner.rs index ad44ba4128b436597a74621694ae47c661f57bd1..f1ed73fe89f0980a2705631063dcf4efbbe84bfb 100644 --- a/crates/zed/src/visual_test_runner.rs +++ b/crates/zed/src/visual_test_runner.rs @@ -2071,7 +2071,7 @@ fn run_agent_thread_view_test( let mut tool_content: Vec = Vec::new(); let mut tool_locations: Vec = Vec::new(); - while let Ok(Some(event)) = event_receiver.try_next() { + while let Ok(event) = event_receiver.try_recv() { if let Ok(agent::ThreadEvent::ToolCallUpdate(acp_thread::ToolCallUpdate::UpdateFields( update, ))) = event From ea5a5729bfdcb4c662698764f160a4c85c93bf2a Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Sun, 5 Apr 2026 23:26:22 -0700 Subject: [PATCH 47/63] Explicitly restore multi-workspace's project groups and active workspace when restoring a window (#53217) This PR stops us from eagerly restoring multiple workspaces when re-opening a window. It also should make us reliably return to the right workspace, with the right groups in the sidebar. There is still more work needed on our workspace persistence, especially making remote workspaces behave more consistently with local workspaces with respect to the sidebar. We can tackle that in follow-up PRs. 
Release Notes: - N/A --- crates/workspace/src/multi_workspace.rs | 10 + crates/workspace/src/persistence.rs | 297 ++++++++++++++++++---- crates/workspace/src/persistence/model.rs | 17 +- crates/workspace/src/workspace.rs | 106 ++------ crates/zed/src/main.rs | 14 +- crates/zed/src/zed.rs | 128 ++++------ 6 files changed, 342 insertions(+), 230 deletions(-) diff --git a/crates/workspace/src/multi_workspace.rs b/crates/workspace/src/multi_workspace.rs index cb2640142442b458b60759547c783d87d9de8a10..dc6060b70a0eeeebc1168113c2c9eb1ba2ddd251 100644 --- a/crates/workspace/src/multi_workspace.rs +++ b/crates/workspace/src/multi_workspace.rs @@ -474,6 +474,16 @@ impl MultiWorkspace { self.project_group_keys.push(project_group_key); } + pub fn restore_project_group_keys(&mut self, keys: Vec) { + let mut restored = keys; + for existing_key in &self.project_group_keys { + if !restored.contains(existing_key) { + restored.push(existing_key.clone()); + } + } + self.project_group_keys = restored; + } + pub fn project_group_keys(&self) -> impl Iterator { self.project_group_keys.iter() } diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index d9e440eb151bf7e8fc24f328b6ba73dc416a7c12..644ff0282df216e79d6be24918d29b802e50a0e8 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -337,15 +337,20 @@ pub fn read_serialized_multi_workspaces( window_groups .into_iter() - .map(|group| { + .filter_map(|group| { let window_id = group.first().and_then(|sw| sw.window_id); let state = window_id .map(|wid| read_multi_workspace_state(wid, cx)) .unwrap_or_default(); - model::SerializedMultiWorkspace { - workspaces: group, + let active_workspace = state + .active_workspace_id + .and_then(|id| group.iter().position(|ws| ws.workspace_id == id)) + .or(Some(0)) + .and_then(|index| group.into_iter().nth(index))?; + Some(model::SerializedMultiWorkspace { + active_workspace, state, - } + }) }) .collect() } @@ -2488,11 +2493,20 @@ 
pub fn delete_unloaded_items( #[cfg(test)] mod tests { use super::*; - use crate::persistence::model::{ - SerializedItem, SerializedPane, SerializedPaneGroup, SerializedWorkspace, SessionWorkspace, + use crate::{ + multi_workspace::MultiWorkspace, + persistence::{ + model::{ + SerializedItem, SerializedPane, SerializedPaneGroup, SerializedWorkspace, + SessionWorkspace, + }, + read_multi_workspace_state, + }, }; - use gpui; + use feature_flags::FeatureFlagAppExt; + use gpui::AppContext as _; use pretty_assertions::assert_eq; + use project::{Project, ProjectGroupKey}; use remote::SshConnectionOptions; use serde_json::json; use std::{thread, time::Duration}; @@ -2507,12 +2521,6 @@ mod tests { #[gpui::test] async fn test_multi_workspace_serializes_on_add_and_remove(cx: &mut gpui::TestAppContext) { - use crate::multi_workspace::MultiWorkspace; - use crate::persistence::read_multi_workspace_state; - use feature_flags::FeatureFlagAppExt; - use gpui::AppContext as _; - use project::Project; - crate::tests::init_test(cx); cx.update(|cx| { @@ -4042,35 +4050,30 @@ mod tests { let results = cx.update(|cx| read_serialized_multi_workspaces(session_workspaces, cx)); - // Should produce 3 groups: window 10, window 20, and the orphan. + // Should produce 3 results: window 10, window 20, and the orphan. assert_eq!(results.len(), 3); - // Window 10 group: 2 workspaces, active_workspace_id = 2, sidebar open. + // Window 10: active_workspace_id = 2 picks workspace 2 (paths /b), sidebar open. let group_10 = &results[0]; - assert_eq!(group_10.workspaces.len(), 2); + assert_eq!(group_10.active_workspace.workspace_id, WorkspaceId(2)); assert_eq!(group_10.state.active_workspace_id, Some(WorkspaceId(2))); assert_eq!(group_10.state.sidebar_open, true); - // Window 20 group: 1 workspace, active_workspace_id = 3, sidebar closed. + // Window 20: active_workspace_id = 3 picks workspace 3 (paths /c), sidebar closed. 
let group_20 = &results[1]; - assert_eq!(group_20.workspaces.len(), 1); + assert_eq!(group_20.active_workspace.workspace_id, WorkspaceId(3)); assert_eq!(group_20.state.active_workspace_id, Some(WorkspaceId(3))); assert_eq!(group_20.state.sidebar_open, false); - // Orphan group: no window_id, so state is default. + // Orphan: no active_workspace_id, falls back to first workspace (id 4). let group_none = &results[2]; - assert_eq!(group_none.workspaces.len(), 1); + assert_eq!(group_none.active_workspace.workspace_id, WorkspaceId(4)); assert_eq!(group_none.state.active_workspace_id, None); assert_eq!(group_none.state.sidebar_open, false); } #[gpui::test] async fn test_flush_serialization_completes_before_quit(cx: &mut gpui::TestAppContext) { - use crate::multi_workspace::MultiWorkspace; - use feature_flags::FeatureFlagAppExt; - - use project::Project; - crate::tests::init_test(cx); cx.update(|cx| { @@ -4116,12 +4119,6 @@ mod tests { #[gpui::test] async fn test_create_workspace_serialization(cx: &mut gpui::TestAppContext) { - use crate::multi_workspace::MultiWorkspace; - use crate::persistence::read_multi_workspace_state; - use feature_flags::FeatureFlagAppExt; - - use project::Project; - crate::tests::init_test(cx); cx.update(|cx| { @@ -4179,11 +4176,6 @@ mod tests { #[gpui::test] async fn test_remove_workspace_clears_session_binding(cx: &mut gpui::TestAppContext) { - use crate::multi_workspace::MultiWorkspace; - use feature_flags::FeatureFlagAppExt; - use gpui::AppContext as _; - use project::Project; - crate::tests::init_test(cx); cx.update(|cx| { @@ -4271,11 +4263,6 @@ mod tests { #[gpui::test] async fn test_remove_workspace_not_restored_as_zombie(cx: &mut gpui::TestAppContext) { - use crate::multi_workspace::MultiWorkspace; - use feature_flags::FeatureFlagAppExt; - use gpui::AppContext as _; - use project::Project; - crate::tests::init_test(cx); cx.update(|cx| { @@ -4378,11 +4365,6 @@ mod tests { #[gpui::test] async fn 
test_pending_removal_tasks_drained_on_flush(cx: &mut gpui::TestAppContext) { - use crate::multi_workspace::MultiWorkspace; - use feature_flags::FeatureFlagAppExt; - use gpui::AppContext as _; - use project::Project; - crate::tests::init_test(cx); cx.update(|cx| { @@ -4484,10 +4466,6 @@ mod tests { #[gpui::test] async fn test_create_workspace_bounds_observer_uses_fresh_id(cx: &mut gpui::TestAppContext) { - use crate::multi_workspace::MultiWorkspace; - use feature_flags::FeatureFlagAppExt; - use project::Project; - crate::tests::init_test(cx); cx.update(|cx| { @@ -4545,10 +4523,6 @@ mod tests { #[gpui::test] async fn test_flush_serialization_writes_bounds(cx: &mut gpui::TestAppContext) { - use crate::multi_workspace::MultiWorkspace; - use feature_flags::FeatureFlagAppExt; - use project::Project; - crate::tests::init_test(cx); cx.update(|cx| { @@ -4704,4 +4678,219 @@ mod tests { assert_eq!(result[2].2.paths(), &[PathBuf::from("/plain-project")]); assert_eq!(result[2].0, WorkspaceId(4)); } + + #[gpui::test] + async fn test_restore_window_with_linked_worktree_and_multiple_project_groups( + cx: &mut gpui::TestAppContext, + ) { + crate::tests::init_test(cx); + + cx.update(|cx| { + cx.set_staff(true); + cx.update_flags(true, vec!["agent-v2".to_string()]); + }); + + let fs = fs::FakeFs::new(cx.executor()); + + // Main git repo at /repo + fs.insert_tree( + "/repo", + json!({ + ".git": { + "HEAD": "ref: refs/heads/main", + "worktrees": { + "feature": { + "commondir": "../../", + "HEAD": "ref: refs/heads/feature" + } + } + }, + "src": { "main.rs": "" } + }), + ) + .await; + + // Linked worktree checkout pointing back to /repo + fs.insert_tree( + "/worktree-feature", + json!({ + ".git": "gitdir: /repo/.git/worktrees/feature", + "src": { "lib.rs": "" } + }), + ) + .await; + + // --- Phase 1: Set up the original multi-workspace window --- + + let project_1 = Project::test(fs.clone(), ["/repo".as_ref()], cx).await; + let project_1_linked_worktree = + Project::test(fs.clone(), 
["/worktree-feature".as_ref()], cx).await; + + // Wait for git discovery to finish. + cx.run_until_parked(); + + // Create a second, unrelated project so we have two distinct project groups. + fs.insert_tree( + "/other-project", + json!({ + ".git": { "HEAD": "ref: refs/heads/main" }, + "readme.md": "" + }), + ) + .await; + let project_2 = Project::test(fs.clone(), ["/other-project".as_ref()], cx).await; + cx.run_until_parked(); + + // Create the MultiWorkspace with project_2, then add the main repo + // and its linked worktree. The linked worktree is added last and + // becomes the active workspace. + let (multi_workspace, cx) = cx + .add_window_view(|window, cx| MultiWorkspace::test_new(project_2.clone(), window, cx)); + + multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_1.clone(), window, cx); + }); + + let workspace_worktree = multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_1_linked_worktree.clone(), window, cx) + }); + + // Assign database IDs and set up session bindings so serialization + // writes real rows. + multi_workspace.update_in(cx, |mw, _, cx| { + for workspace in mw.workspaces() { + workspace.update(cx, |ws, _cx| { + ws.set_random_database_id(); + }); + } + }); + + // Flush serialization for each individual workspace (writes to SQLite) + // and for the MultiWorkspace (writes to KVP). 
+ let tasks = multi_workspace.update_in(cx, |mw, window, cx| { + let session_id = mw.workspace().read(cx).session_id(); + let window_id_u64 = window.window_handle().window_id().as_u64(); + + let mut tasks: Vec> = Vec::new(); + for workspace in mw.workspaces() { + tasks.push(workspace.update(cx, |ws, cx| ws.flush_serialization(window, cx))); + if let Some(db_id) = workspace.read(cx).database_id() { + let db = WorkspaceDb::global(cx); + let session_id = session_id.clone(); + tasks.push(cx.background_spawn(async move { + db.set_session_binding(db_id, session_id, Some(window_id_u64)) + .await + .log_err(); + })); + } + } + mw.serialize(cx); + tasks + }); + cx.run_until_parked(); + for task in tasks { + task.await; + } + cx.run_until_parked(); + + let active_db_id = workspace_worktree.read_with(cx, |ws, _| ws.database_id()); + assert!( + active_db_id.is_some(), + "Active workspace should have a database ID" + ); + + // --- Phase 2: Read back and verify the serialized state --- + + let session_id = multi_workspace + .read_with(cx, |mw, cx| mw.workspace().read(cx).session_id()) + .unwrap(); + let db = cx.update(|_, cx| WorkspaceDb::global(cx)); + let session_workspaces = db + .last_session_workspace_locations(&session_id, None, fs.as_ref()) + .await + .expect("should load session workspaces"); + assert!( + !session_workspaces.is_empty(), + "Should have at least one session workspace" + ); + + let multi_workspaces = + cx.update(|_, cx| read_serialized_multi_workspaces(session_workspaces, cx)); + assert_eq!( + multi_workspaces.len(), + 1, + "All workspaces share one window, so there should be exactly one multi-workspace" + ); + + let serialized = &multi_workspaces[0]; + assert_eq!( + serialized.active_workspace.workspace_id, + active_db_id.unwrap(), + ); + assert_eq!(serialized.state.project_group_keys.len(), 2,); + + // Verify the serialized project group keys round-trip back to the + // originals. 
+ let restored_keys: Vec = serialized + .state + .project_group_keys + .iter() + .cloned() + .map(Into::into) + .collect(); + let expected_keys = vec![ + ProjectGroupKey::new(None, PathList::new(&["/other-project"])), + ProjectGroupKey::new(None, PathList::new(&["/repo"])), + ]; + assert_eq!( + restored_keys, expected_keys, + "Deserialized project group keys should match the originals" + ); + + // --- Phase 3: Restore the window and verify the result --- + + let app_state = + multi_workspace.read_with(cx, |mw, cx| mw.workspace().read(cx).app_state().clone()); + + let serialized_mw = multi_workspaces.into_iter().next().unwrap(); + let restored_handle: gpui::WindowHandle = cx + .update(|_, cx| { + cx.spawn(async move |mut cx| { + crate::restore_multiworkspace(serialized_mw, app_state, &mut cx).await + }) + }) + .await + .expect("restore_multiworkspace should succeed"); + + cx.run_until_parked(); + + // The restored window should have the same project group keys. + let restored_keys: Vec = restored_handle + .read_with(cx, |mw: &MultiWorkspace, _cx| { + mw.project_group_keys().cloned().collect() + }) + .unwrap(); + assert_eq!( + restored_keys, expected_keys, + "Restored window should have the same project group keys as the original" + ); + + // The active workspace in the restored window should have the linked + // worktree paths. 
+ let active_paths: Vec = restored_handle + .read_with(cx, |mw: &MultiWorkspace, cx| { + mw.workspace() + .read(cx) + .root_paths(cx) + .into_iter() + .map(|p: Arc| p.to_path_buf()) + .collect() + }) + .unwrap(); + assert_eq!( + active_paths, + vec![PathBuf::from("/worktree-feature")], + "The restored active workspace should be the linked worktree project" + ); + } } diff --git a/crates/workspace/src/persistence/model.rs b/crates/workspace/src/persistence/model.rs index 61fe3bc4861d9ebb000681d8b4f887c3a45feebe..b50d82fff0b05c3511967dd65a9060e38ca4ca26 100644 --- a/crates/workspace/src/persistence/model.rs +++ b/crates/workspace/src/persistence/model.rs @@ -77,6 +77,17 @@ impl From for SerializedProjectGroupKey { } } +impl From for ProjectGroupKey { + fn from(value: SerializedProjectGroupKey) -> Self { + let path_list = PathList::deserialize(&value.path_list); + let host = match value.location { + SerializedWorkspaceLocation::Local => None, + SerializedWorkspaceLocation::Remote(opts) => Some(opts), + }; + ProjectGroupKey::new(host, path_list) + } +} + /// Per-window state for a MultiWorkspace, persisted to KVP. #[derive(Debug, Clone, Default, serde::Serialize, serde::Deserialize)] pub struct MultiWorkspaceState { @@ -88,11 +99,11 @@ pub struct MultiWorkspaceState { } /// The serialized state of a single MultiWorkspace window from a previous session: -/// all workspaces that shared the window, which one was active, and whether the -/// sidebar was open. +/// the active workspace to restore plus window-level state (project group keys, +/// sidebar). 
#[derive(Debug, Clone)] pub struct SerializedMultiWorkspace { - pub workspaces: Vec, + pub active_workspace: SessionWorkspace, pub state: MultiWorkspaceState, } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 1bf0d2bc4a09a2c6417ce2b35e46372d274c6161..10f8fa4e30178b5d9036ce4c59842944c3bcd501 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -84,8 +84,8 @@ use persistence::{SerializedWindowBounds, model::SerializedWorkspace}; pub use persistence::{ WorkspaceDb, delete_unloaded_items, model::{ - DockStructure, ItemId, SerializedMultiWorkspace, SerializedWorkspaceLocation, - SessionWorkspace, + DockStructure, ItemId, MultiWorkspaceState, SerializedMultiWorkspace, + SerializedWorkspaceLocation, SessionWorkspace, }, read_serialized_multi_workspaces, resolve_worktree_workspaces, }; @@ -8621,30 +8621,32 @@ pub async fn last_session_workspace_locations( .log_err() } -pub struct MultiWorkspaceRestoreResult { - pub window_handle: WindowHandle, - pub errors: Vec, -} - pub async fn restore_multiworkspace( multi_workspace: SerializedMultiWorkspace, app_state: Arc, cx: &mut AsyncApp, -) -> anyhow::Result { - let SerializedMultiWorkspace { workspaces, state } = multi_workspace; - let mut group_iter = workspaces.into_iter(); - let first = group_iter - .next() - .context("window group must not be empty")?; - - let window_handle = if first.paths.is_empty() { - cx.update(|cx| open_workspace_by_id(first.workspace_id, app_state.clone(), None, cx)) - .await? +) -> anyhow::Result> { + let SerializedMultiWorkspace { + active_workspace, + state, + } = multi_workspace; + let MultiWorkspaceState { + sidebar_open, + project_group_keys, + sidebar_state, + .. + } = state; + + let window_handle = if active_workspace.paths.is_empty() { + cx.update(|cx| { + open_workspace_by_id(active_workspace.workspace_id, app_state.clone(), None, cx) + }) + .await? } else { let OpenResult { window, .. 
} = cx .update(|cx| { Workspace::new_local( - first.paths.paths().to_vec(), + active_workspace.paths.paths().to_vec(), app_state.clone(), None, None, @@ -8657,65 +8659,17 @@ pub async fn restore_multiworkspace( window }; - let mut errors = Vec::new(); - - for session_workspace in group_iter { - let error = if session_workspace.paths.is_empty() { - cx.update(|cx| { - open_workspace_by_id( - session_workspace.workspace_id, - app_state.clone(), - Some(window_handle), - cx, - ) - }) - .await - .err() - } else { - cx.update(|cx| { - Workspace::new_local( - session_workspace.paths.paths().to_vec(), - app_state.clone(), - Some(window_handle), - None, - None, - OpenMode::Add, - cx, - ) - }) - .await - .err() - }; - - if let Some(error) = error { - errors.push(error); - } - } - - if let Some(target_id) = state.active_workspace_id { + if !project_group_keys.is_empty() { + let restored_keys: Vec = + project_group_keys.into_iter().map(Into::into).collect(); window_handle - .update(cx, |multi_workspace, window, cx| { - let target_index = multi_workspace - .workspaces() - .iter() - .position(|ws| ws.read(cx).database_id() == Some(target_id)); - let index = target_index.unwrap_or(0); - if let Some(workspace) = multi_workspace.workspaces().get(index).cloned() { - multi_workspace.activate(workspace, window, cx); - } - }) - .ok(); - } else { - window_handle - .update(cx, |multi_workspace, window, cx| { - if let Some(workspace) = multi_workspace.workspaces().first().cloned() { - multi_workspace.activate(workspace, window, cx); - } + .update(cx, |multi_workspace, _window, _cx| { + multi_workspace.restore_project_group_keys(restored_keys); }) .ok(); } - if state.sidebar_open { + if sidebar_open { window_handle .update(cx, |multi_workspace, _, cx| { multi_workspace.open_sidebar(cx); @@ -8723,8 +8677,7 @@ pub async fn restore_multiworkspace( .ok(); } - if let Some(sidebar_state) = &state.sidebar_state { - let sidebar_state = sidebar_state.clone(); + if let Some(sidebar_state) = 
sidebar_state { window_handle .update(cx, |multi_workspace, window, cx| { if let Some(sidebar) = multi_workspace.sidebar() { @@ -8741,10 +8694,7 @@ pub async fn restore_multiworkspace( }) .ok(); - Ok(MultiWorkspaceRestoreResult { - window_handle, - errors, - }) + Ok(window_handle) } actions!( diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index b68f485d88b9ae183de494fb394d972231f21eed..5937b91665b892084aa7b4d1f8b94ec1e2d864da 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -1363,16 +1363,10 @@ pub(crate) async fn restore_or_create_workspace( let mut tasks = Vec::new(); for multi_workspace in multi_workspaces { - match restore_multiworkspace(multi_workspace, app_state.clone(), cx).await { - Ok(result) => { - for error in result.errors { - log::error!("Failed to restore workspace in group: {error:#}"); - results.push(Err(error)); - } - } - Err(e) => { - results.push(Err(e)); - } + if let Err(error) = restore_multiworkspace(multi_workspace, app_state.clone(), cx).await + { + log::error!("Failed to restore workspace: {error:#}"); + results.push(Err(error)); } } diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 8d7759948fcabba7388a5c63e0bfa6710aa21f74..9b81ccf0e1c183363bbb170d71b7b3a1a5526085 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -5957,7 +5957,9 @@ mod tests { #[gpui::test] async fn test_multi_workspace_session_restore(cx: &mut TestAppContext) { use collections::HashMap; + use project::ProjectGroupKey; use session::Session; + use util::path_list::PathList; use workspace::{OpenMode, Workspace, WorkspaceId}; let app_state = init_test(cx); @@ -6117,94 +6119,50 @@ mod tests { .filter_map(|window| window.downcast::()) .collect() }); + assert_eq!(restored_windows.len(), 2,); + + // Identify restored windows by their active workspace root paths. 
+ let (restored_a, restored_b) = { + let (mut with_dir1, mut with_dir3) = (None, None); + for window in &restored_windows { + let active_paths = window + .read_with(cx, |mw, cx| mw.workspace().read(cx).root_paths(cx)) + .unwrap(); + if active_paths.iter().any(|p| p.as_ref() == Path::new(dir1)) { + with_dir1 = Some(window); + } else { + with_dir3 = Some(window); + } + } + ( + with_dir1.expect("expected a window with dir1 active"), + with_dir3.expect("expected a window with dir3 active"), + ) + }; - assert_eq!( - restored_windows.len(), - 2, - "expected 2 restored windows, got {}", - restored_windows.len() - ); - - let workspace_counts: Vec = restored_windows - .iter() - .map(|window| { - window - .read_with(cx, |multi_workspace, _| multi_workspace.workspaces().len()) - .unwrap() - }) - .collect(); - let mut sorted_counts = workspace_counts.clone(); - sorted_counts.sort(); - assert_eq!( - sorted_counts, - vec![1, 2], - "expected one window with 1 workspace and one with 2, got {workspace_counts:?}" - ); - - let dir1_path: Arc = Path::new(dir1).into(); - let dir2_path: Arc = Path::new(dir2).into(); - let dir3_path: Arc = Path::new(dir3).into(); - - let all_restored_paths: Vec>>> = restored_windows - .iter() - .map(|window| { - window - .read_with(cx, |multi_workspace, cx| { - multi_workspace - .workspaces() - .iter() - .map(|ws| ws.read(cx).root_paths(cx)) - .collect() - }) - .unwrap() + // Window A (dir1+dir2): 1 workspace restored, but 2 project group keys. 
+ restored_a + .read_with(cx, |mw, _| { + assert_eq!( + mw.project_group_keys().cloned().collect::>(), + vec![ + ProjectGroupKey::new(None, PathList::new(&[dir1])), + ProjectGroupKey::new(None, PathList::new(&[dir2])), + ] + ); + assert_eq!(mw.workspaces().len(), 1); }) - .collect(); - - let two_ws_window = all_restored_paths - .iter() - .find(|paths| paths.len() == 2) - .expect("expected a window with 2 workspaces"); - assert!( - two_ws_window.iter().any(|p| p.contains(&dir1_path)), - "2-workspace window should contain dir1, got {two_ws_window:?}" - ); - assert!( - two_ws_window.iter().any(|p| p.contains(&dir2_path)), - "2-workspace window should contain dir2, got {two_ws_window:?}" - ); - - let one_ws_window = all_restored_paths - .iter() - .find(|paths| paths.len() == 1) - .expect("expected a window with 1 workspace"); - assert!( - one_ws_window[0].contains(&dir3_path), - "1-workspace window should contain dir3, got {one_ws_window:?}" - ); - - // --- Verify the active workspace is preserved --- - for window in &restored_windows { - let (active_paths, workspace_count) = window - .read_with(cx, |multi_workspace, cx| { - let active = multi_workspace.workspace(); - ( - active.read(cx).root_paths(cx), - multi_workspace.workspaces().len(), - ) - }) - .unwrap(); + .unwrap(); - if workspace_count == 2 { - assert!( - active_paths.contains(&dir1_path), - "2-workspace window should have dir1 active, got {active_paths:?}" - ); - } else { - assert!( - active_paths.contains(&dir3_path), - "1-workspace window should have dir3 active, got {active_paths:?}" + // Window B (dir3): 1 workspace, 1 project group key. 
+ restored_b + .read_with(cx, |mw, _| { + assert_eq!( + mw.project_group_keys().cloned().collect::>(), + vec![ProjectGroupKey::new(None, PathList::new(&[dir3]))] ); - } - } + assert_eq!(mw.workspaces().len(), 1); + }) + .unwrap(); } } From a48bab76e8e20976eca3dc0f6de78db666efaf43 Mon Sep 17 00:00:00 2001 From: Smit Barmase Date: Mon, 6 Apr 2026 13:47:51 +0530 Subject: [PATCH 48/63] markdown: Fix horizontal rules and blockquotes not visible (#53223) Closes #53167 Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [ ] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - Fixed horizontal rules and blockquotes not being visible in the Markdown preview. --- crates/markdown/src/markdown.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crates/markdown/src/markdown.rs b/crates/markdown/src/markdown.rs index 52c5fd38ac84b259ca2b39b97a53a11c6dc75d03..871cf5848d9348f2301363b16c30a4811cf5c24e 100644 --- a/crates/markdown/src/markdown.rs +++ b/crates/markdown/src/markdown.rs @@ -154,6 +154,8 @@ impl MarkdownStyle { base_text_style: text_style.clone(), syntax: cx.theme().syntax().clone(), selection_background_color: colors.element_selection_background, + rule_color: colors.border, + block_quote_border_color: colors.border, code_block_overflow_x_scroll: true, heading_level_styles: Some(HeadingLevelStyles { h1: Some(TextStyleRefinement { From 24b041d48fdfcc10eecc87c777eacbdc06613670 Mon Sep 17 00:00:00 2001 From: AltCode Date: Mon, 6 Apr 2026 14:30:05 +0200 Subject: [PATCH 49/63] Add comment injections for GLSL and Proto (#53058) Release Notes: - N/A --- extensions/glsl/languages/glsl/injections.scm | 2 ++ extensions/proto/languages/proto/injections.scm | 2 ++ 2 files changed, 4 
insertions(+) create mode 100644 extensions/glsl/languages/glsl/injections.scm create mode 100644 extensions/proto/languages/proto/injections.scm diff --git a/extensions/glsl/languages/glsl/injections.scm b/extensions/glsl/languages/glsl/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..2f0e58eb6431515b86b6042e5828263341513e99 --- /dev/null +++ b/extensions/glsl/languages/glsl/injections.scm @@ -0,0 +1,2 @@ +((comment) @injection.content + (#set! injection.language "comment")) diff --git a/extensions/proto/languages/proto/injections.scm b/extensions/proto/languages/proto/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..2f0e58eb6431515b86b6042e5828263341513e99 --- /dev/null +++ b/extensions/proto/languages/proto/injections.scm @@ -0,0 +1,2 @@ +((comment) @injection.content + (#set! injection.language "comment")) From 9a967b4d5af94ec45bd36d2b96fe91ea0c5ba92d Mon Sep 17 00:00:00 2001 From: "zed-zippy[bot]" <234243425+zed-zippy[bot]@users.noreply.github.com> Date: Mon, 6 Apr 2026 12:45:25 +0000 Subject: [PATCH 50/63] glsl: Bump to v0.2.3 (#53234) This PR bumps the version of the GLSL extension to v0.2.3. 
Release Notes: - N/A Co-authored-by: zed-zippy[bot] <234243425+zed-zippy[bot]@users.noreply.github.com> --- Cargo.lock | 2 +- extensions/glsl/Cargo.toml | 2 +- extensions/glsl/extension.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fd3ddfc882edafa29722cee7b3dbf329caecdad4..c3440f3b55a4b404b52d81807ebfad4041db9caa 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -22242,7 +22242,7 @@ dependencies = [ [[package]] name = "zed_glsl" -version = "0.2.2" +version = "0.2.3" dependencies = [ "zed_extension_api 0.1.0", ] diff --git a/extensions/glsl/Cargo.toml b/extensions/glsl/Cargo.toml index 5d7b6ce941c14f68410ac33f825d0ee0b645d6b5..a02c93c0387424255fa32abf8fb027e2d923b809 100644 --- a/extensions/glsl/Cargo.toml +++ b/extensions/glsl/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_glsl" -version = "0.2.2" +version = "0.2.3" edition.workspace = true publish.workspace = true license = "Apache-2.0" diff --git a/extensions/glsl/extension.toml b/extensions/glsl/extension.toml index f866091b84674780e859407ebd893641a3a159ce..1fcc888ebdfc14b1fb94d136c99e2ef6b7008b94 100644 --- a/extensions/glsl/extension.toml +++ b/extensions/glsl/extension.toml @@ -1,7 +1,7 @@ id = "glsl" name = "GLSL" description = "GLSL support." -version = "0.2.2" +version = "0.2.3" schema_version = 1 authors = ["Mikayla Maki "] repository = "https://github.com/zed-industries/zed" From d0a61a42d9ffb606fbe4eea004afa23b2f9a1b0d Mon Sep 17 00:00:00 2001 From: "zed-zippy[bot]" <234243425+zed-zippy[bot]@users.noreply.github.com> Date: Mon, 6 Apr 2026 12:51:13 +0000 Subject: [PATCH 51/63] proto: Bump to v0.3.2 (#53235) This PR bumps the version of the Proto extension to v0.3.2. 
Release Notes: - N/A Co-authored-by: zed-zippy[bot] <234243425+zed-zippy[bot]@users.noreply.github.com> --- Cargo.lock | 2 +- extensions/proto/Cargo.toml | 2 +- extensions/proto/extension.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c3440f3b55a4b404b52d81807ebfad4041db9caa..d091e026ff3a6e0c27b477b26454b3ca47ae947b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -22256,7 +22256,7 @@ dependencies = [ [[package]] name = "zed_proto" -version = "0.3.1" +version = "0.3.2" dependencies = [ "zed_extension_api 0.7.0", ] diff --git a/extensions/proto/Cargo.toml b/extensions/proto/Cargo.toml index 68a524ed944b0db1fd75b9ec5ca5e0b1aa99e89f..5ca9720e25fb7cb115004d0de7c47e45d7e6252a 100644 --- a/extensions/proto/Cargo.toml +++ b/extensions/proto/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_proto" -version = "0.3.1" +version = "0.3.2" edition.workspace = true publish.workspace = true license = "Apache-2.0" diff --git a/extensions/proto/extension.toml b/extensions/proto/extension.toml index 70ebed1ca50635d9e818ce216920937a547b64c4..42985998e4dc934f9b6860ee0a5778a097d5723a 100644 --- a/extensions/proto/extension.toml +++ b/extensions/proto/extension.toml @@ -1,7 +1,7 @@ id = "proto" name = "Proto" description = "Protocol Buffers support." -version = "0.3.1" +version = "0.3.2" schema_version = 1 authors = ["Zed Industries "] repository = "https://github.com/zed-industries/zed" From 7e271711749cad0e1092c125d5f16bd22738bec6 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Mon, 6 Apr 2026 12:16:58 -0300 Subject: [PATCH 52/63] agent_ui: Fix label for image mentions (#52995) This PR fixes an issue where an image mention would have its label reset to just "Image", instead of persisting the original label, when the prompt got submitted. 
Closes #48564 Release Notes: - agent: Fixed image mention labels by persisting the file name after submitting the prompt - agent: Fixed directory mentions being incorrectly parsed as files when pasting into prompt editor --------- Co-authored-by: Bennet Bo Fenner --- crates/acp_thread/src/mention.rs | 39 ++++++++++++++++--- crates/agent/src/thread.rs | 2 +- .../src/conversation_view/thread_view.rs | 2 +- crates/agent_ui/src/mention_set.rs | 20 +++++++--- crates/agent_ui/src/message_editor.rs | 25 +++++++++--- crates/agent_ui/src/ui/mention_crease.rs | 2 +- 6 files changed, 69 insertions(+), 21 deletions(-) diff --git a/crates/acp_thread/src/mention.rs b/crates/acp_thread/src/mention.rs index 753838d3b98ed60dc02c3d9383c28fe4f848a29e..28038ecbc04c59d1c5107872210056f11b413141 100644 --- a/crates/acp_thread/src/mention.rs +++ b/crates/acp_thread/src/mention.rs @@ -19,7 +19,9 @@ pub enum MentionUri { File { abs_path: PathBuf, }, - PastedImage, + PastedImage { + name: String, + }, Directory { abs_path: PathBuf, }, @@ -155,7 +157,9 @@ impl MentionUri { include_warnings, }) } else if path.starts_with("/agent/pasted-image") { - Ok(Self::PastedImage) + let name = + single_query_param(&url, "name")?.unwrap_or_else(|| "Image".to_string()); + Ok(Self::PastedImage { name }) } else if path.starts_with("/agent/untitled-buffer") { let fragment = url .fragment() @@ -227,7 +231,7 @@ impl MentionUri { .unwrap_or_default() .to_string_lossy() .into_owned(), - MentionUri::PastedImage => "Image".to_string(), + MentionUri::PastedImage { name } => name.clone(), MentionUri::Symbol { name, .. } => name.clone(), MentionUri::Thread { name, .. } => name.clone(), MentionUri::Rule { name, .. } => name.clone(), @@ -296,7 +300,7 @@ impl MentionUri { MentionUri::File { abs_path } => { FileIcons::get_icon(abs_path, cx).unwrap_or_else(|| IconName::File.path().into()) } - MentionUri::PastedImage => IconName::Image.path().into(), + MentionUri::PastedImage { .. 
} => IconName::Image.path().into(), MentionUri::Directory { abs_path } => FileIcons::get_folder_icon(false, abs_path, cx) .unwrap_or_else(|| IconName::Folder.path().into()), MentionUri::Symbol { .. } => IconName::Code.path().into(), @@ -322,10 +326,18 @@ impl MentionUri { url.set_path(&abs_path.to_string_lossy()); url } - MentionUri::PastedImage => Url::parse("zed:///agent/pasted-image").unwrap(), + MentionUri::PastedImage { name } => { + let mut url = Url::parse("zed:///agent/pasted-image").unwrap(); + url.query_pairs_mut().append_pair("name", name); + url + } MentionUri::Directory { abs_path } => { let mut url = Url::parse("file:///").unwrap(); - url.set_path(&abs_path.to_string_lossy()); + let mut path = abs_path.to_string_lossy().into_owned(); + if !path.ends_with('/') && !path.ends_with('\\') { + path.push('/'); + } + url.set_path(&path); url } MentionUri::Symbol { @@ -490,6 +502,21 @@ mod tests { assert_eq!(uri.to_uri().to_string(), expected); } + #[test] + fn test_directory_uri_round_trip_without_trailing_slash() { + let uri = MentionUri::Directory { + abs_path: PathBuf::from(path!("/path/to/dir")), + }; + let serialized = uri.to_uri().to_string(); + assert!(serialized.ends_with('/'), "directory URI must end with /"); + let parsed = MentionUri::parse(&serialized, PathStyle::local()).unwrap(); + assert!( + matches!(parsed, MentionUri::Directory { .. }), + "expected Directory variant, got {:?}", + parsed + ); + } + #[test] fn test_parse_symbol_uri() { let symbol_uri = uri!("file:///path/to/file.rs?symbol=MySymbol#L10:20"); diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index b61df1b8af84d312d7f186fb85e5a1d04ab59dfd..bcb5b7b2d2f3eb8cffd5be8b70fc08fef8e9fe37 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -253,7 +253,7 @@ impl UserMessage { ) .ok(); } - MentionUri::PastedImage => { + MentionUri::PastedImage { .. 
} => { debug_panic!("pasted image URI should not be used in mention content") } MentionUri::Directory { .. } => { diff --git a/crates/agent_ui/src/conversation_view/thread_view.rs b/crates/agent_ui/src/conversation_view/thread_view.rs index 53e63268c51aa1aa5537a87b6055dea62ecd630e..886ac816c925067b6be6b4553361eb2425539ada 100644 --- a/crates/agent_ui/src/conversation_view/thread_view.rs +++ b/crates/agent_ui/src/conversation_view/thread_view.rs @@ -8819,7 +8819,7 @@ pub(crate) fn open_link( .open_path(path, None, true, window, cx) .detach_and_log_err(cx); } - MentionUri::PastedImage => {} + MentionUri::PastedImage { .. } => {} MentionUri::Directory { abs_path } => { let project = workspace.project(); let Some(entry_id) = project.update(cx, |project, cx| { diff --git a/crates/agent_ui/src/mention_set.rs b/crates/agent_ui/src/mention_set.rs index 4db856f9dd1e512a7b8b43eadcefccc22fe50188..1b2ec0ad2fd460b4eec5a8b757bdd3058d4a3704 100644 --- a/crates/agent_ui/src/mention_set.rs +++ b/crates/agent_ui/src/mention_set.rs @@ -154,7 +154,7 @@ impl MentionSet { MentionUri::Selection { abs_path: None, .. } => Task::ready(Err(anyhow!( "Untitled buffer selection mentions are not supported for paste" ))), - MentionUri::PastedImage + MentionUri::PastedImage { .. } | MentionUri::TerminalSelection { .. } | MentionUri::MergeConflict { .. } => { Task::ready(Err(anyhow!("Unsupported mention URI type for paste"))) @@ -283,7 +283,7 @@ impl MentionSet { include_errors, include_warnings, } => self.confirm_mention_for_diagnostics(include_errors, include_warnings, cx), - MentionUri::PastedImage => { + MentionUri::PastedImage { .. 
} => { debug_panic!("pasted image URI should not be included in completions"); Task::ready(Err(anyhow!( "pasted imaged URI should not be included in completions" @@ -739,9 +739,11 @@ pub(crate) async fn insert_images_as_context( return; } - let replacement_text = MentionUri::PastedImage.as_link().to_string(); - for (image, name) in images { + let mention_uri = MentionUri::PastedImage { + name: name.to_string(), + }; + let replacement_text = mention_uri.as_link().to_string(); let Some((text_anchor, multibuffer_anchor)) = editor .update_in(cx, |editor, window, cx| { let snapshot = editor.snapshot(window, cx); @@ -804,7 +806,13 @@ pub(crate) async fn insert_images_as_context( .shared(); mention_set.update(cx, |mention_set, _cx| { - mention_set.insert_mention(crease_id, MentionUri::PastedImage, task.clone()) + mention_set.insert_mention( + crease_id, + MentionUri::PastedImage { + name: name.to_string(), + }, + task.clone(), + ) }); if task @@ -873,7 +881,7 @@ pub(crate) fn paste_images_as_context( Some(window.spawn(cx, async move |mut cx| { use itertools::Itertools; - let default_name: SharedString = MentionUri::PastedImage.name().into(); + let default_name: SharedString = "Image".into(); let (mut images, paths): (Vec<(gpui::Image, SharedString)>, Vec<_>) = clipboard .into_entries() .filter_map(|entry| match entry { diff --git a/crates/agent_ui/src/message_editor.rs b/crates/agent_ui/src/message_editor.rs index 8660e792cd23bc418b1d2c204bfafb2a81ba48df..0f59441ab27b5074a710c46a683e72d003a8d5d7 100644 --- a/crates/agent_ui/src/message_editor.rs +++ b/crates/agent_ui/src/message_editor.rs @@ -261,7 +261,7 @@ async fn resolve_pasted_context_items( ) -> (Vec, Vec>) { let mut items = Vec::new(); let mut added_worktrees = Vec::new(); - let default_image_name: SharedString = MentionUri::PastedImage.name().into(); + let default_image_name: SharedString = "Image".into(); for entry in entries { match entry { @@ -812,7 +812,9 @@ impl MessageEditor { ) .uri(match uri { 
MentionUri::File { .. } => Some(uri.to_uri().to_string()), - MentionUri::PastedImage => None, + MentionUri::PastedImage { .. } => { + Some(uri.to_uri().to_string()) + } other => { debug_panic!( "unexpected mention uri for image: {:?}", @@ -1638,7 +1640,9 @@ impl MessageEditor { let mention_uri = if let Some(uri) = uri { MentionUri::parse(&uri, path_style) } else { - Ok(MentionUri::PastedImage) + Ok(MentionUri::PastedImage { + name: "Image".to_string(), + }) }; let Some(mention_uri) = mention_uri.log_err() else { continue; @@ -4074,6 +4078,11 @@ mod tests { &mut cx, ); + let image_name = temporary_image_path + .file_name() + .and_then(|n| n.to_str()) + .unwrap_or("Image") + .to_string(); std::fs::remove_file(&temporary_image_path).expect("remove temp png"); let expected_file_uri = MentionUri::File { @@ -4081,12 +4090,16 @@ mod tests { } .to_uri() .to_string(); - let expected_image_uri = MentionUri::PastedImage.to_uri().to_string(); + let expected_image_uri = MentionUri::PastedImage { + name: image_name.clone(), + } + .to_uri() + .to_string(); editor.update(&mut cx, |editor, cx| { assert_eq!( editor.text(cx), - format!("[@Image]({expected_image_uri}) [@file.txt]({expected_file_uri}) ") + format!("[@{image_name}]({expected_image_uri}) [@file.txt]({expected_file_uri}) ") ); }); @@ -4094,7 +4107,7 @@ mod tests { assert_eq!(contents.len(), 2); assert!(contents.iter().any(|(uri, mention)| { - *uri == MentionUri::PastedImage && matches!(mention, Mention::Image(_)) + matches!(uri, MentionUri::PastedImage { .. 
}) && matches!(mention, Mention::Image(_)) })); assert!(contents.iter().any(|(uri, mention)| { *uri == MentionUri::File { diff --git a/crates/agent_ui/src/ui/mention_crease.rs b/crates/agent_ui/src/ui/mention_crease.rs index 6e99647304d93fe91cd6b91dbd2bf3bfd82c7ab0..bd48a558f5d9b1f042f974dc6e174f8ba8078adf 100644 --- a/crates/agent_ui/src/ui/mention_crease.rs +++ b/crates/agent_ui/src/ui/mention_crease.rs @@ -184,7 +184,7 @@ fn open_mention_uri( MentionUri::Fetch { url } => { cx.open_url(url.as_str()); } - MentionUri::PastedImage + MentionUri::PastedImage { .. } | MentionUri::Selection { abs_path: None, .. } | MentionUri::Diagnostics { .. } | MentionUri::TerminalSelection { .. } From 733857b74692522798c88e5d45537cdcac7247b5 Mon Sep 17 00:00:00 2001 From: Finn Eitreim <48069764+feitreim@users.noreply.github.com> Date: Mon, 6 Apr 2026 11:53:13 -0400 Subject: [PATCH 53/63] repl: Use uv to install ipykernel for uv-managed venv (#51897) ## Context Closes #51874 the repl is able to recognize that the venv is managed by uv, but still runs `python -m pip install ipykernel`, despite this not working. this PR fixes that behavior and uses uv to install ipkernel. ## How to Review Added a path that uses uv to install ipykernel in repl_editor.rs Added a function to repl_store.rs that allows updating the venv as having ipykernel installed after installing it. 
## Videos Old Behavior: https://github.com/user-attachments/assets/9de81cc9-cd78-4570-ad57-550f5ecabffa New Behavior: https://github.com/user-attachments/assets/391f54c7-ae67-4d85-8f4f-9d87ddc8db63 ## Self-Review Checklist - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - repl: Fixed installing ipykernel with uv managed environements --- crates/repl/src/kernels/mod.rs | 7 +++++++ crates/repl/src/repl_editor.rs | 30 +++++++++++++++++++++++++----- crates/repl/src/repl_store.rs | 21 +++++++++++++++++++-- 3 files changed, 51 insertions(+), 7 deletions(-) diff --git a/crates/repl/src/kernels/mod.rs b/crates/repl/src/kernels/mod.rs index 6745bcd9b6a08cb34b3a0fc3b8219918cb4f0dca..9f08876cd39f4b7441d8c97bd1d5344b944b09ff 100644 --- a/crates/repl/src/kernels/mod.rs +++ b/crates/repl/src/kernels/mod.rs @@ -177,6 +177,13 @@ impl PythonEnvKernelSpecification { kernelspec: self.kernelspec.clone(), } } + + pub fn is_uv(&self) -> bool { + matches!( + self.environment_kind.as_deref(), + Some("uv" | "uv (Workspace)") + ) + } } #[derive(Debug, Clone, PartialEq, Eq)] diff --git a/crates/repl/src/repl_editor.rs b/crates/repl/src/repl_editor.rs index cf1493000edb5881bff412224f7e44dbfbf88b25..61bed513a16c3b9baf885714110c3de78a7094d5 100644 --- a/crates/repl/src/repl_editor.rs +++ b/crates/repl/src/repl_editor.rs @@ -87,6 +87,7 @@ pub fn install_ipykernel_and_assign( let python_path = env_spec.path.clone(); let env_name = env_spec.name.clone(); + let is_uv = env_spec.is_uv(); let env_spec = env_spec.clone(); struct IpykernelInstall; @@ -109,11 +110,25 @@ pub fn install_ipykernel_and_assign( let window_handle = window.window_handle(); let install_task = 
cx.background_spawn(async move { - let output = util::command::new_command(python_path.to_string_lossy().as_ref()) - .args(&["-m", "pip", "install", "ipykernel"]) - .output() - .await - .context("failed to run pip install ipykernel")?; + let output = if is_uv { + util::command::new_command("uv") + .args(&[ + "pip", + "install", + "ipykernel", + "--python", + &python_path.to_string_lossy(), + ]) + .output() + .await + .context("failed to run uv pip install ipykernel")? + } else { + util::command::new_command(python_path.to_string_lossy().as_ref()) + .args(&["-m", "pip", "install", "ipykernel"]) + .output() + .await + .context("failed to run pip install ipykernel")? + }; if output.status.success() { anyhow::Ok(()) @@ -146,6 +161,11 @@ pub fn install_ipykernel_and_assign( window_handle .update(cx, |_, window, cx| { + let store = ReplStore::global(cx); + store.update(cx, |store, cx| { + store.mark_ipykernel_installed(cx, &env_spec); + }); + let updated_spec = KernelSpecification::PythonEnv(PythonEnvKernelSpecification { has_ipykernel: true, diff --git a/crates/repl/src/repl_store.rs b/crates/repl/src/repl_store.rs index cf992a542830bd86c1a9ad8b1909501417f427fd..4c5827b7c0cf881725b2937cc0aef0b7e241f0f3 100644 --- a/crates/repl/src/repl_store.rs +++ b/crates/repl/src/repl_store.rs @@ -13,8 +13,8 @@ use settings::{Settings, SettingsStore}; use util::rel_path::RelPath; use crate::kernels::{ - Kernel, list_remote_kernelspecs, local_kernel_specifications, python_env_kernel_specifications, - wsl_kernel_specifications, + Kernel, PythonEnvKernelSpecification, list_remote_kernelspecs, local_kernel_specifications, + python_env_kernel_specifications, wsl_kernel_specifications, }; use crate::{JupyterSettings, KernelSpecification, Session}; @@ -136,6 +136,23 @@ impl ReplStore { cx.notify(); } + pub fn mark_ipykernel_installed( + &mut self, + cx: &mut Context, + spec: &PythonEnvKernelSpecification, + ) { + for specs in self.kernel_specifications_for_worktree.values_mut() { + for 
kernel_spec in specs.iter_mut() { + if let KernelSpecification::PythonEnv(env_spec) = kernel_spec { + if env_spec == spec { + env_spec.has_ipykernel = true; + } + } + } + } + cx.notify(); + } + pub fn refresh_python_kernelspecs( &mut self, worktree_id: WorktreeId, From 91fc544a03b79a7680d52c931d63d379f4d42bf7 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Mon, 6 Apr 2026 13:01:26 -0300 Subject: [PATCH 54/63] Display agent-powered merge conflict resolution in the status bar (#53033) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Follow up to https://github.com/zed-industries/zed/pull/49807 Previously, when there were multiple conflicts across the codebase, we would pop a toast at the bottom right corner of the UI. A toast seemed like a functional idea because it'd be visible from any state of the app and thus it'd be a good place to expose the button that allows you to quickly prompt the agent to resolve all conflicts, as opposed to creating a thread for each individual one. However, the toast was met with some negative (and correct) feedback, mostly because it is interruptive, and thus can sometimes block very relevant surfaces, like either the agent panel itself or the Git commit area. Therefore, in this PR, I'm removing the toast and adding a button in the status bar instead; a bit more minimal, not interruptive, and a common place for other items that might require your attention. The status bar can be quite busy these days, though; we can display diagnostics, LSP status, and file names in there; conscious of that. But it felt like it could work given this button is such a transient one that you can either easily manually dismiss or wait for it to be auto-dismissed as you or the agent resolves the merge conflicts. 
Screenshot 2026-04-02 at 9  15@2x Release Notes: - Git: Improved how we surface the affordance to resolve codebase-wide merge conflicts with the agent in the UI. - Agent: Added a setting to control whether or not the button to resolve merge conflicts with the agent should be displayed. --- assets/settings/default.json | 5 + crates/agent/src/tool_permissions.rs | 1 + crates/agent_settings/src/agent_settings.rs | 2 + crates/agent_ui/src/agent_ui.rs | 1 + .../src/conversation_view/thread_view.rs | 24 +- crates/git_ui/src/conflict_view.rs | 248 ++++++++++++------ crates/git_ui/src/git_ui.rs | 3 +- crates/settings_content/src/agent.rs | 5 + crates/settings_ui/src/page_data.rs | 18 ++ crates/workspace/src/workspace.rs | 5 - crates/zed/src/zed.rs | 3 + 11 files changed, 212 insertions(+), 103 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index e9d21eb0dcc18ae939a41e3415b93eaeba1e4546..5e1eb0e68d2f8a17f89422597aa29b99516333e8 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -1144,6 +1144,11 @@ // // Default: false "show_turn_stats": false, + // Whether to show the merge conflict indicator in the status bar + // that offers to resolve conflicts using the agent. + // + // Default: true + "show_merge_conflict_indicator": true, }, // Whether the screen sharing icon is shown in the os status bar. 
"show_call_status_icon": true, diff --git a/crates/agent/src/tool_permissions.rs b/crates/agent/src/tool_permissions.rs index c67942e5cd3769f814fad62f7311bf7967f3317a..58e779da59aef176464839ed6f2d6a5c16e4bc12 100644 --- a/crates/agent/src/tool_permissions.rs +++ b/crates/agent/src/tool_permissions.rs @@ -595,6 +595,7 @@ mod tests { message_editor_min_lines: 1, tool_permissions, show_turn_stats: false, + show_merge_conflict_indicator: true, new_thread_location: Default::default(), sidebar_side: Default::default(), thinking_display: Default::default(), diff --git a/crates/agent_settings/src/agent_settings.rs b/crates/agent_settings/src/agent_settings.rs index f0730d39eee17cbd544e5ba8574b30f03963c524..0c68d2f25d54f966d1cc0a93476457bbba79c959 100644 --- a/crates/agent_settings/src/agent_settings.rs +++ b/crates/agent_settings/src/agent_settings.rs @@ -176,6 +176,7 @@ pub struct AgentSettings { pub use_modifier_to_send: bool, pub message_editor_min_lines: usize, pub show_turn_stats: bool, + pub show_merge_conflict_indicator: bool, pub tool_permissions: ToolPermissions, pub new_thread_location: NewThreadLocation, } @@ -629,6 +630,7 @@ impl Settings for AgentSettings { use_modifier_to_send: agent.use_modifier_to_send.unwrap(), message_editor_min_lines: agent.message_editor_min_lines.unwrap(), show_turn_stats: agent.show_turn_stats.unwrap(), + show_merge_conflict_indicator: agent.show_merge_conflict_indicator.unwrap(), tool_permissions: compile_tool_permissions(agent.tool_permissions), new_thread_location: agent.new_thread_location.unwrap_or_default(), } diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index e58c7eb3526cc1a53d7b8e6d449e968a5923425a..5cff5bfc38d4512d659d919c6e7c4ff02fcc0caf 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -734,6 +734,7 @@ mod tests { message_editor_min_lines: 1, tool_permissions: Default::default(), show_turn_stats: false, + show_merge_conflict_indicator: true, 
new_thread_location: Default::default(), sidebar_side: Default::default(), thinking_display: Default::default(), diff --git a/crates/agent_ui/src/conversation_view/thread_view.rs b/crates/agent_ui/src/conversation_view/thread_view.rs index 886ac816c925067b6be6b4553361eb2425539ada..25af09832f3473aa690c7b205e1b56bab86e9709 100644 --- a/crates/agent_ui/src/conversation_view/thread_view.rs +++ b/crates/agent_ui/src/conversation_view/thread_view.rs @@ -816,13 +816,10 @@ impl ThreadView { } } })); - if self.parent_id.is_none() { - self.suppress_merge_conflict_notification(cx); - } generation } - pub fn stop_turn(&mut self, generation: usize, cx: &mut Context) { + pub fn stop_turn(&mut self, generation: usize, _cx: &mut Context) { if self.turn_fields.turn_generation != generation { return; } @@ -833,25 +830,6 @@ impl ThreadView { .map(|started| started.elapsed()); self.turn_fields.last_turn_tokens = self.turn_fields.turn_tokens.take(); self.turn_fields._turn_timer_task = None; - if self.parent_id.is_none() { - self.unsuppress_merge_conflict_notification(cx); - } - } - - fn suppress_merge_conflict_notification(&self, cx: &mut Context) { - self.workspace - .update(cx, |workspace, cx| { - workspace.suppress_notification(&workspace::merge_conflict_notification_id(), cx); - }) - .ok(); - } - - fn unsuppress_merge_conflict_notification(&self, cx: &mut Context) { - self.workspace - .update(cx, |workspace, _cx| { - workspace.unsuppress(workspace::merge_conflict_notification_id()); - }) - .ok(); } pub fn update_turn_tokens(&mut self, cx: &App) { diff --git a/crates/git_ui/src/conflict_view.rs b/crates/git_ui/src/conflict_view.rs index 95d46676a80ebca3b2db1ba1d7c88edee32df9ea..25175dce48163778615c26a585cd8a6319c1735f 100644 --- a/crates/git_ui/src/conflict_view.rs +++ b/crates/git_ui/src/conflict_view.rs @@ -6,19 +6,19 @@ use editor::{ display_map::{BlockContext, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId}, }; use gpui::{ - App, Context, DismissEvent, Entity, 
InteractiveElement as _, ParentElement as _, Subscription, - Task, WeakEntity, + App, ClickEvent, Context, Empty, Entity, InteractiveElement as _, ParentElement as _, + Subscription, Task, WeakEntity, }; use language::{Anchor, Buffer, BufferId}; use project::{ ConflictRegion, ConflictSet, ConflictSetUpdate, Project, ProjectItem as _, - git_store::{GitStoreEvent, RepositoryEvent}, + git_store::{GitStore, GitStoreEvent, RepositoryEvent}, }; use settings::Settings; -use std::{cell::RefCell, ops::Range, rc::Rc, sync::Arc}; -use ui::{ActiveTheme, Divider, Element as _, Styled, Window, prelude::*}; +use std::{ops::Range, sync::Arc}; +use ui::{ButtonLike, Divider, Tooltip, prelude::*}; use util::{ResultExt as _, debug_panic, maybe}; -use workspace::{Workspace, notifications::simple_message_notification::MessageNotification}; +use workspace::{StatusItemView, Workspace, item::ItemHandle}; use zed_actions::agent::{ ConflictContent, ResolveConflictedFilesWithAgent, ResolveConflictsWithAgent, }; @@ -433,74 +433,6 @@ fn collect_conflicted_file_paths(project: &Project, cx: &App) -> Vec { paths } -pub(crate) fn register_conflict_notification( - workspace: &mut Workspace, - cx: &mut Context, -) { - let git_store = workspace.project().read(cx).git_store().clone(); - - let last_shown_paths: Rc>> = Rc::new(RefCell::new(HashSet::default())); - - cx.subscribe(&git_store, move |workspace, _git_store, event, cx| { - let conflicts_changed = matches!( - event, - GitStoreEvent::ConflictsUpdated - | GitStoreEvent::RepositoryUpdated(_, RepositoryEvent::StatusesChanged, _) - ); - if !AgentSettings::get_global(cx).enabled(cx) || !conflicts_changed { - return; - } - let project = workspace.project().read(cx); - if project.is_via_collab() { - return; - } - - if workspace.is_notification_suppressed(workspace::merge_conflict_notification_id()) { - return; - } - - let paths = collect_conflicted_file_paths(project, cx); - let notification_id = workspace::merge_conflict_notification_id(); - let 
current_paths_set: HashSet = paths.iter().cloned().collect(); - - if paths.is_empty() { - last_shown_paths.borrow_mut().clear(); - workspace.dismiss_notification(¬ification_id, cx); - } else if *last_shown_paths.borrow() != current_paths_set { - // Only show the notification if the set of conflicted paths has changed. - // This prevents re-showing after the user dismisses it while working on the same conflicts. - *last_shown_paths.borrow_mut() = current_paths_set; - let file_count = paths.len(); - workspace.show_notification(notification_id, cx, |cx| { - cx.new(|cx| { - let message = format!( - "{file_count} file{} have unresolved merge conflicts", - if file_count == 1 { "" } else { "s" } - ); - - MessageNotification::new(message, cx) - .primary_message("Resolve with Agent") - .primary_icon(IconName::ZedAssistant) - .primary_icon_color(Color::Muted) - .primary_on_click({ - let paths = paths.clone(); - move |window, cx| { - window.dispatch_action( - Box::new(ResolveConflictedFilesWithAgent { - conflicted_file_paths: paths.clone(), - }), - cx, - ); - cx.emit(DismissEvent); - } - }) - }) - }); - } - }) - .detach(); -} - pub(crate) fn resolve_conflict( editor: WeakEntity, resolved_conflict: ConflictRegion, @@ -573,3 +505,171 @@ pub(crate) fn resolve_conflict( } }) } + +pub struct MergeConflictIndicator { + project: Entity, + conflicted_paths: Vec, + last_shown_paths: HashSet, + dismissed: bool, + _subscription: Subscription, +} + +impl MergeConflictIndicator { + pub fn new(workspace: &Workspace, cx: &mut Context) -> Self { + let project = workspace.project().clone(); + let git_store = project.read(cx).git_store().clone(); + + let subscription = cx.subscribe(&git_store, Self::on_git_store_event); + + let conflicted_paths = collect_conflicted_file_paths(project.read(cx), cx); + let last_shown_paths: HashSet = conflicted_paths.iter().cloned().collect(); + + Self { + project, + conflicted_paths, + last_shown_paths, + dismissed: false, + _subscription: subscription, + } + } 
+ + fn on_git_store_event( + &mut self, + _git_store: Entity, + event: &GitStoreEvent, + cx: &mut Context, + ) { + let conflicts_changed = matches!( + event, + GitStoreEvent::ConflictsUpdated + | GitStoreEvent::RepositoryUpdated(_, RepositoryEvent::StatusesChanged, _) + ); + + let agent_settings = AgentSettings::get_global(cx); + if !agent_settings.enabled(cx) + || !agent_settings.show_merge_conflict_indicator + || !conflicts_changed + { + return; + } + + let project = self.project.read(cx); + if project.is_via_collab() { + return; + } + + let paths = collect_conflicted_file_paths(project, cx); + let current_paths_set: HashSet = paths.iter().cloned().collect(); + + if paths.is_empty() { + self.conflicted_paths.clear(); + self.last_shown_paths.clear(); + self.dismissed = false; + cx.notify(); + } else if self.last_shown_paths != current_paths_set { + self.last_shown_paths = current_paths_set; + self.conflicted_paths = paths; + self.dismissed = false; + cx.notify(); + } + } + + fn resolve_with_agent(&mut self, window: &mut Window, cx: &mut Context) { + window.dispatch_action( + Box::new(ResolveConflictedFilesWithAgent { + conflicted_file_paths: self.conflicted_paths.clone(), + }), + cx, + ); + self.dismissed = true; + cx.notify(); + } + + fn dismiss(&mut self, _: &ClickEvent, _window: &mut Window, cx: &mut Context) { + self.dismissed = true; + cx.notify(); + } +} + +impl Render for MergeConflictIndicator { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + let agent_settings = AgentSettings::get_global(cx); + if !agent_settings.enabled(cx) + || !agent_settings.show_merge_conflict_indicator + || self.conflicted_paths.is_empty() + || self.dismissed + { + return Empty.into_any_element(); + } + + let file_count = self.conflicted_paths.len(); + + let message: SharedString = format!( + "Resolve Merge Conflict{} with Agent", + if file_count == 1 { "" } else { "s" } + ) + .into(); + + let tooltip_label: SharedString = format!( + "Found {} 
{} across the codebase", + file_count, + if file_count == 1 { + "conflict" + } else { + "conflicts" + } + ) + .into(); + + let border_color = cx.theme().colors().text_accent.opacity(0.2); + + h_flex() + .h(rems_from_px(22.)) + .rounded_sm() + .border_1() + .border_color(border_color) + .child( + ButtonLike::new("update-button") + .child( + h_flex() + .h_full() + .gap_1() + .child( + Icon::new(IconName::GitMergeConflict) + .size(IconSize::Small) + .color(Color::Muted), + ) + .child(Label::new(message).size(LabelSize::Small)), + ) + .tooltip(move |_, cx| { + Tooltip::with_meta( + tooltip_label.clone(), + None, + "Click to Resolve with Agent", + cx, + ) + }) + .on_click(cx.listener(|this, _, window, cx| { + this.resolve_with_agent(window, cx); + })), + ) + .child( + div().border_l_1().border_color(border_color).child( + IconButton::new("dismiss-merge-conflicts", IconName::Close) + .icon_size(IconSize::XSmall) + .on_click(cx.listener(Self::dismiss)), + ), + ) + .into_any_element() + } +} + +impl StatusItemView for MergeConflictIndicator { + fn set_active_pane_item( + &mut self, + _: Option<&dyn ItemHandle>, + _window: &mut Window, + _: &mut Context, + ) { + } +} diff --git a/crates/git_ui/src/git_ui.rs b/crates/git_ui/src/git_ui.rs index e12e9142d081c5f083a1f9ba414d7099776f327d..7d73760e34d1b2923a247f71b04fc8b5218f380b 100644 --- a/crates/git_ui/src/git_ui.rs +++ b/crates/git_ui/src/git_ui.rs @@ -47,6 +47,8 @@ pub mod stash_picker; pub mod text_diff_view; pub mod worktree_picker; +pub use conflict_view::MergeConflictIndicator; + pub fn init(cx: &mut App) { editor::set_blame_renderer(blame_ui::GitBlameRenderer, cx); commit_view::init(cx); @@ -62,7 +64,6 @@ pub fn init(cx: &mut App) { git_panel::register(workspace); repository_selector::register(workspace); git_picker::register(workspace); - conflict_view::register_conflict_notification(workspace, cx); let project = workspace.project().read(cx); if project.is_read_only(cx) { diff --git 
a/crates/settings_content/src/agent.rs b/crates/settings_content/src/agent.rs index 7ec6a6b5bbdee57cbe75c13d1abe5277ac4f1825..5b1b3c014f8c538cb0dff506e05d84a80dc863d1 100644 --- a/crates/settings_content/src/agent.rs +++ b/crates/settings_content/src/agent.rs @@ -209,6 +209,11 @@ pub struct AgentSettingsContent { /// /// Default: false pub show_turn_stats: Option, + /// Whether to show the merge conflict indicator in the status bar + /// that offers to resolve conflicts using the agent. + /// + /// Default: true + pub show_merge_conflict_indicator: Option, /// Per-tool permission rules for granular control over which tool actions /// require confirmation. /// diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index 828a574115c4664b3ab2f37f32ad4087363b3978..bacfd227d83933d3ebd9b2d8836bbe19958acf2b 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -7516,6 +7516,24 @@ fn ai_page(cx: &App) -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Show Merge Conflict Indicator", + description: "Whether to show the merge conflict indicator in the status bar that offers to resolve conflicts using the agent.", + field: Box::new(SettingField { + json_path: Some("agent.show_merge_conflict_indicator"), + pick: |settings_content| { + settings_content.agent.as_ref()?.show_merge_conflict_indicator.as_ref() + }, + write: |settings_content, value| { + settings_content + .agent + .get_or_insert_default() + .show_merge_conflict_indicator = value; + }, + }), + metadata: None, + files: USER, + }), ]); items.into_boxed_slice() diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 10f8fa4e30178b5d9036ce4c59842944c3bcd501..6a5e9a3318e576054a9533c7ab92f86fc10e1a66 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -7693,11 +7693,6 @@ impl GlobalAnyActiveCall { } } -pub fn 
merge_conflict_notification_id() -> NotificationId { - struct MergeConflictNotification; - NotificationId::unique::() -} - /// Workspace-local view of a remote participant's location. #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum ParticipantLocation { diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 9b81ccf0e1c183363bbb170d71b7b3a1a5526085..795fd12a6c73d9576095b6cd4a26cdd5577e6000 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -502,12 +502,15 @@ pub fn initialize_workspace(app_state: Arc, cx: &mut App) { cx.new(|_| go_to_line::cursor_position::CursorPosition::new(workspace)); let line_ending_indicator = cx.new(|_| line_ending_selector::LineEndingIndicator::default()); + let merge_conflict_indicator = + cx.new(|cx| git_ui::MergeConflictIndicator::new(workspace, cx)); workspace.status_bar().update(cx, |status_bar, cx| { status_bar.add_left_item(search_button, window, cx); status_bar.add_left_item(lsp_button, window, cx); status_bar.add_left_item(diagnostic_summary, window, cx); status_bar.add_left_item(active_file_name, window, cx); status_bar.add_left_item(activity_indicator, window, cx); + status_bar.add_left_item(merge_conflict_indicator, window, cx); status_bar.add_right_item(edit_prediction_ui, window, cx); status_bar.add_right_item(active_buffer_encoding, window, cx); status_bar.add_right_item(active_buffer_language, window, cx); From c9d799e5e686436e63a15f8b19939705f87f84d4 Mon Sep 17 00:00:00 2001 From: KyleBarton Date: Mon, 6 Apr 2026 09:01:51 -0700 Subject: [PATCH 55/63] Ensure updateUID gets run for docker-compose and plain images (#53106) Dev Containers should run a script which updates the remote UID of the image user, so that files are still accessible. This was being run incorrectly (on the Docker-compose side) or not at all (in the case of a plain dev container image). 
This change fixes this Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #53081 Release Notes: - Fixed dev container behavior for configs which use images without a dockerfile --- crates/dev_container/src/devcontainer_json.rs | 7 - .../src/devcontainer_manifest.rs | 277 ++++++++++++++++-- crates/dev_container/src/docker.rs | 15 + 3 files changed, 263 insertions(+), 36 deletions(-) diff --git a/crates/dev_container/src/devcontainer_json.rs b/crates/dev_container/src/devcontainer_json.rs index f034026a8de4c4a6c3186c97870e25f3510ebc85..de970674a4d4ae7b9b583b924addd433d8a03073 100644 --- a/crates/dev_container/src/devcontainer_json.rs +++ b/crates/dev_container/src/devcontainer_json.rs @@ -257,13 +257,6 @@ impl DevContainer { } return DevContainerBuildType::None; } - - pub(crate) fn has_features(&self) -> bool { - self.features - .as_ref() - .map(|features| !features.is_empty()) - .unwrap_or(false) - } } // Custom deserializer that parses the entire customizations object as a diff --git a/crates/dev_container/src/devcontainer_manifest.rs b/crates/dev_container/src/devcontainer_manifest.rs index 0ba7e8c82a036477103e18db0940f8950fb875d2..d28014bffff146ece8cc69f63753ecf5f82a33ea 100644 --- a/crates/dev_container/src/devcontainer_manifest.rs +++ b/crates/dev_container/src/devcontainer_manifest.rs @@ -317,13 +317,6 @@ impl DevContainerManifest { let root_image_tag = self.get_base_image_from_config().await?; let root_image = self.docker_client.inspect(&root_image_tag).await?; - if dev_container.build_type() == DevContainerBuildType::Image - && !dev_container.has_features() - { - log::debug!("No resources to download. 
Proceeding with just the image"); - return Ok(()); - } - let temp_base = std::env::temp_dir().join("devcontainer-zed"); let timestamp = std::time::SystemTime::now() .duration_since(std::time::UNIX_EPOCH) @@ -701,10 +694,29 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true } let dev_container = self.dev_container(); match dev_container.build_type() { - DevContainerBuildType::Image | DevContainerBuildType::Dockerfile => { + DevContainerBuildType::Image => { + let built_docker_image = self.build_docker_image().await?; + let Some(base_image) = dev_container.image.as_ref() else { + log::error!("Dev container is using and image which can't be referenced"); + return Err(DevContainerError::DevContainerParseFailed); + }; + let built_docker_image = self + .update_remote_user_uid(built_docker_image, base_image) + .await?; + + let resources = self.build_merged_resources(built_docker_image)?; + Ok(DevContainerBuildResources::Docker(resources)) + } + DevContainerBuildType::Dockerfile => { let built_docker_image = self.build_docker_image().await?; + let Some(features_build_info) = &self.features_build_info else { + log::error!( + "Can't attempt to build update UID dockerfile before initial docker build" + ); + return Err(DevContainerError::DevContainerParseFailed); + }; let built_docker_image = self - .update_remote_user_uid(built_docker_image, None) + .update_remote_user_uid(built_docker_image, &features_build_info.image_tag) .await?; let resources = self.build_merged_resources(built_docker_image)?; @@ -816,7 +828,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true let (main_service_name, main_service) = find_primary_service(&docker_compose_resources, self)?; - let built_service_image = if main_service + let (built_service_image, built_service_image_tag) = if main_service .build .as_ref() .map(|b| b.dockerfile.as_ref()) @@ -905,16 +917,19 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' 
/etc/profile || true self.docker_client .docker_compose_build(&docker_compose_resources.files, &self.project_name()) .await?; - self.docker_client - .inspect(&features_build_info.image_tag) - .await? + ( + self.docker_client + .inspect(&features_build_info.image_tag) + .await?, + &features_build_info.image_tag, + ) } else if let Some(image) = &main_service.image { if dev_container .features .as_ref() .is_none_or(|features| features.is_empty()) { - self.docker_client.inspect(image).await? + (self.docker_client.inspect(image).await?, image) } else { if !supports_buildkit { self.build_feature_content_image().await?; @@ -994,9 +1009,12 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true .docker_compose_build(&docker_compose_resources.files, &self.project_name()) .await?; - self.docker_client - .inspect(&features_build_info.image_tag) - .await? + ( + self.docker_client + .inspect(&features_build_info.image_tag) + .await?, + &features_build_info.image_tag, + ) } } else { log::error!("Docker compose must have either image or dockerfile defined"); @@ -1004,7 +1022,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true }; let built_service_image = self - .update_remote_user_uid(built_service_image, Some(&features_build_info.image_tag)) + .update_remote_user_uid(built_service_image, built_service_image_tag) .await?; let resources = self.build_merged_resources(built_service_image)?; @@ -1312,7 +1330,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true async fn update_remote_user_uid( &self, image: DockerInspect, - _override_tag: Option<&str>, + _base_image: &str, ) -> Result { Ok(image) } @@ -1320,7 +1338,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true async fn update_remote_user_uid( &self, image: DockerInspect, - override_tag: Option<&str>, + base_image: &str, ) -> Result { let dev_container = self.dev_container(); @@ -1394,18 +1412,13 @@ RUN 
sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true DevContainerError::FilesystemError })?; - let updated_image_tag = override_tag - .map(|t| t.to_string()) - .unwrap_or_else(|| format!("{}-uid", features_build_info.image_tag)); + let updated_image_tag = format!("{}-uid", features_build_info.image_tag); let mut command = Command::new(self.docker_client.docker_cli()); command.args(["build"]); command.args(["-f", &dockerfile_path.display().to_string()]); command.args(["-t", &updated_image_tag]); - command.args([ - "--build-arg", - &format!("BASE_IMAGE={}", features_build_info.image_tag), - ]); + command.args(["--build-arg", &format!("BASE_IMAGE={}", base_image)]); command.args(["--build-arg", &format!("REMOTE_USER={}", remote_user)]); command.args(["--build-arg", &format!("NEW_UID={}", host_uid)]); command.args(["--build-arg", &format!("NEW_GID={}", host_gid)]); @@ -2384,6 +2397,8 @@ mod test { use serde_json_lenient::Value; use util::{command::Command, paths::SanitizedPath}; + #[cfg(not(target_os = "windows"))] + use crate::docker::DockerComposeServicePort; use crate::{ DevContainerConfig, DevContainerContext, command_json::CommandRunner, @@ -3311,8 +3326,6 @@ chmod +x ./install.sh #[cfg(not(target_os = "windows"))] #[gpui::test] async fn test_spawns_devcontainer_with_docker_compose(cx: &mut TestAppContext) { - use crate::docker::DockerComposeServicePort; - cx.executor().allow_parking(); env_logger::try_init().ok(); let given_devcontainer_contents = r#" @@ -4296,6 +4309,175 @@ chmod +x ./install.sh })) } + #[cfg(not(target_os = "windows"))] + #[gpui::test] + async fn test_spawns_devcontainer_with_plain_image(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + env_logger::try_init().ok(); + let given_devcontainer_contents = r#" + { + "name": "cli-${devcontainerId}", + "image": "test_image:latest", + } + "#; + + let (test_dependencies, mut devcontainer_manifest) = + init_default_devcontainer_manifest(cx, given_devcontainer_contents) 
+ .await + .unwrap(); + + devcontainer_manifest.parse_nonremote_vars().unwrap(); + + let _devcontainer_up = devcontainer_manifest.build_and_run().await.unwrap(); + + let files = test_dependencies.fs.files(); + let uid_dockerfile = files + .iter() + .find(|f| { + f.file_name() + .is_some_and(|s| s.display().to_string() == "updateUID.Dockerfile") + }) + .expect("to be found"); + let uid_dockerfile = test_dependencies.fs.load(uid_dockerfile).await.unwrap(); + + assert_eq!( + &uid_dockerfile, + r#"ARG BASE_IMAGE +FROM $BASE_IMAGE + +USER root + +ARG REMOTE_USER +ARG NEW_UID +ARG NEW_GID +SHELL ["/bin/sh", "-c"] +RUN eval $(sed -n "s/${REMOTE_USER}:[^:]*:\([^:]*\):\([^:]*\):[^:]*:\([^:]*\).*/OLD_UID=\1;OLD_GID=\2;HOME_FOLDER=\3/p" /etc/passwd); \ + eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_UID}:.*/EXISTING_USER=\1/p" /etc/passwd); \ + eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_GID}:.*/EXISTING_GROUP=\1/p" /etc/group); \ + if [ -z "$OLD_UID" ]; then \ + echo "Remote user not found in /etc/passwd ($REMOTE_USER)."; \ + elif [ "$OLD_UID" = "$NEW_UID" -a "$OLD_GID" = "$NEW_GID" ]; then \ + echo "UIDs and GIDs are the same ($NEW_UID:$NEW_GID)."; \ + elif [ "$OLD_UID" != "$NEW_UID" -a -n "$EXISTING_USER" ]; then \ + echo "User with UID exists ($EXISTING_USER=$NEW_UID)."; \ + else \ + if [ "$OLD_GID" != "$NEW_GID" -a -n "$EXISTING_GROUP" ]; then \ + FREE_GID=65532; \ + while grep -q ":[^:]*:${FREE_GID}:" /etc/group; do FREE_GID=$((FREE_GID - 1)); done; \ + echo "Reassigning group $EXISTING_GROUP from GID $NEW_GID to $FREE_GID."; \ + sed -i -e "s/\(${EXISTING_GROUP}:[^:]*:\)${NEW_GID}:/\1${FREE_GID}:/" /etc/group; \ + fi; \ + echo "Updating UID:GID from $OLD_UID:$OLD_GID to $NEW_UID:$NEW_GID."; \ + sed -i -e "s/\(${REMOTE_USER}:[^:]*:\)[^:]*:[^:]*/\1${NEW_UID}:${NEW_GID}/" /etc/passwd; \ + if [ "$OLD_GID" != "$NEW_GID" ]; then \ + sed -i -e "s/\([^:]*:[^:]*:\)${OLD_GID}:/\1${NEW_GID}:/" /etc/group; \ + fi; \ + chown -R $NEW_UID:$NEW_GID $HOME_FOLDER; \ + fi; + +ARG IMAGE_USER +USER 
$IMAGE_USER + +# Ensure that /etc/profile does not clobber the existing path +RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true +"# + ); + } + + #[cfg(not(target_os = "windows"))] + #[gpui::test] + async fn test_spawns_devcontainer_with_docker_compose_and_plain_image(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + env_logger::try_init().ok(); + let given_devcontainer_contents = r#" + { + "name": "cli-${devcontainerId}", + "dockerComposeFile": "docker-compose-plain.yml", + "service": "app", + } + "#; + + let (test_dependencies, mut devcontainer_manifest) = + init_default_devcontainer_manifest(cx, given_devcontainer_contents) + .await + .unwrap(); + + test_dependencies + .fs + .atomic_write( + PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/docker-compose-plain.yml"), + r#" +services: + app: + image: test_image:latest + command: sleep infinity + volumes: + - ..:/workspace:cached + "# + .trim() + .to_string(), + ) + .await + .unwrap(); + + devcontainer_manifest.parse_nonremote_vars().unwrap(); + + let _devcontainer_up = devcontainer_manifest.build_and_run().await.unwrap(); + + let files = test_dependencies.fs.files(); + let uid_dockerfile = files + .iter() + .find(|f| { + f.file_name() + .is_some_and(|s| s.display().to_string() == "updateUID.Dockerfile") + }) + .expect("to be found"); + let uid_dockerfile = test_dependencies.fs.load(uid_dockerfile).await.unwrap(); + + assert_eq!( + &uid_dockerfile, + r#"ARG BASE_IMAGE +FROM $BASE_IMAGE + +USER root + +ARG REMOTE_USER +ARG NEW_UID +ARG NEW_GID +SHELL ["/bin/sh", "-c"] +RUN eval $(sed -n "s/${REMOTE_USER}:[^:]*:\([^:]*\):\([^:]*\):[^:]*:\([^:]*\).*/OLD_UID=\1;OLD_GID=\2;HOME_FOLDER=\3/p" /etc/passwd); \ + eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_UID}:.*/EXISTING_USER=\1/p" /etc/passwd); \ + eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_GID}:.*/EXISTING_GROUP=\1/p" /etc/group); \ + if [ -z "$OLD_UID" ]; then \ + echo "Remote user not found in /etc/passwd ($REMOTE_USER)."; \ + elif [ 
"$OLD_UID" = "$NEW_UID" -a "$OLD_GID" = "$NEW_GID" ]; then \ + echo "UIDs and GIDs are the same ($NEW_UID:$NEW_GID)."; \ + elif [ "$OLD_UID" != "$NEW_UID" -a -n "$EXISTING_USER" ]; then \ + echo "User with UID exists ($EXISTING_USER=$NEW_UID)."; \ + else \ + if [ "$OLD_GID" != "$NEW_GID" -a -n "$EXISTING_GROUP" ]; then \ + FREE_GID=65532; \ + while grep -q ":[^:]*:${FREE_GID}:" /etc/group; do FREE_GID=$((FREE_GID - 1)); done; \ + echo "Reassigning group $EXISTING_GROUP from GID $NEW_GID to $FREE_GID."; \ + sed -i -e "s/\(${EXISTING_GROUP}:[^:]*:\)${NEW_GID}:/\1${FREE_GID}:/" /etc/group; \ + fi; \ + echo "Updating UID:GID from $OLD_UID:$OLD_GID to $NEW_UID:$NEW_GID."; \ + sed -i -e "s/\(${REMOTE_USER}:[^:]*:\)[^:]*:[^:]*/\1${NEW_UID}:${NEW_GID}/" /etc/passwd; \ + if [ "$OLD_GID" != "$NEW_GID" ]; then \ + sed -i -e "s/\([^:]*:[^:]*:\)${OLD_GID}:/\1${NEW_GID}:/" /etc/group; \ + fi; \ + chown -R $NEW_UID:$NEW_GID $HOME_FOLDER; \ + fi; + +ARG IMAGE_USER +USER $IMAGE_USER + +# Ensure that /etc/profile does not clobber the existing path +RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true +"# + ); + } + pub(crate) struct RecordedExecCommand { pub(crate) _container_id: String, pub(crate) _remote_folder: String, @@ -4418,6 +4600,24 @@ chmod +x ./install.sh state: None, }); } + if id == "test_image:latest" { + return Ok(DockerInspect { + id: "sha256:610e6cfca95280188b021774f8cf69dd6f49bdb6eebc34c5ee2010f4d51cc104" + .to_string(), + config: DockerInspectConfig { + labels: DockerConfigLabels { + metadata: Some(vec![HashMap::from([( + "remoteUser".to_string(), + Value::String("node".to_string()), + )])]), + }, + env: Vec::new(), + image_user: Some("root".to_string()), + }, + mounts: None, + state: None, + }); + } Err(DevContainerError::DockerNotAvailable) } @@ -4472,6 +4672,25 @@ chmod +x ./install.sh )]), })); } + if config_files.len() == 1 + && config_files.get(0) + == Some(&PathBuf::from( + 
"/path/to/local/project/.devcontainer/docker-compose-plain.yml", + )) + { + return Ok(Some(DockerComposeConfig { + name: None, + services: HashMap::from([( + "app".to_string(), + DockerComposeService { + image: Some("test_image:latest".to_string()), + command: vec!["sleep".to_string(), "infinity".to_string()], + ..Default::default() + }, + )]), + ..Default::default() + })); + } Err(DevContainerError::DockerNotAvailable) } async fn docker_compose_build( diff --git a/crates/dev_container/src/docker.rs b/crates/dev_container/src/docker.rs index 9320ec360968425cf85644e96b12c1d089c1f05f..e0eecd1da0e2b1749ffcf60fa67cdbef273fda12 100644 --- a/crates/dev_container/src/docker.rs +++ b/crates/dev_container/src/docker.rs @@ -149,6 +149,12 @@ pub(crate) struct DockerComposeService { pub(crate) ports: Vec, #[serde(skip_serializing_if = "Option::is_none")] pub(crate) network_mode: Option, + #[serde( + default, + skip_serializing_if = "Vec::is_empty", + deserialize_with = "deserialize_nullable_vec" + )] + pub(crate) command: Vec, } #[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq, Default)] @@ -459,6 +465,14 @@ where deserializer.deserialize_any(LabelsVisitor) } +fn deserialize_nullable_vec<'de, D, T>(deserializer: D) -> Result, D::Error> +where + D: Deserializer<'de>, + T: Deserialize<'de>, +{ + Option::>::deserialize(deserializer).map(|opt| opt.unwrap_or_default()) +} + fn deserialize_nullable_labels<'de, D>(deserializer: D) -> Result where D: Deserializer<'de>, @@ -987,6 +1001,7 @@ mod test { ( "app".to_string(), DockerComposeService { + command: vec!["sleep".to_string(), "infinity".to_string()], image: Some( "mcr.microsoft.com/devcontainers/rust:2-1-bookworm".to_string(), ), From fd4d8444cfe6afc1476780020bf43a985e2fb321 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ahmet=20Kaan=20G=C3=BCm=C3=BC=C5=9F?= Date: Mon, 6 Apr 2026 19:42:20 +0300 Subject: [PATCH 56/63] markdown_preview: Add search support to markdown preview (#52502) MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit Context The markdown preview had no search functionality — pressing Ctrl+F did nothing. This PR implements the SearchableItem trait for MarkdownPreviewView, enabling in-pane text search with match highlighting and navigation. Changes span four crates: - project: Added SearchQuery::search_str() — a synchronous method to search plain &str text, since the existing search() only works on BufferSnapshot. - markdown: Added search highlight storage to the Markdown entity and paint_search_highlights to MarkdownElement. Extracted the existing selection painting into a reusable paint_highlight_range helper to avoid duplicating quad-painting logic. - markdown_preview: Implemented SearchableItem with full match navigation, active match tracking, and proper SearchEvent emission matching Editor behavior. - Keymaps: Added buffer_search::Deploy bindings to the MarkdownPreview context on all three platforms. The PR hopefully Closes https://github.com/zed-industries/zed/issues/27154 How to Review 1. crates/project/src/search.rs — search_str method at the end of impl SearchQuery. Handles both Text (AhoCorasick) and Regex variants with whole-word and multiline support. 2. crates/markdown/src/markdown.rs — Three areas: - New fields and methods on Markdown struct (~line 264, 512-548) - paint_highlight_range extraction and paint_search_highlights (~line 1059-1170) - The single-line addition in Element::paint (~line 2003) 3. crates/markdown_preview/src/markdown_preview_view.rs — The main change. Focus on: - SearchEvent::MatchesInvalidated emission in schedule_markdown_update (line 384) - EventEmitter and as_searchable (lines 723, 748-754) - The SearchableItem impl (lines 779-927), especially active_match_index which computes position from old highlights to handle query changes correctly 4. Keymap files — Two lines each for Linux/Windows, one for macOS. 
Self-Review Checklist - [ x ] I've reviewed my own diff for quality, security, and reliability - [ x ] Unsafe blocks (if any) have justifying comments (no unsafe) - [ x ] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) (should be :smile: ) - [ - ] Tests cover the new/changed behavior (not sure) - [ - ] Performance impact has been considered and is acceptable (I'm not sure about it and it would be nice to see experienced people to test) Release Notes: - Added search support (Ctrl+F / Cmd+F) to the markdown preview --------- Co-authored-by: Conrad Irwin --- Cargo.lock | 1 + assets/keymaps/default-linux.json | 2 + assets/keymaps/default-macos.json | 1 + assets/keymaps/default-windows.json | 2 + assets/keymaps/vim.json | 1 + crates/debugger_tools/src/dap_log.rs | 1 + crates/editor/src/items.rs | 2 + crates/language_tools/src/lsp_log_view.rs | 1 + crates/markdown/src/markdown.rs | 109 ++++++++++-- crates/markdown_preview/Cargo.toml | 1 + .../src/markdown_preview_view.rs | 155 +++++++++++++++++- crates/project/src/search.rs | 52 ++++++ crates/search/src/buffer_search.rs | 19 ++- crates/terminal_view/src/terminal_view.rs | 1 + crates/workspace/src/searchable.rs | 2 + 15 files changed, 330 insertions(+), 20 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d091e026ff3a6e0c27b477b26454b3ca47ae947b..279fcec10f1efb4c3174bfdd8e28192cda2f6a0c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10159,6 +10159,7 @@ dependencies = [ "language", "log", "markdown", + "project", "settings", "tempfile", "theme_settings", diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 4930fbea84b2b449f3b5c35fee2a390525cb3551..0beabfcbc555a336ad75424fb4079e5d4a867b89 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -1275,6 +1275,8 @@ "alt-down": "markdown::ScrollDownByItem", "ctrl-home": "markdown::ScrollToTop", "ctrl-end": 
"markdown::ScrollToBottom", + "find": "buffer_search::Deploy", + "ctrl-f": "buffer_search::Deploy", }, }, { diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 85c01bb33b54c30a55b5d046d03eb391d8c058c1..c514a8fbfc71f7b2b62e017b940790a39cf59db7 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -1375,6 +1375,7 @@ "alt-down": "markdown::ScrollDownByItem", "cmd-up": "markdown::ScrollToTop", "cmd-down": "markdown::ScrollToBottom", + "cmd-f": "buffer_search::Deploy", }, }, { diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index 0705717062ab5015de20cc3b93f651f867b5116d..a9eb3933423ff60fe60ac391b12773ce7146fb0d 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -1300,6 +1300,8 @@ "alt-down": "markdown::ScrollDownByItem", "ctrl-home": "markdown::ScrollToTop", "ctrl-end": "markdown::ScrollToBottom", + "find": "buffer_search::Deploy", + "ctrl-f": "buffer_search::Deploy", }, }, { diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 1a7e7bf77248b6f863d4a6dbc1e268b4c5ae3576..220b44ff537ffa791b23c0c5b7d86b6768d74dc2 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -1096,6 +1096,7 @@ "ctrl-e": "markdown::ScrollDown", "g g": "markdown::ScrollToTop", "shift-g": "markdown::ScrollToBottom", + "/": "buffer_search::Deploy", }, }, { diff --git a/crates/debugger_tools/src/dap_log.rs b/crates/debugger_tools/src/dap_log.rs index 6a6ac706ecd7e4e3e7369afe503652b9756b6dec..2c653217716b0218cff0b60eb2bce4ac1ce02e5d 100644 --- a/crates/debugger_tools/src/dap_log.rs +++ b/crates/debugger_tools/src/dap_log.rs @@ -1086,6 +1086,7 @@ impl SearchableItem for DapLogView { // DAP log is read-only. 
replacement: false, selection: false, + select_all: true, } } fn active_match_index( diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 28e920c28bd9854a38a5019622248fa79cd0a8e1..d2c157014330cc26f0024ace87ee0e3688f85eaa 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -1630,6 +1630,7 @@ impl SearchableItem for Editor { regex: true, replacement: false, selection: false, + select_all: true, find_in_results: true, } } else { @@ -1639,6 +1640,7 @@ impl SearchableItem for Editor { regex: true, replacement: true, selection: true, + select_all: true, find_in_results: false, } } diff --git a/crates/language_tools/src/lsp_log_view.rs b/crates/language_tools/src/lsp_log_view.rs index ff1ec56b41ccf12ce6e497c21439aea5c97c3d39..97f0676d250cac2cee54b307e7c07d894d3d3128 100644 --- a/crates/language_tools/src/lsp_log_view.rs +++ b/crates/language_tools/src/lsp_log_view.rs @@ -880,6 +880,7 @@ impl SearchableItem for LspLogView { // LSP log is read-only. replacement: false, selection: false, + select_all: true, } } fn active_match_index( diff --git a/crates/markdown/src/markdown.rs b/crates/markdown/src/markdown.rs index 871cf5848d9348f2301363b16c30a4811cf5c24e..247c082d223005a7e0bd6d57696751ce76cc4d86 100644 --- a/crates/markdown/src/markdown.rs +++ b/crates/markdown/src/markdown.rs @@ -263,6 +263,8 @@ pub struct Markdown { copied_code_blocks: HashSet, code_block_scroll_handles: BTreeMap, context_menu_selected_text: Option, + search_highlights: Vec>, + active_search_highlight: Option, } #[derive(Clone, Copy, Default)] @@ -430,6 +432,8 @@ impl Markdown { copied_code_blocks: HashSet::default(), code_block_scroll_handles: BTreeMap::default(), context_menu_selected_text: None, + search_highlights: Vec::new(), + active_search_highlight: None, }; this.parse(cx); this @@ -541,6 +545,8 @@ impl Markdown { self.autoscroll_request = None; self.pending_parse = None; self.should_reparse = false; + self.search_highlights.clear(); + 
self.active_search_highlight = None; // Don't clear parsed_markdown here - keep existing content visible until new parse completes self.parse(cx); } @@ -576,6 +582,40 @@ impl Markdown { } } + pub fn set_search_highlights( + &mut self, + highlights: Vec>, + active: Option, + cx: &mut Context, + ) { + self.search_highlights = highlights; + self.active_search_highlight = active; + cx.notify(); + } + + pub fn clear_search_highlights(&mut self, cx: &mut Context) { + if !self.search_highlights.is_empty() || self.active_search_highlight.is_some() { + self.search_highlights.clear(); + self.active_search_highlight = None; + cx.notify(); + } + } + + pub fn set_active_search_highlight(&mut self, active: Option, cx: &mut Context) { + if self.active_search_highlight != active { + self.active_search_highlight = active; + cx.notify(); + } + } + + pub fn search_highlights(&self) -> &[Range] { + &self.search_highlights + } + + pub fn active_search_highlight(&self) -> Option { + self.active_search_highlight + } + fn copy(&self, text: &RenderedText, _: &mut Window, cx: &mut Context) { if self.selection.end <= self.selection.start { return; @@ -1084,18 +1124,18 @@ impl MarkdownElement { builder.pop_div(); } - fn paint_selection( - &self, + fn paint_highlight_range( bounds: Bounds, + start: usize, + end: usize, + color: Hsla, rendered_text: &RenderedText, window: &mut Window, - cx: &mut App, ) { - let selection = self.markdown.read(cx).selection.clone(); - let selection_start = rendered_text.position_for_source_index(selection.start); - let selection_end = rendered_text.position_for_source_index(selection.end); + let start_pos = rendered_text.position_for_source_index(start); + let end_pos = rendered_text.position_for_source_index(end); if let Some(((start_position, start_line_height), (end_position, end_line_height))) = - selection_start.zip(selection_end) + start_pos.zip(end_pos) { if start_position.y == end_position.y { window.paint_quad(quad( @@ -1104,7 +1144,7 @@ impl 
MarkdownElement { point(end_position.x, end_position.y + end_line_height), ), Pixels::ZERO, - self.style.selection_background_color, + color, Edges::default(), Hsla::transparent_black(), BorderStyle::default(), @@ -1116,7 +1156,7 @@ impl MarkdownElement { point(bounds.right(), start_position.y + start_line_height), ), Pixels::ZERO, - self.style.selection_background_color, + color, Edges::default(), Hsla::transparent_black(), BorderStyle::default(), @@ -1129,7 +1169,7 @@ impl MarkdownElement { point(bounds.right(), end_position.y), ), Pixels::ZERO, - self.style.selection_background_color, + color, Edges::default(), Hsla::transparent_black(), BorderStyle::default(), @@ -1142,7 +1182,7 @@ impl MarkdownElement { point(end_position.x, end_position.y + end_line_height), ), Pixels::ZERO, - self.style.selection_background_color, + color, Edges::default(), Hsla::transparent_black(), BorderStyle::default(), @@ -1151,6 +1191,52 @@ impl MarkdownElement { } } + fn paint_selection( + &self, + bounds: Bounds, + rendered_text: &RenderedText, + window: &mut Window, + cx: &mut App, + ) { + let selection = self.markdown.read(cx).selection.clone(); + Self::paint_highlight_range( + bounds, + selection.start, + selection.end, + self.style.selection_background_color, + rendered_text, + window, + ); + } + + fn paint_search_highlights( + &self, + bounds: Bounds, + rendered_text: &RenderedText, + window: &mut Window, + cx: &mut App, + ) { + let markdown = self.markdown.read(cx); + let active_index = markdown.active_search_highlight; + let colors = cx.theme().colors(); + + for (i, highlight_range) in markdown.search_highlights.iter().enumerate() { + let color = if Some(i) == active_index { + colors.search_active_match_background + } else { + colors.search_match_background + }; + Self::paint_highlight_range( + bounds, + highlight_range.start, + highlight_range.end, + color, + rendered_text, + window, + ); + } + } + fn paint_mouse_listeners( &mut self, hitbox: &Hitbox, @@ -1955,6 +2041,7 @@ 
impl Element for MarkdownElement { self.paint_mouse_listeners(hitbox, &rendered_markdown.text, window, cx); rendered_markdown.element.paint(window, cx); + self.paint_search_highlights(bounds, &rendered_markdown.text, window, cx); self.paint_selection(bounds, &rendered_markdown.text, window, cx); } } diff --git a/crates/markdown_preview/Cargo.toml b/crates/markdown_preview/Cargo.toml index 19f1270bb91e8a7e9e660a62d8191a9d12b66641..3a07b258c5bd17ef2da02820ef2e724f7389ce13 100644 --- a/crates/markdown_preview/Cargo.toml +++ b/crates/markdown_preview/Cargo.toml @@ -21,6 +21,7 @@ gpui.workspace = true language.workspace = true log.workspace = true markdown.workspace = true +project.workspace = true settings.workspace = true theme_settings.workspace = true ui.workspace = true diff --git a/crates/markdown_preview/src/markdown_preview_view.rs b/crates/markdown_preview/src/markdown_preview_view.rs index f978fdfcce13808b58cd1d7467379c44b95e7433..3e6423b36603e247ba5da2a2166a8357701fa5cd 100644 --- a/crates/markdown_preview/src/markdown_preview_view.rs +++ b/crates/markdown_preview/src/markdown_preview_view.rs @@ -1,4 +1,5 @@ use std::cmp::min; +use std::ops::Range; use std::path::{Path, PathBuf}; use std::sync::Arc; use std::time::Duration; @@ -16,11 +17,15 @@ use markdown::{ CodeBlockRenderer, CopyButtonVisibility, Markdown, MarkdownElement, MarkdownFont, MarkdownOptions, MarkdownStyle, }; +use project::search::SearchQuery; use settings::Settings; use theme_settings::ThemeSettings; use ui::{WithScrollbar, prelude::*}; use util::normalize_path; -use workspace::item::{Item, ItemHandle}; +use workspace::item::{Item, ItemBufferKind, ItemHandle}; +use workspace::searchable::{ + Direction, SearchEvent, SearchOptions, SearchToken, SearchableItem, SearchableItemHandle, +}; use workspace::{OpenOptions, OpenVisible, Pane, Workspace}; use crate::{ @@ -382,6 +387,7 @@ impl MarkdownPreviewView { markdown.reset(contents, cx); }); view.sync_preview_to_source_index(selection_start, 
should_reveal_selection, cx); + cx.emit(SearchEvent::MatchesInvalidated); } view.pending_update_task = None; cx.notify(); @@ -751,6 +757,7 @@ impl Focusable for MarkdownPreviewView { } impl EventEmitter<()> for MarkdownPreviewView {} +impl EventEmitter for MarkdownPreviewView {} impl Item for MarkdownPreviewView { type Event = (); @@ -775,6 +782,18 @@ impl Item for MarkdownPreviewView { } fn to_item_events(_event: &Self::Event, _f: &mut dyn FnMut(workspace::item::ItemEvent)) {} + + fn buffer_kind(&self, _cx: &App) -> ItemBufferKind { + ItemBufferKind::Singleton + } + + fn as_searchable( + &self, + handle: &Entity, + _: &App, + ) -> Option> { + Some(Box::new(handle.clone())) + } } impl Render for MarkdownPreviewView { @@ -807,6 +826,140 @@ impl Render for MarkdownPreviewView { } } +impl SearchableItem for MarkdownPreviewView { + type Match = Range; + + fn supported_options(&self) -> SearchOptions { + SearchOptions { + case: true, + word: true, + regex: true, + replacement: false, + selection: false, + select_all: false, + find_in_results: false, + } + } + + fn get_matches(&self, _window: &mut Window, cx: &mut App) -> (Vec, SearchToken) { + ( + self.markdown.read(cx).search_highlights().to_vec(), + SearchToken::default(), + ) + } + + fn clear_matches(&mut self, _window: &mut Window, cx: &mut Context) { + let had_highlights = !self.markdown.read(cx).search_highlights().is_empty(); + self.markdown.update(cx, |markdown, cx| { + markdown.clear_search_highlights(cx); + }); + if had_highlights { + cx.emit(SearchEvent::MatchesInvalidated); + } + } + + fn update_matches( + &mut self, + matches: &[Self::Match], + active_match_index: Option, + _token: SearchToken, + _window: &mut Window, + cx: &mut Context, + ) { + let old_highlights = self.markdown.read(cx).search_highlights(); + let changed = old_highlights != matches; + self.markdown.update(cx, |markdown, cx| { + markdown.set_search_highlights(matches.to_vec(), active_match_index, cx); + }); + if changed { + 
cx.emit(SearchEvent::MatchesInvalidated); + } + } + + fn query_suggestion(&mut self, _window: &mut Window, cx: &mut Context) -> String { + self.markdown.read(cx).selected_text().unwrap_or_default() + } + + fn activate_match( + &mut self, + index: usize, + matches: &[Self::Match], + _token: SearchToken, + _window: &mut Window, + cx: &mut Context, + ) { + if let Some(match_range) = matches.get(index) { + let start = match_range.start; + self.markdown.update(cx, |markdown, cx| { + markdown.set_active_search_highlight(Some(index), cx); + markdown.request_autoscroll_to_source_index(start, cx); + }); + cx.emit(SearchEvent::ActiveMatchChanged); + } + } + + fn select_matches( + &mut self, + _matches: &[Self::Match], + _token: SearchToken, + _window: &mut Window, + _cx: &mut Context, + ) { + } + + fn replace( + &mut self, + _: &Self::Match, + _: &SearchQuery, + _token: SearchToken, + _window: &mut Window, + _: &mut Context, + ) { + } + + fn find_matches( + &mut self, + query: Arc, + _window: &mut Window, + cx: &mut Context, + ) -> Task> { + let source = self.markdown.read(cx).source().to_string(); + cx.background_spawn(async move { query.search_str(&source) }) + } + + fn active_match_index( + &mut self, + direction: Direction, + matches: &[Self::Match], + _token: SearchToken, + _window: &mut Window, + cx: &mut Context, + ) -> Option { + if matches.is_empty() { + return None; + } + + let markdown = self.markdown.read(cx); + let current_source_index = markdown + .active_search_highlight() + .and_then(|i| markdown.search_highlights().get(i)) + .map(|m| m.start) + .or(self.active_source_index) + .unwrap_or(0); + + match direction { + Direction::Next => matches + .iter() + .position(|m| m.start >= current_source_index) + .or(Some(0)), + Direction::Prev => matches + .iter() + .rposition(|m| m.start <= current_source_index) + .or(Some(matches.len().saturating_sub(1))), + } + } +} + #[cfg(test)] mod tests { use crate::markdown_preview_view::ImageSource; diff --git 
a/crates/project/src/search.rs b/crates/project/src/search.rs index 3a554eb3da1557849e18846b09a7787ab939f46d..cd4702d04863c2fc3026700b2d6653e1db24dbff 100644 --- a/crates/project/src/search.rs +++ b/crates/project/src/search.rs @@ -620,4 +620,56 @@ impl SearchQuery { Self::Text { .. } => None, } } + + pub fn search_str(&self, text: &str) -> Vec> { + if self.as_str().is_empty() { + return Vec::new(); + } + + let is_word_char = |c: char| c.is_alphanumeric() || c == '_'; + + let mut matches = Vec::new(); + match self { + Self::Text { + search, whole_word, .. + } => { + for mat in search.find_iter(text.as_bytes()) { + if *whole_word { + let prev_char = text[..mat.start()].chars().last(); + let next_char = text[mat.end()..].chars().next(); + if prev_char.is_some_and(&is_word_char) + || next_char.is_some_and(&is_word_char) + { + continue; + } + } + matches.push(mat.start()..mat.end()); + } + } + Self::Regex { + regex, + multiline, + one_match_per_line, + .. + } => { + if *multiline { + for mat in regex.find_iter(text).flatten() { + matches.push(mat.start()..mat.end()); + } + } else { + let mut line_offset = 0; + for line in text.split('\n') { + for mat in regex.find_iter(line).flatten() { + matches.push((line_offset + mat.start())..(line_offset + mat.end())); + if *one_match_per_line { + break; + } + } + line_offset += line.len() + 1; + } + } + } + } + matches + } } diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index 1328805b50fe077e36d38b3290cb7936f24301f2..46177c5642a8d05daaf22e9fb24b205cd10ca42b 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -291,6 +291,7 @@ impl Render for BufferSearchBar { regex, replacement, selection, + select_all, find_in_results, } = self.supported_options(cx); @@ -461,14 +462,16 @@ impl Render for BufferSearchBar { )) }); - el.child(render_action_button( - "buffer-search-nav-button", - IconName::SelectAll, - Default::default(), - "Select All Matches", - 
&SelectAllMatches, - query_focus, - )) + el.when(select_all, |el| { + el.child(render_action_button( + "buffer-search-nav-button", + IconName::SelectAll, + Default::default(), + "Select All Matches", + &SelectAllMatches, + query_focus.clone(), + )) + }) .child(matches_column) }) .when(find_in_results, |el| { diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index 0c9bbcbec32dcd0fbb8240d524b83f461ac778c3..3ecc6c844db834da91e2f24c3f0cf2d460b5f246 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -1820,6 +1820,7 @@ impl SearchableItem for TerminalView { regex: true, replacement: false, selection: false, + select_all: false, find_in_results: false, } } diff --git a/crates/workspace/src/searchable.rs b/crates/workspace/src/searchable.rs index 93d809d7a522d11e4b4bd78e71899b89aa4d0508..f0932a7d7b3e7880c27b40c28890f063f4de731e 100644 --- a/crates/workspace/src/searchable.rs +++ b/crates/workspace/src/searchable.rs @@ -55,6 +55,7 @@ pub struct SearchOptions { /// Specifies whether the supports search & replace. pub replacement: bool, pub selection: bool, + pub select_all: bool, pub find_in_results: bool, } @@ -78,6 +79,7 @@ pub trait SearchableItem: Item + EventEmitter { regex: true, replacement: true, selection: true, + select_all: true, find_in_results: false, } } From 810822b5cd811363dd40ec16609c8cfc7aeec95e Mon Sep 17 00:00:00 2001 From: Steven Date: Mon, 6 Apr 2026 09:46:00 -0700 Subject: [PATCH 57/63] Use multibuffer to fix symbol search when diff is present (#52268) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Context Fixes a bug where project symbol search navigates to the wrong position when diff hunks are expanded. The cursor would land offset by the number of lines added by the expanded diffs (Closes #51331). 
Now, users navigating to symbols via project symbol search will land on the correct position even when diff hunks are expanded in the editor. The fix converts the buffer position to a `multi_buffer::Anchor` before passing it to `select_ranges`, so it resolves correctly through the diff transform layer instead of being interpreted as a literal MultiBuffer coordinate. Previously, the symbol's position was passed as a raw coordinate to the editor, which interpreted it relative to what's displayed on screen (including expanded diff lines). The fix converts the position to an anchor, which is tied to the actual content in the file rather than a screen position. ## How to Review - All changes are in `crates/project_symbols/src/project_symbols.rs`. Most of the changes are in `confirm()` method (Lines 142-154). - There's also one change on the first line of the file. ## Self-Review Checklist - [x] I've reviewed my own diff for quality, security, and reliability - [ ] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - Fixed a bug where project symbols did not take you to the correct location when diffs are expanded. 
--- crates/project_symbols/src/project_symbols.rs | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/crates/project_symbols/src/project_symbols.rs b/crates/project_symbols/src/project_symbols.rs index 84b92f3eaa4f0216b881526b3aac42f8980ffe78..351b6e7afb59ef9b7ffd545d36b0e3dd66c6e834 100644 --- a/crates/project_symbols/src/project_symbols.rs +++ b/crates/project_symbols/src/project_symbols.rs @@ -1,4 +1,6 @@ -use editor::{Bias, Editor, SelectionEffects, scroll::Autoscroll, styled_runs_for_code_label}; +use editor::{ + Anchor, Bias, Editor, SelectionEffects, scroll::Autoscroll, styled_runs_for_code_label, +}; use fuzzy::{StringMatch, StringMatchCandidate}; use gpui::{ App, Context, DismissEvent, Entity, HighlightStyle, ParentElement, StyledText, Task, TextStyle, @@ -140,11 +142,19 @@ impl PickerDelegate for ProjectSymbolsDelegate { ); editor.update(cx, |editor, cx| { + let multibuffer_snapshot = editor.buffer().read(cx).snapshot(cx); + let Some((excerpt_id, _, buffer_snapshot)) = + multibuffer_snapshot.as_singleton() + else { + return; + }; + let text_anchor = buffer_snapshot.anchor_before(position); + let anchor = Anchor::in_buffer(excerpt_id, text_anchor); editor.change_selections( SelectionEffects::scroll(Autoscroll::center()), window, cx, - |s| s.select_ranges([position..position]), + |s| s.select_ranges([anchor..anchor]), ); }); })?; From 9eab76d0a08c735c8f952425a8257919fb15bc67 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Mon, 6 Apr 2026 13:47:54 -0300 Subject: [PATCH 58/63] sidebar: Adjust "Add Local Project" button behavior (#53248) This PR makes it so using that button from the sidebar's recent projects picker _does not_ add a new window with that project, but rather, add it to the current multi-workspace/sidebar. Previously, the `Open` action was defaulting to true even if `false` was passed to its `create_new_window` condition. 
Release Notes: - N/A --- .../src/sidebar_recent_projects.rs | 19 +++++++----- crates/workspace/src/workspace.rs | 29 ++++++++++++++++--- 2 files changed, 37 insertions(+), 11 deletions(-) diff --git a/crates/recent_projects/src/sidebar_recent_projects.rs b/crates/recent_projects/src/sidebar_recent_projects.rs index cda4eb8d5595c7572292bb1b0b4fbc10c8e30ae5..1fe0d2ae86aefdad45136c496f8049689d77e048 100644 --- a/crates/recent_projects/src/sidebar_recent_projects.rs +++ b/crates/recent_projects/src/sidebar_recent_projects.rs @@ -411,12 +411,16 @@ impl PickerDelegate for SidebarRecentProjectsDelegate { .border_t_1() .border_color(cx.theme().colors().border_variant) .child({ - let open_action = workspace::Open::default(); + let open_action = workspace::Open { + create_new_window: false, + }; + Button::new("open_local_folder", "Add Local Project") .key_binding(KeyBinding::for_action_in(&open_action, &focus_handle, cx)) - .on_click(move |_, window, cx| { - window.dispatch_action(open_action.boxed_clone(), cx) - }) + .on_click(cx.listener(move |_, _, window, cx| { + window.dispatch_action(open_action.boxed_clone(), cx); + cx.emit(DismissEvent); + })) }) .child( Button::new("open_remote_folder", "Add Remote Project") @@ -427,7 +431,7 @@ impl PickerDelegate for SidebarRecentProjectsDelegate { }, cx, )) - .on_click(|_, window, cx| { + .on_click(cx.listener(|_, _, window, cx| { window.dispatch_action( OpenRemote { from_existing_connection: false, @@ -435,8 +439,9 @@ impl PickerDelegate for SidebarRecentProjectsDelegate { } .boxed_clone(), cx, - ) - }), + ); + cx.emit(DismissEvent); + })), ) .into_any(), ) diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 6a5e9a3318e576054a9533c7ab92f86fc10e1a66..c726d0a421928979200a088125d3ddd172530ff9 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -656,13 +656,25 @@ impl From for i64 { } } -fn prompt_and_open_paths(app_state: Arc, options: PathPromptOptions, cx: 
&mut App) { +fn prompt_and_open_paths( + app_state: Arc, + options: PathPromptOptions, + create_new_window: bool, + cx: &mut App, +) { if let Some(workspace_window) = local_workspace_windows(cx).into_iter().next() { workspace_window .update(cx, |multi_workspace, window, cx| { let workspace = multi_workspace.workspace().clone(); workspace.update(cx, |workspace, cx| { - prompt_for_open_path_and_open(workspace, app_state, options, true, window, cx); + prompt_for_open_path_and_open( + workspace, + app_state, + options, + create_new_window, + window, + cx, + ); }); }) .ok(); @@ -682,7 +694,14 @@ fn prompt_and_open_paths(app_state: Arc, options: PathPromptOptions, c window.activate_window(); let workspace = multi_workspace.workspace().clone(); workspace.update(cx, |workspace, cx| { - prompt_for_open_path_and_open(workspace, app_state, options, true, window, cx); + prompt_for_open_path_and_open( + workspace, + app_state, + options, + create_new_window, + window, + cx, + ); }); })?; anyhow::Ok(()) @@ -743,7 +762,7 @@ pub fn init(app_state: Arc, cx: &mut App) { cx.on_action(|_: &CloseWindow, cx| Workspace::close_global(cx)) .on_action(|_: &Reload, cx| reload(cx)) - .on_action(|_: &Open, cx: &mut App| { + .on_action(|action: &Open, cx: &mut App| { let app_state = AppState::global(cx); prompt_and_open_paths( app_state, @@ -753,6 +772,7 @@ pub fn init(app_state: Arc, cx: &mut App) { multiple: true, prompt: None, }, + action.create_new_window, cx, ); }) @@ -767,6 +787,7 @@ pub fn init(app_state: Arc, cx: &mut App) { multiple: true, prompt: None, }, + true, cx, ); }); From a92b242e01ac9d923cfa2a90aee103e152a6be6e Mon Sep 17 00:00:00 2001 From: David Alecrim <35930364+davidalecrim1@users.noreply.github.com> Date: Mon, 6 Apr 2026 13:50:48 -0300 Subject: [PATCH 59/63] keymaps: Add Ctrl+R open recent binding for macOS and Linux (#52893) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #52879 ## Summary VS Code binds `Ctrl+R` to open 
recent workspaces/folders on all platforms (Windows, macOS, and Linux). Zed already had this binding in `default-windows.json`, but it was missing from `default-macos.json` and `default-linux.json`. Since `BaseKeymap::VSCode` returns no supplemental keymap file and relies entirely on the platform default keymaps, users who selected VS Code keybindings on macOS or Linux would not get the expected `Ctrl+R` behavior — instead getting nothing, or having to use the non-VSCode binding (`Alt+Cmd+O` / `Alt+Ctrl+O`). This adds the missing binding to both platform defaults, consistent with what Windows already had. ## Screenshot The default keybinding in VS Code: Screenshot 2026-04-01 at 07 38 09 Release Notes: - Added `Ctrl+R` keybinding for opening recent projects on macOS and Linux, matching VS Code's default behavior on all platforms. --- assets/keymaps/default-linux.json | 1 + assets/keymaps/default-macos.json | 1 + 2 files changed, 2 insertions(+) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 0beabfcbc555a336ad75424fb4079e5d4a867b89..5ecca68e0404b400af2c285dc51df0a65d6fe07a 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -598,6 +598,7 @@ // Change the default action on `menu::Confirm` by setting the parameter // "alt-ctrl-o": ["projects::OpenRecent", { "create_new_window": true }], "alt-ctrl-o": ["projects::OpenRecent", { "create_new_window": false }], + "ctrl-r": ["projects::OpenRecent", { "create_new_window": false }], "alt-shift-open": ["projects::OpenRemote", { "from_existing_connection": false, "create_new_window": false }], // Change to open path modal for existing remote connection by setting the parameter // "alt-ctrl-shift-o": "["projects::OpenRemote", { "from_existing_connection": true }]", diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index c514a8fbfc71f7b2b62e017b940790a39cf59db7..c74b5900001a2c798076783b2741aba84ffc4b15 100644 --- 
a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -668,6 +668,7 @@ // Change the default action on `menu::Confirm` by setting the parameter // "alt-cmd-o": ["projects::OpenRecent", {"create_new_window": true }], "alt-cmd-o": ["projects::OpenRecent", { "create_new_window": false }], + "ctrl-r": ["projects::OpenRecent", { "create_new_window": false }], "ctrl-cmd-o": ["projects::OpenRemote", { "from_existing_connection": false, "create_new_window": false }], "ctrl-cmd-shift-o": ["projects::OpenRemote", { "from_existing_connection": true, "create_new_window": false }], "cmd-ctrl-b": "branches::OpenRecent", From f3c034ef2b3e56848de4f4117ea17266ddd1d940 Mon Sep 17 00:00:00 2001 From: Toni Alatalo Date: Mon, 6 Apr 2026 20:16:45 +0300 Subject: [PATCH 60/63] Prevent dev container modal dismissal during creation (#52506) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Context When the dev container creation modal is showing "Creating Dev Container", clicking anywhere on the workspace backdrop dismisses the dialog. The container creation continues in the background, but the user loses visual feedback and the subsequent `open_remote_project` call may fail because the modal entity is gone. This adds an `allow_dismissal` flag to `RemoteServerProjects` that blocks accidental dismissal (backdrop clicks, focus loss) while a dev container is being created, but allows explicit dismissal on success or error. ## How to Review Small PR — two files changed: 1. **`remote_servers.rs`** (the fix): `allow_dismissal` bool field added, set to `false` when entering Creating state, set to `true` before emitting `DismissEvent` on success/error. `on_before_dismiss` override checks the flag. 2. **`recent_projects.rs`** (the test): Regression test that opens a dev container modal, simulates a backdrop click, and asserts the modal stays open. 
## Self-Review Checklist - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - Fixed dev container creation modal being dismissed when clicking outside it --------- Co-authored-by: Claude Opus 4.6 --- crates/recent_projects/src/recent_projects.rs | 67 ++++++++++++++++++- crates/recent_projects/src/remote_servers.rs | 20 +++++- 2 files changed, 83 insertions(+), 4 deletions(-) diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index dc952764056f6465840825d2a1f0fce886f401c0..24010017ff9fa4eb62a1787332fed70f740ccc2d 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -2003,7 +2003,7 @@ mod tests { use std::path::PathBuf; use editor::Editor; - use gpui::{TestAppContext, UpdateGlobal, WindowHandle}; + use gpui::{TestAppContext, UpdateGlobal, VisualTestContext, WindowHandle}; use serde_json::json; use settings::SettingsStore; @@ -2242,6 +2242,71 @@ mod tests { .unwrap(); } + #[gpui::test] + async fn test_dev_container_modal_not_dismissed_on_backdrop_click(cx: &mut TestAppContext) { + let app_state = init_test(cx); + + app_state + .fs + .as_fake() + .insert_tree( + path!("/project"), + json!({ + ".devcontainer": { + "devcontainer.json": "{}" + }, + "src": { + "main.rs": "fn main() {}" + } + }), + ) + .await; + + cx.update(|cx| { + open_paths( + &[PathBuf::from(path!("/project"))], + app_state, + workspace::OpenOptions::default(), + cx, + ) + }) + .await + .unwrap(); + + assert_eq!(cx.update(|cx| cx.windows().len()), 1); + let multi_workspace = cx.update(|cx| cx.windows()[0].downcast::().unwrap()); + + cx.run_until_parked(); + + 
cx.dispatch_action(*multi_workspace, OpenDevContainer); + + multi_workspace + .update(cx, |multi_workspace, _, cx| { + assert!( + multi_workspace + .active_modal::(cx) + .is_some(), + "Dev container modal should be open" + ); + }) + .unwrap(); + + // Click outside the modal (on the backdrop) to try to dismiss it + let mut vcx = VisualTestContext::from_window(*multi_workspace, cx); + vcx.simulate_click(gpui::point(px(1.0), px(1.0)), gpui::Modifiers::default()); + + multi_workspace + .update(cx, |multi_workspace, _, cx| { + assert!( + multi_workspace + .active_modal::(cx) + .is_some(), + "Dev container modal should remain open during creation" + ); + }) + .unwrap(); + } + #[gpui::test] async fn test_open_dev_container_action_with_multiple_configs(cx: &mut TestAppContext) { let app_state = init_test(cx); diff --git a/crates/recent_projects/src/remote_servers.rs b/crates/recent_projects/src/remote_servers.rs index 404b0673ab8cf220385d1a0ce41a40156d469a01..7db09c88616879010352cbc2ac0fd0549982240b 100644 --- a/crates/recent_projects/src/remote_servers.rs +++ b/crates/recent_projects/src/remote_servers.rs @@ -54,7 +54,7 @@ use util::{ rel_path::RelPath, }; use workspace::{ - AppState, ModalView, MultiWorkspace, OpenLog, OpenOptions, Toast, Workspace, + AppState, DismissDecision, ModalView, MultiWorkspace, OpenLog, OpenOptions, Toast, Workspace, notifications::{DetachAndPromptErr, NotificationId}, open_remote_project_with_existing_connection, }; @@ -69,6 +69,7 @@ pub struct RemoteServerProjects { create_new_window: bool, dev_container_picker: Option>>, _subscription: Subscription, + allow_dismissal: bool, } struct CreateRemoteServer { @@ -920,6 +921,7 @@ impl RemoteServerProjects { create_new_window, dev_container_picker: None, _subscription, + allow_dismissal: true, } } @@ -1140,6 +1142,7 @@ impl RemoteServerProjects { } fn view_in_progress_dev_container(&mut self, window: &mut Window, cx: &mut Context) { + self.allow_dismissal = false; self.mode = 
Mode::CreateRemoteDevContainer(CreateRemoteDevContainer::new( DevContainerCreationProgress::Creating, cx, @@ -1309,6 +1312,7 @@ impl RemoteServerProjects { cx.emit(DismissEvent); } _ => { + self.allow_dismissal = true; self.mode = Mode::default_mode(&self.ssh_config_servers, cx); self.focus_handle(cx).focus(window, cx); cx.notify(); @@ -1875,6 +1879,7 @@ impl RemoteServerProjects { .ok(); entity .update_in(cx, |remote_server_projects, window, cx| { + remote_server_projects.allow_dismissal = true; remote_server_projects.mode = Mode::CreateRemoteDevContainer(CreateRemoteDevContainer::new( DevContainerCreationProgress::Error(format!("{e}")), @@ -1897,7 +1902,8 @@ impl RemoteServerProjects { .log_err(); entity - .update(cx, |_, cx| { + .update(cx, |this, cx| { + this.allow_dismissal = true; cx.emit(DismissEvent); }) .log_err(); @@ -2948,7 +2954,15 @@ fn get_text(element: &Entity, cx: &mut App) -> String { element.read(cx).text(cx).trim().to_string() } -impl ModalView for RemoteServerProjects {} +impl ModalView for RemoteServerProjects { + fn on_before_dismiss( + &mut self, + _window: &mut Window, + _cx: &mut Context, + ) -> DismissDecision { + DismissDecision::Dismiss(self.allow_dismissal) + } +} impl Focusable for RemoteServerProjects { fn focus_handle(&self, cx: &App) -> FocusHandle { From 1d0967cdda39f0e2dc599626b575063d77a0bf45 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Mon, 6 Apr 2026 14:18:59 -0400 Subject: [PATCH 61/63] Fix compilation on main (#53257) Semantic merge conflict due to multibuffer API changes Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - N/A --- 
crates/project_symbols/src/project_symbols.rs | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/crates/project_symbols/src/project_symbols.rs b/crates/project_symbols/src/project_symbols.rs index 351b6e7afb59ef9b7ffd545d36b0e3dd66c6e834..931e332d93d869bc31909643190d5b35f32409dc 100644 --- a/crates/project_symbols/src/project_symbols.rs +++ b/crates/project_symbols/src/project_symbols.rs @@ -1,6 +1,4 @@ -use editor::{ - Anchor, Bias, Editor, SelectionEffects, scroll::Autoscroll, styled_runs_for_code_label, -}; +use editor::{Bias, Editor, SelectionEffects, scroll::Autoscroll, styled_runs_for_code_label}; use fuzzy::{StringMatch, StringMatchCandidate}; use gpui::{ App, Context, DismissEvent, Entity, HighlightStyle, ParentElement, StyledText, Task, TextStyle, @@ -143,13 +141,14 @@ impl PickerDelegate for ProjectSymbolsDelegate { editor.update(cx, |editor, cx| { let multibuffer_snapshot = editor.buffer().read(cx).snapshot(cx); - let Some((excerpt_id, _, buffer_snapshot)) = - multibuffer_snapshot.as_singleton() - else { + let Some(buffer_snapshot) = multibuffer_snapshot.as_singleton() else { return; }; let text_anchor = buffer_snapshot.anchor_before(position); - let anchor = Anchor::in_buffer(excerpt_id, text_anchor); + let Some(anchor) = multibuffer_snapshot.anchor_in_buffer(text_anchor) + else { + return; + }; editor.change_selections( SelectionEffects::scroll(Autoscroll::center()), window, From 4b1e0a30b8ecf6b45da880e51b29a2d933c418a7 Mon Sep 17 00:00:00 2001 From: Peter Siegel <33677897+yeetypete@users.noreply.github.com> Date: Mon, 6 Apr 2026 20:30:23 +0200 Subject: [PATCH 62/63] dev_container: Parse env vars and docker labels with `=` in values correctly (#53134) Fixes a parsing issue where docker env var key/value pairs can contain an "=" character in the value. This is pretty common and present in all [nvidia/cuda](https://hub.docker.com/r/nvidia/cuda) docker images. Also adds some tests for env var parsing. 
Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Release Notes: - Fixed a parsing failure where docker env var key/value pairs can contain an "=" character in the value. --- crates/dev_container/src/docker.rs | 57 ++++++++++++++++++++++++------ 1 file changed, 46 insertions(+), 11 deletions(-) diff --git a/crates/dev_container/src/docker.rs b/crates/dev_container/src/docker.rs index e0eecd1da0e2b1749ffcf60fa67cdbef273fda12..88600e2b2a5221165b6ca80e36c0ebcfdf35013a 100644 --- a/crates/dev_container/src/docker.rs +++ b/crates/dev_container/src/docker.rs @@ -56,12 +56,11 @@ impl DockerInspectConfig { pub(crate) fn env_as_map(&self) -> Result, DevContainerError> { let mut map = HashMap::new(); for env_var in &self.env { - let parts: Vec<&str> = env_var.split("=").collect(); - if parts.len() != 2 { - log::error!("Unable to parse {env_var} into and environment key-value"); + let Some((key, value)) = env_var.split_once('=') else { + log::error!("Unable to parse {env_var} into an environment key-value"); return Err(DevContainerError::DevContainerParseFailed); - } - map.insert(parts[0].to_string(), parts[1].to_string()); + }; + map.insert(key.to_string(), value.to_string()); } Ok(map) } @@ -428,12 +427,8 @@ where values .iter() .filter_map(|v| { - let parts: Vec<&str> = v.split("=").collect(); - if parts.len() != 2 { - None - } else { - Some((parts[0].to_string(), parts[1].to_string())) - } + let (key, value) = v.split_once('=')?; + Some((key.to_string(), value.to_string())) }) .collect(), )) @@ -547,6 +542,46 @@ mod test { }, }; + #[test] + fn should_parse_simple_env_var() { + let config = super::DockerInspectConfig { + labels: 
super::DockerConfigLabels { metadata: None }, + image_user: None, + env: vec!["KEY=value".to_string()], + }; + + let map = config.env_as_map().unwrap(); + assert_eq!(map.get("KEY").unwrap(), "value"); + } + + #[test] + fn should_parse_env_var_with_equals_in_value() { + let config = super::DockerInspectConfig { + labels: super::DockerConfigLabels { metadata: None }, + image_user: None, + env: vec!["COMPLEX=key=val other>=1.0".to_string()], + }; + + let map = config.env_as_map().unwrap(); + assert_eq!(map.get("COMPLEX").unwrap(), "key=val other>=1.0"); + } + + #[test] + fn should_parse_simple_label() { + let json = r#"{"volumes": [], "labels": ["com.example.key=value"]}"#; + let service: DockerComposeService = serde_json_lenient::from_str(json).unwrap(); + let labels = service.labels.unwrap(); + assert_eq!(labels.get("com.example.key").unwrap(), "value"); + } + + #[test] + fn should_parse_label_with_equals_in_value() { + let json = r#"{"volumes": [], "labels": ["com.example.key=value=with=equals"]}"#; + let service: DockerComposeService = serde_json_lenient::from_str(json).unwrap(); + let labels = service.labels.unwrap(); + assert_eq!(labels.get("com.example.key").unwrap(), "value=with=equals"); + } + #[test] fn should_create_docker_inspect_command() { let docker = Docker::new("docker"); From 7748047051bc5886269a2d2ee1d4aa5bdd910471 Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Mon, 6 Apr 2026 14:30:52 -0400 Subject: [PATCH 63/63] git_graph: Refresh UI when stash/branch list has changed (#53094) ### Summary This PR fixes an issue where the git graph wouldn't refresh its state correctly unless HEAD changed. Now the repository emits events when the branch list has changed, and invalidates the graph data cache when the stash or branch list has changed. I also renamed the event `Repository::BranchChanged` to `Repository::HeadChanged`.
Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #53080 Release Notes: - Fixed the git graph not refreshing when the stash or branch list changed. --- crates/fs/src/fake_git_repo.rs | 13 +- crates/git_graph/src/git_graph.rs | 155 +++++++++++++++++- crates/git_ui/src/git_panel.rs | 2 +- crates/project/src/git_store.rs | 32 +++- crates/project/src/git_store/branch_diff.rs | 2 +- .../tests/integration/project_tests.rs | 2 +- 6 files changed, 186 insertions(+), 20 deletions(-) diff --git a/crates/fs/src/fake_git_repo.rs b/crates/fs/src/fake_git_repo.rs index a00061452e4dbd2051b961fdde9e33dc05fba0b1..c25b0ded5daea0674629ce4bea00736cb2eb3ffb 100644 --- a/crates/fs/src/fake_git_repo.rs +++ b/crates/fs/src/fake_git_repo.rs @@ -10,6 +10,7 @@ use git::{ GRAPH_CHUNK_SIZE, GitRepository, GitRepositoryCheckpoint, InitialGraphCommitData, LogOrder, LogSource, PushOptions, Remote, RepoPath, ResetMode, SearchCommitArgs, Worktree, }, + stash::GitStash, status::{ DiffTreeType, FileStatus, GitStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus, UnmergedStatus, }, @@ -53,6 +54,7 @@ pub struct FakeGitRepositoryState { pub simulated_create_worktree_error: Option, pub refs: HashMap, pub graph_commits: Vec>, + pub stash_entries: GitStash, } impl FakeGitRepositoryState { @@ -72,6 +74,7 @@ impl FakeGitRepositoryState { oids: Default::default(), remotes: HashMap::default(), graph_commits: Vec::new(), + stash_entries: Default::default(), } } } @@ -378,13 +381,13 @@ impl GitRepository for FakeGitRepository { } fn stash_entries(&self) -> BoxFuture<'_, Result> { - async { Ok(git::stash::GitStash::default()) }.boxed() + self.with_state_async(false, |state| Ok(state.stash_entries.clone()))
} fn branches(&self) -> BoxFuture<'_, Result>> { self.with_state_async(false, move |state| { let current_branch = &state.current_branch_name; - Ok(state + let mut branches = state .branches .iter() .map(|branch_name| { @@ -402,7 +405,11 @@ impl GitRepository for FakeGitRepository { upstream: None, } }) - .collect()) + .collect::>(); + // compute snapshot expects these to be sorted by ref_name + // because that's what git itself does + branches.sort_by(|a, b| a.ref_name.cmp(&b.ref_name)); + Ok(branches) }) } diff --git a/crates/git_graph/src/git_graph.rs b/crates/git_graph/src/git_graph.rs index 3439ca9fabd75d15a0a32fc09751b35c4e18b5a1..83cd01eda5c509583f24fd424426d20a55bbfbed 100644 --- a/crates/git_graph/src/git_graph.rs +++ b/crates/git_graph/src/git_graph.rs @@ -1148,7 +1148,7 @@ impl GitGraph { } } } - RepositoryEvent::BranchChanged => { + RepositoryEvent::HeadChanged | RepositoryEvent::BranchListChanged => { self.pending_select_sha = None; // Only invalidate if we scanned atleast once, // meaning we are not inside the initial repo loading state @@ -1157,6 +1157,12 @@ impl GitGraph { self.invalidate_state(cx); } } + RepositoryEvent::StashEntriesChanged if self.log_source == LogSource::All => { + self.pending_select_sha = None; + if repository.read(cx).scan_id > 1 { + self.invalidate_state(cx); + } + } RepositoryEvent::GraphEvent(_, _) => {} _ => {} } @@ -3737,8 +3743,8 @@ mod tests { assert!( observed_repository_events .iter() - .any(|event| matches!(event, RepositoryEvent::BranchChanged)), - "initial repository scan should emit BranchChanged" + .any(|event| matches!(event, RepositoryEvent::HeadChanged)), + "initial repository scan should emit HeadChanged" ); let commit_count_after = repository.read_with(cx, |repo, _| { repo.get_graph_data(crate::LogSource::default(), crate::LogOrder::default()) @@ -3856,11 +3862,148 @@ mod tests { ); cx.run_until_parked(); - let commit_count_after_switch_back = + // Verify graph data is reloaded from repository cache on switch 
back + let reloaded_commit_count = git_graph.read_with(&*cx, |graph, _| graph.graph_data.commits.len()); assert_eq!( - initial_commit_count, commit_count_after_switch_back, - "graph_data should be repopulated from cache after switching back to the same repo" + reloaded_commit_count, + commits.len(), + "graph data should be reloaded after switching back" ); } + + #[gpui::test] + async fn test_graph_data_reloaded_after_stash_change(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + Path::new("/project"), + json!({ + ".git": {}, + "file.txt": "content", + }), + ) + .await; + + let initial_head = Oid::from_bytes(&[1; 20]).unwrap(); + let initial_stash = Oid::from_bytes(&[2; 20]).unwrap(); + let updated_head = Oid::from_bytes(&[3; 20]).unwrap(); + let updated_stash = Oid::from_bytes(&[4; 20]).unwrap(); + + fs.set_graph_commits( + Path::new("/project/.git"), + vec![ + Arc::new(InitialGraphCommitData { + sha: initial_head, + parents: smallvec![initial_stash], + ref_names: vec!["HEAD".into(), "refs/heads/main".into()], + }), + Arc::new(InitialGraphCommitData { + sha: initial_stash, + parents: smallvec![], + ref_names: vec!["refs/stash".into()], + }), + ], + ); + fs.with_git_state(Path::new("/project/.git"), true, |state| { + state.stash_entries = git::stash::GitStash { + entries: vec![git::stash::StashEntry { + index: 0, + oid: initial_stash, + message: "initial stash".to_string(), + branch: Some("main".to_string()), + timestamp: 1, + }] + .into(), + }; + }) + .unwrap(); + + let project = Project::test(fs.clone(), [Path::new("/project")], cx).await; + cx.run_until_parked(); + + let repository = project.read_with(cx, |project, cx| { + project + .active_repository(cx) + .expect("should have a repository") + }); + + let (multi_workspace, cx) = cx.add_window_view(|window, cx| { + workspace::MultiWorkspace::test_new(project.clone(), window, cx) + }); + let workspace_weak = + multi_workspace.read_with(&*cx, |multi, _| 
multi.workspace().downgrade()); + let git_graph = cx.new_window_entity(|window, cx| { + GitGraph::new( + repository.read(cx).id, + project.read(cx).git_store().clone(), + workspace_weak, + window, + cx, + ) + }); + cx.run_until_parked(); + + let initial_shas = git_graph.read_with(&*cx, |graph, _| { + graph + .graph_data + .commits + .iter() + .map(|commit| commit.data.sha) + .collect::>() + }); + assert_eq!(initial_shas, vec![initial_head, initial_stash]); + + fs.set_graph_commits( + Path::new("/project/.git"), + vec![ + Arc::new(InitialGraphCommitData { + sha: updated_head, + parents: smallvec![updated_stash], + ref_names: vec!["HEAD".into(), "refs/heads/main".into()], + }), + Arc::new(InitialGraphCommitData { + sha: updated_stash, + parents: smallvec![], + ref_names: vec!["refs/stash".into()], + }), + ], + ); + fs.with_git_state(Path::new("/project/.git"), true, |state| { + state.stash_entries = git::stash::GitStash { + entries: vec![git::stash::StashEntry { + index: 0, + oid: updated_stash, + message: "updated stash".to_string(), + branch: Some("main".to_string()), + timestamp: 1, + }] + .into(), + }; + }) + .unwrap(); + + project + .update(cx, |project, cx| project.git_scans_complete(cx)) + .await; + cx.run_until_parked(); + + cx.draw( + point(px(0.), px(0.)), + gpui::size(px(1200.), px(800.)), + |_, _| git_graph.clone().into_any_element(), + ); + cx.run_until_parked(); + + let reloaded_shas = git_graph.read_with(&*cx, |graph, _| { + graph + .graph_data + .commits + .iter() + .map(|commit| commit.data.sha) + .collect::>() + }); + assert_eq!(reloaded_shas, vec![updated_head, updated_stash]); + } } diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 5b40c4bffc3a492f0113a8c5e45b2cfc1763d380..aac1ec1a19ab53913a830738ae528fb2c0c10248 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -780,7 +780,7 @@ impl GitPanel { move |this, _git_store, event, window, cx| match event { 
GitStoreEvent::RepositoryUpdated( _, - RepositoryEvent::StatusesChanged | RepositoryEvent::BranchChanged, + RepositoryEvent::StatusesChanged | RepositoryEvent::HeadChanged, true, ) | GitStoreEvent::RepositoryAdded diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 20e04a19a7891c5b8800b270a1c8d55720ce90ff..6bc7f1ab52db8665efac7ab5631986b5ec0c8e33 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -287,6 +287,7 @@ pub struct RepositorySnapshot { pub original_repo_abs_path: Arc, pub path_style: PathStyle, pub branch: Option, + pub branch_list: Arc<[Branch]>, pub head_commit: Option, pub scan_id: u64, pub merge: MergeDetails, @@ -428,7 +429,8 @@ pub enum GitGraphEvent { #[derive(Clone, Debug, PartialEq, Eq)] pub enum RepositoryEvent { StatusesChanged, - BranchChanged, + HeadChanged, + BranchListChanged, StashEntriesChanged, GitWorktreeListChanged, PendingOpsChanged { pending_ops: SumTree }, @@ -3686,6 +3688,7 @@ impl RepositorySnapshot { .unwrap_or_else(|| work_directory_abs_path.clone()), work_directory_abs_path, branch: None, + branch_list: Arc::from([]), head_commit: None, scan_id: 0, merge: Default::default(), @@ -4048,11 +4051,17 @@ impl Repository { .shared(); cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event { - RepositoryEvent::BranchChanged => { + RepositoryEvent::HeadChanged | RepositoryEvent::BranchListChanged => { if this.scan_id > 1 { this.initial_graph_data.clear(); } } + RepositoryEvent::StashEntriesChanged => { + if this.scan_id > 1 { + this.initial_graph_data + .retain(|(log_source, _), _| *log_source != LogSource::All); + } + } _ => {} }) .detach(); @@ -5594,7 +5603,7 @@ impl Repository { log::info!("head branch after scan is {branch:?}"); let snapshot = this.update(&mut cx, |this, cx| { this.snapshot.branch = branch; - cx.emit(RepositoryEvent::BranchChanged); + cx.emit(RepositoryEvent::HeadChanged); this.snapshot.clone() })?; if let Some(updates_tx) = updates_tx 
{ @@ -6380,7 +6389,7 @@ impl Repository { .as_ref() .map(proto_to_commit_details); if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit { - cx.emit(RepositoryEvent::BranchChanged) + cx.emit(RepositoryEvent::HeadChanged) } self.snapshot.branch = new_branch; self.snapshot.head_commit = new_head_commit; @@ -7318,7 +7327,8 @@ async fn compute_snapshot( } }) .await?; - let branch = branches.into_iter().find(|branch| branch.is_head); + let branch = branches.iter().find(|branch| branch.is_head).cloned(); + let branch_list: Arc<[Branch]> = branches.into(); let linked_worktrees: Arc<[GitWorktree]> = all_worktrees .into_iter() @@ -7341,14 +7351,16 @@ async fn compute_snapshot( .await?; let snapshot = this.update(cx, |this, cx| { - let branch_changed = + let head_changed = branch != this.snapshot.branch || head_commit != this.snapshot.head_commit; + let branch_list_changed = *branch_list != *this.snapshot.branch_list; let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees; this.snapshot = RepositorySnapshot { id, work_directory_abs_path, branch, + branch_list: branch_list.clone(), head_commit, remote_origin_url, remote_upstream_url, @@ -7357,8 +7369,12 @@ async fn compute_snapshot( ..prev_snapshot }; - if branch_changed { - cx.emit(RepositoryEvent::BranchChanged); + if head_changed { + cx.emit(RepositoryEvent::HeadChanged); + } + + if branch_list_changed { + cx.emit(RepositoryEvent::BranchListChanged); } if worktrees_changed { diff --git a/crates/project/src/git_store/branch_diff.rs b/crates/project/src/git_store/branch_diff.rs index 3b8324fce8ffea7049838aeac09e831463dbd34e..dc7c8bf647585d9fcf1d5f92e0e976f86939a781 100644 --- a/crates/project/src/git_store/branch_diff.rs +++ b/crates/project/src/git_store/branch_diff.rs @@ -70,7 +70,7 @@ impl BranchDiff { } GitStoreEvent::RepositoryUpdated( event_repo_id, - RepositoryEvent::StatusesChanged | RepositoryEvent::BranchChanged, + RepositoryEvent::StatusesChanged | 
RepositoryEvent::HeadChanged, _, ) => this .repo diff --git a/crates/project/tests/integration/project_tests.rs b/crates/project/tests/integration/project_tests.rs index ee5af024776839fde8965f875bf3d12630c1dad2..d6c2ce37c9e60e17bd43c3f6c3ad10cde52b4bec 100644 --- a/crates/project/tests/integration/project_tests.rs +++ b/crates/project/tests/integration/project_tests.rs @@ -11161,7 +11161,7 @@ async fn test_odd_events_for_ignored_dirs( assert_eq!( repository_updates.lock().drain(..).collect::>(), vec![ - RepositoryEvent::BranchChanged, + RepositoryEvent::HeadChanged, RepositoryEvent::StatusesChanged, RepositoryEvent::StatusesChanged, ],