From ee422dea6ed88a441e59ab2b848ac9fc76d7ffb9 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 4 Feb 2025 14:38:37 -0500 Subject: [PATCH 001/130] Bump Zed to v0.174 (#24221) --- Cargo.lock | 2 +- crates/zed/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d51de0f11b5bfdafa99d21cc51034677c8e415ea..1b6b59177deb88e042b3d9c9e1529edf512cf955 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -16411,7 +16411,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.173.0" +version = "0.174.0" dependencies = [ "activity_indicator", "anyhow", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 57f77cad183fd6fa11bbd3e31c4469b146da08b7..5677203d1d6c4825f727a373dd2d4973b178d928 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." edition.workspace = true name = "zed" -version = "0.173.0" +version = "0.174.0" publish.workspace = true license = "GPL-3.0-or-later" authors = ["Zed Team "] From 7da60995cc101d5c93121a534a3fc44088fc7e30 Mon Sep 17 00:00:00 2001 From: Henry Chu Date: Wed, 5 Feb 2025 03:50:49 +0800 Subject: [PATCH 002/130] Enable CSS, JSON, Python, and Tailwind to lookup LSP installed in PATH (#22037) Co-authored-by: Peter Tripp --- crates/languages/src/css.rs | 21 +++++++++- crates/languages/src/json.rs | 18 +++++++++ crates/languages/src/python.rs | 66 ++++++++++++++++++++------------ crates/languages/src/tailwind.rs | 16 ++++++++ 4 files changed, 96 insertions(+), 25 deletions(-) diff --git a/crates/languages/src/css.rs b/crates/languages/src/css.rs index 957f66d5fbd7ccfae044ba33cadfd7967e794b91..dee82c3428340f1a3dc6a49b50f35da03e04d257 100644 --- a/crates/languages/src/css.rs +++ b/crates/languages/src/css.rs @@ -1,7 +1,8 @@ use anyhow::{anyhow, Result}; use async_trait::async_trait; use futures::StreamExt; -use language::{LspAdapter, LspAdapterDelegate}; +use gpui::AsyncApp; +use language::{LanguageToolchainStore, LspAdapter, LspAdapterDelegate}; use lsp::{LanguageServerBinary, LanguageServerName}; use node_runtime::NodeRuntime; use project::Fs; @@ -39,6 +40,24 @@ impl LspAdapter for CssLspAdapter { LanguageServerName("vscode-css-language-server".into()) } + async fn check_if_user_installed( + &self, + delegate: &dyn LspAdapterDelegate, + _: Arc, + _: &AsyncApp, + ) -> Option { + let path = delegate + .which("vscode-css-language-server".as_ref()) + .await?; + let env = delegate.shell_env().await; + + Some(LanguageServerBinary { + path, + env: Some(env), + arguments: vec!["--stdio".into()], + }) + } + async fn fetch_latest_server_version( &self, _: &dyn LspAdapterDelegate, diff --git a/crates/languages/src/json.rs b/crates/languages/src/json.rs index 81f4f479b2afeb624964a21e44fbe52c3ef50b39..02a818bce93d79021bd27fba48be70c07b165d67 100644 --- a/crates/languages/src/json.rs +++ b/crates/languages/src/json.rs @@ -149,6 +149,24 @@ impl LspAdapter for JsonLspAdapter { LanguageServerName("json-language-server".into()) } + async fn check_if_user_installed( + &self, + delegate: &dyn LspAdapterDelegate, + _: Arc, + _: &AsyncApp, + ) -> Option { + let path = delegate + .which("vscode-json-language-server".as_ref()) + .await?; + let env = delegate.shell_env().await; + + Some(LanguageServerBinary { + path, + env: Some(env), + arguments: vec!["--stdio".into()], + }) + } + async fn fetch_latest_server_version( &self, _: &dyn LspAdapterDelegate, diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 
67073e7cf30d58ba4febe77ac7a4fa02a1f370fd..7c72a2be9ac4393a9420756487e12ff98db8c085 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -83,19 +83,28 @@ impl LspAdapter for PythonLspAdapter { _: Arc, _: &AsyncApp, ) -> Option { - let node = delegate.which("node".as_ref()).await?; - let (node_modules_path, _) = delegate - .npm_package_installed_version(Self::SERVER_NAME.as_ref()) - .await - .log_err()??; + if let Some(pyright_bin) = delegate.which(Self::SERVER_NAME.as_ref()).await { + let env = delegate.shell_env().await; + Some(LanguageServerBinary { + path: pyright_bin, + env: Some(env), + arguments: vec!["--stdio".into()], + }) + } else { + let node = delegate.which("node".as_ref()).await?; + let (node_modules_path, _) = delegate + .npm_package_installed_version(Self::SERVER_NAME.as_ref()) + .await + .log_err()??; - let path = node_modules_path.join(NODE_MODULE_RELATIVE_SERVER_PATH); + let path = node_modules_path.join(NODE_MODULE_RELATIVE_SERVER_PATH); - Some(LanguageServerBinary { - path: node, - env: None, - arguments: server_binary_arguments(&path), - }) + Some(LanguageServerBinary { + path: node, + env: None, + arguments: server_binary_arguments(&path), + }) + } } async fn fetch_latest_server_version( @@ -791,19 +800,28 @@ impl LspAdapter for PyLspAdapter { toolchains: Arc, cx: &AsyncApp, ) -> Option { - let venv = toolchains - .active_toolchain( - delegate.worktree_id(), - LanguageName::new("Python"), - &mut cx.clone(), - ) - .await?; - let pylsp_path = Path::new(venv.path.as_ref()).parent()?.join("pylsp"); - pylsp_path.exists().then(|| LanguageServerBinary { - path: venv.path.to_string().into(), - arguments: vec![pylsp_path.into()], - env: None, - }) + if let Some(pylsp_bin) = delegate.which(Self::SERVER_NAME.as_ref()).await { + let env = delegate.shell_env().await; + Some(LanguageServerBinary { + path: pylsp_bin, + env: Some(env), + arguments: vec![], + }) + } else { + let venv = toolchains + .active_toolchain( + delegate.worktree_id(), + LanguageName::new("Python"), + &mut cx.clone(), + ) + .await?; + let pylsp_path = Path::new(venv.path.as_ref()).parent()?.join("pylsp"); + pylsp_path.exists().then(|| LanguageServerBinary { + path: venv.path.to_string().into(), + arguments: vec![pylsp_path.into()], + env: None, + }) + } } async fn fetch_latest_server_version( diff --git a/crates/languages/src/tailwind.rs b/crates/languages/src/tailwind.rs index 5350eeac0ec75c16504743ebfafc0118b1db3b68..04127e98bb1e03d981b4918a841822a0c900dd42 100644 --- a/crates/languages/src/tailwind.rs +++ b/crates/languages/src/tailwind.rs @@ -47,6 +47,22 @@ impl LspAdapter for TailwindLspAdapter { Self::SERVER_NAME.clone() } + async fn check_if_user_installed( + &self, + delegate: &dyn LspAdapterDelegate, + _: Arc, + _: &AsyncApp, + ) -> Option { + let path = delegate.which(Self::SERVER_NAME.as_ref()).await?; + let env = delegate.shell_env().await; + + Some(LanguageServerBinary { + path, + env: Some(env), + arguments: vec!["--stdio".into()], + }) + } + async fn fetch_latest_server_version( &self, _: &dyn LspAdapterDelegate, From cfe0932c0ad9c51bcd3fc4770d135235f7936910 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Tue, 4 Feb 2025 12:15:43 -0800 Subject: [PATCH 003/130] Implement character index for point (#23989) Fixes #22939 Fixes #23970 Supersedes https://github.com/zed-industries/zed/pull/23469 Release Notes: - Fixed a bug where Zed could crash with certain input sources on macOS --------- Co-authored-by: Louis Brunner Co-authored-by: ben --- crates/editor/src/editor.rs 
| 32 +++- crates/editor/src/element.rs | 171 +++++++------------ crates/editor/src/mouse_context_menu.rs | 3 +- crates/gpui/examples/input.rs | 14 ++ crates/gpui/src/geometry.rs | 31 ++++ crates/gpui/src/input.rs | 19 +++ crates/gpui/src/platform.rs | 18 ++ crates/gpui/src/platform/mac/window.rs | 53 ++++-- crates/terminal_view/src/terminal_element.rs | 9 + 9 files changed, 221 insertions(+), 129 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 2d22bfcdd2c910e652d9b3a7b05f64aa250d38c9..0468770e0e59c3fe4cfc4e4151fdf5aaeb9de3fe 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -63,10 +63,10 @@ pub use editor_settings::{ CurrentLineHighlight, EditorSettings, ScrollBeyondLastLine, SearchSettings, ShowScrollbar, }; pub use editor_settings_controls::*; -use element::LineWithInvisibles; pub use element::{ CursorLayout, EditorElement, HighlightedRange, HighlightedRangeLine, PointForPosition, }; +use element::{LineWithInvisibles, PositionMap}; use futures::{future, FutureExt}; use fuzzy::StringMatchCandidate; @@ -715,6 +715,7 @@ pub struct Editor { >, >, last_bounds: Option>, + last_position_map: Option>, expect_bounds_change: Option>, tasks: BTreeMap<(BufferId, BufferRow), RunnableTasks>, tasks_update_task: Option>, @@ -1377,6 +1378,7 @@ impl Editor { gutter_hovered: false, pixel_position_of_newest_cursor: None, last_bounds: None, + last_position_map: None, expect_bounds_change: None, gutter_dimensions: GutterDimensions::default(), style: None, @@ -14386,7 +14388,7 @@ impl Editor { .and_then(|item| item.to_any().downcast_ref::()) } - fn character_size(&self, window: &mut Window) -> gpui::Point { + fn character_size(&self, window: &mut Window) -> gpui::Size { let text_layout_details = self.text_layout_details(window); let style = &text_layout_details.editor_style; let font_id = window.text_system().resolve_font(&style.text.font()); @@ -14394,7 +14396,7 @@ impl Editor { let line_height = style.text.line_height_in_pixels(window.rem_size()); let em_width = window.text_system().em_width(font_id, font_size).unwrap(); - gpui::Point::new(em_width, line_height) + gpui::Size::new(em_width, line_height) } } @@ -15902,9 +15904,9 @@ impl EntityInputHandler for Editor { cx: &mut Context, ) -> Option> { let text_layout_details = self.text_layout_details(window); - let gpui::Point { - x: em_width, - y: line_height, + let gpui::Size { + width: em_width, + height: line_height, } = self.character_size(window); let snapshot = self.snapshot(window, cx); @@ -15922,6 +15924,24 @@ impl EntityInputHandler for Editor { size: size(em_width, line_height), }) } + + fn character_index_for_point( + &mut self, + point: gpui::Point, + _window: &mut Window, + _cx: &mut Context, + ) -> Option { + let position_map = self.last_position_map.as_ref()?; + if !position_map.text_hitbox.contains(&point) { + return None; + } + let display_point = position_map.point_for_position(point).previous_valid; + let anchor = position_map + .snapshot + .display_point_to_anchor(display_point, Bias::Left); + let utf16_offset = anchor.to_offset_utf16(&position_map.snapshot.buffer_snapshot); + Some(utf16_offset.0) + } } trait SelectionExt { diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 4b57f8654495ce8f7b676ee50f8822bd9bbb76bd..bf3d9ab78dc04667fb4c0e9ad9bb22f66341d6a1 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -503,7 +503,6 @@ impl EditorElement { let position_map = layout.position_map.clone(); window.on_key_event({ let 
editor = self.editor.clone(); - let text_hitbox = layout.text_hitbox.clone(); move |event: &ModifiersChangedEvent, phase, window, cx| { if phase != DispatchPhase::Bubble { return; @@ -512,7 +511,7 @@ impl EditorElement { if editor.hover_state.focused(window, cx) { return; } - Self::modifiers_changed(editor, event, &position_map, &text_hitbox, window, cx) + Self::modifiers_changed(editor, event, &position_map, window, cx) }) } }); @@ -522,19 +521,18 @@ impl EditorElement { editor: &mut Editor, event: &ModifiersChangedEvent, position_map: &PositionMap, - text_hitbox: &Hitbox, window: &mut Window, cx: &mut Context, ) { editor.update_inline_completion_preview(&event.modifiers, window, cx); let mouse_position = window.mouse_position(); - if !text_hitbox.is_hovered(window) { + if !position_map.text_hitbox.is_hovered(window) { return; } editor.update_hovered_link( - position_map.point_for_position(text_hitbox.bounds, mouse_position), + position_map.point_for_position(mouse_position), &position_map.snapshot, event.modifiers, window, @@ -542,14 +540,11 @@ impl EditorElement { ) } - #[allow(clippy::too_many_arguments)] fn mouse_left_down( editor: &mut Editor, event: &MouseDownEvent, hovered_hunk: Option>, position_map: &PositionMap, - text_hitbox: &Hitbox, - gutter_hitbox: &Hitbox, line_numbers: &HashMap, window: &mut Window, cx: &mut Context, @@ -558,6 +553,8 @@ impl EditorElement { return; } + let text_hitbox = &position_map.text_hitbox; + let gutter_hitbox = &position_map.gutter_hitbox; let mut click_count = event.click_count; let mut modifiers = event.modifiers; @@ -614,8 +611,7 @@ impl EditorElement { } } - let point_for_position = - position_map.point_for_position(text_hitbox.bounds, event.position); + let point_for_position = position_map.point_for_position(event.position); let position = point_for_position.previous_valid; if modifiers.shift && modifiers.alt { editor.select( @@ -690,15 +686,13 @@ impl EditorElement { editor: &mut Editor, event: &MouseDownEvent, position_map: &PositionMap, - text_hitbox: &Hitbox, window: &mut Window, cx: &mut Context, ) { - if !text_hitbox.is_hovered(window) { + if !position_map.text_hitbox.is_hovered(window) { return; } - let point_for_position = - position_map.point_for_position(text_hitbox.bounds, event.position); + let point_for_position = position_map.point_for_position(event.position); mouse_context_menu::deploy_context_menu( editor, Some(event.position), @@ -713,16 +707,14 @@ impl EditorElement { editor: &mut Editor, event: &MouseDownEvent, position_map: &PositionMap, - text_hitbox: &Hitbox, window: &mut Window, cx: &mut Context, ) { - if !text_hitbox.is_hovered(window) || window.default_prevented() { + if !position_map.text_hitbox.is_hovered(window) || window.default_prevented() { return; } - let point_for_position = - position_map.point_for_position(text_hitbox.bounds, event.position); + let point_for_position = position_map.point_for_position(event.position); let position = point_for_position.previous_valid; editor.select( @@ -739,15 +731,11 @@ impl EditorElement { fn mouse_up( editor: &mut Editor, event: &MouseUpEvent, - #[cfg_attr( - not(any(target_os = "linux", target_os = "freebsd")), - allow(unused_variables) - )] position_map: &PositionMap, - text_hitbox: &Hitbox, window: &mut Window, cx: &mut Context, ) { + let text_hitbox = &position_map.text_hitbox; let end_selection = editor.has_pending_selection(); let pending_nonempty_selections = editor.has_pending_nonempty_selection(); @@ -767,8 +755,7 @@ impl EditorElement { #[cfg(any(target_os = 
"linux", target_os = "freebsd"))] if EditorSettings::get_global(cx).middle_click_paste { if let Some(text) = cx.read_from_primary().and_then(|item| item.text()) { - let point_for_position = - position_map.point_for_position(text_hitbox.bounds, event.position); + let point_for_position = position_map.point_for_position(event.position); let position = point_for_position.previous_valid; editor.select( @@ -791,10 +778,10 @@ impl EditorElement { editor: &mut Editor, event: &ClickEvent, position_map: &PositionMap, - text_hitbox: &Hitbox, window: &mut Window, cx: &mut Context, ) { + let text_hitbox = &position_map.text_hitbox; let pending_nonempty_selections = editor.has_pending_nonempty_selection(); let multi_cursor_setting = EditorSettings::get_global(cx).multi_cursor_modifier; @@ -804,7 +791,7 @@ impl EditorElement { }; if !pending_nonempty_selections && multi_cursor_modifier && text_hitbox.is_hovered(window) { - let point = position_map.point_for_position(text_hitbox.bounds, event.up.position); + let point = position_map.point_for_position(event.up.position); editor.handle_click_hovered_link(point, event.modifiers(), window, cx); cx.stop_propagation(); @@ -815,7 +802,6 @@ impl EditorElement { editor: &mut Editor, event: &MouseMoveEvent, position_map: &PositionMap, - text_bounds: Bounds, window: &mut Window, cx: &mut Context, ) { @@ -823,7 +809,8 @@ impl EditorElement { return; } - let point_for_position = position_map.point_for_position(text_bounds, event.position); + let text_bounds = position_map.text_hitbox.bounds; + let point_for_position = position_map.point_for_position(event.position); let mut scroll_delta = gpui::Point::::default(); let vertical_margin = position_map.line_height.min(text_bounds.size.height / 3.0); let top = text_bounds.origin.y + vertical_margin; @@ -870,19 +857,18 @@ impl EditorElement { editor: &mut Editor, event: &MouseMoveEvent, position_map: &PositionMap, - text_hitbox: &Hitbox, - gutter_hitbox: &Hitbox, window: &mut Window, cx: &mut Context, ) { + let text_hitbox = &position_map.text_hitbox; + let gutter_hitbox = &position_map.gutter_hitbox; let modifiers = event.modifiers; let gutter_hovered = gutter_hitbox.is_hovered(window); editor.set_gutter_hovered(gutter_hovered, cx); // Don't trigger hover popover if mouse is hovering over context menu if text_hitbox.is_hovered(window) { - let point_for_position = - position_map.point_for_position(text_hitbox.bounds, event.position); + let point_for_position = position_map.point_for_position(event.position); editor.update_hovered_link( point_for_position, @@ -4107,8 +4093,7 @@ impl EditorElement { window: &mut Window, cx: &mut App, ) -> Vec { - let point_for_position = - position_map.point_for_position(text_hitbox.bounds, window.mouse_position()); + let point_for_position = position_map.point_for_position(window.mouse_position()); let mut controls = vec![]; @@ -4245,7 +4230,10 @@ impl EditorElement { let scroll_top = layout.position_map.snapshot.scroll_position().y; let gutter_bg = cx.theme().colors().editor_gutter_background; window.paint_quad(fill(layout.gutter_hitbox.bounds, gutter_bg)); - window.paint_quad(fill(layout.text_hitbox.bounds, self.style.background)); + window.paint_quad(fill( + layout.position_map.text_hitbox.bounds, + self.style.background, + )); if let EditorMode::Full = layout.mode { let mut active_rows = layout.active_rows.iter().peekable(); @@ -4270,8 +4258,8 @@ impl EditorElement { end: layout.gutter_hitbox.right(), }), CurrentLineHighlight::Line => Some(Range { - start: 
layout.text_hitbox.bounds.left(), - end: layout.text_hitbox.bounds.right(), + start: layout.position_map.text_hitbox.bounds.left(), + end: layout.position_map.text_hitbox.bounds.right(), }), CurrentLineHighlight::All => Some(Range { start: layout.hitbox.left(), @@ -4345,7 +4333,7 @@ impl EditorElement { layout.position_map.snapshot.scroll_position().x * layout.position_map.em_width; for (wrap_position, active) in layout.wrap_guides.iter() { - let x = (layout.text_hitbox.origin.x + let x = (layout.position_map.text_hitbox.origin.x + *wrap_position + layout.position_map.em_width / 2.) - scroll_left; @@ -4357,7 +4345,7 @@ impl EditorElement { || scrollbar_y.as_ref().map_or(false, |sy| sy.visible) }; - if x < layout.text_hitbox.origin.x + if x < layout.position_map.text_hitbox.origin.x || (show_scrollbars && x > self.scrollbar_left(&layout.hitbox.bounds)) { continue; @@ -4370,8 +4358,8 @@ impl EditorElement { }; window.paint_quad(fill( Bounds { - origin: point(x, layout.text_hitbox.origin.y), - size: size(px(1.), layout.text_hitbox.size.height), + origin: point(x, layout.position_map.text_hitbox.origin.y), + size: size(px(1.), layout.position_map.text_hitbox.size.height), }, color, )); @@ -4746,7 +4734,7 @@ impl EditorElement { fn paint_text(&mut self, layout: &mut EditorLayout, window: &mut Window, cx: &mut App) { window.with_content_mask( Some(ContentMask { - bounds: layout.text_hitbox.bounds, + bounds: layout.position_map.text_hitbox.bounds, }), |window| { let cursor_style = if self @@ -4760,7 +4748,7 @@ impl EditorElement { } else { CursorStyle::IBeam }; - window.set_cursor_style(cursor_style, &layout.text_hitbox); + window.set_cursor_style(cursor_style, &layout.position_map.text_hitbox); let invisible_display_ranges = self.paint_highlights(layout, window); self.paint_lines(&invisible_display_ranges, layout, window, cx); @@ -4782,7 +4770,7 @@ impl EditorElement { layout: &mut EditorLayout, window: &mut Window, ) -> SmallVec<[Range; 32]> { - window.paint_layer(layout.text_hitbox.bounds, |window| { + window.paint_layer(layout.position_map.text_hitbox.bounds, |window| { let mut invisible_display_ranges = SmallVec::<[Range; 32]>::new(); let line_end_overshoot = 0.15 * layout.position_map.line_height; for (range, color) in &layout.highlighted_ranges { @@ -4861,7 +4849,7 @@ impl EditorElement { // A softer than perfect black let redaction_color = gpui::rgb(0x0e1111); - window.paint_layer(layout.text_hitbox.bounds, |window| { + window.paint_layer(layout.position_map.text_hitbox.bounds, |window| { for range in layout.redacted_ranges.iter() { self.paint_highlighted_range( range.clone(), @@ -5435,13 +5423,13 @@ impl EditorElement { .collect(), }; - highlighted_range.paint(layout.text_hitbox.bounds, window); + highlighted_range.paint(layout.position_map.text_hitbox.bounds, window); } } fn paint_inline_blame(&mut self, layout: &mut EditorLayout, window: &mut Window, cx: &mut App) { if let Some(mut inline_blame) = layout.inline_blame.take() { - window.paint_layer(layout.text_hitbox.bounds, |window| { + window.paint_layer(layout.position_map.text_hitbox.bounds, |window| { inline_blame.paint(window, cx); }) } @@ -5560,8 +5548,6 @@ impl EditorElement { window.on_mouse_event({ let position_map = layout.position_map.clone(); let editor = self.editor.clone(); - let text_hitbox = layout.text_hitbox.clone(); - let gutter_hitbox = layout.gutter_hitbox.clone(); let multi_buffer_range = layout .display_hunks @@ -5600,32 +5586,16 @@ impl EditorElement { event, multi_buffer_range.clone(), &position_map, - 
&text_hitbox, - &gutter_hitbox, line_numbers.as_ref(), window, cx, ); }), MouseButton::Right => editor.update(cx, |editor, cx| { - Self::mouse_right_down( - editor, - event, - &position_map, - &text_hitbox, - window, - cx, - ); + Self::mouse_right_down(editor, event, &position_map, window, cx); }), MouseButton::Middle => editor.update(cx, |editor, cx| { - Self::mouse_middle_down( - editor, - event, - &position_map, - &text_hitbox, - window, - cx, - ); + Self::mouse_middle_down(editor, event, &position_map, window, cx); }), _ => {} }; @@ -5636,12 +5606,11 @@ impl EditorElement { window.on_mouse_event({ let editor = self.editor.clone(); let position_map = layout.position_map.clone(); - let text_hitbox = layout.text_hitbox.clone(); move |event: &MouseUpEvent, phase, window, cx| { if phase == DispatchPhase::Bubble { editor.update(cx, |editor, cx| { - Self::mouse_up(editor, event, &position_map, &text_hitbox, window, cx) + Self::mouse_up(editor, event, &position_map, window, cx) }); } } @@ -5650,8 +5619,6 @@ impl EditorElement { window.on_mouse_event({ let editor = self.editor.clone(); let position_map = layout.position_map.clone(); - let text_hitbox = layout.text_hitbox.clone(); - let mut captured_mouse_down = None; move |event: &MouseUpEvent, phase, window, cx| match phase { @@ -5665,7 +5632,7 @@ impl EditorElement { .clone(); let mut pending_mouse_down = pending_mouse_down.borrow_mut(); - if pending_mouse_down.is_some() && text_hitbox.is_hovered(window) { + if pending_mouse_down.is_some() && position_map.text_hitbox.is_hovered(window) { captured_mouse_down = pending_mouse_down.take(); window.refresh(); } @@ -5677,7 +5644,7 @@ impl EditorElement { down: mouse_down, up: event.clone(), }; - Self::click(editor, &event, &position_map, &text_hitbox, window, cx); + Self::click(editor, &event, &position_map, window, cx); } }), } @@ -5686,8 +5653,6 @@ impl EditorElement { window.on_mouse_event({ let position_map = layout.position_map.clone(); let editor = self.editor.clone(); - let text_hitbox = layout.text_hitbox.clone(); - let gutter_hitbox = layout.gutter_hitbox.clone(); move |event: &MouseMoveEvent, phase, window, cx| { if phase == DispatchPhase::Bubble { @@ -5698,25 +5663,10 @@ impl EditorElement { if event.pressed_button == Some(MouseButton::Left) || event.pressed_button == Some(MouseButton::Middle) { - Self::mouse_dragged( - editor, - event, - &position_map, - text_hitbox.bounds, - window, - cx, - ) + Self::mouse_dragged(editor, event, &position_map, window, cx) } - Self::mouse_moved( - editor, - event, - &position_map, - &text_hitbox, - &gutter_hitbox, - window, - cx, - ) + Self::mouse_moved(editor, event, &position_map, window, cx) }); } } @@ -7566,6 +7516,12 @@ impl Element for EditorElement { em_width, em_advance, snapshot, + gutter_hitbox: gutter_hitbox.clone(), + text_hitbox: text_hitbox.clone(), + }); + + self.editor.update(cx, |editor, _| { + editor.last_position_map = Some(position_map.clone()) }); let hunk_controls = self.layout_diff_hunk_controls( @@ -7589,7 +7545,6 @@ impl Element for EditorElement { wrap_guides, indent_guides, hitbox, - text_hitbox, gutter_hitbox, display_hunks, content_origin, @@ -7761,7 +7716,6 @@ impl IntoElement for EditorElement { pub struct EditorLayout { position_map: Rc, hitbox: Hitbox, - text_hitbox: Hitbox, gutter_hitbox: Hitbox, content_origin: gpui::Point, scrollbars_layout: AxisPair>, @@ -7941,15 +7895,17 @@ struct CreaseTrailerLayout { bounds: Bounds, } -struct PositionMap { - size: Size, - line_height: Pixels, - scroll_pixel_position: gpui::Point, - 
scroll_max: gpui::Point, - em_width: Pixels, - em_advance: Pixels, - line_layouts: Vec, - snapshot: EditorSnapshot, +pub(crate) struct PositionMap { + pub size: Size, + pub line_height: Pixels, + pub scroll_pixel_position: gpui::Point, + pub scroll_max: gpui::Point, + pub em_width: Pixels, + pub em_advance: Pixels, + pub line_layouts: Vec, + pub snapshot: EditorSnapshot, + pub text_hitbox: Hitbox, + pub gutter_hitbox: Hitbox, } #[derive(Debug, Copy, Clone)] @@ -7971,11 +7927,8 @@ impl PointForPosition { } impl PositionMap { - fn point_for_position( - &self, - text_bounds: Bounds, - position: gpui::Point, - ) -> PointForPosition { + pub(crate) fn point_for_position(&self, position: gpui::Point) -> PointForPosition { + let text_bounds = self.text_hitbox.bounds; let scroll_position = self.snapshot.scroll_position(); let position = position - text_bounds.origin; let y = position.y.max(px(0.)).min(self.size.height); diff --git a/crates/editor/src/mouse_context_menu.rs b/crates/editor/src/mouse_context_menu.rs index b9a9aa623de3c7f431433c29099ab77704cbe61e..64c5d730d273862a2b952c6bb172f9865739679a 100644 --- a/crates/editor/src/mouse_context_menu.rs +++ b/crates/editor/src/mouse_context_menu.rs @@ -229,9 +229,10 @@ pub fn deploy_context_menu( cx, ), None => { + let character_size = editor.character_size(window); let menu_position = MenuPosition::PinnedToEditor { source: source_anchor, - offset: editor.character_size(window), + offset: gpui::point(character_size.width, character_size.height), }; Some(MouseContextMenu::new( menu_position, diff --git a/crates/gpui/examples/input.rs b/crates/gpui/examples/input.rs index 7d0a72f21a702934ef26a30dfaf9a80d519103f3..a3a9d1f321197281297e0d3d091f6b5798ee9d2c 100644 --- a/crates/gpui/examples/input.rs +++ b/crates/gpui/examples/input.rs @@ -364,6 +364,20 @@ impl EntityInputHandler for TextInput { ), )) } + + fn character_index_for_point( + &mut self, + point: gpui::Point, + _window: &mut Window, + _cx: &mut Context, + ) -> Option { + let line_point = self.last_bounds?.localize(&point)?; + let last_layout = self.last_layout.as_ref()?; + + assert_eq!(last_layout.text, self.content); + let utf8_index = last_layout.index_for_x(point.x - line_point.x)?; + Some(self.offset_to_utf16(utf8_index)) + } } struct TextElement { diff --git a/crates/gpui/src/geometry.rs b/crates/gpui/src/geometry.rs index 11b4a58554bafbecb049088d8b43238cbd4f315a..d6cd83ae605afa448c153649ed197823ecda5f2a 100644 --- a/crates/gpui/src/geometry.rs +++ b/crates/gpui/src/geometry.rs @@ -217,6 +217,19 @@ impl Point { } } +impl Point +where + T: Sub + Debug + Clone + Default, +{ + /// Get the position of this point, relative to the given origin + pub fn relative_to(&self, origin: &Point) -> Point { + point( + self.x.clone() - origin.x.clone(), + self.y.clone() - origin.y.clone(), + ) + } +} + impl Mul for Point where T: Mul + Clone + Default + Debug, @@ -376,6 +389,13 @@ pub struct Size { pub height: T, } +impl Size { + /// Create a new Size, a synonym for [`size`] + pub fn new(width: T, height: T) -> Self { + size(width, height) + } +} + /// Constructs a new `Size` with the provided width and height. /// /// # Arguments @@ -1456,6 +1476,17 @@ where } } +impl Bounds +where + T: Add + PartialOrd + Clone + Default + Debug + Sub, +{ + /// Convert a point to the coordinate space defined by this Bounds + pub fn localize(&self, point: &Point) -> Option> { + self.contains(point) + .then(|| point.relative_to(&self.origin)) + } +} + /// Checks if the bounds represent an empty area. 
/// /// # Returns diff --git a/crates/gpui/src/input.rs b/crates/gpui/src/input.rs index 41a63bd8a3628311a46a491bbf9107eb42edd09b..4acd7f90c1273a1eb51b1be2ccc672a79e6f7710 100644 --- a/crates/gpui/src/input.rs +++ b/crates/gpui/src/input.rs @@ -62,6 +62,14 @@ pub trait EntityInputHandler: 'static + Sized { window: &mut Window, cx: &mut Context, ) -> Option>; + + /// See [`InputHandler::character_index_for_point`] for details + fn character_index_for_point( + &mut self, + point: crate::Point, + window: &mut Window, + cx: &mut Context, + ) -> Option; } /// The canonical implementation of [`PlatformInputHandler`]. Call [`Window::handle_input`] @@ -158,4 +166,15 @@ impl InputHandler for ElementInputHandler { view.bounds_for_range(range_utf16, self.element_bounds, window, cx) }) } + + fn character_index_for_point( + &mut self, + point: crate::Point, + window: &mut Window, + cx: &mut App, + ) -> Option { + self.view.update(cx, |view, cx| { + view.character_index_for_point(point, window, cx) + }) + } } diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index 104ec71b8d4160077043315d3cd0df4e7d45f87b..e3af6654f17107c265c38437e6d6d706ca2667bb 100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -792,6 +792,14 @@ impl PlatformInputHandler { cx, ) } + + #[allow(unused)] + pub fn character_index_for_point(&mut self, point: Point) -> Option { + self.cx + .update(|window, cx| self.handler.character_index_for_point(point, window, cx)) + .ok() + .flatten() + } } /// A struct representing a selection in a text buffer, in UTF16 characters. @@ -882,6 +890,16 @@ pub trait InputHandler: 'static { cx: &mut App, ) -> Option>; + /// Get the character offset for the given point in terms of UTF16 characters + /// + /// Corresponds to [characterIndexForPoint:](https://developer.apple.com/documentation/appkit/nstextinputclient/characterindex(for:)) + fn character_index_for_point( + &mut self, + point: Point, + window: &mut Window, + cx: &mut App, + ) -> Option; + /// Allows a given input context to opt into getting raw key repeats instead of /// sending these to the platform. 
/// TODO: Ideally we should be able to set ApplePressAndHoldEnabled in NSUserDefaults diff --git a/crates/gpui/src/platform/mac/window.rs b/crates/gpui/src/platform/mac/window.rs index 9d874b4f9b85e24b0d90f82a45f7adcdacd436f4..ba5011ef160af20b97d9c118d662fb80b0449153 100644 --- a/crates/gpui/src/platform/mac/window.rs +++ b/crates/gpui/src/platform/mac/window.rs @@ -17,8 +17,8 @@ use cocoa::{ }, base::{id, nil}, foundation::{ - NSArray, NSAutoreleasePool, NSDictionary, NSFastEnumeration, NSInteger, NSPoint, NSRect, - NSSize, NSString, NSUInteger, + NSArray, NSAutoreleasePool, NSDictionary, NSFastEnumeration, NSInteger, NSNotFound, + NSPoint, NSRect, NSSize, NSString, NSUInteger, }, }; use core_graphics::display::{CGDirectDisplayID, CGPoint, CGRect}; @@ -227,6 +227,11 @@ unsafe fn build_classes() { accepts_first_mouse as extern "C" fn(&Object, Sel, id) -> BOOL, ); + decl.add_method( + sel!(characterIndexForPoint:), + character_index_for_point as extern "C" fn(&Object, Sel, NSPoint) -> u64, + ); + decl.register() }; } @@ -1687,17 +1692,7 @@ extern "C" fn first_rect_for_character_range( range: NSRange, _: id, ) -> NSRect { - let frame: NSRect = unsafe { - let state = get_window_state(this); - let lock = state.lock(); - let mut frame = NSWindow::frame(lock.native_window); - let content_layout_rect: CGRect = msg_send![lock.native_window, contentLayoutRect]; - let style_mask: NSWindowStyleMask = msg_send![lock.native_window, styleMask]; - if !style_mask.contains(NSWindowStyleMask::NSFullSizeContentViewWindowMask) { - frame.origin.y -= frame.size.height - content_layout_rect.size.height; - } - frame - }; + let frame = get_frame(this); with_input_handler(this, |input_handler| { input_handler.bounds_for_range(range.to_range()?) }) @@ -1718,6 +1713,20 @@ extern "C" fn first_rect_for_character_range( ) } +fn get_frame(this: &Object) -> NSRect { + unsafe { + let state = get_window_state(this); + let lock = state.lock(); + let mut frame = NSWindow::frame(lock.native_window); + let content_layout_rect: CGRect = msg_send![lock.native_window, contentLayoutRect]; + let style_mask: NSWindowStyleMask = msg_send![lock.native_window, styleMask]; + if !style_mask.contains(NSWindowStyleMask::NSFullSizeContentViewWindowMask) { + frame.origin.y -= frame.size.height - content_layout_rect.size.height; + } + frame + } +} + extern "C" fn insert_text(this: &Object, _: Sel, text: id, replacement_range: NSRange) { unsafe { let is_attributed_string: BOOL = @@ -1831,6 +1840,24 @@ extern "C" fn accepts_first_mouse(this: &Object, _: Sel, _: id) -> BOOL { YES } +extern "C" fn character_index_for_point(this: &Object, _: Sel, position: NSPoint) -> u64 { + let position = screen_point_to_gpui_point(this, position); + with_input_handler(this, |input_handler| { + input_handler.character_index_for_point(position) + }) + .flatten() + .map(|index| index as u64) + .unwrap_or(NSNotFound as u64) +} + +fn screen_point_to_gpui_point(this: &Object, position: NSPoint) -> Point { + let frame = get_frame(this); + let window_x = position.x - frame.origin.x; + let window_y = frame.size.height - (position.y - frame.origin.y); + let position = point(px(window_x as f32), px(window_y as f32)); + position +} + extern "C" fn dragging_entered(this: &Object, _: Sel, dragging_info: id) -> NSDragOperation { let window_state = unsafe { get_window_state(this) }; let position = drag_event_position(&window_state, dragging_info); diff --git a/crates/terminal_view/src/terminal_element.rs b/crates/terminal_view/src/terminal_element.rs index 
a8574fbc006a7996e8092d49bd4bd5a907609328..eb3a1489e3f3e6e8a0bfa7cd12b7124f9a876014 100644
--- a/crates/terminal_view/src/terminal_element.rs
+++ b/crates/terminal_view/src/terminal_element.rs
@@ -1073,6 +1073,15 @@ impl InputHandler for TerminalInputHandler {
     fn apple_press_and_hold_enabled(&mut self) -> bool {
         false
     }
+
+    fn character_index_for_point(
+        &mut self,
+        _point: Point<Pixels>,
+        _window: &mut Window,
+        _cx: &mut App,
+    ) -> Option<usize> {
+        None
+    }
 }
 
 pub fn is_blank(cell: &IndexedCell) -> bool {

From 69bb0a0597f74704604805a5ddff5df7207b0f3b Mon Sep 17 00:00:00 2001
From: Mikayla Maki
Date: Tue, 4 Feb 2025 12:23:20 -0800
Subject: [PATCH 004/130] Fix slow focus transitions to the terminal panel
 (#24172)

This long-standing bug was caused by `Pane`'s focus_in handler bouncing the
focus to another handle. Because focus resolution happens _after_ a frame has
been rendered, the only way to deal with this case is to schedule another
frame to be redrawn. However, we were suppressing all window refreshes that
occur during a focus transfer, causing this focus change to be completely
missed. Changing this behavior, however, can lead to infinite notify loops,
because drawing one frame can cause another to be rendered.

This PR fixes this problem narrowly by adding an `on_next_frame()` callback in
the pane's focus_in handler, so that the focus changes take effect almost
immediately, but only in this case, where we know it doesn't cause infinite
notify loops.

TODO:
- [x] Fix the infinite notify loop bug or determine a third way to fix this lag

Release Notes:

- Fixed a bug where shifting focus to the terminal panel could be slow
---
 crates/gpui/src/window.rs    | 4 ++--
 crates/workspace/src/pane.rs | 5 +++++
 2 files changed, 7 insertions(+), 2 deletions(-)

diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs
index e522ea85957b9f64193c937e65d5160f5ad39d31..1b5b6bc71788f77b2ac3a87aa1aa7366389f1dea 100644
--- a/crates/gpui/src/window.rs
+++ b/crates/gpui/src/window.rs
@@ -137,7 +137,7 @@ impl WindowInvalidator {
         self.inner.borrow_mut().dirty_views = views;
     }
 
-    pub fn not_painting(&self) -> bool {
+    pub fn not_drawing(&self) -> bool {
         self.inner.borrow().draw_phase == DrawPhase::None
     }
 
@@ -1035,7 +1035,7 @@ impl Window {
     /// Mark the window as dirty, scheduling it to be redrawn on the next frame.
     pub fn refresh(&mut self) {
-        if self.invalidator.not_painting() {
+        if self.invalidator.not_drawing() {
             self.refreshing = true;
             self.invalidator.set_dirty(true);
         }
     }
diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs
index f161501e23cac7338d04fb20897298f0c6b09a85..4480f4acbfc835e29c0444cf3a19f0b4d3497a68 100644
--- a/crates/workspace/src/pane.rs
+++ b/crates/workspace/src/pane.rs
@@ -582,6 +582,11 @@ impl Pane {
 
         if let Some(active_item) = self.active_item() {
             if self.focus_handle.is_focused(window) {
+                // Schedule a redraw next frame, so that the focus changes below take effect
+                cx.on_next_frame(window, |_, _, cx| {
+                    cx.notify();
+                });
+
                 // Pane was focused directly. We need to either focus a view inside the active item,
                 // or focus the active item itself
                 if let Some(weak_last_focus_handle) =

From 871f98bc4d7381aef35563dfd1e6c031602eb820 Mon Sep 17 00:00:00 2001
From: Cole Miller
Date: Tue, 4 Feb 2025 15:26:06 -0500
Subject: [PATCH 005/130] Bump openssl to fix vulnerability (#24223)

See: https://github.com/advisories/GHSA-rpmj-rpgj-qmpm

Release Notes:

- N/A
---
 Cargo.lock | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 1b6b59177deb88e042b3d9c9e1529edf512cf955..565c1a729eebf4044ff7a6c77959142daf8f9be5 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -8664,9 +8664,9 @@ dependencies = [
 
 [[package]]
 name = "openssl"
-version = "0.10.68"
+version = "0.10.70"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6174bc48f102d208783c2c84bf931bb75927a617866870de8a4ea85597f871f5"
+checksum = "61cfb4e166a8bb8c9b55c500bc2308550148ece889be90f609377e58140f42c6"
 dependencies = [
  "bitflags 2.8.0",
  "cfg-if",
@@ -8705,9 +8705,9 @@ dependencies = [
 
 [[package]]
 name = "openssl-sys"
-version = "0.9.104"
+version = "0.9.105"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "45abf306cbf99debc8195b66b7346498d7b10c210de50418b5ccd7ceba08c741"
+checksum = "8b22d5b84be05a8d6947c7cb71f7c849aa0f112acd4bf51c2a7c1c988ac0a9dc"
 dependencies = [
  "cc",
  "libc",

From 5704b50fb1a147b6f0564b3250f8cc50a1de6507 Mon Sep 17 00:00:00 2001
From: Cole Miller
Date: Tue, 4 Feb 2025 15:29:10 -0500
Subject: [PATCH 006/130] git: Compute and synchronize diffs from HEAD (#23626)

This PR builds on #21258 to make it possible to use HEAD as a diff base. The
buffer store is extended to support holding multiple change sets, and collab
gains support for synchronizing the committed text of files when any
collaborator requires it.

Not implemented in this PR:

- Exposing the diff from HEAD to the user
- Decorating the diff from HEAD with information about which hunks are staged

`test_random_multibuffer` now fails first at `SEED=13277`, similar to the
previous high-water mark, but with various bugs in the multibuffer logic now
shaken out.
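To make the new capability concrete, here is a minimal sketch of how a caller
can obtain the diff from HEAD for an open buffer, mirroring the
`open_uncommitted_changes` API exercised by the collab integration test in this
patch. The surrounding `project`, `buffer`, `committed_text`, and gpui test
context (`cx`) are assumed to exist, and exact signatures may differ outside
the test code:

    // Hedged sketch, following the shape of the integration test below:
    // open the committed-text (HEAD) diff for a buffer and inspect it.
    let uncommitted_changes = project
        .update(cx, |project, cx| {
            project.open_uncommitted_changes(buffer.clone(), cx)
        })
        .await
        .unwrap();
    uncommitted_changes.read_with(cx, |change_set, cx| {
        // With this change the base text is the committed (HEAD) text rather
        // than the index text used by `open_unstaged_changes`.
        assert_eq!(
            change_set.base_text_string().as_deref(),
            Some(committed_text.as_str())
        );
        // Hunks are computed against that committed base text; the expected
        // hunk here assumes the same buffer contents as the test.
        let buffer = buffer.read(cx);
        git::diff::assert_hunks(
            change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
            buffer,
            &change_set.base_text_string().unwrap(),
            &[(1..2, "TWO\n", "two\n")],
        );
    });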
Release Notes: - N/A --------- Co-authored-by: Max Co-authored-by: Ben Co-authored-by: Max Brunsfeld Co-authored-by: Conrad Irwin Co-authored-by: Conrad --- crates/collab/src/rpc.rs | 5 +- crates/collab/src/tests/editor_tests.rs | 7 +- crates/collab/src/tests/integration_tests.rs | 134 +- .../random_project_collaboration_tests.rs | 8 +- crates/diagnostics/src/items.rs | 2 +- crates/editor/src/display_map/wrap_map.rs | 1 + crates/editor/src/editor.rs | 7 +- crates/editor/src/editor_tests.rs | 45 +- crates/editor/src/element.rs | 2 +- crates/editor/src/git/blame.rs | 8 +- crates/editor/src/git/project_diff.rs | 1296 +++++++++++++++++ crates/editor/src/hover_popover.rs | 3 +- crates/editor/src/proposed_changes_editor.rs | 8 +- crates/editor/src/test/editor_test_context.rs | 2 +- crates/fs/src/fs.rs | 27 +- crates/git/src/diff.rs | 78 +- crates/git/src/repository.rs | 52 +- crates/language/src/buffer.rs | 28 + crates/multi_buffer/src/multi_buffer.rs | 209 ++- crates/multi_buffer/src/multi_buffer_tests.rs | 150 +- crates/project/src/buffer_store.rs | 1093 ++++++++++---- crates/project/src/project.rs | 14 + crates/project/src/project_tests.rs | 106 +- crates/proto/proto/zed.proto | 49 +- crates/proto/src/proto.rs | 16 +- .../remote_server/src/remote_editing_tests.rs | 4 +- crates/rope/src/rope.rs | 4 + crates/sum_tree/src/sum_tree.rs | 4 + crates/worktree/src/worktree.rs | 24 + 29 files changed, 2791 insertions(+), 595 deletions(-) create mode 100644 crates/editor/src/git/project_diff.rs diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 6b6d1d9749822534555032e7774573a441743137..eea17d45fb3ba9c4b39644c95d1971cf23964105 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -309,7 +309,8 @@ impl Server { .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) - .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_read_only_project_request::) .add_request_handler( forward_mutating_project_request::, ) @@ -348,7 +349,7 @@ impl Server { .add_message_handler(broadcast_project_message_from_host::) .add_message_handler(broadcast_project_message_from_host::) .add_message_handler(broadcast_project_message_from_host::) - .add_message_handler(broadcast_project_message_from_host::) + .add_message_handler(broadcast_project_message_from_host::) .add_request_handler(get_users) .add_request_handler(fuzzy_search_users) .add_request_handler(request_contact) diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index e35113c0c04c6f36ecf5b5ed287c37990dd134ab..a094d9cd8c91351ae83adccd6f485b212d142c42 100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ -1991,10 +1991,9 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA .collect(), remote_url: Some("git@github.com:zed-industries/zed.git".to_string()), }; - client_a.fs().set_blame_for_repo( - Path::new("/my-repo/.git"), - vec![(Path::new("file.txt"), blame)], - ); + client_a + .fs() + .set_blame_for_repo(Path::new("/my-repo/.git"), vec![("file.txt".into(), blame)]); let (project_a, worktree_id) = client_a.build_local_project("/my-repo", cx_a).await; let project_id = active_call_a diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index 
3b691a2173d471a02918e887dd51cffcd3dd9dc4..a512a9f10cb6a68556f14f1de4f119f18a98b6a0 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -2558,13 +2558,27 @@ async fn test_git_diff_base_change( let project_remote = client_b.join_remote_project(project_id, cx_b).await; - let diff_base = " + let staged_text = " one three " .unindent(); - let new_diff_base = " + let committed_text = " + one + TWO + three + " + .unindent(); + + let new_committed_text = " + one + TWO_HUNDRED + three + " + .unindent(); + + let new_staged_text = " one two " @@ -2572,7 +2586,11 @@ async fn test_git_diff_base_change( client_a.fs().set_index_for_repo( Path::new("/dir/.git"), - &[(Path::new("a.txt"), diff_base.clone())], + &[("a.txt".into(), staged_text.clone())], + ); + client_a.fs().set_head_for_repo( + Path::new("/dir/.git"), + &[("a.txt".into(), committed_text.clone())], ); // Create the buffer @@ -2580,7 +2598,7 @@ async fn test_git_diff_base_change( .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) .await .unwrap(); - let change_set_local_a = project_local + let local_unstaged_changes_a = project_local .update(cx_a, |p, cx| { p.open_unstaged_changes(buffer_local_a.clone(), cx) }) @@ -2589,16 +2607,16 @@ async fn test_git_diff_base_change( // Wait for it to catch up to the new diff executor.run_until_parked(); - change_set_local_a.read_with(cx_a, |change_set, cx| { + local_unstaged_changes_a.read_with(cx_a, |change_set, cx| { let buffer = buffer_local_a.read(cx); assert_eq!( change_set.base_text_string().as_deref(), - Some(diff_base.as_str()) + Some(staged_text.as_str()) ); git::diff::assert_hunks( change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), buffer, - &diff_base, + &change_set.base_text_string().unwrap(), &[(1..2, "", "two\n")], ); }); @@ -2608,7 +2626,7 @@ async fn test_git_diff_base_change( .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) .await .unwrap(); - let change_set_remote_a = project_remote + let remote_unstaged_changes_a = project_remote .update(cx_b, |p, cx| { p.open_unstaged_changes(buffer_remote_a.clone(), cx) }) @@ -2617,64 +2635,104 @@ async fn test_git_diff_base_change( // Wait remote buffer to catch up to the new diff executor.run_until_parked(); - change_set_remote_a.read_with(cx_b, |change_set, cx| { + remote_unstaged_changes_a.read_with(cx_b, |change_set, cx| { let buffer = buffer_remote_a.read(cx); assert_eq!( change_set.base_text_string().as_deref(), - Some(diff_base.as_str()) + Some(staged_text.as_str()) ); git::diff::assert_hunks( change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), buffer, - &diff_base, + &change_set.base_text_string().unwrap(), &[(1..2, "", "two\n")], ); }); - // Update the staged text of the open buffer + // Open uncommitted changes on the guest, without opening them on the host first + let remote_uncommitted_changes_a = project_remote + .update(cx_b, |p, cx| { + p.open_uncommitted_changes(buffer_remote_a.clone(), cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + remote_uncommitted_changes_a.read_with(cx_b, |change_set, cx| { + let buffer = buffer_remote_a.read(cx); + assert_eq!( + change_set.base_text_string().as_deref(), + Some(committed_text.as_str()) + ); + git::diff::assert_hunks( + change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), + buffer, + &change_set.base_text_string().unwrap(), + &[(1..2, "TWO\n", "two\n")], + ); + }); + + // Update the index text of the open buffer client_a.fs().set_index_for_repo( Path::new("/dir/.git"), 
- &[(Path::new("a.txt"), new_diff_base.clone())], + &[("a.txt".into(), new_staged_text.clone())], + ); + client_a.fs().set_head_for_repo( + Path::new("/dir/.git"), + &[("a.txt".into(), new_committed_text.clone())], ); // Wait for buffer_local_a to receive it executor.run_until_parked(); - change_set_local_a.read_with(cx_a, |change_set, cx| { + local_unstaged_changes_a.read_with(cx_a, |change_set, cx| { let buffer = buffer_local_a.read(cx); assert_eq!( change_set.base_text_string().as_deref(), - Some(new_diff_base.as_str()) + Some(new_staged_text.as_str()) ); git::diff::assert_hunks( change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), buffer, - &new_diff_base, + &change_set.base_text_string().unwrap(), &[(2..3, "", "three\n")], ); }); - change_set_remote_a.read_with(cx_b, |change_set, cx| { + remote_unstaged_changes_a.read_with(cx_b, |change_set, cx| { let buffer = buffer_remote_a.read(cx); assert_eq!( change_set.base_text_string().as_deref(), - Some(new_diff_base.as_str()) + Some(new_staged_text.as_str()) ); git::diff::assert_hunks( change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), buffer, - &new_diff_base, + &change_set.base_text_string().unwrap(), &[(2..3, "", "three\n")], ); }); + remote_uncommitted_changes_a.read_with(cx_b, |change_set, cx| { + let buffer = buffer_remote_a.read(cx); + assert_eq!( + change_set.base_text_string().as_deref(), + Some(new_committed_text.as_str()) + ); + git::diff::assert_hunks( + change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), + buffer, + &change_set.base_text_string().unwrap(), + &[(1..2, "TWO_HUNDRED\n", "two\n")], + ); + }); + // Nested git dir - let diff_base = " + let staged_text = " one three " .unindent(); - let new_diff_base = " + let new_staged_text = " one two " @@ -2682,7 +2740,7 @@ async fn test_git_diff_base_change( client_a.fs().set_index_for_repo( Path::new("/dir/sub/.git"), - &[(Path::new("b.txt"), diff_base.clone())], + &[("b.txt".into(), staged_text.clone())], ); // Create the buffer @@ -2690,7 +2748,7 @@ async fn test_git_diff_base_change( .update(cx_a, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx)) .await .unwrap(); - let change_set_local_b = project_local + let local_unstaged_changes_b = project_local .update(cx_a, |p, cx| { p.open_unstaged_changes(buffer_local_b.clone(), cx) }) @@ -2699,16 +2757,16 @@ async fn test_git_diff_base_change( // Wait for it to catch up to the new diff executor.run_until_parked(); - change_set_local_b.read_with(cx_a, |change_set, cx| { + local_unstaged_changes_b.read_with(cx_a, |change_set, cx| { let buffer = buffer_local_b.read(cx); assert_eq!( change_set.base_text_string().as_deref(), - Some(diff_base.as_str()) + Some(staged_text.as_str()) ); git::diff::assert_hunks( change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), buffer, - &diff_base, + &change_set.base_text_string().unwrap(), &[(1..2, "", "two\n")], ); }); @@ -2718,7 +2776,7 @@ async fn test_git_diff_base_change( .update(cx_b, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx)) .await .unwrap(); - let change_set_remote_b = project_remote + let remote_unstaged_changes_b = project_remote .update(cx_b, |p, cx| { p.open_unstaged_changes(buffer_remote_b.clone(), cx) }) @@ -2726,52 +2784,52 @@ async fn test_git_diff_base_change( .unwrap(); executor.run_until_parked(); - change_set_remote_b.read_with(cx_b, |change_set, cx| { + remote_unstaged_changes_b.read_with(cx_b, |change_set, cx| { let buffer = buffer_remote_b.read(cx); assert_eq!( change_set.base_text_string().as_deref(), - Some(diff_base.as_str()) + 
Some(staged_text.as_str()) ); git::diff::assert_hunks( change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), buffer, - &diff_base, + &staged_text, &[(1..2, "", "two\n")], ); }); - // Update the staged text + // Updatet the staged text client_a.fs().set_index_for_repo( Path::new("/dir/sub/.git"), - &[(Path::new("b.txt"), new_diff_base.clone())], + &[("b.txt".into(), new_staged_text.clone())], ); // Wait for buffer_local_b to receive it executor.run_until_parked(); - change_set_local_b.read_with(cx_a, |change_set, cx| { + local_unstaged_changes_b.read_with(cx_a, |change_set, cx| { let buffer = buffer_local_b.read(cx); assert_eq!( change_set.base_text_string().as_deref(), - Some(new_diff_base.as_str()) + Some(new_staged_text.as_str()) ); git::diff::assert_hunks( change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), buffer, - &new_diff_base, + &new_staged_text, &[(2..3, "", "three\n")], ); }); - change_set_remote_b.read_with(cx_b, |change_set, cx| { + remote_unstaged_changes_b.read_with(cx_b, |change_set, cx| { let buffer = buffer_remote_b.read(cx); assert_eq!( change_set.base_text_string().as_deref(), - Some(new_diff_base.as_str()) + Some(new_staged_text.as_str()) ); git::diff::assert_hunks( change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), buffer, - &new_diff_base, + &new_staged_text, &[(2..3, "", "three\n")], ); }); diff --git a/crates/collab/src/tests/random_project_collaboration_tests.rs b/crates/collab/src/tests/random_project_collaboration_tests.rs index b250473b61663abe46a112b559da03be6fca291c..e4d1ae79a5c36d833484592429a947c23a8ad96a 100644 --- a/crates/collab/src/tests/random_project_collaboration_tests.rs +++ b/crates/collab/src/tests/random_project_collaboration_tests.rs @@ -953,8 +953,8 @@ impl RandomizedTest for ProjectCollaborationTest { let dot_git_dir = repo_path.join(".git"); let contents = contents - .iter() - .map(|(path, contents)| (path.as_path(), contents.clone())) + .into_iter() + .map(|(path, contents)| (path.into(), contents)) .collect::>(); if client.fs().metadata(&dot_git_dir).await?.is_none() { client.fs().create_dir(&dot_git_dir).await?; @@ -1339,7 +1339,7 @@ impl RandomizedTest for ProjectCollaborationTest { project .buffer_store() .read(cx) - .get_unstaged_changes(host_buffer.read(cx).remote_id()) + .get_unstaged_changes(host_buffer.read(cx).remote_id(), cx) .unwrap() .read(cx) .base_text_string() @@ -1348,7 +1348,7 @@ impl RandomizedTest for ProjectCollaborationTest { project .buffer_store() .read(cx) - .get_unstaged_changes(guest_buffer.read(cx).remote_id()) + .get_unstaged_changes(guest_buffer.read(cx).remote_id(), cx) .unwrap() .read(cx) .base_text_string() diff --git a/crates/diagnostics/src/items.rs b/crates/diagnostics/src/items.rs index 2fa593e6eafebb9a11e9c640aede434fc18e34e4..017adc5017f7a83fb8c1def828ca9d2b7b9c627c 100644 --- a/crates/diagnostics/src/items.rs +++ b/crates/diagnostics/src/items.rs @@ -157,7 +157,7 @@ impl DiagnosticIndicator { (buffer, cursor_position) }); let new_diagnostic = buffer - .diagnostics_in_range::<_, usize>(cursor_position..cursor_position) + .diagnostics_in_range::(cursor_position..cursor_position) .filter(|entry| !entry.range.is_empty()) .min_by_key(|entry| (entry.diagnostic.severity, entry.range.len())) .map(|entry| entry.diagnostic); diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index c1510cd23156c637ed58576083e701d1f2875c7e..6b00ab7db0f796955d213fcc34e63ce7f75e5ef0 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ 
b/crates/editor/src/display_map/wrap_map.rs @@ -979,6 +979,7 @@ impl<'a> Iterator for WrapRows<'a> { Some(if soft_wrapped { RowInfo { + buffer_id: None, buffer_row: None, multibuffer_row: None, diff_status, diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 0468770e0e59c3fe4cfc4e4151fdf5aaeb9de3fe..63f0068eda84aef4dde92f713743e3f315a7be91 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -10137,12 +10137,12 @@ impl Editor { let mut diagnostics; if direction == Direction::Prev { diagnostics = buffer - .diagnostics_in_range::<_, usize>(0..search_start) + .diagnostics_in_range::(0..search_start) .collect::>(); diagnostics.reverse(); } else { diagnostics = buffer - .diagnostics_in_range::<_, usize>(search_start..buffer.len()) + .diagnostics_in_range::(search_start..buffer.len()) .collect::>(); }; let group = diagnostics @@ -11333,8 +11333,9 @@ impl Editor { if let Some(active_diagnostics) = self.active_diagnostics.as_mut() { let buffer = self.buffer.read(cx).snapshot(cx); let primary_range_start = active_diagnostics.primary_range.start.to_offset(&buffer); + let primary_range_end = active_diagnostics.primary_range.end.to_offset(&buffer); let is_valid = buffer - .diagnostics_in_range::<_, usize>(active_diagnostics.primary_range.clone()) + .diagnostics_in_range::(primary_range_start..primary_range_end) .any(|entry| { entry.diagnostic.is_primary && !entry.range.is_empty() diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 2ce221fbda6d80a0fb056095abcf02c1e95365d3..35942cce25102c59e1f0485d167a0bf3c7968129 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -12431,8 +12431,8 @@ async fn test_multibuffer_reverts(cx: &mut gpui::TestAppContext) { (buffer_2.clone(), base_text_2), (buffer_3.clone(), base_text_3), ] { - let change_set = cx - .new(|cx| BufferChangeSet::new_with_base_text(diff_base.to_string(), &buffer, cx)); + let change_set = + cx.new(|cx| BufferChangeSet::new_with_base_text(&diff_base, &buffer, cx)); editor .buffer .update(cx, |buffer, cx| buffer.add_change_set(change_set, cx)); @@ -13125,9 +13125,8 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) (buffer_2.clone(), file_2_old), (buffer_3.clone(), file_3_old), ] { - let change_set = cx.new(|cx| { - BufferChangeSet::new_with_base_text(diff_base.to_string(), &buffer, cx) - }); + let change_set = + cx.new(|cx| BufferChangeSet::new_with_base_text(&diff_base, &buffer, cx)); editor .buffer .update(cx, |buffer, cx| buffer.add_change_set(change_set, cx)); @@ -13212,7 +13211,7 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext init_test(cx, |_| {}); let base = "aaa\nbbb\nccc\nddd\neee\nfff\nggg\n"; - let text = "aaa\nBBB\nBB2\nccc\nDDD\nEEE\nfff\nggg\n"; + let text = "aaa\nBBB\nBB2\nccc\nDDD\nEEE\nfff\nggg\nhhh\niii\n"; let buffer = cx.new(|cx| Buffer::local(text.to_string(), cx)); let multi_buffer = cx.new(|cx| { @@ -13225,7 +13224,11 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext primary: None, }, ExcerptRange { - context: Point::new(5, 0)..Point::new(7, 0), + context: Point::new(4, 0)..Point::new(7, 0), + primary: None, + }, + ExcerptRange { + context: Point::new(9, 0)..Point::new(10, 0), primary: None, }, ], @@ -13239,8 +13242,7 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext }); editor .update(cx, |editor, _window, cx| { - let change_set = - cx.new(|cx| 
BufferChangeSet::new_with_base_text(base.to_string(), &buffer, cx)); + let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base, &buffer, cx)); editor .buffer .update(cx, |buffer, cx| buffer.add_change_set(change_set, cx)) @@ -13255,14 +13257,22 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext }); cx.executor().run_until_parked(); + // When the start of a hunk coincides with the start of its excerpt, + // the hunk is expanded. When the start of a a hunk is earlier than + // the start of its excerpt, the hunk is not expanded. cx.assert_state_with_diff( " ˇaaa - bbb + BBB + - ddd + - eee + + DDD + EEE fff + + iii " .unindent(), ); @@ -13500,8 +13510,8 @@ async fn test_toggling_adjacent_diff_hunks(cx: &mut TestAppContext) { cx.set_state(indoc! { " one - TWO - ˇthree + ˇTWO + three four five "}); @@ -13514,15 +13524,14 @@ async fn test_toggling_adjacent_diff_hunks(cx: &mut TestAppContext) { indoc! { " one - two - + TWO - ˇthree + + ˇTWO + three four five "} .to_string(), ); cx.update_editor(|editor, window, cx| { - editor.move_up(&Default::default(), window, cx); editor.move_up(&Default::default(), window, cx); editor.toggle_selected_diff_hunks(&Default::default(), window, cx); }); @@ -14402,12 +14411,8 @@ async fn test_indent_guide_with_expanded_diff_hunks(cx: &mut gpui::TestAppContex editor.buffer().update(cx, |multibuffer, cx| { let buffer = multibuffer.as_singleton().unwrap(); - let change_set = cx.new(|cx| { - let mut change_set = BufferChangeSet::new(&buffer, cx); - let _ = - change_set.set_base_text(base_text.into(), buffer.read(cx).text_snapshot(), cx); - change_set - }); + let change_set = + cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx)); multibuffer.set_all_diff_hunks_expanded(cx); multibuffer.add_change_set(change_set, cx); diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index bf3d9ab78dc04667fb4c0e9ad9bb22f66341d6a1..632c81c6b149b7f75438aededf7e9b9775f830a1 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -5295,7 +5295,7 @@ impl EditorElement { if scrollbar_settings.diagnostics != ScrollbarDiagnostics::None { let diagnostics = snapshot .buffer_snapshot - .diagnostics_in_range::<_, Point>(Point::zero()..max_point) + .diagnostics_in_range::(Point::zero()..max_point) // Don't show diagnostics the user doesn't care about .filter(|diagnostic| { match ( diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index b3680c891fc10e7b8f56ee3c7a2e2d2a9e7639a7..d9c4926d33cc857d35bc57cf3c81c0da6a9e2349 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -697,7 +697,7 @@ mod tests { fs.set_blame_for_repo( Path::new("/my-repo/.git"), vec![( - Path::new("file.txt"), + "file.txt".into(), Blame { entries: vec![ blame_entry("1b1b1b", 0..1), @@ -809,7 +809,7 @@ mod tests { fs.set_blame_for_repo( Path::new("/my-repo/.git"), vec![( - Path::new("file.txt"), + "file.txt".into(), Blame { entries: vec![blame_entry("1b1b1b", 0..4)], ..Default::default() @@ -958,7 +958,7 @@ mod tests { fs.set_blame_for_repo( Path::new("/my-repo/.git"), vec![( - Path::new("file.txt"), + "file.txt".into(), Blame { entries: blame_entries, ..Default::default() @@ -1000,7 +1000,7 @@ mod tests { fs.set_blame_for_repo( Path::new("/my-repo/.git"), vec![( - Path::new("file.txt"), + "file.txt".into(), Blame { entries: blame_entries, ..Default::default() diff --git a/crates/editor/src/git/project_diff.rs b/crates/editor/src/git/project_diff.rs new file mode 
100644 index 0000000000000000000000000000000000000000..8420aa99806a655b4e67e9aa59bf8dba39c167e9 --- /dev/null +++ b/crates/editor/src/git/project_diff.rs @@ -0,0 +1,1296 @@ +use std::{ + any::{Any, TypeId}, + cmp::Ordering, + collections::HashSet, + ops::Range, + time::Duration, +}; + +use anyhow::{anyhow, Context as _}; +use collections::{BTreeMap, HashMap}; +use feature_flags::FeatureFlagAppExt; +use git::diff::{BufferDiff, DiffHunk}; +use gpui::{ + actions, AnyElement, AnyView, App, Entity, EventEmitter, FocusHandle, Focusable, + InteractiveElement, Render, Subscription, Task, WeakEntity, +}; +use language::{Buffer, BufferRow}; +use multi_buffer::{ExcerptId, ExcerptRange, ExpandExcerptDirection, MultiBuffer}; +use project::{Project, ProjectEntryId, ProjectPath, WorktreeId}; +use text::{OffsetRangeExt, ToPoint}; +use theme::ActiveTheme; +use ui::prelude::*; +use util::{paths::compare_paths, ResultExt}; +use workspace::{ + item::{BreadcrumbText, Item, ItemEvent, ItemHandle, TabContentParams}, + ItemNavHistory, ToolbarItemLocation, Workspace, +}; + +use crate::{Editor, EditorEvent, DEFAULT_MULTIBUFFER_CONTEXT}; + +actions!(project_diff, [Deploy]); + +pub fn init(cx: &mut App) { + cx.observe_new(ProjectDiffEditor::register).detach(); +} + +const UPDATE_DEBOUNCE: Duration = Duration::from_millis(50); + +struct ProjectDiffEditor { + buffer_changes: BTreeMap>, + entry_order: HashMap>, + excerpts: Entity, + editor: Entity, + + project: Entity, + workspace: WeakEntity, + focus_handle: FocusHandle, + worktree_rescans: HashMap>, + _subscriptions: Vec, +} + +#[derive(Debug)] +struct Changes { + buffer: Entity, + hunks: Vec, +} + +impl ProjectDiffEditor { + fn register( + workspace: &mut Workspace, + _window: Option<&mut Window>, + _: &mut Context, + ) { + workspace.register_action(Self::deploy); + } + + fn deploy( + workspace: &mut Workspace, + _: &Deploy, + window: &mut Window, + cx: &mut Context, + ) { + if !cx.is_staff() { + return; + } + + if let Some(existing) = workspace.item_of_type::(cx) { + workspace.activate_item(&existing, true, true, window, cx); + } else { + let workspace_handle = cx.entity().downgrade(); + let project_diff = + cx.new(|cx| Self::new(workspace.project().clone(), workspace_handle, window, cx)); + workspace.add_item_to_active_pane(Box::new(project_diff), None, true, window, cx); + } + } + + fn new( + project: Entity, + workspace: WeakEntity, + window: &mut Window, + cx: &mut Context, + ) -> Self { + // TODO diff change subscriptions. For that, needed: + // * `-20/+50` stats retrieval: some background process that reacts on file changes + let focus_handle = cx.focus_handle(); + let changed_entries_subscription = + cx.subscribe_in(&project, window, |project_diff_editor, _, e, window, cx| { + let mut worktree_to_rescan = None; + match e { + project::Event::WorktreeAdded(id) => { + worktree_to_rescan = Some(*id); + // project_diff_editor + // .buffer_changes + // .insert(*id, HashMap::default()); + } + project::Event::WorktreeRemoved(id) => { + project_diff_editor.buffer_changes.remove(id); + } + project::Event::WorktreeUpdatedEntries(id, _updated_entries) => { + // TODO cannot invalidate buffer entries without invalidating the corresponding excerpts and order entries. 
+ worktree_to_rescan = Some(*id); + // let entry_changes = + // project_diff_editor.buffer_changes.entry(*id).or_default(); + // for (_, entry_id, change) in updated_entries.iter() { + // let changes = entry_changes.entry(*entry_id); + // match change { + // project::PathChange::Removed => { + // if let hash_map::Entry::Occupied(entry) = changes { + // entry.remove(); + // } + // } + // // TODO understand the invalidation case better: now, we do that but still rescan the entire worktree + // // What if we already have the buffer loaded inside the diff multi buffer and it was edited there? We should not do anything. + // _ => match changes { + // hash_map::Entry::Occupied(mut o) => o.get_mut().invalidate(), + // hash_map::Entry::Vacant(v) => { + // v.insert(None); + // } + // }, + // } + // } + } + project::Event::WorktreeUpdatedGitRepositories(id) => { + worktree_to_rescan = Some(*id); + // project_diff_editor.buffer_changes.clear(); + } + project::Event::DeletedEntry(id, _entry_id) => { + worktree_to_rescan = Some(*id); + // if let Some(entries) = project_diff_editor.buffer_changes.get_mut(id) { + // entries.remove(entry_id); + // } + } + project::Event::Closed => { + project_diff_editor.buffer_changes.clear(); + } + _ => {} + } + + if let Some(worktree_to_rescan) = worktree_to_rescan { + project_diff_editor.schedule_worktree_rescan(worktree_to_rescan, window, cx); + } + }); + + let excerpts = cx.new(|cx| MultiBuffer::new(project.read(cx).capability())); + + let editor = cx.new(|cx| { + let mut diff_display_editor = + Editor::for_multibuffer(excerpts.clone(), Some(project.clone()), true, window, cx); + diff_display_editor.set_expand_all_diff_hunks(cx); + diff_display_editor + }); + + let mut new_self = Self { + project, + workspace, + buffer_changes: BTreeMap::default(), + entry_order: HashMap::default(), + worktree_rescans: HashMap::default(), + focus_handle, + editor, + excerpts, + _subscriptions: vec![changed_entries_subscription], + }; + new_self.schedule_rescan_all(window, cx); + new_self + } + + fn schedule_rescan_all(&mut self, window: &mut Window, cx: &mut Context) { + let mut current_worktrees = HashSet::::default(); + for worktree in self.project.read(cx).worktrees(cx).collect::>() { + let worktree_id = worktree.read(cx).id(); + current_worktrees.insert(worktree_id); + self.schedule_worktree_rescan(worktree_id, window, cx); + } + + self.worktree_rescans + .retain(|worktree_id, _| current_worktrees.contains(worktree_id)); + self.buffer_changes + .retain(|worktree_id, _| current_worktrees.contains(worktree_id)); + self.entry_order + .retain(|worktree_id, _| current_worktrees.contains(worktree_id)); + } + + fn schedule_worktree_rescan( + &mut self, + id: WorktreeId, + window: &mut Window, + cx: &mut Context, + ) { + let project = self.project.clone(); + self.worktree_rescans.insert( + id, + cx.spawn_in(window, |project_diff_editor, mut cx| async move { + cx.background_executor().timer(UPDATE_DEBOUNCE).await; + let open_tasks = project + .update(&mut cx, |project, cx| { + let worktree = project.worktree_for_id(id, cx)?; + let snapshot = worktree.read(cx).snapshot(); + let applicable_entries = snapshot + .repositories() + .iter() + .flat_map(|entry| { + entry + .status() + .map(|git_entry| entry.join(git_entry.repo_path)) + }) + .filter_map(|path| { + let id = snapshot.entry_for_path(&path)?.id; + Some(( + id, + ProjectPath { + worktree_id: snapshot.id(), + path: path.into(), + }, + )) + }) + .collect::>(); + Some( + applicable_entries + .into_iter() + .map(|(entry_id, entry_path)| { 
+ let open_task = project.open_path(entry_path.clone(), cx); + (entry_id, entry_path, open_task) + }) + .collect::>(), + ) + }) + .ok() + .flatten() + .unwrap_or_default(); + + let Some((buffers, mut new_entries, change_sets)) = cx + .spawn(|mut cx| async move { + let mut new_entries = Vec::new(); + let mut buffers = HashMap::< + ProjectEntryId, + (text::BufferSnapshot, Entity, BufferDiff), + >::default(); + let mut change_sets = Vec::new(); + for (entry_id, entry_path, open_task) in open_tasks { + let Some(buffer) = open_task + .await + .and_then(|(_, opened_model)| { + opened_model + .downcast::() + .map_err(|_| anyhow!("Unexpected non-buffer")) + }) + .with_context(|| { + format!("loading {:?} for git diff", entry_path.path) + }) + .log_err() + else { + continue; + }; + + let Some(change_set) = project + .update(&mut cx, |project, cx| { + project.open_unstaged_changes(buffer.clone(), cx) + })? + .await + .log_err() + else { + continue; + }; + + cx.update(|_, cx| { + buffers.insert( + entry_id, + ( + buffer.read(cx).text_snapshot(), + buffer, + change_set.read(cx).diff_to_buffer.clone(), + ), + ); + })?; + change_sets.push(change_set); + new_entries.push((entry_path, entry_id)); + } + + anyhow::Ok((buffers, new_entries, change_sets)) + }) + .await + .log_err() + else { + return; + }; + + let (new_changes, new_entry_order) = cx + .background_executor() + .spawn(async move { + let mut new_changes = HashMap::::default(); + for (entry_id, (buffer_snapshot, buffer, buffer_diff)) in buffers { + new_changes.insert( + entry_id, + Changes { + buffer, + hunks: buffer_diff + .hunks_in_row_range(0..BufferRow::MAX, &buffer_snapshot) + .collect::>(), + }, + ); + } + + new_entries.sort_by(|(project_path_a, _), (project_path_b, _)| { + compare_paths( + (project_path_a.path.as_ref(), true), + (project_path_b.path.as_ref(), true), + ) + }); + (new_changes, new_entries) + }) + .await; + + project_diff_editor + .update_in(&mut cx, |project_diff_editor, _window, cx| { + project_diff_editor.update_excerpts(id, new_changes, new_entry_order, cx); + project_diff_editor.editor.update(cx, |editor, cx| { + editor.buffer.update(cx, |buffer, cx| { + for change_set in change_sets { + buffer.add_change_set(change_set, cx) + } + }); + }); + }) + .ok(); + }), + ); + } + + fn update_excerpts( + &mut self, + worktree_id: WorktreeId, + new_changes: HashMap, + new_entry_order: Vec<(ProjectPath, ProjectEntryId)>, + + cx: &mut Context, + ) { + if let Some(current_order) = self.entry_order.get(&worktree_id) { + let current_entries = self.buffer_changes.entry(worktree_id).or_default(); + let mut new_order_entries = new_entry_order.iter().fuse().peekable(); + let mut excerpts_to_remove = Vec::new(); + let mut new_excerpt_hunks = BTreeMap::< + ExcerptId, + Vec<(ProjectPath, Entity, Vec>)>, + >::new(); + let mut excerpt_to_expand = + HashMap::<(u32, ExpandExcerptDirection), Vec>::default(); + let mut latest_excerpt_id = ExcerptId::min(); + + for (current_path, current_entry_id) in current_order { + let current_changes = match current_entries.get(current_entry_id) { + Some(current_changes) => { + if current_changes.hunks.is_empty() { + continue; + } + current_changes + } + None => continue, + }; + let buffer_excerpts = self + .excerpts + .read(cx) + .excerpts_for_buffer(¤t_changes.buffer, cx); + let last_current_excerpt_id = + buffer_excerpts.last().map(|(excerpt_id, _)| *excerpt_id); + let mut current_excerpts = buffer_excerpts.into_iter().fuse().peekable(); + loop { + match new_order_entries.peek() { + Some((new_path, new_entry)) 
=> { + match compare_paths( + (current_path.path.as_ref(), true), + (new_path.path.as_ref(), true), + ) { + Ordering::Less => { + excerpts_to_remove + .extend(current_excerpts.map(|(excerpt_id, _)| excerpt_id)); + break; + } + Ordering::Greater => { + if let Some(new_changes) = new_changes.get(new_entry) { + if !new_changes.hunks.is_empty() { + let hunks = new_excerpt_hunks + .entry(latest_excerpt_id) + .or_default(); + match hunks.binary_search_by(|(probe, ..)| { + compare_paths( + (new_path.path.as_ref(), true), + (probe.path.as_ref(), true), + ) + }) { + Ok(i) => hunks[i].2.extend( + new_changes + .hunks + .iter() + .map(|hunk| hunk.buffer_range.clone()), + ), + Err(i) => hunks.insert( + i, + ( + new_path.clone(), + new_changes.buffer.clone(), + new_changes + .hunks + .iter() + .map(|hunk| hunk.buffer_range.clone()) + .collect(), + ), + ), + } + } + }; + let _ = new_order_entries.next(); + } + Ordering::Equal => { + match new_changes.get(new_entry) { + Some(new_changes) => { + let buffer_snapshot = + new_changes.buffer.read(cx).snapshot(); + let mut current_hunks = + current_changes.hunks.iter().fuse().peekable(); + let mut new_hunks_unchanged = + Vec::with_capacity(new_changes.hunks.len()); + let mut new_hunks_with_updates = + Vec::with_capacity(new_changes.hunks.len()); + 'new_changes: for new_hunk in &new_changes.hunks { + loop { + match current_hunks.peek() { + Some(current_hunk) => { + match ( + current_hunk + .buffer_range + .start + .cmp( + &new_hunk + .buffer_range + .start, + &buffer_snapshot, + ), + current_hunk.buffer_range.end.cmp( + &new_hunk.buffer_range.end, + &buffer_snapshot, + ), + ) { + ( + Ordering::Equal, + Ordering::Equal, + ) => { + new_hunks_unchanged + .push(new_hunk); + let _ = current_hunks.next(); + continue 'new_changes; + } + (Ordering::Equal, _) + | (_, Ordering::Equal) => { + new_hunks_with_updates + .push(new_hunk); + continue 'new_changes; + } + ( + Ordering::Less, + Ordering::Greater, + ) + | ( + Ordering::Greater, + Ordering::Less, + ) => { + new_hunks_with_updates + .push(new_hunk); + continue 'new_changes; + } + ( + Ordering::Less, + Ordering::Less, + ) => { + if current_hunk + .buffer_range + .start + .cmp( + &new_hunk + .buffer_range + .end, + &buffer_snapshot, + ) + .is_le() + { + new_hunks_with_updates + .push(new_hunk); + continue 'new_changes; + } else { + let _ = + current_hunks.next(); + } + } + ( + Ordering::Greater, + Ordering::Greater, + ) => { + if current_hunk + .buffer_range + .end + .cmp( + &new_hunk + .buffer_range + .start, + &buffer_snapshot, + ) + .is_ge() + { + new_hunks_with_updates + .push(new_hunk); + continue 'new_changes; + } else { + let _ = + current_hunks.next(); + } + } + } + } + None => { + new_hunks_with_updates.push(new_hunk); + continue 'new_changes; + } + } + } + } + + let mut excerpts_with_new_changes = + HashSet::::default(); + 'new_hunks: for new_hunk in new_hunks_with_updates { + loop { + match current_excerpts.peek() { + Some(( + current_excerpt_id, + current_excerpt_range, + )) => { + match ( + current_excerpt_range + .context + .start + .cmp( + &new_hunk + .buffer_range + .start, + &buffer_snapshot, + ), + current_excerpt_range + .context + .end + .cmp( + &new_hunk.buffer_range.end, + &buffer_snapshot, + ), + ) { + ( + Ordering::Less + | Ordering::Equal, + Ordering::Greater + | Ordering::Equal, + ) => { + excerpts_with_new_changes + .insert( + *current_excerpt_id, + ); + continue 'new_hunks; + } + ( + Ordering::Greater + | Ordering::Equal, + Ordering::Less + | Ordering::Equal, + ) => { + let expand_up = 
current_excerpt_range + .context + .start + .to_point(&buffer_snapshot) + .row + .saturating_sub( + new_hunk + .buffer_range + .start + .to_point(&buffer_snapshot) + .row, + ); + let expand_down = new_hunk + .buffer_range + .end + .to_point(&buffer_snapshot) + .row + .saturating_sub( + current_excerpt_range + .context + .end + .to_point( + &buffer_snapshot, + ) + .row, + ); + excerpt_to_expand.entry((expand_up.max(expand_down).max(DEFAULT_MULTIBUFFER_CONTEXT), ExpandExcerptDirection::UpAndDown)).or_default().push(*current_excerpt_id); + excerpts_with_new_changes + .insert( + *current_excerpt_id, + ); + continue 'new_hunks; + } + ( + Ordering::Less, + Ordering::Less, + ) => { + if current_excerpt_range + .context + .start + .cmp( + &new_hunk + .buffer_range + .end, + &buffer_snapshot, + ) + .is_le() + { + let expand_up = current_excerpt_range + .context + .start + .to_point(&buffer_snapshot) + .row + .saturating_sub( + new_hunk.buffer_range + .start + .to_point( + &buffer_snapshot, + ) + .row, + ); + excerpt_to_expand.entry((expand_up.max(DEFAULT_MULTIBUFFER_CONTEXT), ExpandExcerptDirection::Up)).or_default().push(*current_excerpt_id); + excerpts_with_new_changes + .insert( + *current_excerpt_id, + ); + continue 'new_hunks; + } else { + if !new_changes + .hunks + .is_empty() + { + let hunks = new_excerpt_hunks + .entry(latest_excerpt_id) + .or_default(); + match hunks.binary_search_by(|(probe, ..)| { + compare_paths( + (new_path.path.as_ref(), true), + (probe.path.as_ref(), true), + ) + }) { + Ok(i) => hunks[i].2.extend( + new_changes + .hunks + .iter() + .map(|hunk| hunk.buffer_range.clone()), + ), + Err(i) => hunks.insert( + i, + ( + new_path.clone(), + new_changes.buffer.clone(), + new_changes + .hunks + .iter() + .map(|hunk| hunk.buffer_range.clone()) + .collect(), + ), + ), + } + } + continue 'new_hunks; + } + } + /* TODO remove or leave? 
+ [ ><<<<<<<--]----<-- + cur_s > cur_e < + > < + new_s>>>>>>>>< + */ + ( + Ordering::Greater, + Ordering::Greater, + ) => { + if current_excerpt_range + .context + .end + .cmp( + &new_hunk + .buffer_range + .start, + &buffer_snapshot, + ) + .is_ge() + { + let expand_down = new_hunk + .buffer_range + .end + .to_point(&buffer_snapshot) + .row + .saturating_sub( + current_excerpt_range + .context + .end + .to_point( + &buffer_snapshot, + ) + .row, + ); + excerpt_to_expand.entry((expand_down.max(DEFAULT_MULTIBUFFER_CONTEXT), ExpandExcerptDirection::Down)).or_default().push(*current_excerpt_id); + excerpts_with_new_changes + .insert( + *current_excerpt_id, + ); + continue 'new_hunks; + } else { + latest_excerpt_id = + *current_excerpt_id; + let _ = + current_excerpts.next(); + } + } + } + } + None => { + let hunks = new_excerpt_hunks + .entry(latest_excerpt_id) + .or_default(); + match hunks.binary_search_by( + |(probe, ..)| { + compare_paths( + ( + new_path.path.as_ref(), + true, + ), + (probe.path.as_ref(), true), + ) + }, + ) { + Ok(i) => hunks[i].2.extend( + new_changes.hunks.iter().map( + |hunk| { + hunk.buffer_range + .clone() + }, + ), + ), + Err(i) => hunks.insert( + i, + ( + new_path.clone(), + new_changes.buffer.clone(), + new_changes + .hunks + .iter() + .map(|hunk| { + hunk.buffer_range + .clone() + }) + .collect(), + ), + ), + } + continue 'new_hunks; + } + } + } + } + + for (excerpt_id, excerpt_range) in current_excerpts { + if !excerpts_with_new_changes.contains(&excerpt_id) + && !new_hunks_unchanged.iter().any(|hunk| { + excerpt_range + .context + .start + .cmp( + &hunk.buffer_range.end, + &buffer_snapshot, + ) + .is_le() + && excerpt_range + .context + .end + .cmp( + &hunk.buffer_range.start, + &buffer_snapshot, + ) + .is_ge() + }) + { + excerpts_to_remove.push(excerpt_id); + } + latest_excerpt_id = excerpt_id; + } + } + None => excerpts_to_remove.extend( + current_excerpts.map(|(excerpt_id, _)| excerpt_id), + ), + } + let _ = new_order_entries.next(); + break; + } + } + } + None => { + excerpts_to_remove + .extend(current_excerpts.map(|(excerpt_id, _)| excerpt_id)); + break; + } + } + } + latest_excerpt_id = last_current_excerpt_id.unwrap_or(latest_excerpt_id); + } + + for (path, project_entry_id) in new_order_entries { + if let Some(changes) = new_changes.get(project_entry_id) { + if !changes.hunks.is_empty() { + let hunks = new_excerpt_hunks.entry(latest_excerpt_id).or_default(); + match hunks.binary_search_by(|(probe, ..)| { + compare_paths((path.path.as_ref(), true), (probe.path.as_ref(), true)) + }) { + Ok(i) => hunks[i] + .2 + .extend(changes.hunks.iter().map(|hunk| hunk.buffer_range.clone())), + Err(i) => hunks.insert( + i, + ( + path.clone(), + changes.buffer.clone(), + changes + .hunks + .iter() + .map(|hunk| hunk.buffer_range.clone()) + .collect(), + ), + ), + } + } + } + } + + self.excerpts.update(cx, |multi_buffer, cx| { + for (mut after_excerpt_id, excerpts_to_add) in new_excerpt_hunks { + for (_, buffer, hunk_ranges) in excerpts_to_add { + let buffer_snapshot = buffer.read(cx).snapshot(); + let max_point = buffer_snapshot.max_point(); + let new_excerpts = multi_buffer.insert_excerpts_after( + after_excerpt_id, + buffer, + hunk_ranges.into_iter().map(|range| { + let mut extended_point_range = range.to_point(&buffer_snapshot); + extended_point_range.start.row = extended_point_range + .start + .row + .saturating_sub(DEFAULT_MULTIBUFFER_CONTEXT); + extended_point_range.end.row = (extended_point_range.end.row + + DEFAULT_MULTIBUFFER_CONTEXT) + .min(max_point.row); + 
ExcerptRange { + context: extended_point_range, + primary: None, + } + }), + cx, + ); + after_excerpt_id = new_excerpts.last().copied().unwrap_or(after_excerpt_id); + } + } + multi_buffer.remove_excerpts(excerpts_to_remove, cx); + for ((line_count, direction), excerpts) in excerpt_to_expand { + multi_buffer.expand_excerpts(excerpts, line_count, direction, cx); + } + }); + } else { + self.excerpts.update(cx, |multi_buffer, cx| { + for new_changes in new_entry_order + .iter() + .filter_map(|(_, entry_id)| new_changes.get(entry_id)) + { + multi_buffer.push_excerpts_with_context_lines( + new_changes.buffer.clone(), + new_changes + .hunks + .iter() + .map(|hunk| hunk.buffer_range.clone()) + .collect(), + DEFAULT_MULTIBUFFER_CONTEXT, + cx, + ); + } + }); + }; + + let mut new_changes = new_changes; + let mut new_entry_order = new_entry_order; + std::mem::swap( + self.buffer_changes.entry(worktree_id).or_default(), + &mut new_changes, + ); + std::mem::swap( + self.entry_order.entry(worktree_id).or_default(), + &mut new_entry_order, + ); + } +} + +impl EventEmitter for ProjectDiffEditor {} + +impl Focusable for ProjectDiffEditor { + fn focus_handle(&self, _: &App) -> FocusHandle { + self.focus_handle.clone() + } +} + +impl Item for ProjectDiffEditor { + type Event = EditorEvent; + + fn to_item_events(event: &EditorEvent, f: impl FnMut(ItemEvent)) { + Editor::to_item_events(event, f) + } + + fn deactivated(&mut self, window: &mut Window, cx: &mut Context) { + self.editor + .update(cx, |editor, cx| editor.deactivated(window, cx)); + } + + fn navigate( + &mut self, + data: Box, + window: &mut Window, + cx: &mut Context, + ) -> bool { + self.editor + .update(cx, |editor, cx| editor.navigate(data, window, cx)) + } + + fn tab_tooltip_text(&self, _: &App) -> Option { + Some("Project Diff".into()) + } + + fn tab_content(&self, params: TabContentParams, _window: &Window, _: &App) -> AnyElement { + if self.buffer_changes.is_empty() { + Label::new("No changes") + .color(if params.selected { + Color::Default + } else { + Color::Muted + }) + .into_any_element() + } else { + h_flex() + .gap_1() + .when(true, |then| { + then.child( + h_flex() + .gap_1() + .child(Icon::new(IconName::XCircle).color(Color::Error)) + .child(Label::new(self.buffer_changes.len().to_string()).color( + if params.selected { + Color::Default + } else { + Color::Muted + }, + )), + ) + }) + .when(true, |then| { + then.child( + h_flex() + .gap_1() + .child(Icon::new(IconName::Indicator).color(Color::Warning)) + .child(Label::new(self.buffer_changes.len().to_string()).color( + if params.selected { + Color::Default + } else { + Color::Muted + }, + )), + ) + }) + .into_any_element() + } + } + + fn telemetry_event_text(&self) -> Option<&'static str> { + Some("Project Diagnostics Opened") + } + + fn for_each_project_item( + &self, + cx: &App, + f: &mut dyn FnMut(gpui::EntityId, &dyn project::ProjectItem), + ) { + self.editor.for_each_project_item(cx, f) + } + + fn is_singleton(&self, _: &App) -> bool { + false + } + + fn set_nav_history( + &mut self, + nav_history: ItemNavHistory, + _: &mut Window, + cx: &mut Context, + ) { + self.editor.update(cx, |editor, _| { + editor.set_nav_history(Some(nav_history)); + }); + } + + fn clone_on_split( + &self, + _workspace_id: Option, + window: &mut Window, + cx: &mut Context, + ) -> Option> + where + Self: Sized, + { + Some(cx.new(|cx| { + ProjectDiffEditor::new(self.project.clone(), self.workspace.clone(), window, cx) + })) + } + + fn is_dirty(&self, cx: &App) -> bool { + self.excerpts.read(cx).is_dirty(cx) 
+ } + + fn has_conflict(&self, cx: &App) -> bool { + self.excerpts.read(cx).has_conflict(cx) + } + + fn can_save(&self, _: &App) -> bool { + true + } + + fn save( + &mut self, + format: bool, + project: Entity, + window: &mut Window, + cx: &mut Context, + ) -> Task> { + self.editor.save(format, project, window, cx) + } + + fn save_as( + &mut self, + _: Entity, + _: ProjectPath, + _window: &mut Window, + _: &mut Context, + ) -> Task> { + unreachable!() + } + + fn reload( + &mut self, + project: Entity, + window: &mut Window, + cx: &mut Context, + ) -> Task> { + self.editor.reload(project, window, cx) + } + + fn act_as_type<'a>( + &'a self, + type_id: TypeId, + self_handle: &'a Entity, + _: &'a App, + ) -> Option { + if type_id == TypeId::of::() { + Some(self_handle.to_any()) + } else if type_id == TypeId::of::() { + Some(self.editor.to_any()) + } else { + None + } + } + + fn breadcrumb_location(&self, _: &App) -> ToolbarItemLocation { + ToolbarItemLocation::PrimaryLeft + } + + fn breadcrumbs(&self, theme: &theme::Theme, cx: &App) -> Option> { + self.editor.breadcrumbs(theme, cx) + } + + fn added_to_workspace( + &mut self, + workspace: &mut Workspace, + window: &mut Window, + cx: &mut Context, + ) { + self.editor.update(cx, |editor, cx| { + editor.added_to_workspace(workspace, window, cx) + }); + } +} + +impl Render for ProjectDiffEditor { + fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { + let child = if self.buffer_changes.is_empty() { + div() + .bg(cx.theme().colors().editor_background) + .flex() + .items_center() + .justify_center() + .size_full() + .child(Label::new("No changes in the workspace")) + } else { + div().size_full().child(self.editor.clone()) + }; + + div() + .track_focus(&self.focus_handle) + .size_full() + .child(child) + } +} + +#[cfg(test)] +mod tests { + use git::status::{StatusCode, TrackedStatus}; + use gpui::{SemanticVersion, TestAppContext, VisualTestContext}; + use project::buffer_store::BufferChangeSet; + use serde_json::json; + use settings::SettingsStore; + use std::{ + ops::Deref as _, + path::{Path, PathBuf}, + }; + + use crate::test::editor_test_context::assert_state_with_diff; + + use super::*; + + // TODO finish + // #[gpui::test] + // async fn randomized_tests(cx: &mut TestAppContext) { + // // Create a new project (how?? 
temp fs?), + // let fs = FakeFs::new(cx.executor()); + // let project = Project::test(fs, [], cx).await; + + // // create random files with random content + + // // Commit it into git somehow (technically can do with "real" fs in a temp dir) + // // + // // Apply randomized changes to the project: select a random file, random change and apply to buffers + // } + + #[gpui::test(iterations = 30)] + async fn simple_edit_test(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + init_test(cx); + + let fs = fs::FakeFs::new(cx.executor().clone()); + fs.insert_tree( + "/root", + json!({ + ".git": {}, + "file_a": "This is file_a", + "file_b": "This is file_b", + }), + ) + .await; + + let project = Project::test(fs.clone(), [Path::new("/root")], cx).await; + let workspace = + cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); + let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx); + + let file_a_editor = workspace + .update(cx, |workspace, window, cx| { + let file_a_editor = + workspace.open_abs_path(PathBuf::from("/root/file_a"), true, window, cx); + ProjectDiffEditor::deploy(workspace, &Deploy, window, cx); + file_a_editor + }) + .unwrap() + .await + .expect("did not open an item at all") + .downcast::() + .expect("did not open an editor for file_a"); + let project_diff_editor = workspace + .update(cx, |workspace, _, cx| { + workspace + .active_pane() + .read(cx) + .items() + .find_map(|item| item.downcast::()) + }) + .unwrap() + .expect("did not find a ProjectDiffEditor"); + project_diff_editor.update(cx, |project_diff_editor, cx| { + assert!( + project_diff_editor.editor.read(cx).text(cx).is_empty(), + "Should have no changes after opening the diff on no git changes" + ); + }); + + let old_text = file_a_editor.update(cx, |editor, cx| editor.text(cx)); + let change = "an edit after git add"; + file_a_editor + .update_in(cx, |file_a_editor, window, cx| { + file_a_editor.insert(change, window, cx); + file_a_editor.save(false, project.clone(), window, cx) + }) + .await + .expect("failed to save a file"); + file_a_editor.update_in(cx, |file_a_editor, _window, cx| { + let change_set = cx.new(|cx| { + BufferChangeSet::new_with_base_text( + &old_text, + &file_a_editor.buffer().read(cx).as_singleton().unwrap(), + cx, + ) + }); + file_a_editor.buffer.update(cx, |buffer, cx| { + buffer.add_change_set(change_set.clone(), cx) + }); + project.update(cx, |project, cx| { + project.buffer_store().update(cx, |buffer_store, cx| { + buffer_store.set_unstaged_change_set( + file_a_editor + .buffer() + .read(cx) + .as_singleton() + .unwrap() + .read(cx) + .remote_id(), + change_set, + ); + }); + }); + }); + fs.set_status_for_repo_via_git_operation( + Path::new("/root/.git"), + &[( + Path::new("file_a"), + TrackedStatus { + worktree_status: StatusCode::Modified, + index_status: StatusCode::Unmodified, + } + .into(), + )], + ); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + let editor = project_diff_editor.update(cx, |diff_editor, _| diff_editor.editor.clone()); + + assert_state_with_diff( + &editor, + cx, + indoc::indoc! 
{ + " + - This is file_a + + an edit after git addThis is file_aˇ", + }, + ); + } + + fn init_test(cx: &mut gpui::TestAppContext) { + if std::env::var("RUST_LOG").is_ok() { + env_logger::try_init().ok(); + } + + cx.update(|cx| { + assets::Assets.load_test_fonts(cx); + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + theme::init(theme::LoadThemes::JustBase, cx); + release_channel::init(SemanticVersion::default(), cx); + client::init_settings(cx); + language::init(cx); + Project::init_settings(cx); + workspace::init_settings(cx); + crate::init(cx); + cx.set_staff(true); + }); + } +} diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index 2f1f00715e11c461f8900053af862ee6b37fbb6e..dd37c34afe65b1d66767f4bfb3d763b4d063b47a 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -279,9 +279,10 @@ fn show_hover( delay.await; } + let offset = anchor.to_offset(&snapshot.buffer_snapshot); let local_diagnostic = snapshot .buffer_snapshot - .diagnostics_in_range::<_, usize>(anchor..anchor) + .diagnostics_in_range::(offset..offset) // Find the entry with the most specific range .min_by_key(|entry| entry.range.len()); diff --git a/crates/editor/src/proposed_changes_editor.rs b/crates/editor/src/proposed_changes_editor.rs index 4e3066692d38f1d4ec4d0406d343b305de418017..9a61656a58c34f1e0777b047adc3e6165b665a0a 100644 --- a/crates/editor/src/proposed_changes_editor.rs +++ b/crates/editor/src/proposed_changes_editor.rs @@ -111,11 +111,7 @@ impl ProposedChangesEditor { .read(cx) .change_set_for(buffer.remote_id())?; Some(change_set.update(cx, |change_set, cx| { - change_set.set_base_text( - base_buffer.read(cx).text(), - buffer, - cx, - ) + change_set.set_base_text(base_buffer.clone(), buffer, cx) })) }) .collect::>() @@ -192,7 +188,7 @@ impl ProposedChangesEditor { new_change_sets.push(cx.new(|cx| { let mut change_set = BufferChangeSet::new(&branch_buffer, cx); let _ = change_set.set_base_text( - location.buffer.read(cx).text(), + location.buffer.clone(), branch_buffer.read(cx).text_snapshot(), cx, ); diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index a4e6013400a62d27e7a055420e630681c648669d..6246ec14fb1fe64b4e4852860c3b51d0ffee1834 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ b/crates/editor/src/test/editor_test_context.rs @@ -292,7 +292,7 @@ impl EditorTestContext { let path = self.update_buffer(|buffer, _| buffer.file().unwrap().path().clone()); fs.set_index_for_repo( &Self::root_path().join(".git"), - &[(path.as_ref(), diff_base.to_string())], + &[(path.into(), diff_base.to_string())], ); self.cx.run_until_parked(); } diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 2c64388ec3c260160864fe22f01fb921cf5d8b2c..c65877145ca8eabf65c5c23bf3798db67f2a6fc5 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -5,9 +5,9 @@ mod mac_watcher; pub mod fs_watcher; use anyhow::{anyhow, Context as _, Result}; -#[cfg(any(test, feature = "test-support"))] -use git::status::FileStatus; use git::GitHostingProviderRegistry; +#[cfg(any(test, feature = "test-support"))] +use git::{repository::RepoPath, status::FileStatus}; #[cfg(any(target_os = "linux", target_os = "freebsd"))] use ashpd::desktop::trash; @@ -1270,25 +1270,32 @@ impl FakeFs { }) } - pub fn set_index_for_repo(&self, dot_git: &Path, head_state: &[(&Path, String)]) { + pub fn set_index_for_repo(&self, dot_git: &Path, index_state: &[(RepoPath, String)]) { 
self.with_git_state(dot_git, true, |state| { state.index_contents.clear(); state.index_contents.extend( + index_state + .iter() + .map(|(path, content)| (path.clone(), content.clone())), + ); + }); + } + + pub fn set_head_for_repo(&self, dot_git: &Path, head_state: &[(RepoPath, String)]) { + self.with_git_state(dot_git, true, |state| { + state.head_contents.clear(); + state.head_contents.extend( head_state .iter() - .map(|(path, content)| (path.to_path_buf(), content.clone())), + .map(|(path, content)| (path.clone(), content.clone())), ); }); } - pub fn set_blame_for_repo(&self, dot_git: &Path, blames: Vec<(&Path, git::blame::Blame)>) { + pub fn set_blame_for_repo(&self, dot_git: &Path, blames: Vec<(RepoPath, git::blame::Blame)>) { self.with_git_state(dot_git, true, |state| { state.blames.clear(); - state.blames.extend( - blames - .into_iter() - .map(|(path, blame)| (path.to_path_buf(), blame)), - ); + state.blames.extend(blames); }); } diff --git a/crates/git/src/diff.rs b/crates/git/src/diff.rs index 4dc835ddaccb12a27da7bd119a51132c61092b1f..7fd6628a89efc8677de400bf9a28b33d86173112 100644 --- a/crates/git/src/diff.rs +++ b/crates/git/src/diff.rs @@ -74,31 +74,34 @@ impl BufferDiff { } } - pub fn build(diff_base: &str, buffer: &text::BufferSnapshot) -> Self { + pub fn build(diff_base: Option<&str>, buffer: &text::BufferSnapshot) -> Self { let mut tree = SumTree::new(buffer); - let buffer_text = buffer.as_rope().to_string(); - let patch = Self::diff(diff_base, &buffer_text); - - // A common case in Zed is that the empty buffer is represented as just a newline, - // but if we just compute a naive diff you get a "preserved" line in the middle, - // which is a bit odd. - if buffer_text == "\n" && diff_base.ends_with("\n") && diff_base.len() > 1 { - tree.push( - InternalDiffHunk { - buffer_range: buffer.anchor_before(0)..buffer.anchor_before(0), - diff_base_byte_range: 0..diff_base.len() - 1, - }, - buffer, - ); - return Self { tree }; - } + if let Some(diff_base) = diff_base { + let buffer_text = buffer.as_rope().to_string(); + let patch = Self::diff(diff_base, &buffer_text); + + // A common case in Zed is that the empty buffer is represented as just a newline, + // but if we just compute a naive diff you get a "preserved" line in the middle, + // which is a bit odd. 
+ if buffer_text == "\n" && diff_base.ends_with("\n") && diff_base.len() > 1 { + tree.push( + InternalDiffHunk { + buffer_range: buffer.anchor_before(0)..buffer.anchor_before(0), + diff_base_byte_range: 0..diff_base.len() - 1, + }, + buffer, + ); + return Self { tree }; + } - if let Some(patch) = patch { - let mut divergence = 0; - for hunk_index in 0..patch.num_hunks() { - let hunk = Self::process_patch_hunk(&patch, hunk_index, buffer, &mut divergence); - tree.push(hunk, buffer); + if let Some(patch) = patch { + let mut divergence = 0; + for hunk_index in 0..patch.num_hunks() { + let hunk = + Self::process_patch_hunk(&patch, hunk_index, buffer, &mut divergence); + tree.push(hunk, buffer); + } } } @@ -125,11 +128,14 @@ impl BufferDiff { range: Range, buffer: &'a BufferSnapshot, ) -> impl 'a + Iterator { + let range = range.to_offset(buffer); + let mut cursor = self .tree .filter::<_, DiffHunkSummary>(buffer, move |summary| { - let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt(); - let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt(); + let summary_range = summary.buffer_range.to_offset(buffer); + let before_start = summary_range.end < range.start; + let after_end = summary_range.start > range.end; !before_start && !after_end }); @@ -151,21 +157,25 @@ impl BufferDiff { }); let mut summaries = buffer.summaries_for_anchors_with_payload::(anchor_iter); - iter::from_fn(move || { + iter::from_fn(move || loop { let (start_point, (start_anchor, start_base)) = summaries.next()?; let (mut end_point, (mut end_anchor, end_base)) = summaries.next()?; + if !start_anchor.is_valid(buffer) { + continue; + } + if end_point.column > 0 { end_point.row += 1; end_point.column = 0; end_anchor = buffer.anchor_before(end_point); } - Some(DiffHunk { + return Some(DiffHunk { row_range: start_point.row..end_point.row, diff_base_byte_range: start_base..end_base, buffer_range: start_anchor..end_anchor, - }) + }); }) } @@ -270,7 +280,7 @@ impl BufferDiff { } pub fn update(&mut self, diff_base: &Rope, buffer: &text::BufferSnapshot) { - *self = Self::build(&diff_base.to_string(), buffer); + *self = Self::build(Some(&diff_base.to_string()), buffer); } #[cfg(test)] @@ -536,7 +546,7 @@ mod tests { let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text_1); let empty_diff = BufferDiff::new(&buffer); - let diff_1 = BufferDiff::build(&base_text, &buffer); + let diff_1 = BufferDiff::build(Some(&base_text), &buffer); let range = diff_1.compare(&empty_diff, &buffer).unwrap(); assert_eq!(range.to_point(&buffer), Point::new(0, 0)..Point::new(8, 0)); @@ -554,7 +564,7 @@ mod tests { " .unindent(), ); - let diff_2 = BufferDiff::build(&base_text, &buffer); + let diff_2 = BufferDiff::build(Some(&base_text), &buffer); assert_eq!(None, diff_2.compare(&diff_1, &buffer)); // Edit turns a deletion hunk into a modification. 
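A minimal sketch of the new optional-base signature (illustrative, not part of the patch; it mirrors the test setup above):

    // With a base text, hunks are computed as before; with None, the hunk tree stays empty.
    let buffer = Buffer::new(0, BufferId::new(1).unwrap(), "one\ntwo\nthree\n".to_string());
    let with_base = BufferDiff::build(Some("one\nTWO\nthree\n"), &buffer);
    let without_base = BufferDiff::build(None, &buffer);
    assert!(with_base.hunks_in_row_range(0..4, &buffer).next().is_some());
    assert!(without_base.hunks_in_row_range(0..4, &buffer).next().is_none());

Accepting None lets callers construct an empty diff without special-casing a missing index or HEAD entry.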
@@ -571,7 +581,7 @@ mod tests { " .unindent(), ); - let diff_3 = BufferDiff::build(&base_text, &buffer); + let diff_3 = BufferDiff::build(Some(&base_text), &buffer); let range = diff_3.compare(&diff_2, &buffer).unwrap(); assert_eq!(range.to_point(&buffer), Point::new(1, 0)..Point::new(2, 0)); @@ -588,7 +598,7 @@ mod tests { " .unindent(), ); - let diff_4 = BufferDiff::build(&base_text, &buffer); + let diff_4 = BufferDiff::build(Some(&base_text), &buffer); let range = diff_4.compare(&diff_3, &buffer).unwrap(); assert_eq!(range.to_point(&buffer), Point::new(3, 4)..Point::new(4, 0)); @@ -606,7 +616,7 @@ mod tests { " .unindent(), ); - let diff_5 = BufferDiff::build(&base_text, &buffer); + let diff_5 = BufferDiff::build(Some(&base_text), &buffer); let range = diff_5.compare(&diff_4, &buffer).unwrap(); assert_eq!(range.to_point(&buffer), Point::new(3, 0)..Point::new(4, 0)); @@ -624,7 +634,7 @@ mod tests { " .unindent(), ); - let diff_6 = BufferDiff::build(&base_text, &buffer); + let diff_6 = BufferDiff::build(Some(&base_text), &buffer); let range = diff_6.compare(&diff_5, &buffer).unwrap(); assert_eq!(range.to_point(&buffer), Point::new(7, 0)..Point::new(8, 0)); } diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 7b82c2571d64e9c4ed3cf1b2b1e2b038ad22a3a7..a3777401525778ed1e8e79d4bd114792aec89a6e 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -29,9 +29,15 @@ pub struct Branch { pub trait GitRepository: Send + Sync { fn reload_index(&self); - /// Loads a git repository entry's contents. + /// Returns the contents of an entry in the repository's index, or None if there is no entry for the given path. + /// + /// Note that for symlink entries, this will return the contents of the symlink, not the target. + fn load_index_text(&self, path: &RepoPath) -> Option; + + /// Returns the contents of an entry in the repository's HEAD, or None if HEAD does not exist or has no entry for the given path. + /// /// Note that for symlink entries, this will return the contents of the symlink, not the target. - fn load_index_text(&self, relative_file_path: &Path) -> Option; + fn load_committed_text(&self, path: &RepoPath) -> Option; /// Returns the URL of the remote with the given name. 
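A short sketch of how the index/HEAD split reads at a call site (illustrative; it assumes repo is a &dyn GitRepository and that the path is tracked):

    let path = RepoPath::from(Path::new("a.txt"));
    let staged = repo.load_index_text(&path);        // index contents, None if the path is not staged
    let committed = repo.load_committed_text(&path); // HEAD contents, None if HEAD is missing or lacks the entry

Comparing the working copy against the index gives the unstaged changes, while comparing it against HEAD gives the full uncommitted changes.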
fn remote_url(&self, name: &str) -> Option; @@ -106,15 +112,15 @@ impl GitRepository for RealGitRepository { repo.path().into() } - fn load_index_text(&self, relative_file_path: &Path) -> Option { - fn logic(repo: &git2::Repository, relative_file_path: &Path) -> Result> { + fn load_index_text(&self, path: &RepoPath) -> Option { + fn logic(repo: &git2::Repository, path: &RepoPath) -> Result> { const STAGE_NORMAL: i32 = 0; let index = repo.index()?; // This check is required because index.get_path() unwraps internally :( - check_path_to_repo_path_errors(relative_file_path)?; + check_path_to_repo_path_errors(path)?; - let oid = match index.get_path(relative_file_path, STAGE_NORMAL) { + let oid = match index.get_path(path, STAGE_NORMAL) { Some(entry) if entry.mode != GIT_MODE_SYMLINK => entry.id, _ => return Ok(None), }; @@ -123,13 +129,22 @@ impl GitRepository for RealGitRepository { Ok(Some(String::from_utf8(content)?)) } - match logic(&self.repository.lock(), relative_file_path) { + match logic(&self.repository.lock(), path) { Ok(value) => return value, - Err(err) => log::error!("Error loading head text: {:?}", err), + Err(err) => log::error!("Error loading index text: {:?}", err), } None } + fn load_committed_text(&self, path: &RepoPath) -> Option { + let repo = self.repository.lock(); + let head = repo.head().ok()?.peel_to_tree().log_err()?; + let oid = head.get_path(path).ok()?.id(); + let content = repo.find_blob(oid).log_err()?.content().to_owned(); + let content = String::from_utf8(content).log_err()?; + Some(content) + } + fn remote_url(&self, name: &str) -> Option { let repo = self.repository.lock(); let remote = repo.find_remote(name).ok()?; @@ -325,8 +340,9 @@ pub struct FakeGitRepository { pub struct FakeGitRepositoryState { pub dot_git_dir: PathBuf, pub event_emitter: smol::channel::Sender, - pub index_contents: HashMap, - pub blames: HashMap, + pub head_contents: HashMap, + pub index_contents: HashMap, + pub blames: HashMap, pub statuses: HashMap, pub current_branch_name: Option, pub branches: HashSet, @@ -343,6 +359,7 @@ impl FakeGitRepositoryState { FakeGitRepositoryState { dot_git_dir, event_emitter, + head_contents: Default::default(), index_contents: Default::default(), blames: Default::default(), statuses: Default::default(), @@ -355,9 +372,14 @@ impl FakeGitRepositoryState { impl GitRepository for FakeGitRepository { fn reload_index(&self) {} - fn load_index_text(&self, path: &Path) -> Option { + fn load_index_text(&self, path: &RepoPath) -> Option { let state = self.state.lock(); - state.index_contents.get(path).cloned() + state.index_contents.get(path.as_ref()).cloned() + } + + fn load_committed_text(&self, path: &RepoPath) -> Option { + let state = self.state.lock(); + state.head_contents.get(path.as_ref()).cloned() } fn remote_url(&self, _name: &str) -> Option { @@ -529,6 +551,12 @@ impl From<&Path> for RepoPath { } } +impl From> for RepoPath { + fn from(value: Arc) -> Self { + RepoPath(value) + } +} + impl From for RepoPath { fn from(value: PathBuf) -> Self { RepoPath::new(value) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index dc6399122c92aa3b57c9d3fa52b344d1922fdd3b..ceb387d2e10ce5b5658aa44bd704e20e4ec8b214 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1001,6 +1001,34 @@ impl Buffer { } } + #[cfg(any(test, feature = "test-support"))] + pub fn build_snapshot_sync( + text: Rope, + language: Option>, + language_registry: Option>, + cx: &mut App, + ) -> BufferSnapshot { + let entity_id = 
cx.reserve_entity::().entity_id(); + let buffer_id = entity_id.as_non_zero_u64().into(); + let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot(); + let mut syntax = SyntaxMap::new(&text).snapshot(); + if let Some(language) = language.clone() { + let text = text.clone(); + let language = language.clone(); + let language_registry = language_registry.clone(); + syntax.reparse(&text, language_registry, language); + } + BufferSnapshot { + text, + syntax, + file: None, + diagnostics: Default::default(), + remote_selections: Default::default(), + language, + non_text_state_update_count: 0, + } + } + /// Retrieve a snapshot of the buffer's current state. This is computationally /// cheap, and allows reading from the buffer on a background thread. pub fn snapshot(&self) -> BufferSnapshot { diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 46ae2291426943209785879af7a821fe93e8bab6..cc8afcb234f66c7725d4018633719f41e9bdf778 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -28,7 +28,7 @@ use smol::future::yield_now; use std::{ any::type_name, borrow::Cow, - cell::{Ref, RefCell, RefMut}, + cell::{Ref, RefCell}, cmp, fmt, future::Future, io, @@ -290,6 +290,7 @@ impl ExcerptBoundary { #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] pub struct RowInfo { + pub buffer_id: Option, pub buffer_row: Option, pub multibuffer_row: Option, pub diff_status: Option, @@ -1742,7 +1743,7 @@ impl MultiBuffer { } self.sync_diff_transforms( - snapshot, + &mut snapshot, vec![Edit { old: edit_start..edit_start, new: edit_start..edit_end, @@ -1775,7 +1776,7 @@ impl MultiBuffer { snapshot.has_conflict = false; self.sync_diff_transforms( - snapshot, + &mut snapshot, vec![Edit { old: start..prev_len, new: start..start, @@ -2053,7 +2054,7 @@ impl MultiBuffer { snapshot.trailing_excerpt_update_count += 1; } - self.sync_diff_transforms(snapshot, edits, DiffChangeKind::BufferEdited); + self.sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited); cx.emit(Event::Edited { singleton_buffer_edited: false, edited_buffer: None, @@ -2218,7 +2219,7 @@ impl MultiBuffer { } self.sync_diff_transforms( - snapshot, + &mut snapshot, excerpt_edits, DiffChangeKind::DiffUpdated { base_changed: base_text_changed, @@ -2388,7 +2389,7 @@ impl MultiBuffer { cx: &mut Context, ) { self.sync(cx); - let snapshot = self.snapshot.borrow_mut(); + let mut snapshot = self.snapshot.borrow_mut(); let mut excerpt_edits = Vec::new(); for range in ranges.iter() { let end_excerpt_id = range.end.excerpt_id; @@ -2422,7 +2423,7 @@ impl MultiBuffer { } self.sync_diff_transforms( - snapshot, + &mut snapshot, excerpt_edits, DiffChangeKind::ExpandOrCollapseHunks { expand }, ); @@ -2491,7 +2492,7 @@ impl MultiBuffer { drop(cursor); snapshot.excerpts = new_excerpts; - self.sync_diff_transforms(snapshot, edits, DiffChangeKind::BufferEdited); + self.sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited); cx.emit(Event::Edited { singleton_buffer_edited: false, edited_buffer: None, @@ -2592,7 +2593,7 @@ impl MultiBuffer { drop(cursor); snapshot.excerpts = new_excerpts; - self.sync_diff_transforms(snapshot, edits, DiffChangeKind::BufferEdited); + self.sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited); cx.emit(Event::Edited { singleton_buffer_edited: false, edited_buffer: None, @@ -2705,12 +2706,12 @@ impl MultiBuffer { drop(cursor); snapshot.excerpts = new_excerpts; - 
self.sync_diff_transforms(snapshot, edits, DiffChangeKind::BufferEdited); + self.sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited); } fn sync_diff_transforms( &self, - mut snapshot: RefMut, + snapshot: &mut MultiBufferSnapshot, excerpt_edits: Vec>, change_kind: DiffChangeKind, ) { @@ -2791,11 +2792,23 @@ impl MultiBuffer { if excerpt_edits.peek().map_or(true, |next_edit| { next_edit.old.start >= old_diff_transforms.end(&()).0 }) { + let keep_next_old_transform = (old_diff_transforms.start().0 >= edit.old.end) + && match old_diff_transforms.item() { + Some(DiffTransform::BufferContent { + inserted_hunk_anchor: Some(hunk_anchor), + .. + }) => excerpts + .item() + .is_some_and(|excerpt| hunk_anchor.1.is_valid(&excerpt.buffer)), + _ => true, + }; + let mut excerpt_offset = edit.new.end; - if old_diff_transforms.start().0 < edit.old.end { + if !keep_next_old_transform { excerpt_offset += old_diff_transforms.end(&()).0 - edit.old.end; old_diff_transforms.next(&()); } + old_expanded_hunks.clear(); self.push_buffer_content_transform( &snapshot, @@ -2894,12 +2907,14 @@ impl MultiBuffer { buffer.anchor_before(edit_buffer_start)..buffer.anchor_after(edit_buffer_end); for hunk in diff.hunks_intersecting_range(edit_anchor_range, buffer) { + let hunk_buffer_range = hunk.buffer_range.to_offset(buffer); + let hunk_anchor = (excerpt.id, hunk.buffer_range.start); - if !hunk_anchor.1.is_valid(buffer) { + if hunk_buffer_range.start < excerpt_buffer_start { + log::trace!("skipping hunk that starts before excerpt"); continue; } - let hunk_buffer_range = hunk.buffer_range.to_offset(buffer); let hunk_excerpt_start = excerpt_start + ExcerptOffset::new( hunk_buffer_range.start.saturating_sub(excerpt_buffer_start), @@ -2941,8 +2956,9 @@ impl MultiBuffer { if should_expand_hunk { did_expand_hunks = true; log::trace!( - "expanding hunk {:?}", + "expanding hunk {:?}, excerpt:{:?}", hunk_excerpt_start.value..hunk_excerpt_end.value, + excerpt.id ); if !hunk.diff_base_byte_range.is_empty() @@ -3389,12 +3405,12 @@ impl MultiBufferSnapshot { self.diff_hunks_in_range(Anchor::min()..Anchor::max()) } - pub fn diff_hunks_in_range( + pub fn diff_hunks_in_range( &self, range: Range, ) -> impl Iterator + '_ { - let range = range.start.to_offset(self)..range.end.to_offset(self); - self.lift_buffer_metadata(range.clone(), move |buffer, buffer_range| { + let query_range = range.start.to_point(self)..range.end.to_point(self); + self.lift_buffer_metadata(query_range.clone(), move |buffer, buffer_range| { let diff = self.diffs.get(&buffer.remote_id())?; let buffer_start = buffer.anchor_before(buffer_range.start); let buffer_end = buffer.anchor_after(buffer_range.end); @@ -3409,19 +3425,25 @@ impl MultiBufferSnapshot { }), ) }) - .map(|(range, hunk, excerpt)| { + .filter_map(move |(range, hunk, excerpt)| { + if range.start != range.end + && range.end == query_range.start + && !hunk.row_range.is_empty() + { + return None; + } let end_row = if range.end.column == 0 { range.end.row } else { range.end.row + 1 }; - MultiBufferDiffHunk { + Some(MultiBufferDiffHunk { row_range: MultiBufferRow(range.start.row)..MultiBufferRow(end_row), buffer_id: excerpt.buffer_id, excerpt_id: excerpt.id, buffer_range: hunk.buffer_range.clone(), diff_base_byte_range: hunk.diff_base_byte_range.clone(), - } + }) }) } @@ -3560,8 +3582,8 @@ impl MultiBufferSnapshot { /// multi-buffer coordinates. 
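A small sketch of the relaxed bound on diff_hunks_in_range (illustrative; it assumes snapshot is a MultiBufferSnapshot with a change set attached):

    // Only ToPoint is required now, so Point ranges (and anchor ranges) can be passed directly.
    let hunks: Vec<MultiBufferDiffHunk> = snapshot
        .diff_hunks_in_range(Point::zero()..snapshot.max_point())
        .collect();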
fn lift_buffer_metadata<'a, D, M, I>( &'a self, - range: Range, - get_buffer_metadata: impl 'a + Fn(&'a BufferSnapshot, Range) -> Option, + query_range: Range, + get_buffer_metadata: impl 'a + Fn(&'a BufferSnapshot, Range) -> Option, ) -> impl Iterator, M, &'a Excerpt)> + 'a where I: Iterator, M)> + 'a, @@ -3569,18 +3591,19 @@ impl MultiBufferSnapshot { { let max_position = D::from_text_summary(&self.text_summary()); let mut current_excerpt_metadata: Option<(ExcerptId, I)> = None; - let mut cursor = self.cursor::>(); + let mut cursor = self.cursor::(); // Find the excerpt and buffer offset where the given range ends. - cursor.seek(&DimensionPair { - key: range.end, - value: None, - }); + cursor.seek(&query_range.end); let mut range_end = None; while let Some(region) = cursor.region() { if region.is_main_buffer { - let mut buffer_end = region.buffer_range.start.key; - let overshoot = range.end.saturating_sub(region.range.start.key); + let mut buffer_end = region.buffer_range.start; + let overshoot = if query_range.end > region.range.start { + query_range.end - region.range.start + } else { + D::default() + }; buffer_end.add_assign(&overshoot); range_end = Some((region.excerpt.id, buffer_end)); break; @@ -3588,13 +3611,10 @@ impl MultiBufferSnapshot { cursor.next(); } - cursor.seek(&DimensionPair { - key: range.start, - value: None, - }); + cursor.seek(&query_range.start); if let Some(region) = cursor.region().filter(|region| !region.is_main_buffer) { - if region.range.start.key > 0 { + if region.range.start > D::zero(&()) { cursor.prev() } } @@ -3613,14 +3633,18 @@ impl MultiBufferSnapshot { // and retrieve the metadata for the resulting range. else { let region = cursor.region()?; - let buffer_start = if region.is_main_buffer { - let start_overshoot = range.start.saturating_sub(region.range.start.key); - (region.buffer_range.start.key + start_overshoot) - .min(region.buffer_range.end.key) + let mut buffer_start; + if region.is_main_buffer { + buffer_start = region.buffer_range.start; + if query_range.start > region.range.start { + let overshoot = query_range.start - region.range.start; + buffer_start.add_assign(&overshoot); + } + buffer_start = buffer_start.min(region.buffer_range.end); } else { - cursor.main_buffer_position()?.key + buffer_start = cursor.main_buffer_position()?; }; - let mut buffer_end = excerpt.range.context.end.to_offset(&excerpt.buffer); + let mut buffer_end = excerpt.range.context.end.summary::(&excerpt.buffer); if let Some((end_excerpt_id, end_buffer_offset)) = range_end { if excerpt.id == end_excerpt_id { buffer_end = buffer_end.min(end_buffer_offset); @@ -3637,53 +3661,56 @@ impl MultiBufferSnapshot { }; // Visit each metadata item. - if let Some((range, metadata)) = metadata_iter.and_then(Iterator::next) { + if let Some((metadata_buffer_range, metadata)) = metadata_iter.and_then(Iterator::next) + { // Find the multibuffer regions that contain the start and end of // the metadata item's range. 
- if range.start > D::default() { + if metadata_buffer_range.start > D::default() { while let Some(region) = cursor.region() { - if !region.is_main_buffer - || region.buffer.remote_id() == excerpt.buffer_id - && region.buffer_range.end.value.unwrap() < range.start + if region.is_main_buffer + && (region.buffer_range.end >= metadata_buffer_range.start + || cursor.is_at_end_of_excerpt()) { - cursor.next(); - } else { break; } + cursor.next(); } } let start_region = cursor.region()?; while let Some(region) = cursor.region() { - if !region.is_main_buffer - || region.buffer.remote_id() == excerpt.buffer_id - && region.buffer_range.end.value.unwrap() <= range.end + if region.is_main_buffer + && (region.buffer_range.end > metadata_buffer_range.end + || cursor.is_at_end_of_excerpt()) { - cursor.next(); - } else { break; } + cursor.next(); } - let end_region = cursor - .region() - .filter(|region| region.buffer.remote_id() == excerpt.buffer_id); + let end_region = cursor.region(); // Convert the metadata item's range into multibuffer coordinates. - let mut start = start_region.range.start.value.unwrap(); - let region_buffer_start = start_region.buffer_range.start.value.unwrap(); - if start_region.is_main_buffer && range.start > region_buffer_start { - start.add_assign(&(range.start - region_buffer_start)); + let mut start_position = start_region.range.start; + let region_buffer_start = start_region.buffer_range.start; + if start_region.is_main_buffer && metadata_buffer_range.start > region_buffer_start + { + start_position.add_assign(&(metadata_buffer_range.start - region_buffer_start)); + start_position = start_position.min(start_region.range.end); } - let mut end = max_position; - if let Some(end_region) = end_region { - end = end_region.range.start.value.unwrap(); + + let mut end_position = max_position; + if let Some(end_region) = &end_region { + end_position = end_region.range.start; debug_assert!(end_region.is_main_buffer); - let region_buffer_start = end_region.buffer_range.start.value.unwrap(); - if range.end > region_buffer_start { - end.add_assign(&(range.end - region_buffer_start)); + let region_buffer_start = end_region.buffer_range.start; + if metadata_buffer_range.end > region_buffer_start { + end_position.add_assign(&(metadata_buffer_range.end - region_buffer_start)); } + end_position = end_position.min(end_region.range.end); } - return Some((start..end, metadata, excerpt)); + if start_position <= query_range.end && end_position >= query_range.start { + return Some((start_position..end_position, metadata, excerpt)); + } } // When there are no more metadata items for this excerpt, move to the next excerpt. 
else { @@ -4509,7 +4536,16 @@ impl MultiBufferSnapshot { } let excerpt_start_position = D::from_text_summary(&cursor.start().text); - if let Some(excerpt) = cursor.item().filter(|excerpt| excerpt.id == excerpt_id) { + if let Some(excerpt) = cursor.item() { + if excerpt.id != excerpt_id { + let position = self.resolve_summary_for_anchor( + &Anchor::min(), + excerpt_start_position, + &mut diff_transforms_cursor, + ); + summaries.extend(excerpt_anchors.map(|_| position)); + continue; + } let excerpt_buffer_start = excerpt.range.context.start.summary::(&excerpt.buffer); let excerpt_buffer_end = excerpt.range.context.end.summary::(&excerpt.buffer); @@ -5525,7 +5561,7 @@ impl MultiBufferSnapshot { buffer_id: BufferId, group_id: usize, ) -> impl Iterator> + '_ { - self.lift_buffer_metadata(0..self.len(), move |buffer, _| { + self.lift_buffer_metadata(Point::zero()..self.max_point(), move |buffer, _| { if buffer.remote_id() != buffer_id { return None; }; @@ -5538,15 +5574,19 @@ impl MultiBufferSnapshot { .map(|(range, diagnostic, _)| DiagnosticEntry { diagnostic, range }) } - pub fn diagnostics_in_range<'a, T, O>( + pub fn diagnostics_in_range<'a, T>( &'a self, range: Range, - ) -> impl Iterator> + 'a + ) -> impl Iterator> + 'a where - T: 'a + ToOffset, - O: 'a + text::FromAnchor + Copy + TextDimension + Ord + Sub + fmt::Debug, + T: 'a + + text::ToOffset + + text::FromAnchor + + TextDimension + + Ord + + Sub + + fmt::Debug, { - let range = range.start.to_offset(self)..range.end.to_offset(self); self.lift_buffer_metadata(range, move |buffer, buffer_range| { Some( buffer @@ -6036,6 +6076,24 @@ where self.cached_region.clone() } + fn is_at_end_of_excerpt(&mut self) -> bool { + if self.diff_transforms.end(&()).1 < self.excerpts.end(&()) { + return false; + } else if self.diff_transforms.end(&()).1 > self.excerpts.end(&()) + || self.diff_transforms.item().is_none() + { + return true; + } + + self.diff_transforms.next(&()); + let next_transform = self.diff_transforms.item(); + self.diff_transforms.prev(&()); + + next_transform.map_or(true, |next_transform| { + matches!(next_transform, DiffTransform::BufferContent { .. 
}) + }) + } + fn main_buffer_position(&self) -> Option { let excerpt = self.excerpts.item()?; let buffer = &excerpt.buffer; @@ -6879,6 +6937,7 @@ impl<'a> Iterator for MultiBufferRows<'a> { if self.is_empty && self.point.row == 0 { self.point += Point::new(1, 0); return Some(RowInfo { + buffer_id: None, buffer_row: Some(0), multibuffer_row: Some(MultiBufferRow(0)), diff_status: None, @@ -6906,6 +6965,7 @@ impl<'a> Iterator for MultiBufferRows<'a> { .to_point(&last_excerpt.buffer) .row; return Some(RowInfo { + buffer_id: Some(last_excerpt.buffer_id), buffer_row: Some(last_row), multibuffer_row: Some(multibuffer_row), diff_status: None, @@ -6919,6 +6979,7 @@ impl<'a> Iterator for MultiBufferRows<'a> { let overshoot = self.point - region.range.start; let buffer_point = region.buffer_range.start + overshoot; let result = Some(RowInfo { + buffer_id: Some(region.buffer.remote_id()), buffer_row: Some(buffer_point.row), multibuffer_row: Some(MultiBufferRow(self.point.row)), diff_status: if region.is_inserted_hunk && self.point < region.range.end { diff --git a/crates/multi_buffer/src/multi_buffer_tests.rs b/crates/multi_buffer/src/multi_buffer_tests.rs index d85e5eba5d92f9f775c1745d0db36f747c54db8a..61094a1b4f132dc7c67aac65f068be93716ea207 100644 --- a/crates/multi_buffer/src/multi_buffer_tests.rs +++ b/crates/multi_buffer/src/multi_buffer_tests.rs @@ -19,12 +19,14 @@ fn init_logger() { #[gpui::test] fn test_empty_singleton(cx: &mut App) { let buffer = cx.new(|cx| Buffer::local("", cx)); + let buffer_id = buffer.read(cx).remote_id(); let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx)); let snapshot = multibuffer.read(cx).snapshot(cx); assert_eq!(snapshot.text(), ""); assert_eq!( snapshot.row_infos(MultiBufferRow(0)).collect::>(), [RowInfo { + buffer_id: Some(buffer_id), buffer_row: Some(0), multibuffer_row: Some(MultiBufferRow(0)), diff_status: None @@ -359,13 +361,7 @@ fn test_diff_boundary_anchors(cx: &mut TestAppContext) { let base_text = "one\ntwo\nthree\n"; let text = "one\nthree\n"; let buffer = cx.new(|cx| Buffer::local(text, cx)); - let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - let change_set = cx.new(|cx| { - let mut change_set = BufferChangeSet::new(&buffer, cx); - let _ = change_set.set_base_text(base_text.into(), snapshot.text, cx); - change_set - }); - cx.run_until_parked(); + let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx)); let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); multibuffer.update(cx, |multibuffer, cx| { multibuffer.add_change_set(change_set, cx) @@ -382,7 +378,7 @@ fn test_diff_boundary_anchors(cx: &mut TestAppContext) { let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx)); let actual_text = snapshot.text(); let actual_row_infos = snapshot.row_infos(MultiBufferRow(0)).collect::>(); - let actual_diff = format_diff(&actual_text, &actual_row_infos, &Default::default()); + let actual_diff = format_diff(&actual_text, &actual_row_infos, &Default::default(), None); pretty_assertions::assert_eq!( actual_diff, indoc! 
{ @@ -409,13 +405,7 @@ fn test_diff_hunks_in_range(cx: &mut TestAppContext) { let base_text = "one\ntwo\nthree\nfour\nfive\nsix\nseven\neight\n"; let text = "one\nfour\nseven\n"; let buffer = cx.new(|cx| Buffer::local(text, cx)); - let change_set = cx.new(|cx| { - let mut change_set = BufferChangeSet::new(&buffer, cx); - let snapshot = buffer.read(cx).snapshot(); - let _ = change_set.set_base_text(base_text.into(), snapshot.text, cx); - change_set - }); - cx.run_until_parked(); + let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx)); let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); let (mut snapshot, mut subscription) = multibuffer.update(cx, |multibuffer, cx| { (multibuffer.snapshot(cx), multibuffer.subscribe()) @@ -508,13 +498,7 @@ fn test_editing_text_in_diff_hunks(cx: &mut TestAppContext) { let base_text = "one\ntwo\nfour\nfive\nsix\nseven\n"; let text = "one\ntwo\nTHREE\nfour\nfive\nseven\n"; let buffer = cx.new(|cx| Buffer::local(text, cx)); - let change_set = cx.new(|cx| { - let mut change_set = BufferChangeSet::new(&buffer, cx); - let snapshot = buffer.read(cx).text_snapshot(); - let _ = change_set.set_base_text(base_text.into(), snapshot, cx); - change_set - }); - cx.run_until_parked(); + let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(&base_text, &buffer, cx)); let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx)); let (mut snapshot, mut subscription) = multibuffer.update(cx, |multibuffer, cx| { @@ -995,12 +979,7 @@ fn test_empty_diff_excerpt(cx: &mut TestAppContext) { let buffer = cx.new(|cx| Buffer::local("", cx)); let base_text = "a\nb\nc"; - let change_set = cx.new(|cx| { - let snapshot = buffer.read(cx).snapshot(); - let mut change_set = BufferChangeSet::new(&buffer, cx); - let _ = change_set.set_base_text(base_text.into(), snapshot.text, cx); - change_set - }); + let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx)); multibuffer.update(cx, |multibuffer, cx| { multibuffer.set_all_diff_hunks_expanded(cx); multibuffer.add_change_set(change_set.clone(), cx); @@ -1040,7 +1019,7 @@ fn test_empty_diff_excerpt(cx: &mut TestAppContext) { buffer.update(cx, |buffer, cx| { buffer.edit([(0..0, "a\nb\nc")], None, cx); change_set.update(cx, |change_set, cx| { - let _ = change_set.recalculate_diff(buffer.snapshot().text, cx); + change_set.recalculate_diff_sync(buffer.snapshot().text, cx); }); assert_eq!(buffer.text(), "a\nb\nc") }); @@ -1052,7 +1031,7 @@ fn test_empty_diff_excerpt(cx: &mut TestAppContext) { buffer.update(cx, |buffer, cx| { buffer.undo(cx); change_set.update(cx, |change_set, cx| { - let _ = change_set.recalculate_diff(buffer.snapshot().text, cx); + change_set.recalculate_diff_sync(buffer.snapshot().text, cx); }); assert_eq!(buffer.text(), "") }); @@ -1294,8 +1273,7 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) { ); let buffer = cx.new(|cx| Buffer::local(text, cx)); - let change_set = - cx.new(|cx| BufferChangeSet::new_with_base_text(base_text.to_string(), &buffer, cx)); + let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx)); cx.run_until_parked(); let multibuffer = cx.new(|cx| { @@ -1485,8 +1463,8 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) { assert_line_indents(&snapshot); // Recalculate the diff, changing the first diff hunk. 
- let _ = change_set.update(cx, |change_set, cx| { - change_set.recalculate_diff(buffer.read(cx).text_snapshot(), cx) + change_set.update(cx, |change_set, cx| { + change_set.recalculate_diff_sync(buffer.read(cx).text_snapshot(), cx); }); cx.run_until_parked(); assert_new_snapshot( @@ -1538,8 +1516,7 @@ fn test_repeatedly_expand_a_diff_hunk(cx: &mut TestAppContext) { ); let buffer = cx.new(|cx| Buffer::local(text, cx)); - let change_set = - cx.new(|cx| BufferChangeSet::new_with_base_text(base_text.to_string(), &buffer, cx)); + let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx)); cx.run_until_parked(); let multibuffer = cx.new(|cx| { @@ -1840,10 +1817,8 @@ fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) { let buffer_1 = cx.new(|cx| Buffer::local(text_1, cx)); let buffer_2 = cx.new(|cx| Buffer::local(text_2, cx)); - let change_set_1 = - cx.new(|cx| BufferChangeSet::new_with_base_text(base_text_1.to_string(), &buffer_1, cx)); - let change_set_2 = - cx.new(|cx| BufferChangeSet::new_with_base_text(base_text_2.to_string(), &buffer_2, cx)); + let change_set_1 = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text_1, &buffer_1, cx)); + let change_set_2 = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text_2, &buffer_2, cx)); cx.run_until_parked(); let multibuffer = cx.new(|cx| { @@ -2028,6 +2003,7 @@ struct ReferenceMultibuffer { change_sets: HashMap>, } +#[derive(Debug)] struct ReferenceExcerpt { id: ExcerptId, buffer: Entity, @@ -2037,6 +2013,7 @@ struct ReferenceExcerpt { #[derive(Debug)] struct ReferenceRegion { + buffer_id: Option, range: Range, buffer_start: Option, status: Option, @@ -2117,37 +2094,26 @@ impl ReferenceMultibuffer { }; let diff = change_set.read(cx).diff_to_buffer.clone(); let excerpt_range = excerpt.range.to_offset(&buffer); - if excerpt_range.is_empty() { - return; - } for hunk in diff.hunks_intersecting_range(range, &buffer) { let hunk_range = hunk.buffer_range.to_offset(&buffer); - let hunk_precedes_excerpt = hunk - .buffer_range - .end - .cmp(&excerpt.range.start, &buffer) - .is_lt(); - let hunk_follows_excerpt = hunk - .buffer_range - .start - .cmp(&excerpt.range.end, &buffer) - .is_ge(); - if hunk_precedes_excerpt || hunk_follows_excerpt { + if hunk_range.start < excerpt_range.start || hunk_range.start > excerpt_range.end { continue; } - if let Err(ix) = excerpt .expanded_diff_hunks .binary_search_by(|anchor| anchor.cmp(&hunk.buffer_range.start, &buffer)) { log::info!( - "expanding diff hunk {:?}. excerpt: {:?}", + "expanding diff hunk {:?}. excerpt:{:?}, excerpt range:{:?}", hunk_range, + excerpt_id, excerpt_range ); excerpt .expanded_diff_hunks .insert(ix, hunk.buffer_range.start); + } else { + log::trace!("hunk {hunk_range:?} already expanded in excerpt {excerpt_id:?}"); } } } @@ -2170,17 +2136,12 @@ impl ReferenceMultibuffer { .peekable(); while let Some(hunk) = hunks.next() { - if !hunk.buffer_range.start.is_valid(&buffer) { - continue; - } - // Ignore hunks that are outside the excerpt range. 
let mut hunk_range = hunk.buffer_range.to_offset(buffer); + hunk_range.end = hunk_range.end.min(buffer_range.end); - if hunk_range.start > buffer_range.end - || hunk_range.end < buffer_range.start - || buffer_range.is_empty() - { + if hunk_range.start > buffer_range.end || hunk_range.start < buffer_range.start { + log::trace!("skipping hunk outside excerpt range"); continue; } @@ -2188,6 +2149,12 @@ impl ReferenceMultibuffer { expanded_anchor.to_offset(&buffer).max(buffer_range.start) == hunk_range.start.max(buffer_range.start) }) { + log::trace!("skipping a hunk that's not marked as expanded"); + continue; + } + + if !hunk.buffer_range.start.is_valid(&buffer) { + log::trace!("skipping hunk with deleted start: {:?}", hunk.row_range); continue; } @@ -2196,6 +2163,7 @@ impl ReferenceMultibuffer { let len = text.len(); text.extend(buffer.text_for_range(offset..hunk_range.start)); regions.push(ReferenceRegion { + buffer_id: Some(buffer.remote_id()), range: len..text.len(), buffer_start: Some(buffer.offset_to_point(offset)), status: None, @@ -2212,6 +2180,7 @@ impl ReferenceMultibuffer { let len = text.len(); text.push_str(&base_text); regions.push(ReferenceRegion { + buffer_id: Some(base_buffer.remote_id()), range: len..text.len(), buffer_start: Some( base_buffer.offset_to_point(hunk.diff_base_byte_range.start), @@ -2228,6 +2197,7 @@ impl ReferenceMultibuffer { let len = text.len(); text.extend(buffer.text_for_range(offset..hunk_range.end)); regions.push(ReferenceRegion { + buffer_id: Some(buffer.remote_id()), range: len..text.len(), buffer_start: Some(buffer.offset_to_point(offset)), status: Some(DiffHunkStatus::Added), @@ -2241,6 +2211,7 @@ impl ReferenceMultibuffer { text.extend(buffer.text_for_range(offset..buffer_range.end)); text.push('\n'); regions.push(ReferenceRegion { + buffer_id: Some(buffer.remote_id()), range: len..text.len(), buffer_start: Some(buffer.offset_to_point(offset)), status: None, @@ -2250,6 +2221,7 @@ impl ReferenceMultibuffer { // Remove final trailing newline. 
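The reference model's hunk handling above amounts to clipping each hunk to the excerpt and keeping it only if it starts inside the excerpt. Here is a minimal standalone restatement over plain byte offsets; the helper name and the bare Range<usize> arguments are assumptions for illustration, not the patch's types.

use std::ops::Range;

// Sketch only: clip a hunk's buffer range to an excerpt's buffer range,
// keeping it only when the hunk starts within the excerpt.
fn clip_hunk_to_excerpt(hunk: Range<usize>, excerpt: Range<usize>) -> Option<Range<usize>> {
    let end = hunk.end.min(excerpt.end);
    if hunk.start < excerpt.start || hunk.start > excerpt.end {
        return None;
    }
    Some(hunk.start..end)
}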
if self.excerpts.is_empty() { regions.push(ReferenceRegion { + buffer_id: None, range: 0..1, buffer_start: Some(Point::new(0, 0)), status: None, @@ -2273,6 +2245,7 @@ impl ReferenceMultibuffer { + text[region.range.start..ix].matches('\n').count() as u32 }); RowInfo { + buffer_id: region.buffer_id, diff_status: region.status, buffer_row, multibuffer_row: Some(MultiBufferRow( @@ -2348,6 +2321,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { buffer.update(cx, |buf, cx| { let edit_count = rng.gen_range(1..5); buf.randomly_edit(&mut rng, edit_count, cx); + log::info!("buffer text:\n{}", buf.text()); needs_diff_calculation = true; }); cx.update(|cx| reference.diffs_updated(cx)); @@ -2440,7 +2414,11 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { let range = snapshot.anchor_in_excerpt(excerpt.id, start).unwrap() ..snapshot.anchor_in_excerpt(excerpt.id, end).unwrap(); - log::info!("expanding diff hunks for excerpt {:?}", excerpt_ix); + log::info!( + "expanding diff hunks in range {:?} (excerpt id {:?}) index {excerpt_ix:?})", + range.to_offset(&snapshot), + excerpt.id + ); reference.expand_diff_hunks(excerpt.id, start..end, cx); multibuffer.expand_diff_hunks(vec![range], cx); }); @@ -2457,7 +2435,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { "recalculating diff for buffer {:?}", snapshot.remote_id(), ); - change_set.recalculate_diff(snapshot.text, cx) + change_set.recalculate_diff_sync(snapshot.text, cx); }); } reference.diffs_updated(cx); @@ -2471,14 +2449,8 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { .collect::(); let buffer = cx.new(|cx| Buffer::local(base_text.clone(), cx)); - let change_set = cx.new(|cx| BufferChangeSet::new(&buffer, cx)); - change_set - .update(cx, |change_set, cx| { - let snapshot = buffer.read(cx).snapshot(); - change_set.set_base_text(base_text, snapshot.text, cx) - }) - .await - .unwrap(); + let change_set = + cx.new(|cx| BufferChangeSet::new_with_base_text(&base_text, &buffer, cx)); multibuffer.update(cx, |multibuffer, cx| { reference.add_change_set(change_set.clone(), cx); @@ -2553,12 +2525,28 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { .filter_map(|b| if b.next.is_some() { Some(b.row) } else { None }) .collect::>(); let actual_row_infos = snapshot.row_infos(MultiBufferRow(0)).collect::>(); - let actual_diff = format_diff(&actual_text, &actual_row_infos, &actual_boundary_rows); let (expected_text, expected_row_infos, expected_boundary_rows) = cx.update(|cx| reference.expected_content(cx)); - let expected_diff = - format_diff(&expected_text, &expected_row_infos, &expected_boundary_rows); + + let has_diff = actual_row_infos + .iter() + .any(|info| info.diff_status.is_some()) + || expected_row_infos + .iter() + .any(|info| info.diff_status.is_some()); + let actual_diff = format_diff( + &actual_text, + &actual_row_infos, + &actual_boundary_rows, + Some(has_diff), + ); + let expected_diff = format_diff( + &expected_text, + &expected_row_infos, + &expected_boundary_rows, + Some(has_diff), + ); log::info!("Multibuffer content:\n{}", actual_diff); @@ -2569,8 +2557,8 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { actual_text.split('\n').count() ); pretty_assertions::assert_eq!(actual_diff, expected_diff); - pretty_assertions::assert_eq!(actual_row_infos, expected_row_infos); pretty_assertions::assert_eq!(actual_text, expected_text); + 
pretty_assertions::assert_eq!(actual_row_infos, expected_row_infos); for _ in 0..5 { let start_row = rng.gen_range(0..=expected_row_infos.len()); @@ -2937,8 +2925,10 @@ fn format_diff( text: &str, row_infos: &Vec, boundary_rows: &HashSet, + has_diff: Option, ) -> String { - let has_diff = row_infos.iter().any(|info| info.diff_status.is_some()); + let has_diff = + has_diff.unwrap_or_else(|| row_infos.iter().any(|info| info.diff_status.is_some())); text.split('\n') .enumerate() .zip(row_infos) @@ -3002,7 +2992,7 @@ fn assert_new_snapshot( let line_infos = new_snapshot .row_infos(MultiBufferRow(0)) .collect::>(); - let actual_diff = format_diff(&actual_text, &line_infos, &Default::default()); + let actual_diff = format_diff(&actual_text, &line_infos, &Default::default(), None); pretty_assertions::assert_eq!(actual_diff, expected_diff); check_edits( snapshot, diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index 921bc0adfd17331788cb50ee9318481fa86d9e10..4128990f8c5f888559c6dae217b399542de9917d 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -9,7 +9,11 @@ use anyhow::{anyhow, bail, Context as _, Result}; use client::Client; use collections::{hash_map, HashMap, HashSet}; use fs::Fs; -use futures::{channel::oneshot, future::Shared, Future, FutureExt as _, StreamExt}; +use futures::{ + channel::oneshot, + future::{OptionFuture, Shared}, + Future, FutureExt as _, StreamExt, +}; use git::{blame::Blame, diff::BufferDiff, repository::RepoPath}; use gpui::{ App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity, @@ -34,18 +38,26 @@ use std::{ sync::Arc, time::Instant, }; -use text::{BufferId, LineEnding, Rope}; +use text::{BufferId, Rope}; use util::{debug_panic, maybe, ResultExt as _, TryFutureExt}; use worktree::{File, PathChange, ProjectEntryId, UpdatedGitRepositoriesSet, Worktree, WorktreeId}; +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +enum ChangeSetKind { + Unstaged, + Uncommitted, +} + /// A set of open buffers. 
pub struct BufferStore { state: BufferStoreState, #[allow(clippy::type_complexity)] loading_buffers: HashMap, Arc>>>>, #[allow(clippy::type_complexity)] - loading_change_sets: - HashMap, Arc>>>>, + loading_change_sets: HashMap< + (BufferId, ChangeSetKind), + Shared, Arc>>>, + >, worktree_store: Entity, opened_buffers: HashMap, downstream_client: Option<(AnyProtoClient, u64)>, @@ -55,18 +67,293 @@ pub struct BufferStore { #[derive(Hash, Eq, PartialEq, Clone)] struct SharedBuffer { buffer: Entity, - unstaged_changes: Option>, + change_set: Option>, lsp_handle: Option, } +#[derive(Default)] +struct BufferChangeSetState { + unstaged_changes: Option>, + uncommitted_changes: Option>, + recalculate_diff_task: Option>>, + language: Option>, + language_registry: Option>, + diff_updated_futures: Vec>, + buffer_subscription: Option, + + head_text: Option>, + index_text: Option>, + head_changed: bool, + index_changed: bool, +} + +#[derive(Clone, Debug)] +enum DiffBasesChange { + SetIndex(Option), + SetHead(Option), + SetEach { + index: Option, + head: Option, + }, + SetBoth(Option), +} + +impl BufferChangeSetState { + fn buffer_language_changed(&mut self, buffer: Entity, cx: &mut Context) { + self.language = buffer.read(cx).language().cloned(); + self.index_changed = self.index_text.is_some(); + self.head_changed = self.head_text.is_some(); + let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx); + } + + fn unstaged_changes(&self) -> Option> { + self.unstaged_changes.as_ref().and_then(|set| set.upgrade()) + } + + fn uncommitted_changes(&self) -> Option> { + self.uncommitted_changes + .as_ref() + .and_then(|set| set.upgrade()) + } + + fn handle_base_texts_updated( + &mut self, + buffer: text::BufferSnapshot, + message: proto::UpdateDiffBases, + cx: &mut Context, + ) { + use proto::update_diff_bases::Mode; + + let Some(mode) = Mode::from_i32(message.mode) else { + return; + }; + + let diff_bases_change = match mode { + Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text), + Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text), + Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.staged_text), + Mode::IndexAndHead => DiffBasesChange::SetEach { + index: message.staged_text, + head: message.committed_text, + }, + }; + + let _ = self.diff_bases_changed(buffer, diff_bases_change, cx); + } + + fn diff_bases_changed( + &mut self, + buffer: text::BufferSnapshot, + diff_bases_change: DiffBasesChange, + cx: &mut Context, + ) -> oneshot::Receiver<()> { + match diff_bases_change { + DiffBasesChange::SetIndex(index) => { + self.index_text = index.map(|mut text| { + text::LineEnding::normalize(&mut text); + Arc::new(text) + }); + self.index_changed = true; + } + DiffBasesChange::SetHead(head) => { + self.head_text = head.map(|mut text| { + text::LineEnding::normalize(&mut text); + Arc::new(text) + }); + self.head_changed = true; + } + DiffBasesChange::SetBoth(mut text) => { + if let Some(text) = text.as_mut() { + text::LineEnding::normalize(text); + } + self.head_text = text.map(Arc::new); + self.index_text = self.head_text.clone(); + self.head_changed = true; + self.index_changed = true; + } + DiffBasesChange::SetEach { index, head } => { + self.index_text = index.map(|mut text| { + text::LineEnding::normalize(&mut text); + Arc::new(text) + }); + self.head_text = head.map(|mut text| { + text::LineEnding::normalize(&mut text); + Arc::new(text) + }); + self.head_changed = true; + self.index_changed = true; + } + } + + self.recalculate_diffs(buffer, cx) + } + + fn 
recalculate_diffs( + &mut self, + buffer: text::BufferSnapshot, + cx: &mut Context, + ) -> oneshot::Receiver<()> { + let (tx, rx) = oneshot::channel(); + self.diff_updated_futures.push(tx); + + let language = self.language.clone(); + let language_registry = self.language_registry.clone(); + let unstaged_changes = self.unstaged_changes(); + let uncommitted_changes = self.uncommitted_changes(); + let head = self.head_text.clone(); + let index = self.index_text.clone(); + let index_changed = self.index_changed; + let head_changed = self.head_changed; + let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) { + (Some(index), Some(head)) => Arc::ptr_eq(index, head), + (None, None) => true, + _ => false, + }; + self.recalculate_diff_task = Some(cx.spawn(|this, mut cx| async move { + let snapshot = if index_changed { + let snapshot = cx.update(|cx| { + index.as_ref().map(|head| { + language::Buffer::build_snapshot( + Rope::from(head.as_str()), + language.clone(), + language_registry.clone(), + cx, + ) + }) + })?; + cx.background_executor() + .spawn(OptionFuture::from(snapshot)) + .await + } else if let Some(unstaged_changes) = &unstaged_changes { + unstaged_changes.read_with(&cx, |change_set, _| change_set.base_text.clone())? + } else if let Some(uncommitted_changes) = &uncommitted_changes { + uncommitted_changes + .read_with(&cx, |change_set, _| change_set.staged_text.clone())? + } else { + return Ok(()); + }; + + if let Some(unstaged_changes) = &unstaged_changes { + let diff = cx + .background_executor() + .spawn({ + let buffer = buffer.clone(); + async move { + BufferDiff::build(index.as_ref().map(|index| index.as_str()), &buffer) + } + }) + .await; + + unstaged_changes.update(&mut cx, |unstaged_changes, cx| { + unstaged_changes.set_state(snapshot.clone(), diff, &buffer, cx); + })?; + + if let Some(uncommitted_changes) = &uncommitted_changes { + uncommitted_changes.update(&mut cx, |uncommitted_changes, _| { + uncommitted_changes.staged_text = snapshot; + })?; + } + } + + if let Some(uncommitted_changes) = &uncommitted_changes { + let (snapshot, diff) = if let (Some(unstaged_changes), true) = + (&unstaged_changes, index_matches_head) + { + unstaged_changes.read_with(&cx, |change_set, _| { + ( + change_set.base_text.clone(), + change_set.diff_to_buffer.clone(), + ) + })? 
+ } else { + let snapshot = cx.update(|cx| { + head.as_deref().map(|head| { + language::Buffer::build_snapshot( + Rope::from(head.as_str()), + language.clone(), + language_registry.clone(), + cx, + ) + }) + })?; + let snapshot = cx.background_executor().spawn(OptionFuture::from(snapshot)); + let diff = cx.background_executor().spawn({ + let buffer = buffer.clone(); + let head = head.clone(); + async move { + BufferDiff::build(head.as_ref().map(|head| head.as_str()), &buffer) + } + }); + futures::join!(snapshot, diff) + }; + + uncommitted_changes.update(&mut cx, |change_set, cx| { + change_set.set_state(snapshot, diff, &buffer, cx); + })?; + + if index_changed || head_changed { + let staged_text = uncommitted_changes + .read_with(&cx, |change_set, _| change_set.staged_text.clone())?; + + let diff = if index_matches_head { + staged_text.as_ref().map(|buffer| BufferDiff::new(buffer)) + } else if let Some(staged_text) = staged_text { + Some( + cx.background_executor() + .spawn(async move { + BufferDiff::build( + head.as_ref().map(|head| head.as_str()), + &staged_text, + ) + }) + .await, + ) + } else { + None + }; + + uncommitted_changes.update(&mut cx, |change_set, _| { + change_set.staged_diff = diff; + })?; + } + } + + if let Some(this) = this.upgrade() { + this.update(&mut cx, |this, _| { + this.index_changed = false; + this.head_changed = false; + for tx in this.diff_updated_futures.drain(..) { + tx.send(()).ok(); + } + })?; + } + + Ok(()) + })); + + rx + } +} + pub struct BufferChangeSet { pub buffer_id: BufferId, pub base_text: Option, - pub language: Option>, - pub diff_to_buffer: git::diff::BufferDiff, - pub recalculate_diff_task: Option>>, - pub diff_updated_futures: Vec>, - pub language_registry: Option>, + pub diff_to_buffer: BufferDiff, + pub staged_text: Option, + // For an uncommitted changeset, this is the diff between HEAD and the index. + pub staged_diff: Option, +} + +impl std::fmt::Debug for BufferChangeSet { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("BufferChangeSet") + .field("buffer_id", &self.buffer_id) + .field("base_text", &self.base_text.as_ref().map(|s| s.text())) + .field("diff_to_buffer", &self.diff_to_buffer) + .field("staged_text", &self.staged_text.as_ref().map(|s| s.text())) + .field("staged_diff", &self.staged_diff) + .finish() + } } pub enum BufferChangeSetEvent { @@ -98,7 +385,7 @@ struct LocalBufferStore { enum OpenBuffer { Complete { buffer: WeakEntity, - unstaged_changes: Option>, + change_set_state: Entity, }, Operations(Vec), } @@ -118,19 +405,48 @@ pub struct ProjectTransaction(pub HashMap, language::Transaction> impl EventEmitter for BufferStore {} impl RemoteBufferStore { - fn load_staged_text(&self, buffer_id: BufferId, cx: &App) -> Task>> { + fn open_unstaged_changes(&self, buffer_id: BufferId, cx: &App) -> Task>> { let project_id = self.project_id; let client = self.upstream_client.clone(); cx.background_executor().spawn(async move { - Ok(client - .request(proto::GetStagedText { + let response = client + .request(proto::OpenUnstagedChanges { project_id, buffer_id: buffer_id.to_proto(), }) - .await? 
- .staged_text) + .await?; + Ok(response.staged_text) }) } + + fn open_uncommitted_changes( + &self, + buffer_id: BufferId, + cx: &App, + ) -> Task> { + use proto::open_uncommitted_changes_response::Mode; + + let project_id = self.project_id; + let client = self.upstream_client.clone(); + cx.background_executor().spawn(async move { + let response = client + .request(proto::OpenUncommittedChanges { + project_id, + buffer_id: buffer_id.to_proto(), + }) + .await?; + let mode = Mode::from_i32(response.mode).ok_or_else(|| anyhow!("Invalid mode"))?; + let bases = match mode { + Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.staged_text), + Mode::IndexAndHead => DiffBasesChange::SetEach { + head: response.committed_text, + index: response.staged_text, + }, + }; + Ok(bases) + }) + } + pub fn wait_for_remote_buffer( &mut self, id: BufferId, @@ -398,21 +714,39 @@ impl RemoteBufferStore { } impl LocalBufferStore { - fn load_staged_text(&self, buffer: &Entity, cx: &App) -> Task>> { - let Some(file) = buffer.read(cx).file() else { - return Task::ready(Ok(None)); - }; + fn worktree_for_buffer( + &self, + buffer: &Entity, + cx: &App, + ) -> Option<(Entity, Arc)> { + let file = buffer.read(cx).file()?; let worktree_id = file.worktree_id(cx); let path = file.path().clone(); - let Some(worktree) = self + let worktree = self .worktree_store .read(cx) - .worktree_for_id(worktree_id, cx) - else { + .worktree_for_id(worktree_id, cx)?; + Some((worktree, path)) + } + + fn load_staged_text(&self, buffer: &Entity, cx: &App) -> Task>> { + if let Some((worktree, path)) = self.worktree_for_buffer(buffer, cx) { + worktree.read(cx).load_staged_file(path.as_ref(), cx) + } else { return Task::ready(Err(anyhow!("no such worktree"))); - }; + } + } - worktree.read(cx).load_staged_file(path.as_ref(), cx) + fn load_committed_text( + &self, + buffer: &Entity, + cx: &App, + ) -> Task>> { + if let Some((worktree, path)) = self.worktree_for_buffer(buffer, cx) { + worktree.read(cx).load_committed_file(path.as_ref(), cx) + } else { + Task::ready(Err(anyhow!("no such worktree"))) + } } fn save_local_buffer( @@ -526,74 +860,145 @@ impl LocalBufferStore { ) { debug_assert!(worktree_handle.read(cx).is_local()); - let buffer_change_sets = this - .opened_buffers - .values() - .filter_map(|buffer| { - if let OpenBuffer::Complete { - buffer, - unstaged_changes, - } = buffer - { - let buffer = buffer.upgrade()?.read(cx); - let file = File::from_dyn(buffer.file())?; - if file.worktree != worktree_handle { - return None; - } - changed_repos - .iter() - .find(|(work_dir, _)| file.path.starts_with(work_dir))?; - let unstaged_changes = unstaged_changes.as_ref()?.upgrade()?; - let snapshot = buffer.text_snapshot(); - Some((unstaged_changes, snapshot, file.path.clone())) - } else { - None - } - }) - .collect::>(); + let mut change_set_state_updates = Vec::new(); + for buffer in this.opened_buffers.values() { + let OpenBuffer::Complete { + buffer, + change_set_state, + } = buffer + else { + continue; + }; + let Some(buffer) = buffer.upgrade() else { + continue; + }; + let buffer = buffer.read(cx); + let Some(file) = File::from_dyn(buffer.file()) else { + continue; + }; + if file.worktree != worktree_handle { + continue; + } + let change_set_state = change_set_state.read(cx); + if changed_repos + .iter() + .any(|(work_dir, _)| file.path.starts_with(work_dir)) + { + let snapshot = buffer.text_snapshot(); + change_set_state_updates.push(( + snapshot.clone(), + file.path.clone(), + change_set_state + .unstaged_changes + .as_ref() + 
.and_then(|set| set.upgrade()) + .is_some(), + change_set_state + .uncommitted_changes + .as_ref() + .and_then(|set| set.upgrade()) + .is_some(), + )) + } + } - if buffer_change_sets.is_empty() { + if change_set_state_updates.is_empty() { return; } cx.spawn(move |this, mut cx| async move { let snapshot = worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?; - let diff_bases_by_buffer = cx + let diff_bases_changes_by_buffer = cx .background_executor() .spawn(async move { - buffer_change_sets + change_set_state_updates .into_iter() - .filter_map(|(change_set, buffer_snapshot, path)| { - let local_repo = snapshot.local_repo_for_path(&path)?; - let relative_path = local_repo.relativize(&path).ok()?; - let base_text = local_repo.repo().load_index_text(&relative_path); - Some((change_set, buffer_snapshot, base_text)) - }) + .filter_map( + |(buffer_snapshot, path, needs_staged_text, needs_committed_text)| { + let local_repo = snapshot.local_repo_for_path(&path)?; + let relative_path = local_repo.relativize(&path).ok()?; + let staged_text = if needs_staged_text { + local_repo.repo().load_index_text(&relative_path) + } else { + None + }; + let committed_text = if needs_committed_text { + local_repo.repo().load_committed_text(&relative_path) + } else { + None + }; + let diff_bases_change = + match (needs_staged_text, needs_committed_text) { + (true, true) => Some(if staged_text == committed_text { + DiffBasesChange::SetBoth(staged_text) + } else { + DiffBasesChange::SetEach { + index: staged_text, + head: committed_text, + } + }), + (true, false) => { + Some(DiffBasesChange::SetIndex(staged_text)) + } + (false, true) => { + Some(DiffBasesChange::SetHead(committed_text)) + } + (false, false) => None, + }; + Some((buffer_snapshot, diff_bases_change)) + }, + ) .collect::>() }) .await; this.update(&mut cx, |this, cx| { - for (change_set, buffer_snapshot, staged_text) in diff_bases_by_buffer { - change_set.update(cx, |change_set, cx| { - if let Some(staged_text) = staged_text.clone() { - let _ = - change_set.set_base_text(staged_text, buffer_snapshot.clone(), cx); - } else { - change_set.unset_base_text(buffer_snapshot.clone(), cx); - } - }); + for (buffer_snapshot, diff_bases_change) in diff_bases_changes_by_buffer { + let Some(OpenBuffer::Complete { + change_set_state, .. 
+ }) = this.opened_buffers.get_mut(&buffer_snapshot.remote_id()) + else { + continue; + }; + let Some(diff_bases_change) = diff_bases_change else { + continue; + }; - if let Some((client, project_id)) = &this.downstream_client.clone() { - client - .send(proto::UpdateDiffBase { + change_set_state.update(cx, |change_set_state, cx| { + use proto::update_diff_bases::Mode; + + if let Some((client, project_id)) = this.downstream_client.as_ref() { + let buffer_id = buffer_snapshot.remote_id().to_proto(); + let (staged_text, committed_text, mode) = match diff_bases_change + .clone() + { + DiffBasesChange::SetIndex(index) => (index, None, Mode::IndexOnly), + DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly), + DiffBasesChange::SetEach { index, head } => { + (index, head, Mode::IndexAndHead) + } + DiffBasesChange::SetBoth(text) => { + (text, None, Mode::IndexMatchesHead) + } + }; + let message = proto::UpdateDiffBases { project_id: *project_id, - buffer_id: buffer_snapshot.remote_id().to_proto(), + buffer_id, staged_text, - }) - .log_err(); - } + committed_text, + mode: mode as i32, + }; + + client.send(message).log_err(); + } + + let _ = change_set_state.diff_bases_changed( + buffer_snapshot, + diff_bases_change, + cx, + ); + }); } }) }) @@ -898,8 +1303,9 @@ impl BufferStore { client.add_entity_request_handler(Self::handle_blame_buffer); client.add_entity_request_handler(Self::handle_reload_buffers); client.add_entity_request_handler(Self::handle_get_permalink_to_line); - client.add_entity_request_handler(Self::handle_get_staged_text); - client.add_entity_message_handler(Self::handle_update_diff_base); + client.add_entity_request_handler(Self::handle_open_unstaged_changes); + client.add_entity_request_handler(Self::handle_open_uncommitted_changes); + client.add_entity_message_handler(Self::handle_update_diff_bases); } /// Creates a buffer store, optionally retaining its buffers. 
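The mode selection above is the serialization half of the UpdateDiffBases protocol; handle_base_texts_updated performs the inverse mapping on the receiving side. The following self-contained sketch restates the mapping with plain Rust types in place of the generated proto structs; the encode/decode helper names are illustrative only.

// Sketch only: the wire encoding used by UpdateDiffBases. The enum variants
// mirror the names in the patch; everything else is a stand-in.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Mode {
    HeadOnly,
    IndexOnly,
    IndexMatchesHead,
    IndexAndHead,
}

#[derive(Clone, PartialEq, Debug)]
enum DiffBasesChange {
    SetIndex(Option<String>),
    SetHead(Option<String>),
    SetEach { index: Option<String>, head: Option<String> },
    SetBoth(Option<String>),
}

// Wire layout: (staged_text, committed_text, mode).
fn encode(change: DiffBasesChange) -> (Option<String>, Option<String>, Mode) {
    match change {
        DiffBasesChange::SetIndex(index) => (index, None, Mode::IndexOnly),
        DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
        DiffBasesChange::SetEach { index, head } => (index, head, Mode::IndexAndHead),
        DiffBasesChange::SetBoth(text) => (text, None, Mode::IndexMatchesHead),
    }
}

fn decode(staged: Option<String>, committed: Option<String>, mode: Mode) -> DiffBasesChange {
    match mode {
        Mode::HeadOnly => DiffBasesChange::SetHead(committed),
        Mode::IndexOnly => DiffBasesChange::SetIndex(staged),
        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(staged),
        Mode::IndexAndHead => DiffBasesChange::SetEach { index: staged, head: committed },
    }
}

A round trip decode(encode(x)) returns x for every variant, and IndexMatchesHead lets a base text shared by the index and HEAD travel once over the wire instead of twice.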
@@ -1022,24 +1428,93 @@ impl BufferStore { cx: &mut Context, ) -> Task>> { let buffer_id = buffer.read(cx).remote_id(); - if let Some(change_set) = self.get_unstaged_changes(buffer_id) { + if let Some(change_set) = self.get_unstaged_changes(buffer_id, cx) { return Task::ready(Ok(change_set)); } - let task = match self.loading_change_sets.entry(buffer_id) { + let task = match self + .loading_change_sets + .entry((buffer_id, ChangeSetKind::Unstaged)) + { hash_map::Entry::Occupied(e) => e.get().clone(), hash_map::Entry::Vacant(entry) => { - let load = match &self.state { + let staged_text = match &self.state { BufferStoreState::Local(this) => this.load_staged_text(&buffer, cx), - BufferStoreState::Remote(this) => this.load_staged_text(buffer_id, cx), + BufferStoreState::Remote(this) => this.open_unstaged_changes(buffer_id, cx), + }; + + entry + .insert( + cx.spawn(move |this, cx| async move { + Self::open_change_set_internal( + this, + ChangeSetKind::Unstaged, + staged_text.await.map(DiffBasesChange::SetIndex), + buffer, + cx, + ) + .await + .map_err(Arc::new) + }) + .shared(), + ) + .clone() + } + }; + + cx.background_executor() + .spawn(async move { task.await.map_err(|e| anyhow!("{e}")) }) + } + + pub fn open_uncommitted_changes( + &mut self, + buffer: Entity, + cx: &mut Context, + ) -> Task>> { + let buffer_id = buffer.read(cx).remote_id(); + if let Some(change_set) = self.get_uncommitted_changes(buffer_id, cx) { + return Task::ready(Ok(change_set)); + } + + let task = match self + .loading_change_sets + .entry((buffer_id, ChangeSetKind::Uncommitted)) + { + hash_map::Entry::Occupied(e) => e.get().clone(), + hash_map::Entry::Vacant(entry) => { + let changes = match &self.state { + BufferStoreState::Local(this) => { + let committed_text = this.load_committed_text(&buffer, cx); + let staged_text = this.load_staged_text(&buffer, cx); + cx.background_executor().spawn(async move { + let committed_text = committed_text.await?; + let staged_text = staged_text.await?; + let diff_bases_change = if committed_text == staged_text { + DiffBasesChange::SetBoth(committed_text) + } else { + DiffBasesChange::SetEach { + index: staged_text, + head: committed_text, + } + }; + Ok(diff_bases_change) + }) + } + BufferStoreState::Remote(this) => this.open_uncommitted_changes(buffer_id, cx), }; entry .insert( cx.spawn(move |this, cx| async move { - Self::open_unstaged_changes_internal(this, load.await, buffer, cx) - .await - .map_err(Arc::new) + Self::open_change_set_internal( + this, + ChangeSetKind::Uncommitted, + changes.await, + buffer, + cx, + ) + .await + .map_err(Arc::new) }) .shared(), ) @@ -1052,52 +1527,83 @@ impl BufferStore { } #[cfg(any(test, feature = "test-support"))] - pub fn set_change_set(&mut self, buffer_id: BufferId, change_set: Entity) { - self.loading_change_sets - .insert(buffer_id, Task::ready(Ok(change_set)).shared()); + pub fn set_unstaged_change_set( + &mut self, + buffer_id: BufferId, + change_set: Entity, + ) { + self.loading_change_sets.insert( + (buffer_id, ChangeSetKind::Unstaged), + Task::ready(Ok(change_set)).shared(), + ); } - pub async fn open_unstaged_changes_internal( + async fn open_change_set_internal( this: WeakEntity, - text: Result>, + kind: ChangeSetKind, + texts: Result, buffer: Entity, mut cx: AsyncApp, ) -> Result> { - let text = match text { + let diff_bases_change = match texts { Err(e) => { this.update(&mut cx, |this, cx| { let buffer_id = buffer.read(cx).remote_id(); - this.loading_change_sets.remove(&buffer_id); + this.loading_change_sets.remove(&(buffer_id, 
kind)); })?; return Err(e); } - Ok(text) => text, + Ok(change) => change, }; - let change_set = cx.new(|cx| BufferChangeSet::new(&buffer, cx)).unwrap(); - - if let Some(text) = text { - change_set - .update(&mut cx, |change_set, cx| { - let snapshot = buffer.read(cx).text_snapshot(); - change_set.set_base_text(text, snapshot, cx) - })? - .await - .ok(); - } - this.update(&mut cx, |this, cx| { let buffer_id = buffer.read(cx).remote_id(); - this.loading_change_sets.remove(&buffer_id); + this.loading_change_sets.remove(&(buffer_id, kind)); + if let Some(OpenBuffer::Complete { - unstaged_changes, .. + change_set_state, .. }) = this.opened_buffers.get_mut(&buffer.read(cx).remote_id()) { - *unstaged_changes = Some(change_set.downgrade()); - } - })?; + change_set_state.update(cx, |change_set_state, cx| { + let buffer_id = buffer.read(cx).remote_id(); + change_set_state.buffer_subscription.get_or_insert_with(|| { + cx.subscribe(&buffer, |this, buffer, event, cx| match event { + BufferEvent::LanguageChanged => { + this.buffer_language_changed(buffer, cx) + } + _ => {} + }) + }); - Ok(change_set) + let change_set = cx.new(|cx| BufferChangeSet { + buffer_id, + base_text: None, + diff_to_buffer: BufferDiff::new(&buffer.read(cx).text_snapshot()), + staged_text: None, + staged_diff: None, + }); + match kind { + ChangeSetKind::Unstaged => { + change_set_state.unstaged_changes = Some(change_set.downgrade()) + } + ChangeSetKind::Uncommitted => { + change_set_state.uncommitted_changes = Some(change_set.downgrade()) + } + }; + + let buffer = buffer.read(cx).text_snapshot(); + let rx = change_set_state.diff_bases_changed(buffer, diff_bases_change, cx); + + Ok(async move { + rx.await.ok(); + Ok(change_set) + }) + }) + } else { + Err(anyhow!("buffer was closed")) + } + })?? + .await } pub fn create_buffer(&mut self, cx: &mut Context) -> Task>> { @@ -1303,7 +1809,7 @@ impl BufferStore { let is_remote = buffer.read(cx).replica_id() != 0; let open_buffer = OpenBuffer::Complete { buffer: buffer.downgrade(), - unstaged_changes: None, + change_set_state: cx.new(|_| BufferChangeSetState::default()), }; let handle = cx.entity().downgrade(); @@ -1384,12 +1890,39 @@ impl BufferStore { }) } - pub fn get_unstaged_changes(&self, buffer_id: BufferId) -> Option> { + pub fn get_unstaged_changes( + &self, + buffer_id: BufferId, + cx: &App, + ) -> Option> { + if let OpenBuffer::Complete { + change_set_state, .. + } = self.opened_buffers.get(&buffer_id)? + { + change_set_state + .read(cx) + .unstaged_changes + .as_ref()? + .upgrade() + } else { + None + } + } + + pub fn get_uncommitted_changes( + &self, + buffer_id: BufferId, + cx: &App, + ) -> Option> { if let OpenBuffer::Complete { - unstaged_changes, .. + change_set_state, .. } = self.opened_buffers.get(&buffer_id)? { - unstaged_changes.as_ref()?.upgrade() + change_set_state + .read(cx) + .uncommitted_changes + .as_ref()? + .upgrade() } else { None } @@ -1509,21 +2042,14 @@ impl BufferStore { ) -> impl Future { let mut futures = Vec::new(); for buffer in buffers { - let buffer = buffer.read(cx).text_snapshot(); if let Some(OpenBuffer::Complete { - unstaged_changes, .. - }) = self.opened_buffers.get_mut(&buffer.remote_id()) + change_set_state, .. 
+ }) = self.opened_buffers.get_mut(&buffer.read(cx).remote_id()) { - if let Some(unstaged_changes) = unstaged_changes - .as_ref() - .and_then(|changes| changes.upgrade()) - { - unstaged_changes.update(cx, |unstaged_changes, cx| { - futures.push(unstaged_changes.recalculate_diff(buffer.clone(), cx)); - }); - } else { - unstaged_changes.take(); - } + let buffer = buffer.read(cx).text_snapshot(); + futures.push(change_set_state.update(cx, |change_set_state, cx| { + change_set_state.recalculate_diffs(buffer, cx) + })); } } async move { @@ -1632,7 +2158,7 @@ impl BufferStore { .entry(buffer_id) .or_insert_with(|| SharedBuffer { buffer: buffer.clone(), - unstaged_changes: None, + change_set: None, lsp_handle: None, }); @@ -1937,11 +2463,11 @@ impl BufferStore { }) } - pub async fn handle_get_staged_text( + pub async fn handle_open_unstaged_changes( this: Entity, - request: TypedEnvelope, + request: TypedEnvelope, mut cx: AsyncApp, - ) -> Result { + ) -> Result { let buffer_id = BufferId::new(request.payload.buffer_id)?; let change_set = this .update(&mut cx, |this, cx| { @@ -1957,43 +2483,92 @@ impl BufferStore { .or_default(); debug_assert!(shared_buffers.contains_key(&buffer_id)); if let Some(shared) = shared_buffers.get_mut(&buffer_id) { - shared.unstaged_changes = Some(change_set.clone()); + shared.change_set = Some(change_set.clone()); } })?; let staged_text = change_set.read_with(&cx, |change_set, _| { change_set.base_text.as_ref().map(|buffer| buffer.text()) })?; - Ok(proto::GetStagedTextResponse { staged_text }) + Ok(proto::OpenUnstagedChangesResponse { staged_text }) + } + + pub async fn handle_open_uncommitted_changes( + this: Entity, + request: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let buffer_id = BufferId::new(request.payload.buffer_id)?; + let change_set = this + .update(&mut cx, |this, cx| { + let buffer = this.get(buffer_id)?; + Some(this.open_uncommitted_changes(buffer, cx)) + })? + .ok_or_else(|| anyhow!("no such buffer"))? 
+ .await?; + this.update(&mut cx, |this, _| { + let shared_buffers = this + .shared_buffers + .entry(request.original_sender_id.unwrap_or(request.sender_id)) + .or_default(); + debug_assert!(shared_buffers.contains_key(&buffer_id)); + if let Some(shared) = shared_buffers.get_mut(&buffer_id) { + shared.change_set = Some(change_set.clone()); + } + })?; + change_set.read_with(&cx, |change_set, _| { + use proto::open_uncommitted_changes_response::Mode; + + let mode; + let staged_text; + let committed_text; + if let Some(committed_buffer) = &change_set.base_text { + committed_text = Some(committed_buffer.text()); + if let Some(staged_buffer) = &change_set.staged_text { + if staged_buffer.remote_id() == committed_buffer.remote_id() { + mode = Mode::IndexMatchesHead; + staged_text = None; + } else { + mode = Mode::IndexAndHead; + staged_text = Some(staged_buffer.text()); + } + } else { + mode = Mode::IndexAndHead; + staged_text = None; + } + } else { + mode = Mode::IndexAndHead; + committed_text = None; + staged_text = change_set.staged_text.as_ref().map(|buffer| buffer.text()); + } + + proto::OpenUncommittedChangesResponse { + committed_text, + staged_text, + mode: mode.into(), + } + }) } - pub async fn handle_update_diff_base( + pub async fn handle_update_diff_bases( this: Entity, - request: TypedEnvelope, + request: TypedEnvelope, mut cx: AsyncApp, ) -> Result<()> { let buffer_id = BufferId::new(request.payload.buffer_id)?; - let Some((buffer, change_set)) = this.update(&mut cx, |this, _| { - if let OpenBuffer::Complete { - unstaged_changes, + this.update(&mut cx, |this, cx| { + if let Some(OpenBuffer::Complete { + change_set_state, buffer, - } = this.opened_buffers.get(&buffer_id)? + }) = this.opened_buffers.get_mut(&buffer_id) { - Some((buffer.upgrade()?, unstaged_changes.as_ref()?.upgrade()?)) - } else { - None - } - })? 
- else { - return Ok(()); - }; - change_set.update(&mut cx, |change_set, cx| { - if let Some(staged_text) = request.payload.staged_text { - let _ = change_set.set_base_text(staged_text, buffer.read(cx).text_snapshot(), cx); - } else { - change_set.unset_base_text(buffer.read(cx).text_snapshot(), cx) + if let Some(buffer) = buffer.upgrade() { + let buffer = buffer.read(cx).text_snapshot(); + change_set_state.update(cx, |change_set_state, cx| { + change_set_state.handle_base_texts_updated(buffer, request.payload, cx); + }) + } } - })?; - Ok(()) + }) } pub fn reload_buffers( @@ -2050,7 +2625,7 @@ impl BufferStore { buffer_id, SharedBuffer { buffer: buffer.clone(), - unstaged_changes: None, + change_set: None, lsp_handle: None, }, ); @@ -2208,54 +2783,27 @@ impl BufferStore { impl EventEmitter for BufferChangeSet {} impl BufferChangeSet { - pub fn new(buffer: &Entity, cx: &mut Context) -> Self { - cx.subscribe(buffer, |this, buffer, event, cx| match event { - BufferEvent::LanguageChanged => { - this.language = buffer.read(cx).language().cloned(); - if let Some(base_text) = &this.base_text { - let snapshot = language::Buffer::build_snapshot( - base_text.as_rope().clone(), - this.language.clone(), - this.language_registry.clone(), - cx, - ); - this.recalculate_diff_task = Some(cx.spawn(|this, mut cx| async move { - let base_text = cx.background_executor().spawn(snapshot).await; - this.update(&mut cx, |this, cx| { - this.base_text = Some(base_text); - cx.emit(BufferChangeSetEvent::DiffChanged { - changed_range: text::Anchor::MIN..text::Anchor::MAX, - }); - }) - })); - } + fn set_state( + &mut self, + base_text: Option, + diff: BufferDiff, + buffer: &text::BufferSnapshot, + cx: &mut Context, + ) { + if let Some(base_text) = base_text.as_ref() { + let changed_range = if Some(base_text.remote_id()) + != self.base_text.as_ref().map(|buffer| buffer.remote_id()) + { + Some(text::Anchor::MIN..text::Anchor::MAX) + } else { + diff.compare(&self.diff_to_buffer, buffer) + }; + if let Some(changed_range) = changed_range { + cx.emit(BufferChangeSetEvent::DiffChanged { changed_range }); } - _ => {} - }) - .detach(); - - let buffer = buffer.read(cx); - - Self { - buffer_id: buffer.remote_id(), - base_text: None, - diff_to_buffer: git::diff::BufferDiff::new(buffer), - recalculate_diff_task: None, - diff_updated_futures: Vec::new(), - language: buffer.language().cloned(), - language_registry: buffer.language_registry(), } - } - - #[cfg(any(test, feature = "test-support"))] - pub fn new_with_base_text( - base_text: String, - buffer: &Entity, - cx: &mut Context, - ) -> Self { - let mut this = Self::new(&buffer, cx); - let _ = this.set_base_text(base_text, buffer.read(cx).text_snapshot(), cx); - this + self.base_text = base_text; + self.diff_to_buffer = diff; } pub fn diff_hunks_intersecting_range<'a>( @@ -2276,102 +2824,81 @@ impl BufferChangeSet { .hunks_intersecting_range_rev(range, buffer_snapshot) } - #[cfg(any(test, feature = "test-support"))] - pub fn base_text_string(&self) -> Option { - self.base_text.as_ref().map(|buffer| buffer.text()) - } - + /// Used in cases where the change set isn't derived from git. 
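The notification logic in set_state above can be summarized as: a replaced base text invalidates everything, while an unchanged base text invalidates only the span where the old and new diffs disagree. A minimal sketch of that decision, with a boolean and a closure standing in for the remote-id comparison and the BufferDiff comparison in the patch:

use std::ops::Range;

// Sketch only: decide how much of the buffer's diff state to report as changed.
fn invalidated_range(
    base_text_replaced: bool,
    whole_buffer: Range<usize>,
    compare_diffs: impl FnOnce() -> Option<Range<usize>>,
) -> Option<Range<usize>> {
    if base_text_replaced {
        // A different base text means every hunk may have moved.
        Some(whole_buffer)
    } else {
        // Same base text: only the region where the two diffs differ, if any.
        compare_diffs()
    }
}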
pub fn set_base_text( &mut self, - mut base_text: String, - buffer_snapshot: text::BufferSnapshot, + base_buffer: Entity, + buffer: text::BufferSnapshot, cx: &mut Context, ) -> oneshot::Receiver<()> { - LineEnding::normalize(&mut base_text); - self.recalculate_diff_internal(base_text, buffer_snapshot, true, cx) + let (tx, rx) = oneshot::channel(); + let this = cx.weak_entity(); + let base_buffer = base_buffer.read(cx).snapshot(); + cx.spawn(|_, mut cx| async move { + let diff = cx + .background_executor() + .spawn({ + let base_buffer = base_buffer.clone(); + let buffer = buffer.clone(); + async move { BufferDiff::build(Some(&base_buffer.text()), &buffer) } + }) + .await; + let Some(this) = this.upgrade() else { + tx.send(()).ok(); + return; + }; + this.update(&mut cx, |this, cx| { + this.set_state(Some(base_buffer), diff, &buffer, cx); + }) + .log_err(); + tx.send(()).ok(); + }) + .detach(); + rx } - pub fn unset_base_text( - &mut self, - buffer_snapshot: text::BufferSnapshot, - cx: &mut Context, - ) { - if self.base_text.is_some() { - self.base_text = None; - self.diff_to_buffer = BufferDiff::new(&buffer_snapshot); - self.recalculate_diff_task.take(); - cx.notify(); + #[cfg(any(test, feature = "test-support"))] + pub fn base_text_string(&self) -> Option { + self.base_text.as_ref().map(|buffer| buffer.text()) + } + + pub fn new(buffer: &Entity, cx: &mut App) -> Self { + BufferChangeSet { + buffer_id: buffer.read(cx).remote_id(), + base_text: None, + diff_to_buffer: BufferDiff::new(&buffer.read(cx).text_snapshot()), + staged_text: None, + staged_diff: None, } } - pub fn recalculate_diff( - &mut self, - buffer_snapshot: text::BufferSnapshot, - cx: &mut Context, - ) -> oneshot::Receiver<()> { - if let Some(base_text) = self.base_text.clone() { - self.recalculate_diff_internal(base_text.text(), buffer_snapshot, false, cx) - } else { - oneshot::channel().1 + #[cfg(any(test, feature = "test-support"))] + pub fn new_with_base_text(base_text: &str, buffer: &Entity, cx: &mut App) -> Self { + let mut base_text = base_text.to_owned(); + text::LineEnding::normalize(&mut base_text); + let diff_to_buffer = BufferDiff::build(Some(&base_text), &buffer.read(cx).text_snapshot()); + let base_text = language::Buffer::build_snapshot_sync(base_text.into(), None, None, cx); + BufferChangeSet { + buffer_id: buffer.read(cx).remote_id(), + base_text: Some(base_text), + diff_to_buffer, + staged_text: None, + staged_diff: None, } } - fn recalculate_diff_internal( + #[cfg(any(test, feature = "test-support"))] + pub fn recalculate_diff_sync( &mut self, - base_text: String, - buffer_snapshot: text::BufferSnapshot, - base_text_changed: bool, + snapshot: text::BufferSnapshot, cx: &mut Context, - ) -> oneshot::Receiver<()> { - let (tx, rx) = oneshot::channel(); - self.diff_updated_futures.push(tx); - self.recalculate_diff_task = Some(cx.spawn(|this, mut cx| async move { - let (old_diff, new_base_text) = this.update(&mut cx, |this, cx| { - let new_base_text = if base_text_changed { - let base_text_rope: Rope = base_text.as_str().into(); - let snapshot = language::Buffer::build_snapshot( - base_text_rope, - this.language.clone(), - this.language_registry.clone(), - cx, - ); - cx.background_executor() - .spawn(async move { Some(snapshot.await) }) - } else { - Task::ready(None) - }; - (this.diff_to_buffer.clone(), new_base_text) - })?; - - let diff = cx.background_executor().spawn(async move { - let new_diff = BufferDiff::build(&base_text, &buffer_snapshot); - let changed_range = if base_text_changed { - 
Some(text::Anchor::MIN..text::Anchor::MAX) - } else { - new_diff.compare(&old_diff, &buffer_snapshot) - }; - (new_diff, changed_range) - }); - - let (new_base_text, (diff, changed_range)) = futures::join!(new_base_text, diff); - - this.update(&mut cx, |this, cx| { - if let Some(new_base_text) = new_base_text { - this.base_text = Some(new_base_text) - } - this.diff_to_buffer = diff; - - this.recalculate_diff_task.take(); - for tx in this.diff_updated_futures.drain(..) { - tx.send(()).ok(); - } - if let Some(changed_range) = changed_range { - cx.emit(BufferChangeSetEvent::DiffChanged { changed_range }); - } - })?; - Ok(()) - })); - rx + ) { + let mut base_text = self.base_text.as_ref().map(|buffer| buffer.text()); + if let Some(base_text) = base_text.as_mut() { + text::LineEnding::normalize(base_text); + } + let diff_to_buffer = BufferDiff::build(base_text.as_deref(), &snapshot); + self.set_state(self.base_text.clone(), diff_to_buffer, &snapshot, cx); } } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 252fab9eaf2ffeb76eeb01d7c38fdec5857dc1c4..069044bbc42dc9fbadd7239ea289c07e51453b04 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1970,6 +1970,20 @@ impl Project { }) } + pub fn open_uncommitted_changes( + &mut self, + buffer: Entity, + cx: &mut Context, + ) -> Task>> { + if self.is_disconnected(cx) { + return Task::ready(Err(anyhow!(ErrorCode::Disconnected))); + } + + self.buffer_store.update(cx, |buffer_store, cx| { + buffer_store.open_uncommitted_changes(buffer, cx) + }) + } + pub fn open_buffer_by_id( &mut self, id: BufferId, diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 85280ae1a4dd113e1a668f291012558ffac5b648..97dbc3bd2479c3d1fae76a849886912ac02a99ba 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -5624,7 +5624,7 @@ async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) { fs.set_index_for_repo( Path::new("/dir/.git"), - &[(Path::new("src/main.rs"), staged_contents)], + &[("src/main.rs".into(), staged_contents)], ); let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; @@ -5669,7 +5669,7 @@ async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) { fs.set_index_for_repo( Path::new("/dir/.git"), - &[(Path::new("src/main.rs"), staged_contents)], + &[("src/main.rs".into(), staged_contents)], ); cx.run_until_parked(); @@ -5684,6 +5684,108 @@ async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) { }); } +#[gpui::test] +async fn test_uncommitted_changes_for_buffer(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let committed_contents = r#" + fn main() { + println!("hello world"); + } + "# + .unindent(); + let staged_contents = r#" + fn main() { + println!("goodbye world"); + } + "# + .unindent(); + let file_contents = r#" + // print goodbye + fn main() { + println!("goodbye world"); + } + "# + .unindent(); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/dir", + json!({ + ".git": {}, + "src": { + "main.rs": file_contents, + } + }), + ) + .await; + + fs.set_index_for_repo( + Path::new("/dir/.git"), + &[("src/main.rs".into(), staged_contents)], + ); + fs.set_head_for_repo( + Path::new("/dir/.git"), + &[("src/main.rs".into(), committed_contents)], + ); + + let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer("/dir/src/main.rs", cx) + }) 
+ .await + .unwrap(); + let uncommitted_changes = project + .update(cx, |project, cx| { + project.open_uncommitted_changes(buffer.clone(), cx) + }) + .await + .unwrap(); + + cx.run_until_parked(); + uncommitted_changes.update(cx, |uncommitted_changes, cx| { + let snapshot = buffer.read(cx).snapshot(); + assert_hunks( + uncommitted_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + &snapshot, + &uncommitted_changes.base_text.as_ref().unwrap().text(), + &[ + (0..1, "", "// print goodbye\n"), + ( + 2..3, + " println!(\"hello world\");\n", + " println!(\"goodbye world\");\n", + ), + ], + ); + }); + + let committed_contents = r#" + // print goodbye + fn main() { + } + "# + .unindent(); + + fs.set_head_for_repo( + Path::new("/dir/.git"), + &[("src/main.rs".into(), committed_contents)], + ); + + cx.run_until_parked(); + uncommitted_changes.update(cx, |uncommitted_changes, cx| { + let snapshot = buffer.read(cx).snapshot(); + assert_hunks( + uncommitted_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + &snapshot, + &uncommitted_changes.base_text.as_ref().unwrap().text(), + &[(2..3, "", " println!(\"goodbye world\");\n")], + ); + }); +} + async fn search( project: &Entity, query: SearchQuery, diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index c7190389213ef4e721e19233225e378122276d10..976e1e73fd0ce30c63036ca5adb54e3d0ec1610d 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -129,7 +129,7 @@ message Envelope { GetPrivateUserInfo get_private_user_info = 102; GetPrivateUserInfoResponse get_private_user_info_response = 103; UpdateUserPlan update_user_plan = 234; - UpdateDiffBase update_diff_base = 104; + UpdateDiffBases update_diff_bases = 104; AcceptTermsOfService accept_terms_of_service = 239; AcceptTermsOfServiceResponse accept_terms_of_service_response = 240; @@ -304,15 +304,18 @@ message Envelope { SyncExtensionsResponse sync_extensions_response = 286; InstallExtension install_extension = 287; - GetStagedText get_staged_text = 288; - GetStagedTextResponse get_staged_text_response = 289; + OpenUnstagedChanges open_unstaged_changes = 288; + OpenUnstagedChangesResponse open_unstaged_changes_response = 289; RegisterBufferWithLanguageServers register_buffer_with_language_servers = 290; Stage stage = 293; Unstage unstage = 294; Commit commit = 295; - OpenCommitMessageBuffer open_commit_message_buffer = 296; // current max + OpenCommitMessageBuffer open_commit_message_buffer = 296; + + OpenUncommittedChanges open_uncommitted_changes = 297; + OpenUncommittedChangesResponse open_uncommitted_changes_response = 298; // current max } reserved 87 to 88; @@ -2035,19 +2038,51 @@ message WorktreeMetadata { string abs_path = 4; } -message UpdateDiffBase { +message UpdateDiffBases { uint64 project_id = 1; uint64 buffer_id = 2; + + enum Mode { + // No collaborator is using the unstaged diff. + HEAD_ONLY = 0; + // No collaborator is using the diff from HEAD. + INDEX_ONLY = 1; + // Both the unstaged and uncommitted diffs are demanded, + // and the contents of the index and HEAD are the same for this path. + INDEX_MATCHES_HEAD = 2; + // Both the unstaged and uncommitted diffs are demanded, + // and the contents of the index and HEAD differ for this path, + // where None means the path doesn't exist in that state of the repo. 
+ INDEX_AND_HEAD = 3; + } + optional string staged_text = 3; + optional string committed_text = 4; + Mode mode = 5; +} + +message OpenUnstagedChanges { + uint64 project_id = 1; + uint64 buffer_id = 2; +} + +message OpenUnstagedChangesResponse { + optional string staged_text = 1; } -message GetStagedText { +message OpenUncommittedChanges { uint64 project_id = 1; uint64 buffer_id = 2; } -message GetStagedTextResponse { +message OpenUncommittedChangesResponse { + enum Mode { + INDEX_MATCHES_HEAD = 0; + INDEX_AND_HEAD = 1; + } optional string staged_text = 1; + optional string committed_text = 2; + Mode mode = 3; } message GetNotifications { diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index eabd0d3ec71f98697f1332b96b6a51e8dd4ca7ed..ec35aef44ed7670dc7efb2091df8c5f277af4452 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -219,8 +219,10 @@ messages!( (GetImplementationResponse, Background), (GetLlmToken, Background), (GetLlmTokenResponse, Background), - (GetStagedText, Foreground), - (GetStagedTextResponse, Foreground), + (OpenUnstagedChanges, Foreground), + (OpenUnstagedChangesResponse, Foreground), + (OpenUncommittedChanges, Foreground), + (OpenUncommittedChangesResponse, Foreground), (GetUsers, Foreground), (Hello, Foreground), (IncomingCall, Foreground), @@ -309,7 +311,7 @@ messages!( (UpdateUserChannels, Foreground), (UpdateContacts, Foreground), (UpdateDiagnosticSummary, Foreground), - (UpdateDiffBase, Foreground), + (UpdateDiffBases, Foreground), (UpdateFollowers, Foreground), (UpdateInviteInfo, Foreground), (UpdateLanguageServer, Foreground), @@ -422,7 +424,8 @@ request_messages!( (GetProjectSymbols, GetProjectSymbolsResponse), (GetReferences, GetReferencesResponse), (GetSignatureHelp, GetSignatureHelpResponse), - (GetStagedText, GetStagedTextResponse), + (OpenUnstagedChanges, OpenUnstagedChangesResponse), + (OpenUncommittedChanges, OpenUncommittedChangesResponse), (GetSupermavenApiKey, GetSupermavenApiKeyResponse), (GetTypeDefinition, GetTypeDefinitionResponse), (LinkedEditingRange, LinkedEditingRangeResponse), @@ -543,7 +546,8 @@ entity_messages!( GetProjectSymbols, GetReferences, GetSignatureHelp, - GetStagedText, + OpenUnstagedChanges, + OpenUncommittedChanges, GetTypeDefinition, InlayHints, JoinProject, @@ -575,7 +579,7 @@ entity_messages!( UpdateBuffer, UpdateBufferFile, UpdateDiagnosticSummary, - UpdateDiffBase, + UpdateDiffBases, UpdateLanguageServer, UpdateProject, UpdateProjectCollaborator, diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 06391fcffc8ddce1c10f6cc48dea2aff017b8fc7..be18bad293829ea749ebe7a9d85d5a04e3d3354b 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -46,7 +46,7 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test .await; fs.set_index_for_repo( Path::new("/code/project1/.git"), - &[(Path::new("src/lib.rs"), "fn one() -> usize { 0 }".into())], + &[("src/lib.rs".into(), "fn one() -> usize { 0 }".into())], ); let (project, _headless) = init_test(&fs, cx, server_cx).await; @@ -147,7 +147,7 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test fs.set_index_for_repo( Path::new("/code/project1/.git"), - &[(Path::new("src/lib2.rs"), "fn one() -> usize { 100 }".into())], + &[("src/lib2.rs".into(), "fn one() -> usize { 100 }".into())], ); cx.executor().run_until_parked(); change_set.update(cx, |change_set, _| { diff --git 
a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index b384b2fc5cd72d1e43962e498f85649158acadeb..7f9f8f7503417149e4207f9fa55497d1eabdc91b 100644 --- a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ -450,6 +450,10 @@ impl Rope { self.clip_point(Point::new(row, u32::MAX), Bias::Left) .column } + + pub fn ptr_eq(&self, other: &Self) -> bool { + self.chunks.ptr_eq(&other.chunks) + } } impl<'a> From<&'a str> for Rope { diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index 5c89daa8ffc0fba2dc603f945cfb40aff65bb44f..58ca7dbfa9a845c3eb691ea8c9233b7eb1fd7183 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -516,6 +516,10 @@ impl SumTree { } } + pub fn ptr_eq(&self, other: &Self) -> bool { + Arc::ptr_eq(&self.0, &other.0) + } + fn push_tree_recursive( &mut self, other: SumTree, diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index d848271976da0d66b70f4d96b52ebb5acc4f23c9..955afbe20912a7b232545fdeec3e4e1a8e7fcf99 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -895,6 +895,30 @@ impl Worktree { } } + pub fn load_committed_file(&self, path: &Path, cx: &App) -> Task>> { + match self { + Worktree::Local(this) => { + let path = Arc::from(path); + let snapshot = this.snapshot(); + cx.background_executor().spawn(async move { + if let Some(repo) = snapshot.repository_for_path(&path) { + if let Some(repo_path) = repo.relativize(&path).log_err() { + if let Some(git_repo) = + snapshot.git_repositories.get(&repo.work_directory_id) + { + return Ok(git_repo.repo_ptr.load_committed_text(&repo_path)); + } + } + } + Ok(None) + }) + } + Worktree::Remote(_) => Task::ready(Err(anyhow!( + "remote worktrees can't yet load committed files" + ))), + } + } + pub fn load_binary_file( &self, path: &Path, From 6f0f9d631e14031a25bf3e48a6105dacda3165f5 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 4 Feb 2025 12:49:08 -0800 Subject: [PATCH 007/130] Allow running cancel-language-server-work action w/o editor focused (#24215) Release Notes: - Added the ability to run the `cancel language server work` action while a panel (like the terminal panel) is focused --- crates/editor/src/editor.rs | 22 +++++++++++++--------- crates/editor/src/element.rs | 1 - 2 files changed, 13 insertions(+), 10 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 63f0068eda84aef4dde92f713743e3f315a7be91..05337a0f5ade6fee7425cf0a1c57c10701d13083 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -306,6 +306,7 @@ pub fn init(cx: &mut App) { workspace.register_action(Editor::new_file); workspace.register_action(Editor::new_file_vertical); workspace.register_action(Editor::new_file_horizontal); + workspace.register_action(Editor::cancel_language_server_work); }, ) .detach(); @@ -11306,18 +11307,21 @@ impl Editor { } fn cancel_language_server_work( - &mut self, + workspace: &mut Workspace, _: &actions::CancelLanguageServerWork, _: &mut Window, - cx: &mut Context, + cx: &mut Context, ) { - if let Some(project) = self.project.clone() { - self.buffer.update(cx, |multi_buffer, cx| { - project.update(cx, |project, cx| { - project.cancel_language_server_work_for_buffers(multi_buffer.all_buffers(), cx); - }); - }) - } + let project = workspace.project(); + let buffers = workspace + .active_item(cx) + .and_then(|item| item.act_as::(cx)) + .map_or(HashSet::default(), |editor| { + editor.read(cx).buffer.read(cx).all_buffers() + }); + project.update(cx, 
|project, cx| { + project.cancel_language_server_work_for_buffers(buffers, cx); + }); } fn show_character_palette( diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 632c81c6b149b7f75438aededf7e9b9775f830a1..af6339cd687b58e4b97733a07ea95ee7917b6beb 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -430,7 +430,6 @@ impl EditorElement { } }); register_action(editor, window, Editor::restart_language_server); - register_action(editor, window, Editor::cancel_language_server_work); register_action(editor, window, Editor::show_character_palette); register_action(editor, window, |editor, action, window, cx| { if let Some(task) = editor.confirm_completion(action, window, cx) { From 58db66ef446f7571c0a4eb7d7002a81a0a418a72 Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Tue, 4 Feb 2025 17:58:25 -0300 Subject: [PATCH 008/130] edit prediction: Do not render jump cursor until line layout is ready (#24226) This is pretty rare but I found a case where `line_layouts` didn't have the requested line yet, so we now skip rendering the cursor for that period and avoid panicking. Release Notes: - N/A --- crates/editor/src/editor.rs | 31 +++++++++++++++++-------------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 05337a0f5ade6fee7425cf0a1c57c10701d13083..1d819b6038511117a39c051ab82331a71c6b0235 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -5675,11 +5675,12 @@ impl Editor { let end_point = range_around_target.end.to_point(&snapshot); let target_point = target.text_anchor.to_point(&snapshot); - let start_column_x = - line_layouts[start_point.row as usize].x_for_index(start_point.column as usize); - let target_column_x = line_layouts[target_point.row as usize] - .x_for_index(target_point.column as usize); - let cursor_relative_position = target_column_x - start_column_x; + let cursor_relative_position = + line_layouts.get(start_point.row as usize).map(|line| { + let start_column_x = line.x_for_index(start_point.column as usize); + let target_column_x = line.x_for_index(target_point.column as usize); + target_column_x - start_column_x + }); let fade_before = start_point.column > 0; let fade_after = end_point.column < snapshot.line_len(end_point.row); @@ -5722,15 +5723,17 @@ impl Editor { ), ) }) - .child( - div() - .w(px(2.)) - .h_full() - .bg(cursor_color) - .absolute() - .top_0() - .left(cursor_relative_position), - ), + .when_some(cursor_relative_position, |parent, position| { + parent.child( + div() + .w(px(2.)) + .h_full() + .bg(cursor_color) + .absolute() + .top_0() + .left(position), + ) + }), ) }), ) From d6a2a0b04af07aadbc3248950b56ab083991f2ce Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 4 Feb 2025 16:02:26 -0500 Subject: [PATCH 009/130] zeta: Rename `data_collection_permission` back to `can_collect_data` (#24225) This PR renames some bindings from `data_collection_permission` back to `can_collect_data`, as the latter name is clearer on account of being a modal verb. 
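For illustration only, a minimal sketch of the readability argument; the type and
method names come from this change, while the `enabled` field, the constructor,
and `main` are simplified stand-ins rather than the real implementation:

    // Simplified stand-in for the provider-side state touched in this change.
    struct ProviderDataCollection {
        enabled: bool,
    }

    impl ProviderDataCollection {
        // With the modal-verb name, the accessor reads as a yes/no question.
        fn can_collect_data(&self) -> bool {
            self.enabled
        }
    }

    fn main() {
        let provider = ProviderDataCollection { enabled: false };
        if provider.can_collect_data() {
            println!("including this interaction in the sample");
        }
    }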
Release Notes: - N/A --- crates/rpc/src/llm.rs | 2 +- crates/zeta/src/zeta.rs | 15 +++++++-------- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/crates/rpc/src/llm.rs b/crates/rpc/src/llm.rs index 93ac5bdee8493eb0725456cd990e37451e85e3fd..c1612662bfa740fabdfc218d297f431d6e561cd0 100644 --- a/crates/rpc/src/llm.rs +++ b/crates/rpc/src/llm.rs @@ -41,7 +41,7 @@ pub struct PredictEditsParams { pub input_excerpt: String, /// Whether the user provided consent for sampling this interaction. #[serde(default)] - pub data_collection_permission: bool, + pub can_collect_data: bool, } #[derive(Debug, Serialize, Deserialize)] diff --git a/crates/zeta/src/zeta.rs b/crates/zeta/src/zeta.rs index c0ba581f6b7508f0271f8068ee5393d76facc304..57c12d6f5c2d3dc6a4c340763aebefc625305f3d 100644 --- a/crates/zeta/src/zeta.rs +++ b/crates/zeta/src/zeta.rs @@ -364,7 +364,7 @@ impl Zeta { &mut self, buffer: &Entity, cursor: language::Anchor, - data_collection_permission: bool, + can_collect_data: bool, cx: &mut Context, perform_predict_edits: F, ) -> Task>> @@ -429,7 +429,7 @@ impl Zeta { input_events: input_events.clone(), input_excerpt: input_excerpt.clone(), outline: Some(input_outline.clone()), - data_collection_permission, + can_collect_data, }; let response = perform_predict_edits(client, llm_token, is_staff, body).await?; @@ -609,13 +609,13 @@ and then another &mut self, buffer: &Entity, position: language::Anchor, - data_collection_permission: bool, + can_collect_data: bool, cx: &mut Context, ) -> Task>> { self.request_completion_impl( buffer, position, - data_collection_permission, + can_collect_data, cx, Self::perform_predict_edits, ) @@ -1365,7 +1365,7 @@ impl ProviderDataCollection { .map_or(false, |choice| choice.read(cx).is_enabled()) } - pub fn data_collection_permission(&self, cx: &App) -> bool { + pub fn can_collect_data(&self, cx: &App) -> bool { self.choice .as_ref() .is_some_and(|choice| choice.read(cx).is_enabled()) @@ -1499,8 +1499,7 @@ impl inline_completion::InlineCompletionProvider for ZetaInlineCompletionProvide let pending_completion_id = self.next_pending_completion_id; self.next_pending_completion_id += 1; - let data_collection_permission = - self.provider_data_collection.data_collection_permission(cx); + let can_collect_data = self.provider_data_collection.can_collect_data(cx); let last_request_timestamp = self.last_request_timestamp; let task = cx.spawn(|this, mut cx| async move { @@ -1513,7 +1512,7 @@ impl inline_completion::InlineCompletionProvider for ZetaInlineCompletionProvide let completion_request = this.update(&mut cx, |this, cx| { this.last_request_timestamp = Instant::now(); this.zeta.update(cx, |zeta, cx| { - zeta.request_completion(&buffer, position, data_collection_permission, cx) + zeta.request_completion(&buffer, position, can_collect_data, cx) }) }); From b02baea9d2bb954fad8e755f86d18034060ffea8 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 4 Feb 2025 16:39:15 -0500 Subject: [PATCH 010/130] zeta: Use DTOs from `zed_llm_client` crate (#24229) This PR updates the `zeta` crate to use the predictive edit DTOs defined in the `zed_llm_client` crate. This way we aren't duplicating their definitions (and risk them going out of sync). 
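A minimal sketch of what the shared type looks like from the caller's side; the
field names are exactly the ones this diff constructs, and the placeholder values
and helper function are illustrative only:

    use zed_llm_client::PredictEditsBody;

    // The request body is now defined once, in `zed_llm_client`, rather than
    // being re-declared in the `rpc` crate, so a field change surfaces as a
    // compile error here instead of a silent mismatch.
    fn build_request_body(input_events: String, input_excerpt: String) -> PredictEditsBody {
        PredictEditsBody {
            input_events,
            input_excerpt,
            outline: None,
            can_collect_data: false,
        }
    }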
Release Notes: - N/A --- Cargo.lock | 10 ++++++++++ Cargo.toml | 1 + crates/rpc/src/llm.rs | 15 --------------- crates/zeta/Cargo.toml | 2 +- crates/zeta/src/zeta.rs | 8 ++++---- 5 files changed, 16 insertions(+), 20 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 565c1a729eebf4044ff7a6c77959142daf8f9be5..90373a0bcfeb46790f282ff6f2ccdc1df85a3a52 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -16634,6 +16634,15 @@ dependencies = [ "zed_extension_api 0.1.0", ] +[[package]] +name = "zed_llm_client" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54ca07d631d9d758f1820c7a7e7854ca00619b9783a5b6b3b6057fef06c786cb" +dependencies = [ + "serde", +] + [[package]] name = "zed_lua" version = "0.1.1" @@ -16873,6 +16882,7 @@ dependencies = [ "workspace", "worktree", "zed_actions", + "zed_llm_client", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 4ed9a358cede33216a86d7da41c263466348369f..9723d9beed45af10d166cce9e4ad931611f4e15b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -546,6 +546,7 @@ wasmtime = { version = "24", default-features = false, features = [ wasmtime-wasi = "24" which = "6.0.0" wit-component = "0.201" +zed_llm_client = "0.1.1" zstd = "0.11" metal = "0.31" diff --git a/crates/rpc/src/llm.rs b/crates/rpc/src/llm.rs index c1612662bfa740fabdfc218d297f431d6e561cd0..0a7510d891d3522c8794fb106fe168df10fc5aab 100644 --- a/crates/rpc/src/llm.rs +++ b/crates/rpc/src/llm.rs @@ -33,18 +33,3 @@ pub struct PerformCompletionParams { pub model: String, pub provider_request: Box, } - -#[derive(Debug, Serialize, Deserialize)] -pub struct PredictEditsParams { - pub outline: Option, - pub input_events: String, - pub input_excerpt: String, - /// Whether the user provided consent for sampling this interaction. 
- #[serde(default)] - pub can_collect_data: bool, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct PredictEditsResponse { - pub output_excerpt: String, -} diff --git a/crates/zeta/Cargo.toml b/crates/zeta/Cargo.toml index 138add6ad91ea74b4de891705663ca68220218e1..e981460256eb977e9c166b1a89a89cd4fde9def7 100644 --- a/crates/zeta/Cargo.toml +++ b/crates/zeta/Cargo.toml @@ -38,7 +38,6 @@ log.workspace = true menu.workspace = true postage.workspace = true regex.workspace = true -rpc.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true @@ -52,6 +51,7 @@ uuid.workspace = true workspace.workspace = true worktree.workspace = true zed_actions.workspace = true +zed_llm_client.workspace = true [dev-dependencies] collections = { workspace = true, features = ["test-support"] } diff --git a/crates/zeta/src/zeta.rs b/crates/zeta/src/zeta.rs index 57c12d6f5c2d3dc6a4c340763aebefc625305f3d..7aa2dc212a3f9165dfd4b898666ab76eaf849580 100644 --- a/crates/zeta/src/zeta.rs +++ b/crates/zeta/src/zeta.rs @@ -29,7 +29,6 @@ use language::{ }; use language_models::LlmApiToken; use postage::watch; -use rpc::{PredictEditsParams, PredictEditsResponse, EXPIRED_LLM_TOKEN_HEADER_NAME}; use settings::WorktreeId; use std::{ borrow::Cow, @@ -47,6 +46,7 @@ use telemetry_events::InlineCompletionRating; use util::ResultExt; use uuid::Uuid; use worktree::Worktree; +use zed_llm_client::{PredictEditsBody, PredictEditsResponse, EXPIRED_LLM_TOKEN_HEADER_NAME}; const CURSOR_MARKER: &'static str = "<|user_cursor_is_here|>"; const START_OF_FILE_MARKER: &'static str = "<|start_of_file|>"; @@ -369,7 +369,7 @@ impl Zeta { perform_predict_edits: F, ) -> Task>> where - F: FnOnce(Arc, LlmApiToken, bool, PredictEditsParams) -> R + 'static, + F: FnOnce(Arc, LlmApiToken, bool, PredictEditsBody) -> R + 'static, R: Future> + Send + 'static, { let snapshot = self.report_changes_for_buffer(&buffer, cx); @@ -425,7 +425,7 @@ impl Zeta { log::debug!("Events:\n{}\nExcerpt:\n{}", input_events, input_excerpt); - let body = PredictEditsParams { + let body = PredictEditsBody { input_events: input_events.clone(), input_excerpt: input_excerpt.clone(), outline: Some(input_outline.clone()), @@ -625,7 +625,7 @@ and then another client: Arc, llm_token: LlmApiToken, _is_staff: bool, - body: PredictEditsParams, + body: PredictEditsBody, ) -> impl Future> { async move { let http_client = client.http_client(); From b13498a5dd279a2eeb92a42b0ad996e85094e0c0 Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Tue, 4 Feb 2025 18:47:34 -0300 Subject: [PATCH 011/130] edit prediction: Fix jump cursor position when scrolled (#24230) We were looking up line layouts without subtracting start row so we would get the wrong one when scrolled Release Notes: - N/A --- crates/editor/src/editor.rs | 9 +++++++-- crates/editor/src/element.rs | 1 + 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 1d819b6038511117a39c051ab82331a71c6b0235..02f4b474b507d3f1491ced3745ecd81d5d79ca59 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -5421,6 +5421,7 @@ impl Editor { min_width: Pixels, max_width: Pixels, cursor_point: Point, + start_row: DisplayRow, line_layouts: &[LineWithInvisibles], style: &EditorStyle, accept_keystroke: &gpui::Keystroke, @@ -5473,6 +5474,7 @@ impl Editor { Some(completion) => self.render_edit_prediction_cursor_popover_preview( completion, cursor_point, + start_row, line_layouts, style, cx, @@ -5482,6 +5484,7 @@ impl Editor 
{ Some(stale_completion) => self.render_edit_prediction_cursor_popover_preview( stale_completion, cursor_point, + start_row, line_layouts, style, cx, @@ -5568,6 +5571,7 @@ impl Editor { &self, completion: &InlineCompletionState, cursor_point: Point, + start_row: DisplayRow, line_layouts: &[LineWithInvisibles], style: &EditorStyle, cx: &mut Context, @@ -5675,8 +5679,9 @@ impl Editor { let end_point = range_around_target.end.to_point(&snapshot); let target_point = target.text_anchor.to_point(&snapshot); - let cursor_relative_position = - line_layouts.get(start_point.row as usize).map(|line| { + let cursor_relative_position = line_layouts + .get(start_point.row.saturating_sub(start_row.0) as usize) + .map(|line| { let start_column_x = line.x_for_index(start_point.column as usize); let target_column_x = line.x_for_index(target_point.column as usize); target_column_x - start_column_x diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index af6339cd687b58e4b97733a07ea95ee7917b6beb..97cfec90c12f628e69e20288136424b194bacd81 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -3285,6 +3285,7 @@ impl EditorElement { min_width, max_width, cursor_point, + start_row, &line_layouts, style, accept_keystroke.as_ref()?, From aa3da35e8ec8e166a0453ce09650addf660e5c03 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 4 Feb 2025 18:12:35 -0500 Subject: [PATCH 012/130] collab: Add `has_overdue_invoices` to `billing_customers` (#24239) This PR adds a new `has_overdue_invoices` field to the `billing_customers` table. This will be used to statefully track whether a customer has overdue invoices, and also to reset it when the invoices are paid. We will set this field to `true` when a subscription is canceled with the reason `payment_failed`. 
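This patch only sets the flag; the reset mentioned above would land separately. A
hypothetical sketch of that reset, reusing the update API called below — the
function itself and its trigger (an invoice-paid notification) are assumptions,
while `Database::update_billing_customer`, `UpdateBillingCustomerParams`,
`billing_customer::Model`, and `ActiveValue` are the existing items this diff
touches (crate-internal imports and the `Result` alias as in the surrounding
modules):

    use sea_orm::ActiveValue;

    // Hypothetical follow-up: clear `has_overdue_invoices` once the customer's
    // invoices are paid, mirroring the `ActiveValue::set(true)` call this patch
    // adds when a subscription is canceled for `payment_failed`.
    async fn clear_overdue_invoices(
        db: &Database,
        customer: &billing_customer::Model,
    ) -> Result<()> {
        db.update_billing_customer(
            customer.id,
            &UpdateBillingCustomerParams {
                has_overdue_invoices: ActiveValue::set(false),
                ..Default::default()
            },
        )
        .await?;
        Ok(())
    }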
Release Notes: - N/A --- .../20221109000000_test_schema.sql | 1 + ..._overdue_invoices_to_billing_customers.sql | 2 ++ crates/collab/src/api/billing.rs | 21 +++++++++++++++++++ .../src/db/queries/billing_customers.rs | 2 ++ .../collab/src/db/tables/billing_customer.rs | 1 + 5 files changed, 27 insertions(+) create mode 100644 crates/collab/migrations/20250204224004_add_has_overdue_invoices_to_billing_customers.sql diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index fb2de18b6356d5389418e77cd41a4335cf2954a7..185bd45cd3235dcde7d21f83e4234a76cb7979d8 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -430,6 +430,7 @@ CREATE TABLE IF NOT EXISTS billing_customers ( id INTEGER PRIMARY KEY AUTOINCREMENT, created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, user_id INTEGER NOT NULL REFERENCES users(id), + has_overdue_invoices BOOLEAN NOT NULL DEFAULT FALSE, stripe_customer_id TEXT NOT NULL ); diff --git a/crates/collab/migrations/20250204224004_add_has_overdue_invoices_to_billing_customers.sql b/crates/collab/migrations/20250204224004_add_has_overdue_invoices_to_billing_customers.sql new file mode 100644 index 0000000000000000000000000000000000000000..07c40303994395e8f43c33df130955e0d82ab627 --- /dev/null +++ b/crates/collab/migrations/20250204224004_add_has_overdue_invoices_to_billing_customers.sql @@ -0,0 +1,2 @@ +alter table billing_customers +add column has_overdue_invoices bool not null default false; diff --git a/crates/collab/src/api/billing.rs b/crates/collab/src/api/billing.rs index eceb01ee231b4ffaa5d1069c4a8969017c4eaa50..2e5a4a925b3980bfb32378f5c16b20cbe6c0d026 100644 --- a/crates/collab/src/api/billing.rs +++ b/crates/collab/src/api/billing.rs @@ -666,6 +666,27 @@ async fn handle_customer_subscription_event( .await? 
.ok_or_else(|| anyhow!("billing customer not found"))?; + let was_canceled_due_to_payment_failure = subscription.status == SubscriptionStatus::Canceled + && subscription + .cancellation_details + .as_ref() + .and_then(|details| details.reason) + .map_or(false, |reason| { + reason == CancellationDetailsReason::PaymentFailed + }); + + if was_canceled_due_to_payment_failure { + app.db + .update_billing_customer( + billing_customer.id, + &UpdateBillingCustomerParams { + has_overdue_invoices: ActiveValue::set(true), + ..Default::default() + }, + ) + .await?; + } + if let Some(existing_subscription) = app .db .get_billing_subscription_by_stripe_subscription_id(&subscription.id) diff --git a/crates/collab/src/db/queries/billing_customers.rs b/crates/collab/src/db/queries/billing_customers.rs index 188bb39e1cd5d1f79ced636d81e8b62067becb5b..efbc31c7c7a1695b61e7eb907c175debd0c18d71 100644 --- a/crates/collab/src/db/queries/billing_customers.rs +++ b/crates/collab/src/db/queries/billing_customers.rs @@ -10,6 +10,7 @@ pub struct CreateBillingCustomerParams { pub struct UpdateBillingCustomerParams { pub user_id: ActiveValue, pub stripe_customer_id: ActiveValue, + pub has_overdue_invoices: ActiveValue, } impl Database { @@ -43,6 +44,7 @@ impl Database { id: ActiveValue::set(id), user_id: params.user_id.clone(), stripe_customer_id: params.stripe_customer_id.clone(), + has_overdue_invoices: params.has_overdue_invoices.clone(), ..Default::default() }) .exec(&*tx) diff --git a/crates/collab/src/db/tables/billing_customer.rs b/crates/collab/src/db/tables/billing_customer.rs index 258a7e0c0ccab04c7f734cc25b6cd89adab4b00d..914d73061f51bb3a1fced305af44419f346435d3 100644 --- a/crates/collab/src/db/tables/billing_customer.rs +++ b/crates/collab/src/db/tables/billing_customer.rs @@ -9,6 +9,7 @@ pub struct Model { pub id: BillingCustomerId, pub user_id: UserId, pub stripe_customer_id: String, + pub has_overdue_invoices: bool, pub created_at: DateTime, } From f366b978991bdbd39f41f51168942bd9582b8e59 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 4 Feb 2025 18:38:00 -0500 Subject: [PATCH 013/130] collab: Use `billing_customers.has_overdue_invoices` to gate subscription access (#24240) This PR updates the check that prevents subscribing with overdue subscriptions to use the `billing_customers.has_overdue_invoices` field instead. This will allow us to set the value of `has_overdue_invoices` to `false` when the invoices have been paid. Release Notes: - N/A --- crates/collab/src/api/billing.rs | 42 +++---- .../src/db/queries/billing_subscriptions.rs | 36 ------ .../db/tests/billing_subscription_tests.rs | 112 +----------------- 3 files changed, 23 insertions(+), 167 deletions(-) diff --git a/crates/collab/src/api/billing.rs b/crates/collab/src/api/billing.rs index 2e5a4a925b3980bfb32378f5c16b20cbe6c0d026..0a1a544483a0c53169d79471b9cfe35ae154eb9f 100644 --- a/crates/collab/src/api/billing.rs +++ b/crates/collab/src/api/billing.rs @@ -249,29 +249,31 @@ async fn create_billing_subscription( )); } - if app.db.has_overdue_billing_subscriptions(user.id).await? 
{ - return Err(Error::http( - StatusCode::PAYMENT_REQUIRED, - "user has overdue billing subscriptions".into(), - )); + let existing_billing_customer = app.db.get_billing_customer_by_user_id(user.id).await?; + if let Some(existing_billing_customer) = &existing_billing_customer { + if existing_billing_customer.has_overdue_invoices { + return Err(Error::http( + StatusCode::PAYMENT_REQUIRED, + "user has overdue invoices".into(), + )); + } } - let customer_id = - if let Some(existing_customer) = app.db.get_billing_customer_by_user_id(user.id).await? { - CustomerId::from_str(&existing_customer.stripe_customer_id) - .context("failed to parse customer ID")? - } else { - let customer = Customer::create( - &stripe_client, - CreateCustomer { - email: user.email_address.as_deref(), - ..Default::default() - }, - ) - .await?; + let customer_id = if let Some(existing_customer) = existing_billing_customer { + CustomerId::from_str(&existing_customer.stripe_customer_id) + .context("failed to parse customer ID")? + } else { + let customer = Customer::create( + &stripe_client, + CreateCustomer { + email: user.email_address.as_deref(), + ..Default::default() + }, + ) + .await?; - customer.id - }; + customer.id + }; let default_model = llm_db.model(rpc::LanguageModelProvider::Anthropic, "claude-3-5-sonnet")?; let stripe_model = stripe_billing.register_model(default_model).await?; diff --git a/crates/collab/src/db/queries/billing_subscriptions.rs b/crates/collab/src/db/queries/billing_subscriptions.rs index d2762e2e8f45ba4b974073732a49734d76b1ea4e..4d2fce8c782d5a2affd1ebdae65e2f2d3b799ba2 100644 --- a/crates/collab/src/db/queries/billing_subscriptions.rs +++ b/crates/collab/src/db/queries/billing_subscriptions.rs @@ -170,40 +170,4 @@ impl Database { }) .await } - - /// Returns whether the user has any overdue billing subscriptions. - pub async fn has_overdue_billing_subscriptions(&self, user_id: UserId) -> Result { - Ok(self.count_overdue_billing_subscriptions(user_id).await? > 0) - } - - /// Returns the count of the overdue billing subscriptions for the user with the specified ID. 
- /// - /// This includes subscriptions: - /// - Whose status is `past_due` - /// - Whose status is `canceled` and the cancellation reason is `payment_failed` - pub async fn count_overdue_billing_subscriptions(&self, user_id: UserId) -> Result { - self.transaction(|tx| async move { - let past_due = billing_subscription::Column::StripeSubscriptionStatus - .eq(StripeSubscriptionStatus::PastDue); - let payment_failed = billing_subscription::Column::StripeSubscriptionStatus - .eq(StripeSubscriptionStatus::Canceled) - .and( - billing_subscription::Column::StripeCancellationReason - .eq(StripeCancellationReason::PaymentFailed), - ); - - let count = billing_subscription::Entity::find() - .inner_join(billing_customer::Entity) - .filter( - billing_customer::Column::UserId - .eq(user_id) - .and(past_due.or(payment_failed)), - ) - .count(&*tx) - .await?; - - Ok(count as usize) - }) - .await - } } diff --git a/crates/collab/src/db/tests/billing_subscription_tests.rs b/crates/collab/src/db/tests/billing_subscription_tests.rs index d2368b72b3301a4737a67f7f0674582806801829..4c9e0e77ec7240b7cac45126a07d7d6a13cdd999 100644 --- a/crates/collab/src/db/tests/billing_subscription_tests.rs +++ b/crates/collab/src/db/tests/billing_subscription_tests.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use crate::db::billing_subscription::{StripeCancellationReason, StripeSubscriptionStatus}; +use crate::db::billing_subscription::StripeSubscriptionStatus; use crate::db::tests::new_test_user; use crate::db::{CreateBillingCustomerParams, CreateBillingSubscriptionParams}; use crate::test_both_dbs; @@ -88,113 +88,3 @@ async fn test_get_active_billing_subscriptions(db: &Arc) { assert_eq!(subscription_count, 0); } } - -test_both_dbs!( - test_count_overdue_billing_subscriptions, - test_count_overdue_billing_subscriptions_postgres, - test_count_overdue_billing_subscriptions_sqlite -); - -async fn test_count_overdue_billing_subscriptions(db: &Arc) { - // A user with no subscription has no overdue billing subscriptions. - { - let user_id = new_test_user(db, "no-subscription-user@example.com").await; - let subscription_count = db - .count_overdue_billing_subscriptions(user_id) - .await - .unwrap(); - - assert_eq!(subscription_count, 0); - } - - // A user with a past-due subscription has an overdue billing subscription. - { - let user_id = new_test_user(db, "past-due-user@example.com").await; - let customer = db - .create_billing_customer(&CreateBillingCustomerParams { - user_id, - stripe_customer_id: "cus_past_due_user".into(), - }) - .await - .unwrap(); - assert_eq!(customer.stripe_customer_id, "cus_past_due_user".to_string()); - - db.create_billing_subscription(&CreateBillingSubscriptionParams { - billing_customer_id: customer.id, - stripe_subscription_id: "sub_past_due_user".into(), - stripe_subscription_status: StripeSubscriptionStatus::PastDue, - stripe_cancellation_reason: None, - }) - .await - .unwrap(); - - let subscription_count = db - .count_overdue_billing_subscriptions(user_id) - .await - .unwrap(); - assert_eq!(subscription_count, 1); - } - - // A user with a canceled subscription with a reason of `payment_failed` has an overdue billing subscription. 
- { - let user_id = - new_test_user(db, "canceled-subscription-payment-failed-user@example.com").await; - let customer = db - .create_billing_customer(&CreateBillingCustomerParams { - user_id, - stripe_customer_id: "cus_canceled_subscription_payment_failed_user".into(), - }) - .await - .unwrap(); - assert_eq!( - customer.stripe_customer_id, - "cus_canceled_subscription_payment_failed_user".to_string() - ); - - db.create_billing_subscription(&CreateBillingSubscriptionParams { - billing_customer_id: customer.id, - stripe_subscription_id: "sub_canceled_subscription_payment_failed_user".into(), - stripe_subscription_status: StripeSubscriptionStatus::Canceled, - stripe_cancellation_reason: Some(StripeCancellationReason::PaymentFailed), - }) - .await - .unwrap(); - - let subscription_count = db - .count_overdue_billing_subscriptions(user_id) - .await - .unwrap(); - assert_eq!(subscription_count, 1); - } - - // A user with a canceled subscription with a reason of `cancellation_requested` has no overdue billing subscriptions. - { - let user_id = new_test_user(db, "canceled-subscription-user@example.com").await; - let customer = db - .create_billing_customer(&CreateBillingCustomerParams { - user_id, - stripe_customer_id: "cus_canceled_subscription_user".into(), - }) - .await - .unwrap(); - assert_eq!( - customer.stripe_customer_id, - "cus_canceled_subscription_user".to_string() - ); - - db.create_billing_subscription(&CreateBillingSubscriptionParams { - billing_customer_id: customer.id, - stripe_subscription_id: "sub_canceled_subscription_user".into(), - stripe_subscription_status: StripeSubscriptionStatus::Canceled, - stripe_cancellation_reason: Some(StripeCancellationReason::CancellationRequested), - }) - .await - .unwrap(); - - let subscription_count = db - .count_overdue_billing_subscriptions(user_id) - .await - .unwrap(); - assert_eq!(subscription_count, 0); - } -} From 7c1132ed882d936454f3c31074923e7c5b0a0ae9 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Tue, 4 Feb 2025 20:11:01 -0500 Subject: [PATCH 014/130] Refactor change sets to store index text in only one place (#24245) This is a pure refactor that somewhat reduces the amount of code needed when handling diff base changes. There's also a small performance gain from reparsing the staged text and computing a new diff in parallel when we weren't previously. Release Notes: - N/A Co-authored-by: Max --- crates/project/src/buffer_store.rs | 166 ++++++++++++++--------------- 1 file changed, 79 insertions(+), 87 deletions(-) diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index 4128990f8c5f888559c6dae217b399542de9917d..bbfaa1e4783bbea8659ab1d201582995d04b31bc 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -210,49 +210,40 @@ impl BufferChangeSetState { _ => false, }; self.recalculate_diff_task = Some(cx.spawn(|this, mut cx| async move { - let snapshot = if index_changed { - let snapshot = cx.update(|cx| { - index.as_ref().map(|head| { - language::Buffer::build_snapshot( - Rope::from(head.as_str()), - language.clone(), - language_registry.clone(), - cx, - ) - }) - })?; - cx.background_executor() - .spawn(OptionFuture::from(snapshot)) - .await - } else if let Some(unstaged_changes) = &unstaged_changes { - unstaged_changes.read_with(&cx, |change_set, _| change_set.base_text.clone())? - } else if let Some(uncommitted_changes) = &uncommitted_changes { - uncommitted_changes - .read_with(&cx, |change_set, _| change_set.staged_text.clone())? 
- } else { - return Ok(()); - }; - if let Some(unstaged_changes) = &unstaged_changes { - let diff = cx - .background_executor() - .spawn({ + let staged_snapshot = if index_changed { + let staged_snapshot = cx.update(|cx| { + index.as_ref().map(|head| { + language::Buffer::build_snapshot( + Rope::from(head.as_str()), + language.clone(), + language_registry.clone(), + cx, + ) + }) + })?; + cx.background_executor() + .spawn(OptionFuture::from(staged_snapshot)) + } else { + Task::ready( + unstaged_changes + .read_with(&cx, |change_set, _| change_set.base_text.clone())?, + ) + }; + + let diff = + cx.background_executor().spawn({ let buffer = buffer.clone(); async move { BufferDiff::build(index.as_ref().map(|index| index.as_str()), &buffer) } - }) - .await; + }); + + let (staged_snapshot, diff) = futures::join!(staged_snapshot, diff); unstaged_changes.update(&mut cx, |unstaged_changes, cx| { - unstaged_changes.set_state(snapshot.clone(), diff, &buffer, cx); + unstaged_changes.set_state(staged_snapshot.clone(), diff, &buffer, cx); })?; - - if let Some(uncommitted_changes) = &uncommitted_changes { - uncommitted_changes.update(&mut cx, |uncommitted_changes, _| { - uncommitted_changes.staged_text = snapshot; - })?; - } } if let Some(uncommitted_changes) = &uncommitted_changes { @@ -266,17 +257,26 @@ impl BufferChangeSetState { ) })? } else { - let snapshot = cx.update(|cx| { - head.as_deref().map(|head| { - language::Buffer::build_snapshot( - Rope::from(head.as_str()), - language.clone(), - language_registry.clone(), - cx, - ) - }) - })?; - let snapshot = cx.background_executor().spawn(OptionFuture::from(snapshot)); + let committed_snapshot = if head_changed { + let committed_snapshot = cx.update(|cx| { + head.as_ref().map(|head| { + language::Buffer::build_snapshot( + Rope::from(head.as_str()), + language.clone(), + language_registry.clone(), + cx, + ) + }) + })?; + cx.background_executor() + .spawn(OptionFuture::from(committed_snapshot)) + } else { + Task::ready( + uncommitted_changes + .read_with(&cx, |change_set, _| change_set.base_text.clone())?, + ) + }; + let diff = cx.background_executor().spawn({ let buffer = buffer.clone(); let head = head.clone(); @@ -284,38 +284,12 @@ impl BufferChangeSetState { BufferDiff::build(head.as_ref().map(|head| head.as_str()), &buffer) } }); - futures::join!(snapshot, diff) + futures::join!(committed_snapshot, diff) }; uncommitted_changes.update(&mut cx, |change_set, cx| { change_set.set_state(snapshot, diff, &buffer, cx); })?; - - if index_changed || head_changed { - let staged_text = uncommitted_changes - .read_with(&cx, |change_set, _| change_set.staged_text.clone())?; - - let diff = if index_matches_head { - staged_text.as_ref().map(|buffer| BufferDiff::new(buffer)) - } else if let Some(staged_text) = staged_text { - Some( - cx.background_executor() - .spawn(async move { - BufferDiff::build( - head.as_ref().map(|head| head.as_str()), - &staged_text, - ) - }) - .await, - ) - } else { - None - }; - - uncommitted_changes.update(&mut cx, |change_set, _| { - change_set.staged_diff = diff; - })?; - } } if let Some(this) = this.upgrade() { @@ -339,9 +313,7 @@ pub struct BufferChangeSet { pub buffer_id: BufferId, pub base_text: Option, pub diff_to_buffer: BufferDiff, - pub staged_text: Option, - // For an uncommitted changeset, this is the diff between HEAD and the index. 
- pub staged_diff: Option, + pub unstaged_change_set: Option>, } impl std::fmt::Debug for BufferChangeSet { @@ -350,8 +322,6 @@ impl std::fmt::Debug for BufferChangeSet { .field("buffer_id", &self.buffer_id) .field("base_text", &self.base_text.as_ref().map(|s| s.text())) .field("diff_to_buffer", &self.diff_to_buffer) - .field("staged_text", &self.staged_text.as_ref().map(|s| s.text())) - .field("staged_diff", &self.staged_diff) .finish() } } @@ -1579,14 +1549,33 @@ impl BufferStore { buffer_id, base_text: None, diff_to_buffer: BufferDiff::new(&buffer.read(cx).text_snapshot()), - staged_text: None, - staged_diff: None, + unstaged_change_set: None, }); match kind { ChangeSetKind::Unstaged => { change_set_state.unstaged_changes = Some(change_set.downgrade()) } ChangeSetKind::Uncommitted => { + let unstaged_change_set = + if let Some(change_set) = change_set_state.unstaged_changes() { + change_set + } else { + let unstaged_change_set = cx.new(|cx| BufferChangeSet { + buffer_id, + base_text: None, + diff_to_buffer: BufferDiff::new( + &buffer.read(cx).text_snapshot(), + ), + unstaged_change_set: None, + }); + change_set_state.unstaged_changes = + Some(unstaged_change_set.downgrade()); + unstaged_change_set + }; + + change_set.update(cx, |change_set, _| { + change_set.unstaged_change_set = Some(unstaged_change_set); + }); change_set_state.uncommitted_changes = Some(change_set.downgrade()) } }; @@ -2515,15 +2504,20 @@ impl BufferStore { shared.change_set = Some(change_set.clone()); } })?; - change_set.read_with(&cx, |change_set, _| { + change_set.read_with(&cx, |change_set, cx| { use proto::open_uncommitted_changes_response::Mode; + let staged_buffer = change_set + .unstaged_change_set + .as_ref() + .and_then(|change_set| change_set.read(cx).base_text.as_ref()); + let mode; let staged_text; let committed_text; if let Some(committed_buffer) = &change_set.base_text { committed_text = Some(committed_buffer.text()); - if let Some(staged_buffer) = &change_set.staged_text { + if let Some(staged_buffer) = staged_buffer { if staged_buffer.remote_id() == committed_buffer.remote_id() { mode = Mode::IndexMatchesHead; staged_text = None; @@ -2538,7 +2532,7 @@ impl BufferStore { } else { mode = Mode::IndexAndHead; committed_text = None; - staged_text = change_set.staged_text.as_ref().map(|buffer| buffer.text()); + staged_text = staged_buffer.as_ref().map(|buffer| buffer.text()); } proto::OpenUncommittedChangesResponse { @@ -2867,8 +2861,7 @@ impl BufferChangeSet { buffer_id: buffer.read(cx).remote_id(), base_text: None, diff_to_buffer: BufferDiff::new(&buffer.read(cx).text_snapshot()), - staged_text: None, - staged_diff: None, + unstaged_change_set: None, } } @@ -2882,8 +2875,7 @@ impl BufferChangeSet { buffer_id: buffer.read(cx).remote_id(), base_text: Some(base_text), diff_to_buffer, - staged_text: None, - staged_diff: None, + unstaged_change_set: None, } } From 22b7042b9e25e7759e2cbe1a492f1fbbf9dba4f0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Marcos?= Date: Wed, 5 Feb 2025 02:52:25 -0300 Subject: [PATCH 015/130] Avoid suggesting 'find' key for linux shortcuts (#24252) this key isn't present in most keyboards, and so, other key combinations should be preferred over this one Release Notes: - N/A --- assets/keymaps/default-linux.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 90c8faaadf1006fd605bbd336ed9aac7d12bb3da..83c716bf2badd71940b4a425824b697b7c9be0b2 100644 --- 
a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -203,8 +203,8 @@ "enter": "search::SelectNextMatch", "shift-enter": "search::SelectPrevMatch", "alt-enter": "search::SelectAllMatches", - "ctrl-f": "search::FocusSearch", "find": "search::FocusSearch", + "ctrl-f": "search::FocusSearch", "ctrl-h": "search::ToggleReplace", "ctrl-l": "search::ToggleSelection" } @@ -290,15 +290,15 @@ "f3": "search::SelectNextMatch", "ctrl-alt-shift-g": "search::SelectPrevMatch", "shift-f3": "search::SelectPrevMatch", - "ctrl-shift-f": "project_search::ToggleFocus", "shift-find": "project_search::ToggleFocus", + "ctrl-shift-f": "project_search::ToggleFocus", "ctrl-alt-shift-h": "search::ToggleReplace", "ctrl-alt-shift-l": "search::ToggleSelection", "alt-enter": "search::SelectAllMatches", "alt-c": "search::ToggleCaseSensitive", "alt-w": "search::ToggleWholeWord", - "alt-ctrl-f": "project_search::ToggleFilters", "alt-find": "project_search::ToggleFilters", + "alt-ctrl-f": "project_search::ToggleFilters", "ctrl-alt-shift-r": "search::ToggleRegex", "ctrl-alt-shift-x": "search::ToggleRegex", "alt-r": "search::ToggleRegex", @@ -687,8 +687,8 @@ "ctrl-delete": ["project_panel::Delete", { "skip_prompt": false }], "alt-ctrl-r": "project_panel::RevealInFileManager", "ctrl-shift-enter": "project_panel::OpenWithSystem", - "ctrl-shift-f": "project_panel::NewSearchInDirectory", "shift-find": "project_panel::NewSearchInDirectory", + "ctrl-shift-f": "project_panel::NewSearchInDirectory", "shift-down": "menu::SelectNext", "shift-up": "menu::SelectPrev", "escape": "menu::Cancel" From 0963401a8d0e7afed461090cb57be8047e1f79c5 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 4 Feb 2025 23:09:41 -0700 Subject: [PATCH 016/130] Git improvements (#24238) - **Base diffs on uncommitted changes** - **Show added files in project diff view** - **Fix git panel optimism** Release Notes: - Git: update diffs to be relative to HEAD instead of the index; to pave the way for showing which hunks are staged --------- Co-authored-by: Cole --- crates/editor/src/editor.rs | 8 +- crates/editor/src/editor_tests.rs | 12 +- crates/editor/src/test/editor_test_context.rs | 2 +- crates/git/src/repository.rs | 6 +- crates/git_ui/src/git_panel.rs | 279 ++++++++---------- crates/git_ui/src/project_diff.rs | 80 +++-- crates/git_ui/src/repository_selector.rs | 2 +- crates/multi_buffer/src/multi_buffer.rs | 19 +- crates/multi_buffer/src/multi_buffer_tests.rs | 6 +- crates/project/src/buffer_store.rs | 37 +-- crates/project/src/git.rs | 21 +- 11 files changed, 241 insertions(+), 231 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 02f4b474b507d3f1491ced3745ecd81d5d79ca59..6e622602559d92aa9d7382a572b87ddbb415bfe7 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -1285,7 +1285,7 @@ impl Editor { let mut code_action_providers = Vec::new(); if let Some(project) = project.clone() { - get_unstaged_changes_for_buffers( + get_uncommitted_changes_for_buffer( &project, buffer.read(cx).all_buffers(), buffer.clone(), @@ -13657,7 +13657,7 @@ impl Editor { let buffer_id = buffer.read(cx).remote_id(); if self.buffer.read(cx).change_set_for(buffer_id).is_none() { if let Some(project) = &self.project { - get_unstaged_changes_for_buffers( + get_uncommitted_changes_for_buffer( project, [buffer.clone()], self.buffer.clone(), @@ -14413,7 +14413,7 @@ impl Editor { } } -fn get_unstaged_changes_for_buffers( +fn get_uncommitted_changes_for_buffer( project: &Entity, buffers: impl 
IntoIterator>, buffer: Entity, @@ -14422,7 +14422,7 @@ fn get_unstaged_changes_for_buffers( let mut tasks = Vec::new(); project.update(cx, |project, cx| { for buffer in buffers { - tasks.push(project.open_unstaged_changes(buffer.clone(), cx)) + tasks.push(project.open_uncommitted_changes(buffer.clone(), cx)) } }); cx.spawn(|mut cx| async move { diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 35942cce25102c59e1f0485d167a0bf3c7968129..fae15adf461e1004460b9ae84e4e57e8e5a45f82 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -5619,13 +5619,13 @@ async fn test_fold_function_bodies(cx: &mut gpui::TestAppContext) { let base_text = r#" impl A { - // this is an unstaged comment + // this is an uncommitted comment fn b() { c(); } - // this is another unstaged comment + // this is another uncommitted comment fn d() { // e @@ -5668,13 +5668,13 @@ async fn test_fold_function_bodies(cx: &mut gpui::TestAppContext) { cx.assert_state_with_diff( " ˇimpl A { - - // this is an unstaged comment + - // this is an uncommitted comment fn b() { c(); } - - // this is another unstaged comment + - // this is another uncommitted comment - fn d() { // e @@ -5691,13 +5691,13 @@ async fn test_fold_function_bodies(cx: &mut gpui::TestAppContext) { let expected_display_text = " impl A { - // this is an unstaged comment + // this is an uncommitted comment fn b() { ⋯ } - // this is another unstaged comment + // this is another uncommitted comment fn d() { ⋯ diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index 6246ec14fb1fe64b4e4852860c3b51d0ffee1834..c51dc0d6a62e5da2f4fc46a7e9b5ecdec1e303d1 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ b/crates/editor/src/test/editor_test_context.rs @@ -290,7 +290,7 @@ impl EditorTestContext { editor.project.as_ref().unwrap().read(cx).fs().as_fake() }); let path = self.update_buffer(|buffer, _| buffer.file().unwrap().path().clone()); - fs.set_index_for_repo( + fs.set_head_for_repo( &Self::root_path().join(".git"), &[(path.into(), diff_base.to_string())], ); diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index a3777401525778ed1e8e79d4bd114792aec89a6e..efedb0d461d70fd028af3bad3c0a58d939acefed 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -265,13 +265,13 @@ impl GitRepository for RealGitRepository { .to_path_buf(); if !paths.is_empty() { - let cmd = new_std_command(&self.git_binary_path) + let status = new_std_command(&self.git_binary_path) .current_dir(&working_directory) .args(["update-index", "--add", "--remove", "--"]) .args(paths.iter().map(|p| p.as_ref())) .status()?; - if !cmd.success() { - return Err(anyhow!("Failed to stage paths: {cmd}")); + if !status.success() { + return Err(anyhow!("Failed to stage paths: {status}")); } } Ok(()) diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 531267f3918c1eb8a584cfa2268f2eab9c21ec2e..1e7ce96cef8defeee09d23ce7c75a42784df94bd 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -12,13 +12,11 @@ use editor::scroll::ScrollbarAutoHide; use editor::{Editor, EditorMode, EditorSettings, MultiBuffer, ShowScrollbar}; use git::repository::RepoPath; use git::status::FileStatus; -use git::{ - CommitAllChanges, CommitChanges, RevertAll, StageAll, ToggleStaged, UnstageAll, COMMIT_MESSAGE, -}; +use git::{CommitAllChanges, CommitChanges, ToggleStaged, COMMIT_MESSAGE}; use gpui::*; use 
language::{Buffer, BufferId}; use menu::{SelectFirst, SelectLast, SelectNext, SelectPrev}; -use project::git::{GitRepo, RepositoryHandle}; +use project::git::{GitEvent, GitRepo, RepositoryHandle}; use project::{CreateOptions, Fs, Project, ProjectPath}; use rpc::proto; use serde::{Deserialize, Serialize}; @@ -43,7 +41,6 @@ actions!( Close, ToggleFocus, OpenMenu, - OpenSelected, FocusEditor, FocusChanges, FillCoAuthors, @@ -76,17 +73,17 @@ struct SerializedGitPanel { width: Option, } -#[derive(Debug, PartialEq, Eq, Clone)] +#[derive(Debug, PartialEq, Eq, Clone, Copy)] enum Section { Changed, - New, + Created, } impl Section { pub fn contains(&self, status: FileStatus) -> bool { match self { Section::Changed => !status.is_created(), - Section::New => status.is_created(), + Section::Created => status.is_created(), } } } @@ -94,7 +91,6 @@ impl Section { #[derive(Debug, PartialEq, Eq, Clone)] struct GitHeaderEntry { header: Section, - all_staged: ToggleState, } impl GitHeaderEntry { @@ -104,7 +100,7 @@ impl GitHeaderEntry { pub fn title(&self) -> &'static str { match self.header { Section::Changed => "Changed", - Section::New => "New", + Section::Created => "New", } } } @@ -126,11 +122,18 @@ impl GitListEntry { #[derive(Debug, PartialEq, Eq, Clone)] pub struct GitStatusEntry { - depth: usize, - display_name: String, - repo_path: RepoPath, - status: FileStatus, - is_staged: Option, + pub(crate) depth: usize, + pub(crate) display_name: String, + pub(crate) repo_path: RepoPath, + pub(crate) status: FileStatus, + pub(crate) is_staged: Option, +} + +pub struct PendingOperation { + finished: bool, + will_become_staged: bool, + repo_paths: HashSet, + op_id: usize, } pub struct GitPanel { @@ -152,9 +155,11 @@ pub struct GitPanel { entries: Vec, entries_by_path: collections::HashMap, width: Option, - pending: HashMap, + pending: Vec, commit_task: Task>, commit_pending: bool, + can_commit: bool, + can_commit_all: bool, } fn commit_message_buffer( @@ -287,9 +292,12 @@ impl GitPanel { &git_state, window, move |this, git_state, event, window, cx| match event { - project::git::Event::RepositoriesUpdated => { + GitEvent::FileSystemUpdated => { + this.schedule_update(false, window, cx); + } + GitEvent::ActiveRepositoryChanged | GitEvent::GitStateUpdated => { this.active_repository = git_state.read(cx).active_repository(); - this.schedule_update(window, cx); + this.schedule_update(true, window, cx); } }, ) @@ -303,7 +311,7 @@ impl GitPanel { pending_serialization: Task::ready(None), entries: Vec::new(), entries_by_path: HashMap::default(), - pending: HashMap::default(), + pending: Vec::new(), current_modifiers: window.modifiers(), width: Some(px(360.)), scrollbar_state: ScrollbarState::new(scroll_handle.clone()) @@ -321,8 +329,10 @@ impl GitPanel { commit_editor, project, workspace, + can_commit: false, + can_commit_all: false, }; - git_panel.schedule_update(window, cx); + git_panel.schedule_update(false, window, cx); git_panel.show_scrollbar = git_panel.should_show_scrollbar(cx); git_panel }); @@ -617,7 +627,7 @@ impl GitPanel { } } GitListEntry::Header(section) => { - let goal_staged_state = !section.all_staged.selected(); + let goal_staged_state = !self.header_state(section.header).selected(); let entries = self .entries .iter() @@ -629,12 +639,17 @@ impl GitPanel { .map(|status_entry| status_entry.repo_path) .collect::>(); - (!section.all_staged.selected(), entries) + (goal_staged_state, entries) } }; - for repo_path in repo_paths.iter() { - self.pending.insert(repo_path.clone(), stage); - } + + let op_id = 
self.pending.iter().map(|p| p.op_id).max().unwrap_or(0) + 1; + self.pending.push(PendingOperation { + op_id, + will_become_staged: stage, + repo_paths: repo_paths.iter().cloned().collect(), + finished: false, + }); cx.spawn({ let repo_paths = repo_paths.clone(); @@ -647,9 +662,9 @@ impl GitPanel { }; this.update(&mut cx, |this, cx| { - for repo_path in repo_paths { - if this.pending.get(&repo_path) == Some(&stage) { - this.pending.remove(&repo_path); + for pending in this.pending.iter_mut() { + if pending.op_id == op_id { + pending.finished = true } } result @@ -696,67 +711,6 @@ impl GitPanel { cx.emit(Event::OpenedEntry { path }); } - fn stage_all(&mut self, _: &git::StageAll, _window: &mut Window, cx: &mut Context) { - let Some(active_repository) = self.active_repository.as_ref().cloned() else { - return; - }; - let mut pending_paths = Vec::new(); - for entry in self.entries.iter() { - if let Some(status_entry) = entry.status_entry() { - self.pending.insert(status_entry.repo_path.clone(), true); - pending_paths.push(status_entry.repo_path.clone()); - } - } - - cx.spawn(|this, mut cx| async move { - if let Err(e) = active_repository.stage_all().await { - this.update(&mut cx, |this, cx| { - this.show_err_toast(e, cx); - }) - .ok(); - }; - this.update(&mut cx, |this, _cx| { - for repo_path in pending_paths { - this.pending.remove(&repo_path); - } - }) - }) - .detach(); - } - - fn unstage_all(&mut self, _: &git::UnstageAll, _window: &mut Window, cx: &mut Context) { - let Some(active_repository) = self.active_repository.as_ref().cloned() else { - return; - }; - let mut pending_paths = Vec::new(); - for entry in self.entries.iter() { - if let Some(status_entry) = entry.status_entry() { - self.pending.insert(status_entry.repo_path.clone(), false); - pending_paths.push(status_entry.repo_path.clone()); - } - } - - cx.spawn(|this, mut cx| async move { - if let Err(e) = active_repository.unstage_all().await { - this.update(&mut cx, |this, cx| { - this.show_err_toast(e, cx); - }) - .ok(); - }; - this.update(&mut cx, |this, _cx| { - for repo_path in pending_paths { - this.pending.remove(&repo_path); - } - }) - }) - .detach(); - } - - fn discard_all(&mut self, _: &git::RevertAll, _window: &mut Window, _cx: &mut Context) { - // TODO: Implement discard all - println!("Discard all triggered"); - } - /// Commit all staged changes fn commit_changes( &mut self, @@ -768,7 +722,7 @@ impl GitPanel { let Some(active_repository) = self.active_repository.clone() else { return; }; - if !active_repository.can_commit(false) { + if !self.can_commit { return; } if self.commit_editor.read(cx).is_empty(cx) { @@ -811,7 +765,7 @@ impl GitPanel { let Some(active_repository) = self.active_repository.clone() else { return; }; - if !active_repository.can_commit(true) { + if !self.can_commit_all { return; } if self.commit_editor.read(cx).is_empty(cx) { @@ -926,7 +880,12 @@ impl GitPanel { }); } - fn schedule_update(&mut self, window: &mut Window, cx: &mut Context) { + fn schedule_update( + &mut self, + clear_pending: bool, + window: &mut Window, + cx: &mut Context, + ) { let project = self.project.clone(); let handle = cx.entity().downgrade(); self.update_visible_entries_task = cx.spawn_in(window, |_, mut cx| async move { @@ -957,6 +916,9 @@ impl GitPanel { git_panel .update_in(&mut cx, |git_panel, window, cx| { git_panel.update_visible_entries(cx); + if clear_pending { + git_panel.clear_pending(); + } git_panel.commit_editor = cx.new(|cx| commit_message_editor(commit_message_buffer, window, cx)); }) @@ -965,6 +927,10 @@ impl 
GitPanel { }); } + fn clear_pending(&mut self) { + self.pending.retain(|v| !v.finished) + } + fn update_visible_entries(&mut self, cx: &mut Context) { self.entries.clear(); self.entries_by_path.clear(); @@ -980,12 +946,11 @@ impl GitPanel { // First pass - collect all paths let path_set = HashSet::from_iter(repo.status().map(|entry| entry.repo_path)); - // Second pass - create entries with proper depth calculation - let mut new_any_staged = false; - let mut new_all_staged = true; - let mut changed_any_staged = false; - let mut changed_all_staged = true; + let mut has_changed_checked_boxes = false; + let mut has_changed = false; + let mut has_added_checked_boxes = false; + // Second pass - create entries with proper depth calculation for entry in repo.status() { let (depth, difference) = Self::calculate_depth_and_difference(&entry.repo_path, &path_set); @@ -993,15 +958,6 @@ impl GitPanel { let is_new = entry.status.is_created(); let is_staged = entry.status.is_staged(); - let new_is_staged = is_staged.unwrap_or(false); - if is_new { - new_any_staged |= new_is_staged; - new_all_staged &= new_is_staged; - } else { - changed_any_staged |= new_is_staged; - changed_all_staged &= new_is_staged; - } - let display_name = if difference > 1 { // Show partial path for deeply nested files entry @@ -1030,8 +986,15 @@ impl GitPanel { }; if is_new { + if entry.is_staged != Some(false) { + has_added_checked_boxes = true + } new_entries.push(entry); } else { + has_changed = true; + if entry.is_staged != Some(false) { + has_changed_checked_boxes = true + } changed_entries.push(entry); } } @@ -1041,11 +1004,8 @@ impl GitPanel { new_entries.sort_by(|a, b| a.repo_path.cmp(&b.repo_path)); if changed_entries.len() > 0 { - let toggle_state = - ToggleState::from_any_and_all(changed_any_staged, changed_all_staged); self.entries.push(GitListEntry::Header(GitHeaderEntry { header: Section::Changed, - all_staged: toggle_state, })); self.entries.extend( changed_entries @@ -1054,10 +1014,8 @@ impl GitPanel { ); } if new_entries.len() > 0 { - let toggle_state = ToggleState::from_any_and_all(new_any_staged, new_all_staged); self.entries.push(GitListEntry::Header(GitHeaderEntry { - header: Section::New, - all_staged: toggle_state, + header: Section::Created, })); self.entries .extend(new_entries.into_iter().map(GitListEntry::GitStatusEntry)); @@ -1068,12 +1026,45 @@ impl GitPanel { self.entries_by_path.insert(status_entry.repo_path, ix); } } + self.can_commit = has_changed_checked_boxes || has_added_checked_boxes; + self.can_commit_all = has_changed || has_added_checked_boxes; self.select_first_entry_if_none(cx); cx.notify(); } + fn header_state(&self, header_type: Section) -> ToggleState { + let mut count = 0; + let mut staged_count = 0; + 'outer: for entry in &self.entries { + let Some(entry) = entry.status_entry() else { + continue; + }; + if entry.status.is_created() != (header_type == Section::Created) { + continue; + } + count += 1; + for pending in self.pending.iter().rev() { + if pending.repo_paths.contains(&entry.repo_path) { + if pending.will_become_staged { + staged_count += 1; + } + continue 'outer; + } + } + staged_count += entry.status.is_staged().unwrap_or(false) as usize; + } + + if staged_count == 0 { + ToggleState::Unselected + } else if count == staged_count { + ToggleState::Selected + } else { + ToggleState::Indeterminate + } + } + fn show_err_toast(&self, e: anyhow::Error, cx: &mut App) { let Some(workspace) = self.workspace.upgrade() else { return; @@ -1089,7 +1080,6 @@ impl GitPanel { } } -// GitPanel 
–– Render impl GitPanel { pub fn panel_button( &self, @@ -1199,21 +1189,13 @@ impl GitPanel { pub fn render_commit_editor( &self, name_and_email: Option<(SharedString, SharedString)>, - can_commit: bool, cx: &Context, ) -> impl IntoElement { let editor = self.commit_editor.clone(); - let can_commit = can_commit && !editor.read(cx).is_empty(cx); + let can_commit = !self.commit_pending && self.can_commit && !editor.read(cx).is_empty(cx); + let can_commit_all = + !self.commit_pending && self.can_commit_all && !editor.read(cx).is_empty(cx); let editor_focus_handle = editor.read(cx).focus_handle(cx).clone(); - let (can_commit, can_commit_all) = - self.active_repository - .as_ref() - .map_or((false, false), |active_repository| { - ( - can_commit && active_repository.can_commit(false), - can_commit && active_repository.can_commit(true), - ) - }); let focus_handle_1 = self.focus_handle(cx).clone(); let focus_handle_2 = self.focus_handle(cx).clone(); @@ -1466,7 +1448,7 @@ impl GitPanel { has_write_access: bool, cx: &Context, ) -> AnyElement { - let checkbox = Checkbox::new(header.title(), header.all_staged) + let checkbox = Checkbox::new(header.title(), self.header_state(header.header)) .disabled(!has_write_access) .fill() .elevation(ElevationIndex::Surface); @@ -1510,7 +1492,14 @@ impl GitPanel { .map(|name| name.to_string_lossy().into_owned()) .unwrap_or_else(|| entry.repo_path.to_string_lossy().into_owned()); - let pending = self.pending.get(&entry.repo_path).copied(); + let pending = self.pending.iter().rev().find_map(|pending| { + if pending.repo_paths.contains(&entry.repo_path) { + Some(pending.will_become_staged) + } else { + None + } + }); + let repo_path = entry.repo_path.clone(); let selected = self.selected_entry == Some(ix); let status_style = GitPanelSettings::get_global(cx).status_style; @@ -1559,13 +1548,19 @@ impl GitPanel { window, cx, ); + cx.stop_propagation(); }) }); let start_slot = h_flex() + .id(("start-slot", ix)) .gap(DynamicSpacing::Base04.rems(cx)) .child(checkbox) - .child(git_status_icon(status, cx)); + .child(git_status_icon(status, cx)) + .on_mouse_down(MouseButton::Left, |_, _, cx| { + // prevent the list item active state triggering when toggling checkbox + cx.stop_propagation(); + }); let id = ElementId::Name(format!("entry_{}", display_name).into()); @@ -1581,27 +1576,14 @@ impl GitPanel { .toggle_state(selected) .disabled(!has_write_access) .on_click({ - let repo_path = entry.repo_path.clone(); + let entry = entry.clone(); cx.listener(move |this, _, window, cx| { this.selected_entry = Some(ix); - window.dispatch_action(Box::new(OpenSelected), cx); - cx.notify(); let Some(workspace) = this.workspace.upgrade() else { return; }; - let Some(git_repo) = this.active_repository.as_ref() else { - return; - }; - let Some(path) = git_repo - .repo_path_to_project_path(&repo_path) - .and_then(|project_path| { - this.project.read(cx).absolute_path(&project_path, cx) - }) - else { - return; - }; workspace.update(cx, |workspace, cx| { - ProjectDiff::deploy_at(workspace, Some(path.into()), window, cx); + ProjectDiff::deploy_at(workspace, Some(entry.clone()), window, cx); }) }) }) @@ -1691,17 +1673,6 @@ impl Render for GitPanel { this.on_action(cx.listener(|this, &ToggleStaged, window, cx| { this.toggle_staged_for_selected(&ToggleStaged, window, cx) })) - .on_action( - cx.listener(|this, &StageAll, window, cx| { - this.stage_all(&StageAll, window, cx) - }), - ) - .on_action(cx.listener(|this, &UnstageAll, window, cx| { - this.unstage_all(&UnstageAll, window, cx) - })) - 
.on_action(cx.listener(|this, &RevertAll, window, cx| { - this.discard_all(&RevertAll, window, cx) - })) .when(can_commit, |git_panel| { git_panel .on_action({ @@ -1764,7 +1735,7 @@ impl Render for GitPanel { self.render_empty_state(cx).into_any_element() }) .child(self.render_divider(cx)) - .child(self.render_commit_editor(name_and_email, can_commit, cx)) + .child(self.render_commit_editor(name_and_email, cx)) } } diff --git a/crates/git_ui/src/project_diff.rs b/crates/git_ui/src/project_diff.rs index 8d8b025562880a55cf55d8d93c8071ab2e26be01..789dc8c21dd8260360e040ae5678aafa2e6601f5 100644 --- a/crates/git_ui/src/project_diff.rs +++ b/crates/git_ui/src/project_diff.rs @@ -1,8 +1,4 @@ -use std::{ - any::{Any, TypeId}, - path::Path, - sync::Arc, -}; +use std::any::{Any, TypeId}; use anyhow::Result; use collections::HashSet; @@ -14,7 +10,7 @@ use gpui::{ FocusHandle, Focusable, Render, Subscription, Task, WeakEntity, }; use language::{Anchor, Buffer, Capability, OffsetRangeExt}; -use multi_buffer::MultiBuffer; +use multi_buffer::{MultiBuffer, PathKey}; use project::{buffer_store::BufferChangeSet, git::GitState, Project, ProjectPath}; use theme::ActiveTheme; use ui::prelude::*; @@ -25,7 +21,7 @@ use workspace::{ ItemNavHistory, ToolbarItemLocation, Workspace, }; -use crate::git_panel::GitPanel; +use crate::git_panel::{GitPanel, GitStatusEntry}; actions!(git, [Diff]); @@ -37,18 +33,21 @@ pub(crate) struct ProjectDiff { workspace: WeakEntity, focus_handle: FocusHandle, update_needed: postage::watch::Sender<()>, - pending_scroll: Option>, + pending_scroll: Option, _task: Task>, _subscription: Subscription, } struct DiffBuffer { - abs_path: Arc, + path_key: PathKey, buffer: Entity, change_set: Entity, } +const CHANGED_NAMESPACE: &'static str = "0"; +const ADDED_NAMESPACE: &'static str = "1"; + impl ProjectDiff { pub(crate) fn register( _: &mut Workspace, @@ -72,7 +71,7 @@ impl ProjectDiff { pub fn deploy_at( workspace: &mut Workspace, - path: Option>, + entry: Option, window: &mut Window, cx: &mut Context, ) { @@ -92,9 +91,9 @@ impl ProjectDiff { ); project_diff }; - if let Some(path) = path { + if let Some(entry) = entry { project_diff.update(cx, |project_diff, cx| { - project_diff.scroll_to(path, window, cx); + project_diff.scroll_to(entry, window, cx); }) } } @@ -126,10 +125,8 @@ impl ProjectDiff { let git_state_subscription = cx.subscribe_in( &git_state, window, - move |this, _git_state, event, _window, _cx| match event { - project::git::Event::RepositoriesUpdated => { - *this.update_needed.borrow_mut() = (); - } + move |this, _git_state, _event, _window, _cx| { + *this.update_needed.borrow_mut() = (); }, ); @@ -155,15 +152,39 @@ impl ProjectDiff { } } - pub fn scroll_to(&mut self, path: Arc, window: &mut Window, cx: &mut Context) { - if let Some(position) = self.multibuffer.read(cx).location_for_path(&path, cx) { + pub fn scroll_to( + &mut self, + entry: GitStatusEntry, + window: &mut Window, + cx: &mut Context, + ) { + let Some(git_repo) = self.git_state.read(cx).active_repository() else { + return; + }; + + let Some(path) = git_repo + .repo_path_to_project_path(&entry.repo_path) + .and_then(|project_path| self.project.read(cx).absolute_path(&project_path, cx)) + else { + return; + }; + let path_key = if entry.status.is_created() { + PathKey::namespaced(ADDED_NAMESPACE, &path) + } else { + PathKey::namespaced(CHANGED_NAMESPACE, &path) + }; + self.scroll_to_path(path_key, window, cx) + } + + fn scroll_to_path(&mut self, path_key: PathKey, window: &mut Window, cx: &mut Context) { + if let 
Some(position) = self.multibuffer.read(cx).location_for_path(&path_key, cx) { self.editor.update(cx, |editor, cx| { editor.change_selections(Some(Autoscroll::focused()), window, cx, |s| { s.select_ranges([position..position]); }) }) } else { - self.pending_scroll = Some(path); + self.pending_scroll = Some(path_key); } } @@ -223,9 +244,14 @@ impl ProjectDiff { let Some(abs_path) = self.project.read(cx).absolute_path(&project_path, cx) else { continue; }; - let abs_path = Arc::from(abs_path); + // Craft some artificial paths so that created entries will appear last. + let path_key = if entry.status.is_created() { + PathKey::namespaced(ADDED_NAMESPACE, &abs_path) + } else { + PathKey::namespaced(CHANGED_NAMESPACE, &abs_path) + }; - previous_paths.remove(&abs_path); + previous_paths.remove(&path_key); let load_buffer = self .project .update(cx, |project, cx| project.open_buffer(project_path, cx)); @@ -235,11 +261,11 @@ impl ProjectDiff { let buffer = load_buffer.await?; let changes = project .update(&mut cx, |project, cx| { - project.open_unstaged_changes(buffer.clone(), cx) + project.open_uncommitted_changes(buffer.clone(), cx) })? .await?; Ok(DiffBuffer { - abs_path, + path_key, buffer, change_set: changes, }) @@ -259,7 +285,7 @@ impl ProjectDiff { window: &mut Window, cx: &mut Context, ) { - let abs_path = diff_buffer.abs_path; + let path_key = diff_buffer.path_key; let buffer = diff_buffer.buffer; let change_set = diff_buffer.change_set; @@ -272,15 +298,15 @@ impl ProjectDiff { self.multibuffer.update(cx, |multibuffer, cx| { multibuffer.set_excerpts_for_path( - abs_path.clone(), + path_key.clone(), buffer, diff_hunk_ranges, editor::DEFAULT_MULTIBUFFER_CONTEXT, cx, ); }); - if self.pending_scroll.as_ref() == Some(&abs_path) { - self.scroll_to(abs_path, window, cx); + if self.pending_scroll.as_ref() == Some(&path_key) { + self.scroll_to_path(path_key, window, cx); } } diff --git a/crates/git_ui/src/repository_selector.rs b/crates/git_ui/src/repository_selector.rs index 6ec2dab6c6bc75a95c82381106ace84c8596deff..9c7f5f4e077888a0e9f456ea4d6918750365c93a 100644 --- a/crates/git_ui/src/repository_selector.rs +++ b/crates/git_ui/src/repository_selector.rs @@ -49,7 +49,7 @@ impl RepositorySelector { fn handle_project_git_event( &mut self, git_state: &Entity, - _event: &project::git::Event, + _event: &project::git::GitEvent, window: &mut Window, cx: &mut Context, ) { diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index cc8afcb234f66c7725d4018633719f41e9bdf778..b523bbb92160108b2658c232214f591b882e36c4 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -67,7 +67,7 @@ pub struct MultiBuffer { /// Contains the state of the buffers being edited buffers: RefCell>, // only used by consumers using `set_excerpts_for_buffer` - buffers_by_path: BTreeMap, Vec>, + buffers_by_path: BTreeMap>, diff_bases: HashMap, all_diff_hunks_expanded: bool, subscriptions: Topic, @@ -143,6 +143,15 @@ impl MultiBufferDiffHunk { } } +#[derive(PartialEq, Eq, Ord, PartialOrd, Clone, Hash, Debug)] +pub struct PathKey(String); + +impl PathKey { + pub fn namespaced(namespace: &str, path: &Path) -> Self { + Self(format!("{}/{}", namespace, path.to_string_lossy())) + } +} + pub type MultiBufferPoint = Point; type ExcerptOffset = TypedOffset; type ExcerptPoint = TypedPoint; @@ -1395,7 +1404,7 @@ impl MultiBuffer { anchor_ranges } - pub fn location_for_path(&self, path: &Arc, cx: &App) -> Option { + pub fn location_for_path(&self, path: 
&PathKey, cx: &App) -> Option { let excerpt_id = self.buffers_by_path.get(path)?.first()?; let snapshot = self.snapshot(cx); let excerpt = snapshot.excerpt(*excerpt_id)?; @@ -1408,7 +1417,7 @@ impl MultiBuffer { pub fn set_excerpts_for_path( &mut self, - path: Arc, + path: PathKey, buffer: Entity, ranges: Vec>, context_line_count: u32, @@ -1517,11 +1526,11 @@ impl MultiBuffer { } } - pub fn paths(&self) -> impl Iterator> + '_ { + pub fn paths(&self) -> impl Iterator + '_ { self.buffers_by_path.keys().cloned() } - pub fn remove_excerpts_for_path(&mut self, path: Arc, cx: &mut Context) { + pub fn remove_excerpts_for_path(&mut self, path: PathKey, cx: &mut Context) { if let Some(to_remove) = self.buffers_by_path.remove(&path) { self.remove_excerpts(to_remove, cx) } diff --git a/crates/multi_buffer/src/multi_buffer_tests.rs b/crates/multi_buffer/src/multi_buffer_tests.rs index 61094a1b4f132dc7c67aac65f068be93716ea207..2e13fa4558892f987dc5110cbfc6c18007c3d59f 100644 --- a/crates/multi_buffer/src/multi_buffer_tests.rs +++ b/crates/multi_buffer/src/multi_buffer_tests.rs @@ -6,7 +6,7 @@ use language::{Buffer, Rope}; use parking_lot::RwLock; use rand::prelude::*; use settings::SettingsStore; -use std::{env, path::PathBuf}; +use std::env; use util::test::sample_text; #[ctor::ctor] @@ -1596,7 +1596,7 @@ fn test_set_excerpts_for_buffer(cx: &mut TestAppContext) { cx, ) }); - let path1: Arc = Arc::from(PathBuf::from("path1")); + let path1: PathKey = PathKey::namespaced("0", Path::new("/")); let buf2 = cx.new(|cx| { Buffer::local( indoc! { @@ -1615,7 +1615,7 @@ fn test_set_excerpts_for_buffer(cx: &mut TestAppContext) { cx, ) }); - let path2: Arc = Arc::from(PathBuf::from("path2")); + let path2 = PathKey::namespaced("x", Path::new("/")); let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); multibuffer.update(cx, |multibuffer, cx| { diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index bbfaa1e4783bbea8659ab1d201582995d04b31bc..10bc83da7a330bbf2288fab263663b5e663610e4 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -149,37 +149,32 @@ impl BufferChangeSetState { ) -> oneshot::Receiver<()> { match diff_bases_change { DiffBasesChange::SetIndex(index) => { - self.index_text = index.map(|mut text| { - text::LineEnding::normalize(&mut text); - Arc::new(text) - }); + let mut index = index.unwrap_or_default(); + text::LineEnding::normalize(&mut index); + self.index_text = Some(Arc::new(index)); self.index_changed = true; } DiffBasesChange::SetHead(head) => { - self.head_text = head.map(|mut text| { - text::LineEnding::normalize(&mut text); - Arc::new(text) - }); + let mut head = head.unwrap_or_default(); + text::LineEnding::normalize(&mut head); + self.head_text = Some(Arc::new(head)); self.head_changed = true; } - DiffBasesChange::SetBoth(mut text) => { - if let Some(text) = text.as_mut() { - text::LineEnding::normalize(text); - } - self.head_text = text.map(Arc::new); + DiffBasesChange::SetBoth(text) => { + let mut text = text.unwrap_or_default(); + text::LineEnding::normalize(&mut text); + self.head_text = Some(Arc::new(text)); self.index_text = self.head_text.clone(); self.head_changed = true; self.index_changed = true; } DiffBasesChange::SetEach { index, head } => { - self.index_text = index.map(|mut text| { - text::LineEnding::normalize(&mut text); - Arc::new(text) - }); - self.head_text = head.map(|mut text| { - text::LineEnding::normalize(&mut text); - Arc::new(text) - }); + let mut index = 
index.unwrap_or_default(); + text::LineEnding::normalize(&mut index); + let mut head = head.unwrap_or_default(); + text::LineEnding::normalize(&mut head); + self.index_text = Some(Arc::new(index)); + self.head_text = Some(Arc::new(head)); self.head_changed = true; self.index_changed = true; } diff --git a/crates/project/src/git.rs b/crates/project/src/git.rs index af86d1d14b1367d527d7d79b288f90885bb6d6cc..90dff1ed93c797948fd89726efd3d80ed36f47bb 100644 --- a/crates/project/src/git.rs +++ b/crates/project/src/git.rs @@ -69,11 +69,13 @@ enum Message { Unstage(GitRepo, Vec), } -pub enum Event { - RepositoriesUpdated, +pub enum GitEvent { + ActiveRepositoryChanged, + FileSystemUpdated, + GitStateUpdated, } -impl EventEmitter for GitState {} +impl EventEmitter for GitState {} impl GitState { pub fn new( @@ -103,7 +105,7 @@ impl GitState { fn on_worktree_store_event( &mut self, worktree_store: Entity, - _event: &WorktreeStoreEvent, + event: &WorktreeStoreEvent, cx: &mut Context<'_, Self>, ) { // TODO inspect the event @@ -172,7 +174,14 @@ impl GitState { self.repositories = new_repositories; self.active_index = new_active_index; - cx.emit(Event::RepositoriesUpdated); + match event { + WorktreeStoreEvent::WorktreeUpdatedGitRepositories(_) => { + cx.emit(GitEvent::GitStateUpdated); + } + _ => { + cx.emit(GitEvent::FileSystemUpdated); + } + } } pub fn all_repositories(&self) -> Vec { @@ -314,7 +323,7 @@ impl RepositoryHandle { return; }; git_state.active_index = Some(index); - cx.emit(Event::RepositoriesUpdated); + cx.emit(GitEvent::ActiveRepositoryChanged); }); } From 5a955e208ce514e4c59b6318e9affc7b380ed01a Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Wed, 5 Feb 2025 02:23:02 -0500 Subject: [PATCH 017/130] Fix panic when deleting an empty line after a deleted hunk (#24255) Release Notes: - Fix a panic when deleting text after a deletion hunk --- crates/editor/src/display_map.rs | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 538ec3d3b7a829d65c710d718f70ef6e98ef2105..4e3a7e646040b83adfdf4a9dc5f7264f68cc5813 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -1142,12 +1142,7 @@ impl DisplaySnapshot { } pub fn line_indent_for_buffer_row(&self, buffer_row: MultiBufferRow) -> LineIndent { - let (buffer, range) = self - .buffer_snapshot - .buffer_line_for_row(buffer_row) - .unwrap(); - - buffer.line_indent_for_row(range.start.row) + self.buffer_snapshot.line_indent_for_row(buffer_row) } pub fn line_len(&self, row: DisplayRow) -> u32 { From fef567bb4984cf55d1e354d4f07ca10425e4613c Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Wed, 5 Feb 2025 00:25:03 -0700 Subject: [PATCH 018/130] Remove extra space in `zed --version` string for non-stable (#24254) Release Notes: - N/A --- crates/cli/src/main.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs index c6562ea64abe7ddbf482cfde41036d18c2abfb20..b3436bc4e5310f83b1e05c06859f5d2eaa50e3ca 100644 --- a/crates/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -343,7 +343,7 @@ mod linux { if *RELEASE_CHANNEL == "stable" { "".to_string() } else { - format!(" {} ", *RELEASE_CHANNEL) + format!("{} ", *RELEASE_CHANNEL) }, option_env!("RELEASE_VERSION").unwrap_or_default(), self.0.display(), From 88b5f069fb440ef08a42f04f1cdf26c85b832c8e Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Wed, 5 Feb 2025 13:27:57 +0100 
Subject: [PATCH 019/130] lsp: Add support for default rename behavior in prepareRename request (#24246) Fixes #24184 Release Notes: - Fixed renaming not working with some language servers (e.g. hls) --- crates/lsp/src/lsp.rs | 3 +++ crates/project/src/lsp_command.rs | 37 ++++++++++++++++++------------------- 2 files changed, 21 insertions(+), 19 deletions(-) diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index baff4c343c13f61dc129d085a4928e6ee4edb039..edbe564b795beae55aaf94d543120fedb1c17d55 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -724,6 +724,9 @@ impl LanguageServer { }), rename: Some(RenameClientCapabilities { prepare_support: Some(true), + prepare_support_default_behavior: Some( + PrepareSupportDefaultBehavior::IDENTIFIER, + ), ..Default::default() }), hover: Some(HoverClientCapabilities { diff --git a/crates/project/src/lsp_command.rs b/crates/project/src/lsp_command.rs index c632f9aca16847eb8cd964d03bec581bccdf33a2..68f1522af52ea8dcb2386271c27c493f4a232006 100644 --- a/crates/project/src/lsp_command.rs +++ b/crates/project/src/lsp_command.rs @@ -299,28 +299,27 @@ impl LspCommand for PrepareRename { _: LanguageServerId, mut cx: AsyncApp, ) -> Result { - buffer.update(&mut cx, |buffer, _| { - match message { - Some(lsp::PrepareRenameResponse::Range(range)) - | Some(lsp::PrepareRenameResponse::RangeWithPlaceholder { range, .. }) => { - let Range { start, end } = range_from_lsp(range); - if buffer.clip_point_utf16(start, Bias::Left) == start.0 - && buffer.clip_point_utf16(end, Bias::Left) == end.0 - { - Ok(PrepareRenameResponse::Success( - buffer.anchor_after(start)..buffer.anchor_before(end), - )) - } else { - Ok(PrepareRenameResponse::InvalidPosition) - } - } - Some(lsp::PrepareRenameResponse::DefaultBehavior { .. }) => { - Err(anyhow!("Invalid for language server to send a `defaultBehavior` response to `prepareRename`")) - } - None => { + buffer.update(&mut cx, |buffer, _| match message { + Some(lsp::PrepareRenameResponse::Range(range)) + | Some(lsp::PrepareRenameResponse::RangeWithPlaceholder { range, .. }) => { + let Range { start, end } = range_from_lsp(range); + if buffer.clip_point_utf16(start, Bias::Left) == start.0 + && buffer.clip_point_utf16(end, Bias::Left) == end.0 + { + Ok(PrepareRenameResponse::Success( + buffer.anchor_after(start)..buffer.anchor_before(end), + )) + } else { + Ok(PrepareRenameResponse::InvalidPosition) } } + Some(lsp::PrepareRenameResponse::DefaultBehavior { .. }) => { + let snapshot = buffer.snapshot(); + let (range, _) = snapshot.surrounding_word(self.position); + let range = snapshot.anchor_after(range.start)..snapshot.anchor_before(range.end); + Ok(PrepareRenameResponse::Success(range)) + } + None => Ok(PrepareRenameResponse::InvalidPosition), })? } From f5e8048fcb737c402b2ebce04b5bb3696591c7fa Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Wed, 5 Feb 2025 10:39:20 -0300 Subject: [PATCH 020/130] edit prediction: Allow enabling OSS data collection with no project open (#24265) This was a leftover from when we were persisting a per-project setting.
Release Notes: - N/A --- crates/zeta/src/onboarding_modal.rs | 28 ++-------------------------- 1 file changed, 2 insertions(+), 26 deletions(-) diff --git a/crates/zeta/src/onboarding_modal.rs b/crates/zeta/src/onboarding_modal.rs index d2eec7c0b09c9f52451b699087d228cf6673eb38..b9e214508cd122e7ae666226af62efdcf233d2f7 100644 --- a/crates/zeta/src/onboarding_modal.rs +++ b/crates/zeta/src/onboarding_modal.rs @@ -1,6 +1,6 @@ use std::{sync::Arc, time::Duration}; -use crate::{Zeta, ZED_PREDICT_DATA_COLLECTION_CHOICE}; +use crate::ZED_PREDICT_DATA_COLLECTION_CHOICE; use client::{Client, UserStore}; use db::kvp::KEY_VALUE_STORE; use feature_flags::FeatureFlagAppExt as _; @@ -11,10 +11,9 @@ use gpui::{ }; use language::language_settings::{AllLanguageSettings, InlineCompletionProvider}; use settings::{update_settings_file, Settings}; -use ui::{prelude::*, Checkbox, TintColor, Tooltip}; +use ui::{prelude::*, Checkbox, TintColor}; use util::ResultExt; use workspace::{notifications::NotifyTaskExt, ModalView, Workspace}; -use worktree::Worktree; /// Introduces user to Zed's Edit Prediction feature and terms of service pub struct ZedPredictModal { @@ -26,7 +25,6 @@ pub struct ZedPredictModal { terms_of_service: bool, data_collection_expanded: bool, data_collection_opted_in: bool, - worktrees: Vec>, } #[derive(PartialEq, Eq)] @@ -48,8 +46,6 @@ impl ZedPredictModal { window: &mut Window, cx: &mut Context, ) { - let worktrees = workspace.visible_worktrees(cx).collect(); - workspace.toggle_modal(window, cx, |_window, cx| Self { user_store, client, @@ -59,7 +55,6 @@ impl ZedPredictModal { terms_of_service: false, data_collection_expanded: false, data_collection_opted_in: false, - worktrees, }); } @@ -107,13 +102,6 @@ impl ZedPredictModal { .inline_completion_provider = Some(InlineCompletionProvider::Zed); }); - if this.worktrees.is_empty() { - cx.emit(DismissEvent); - return; - } - - Zeta::register(None, this.client.clone(), this.user_store.clone(), cx); - cx.emit(DismissEvent); }) }) @@ -336,17 +324,6 @@ impl Render for ZedPredictModal { ) .label("Optionally share training data (OSS-only).") .fill() - .when(self.worktrees.is_empty(), |element| { - element.disabled(true).tooltip(move |window, cx| { - Tooltip::with_meta( - "No Project Open", - None, - "Open a project to enable this option.", - window, - cx, - ) - }) - }) .on_click(cx.listener( move |this, state, _window, cx| { this.data_collection_opted_in = @@ -355,7 +332,6 @@ impl Render for ZedPredictModal { }, )), ) - // TODO: show each worktree if more than 1 .child( Button::new("learn-more", "Learn More") .icon(accordion_icons.0) From c252b5db169f5397f392421ea5eba614820465b0 Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Wed, 5 Feb 2025 11:06:12 -0300 Subject: [PATCH 021/130] Accept edit predictions with `alt-tab` in addition to `tab` (#24272) When you have an edit prediction available, you can now also accept it with `alt-tab` (or `alt-enter` on Linux) even if you don't have an LSP completions menu open. This is meant to lower the mental load when going from one mode to another. 
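If you would rather accept predictions on a different key, a minimal user keymap override is sketched below. This is only an illustration, not part of this patch: it assumes your user `keymap.json` accepts the same context and binding shape as the bundled defaults changed here, it reuses the `Editor && inline_completion` context and `editor::AcceptInlineCompletion` action from the diff below, and the `alt-y` key is just a placeholder.

```json
[
  {
    // Accept a pending edit prediction even when no completions menu is showing.
    "context": "Editor && inline_completion",
    "bindings": {
      "alt-y": "editor::AcceptInlineCompletion"
    }
  }
]
```

As the comment in the default keymap notes, changing the modifier can currently break accepting a prediction while an LSP completions menu is open, so pick the key with that limitation in mind.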
Release Notes: - N/A --- assets/keymaps/default-linux.json | 12 ++++++------ assets/keymaps/default-macos.json | 12 ++++++------ 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 83c716bf2badd71940b4a425824b697b7c9be0b2..ac4d604feea50dfadece8fa73157cdb57a2f16a5 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -503,17 +503,17 @@ } }, { - "context": "Editor && inline_completion && !showing_completions", - "use_key_equivalents": true, + "context": "Editor && inline_completion", "bindings": { - "tab": "editor::AcceptInlineCompletion" + // Changing the modifier currently breaks accepting while you also an LSP completions menu open + "alt-enter": "editor::AcceptInlineCompletion" } }, { - "context": "Editor && inline_completion && showing_completions", + "context": "Editor && inline_completion && !showing_completions", + "use_key_equivalents": true, "bindings": { - // Currently, changing this binding breaks the preview behavior - "alt-enter": "editor::AcceptInlineCompletion" + "tab": "editor::AcceptInlineCompletion" } }, { diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 934373b675727104875fa6ddaeb67ff98e053d21..e865bc14ad44caddd559cd8b2778e329520e3a9a 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -580,17 +580,17 @@ } }, { - "context": "Editor && inline_completion && !showing_completions", - "use_key_equivalents": true, + "context": "Editor && inline_completion", "bindings": { - "tab": "editor::AcceptInlineCompletion" + // Changing the modifier currently breaks accepting while you also an LSP completions menu open + "alt-tab": "editor::AcceptInlineCompletion" } }, { - "context": "Editor && inline_completion && showing_completions", + "context": "Editor && inline_completion && !showing_completions", + "use_key_equivalents": true, "bindings": { - // Currently, changing this binding breaks the preview behavior - "alt-tab": "editor::AcceptInlineCompletion" + "tab": "editor::AcceptInlineCompletion" } }, { From 74c4dbd237327c08496508cf7be8859683283571 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BC=A0=E5=B0=8F=E7=99=BD?= <364772080@qq.com> Date: Wed, 5 Feb 2025 22:30:09 +0800 Subject: [PATCH 022/130] windows: Fix tests on Windows (#22616) Release Notes: - N/A --------- Co-authored-by: Mikayla --- .github/actions/run_tests_windows/action.yml | 26 + .github/workflows/ci.yml | 10 +- Cargo.lock | 9 + Cargo.toml | 13 +- .../src/file_command.rs | 73 ++- crates/copilot/src/copilot.rs | 5 +- .../src/copilot_completion_provider.rs | 13 +- crates/editor/src/display_map.rs | 8 +- crates/editor/src/display_map/block_map.rs | 12 +- crates/editor/src/display_map/wrap_map.rs | 6 +- crates/editor/src/editor_tests.rs | 85 +-- crates/editor/src/git/blame.rs | 20 +- crates/editor/src/hover_links.rs | 103 ++- crates/editor/src/inlay_hint_cache.rs | 71 +- crates/editor/src/items.rs | 19 +- crates/editor/src/test.rs | 21 +- .../src/extension_store_test.rs | 7 + crates/file_finder/src/file_finder_tests.rs | 123 ++-- crates/fuzzy/src/matcher.rs | 67 +- crates/git/src/blame.rs | 2 +- .../gpui/src/platform/windows/direct_write.rs | 29 +- crates/gpui/src/platform/windows/platform.rs | 17 +- crates/gpui_macros/Cargo.toml | 6 +- crates/language_tools/src/lsp_log_tests.rs | 7 +- crates/languages/src/rust.rs | 3 +- crates/prettier/src/prettier.rs | 2 +- crates/project/src/project_tests.rs | 618 ++++++++++-------- 
crates/project/src/task_inventory.rs | 8 +- crates/project_panel/src/project_panel.rs | 326 +++++---- crates/project_symbols/src/project_symbols.rs | 14 +- crates/recent_projects/src/recent_projects.rs | 5 +- .../refineable/derive_refineable/Cargo.toml | 6 +- .../remote_server/src/remote_editing_tests.rs | 68 +- crates/search/src/project_search.rs | 13 +- crates/semantic_index/Cargo.toml | 4 +- crates/semantic_index/src/semantic_index.rs | 6 +- crates/settings/src/settings_file.rs | 16 + crates/sqlez_macros/Cargo.toml | 2 +- crates/tab_switcher/src/tab_switcher_tests.rs | 21 +- crates/tasks_ui/src/modal.rs | 62 +- crates/tasks_ui/src/tasks_ui.rs | 34 +- crates/ui_macros/Cargo.toml | 6 +- crates/util/Cargo.toml | 4 +- crates/util/src/paths.rs | 34 +- crates/util/src/util.rs | 46 ++ crates/util_macros/Cargo.toml | 18 + crates/util_macros/LICENSE-APACHE | 1 + crates/util_macros/src/util_macros.rs | 56 ++ crates/vim/src/command.rs | 32 +- crates/vim/src/normal/paste.rs | 8 + crates/worktree/src/worktree.rs | 9 +- crates/worktree/src/worktree_tests.rs | 16 +- crates/zed/src/zed.rs | 154 +++-- crates/zed/src/zed/open_listener.rs | 27 +- script/exit-ci-if-dev-drive-is-full.ps1 | 22 + script/setup-dev-driver.ps1 | 3 +- 56 files changed, 1540 insertions(+), 856 deletions(-) create mode 100644 .github/actions/run_tests_windows/action.yml create mode 100644 crates/util_macros/Cargo.toml create mode 120000 crates/util_macros/LICENSE-APACHE create mode 100644 crates/util_macros/src/util_macros.rs create mode 100644 script/exit-ci-if-dev-drive-is-full.ps1 diff --git a/.github/actions/run_tests_windows/action.yml b/.github/actions/run_tests_windows/action.yml new file mode 100644 index 0000000000000000000000000000000000000000..c4be7f6d6db0a51753df0e300ee0de6d49376501 --- /dev/null +++ b/.github/actions/run_tests_windows/action.yml @@ -0,0 +1,26 @@ +name: "Run tests on Windows" +description: "Runs the tests on Windows" + +inputs: + working-directory: + description: "The working directory" + required: true + default: "." + +runs: + using: "composite" + steps: + - name: Install Rust + shell: pwsh + working-directory: ${{ inputs.working-directory }} + run: cargo install cargo-nextest --locked + + - name: Install Node + uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4 + with: + node-version: "18" + + - name: Run tests + shell: pwsh + working-directory: ${{ inputs.working-directory }} + run: cargo nextest run --workspace --no-fail-fast diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1c22474d4590dc3af09f38c262e7e95654b303cc..f20495baeb423f47021c1cc8fa6e5fd510bbe680 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -228,7 +228,6 @@ jobs: if: always() run: rm -rf ./../.cargo - # todo(windows): Actually run the tests windows_tests: timeout-minutes: 60 name: (Windows) Run Clippy and tests @@ -269,10 +268,19 @@ jobs: # Windows can't run shell scripts, so we need to use `cargo xtask`. run: cargo xtask clippy + - name: Run tests + uses: ./.github/actions/run_tests_windows + with: + working-directory: ${{ env.ZED_WORKSPACE }} + - name: Build Zed working-directory: ${{ env.ZED_WORKSPACE }} run: cargo build + - name: Check dev drive space + working-directory: ${{ env.ZED_WORKSPACE }} + run: ./script/exit-ci-if-dev-drive-is-full.ps1 55 + # Since the Windows runners are stateful, so we need to remove the config file to prevent potential bug. 
- name: Clean CI config file if: always() diff --git a/Cargo.lock b/Cargo.lock index 90373a0bcfeb46790f282ff6f2ccdc1df85a3a52..5353b9bc44aa0371513a5bf562fd70bd9a8f7d66 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14404,6 +14404,15 @@ dependencies = [ "tempfile", "tendril", "unicase", + "util_macros", +] + +[[package]] +name = "util_macros" +version = "0.1.0" +dependencies = [ + "quote", + "syn 1.0.109", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 9723d9beed45af10d166cce9e4ad931611f4e15b..d70d7b2faf1098ead8cf7e0f1d0ab53dba85e0a4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -144,6 +144,7 @@ members = [ "crates/ui_input", "crates/ui_macros", "crates/util", + "crates/util_macros", "crates/vcs_menu", "crates/vim", "crates/vim_mode_setting", @@ -339,6 +340,7 @@ ui = { path = "crates/ui" } ui_input = { path = "crates/ui_input" } ui_macros = { path = "crates/ui_macros" } util = { path = "crates/util" } +util_macros = { path = "crates/util_macros" } vcs_menu = { path = "crates/vcs_menu" } vim = { path = "crates/vim" } vim_mode_setting = { path = "crates/vim_mode_setting" } @@ -359,7 +361,7 @@ alacritty_terminal = { git = "https://github.com/alacritty/alacritty.git", rev = any_vec = "0.14" anyhow = "1.0.86" arrayvec = { version = "0.7.4", features = ["serde"] } -ashpd = { version = "0.10", default-features = false, features = ["async-std"]} +ashpd = { version = "0.10", default-features = false, features = ["async-std"] } async-compat = "0.2.1" async-compression = { version = "0.4", features = ["gzip", "futures-io"] } async-dispatcher = "0.1" @@ -421,7 +423,11 @@ jupyter-websocket-client = { version = "0.9.0" } libc = "0.2" libsqlite3-sys = { version = "0.30.1", features = ["bundled"] } linkify = "0.10.0" -livekit = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev="811ceae29fabee455f110c56cd66b3f49a7e5003", features = ["dispatcher", "services-dispatcher", "rustls-tls-native-roots"], default-features = false } +livekit = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "811ceae29fabee455f110c56cd66b3f49a7e5003", features = [ + "dispatcher", + "services-dispatcher", + "rustls-tls-native-roots", +], default-features = false } log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] } markup5ever_rcdom = "0.3.0" nanoid = "0.4" @@ -441,11 +447,13 @@ pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git" pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1abe5cec5ebfbe97ca71746a4cfc7fe89bddf8e0" } postage = { version = "0.5", features = ["futures-traits"] } pretty_assertions = { version = "1.3.0", features = ["unstable"] } +proc-macro2 = "1.0.93" profiling = "1" prost = "0.9" prost-build = "0.9" prost-types = "0.9" pulldown-cmark = { version = "0.12.0", default-features = false } +quote = "1.0.9" rand = "0.8.5" rayon = "1.8" regex = "1.5" @@ -489,6 +497,7 @@ sqlformat = "0.2" strsim = "0.11" strum = { version = "0.26.0", features = ["derive"] } subtle = "2.5.0" +syn = { version = "1.0.72", features = ["full", "extra-traits"] } sys-locale = "0.3.1" sysinfo = "0.31.0" take-until = "0.2.0" diff --git a/crates/assistant_slash_commands/src/file_command.rs b/crates/assistant_slash_commands/src/file_command.rs index d898d82bc3f538235374cce47b1518a4cf9c0aa3..71a7376845986517dd2a9d931cff5c564ea02a42 100644 --- a/crates/assistant_slash_commands/src/file_command.rs +++ b/crates/assistant_slash_commands/src/file_command.rs @@ -323,7 +323,14 @@ fn collect_files( )))?; 
directory_stack.push(entry.path.clone()); } else { - let entry_name = format!("{}/{}", prefix_paths, &filename); + // todo(windows) + // Potential bug: this assumes that the path separator is always `\` on Windows + let entry_name = format!( + "{}{}{}", + prefix_paths, + std::path::MAIN_SEPARATOR_STR, + &filename + ); events_tx.unbounded_send(Ok(SlashCommandEvent::StartSection { icon: IconName::Folder, label: entry_name.clone().into(), @@ -455,6 +462,7 @@ mod custom_path_matcher { use std::{fmt::Debug as _, path::Path}; use globset::{Glob, GlobSet, GlobSetBuilder}; + use util::paths::SanitizedPath; #[derive(Clone, Debug, Default)] pub struct PathMatcher { @@ -481,7 +489,7 @@ mod custom_path_matcher { pub fn new(globs: &[String]) -> Result { let globs = globs .into_iter() - .map(|glob| Glob::new(&glob)) + .map(|glob| Glob::new(&SanitizedPath::from(glob).to_glob_string())) .collect::, _>>()?; let sources = globs.iter().map(|glob| glob.glob().to_owned()).collect(); let sources_with_trailing_slash = globs @@ -507,7 +515,9 @@ mod custom_path_matcher { .zip(self.sources_with_trailing_slash.iter()) .any(|(source, with_slash)| { let as_bytes = other_path.as_os_str().as_encoded_bytes(); - let with_slash = if source.ends_with("/") { + // todo(windows) + // Potential bug: this assumes that the path separator is always `\` on Windows + let with_slash = if source.ends_with(std::path::MAIN_SEPARATOR_STR) { source.as_bytes() } else { with_slash.as_bytes() @@ -569,6 +579,7 @@ mod test { use serde_json::json; use settings::SettingsStore; use smol::stream::StreamExt; + use util::{path, separator}; use super::collect_files; @@ -592,7 +603,7 @@ mod test { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/root", + path!("/root"), json!({ "dir": { "subdir": { @@ -607,7 +618,7 @@ mod test { ) .await; - let project = Project::test(fs, ["/root".as_ref()], cx).await; + let project = Project::test(fs, [path!("/root").as_ref()], cx).await; let result_1 = cx.update(|cx| collect_files(project.clone(), &["root/dir".to_string()], cx)); @@ -615,7 +626,7 @@ mod test { .await .unwrap(); - assert!(result_1.text.starts_with("root/dir")); + assert!(result_1.text.starts_with(separator!("root/dir"))); // 4 files + 2 directories assert_eq!(result_1.sections.len(), 6); @@ -631,7 +642,7 @@ mod test { cx.update(|cx| collect_files(project.clone(), &["root/dir*".to_string()], cx).boxed()); let result = SlashCommandOutput::from_event_stream(result).await.unwrap(); - assert!(result.text.starts_with("root/dir")); + assert!(result.text.starts_with(separator!("root/dir"))); // 5 files + 2 directories assert_eq!(result.sections.len(), 7); @@ -645,7 +656,7 @@ mod test { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/zed", + path!("/zed"), json!({ "assets": { "dir1": { @@ -670,7 +681,7 @@ mod test { ) .await; - let project = Project::test(fs, ["/zed".as_ref()], cx).await; + let project = Project::test(fs, [path!("/zed").as_ref()], cx).await; let result = cx.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx)); @@ -679,27 +690,36 @@ mod test { .unwrap(); // Sanity check - assert!(result.text.starts_with("zed/assets/themes\n")); + assert!(result.text.starts_with(separator!("zed/assets/themes\n"))); assert_eq!(result.sections.len(), 7); // Ensure that full file paths are included in the real output - assert!(result.text.contains("zed/assets/themes/andromeda/LICENSE")); - assert!(result.text.contains("zed/assets/themes/ayu/LICENSE")); - 
assert!(result.text.contains("zed/assets/themes/summercamp/LICENSE")); + assert!(result + .text + .contains(separator!("zed/assets/themes/andromeda/LICENSE"))); + assert!(result + .text + .contains(separator!("zed/assets/themes/ayu/LICENSE"))); + assert!(result + .text + .contains(separator!("zed/assets/themes/summercamp/LICENSE"))); assert_eq!(result.sections[5].label, "summercamp"); // Ensure that things are in descending order, with properly relativized paths assert_eq!( result.sections[0].label, - "zed/assets/themes/andromeda/LICENSE" + separator!("zed/assets/themes/andromeda/LICENSE") ); assert_eq!(result.sections[1].label, "andromeda"); - assert_eq!(result.sections[2].label, "zed/assets/themes/ayu/LICENSE"); + assert_eq!( + result.sections[2].label, + separator!("zed/assets/themes/ayu/LICENSE") + ); assert_eq!(result.sections[3].label, "ayu"); assert_eq!( result.sections[4].label, - "zed/assets/themes/summercamp/LICENSE" + separator!("zed/assets/themes/summercamp/LICENSE") ); // Ensure that the project lasts until after the last await @@ -712,7 +732,7 @@ mod test { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/zed", + path!("/zed"), json!({ "assets": { "themes": { @@ -732,7 +752,7 @@ mod test { ) .await; - let project = Project::test(fs, ["/zed".as_ref()], cx).await; + let project = Project::test(fs, [path!("/zed").as_ref()], cx).await; let result = cx.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx)); @@ -740,26 +760,29 @@ mod test { .await .unwrap(); - assert!(result.text.starts_with("zed/assets/themes\n")); - assert_eq!(result.sections[0].label, "zed/assets/themes/LICENSE"); + assert!(result.text.starts_with(separator!("zed/assets/themes\n"))); + assert_eq!( + result.sections[0].label, + separator!("zed/assets/themes/LICENSE") + ); assert_eq!( result.sections[1].label, - "zed/assets/themes/summercamp/LICENSE" + separator!("zed/assets/themes/summercamp/LICENSE") ); assert_eq!( result.sections[2].label, - "zed/assets/themes/summercamp/subdir/LICENSE" + separator!("zed/assets/themes/summercamp/subdir/LICENSE") ); assert_eq!( result.sections[3].label, - "zed/assets/themes/summercamp/subdir/subsubdir/LICENSE" + separator!("zed/assets/themes/summercamp/subdir/subsubdir/LICENSE") ); assert_eq!(result.sections[4].label, "subsubdir"); assert_eq!(result.sections[5].label, "subdir"); assert_eq!(result.sections[6].label, "summercamp"); - assert_eq!(result.sections[7].label, "zed/assets/themes"); + assert_eq!(result.sections[7].label, separator!("zed/assets/themes")); - assert_eq!(result.text, "zed/assets/themes\n```zed/assets/themes/LICENSE\n1\n```\n\nsummercamp\n```zed/assets/themes/summercamp/LICENSE\n1\n```\n\nsubdir\n```zed/assets/themes/summercamp/subdir/LICENSE\n1\n```\n\nsubsubdir\n```zed/assets/themes/summercamp/subdir/subsubdir/LICENSE\n3\n```\n\n"); + assert_eq!(result.text, separator!("zed/assets/themes\n```zed/assets/themes/LICENSE\n1\n```\n\nsummercamp\n```zed/assets/themes/summercamp/LICENSE\n1\n```\n\nsubdir\n```zed/assets/themes/summercamp/subdir/LICENSE\n1\n```\n\nsubsubdir\n```zed/assets/themes/summercamp/subdir/subsubdir/LICENSE\n3\n```\n\n")); // Ensure that the project lasts until after the last await drop(project); diff --git a/crates/copilot/src/copilot.rs b/crates/copilot/src/copilot.rs index a95b2efeb03c7abc67da1de899231519c88bf8ed..6b65b8057c009127cd1e24de438e87730bdf0eef 100644 --- a/crates/copilot/src/copilot.rs +++ b/crates/copilot/src/copilot.rs @@ -1061,6 +1061,7 @@ async fn get_copilot_lsp(http: Arc) -> 
anyhow::Result { mod tests { use super::*; use gpui::TestAppContext; + use util::path; #[gpui::test(iterations = 10)] async fn test_buffer_management(cx: &mut TestAppContext) { @@ -1123,7 +1124,7 @@ mod tests { buffer_1.update(cx, |buffer, cx| { buffer.file_updated( Arc::new(File { - abs_path: "/root/child/buffer-1".into(), + abs_path: path!("/root/child/buffer-1").into(), path: Path::new("child/buffer-1").into(), }), cx, @@ -1136,7 +1137,7 @@ mod tests { text_document: lsp::TextDocumentIdentifier::new(buffer_1_uri), } ); - let buffer_1_uri = lsp::Url::from_file_path("/root/child/buffer-1").unwrap(); + let buffer_1_uri = lsp::Url::from_file_path(path!("/root/child/buffer-1")).unwrap(); assert_eq!( lsp.receive_notification::() .await, diff --git a/crates/copilot/src/copilot_completion_provider.rs b/crates/copilot/src/copilot_completion_provider.rs index dd1bf335cac141e624566351ca77a0c55554c022..9c25e295aa91f75b9186a31519a877402a82e4f9 100644 --- a/crates/copilot/src/copilot_completion_provider.rs +++ b/crates/copilot/src/copilot_completion_provider.rs @@ -290,7 +290,10 @@ mod tests { use serde_json::json; use settings::SettingsStore; use std::future::Future; - use util::test::{marked_text_ranges_by, TextRangeMarker}; + use util::{ + path, + test::{marked_text_ranges_by, TextRangeMarker}, + }; #[gpui::test(iterations = 10)] async fn test_copilot(executor: BackgroundExecutor, cx: &mut TestAppContext) { @@ -949,24 +952,24 @@ mod tests { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/test", + path!("/test"), json!({ ".env": "SECRET=something\n", "README.md": "hello\nworld\nhow\nare\nyou\ntoday" }), ) .await; - let project = Project::test(fs, ["/test".as_ref()], cx).await; + let project = Project::test(fs, [path!("/test").as_ref()], cx).await; let private_buffer = project .update(cx, |project, cx| { - project.open_local_buffer("/test/.env", cx) + project.open_local_buffer(path!("/test/.env"), cx) }) .await .unwrap(); let public_buffer = project .update(cx, |project, cx| { - project.open_local_buffer("/test/README.md", cx) + project.open_local_buffer(path!("/test/README.md"), cx) }) .await .unwrap(); diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 4e3a7e646040b83adfdf4a9dc5f7264f68cc5813..c933b04cf595247e02af082924c4efaeeea4bf7b 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -1433,7 +1433,10 @@ impl ToDisplayPoint for Anchor { #[cfg(test)] pub mod tests { use super::*; - use crate::{movement, test::marked_display_snapshot}; + use crate::{ + movement, + test::{marked_display_snapshot, test_font}, + }; use block_map::BlockPlacement; use gpui::{ div, font, observe, px, App, AppContext as _, BorrowAppContext, Element, Hsla, Rgba, @@ -1492,10 +1495,11 @@ pub mod tests { } }); + let font = test_font(); let map = cx.new(|cx| { DisplayMap::new( buffer.clone(), - font("Helvetica"), + font, font_size, wrap_width, true, diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 7715471a1f3f91803d5f2c26cce677d1a1b25db2..b4bf81846ede7797a8fb5534f543d8863eb201b5 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -1992,8 +1992,9 @@ fn offset_for_row(s: &str, target: u32) -> (u32, usize) { #[cfg(test)] mod tests { use super::*; - use crate::display_map::{ - fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap, wrap_map::WrapMap, + use crate::{ + display_map::{fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap, 
wrap_map::WrapMap}, + test::test_font, }; use gpui::{div, font, px, App, AppContext as _, Element}; use itertools::Itertools; @@ -2227,7 +2228,7 @@ mod tests { multi_buffer }); - let font = font("Helvetica"); + let font = test_font(); let font_size = px(14.); let font_id = cx.text_system().resolve_font(&font); let mut wrap_width = px(0.); @@ -3069,8 +3070,9 @@ mod tests { let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot); let (mut tab_map, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap()); - let (wrap_map, wraps_snapshot) = cx - .update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), font_size, wrap_width, cx)); + let font = test_font(); + let (wrap_map, wraps_snapshot) = + cx.update(|cx| WrapMap::new(tab_snapshot, font, font_size, wrap_width, cx)); let mut block_map = BlockMap::new( wraps_snapshot, true, diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 6b00ab7db0f796955d213fcc34e63ce7f75e5ef0..77eab5881cb9657d996fef02075c8e069de4a122 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -1169,9 +1169,10 @@ mod tests { use super::*; use crate::{ display_map::{fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap}, + test::test_font, MultiBuffer, }; - use gpui::{font, px, test::observe}; + use gpui::{px, test::observe}; use rand::prelude::*; use settings::SettingsStore; use smol::stream::StreamExt; @@ -1196,7 +1197,8 @@ mod tests { Some(px(rng.gen_range(0.0..=1000.0))) }; let tab_size = NonZeroU32::new(rng.gen_range(1..=4)).unwrap(); - let font = font("Helvetica"); + + let font = test_font(); let _font_id = text_system.font_id(&font); let font_size = px(14.0); diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index fae15adf461e1004460b9ae84e4e57e8e5a45f82..491510ed32b2b8efdef05ea0908a8c654e37e172 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -40,8 +40,9 @@ use std::{ use test::{build_editor_with_project, editor_lsp_test_context::rust_lang}; use unindent::Unindent; use util::{ - assert_set_eq, + assert_set_eq, path, test::{marked_text_ranges, marked_text_ranges_by, sample_text, TextRangeMarker}, + uri, }; use workspace::{ item::{FollowEvent, FollowableItem, Item, ItemHandle}, @@ -7074,9 +7075,9 @@ async fn test_document_format_during_save(cx: &mut gpui::TestAppContext) { init_test(cx, |_| {}); let fs = FakeFs::new(cx.executor()); - fs.insert_file("/file.rs", Default::default()).await; + fs.insert_file(path!("/file.rs"), Default::default()).await; - let project = Project::test(fs, ["/file.rs".as_ref()], cx).await; + let project = Project::test(fs, [path!("/file.rs").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(rust_lang()); @@ -7092,7 +7093,9 @@ async fn test_document_format_during_save(cx: &mut gpui::TestAppContext) { ); let buffer = project - .update(cx, |project, cx| project.open_local_buffer("/file.rs", cx)) + .update(cx, |project, cx| { + project.open_local_buffer(path!("/file.rs"), cx) + }) .await .unwrap(); @@ -7117,7 +7120,7 @@ async fn test_document_format_during_save(cx: &mut gpui::TestAppContext) { .handle_request::(move |params, _| async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path("/file.rs").unwrap() + lsp::Url::from_file_path(path!("/file.rs")).unwrap() ); 
assert_eq!(params.options.tab_size, 4); Ok(Some(vec![lsp::TextEdit::new( @@ -7145,7 +7148,7 @@ async fn test_document_format_during_save(cx: &mut gpui::TestAppContext) { fake_server.handle_request::(move |params, _| async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path("/file.rs").unwrap() + lsp::Url::from_file_path(path!("/file.rs")).unwrap() ); futures::future::pending::<()>().await; unreachable!() @@ -7202,7 +7205,7 @@ async fn test_document_format_during_save(cx: &mut gpui::TestAppContext) { .handle_request::(move |params, _| async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path("/file.rs").unwrap() + lsp::Url::from_file_path(path!("/file.rs")).unwrap() ); assert_eq!(params.options.tab_size, 8); Ok(Some(vec![])) @@ -7237,7 +7240,7 @@ async fn test_multibuffer_format_during_save(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/a", + path!("/a"), json!({ "main.rs": sample_text_1, "other.rs": sample_text_2, @@ -7246,7 +7249,7 @@ async fn test_multibuffer_format_during_save(cx: &mut gpui::TestAppContext) { ) .await; - let project = Project::test(fs, ["/a".as_ref()], cx).await; + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx); @@ -7421,20 +7424,20 @@ async fn test_multibuffer_format_during_save(cx: &mut gpui::TestAppContext) { assert!(cx.read(|cx| !multi_buffer_editor.is_dirty(cx))); assert_eq!( multi_buffer_editor.update(cx, |editor, cx| editor.text(cx)), - "a|o[file:///a/main.rs formatted]bbbb\ncccc\n\nffff\ngggg\n\njjjj\n\nlll[file:///a/other.rs formatted]mmmm\nnnnn|four|five|six|\nr\n\nuuuu\n\nvvvv\nwwww\nxxxx\n\n{{{{\n||||\n\n\u{7f}\u{7f}\u{7f}\u{7f}", + uri!("a|o[file:///a/main.rs formatted]bbbb\ncccc\n\nffff\ngggg\n\njjjj\n\nlll[file:///a/other.rs formatted]mmmm\nnnnn|four|five|six|\nr\n\nuuuu\n\nvvvv\nwwww\nxxxx\n\n{{{{\n||||\n\n\u{7f}\u{7f}\u{7f}\u{7f}"), ); buffer_1.update(cx, |buffer, _| { assert!(!buffer.is_dirty()); assert_eq!( buffer.text(), - "a|o[file:///a/main.rs formatted]bbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj\n", + uri!("a|o[file:///a/main.rs formatted]bbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj\n"), ) }); buffer_2.update(cx, |buffer, _| { assert!(!buffer.is_dirty()); assert_eq!( buffer.text(), - "lll[file:///a/other.rs formatted]mmmm\nnnnn|four|five|six|oooo\npppp\nr\nssss\ntttt\nuuuu\n", + uri!("lll[file:///a/other.rs formatted]mmmm\nnnnn|four|five|six|oooo\npppp\nr\nssss\ntttt\nuuuu\n"), ) }); buffer_3.update(cx, |buffer, _| { @@ -7448,9 +7451,9 @@ async fn test_range_format_during_save(cx: &mut gpui::TestAppContext) { init_test(cx, |_| {}); let fs = FakeFs::new(cx.executor()); - fs.insert_file("/file.rs", Default::default()).await; + fs.insert_file(path!("/file.rs"), Default::default()).await; - let project = Project::test(fs, ["/file.rs".as_ref()], cx).await; + let project = Project::test(fs, [path!("/").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(rust_lang()); @@ -7466,7 +7469,9 @@ async fn test_range_format_during_save(cx: &mut gpui::TestAppContext) { ); let buffer = project - .update(cx, |project, cx| project.open_local_buffer("/file.rs", cx)) + .update(cx, |project, cx| { + project.open_local_buffer(path!("/file.rs"), cx) + }) .await .unwrap(); @@ -7491,7 +7496,7 @@ async fn 
test_range_format_during_save(cx: &mut gpui::TestAppContext) { .handle_request::(move |params, _| async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path("/file.rs").unwrap() + lsp::Url::from_file_path(path!("/file.rs")).unwrap() ); assert_eq!(params.options.tab_size, 4); Ok(Some(vec![lsp::TextEdit::new( @@ -7519,7 +7524,7 @@ async fn test_range_format_during_save(cx: &mut gpui::TestAppContext) { move |params, _| async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path("/file.rs").unwrap() + lsp::Url::from_file_path(path!("/file.rs")).unwrap() ); futures::future::pending::<()>().await; unreachable!() @@ -7577,7 +7582,7 @@ async fn test_range_format_during_save(cx: &mut gpui::TestAppContext) { .handle_request::(move |params, _| async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path("/file.rs").unwrap() + lsp::Url::from_file_path(path!("/file.rs")).unwrap() ); assert_eq!(params.options.tab_size, 8); Ok(Some(vec![])) @@ -7597,9 +7602,9 @@ async fn test_document_format_manual_trigger(cx: &mut gpui::TestAppContext) { }); let fs = FakeFs::new(cx.executor()); - fs.insert_file("/file.rs", Default::default()).await; + fs.insert_file(path!("/file.rs"), Default::default()).await; - let project = Project::test(fs, ["/file.rs".as_ref()], cx).await; + let project = Project::test(fs, [path!("/").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(Arc::new(Language::new( @@ -7633,7 +7638,9 @@ async fn test_document_format_manual_trigger(cx: &mut gpui::TestAppContext) { ); let buffer = project - .update(cx, |project, cx| project.open_local_buffer("/file.rs", cx)) + .update(cx, |project, cx| { + project.open_local_buffer(path!("/file.rs"), cx) + }) .await .unwrap(); @@ -7663,7 +7670,7 @@ async fn test_document_format_manual_trigger(cx: &mut gpui::TestAppContext) { .handle_request::(move |params, _| async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path("/file.rs").unwrap() + lsp::Url::from_file_path(path!("/file.rs")).unwrap() ); assert_eq!(params.options.tab_size, 4); Ok(Some(vec![lsp::TextEdit::new( @@ -7687,7 +7694,7 @@ async fn test_document_format_manual_trigger(cx: &mut gpui::TestAppContext) { fake_server.handle_request::(move |params, _| async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path("/file.rs").unwrap() + lsp::Url::from_file_path(path!("/file.rs")).unwrap() ); futures::future::pending::<()>().await; unreachable!() @@ -8727,14 +8734,14 @@ async fn test_multiline_completion(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/a", + path!("/a"), json!({ "main.ts": "a", }), ) .await; - let project = Project::test(fs, ["/a".as_ref()], cx).await; + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); let typescript_language = Arc::new(Language::new( LanguageConfig { @@ -8794,7 +8801,7 @@ async fn test_multiline_completion(cx: &mut gpui::TestAppContext) { .unwrap(); let _buffer = project .update(cx, |project, cx| { - project.open_local_buffer_with_lsp("/a/main.ts", cx) + project.open_local_buffer_with_lsp(path!("/a/main.ts"), cx) }) .await .unwrap(); @@ -10570,7 +10577,7 @@ async fn go_to_prev_overlapping_diagnostic( .update_diagnostics( LanguageServerId(0), lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path("/root/file").unwrap(), + uri: 
lsp::Url::from_file_path(path!("/root/file")).unwrap(), version: None, diagnostics: vec![ lsp::Diagnostic { @@ -10663,7 +10670,7 @@ async fn test_diagnostics_with_links(cx: &mut TestAppContext) { lsp_store.update_diagnostics( LanguageServerId(0), lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path("/root/file").unwrap(), + uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(), version: None, diagnostics: vec![lsp::Diagnostic { range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 12)), @@ -10923,14 +10930,14 @@ async fn test_on_type_formatting_not_triggered(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/a", + path!("/a"), json!({ "main.rs": "fn main() { let a = 5; }", "other.rs": "// Test file", }), ) .await; - let project = Project::test(fs, ["/a".as_ref()], cx).await; + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(Arc::new(Language::new( @@ -10982,7 +10989,7 @@ async fn test_on_type_formatting_not_triggered(cx: &mut gpui::TestAppContext) { let buffer = project .update(cx, |project, cx| { - project.open_local_buffer("/a/main.rs", cx) + project.open_local_buffer(path!("/a/main.rs"), cx) }) .await .unwrap(); @@ -11002,7 +11009,7 @@ async fn test_on_type_formatting_not_triggered(cx: &mut gpui::TestAppContext) { fake_server.handle_request::(|params, _| async move { assert_eq!( params.text_document_position.text_document.uri, - lsp::Url::from_file_path("/a/main.rs").unwrap(), + lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(), ); assert_eq!( params.text_document_position.position, @@ -11040,7 +11047,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut gpui::Test let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/a", + path!("/a"), json!({ "main.rs": "fn main() { let a = 5; }", "other.rs": "// Test file", @@ -11048,7 +11055,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut gpui::Test ) .await; - let project = Project::test(fs, ["/a".as_ref()], cx).await; + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; let server_restarts = Arc::new(AtomicUsize::new(0)); let closure_restarts = Arc::clone(&server_restarts); @@ -11088,7 +11095,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut gpui::Test let _window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); let _buffer = project .update(cx, |project, cx| { - project.open_local_buffer_with_lsp("/a/main.rs", cx) + project.open_local_buffer_with_lsp(path!("/a/main.rs"), cx) }) .await .unwrap(); @@ -11861,9 +11868,9 @@ async fn test_document_format_with_prettier(cx: &mut gpui::TestAppContext) { }); let fs = FakeFs::new(cx.executor()); - fs.insert_file("/file.ts", Default::default()).await; + fs.insert_file(path!("/file.ts"), Default::default()).await; - let project = Project::test(fs, ["/file.ts".as_ref()], cx).await; + let project = Project::test(fs, [path!("/file.ts").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(Arc::new(Language::new( @@ -11895,7 +11902,9 @@ async fn test_document_format_with_prettier(cx: &mut gpui::TestAppContext) { let prettier_format_suffix = project::TEST_PRETTIER_FORMAT_SUFFIX; let buffer = project - .update(cx, |project, cx| project.open_local_buffer("/file.ts", cx)) + .update(cx, |project, cx| { + 
project.open_local_buffer(path!("/file.ts"), cx) + }) .await .unwrap(); diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index d9c4926d33cc857d35bc57cf3c81c0da6a9e2349..767c1eabb9e0e09756037d5b44ca7740cac4a017 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -560,7 +560,7 @@ mod tests { use settings::SettingsStore; use std::{cmp, env, ops::Range, path::Path}; use unindent::Unindent as _; - use util::RandomCharIter; + use util::{path, RandomCharIter}; // macro_rules! assert_blame_rows { // ($blame:expr, $rows:expr, $expected:expr, $cx:expr) => { @@ -793,7 +793,7 @@ mod tests { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/my-repo", + path!("/my-repo"), json!({ ".git": {}, "file.txt": r#" @@ -807,7 +807,7 @@ mod tests { .await; fs.set_blame_for_repo( - Path::new("/my-repo/.git"), + Path::new(path!("/my-repo/.git")), vec![( "file.txt".into(), Blame { @@ -817,10 +817,10 @@ mod tests { )], ); - let project = Project::test(fs, ["/my-repo".as_ref()], cx).await; + let project = Project::test(fs, [path!("/my-repo").as_ref()], cx).await; let buffer = project .update(cx, |project, cx| { - project.open_local_buffer("/my-repo/file.txt", cx) + project.open_local_buffer(path!("/my-repo/file.txt"), cx) }) .await .unwrap(); @@ -945,7 +945,7 @@ mod tests { log::info!("initial buffer text: {:?}", buffer_initial_text); fs.insert_tree( - "/my-repo", + path!("/my-repo"), json!({ ".git": {}, "file.txt": buffer_initial_text.to_string() @@ -956,7 +956,7 @@ mod tests { let blame_entries = gen_blame_entries(buffer_initial_text.max_point().row, &mut rng); log::info!("initial blame entries: {:?}", blame_entries); fs.set_blame_for_repo( - Path::new("/my-repo/.git"), + Path::new(path!("/my-repo/.git")), vec![( "file.txt".into(), Blame { @@ -966,10 +966,10 @@ mod tests { )], ); - let project = Project::test(fs.clone(), ["/my-repo".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/my-repo").as_ref()], cx).await; let buffer = project .update(cx, |project, cx| { - project.open_local_buffer("/my-repo/file.txt", cx) + project.open_local_buffer(path!("/my-repo/file.txt"), cx) }) .await .unwrap(); @@ -998,7 +998,7 @@ mod tests { log::info!("regenerating blame entries: {:?}", blame_entries); fs.set_blame_for_repo( - Path::new("/my-repo/.git"), + Path::new(path!("/my-repo/.git")), vec![( "file.txt".into(), Blame { diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs index 0442669e5eecaa4150cf4d8e75719e352724f060..b0e4abcb32f4a4c3ee999061da00c65315b3b0df 100644 --- a/crates/editor/src/hover_links.rs +++ b/crates/editor/src/hover_links.rs @@ -921,7 +921,7 @@ mod tests { use indoc::indoc; use language::language_settings::InlayHintSettings; use lsp::request::{GotoDefinition, GotoTypeDefinition}; - use util::assert_set_eq; + use util::{assert_set_eq, path}; use workspace::item::Item; #[gpui::test] @@ -1574,18 +1574,31 @@ mod tests { // Insert a new file let fs = cx.update_workspace(|workspace, _, cx| workspace.project().read(cx).fs().clone()); fs.as_fake() - .insert_file("/root/dir/file2.rs", "This is file2.rs".as_bytes().to_vec()) + .insert_file( + path!("/root/dir/file2.rs"), + "This is file2.rs".as_bytes().to_vec(), + ) .await; + #[cfg(not(target_os = "windows"))] cx.set_state(indoc! {" You can't go to a file that does_not_exist.txt. Go to file2.rs if you want. Or go to ../dir/file2.rs if you want. Or go to /root/dir/file2.rs if project is local. 
Or go to /root/dir/file2 if this is a Rust file.ˇ + "}); + #[cfg(target_os = "windows")] + cx.set_state(indoc! {" + You can't go to a file that does_not_exist.txt. + Go to file2.rs if you want. + Or go to ../dir/file2.rs if you want. + Or go to C:/root/dir/file2.rs if project is local. + Or go to C:/root/dir/file2 if this is a Rust file.ˇ "}); // File does not exist + #[cfg(not(target_os = "windows"))] let screen_coord = cx.pixel_position(indoc! {" You can't go to a file that dˇoes_not_exist.txt. Go to file2.rs if you want. @@ -1593,6 +1606,14 @@ mod tests { Or go to /root/dir/file2.rs if project is local. Or go to /root/dir/file2 if this is a Rust file. "}); + #[cfg(target_os = "windows")] + let screen_coord = cx.pixel_position(indoc! {" + You can't go to a file that dˇoes_not_exist.txt. + Go to file2.rs if you want. + Or go to ../dir/file2.rs if you want. + Or go to C:/root/dir/file2.rs if project is local. + Or go to C:/root/dir/file2 if this is a Rust file. + "}); cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key()); // No highlight cx.update_editor(|editor, window, cx| { @@ -1605,6 +1626,7 @@ mod tests { }); // Moving the mouse over a file that does exist should highlight it. + #[cfg(not(target_os = "windows"))] let screen_coord = cx.pixel_position(indoc! {" You can't go to a file that does_not_exist.txt. Go to fˇile2.rs if you want. @@ -1612,8 +1634,17 @@ mod tests { Or go to /root/dir/file2.rs if project is local. Or go to /root/dir/file2 if this is a Rust file. "}); + #[cfg(target_os = "windows")] + let screen_coord = cx.pixel_position(indoc! {" + You can't go to a file that does_not_exist.txt. + Go to fˇile2.rs if you want. + Or go to ../dir/file2.rs if you want. + Or go to C:/root/dir/file2.rs if project is local. + Or go to C:/root/dir/file2 if this is a Rust file. + "}); cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key()); + #[cfg(not(target_os = "windows"))] cx.assert_editor_text_highlights::(indoc! {" You can't go to a file that does_not_exist.txt. Go to «file2.rsˇ» if you want. @@ -1621,8 +1652,17 @@ mod tests { Or go to /root/dir/file2.rs if project is local. Or go to /root/dir/file2 if this is a Rust file. "}); + #[cfg(target_os = "windows")] + cx.assert_editor_text_highlights::(indoc! {" + You can't go to a file that does_not_exist.txt. + Go to «file2.rsˇ» if you want. + Or go to ../dir/file2.rs if you want. + Or go to C:/root/dir/file2.rs if project is local. + Or go to C:/root/dir/file2 if this is a Rust file. + "}); // Moving the mouse over a relative path that does exist should highlight it + #[cfg(not(target_os = "windows"))] let screen_coord = cx.pixel_position(indoc! {" You can't go to a file that does_not_exist.txt. Go to file2.rs if you want. @@ -1630,8 +1670,17 @@ mod tests { Or go to /root/dir/file2.rs if project is local. Or go to /root/dir/file2 if this is a Rust file. "}); + #[cfg(target_os = "windows")] + let screen_coord = cx.pixel_position(indoc! {" + You can't go to a file that does_not_exist.txt. + Go to file2.rs if you want. + Or go to ../dir/fˇile2.rs if you want. + Or go to C:/root/dir/file2.rs if project is local. + Or go to C:/root/dir/file2 if this is a Rust file. + "}); cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key()); + #[cfg(not(target_os = "windows"))] cx.assert_editor_text_highlights::(indoc! {" You can't go to a file that does_not_exist.txt. Go to file2.rs if you want. @@ -1639,8 +1688,17 @@ mod tests { Or go to /root/dir/file2.rs if project is local. 
Or go to /root/dir/file2 if this is a Rust file. "}); + #[cfg(target_os = "windows")] + cx.assert_editor_text_highlights::(indoc! {" + You can't go to a file that does_not_exist.txt. + Go to file2.rs if you want. + Or go to «../dir/file2.rsˇ» if you want. + Or go to C:/root/dir/file2.rs if project is local. + Or go to C:/root/dir/file2 if this is a Rust file. + "}); // Moving the mouse over an absolute path that does exist should highlight it + #[cfg(not(target_os = "windows"))] let screen_coord = cx.pixel_position(indoc! {" You can't go to a file that does_not_exist.txt. Go to file2.rs if you want. @@ -1649,7 +1707,17 @@ mod tests { Or go to /root/dir/file2 if this is a Rust file. "}); + #[cfg(target_os = "windows")] + let screen_coord = cx.pixel_position(indoc! {" + You can't go to a file that does_not_exist.txt. + Go to file2.rs if you want. + Or go to ../dir/file2.rs if you want. + Or go to C:/root/diˇr/file2.rs if project is local. + Or go to C:/root/dir/file2 if this is a Rust file. + "}); + cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key()); + #[cfg(not(target_os = "windows"))] cx.assert_editor_text_highlights::(indoc! {" You can't go to a file that does_not_exist.txt. Go to file2.rs if you want. @@ -1657,8 +1725,17 @@ mod tests { Or go to «/root/dir/file2.rsˇ» if project is local. Or go to /root/dir/file2 if this is a Rust file. "}); + #[cfg(target_os = "windows")] + cx.assert_editor_text_highlights::(indoc! {" + You can't go to a file that does_not_exist.txt. + Go to file2.rs if you want. + Or go to ../dir/file2.rs if you want. + Or go to «C:/root/dir/file2.rsˇ» if project is local. + Or go to C:/root/dir/file2 if this is a Rust file. + "}); // Moving the mouse over a path that exists, if we add the language-specific suffix, it should highlight it + #[cfg(not(target_os = "windows"))] let screen_coord = cx.pixel_position(indoc! {" You can't go to a file that does_not_exist.txt. Go to file2.rs if you want. @@ -1666,8 +1743,17 @@ mod tests { Or go to /root/dir/file2.rs if project is local. Or go to /root/diˇr/file2 if this is a Rust file. "}); + #[cfg(target_os = "windows")] + let screen_coord = cx.pixel_position(indoc! {" + You can't go to a file that does_not_exist.txt. + Go to file2.rs if you want. + Or go to ../dir/file2.rs if you want. + Or go to C:/root/dir/file2.rs if project is local. + Or go to C:/root/diˇr/file2 if this is a Rust file. + "}); cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key()); + #[cfg(not(target_os = "windows"))] cx.assert_editor_text_highlights::(indoc! {" You can't go to a file that does_not_exist.txt. Go to file2.rs if you want. @@ -1675,6 +1761,14 @@ mod tests { Or go to /root/dir/file2.rs if project is local. Or go to «/root/dir/file2ˇ» if this is a Rust file. "}); + #[cfg(target_os = "windows")] + cx.assert_editor_text_highlights::(indoc! {" + You can't go to a file that does_not_exist.txt. + Go to file2.rs if you want. + Or go to ../dir/file2.rs if you want. + Or go to C:/root/dir/file2.rs if project is local. + Or go to «C:/root/dir/file2ˇ» if this is a Rust file. 
+ "}); cx.simulate_click(screen_coord, Modifiers::secondary_key()); @@ -1692,7 +1786,10 @@ mod tests { let file = buffer.read(cx).file().unwrap(); let file_path = file.as_local().unwrap().abs_path(cx); - assert_eq!(file_path.to_str().unwrap(), "/root/dir/file2.rs"); + assert_eq!( + file_path, + std::path::PathBuf::from(path!("/root/dir/file2.rs")) + ); }); } diff --git a/crates/editor/src/inlay_hint_cache.rs b/crates/editor/src/inlay_hint_cache.rs index 3b325a58025b76c0a3e472f682f38d693697146a..e6789c3a3abf85c255f2e7ff530b70d1dcde809e 100644 --- a/crates/editor/src/inlay_hint_cache.rs +++ b/crates/editor/src/inlay_hint_cache.rs @@ -1274,6 +1274,7 @@ pub mod tests { use settings::SettingsStore; use std::sync::atomic::{AtomicBool, AtomicU32, AtomicUsize, Ordering}; use text::Point; + use util::path; use super::*; @@ -1499,7 +1500,7 @@ pub mod tests { let fs = FakeFs::new(cx.background_executor.clone()); fs.insert_tree( - "/a", + path!("/a"), json!({ "main.rs": "fn main() { a } // and some long comment to ensure inlays are not trimmed out", "other.md": "Test md file with some text", @@ -1507,7 +1508,7 @@ pub mod tests { ) .await; - let project = Project::test(fs, ["/a".as_ref()], cx).await; + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); let mut rs_fake_servers = None; @@ -1542,14 +1543,16 @@ pub mod tests { "Rust" => { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path("/a/main.rs").unwrap(), + lsp::Url::from_file_path(path!("/a/main.rs")) + .unwrap(), ); rs_lsp_request_count.fetch_add(1, Ordering::Release) + 1 } "Markdown" => { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path("/a/other.md").unwrap(), + lsp::Url::from_file_path(path!("/a/other.md")) + .unwrap(), ); md_lsp_request_count.fetch_add(1, Ordering::Release) + 1 } @@ -1585,7 +1588,7 @@ pub mod tests { let rs_buffer = project .update(cx, |project, cx| { - project.open_local_buffer("/a/main.rs", cx) + project.open_local_buffer(path!("/a/main.rs"), cx) }) .await .unwrap(); @@ -1611,7 +1614,7 @@ pub mod tests { cx.executor().run_until_parked(); let md_buffer = project .update(cx, |project, cx| { - project.open_local_buffer("/a/other.md", cx) + project.open_local_buffer(path!("/a/other.md"), cx) }) .await .unwrap(); @@ -2173,7 +2176,7 @@ pub mod tests { let fs = FakeFs::new(cx.background_executor.clone()); fs.insert_tree( - "/a", + path!("/a"), json!({ "main.rs": format!("fn main() {{\n{}\n}}", "let i = 5;\n".repeat(500)), "other.rs": "// Test file", @@ -2181,7 +2184,7 @@ pub mod tests { ) .await; - let project = Project::test(fs, ["/a".as_ref()], cx).await; + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(rust_lang()); @@ -2209,7 +2212,7 @@ pub mod tests { async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path("/a/main.rs").unwrap(), + lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(), ); task_lsp_request_ranges.lock().push(params.range); @@ -2237,7 +2240,7 @@ pub mod tests { let buffer = project .update(cx, |project, cx| { - project.open_local_buffer("/a/main.rs", cx) + project.open_local_buffer(path!("/a/main.rs"), cx) }) .await .unwrap(); @@ -2471,7 +2474,7 @@ pub mod tests { let fs = FakeFs::new(cx.background_executor.clone()); fs.insert_tree( - "/a", + path!("/a"), json!({ "main.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|i| 
format!("let i = {i};\n")).collect::>().join("")), "other.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|j| format!("let j = {j};\n")).collect::>().join("")), @@ -2479,7 +2482,7 @@ pub mod tests { ) .await; - let project = Project::test(fs, ["/a".as_ref()], cx).await; + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); let language = rust_lang(); @@ -2497,13 +2500,13 @@ pub mod tests { let (buffer_1, _handle1) = project .update(cx, |project, cx| { - project.open_local_buffer_with_lsp("/a/main.rs", cx) + project.open_local_buffer_with_lsp(path!("/a/main.rs"), cx) }) .await .unwrap(); let (buffer_2, _handle2) = project .update(cx, |project, cx| { - project.open_local_buffer_with_lsp("/a/other.rs", cx) + project.open_local_buffer_with_lsp(path!("/a/other.rs"), cx) }) .await .unwrap(); @@ -2585,11 +2588,11 @@ pub mod tests { let task_editor_edited = Arc::clone(&closure_editor_edited); async move { let hint_text = if params.text_document.uri - == lsp::Url::from_file_path("/a/main.rs").unwrap() + == lsp::Url::from_file_path(path!("/a/main.rs")).unwrap() { "main hint" } else if params.text_document.uri - == lsp::Url::from_file_path("/a/other.rs").unwrap() + == lsp::Url::from_file_path(path!("/a/other.rs")).unwrap() { "other hint" } else { @@ -2815,7 +2818,7 @@ pub mod tests { let fs = FakeFs::new(cx.background_executor.clone()); fs.insert_tree( - "/a", + path!("/a"), json!({ "main.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|i| format!("let i = {i};\n")).collect::>().join("")), "other.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|j| format!("let j = {j};\n")).collect::>().join("")), @@ -2823,7 +2826,7 @@ pub mod tests { ) .await; - let project = Project::test(fs, ["/a".as_ref()], cx).await; + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(rust_lang()); @@ -2840,13 +2843,13 @@ pub mod tests { let (buffer_1, _handle) = project .update(cx, |project, cx| { - project.open_local_buffer_with_lsp("/a/main.rs", cx) + project.open_local_buffer_with_lsp(path!("/a/main.rs"), cx) }) .await .unwrap(); let (buffer_2, _handle2) = project .update(cx, |project, cx| { - project.open_local_buffer_with_lsp("/a/other.rs", cx) + project.open_local_buffer_with_lsp(path!("/a/other.rs"), cx) }) .await .unwrap(); @@ -2886,11 +2889,11 @@ pub mod tests { let task_editor_edited = Arc::clone(&closure_editor_edited); async move { let hint_text = if params.text_document.uri - == lsp::Url::from_file_path("/a/main.rs").unwrap() + == lsp::Url::from_file_path(path!("/a/main.rs")).unwrap() { "main hint" } else if params.text_document.uri - == lsp::Url::from_file_path("/a/other.rs").unwrap() + == lsp::Url::from_file_path(path!("/a/other.rs")).unwrap() { "other hint" } else { @@ -3027,7 +3030,7 @@ pub mod tests { let fs = FakeFs::new(cx.background_executor.clone()); fs.insert_tree( - "/a", + path!("/a"), json!({ "main.rs": format!(r#"fn main() {{\n{}\n}}"#, format!("let i = {};\n", "√".repeat(10)).repeat(500)), "other.rs": "// Test file", @@ -3035,7 +3038,7 @@ pub mod tests { ) .await; - let project = Project::test(fs, ["/a".as_ref()], cx).await; + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(rust_lang()); @@ -3054,7 +3057,7 @@ pub mod tests { async move { 
assert_eq!( params.text_document.uri, - lsp::Url::from_file_path("/a/main.rs").unwrap(), + lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(), ); let query_start = params.range.start; Ok(Some(vec![lsp::InlayHint { @@ -3077,7 +3080,7 @@ pub mod tests { let buffer = project .update(cx, |project, cx| { - project.open_local_buffer("/a/main.rs", cx) + project.open_local_buffer(path!("/a/main.rs"), cx) }) .await .unwrap(); @@ -3250,7 +3253,7 @@ pub mod tests { let fs = FakeFs::new(cx.background_executor.clone()); fs.insert_tree( - "/a", + path!("/a"), json!({ "main.rs": "fn main() { let x = 42; @@ -3265,7 +3268,7 @@ pub mod tests { ) .await; - let project = Project::test(fs, ["/a".as_ref()], cx).await; + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(rust_lang()); @@ -3281,7 +3284,7 @@ pub mod tests { move |params, _| async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path("/a/main.rs").unwrap(), + lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(), ); Ok(Some( serde_json::from_value(json!([ @@ -3351,7 +3354,7 @@ pub mod tests { let buffer = project .update(cx, |project, cx| { - project.open_local_buffer("/a/main.rs", cx) + project.open_local_buffer(path!("/a/main.rs"), cx) }) .await .unwrap(); @@ -3408,7 +3411,7 @@ pub mod tests { ) -> (&'static str, WindowHandle, FakeLanguageServer) { let fs = FakeFs::new(cx.background_executor.clone()); fs.insert_tree( - "/a", + path!("/a"), json!({ "main.rs": "fn main() { a } // and some long comment to ensure inlays are not trimmed out", "other.rs": "// Test file", @@ -3416,8 +3419,8 @@ pub mod tests { ) .await; - let project = Project::test(fs, ["/a".as_ref()], cx).await; - let file_path = "/a/main.rs"; + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; + let file_path = path!("/a/main.rs"); let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(rust_lang()); @@ -3435,7 +3438,7 @@ pub mod tests { let buffer = project .update(cx, |project, cx| { - project.open_local_buffer("/a/main.rs", cx) + project.open_local_buffer(path!("/a/main.rs"), cx) }) .await .unwrap(); diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 2679df6a7c306de5ccf1b0a1b01da4c4bc4fca58..d53b95b007e733bd81f5d3c7f9d536b8a0f52c98 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -1717,6 +1717,7 @@ mod tests { use language::{LanguageMatcher, TestFile}; use project::FakeFs; use std::path::{Path, PathBuf}; + use util::path; #[gpui::test] fn test_path_for_file(cx: &mut App) { @@ -1771,24 +1772,24 @@ mod tests { init_test(cx, |_| {}); let fs = FakeFs::new(cx.executor()); - fs.insert_file("/file.rs", Default::default()).await; + fs.insert_file(path!("/file.rs"), Default::default()).await; // Test case 1: Deserialize with path and contents { - let project = Project::test(fs.clone(), ["/file.rs".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/file.rs").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); let workspace_id = workspace::WORKSPACE_DB.next_id().await.unwrap(); let item_id = 1234 as ItemId; let mtime = fs - .metadata(Path::new("/file.rs")) + .metadata(Path::new(path!("/file.rs"))) .await .unwrap() .unwrap() .mtime; let serialized_editor = SerializedEditor { - abs_path: Some(PathBuf::from("/file.rs")), + abs_path: 
Some(PathBuf::from(path!("/file.rs"))), contents: Some("fn main() {}".to_string()), language: Some("Rust".to_string()), mtime: Some(mtime), @@ -1812,7 +1813,7 @@ mod tests { // Test case 2: Deserialize with only path { - let project = Project::test(fs.clone(), ["/file.rs".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/file.rs").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); @@ -1820,7 +1821,7 @@ mod tests { let item_id = 5678 as ItemId; let serialized_editor = SerializedEditor { - abs_path: Some(PathBuf::from("/file.rs")), + abs_path: Some(PathBuf::from(path!("/file.rs"))), contents: None, language: None, mtime: None, @@ -1845,7 +1846,7 @@ mod tests { // Test case 3: Deserialize with no path (untitled buffer, with content and language) { - let project = Project::test(fs.clone(), ["/file.rs".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/file.rs").as_ref()], cx).await; // Add Rust to the language, so that we can restore the language of the buffer project.update(cx, |project, _| project.languages().add(rust_language())); @@ -1884,7 +1885,7 @@ mod tests { // Test case 4: Deserialize with path, content, and old mtime { - let project = Project::test(fs.clone(), ["/file.rs".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/file.rs").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); @@ -1893,7 +1894,7 @@ mod tests { let item_id = 9345 as ItemId; let old_mtime = MTime::from_seconds_and_nanos(0, 50); let serialized_editor = SerializedEditor { - abs_path: Some(PathBuf::from("/file.rs")), + abs_path: Some(PathBuf::from(path!("/file.rs"))), contents: Some("fn main() {}".to_string()), language: Some("Rust".to_string()), mtime: Some(old_mtime), diff --git a/crates/editor/src/test.rs b/crates/editor/src/test.rs index 6b8451cba1d0bc724a5579d7fe50ce89fbfaca74..35fb1b4c91f8571aa4d68245bca75929dab98c1c 100644 --- a/crates/editor/src/test.rs +++ b/crates/editor/src/test.rs @@ -1,12 +1,15 @@ pub mod editor_lsp_test_context; pub mod editor_test_context; +use std::sync::LazyLock; + use crate::{ display_map::{DisplayMap, DisplaySnapshot, ToDisplayPoint}, DisplayPoint, Editor, EditorMode, FoldPlaceholder, MultiBuffer, }; use gpui::{ - AppContext as _, Context, Entity, Font, FontFeatures, FontStyle, FontWeight, Pixels, Window, + font, AppContext as _, Context, Entity, Font, FontFeatures, FontStyle, FontWeight, Pixels, + Window, }; use project::Project; use util::test::{marked_text_offsets, marked_text_ranges}; @@ -19,6 +22,22 @@ fn init_logger() { } } +pub fn test_font() -> Font { + static TEST_FONT: LazyLock = LazyLock::new(|| { + #[cfg(not(target_os = "windows"))] + { + font("Helvetica") + } + + #[cfg(target_os = "windows")] + { + font("Courier New") + } + }); + + TEST_FONT.clone() +} + // Returns a snapshot from text containing '|' character markers with the markers removed, and DisplayPoints for each one. 
pub fn marked_display_snapshot( text: &str, diff --git a/crates/extension_host/src/extension_store_test.rs b/crates/extension_host/src/extension_store_test.rs index e65678bcc8b5ffd7769d1af2683e16a19e1e2209..137e3f80d92180fcdd2c9ffcc06722f711684dc1 100644 --- a/crates/extension_host/src/extension_store_test.rs +++ b/crates/extension_host/src/extension_store_test.rs @@ -455,7 +455,12 @@ async fn test_extension_store(cx: &mut TestAppContext) { }); } +// todo(windows) +// Disable this test on Windows for now. Because this test hangs at +// `let fake_server = fake_servers.next().await.unwrap();`. +// Reenable this test when we figure out why. #[gpui::test] +#[cfg_attr(target_os = "windows", ignore)] async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { init_test(cx); cx.executor().allow_parking(); @@ -634,6 +639,8 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { .await .unwrap(); + // todo(windows) + // This test hangs here on Windows. let fake_server = fake_servers.next().await.unwrap(); let expected_server_path = extensions_dir.join(format!("work/{test_extension_id}/gleam-v1.2.3/gleam")); diff --git a/crates/file_finder/src/file_finder_tests.rs b/crates/file_finder/src/file_finder_tests.rs index b25ed8b9c1555a130db96a0d8995b3e2b5436ff3..8555da775e693b9c48068668b040ae84ebb30156 100644 --- a/crates/file_finder/src/file_finder_tests.rs +++ b/crates/file_finder/src/file_finder_tests.rs @@ -6,6 +6,7 @@ use gpui::{Entity, TestAppContext, VisualTestContext}; use menu::{Confirm, SelectNext, SelectPrev}; use project::{RemoveOptions, FS_WATCH_LATENCY}; use serde_json::json; +use util::path; use workspace::{AppState, ToggleFileFinder, Workspace}; #[ctor::ctor] @@ -90,7 +91,7 @@ async fn test_absolute_paths(cx: &mut TestAppContext) { .fs .as_fake() .insert_tree( - "/root", + path!("/root"), json!({ "a": { "file1.txt": "", @@ -102,16 +103,16 @@ async fn test_absolute_paths(cx: &mut TestAppContext) { ) .await; - let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; let (picker, workspace, cx) = build_find_picker(project, cx); - let matching_abs_path = "/root/a/b/file2.txt"; + let matching_abs_path = path!("/root/a/b/file2.txt").to_string(); picker .update_in(cx, |picker, window, cx| { picker .delegate - .update_matches(matching_abs_path.to_string(), window, cx) + .update_matches(matching_abs_path, window, cx) }) .await; picker.update(cx, |picker, _| { @@ -128,12 +129,12 @@ async fn test_absolute_paths(cx: &mut TestAppContext) { assert_eq!(active_editor.read(cx).title(cx), "file2.txt"); }); - let mismatching_abs_path = "/root/a/b/file1.txt"; + let mismatching_abs_path = path!("/root/a/b/file1.txt").to_string(); picker .update_in(cx, |picker, window, cx| { picker .delegate - .update_matches(mismatching_abs_path.to_string(), window, cx) + .update_matches(mismatching_abs_path, window, cx) }) .await; picker.update(cx, |picker, _| { @@ -518,7 +519,7 @@ async fn test_path_distance_ordering(cx: &mut TestAppContext) { .fs .as_fake() .insert_tree( - "/root", + path!("/root"), json!({ "dir1": { "a.txt": "" }, "dir2": { @@ -529,7 +530,7 @@ async fn test_path_distance_ordering(cx: &mut TestAppContext) { ) .await; - let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| 
Workspace::test_new(project, window, cx)); let worktree_id = cx.read(|cx| { @@ -606,7 +607,7 @@ async fn test_query_history(cx: &mut gpui::TestAppContext) { .fs .as_fake() .insert_tree( - "/src", + path!("/src"), json!({ "test": { "first.rs": "// First Rust file", @@ -617,7 +618,7 @@ async fn test_query_history(cx: &mut gpui::TestAppContext) { ) .await; - let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/src").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); let worktree_id = cx.read(|cx| { let worktrees = workspace.read(cx).worktrees(cx).collect::>(); @@ -648,7 +649,7 @@ async fn test_query_history(cx: &mut gpui::TestAppContext) { worktree_id, path: Arc::from(Path::new("test/first.rs")), }, - Some(PathBuf::from("/src/test/first.rs")) + Some(PathBuf::from(path!("/src/test/first.rs"))) )], "Should show 1st opened item in the history when opening the 2nd item" ); @@ -663,14 +664,14 @@ async fn test_query_history(cx: &mut gpui::TestAppContext) { worktree_id, path: Arc::from(Path::new("test/second.rs")), }, - Some(PathBuf::from("/src/test/second.rs")) + Some(PathBuf::from(path!("/src/test/second.rs"))) ), FoundPath::new( ProjectPath { worktree_id, path: Arc::from(Path::new("test/first.rs")), }, - Some(PathBuf::from("/src/test/first.rs")) + Some(PathBuf::from(path!("/src/test/first.rs"))) ), ], "Should show 1st and 2nd opened items in the history when opening the 3rd item. \ @@ -687,21 +688,21 @@ async fn test_query_history(cx: &mut gpui::TestAppContext) { worktree_id, path: Arc::from(Path::new("test/third.rs")), }, - Some(PathBuf::from("/src/test/third.rs")) + Some(PathBuf::from(path!("/src/test/third.rs"))) ), FoundPath::new( ProjectPath { worktree_id, path: Arc::from(Path::new("test/second.rs")), }, - Some(PathBuf::from("/src/test/second.rs")) + Some(PathBuf::from(path!("/src/test/second.rs"))) ), FoundPath::new( ProjectPath { worktree_id, path: Arc::from(Path::new("test/first.rs")), }, - Some(PathBuf::from("/src/test/first.rs")) + Some(PathBuf::from(path!("/src/test/first.rs"))) ), ], "Should show 1st, 2nd and 3rd opened items in the history when opening the 2nd item again. \ @@ -718,21 +719,21 @@ async fn test_query_history(cx: &mut gpui::TestAppContext) { worktree_id, path: Arc::from(Path::new("test/second.rs")), }, - Some(PathBuf::from("/src/test/second.rs")) + Some(PathBuf::from(path!("/src/test/second.rs"))) ), FoundPath::new( ProjectPath { worktree_id, path: Arc::from(Path::new("test/third.rs")), }, - Some(PathBuf::from("/src/test/third.rs")) + Some(PathBuf::from(path!("/src/test/third.rs"))) ), FoundPath::new( ProjectPath { worktree_id, path: Arc::from(Path::new("test/first.rs")), }, - Some(PathBuf::from("/src/test/first.rs")) + Some(PathBuf::from(path!("/src/test/first.rs"))) ), ], "Should show 1st, 2nd and 3rd opened items in the history when opening the 3rd item again. 
\ @@ -748,7 +749,7 @@ async fn test_external_files_history(cx: &mut gpui::TestAppContext) { .fs .as_fake() .insert_tree( - "/src", + path!("/src"), json!({ "test": { "first.rs": "// First Rust file", @@ -762,7 +763,7 @@ async fn test_external_files_history(cx: &mut gpui::TestAppContext) { .fs .as_fake() .insert_tree( - "/external-src", + path!("/external-src"), json!({ "test": { "third.rs": "// Third Rust file", @@ -772,10 +773,10 @@ async fn test_external_files_history(cx: &mut gpui::TestAppContext) { ) .await; - let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/src").as_ref()], cx).await; cx.update(|cx| { project.update(cx, |project, cx| { - project.find_or_create_worktree("/external-src", false, cx) + project.find_or_create_worktree(path!("/external-src"), false, cx) }) }) .detach(); @@ -791,7 +792,7 @@ async fn test_external_files_history(cx: &mut gpui::TestAppContext) { workspace .update_in(cx, |workspace, window, cx| { workspace.open_abs_path( - PathBuf::from("/external-src/test/third.rs"), + PathBuf::from(path!("/external-src/test/third.rs")), false, window, cx, @@ -827,7 +828,7 @@ async fn test_external_files_history(cx: &mut gpui::TestAppContext) { worktree_id: external_worktree_id, path: Arc::from(Path::new("")), }, - Some(PathBuf::from("/external-src/test/third.rs")) + Some(PathBuf::from(path!("/external-src/test/third.rs"))) )], "Should show external file with its full path in the history after it was open" ); @@ -842,14 +843,14 @@ async fn test_external_files_history(cx: &mut gpui::TestAppContext) { worktree_id, path: Arc::from(Path::new("test/second.rs")), }, - Some(PathBuf::from("/src/test/second.rs")) + Some(PathBuf::from(path!("/src/test/second.rs"))) ), FoundPath::new( ProjectPath { worktree_id: external_worktree_id, path: Arc::from(Path::new("")), }, - Some(PathBuf::from("/external-src/test/third.rs")) + Some(PathBuf::from(path!("/external-src/test/third.rs"))) ), ], "Should keep external file with history updates", @@ -864,7 +865,7 @@ async fn test_toggle_panel_new_selections(cx: &mut gpui::TestAppContext) { .fs .as_fake() .insert_tree( - "/src", + path!("/src"), json!({ "test": { "first.rs": "// First Rust file", @@ -875,7 +876,7 @@ async fn test_toggle_panel_new_selections(cx: &mut gpui::TestAppContext) { ) .await; - let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/src").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); // generate some history to select from @@ -919,7 +920,7 @@ async fn test_search_preserves_history_items(cx: &mut gpui::TestAppContext) { .fs .as_fake() .insert_tree( - "/src", + path!("/src"), json!({ "test": { "first.rs": "// First Rust file", @@ -931,7 +932,7 @@ async fn test_search_preserves_history_items(cx: &mut gpui::TestAppContext) { ) .await; - let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/src").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); let worktree_id = cx.read(|cx| { let worktrees = workspace.read(cx).worktrees(cx).collect::>(); @@ -964,7 +965,7 @@ async fn test_search_preserves_history_items(cx: &mut gpui::TestAppContext) { worktree_id, path: Arc::from(Path::new("test/first.rs")), }, - 
Some(PathBuf::from("/src/test/first.rs")) + Some(PathBuf::from(path!("/src/test/first.rs"))) )); assert_eq!(matches.search.len(), 1, "Only one non-history item contains {first_query}, it should be present"); assert_eq!(matches.search.first().unwrap(), Path::new("test/fourth.rs")); @@ -1007,7 +1008,7 @@ async fn test_search_preserves_history_items(cx: &mut gpui::TestAppContext) { worktree_id, path: Arc::from(Path::new("test/first.rs")), }, - Some(PathBuf::from("/src/test/first.rs")) + Some(PathBuf::from(path!("/src/test/first.rs"))) )); assert_eq!(matches.search.len(), 1, "Only one non-history item contains {first_query_again}, it should be present, even after non-matching query"); assert_eq!(matches.search.first().unwrap(), Path::new("test/fourth.rs")); @@ -1022,7 +1023,7 @@ async fn test_search_sorts_history_items(cx: &mut gpui::TestAppContext) { .fs .as_fake() .insert_tree( - "/root", + path!("/root"), json!({ "test": { "1_qw": "// First file that matches the query", @@ -1037,7 +1038,7 @@ async fn test_search_sorts_history_items(cx: &mut gpui::TestAppContext) { ) .await; - let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); // generate some history to select from open_close_queried_buffer("1", 1, "1_qw", &workspace, cx).await; @@ -1079,7 +1080,7 @@ async fn test_select_current_open_file_when_no_history(cx: &mut gpui::TestAppCon .fs .as_fake() .insert_tree( - "/root", + path!("/root"), json!({ "test": { "1_qw": "", @@ -1088,7 +1089,7 @@ async fn test_select_current_open_file_when_no_history(cx: &mut gpui::TestAppCon ) .await; - let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); // Open new buffer open_queried_buffer("1", 1, "1_qw", &workspace, cx).await; @@ -1109,7 +1110,7 @@ async fn test_keep_opened_file_on_top_of_search_results_and_select_next_one( .fs .as_fake() .insert_tree( - "/src", + path!("/src"), json!({ "test": { "bar.rs": "// Bar file", @@ -1122,7 +1123,7 @@ async fn test_keep_opened_file_on_top_of_search_results_and_select_next_one( ) .await; - let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/src").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); open_close_queried_buffer("bar", 1, "bar.rs", &workspace, cx).await; @@ -1202,7 +1203,7 @@ async fn test_non_separate_history_items(cx: &mut TestAppContext) { .fs .as_fake() .insert_tree( - "/src", + path!("/src"), json!({ "test": { "bar.rs": "// Bar file", @@ -1215,7 +1216,7 @@ async fn test_non_separate_history_items(cx: &mut TestAppContext) { ) .await; - let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/src").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); open_close_queried_buffer("bar", 1, "bar.rs", &workspace, cx).await; @@ -1296,7 +1297,7 @@ async fn test_history_items_shown_in_order_of_open(cx: &mut TestAppContext) { .fs .as_fake() .insert_tree( - "/test", + path!("/test"), json!({ 
"test": { "1.txt": "// One", @@ -1307,7 +1308,7 @@ async fn test_history_items_shown_in_order_of_open(cx: &mut TestAppContext) { ) .await; - let project = Project::test(app_state.fs.clone(), ["/test".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/test").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); open_queried_buffer("1", 1, "1.txt", &workspace, cx).await; @@ -1354,7 +1355,7 @@ async fn test_selected_history_item_stays_selected_on_worktree_updated(cx: &mut .fs .as_fake() .insert_tree( - "/test", + path!("/test"), json!({ "test": { "1.txt": "// One", @@ -1365,7 +1366,7 @@ async fn test_selected_history_item_stays_selected_on_worktree_updated(cx: &mut ) .await; - let project = Project::test(app_state.fs.clone(), ["/test".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/test").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); open_close_queried_buffer("1", 1, "1.txt", &workspace, cx).await; @@ -1384,7 +1385,11 @@ async fn test_selected_history_item_stays_selected_on_worktree_updated(cx: &mut // Add more files to the worktree to trigger update matches for i in 0..5 { - let filename = format!("/test/{}.txt", 4 + i); + let filename = if cfg!(windows) { + format!("C:/test/{}.txt", 4 + i) + } else { + format!("/test/{}.txt", 4 + i) + }; app_state .fs .create_file(Path::new(&filename), Default::default()) @@ -1410,7 +1415,7 @@ async fn test_history_items_vs_very_good_external_match(cx: &mut gpui::TestAppCo .fs .as_fake() .insert_tree( - "/src", + path!("/src"), json!({ "collab_ui": { "first.rs": "// First Rust file", @@ -1422,7 +1427,7 @@ async fn test_history_items_vs_very_good_external_match(cx: &mut gpui::TestAppCo ) .await; - let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/src").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); // generate some history to select from open_close_queried_buffer("fir", 1, "first.rs", &workspace, cx).await; @@ -1456,7 +1461,7 @@ async fn test_nonexistent_history_items_not_shown(cx: &mut gpui::TestAppContext) .fs .as_fake() .insert_tree( - "/src", + path!("/src"), json!({ "test": { "first.rs": "// First Rust file", @@ -1467,7 +1472,7 @@ async fn test_nonexistent_history_items_not_shown(cx: &mut gpui::TestAppContext) ) .await; - let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/src").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); // generate some history to select from open_close_queried_buffer("fir", 1, "first.rs", &workspace, cx).await; open_close_queried_buffer("non", 1, "nonexistent.rs", &workspace, cx).await; @@ -1476,7 +1481,7 @@ async fn test_nonexistent_history_items_not_shown(cx: &mut gpui::TestAppContext) app_state .fs .remove_file( - Path::new("/src/test/nonexistent.rs"), + Path::new(path!("/src/test/nonexistent.rs")), RemoveOptions::default(), ) .await @@ -1742,14 +1747,14 @@ async fn test_keeps_file_finder_open_after_modifier_keys_release(cx: &mut gpui:: .fs .as_fake() .insert_tree( - "/test", + path!("/test"), json!({ "1.txt": "// One", }), ) .await; - let project = Project::test(app_state.fs.clone(), ["/test".as_ref()], 
cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/test").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); open_queried_buffer("1", 1, "1.txt", &workspace, cx).await; @@ -1809,7 +1814,7 @@ async fn test_switches_between_release_norelease_modes_on_forward_nav( .fs .as_fake() .insert_tree( - "/test", + path!("/test"), json!({ "1.txt": "// One", "2.txt": "// Two", @@ -1817,7 +1822,7 @@ async fn test_switches_between_release_norelease_modes_on_forward_nav( ) .await; - let project = Project::test(app_state.fs.clone(), ["/test".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/test").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); open_queried_buffer("1", 1, "1.txt", &workspace, cx).await; @@ -1864,7 +1869,7 @@ async fn test_switches_between_release_norelease_modes_on_backward_nav( .fs .as_fake() .insert_tree( - "/test", + path!("/test"), json!({ "1.txt": "// One", "2.txt": "// Two", @@ -1873,7 +1878,7 @@ async fn test_switches_between_release_norelease_modes_on_backward_nav( ) .await; - let project = Project::test(app_state.fs.clone(), ["/test".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/test").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); open_queried_buffer("1", 1, "1.txt", &workspace, cx).await; @@ -1921,14 +1926,14 @@ async fn test_extending_modifiers_does_not_confirm_selection(cx: &mut gpui::Test .fs .as_fake() .insert_tree( - "/test", + path!("/test"), json!({ "1.txt": "// One", }), ) .await; - let project = Project::test(app_state.fs.clone(), ["/test".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/test").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); open_queried_buffer("1", 1, "1.txt", &workspace, cx).await; diff --git a/crates/fuzzy/src/matcher.rs b/crates/fuzzy/src/matcher.rs index 1b039c16f507bab1a8865ad7e917c2e7d70b0b0b..66a480d87a815181f68d76318e18e711faed4f60 100644 --- a/crates/fuzzy/src/matcher.rs +++ b/crates/fuzzy/src/matcher.rs @@ -9,6 +9,8 @@ const BASE_DISTANCE_PENALTY: f64 = 0.6; const ADDITIONAL_DISTANCE_PENALTY: f64 = 0.05; const MIN_DISTANCE_PENALTY: f64 = 0.2; +// TODO: +// Use `Path` instead of `&str` for paths. pub struct Matcher<'a> { query: &'a [char], lowercase_query: &'a [char], @@ -173,6 +175,8 @@ impl<'a> Matcher<'a> { path_idx: usize, cur_score: f64, ) -> f64 { + use std::path::MAIN_SEPARATOR; + if query_idx == self.query.len() { return 1.0; } @@ -196,13 +200,19 @@ impl<'a> Matcher<'a> { } else { path_cased[j - prefix.len()] }; - let is_path_sep = path_char == '/' || path_char == '\\'; + let is_path_sep = path_char == MAIN_SEPARATOR; if query_idx == 0 && is_path_sep { last_slash = j; } - if query_char == path_char || (is_path_sep && query_char == '_' || query_char == '\\') { + #[cfg(not(target_os = "windows"))] + let need_to_score = + query_char == path_char || (is_path_sep && query_char == '_' || query_char == '\\'); + // `query_char == '\\'` breaks `test_match_path_entries` on Windows, `\` is only used as a path separator on Windows. 
+ #[cfg(target_os = "windows")] + let need_to_score = query_char == path_char || (is_path_sep && query_char == '_'); + if need_to_score { let curr = if j < prefix.len() { prefix[j] } else { @@ -217,7 +227,7 @@ impl<'a> Matcher<'a> { path[j - 1 - prefix.len()] }; - if last == '/' { + if last == MAIN_SEPARATOR { char_score = 0.9; } else if (last == '-' || last == '_' || last == ' ' || last.is_numeric()) || (last.is_lowercase() && curr.is_uppercase()) @@ -238,7 +248,7 @@ impl<'a> Matcher<'a> { // Apply a severe penalty if the case doesn't match. // This will make the exact matches have higher score than the case-insensitive and the // path insensitive matches. - if (self.smart_case || curr == '/') && self.query[query_idx] != curr { + if (self.smart_case || curr == MAIN_SEPARATOR) && self.query[query_idx] != curr { char_score *= 0.001; } @@ -322,6 +332,7 @@ mod tests { assert_eq!(matcher.last_positions, vec![0, 3, 4, 8]); } + #[cfg(not(target_os = "windows"))] #[test] fn test_match_path_entries() { let paths = vec![ @@ -363,6 +374,54 @@ mod tests { ); } + /// todo(windows) + /// Now, on Windows, users can only use the backslash as a path separator. + /// I do want to support both the backslash and the forward slash as path separators on Windows. + #[cfg(target_os = "windows")] + #[test] + fn test_match_path_entries() { + let paths = vec![ + "", + "a", + "ab", + "abC", + "abcd", + "alphabravocharlie", + "AlphaBravoCharlie", + "thisisatestdir", + "\\\\\\\\\\ThisIsATestDir", + "\\this\\is\\a\\test\\dir", + "\\test\\tiatd", + ]; + + assert_eq!( + match_single_path_query("abc", false, &paths), + vec![ + ("abC", vec![0, 1, 2]), + ("abcd", vec![0, 1, 2]), + ("AlphaBravoCharlie", vec![0, 5, 10]), + ("alphabravocharlie", vec![4, 5, 10]), + ] + ); + assert_eq!( + match_single_path_query("t\\i\\a\\t\\d", false, &paths), + vec![( + "\\this\\is\\a\\test\\dir", + vec![1, 5, 6, 8, 9, 10, 11, 15, 16] + ),] + ); + + assert_eq!( + match_single_path_query("tiatd", false, &paths), + vec![ + ("\\test\\tiatd", vec![6, 7, 8, 9, 10]), + ("\\this\\is\\a\\test\\dir", vec![1, 6, 9, 11, 16]), + ("\\\\\\\\\\ThisIsATestDir", vec![5, 9, 11, 12, 16]), + ("thisisatestdir", vec![0, 2, 6, 7, 11]), + ] + ); + } + #[test] fn test_lowercase_longer_than_uppercase() { // This character has more chars in lower-case than in upper-case. diff --git a/crates/git/src/blame.rs b/crates/git/src/blame.rs index 0a7a4b921145f28461734812a96ea6f6ad3c115d..e4947e5bbd6dae0135dc3dfc730cff77606da49d 100644 --- a/crates/git/src/blame.rs +++ b/crates/git/src/blame.rs @@ -353,7 +353,7 @@ mod tests { let want_json = std::fs::read_to_string(&path).unwrap_or_else(|_| { panic!("could not read golden test data file at {:?}. 
Did you run the test with UPDATE_GOLDEN=true before?", path); - }); + }).replace("\r\n", "\n"); pretty_assertions::assert_eq!(have_json, want_json, "wrong blame entries"); } diff --git a/crates/gpui/src/platform/windows/direct_write.rs b/crates/gpui/src/platform/windows/direct_write.rs index fb53a833d64e66386e5cb10f9955925cd327003a..eef52b2014d7606f6e2e4d8c33194a0aab4a5a25 100644 --- a/crates/gpui/src/platform/windows/direct_write.rs +++ b/crates/gpui/src/platform/windows/direct_write.rs @@ -428,17 +428,24 @@ impl DirectWriteState { target_font.fallbacks.as_ref(), ) .unwrap_or_else(|| { - let family = self.system_ui_font_name.clone(); - log::error!("{} not found, use {} instead.", target_font.family, family); - self.get_font_id_from_font_collection( - family.as_ref(), - target_font.weight, - target_font.style, - &target_font.features, - target_font.fallbacks.as_ref(), - true, - ) - .unwrap() + #[cfg(any(test, feature = "test-support"))] + { + panic!("ERROR: {} font not found!", target_font.family); + } + #[cfg(not(any(test, feature = "test-support")))] + { + let family = self.system_ui_font_name.clone(); + log::error!("{} not found, use {} instead.", target_font.family, family); + self.get_font_id_from_font_collection( + family.as_ref(), + target_font.weight, + target_font.style, + &target_font.features, + target_font.fallbacks.as_ref(), + true, + ) + .unwrap() + } }) } } diff --git a/crates/gpui/src/platform/windows/platform.rs b/crates/gpui/src/platform/windows/platform.rs index ef4fd4a778a0465801cd81dbe801fa292601a1ea..5423dfcbc775e037722441bfe9e4f0062ced77cb 100644 --- a/crates/gpui/src/platform/windows/platform.rs +++ b/crates/gpui/src/platform/windows/platform.rs @@ -756,21 +756,20 @@ fn should_auto_hide_scrollbars() -> Result { #[cfg(test)] mod tests { - use crate::{ClipboardItem, Platform, WindowsPlatform}; + use crate::{read_from_clipboard, write_to_clipboard, ClipboardItem}; #[test] fn test_clipboard() { - let platform = WindowsPlatform::new(); - let item = ClipboardItem::new_string("你好".to_string()); - platform.write_to_clipboard(item.clone()); - assert_eq!(platform.read_from_clipboard(), Some(item)); + let item = ClipboardItem::new_string("你好,我是张小白".to_string()); + write_to_clipboard(item.clone()); + assert_eq!(read_from_clipboard(), Some(item)); let item = ClipboardItem::new_string("12345".to_string()); - platform.write_to_clipboard(item.clone()); - assert_eq!(platform.read_from_clipboard(), Some(item)); + write_to_clipboard(item.clone()); + assert_eq!(read_from_clipboard(), Some(item)); let item = ClipboardItem::new_string_with_json_metadata("abcdef".to_string(), vec![3, 4]); - platform.write_to_clipboard(item.clone()); - assert_eq!(platform.read_from_clipboard(), Some(item)); + write_to_clipboard(item.clone()); + assert_eq!(read_from_clipboard(), Some(item)); } } diff --git a/crates/gpui_macros/Cargo.toml b/crates/gpui_macros/Cargo.toml index c8236245e69332565d07f765daa981f0681a1c1c..997b167f891dfdfb3c1f3988b687aeddd827ff9d 100644 --- a/crates/gpui_macros/Cargo.toml +++ b/crates/gpui_macros/Cargo.toml @@ -14,9 +14,9 @@ proc-macro = true doctest = true [dependencies] -proc-macro2 = "1.0.66" -quote = "1.0.9" -syn = { version = "1.0.72", features = ["full", "extra-traits"] } +proc-macro2.workspace = true +quote.workspace = true +syn.workspace = true [dev-dependencies] gpui.workspace = true diff --git a/crates/language_tools/src/lsp_log_tests.rs b/crates/language_tools/src/lsp_log_tests.rs index 204625a05f40ac0f0c0269b277593b2c5ce3d1f9..5d318d0afadbbea49595ef5db760433ca4e46ec9 
100644 --- a/crates/language_tools/src/lsp_log_tests.rs +++ b/crates/language_tools/src/lsp_log_tests.rs @@ -11,6 +11,7 @@ use lsp_log::LogKind; use project::{FakeFs, Project}; use serde_json::json; use settings::SettingsStore; +use util::path; #[gpui::test] async fn test_lsp_logs(cx: &mut TestAppContext) { @@ -22,7 +23,7 @@ async fn test_lsp_logs(cx: &mut TestAppContext) { let fs = FakeFs::new(cx.background_executor.clone()); fs.insert_tree( - "/the-root", + path!("/the-root"), json!({ "test.rs": "", "package.json": "", @@ -30,7 +31,7 @@ async fn test_lsp_logs(cx: &mut TestAppContext) { ) .await; - let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(Arc::new(Language::new( @@ -57,7 +58,7 @@ async fn test_lsp_logs(cx: &mut TestAppContext) { let _rust_buffer = project .update(cx, |project, cx| { - project.open_local_buffer_with_lsp("/the-root/test.rs", cx) + project.open_local_buffer_with_lsp(path!("/the-root/test.rs"), cx) }) .await .unwrap(); diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index ba68cf2be19e051c38f1bd5fea7c921053d0f55d..61167620fca03616078da53b265b2cefe05ce20f 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -818,11 +818,12 @@ mod tests { use lsp::CompletionItemLabelDetails; use settings::SettingsStore; use theme::SyntaxTheme; + use util::path; #[gpui::test] async fn test_process_rust_diagnostics() { let mut params = lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path("/a").unwrap(), + uri: lsp::Url::from_file_path(path!("/a")).unwrap(), version: None, diagnostics: vec![ // no newlines diff --git a/crates/prettier/src/prettier.rs b/crates/prettier/src/prettier.rs index 4411e43f1b4458759271b769a624d8b1a9642c13..a9254ac157d56b2d5508b4313f2cea41b18da8bd 100644 --- a/crates/prettier/src/prettier.rs +++ b/crates/prettier/src/prettier.rs @@ -946,7 +946,7 @@ mod tests { .await { Ok(path) => panic!("Expected to fail for prettier in package.json but not in node_modules found, but got path {path:?}"), Err(e) => { - let message = e.to_string(); + let message = e.to_string().replace("\\\\", "/"); assert!(message.contains("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader"), "Error message should mention which project had prettier defined"); assert!(message.contains("/root/work/full-stack-foundations"), "Error message should mention potential candidates without prettier node_modules contents"); }, diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 97dbc3bd2479c3d1fae76a849886912ac02a99ba..d2f36ae960ce70caf6f3a78b4b17c46081a95512 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -25,10 +25,7 @@ use std::{mem, num::NonZeroU32, ops::Range, task::Poll}; use task::{ResolvedTask, TaskContext}; use unindent::Unindent as _; use util::{ - assert_set_eq, - paths::{replace_path_separator, PathMatcher}, - test::TempTree, - TryFutureExt as _, + assert_set_eq, path, paths::PathMatcher, separator, test::TempTree, uri, TryFutureExt as _, }; #[gpui::test] @@ -37,7 +34,10 @@ async fn test_block_via_channel(cx: &mut gpui::TestAppContext) { let (tx, mut rx) = futures::channel::mpsc::unbounded(); let _thread = std::thread::spawn(move || { + #[cfg(not(target_os = "windows"))] std::fs::metadata("/tmp").unwrap(); + 
#[cfg(target_os = "windows")] + std::fs::metadata("C:/Windows").unwrap(); std::thread::sleep(Duration::from_millis(1000)); tx.unbounded_send(1).unwrap(); }); @@ -199,7 +199,7 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/the-root", + path!("/dir"), json!({ ".zed": { "settings.json": r#"{ "tab_size": 8 }"#, @@ -227,7 +227,7 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) ) .await; - let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap()); let task_context = TaskContext::default(); @@ -280,8 +280,12 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) ( TaskSourceKind::Worktree { id: worktree_id, - directory_in_worktree: PathBuf::from("b/.zed"), - id_base: "local worktree tasks from directory \"b/.zed\"".into(), + directory_in_worktree: PathBuf::from(separator!("b/.zed")), + id_base: if cfg!(windows) { + "local worktree tasks from directory \"b\\\\.zed\"".into() + } else { + "local worktree tasks from directory \"b/.zed\"".into() + }, }, "cargo check".to_string(), vec!["check".to_string()], @@ -359,8 +363,12 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) ( TaskSourceKind::Worktree { id: worktree_id, - directory_in_worktree: PathBuf::from("b/.zed"), - id_base: "local worktree tasks from directory \"b/.zed\"".into(), + directory_in_worktree: PathBuf::from(separator!("b/.zed")), + id_base: if cfg!(windows) { + "local worktree tasks from directory \"b\\\\.zed\"".into() + } else { + "local worktree tasks from directory \"b/.zed\"".into() + }, }, "cargo check".to_string(), vec!["check".to_string()], @@ -392,7 +400,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/the-root", + path!("/dir"), json!({ "test.rs": "const A: i32 = 1;", "test2.rs": "", @@ -402,7 +410,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { ) .await; - let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); let mut fake_rust_servers = language_registry.register_fake_lsp( @@ -449,7 +457,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { // Open a buffer without an associated language server. let (toml_buffer, _handle) = project .update(cx, |project, cx| { - project.open_local_buffer_with_lsp("/the-root/Cargo.toml", cx) + project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx) }) .await .unwrap(); @@ -457,7 +465,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { // Open a buffer with an associated language server before the language for it has been loaded. 
let (rust_buffer, _handle2) = project .update(cx, |project, cx| { - project.open_local_buffer_with_lsp("/the-root/test.rs", cx) + project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx) }) .await .unwrap(); @@ -482,7 +490,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .await .text_document, lsp::TextDocumentItem { - uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(), + uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(), version: 0, text: "const A: i32 = 1;".to_string(), language_id: "rust".to_string(), @@ -512,7 +520,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .await .text_document, lsp::VersionedTextDocumentIdentifier::new( - lsp::Url::from_file_path("/the-root/test.rs").unwrap(), + lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(), 1 ) ); @@ -520,7 +528,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { // Open a third buffer with a different associated language server. let (json_buffer, _json_handle) = project .update(cx, |project, cx| { - project.open_local_buffer_with_lsp("/the-root/package.json", cx) + project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx) }) .await .unwrap(); @@ -533,7 +541,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .await .text_document, lsp::TextDocumentItem { - uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(), + uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(), version: 0, text: "{\"a\": 1}".to_string(), language_id: "json".to_string(), @@ -557,7 +565,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { // it is also configured based on the existing language server's capabilities. let (rust_buffer2, _handle4) = project .update(cx, |project, cx| { - project.open_local_buffer_with_lsp("/the-root/test2.rs", cx) + project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx) }) .await .unwrap(); @@ -583,7 +591,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .await .text_document, lsp::VersionedTextDocumentIdentifier::new( - lsp::Url::from_file_path("/the-root/test2.rs").unwrap(), + lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(), 1 ) ); @@ -598,20 +606,24 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .receive_notification::() .await .text_document, - lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()) + lsp::TextDocumentIdentifier::new( + lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap() + ) ); assert_eq!( fake_json_server .receive_notification::() .await .text_document, - lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()) + lsp::TextDocumentIdentifier::new( + lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap() + ) ); // Renames are reported only to servers matching the buffer's language. 
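The reason `path!` has to reach every `lsp::Url::from_file_path` call in these tests: the url crate only builds a file URL from an absolute path, and on Windows a bare `/dir/test.rs` has no drive prefix and is not absolute, so the old literals would return `Err` and the `.unwrap()` would panic. A hedged illustration of that behavior (as understood from the url crate, not checked against every version):

    #[cfg(target_os = "windows")]
    fn illustrate_windows_file_urls() {
        // Without a drive prefix the path is not absolute on Windows, so conversion fails.
        assert!(lsp::Url::from_file_path("/dir/test.rs").is_err());
        // Drive-qualified paths convert and serialize as "file:///C:/dir/test.rs".
        assert!(lsp::Url::from_file_path("C:/dir/test.rs").is_ok());
    }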
fs.rename( - Path::new("/the-root/test2.rs"), - Path::new("/the-root/test3.rs"), + Path::new(path!("/dir/test2.rs")), + Path::new(path!("/dir/test3.rs")), Default::default(), ) .await @@ -621,7 +633,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .receive_notification::() .await .text_document, - lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()), + lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()), ); assert_eq!( fake_rust_server @@ -629,7 +641,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .await .text_document, lsp::TextDocumentItem { - uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(), + uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(), version: 0, text: rust_buffer2.update(cx, |buffer, _| buffer.text()), language_id: "rust".to_string(), @@ -660,8 +672,8 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { // When the rename changes the extension of the file, the buffer gets closed on the old // language server and gets opened on the new one. fs.rename( - Path::new("/the-root/test3.rs"), - Path::new("/the-root/test3.json"), + Path::new(path!("/dir/test3.rs")), + Path::new(path!("/dir/test3.json")), Default::default(), ) .await @@ -671,7 +683,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .receive_notification::() .await .text_document, - lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),), + lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),), ); assert_eq!( fake_json_server @@ -679,7 +691,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .await .text_document, lsp::TextDocumentItem { - uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(), + uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(), version: 0, text: rust_buffer2.update(cx, |buffer, _| buffer.text()), language_id: "json".to_string(), @@ -705,7 +717,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .await .text_document, lsp::VersionedTextDocumentIdentifier::new( - lsp::Url::from_file_path("/the-root/test3.json").unwrap(), + lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(), 1 ) ); @@ -734,7 +746,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .await .text_document, lsp::TextDocumentItem { - uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(), + uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(), version: 0, text: rust_buffer.update(cx, |buffer, _| buffer.text()), language_id: "rust".to_string(), @@ -755,13 +767,13 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { ], [ lsp::TextDocumentItem { - uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(), + uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(), version: 0, text: json_buffer.update(cx, |buffer, _| buffer.text()), language_id: "json".to_string(), }, lsp::TextDocumentItem { - uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(), + uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(), version: 0, text: rust_buffer2.update(cx, |buffer, _| buffer.text()), language_id: "json".to_string(), @@ -773,7 +785,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { cx.update(|_| drop(_json_handle)); let close_message = lsp::DidCloseTextDocumentParams { 
text_document: lsp::TextDocumentIdentifier::new( - lsp::Url::from_file_path("/the-root/package.json").unwrap(), + lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(), ), }; assert_eq!( @@ -786,19 +798,11 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { #[gpui::test] async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) { - fn add_root_for_windows(path: &str) -> String { - if cfg!(windows) { - format!("C:{}", path) - } else { - path.to_string() - } - } - init_test(cx); let fs = FakeFs::new(cx.executor()); fs.insert_tree( - add_root_for_windows("/the-root"), + path!("/the-root"), json!({ ".gitignore": "target\n", "src": { @@ -826,7 +830,7 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon ) .await; - let project = Project::test(fs.clone(), [add_root_for_windows("/the-root").as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(rust_lang()); let mut fake_servers = language_registry.register_fake_lsp( @@ -842,7 +846,7 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon // Start the language server by opening a buffer with a compatible file extension. let _ = project .update(cx, |project, cx| { - project.open_local_buffer_with_lsp(add_root_for_windows("/the-root/src/a.rs"), cx) + project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx) }) .await .unwrap(); @@ -882,21 +886,21 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon lsp::DidChangeWatchedFilesRegistrationOptions { watchers: vec![ lsp::FileSystemWatcher { - glob_pattern: lsp::GlobPattern::String(add_root_for_windows( - "/the-root/Cargo.toml", - )), + glob_pattern: lsp::GlobPattern::String( + path!("/the-root/Cargo.toml").to_string(), + ), kind: None, }, lsp::FileSystemWatcher { - glob_pattern: lsp::GlobPattern::String(add_root_for_windows( - "/the-root/src/*.{rs,c}", - )), + glob_pattern: lsp::GlobPattern::String( + path!("/the-root/src/*.{rs,c}").to_string(), + ), kind: None, }, lsp::FileSystemWatcher { - glob_pattern: lsp::GlobPattern::String(add_root_for_windows( - "/the-root/target/y/**/*.rs", - )), + glob_pattern: lsp::GlobPattern::String( + path!("/the-root/target/y/**/*.rs").to_string(), + ), kind: None, }, ], @@ -949,32 +953,23 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon // Perform some file system mutations, two of which match the watched patterns, // and one of which does not. 
+ fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default()) + .await + .unwrap(); + fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default()) + .await + .unwrap(); + fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default()) + .await + .unwrap(); fs.create_file( - add_root_for_windows("/the-root/src/c.rs").as_ref(), - Default::default(), - ) - .await - .unwrap(); - fs.create_file( - add_root_for_windows("/the-root/src/d.txt").as_ref(), - Default::default(), - ) - .await - .unwrap(); - fs.remove_file( - add_root_for_windows("/the-root/src/b.rs").as_ref(), - Default::default(), - ) - .await - .unwrap(); - fs.create_file( - add_root_for_windows("/the-root/target/x/out/x2.rs").as_ref(), + path!("/the-root/target/x/out/x2.rs").as_ref(), Default::default(), ) .await .unwrap(); fs.create_file( - add_root_for_windows("/the-root/target/y/out/y2.rs").as_ref(), + path!("/the-root/target/y/out/y2.rs").as_ref(), Default::default(), ) .await @@ -986,16 +981,15 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon &*file_changes.lock(), &[ lsp::FileEvent { - uri: lsp::Url::from_file_path(add_root_for_windows("/the-root/src/b.rs")).unwrap(), + uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(), typ: lsp::FileChangeType::DELETED, }, lsp::FileEvent { - uri: lsp::Url::from_file_path(add_root_for_windows("/the-root/src/c.rs")).unwrap(), + uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(), typ: lsp::FileChangeType::CREATED, }, lsp::FileEvent { - uri: lsp::Url::from_file_path(add_root_for_windows("/the-root/target/y/out/y2.rs")) - .unwrap(), + uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(), typ: lsp::FileChangeType::CREATED, }, ] @@ -1008,7 +1002,7 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "a.rs": "let a = 1;", "b.rs": "let b = 2;" @@ -1016,15 +1010,24 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) { ) .await; - let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await; + let project = Project::test( + fs, + [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()], + cx, + ) + .await; let lsp_store = project.read_with(cx, |project, _| project.lsp_store()); let buffer_a = project - .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx)) + .update(cx, |project, cx| { + project.open_local_buffer(path!("/dir/a.rs"), cx) + }) .await .unwrap(); let buffer_b = project - .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx)) + .update(cx, |project, cx| { + project.open_local_buffer(path!("/dir/b.rs"), cx) + }) .await .unwrap(); @@ -1033,7 +1036,7 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) { .update_diagnostics( LanguageServerId(0), lsp::PublishDiagnosticsParams { - uri: Url::from_file_path("/dir/a.rs").unwrap(), + uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(), version: None, diagnostics: vec![lsp::Diagnostic { range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)), @@ -1050,7 +1053,7 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) { .update_diagnostics( LanguageServerId(0), lsp::PublishDiagnosticsParams { - uri: Url::from_file_path("/dir/b.rs").unwrap(), + uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(), version: None, diagnostics: vec![lsp::Diagnostic { 
range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)), @@ -1101,7 +1104,7 @@ async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/root", + path!("/root"), json!({ "dir": { ".git": { @@ -1116,11 +1119,11 @@ async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) { ) .await; - let project = Project::test(fs, ["/root/dir".as_ref()], cx).await; + let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await; let lsp_store = project.read_with(cx, |project, _| project.lsp_store()); let (worktree, _) = project .update(cx, |project, cx| { - project.find_or_create_worktree("/root/dir", true, cx) + project.find_or_create_worktree(path!("/root/dir"), true, cx) }) .await .unwrap(); @@ -1128,7 +1131,7 @@ async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) { let (worktree, _) = project .update(cx, |project, cx| { - project.find_or_create_worktree("/root/other.rs", false, cx) + project.find_or_create_worktree(path!("/root/other.rs"), false, cx) }) .await .unwrap(); @@ -1140,7 +1143,7 @@ async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) { .update_diagnostics( server_id, lsp::PublishDiagnosticsParams { - uri: Url::from_file_path("/root/dir/b.rs").unwrap(), + uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(), version: None, diagnostics: vec![lsp::Diagnostic { range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)), @@ -1157,7 +1160,7 @@ async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) { .update_diagnostics( server_id, lsp::PublishDiagnosticsParams { - uri: Url::from_file_path("/root/other.rs").unwrap(), + uri: Url::from_file_path(path!("/root/other.rs")).unwrap(), version: None, diagnostics: vec![lsp::Diagnostic { range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)), @@ -1244,7 +1247,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "a.rs": "fn a() { A }", "b.rs": "const y: i32 = 1", @@ -1252,7 +1255,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) { ) .await; - let project = Project::test(fs, ["/dir".as_ref()], cx).await; + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(rust_lang()); @@ -1270,7 +1273,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) { // Cause worktree to start the fake language server let _ = project .update(cx, |project, cx| { - project.open_local_buffer_with_lsp("/dir/b.rs", cx) + project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx) }) .await .unwrap(); @@ -1299,7 +1302,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) { ); fake_server.notify::(&lsp::PublishDiagnosticsParams { - uri: Url::from_file_path("/dir/a.rs").unwrap(), + uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(), version: None, diagnostics: vec![lsp::Diagnostic { range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)), @@ -1325,7 +1328,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) { ); let buffer = project - .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx)) + .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx)) .await .unwrap(); @@ -1351,7 +1354,7 @@ async fn 
test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) { // Ensure publishing empty diagnostics twice only results in one update event. fake_server.notify::(&lsp::PublishDiagnosticsParams { - uri: Url::from_file_path("/dir/a.rs").unwrap(), + uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(), version: None, diagnostics: Default::default(), }); @@ -1364,7 +1367,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) { ); fake_server.notify::(&lsp::PublishDiagnosticsParams { - uri: Url::from_file_path("/dir/a.rs").unwrap(), + uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(), version: None, diagnostics: Default::default(), }); @@ -1379,9 +1382,9 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC let progress_token = "the-progress-token"; let fs = FakeFs::new(cx.executor()); - fs.insert_tree("/dir", json!({ "a.rs": "" })).await; + fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await; - let project = Project::test(fs, ["/dir".as_ref()], cx).await; + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(rust_lang()); @@ -1399,7 +1402,7 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC let (buffer, _handle) = project .update(cx, |project, cx| { - project.open_local_buffer_with_lsp("/dir/a.rs", cx) + project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx) }) .await .unwrap(); @@ -1465,9 +1468,9 @@ async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAp init_test(cx); let fs = FakeFs::new(cx.executor()); - fs.insert_tree("/dir", json!({ "a.rs": "x" })).await; + fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await; - let project = Project::test(fs, ["/dir".as_ref()], cx).await; + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(rust_lang()); @@ -1475,7 +1478,7 @@ async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAp let (buffer, _) = project .update(cx, |project, cx| { - project.open_local_buffer_with_lsp("/dir/a.rs", cx) + project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx) }) .await .unwrap(); @@ -1483,7 +1486,7 @@ async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAp // Publish diagnostics let fake_server = fake_servers.next().await.unwrap(); fake_server.notify::(&lsp::PublishDiagnosticsParams { - uri: Url::from_file_path("/dir/a.rs").unwrap(), + uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(), version: None, diagnostics: vec![lsp::Diagnostic { range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)), @@ -1546,9 +1549,9 @@ async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::T init_test(cx); let fs = FakeFs::new(cx.executor()); - fs.insert_tree("/dir", json!({ "a.rs": "" })).await; + fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await; - let project = Project::test(fs, ["/dir".as_ref()], cx).await; + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(rust_lang()); @@ -1556,7 +1559,7 @@ async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::T let (buffer, _handle) = project .update(cx, |project, cx| { - 
project.open_local_buffer_with_lsp("/dir/a.rs", cx) + project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx) }) .await .unwrap(); @@ -1564,7 +1567,7 @@ async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::T // Before restarting the server, report diagnostics with an unknown buffer version. let fake_server = fake_servers.next().await.unwrap(); fake_server.notify::(&lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(), + uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(), version: Some(10000), diagnostics: Vec::new(), }); @@ -1588,9 +1591,9 @@ async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) { let progress_token = "the-progress-token"; let fs = FakeFs::new(cx.executor()); - fs.insert_tree("/dir", json!({ "a.rs": "" })).await; + fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await; - let project = Project::test(fs, ["/dir".as_ref()], cx).await; + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(rust_lang()); @@ -1606,7 +1609,7 @@ async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) { let (buffer, _handle) = project .update(cx, |project, cx| { - project.open_local_buffer_with_lsp("/dir/a.rs", cx) + project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx) }) .await .unwrap(); @@ -1651,10 +1654,10 @@ async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) { init_test(cx); let fs = FakeFs::new(cx.executor()); - fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" })) + fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" })) .await; - let project = Project::test(fs, ["/dir".as_ref()], cx).await; + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); let mut fake_rust_servers = language_registry.register_fake_lsp( @@ -1676,13 +1679,13 @@ async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) { let _rs_buffer = project .update(cx, |project, cx| { - project.open_local_buffer_with_lsp("/dir/a.rs", cx) + project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx) }) .await .unwrap(); let _js_buffer = project .update(cx, |project, cx| { - project.open_local_buffer_with_lsp("/dir/b.js", cx) + project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx) }) .await .unwrap(); @@ -1695,7 +1698,7 @@ async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) { .text_document .uri .as_str(), - "file:///dir/a.rs" + uri!("file:///dir/a.rs") ); let mut fake_js_server = fake_js_servers.next().await.unwrap(); @@ -1706,7 +1709,7 @@ async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) { .text_document .uri .as_str(), - "file:///dir/b.js" + uri!("file:///dir/b.js") ); // Disable Rust language server, ensuring only that server gets stopped. 
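The `uri!` macro used in the assertions above presumably does for expected URI strings what `path!` does for paths: on Windows the server sees drive-qualified files, so the expected literal needs `C:` spliced into it, e.g. `file:///C:/dir/a.rs`. A sketch of that assumed behavior, written as a plain function for clarity rather than as the util crate's actual macro:

    fn expected_uri(posix_uri: &str) -> String {
        if cfg!(target_os = "windows") {
            // "file:///dir/a.rs" becomes "file:///C:/dir/a.rs"
            posix_uri.replacen("file:///", "file:///C:/", 1)
        } else {
            posix_uri.to_string()
        }
    }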
@@ -1757,7 +1760,7 @@ async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) { .text_document .uri .as_str(), - "file:///dir/a.rs" + uri!("file:///dir/a.rs") ); fake_js_server .receive_notification::() @@ -1776,9 +1779,9 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) { .unindent(); let fs = FakeFs::new(cx.executor()); - fs.insert_tree("/dir", json!({ "a.rs": text })).await; + fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await; - let project = Project::test(fs, ["/dir".as_ref()], cx).await; + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; let lsp_store = project.read_with(cx, |project, _| project.lsp_store()); let language_registry = project.read_with(cx, |project, _| project.languages().clone()); @@ -1792,7 +1795,9 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) { ); let buffer = project - .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx)) + .update(cx, |project, cx| { + project.open_local_buffer(path!("/dir/a.rs"), cx) + }) .await .unwrap(); @@ -1814,7 +1819,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) { // Report some diagnostics for the initial version of the buffer fake_server.notify::(&lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(), + uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(), version: Some(open_notification.text_document.version), diagnostics: vec![ lsp::Diagnostic { @@ -1900,7 +1905,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) { // Ensure overlapping diagnostics are highlighted correctly. fake_server.notify::(&lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(), + uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(), version: Some(open_notification.text_document.version), diagnostics: vec![ lsp::Diagnostic { @@ -1992,7 +1997,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) { // Handle out-of-order diagnostics fake_server.notify::(&lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(), + uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(), version: Some(change_notification_2.text_document.version), diagnostics: vec![ lsp::Diagnostic { @@ -2198,14 +2203,14 @@ async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "a.rs": text.clone(), }), ) .await; - let project = Project::test(fs, ["/dir".as_ref()], cx).await; + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; let lsp_store = project.read_with(cx, |project, _| project.lsp_store()); let language_registry = project.read_with(cx, |project, _| project.languages().clone()); @@ -2214,7 +2219,7 @@ async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) { let (buffer, _handle) = project .update(cx, |project, cx| { - project.open_local_buffer_with_lsp("/dir/a.rs", cx) + project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx) }) .await .unwrap(); @@ -2351,17 +2356,19 @@ async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAp let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "a.rs": text.clone(), }), ) .await; - let project = Project::test(fs, ["/dir".as_ref()], cx).await; + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; let lsp_store = project.read_with(cx, 
|project, _| project.lsp_store()); let buffer = project - .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx)) + .update(cx, |project, cx| { + project.open_local_buffer(path!("/dir/a.rs"), cx) + }) .await .unwrap(); @@ -2460,17 +2467,19 @@ async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "a.rs": text.clone(), }), ) .await; - let project = Project::test(fs, ["/dir".as_ref()], cx).await; + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; let lsp_store = project.read_with(cx, |project, _| project.lsp_store()); let buffer = project - .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx)) + .update(cx, |project, cx| { + project.open_local_buffer(path!("/dir/a.rs"), cx) + }) .await .unwrap(); @@ -2571,7 +2580,7 @@ async fn test_definition(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "a.rs": "const fn a() { A }", "b.rs": "const y: i32 = crate::a()", @@ -2579,7 +2588,7 @@ async fn test_definition(cx: &mut gpui::TestAppContext) { ) .await; - let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await; + let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(rust_lang()); @@ -2587,7 +2596,7 @@ async fn test_definition(cx: &mut gpui::TestAppContext) { let (buffer, _handle) = project .update(cx, |project, cx| { - project.open_local_buffer_with_lsp("/dir/b.rs", cx) + project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx) }) .await .unwrap(); @@ -2597,13 +2606,13 @@ async fn test_definition(cx: &mut gpui::TestAppContext) { let params = params.text_document_position_params; assert_eq!( params.text_document.uri.to_file_path().unwrap(), - Path::new("/dir/b.rs"), + Path::new(path!("/dir/b.rs")), ); assert_eq!(params.position, lsp::Position::new(0, 22)); Ok(Some(lsp::GotoDefinitionResponse::Scalar( lsp::Location::new( - lsp::Url::from_file_path("/dir/a.rs").unwrap(), + lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(), lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)), ), ))) @@ -2629,18 +2638,24 @@ async fn test_definition(cx: &mut gpui::TestAppContext) { .as_local() .unwrap() .abs_path(cx), - Path::new("/dir/a.rs"), + Path::new(path!("/dir/a.rs")), ); assert_eq!(definition.target.range.to_offset(target_buffer), 9..10); assert_eq!( list_worktrees(&project, cx), - [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)], + [ + (path!("/dir/a.rs").as_ref(), false), + (path!("/dir/b.rs").as_ref(), true) + ], ); drop(definition); }); cx.update(|cx| { - assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]); + assert_eq!( + list_worktrees(&project, cx), + [(path!("/dir/b.rs").as_ref(), true)] + ); }); fn list_worktrees<'a>(project: &'a Entity, cx: &'a App) -> Vec<(&'a Path, bool)> { @@ -2664,14 +2679,14 @@ async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "a.ts": "", }), ) .await; - let project = Project::test(fs, ["/dir".as_ref()], cx).await; + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(typescript_lang()); @@ -2690,7 +2705,9 @@ async fn 
test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) { ); let (buffer, _handle) = project - .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx)) + .update(cx, |p, cx| { + p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx) + }) .await .unwrap(); @@ -2756,14 +2773,14 @@ async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "a.ts": "", }), ) .await; - let project = Project::test(fs, ["/dir".as_ref()], cx).await; + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(typescript_lang()); @@ -2782,7 +2799,9 @@ async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) { ); let (buffer, _handle) = project - .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx)) + .update(cx, |p, cx| { + p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx) + }) .await .unwrap(); @@ -2817,14 +2836,14 @@ async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "a.ts": "a", }), ) .await; - let project = Project::test(fs, ["/dir".as_ref()], cx).await; + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(typescript_lang()); @@ -2845,7 +2864,9 @@ async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) { ); let (buffer, _handle) = project - .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx)) + .update(cx, |p, cx| { + p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx) + }) .await .unwrap(); @@ -2910,7 +2931,7 @@ async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) { edit: lsp::WorkspaceEdit { changes: Some( [( - lsp::Url::from_file_path("/dir/a.ts").unwrap(), + lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(), vec![lsp::TextEdit { range: lsp::Range::new( lsp::Position::new(0, 0), @@ -2952,16 +2973,16 @@ async fn test_save_file(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "file1": "the old contents", }), ) .await; - let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let buffer = project - .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx)) + .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx)) .await .unwrap(); buffer.update(cx, |buffer, cx| { @@ -2974,7 +2995,11 @@ async fn test_save_file(cx: &mut gpui::TestAppContext) { .await .unwrap(); - let new_text = fs.load(Path::new("/dir/file1")).await.unwrap(); + let new_text = fs + .load(Path::new(path!("/dir/file1"))) + .await + .unwrap() + .replace("\r\n", "\n"); assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text())); } @@ -2984,17 +3009,17 @@ async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) let fs = FakeFs::new(cx.executor().clone()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "file1": "the original contents", }), ) .await; - let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let worktree = project.read_with(cx, |project, cx| 
project.worktrees(cx).next().unwrap()); let buffer = project - .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx)) + .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx)) .await .unwrap(); @@ -3005,7 +3030,7 @@ async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) // Change the buffer's file on disk, and then wait for the file change // to be detected by the worktree, so that the buffer starts reloading. fs.save( - "/dir/file1".as_ref(), + path!("/dir/file1").as_ref(), &"the first contents".into(), Default::default(), ) @@ -3016,7 +3041,7 @@ async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) // Change the buffer's file again. Depending on the random seed, the // previous file change may still be in progress. fs.save( - "/dir/file1".as_ref(), + path!("/dir/file1").as_ref(), &"the second contents".into(), Default::default(), ) @@ -3025,7 +3050,7 @@ async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) worktree.next_event(cx).await; cx.executor().run_until_parked(); - let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap(); + let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap(); buffer.read_with(cx, |buffer, _| { assert_eq!(buffer.text(), on_disk_text); assert!(!buffer.is_dirty(), "buffer should not be dirty"); @@ -3039,17 +3064,17 @@ async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor().clone()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "file1": "the original contents", }), ) .await; - let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap()); let buffer = project - .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx)) + .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx)) .await .unwrap(); @@ -3060,7 +3085,7 @@ async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) { // Change the buffer's file on disk, and then wait for the file change // to be detected by the worktree, so that the buffer starts reloading. 
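Several of the save tests in this file now strip carriage returns from the loaded file before comparing it with buffer text. The apparent assumption is that on Windows the saved file may use CRLF line endings while the in-memory buffer always reports `\n`, so the assertion is normalized instead of being made platform specific. The pattern, as a small helper:

    /// Normalize Windows line endings so on-disk contents compare equal to buffer text.
    fn normalize_newlines(text: &str) -> String {
        text.replace("\r\n", "\n")
    }

    // assert_eq!(normalize_newlines(&new_text), buffer_text);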
fs.save( - "/dir/file1".as_ref(), + path!("/dir/file1").as_ref(), &"the first contents".into(), Default::default(), ) @@ -3079,7 +3104,7 @@ async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) { }); cx.executor().run_until_parked(); - let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap(); + let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap(); buffer.read_with(cx, |buffer, _| { let buffer_text = buffer.text(); if buffer_text == on_disk_text { @@ -3103,16 +3128,16 @@ async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "file1": "the old contents", }), ) .await; - let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await; let buffer = project - .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx)) + .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx)) .await .unwrap(); buffer.update(cx, |buffer, cx| { @@ -3124,7 +3149,11 @@ async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) { .await .unwrap(); - let new_text = fs.load(Path::new("/dir/file1")).await.unwrap(); + let new_text = fs + .load(Path::new(path!("/dir/file1"))) + .await + .unwrap() + .replace("\r\n", "\n"); assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text())); } @@ -3259,26 +3288,21 @@ async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) { std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap(); tree.flush_fs_events(cx).await; - let expected_paths = vec![ - "a", - "a/file1", - "a/file2.new", - "b", - "d", - "d/file3", - "d/file4", - ] - .into_iter() - .map(replace_path_separator) - .collect::>(); - cx.update(|app| { assert_eq!( tree.read(app) .paths() .map(|p| p.to_str().unwrap()) .collect::>(), - expected_paths + vec![ + "a", + separator!("a/file1"), + separator!("a/file2.new"), + "b", + "d", + separator!("d/file3"), + separator!("d/file4"), + ] ); }); @@ -3338,7 +3362,15 @@ async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) { .paths() .map(|p| p.to_str().unwrap()) .collect::>(), - expected_paths + vec![ + "a", + separator!("a/file1"), + separator!("a/file2.new"), + "b", + "d", + separator!("d/file3"), + separator!("d/file4"), + ] ); }); } @@ -3447,7 +3479,7 @@ async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "file1": "abc", "file2": "def", @@ -3456,10 +3488,10 @@ async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) { ) .await; - let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let buffer1 = project - .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx)) + .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx)) .await .unwrap(); let events = Arc::new(Mutex::new(Vec::new())); @@ -3542,7 +3574,7 @@ async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) { // When a file is deleted, the buffer is considered dirty. 
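The expected worktree listings above move to the `separator!` macro because `Path::to_str` yields backslash-separated relative paths on Windows. A sketch of the assumed behavior, written as a runtime function; the real macro in the util crate may do the substitution at compile time:

    fn native_separators(path: &str) -> String {
        if cfg!(target_os = "windows") {
            // "a/file2.new" becomes r"a\file2.new"
            path.replace('/', "\\")
        } else {
            path.to_string()
        }
    }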
let events = Arc::new(Mutex::new(Vec::new())); let buffer2 = project - .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx)) + .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx)) .await .unwrap(); buffer2.update(cx, |_, cx| { @@ -3553,7 +3585,7 @@ async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) { .detach(); }); - fs.remove_file("/dir/file2".as_ref(), Default::default()) + fs.remove_file(path!("/dir/file2").as_ref(), Default::default()) .await .unwrap(); cx.executor().run_until_parked(); @@ -3569,7 +3601,7 @@ async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) { // When a file is already dirty when deleted, we don't emit a Dirtied event. let events = Arc::new(Mutex::new(Vec::new())); let buffer3 = project - .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx)) + .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx)) .await .unwrap(); buffer3.update(cx, |_, cx| { @@ -3584,7 +3616,7 @@ async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) { buffer.edit([(0..0, "x")], None, cx); }); events.lock().clear(); - fs.remove_file("/dir/file3".as_ref(), Default::default()) + fs.remove_file(path!("/dir/file3").as_ref(), Default::default()) .await .unwrap(); cx.executor().run_until_parked(); @@ -3599,15 +3631,15 @@ async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) { let initial_contents = "aaa\nbbbbb\nc\n"; let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "the-file": initial_contents, }), ) .await; - let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let buffer = project - .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx)) + .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx)) .await .unwrap(); @@ -3623,7 +3655,7 @@ async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) { }); let new_contents = "AAAA\naaa\nBB\nbbbbb\n"; fs.save( - "/dir/the-file".as_ref(), + path!("/dir/the-file").as_ref(), &new_contents.into(), LineEnding::Unix, ) @@ -3658,7 +3690,7 @@ async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) { // Change the file on disk again, adding blank lines to the beginning. fs.save( - "/dir/the-file".as_ref(), + path!("/dir/the-file").as_ref(), &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(), LineEnding::Unix, ) @@ -3679,7 +3711,7 @@ async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "file1": "a\nb\nc\n", "file2": "one\r\ntwo\r\nthree\r\n", @@ -3687,13 +3719,13 @@ async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) { ) .await; - let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let buffer1 = project - .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx)) + .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx)) .await .unwrap(); let buffer2 = project - .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx)) + .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx)) .await .unwrap(); @@ -3709,7 +3741,7 @@ async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) { // Change a file's line endings on disk from unix to windows. The buffer's // state updates correctly. 
fs.save( - "/dir/file1".as_ref(), + path!("/dir/file1").as_ref(), &"aaa\nb\nc\n".into(), LineEnding::Windows, ) @@ -3730,7 +3762,7 @@ async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) { .await .unwrap(); assert_eq!( - fs.load("/dir/file2".as_ref()).await.unwrap(), + fs.load(path!("/dir/file2").as_ref()).await.unwrap(), "one\r\ntwo\r\nthree\r\nfour\r\n", ); } @@ -3741,7 +3773,7 @@ async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/the-dir", + path!("/dir"), json!({ "a.rs": " fn foo(mut v: Vec) { @@ -3755,14 +3787,14 @@ async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) { ) .await; - let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let lsp_store = project.read_with(cx, |project, _| project.lsp_store()); let buffer = project - .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx)) + .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx)) .await .unwrap(); - let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap(); + let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap(); let message = lsp::PublishDiagnosticsParams { uri: buffer_uri.clone(), diagnostics: vec![ @@ -3984,7 +4016,7 @@ async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "one.rs": "const ONE: usize = 1;", "two": { @@ -3994,7 +4026,7 @@ async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) { }), ) .await; - let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(rust_lang()); @@ -4038,7 +4070,7 @@ async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) { let _ = project .update(cx, |project, cx| { - project.open_local_buffer_with_lsp("/dir/one.rs", cx) + project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx) }) .await .unwrap(); @@ -4067,7 +4099,7 @@ async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) { new_text: "This is not a drill".to_owned(), })], text_document: lsp::OptionalVersionedTextDocumentIdentifier { - uri: Url::from_str("file:///dir/two/two.rs").unwrap(), + uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(), version: Some(1337), }, }] @@ -4084,8 +4116,8 @@ async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) { let expected_edit = expected_edit.clone(); async move { assert_eq!(params.files.len(), 1); - assert_eq!(params.files[0].old_uri, "file:///dir/one.rs"); - assert_eq!(params.files[0].new_uri, "file:///dir/three.rs"); + assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs")); + assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs")); resolved_workspace_edit.set(expected_edit.clone()).unwrap(); Ok(Some(expected_edit)) } @@ -4098,8 +4130,8 @@ async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) { fake_server .handle_notification::(|params, _| { assert_eq!(params.files.len(), 1); - assert_eq!(params.files[0].old_uri, "file:///dir/one.rs"); - assert_eq!(params.files[0].new_uri, "file:///dir/three.rs"); + assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs")); + assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs")); }) .next() .await @@ -4114,7 
+4146,7 @@ async fn test_rename(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "one.rs": "const ONE: usize = 1;", "two.rs": "const TWO: usize = one::ONE + one::ONE;" @@ -4122,7 +4154,7 @@ async fn test_rename(cx: &mut gpui::TestAppContext) { ) .await; - let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(rust_lang()); @@ -4142,7 +4174,7 @@ async fn test_rename(cx: &mut gpui::TestAppContext) { let (buffer, _handle) = project .update(cx, |project, cx| { - project.open_local_buffer_with_lsp("/dir/one.rs", cx) + project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx) }) .await .unwrap(); @@ -4154,7 +4186,10 @@ async fn test_rename(cx: &mut gpui::TestAppContext) { }); fake_server .handle_request::(|params, _| async move { - assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs"); + assert_eq!( + params.text_document.uri.as_str(), + uri!("file:///dir/one.rs") + ); assert_eq!(params.position, lsp::Position::new(0, 7)); Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new( lsp::Position::new(0, 6), @@ -4178,7 +4213,7 @@ async fn test_rename(cx: &mut gpui::TestAppContext) { .handle_request::(|params, _| async move { assert_eq!( params.text_document_position.text_document.uri.as_str(), - "file:///dir/one.rs" + uri!("file:///dir/one.rs") ); assert_eq!( params.text_document_position.position, @@ -4189,14 +4224,14 @@ async fn test_rename(cx: &mut gpui::TestAppContext) { changes: Some( [ ( - lsp::Url::from_file_path("/dir/one.rs").unwrap(), + lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(), vec![lsp::TextEdit::new( lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)), "THREE".to_string(), )], ), ( - lsp::Url::from_file_path("/dir/two.rs").unwrap(), + lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(), vec![ lsp::TextEdit::new( lsp::Range::new( @@ -4250,7 +4285,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "one.rs": "const ONE: usize = 1;", "two.rs": "const TWO: usize = one::ONE + one::ONE;", @@ -4259,7 +4294,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) { }), ) .await; - let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; assert_eq!( search( &project, @@ -4278,14 +4313,14 @@ async fn test_search(cx: &mut gpui::TestAppContext) { .await .unwrap(), HashMap::from_iter([ - ("dir/two.rs".to_string(), vec![6..9]), - ("dir/three.rs".to_string(), vec![37..40]) + (separator!("dir/two.rs").to_string(), vec![6..9]), + (separator!("dir/three.rs").to_string(), vec![37..40]) ]) ); let buffer_4 = project .update(cx, |project, cx| { - project.open_local_buffer("/dir/four.rs", cx) + project.open_local_buffer(path!("/dir/four.rs"), cx) }) .await .unwrap(); @@ -4312,9 +4347,9 @@ async fn test_search(cx: &mut gpui::TestAppContext) { .await .unwrap(), HashMap::from_iter([ - ("dir/two.rs".to_string(), vec![6..9]), - ("dir/three.rs".to_string(), vec![37..40]), - ("dir/four.rs".to_string(), vec![25..28, 36..39]) + (separator!("dir/two.rs").to_string(), vec![6..9]), + (separator!("dir/three.rs").to_string(), vec![37..40]), + (separator!("dir/four.rs").to_string(), vec![25..28, 36..39]) ]) ); } @@ 
-4327,7 +4362,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "one.rs": r#"// Rust file one"#, "one.ts": r#"// TypeScript file one"#, @@ -4336,7 +4371,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) { }), ) .await; - let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; assert!( search( @@ -4377,8 +4412,8 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) { .await .unwrap(), HashMap::from_iter([ - ("dir/one.rs".to_string(), vec![8..12]), - ("dir/two.rs".to_string(), vec![8..12]), + (separator!("dir/one.rs").to_string(), vec![8..12]), + (separator!("dir/two.rs").to_string(), vec![8..12]), ]), "Rust only search should give only Rust files" ); @@ -4402,8 +4437,8 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) { .await .unwrap(), HashMap::from_iter([ - ("dir/one.ts".to_string(), vec![14..18]), - ("dir/two.ts".to_string(), vec![14..18]), + (separator!("dir/one.ts").to_string(), vec![14..18]), + (separator!("dir/two.ts").to_string(), vec![14..18]), ]), "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything" ); @@ -4427,10 +4462,10 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) { .await .unwrap(), HashMap::from_iter([ - ("dir/two.ts".to_string(), vec![14..18]), - ("dir/one.rs".to_string(), vec![8..12]), - ("dir/one.ts".to_string(), vec![14..18]), - ("dir/two.rs".to_string(), vec![8..12]), + (separator!("dir/two.ts").to_string(), vec![14..18]), + (separator!("dir/one.rs").to_string(), vec![8..12]), + (separator!("dir/one.ts").to_string(), vec![14..18]), + (separator!("dir/two.rs").to_string(), vec![8..12]), ]), "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything" ); @@ -4444,7 +4479,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "one.rs": r#"// Rust file one"#, "one.ts": r#"// TypeScript file one"#, @@ -4453,7 +4488,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) { }), ) .await; - let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; assert_eq!( search( @@ -4473,10 +4508,10 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) { .await .unwrap(), HashMap::from_iter([ - ("dir/one.rs".to_string(), vec![8..12]), - ("dir/one.ts".to_string(), vec![14..18]), - ("dir/two.rs".to_string(), vec![8..12]), - ("dir/two.ts".to_string(), vec![14..18]), + (separator!("dir/one.rs").to_string(), vec![8..12]), + (separator!("dir/one.ts").to_string(), vec![14..18]), + (separator!("dir/two.rs").to_string(), vec![8..12]), + (separator!("dir/two.ts").to_string(), vec![14..18]), ]), "If no exclusions match, all files should be returned" ); @@ -4499,8 +4534,8 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) { .await .unwrap(), HashMap::from_iter([ - ("dir/one.ts".to_string(), vec![14..18]), - ("dir/two.ts".to_string(), vec![14..18]), + (separator!("dir/one.ts").to_string(), vec![14..18]), + (separator!("dir/two.ts").to_string(), vec![14..18]), ]), "Rust exclusion search should give only TypeScript files" ); @@ -4522,8 +4557,8 @@ async fn 
test_search_with_exclusions(cx: &mut gpui::TestAppContext) { .await .unwrap(), HashMap::from_iter([ - ("dir/one.rs".to_string(), vec![8..12]), - ("dir/two.rs".to_string(), vec![8..12]), + (separator!("dir/one.rs").to_string(), vec![8..12]), + (separator!("dir/two.rs").to_string(), vec![8..12]), ]), "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything" ); @@ -4558,7 +4593,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "one.rs": r#"// Rust file one"#, "one.ts": r#"// TypeScript file one"#, @@ -4567,7 +4602,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex }), ) .await; - let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; assert!( search( @@ -4649,8 +4684,8 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex .await .unwrap(), HashMap::from_iter([ - ("dir/one.ts".to_string(), vec![14..18]), - ("dir/two.ts".to_string(), vec![14..18]), + (separator!("dir/one.ts").to_string(), vec![14..18]), + (separator!("dir/two.ts").to_string(), vec![14..18]), ]), "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files" ); @@ -4662,7 +4697,7 @@ async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppCo let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/worktree-a", + path!("/worktree-a"), json!({ "haystack.rs": r#"// NEEDLE"#, "haystack.ts": r#"// NEEDLE"#, @@ -4670,7 +4705,7 @@ async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppCo ) .await; fs.insert_tree( - "/worktree-b", + path!("/worktree-b"), json!({ "haystack.rs": r#"// NEEDLE"#, "haystack.ts": r#"// NEEDLE"#, @@ -4680,7 +4715,7 @@ async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppCo let project = Project::test( fs.clone(), - ["/worktree-a".as_ref(), "/worktree-b".as_ref()], + [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()], cx, ) .await; @@ -4702,7 +4737,7 @@ async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppCo ) .await .unwrap(), - HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]), + HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]), "should only return results from included worktree" ); assert_eq!( @@ -4722,7 +4757,7 @@ async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppCo ) .await .unwrap(), - HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]), + HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]), "should only return results from included worktree" ); @@ -4744,8 +4779,8 @@ async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppCo .await .unwrap(), HashMap::from_iter([ - ("worktree-a/haystack.ts".to_string(), vec![3..9]), - ("worktree-b/haystack.ts".to_string(), vec![3..9]) + (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]), + (separator!("worktree-b/haystack.ts").to_string(), vec![3..9]) ]), "should return results from both worktrees" ); @@ -4757,7 +4792,7 @@ async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.background_executor.clone()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ ".git": {}, ".gitignore": 
"**/target\n/node_modules\n", @@ -4778,7 +4813,7 @@ async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) { }), ) .await; - let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let query = "key"; assert_eq!( @@ -4798,11 +4833,11 @@ async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) { ) .await .unwrap(), - HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]), + HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]), "Only one non-ignored file should have the query" ); - let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; assert_eq!( search( &project, @@ -4821,19 +4856,22 @@ async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) { .await .unwrap(), HashMap::from_iter([ - ("dir/package.json".to_string(), vec![8..11]), - ("dir/target/index.txt".to_string(), vec![6..9]), + (separator!("dir/package.json").to_string(), vec![8..11]), + (separator!("dir/target/index.txt").to_string(), vec![6..9]), ( - "dir/node_modules/prettier/package.json".to_string(), + separator!("dir/node_modules/prettier/package.json").to_string(), vec![9..12] ), ( - "dir/node_modules/prettier/index.ts".to_string(), + separator!("dir/node_modules/prettier/index.ts").to_string(), vec![15..18] ), - ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]), ( - "dir/node_modules/eslint/package.json".to_string(), + separator!("dir/node_modules/eslint/index.ts").to_string(), + vec![13..16] + ), + ( + separator!("dir/node_modules/eslint/package.json").to_string(), vec![8..11] ), ]), @@ -4842,7 +4880,7 @@ async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) { let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap(); let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap(); - let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; assert_eq!( search( &project, @@ -4861,7 +4899,7 @@ async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) { .await .unwrap(), HashMap::from_iter([( - "dir/node_modules/prettier/package.json".to_string(), + separator!("dir/node_modules/prettier/package.json").to_string(), vec![9..12] )]), "With search including ignored prettier directory and excluding TS files, only one file should be found" @@ -4944,14 +4982,14 @@ async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "a.tsx": "a", }), ) .await; - let project = Project::test(fs, ["/dir".as_ref()], cx).await; + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(tsx_lang()); @@ -5009,7 +5047,9 @@ async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) { ]; let (buffer, _handle) = project - .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.tsx", cx)) + .update(cx, |p, cx| { + p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx) + }) .await .unwrap(); cx.executor().run_until_parked(); @@ -5095,14 +5135,14 @@ async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - 
"/dir", + path!("/dir"), json!({ "a.ts": "a", }), ) .await; - let project = Project::test(fs, ["/dir".as_ref()], cx).await; + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(typescript_lang()); @@ -5118,7 +5158,9 @@ async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) { ); let (buffer, _handle) = project - .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx)) + .update(cx, |p, cx| { + p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx) + }) .await .unwrap(); cx.executor().run_until_parked(); @@ -5165,14 +5207,14 @@ async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "a.ts": "a", }), ) .await; - let project = Project::test(fs, ["/dir".as_ref()], cx).await; + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(typescript_lang()); @@ -5188,7 +5230,9 @@ async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) { ); let (buffer, _handle) = project - .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx)) + .update(cx, |p, cx| { + p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx) + }) .await .unwrap(); cx.executor().run_until_parked(); @@ -5243,14 +5287,14 @@ async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "a.tsx": "a", }), ) .await; - let project = Project::test(fs, ["/dir".as_ref()], cx).await; + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(tsx_lang()); @@ -5309,7 +5353,9 @@ async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) { ]; let (buffer, _handle) = project - .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.tsx", cx)) + .update(cx, |p, cx| { + p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx) + }) .await .unwrap(); cx.executor().run_until_parked(); diff --git a/crates/project/src/task_inventory.rs b/crates/project/src/task_inventory.rs index 3e7c9dd644e4606d98ecd6f5d7db717469cbf9be..b727d1365b6ca8d1aeb94cc4332375d8d96678b1 100644 --- a/crates/project/src/task_inventory.rs +++ b/crates/project/src/task_inventory.rs @@ -18,7 +18,7 @@ use task::{ ResolvedTask, TaskContext, TaskId, TaskTemplate, TaskTemplates, TaskVariables, VariableName, }; use text::{Point, ToPoint}; -use util::{post_inc, NumericPrefixWithSuffix, ResultExt as _}; +use util::{paths::PathExt as _, post_inc, NumericPrefixWithSuffix, ResultExt as _}; use worktree::WorktreeId; use crate::worktree_store::WorktreeStore; @@ -470,7 +470,7 @@ impl ContextProvider for BasicContextProvider { let current_file = buffer .file() .and_then(|file| file.as_local()) - .map(|file| file.abs_path(cx).to_string_lossy().to_string()); + .map(|file| file.abs_path(cx).to_sanitized_string()); let Point { row, column } = location.range.start.to_point(&buffer_snapshot); let row = row + 1; let column = column + 1; @@ -502,14 +502,14 @@ impl ContextProvider for BasicContextProvider { if let Some(Some(worktree_path)) = worktree_root_dir { task_variables.insert( VariableName::WorktreeRoot, - worktree_path.to_string_lossy().to_string(), + 
worktree_path.to_sanitized_string(), ); if let Some(full_path) = current_file.as_ref() { let relative_path = pathdiff::diff_paths(full_path, worktree_path); if let Some(relative_path) = relative_path { task_variables.insert( VariableName::RelativeFile, - relative_path.to_string_lossy().into_owned(), + relative_path.to_sanitized_string(), ); } } diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 7baee66f698fa99d9a83865cf7b1e4589b75aa22..b2c0ab89236b018f88e67969208537658179df71 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -1106,8 +1106,13 @@ impl ProjectPanel { let worktree_id = edit_state.worktree_id; let is_new_entry = edit_state.is_new_entry(); let filename = self.filename_editor.read(cx).text(cx); - edit_state.is_dir = edit_state.is_dir - || (edit_state.is_new_entry() && filename.ends_with(std::path::MAIN_SEPARATOR)); + #[cfg(not(target_os = "windows"))] + let filename_indicates_dir = filename.ends_with("/"); + // On Windows, path separator could be either `/` or `\`. + #[cfg(target_os = "windows")] + let filename_indicates_dir = filename.ends_with("/") || filename.ends_with("\\"); + edit_state.is_dir = + edit_state.is_dir || (edit_state.is_new_entry() && filename_indicates_dir); let is_dir = edit_state.is_dir; let worktree = self.project.read(cx).worktree_for_id(worktree_id, cx)?; let entry = worktree.read(cx).entry_for_id(edit_state.entry_id)?.clone(); @@ -4793,6 +4798,7 @@ mod tests { use serde_json::json; use settings::SettingsStore; use std::path::{Path, PathBuf}; + use util::{path, separator}; use workspace::{ item::{Item, ProjectItem}, register_project_item, AppState, @@ -4894,7 +4900,7 @@ mod tests { let fs = FakeFs::new(cx.executor().clone()); fs.insert_tree( - "/src", + path!("/src"), json!({ "test": { "first.rs": "// First Rust file", @@ -4905,7 +4911,7 @@ mod tests { ) .await; - let project = Project::test(fs.clone(), ["/src".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/src").as_ref()], cx).await; let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); let cx = &mut VisualTestContext::from_window(*workspace, cx); @@ -5066,7 +5072,7 @@ mod tests { let fs = FakeFs::new(cx.executor().clone()); fs.insert_tree( - "/root1", + path!("/root1"), json!({ "dir_1": { "nested_dir_1": { @@ -5088,7 +5094,7 @@ mod tests { ) .await; fs.insert_tree( - "/root2", + path!("/root2"), json!({ "dir_2": { "file_1.java": "// File contents", @@ -5097,7 +5103,12 @@ mod tests { ) .await; - let project = Project::test(fs.clone(), ["/root1".as_ref(), "/root2".as_ref()], cx).await; + let project = Project::test( + fs.clone(), + [path!("/root1").as_ref(), path!("/root2").as_ref()], + cx, + ) + .await; let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); let cx = &mut VisualTestContext::from_window(*workspace, cx); @@ -5115,10 +5126,10 @@ mod tests { assert_eq!( visible_entries_as_strings(&panel, 0..10, cx), &[ - "v root1", - " > dir_1/nested_dir_1/nested_dir_2/nested_dir_3", - "v root2", - " > dir_2", + separator!("v root1"), + separator!(" > dir_1/nested_dir_1/nested_dir_2/nested_dir_3"), + separator!("v root2"), + separator!(" > dir_2"), ] ); @@ -5130,14 +5141,14 @@ mod tests { assert_eq!( visible_entries_as_strings(&panel, 0..10, cx), &[ - "v root1", - " v dir_1/nested_dir_1/nested_dir_2/nested_dir_3 <== selected", - " > nested_dir_4/nested_dir_5", - " file_a.java", - " file_b.java", - " 
file_c.java", - "v root2", - " > dir_2", + separator!("v root1"), + separator!(" v dir_1/nested_dir_1/nested_dir_2/nested_dir_3 <== selected"), + separator!(" > nested_dir_4/nested_dir_5"), + separator!(" file_a.java"), + separator!(" file_b.java"), + separator!(" file_c.java"), + separator!("v root2"), + separator!(" > dir_2"), ] ); @@ -5149,31 +5160,31 @@ mod tests { assert_eq!( visible_entries_as_strings(&panel, 0..10, cx), &[ - "v root1", - " v dir_1/nested_dir_1/nested_dir_2/nested_dir_3", - " v nested_dir_4/nested_dir_5 <== selected", - " file_d.java", - " file_a.java", - " file_b.java", - " file_c.java", - "v root2", - " > dir_2", + separator!("v root1"), + separator!(" v dir_1/nested_dir_1/nested_dir_2/nested_dir_3"), + separator!(" v nested_dir_4/nested_dir_5 <== selected"), + separator!(" file_d.java"), + separator!(" file_a.java"), + separator!(" file_b.java"), + separator!(" file_c.java"), + separator!("v root2"), + separator!(" > dir_2"), ] ); toggle_expand_dir(&panel, "root2/dir_2", cx); assert_eq!( visible_entries_as_strings(&panel, 0..10, cx), &[ - "v root1", - " v dir_1/nested_dir_1/nested_dir_2/nested_dir_3", - " v nested_dir_4/nested_dir_5", - " file_d.java", - " file_a.java", - " file_b.java", - " file_c.java", - "v root2", - " v dir_2 <== selected", - " file_1.java", + separator!("v root1"), + separator!(" v dir_1/nested_dir_1/nested_dir_2/nested_dir_3"), + separator!(" v nested_dir_4/nested_dir_5"), + separator!(" file_d.java"), + separator!(" file_a.java"), + separator!(" file_b.java"), + separator!(" file_c.java"), + separator!("v root2"), + separator!(" v dir_2 <== selected"), + separator!(" file_1.java"), ] ); } @@ -5682,7 +5693,7 @@ mod tests { let fs = FakeFs::new(cx.executor().clone()); fs.insert_tree( - "/root1", + path!("/root1"), json!({ ".dockerignore": "", ".git": { @@ -5692,7 +5703,7 @@ mod tests { ) .await; - let project = Project::test(fs.clone(), ["/root1".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/root1").as_ref()], cx).await; let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); let cx = &mut VisualTestContext::from_window(*workspace, cx); @@ -5727,9 +5738,10 @@ mod tests { ); let confirm = panel.update_in(cx, |panel, window, cx| { + // If we want to create a subdirectory, there should be no prefix slash. panel .filename_editor - .update(cx, |editor, cx| editor.set_text("/new_dir/", window, cx)); + .update(cx, |editor, cx| editor.set_text("new_dir/", window, cx)); panel.confirm_edit(window, cx).unwrap() }); @@ -5738,14 +5750,14 @@ mod tests { &[ "v root1", " > .git", - " [PROCESSING: '/new_dir/'] <== selected", + " [PROCESSING: 'new_dir/'] <== selected", " .dockerignore", ] ); confirm.await.unwrap(); assert_eq!( - visible_entries_as_strings(&panel, 0..13, cx), + visible_entries_as_strings(&panel, 0..10, cx), &[ "v root1", " > .git", @@ -5753,6 +5765,54 @@ mod tests { " .dockerignore", ] ); + + // Test filename with whitespace + select_path(&panel, "root1", cx); + panel.update_in(cx, |panel, window, cx| panel.new_file(&NewFile, window, cx)); + let confirm = panel.update_in(cx, |panel, window, cx| { + // If we want to create a subdirectory, there should be no prefix slash. 
+ panel + .filename_editor + .update(cx, |editor, cx| editor.set_text("new dir 2/", window, cx)); + panel.confirm_edit(window, cx).unwrap() + }); + confirm.await.unwrap(); + assert_eq!( + visible_entries_as_strings(&panel, 0..10, cx), + &[ + "v root1", + " > .git", + " v new dir 2 <== selected", + " v new_dir", + " .dockerignore", + ] + ); + + // Test filename ends with "\" + #[cfg(target_os = "windows")] + { + select_path(&panel, "root1", cx); + panel.update_in(cx, |panel, window, cx| panel.new_file(&NewFile, window, cx)); + let confirm = panel.update_in(cx, |panel, window, cx| { + // If we want to create a subdirectory, there should be no prefix slash. + panel + .filename_editor + .update(cx, |editor, cx| editor.set_text("new_dir_3\\", window, cx)); + panel.confirm_edit(window, cx).unwrap() + }); + confirm.await.unwrap(); + assert_eq!( + visible_entries_as_strings(&panel, 0..10, cx), + &[ + "v root1", + " > .git", + " v new dir 2", + " v new_dir", + " v new_dir_3 <== selected", + " .dockerignore", + ] + ); + } } #[gpui::test] @@ -6409,7 +6469,7 @@ mod tests { let fs = FakeFs::new(cx.executor().clone()); fs.insert_tree( - "/src", + path!("/src"), json!({ "test": { "first.rs": "// First Rust file", @@ -6420,7 +6480,7 @@ mod tests { ) .await; - let project = Project::test(fs.clone(), ["/src".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/src").as_ref()], cx).await; let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); let cx = &mut VisualTestContext::from_window(*workspace, cx); @@ -8545,7 +8605,7 @@ mod tests { let fs = FakeFs::new(cx.executor().clone()); fs.insert_tree( - "/root", + path!("/root"), json!({ ".gitignore": "**/ignored_dir\n**/ignored_nested", "dir1": { @@ -8573,7 +8633,7 @@ mod tests { ) .await; - let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); let cx = &mut VisualTestContext::from_window(*workspace, cx); @@ -8602,12 +8662,12 @@ mod tests { assert_eq!( visible_entries_as_strings(&panel, 0..20, cx), &[ - "v root", - " v dir1 <== selected", - " > empty1/empty2/empty3", - " > ignored_dir", - " > subdir1", - " .gitignore", + separator!("v root"), + separator!(" v dir1 <== selected"), + separator!(" > empty1/empty2/empty3"), + separator!(" > ignored_dir"), + separator!(" > subdir1"), + separator!(" .gitignore"), ], "Should show first level with auto-folded dirs and ignored dir visible" ); @@ -8624,18 +8684,18 @@ mod tests { assert_eq!( visible_entries_as_strings(&panel, 0..20, cx), &[ - "v root", - " v dir1 <== selected", - " v empty1", - " v empty2", - " v empty3", - " file.txt", - " > ignored_dir", - " v subdir1", - " > ignored_nested", - " file1.txt", - " file2.txt", - " .gitignore", + separator!("v root"), + separator!(" v dir1 <== selected"), + separator!(" v empty1"), + separator!(" v empty2"), + separator!(" v empty3"), + separator!(" file.txt"), + separator!(" > ignored_dir"), + separator!(" v subdir1"), + separator!(" > ignored_nested"), + separator!(" file1.txt"), + separator!(" file2.txt"), + separator!(" .gitignore"), ], "After expand_all with auto-fold: should not expand ignored_dir, should expand folded dirs, and should not expand ignored_nested" ); @@ -8660,12 +8720,12 @@ mod tests { assert_eq!( visible_entries_as_strings(&panel, 0..20, cx), &[ - "v root", - " v dir1 <== selected", - " > empty1", - " > 
ignored_dir", - " > subdir1", - " .gitignore", + separator!("v root"), + separator!(" v dir1 <== selected"), + separator!(" > empty1"), + separator!(" > ignored_dir"), + separator!(" > subdir1"), + separator!(" .gitignore"), ], "With auto-fold disabled: should show all directories separately" ); @@ -8682,18 +8742,18 @@ mod tests { assert_eq!( visible_entries_as_strings(&panel, 0..20, cx), &[ - "v root", - " v dir1 <== selected", - " v empty1", - " v empty2", - " v empty3", - " file.txt", - " > ignored_dir", - " v subdir1", - " > ignored_nested", - " file1.txt", - " file2.txt", - " .gitignore", + separator!("v root"), + separator!(" v dir1 <== selected"), + separator!(" v empty1"), + separator!(" v empty2"), + separator!(" v empty3"), + separator!(" file.txt"), + separator!(" > ignored_dir"), + separator!(" v subdir1"), + separator!(" > ignored_nested"), + separator!(" file1.txt"), + separator!(" file2.txt"), + separator!(" .gitignore"), ], "After expand_all without auto-fold: should expand all dirs normally, \ expand ignored_dir itself but not its subdirs, and not expand ignored_nested" @@ -8712,20 +8772,20 @@ mod tests { assert_eq!( visible_entries_as_strings(&panel, 0..20, cx), &[ - "v root", - " v dir1 <== selected", - " v empty1", - " v empty2", - " v empty3", - " file.txt", - " v ignored_dir", - " v subdir", - " deep_file.txt", - " v subdir1", - " > ignored_nested", - " file1.txt", - " file2.txt", - " .gitignore", + separator!("v root"), + separator!(" v dir1 <== selected"), + separator!(" v empty1"), + separator!(" v empty2"), + separator!(" v empty3"), + separator!(" file.txt"), + separator!(" v ignored_dir"), + separator!(" v subdir"), + separator!(" deep_file.txt"), + separator!(" v subdir1"), + separator!(" > ignored_nested"), + separator!(" file1.txt"), + separator!(" file2.txt"), + separator!(" .gitignore"), ], "After expand_all on ignored_dir: should expand all contents of the ignored directory" ); @@ -8737,7 +8797,7 @@ mod tests { let fs = FakeFs::new(cx.executor().clone()); fs.insert_tree( - "/root", + path!("/root"), json!({ "dir1": { "subdir1": { @@ -8759,7 +8819,7 @@ mod tests { ) .await; - let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); let cx = &mut VisualTestContext::from_window(*workspace, cx); @@ -8776,15 +8836,15 @@ mod tests { assert_eq!( visible_entries_as_strings(&panel, 0..20, cx), &[ - "v root", - " v dir1", - " v subdir1", - " v nested1", - " file1.txt", - " file2.txt", - " v subdir2 <== selected", - " file4.txt", - " > dir2", + separator!("v root"), + separator!(" v dir1"), + separator!(" v subdir1"), + separator!(" v nested1"), + separator!(" file1.txt"), + separator!(" file2.txt"), + separator!(" v subdir2 <== selected"), + separator!(" file4.txt"), + separator!(" > dir2"), ], "Initial state with everything expanded" ); @@ -8826,13 +8886,13 @@ mod tests { assert_eq!( visible_entries_as_strings(&panel, 0..20, cx), &[ - "v root", - " v dir1", - " v subdir1/nested1 <== selected", - " file1.txt", - " file2.txt", - " > subdir2", - " > dir2/single_file", + separator!("v root"), + separator!(" v dir1"), + separator!(" v subdir1/nested1 <== selected"), + separator!(" file1.txt"), + separator!(" file2.txt"), + separator!(" > subdir2"), + separator!(" > dir2/single_file"), ], "Initial state with some dirs expanded" ); @@ -8849,11 +8909,11 @@ mod tests { assert_eq!( 
visible_entries_as_strings(&panel, 0..20, cx), &[ - "v root", - " v dir1 <== selected", - " > subdir1/nested1", - " > subdir2", - " > dir2/single_file", + separator!("v root"), + separator!(" v dir1 <== selected"), + separator!(" > subdir1/nested1"), + separator!(" > subdir2"), + separator!(" > dir2/single_file"), ], "Subdirs should be collapsed and folded with auto-fold enabled" ); @@ -8881,14 +8941,14 @@ mod tests { assert_eq!( visible_entries_as_strings(&panel, 0..20, cx), &[ - "v root", - " v dir1", - " v subdir1", - " v nested1 <== selected", - " file1.txt", - " file2.txt", - " > subdir2", - " > dir2", + separator!("v root"), + separator!(" v dir1"), + separator!(" v subdir1"), + separator!(" v nested1 <== selected"), + separator!(" file1.txt"), + separator!(" file2.txt"), + separator!(" > subdir2"), + separator!(" > dir2"), ], "Initial state with some dirs expanded and auto-fold disabled" ); @@ -8905,11 +8965,11 @@ mod tests { assert_eq!( visible_entries_as_strings(&panel, 0..20, cx), &[ - "v root", - " v dir1 <== selected", - " > subdir1", - " > subdir2", - " > dir2", + separator!("v root"), + separator!(" v dir1 <== selected"), + separator!(" > subdir1"), + separator!(" > subdir2"), + separator!(" > dir2"), ], "Subdirs should be collapsed but not folded with auto-fold disabled" ); diff --git a/crates/project_symbols/src/project_symbols.rs b/crates/project_symbols/src/project_symbols.rs index 2b2462ef2556f4d96d9ba8dfd7d563b145e64f3e..7ae87aeff2a8fc119a9952d8a72137aacfd0a670 100644 --- a/crates/project_symbols/src/project_symbols.rs +++ b/crates/project_symbols/src/project_symbols.rs @@ -272,15 +272,17 @@ mod tests { use serde_json::json; use settings::SettingsStore; use std::{path::Path, sync::Arc}; + use util::path; #[gpui::test] async fn test_project_symbols(cx: &mut TestAppContext) { init_test(cx); let fs = FakeFs::new(cx.executor()); - fs.insert_tree("/dir", json!({ "test.rs": "" })).await; + fs.insert_tree(path!("/dir"), json!({ "test.rs": "" })) + .await; - let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(Arc::new(Language::new( @@ -299,7 +301,7 @@ mod tests { let _buffer = project .update(cx, |project, cx| { - project.open_local_buffer_with_lsp("/dir/test.rs", cx) + project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx) }) .await .unwrap(); @@ -307,9 +309,9 @@ mod tests { // Set up fake language server to return fuzzy matches against // a fixed set of symbol names. 
let fake_symbols = [ - symbol("one", "/external"), - symbol("ton", "/dir/test.rs"), - symbol("uno", "/dir/test.rs"), + symbol("one", path!("/external")), + symbol("ton", path!("/dir/test.rs")), + symbol("uno", path!("/dir/test.rs")), ]; let fake_server = fake_servers.next().await.unwrap(); fake_server.handle_request::( diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index 1d21d5860e09db1a2cfe5d45dee6ff76427e3122..6dc394b2f23159a6dcd85d5d3ef5aefff0670c42 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -595,6 +595,7 @@ mod tests { use project::{project_settings::ProjectSettings, Project}; use serde_json::json; use settings::SettingsStore; + use util::path; use workspace::{open_paths, AppState}; use super::*; @@ -615,7 +616,7 @@ mod tests { .fs .as_fake() .insert_tree( - "/dir", + path!("/dir"), json!({ "main.ts": "a" }), @@ -623,7 +624,7 @@ mod tests { .await; cx.update(|cx| { open_paths( - &[PathBuf::from("/dir/main.ts")], + &[PathBuf::from(path!("/dir/main.ts"))], app_state, workspace::OpenOptions::default(), cx, diff --git a/crates/refineable/derive_refineable/Cargo.toml b/crates/refineable/derive_refineable/Cargo.toml index 62669c610c36edf441c4ec366ec17468f3a419d8..8ec8de31fd2fb28bb88c1c145301ccb080b376ae 100644 --- a/crates/refineable/derive_refineable/Cargo.toml +++ b/crates/refineable/derive_refineable/Cargo.toml @@ -14,6 +14,6 @@ proc-macro = true doctest = false [dependencies] -syn = "1.0.72" -quote = "1.0.9" -proc-macro2 = "1.0.66" +proc-macro2.workspace = true +quote.workspace = true +syn.workspace = true diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index be18bad293829ea749ebe7a9d85d5a04e3d3354b..b39df8edce3f8c9ae210d00565ddf006f4ef19b6 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -1,3 +1,6 @@ +/// todo(windows) +/// The tests in this file assume that server_cx is running on Windows too. +/// We neead to find a way to test Windows-Non-Windows interactions. use crate::headless_project::HeadlessProject; use client::{Client, UserStore}; use clock::FakeSystemClock; @@ -24,12 +27,13 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; +use util::{path, separator}; #[gpui::test] async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { let fs = FakeFs::new(server_cx.executor()); fs.insert_tree( - "/code", + path!("/code"), json!({ "project1": { ".git": {}, @@ -45,14 +49,14 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test ) .await; fs.set_index_for_repo( - Path::new("/code/project1/.git"), + Path::new(path!("/code/project1/.git")), &[("src/lib.rs".into(), "fn one() -> usize { 0 }".into())], ); let (project, _headless) = init_test(&fs, cx, server_cx).await; let (worktree, _) = project .update(cx, |project, cx| { - project.find_or_create_worktree("/code/project1", true, cx) + project.find_or_create_worktree(path!("/code/project1"), true, cx) }) .await .unwrap(); @@ -113,7 +117,7 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test // A new file is created in the remote filesystem. The user // sees the new file. 
fs.save( - "/code/project1/src/main.rs".as_ref(), + path!("/code/project1/src/main.rs").as_ref(), &"fn main() {}".into(), Default::default(), ) @@ -134,8 +138,8 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test // A file that is currently open in a buffer is renamed. fs.rename( - "/code/project1/src/lib.rs".as_ref(), - "/code/project1/src/lib2.rs".as_ref(), + path!("/code/project1/src/lib.rs").as_ref(), + path!("/code/project1/src/lib2.rs").as_ref(), Default::default(), ) .await @@ -146,7 +150,7 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test }); fs.set_index_for_repo( - Path::new("/code/project1/.git"), + Path::new(path!("/code/project1/.git")), &[("src/lib2.rs".into(), "fn one() -> usize { 100 }".into())], ); cx.executor().run_until_parked(); @@ -162,7 +166,7 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test async fn test_remote_project_search(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { let fs = FakeFs::new(server_cx.executor()); fs.insert_tree( - "/code", + path!("/code"), json!({ "project1": { ".git": {}, @@ -179,7 +183,7 @@ async fn test_remote_project_search(cx: &mut TestAppContext, server_cx: &mut Tes project .update(cx, |project, cx| { - project.find_or_create_worktree("/code/project1", true, cx) + project.find_or_create_worktree(path!("/code/project1"), true, cx) }) .await .unwrap(); @@ -210,7 +214,7 @@ async fn test_remote_project_search(cx: &mut TestAppContext, server_cx: &mut Tes buffer.update(&mut cx, |buffer, cx| { assert_eq!( buffer.file().unwrap().full_path(cx).to_string_lossy(), - "project1/README.md" + separator!("project1/README.md") ) }); @@ -368,7 +372,7 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { let fs = FakeFs::new(server_cx.executor()); fs.insert_tree( - "/code", + path!("/code"), json!({ "project1": { ".git": {}, @@ -384,7 +388,7 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext let (project, headless) = init_test(&fs, cx, server_cx).await; fs.insert_tree( - "/code/project1/.zed", + path!("/code/project1/.zed"), json!({ "settings.json": r#" { @@ -431,7 +435,7 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext let worktree_id = project .update(cx, |project, cx| { - project.find_or_create_worktree("/code/project1", true, cx) + project.find_or_create_worktree(path!("/code/project1"), true, cx) }) .await .unwrap() @@ -512,7 +516,7 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext Ok(Some(lsp::WorkspaceEdit { changes: Some( [( - lsp::Url::from_file_path("/code/project1/src/lib.rs").unwrap(), + lsp::Url::from_file_path(path!("/code/project1/src/lib.rs")).unwrap(), vec![lsp::TextEdit::new( lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(0, 6)), "two".to_string(), @@ -545,7 +549,7 @@ async fn test_remote_cancel_language_server_work( ) { let fs = FakeFs::new(server_cx.executor()); fs.insert_tree( - "/code", + path!("/code"), json!({ "project1": { ".git": {}, @@ -561,7 +565,7 @@ async fn test_remote_cancel_language_server_work( let (project, headless) = init_test(&fs, cx, server_cx).await; fs.insert_tree( - "/code/project1/.zed", + path!("/code/project1/.zed"), json!({ "settings.json": r#" { @@ -608,7 +612,7 @@ async fn test_remote_cancel_language_server_work( let worktree_id = project .update(cx, |project, cx| { - 
project.find_or_create_worktree("/code/project1", true, cx) + project.find_or_create_worktree(path!("/code/project1"), true, cx) }) .await .unwrap() @@ -708,7 +712,7 @@ async fn test_remote_cancel_language_server_work( async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { let fs = FakeFs::new(server_cx.executor()); fs.insert_tree( - "/code", + path!("/code"), json!({ "project1": { ".git": {}, @@ -724,7 +728,7 @@ async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppCont let (project, _headless) = init_test(&fs, cx, server_cx).await; let (worktree, _) = project .update(cx, |project, cx| { - project.find_or_create_worktree("/code/project1", true, cx) + project.find_or_create_worktree(path!("/code/project1"), true, cx) }) .await .unwrap(); @@ -739,7 +743,7 @@ async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppCont .unwrap(); fs.save( - &PathBuf::from("/code/project1/src/lib.rs"), + &PathBuf::from(path!("/code/project1/src/lib.rs")), &("bangles".to_string().into()), LineEnding::Unix, ) @@ -754,7 +758,7 @@ async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppCont }); fs.save( - &PathBuf::from("/code/project1/src/lib.rs"), + &PathBuf::from(path!("/code/project1/src/lib.rs")), &("bloop".to_string().into()), LineEnding::Unix, ) @@ -786,7 +790,7 @@ async fn test_remote_resolve_path_in_buffer( ) { let fs = FakeFs::new(server_cx.executor()); fs.insert_tree( - "/code", + path!("/code"), json!({ "project1": { ".git": {}, @@ -802,7 +806,7 @@ async fn test_remote_resolve_path_in_buffer( let (project, _headless) = init_test(&fs, cx, server_cx).await; let (worktree, _) = project .update(cx, |project, cx| { - project.find_or_create_worktree("/code/project1", true, cx) + project.find_or_create_worktree(path!("/code/project1"), true, cx) }) .await .unwrap(); @@ -818,14 +822,14 @@ async fn test_remote_resolve_path_in_buffer( let path = project .update(cx, |project, cx| { - project.resolve_path_in_buffer("/code/project1/README.md", &buffer, cx) + project.resolve_path_in_buffer(path!("/code/project1/README.md"), &buffer, cx) }) .await .unwrap(); assert!(path.is_file()); assert_eq!( path.abs_path().unwrap().to_string_lossy(), - "/code/project1/README.md" + path!("/code/project1/README.md") ); let path = project @@ -1013,7 +1017,7 @@ async fn test_adding_then_removing_then_adding_worktrees( async fn test_open_server_settings(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { let fs = FakeFs::new(server_cx.executor()); fs.insert_tree( - "/code", + path!("/code"), json!({ "project1": { ".git": {}, @@ -1035,7 +1039,9 @@ async fn test_open_server_settings(cx: &mut TestAppContext, server_cx: &mut Test cx.update(|cx| { assert_eq!( buffer.read(cx).text(), - initial_server_settings_content().to_string() + initial_server_settings_content() + .to_string() + .replace("\r\n", "\n") ) }) } @@ -1044,7 +1050,7 @@ async fn test_open_server_settings(cx: &mut TestAppContext, server_cx: &mut Test async fn test_reconnect(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { let fs = FakeFs::new(server_cx.executor()); fs.insert_tree( - "/code", + path!("/code"), json!({ "project1": { ".git": {}, @@ -1061,7 +1067,7 @@ async fn test_reconnect(cx: &mut TestAppContext, server_cx: &mut TestAppContext) let (worktree, _) = project .update(cx, |project, cx| { - project.find_or_create_worktree("/code/project1", true, cx) + project.find_or_create_worktree(path!("/code/project1"), true, cx) }) .await .unwrap(); @@ -1091,7 +1097,9 
@@ async fn test_reconnect(cx: &mut TestAppContext, server_cx: &mut TestAppContext) .unwrap(); assert_eq!( - fs.load("/code/project1/src/lib.rs".as_ref()).await.unwrap(), + fs.load(path!("/code/project1/src/lib.rs").as_ref()) + .await + .unwrap(), "fn one() -> usize { 100 }" ); } diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 3c40eecb2f579f1257d497bebff99059b4be533c..3fe85f13f20ad78209b675071ec28a72b676ed4d 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -2188,6 +2188,7 @@ pub mod tests { use project::FakeFs; use serde_json::json; use settings::SettingsStore; + use util::path; use workspace::DeploySearch; #[gpui::test] @@ -3313,13 +3314,13 @@ pub mod tests { let fs = FakeFs::new(cx.background_executor.clone()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "one.rs": "const ONE: usize = 1;", }), ) .await; - let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let worktree_id = project.update(cx, |this, cx| { this.worktrees(cx).next().unwrap().read(cx).id() }); @@ -3537,13 +3538,13 @@ pub mod tests { // Setup 2 panes, both with a file open and one with a project search. let fs = FakeFs::new(cx.background_executor.clone()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "one.rs": "const ONE: usize = 1;", }), ) .await; - let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let worktree_id = project.update(cx, |this, cx| { this.worktrees(cx).next().unwrap().read(cx).id() }); @@ -3771,13 +3772,13 @@ pub mod tests { let fs = FakeFs::new(cx.background_executor.clone()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "one.rs": "const ONE: usize = 1;", }), ) .await; - let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let worktree_id = project.update(cx, |this, cx| { this.worktrees(cx).next().unwrap().read(cx).id() }); diff --git a/crates/semantic_index/Cargo.toml b/crates/semantic_index/Cargo.toml index 221e854195fe9229b317bcd2830b766d4014a852..1f3c40507bf6d83b391bf26ca8064464691e0261 100644 --- a/crates/semantic_index/Cargo.toml +++ b/crates/semantic_index/Cargo.toml @@ -44,9 +44,9 @@ sha2.workspace = true smol.workspace = true theme.workspace = true tree-sitter.workspace = true -ui. workspace = true +ui.workspace = true unindent.workspace = true -util. 
workspace = true +util.workspace = true workspace.workspace = true worktree.workspace = true diff --git a/crates/semantic_index/src/semantic_index.rs b/crates/semantic_index/src/semantic_index.rs index 0daf4a985ac2b25707956d59cd906945c251d196..9345965ccd727462db1e1ad805161cf03896382c 100644 --- a/crates/semantic_index/src/semantic_index.rs +++ b/crates/semantic_index/src/semantic_index.rs @@ -279,6 +279,7 @@ mod tests { use settings::SettingsStore; use smol::channel; use std::{future, path::Path, sync::Arc}; + use util::separator; fn init_test(cx: &mut TestAppContext) { env_logger::try_init().ok(); @@ -421,7 +422,10 @@ mod tests { // Find result that is greater than 0.5 let search_result = results.iter().find(|result| result.score > 0.9).unwrap(); - assert_eq!(search_result.path.to_string_lossy(), "fixture/needle.md"); + assert_eq!( + search_result.path.to_string_lossy(), + separator!("fixture/needle.md") + ); let content = cx .update(|cx| { diff --git a/crates/settings/src/settings_file.rs b/crates/settings/src/settings_file.rs index 101695508f0ce9f083730babf9ef3842e26542b7..622c42d006df7a0bf89280ad666afab20f60d035 100644 --- a/crates/settings/src/settings_file.rs +++ b/crates/settings/src/settings_file.rs @@ -12,6 +12,7 @@ pub fn test_settings() -> String { crate::default_settings().as_ref(), ) .unwrap(); + #[cfg(not(target_os = "windows"))] util::merge_non_null_json_value_into( serde_json::json!({ "ui_font_family": "Courier", @@ -26,6 +27,21 @@ pub fn test_settings() -> String { }), &mut value, ); + #[cfg(target_os = "windows")] + util::merge_non_null_json_value_into( + serde_json::json!({ + "ui_font_family": "Courier New", + "ui_font_features": {}, + "ui_font_size": 14, + "ui_font_fallback": [], + "buffer_font_family": "Courier New", + "buffer_font_features": {}, + "buffer_font_size": 14, + "buffer_font_fallback": [], + "theme": EMPTY_THEME_NAME, + }), + &mut value, + ); value.as_object_mut().unwrap().remove("languages"); serde_json::to_string(&value).unwrap() } diff --git a/crates/sqlez_macros/Cargo.toml b/crates/sqlez_macros/Cargo.toml index 5959617f72c911ccbfdac1d756c381200be2a881..cff96d0b8949757761421c9003250343297bd14c 100644 --- a/crates/sqlez_macros/Cargo.toml +++ b/crates/sqlez_macros/Cargo.toml @@ -16,4 +16,4 @@ doctest = false [dependencies] sqlez.workspace = true sqlformat.workspace = true -syn = "1.0" +syn.workspace = true diff --git a/crates/tab_switcher/src/tab_switcher_tests.rs b/crates/tab_switcher/src/tab_switcher_tests.rs index 045879ef069bb6135df6f73ef03b8143cc99d6f8..f1a5b64b10b8aea37fb252b14b1e45094e108e76 100644 --- a/crates/tab_switcher/src/tab_switcher_tests.rs +++ b/crates/tab_switcher/src/tab_switcher_tests.rs @@ -5,6 +5,7 @@ use menu::SelectPrev; use project::{Project, ProjectPath}; use serde_json::json; use std::path::Path; +use util::path; use workspace::{AppState, Workspace}; #[ctor::ctor] @@ -24,7 +25,7 @@ async fn test_open_with_prev_tab_selected_and_cycle_on_toggle_action( .fs .as_fake() .insert_tree( - "/root", + path!("/root"), json!({ "1.txt": "First file", "2.txt": "Second file", @@ -34,7 +35,7 @@ async fn test_open_with_prev_tab_selected_and_cycle_on_toggle_action( ) .await; - let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); @@ -81,7 +82,7 @@ async fn test_open_with_last_tab_selected(cx: &mut gpui::TestAppContext) { .fs 
.as_fake() .insert_tree( - "/root", + path!("/root"), json!({ "1.txt": "First file", "2.txt": "Second file", @@ -90,7 +91,7 @@ async fn test_open_with_last_tab_selected(cx: &mut gpui::TestAppContext) { ) .await; - let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); @@ -172,10 +173,10 @@ async fn test_open_with_single_item(cx: &mut gpui::TestAppContext) { app_state .fs .as_fake() - .insert_tree("/root", json!({"1.txt": "Single file"})) + .insert_tree(path!("/root"), json!({"1.txt": "Single file"})) .await; - let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); @@ -195,7 +196,7 @@ async fn test_close_selected_item(cx: &mut gpui::TestAppContext) { .fs .as_fake() .insert_tree( - "/root", + path!("/root"), json!({ "1.txt": "First file", "2.txt": "Second file", @@ -203,7 +204,7 @@ async fn test_close_selected_item(cx: &mut gpui::TestAppContext) { ) .await; - let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); @@ -241,7 +242,7 @@ async fn test_close_preserves_selected_position(cx: &mut gpui::TestAppContext) { .fs .as_fake() .insert_tree( - "/root", + path!("/root"), json!({ "1.txt": "First file", "2.txt": "Second file", @@ -250,7 +251,7 @@ async fn test_close_preserves_selected_position(cx: &mut gpui::TestAppContext) { ) .await; - let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); diff --git a/crates/tasks_ui/src/modal.rs b/crates/tasks_ui/src/modal.rs index 4d65f772f2cf4b491a8306ee92e89ff66fc09c7a..f0d1c21f1e6934821caf97a8d2facf5825bf12e4 100644 --- a/crates/tasks_ui/src/modal.rs +++ b/crates/tasks_ui/src/modal.rs @@ -603,6 +603,7 @@ mod tests { use project::{ContextProviderWithTasks, FakeFs, Project}; use serde_json::json; use task::TaskTemplates; + use util::path; use workspace::CloseInactiveTabsAndPanes; use crate::{modal::Spawn, tests::init_test}; @@ -614,7 +615,7 @@ mod tests { init_test(cx); let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ ".zed": { "tasks.json": r#"[ @@ -635,7 +636,7 @@ mod tests { ) .await; - let project = Project::test(fs, ["/dir".as_ref()], cx).await; + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); @@ -654,7 +655,7 @@ mod tests { let _ = workspace .update_in(cx, |workspace, window, cx| { - workspace.open_abs_path(PathBuf::from("/dir/a.ts"), true, window, cx) + workspace.open_abs_path(PathBuf::from(path!("/dir/a.ts")), true, window, cx) }) .await .unwrap(); @@ -778,7 +779,7 @@ mod tests { init_test(cx); let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ ".zed": { "tasks.json": r#"[ @@ -800,7 +801,7 
@@ mod tests { ) .await; - let project = Project::test(fs, ["/dir".as_ref()], cx).await; + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); @@ -819,7 +820,7 @@ mod tests { let _ = workspace .update_in(cx, |workspace, window, cx| { workspace.open_abs_path( - PathBuf::from("/dir/file_with.odd_extension"), + PathBuf::from(path!("/dir/file_with.odd_extension")), true, window, cx, @@ -832,8 +833,8 @@ mod tests { assert_eq!( task_names(&tasks_picker, cx), vec![ - "hello from /dir/file_with.odd_extension:1:1".to_string(), - "opened now: /dir".to_string() + concat!("hello from ", path!("/dir/file_with.odd_extension:1:1")).to_string(), + concat!("opened now: ", path!("/dir")).to_string(), ], "Second opened buffer should fill the context, labels should be trimmed if long enough" ); @@ -846,7 +847,7 @@ mod tests { let second_item = workspace .update_in(cx, |workspace, window, cx| { workspace.open_abs_path( - PathBuf::from("/dir/file_without_extension"), + PathBuf::from(path!("/dir/file_without_extension")), true, window, cx, @@ -868,8 +869,8 @@ mod tests { assert_eq!( task_names(&tasks_picker, cx), vec![ - "hello from /dir/file_without_extension:2:3".to_string(), - "opened now: /dir".to_string() + concat!("hello from ", path!("/dir/file_without_extension:2:3")).to_string(), + concat!("opened now: ", path!("/dir")).to_string(), ], "Opened buffer should fill the context, labels should be trimmed if long enough" ); @@ -885,7 +886,7 @@ mod tests { init_test(cx); let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "a1.ts": "// a1", "a2.ts": "// a2", @@ -894,7 +895,7 @@ mod tests { ) .await; - let project = Project::test(fs, ["/dir".as_ref()], cx).await; + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; project.read_with(cx, |project, _| { let language_registry = project.languages(); language_registry.add(Arc::new( @@ -955,7 +956,7 @@ mod tests { let _ts_file_1 = workspace .update_in(cx, |workspace, window, cx| { - workspace.open_abs_path(PathBuf::from("/dir/a1.ts"), true, window, cx) + workspace.open_abs_path(PathBuf::from(path!("/dir/a1.ts")), true, window, cx) }) .await .unwrap(); @@ -963,23 +964,28 @@ mod tests { assert_eq!( task_names(&tasks_picker, cx), vec![ - "Another task from file /dir/a1.ts", - "TypeScript task from file /dir/a1.ts", + concat!("Another task from file ", path!("/dir/a1.ts")), + concat!("TypeScript task from file ", path!("/dir/a1.ts")), "Task without variables", ], "Should open spawn TypeScript tasks for the opened file, tasks with most template variables above, all groups sorted alphanumerically" ); + emulate_task_schedule( tasks_picker, &project, - "TypeScript task from file /dir/a1.ts", + concat!("TypeScript task from file ", path!("/dir/a1.ts")), cx, ); let tasks_picker = open_spawn_tasks(&workspace, cx); assert_eq!( task_names(&tasks_picker, cx), - vec!["TypeScript task from file /dir/a1.ts", "Another task from file /dir/a1.ts", "Task without variables"], + vec![ + concat!("TypeScript task from file ", path!("/dir/a1.ts")), + concat!("Another task from file ", path!("/dir/a1.ts")), + "Task without variables", + ], "After spawning the task and getting it into the history, it should be up in the sort as recently used. Tasks with the same labels and context are deduplicated." 
); @@ -991,7 +997,7 @@ mod tests { let _ts_file_2 = workspace .update_in(cx, |workspace, window, cx| { - workspace.open_abs_path(PathBuf::from("/dir/a2.ts"), true, window, cx) + workspace.open_abs_path(PathBuf::from(path!("/dir/a2.ts")), true, window, cx) }) .await .unwrap(); @@ -999,10 +1005,10 @@ mod tests { assert_eq!( task_names(&tasks_picker, cx), vec![ - "TypeScript task from file /dir/a1.ts", - "Another task from file /dir/a2.ts", - "TypeScript task from file /dir/a2.ts", - "Task without variables" + concat!("TypeScript task from file ", path!("/dir/a1.ts")), + concat!("Another task from file ", path!("/dir/a2.ts")), + concat!("TypeScript task from file ", path!("/dir/a2.ts")), + "Task without variables", ], "Even when both TS files are open, should only show the history (on the top), and tasks, resolved for the current file" ); @@ -1029,7 +1035,7 @@ mod tests { emulate_task_schedule(tasks_picker, &project, "Rust task", cx); let _ts_file_2 = workspace .update_in(cx, |workspace, window, cx| { - workspace.open_abs_path(PathBuf::from("/dir/a2.ts"), true, window, cx) + workspace.open_abs_path(PathBuf::from(path!("/dir/a2.ts")), true, window, cx) }) .await .unwrap(); @@ -1037,10 +1043,10 @@ mod tests { assert_eq!( task_names(&tasks_picker, cx), vec![ - "TypeScript task from file /dir/a1.ts", - "Another task from file /dir/a2.ts", - "TypeScript task from file /dir/a2.ts", - "Task without variables" + concat!("TypeScript task from file ", path!("/dir/a1.ts")), + concat!("Another task from file ", path!("/dir/a2.ts")), + concat!("TypeScript task from file ", path!("/dir/a2.ts")), + "Task without variables", ], "After closing all but *.rs tabs, running a Rust task and switching back to TS tasks, \ same TS spawn history should be restored" diff --git a/crates/tasks_ui/src/tasks_ui.rs b/crates/tasks_ui/src/tasks_ui.rs index 0b0eb3654c851468d85965cf783086fb74f132e2..36cc14099343aa5101cb00768ee1f75736852199 100644 --- a/crates/tasks_ui/src/tasks_ui.rs +++ b/crates/tasks_ui/src/tasks_ui.rs @@ -262,6 +262,7 @@ mod tests { use serde_json::json; use task::{TaskContext, TaskVariables, VariableName}; use ui::VisualContext; + use util::{path, separator}; use workspace::{AppState, Workspace}; use crate::task_context; @@ -271,7 +272,7 @@ mod tests { init_test(cx); let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ ".zed": { "tasks.json": r#"[ @@ -295,7 +296,7 @@ mod tests { }), ) .await; - let project = Project::test(fs, ["/dir".as_ref()], cx).await; + let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; let worktree_store = project.update(cx, |project, _| project.worktree_store().clone()); let rust_language = Arc::new( Language::new( @@ -375,17 +376,18 @@ mod tests { task_context(workspace, window, cx) }) .await; + assert_eq!( first_context, TaskContext { - cwd: Some("/dir".into()), + cwd: Some(path!("/dir").into()), task_variables: TaskVariables::from_iter([ - (VariableName::File, "/dir/rust/b.rs".into()), + (VariableName::File, path!("/dir/rust/b.rs").into()), (VariableName::Filename, "b.rs".into()), - (VariableName::RelativeFile, "rust/b.rs".into()), - (VariableName::Dirname, "/dir/rust".into()), + (VariableName::RelativeFile, separator!("rust/b.rs").into()), + (VariableName::Dirname, path!("/dir/rust").into()), (VariableName::Stem, "b".into()), - (VariableName::WorktreeRoot, "/dir".into()), + (VariableName::WorktreeRoot, path!("/dir").into()), (VariableName::Row, "1".into()), (VariableName::Column, "1".into()), ]), @@ -407,14 +409,14 @@ mod tests 
{ }) .await, TaskContext { - cwd: Some("/dir".into()), + cwd: Some(path!("/dir").into()), task_variables: TaskVariables::from_iter([ - (VariableName::File, "/dir/rust/b.rs".into()), + (VariableName::File, path!("/dir/rust/b.rs").into()), (VariableName::Filename, "b.rs".into()), - (VariableName::RelativeFile, "rust/b.rs".into()), - (VariableName::Dirname, "/dir/rust".into()), + (VariableName::RelativeFile, separator!("rust/b.rs").into()), + (VariableName::Dirname, path!("/dir/rust").into()), (VariableName::Stem, "b".into()), - (VariableName::WorktreeRoot, "/dir".into()), + (VariableName::WorktreeRoot, path!("/dir").into()), (VariableName::Row, "1".into()), (VariableName::Column, "15".into()), (VariableName::SelectedText, "is_i".into()), @@ -433,14 +435,14 @@ mod tests { }) .await, TaskContext { - cwd: Some("/dir".into()), + cwd: Some(path!("/dir").into()), task_variables: TaskVariables::from_iter([ - (VariableName::File, "/dir/a.ts".into()), + (VariableName::File, path!("/dir/a.ts").into()), (VariableName::Filename, "a.ts".into()), (VariableName::RelativeFile, "a.ts".into()), - (VariableName::Dirname, "/dir".into()), + (VariableName::Dirname, path!("/dir").into()), (VariableName::Stem, "a".into()), - (VariableName::WorktreeRoot, "/dir".into()), + (VariableName::WorktreeRoot, path!("/dir").into()), (VariableName::Row, "1".into()), (VariableName::Column, "1".into()), (VariableName::Symbol, "this_is_a_test".into()), diff --git a/crates/ui_macros/Cargo.toml b/crates/ui_macros/Cargo.toml index 7687c2b36ba66df61b87f75d0e653d240264a4e3..773c07d2383b62d62f948d986c275635fbfa2e08 100644 --- a/crates/ui_macros/Cargo.toml +++ b/crates/ui_macros/Cargo.toml @@ -13,7 +13,7 @@ path = "src/ui_macros.rs" proc-macro = true [dependencies] -proc-macro2 = "1.0.66" -quote = "1.0.9" -syn = { version = "1.0.72", features = ["full", "extra-traits"] } +proc-macro2.workspace = true +quote.workspace = true +syn.workspace = true convert_case.workspace = true diff --git a/crates/util/Cargo.toml b/crates/util/Cargo.toml index 213e2fc0d4f38a2cf3c65d1e71c65938485358ff..06c2c4d8bac78620484a3c71a098506db18b4fb1 100644 --- a/crates/util/Cargo.toml +++ b/crates/util/Cargo.toml @@ -13,7 +13,7 @@ path = "src/util.rs" doctest = true [features] -test-support = ["tempfile", "git2", "rand"] +test-support = ["tempfile", "git2", "rand", "util_macros"] [dependencies] anyhow.workspace = true @@ -35,6 +35,7 @@ smol.workspace = true take-until.workspace = true tempfile = { workspace = true, optional = true } unicase.workspace = true +util_macros = { workspace = true, optional = true } [target.'cfg(unix)'.dependencies] libc.workspace = true @@ -47,3 +48,4 @@ dunce = "1.0" git2.workspace = true rand.workspace = true tempfile.workspace = true +util_macros.workspace = true diff --git a/crates/util/src/paths.rs b/crates/util/src/paths.rs index ba78d6d06d4270247846f33dc4e7f8228ef224f7..275895d228440ebe320e44ed0511e245b89eeed3 100644 --- a/crates/util/src/paths.rs +++ b/crates/util/src/paths.rs @@ -23,6 +23,7 @@ pub trait PathExt { fn compact(&self) -> PathBuf; fn icon_stem_or_suffix(&self) -> Option<&str>; fn extension_or_hidden_file_name(&self) -> Option<&str>; + fn to_sanitized_string(&self) -> String; fn try_from_bytes<'a>(bytes: &'a [u8]) -> anyhow::Result where Self: From<&'a Path>, @@ -94,6 +95,20 @@ impl> PathExt for T { self.as_ref().file_name()?.to_str()?.split('.').last() } + + /// Returns a sanitized string representation of the path. 
+ /// Note, on Windows, this assumes that the path is a valid UTF-8 string and + /// is not a UNC path. + fn to_sanitized_string(&self) -> String { + #[cfg(target_os = "windows")] + { + self.as_ref().to_string_lossy().replace("/", "\\") + } + #[cfg(not(target_os = "windows"))] + { + self.as_ref().to_string_lossy().to_string() + } + } } /// Due to the issue of UNC paths on Windows, which can cause bugs in various parts of Zed, introducing this `SanitizedPath` @@ -115,6 +130,17 @@ impl SanitizedPath { self.0.to_string_lossy().to_string() } + pub fn to_glob_string(&self) -> String { + #[cfg(target_os = "windows")] + { + self.0.to_string_lossy().replace("/", "\\") + } + #[cfg(not(target_os = "windows"))] + { + self.0.to_string_lossy().to_string() + } + } + pub fn join(&self, path: &Self) -> Self { self.0.join(&path.0).into() } @@ -448,14 +474,6 @@ pub fn compare_paths( } } -#[cfg(any(test, feature = "test-support"))] -pub fn replace_path_separator(path: &str) -> String { - #[cfg(target_os = "windows")] - return path.replace("/", std::path::MAIN_SEPARATOR_STR); - #[cfg(not(target_os = "windows"))] - return path.to_string(); -} - #[cfg(test)] mod tests { use super::*; diff --git a/crates/util/src/util.rs b/crates/util/src/util.rs index 77a788aef2cc56562c16d17376d0e9fdc8357b6c..9fd802a09cf1658941d038ff9bdc3b754716bf51 100644 --- a/crates/util/src/util.rs +++ b/crates/util/src/util.rs @@ -28,6 +28,8 @@ use unicase::UniCase; use anyhow::{anyhow, Context as _}; pub use take_until::*; +#[cfg(any(test, feature = "test-support"))] +pub use util_macros::{separator, uri}; #[macro_export] macro_rules! debug_panic { @@ -41,6 +43,50 @@ macro_rules! debug_panic { }; } +/// A macro to add "C:" to the beginning of a path literal on Windows, and replace all +/// the separator from `/` to `\`. +/// But on non-Windows platforms, it will return the path literal as is. +/// +/// # Examples +/// ```rust +/// use util::path; +/// +/// let path = path!("/Users/user/file.txt"); +/// #[cfg(target_os = "windows")] +/// assert_eq!(path, "C:\\Users\\user\\file.txt"); +/// #[cfg(not(target_os = "windows"))] +/// assert_eq!(path, "/Users/user/file.txt"); +/// ``` +#[cfg(all(any(test, feature = "test-support"), target_os = "windows"))] +#[macro_export] +macro_rules! path { + ($path:literal) => { + concat!("C:", util::separator!($path)) + }; +} + +/// A macro to add "C:" to the beginning of a path literal on Windows, and replace all +/// the separator from `/` to `\`. +/// But on non-Windows platforms, it will return the path literal as is. +/// +/// # Examples +/// ```rust +/// use util::path; +/// +/// let path = path!("/Users/user/file.txt"); +/// #[cfg(target_os = "windows")] +/// assert_eq!(path, "C:\\Users\\user\\file.txt"); +/// #[cfg(not(target_os = "windows"))] +/// assert_eq!(path, "/Users/user/file.txt"); +/// ``` +#[cfg(all(any(test, feature = "test-support"), not(target_os = "windows")))] +#[macro_export] +macro_rules! 
path { + ($path:literal) => { + $path + }; +} + pub fn truncate(s: &str, max_chars: usize) -> &str { match s.char_indices().nth(max_chars) { None => s, diff --git a/crates/util_macros/Cargo.toml b/crates/util_macros/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..59c8ee9699515203b681c54833b6e15191a32c3f --- /dev/null +++ b/crates/util_macros/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "util_macros" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/util_macros.rs" +proc-macro = true +doctest = false + +[dependencies] +quote.workspace = true +syn.workspace = true diff --git a/crates/util_macros/LICENSE-APACHE b/crates/util_macros/LICENSE-APACHE new file mode 120000 index 0000000000000000000000000000000000000000..1cd601d0a3affae83854be02a0afdec3b7a9ec4d --- /dev/null +++ b/crates/util_macros/LICENSE-APACHE @@ -0,0 +1 @@ +../../LICENSE-APACHE \ No newline at end of file diff --git a/crates/util_macros/src/util_macros.rs b/crates/util_macros/src/util_macros.rs new file mode 100644 index 0000000000000000000000000000000000000000..2baba2f473881ea8672d5a19eb56a5c6b4180666 --- /dev/null +++ b/crates/util_macros/src/util_macros.rs @@ -0,0 +1,56 @@ +#![cfg_attr(not(target_os = "windows"), allow(unused))] + +use proc_macro::TokenStream; +use quote::quote; +use syn::{parse_macro_input, LitStr}; + +/// This macro replaces the path separator `/` with `\` for Windows. +/// But if the target OS is not Windows, the path is returned as is. +/// +/// # Example +/// ```rust +/// # use util_macros::separator; +/// let path = separator!("path/to/file"); +/// #[cfg(target_os = "windows")] +/// assert_eq!(path, "path\\to\\file"); +/// #[cfg(not(target_os = "windows"))] +/// assert_eq!(path, "path/to/file"); +/// ``` +#[proc_macro] +pub fn separator(input: TokenStream) -> TokenStream { + let path = parse_macro_input!(input as LitStr); + let path = path.value(); + + #[cfg(target_os = "windows")] + let path = path.replace("/", "\\"); + + TokenStream::from(quote! { + #path + }) +} + +/// This macro replaces the path prefix `file:///` with `file:///C:/` for Windows. +/// But if the target OS is not Windows, the URI is returned as is. +/// +/// # Example +/// ```rust +/// use util_macros::uri; +/// +/// let uri = uri!("file:///path/to/file"); +/// #[cfg(target_os = "windows")] +/// assert_eq!(uri, "file:///C:/path/to/file"); +/// #[cfg(not(target_os = "windows"))] +/// assert_eq!(uri, "file:///path/to/file"); +/// ``` +#[proc_macro] +pub fn uri(input: TokenStream) -> TokenStream { + let uri = parse_macro_input!(input as LitStr); + let uri = uri.value(); + + #[cfg(target_os = "windows")] + let uri = uri.replace("file:///", "file:///C:/"); + + TokenStream::from(quote! 
{ + #uri + }) +} diff --git a/crates/vim/src/command.rs b/crates/vim/src/command.rs index ef378c0e48017aabbfd83ce307259450d0badc3a..bc1b4ba2017da6561404c32a0f3d1e183c32aff7 100644 --- a/crates/vim/src/command.rs +++ b/crates/vim/src/command.rs @@ -1455,6 +1455,7 @@ mod test { use editor::Editor; use gpui::{Context, TestAppContext}; use indoc::indoc; + use util::path; use workspace::Workspace; #[gpui::test] @@ -1551,13 +1552,13 @@ mod test { #[gpui::test] async fn test_command_write(cx: &mut TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; - let path = Path::new("/root/dir/file.rs"); + let path = Path::new(path!("/root/dir/file.rs")); let fs = cx.workspace(|workspace, _, cx| workspace.project().read(cx).fs().clone()); cx.simulate_keystrokes("i @ escape"); cx.simulate_keystrokes(": w enter"); - assert_eq!(fs.load(path).await.unwrap(), "@\n"); + assert_eq!(fs.load(path).await.unwrap().replace("\r\n", "\n"), "@\n"); fs.as_fake().insert_file(path, b"oops\n".to_vec()).await; @@ -1567,12 +1568,12 @@ mod test { assert!(cx.has_pending_prompt()); // "Cancel" cx.simulate_prompt_answer(0); - assert_eq!(fs.load(path).await.unwrap(), "oops\n"); + assert_eq!(fs.load(path).await.unwrap().replace("\r\n", "\n"), "oops\n"); assert!(!cx.has_pending_prompt()); // force overwrite cx.simulate_keystrokes(": w ! enter"); assert!(!cx.has_pending_prompt()); - assert_eq!(fs.load(path).await.unwrap(), "@@\n"); + assert_eq!(fs.load(path).await.unwrap().replace("\r\n", "\n"), "@@\n"); } #[gpui::test] @@ -1664,7 +1665,7 @@ mod test { let file_path = file.as_local().unwrap().abs_path(cx); assert_eq!(text, expected_text); - assert_eq!(file_path.to_str().unwrap(), expected_path); + assert_eq!(file_path, Path::new(expected_path)); } #[gpui::test] @@ -1673,16 +1674,22 @@ mod test { // Assert base state, that we're in /root/dir/file.rs cx.workspace(|workspace, _, cx| { - assert_active_item(workspace, "/root/dir/file.rs", "", cx); + assert_active_item(workspace, path!("/root/dir/file.rs"), "", cx); }); // Insert a new file let fs = cx.workspace(|workspace, _, cx| workspace.project().read(cx).fs().clone()); fs.as_fake() - .insert_file("/root/dir/file2.rs", "This is file2.rs".as_bytes().to_vec()) + .insert_file( + path!("/root/dir/file2.rs"), + "This is file2.rs".as_bytes().to_vec(), + ) .await; fs.as_fake() - .insert_file("/root/dir/file3.rs", "go to file3".as_bytes().to_vec()) + .insert_file( + path!("/root/dir/file3.rs"), + "go to file3".as_bytes().to_vec(), + ) .await; // Put the path to the second file into the currently open buffer @@ -1694,7 +1701,12 @@ mod test { // We now have two items cx.workspace(|workspace, _, cx| assert_eq!(workspace.items(cx).count(), 2)); cx.workspace(|workspace, _, cx| { - assert_active_item(workspace, "/root/dir/file2.rs", "This is file2.rs", cx); + assert_active_item( + workspace, + path!("/root/dir/file2.rs"), + "This is file2.rs", + cx, + ); }); // Update editor to point to `file2.rs` @@ -1711,7 +1723,7 @@ mod test { // We now have three items cx.workspace(|workspace, _, cx| assert_eq!(workspace.items(cx).count(), 3)); cx.workspace(|workspace, _, cx| { - assert_active_item(workspace, "/root/dir/file3.rs", "go to file3", cx); + assert_active_item(workspace, path!("/root/dir/file3.rs"), "go to file3", cx); }); } diff --git a/crates/vim/src/normal/paste.rs b/crates/vim/src/normal/paste.rs index eb3acde6dc3982ca8763f475c15fe43574e4b4d1..e47e48a6b496f4853bec780506791e2b7cae4f27 100644 --- a/crates/vim/src/normal/paste.rs +++ b/crates/vim/src/normal/paste.rs @@ -696,12 +696,20 @@ 
mod test { // not testing nvim as it doesn't have a filename cx.simulate_keystrokes("\" % p"); + #[cfg(not(target_os = "windows"))] cx.assert_state( indoc! {" The quick brown dogdir/file.rˇs"}, Mode::Normal, ); + #[cfg(target_os = "windows")] + cx.assert_state( + indoc! {" + The quick brown + dogdir\\file.rˇs"}, + Mode::Normal, + ); } #[gpui::test] diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 955afbe20912a7b232545fdeec3e4e1a8e7fcf99..8ba52747d1f50ad03ad4d39c70e2d2141dfaf503 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -1319,14 +1319,7 @@ impl LocalWorktree { let settings = self.settings.clone(); let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded(); let background_scanner = cx.background_executor().spawn({ - let abs_path = &snapshot.abs_path; - #[cfg(target_os = "windows")] - let abs_path = abs_path - .as_path() - .canonicalize() - .unwrap_or_else(|_| abs_path.as_path().to_path_buf()); - #[cfg(not(target_os = "windows"))] - let abs_path = abs_path.as_path().to_path_buf(); + let abs_path = snapshot.abs_path.as_path().to_path_buf(); let background = cx.background_executor().clone(); async move { let (events, watcher) = fs.watch(&abs_path, FS_WATCH_LATENCY).await; diff --git a/crates/worktree/src/worktree_tests.rs b/crates/worktree/src/worktree_tests.rs index 533ae7eb8724decc8c03a17ee0d862e81d8406d1..2cee728aec89e40500700c182ed617400085739e 100644 --- a/crates/worktree/src/worktree_tests.rs +++ b/crates/worktree/src/worktree_tests.rs @@ -2156,7 +2156,13 @@ const CONFLICT: FileStatus = FileStatus::Unmerged(UnmergedStatus { second_head: UnmergedStatusCode::Updated, }); +// NOTE: +// This test always fails on Windows because, unlike on Unix, you can't rename +// a directory that some program already has open. +// This is a limitation of Windows. +// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder #[gpui::test] +#[cfg_attr(target_os = "windows", ignore)] async fn test_rename_work_directory(cx: &mut TestAppContext) { init_test(cx); cx.executor().allow_parking(); @@ -2184,7 +2190,7 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) { let repo = git_init(&root_path.join("projects/project1")); git_add("a", &repo); git_commit("init", &repo); - std::fs::write(root_path.join("projects/project1/a"), "aa").ok(); + std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap(); cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) .await; @@ -2209,7 +2215,7 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) { root_path.join("projects/project1"), root_path.join("projects/project2"), ) - .ok(); + .unwrap(); tree.flush_fs_events(cx).await; cx.read(|cx| { @@ -2335,7 +2341,13 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) { }); } +// NOTE: +// This test always fails on Windows because, unlike on Unix, you can't rename +// a directory that some program already has open. +// This is a limitation of Windows. 
+// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder #[gpui::test] +#[cfg_attr(target_os = "windows", ignore)] async fn test_file_status(cx: &mut TestAppContext) { init_test(cx); cx.executor().allow_parking(); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index b037e1e703f625c8f779c902b0063b0be04d6024..f9793952f8a697edefc7c8543bf22d79c3f4873f 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -1569,6 +1569,7 @@ mod tests { time::Duration, }; use theme::{ThemeRegistry, ThemeSettings}; + use util::{path, separator}; use workspace::{ item::{Item, ItemHandle}, open_new, open_paths, pane, NewFile, OpenVisible, SaveIntent, SplitDirection, @@ -1737,12 +1738,15 @@ mod tests { app_state .fs .as_fake() - .insert_tree("/root", json!({"a": "hey", "b": "", "dir": {"c": "f"}})) + .insert_tree( + path!("/root"), + json!({"a": "hey", "b": "", "dir": {"c": "f"}}), + ) .await; cx.update(|cx| { open_paths( - &[PathBuf::from("/root/dir")], + &[PathBuf::from(path!("/root/dir"))], app_state.clone(), workspace::OpenOptions::default(), cx, @@ -1754,7 +1758,7 @@ mod tests { cx.update(|cx| { open_paths( - &[PathBuf::from("/root/a")], + &[PathBuf::from(path!("/root/a"))], app_state.clone(), workspace::OpenOptions { open_new_workspace: Some(false), @@ -1769,7 +1773,7 @@ mod tests { cx.update(|cx| { open_paths( - &[PathBuf::from("/root/dir/c")], + &[PathBuf::from(path!("/root/dir/c"))], app_state.clone(), workspace::OpenOptions { open_new_workspace: Some(true), @@ -1789,12 +1793,15 @@ mod tests { app_state .fs .as_fake() - .insert_tree("/root", json!({"dir1": {"a": "b"}, "dir2": {"c": "d"}})) + .insert_tree( + path!("/root"), + json!({"dir1": {"a": "b"}, "dir2": {"c": "d"}}), + ) .await; cx.update(|cx| { open_paths( - &[PathBuf::from("/root/dir1/a")], + &[PathBuf::from(path!("/root/dir1/a"))], app_state.clone(), workspace::OpenOptions::default(), cx, @@ -1807,7 +1814,7 @@ mod tests { cx.update(|cx| { open_paths( - &[PathBuf::from("/root/dir2/c")], + &[PathBuf::from(path!("/root/dir2/c"))], app_state.clone(), workspace::OpenOptions::default(), cx, @@ -1819,7 +1826,7 @@ mod tests { cx.update(|cx| { open_paths( - &[PathBuf::from("/root/dir2")], + &[PathBuf::from(path!("/root/dir2"))], app_state.clone(), workspace::OpenOptions::default(), cx, @@ -1835,7 +1842,7 @@ mod tests { cx.update(|cx| { open_paths( - &[PathBuf::from("/root/dir2/c")], + &[PathBuf::from(path!("/root/dir2/c"))], app_state.clone(), workspace::OpenOptions::default(), cx, @@ -1864,12 +1871,12 @@ mod tests { app_state .fs .as_fake() - .insert_tree("/root", json!({"a": "hey"})) + .insert_tree(path!("/root"), json!({"a": "hey"})) .await; cx.update(|cx| { open_paths( - &[PathBuf::from("/root/a")], + &[PathBuf::from(path!("/root/a"))], app_state.clone(), workspace::OpenOptions::default(), cx, @@ -1951,7 +1958,7 @@ mod tests { // Opening the buffer again doesn't impact the window's edited state. 
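// `path!("/root/a")` expands to "C:\root\a" on Windows and stays "/root/a" elsewhere (see the `util::path!` docs earlier in this series), so these fake-FS paths remain valid on both platforms.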
cx.update(|cx| { open_paths( - &[PathBuf::from("/root/a")], + &[PathBuf::from(path!("/root/a"))], app_state, workspace::OpenOptions::default(), cx, @@ -2013,12 +2020,12 @@ mod tests { app_state .fs .as_fake() - .insert_tree("/root", json!({"a": "hey"})) + .insert_tree(path!("/root"), json!({"a": "hey"})) .await; cx.update(|cx| { open_paths( - &[PathBuf::from("/root/a")], + &[PathBuf::from(path!("/root/a"))], app_state.clone(), workspace::OpenOptions::default(), cx, @@ -2070,7 +2077,7 @@ mod tests { // When we now reopen the window, the edited state and the edited buffer are back cx.update(|cx| { open_paths( - &[PathBuf::from("/root/a")], + &[PathBuf::from(path!("/root/a"))], app_state.clone(), workspace::OpenOptions::default(), cx, @@ -2166,7 +2173,7 @@ mod tests { .fs .as_fake() .insert_tree( - "/root", + path!("/root"), json!({ "a": { "file1": "contents 1", @@ -2177,7 +2184,7 @@ mod tests { ) .await; - let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; project.update(cx, |project, _cx| { project.languages().add(markdown_language()) }); @@ -2298,7 +2305,7 @@ mod tests { .fs .as_fake() .insert_tree( - "/", + path!("/"), json!({ "dir1": { "a.txt": "" @@ -2316,7 +2323,7 @@ mod tests { cx.update(|cx| { open_paths( - &[PathBuf::from("/dir1/")], + &[PathBuf::from(path!("/dir1/"))], app_state, workspace::OpenOptions::default(), cx, @@ -2363,7 +2370,7 @@ mod tests { window .update(cx, |workspace, window, cx| { workspace.open_paths( - vec!["/dir1/a.txt".into()], + vec![path!("/dir1/a.txt").into()], OpenVisible::All, None, window, @@ -2374,7 +2381,12 @@ mod tests { .await; cx.read(|cx| { let workspace = workspace.read(cx); - assert_project_panel_selection(workspace, Path::new("/dir1"), Path::new("a.txt"), cx); + assert_project_panel_selection( + workspace, + Path::new(path!("/dir1")), + Path::new("a.txt"), + cx, + ); assert_eq!( workspace .active_pane() @@ -2393,7 +2405,7 @@ mod tests { window .update(cx, |workspace, window, cx| { workspace.open_paths( - vec!["/dir2/b.txt".into()], + vec![path!("/dir2/b.txt").into()], OpenVisible::All, None, window, @@ -2404,14 +2416,19 @@ mod tests { .await; cx.read(|cx| { let workspace = workspace.read(cx); - assert_project_panel_selection(workspace, Path::new("/dir2/b.txt"), Path::new(""), cx); + assert_project_panel_selection( + workspace, + Path::new(path!("/dir2/b.txt")), + Path::new(""), + cx, + ); let worktree_roots = workspace .worktrees(cx) .map(|w| w.read(cx).as_local().unwrap().abs_path().as_ref()) .collect::>(); assert_eq!( worktree_roots, - vec!["/dir1", "/dir2/b.txt"] + vec![path!("/dir1"), path!("/dir2/b.txt")] .into_iter() .map(Path::new) .collect(), @@ -2434,7 +2451,7 @@ mod tests { window .update(cx, |workspace, window, cx| { workspace.open_paths( - vec!["/dir3".into(), "/dir3/c.txt".into()], + vec![path!("/dir3").into(), path!("/dir3/c.txt").into()], OpenVisible::All, None, window, @@ -2445,14 +2462,19 @@ mod tests { .await; cx.read(|cx| { let workspace = workspace.read(cx); - assert_project_panel_selection(workspace, Path::new("/dir3"), Path::new("c.txt"), cx); + assert_project_panel_selection( + workspace, + Path::new(path!("/dir3")), + Path::new("c.txt"), + cx, + ); let worktree_roots = workspace .worktrees(cx) .map(|w| w.read(cx).as_local().unwrap().abs_path().as_ref()) .collect::>(); assert_eq!( worktree_roots, - vec!["/dir1", "/dir2/b.txt", "/dir3"] + vec![path!("/dir1"), path!("/dir2/b.txt"), path!("/dir3")] .into_iter() 
.map(Path::new) .collect(), @@ -2474,23 +2496,39 @@ mod tests { // Ensure opening invisibly a file outside an existing worktree adds a new, invisible worktree. window .update(cx, |workspace, window, cx| { - workspace.open_paths(vec!["/d.txt".into()], OpenVisible::None, None, window, cx) + workspace.open_paths( + vec![path!("/d.txt").into()], + OpenVisible::None, + None, + window, + cx, + ) }) .unwrap() .await; cx.read(|cx| { let workspace = workspace.read(cx); - assert_project_panel_selection(workspace, Path::new("/d.txt"), Path::new(""), cx); + assert_project_panel_selection( + workspace, + Path::new(path!("/d.txt")), + Path::new(""), + cx, + ); let worktree_roots = workspace .worktrees(cx) .map(|w| w.read(cx).as_local().unwrap().abs_path().as_ref()) .collect::>(); assert_eq!( worktree_roots, - vec!["/dir1", "/dir2/b.txt", "/dir3", "/d.txt"] - .into_iter() - .map(Path::new) - .collect(), + vec![ + path!("/dir1"), + path!("/dir2/b.txt"), + path!("/dir3"), + path!("/d.txt") + ] + .into_iter() + .map(Path::new) + .collect(), ); let visible_worktree_roots = workspace @@ -2499,7 +2537,7 @@ mod tests { .collect::>(); assert_eq!( visible_worktree_roots, - vec!["/dir1", "/dir2/b.txt", "/dir3"] + vec![path!("/dir1"), path!("/dir2/b.txt"), path!("/dir3")] .into_iter() .map(Path::new) .collect(), @@ -2535,7 +2573,7 @@ mod tests { .fs .as_fake() .insert_tree( - "/root", + path!("/root"), json!({ ".gitignore": "ignored_dir\n", ".git": { @@ -2560,7 +2598,7 @@ mod tests { ) .await; - let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; project.update(cx, |project, _cx| { project.languages().add(markdown_language()) }); @@ -2569,9 +2607,9 @@ mod tests { let initial_entries = cx.read(|cx| workspace.file_project_paths(cx)); let paths_to_open = [ - Path::new("/root/excluded_dir/file").to_path_buf(), - Path::new("/root/.git/HEAD").to_path_buf(), - Path::new("/root/excluded_dir/ignored_subdir").to_path_buf(), + PathBuf::from(path!("/root/excluded_dir/file")), + PathBuf::from(path!("/root/.git/HEAD")), + PathBuf::from(path!("/root/excluded_dir/ignored_subdir")), ]; let (opened_workspace, new_items) = cx .update(|cx| { @@ -2616,8 +2654,8 @@ mod tests { opened_paths, vec![ None, - Some(".git/HEAD".to_string()), - Some("excluded_dir/file".to_string()), + Some(separator!(".git/HEAD").to_string()), + Some(separator!("excluded_dir/file").to_string()), ], "Excluded files should get opened, excluded dir should not get opened" ); @@ -2643,7 +2681,7 @@ mod tests { opened_buffer_paths.sort(); assert_eq!( opened_buffer_paths, - vec![".git/HEAD".to_string(), "excluded_dir/file".to_string()], + vec![separator!(".git/HEAD").to_string(), separator!("excluded_dir/file").to_string()], "Despite not being present in the worktrees, buffers for excluded files are opened and added to the pane" ); }); @@ -2655,10 +2693,10 @@ mod tests { app_state .fs .as_fake() - .insert_tree("/root", json!({ "a.txt": "" })) + .insert_tree(path!("/root"), json!({ "a.txt": "" })) .await; - let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; project.update(cx, |project, _cx| { project.languages().add(markdown_language()) }); @@ -2669,7 +2707,7 @@ mod tests { window .update(cx, |workspace, window, cx| { workspace.open_paths( - vec![PathBuf::from("/root/a.txt")], + vec![PathBuf::from(path!("/root/a.txt"))], 
OpenVisible::All, None, window, @@ -2693,7 +2731,7 @@ mod tests { app_state .fs .as_fake() - .insert_file("/root/a.txt", b"changed".to_vec()) + .insert_file(path!("/root/a.txt"), b"changed".to_vec()) .await; cx.run_until_parked(); @@ -2721,9 +2759,13 @@ mod tests { #[gpui::test] async fn test_open_and_save_new_file(cx: &mut TestAppContext) { let app_state = init_test(cx); - app_state.fs.create_dir(Path::new("/root")).await.unwrap(); + app_state + .fs + .create_dir(Path::new(path!("/root"))) + .await + .unwrap(); - let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; project.update(cx, |project, _| { project.languages().add(markdown_language()); project.languages().add(rust_lang()); @@ -2766,7 +2808,7 @@ mod tests { .unwrap(); cx.background_executor.run_until_parked(); cx.simulate_new_path_selection(|parent_dir| { - assert_eq!(parent_dir, Path::new("/root")); + assert_eq!(parent_dir, Path::new(path!("/root"))); Some(parent_dir.join("the-new-name.rs")) }); cx.read(|cx| { @@ -2922,7 +2964,7 @@ mod tests { .fs .as_fake() .insert_tree( - "/root", + path!("/root"), json!({ "a": { "file1": "contents 1", @@ -2933,7 +2975,7 @@ mod tests { ) .await; - let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; project.update(cx, |project, _cx| { project.languages().add(markdown_language()) }); @@ -3020,7 +3062,7 @@ mod tests { .fs .as_fake() .insert_tree( - "/root", + path!("/root"), json!({ "a": { "file1": "contents 1\n".repeat(20), @@ -3031,7 +3073,7 @@ mod tests { ) .await; - let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; project.update(cx, |project, _cx| { project.languages().add(markdown_language()) }); @@ -3262,7 +3304,7 @@ mod tests { .unwrap(); app_state .fs - .remove_file(Path::new("/root/a/file2"), Default::default()) + .remove_file(Path::new(path!("/root/a/file2")), Default::default()) .await .unwrap(); cx.background_executor.run_until_parked(); @@ -3403,7 +3445,7 @@ mod tests { .fs .as_fake() .insert_tree( - "/root", + path!("/root"), json!({ "a": { "file1": "", @@ -3415,7 +3457,7 @@ mod tests { ) .await; - let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await; + let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; project.update(cx, |project, _cx| { project.languages().add(markdown_language()) }); diff --git a/crates/zed/src/zed/open_listener.rs b/crates/zed/src/zed/open_listener.rs index 9389ff481ec51500bd778089930347f5cf544af3..4fa22fed796f89eecf35158085dbe24009ec03e6 100644 --- a/crates/zed/src/zed/open_listener.rs +++ b/crates/zed/src/zed/open_listener.rs @@ -535,6 +535,7 @@ mod tests { use editor::Editor; use gpui::TestAppContext; use serde_json::json; + use util::path; use workspace::{AppState, Workspace}; use crate::zed::{open_listener::open_local_workspace, tests::init_test}; @@ -547,7 +548,7 @@ mod tests { .fs .as_fake() .insert_tree( - "/root", + path!("/root"), json!({ "dir1": { "file1.txt": "content1", @@ -560,7 +561,7 @@ mod tests { assert_eq!(cx.windows().len(), 0); // First open the workspace directory - open_workspace_file("/root/dir1", None, app_state.clone(), cx).await; + open_workspace_file(path!("/root/dir1"), None, app_state.clone(), cx).await; 
assert_eq!(cx.windows().len(), 1); let workspace = cx.windows()[0].downcast::().unwrap(); @@ -571,7 +572,7 @@ mod tests { .unwrap(); // Now open a file inside that workspace - open_workspace_file("/root/dir1/file1.txt", None, app_state.clone(), cx).await; + open_workspace_file(path!("/root/dir1/file1.txt"), None, app_state.clone(), cx).await; assert_eq!(cx.windows().len(), 1); workspace @@ -581,7 +582,13 @@ mod tests { .unwrap(); // Now open a file inside that workspace, but tell Zed to open a new window - open_workspace_file("/root/dir1/file1.txt", Some(true), app_state.clone(), cx).await; + open_workspace_file( + path!("/root/dir1/file1.txt"), + Some(true), + app_state.clone(), + cx, + ) + .await; assert_eq!(cx.windows().len(), 2); @@ -599,12 +606,16 @@ mod tests { async fn test_open_workspace_with_nonexistent_files(cx: &mut TestAppContext) { let app_state = init_test(cx); - app_state.fs.as_fake().insert_tree("/root", json!({})).await; + app_state + .fs + .as_fake() + .insert_tree(path!("/root"), json!({})) + .await; assert_eq!(cx.windows().len(), 0); // Test case 1: Open a single file that does not exist yet - open_workspace_file("/root/file5.txt", None, app_state.clone(), cx).await; + open_workspace_file(path!("/root/file5.txt"), None, app_state.clone(), cx).await; assert_eq!(cx.windows().len(), 1); let workspace_1 = cx.windows()[0].downcast::().unwrap(); @@ -616,7 +627,7 @@ mod tests { // Test case 2: Open a single file that does not exist yet, // but tell Zed to add it to the current workspace - open_workspace_file("/root/file6.txt", Some(false), app_state.clone(), cx).await; + open_workspace_file(path!("/root/file6.txt"), Some(false), app_state.clone(), cx).await; assert_eq!(cx.windows().len(), 1); workspace_1 @@ -628,7 +639,7 @@ mod tests { // Test case 3: Open a single file that does not exist yet, // but tell Zed to NOT add it to the current workspace - open_workspace_file("/root/file7.txt", Some(true), app_state.clone(), cx).await; + open_workspace_file(path!("/root/file7.txt"), Some(true), app_state.clone(), cx).await; assert_eq!(cx.windows().len(), 2); let workspace_2 = cx.windows()[1].downcast::().unwrap(); diff --git a/script/exit-ci-if-dev-drive-is-full.ps1 b/script/exit-ci-if-dev-drive-is-full.ps1 new file mode 100644 index 0000000000000000000000000000000000000000..98684d58ee606d358462d0f76676dbbcb316e58d --- /dev/null +++ b/script/exit-ci-if-dev-drive-is-full.ps1 @@ -0,0 +1,22 @@ +param ( + [Parameter(Mandatory = $true)] + [int]$MAX_SIZE_IN_GB +) + +$ErrorActionPreference = "Stop" +$PSNativeCommandUseErrorActionPreference = $true +$ProgressPreference = "SilentlyContinue" + +if (-Not (Test-Path -Path "target")) { + Write-Host "target directory does not exist yet" + exit 0 +} + +$current_size_gb = (Get-ChildItem -Recurse -Force -File -Path "target" | Measure-Object -Property Length -Sum).Sum / 1GB + +Write-Host "target directory size: ${current_size_gb}GB. max size: ${MAX_SIZE_IN_GB}GB" + +if ($current_size_gb -gt $MAX_SIZE_IN_GB) { + Write-Host "Dev drive is almost full, increase the size first!" + exit 1 +} diff --git a/script/setup-dev-driver.ps1 b/script/setup-dev-driver.ps1 index 28a9c3ed6c616801b94a2e8bed49d512f68a693c..51aa17c2678ac93879a409ecd52004520561fc85 100644 --- a/script/setup-dev-driver.ps1 +++ b/script/setup-dev-driver.ps1 @@ -3,7 +3,8 @@ # The current version of the Windows runner is 10.0.20348 which does not support DevDrive option. 
# Ref: https://learn.microsoft.com/en-us/windows/dev-drive/ -$Volume = New-VHD -Path C:/zed_dev_drive.vhdx -SizeBytes 30GB | +# Currently, total CI requires almost 45GB of space, here we are creating a 60GB drive. +$Volume = New-VHD -Path C:/zed_dev_drive.vhdx -SizeBytes 60GB | Mount-VHD -Passthru | Initialize-Disk -Passthru | New-Partition -AssignDriveLetter -UseMaximumSize | From 992125bec2e1a00dd8ea443b704263a806d7a478 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 5 Feb 2025 10:03:45 -0500 Subject: [PATCH 023/130] Revert "copilot: Correct o3-mini context length" (#24275) Reverts zed-industries/zed#24152 See comment: https://github.com/zed-industries/zed/pull/24152#issuecomment-2636808170 Manually confirmed >20k generates error. --- crates/copilot/src/copilot_chat.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/copilot/src/copilot_chat.rs b/crates/copilot/src/copilot_chat.rs index b0ec1cefd2f74c57658cf555fd90489135612b9e..b45bd6270c067b7a44b5aae7bb7b8408c71c5749 100644 --- a/crates/copilot/src/copilot_chat.rs +++ b/crates/copilot/src/copilot_chat.rs @@ -89,7 +89,7 @@ impl Model { Self::Gpt4o => 64000, Self::Gpt4 => 32768, Self::Gpt3_5Turbo => 12288, - Self::O3Mini => 200_000, + Self::O3Mini => 20000, Self::O1 => 20000, Self::Claude3_5Sonnet => 200_000, } From 0a89d1a479f2e7937cd2ee77240450c3c566c7a9 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 5 Feb 2025 10:07:53 -0500 Subject: [PATCH 024/130] languages: Sort dependencies in `Cargo.toml` (#24277) This PR sorts the dependency lists in the `Cargo.toml` for the `languages` crate. Release Notes: - N/A --- crates/languages/Cargo.toml | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/crates/languages/Cargo.toml b/crates/languages/Cargo.toml index 3d51f748c1f7c334df7a62e6bc80649fdbf563bb..5665b9b53ab25daba2a4b979e7f19201e8f89436 100644 --- a/crates/languages/Cargo.toml +++ b/crates/languages/Cargo.toml @@ -13,6 +13,7 @@ test-support = [ "load-grammars" ] load-grammars = [ + "tree-sitter", "tree-sitter-bash", "tree-sitter-c", "tree-sitter-cpp", @@ -29,7 +30,6 @@ load-grammars = [ "tree-sitter-rust", "tree-sitter-typescript", "tree-sitter-yaml", - "tree-sitter", ] [dependencies] @@ -46,12 +46,12 @@ log.workspace = true lsp.workspace = true node_runtime.workspace = true paths.workspace = true -pet.workspace = true -pet-fs.workspace = true -pet-core.workspace = true pet-conda.workspace = true +pet-core.workspace = true +pet-fs.workspace = true pet-poetry.workspace = true pet-reporter.workspace = true +pet.workspace = true project.workspace = true regex.workspace = true rope.workspace = true @@ -83,15 +83,15 @@ tree-sitter-yaml = { workspace = true, optional = true } util.workspace = true [dev-dependencies] -tree-sitter.workspace = true +pretty_assertions.workspace = true text.workspace = true theme = { workspace = true, features = ["test-support"] } -unindent.workspace = true -workspace = { workspace = true, features = ["test-support"] } -tree-sitter-typescript.workspace = true -tree-sitter-python.workspace = true -tree-sitter-go.workspace = true +tree-sitter-bash.workspace = true tree-sitter-c.workspace = true tree-sitter-css.workspace = true -tree-sitter-bash.workspace = true -pretty_assertions.workspace = true +tree-sitter-go.workspace = true +tree-sitter-python.workspace = true +tree-sitter-typescript.workspace = true +tree-sitter.workspace = true +unindent.workspace = true +workspace = { workspace = true, features = ["test-support"] } From 
630d0add190b77da1fa48ea4d1e7d52bc386adca Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Wed, 5 Feb 2025 12:26:11 -0300 Subject: [PATCH 025/130] edit predictions: Onboarding funnel telemetry (#24237) Release Notes: - N/A --- Cargo.lock | 1 + crates/editor/src/editor.rs | 14 +++++----- crates/inline_completion_button/Cargo.toml | 1 + .../src/inline_completion_button.rs | 20 +++++++++++++- .../zed/src/zed/inline_completion_registry.rs | 19 ++++++++++---- crates/zeta/src/onboarding_banner.rs | 4 +++ crates/zeta/src/onboarding_modal.rs | 26 ++++++++++++++++--- crates/zeta/src/onboarding_telemetry.rs | 9 +++++++ crates/zeta/src/rate_completion_modal.rs | 2 ++ crates/zeta/src/zeta.rs | 1 + 10 files changed, 82 insertions(+), 15 deletions(-) create mode 100644 crates/zeta/src/onboarding_telemetry.rs diff --git a/Cargo.lock b/Cargo.lock index 5353b9bc44aa0371513a5bf562fd70bd9a8f7d66..8da23e96fe65075e06e487e9d759720efefe76ce 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6388,6 +6388,7 @@ dependencies = [ "serde_json", "settings", "supermaven", + "telemetry", "theme", "ui", "workspace", diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 6e622602559d92aa9d7382a572b87ddbb415bfe7..1ecd630dd65eaa97790c24af98a0806dd8deabd8 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3946,10 +3946,6 @@ impl Editor { self.do_completion(action.item_ix, CompletionIntent::Compose, window, cx) } - fn toggle_zed_predict_onboarding(&mut self, window: &mut Window, cx: &mut Context) { - window.dispatch_action(zed_actions::OpenZedPredictOnboarding.boxed_clone(), cx); - } - fn do_completion( &mut self, item_ix: Option, @@ -5445,7 +5441,11 @@ impl Editor { .on_mouse_down(MouseButton::Left, |_, window, _| window.prevent_default()) .on_click(cx.listener(|this, _event, window, cx| { cx.stop_propagation(); - this.toggle_zed_predict_onboarding(window, cx) + this.report_editor_event("Edit Prediction Provider ToS Clicked", None, cx); + window.dispatch_action( + zed_actions::OpenZedPredictOnboarding.boxed_clone(), + cx, + ); })) .child( h_flex() @@ -14074,7 +14074,8 @@ impl Editor { .get("vim_mode") == Some(&serde_json::Value::Bool(true)); - let copilot_enabled = all_language_settings(file, cx).inline_completions.provider + let edit_predictions_provider = all_language_settings(file, cx).inline_completions.provider; + let copilot_enabled = edit_predictions_provider == language::language_settings::InlineCompletionProvider::Copilot; let copilot_enabled_for_language = self .buffer @@ -14089,6 +14090,7 @@ impl Editor { vim_mode, copilot_enabled, copilot_enabled_for_language, + edit_predictions_provider, is_via_ssh = project.is_via_ssh(), ); } diff --git a/crates/inline_completion_button/Cargo.toml b/crates/inline_completion_button/Cargo.toml index b5daba3893ae24f37747cf138a5f0ed5767b5aa8..e8c51efcaf3405762c5591c712f90928f7525cf7 100644 --- a/crates/inline_completion_button/Cargo.toml +++ b/crates/inline_completion_button/Cargo.toml @@ -29,6 +29,7 @@ workspace.workspace = true zed_actions.workspace = true zeta.workspace = true client.workspace = true +telemetry.workspace = true [dev-dependencies] copilot = { workspace = true, features = ["test-support"] } diff --git a/crates/inline_completion_button/src/inline_completion_button.rs b/crates/inline_completion_button/src/inline_completion_button.rs index 20cad6ec0b1e06df3d8932aef244206d5843abe8..a2b72ed1c2ebc9bea728cd9d2dd8d998c793c512 100644 --- a/crates/inline_completion_button/src/inline_completion_button.rs +++ 
b/crates/inline_completion_button/src/inline_completion_button.rs @@ -256,6 +256,10 @@ impl Render for InlineCompletionButton { ) }) .on_click(cx.listener(move |_, _, window, cx| { + telemetry::event!( + "Pending ToS Clicked", + source = "Edit Prediction Status Button" + ); window.dispatch_action( zed_actions::OpenZedPredictOnboarding.boxed_clone(), cx, @@ -426,6 +430,8 @@ impl InlineCompletionButton { if data_collection.is_supported() { let provider = provider.clone(); + let enabled = data_collection.is_enabled(); + menu = menu .separator() .header("Help Improve The Model") @@ -434,9 +440,21 @@ impl InlineCompletionButton { // TODO: We want to add something later that communicates whether // the current project is open-source. ContextMenuEntry::new("Share Training Data") - .toggleable(IconPosition::Start, data_collection.is_enabled()) + .toggleable(IconPosition::Start, enabled) .handler(move |_, cx| { provider.toggle_data_collection(cx); + + if !enabled { + telemetry::event!( + "Data Collection Enabled", + source = "Edit Prediction Status Menu" + ); + } else { + telemetry::event!( + "Data Collection Disabled", + source = "Edit Prediction Status Menu" + ); + } }), ); } diff --git a/crates/zed/src/zed/inline_completion_registry.rs b/crates/zed/src/zed/inline_completion_registry.rs index 58faf1263d04224b21fe10faf105e831e4fa84d2..6e2879a6c98a15c79965b6a78d49c03708a1003e 100644 --- a/crates/zed/src/zed/inline_completion_registry.rs +++ b/crates/zed/src/zed/inline_completion_registry.rs @@ -94,7 +94,20 @@ pub fn init(client: Arc, user_store: Entity, cx: &mut App) { let user_store = user_store.clone(); move |cx| { let new_provider = all_language_settings(None, cx).inline_completions.provider; + if new_provider != provider { + let tos_accepted = user_store + .read(cx) + .current_user_has_accepted_terms() + .unwrap_or(false); + + telemetry::event!( + "Edit Prediction Provider Changed", + from = provider, + to = new_provider, + zed_ai_tos_accepted = tos_accepted, + ); + provider = new_provider; assign_inline_completion_providers( &editors, @@ -104,11 +117,7 @@ pub fn init(client: Arc, user_store: Entity, cx: &mut App) { cx, ); - if !user_store - .read(cx) - .current_user_has_accepted_terms() - .unwrap_or(false) - { + if !tos_accepted { match provider { InlineCompletionProvider::Zed => { let Some(window) = cx.active_window() else { diff --git a/crates/zeta/src/onboarding_banner.rs b/crates/zeta/src/onboarding_banner.rs index 26169b2cbffc3bd443ec1c88679173390be2fcd3..54a6939d6292c505fcbd9a5419c3085b60366331 100644 --- a/crates/zeta/src/onboarding_banner.rs +++ b/crates/zeta/src/onboarding_banner.rs @@ -6,6 +6,8 @@ use settings::SettingsStore; use ui::{prelude::*, ButtonLike, Tooltip}; use util::ResultExt; +use crate::onboarding_event; + /// Prompts the user to try Zed's Edit Prediction feature pub struct ZedPredictBanner { dismissed: bool, @@ -53,6 +55,7 @@ impl ZedPredictBanner { } fn dismiss(&mut self, cx: &mut Context) { + onboarding_event!("Banner Dismissed"); persist_dismissed(cx); self.dismissed = true; cx.notify(); @@ -107,6 +110,7 @@ impl Render for ZedPredictBanner { ), ) .on_click(|_, window, cx| { + onboarding_event!("Banner Clicked"); window.dispatch_action(Box::new(zed_actions::OpenZedPredictOnboarding), cx) }), ) diff --git a/crates/zeta/src/onboarding_modal.rs b/crates/zeta/src/onboarding_modal.rs index b9e214508cd122e7ae666226af62efdcf233d2f7..c17289b78f13db3b6964e8b3bccefb7011d8345b 100644 --- a/crates/zeta/src/onboarding_modal.rs +++ b/crates/zeta/src/onboarding_modal.rs @@ -1,6 
+1,6 @@ use std::{sync::Arc, time::Duration}; -use crate::ZED_PREDICT_DATA_COLLECTION_CHOICE; +use crate::{onboarding_event, ZED_PREDICT_DATA_COLLECTION_CHOICE}; use client::{Client, UserStore}; use db::kvp::KEY_VALUE_STORE; use feature_flags::FeatureFlagAppExt as _; @@ -61,16 +61,22 @@ impl ZedPredictModal { fn view_terms(&mut self, _: &ClickEvent, _: &mut Window, cx: &mut Context) { cx.open_url("https://zed.dev/terms-of-service"); cx.notify(); + + onboarding_event!("ToS Link Clicked"); } fn view_blog(&mut self, _: &ClickEvent, _: &mut Window, cx: &mut Context) { cx.open_url("https://zed.dev/blog/"); // TODO Add the link when live cx.notify(); + + onboarding_event!("Blog Link clicked"); } fn inline_completions_doc(&mut self, _: &ClickEvent, _: &mut Window, cx: &mut Context) { cx.open_url("https://zed.dev/docs/configuring-zed#inline-completions"); cx.notify(); + + onboarding_event!("Docs Link Clicked"); } fn accept_and_enable(&mut self, _: &ClickEvent, window: &mut Window, cx: &mut Context) { @@ -106,6 +112,11 @@ impl ZedPredictModal { }) }) .detach_and_notify_err(window, cx); + + onboarding_event!( + "Enable Clicked", + data_collection_opted_in = self.data_collection_opted_in, + ); } fn sign_in(&mut self, _: &ClickEvent, window: &mut Window, cx: &mut Context) { @@ -122,12 +133,15 @@ impl ZedPredictModal { this.update(&mut cx, |this, cx| { this.sign_in_status = status; + onboarding_event!("Signed In"); cx.notify() })?; result }) .detach_and_notify_err(window, cx); + + onboarding_event!("Sign In Clicked"); } fn cancel(&mut self, _: &menu::Cancel, _: &mut Window, cx: &mut Context) { @@ -159,6 +173,7 @@ impl Render for ZedPredictModal { .track_focus(&self.focus_handle(cx)) .on_action(cx.listener(Self::cancel)) .on_action(cx.listener(|_, _: &menu::Cancel, _window, cx| { + onboarding_event!("Cancelled", trigger = "Action"); cx.emit(DismissEvent); })) .on_any_mouse_down(cx.listener(|this, _: &MouseDownEvent, window, _cx| { @@ -241,6 +256,7 @@ impl Render for ZedPredictModal { .child(h_flex().absolute().top_2().right_2().child( IconButton::new("cancel", IconName::X).on_click(cx.listener( |_, _: &ClickEvent, _window, cx| { + onboarding_event!("Cancelled", trigger = "X click"); cx.emit(DismissEvent); }, )), @@ -302,7 +318,7 @@ impl Render for ZedPredictModal { .label("Read and accept the") .on_click(cx.listener(move |this, state, _window, cx| { this.terms_of_service = *state == ToggleState::Selected; - cx.notify() + cx.notify(); })), ) .child( @@ -340,7 +356,11 @@ impl Render for ZedPredictModal { .on_click(cx.listener(|this, _, _, cx| { this.data_collection_expanded = !this.data_collection_expanded; - cx.notify() + cx.notify(); + + if this.data_collection_expanded { + onboarding_event!("Data Collection Learn More Clicked"); + } })), ), ) diff --git a/crates/zeta/src/onboarding_telemetry.rs b/crates/zeta/src/onboarding_telemetry.rs new file mode 100644 index 0000000000000000000000000000000000000000..3c7d5e1442947c3e8cea446ebf37597a3cce1f80 --- /dev/null +++ b/crates/zeta/src/onboarding_telemetry.rs @@ -0,0 +1,9 @@ +#[macro_export] +macro_rules! onboarding_event { + ($name:expr) => { + telemetry::event!($name, source = "Edit Prediction Onboarding"); + }; + ($name:expr, $($key:ident $(= $value:expr)?),+ $(,)?) 
=> { + telemetry::event!($name, source = "Edit Prediction Onboarding", $($key $(= $value)?),+); + }; +} diff --git a/crates/zeta/src/rate_completion_modal.rs b/crates/zeta/src/rate_completion_modal.rs index 073388e22c91702404d556823aaae04ddb282657..dda838c21b3b927af387a93dbf731e76dead44c6 100644 --- a/crates/zeta/src/rate_completion_modal.rs +++ b/crates/zeta/src/rate_completion_modal.rs @@ -52,6 +52,8 @@ impl RateCompletionModal { pub fn toggle(workspace: &mut Workspace, window: &mut Window, cx: &mut Context) { if let Some(zeta) = Zeta::global(cx) { workspace.toggle_modal(window, cx, |_window, cx| RateCompletionModal::new(zeta, cx)); + + telemetry::event!("Rate Completion Modal Open", source = "Edit Prediction"); } } diff --git a/crates/zeta/src/zeta.rs b/crates/zeta/src/zeta.rs index 7aa2dc212a3f9165dfd4b898666ab76eaf849580..584e4a8bb88a7c9744d4bfaee3bdc23d8002c66d 100644 --- a/crates/zeta/src/zeta.rs +++ b/crates/zeta/src/zeta.rs @@ -3,6 +3,7 @@ mod init; mod license_detection; mod onboarding_banner; mod onboarding_modal; +mod onboarding_telemetry; mod rate_completion_modal; pub(crate) use completion_diff_element::*; From da4bad3a55bb00c57979f390445461923d9f9ae6 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 5 Feb 2025 12:28:44 -0300 Subject: [PATCH 026/130] edit prediction: Don't log an error if license file isn't found (#24278) Logging an error in this case isn't super necessary. Release Notes: - N/A Co-authored-by: Bennet Bo Fenner <53836821+bennetbo@users.noreply.github.com> --- crates/zeta/src/zeta.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/zeta/src/zeta.rs b/crates/zeta/src/zeta.rs index 584e4a8bb88a7c9744d4bfaee3bdc23d8002c66d..0be88b3c6a93a2e30fb1c331379da474e3996022 100644 --- a/crates/zeta/src/zeta.rs +++ b/crates/zeta/src/zeta.rs @@ -958,8 +958,7 @@ impl LicenseDetectionWatcher { Self { is_open_source_rx, _is_open_source_task: cx.spawn(|_, _| async move { - // TODO: Don't display error if file not found - let Some(loaded_file) = loaded_file_fut.await.log_err() else { + let Ok(loaded_file) = loaded_file_fut.await else { return; }; From 868e3f75b238fdbff14e5f1febbbab1d426141ae Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 5 Feb 2025 17:36:24 +0200 Subject: [PATCH 027/130] Rework shared commit editors (#24274) Rework of https://github.com/zed-industries/zed/pull/24130 Uses https://github.com/d1y/git_firefly/tree/1033c0b57ec88a002cb68efc64c8d9bf5c212e30 `COMMIT_EDITMSG` language-related definitions (thanks @d1y) Instead of using the real `.git/COMMIT_EDITMSG` file, create a buffer without FS representation, stored in the `Repository` and shared the regular way via the `BufferStore`. Adds knowledge of the `Git Commit` language and uses it in the buffers rendered in the git panel. 
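In rough terms, the new flow is sketched below (condensed from the `git_panel.rs` and `repository.rs` changes in this patch, not a verbatim excerpt; `languages`, `buffer_store`, `message`, and `name_and_email` stand in for values the panel already has on hand):

    // Ask the active Repository for its commit-message buffer. The buffer has
    // no backing file; it lives in the shared BufferStore, so collaborators
    // receive it like any other buffer.
    let load_buffer = active_repo.update(cx, |repo, cx| {
        repo.open_commit_buffer(Some(languages.clone()), buffer_store.clone(), cx)
    });

    // Committing no longer reads COMMIT_EDITMSG from disk; the message text is
    // passed straight to `git commit --quiet -m <message> --cleanup=strip`.
    let commit = active_repo.update(cx, |repo, _| {
        repo.commit(SharedString::from(message), name_and_email)
    });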
Release Notes: - N/A --------- Co-authored-by: Conrad Irwin Co-authored-by: d1y Co-authored-by: Smit --- Cargo.lock | 11 +- Cargo.toml | 1 + crates/git/src/repository.rs | 18 +- crates/git_ui/Cargo.toml | 1 - crates/git_ui/src/git_panel.rs | 198 +++++++---------- crates/git_ui/src/project_diff.rs | 75 +++---- crates/git_ui/src/repository_selector.rs | 12 +- crates/languages/Cargo.toml | 2 + crates/languages/src/gitcommit/config.toml | 18 ++ crates/languages/src/gitcommit/highlights.scm | 18 ++ crates/languages/src/gitcommit/injections.scm | 5 + crates/languages/src/lib.rs | 20 ++ crates/project/src/git.rs | 204 ++++++++++++------ crates/project/src/project.rs | 126 +++++------ crates/proto/proto/zed.proto | 1 + crates/remote_server/src/headless_project.rs | 86 +++----- crates/worktree/src/worktree.rs | 2 +- 17 files changed, 427 insertions(+), 371 deletions(-) create mode 100644 crates/languages/src/gitcommit/config.toml create mode 100644 crates/languages/src/gitcommit/highlights.scm create mode 100644 crates/languages/src/gitcommit/injections.scm diff --git a/Cargo.lock b/Cargo.lock index 8da23e96fe65075e06e487e9d759720efefe76ce..7b9fed4f37606949e7d48b0df6e7af5e9de3fecb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5254,7 +5254,6 @@ dependencies = [ "picker", "postage", "project", - "rpc", "schemars", "serde", "serde_derive", @@ -7008,6 +7007,7 @@ dependencies = [ "tree-sitter-cpp", "tree-sitter-css", "tree-sitter-diff", + "tree-sitter-gitcommit", "tree-sitter-go", "tree-sitter-gomod", "tree-sitter-gowork", @@ -13944,6 +13944,15 @@ dependencies = [ "tree-sitter-language", ] +[[package]] +name = "tree-sitter-gitcommit" +version = "0.0.1" +source = "git+https://github.com/zed-industries/tree-sitter-git-commit?rev=88309716a69dd13ab83443721ba6e0b491d37ee9#88309716a69dd13ab83443721ba6e0b491d37ee9" +dependencies = [ + "cc", + "tree-sitter-language", +] + [[package]] name = "tree-sitter-go" version = "0.23.4" diff --git a/Cargo.toml b/Cargo.toml index d70d7b2faf1098ead8cf7e0f1d0ab53dba85e0a4..1b47335b4c47097232c846d94e34f56062abc183 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -522,6 +522,7 @@ tree-sitter-cpp = "0.23" tree-sitter-css = "0.23" tree-sitter-elixir = "0.3" tree-sitter-embedded-template = "0.23.0" +tree-sitter-gitcommit = {git = "https://github.com/zed-industries/tree-sitter-git-commit", rev = "88309716a69dd13ab83443721ba6e0b491d37ee9"} tree-sitter-go = "0.23" tree-sitter-go-mod = { git = "https://github.com/camdencheek/tree-sitter-go-mod", rev = "6efb59652d30e0e9cd5f3b3a669afd6f1a926d3c", package = "tree-sitter-gomod" } tree-sitter-gowork = { git = "https://github.com/zed-industries/tree-sitter-go-work", rev = "acb0617bf7f4fda02c6217676cc64acb89536dc7" } diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index efedb0d461d70fd028af3bad3c0a58d939acefed..50191ea6836dc33a7c3f7d084e2871e1b0877255 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -1,6 +1,6 @@ use crate::status::FileStatus; +use crate::GitHostingProviderRegistry; use crate::{blame::Blame, status::GitStatus}; -use crate::{GitHostingProviderRegistry, COMMIT_MESSAGE}; use anyhow::{anyhow, Context as _, Result}; use collections::{HashMap, HashSet}; use git2::BranchType; @@ -68,7 +68,7 @@ pub trait GitRepository: Send + Sync { /// If any of the paths were previously staged but do not exist in HEAD, they will be removed from the index. 
fn unstage_paths(&self, paths: &[RepoPath]) -> Result<()>; - fn commit(&self, name_and_email: Option<(&str, &str)>) -> Result<()>; + fn commit(&self, message: &str, name_and_email: Option<(&str, &str)>) -> Result<()>; } impl std::fmt::Debug for dyn GitRepository { @@ -298,22 +298,14 @@ impl GitRepository for RealGitRepository { Ok(()) } - fn commit(&self, name_and_email: Option<(&str, &str)>) -> Result<()> { + fn commit(&self, message: &str, name_and_email: Option<(&str, &str)>) -> Result<()> { let working_directory = self .repository .lock() .workdir() .context("failed to read git work directory")? .to_path_buf(); - let commit_file = self.dot_git_dir().join(*COMMIT_MESSAGE); - let commit_file_path = commit_file.to_string_lossy(); - let mut args = vec![ - "commit", - "--quiet", - "-F", - commit_file_path.as_ref(), - "--cleanup=strip", - ]; + let mut args = vec!["commit", "--quiet", "-m", message, "--cleanup=strip"]; let author = name_and_email.map(|(name, email)| format!("{name} <{email}>")); if let Some(author) = author.as_deref() { args.push("--author"); @@ -480,7 +472,7 @@ impl GitRepository for FakeGitRepository { unimplemented!() } - fn commit(&self, _name_and_email: Option<(&str, &str)>) -> Result<()> { + fn commit(&self, _message: &str, _name_and_email: Option<(&str, &str)>) -> Result<()> { unimplemented!() } } diff --git a/crates/git_ui/Cargo.toml b/crates/git_ui/Cargo.toml index 055410760485f224fc0842ffcb35e5db695329f2..701f9a01d7014ee159d774cefcc33d6e15d76167 100644 --- a/crates/git_ui/Cargo.toml +++ b/crates/git_ui/Cargo.toml @@ -26,7 +26,6 @@ multi_buffer.workspace = true menu.workspace = true postage.workspace = true project.workspace = true -rpc.workspace = true schemars.workspace = true serde.workspace = true serde_derive.workspace = true diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 1e7ce96cef8defeee09d23ce7c75a42784df94bd..9cf054467d86561ad0d2a38bf7f0c3b043a963ba 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -4,7 +4,7 @@ use crate::ProjectDiff; use crate::{ git_panel_settings::GitPanelSettings, git_status_icon, repository_selector::RepositorySelector, }; -use anyhow::{Context as _, Result}; +use anyhow::Result; use collections::HashMap; use db::kvp::KEY_VALUE_STORE; use editor::actions::MoveToEnd; @@ -12,13 +12,12 @@ use editor::scroll::ScrollbarAutoHide; use editor::{Editor, EditorMode, EditorSettings, MultiBuffer, ShowScrollbar}; use git::repository::RepoPath; use git::status::FileStatus; -use git::{CommitAllChanges, CommitChanges, ToggleStaged, COMMIT_MESSAGE}; +use git::{CommitAllChanges, CommitChanges, ToggleStaged}; use gpui::*; -use language::{Buffer, BufferId}; +use language::Buffer; use menu::{SelectFirst, SelectLast, SelectNext, SelectPrev}; -use project::git::{GitEvent, GitRepo, RepositoryHandle}; -use project::{CreateOptions, Fs, Project, ProjectPath}; -use rpc::proto; +use project::git::{GitEvent, Repository}; +use project::{Fs, Project, ProjectPath}; use serde::{Deserialize, Serialize}; use settings::Settings as _; use std::{collections::HashSet, path::PathBuf, sync::Arc, time::Duration, usize}; @@ -32,7 +31,7 @@ use workspace::notifications::{DetachAndPromptErr, NotificationId}; use workspace::Toast; use workspace::{ dock::{DockPosition, Panel, PanelEvent}, - Item, Workspace, + Workspace, }; actions!( @@ -144,7 +143,7 @@ pub struct GitPanel { pending_serialization: Task>, workspace: WeakEntity, project: Entity, - active_repository: Option, + active_repository: Option>, scroll_handle: 
UniformListScrollHandle, scrollbar_state: ScrollbarState, selected_entry: Option, @@ -162,63 +161,6 @@ pub struct GitPanel { can_commit_all: bool, } -fn commit_message_buffer( - project: &Entity, - active_repository: &RepositoryHandle, - cx: &mut App, -) -> Task>> { - match &active_repository.git_repo { - GitRepo::Local(repo) => { - let commit_message_file = repo.dot_git_dir().join(*COMMIT_MESSAGE); - let fs = project.read(cx).fs().clone(); - let project = project.downgrade(); - cx.spawn(|mut cx| async move { - fs.create_file( - &commit_message_file, - CreateOptions { - overwrite: false, - ignore_if_exists: true, - }, - ) - .await - .with_context(|| format!("creating commit message file {commit_message_file:?}"))?; - let buffer = project - .update(&mut cx, |project, cx| { - project.open_local_buffer(&commit_message_file, cx) - })? - .await - .with_context(|| { - format!("opening commit message buffer at {commit_message_file:?}",) - })?; - Ok(buffer) - }) - } - GitRepo::Remote { - project_id, - client, - worktree_id, - work_directory_id, - } => { - let request = client.request(proto::OpenCommitMessageBuffer { - project_id: project_id.0, - worktree_id: worktree_id.to_proto(), - work_directory_id: work_directory_id.to_proto(), - }); - let project = project.downgrade(); - cx.spawn(|mut cx| async move { - let response = request.await.context("requesting to open commit buffer")?; - let buffer_id = BufferId::new(response.buffer_id)?; - let buffer = project - .update(&mut cx, { - |project, cx| project.wait_for_remote_buffer(buffer_id, cx) - })? - .await?; - Ok(buffer) - }) - } - } -} - fn commit_message_editor( commit_message_buffer: Option>, window: &mut Window, @@ -360,7 +302,7 @@ impl GitPanel { let Some(git_repo) = self.active_repository.as_ref() else { return; }; - let Some(repo_path) = git_repo.project_path_to_repo_path(&path) else { + let Some(repo_path) = git_repo.read(cx).project_path_to_repo_path(&path) else { return; }; let Some(ix) = self.entries_by_path.get(&repo_path) else { @@ -578,7 +520,7 @@ impl GitPanel { .active_repository .as_ref() .map_or(false, |active_repository| { - active_repository.entry_count() > 0 + active_repository.read(cx).entry_count() > 0 }); if have_entries && self.selected_entry.is_none() { self.selected_entry = Some(0); @@ -655,11 +597,17 @@ impl GitPanel { let repo_paths = repo_paths.clone(); let active_repository = active_repository.clone(); |this, mut cx| async move { - let result = if stage { - active_repository.stage_entries(repo_paths.clone()).await - } else { - active_repository.unstage_entries(repo_paths.clone()).await - }; + let result = cx + .update(|cx| { + if stage { + active_repository.read(cx).stage_entries(repo_paths.clone()) + } else { + active_repository + .read(cx) + .unstage_entries(repo_paths.clone()) + } + })? 
+ .await?; this.update(&mut cx, |this, cx| { for pending in this.pending.iter_mut() { @@ -697,7 +645,9 @@ impl GitPanel { let Some(active_repository) = self.active_repository.as_ref() else { return; }; - let Some(path) = active_repository.repo_path_to_project_path(&status_entry.repo_path) + let Some(path) = active_repository + .read(cx) + .repo_path_to_project_path(&status_entry.repo_path) else { return; }; @@ -725,18 +675,18 @@ impl GitPanel { if !self.can_commit { return; } - if self.commit_editor.read(cx).is_empty(cx) { + let message = self.commit_editor.read(cx).text(cx); + if message.trim().is_empty() { return; } self.commit_pending = true; - let save_task = self.commit_editor.update(cx, |editor, cx| { - editor.save(false, self.project.clone(), window, cx) - }); let commit_editor = self.commit_editor.clone(); self.commit_task = cx.spawn_in(window, |git_panel, mut cx| async move { + let commit = active_repository.update(&mut cx, |active_repository, _| { + active_repository.commit(SharedString::from(message), name_and_email) + })?; let result = maybe!(async { - save_task.await?; - active_repository.commit(name_and_email).await?; + commit.await??; cx.update(|window, cx| { commit_editor.update(cx, |editor, cx| editor.clear(window, cx)); }) @@ -768,14 +718,12 @@ impl GitPanel { if !self.can_commit_all { return; } - if self.commit_editor.read(cx).is_empty(cx) { + + let message = self.commit_editor.read(cx).text(cx); + if message.trim().is_empty() { return; } self.commit_pending = true; - let save_task = self.commit_editor.update(cx, |editor, cx| { - editor.save(false, self.project.clone(), window, cx) - }); - let commit_editor = self.commit_editor.clone(); let tracked_files = self .entries @@ -790,9 +738,15 @@ impl GitPanel { self.commit_task = cx.spawn_in(window, |git_panel, mut cx| async move { let result = maybe!(async { - save_task.await?; - active_repository.stage_entries(tracked_files).await?; - active_repository.commit(name_and_email).await + cx.update(|_, cx| active_repository.read(cx).stage_entries(tracked_files))? + .await??; + cx.update(|_, cx| { + active_repository + .read(cx) + .commit(SharedString::from(message), name_and_email) + })? 
+ .await??; + Ok(()) }) .await; cx.update(|window, cx| match result { @@ -886,47 +840,56 @@ impl GitPanel { window: &mut Window, cx: &mut Context, ) { - let project = self.project.clone(); let handle = cx.entity().downgrade(); + self.reopen_commit_buffer(window, cx); self.update_visible_entries_task = cx.spawn_in(window, |_, mut cx| async move { cx.background_executor().timer(UPDATE_DEBOUNCE).await; if let Some(git_panel) = handle.upgrade() { - let Ok(commit_message_buffer) = git_panel.update_in(&mut cx, |git_panel, _, cx| { - git_panel - .active_repository - .as_ref() - .map(|active_repository| { - commit_message_buffer(&project, active_repository, cx) - }) - }) else { - return; - }; - let commit_message_buffer = match commit_message_buffer { - Some(commit_message_buffer) => match commit_message_buffer - .await - .context("opening commit buffer on repo update") - .log_err() - { - Some(buffer) => Some(buffer), - None => return, - }, - None => None, - }; - git_panel - .update_in(&mut cx, |git_panel, window, cx| { - git_panel.update_visible_entries(cx); + .update_in(&mut cx, |git_panel, _, cx| { if clear_pending { git_panel.clear_pending(); } - git_panel.commit_editor = - cx.new(|cx| commit_message_editor(commit_message_buffer, window, cx)); + git_panel.update_visible_entries(cx); }) .ok(); } }); } + fn reopen_commit_buffer(&mut self, window: &mut Window, cx: &mut Context) { + let Some(active_repo) = self.active_repository.as_ref() else { + return; + }; + let load_buffer = active_repo.update(cx, |active_repo, cx| { + let project = self.project.read(cx); + active_repo.open_commit_buffer( + Some(project.languages().clone()), + project.buffer_store().clone(), + cx, + ) + }); + + cx.spawn_in(window, |git_panel, mut cx| async move { + let buffer = load_buffer.await?; + git_panel.update_in(&mut cx, |git_panel, window, cx| { + if git_panel + .commit_editor + .read(cx) + .buffer() + .read(cx) + .as_singleton() + .as_ref() + != Some(&buffer) + { + git_panel.commit_editor = + cx.new(|cx| commit_message_editor(Some(buffer), window, cx)); + } + }) + }) + .detach_and_log_err(cx); + } + fn clear_pending(&mut self) { self.pending.retain(|v| !v.finished) } @@ -944,6 +907,7 @@ impl GitPanel { }; // First pass - collect all paths + let repo = repo.read(cx); let path_set = HashSet::from_iter(repo.status().map(|entry| entry.repo_path)); let mut has_changed_checked_boxes = false; @@ -1117,7 +1081,7 @@ impl GitPanel { let entry_count = self .active_repository .as_ref() - .map_or(0, RepositoryHandle::entry_count); + .map_or(0, |repo| repo.read(cx).entry_count()); let changes_string = match entry_count { 0 => "No changes".to_string(), @@ -1151,7 +1115,7 @@ impl GitPanel { let active_repository = self.project.read(cx).active_repository(cx); let repository_display_name = active_repository .as_ref() - .map(|repo| repo.display_name(self.project.read(cx), cx)) + .map(|repo| repo.read(cx).display_name(self.project.read(cx), cx)) .unwrap_or_default(); let entry_count = self.entries.len(); @@ -1619,7 +1583,7 @@ impl Render for GitPanel { .active_repository .as_ref() .map_or(false, |active_repository| { - active_repository.entry_count() > 0 + active_repository.read(cx).entry_count() > 0 }); let room = self .workspace diff --git a/crates/git_ui/src/project_diff.rs b/crates/git_ui/src/project_diff.rs index 789dc8c21dd8260360e040ae5678aafa2e6601f5..a78f097e244d331fbfca1547be1381e35afd83ea 100644 --- a/crates/git_ui/src/project_diff.rs +++ b/crates/git_ui/src/project_diff.rs @@ -163,6 +163,7 @@ impl ProjectDiff { }; let 
Some(path) = git_repo + .read(cx) .repo_path_to_project_path(&entry.repo_path) .and_then(|project_path| self.project.read(cx).absolute_path(&project_path, cx)) else { @@ -234,43 +235,45 @@ impl ProjectDiff { let mut previous_paths = self.multibuffer.read(cx).paths().collect::>(); let mut result = vec![]; - for entry in repo.status() { - if !entry.status.has_changes() { - continue; + repo.update(cx, |repo, cx| { + for entry in repo.status() { + if !entry.status.has_changes() { + continue; + } + let Some(project_path) = repo.repo_path_to_project_path(&entry.repo_path) else { + continue; + }; + let Some(abs_path) = self.project.read(cx).absolute_path(&project_path, cx) else { + continue; + }; + // Craft some artificial paths so that created entries will appear last. + let path_key = if entry.status.is_created() { + PathKey::namespaced(ADDED_NAMESPACE, &abs_path) + } else { + PathKey::namespaced(CHANGED_NAMESPACE, &abs_path) + }; + + previous_paths.remove(&path_key); + let load_buffer = self + .project + .update(cx, |project, cx| project.open_buffer(project_path, cx)); + + let project = self.project.clone(); + result.push(cx.spawn(|_, mut cx| async move { + let buffer = load_buffer.await?; + let changes = project + .update(&mut cx, |project, cx| { + project.open_uncommitted_changes(buffer.clone(), cx) + })? + .await?; + Ok(DiffBuffer { + path_key, + buffer, + change_set: changes, + }) + })); } - let Some(project_path) = repo.repo_path_to_project_path(&entry.repo_path) else { - continue; - }; - let Some(abs_path) = self.project.read(cx).absolute_path(&project_path, cx) else { - continue; - }; - // Craft some artificial paths so that created entries will appear last. - let path_key = if entry.status.is_created() { - PathKey::namespaced(ADDED_NAMESPACE, &abs_path) - } else { - PathKey::namespaced(CHANGED_NAMESPACE, &abs_path) - }; - - previous_paths.remove(&path_key); - let load_buffer = self - .project - .update(cx, |project, cx| project.open_buffer(project_path, cx)); - - let project = self.project.clone(); - result.push(cx.spawn(|_, mut cx| async move { - let buffer = load_buffer.await?; - let changes = project - .update(&mut cx, |project, cx| { - project.open_uncommitted_changes(buffer.clone(), cx) - })? 
- .await?; - Ok(DiffBuffer { - path_key, - buffer, - change_set: changes, - }) - })); - } + }); self.multibuffer.update(cx, |multibuffer, cx| { for path in previous_paths { multibuffer.remove_excerpts_for_path(path, cx); diff --git a/crates/git_ui/src/repository_selector.rs b/crates/git_ui/src/repository_selector.rs index 9c7f5f4e077888a0e9f456ea4d6918750365c93a..81d5f06635d6a7c387fb8ad44cf1b3d8c47f02d1 100644 --- a/crates/git_ui/src/repository_selector.rs +++ b/crates/git_ui/src/repository_selector.rs @@ -4,7 +4,7 @@ use gpui::{ }; use picker::{Picker, PickerDelegate}; use project::{ - git::{GitState, RepositoryHandle}, + git::{GitState, Repository}, Project, }; use std::sync::Arc; @@ -117,13 +117,13 @@ impl RenderOnce for RepositorySelectorPopoverMenu { pub struct RepositorySelectorDelegate { project: WeakEntity, repository_selector: WeakEntity, - repository_entries: Vec, - filtered_repositories: Vec, + repository_entries: Vec>, + filtered_repositories: Vec>, selected_index: usize, } impl RepositorySelectorDelegate { - pub fn update_repository_entries(&mut self, all_repositories: Vec) { + pub fn update_repository_entries(&mut self, all_repositories: Vec>) { self.repository_entries = all_repositories.clone(); self.filtered_repositories = all_repositories; self.selected_index = 0; @@ -194,7 +194,7 @@ impl PickerDelegate for RepositorySelectorDelegate { let Some(selected_repo) = self.filtered_repositories.get(self.selected_index) else { return; }; - selected_repo.activate(cx); + selected_repo.update(cx, |selected_repo, cx| selected_repo.activate(cx)); self.dismissed(window, cx); } @@ -222,7 +222,7 @@ impl PickerDelegate for RepositorySelectorDelegate { ) -> Option { let project = self.project.upgrade()?; let repo_info = self.filtered_repositories.get(ix)?; - let display_name = repo_info.display_name(project.read(cx), cx); + let display_name = repo_info.read(cx).display_name(project.read(cx), cx); // TODO: Implement repository item rendering Some( ListItem::new(ix) diff --git a/crates/languages/Cargo.toml b/crates/languages/Cargo.toml index 5665b9b53ab25daba2a4b979e7f19201e8f89436..99ee6997fda822b20d11c360cc3e6fbc9f0ba3a7 100644 --- a/crates/languages/Cargo.toml +++ b/crates/languages/Cargo.toml @@ -19,6 +19,7 @@ load-grammars = [ "tree-sitter-cpp", "tree-sitter-css", "tree-sitter-diff", + "tree-sitter-gitcommit", "tree-sitter-go", "tree-sitter-go-mod", "tree-sitter-gowork", @@ -69,6 +70,7 @@ tree-sitter-c = { workspace = true, optional = true } tree-sitter-cpp = { workspace = true, optional = true } tree-sitter-css = { workspace = true, optional = true } tree-sitter-diff = { workspace = true, optional = true } +tree-sitter-gitcommit = {workspace = true, optional = true } tree-sitter-go = { workspace = true, optional = true } tree-sitter-go-mod = { workspace = true, optional = true } tree-sitter-gowork = { workspace = true, optional = true } diff --git a/crates/languages/src/gitcommit/config.toml b/crates/languages/src/gitcommit/config.toml new file mode 100644 index 0000000000000000000000000000000000000000..c8ffca31056acb0d7245ae42739c521879df2030 --- /dev/null +++ b/crates/languages/src/gitcommit/config.toml @@ -0,0 +1,18 @@ +name = "Git Commit" +grammar = "git_commit" +path_suffixes = [ + "TAG_EDITMSG", + "MERGE_MSG", + "COMMIT_EDITMSG", + "NOTES_EDITMSG", + "EDIT_DESCRIPTION", +] +line_comments = ["#"] +brackets = [ + { start = "(", end = ")", close = true, newline = false }, + { start = "`", end = "`", close = true, newline = false }, + { start = "\"", end = "\"", close = true, newline 
= false }, + { start = "'", end = "'", close = true, newline = false }, + { start = "{", end = "}", close = true, newline = false }, + { start = "[", end = "]", close = true, newline = false }, +] diff --git a/crates/languages/src/gitcommit/highlights.scm b/crates/languages/src/gitcommit/highlights.scm new file mode 100644 index 0000000000000000000000000000000000000000..319d76569e56f101c7efb33b5ae676db7d51e0ab --- /dev/null +++ b/crates/languages/src/gitcommit/highlights.scm @@ -0,0 +1,18 @@ +(subject) @markup.heading +(path) @string.special.path +(branch) @string.special.symbol +(commit) @constant +(item) @markup.link.url +(header) @tag + +(change kind: "new file" @diff.plus) +(change kind: "deleted" @diff.minus) +(change kind: "modified" @diff.delta) +(change kind: "renamed" @diff.delta.moved) + +(trailer + key: (trailer_key) @variable.other.member + value: (trailer_value) @string) + +[":" "=" "->" (scissors)] @punctuation.delimiter +(comment) @comment diff --git a/crates/languages/src/gitcommit/injections.scm b/crates/languages/src/gitcommit/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..db0af176578cfe1ba50db0cc7543d9b805ed8163 --- /dev/null +++ b/crates/languages/src/gitcommit/injections.scm @@ -0,0 +1,5 @@ +((scissors) @content + (#set! "language" "diff")) + +((rebase_command) @content + (#set! "language" "git_rebase")) diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index 574af6dc23ed690b39c7a39b7b4bc83b8b89db97..fbfe7b371ce1fc3b26a41b464064a342d8d9f34b 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -31,6 +31,25 @@ mod yaml; #[exclude = "*.rs"] struct LanguageDir; +/// A shared grammar for plain text, exposed for reuse by downstream crates. +#[cfg(feature = "tree-sitter-gitcommit")] +pub static LANGUAGE_GIT_COMMIT: std::sync::LazyLock> = + std::sync::LazyLock::new(|| { + Arc::new(Language::new( + LanguageConfig { + name: "Git Commit".into(), + soft_wrap: Some(language::language_settings::SoftWrap::EditorWidth), + matcher: LanguageMatcher { + path_suffixes: vec!["COMMIT_EDITMSG".to_owned()], + first_line_pattern: None, + }, + line_comments: vec![Arc::from("#")], + ..LanguageConfig::default() + }, + Some(tree_sitter_gitcommit::LANGUAGE.into()), + )) + }); + pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mut App) { #[cfg(feature = "load-grammars")] languages.register_native_grammars([ @@ -53,6 +72,7 @@ pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mu ("tsx", tree_sitter_typescript::LANGUAGE_TSX), ("typescript", tree_sitter_typescript::LANGUAGE_TYPESCRIPT), ("yaml", tree_sitter_yaml::LANGUAGE), + ("gitcommit", tree_sitter_gitcommit::LANGUAGE), ]); macro_rules! 
language { diff --git a/crates/project/src/git.rs b/crates/project/src/git.rs index 90dff1ed93c797948fd89726efd3d80ed36f47bb..38a891005916fba97dccc55fef45b04577555ec6 100644 --- a/crates/project/src/git.rs +++ b/crates/project/src/git.rs @@ -1,6 +1,7 @@ +use crate::buffer_store::BufferStore; use crate::worktree_store::{WorktreeStore, WorktreeStoreEvent}; use crate::{Project, ProjectPath}; -use anyhow::{anyhow, Context as _}; +use anyhow::Context as _; use client::ProjectId; use futures::channel::{mpsc, oneshot}; use futures::StreamExt as _; @@ -8,24 +9,28 @@ use git::{ repository::{GitRepository, RepoPath}, status::{GitSummary, TrackedSummary}, }; -use gpui::{App, Context, Entity, EventEmitter, SharedString, Subscription, WeakEntity}; +use gpui::{ + App, AppContext, Context, Entity, EventEmitter, SharedString, Subscription, Task, WeakEntity, +}; +use language::{Buffer, LanguageRegistry}; use rpc::{proto, AnyProtoClient}; use settings::WorktreeId; use std::sync::Arc; +use text::BufferId; use util::{maybe, ResultExt}; use worktree::{ProjectEntryId, RepositoryEntry, StatusEntry}; pub struct GitState { project_id: Option, client: Option, - repositories: Vec, + repositories: Vec>, active_index: Option, update_sender: mpsc::UnboundedSender<(Message, oneshot::Sender>)>, _subscription: Subscription, } -#[derive(Clone)] -pub struct RepositoryHandle { +pub struct Repository { + commit_message_buffer: Option>, git_state: WeakEntity, pub worktree_id: WorktreeId, pub repository_entry: RepositoryEntry, @@ -44,25 +49,10 @@ pub enum GitRepo { }, } -impl PartialEq for RepositoryHandle { - fn eq(&self, other: &Self) -> bool { - self.worktree_id == other.worktree_id - && self.repository_entry.work_directory_id() - == other.repository_entry.work_directory_id() - } -} - -impl Eq for RepositoryHandle {} - -impl PartialEq for RepositoryHandle { - fn eq(&self, other: &RepositoryEntry) -> bool { - self.repository_entry.work_directory_id() == other.work_directory_id() - } -} - enum Message { Commit { git_repo: GitRepo, + message: SharedString, name_and_email: Option<(SharedString, SharedString)>, }, Stage(GitRepo, Vec), @@ -97,7 +87,7 @@ impl GitState { } } - pub fn active_repository(&self) -> Option { + pub fn active_repository(&self) -> Option> { self.active_index .map(|index| self.repositories[index].clone()) } @@ -118,7 +108,7 @@ impl GitState { worktree_store.update(cx, |worktree_store, cx| { for worktree in worktree_store.worktrees() { - worktree.update(cx, |worktree, _| { + worktree.update(cx, |worktree, cx| { let snapshot = worktree.snapshot(); for repo in snapshot.repositories().iter() { let git_repo = worktree @@ -139,27 +129,34 @@ impl GitState { let Some(git_repo) = git_repo else { continue; }; - let existing = self - .repositories - .iter() - .enumerate() - .find(|(_, existing_handle)| existing_handle == &repo); + let worktree_id = worktree.id(); + let existing = + self.repositories + .iter() + .enumerate() + .find(|(_, existing_handle)| { + existing_handle.read(cx).id() + == (worktree_id, repo.work_directory_id()) + }); let handle = if let Some((index, handle)) = existing { if self.active_index == Some(index) { new_active_index = Some(new_repositories.len()); } // Update the statuses but keep everything else. 
- let mut existing_handle = handle.clone(); - existing_handle.repository_entry = repo.clone(); + let existing_handle = handle.clone(); + existing_handle.update(cx, |existing_handle, _| { + existing_handle.repository_entry = repo.clone(); + }); existing_handle } else { - RepositoryHandle { + cx.new(|_| Repository { git_state: this.clone(), - worktree_id: worktree.id(), + worktree_id, repository_entry: repo.clone(), git_repo, update_sender: self.update_sender.clone(), - } + commit_message_buffer: None, + }) }; new_repositories.push(handle); } @@ -184,7 +181,7 @@ impl GitState { } } - pub fn all_repositories(&self) -> Vec { + pub fn all_repositories(&self) -> Vec> { self.repositories.clone() } @@ -260,10 +257,12 @@ impl GitState { } Message::Commit { git_repo, + message, name_and_email, } => { match git_repo { GitRepo::Local(repo) => repo.commit( + message.as_ref(), name_and_email .as_ref() .map(|(name, email)| (name.as_ref(), email.as_ref())), @@ -280,6 +279,7 @@ impl GitState { project_id: project_id.0, worktree_id: worktree_id.to_proto(), work_directory_id: work_directory_id.to_proto(), + message: String::from(message), name: name.map(String::from), email: email.map(String::from), }) @@ -293,7 +293,11 @@ impl GitState { } } -impl RepositoryHandle { +impl Repository { + fn id(&self) -> (WorktreeId, ProjectEntryId) { + (self.worktree_id, self.repository_entry.work_directory_id()) + } + pub fn display_name(&self, project: &Project, cx: &App) -> SharedString { maybe!({ let path = self.repo_path_to_project_path(&"".into())?; @@ -318,7 +322,7 @@ impl RepositoryHandle { .repositories .iter() .enumerate() - .find(|(_, handle)| handle == &self) + .find(|(_, handle)| handle.read(cx).id() == self.id()) else { return; }; @@ -343,47 +347,121 @@ impl RepositoryHandle { self.repository_entry.relativize(&path.path).log_err() } - pub async fn stage_entries(&self, entries: Vec) -> anyhow::Result<()> { - if entries.is_empty() { - return Ok(()); + pub fn open_commit_buffer( + &mut self, + languages: Option>, + buffer_store: Entity, + cx: &mut Context, + ) -> Task>> { + if let Some(buffer) = self.commit_message_buffer.clone() { + return Task::ready(Ok(buffer)); } + + if let GitRepo::Remote { + project_id, + client, + worktree_id, + work_directory_id, + } = self.git_repo.clone() + { + let client = client.clone(); + cx.spawn(|repository, mut cx| async move { + let request = client.request(proto::OpenCommitMessageBuffer { + project_id: project_id.0, + worktree_id: worktree_id.to_proto(), + work_directory_id: work_directory_id.to_proto(), + }); + let response = request.await.context("requesting to open commit buffer")?; + let buffer_id = BufferId::new(response.buffer_id)?; + let buffer = buffer_store + .update(&mut cx, |buffer_store, cx| { + buffer_store.wait_for_remote_buffer(buffer_id, cx) + })? 
+ .await?; + if let Some(language_registry) = languages { + let git_commit_language = + language_registry.language_for_name("Git Commit").await?; + buffer.update(&mut cx, |buffer, cx| { + buffer.set_language(Some(git_commit_language), cx); + })?; + } + repository.update(&mut cx, |repository, _| { + repository.commit_message_buffer = Some(buffer.clone()); + })?; + Ok(buffer) + }) + } else { + self.open_local_commit_buffer(languages, buffer_store, cx) + } + } + + fn open_local_commit_buffer( + &mut self, + language_registry: Option>, + buffer_store: Entity, + cx: &mut Context, + ) -> Task>> { + cx.spawn(|repository, mut cx| async move { + let buffer = buffer_store + .update(&mut cx, |buffer_store, cx| buffer_store.create_buffer(cx))? + .await?; + + if let Some(language_registry) = language_registry { + let git_commit_language = language_registry.language_for_name("Git Commit").await?; + buffer.update(&mut cx, |buffer, cx| { + buffer.set_language(Some(git_commit_language), cx); + })?; + } + + repository.update(&mut cx, |repository, _| { + repository.commit_message_buffer = Some(buffer.clone()); + })?; + Ok(buffer) + }) + } + + pub fn stage_entries(&self, entries: Vec) -> oneshot::Receiver> { let (result_tx, result_rx) = futures::channel::oneshot::channel(); + if entries.is_empty() { + result_tx.send(Ok(())).ok(); + return result_rx; + } self.update_sender .unbounded_send((Message::Stage(self.git_repo.clone(), entries), result_tx)) - .map_err(|_| anyhow!("Failed to submit stage operation"))?; - - result_rx.await? + .ok(); + result_rx } - pub async fn unstage_entries(&self, entries: Vec) -> anyhow::Result<()> { + pub fn unstage_entries(&self, entries: Vec) -> oneshot::Receiver> { + let (result_tx, result_rx) = futures::channel::oneshot::channel(); if entries.is_empty() { - return Ok(()); + result_tx.send(Ok(())).ok(); + return result_rx; } - let (result_tx, result_rx) = futures::channel::oneshot::channel(); self.update_sender .unbounded_send((Message::Unstage(self.git_repo.clone(), entries), result_tx)) - .map_err(|_| anyhow!("Failed to submit unstage operation"))?; - result_rx.await? + .ok(); + result_rx } - pub async fn stage_all(&self) -> anyhow::Result<()> { + pub fn stage_all(&self) -> oneshot::Receiver> { let to_stage = self .repository_entry .status() .filter(|entry| !entry.status.is_staged().unwrap_or(false)) .map(|entry| entry.repo_path.clone()) .collect(); - self.stage_entries(to_stage).await + self.stage_entries(to_stage) } - pub async fn unstage_all(&self) -> anyhow::Result<()> { + pub fn unstage_all(&self) -> oneshot::Receiver> { let to_unstage = self .repository_entry .status() .filter(|entry| entry.status.is_staged().unwrap_or(true)) .map(|entry| entry.repo_path.clone()) .collect(); - self.unstage_entries(to_unstage).await + self.unstage_entries(to_unstage) } /// Get a count of all entries in the active repository, including @@ -404,18 +482,22 @@ impl RepositoryHandle { return self.have_changes() && (commit_all || self.have_staged_changes()); } - pub async fn commit( + pub fn commit( &self, + message: SharedString, name_and_email: Option<(SharedString, SharedString)>, - ) -> anyhow::Result<()> { + ) -> oneshot::Receiver> { let (result_tx, result_rx) = futures::channel::oneshot::channel(); - self.update_sender.unbounded_send(( - Message::Commit { - git_repo: self.git_repo.clone(), - name_and_email, - }, - result_tx, - ))?; - result_rx.await? 
+ self.update_sender + .unbounded_send(( + Message::Commit { + git_repo: self.git_repo.clone(), + message, + name_and_email, + }, + result_tx, + )) + .ok(); + result_rx } } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 069044bbc42dc9fbadd7239ea289c07e51453b04..2a7759daa4e556d7681d4c98332422323b0ea109 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -22,7 +22,7 @@ mod project_tests; mod direnv; mod environment; pub use environment::EnvironmentErrorMessage; -use git::RepositoryHandle; +use git::Repository; pub mod search_history; mod yarn; @@ -48,7 +48,6 @@ use ::git::{ blame::Blame, repository::{Branch, GitRepository, RepoPath}, status::FileStatus, - COMMIT_MESSAGE, }; use gpui::{ AnyEntity, App, AppContext as _, AsyncApp, BorrowAppContext, Context, Entity, EventEmitter, @@ -1998,12 +1997,15 @@ impl Project { project_id, id: id.into(), }); - cx.spawn(move |this, mut cx| async move { + cx.spawn(move |project, mut cx| async move { let buffer_id = BufferId::new(request.await?.buffer_id)?; - this.update(&mut cx, |this, cx| { - this.wait_for_remote_buffer(buffer_id, cx) - })? - .await + project + .update(&mut cx, |project, cx| { + project.buffer_store.update(cx, |buffer_store, cx| { + buffer_store.wait_for_remote_buffer(buffer_id, cx) + }) + })? + .await }) } else { Task::ready(Err(anyhow!("cannot open buffer while disconnected"))) @@ -2846,16 +2848,21 @@ impl Project { let proto_client = ssh_client.read(cx).proto_client(); - cx.spawn(|this, mut cx| async move { + cx.spawn(|project, mut cx| async move { let buffer = proto_client .request(proto::OpenServerSettings { project_id: SSH_PROJECT_ID, }) .await?; - let buffer = this - .update(&mut cx, |this, cx| { - anyhow::Ok(this.wait_for_remote_buffer(BufferId::new(buffer.buffer_id)?, cx)) + let buffer = project + .update(&mut cx, |project, cx| { + project.buffer_store.update(cx, |buffer_store, cx| { + anyhow::Ok( + buffer_store + .wait_for_remote_buffer(BufferId::new(buffer.buffer_id)?, cx), + ) + }) })?? .await; @@ -3186,13 +3193,15 @@ impl Project { }); let guard = self.retain_remotely_created_models(cx); - cx.spawn(move |this, mut cx| async move { + cx.spawn(move |project, mut cx| async move { let response = request.await?; for buffer_id in response.buffer_ids { let buffer_id = BufferId::new(buffer_id)?; - let buffer = this - .update(&mut cx, |this, cx| { - this.wait_for_remote_buffer(buffer_id, cx) + let buffer = project + .update(&mut cx, |project, cx| { + project.buffer_store.update(cx, |buffer_store, cx| { + buffer_store.wait_for_remote_buffer(buffer_id, cx) + }) })? .await?; let _ = tx.send(buffer).await; @@ -3998,7 +4007,11 @@ impl Project { .map(RepoPath::new) .collect(); - repository_handle.stage_entries(entries).await?; + repository_handle + .update(&mut cx, |repository_handle, _| { + repository_handle.stage_entries(entries) + })? + .await??; Ok(proto::Ack {}) } @@ -4020,7 +4033,11 @@ impl Project { .map(RepoPath::new) .collect(); - repository_handle.unstage_entries(entries).await?; + repository_handle + .update(&mut cx, |repository_handle, _| { + repository_handle.unstage_entries(entries) + })? 
+ .await??; Ok(proto::Ack {}) } @@ -4034,9 +4051,14 @@ impl Project { let repository_handle = Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?; + let message = SharedString::from(envelope.payload.message); let name = envelope.payload.name.map(SharedString::from); let email = envelope.payload.email.map(SharedString::from); - repository_handle.commit(name.zip(email)).await?; + repository_handle + .update(&mut cx, |repository_handle, _| { + repository_handle.commit(message, name.zip(email)) + })? + .await??; Ok(proto::Ack {}) } @@ -4049,55 +4071,12 @@ impl Project { let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); let repository_handle = Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?; - let git_repository = match &repository_handle.git_repo { - git::GitRepo::Local(git_repository) => git_repository.clone(), - git::GitRepo::Remote { .. } => { - anyhow::bail!("Cannot handle open commit message buffer for remote git repo") - } - }; - let commit_message_file = git_repository.dot_git_dir().join(*COMMIT_MESSAGE); - let fs = this.update(&mut cx, |project, _| project.fs().clone())?; - fs.create_file( - &commit_message_file, - CreateOptions { - overwrite: false, - ignore_if_exists: true, - }, - ) - .await - .with_context(|| format!("creating commit message file {commit_message_file:?}"))?; - - let (worktree, relative_path) = this - .update(&mut cx, |headless_project, cx| { - headless_project - .worktree_store - .update(cx, |worktree_store, cx| { - worktree_store.find_or_create_worktree(&commit_message_file, false, cx) - }) - })? - .await - .with_context(|| { - format!("deriving worktree for commit message file {commit_message_file:?}") - })?; - - let buffer = this - .update(&mut cx, |headless_project, cx| { - headless_project - .buffer_store - .update(cx, |buffer_store, cx| { - buffer_store.open_buffer( - ProjectPath { - worktree_id: worktree.read(cx).id(), - path: Arc::from(relative_path), - }, - cx, - ) - }) - }) - .with_context(|| { - format!("opening buffer for commit message file {commit_message_file:?}") + let buffer = repository_handle + .update(&mut cx, |repository_handle, cx| { + repository_handle.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx) })? 
.await?; + let peer_id = envelope.original_sender_id()?; Project::respond_to_open_buffer_request(this, buffer, peer_id, &mut cx) } @@ -4107,7 +4086,7 @@ impl Project { worktree_id: WorktreeId, work_directory_id: ProjectEntryId, cx: &mut AsyncApp, - ) -> Result { + ) -> Result> { this.update(cx, |project, cx| { let repository_handle = project .git_state() @@ -4115,6 +4094,7 @@ impl Project { .all_repositories() .into_iter() .find(|repository_handle| { + let repository_handle = repository_handle.read(cx); repository_handle.worktree_id == worktree_id && repository_handle.repository_entry.work_directory_id() == work_directory_id @@ -4160,16 +4140,6 @@ impl Project { buffer.read(cx).remote_id() } - pub fn wait_for_remote_buffer( - &mut self, - id: BufferId, - cx: &mut Context, - ) -> Task>> { - self.buffer_store.update(cx, |buffer_store, cx| { - buffer_store.wait_for_remote_buffer(id, cx) - }) - } - fn synchronize_remote_buffers(&mut self, cx: &mut Context) -> Task> { let project_id = match self.client_state { ProjectClientState::Remote { @@ -4329,11 +4299,11 @@ impl Project { &self.git_state } - pub fn active_repository(&self, cx: &App) -> Option { + pub fn active_repository(&self, cx: &App) -> Option> { self.git_state.read(cx).active_repository() } - pub fn all_repositories(&self, cx: &App) -> Vec { + pub fn all_repositories(&self, cx: &App) -> Vec> { self.git_state.read(cx).all_repositories() } } diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 976e1e73fd0ce30c63036ca5adb54e3d0ec1610d..195294fe68a015845447c73ad8ab38313f6d020b 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -2693,6 +2693,7 @@ message Commit { uint64 work_directory_id = 3; optional string name = 4; optional string email = 5; + string message = 6; } message OpenCommitMessageBuffer { diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 3accb105ad5ebf1701d0ebaef572dd6a660b09ec..be22a52fa77b71c178a908b729fa652316f6435a 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -1,15 +1,15 @@ use anyhow::{anyhow, Context as _, Result}; use extension::ExtensionHostProxy; use extension_host::headless_host::HeadlessExtensionStore; -use fs::{CreateOptions, Fs}; -use git::{repository::RepoPath, COMMIT_MESSAGE}; +use fs::Fs; +use git::repository::RepoPath; use gpui::{App, AppContext as _, AsyncApp, Context, Entity, PromptLevel, SharedString}; use http_client::HttpClient; use language::{proto::serialize_operation, Buffer, BufferEvent, LanguageRegistry}; use node_runtime::NodeRuntime; use project::{ buffer_store::{BufferStore, BufferStoreEvent}, - git::{GitRepo, GitState, RepositoryHandle}, + git::{GitState, Repository}, project_settings::SettingsObserver, search::SearchQuery, task_store::TaskStore, @@ -635,7 +635,11 @@ impl HeadlessProject { .map(RepoPath::new) .collect(); - repository_handle.stage_entries(entries).await?; + repository_handle + .update(&mut cx, |repository_handle, _| { + repository_handle.stage_entries(entries) + })? + .await??; Ok(proto::Ack {}) } @@ -657,7 +661,11 @@ impl HeadlessProject { .map(RepoPath::new) .collect(); - repository_handle.unstage_entries(entries).await?; + repository_handle + .update(&mut cx, |repository_handle, _| { + repository_handle.unstage_entries(entries) + })? 
+ .await??; Ok(proto::Ack {}) } @@ -672,10 +680,15 @@ impl HeadlessProject { let repository_handle = Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?; + let message = SharedString::from(envelope.payload.message); let name = envelope.payload.name.map(SharedString::from); let email = envelope.payload.email.map(SharedString::from); - repository_handle.commit(name.zip(email)).await?; + repository_handle + .update(&mut cx, |repository_handle, _| { + repository_handle.commit(message, name.zip(email)) + })? + .await??; Ok(proto::Ack {}) } @@ -686,55 +699,11 @@ impl HeadlessProject { ) -> Result { let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); - let repository_handle = + let repository = Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?; - let git_repository = match &repository_handle.git_repo { - GitRepo::Local(git_repository) => git_repository.clone(), - GitRepo::Remote { .. } => { - anyhow::bail!("Cannot handle open commit message buffer for remote git repo") - } - }; - let commit_message_file = git_repository.dot_git_dir().join(*COMMIT_MESSAGE); - let fs = this.update(&mut cx, |headless_project, _| headless_project.fs.clone())?; - fs.create_file( - &commit_message_file, - CreateOptions { - overwrite: false, - ignore_if_exists: true, - }, - ) - .await - .with_context(|| format!("creating commit message file {commit_message_file:?}"))?; - - let (worktree, relative_path) = this - .update(&mut cx, |headless_project, cx| { - headless_project - .worktree_store - .update(cx, |worktree_store, cx| { - worktree_store.find_or_create_worktree(&commit_message_file, false, cx) - }) - })? - .await - .with_context(|| { - format!("deriving worktree for commit message file {commit_message_file:?}") - })?; - - let buffer = this - .update(&mut cx, |headless_project, cx| { - headless_project - .buffer_store - .update(cx, |buffer_store, cx| { - buffer_store.open_buffer( - ProjectPath { - worktree_id: worktree.read(cx).id(), - path: Arc::from(relative_path), - }, - cx, - ) - }) - }) - .with_context(|| { - format!("opening buffer for commit message file {commit_message_file:?}") + let buffer = repository + .update(&mut cx, |repository, cx| { + repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx) })? 
.await?; @@ -759,7 +728,7 @@ impl HeadlessProject { worktree_id: WorktreeId, work_directory_id: ProjectEntryId, cx: &mut AsyncApp, - ) -> Result { + ) -> Result> { this.update(cx, |project, cx| { let repository_handle = project .git_state @@ -767,8 +736,11 @@ impl HeadlessProject { .all_repositories() .into_iter() .find(|repository_handle| { - repository_handle.worktree_id == worktree_id - && repository_handle.repository_entry.work_directory_id() + repository_handle.read(cx).worktree_id == worktree_id + && repository_handle + .read(cx) + .repository_entry + .work_directory_id() == work_directory_id }) .context("missing repository handle")?; diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 8ba52747d1f50ad03ad4d39c70e2d2141dfaf503..7084fc7d3bab71787839d56710ac051cc59db5af 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -199,7 +199,7 @@ pub struct RepositoryEntry { /// - my_sub_folder_1/project_root/changed_file_1 /// - my_sub_folder_2/changed_file_2 pub(crate) statuses_by_path: SumTree, - pub work_directory_id: ProjectEntryId, + work_directory_id: ProjectEntryId, pub work_directory: WorkDirectory, pub(crate) branch: Option>, } From 6b29616c9579e28d98e59b0e1e7b441c2d9343fc Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Wed, 5 Feb 2025 10:37:51 -0500 Subject: [PATCH 028/130] Fix the worktree's repository_for_path (#24279) Go back to a less optimized implementation for now since the custom cursor target seems to have some bugs. Release Notes: - Fixed missing git blame and status output in some projects with multiple git repositories --- crates/worktree/src/worktree.rs | 27 ++++----------------------- 1 file changed, 4 insertions(+), 23 deletions(-) diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 7084fc7d3bab71787839d56710ac051cc59db5af..cc6075f07a68afbae66dfcabcd97c92bb8a00a80 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -2682,21 +2682,10 @@ impl Snapshot { /// Get the repository whose work directory contains the given path. pub fn repository_for_path(&self, path: &Path) -> Option<&RepositoryEntry> { - let mut cursor = self.repositories.cursor::(&()); - let mut repository = None; - - // Git repositories may contain other git repositories. As a side effect of - // lexicographic sorting by path, deeper repositories will be after higher repositories - // So, let's loop through every matching repository until we can't find any more to find - // the deepest repository that could contain this path. 
- while cursor.seek_forward(&PathTarget::Contains(path), Bias::Left, &()) - && cursor.item().is_some() - { - repository = cursor.item(); - cursor.next(&()); - } - - repository + self.repositories + .iter() + .filter(|repo| repo.work_directory.directory_contains(path)) + .last() } /// Given an ordered iterator of entries, returns an iterator of those entries, @@ -5982,7 +5971,6 @@ impl<'a> Iterator for Traversal<'a> { enum PathTarget<'a> { Path(&'a Path), Successor(&'a Path), - Contains(&'a Path), } impl<'a> PathTarget<'a> { @@ -5996,13 +5984,6 @@ impl<'a> PathTarget<'a> { Ordering::Equal } } - PathTarget::Contains(path) => { - if path.starts_with(other) { - Ordering::Equal - } else { - Ordering::Greater - } - } } } } From 17a749533247b6e2d1078ac8ab94da608791afbe Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 5 Feb 2025 13:15:41 -0300 Subject: [PATCH 029/130] edit prediction: Fix license detection error logging + check for different spellings (#24281) Follow-up to https://github.com/zed-industries/zed/pull/24278 This PR ensures we're checking if there's a license-type file in both US & UK English spelling, and fixes the error logging again, treating for when the worktree contains just a single file or multiple. Release Notes: - N/A Co-authored-by: Bennet Bo Fenner <53836821+bennetbo@users.noreply.github.com> --- crates/zeta/src/zeta.rs | 35 ++++++++++++++++++++++++----------- 1 file changed, 24 insertions(+), 11 deletions(-) diff --git a/crates/zeta/src/zeta.rs b/crates/zeta/src/zeta.rs index 0be88b3c6a93a2e30fb1c331379da474e3996022..6e68a957c9bd92cccb932ca3b69b97ff5a207d08 100644 --- a/crates/zeta/src/zeta.rs +++ b/crates/zeta/src/zeta.rs @@ -953,20 +953,33 @@ impl LicenseDetectionWatcher { pub fn new(worktree: &Worktree, cx: &mut Context) -> Self { let (mut is_open_source_tx, is_open_source_rx) = watch::channel_with::(false); - let loaded_file_fut = worktree.load_file(Path::new("LICENSE"), cx); + const LICENSE_FILES_TO_CHECK: [&'static str; 2] = ["LICENSE", "LICENCE"]; // US and UK English spelling - Self { - is_open_source_rx, - _is_open_source_task: cx.spawn(|_, _| async move { - let Ok(loaded_file) = loaded_file_fut.await else { - return; - }; + // Check if worktree is a single file, if so we do not need to check for a LICENSE file + let task = if worktree.abs_path().is_file() { + Task::ready(()) + } else { + let loaded_files_task = futures::future::join_all( + LICENSE_FILES_TO_CHECK + .iter() + .map(|file| worktree.load_file(Path::new(file), cx)), + ); - let is_loaded_file_open_source_thing: bool = - is_license_eligible_for_data_collection(&loaded_file.text); + cx.background_executor().spawn(async move { + for loaded_file in loaded_files_task.await { + if let Some(content) = loaded_file.log_err() { + if is_license_eligible_for_data_collection(&content.text) { + *is_open_source_tx.borrow_mut() = true; + break; + } + } + } + }) + }; - *is_open_source_tx.borrow_mut() = is_loaded_file_open_source_thing; - }), + Self { + is_open_source_rx, + _is_open_source_task: task, } } From 37db1dcd4882b79a9fcbc4503f8287bee6c1624f Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 5 Feb 2025 13:39:27 -0300 Subject: [PATCH 030/130] Revise the `MessageNotification` component (#24287) This PR makes adding icons to the primary and secondary actions, in the `MessageNotification` component, optional. 
Also took the opportunity to remove a probably unnecessary "third action" from it; streamlining the component API (we had added that for a design that we're not using anymore). I did keep the "more info" possibility, which may be useful in the future, though. Release Notes: - N/A --- crates/extensions_ui/src/extension_suggest.rs | 13 +- crates/workspace/src/notifications.rs | 157 +++++++++--------- crates/workspace/src/workspace.rs | 5 +- crates/zed/src/zed.rs | 15 +- 4 files changed, 101 insertions(+), 89 deletions(-) diff --git a/crates/extensions_ui/src/extension_suggest.rs b/crates/extensions_ui/src/extension_suggest.rs index c131b4c3cf0a056638e2d6a977055381c885a6a1..4844dce7558837de1dd98776467d299e0af44b1e 100644 --- a/crates/extensions_ui/src/extension_suggest.rs +++ b/crates/extensions_ui/src/extension_suggest.rs @@ -7,6 +7,7 @@ use editor::Editor; use extension_host::ExtensionStore; use gpui::{AppContext as _, Context, Entity, SharedString, Window}; use language::Buffer; +use ui::prelude::*; use workspace::notifications::simple_message_notification::MessageNotification; use workspace::{notifications::NotificationId, Workspace}; @@ -172,8 +173,10 @@ pub(crate) fn suggest(buffer: Entity, window: &mut Window, cx: &mut Cont "Do you want to install the recommended '{}' extension for '{}' files?", extension_id, file_name_or_extension )) - .with_click_message("Yes, install extension") - .on_click({ + .primary_message("Yes, install extension") + .primary_icon(IconName::Check) + .primary_icon_color(Color::Success) + .primary_on_click({ let extension_id = extension_id.clone(); move |_window, cx| { let extension_id = extension_id.clone(); @@ -183,8 +186,10 @@ pub(crate) fn suggest(buffer: Entity, window: &mut Window, cx: &mut Cont }); } }) - .with_secondary_click_message("No, don't install it") - .on_secondary_click(move |_window, cx| { + .secondary_message("No, don't install it") + .secondary_icon(IconName::Close) + .secondary_icon_color(Color::Error) + .secondary_on_click(move |_window, cx| { let key = language_extension_key(&extension_id); db::write_and_log(cx, move || { KEY_VALUE_STORE.write_kvp(key, "dismissed".to_string()) diff --git a/crates/workspace/src/notifications.rs b/crates/workspace/src/notifications.rs index 3a850e5e5c47ed83dfa06b2b6ae2ef2dda2f3f89..dacca6067ef03a9cc9f9b5a6db333e869ef322ed 100644 --- a/crates/workspace/src/notifications.rs +++ b/crates/workspace/src/notifications.rs @@ -124,8 +124,8 @@ impl Workspace { Some((click_msg, on_click)) => { let on_click = on_click.clone(); simple_message_notification::MessageNotification::new(toast.msg.clone()) - .with_click_message(click_msg.clone()) - .on_click(move |window, cx| on_click(window, cx)) + .primary_message(click_msg.clone()) + .primary_on_click(move |window, cx| on_click(window, cx)) } None => simple_message_notification::MessageNotification::new(toast.msg.clone()), }) @@ -375,12 +375,14 @@ pub mod simple_message_notification { pub struct MessageNotification { build_content: Box) -> AnyElement>, - on_click: Option)>>, - click_message: Option, - secondary_click_message: Option, + primary_message: Option, + primary_icon: Option, + primary_icon_color: Option, + primary_on_click: Option)>>, + secondary_message: Option, + secondary_icon: Option, + secondary_icon_color: Option, secondary_on_click: Option)>>, - tertiary_click_message: Option, - tertiary_on_click: Option)>>, more_info_message: Option, more_info_url: Option>, show_close_button: bool, @@ -404,12 +406,14 @@ pub mod simple_message_notification { { Self { 
build_content: Box::new(content), - on_click: None, - click_message: None, + primary_message: None, + primary_icon: None, + primary_icon_color: None, + primary_on_click: None, + secondary_message: None, + secondary_icon: None, + secondary_icon_color: None, secondary_on_click: None, - secondary_click_message: None, - tertiary_on_click: None, - tertiary_click_message: None, more_info_message: None, more_info_url: None, show_close_button: true, @@ -417,51 +421,55 @@ pub mod simple_message_notification { } } - pub fn with_click_message(mut self, message: S) -> Self + pub fn primary_message(mut self, message: S) -> Self where S: Into, { - self.click_message = Some(message.into()); + self.primary_message = Some(message.into()); self } - pub fn on_click(mut self, on_click: F) -> Self - where - F: 'static + Fn(&mut Window, &mut Context), - { - self.on_click = Some(Arc::new(on_click)); + pub fn primary_icon(mut self, icon: IconName) -> Self { + self.primary_icon = Some(icon); self } - pub fn with_secondary_click_message(mut self, message: S) -> Self - where - S: Into, - { - self.secondary_click_message = Some(message.into()); + pub fn primary_icon_color(mut self, color: Color) -> Self { + self.primary_icon_color = Some(color); self } - pub fn on_secondary_click(mut self, on_click: F) -> Self + pub fn primary_on_click(mut self, on_click: F) -> Self where F: 'static + Fn(&mut Window, &mut Context), { - self.secondary_on_click = Some(Arc::new(on_click)); + self.primary_on_click = Some(Arc::new(on_click)); self } - pub fn with_tertiary_click_message(mut self, message: S) -> Self + pub fn secondary_message(mut self, message: S) -> Self where S: Into, { - self.tertiary_click_message = Some(message.into()); + self.secondary_message = Some(message.into()); self } - pub fn on_tertiary_click(mut self, on_click: F) -> Self + pub fn secondary_icon(mut self, icon: IconName) -> Self { + self.secondary_icon = Some(icon); + self + } + + pub fn secondary_icon_color(mut self, color: Color) -> Self { + self.secondary_icon_color = Some(color); + self + } + + pub fn secondary_on_click(mut self, on_click: F) -> Self where F: 'static + Fn(&mut Window, &mut Context), { - self.tertiary_on_click = Some(Arc::new(on_click)); + self.secondary_on_click = Some(Arc::new(on_click)); self } @@ -529,66 +537,63 @@ pub mod simple_message_notification { .child( h_flex() .gap_1() - .children(self.click_message.iter().map(|message| { - Button::new(message.clone(), message.clone()) + .children(self.primary_message.iter().map(|message| { + let mut button = Button::new(message.clone(), message.clone()) .label_size(LabelSize::Small) - .icon(IconName::Check) - .icon_position(IconPosition::Start) - .icon_size(IconSize::Small) - .icon_color(Color::Success) .on_click(cx.listener(|this, _, window, cx| { - if let Some(on_click) = this.on_click.as_ref() { + if let Some(on_click) = this.primary_on_click.as_ref() { (on_click)(window, cx) }; this.dismiss(cx) - })) + })); + + if let Some(icon) = self.primary_icon { + button = button + .icon(icon) + .icon_color(self.primary_icon_color.unwrap_or(Color::Muted)) + .icon_position(IconPosition::Start) + .icon_size(IconSize::Small); + } + + button })) - .children(self.secondary_click_message.iter().map(|message| { - Button::new(message.clone(), message.clone()) + .children(self.secondary_message.iter().map(|message| { + let mut button = Button::new(message.clone(), message.clone()) .label_size(LabelSize::Small) - .icon(IconName::Close) - .icon_position(IconPosition::Start) - .icon_size(IconSize::Small) - 
.icon_color(Color::Error) .on_click(cx.listener(|this, _, window, cx| { if let Some(on_click) = this.secondary_on_click.as_ref() { (on_click)(window, cx) }; this.dismiss(cx) - })) + })); + + if let Some(icon) = self.secondary_icon { + button = button + .icon(icon) + .icon_position(IconPosition::Start) + .icon_size(IconSize::Small) + .icon_color(self.secondary_icon_color.unwrap_or(Color::Muted)); + } + + button })) .child( - h_flex() - .w_full() - .gap_1() - .justify_end() - .children(self.tertiary_click_message.iter().map(|message| { - Button::new(message.clone(), message.clone()) - .label_size(LabelSize::Small) - .on_click(cx.listener(|this, _, window, cx| { - if let Some(on_click) = this.tertiary_on_click.as_ref() - { - (on_click)(window, cx) - }; - this.dismiss(cx) - })) - })) - .children( - self.more_info_message - .iter() - .zip(self.more_info_url.iter()) - .map(|(message, url)| { - let url = url.clone(); - Button::new(message.clone(), message.clone()) - .label_size(LabelSize::Small) - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::Indicator) - .icon_color(Color::Muted) - .on_click(cx.listener(move |_, _, _, cx| { - cx.open_url(&url); - })) - }), - ), + h_flex().w_full().justify_end().children( + self.more_info_message + .iter() + .zip(self.more_info_url.iter()) + .map(|(message, url)| { + let url = url.clone(); + Button::new(message.clone(), message.clone()) + .label_size(LabelSize::Small) + .icon(IconName::ArrowUpRight) + .icon_size(IconSize::Indicator) + .icon_color(Color::Muted) + .on_click(cx.listener(move |_, _, _, cx| { + cx.open_url(&url); + })) + }), + ), ), ) } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 2a5f5c74b08e85ed2f92bf8e9eff4e4edd3131a0..8fd0873d03fd2ee95cea7c7be365509c25d09309 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -5207,8 +5207,9 @@ fn notify_if_database_failed(workspace: WindowHandle, cx: &mut AsyncA |cx| { cx.new(|_| { MessageNotification::new("Failed to load the database file.") - .with_click_message("File an issue") - .on_click(|_window, cx| cx.open_url(REPORT_ISSUE_URL)) + .primary_message("File an Issue") + .primary_icon(IconName::Plus) + .primary_on_click(|_window, cx| cx.open_url(REPORT_ISSUE_URL)) }) }, ); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index f9793952f8a697edefc7c8543bf22d79c3f4873f..11d12f394ef31c29f451f3e7d01471cb24e11742 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -49,7 +49,7 @@ use std::time::Duration; use std::{borrow::Cow, ops::Deref, path::Path, sync::Arc}; use terminal_view::terminal_panel::{self, TerminalPanel}; use theme::{ActiveTheme, ThemeSettings}; -use ui::PopoverMenuHandle; +use ui::{prelude::*, PopoverMenuHandle}; use util::markdown::MarkdownString; use util::{asset_str, ResultExt}; use uuid::Uuid; @@ -1177,8 +1177,8 @@ fn show_keymap_file_json_error( show_app_notification(notification_id, cx, move |cx| { cx.new(|_cx| { MessageNotification::new(message.clone()) - .with_click_message("Open keymap file") - .on_click(|window, cx| { + .primary_message("Open Keymap File") + .primary_on_click(|window, cx| { window.dispatch_action(zed_actions::OpenKeymap.boxed_clone(), cx); cx.emit(DismissEvent); }) @@ -1220,8 +1220,8 @@ fn show_keymap_file_load_error( )) .into_any() }) - .with_click_message("Open keymap file") - .on_click(|window, cx| { + .primary_message("Open Keymap File") + .primary_on_click(|window, cx| { window.dispatch_action(zed_actions::OpenKeymap.boxed_clone(), cx); cx.emit(DismissEvent); 
}) @@ -1273,8 +1273,9 @@ pub fn handle_settings_changed(error: Option, cx: &mut App) { show_app_notification(id, cx, move |cx| { cx.new(|_cx| { MessageNotification::new(format!("Invalid user settings file\n{error}")) - .with_click_message("Open settings file") - .on_click(|window, cx| { + .primary_message("Open Settings File") + .primary_icon(IconName::Settings) + .primary_on_click(|window, cx| { window.dispatch_action(zed_actions::OpenSettings.boxed_clone(), cx); cx.emit(DismissEvent); }) From e1a6d9a4859747daa75d2e0d9d959510a294da59 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Wed, 5 Feb 2025 18:09:19 +0100 Subject: [PATCH 031/130] edit prediction: Improve UX around `disabled_globs` and `show_inline_completions` (#24207) Release Notes: - N/A --------- Co-authored-by: Danilo Co-authored-by: Danilo Leal --- Cargo.lock | 1 + assets/icons/zed_predict_disabled.svg | 6 + .../src/copilot_completion_provider.rs | 21 +- crates/editor/src/editor.rs | 143 +++-- crates/inline_completion_button/Cargo.toml | 5 +- .../src/inline_completion_button.rs | 250 +++++---- crates/language/src/language_settings.rs | 13 +- .../src/supermaven_completion_provider.rs | 14 +- crates/ui/src/components/context_menu.rs | 491 +++++++++++------- crates/ui/src/components/icon.rs | 1 + crates/vim/src/vim.rs | 2 +- crates/zed/src/zed/quick_action_bar.rs | 35 +- crates/zeta/src/zeta.rs | 15 +- 13 files changed, 578 insertions(+), 419 deletions(-) create mode 100644 assets/icons/zed_predict_disabled.svg diff --git a/Cargo.lock b/Cargo.lock index 7b9fed4f37606949e7d48b0df6e7af5e9de3fecb..42afceedd4b1b3d30c9ef16219f4ea531def35e1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6384,6 +6384,7 @@ dependencies = [ "lsp", "paths", "project", + "regex", "serde_json", "settings", "supermaven", diff --git a/assets/icons/zed_predict_disabled.svg b/assets/icons/zed_predict_disabled.svg new file mode 100644 index 0000000000000000000000000000000000000000..d10c4d560a88c718075a5c5dca6abc32daee2ae1 --- /dev/null +++ b/assets/icons/zed_predict_disabled.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/crates/copilot/src/copilot_completion_provider.rs b/crates/copilot/src/copilot_completion_provider.rs index 9c25e295aa91f75b9186a31519a877402a82e4f9..f953e5a1100371c6990e71e1208bb6e33b15d8bd 100644 --- a/crates/copilot/src/copilot_completion_provider.rs +++ b/crates/copilot/src/copilot_completion_provider.rs @@ -2,10 +2,7 @@ use crate::{Completion, Copilot}; use anyhow::Result; use gpui::{App, Context, Entity, EntityId, Task}; use inline_completion::{Direction, InlineCompletion, InlineCompletionProvider}; -use language::{ - language_settings::{all_language_settings, AllLanguageSettings}, - Buffer, OffsetRangeExt, ToOffset, -}; +use language::{language_settings::AllLanguageSettings, Buffer, OffsetRangeExt, ToOffset}; use settings::Settings; use std::{path::Path, time::Duration}; @@ -73,19 +70,11 @@ impl InlineCompletionProvider for CopilotCompletionProvider { fn is_enabled( &self, - buffer: &Entity, - cursor_position: language::Anchor, + _buffer: &Entity, + _cursor_position: language::Anchor, cx: &App, ) -> bool { - if !self.copilot.read(cx).status().is_authorized() { - return false; - } - - let buffer = buffer.read(cx); - let file = buffer.file(); - let language = buffer.language_at(cursor_position); - let settings = all_language_settings(file, cx); - settings.inline_completions_enabled(language.as_ref(), file.map(|f| f.path().as_ref()), cx) + self.copilot.read(cx).status().is_authorized() } fn refresh( @@ -205,7 +194,7 @@ impl 
InlineCompletionProvider for CopilotCompletionProvider { fn discard(&mut self, cx: &mut Context) { let settings = AllLanguageSettings::get_global(cx); - let copilot_enabled = settings.inline_completions_enabled(None, None, cx); + let copilot_enabled = settings.show_inline_completions(None, cx); if !copilot_enabled { return; diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 1ecd630dd65eaa97790c24af98a0806dd8deabd8..dab3ef5d7187805c8ce783c40a40c0c68ac69392 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -680,7 +680,7 @@ pub struct Editor { stale_inline_completion_in_menu: Option, // enable_inline_completions is a switch that Vim can use to disable // edit predictions based on its mode. - enable_inline_completions: bool, + show_inline_completions: bool, show_inline_completions_override: Option, menu_inline_completions_policy: MenuInlineCompletionsPolicy, inlay_hint_cache: InlayHintCache, @@ -1388,7 +1388,7 @@ impl Editor { next_editor_action_id: EditorActionId::default(), editor_actions: Rc::default(), show_inline_completions_override: None, - enable_inline_completions: true, + show_inline_completions: true, menu_inline_completions_policy: MenuInlineCompletionsPolicy::ByProvider, custom_context_menu: None, show_git_blame_gutter: false, @@ -1818,9 +1818,9 @@ impl Editor { self.input_enabled = input_enabled; } - pub fn set_inline_completions_enabled(&mut self, enabled: bool, cx: &mut Context) { - self.enable_inline_completions = enabled; - if !self.enable_inline_completions { + pub fn set_show_inline_completions_enabled(&mut self, enabled: bool, cx: &mut Context) { + self.show_inline_completions = enabled; + if !self.show_inline_completions { self.take_active_inline_completion(cx); cx.notify(); } @@ -1871,8 +1871,11 @@ impl Editor { if let Some((buffer, cursor_buffer_position)) = self.buffer.read(cx).text_anchor_for_position(cursor, cx) { - let show_inline_completions = - !self.should_show_inline_completions(&buffer, cursor_buffer_position, cx); + let show_inline_completions = !self.should_show_inline_completions_in_buffer( + &buffer, + cursor_buffer_position, + cx, + ); self.set_show_inline_completions(Some(show_inline_completions), window, cx); } } @@ -1888,42 +1891,6 @@ impl Editor { self.refresh_inline_completion(false, true, window, cx); } - pub fn inline_completions_enabled(&self, cx: &App) -> bool { - let cursor = self.selections.newest_anchor().head(); - if let Some((buffer, buffer_position)) = - self.buffer.read(cx).text_anchor_for_position(cursor, cx) - { - self.should_show_inline_completions(&buffer, buffer_position, cx) - } else { - false - } - } - - fn should_show_inline_completions( - &self, - buffer: &Entity, - buffer_position: language::Anchor, - cx: &App, - ) -> bool { - if !self.snippet_stack.is_empty() { - return false; - } - - if self.inline_completions_disabled_in_scope(buffer, buffer_position, cx) { - return false; - } - - if let Some(provider) = self.inline_completion_provider() { - if let Some(show_inline_completions) = self.show_inline_completions_override { - show_inline_completions - } else { - self.mode == EditorMode::Full && provider.is_enabled(buffer, buffer_position, cx) - } - } else { - false - } - } - fn inline_completions_disabled_in_scope( &self, buffer: &Entity, @@ -4650,9 +4617,18 @@ impl Editor { let (buffer, cursor_buffer_position) = self.buffer.read(cx).text_anchor_for_position(cursor, cx)?; + if !self.inline_completions_enabled_in_buffer(&buffer, cursor_buffer_position, cx) { + 
self.discard_inline_completion(false, cx); + return None; + } + if !user_requested - && (!self.enable_inline_completions - || !self.should_show_inline_completions(&buffer, cursor_buffer_position, cx) + && (!self.show_inline_completions + || !self.should_show_inline_completions_in_buffer( + &buffer, + cursor_buffer_position, + cx, + ) || !self.is_focused(window) || buffer.read(cx).is_empty()) { @@ -4665,6 +4641,77 @@ impl Editor { Some(()) } + pub fn should_show_inline_completions(&self, cx: &App) -> bool { + let cursor = self.selections.newest_anchor().head(); + if let Some((buffer, cursor_position)) = + self.buffer.read(cx).text_anchor_for_position(cursor, cx) + { + self.should_show_inline_completions_in_buffer(&buffer, cursor_position, cx) + } else { + false + } + } + + fn should_show_inline_completions_in_buffer( + &self, + buffer: &Entity, + buffer_position: language::Anchor, + cx: &App, + ) -> bool { + if !self.snippet_stack.is_empty() { + return false; + } + + if self.inline_completions_disabled_in_scope(buffer, buffer_position, cx) { + return false; + } + + if let Some(show_inline_completions) = self.show_inline_completions_override { + show_inline_completions + } else { + let buffer = buffer.read(cx); + self.mode == EditorMode::Full + && language_settings( + buffer.language_at(buffer_position).map(|l| l.name()), + buffer.file(), + cx, + ) + .show_inline_completions + } + } + + pub fn inline_completions_enabled(&self, cx: &App) -> bool { + let cursor = self.selections.newest_anchor().head(); + if let Some((buffer, cursor_position)) = + self.buffer.read(cx).text_anchor_for_position(cursor, cx) + { + self.inline_completions_enabled_in_buffer(&buffer, cursor_position, cx) + } else { + false + } + } + + fn inline_completions_enabled_in_buffer( + &self, + buffer: &Entity, + buffer_position: language::Anchor, + cx: &App, + ) -> bool { + maybe!({ + let provider = self.inline_completion_provider()?; + if !provider.is_enabled(&buffer, buffer_position, cx) { + return Some(false); + } + let buffer = buffer.read(cx); + let Some(file) = buffer.file() else { + return Some(true); + }; + let settings = all_language_settings(Some(file), cx); + Some(settings.inline_completions_enabled_for_path(file.path())) + }) + .unwrap_or(false) + } + fn cycle_inline_completion( &mut self, direction: Direction, @@ -4675,8 +4722,8 @@ impl Editor { let cursor = self.selections.newest_anchor().head(); let (buffer, cursor_buffer_position) = self.buffer.read(cx).text_anchor_for_position(cursor, cx)?; - if !self.enable_inline_completions - || !self.should_show_inline_completions(&buffer, cursor_buffer_position, cx) + if !self.show_inline_completions + || !self.should_show_inline_completions_in_buffer(&buffer, cursor_buffer_position, cx) { return None; } @@ -5014,7 +5061,7 @@ impl Editor { || (!self.completion_tasks.is_empty() && !self.has_active_inline_completion())); if completions_menu_has_precedence || !offset_selection.is_empty() - || !self.enable_inline_completions + || !self.show_inline_completions || self .active_inline_completion .as_ref() diff --git a/crates/inline_completion_button/Cargo.toml b/crates/inline_completion_button/Cargo.toml index e8c51efcaf3405762c5591c712f90928f7525cf7..973e7d327301d17a1830b816e9623ef0eab89083 100644 --- a/crates/inline_completion_button/Cargo.toml +++ b/crates/inline_completion_button/Cargo.toml @@ -14,6 +14,7 @@ doctest = false [dependencies] anyhow.workspace = true +client.workspace = true copilot.workspace = true editor.workspace = true feature_flags.workspace = true @@ 
-22,14 +23,14 @@ gpui.workspace = true inline_completion.workspace = true language.workspace = true paths.workspace = true +regex.workspace = true settings.workspace = true supermaven.workspace = true +telemetry.workspace = true ui.workspace = true workspace.workspace = true zed_actions.workspace = true zeta.workspace = true -client.workspace = true -telemetry.workspace = true [dev-dependencies] copilot = { workspace = true, features = ["test-support"] } diff --git a/crates/inline_completion_button/src/inline_completion_button.rs b/crates/inline_completion_button/src/inline_completion_button.rs index a2b72ed1c2ebc9bea728cd9d2dd8d998c793c512..447141864688bf5757737e450c778fa508181ee8 100644 --- a/crates/inline_completion_button/src/inline_completion_button.rs +++ b/crates/inline_completion_button/src/inline_completion_button.rs @@ -17,8 +17,12 @@ use language::{ }, File, Language, }; +use regex::Regex; use settings::{update_settings_file, Settings, SettingsStore}; -use std::{path::Path, sync::Arc, time::Duration}; +use std::{ + sync::{Arc, LazyLock}, + time::Duration, +}; use supermaven::{AccountStatus, Supermaven}; use ui::{ prelude::*, Clickable, ContextMenu, ContextMenuEntry, IconButton, IconButtonShape, PopoverMenu, @@ -71,9 +75,7 @@ impl Render for InlineCompletionButton { }; let status = copilot.read(cx).status(); - let enabled = self.editor_enabled.unwrap_or_else(|| { - all_language_settings.inline_completions_enabled(None, None, cx) - }); + let enabled = self.editor_enabled.unwrap_or(false); let icon = match status { Status::Error(_) => IconName::CopilotError, @@ -228,25 +230,35 @@ impl Render for InlineCompletionButton { return div(); } - fn icon_button() -> IconButton { - IconButton::new("zed-predict-pending-button", IconName::ZedPredict) - .shape(IconButtonShape::Square) - } + let enabled = self.editor_enabled.unwrap_or(false); + + let zeta_icon = if enabled { + IconName::ZedPredict + } else { + IconName::ZedPredictDisabled + }; let current_user_terms_accepted = self.user_store.read(cx).current_user_has_accepted_terms(); - if !current_user_terms_accepted.unwrap_or(false) { - let signed_in = current_user_terms_accepted.is_some(); - let tooltip_meta = if signed_in { - "Read Terms of Service" - } else { - "Sign in to use" - }; + let icon_button = || { + let base = IconButton::new("zed-predict-pending-button", zeta_icon) + .shape(IconButtonShape::Square); + + match ( + current_user_terms_accepted, + self.popover_menu_handle.is_deployed(), + enabled, + ) { + (Some(false) | None, _, _) => { + let signed_in = current_user_terms_accepted.is_some(); + let tooltip_meta = if signed_in { + "Read Terms of Service" + } else { + "Sign in to use" + }; - return div().child( - icon_button() - .tooltip(move |window, cx| { + base.tooltip(move |window, cx| { Tooltip::with_meta( "Edit Predictions", None, @@ -255,27 +267,37 @@ impl Render for InlineCompletionButton { cx, ) }) - .on_click(cx.listener(move |_, _, window, cx| { - telemetry::event!( - "Pending ToS Clicked", - source = "Edit Prediction Status Button" - ); - window.dispatch_action( - zed_actions::OpenZedPredictOnboarding.boxed_clone(), - cx, - ); - })), - ); - } + .on_click(cx.listener( + move |_, _, window, cx| { + telemetry::event!( + "Pending ToS Clicked", + source = "Edit Prediction Status Button" + ); + window.dispatch_action( + zed_actions::OpenZedPredictOnboarding.boxed_clone(), + cx, + ); + }, + )) + } + (Some(true), true, _) => base, + (Some(true), false, true) => base.tooltip(|window, cx| { + Tooltip::for_action("Edit Prediction", 
&ToggleMenu, window, cx) + }), + (Some(true), false, false) => base.tooltip(|window, cx| { + Tooltip::with_meta( + "Edit Prediction", + Some(&ToggleMenu), + "Disabled For This File", + window, + cx, + ) + }), + } + }; let this = cx.entity().clone(); - if !self.popover_menu_handle.is_deployed() { - icon_button().tooltip(|window, cx| { - Tooltip::for_action("Edit Prediction", &ToggleMenu, window, cx) - }); - } - let mut popover_menu = PopoverMenu::new("zeta") .menu(move |window, cx| { Some(this.update(cx, |this, cx| this.build_zeta_context_menu(window, cx))) @@ -362,15 +384,10 @@ impl InlineCompletionButton { }) } - // Predict Edits at Cursor – alt-tab - // Automatically Predict: - // ✓ PATH - // ✓ Rust - // ✓ All Files pub fn build_language_settings_menu(&self, mut menu: ContextMenu, cx: &mut App) -> ContextMenu { let fs = self.fs.clone(); - menu = menu.header("Predict Edits For:"); + menu = menu.header("Show Predict Edits For"); if let Some(language) = self.language.clone() { let fs = fs.clone(); @@ -381,66 +398,39 @@ impl InlineCompletionButton { menu = menu.toggleable_entry( language.name(), language_enabled, - IconPosition::Start, + IconPosition::End, None, move |_, cx| { - toggle_inline_completions_for_language(language.clone(), fs.clone(), cx) + toggle_show_inline_completions_for_language(language.clone(), fs.clone(), cx) }, ); } let settings = AllLanguageSettings::get_global(cx); - if let Some(file) = &self.file { - let path = file.path().clone(); - let path_enabled = settings.inline_completions_enabled_for_path(&path); - - menu = menu.toggleable_entry( - "This File", - path_enabled, - IconPosition::Start, - None, - move |window, cx| { - if let Some(workspace) = window.root().flatten() { - let workspace = workspace.downgrade(); - window - .spawn(cx, |cx| { - configure_disabled_globs( - workspace, - path_enabled.then_some(path.clone()), - cx, - ) - }) - .detach_and_log_err(cx); - } - }, - ); - } - - let globally_enabled = settings.inline_completions_enabled(None, None, cx); + let globally_enabled = settings.show_inline_completions(None, cx); menu = menu.toggleable_entry( "All Files", globally_enabled, - IconPosition::Start, + IconPosition::End, None, move |_, cx| toggle_inline_completions_globally(fs.clone(), cx), ); + menu = menu.separator().header("Privacy Settings"); if let Some(provider) = &self.inline_completion_provider { let data_collection = provider.data_collection_state(cx); - if data_collection.is_supported() { let provider = provider.clone(); let enabled = data_collection.is_enabled(); - menu = menu - .separator() - .header("Help Improve The Model") - .header("Valid Only For OSS Projects"); menu = menu.item( // TODO: We want to add something later that communicates whether // the current project is open-source. ContextMenuEntry::new("Share Training Data") - .toggleable(IconPosition::Start, enabled) + .toggleable(IconPosition::End, data_collection.is_enabled()) + .documentation_aside(|_| { + Label::new("Zed automatically detects if your project is open-source. This setting is only applicable in such cases.").into_any_element() + }) .handler(move |_, cx| { provider.toggle_data_collection(cx); @@ -455,11 +445,42 @@ impl InlineCompletionButton { source = "Edit Prediction Status Menu" ); } - }), - ); + }) + ) } } + menu = menu.item( + ContextMenuEntry::new("Exclude Files") + .documentation_aside(|_| { + Label::new("This item takes you to the settings where you can specify files that will never be captured by any edit prediction model. 
You can list both specific file extensions and individual file names.").into_any_element() + }) + .handler(move |window, cx| { + if let Some(workspace) = window.root().flatten() { + let workspace = workspace.downgrade(); + window + .spawn(cx, |cx| { + open_disabled_globs_setting_in_editor( + workspace, + cx, + ) + }) + .detach_and_log_err(cx); + } + }), + ); + + if self.file.as_ref().map_or(false, |file| { + !all_language_settings(Some(file), cx).inline_completions_enabled_for_path(file.path()) + }) { + menu = menu.item( + ContextMenuEntry::new("This file is excluded.") + .disabled(true) + .icon(IconName::ZedPredictDisabled) + .icon_size(IconSize::Small), + ); + } + if let Some(editor_focus_handle) = self.editor_focus_handle.clone() { menu = menu .separator() @@ -546,12 +567,11 @@ impl InlineCompletionButton { self.editor_enabled = { let file = file.as_ref(); Some( - file.map(|file| !file.is_private()).unwrap_or(true) - && all_language_settings(file, cx).inline_completions_enabled( - language, - file.map(|file| file.path().as_ref()), - cx, - ), + file.map(|file| { + all_language_settings(Some(file), cx) + .inline_completions_enabled_for_path(file.path()) + }) + .unwrap_or(true), ) }; self.inline_completion_provider = editor.inline_completion_provider(); @@ -616,9 +636,8 @@ impl SupermavenButtonStatus { } } -async fn configure_disabled_globs( +async fn open_disabled_globs_setting_in_editor( workspace: WeakEntity, - path_to_disable: Option>, mut cx: AsyncWindowContext, ) -> Result<()> { let settings_editor = workspace @@ -637,34 +656,34 @@ async fn configure_disabled_globs( let text = item.buffer().read(cx).snapshot(cx).text(); let settings = cx.global::(); - let edits = settings.edits_for_update::(&text, |file| { - let copilot = file.inline_completions.get_or_insert_with(Default::default); - let globs = copilot.disabled_globs.get_or_insert_with(|| { - settings - .get::(None) - .inline_completions - .disabled_globs - .iter() - .map(|glob| glob.glob().to_string()) - .collect() - }); - if let Some(path_to_disable) = &path_to_disable { - globs.push(path_to_disable.to_string_lossy().into_owned()); - } else { - globs.clear(); - } + // Ensure that we always have "inline_completions { "disabled_globs": [] }" + let edits = settings.edits_for_update::(&text, |file| { + file.inline_completions + .get_or_insert_with(Default::default) + .disabled_globs + .get_or_insert_with(Vec::new); }); if !edits.is_empty() { + item.edit(edits.iter().cloned(), cx); + } + + let text = item.buffer().read(cx).snapshot(cx).text(); + + static DISABLED_GLOBS_REGEX: LazyLock = LazyLock::new(|| { + Regex::new(r#""disabled_globs":\s*\[\s*(?P(?:.|\n)*?)\s*\]"#).unwrap() + }); + // Only capture [...] + let range = DISABLED_GLOBS_REGEX.captures(&text).and_then(|captures| { + captures + .name("content") + .map(|inner_match| inner_match.start()..inner_match.end()) + }); + if let Some(range) = range { item.change_selections(Some(Autoscroll::newest()), window, cx, |selections| { - selections.select_ranges(edits.iter().map(|e| e.0.clone())); + selections.select_ranges(vec![range]); }); - - // When *enabling* a path, don't actually perform an edit, just select the range. 
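The `open_disabled_globs_setting_in_editor` flow above no longer toggles individual paths; it only ensures the `disabled_globs` key exists and then uses a named capture group to find and select the inside of the array so the user can edit it by hand. A self-contained sketch of that capture-to-range step, relying on the `regex` crate this patch adds as a dependency (the sample settings text is invented for illustration; only the pattern itself comes from the patch):

```rust
use regex::Regex;
use std::ops::Range;

/// Returns the byte range of everything between the brackets of
/// `"disabled_globs": [ ... ]`, using the same pattern as the patch.
fn disabled_globs_range(settings_text: &str) -> Option<Range<usize>> {
    let re = Regex::new(r#""disabled_globs":\s*\[\s*(?P<content>(?:.|\n)*?)\s*\]"#).unwrap();
    re.captures(settings_text)
        .and_then(|captures| captures.name("content"))
        .map(|inner| inner.start()..inner.end())
}

fn main() {
    // Invented sample text; real settings files will differ.
    let text = r#"{ "inline_completions": { "disabled_globs": ["**/.env", "secrets/**"] } }"#;
    let range = disabled_globs_range(text).expect("settings should contain disabled_globs");
    // The captured range covers only the array contents, ready to be selected.
    assert_eq!(&text[range], r#""**/.env", "secrets/**""#);
}
```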
- if path_to_disable.is_some() { - item.edit(edits.iter().cloned(), cx); - } } })?; @@ -672,8 +691,7 @@ async fn configure_disabled_globs( } fn toggle_inline_completions_globally(fs: Arc, cx: &mut App) { - let show_inline_completions = - all_language_settings(None, cx).inline_completions_enabled(None, None, cx); + let show_inline_completions = all_language_settings(None, cx).show_inline_completions(None, cx); update_settings_file::(fs, cx, move |file, _| { file.defaults.show_inline_completions = Some(!show_inline_completions) }); @@ -687,9 +705,13 @@ fn set_completion_provider(fs: Arc, cx: &mut App, provider: InlineComple }); } -fn toggle_inline_completions_for_language(language: Arc, fs: Arc, cx: &mut App) { +fn toggle_show_inline_completions_for_language( + language: Arc, + fs: Arc, + cx: &mut App, +) { let show_inline_completions = - all_language_settings(None, cx).inline_completions_enabled(Some(&language), None, cx); + all_language_settings(None, cx).show_inline_completions(Some(&language), cx); update_settings_file::(fs, cx, move |file, _| { file.languages .entry(language.name()) diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 55d284fedb2e15af20c071c0a2768dc6981e858d..ac57e566f4c36aabf505101843ffa35e4342c304 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -886,18 +886,7 @@ impl AllLanguageSettings { } /// Returns whether edit predictions are enabled for the given language and path. - pub fn inline_completions_enabled( - &self, - language: Option<&Arc>, - path: Option<&Path>, - cx: &App, - ) -> bool { - if let Some(path) = path { - if !self.inline_completions_enabled_for_path(path) { - return false; - } - } - + pub fn show_inline_completions(&self, language: Option<&Arc>, cx: &App) -> bool { self.language(None, language.map(|l| l.name()).as_ref(), cx) .show_inline_completions } diff --git a/crates/supermaven/src/supermaven_completion_provider.rs b/crates/supermaven/src/supermaven_completion_provider.rs index 01e52b2f8420fea8ac956f28e7879ad0572cbadf..f80551a3f39d3f3a417bded1f4affa1bce46253b 100644 --- a/crates/supermaven/src/supermaven_completion_provider.rs +++ b/crates/supermaven/src/supermaven_completion_provider.rs @@ -3,7 +3,7 @@ use anyhow::Result; use futures::StreamExt as _; use gpui::{App, Context, Entity, EntityId, Task}; use inline_completion::{Direction, InlineCompletion, InlineCompletionProvider}; -use language::{language_settings::all_language_settings, Anchor, Buffer, BufferSnapshot}; +use language::{Anchor, Buffer, BufferSnapshot}; use std::{ ops::{AddAssign, Range}, path::Path, @@ -113,16 +113,8 @@ impl InlineCompletionProvider for SupermavenCompletionProvider { false } - fn is_enabled(&self, buffer: &Entity, cursor_position: Anchor, cx: &App) -> bool { - if !self.supermaven.read(cx).is_enabled() { - return false; - } - - let buffer = buffer.read(cx); - let file = buffer.file(); - let language = buffer.language_at(cursor_position); - let settings = all_language_settings(file, cx); - settings.inline_completions_enabled(language.as_ref(), file.map(|f| f.path().as_ref()), cx) + fn is_enabled(&self, _buffer: &Entity, _cursor_position: Anchor, cx: &App) -> bool { + self.supermaven.read(cx).is_enabled() } fn is_refreshing(&self) -> bool { diff --git a/crates/ui/src/components/context_menu.rs b/crates/ui/src/components/context_menu.rs index 765c216ccd0c797e6f05b2f30c0130edf9e41a17..db9632d4ff31e36195c5216f0820d40c512ae47d 100644 --- 
a/crates/ui/src/components/context_menu.rs +++ b/crates/ui/src/components/context_menu.rs @@ -47,6 +47,7 @@ pub struct ContextMenuEntry { handler: Rc, &mut Window, &mut App)>, action: Option>, disabled: bool, + documentation_aside: Option AnyElement>>, } impl ContextMenuEntry { @@ -61,6 +62,7 @@ impl ContextMenuEntry { handler: Rc::new(|_, _, _| {}), action: None, disabled: false, + documentation_aside: None, } } @@ -108,6 +110,14 @@ impl ContextMenuEntry { self.disabled = disabled; self } + + pub fn documentation_aside( + mut self, + element: impl Fn(&mut App) -> AnyElement + 'static, + ) -> Self { + self.documentation_aside = Some(Rc::new(element)); + self + } } impl From for ContextMenuItem { @@ -125,6 +135,7 @@ pub struct ContextMenu { clicked: bool, _on_blur_subscription: Subscription, keep_open_on_confirm: bool, + documentation_aside: Option<(usize, Rc AnyElement>)>, } impl Focusable for ContextMenu { @@ -161,6 +172,7 @@ impl ContextMenu { clicked: false, _on_blur_subscription, keep_open_on_confirm: false, + documentation_aside: None, }, window, cx, @@ -209,6 +221,7 @@ impl ContextMenu { icon_color: None, action, disabled: false, + documentation_aside: None, })); self } @@ -231,6 +244,7 @@ impl ContextMenu { icon_color: None, action, disabled: false, + documentation_aside: None, })); self } @@ -281,6 +295,7 @@ impl ContextMenu { icon_size: IconSize::Small, icon_color: None, disabled: false, + documentation_aside: None, })); self } @@ -294,7 +309,6 @@ impl ContextMenu { toggle: None, label: label.into(), action: Some(action.boxed_clone()), - handler: Rc::new(move |context, window, cx| { if let Some(context) = &context { window.focus(context); @@ -306,6 +320,7 @@ impl ContextMenu { icon_position: IconPosition::End, icon_color: None, disabled: true, + documentation_aside: None, })); self } @@ -314,7 +329,6 @@ impl ContextMenu { self.items.push(ContextMenuItem::Entry(ContextMenuEntry { toggle: None, label: label.into(), - action: Some(action.boxed_clone()), handler: Rc::new(move |_, window, cx| window.dispatch_action(action.boxed_clone(), cx)), icon: Some(IconName::ArrowUpRight), @@ -322,6 +336,7 @@ impl ContextMenu { icon_position: IconPosition::End, icon_color: None, disabled: false, + documentation_aside: None, })); self } @@ -356,15 +371,16 @@ impl ContextMenu { } fn select_first(&mut self, _: &SelectFirst, _: &mut Window, cx: &mut Context) { - self.selected_index = self.items.iter().position(|item| item.is_selectable()); + if let Some(ix) = self.items.iter().position(|item| item.is_selectable()) { + self.select_index(ix); + } cx.notify(); } pub fn select_last(&mut self) -> Option { for (ix, item) in self.items.iter().enumerate().rev() { if item.is_selectable() { - self.selected_index = Some(ix); - return Some(ix); + return self.select_index(ix); } } None @@ -384,7 +400,7 @@ impl ContextMenu { } else { for (ix, item) in self.items.iter().enumerate().skip(next_index) { if item.is_selectable() { - self.selected_index = Some(ix); + self.select_index(ix); cx.notify(); break; } @@ -402,7 +418,7 @@ impl ContextMenu { } else { for (ix, item) in self.items.iter().enumerate().take(ix).rev() { if item.is_selectable() { - self.selected_index = Some(ix); + self.select_index(ix); cx.notify(); break; } @@ -413,6 +429,20 @@ impl ContextMenu { } } + fn select_index(&mut self, ix: usize) -> Option { + self.documentation_aside = None; + let item = self.items.get(ix)?; + if item.is_selectable() { + self.selected_index = Some(ix); + if let ContextMenuItem::Entry(entry) = item { + if let Some(callback) = 
&entry.documentation_aside { + self.documentation_aside = Some((ix, callback.clone())); + } + } + } + Some(ix) + } + pub fn on_action_dispatch( &mut self, dispatched: &dyn Action, @@ -436,7 +466,7 @@ impl ContextMenu { false } }) { - self.selected_index = Some(ix); + self.select_index(ix); self.delayed = true; cx.notify(); let action = dispatched.boxed_clone(); @@ -479,198 +509,275 @@ impl Render for ContextMenu { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let ui_font_size = ThemeSettings::get_global(cx).ui_font_size; - WithRemSize::new(ui_font_size) - .occlude() - .elevation_2(cx) - .flex() - .flex_row() + let aside = self + .documentation_aside + .as_ref() + .map(|(_, callback)| callback.clone()); + + h_flex() + .w_full() + .items_start() + .gap_1() + .when_some(aside, |this, aside| { + this.child( + WithRemSize::new(ui_font_size) + .occlude() + .elevation_2(cx) + .p_2() + .max_w_80() + .child(aside(cx)), + ) + }) .child( - v_flex() - .id("context-menu") - .min_w(px(200.)) - .max_h(vh(0.75, window)) - .flex_1() - .overflow_y_scroll() - .track_focus(&self.focus_handle(cx)) - .on_mouse_down_out( - cx.listener(|this, _, window, cx| this.cancel(&menu::Cancel, window, cx)), - ) - .key_context("menu") - .on_action(cx.listener(ContextMenu::select_first)) - .on_action(cx.listener(ContextMenu::handle_select_last)) - .on_action(cx.listener(ContextMenu::select_next)) - .on_action(cx.listener(ContextMenu::select_prev)) - .on_action(cx.listener(ContextMenu::confirm)) - .on_action(cx.listener(ContextMenu::cancel)) - .when(!self.delayed, |mut el| { - for item in self.items.iter() { - if let ContextMenuItem::Entry(ContextMenuEntry { - action: Some(action), - disabled: false, - .. - }) = item - { - el = el.on_boxed_action( - &**action, - cx.listener(ContextMenu::on_action_dispatch), - ); - } - } - el - }) - .child(List::new().children(self.items.iter_mut().enumerate().map( - |(ix, item)| { - match item { - ContextMenuItem::Separator => ListSeparator.into_any_element(), - ContextMenuItem::Header(header) => { - ListSubHeader::new(header.clone()) - .inset(true) - .into_any_element() + WithRemSize::new(ui_font_size) + .occlude() + .elevation_2(cx) + .flex() + .flex_row() + .child( + v_flex() + .id("context-menu") + .min_w(px(200.)) + .max_h(vh(0.75, window)) + .flex_1() + .overflow_y_scroll() + .track_focus(&self.focus_handle(cx)) + .on_mouse_down_out(cx.listener(|this, _, window, cx| { + this.cancel(&menu::Cancel, window, cx) + })) + .key_context("menu") + .on_action(cx.listener(ContextMenu::select_first)) + .on_action(cx.listener(ContextMenu::handle_select_last)) + .on_action(cx.listener(ContextMenu::select_next)) + .on_action(cx.listener(ContextMenu::select_prev)) + .on_action(cx.listener(ContextMenu::confirm)) + .on_action(cx.listener(ContextMenu::cancel)) + .when(!self.delayed, |mut el| { + for item in self.items.iter() { + if let ContextMenuItem::Entry(ContextMenuEntry { + action: Some(action), + disabled: false, + .. 
+ }) = item + { + el = el.on_boxed_action( + &**action, + cx.listener(ContextMenu::on_action_dispatch), + ); + } } - ContextMenuItem::Label(label) => ListItem::new(ix) - .inset(true) - .disabled(true) - .child(Label::new(label.clone())) - .into_any_element(), - ContextMenuItem::Entry(ContextMenuEntry { - toggle, - label, - handler, - icon, - icon_position, - icon_size, - icon_color, - action, - disabled, - }) => { - let handler = handler.clone(); - let menu = cx.entity().downgrade(); - let icon_color = if *disabled { - Color::Muted - } else { - icon_color.unwrap_or(Color::Default) - }; - let label_color = if *disabled { - Color::Muted - } else { - Color::Default - }; - let label_element = if let Some(icon_name) = icon { - h_flex() - .gap_1p5() - .when(*icon_position == IconPosition::Start, |flex| { - flex.child( - Icon::new(*icon_name) - .size(*icon_size) - .color(icon_color), - ) - }) - .child(Label::new(label.clone()).color(label_color)) - .when(*icon_position == IconPosition::End, |flex| { - flex.child( - Icon::new(*icon_name) - .size(*icon_size) - .color(icon_color), - ) - }) - .into_any_element() - } else { - Label::new(label.clone()) - .color(label_color) - .into_any_element() - }; - - ListItem::new(ix) - .inset(true) - .disabled(*disabled) - .toggle_state(Some(ix) == self.selected_index) - .when_some(*toggle, |list_item, (position, toggled)| { - let contents = if toggled { - v_flex().flex_none().child( - Icon::new(IconName::Check).color(Color::Accent), - ) + el + }) + .child(List::new().children(self.items.iter_mut().enumerate().map( + |(ix, item)| { + match item { + ContextMenuItem::Separator => { + ListSeparator.into_any_element() + } + ContextMenuItem::Header(header) => { + ListSubHeader::new(header.clone()) + .inset(true) + .into_any_element() + } + ContextMenuItem::Label(label) => ListItem::new(ix) + .inset(true) + .disabled(true) + .child(Label::new(label.clone())) + .into_any_element(), + ContextMenuItem::Entry(ContextMenuEntry { + toggle, + label, + handler, + icon, + icon_position, + icon_size, + icon_color, + action, + disabled, + documentation_aside, + }) => { + let handler = handler.clone(); + let menu = cx.entity().downgrade(); + let icon_color = if *disabled { + Color::Muted + } else { + icon_color.unwrap_or(Color::Default) + }; + let label_color = if *disabled { + Color::Muted } else { - v_flex() - .flex_none() - .size(IconSize::default().rems()) + Color::Default }; - match position { - IconPosition::Start => { - list_item.start_slot(contents) - } - IconPosition::End => list_item.end_slot(contents), - } - }) - .child( - h_flex() - .w_full() - .justify_between() - .child(label_element) - .debug_selector(|| format!("MENU_ITEM-{}", label)) - .children(action.as_ref().and_then(|action| { - self.action_context - .as_ref() - .map(|focus| { - KeyBinding::for_action_in( - &**action, focus, window, + let label_element = if let Some(icon_name) = icon { + h_flex() + .gap_1p5() + .when( + *icon_position == IconPosition::Start, + |flex| { + flex.child( + Icon::new(*icon_name) + .size(*icon_size) + .color(icon_color), ) - }) - .unwrap_or_else(|| { - KeyBinding::for_action( - &**action, window, + }, + ) + .child( + Label::new(label.clone()) + .color(label_color), + ) + .when( + *icon_position == IconPosition::End, + |flex| { + flex.child( + Icon::new(*icon_name) + .size(*icon_size) + .color(icon_color), ) - }) - .map(|binding| div().ml_4().child(binding)) - })), - ) - .on_click({ - let context = self.action_context.clone(); - move |_, window, cx| { - handler(context.as_ref(), 
window, cx); - menu.update(cx, |menu, cx| { - menu.clicked = true; - cx.emit(DismissEvent); + }, + ) + .into_any_element() + } else { + Label::new(label.clone()) + .color(label_color) + .into_any_element() + }; + let documentation_aside_callback = + documentation_aside.clone(); + div() + .id(("context-menu-child", ix)) + .when_some( + documentation_aside_callback, + |this, documentation_aside_callback| { + this.occlude().on_hover(cx.listener( + move |menu, hovered, _, cx| { + if *hovered { + menu.documentation_aside = Some((ix, documentation_aside_callback.clone())); + cx.notify(); + } else if matches!(menu.documentation_aside, Some((id, _)) if id == ix) { + menu.documentation_aside = None; + cx.notify(); + } + }, + )) + }, + ) + .child( + ListItem::new(ix) + .inset(true) + .disabled(*disabled) + .toggle_state( + Some(ix) == self.selected_index, + ) + .when_some( + *toggle, + |list_item, (position, toggled)| { + let contents = if toggled { + v_flex().flex_none().child( + Icon::new(IconName::Check) + .color(Color::Accent), + ) + } else { + v_flex().flex_none().size( + IconSize::default().rems(), + ) + }; + match position { + IconPosition::Start => { + list_item + .start_slot(contents) + } + IconPosition::End => { + list_item.end_slot(contents) + } + } + }, + ) + .child( + h_flex() + .w_full() + .justify_between() + .child(label_element) + .debug_selector(|| { + format!("MENU_ITEM-{}", label) + }) + .children( + action.as_ref().and_then( + |action| { + self.action_context + .as_ref() + .map(|focus| { + KeyBinding::for_action_in( + &**action, focus, + window, + ) + }) + .unwrap_or_else(|| { + KeyBinding::for_action( + &**action, window, + ) + }) + .map(|binding| { + div().ml_4().child(binding) + }) + }, + ), + ), + ) + .on_click({ + let context = + self.action_context.clone(); + move |_, window, cx| { + handler( + context.as_ref(), + window, + cx, + ); + menu.update(cx, |menu, cx| { + menu.clicked = true; + cx.emit(DismissEvent); + }) + .ok(); + } + }), + ) + .into_any_element() + } + ContextMenuItem::CustomEntry { + entry_render, + handler, + selectable, + } => { + let handler = handler.clone(); + let menu = cx.entity().downgrade(); + let selectable = *selectable; + ListItem::new(ix) + .inset(true) + .toggle_state(if selectable { + Some(ix) == self.selected_index + } else { + false }) - .ok(); - } - }) - .into_any_element() - } - ContextMenuItem::CustomEntry { - entry_render, - handler, - selectable, - } => { - let handler = handler.clone(); - let menu = cx.entity().downgrade(); - let selectable = *selectable; - ListItem::new(ix) - .inset(true) - .toggle_state(if selectable { - Some(ix) == self.selected_index - } else { - false - }) - .selectable(selectable) - .when(selectable, |item| { - item.on_click({ - let context = self.action_context.clone(); - move |_, window, cx| { - handler(context.as_ref(), window, cx); - menu.update(cx, |menu, cx| { - menu.clicked = true; - cx.emit(DismissEvent); + .selectable(selectable) + .when(selectable, |item| { + item.on_click({ + let context = self.action_context.clone(); + move |_, window, cx| { + handler(context.as_ref(), window, cx); + menu.update(cx, |menu, cx| { + menu.clicked = true; + cx.emit(DismissEvent); + }) + .ok(); + } }) - .ok(); - } - }) - }) - .child(entry_render(window, cx)) - .into_any_element() - } - } - }, - ))), + }) + .child(entry_render(window, cx)) + .into_any_element() + } + } + }, + ))), + ), ) } } diff --git a/crates/ui/src/components/icon.rs b/crates/ui/src/components/icon.rs index 
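The context-menu changes above keep the active aside in `documentation_aside: Option<(usize, ...)>` and update it from two paths: keyboard selection via `select_index`, and hover, where leaving an item only clears the aside if that same item set it. A stripped-down sketch of that bookkeeping, with a plain `String` standing in for the real `Rc<dyn Fn(&mut App) -> AnyElement>` callback:

```rust
// Minimal model of the hover-driven aside state used above.
#[derive(Default)]
struct AsideState {
    documentation_aside: Option<(usize, String)>,
}

impl AsideState {
    fn on_hover(&mut self, ix: usize, hovered: bool, aside: Option<String>) {
        if hovered {
            if let Some(aside) = aside {
                self.documentation_aside = Some((ix, aside));
            }
        } else if matches!(self.documentation_aside, Some((id, _)) if id == ix) {
            // Only clear the aside if this item is the one that set it.
            self.documentation_aside = None;
        }
    }
}

fn main() {
    let mut state = AsideState::default();
    state.on_hover(2, true, Some("Explains entry 2".into()));
    assert!(state.documentation_aside.is_some());
    // Un-hovering a different item leaves the aside for entry 2 in place.
    state.on_hover(5, false, None);
    assert!(state.documentation_aside.is_some());
    state.on_hover(2, false, None);
    assert!(state.documentation_aside.is_none());
}
```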
c1aea34371067388d474e42036485e99f99eba45..a3e2c1897af3ebccab530122bc649e1cbf08839b 100644 --- a/crates/ui/src/components/icon.rs +++ b/crates/ui/src/components/icon.rs @@ -323,6 +323,7 @@ pub enum IconName { ZedAssistant2, ZedAssistantFilled, ZedPredict, + ZedPredictDisabled, ZedXCopilot, } diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index 1e47a08d2a458b2789c4d4253bde6858f0b4d91b..e331260faa22db5c87eb4c084cd2fd528cf2e58b 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -1289,7 +1289,7 @@ impl Vim { .map_or(false, |provider| provider.show_completions_in_normal_mode()), _ => false, }; - editor.set_inline_completions_enabled(enable_inline_completions, cx); + editor.set_show_inline_completions_enabled(enable_inline_completions, cx); }); cx.notify() } diff --git a/crates/zed/src/zed/quick_action_bar.rs b/crates/zed/src/zed/quick_action_bar.rs index bd498a126d2b60823284bcdb01273912a704c83d..96e839d523b506a8f0fe5d3fc7ca2498512c3f2e 100644 --- a/crates/zed/src/zed/quick_action_bar.rs +++ b/crates/zed/src/zed/quick_action_bar.rs @@ -16,8 +16,8 @@ use gpui::{ use search::{buffer_search, BufferSearchBar}; use settings::{Settings, SettingsStore}; use ui::{ - prelude::*, ButtonStyle, ContextMenu, IconButton, IconButtonShape, IconName, IconSize, - PopoverMenu, PopoverMenuHandle, Tooltip, + prelude::*, ButtonStyle, ContextMenu, ContextMenuEntry, IconButton, IconButtonShape, IconName, + IconSize, PopoverMenu, PopoverMenuHandle, Tooltip, }; use vim_mode_setting::VimModeSetting; use workspace::{ @@ -94,7 +94,8 @@ impl Render for QuickActionBar { git_blame_inline_enabled, show_git_blame_gutter, auto_signature_help_enabled, - inline_completions_enabled, + show_inline_completions, + inline_completion_enabled, ) = { let editor = editor.read(cx); let selection_menu_enabled = editor.selection_menu_enabled(cx); @@ -103,7 +104,8 @@ impl Render for QuickActionBar { let git_blame_inline_enabled = editor.git_blame_inline_enabled(); let show_git_blame_gutter = editor.show_git_blame_gutter(); let auto_signature_help_enabled = editor.auto_signature_help_enabled(cx); - let inline_completions_enabled = editor.inline_completions_enabled(cx); + let show_inline_completions = editor.should_show_inline_completions(cx); + let inline_completion_enabled = editor.inline_completions_enabled(cx); ( selection_menu_enabled, @@ -112,7 +114,8 @@ impl Render for QuickActionBar { git_blame_inline_enabled, show_git_blame_gutter, auto_signature_help_enabled, - inline_completions_enabled, + show_inline_completions, + inline_completion_enabled, ) }; @@ -294,12 +297,12 @@ impl Render for QuickActionBar { }, ); - menu = menu.toggleable_entry( - "Edit Predictions", - inline_completions_enabled, - IconPosition::Start, - Some(editor::actions::ToggleInlineCompletions.boxed_clone()), - { + let mut inline_completion_entry = ContextMenuEntry::new("Edit Predictions") + .toggleable(IconPosition::Start, inline_completion_enabled && show_inline_completions) + .disabled(!inline_completion_enabled) + .action(Some( + editor::actions::ToggleInlineCompletions.boxed_clone(), + )).handler({ let editor = editor.clone(); move |window, cx| { editor @@ -312,8 +315,14 @@ impl Render for QuickActionBar { }) .ok(); } - }, - ); + }); + if !inline_completion_enabled { + inline_completion_entry = inline_completion_entry.documentation_aside(|_| { + Label::new("You can't toggle edit predictions for this file as it is within the excluded files list.").into_any_element() + }); + } + + menu = menu.item(inline_completion_entry); menu = 
menu.separator(); diff --git a/crates/zeta/src/zeta.rs b/crates/zeta/src/zeta.rs index 6e68a957c9bd92cccb932ca3b69b97ff5a207d08..7ef46959008e14065cfe27a5302151630c2ca322 100644 --- a/crates/zeta/src/zeta.rs +++ b/crates/zeta/src/zeta.rs @@ -25,8 +25,7 @@ use gpui::{ }; use http_client::{HttpClient, Method}; use language::{ - language_settings::all_language_settings, Anchor, Buffer, BufferSnapshot, EditPreview, - OffsetRangeExt, Point, ToOffset, ToPoint, + Anchor, Buffer, BufferSnapshot, EditPreview, OffsetRangeExt, Point, ToOffset, ToPoint, }; use language_models::LlmApiToken; use postage::watch; @@ -1469,15 +1468,11 @@ impl inline_completion::InlineCompletionProvider for ZetaInlineCompletionProvide fn is_enabled( &self, - buffer: &Entity, - cursor_position: language::Anchor, - cx: &App, + _buffer: &Entity, + _cursor_position: language::Anchor, + _cx: &App, ) -> bool { - let buffer = buffer.read(cx); - let file = buffer.file(); - let language = buffer.language_at(cursor_position); - let settings = all_language_settings(file, cx); - settings.inline_completions_enabled(language.as_ref(), file.map(|f| f.path().as_ref()), cx) + true } fn needs_terms_acceptance(&self, cx: &App) -> bool { From aaf432fcd24a0472c8d7da2a5d11894a8ebf4ad9 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 5 Feb 2025 19:17:26 +0200 Subject: [PATCH 032/130] Revert recent anti-aliasing improvements (#24289) This reverts commit 31fa4144226fcbd140bb4f26d80001f0abd6facd. This reverts commit b9e0aae49fad996ddb0ce55225873e5c1d5abecd. `lyon` commit revert: ![image](https://github.com/user-attachments/assets/0243f61c-0713-416d-b8db-47372e04abaa) `MSAA` commit revert: ![image](https://github.com/user-attachments/assets/b1a4a9fe-0192-47ef-be6f-52e03c025724) cc @huacnlee , @\as-cii had decided to revert this PR due to a selection right corner rendering bug. 
Not sure what to propose for a fix from my side Release Notes: - N/A --- Cargo.lock | 76 +----- Cargo.toml | 6 +- crates/editor/src/element.rs | 48 ++-- crates/gpui/Cargo.toml | 2 - crates/gpui/examples/gradient.rs | 14 +- crates/gpui/examples/painting.rs | 141 ++++------ crates/gpui/src/gpui.rs | 2 - crates/gpui/src/path_builder.rs | 241 ------------------ crates/gpui/src/platform/blade/blade_atlas.rs | 49 +--- .../gpui/src/platform/blade/blade_renderer.rs | 60 ++--- crates/gpui/src/platform/mac/metal_atlas.rs | 22 +- .../gpui/src/platform/mac/metal_renderer.rs | 27 +- crates/gpui/src/scene.rs | 10 +- 13 files changed, 113 insertions(+), 585 deletions(-) delete mode 100644 crates/gpui/src/path_builder.rs diff --git a/Cargo.lock b/Cargo.lock index 42afceedd4b1b3d30c9ef16219f4ea531def35e1..dadaeeaba15cbc724281e886deae738c6bcbeba4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1821,7 +1821,7 @@ dependencies = [ [[package]] name = "blade-graphics" version = "0.6.0" -source = "git+https://github.com/kvark/blade?rev=b16f5c7bd873c7126f48c82c39e7ae64602ae74f#b16f5c7bd873c7126f48c82c39e7ae64602ae74f" +source = "git+https://github.com/kvark/blade?rev=091a8401033847bb9b6ace3fcf70448d069621c5#091a8401033847bb9b6ace3fcf70448d069621c5" dependencies = [ "ash", "ash-window", @@ -1853,7 +1853,7 @@ dependencies = [ [[package]] name = "blade-macros" version = "0.3.0" -source = "git+https://github.com/kvark/blade?rev=b16f5c7bd873c7126f48c82c39e7ae64602ae74f#b16f5c7bd873c7126f48c82c39e7ae64602ae74f" +source = "git+https://github.com/kvark/blade?rev=091a8401033847bb9b6ace3fcf70448d069621c5#091a8401033847bb9b6ace3fcf70448d069621c5" dependencies = [ "proc-macro2", "quote", @@ -1863,7 +1863,7 @@ dependencies = [ [[package]] name = "blade-util" version = "0.2.0" -source = "git+https://github.com/kvark/blade?rev=b16f5c7bd873c7126f48c82c39e7ae64602ae74f#b16f5c7bd873c7126f48c82c39e7ae64602ae74f" +source = "git+https://github.com/kvark/blade?rev=091a8401033847bb9b6ace3fcf70448d069621c5#091a8401033847bb9b6ace3fcf70448d069621c5" dependencies = [ "blade-graphics", "bytemuck", @@ -4668,12 +4668,6 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ce81f49ae8a0482e4c55ea62ebbd7e5a686af544c00b9d090bba3ff9be97b3d" -[[package]] -name = "float_next_after" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bf7cc16383c4b8d58b9905a8509f02926ce3058053c056376248d958c9df1e8" - [[package]] name = "flume" version = "0.11.1" @@ -5418,7 +5412,6 @@ dependencies = [ "inventory", "itertools 0.14.0", "log", - "lyon", "media", "metal", "naga", @@ -7435,69 +7428,6 @@ dependencies = [ "url", ] -[[package]] -name = "lyon" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e7f9cda98b5430809e63ca5197b06c7d191bf7e26dfc467d5a3f0290e2a74f" -dependencies = [ - "lyon_algorithms", - "lyon_extra", - "lyon_tessellation", -] - -[[package]] -name = "lyon_algorithms" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f13c9be19d257c7d37e70608ed858e8eab4b2afcea2e3c9a622e892acbf43c08" -dependencies = [ - "lyon_path", - "num-traits", -] - -[[package]] -name = "lyon_extra" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ca94c7bf1e2557c2798989c43416822c12fc5dcc5e17cc3307ef0e71894a955" -dependencies = [ - "lyon_path", - "thiserror 1.0.69", -] - -[[package]] -name = "lyon_geom" -version = "1.0.6" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "8af69edc087272df438b3ee436c4bb6d7c04aa8af665cfd398feae627dbd8570" -dependencies = [ - "arrayvec", - "euclid", - "num-traits", -] - -[[package]] -name = "lyon_path" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e0b8aec2f58586f6eef237985b9a9b7cb3a3aff4417c575075cf95bf925252e" -dependencies = [ - "lyon_geom", - "num-traits", -] - -[[package]] -name = "lyon_tessellation" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "579d42360a4b09846eff2feef28f538696c7d6c7439bfa65874ff3cbe0951b2c" -dependencies = [ - "float_next_after", - "lyon_path", - "num-traits", -] - [[package]] name = "mac" version = "0.1.1" diff --git a/Cargo.toml b/Cargo.toml index 1b47335b4c47097232c846d94e34f56062abc183..a0e80d7392a0ad53d198880cb3958b13ecab1091 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -375,9 +375,9 @@ async-watch = "0.3.1" async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] } base64 = "0.22" bitflags = "2.6.0" -blade-graphics = { git = "https://github.com/kvark/blade", rev = "b16f5c7bd873c7126f48c82c39e7ae64602ae74f" } -blade-macros = { git = "https://github.com/kvark/blade", rev = "b16f5c7bd873c7126f48c82c39e7ae64602ae74f" } -blade-util = { git = "https://github.com/kvark/blade", rev = "b16f5c7bd873c7126f48c82c39e7ae64602ae74f" } +blade-graphics = { git = "https://github.com/kvark/blade", rev = "091a8401033847bb9b6ace3fcf70448d069621c5" } +blade-macros = { git = "https://github.com/kvark/blade", rev = "091a8401033847bb9b6ace3fcf70448d069621c5" } +blade-util = { git = "https://github.com/kvark/blade", rev = "091a8401033847bb9b6ace3fcf70448d069621c5" } naga = { version = "23.1.0", features = ["wgsl-in"] } blake3 = "1.5.3" bytes = "1.0" diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 97cfec90c12f628e69e20288136424b194bacd81..f0b0730ad5e9f75b87dff2e850535b4864476722 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -8197,8 +8197,8 @@ impl HighlightedRange { }; let top_curve_width = curve_width(first_line.start_x, first_line.end_x); - let mut builder = gpui::PathBuilder::fill(); - builder.curve_to(first_top_right + curve_height, first_top_right); + let mut path = gpui::Path::new(first_top_right - top_curve_width); + path.curve_to(first_top_right + curve_height, first_top_right); let mut iter = lines.iter().enumerate().peekable(); while let Some((ix, line)) = iter.next() { @@ -8209,42 +8209,42 @@ impl HighlightedRange { match next_top_right.x.partial_cmp(&bottom_right.x).unwrap() { Ordering::Equal => { - builder.line_to(bottom_right); + path.line_to(bottom_right); } Ordering::Less => { let curve_width = curve_width(next_top_right.x, bottom_right.x); - builder.line_to(bottom_right - curve_height); + path.line_to(bottom_right - curve_height); if self.corner_radius > Pixels::ZERO { - builder.curve_to(bottom_right - curve_width, bottom_right); + path.curve_to(bottom_right - curve_width, bottom_right); } - builder.line_to(next_top_right + curve_width); + path.line_to(next_top_right + curve_width); if self.corner_radius > Pixels::ZERO { - builder.curve_to(next_top_right + curve_height, next_top_right); + path.curve_to(next_top_right + curve_height, next_top_right); } } Ordering::Greater => { let curve_width = curve_width(bottom_right.x, next_top_right.x); - builder.line_to(bottom_right - curve_height); + path.line_to(bottom_right - curve_height); if self.corner_radius > 
Pixels::ZERO { - builder.curve_to(bottom_right + curve_width, bottom_right); + path.curve_to(bottom_right + curve_width, bottom_right); } - builder.line_to(next_top_right - curve_width); + path.line_to(next_top_right - curve_width); if self.corner_radius > Pixels::ZERO { - builder.curve_to(next_top_right + curve_height, next_top_right); + path.curve_to(next_top_right + curve_height, next_top_right); } } } } else { let curve_width = curve_width(line.start_x, line.end_x); - builder.line_to(bottom_right - curve_height); + path.line_to(bottom_right - curve_height); if self.corner_radius > Pixels::ZERO { - builder.curve_to(bottom_right - curve_width, bottom_right); + path.curve_to(bottom_right - curve_width, bottom_right); } let bottom_left = point(line.start_x, bottom_right.y); - builder.line_to(bottom_left + curve_width); + path.line_to(bottom_left + curve_width); if self.corner_radius > Pixels::ZERO { - builder.curve_to(bottom_left - curve_height, bottom_left); + path.curve_to(bottom_left - curve_height, bottom_left); } } } @@ -8252,26 +8252,24 @@ impl HighlightedRange { if first_line.start_x > last_line.start_x { let curve_width = curve_width(last_line.start_x, first_line.start_x); let second_top_left = point(last_line.start_x, start_y + self.line_height); - builder.line_to(second_top_left + curve_height); + path.line_to(second_top_left + curve_height); if self.corner_radius > Pixels::ZERO { - builder.curve_to(second_top_left + curve_width, second_top_left); + path.curve_to(second_top_left + curve_width, second_top_left); } let first_bottom_left = point(first_line.start_x, second_top_left.y); - builder.line_to(first_bottom_left - curve_width); + path.line_to(first_bottom_left - curve_width); if self.corner_radius > Pixels::ZERO { - builder.curve_to(first_bottom_left - curve_height, first_bottom_left); + path.curve_to(first_bottom_left - curve_height, first_bottom_left); } } - builder.line_to(first_top_left + curve_height); + path.line_to(first_top_left + curve_height); if self.corner_radius > Pixels::ZERO { - builder.curve_to(first_top_left + top_curve_width, first_top_left); + path.curve_to(first_top_left + top_curve_width, first_top_left); } - builder.line_to(first_top_right - top_curve_width); + path.line_to(first_top_right - top_curve_width); - if let Ok(path) = builder.build() { - window.paint_path(path, self.color); - } + window.paint_path(path, self.color); } } diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index 05a5b28e764dc56486858f87ee87bdced7b3a53d..a0220cd572c32dcf29524f2c86a7e52950d23ece 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -108,7 +108,6 @@ thiserror.workspace = true util.workspace = true uuid.workspace = true waker-fn = "1.2.0" -lyon = "1.0" [target.'cfg(target_os = "macos")'.dependencies] block = "0.1" @@ -206,7 +205,6 @@ rand.workspace = true util = { workspace = true, features = ["test-support"] } http_client = { workspace = true, features = ["test-support"] } unicode-segmentation.workspace = true -lyon = { version = "1.0", features = ["extra"] } [target.'cfg(target_os = "windows")'.build-dependencies] embed-resource = "3.0" diff --git a/crates/gpui/examples/gradient.rs b/crates/gpui/examples/gradient.rs index ec4cdf9bfcdf97f8e0bc3287b94bc3e5a5e198d9..45de8cdd0afa3d190d046947a5308d5618ec22aa 100644 --- a/crates/gpui/examples/gradient.rs +++ b/crates/gpui/examples/gradient.rs @@ -218,17 +218,13 @@ impl Render for GradientViewer { let height = square_bounds.size.height; let horizontal_offset = height; let vertical_offset = 
px(30.); - let mut builder = gpui::PathBuilder::fill(); - builder.move_to(square_bounds.bottom_left()); - builder - .line_to(square_bounds.origin + point(horizontal_offset, vertical_offset)); - builder.line_to( + let mut path = gpui::Path::new(square_bounds.bottom_left()); + path.line_to(square_bounds.origin + point(horizontal_offset, vertical_offset)); + path.line_to( square_bounds.top_right() + point(-horizontal_offset, vertical_offset), ); - - builder.line_to(square_bounds.bottom_right()); - builder.line_to(square_bounds.bottom_left()); - let path = builder.build().unwrap(); + path.line_to(square_bounds.bottom_right()); + path.line_to(square_bounds.bottom_left()); window.paint_path( path, linear_gradient( diff --git a/crates/gpui/examples/painting.rs b/crates/gpui/examples/painting.rs index 7c1a6a367d1335f58ed1ba4f09f3385ecb452980..9a8ab790650130efeb43fbcdc8306963678e50ec 100644 --- a/crates/gpui/examples/painting.rs +++ b/crates/gpui/examples/painting.rs @@ -1,62 +1,46 @@ use gpui::{ - canvas, div, linear_color_stop, linear_gradient, point, prelude::*, px, rgb, size, Application, - Background, Bounds, ColorSpace, Context, MouseDownEvent, Path, PathBuilder, PathStyle, Pixels, - Point, Render, StrokeOptions, Window, WindowOptions, + canvas, div, point, prelude::*, px, size, App, Application, Bounds, Context, MouseDownEvent, + Path, Pixels, Point, Render, Window, WindowOptions, }; - struct PaintingViewer { - default_lines: Vec<(Path, Background)>, + default_lines: Vec>, lines: Vec>>, start: Point, _painting: bool, } impl PaintingViewer { - fn new(_window: &mut Window, _cx: &mut Context) -> Self { + fn new() -> Self { let mut lines = vec![]; - // draw a Rust logo - let mut builder = lyon::path::Path::svg_builder(); - lyon::extra::rust_logo::build_logo_path(&mut builder); - // move down the Path - let mut builder: PathBuilder = builder.into(); - builder.translate(point(px(10.), px(100.))); - builder.scale(0.9); - let path = builder.build().unwrap(); - lines.push((path, gpui::black().into())); + // draw a line + let mut path = Path::new(point(px(50.), px(180.))); + path.line_to(point(px(100.), px(120.))); + // go back to close the path + path.line_to(point(px(100.), px(121.))); + path.line_to(point(px(50.), px(181.))); + lines.push(path); // draw a lightening bolt ⚡ - let mut builder = PathBuilder::fill(); - builder.move_to(point(px(150.), px(200.))); - builder.line_to(point(px(200.), px(125.))); - builder.line_to(point(px(200.), px(175.))); - builder.line_to(point(px(250.), px(100.))); - let path = builder.build().unwrap(); - lines.push((path, rgb(0x1d4ed8).into())); + let mut path = Path::new(point(px(150.), px(200.))); + path.line_to(point(px(200.), px(125.))); + path.line_to(point(px(200.), px(175.))); + path.line_to(point(px(250.), px(100.))); + lines.push(path); // draw a ⭐ - let mut builder = PathBuilder::fill(); - builder.move_to(point(px(350.), px(100.))); - builder.line_to(point(px(370.), px(160.))); - builder.line_to(point(px(430.), px(160.))); - builder.line_to(point(px(380.), px(200.))); - builder.line_to(point(px(400.), px(260.))); - builder.line_to(point(px(350.), px(220.))); - builder.line_to(point(px(300.), px(260.))); - builder.line_to(point(px(320.), px(200.))); - builder.line_to(point(px(270.), px(160.))); - builder.line_to(point(px(330.), px(160.))); - builder.line_to(point(px(350.), px(100.))); - let path = builder.build().unwrap(); - lines.push(( - path, - linear_gradient( - 180., - linear_color_stop(rgb(0xFACC15), 0.7), - linear_color_stop(rgb(0xD56D0C), 1.), - ) - 
.color_space(ColorSpace::Oklab), - )); + let mut path = Path::new(point(px(350.), px(100.))); + path.line_to(point(px(370.), px(160.))); + path.line_to(point(px(430.), px(160.))); + path.line_to(point(px(380.), px(200.))); + path.line_to(point(px(400.), px(260.))); + path.line_to(point(px(350.), px(220.))); + path.line_to(point(px(300.), px(260.))); + path.line_to(point(px(320.), px(200.))); + path.line_to(point(px(270.), px(160.))); + path.line_to(point(px(330.), px(160.))); + path.line_to(point(px(350.), px(100.))); + lines.push(path); let square_bounds = Bounds { origin: point(px(450.), px(100.)), @@ -65,42 +49,18 @@ impl PaintingViewer { let height = square_bounds.size.height; let horizontal_offset = height; let vertical_offset = px(30.); - let mut builder = PathBuilder::fill(); - builder.move_to(square_bounds.bottom_left()); - builder.curve_to( + let mut path = Path::new(square_bounds.bottom_left()); + path.curve_to( square_bounds.origin + point(horizontal_offset, vertical_offset), square_bounds.origin + point(px(0.0), vertical_offset), ); - builder.line_to(square_bounds.top_right() + point(-horizontal_offset, vertical_offset)); - builder.curve_to( + path.line_to(square_bounds.top_right() + point(-horizontal_offset, vertical_offset)); + path.curve_to( square_bounds.bottom_right(), square_bounds.top_right() + point(px(0.0), vertical_offset), ); - builder.line_to(square_bounds.bottom_left()); - let path = builder.build().unwrap(); - lines.push(( - path, - linear_gradient( - 180., - linear_color_stop(gpui::blue(), 0.4), - linear_color_stop(gpui::red(), 1.), - ), - )); - - // draw a wave - let options = StrokeOptions::default() - .with_line_width(1.) - .with_line_join(lyon::path::LineJoin::Bevel); - let mut builder = PathBuilder::stroke(px(1.)).with_style(PathStyle::Stroke(options)); - builder.move_to(point(px(40.), px(320.))); - for i in 0..50 { - builder.line_to(point( - px(40.0 + i as f32 * 10.0), - px(320.0 + (i as f32 * 10.0).sin() * 40.0), - )); - } - let path = builder.build().unwrap(); - lines.push((path, gpui::green().into())); + path.line_to(square_bounds.bottom_left()); + lines.push(path); Self { default_lines: lines.clone(), @@ -155,28 +115,27 @@ impl Render for PaintingViewer { canvas( move |_, _, _| {}, move |_, _, window, _| { - - for (path, color) in default_lines { - window.paint_path(path, color); + const STROKE_WIDTH: Pixels = px(2.0); + for path in default_lines { + window.paint_path(path, gpui::black()); } - for points in lines { - if points.len() < 2 { - continue; + let mut path = Path::new(points[0]); + for p in points.iter().skip(1) { + path.line_to(*p); } - let mut builder = PathBuilder::stroke(px(1.)); - for (i, p) in points.into_iter().enumerate() { - if i == 0 { - builder.move_to(p); - } else { - builder.line_to(p); + let mut last = points.last().unwrap(); + for p in points.iter().rev() { + let mut offset_x = px(0.); + if last.x == p.x { + offset_x = STROKE_WIDTH; } + path.line_to(point(p.x + offset_x, p.y + STROKE_WIDTH)); + last = p; } - if let Ok(path) = builder.build() { - window.paint_path(path, gpui::black()); - } + window.paint_path(path, gpui::black()); } }, ) @@ -226,13 +185,13 @@ impl Render for PaintingViewer { } fn main() { - Application::new().run(|cx| { + Application::new().run(|cx: &mut App| { cx.open_window( WindowOptions { focus: true, ..Default::default() }, - |window, cx| cx.new(|cx| PaintingViewer::new(window, cx)), + |_, cx| cx.new(|_| PaintingViewer::new()), ) .unwrap(); cx.activate(true); diff --git a/crates/gpui/src/gpui.rs 
b/crates/gpui/src/gpui.rs index 1ebfc643ee57ccaaa23fe4cfd77a15187a06a146..db33bfca2e512671d58252341265ed62d02fd06d 100644 --- a/crates/gpui/src/gpui.rs +++ b/crates/gpui/src/gpui.rs @@ -82,7 +82,6 @@ mod input; mod interactive; mod key_dispatch; mod keymap; -mod path_builder; mod platform; pub mod prelude; mod scene; @@ -136,7 +135,6 @@ pub use input::*; pub use interactive::*; use key_dispatch::*; pub use keymap::*; -pub use path_builder::*; pub use platform::*; pub use refineable::*; pub use scene::*; diff --git a/crates/gpui/src/path_builder.rs b/crates/gpui/src/path_builder.rs deleted file mode 100644 index 0fd8eb6fa5458315483db45dfb03bbc3a42d5800..0000000000000000000000000000000000000000 --- a/crates/gpui/src/path_builder.rs +++ /dev/null @@ -1,241 +0,0 @@ -use anyhow::Error; -use etagere::euclid::Vector2D; -use lyon::geom::Angle; -use lyon::tessellation::{ - BuffersBuilder, FillTessellator, FillVertex, StrokeTessellator, StrokeVertex, VertexBuffers, -}; - -pub use lyon::math::Transform; -pub use lyon::tessellation::{FillOptions, FillRule, StrokeOptions}; - -use crate::{point, px, Path, Pixels, Point}; - -/// Style of the PathBuilder -pub enum PathStyle { - /// Stroke style - Stroke(StrokeOptions), - /// Fill style - Fill(FillOptions), -} - -/// A [`Path`] builder. -pub struct PathBuilder { - raw: lyon::path::builder::WithSvg, - transform: Option, - /// PathStyle of the PathBuilder - pub style: PathStyle, -} - -impl From for PathBuilder { - fn from(builder: lyon::path::Builder) -> Self { - Self { - raw: builder.with_svg(), - ..Default::default() - } - } -} - -impl From> for PathBuilder { - fn from(raw: lyon::path::builder::WithSvg) -> Self { - Self { - raw, - ..Default::default() - } - } -} - -impl From for Point { - fn from(p: lyon::math::Point) -> Self { - point(px(p.x), px(p.y)) - } -} - -impl From> for lyon::math::Point { - fn from(p: Point) -> Self { - lyon::math::point(p.x.0, p.y.0) - } -} - -impl Default for PathBuilder { - fn default() -> Self { - Self { - raw: lyon::path::Path::builder().with_svg(), - style: PathStyle::Fill(FillOptions::default()), - transform: None, - } - } -} - -impl PathBuilder { - /// Creates a new [`PathBuilder`] to build a Stroke path. - pub fn stroke(width: Pixels) -> Self { - Self { - style: PathStyle::Stroke(StrokeOptions::default().with_line_width(width.0)), - ..Self::default() - } - } - - /// Creates a new [`PathBuilder`] to build a Fill path. - pub fn fill() -> Self { - Self::default() - } - - /// Sets the style of the [`PathBuilder`]. - pub fn with_style(self, style: PathStyle) -> Self { - Self { style, ..self } - } - - /// Move the current point to the given point. - #[inline] - pub fn move_to(&mut self, to: Point) { - self.raw.move_to(to.into()); - } - - /// Draw a straight line from the current point to the given point. - #[inline] - pub fn line_to(&mut self, to: Point) { - self.raw.line_to(to.into()); - } - - /// Draw a curve from the current point to the given point, using the given control point. - #[inline] - pub fn curve_to(&mut self, to: Point, ctrl: Point) { - self.raw.quadratic_bezier_to(ctrl.into(), to.into()); - } - - /// Adds a cubic Bézier to the [`Path`] given its two control points - /// and its end point. - #[inline] - pub fn cubic_bezier_to( - &mut self, - to: Point, - control_a: Point, - control_b: Point, - ) { - self.raw - .cubic_bezier_to(control_a.into(), control_b.into(), to.into()); - } - - /// Close the current sub-path. 
- #[inline] - pub fn close(&mut self) { - self.raw.close(); - } - - /// Applies a transform to the path. - #[inline] - pub fn transform(&mut self, transform: Transform) { - self.transform = Some(transform); - } - - /// Applies a translation to the path. - #[inline] - pub fn translate(&mut self, to: Point) { - if let Some(transform) = self.transform { - self.transform = Some(transform.then_translate(Vector2D::new(to.x.0, to.y.0))); - } else { - self.transform = Some(Transform::translation(to.x.0, to.y.0)) - } - } - - /// Applies a scale to the path. - #[inline] - pub fn scale(&mut self, scale: f32) { - if let Some(transform) = self.transform { - self.transform = Some(transform.then_scale(scale, scale)); - } else { - self.transform = Some(Transform::scale(scale, scale)); - } - } - - /// Applies a rotation to the path. - /// - /// The `angle` is in degrees value in the range 0.0 to 360.0. - #[inline] - pub fn rotate(&mut self, angle: f32) { - let radians = angle.to_radians(); - if let Some(transform) = self.transform { - self.transform = Some(transform.then_rotate(Angle::radians(radians))); - } else { - self.transform = Some(Transform::rotation(Angle::radians(radians))); - } - } - - /// Builds into a [`Path`]. - #[inline] - pub fn build(self) -> Result, Error> { - let path = if let Some(transform) = self.transform { - self.raw.build().transformed(&transform) - } else { - self.raw.build() - }; - - match self.style { - PathStyle::Stroke(options) => Self::tessellate_stroke(&path, &options), - PathStyle::Fill(options) => Self::tessellate_fill(&path, &options), - } - } - - fn tessellate_fill( - path: &lyon::path::Path, - options: &FillOptions, - ) -> Result, Error> { - // Will contain the result of the tessellation. - let mut buf: VertexBuffers = VertexBuffers::new(); - let mut tessellator = FillTessellator::new(); - - // Compute the tessellation. - tessellator.tessellate_path( - path, - options, - &mut BuffersBuilder::new(&mut buf, |vertex: FillVertex| vertex.position()), - )?; - - Ok(Self::build_path(buf)) - } - - fn tessellate_stroke( - path: &lyon::path::Path, - options: &StrokeOptions, - ) -> Result, Error> { - // Will contain the result of the tessellation. - let mut buf: VertexBuffers = VertexBuffers::new(); - let mut tessellator = StrokeTessellator::new(); - - // Compute the tessellation. - tessellator.tessellate_path( - path, - options, - &mut BuffersBuilder::new(&mut buf, |vertex: StrokeVertex| vertex.position()), - )?; - - Ok(Self::build_path(buf)) - } - - /// Builds a [`Path`] from a [`lyon::VertexBuffers`]. 
- pub fn build_path(buf: VertexBuffers) -> Path { - if buf.vertices.is_empty() { - return Path::new(Point::default()); - } - - let first_point = buf.vertices[0]; - - let mut path = Path::new(first_point.into()); - for i in 0..buf.indices.len() / 3 { - let i0 = buf.indices[i * 3] as usize; - let i1 = buf.indices[i * 3 + 1] as usize; - let i2 = buf.indices[i * 3 + 2] as usize; - - let v0 = buf.vertices[i0]; - let v1 = buf.vertices[i1]; - let v2 = buf.vertices[i2]; - - path.push_triangle( - (v0.into(), v1.into(), v2.into()), - (point(0., 1.), point(0., 1.), point(0., 1.)), - ); - } - - path - } -} diff --git a/crates/gpui/src/platform/blade/blade_atlas.rs b/crates/gpui/src/platform/blade/blade_atlas.rs index 2783d57127e6c8d5e0b4bf6e2d5978ad1b8ff955..fb703f2a411c078b7e2a52b861868976481e69c9 100644 --- a/crates/gpui/src/platform/blade/blade_atlas.rs +++ b/crates/gpui/src/platform/blade/blade_atlas.rs @@ -27,7 +27,6 @@ struct BladeAtlasState { tiles_by_key: FxHashMap, initializations: Vec, uploads: Vec, - path_sample_count: u32, } #[cfg(gles)] @@ -43,11 +42,10 @@ impl BladeAtlasState { pub struct BladeTextureInfo { pub size: gpu::Extent, pub raw_view: gpu::TextureView, - pub msaa_view: Option, } impl BladeAtlas { - pub(crate) fn new(gpu: &Arc, path_sample_count: u32) -> Self { + pub(crate) fn new(gpu: &Arc) -> Self { BladeAtlas(Mutex::new(BladeAtlasState { gpu: Arc::clone(gpu), upload_belt: BufferBelt::new(BufferBeltDescriptor { @@ -59,7 +57,6 @@ impl BladeAtlas { tiles_by_key: Default::default(), initializations: Vec::new(), uploads: Vec::new(), - path_sample_count, })) } @@ -109,7 +106,6 @@ impl BladeAtlas { depth: 1, }, raw_view: texture.raw_view, - msaa_view: texture.msaa_view, } } } @@ -208,39 +204,6 @@ impl BladeAtlasState { } } - // We currently only enable MSAA for path textures. 
- let (msaa, msaa_view) = if self.path_sample_count > 1 && kind == AtlasTextureKind::Path { - let msaa = self.gpu.create_texture(gpu::TextureDesc { - name: "msaa path texture", - format, - size: gpu::Extent { - width: size.width.into(), - height: size.height.into(), - depth: 1, - }, - array_layer_count: 1, - mip_level_count: 1, - sample_count: self.path_sample_count, - dimension: gpu::TextureDimension::D2, - usage: gpu::TextureUsage::TARGET, - }); - - ( - Some(msaa), - Some(self.gpu.create_texture_view( - msaa, - gpu::TextureViewDesc { - name: "msaa texture view", - format, - dimension: gpu::ViewDimension::D2, - subresources: &Default::default(), - }, - )), - ) - } else { - (None, None) - }; - let raw = self.gpu.create_texture(gpu::TextureDesc { name: "atlas", format, @@ -277,8 +240,6 @@ impl BladeAtlasState { format, raw, raw_view, - msaa, - msaa_view, live_atlas_keys: 0, }; @@ -393,8 +354,6 @@ struct BladeAtlasTexture { allocator: BucketedAtlasAllocator, raw: gpu::Texture, raw_view: gpu::TextureView, - msaa: Option, - msaa_view: Option, format: gpu::TextureFormat, live_atlas_keys: u32, } @@ -422,12 +381,6 @@ impl BladeAtlasTexture { fn destroy(&mut self, gpu: &gpu::Context) { gpu.destroy_texture(self.raw); gpu.destroy_texture_view(self.raw_view); - if let Some(msaa) = self.msaa { - gpu.destroy_texture(msaa); - } - if let Some(msaa_view) = self.msaa_view { - gpu.destroy_texture_view(msaa_view); - } } fn bytes_per_pixel(&self) -> u8 { diff --git a/crates/gpui/src/platform/blade/blade_renderer.rs b/crates/gpui/src/platform/blade/blade_renderer.rs index 200ebaaf07fd2b275fbc4b3510df264ecc404949..ee8ffdfda7fa26b6a4edb970c343ac5c0f26d3c7 100644 --- a/crates/gpui/src/platform/blade/blade_renderer.rs +++ b/crates/gpui/src/platform/blade/blade_renderer.rs @@ -7,18 +7,16 @@ use crate::{ MonochromeSprite, Path, PathId, PathVertex, PolychromeSprite, PrimitiveBatch, Quad, ScaledPixels, Scene, Shadow, Size, Underline, }; -use blade_graphics as gpu; -use blade_util::{BufferBelt, BufferBeltDescriptor}; use bytemuck::{Pod, Zeroable}; use collections::HashMap; #[cfg(target_os = "macos")] use media::core_video::CVMetalTextureCache; + +use blade_graphics as gpu; +use blade_util::{BufferBelt, BufferBeltDescriptor}; use std::{mem, sync::Arc}; const MAX_FRAME_TIME_MS: u32 = 10000; -// Use 4x MSAA, all devices support it. 
-// https://developer.apple.com/documentation/metal/mtldevice/1433355-supportstexturesamplecount -const PATH_SAMPLE_COUNT: u32 = 4; #[repr(C)] #[derive(Clone, Copy, Pod, Zeroable)] @@ -210,10 +208,7 @@ impl BladePipelines { blend: Some(gpu::BlendState::ADDITIVE), write_mask: gpu::ColorWrites::default(), }], - multisample_state: gpu::MultisampleState { - sample_count: PATH_SAMPLE_COUNT, - ..Default::default() - }, + multisample_state: gpu::MultisampleState::default(), }), paths: gpu.create_render_pipeline(gpu::RenderPipelineDesc { name: "paths", @@ -353,7 +348,7 @@ impl BladeRenderer { min_chunk_size: 0x1000, alignment: 0x40, // Vulkan `minStorageBufferOffsetAlignment` on Intel Xe }); - let atlas = Arc::new(BladeAtlas::new(&context.gpu, PATH_SAMPLE_COUNT)); + let atlas = Arc::new(BladeAtlas::new(&context.gpu)); let atlas_sampler = context.gpu.create_sampler(gpu::SamplerDesc { name: "atlas", mag_filter: gpu::FilterMode::Linear, @@ -502,38 +497,27 @@ impl BladeRenderer { }; let vertex_buf = unsafe { self.instance_belt.alloc_typed(&vertices, &self.gpu) }; - let frame_view = tex_info.raw_view; - let color_target = if let Some(msaa_view) = tex_info.msaa_view { - gpu::RenderTarget { - view: msaa_view, - init_op: gpu::InitOp::Clear(gpu::TextureColor::OpaqueBlack), - finish_op: gpu::FinishOp::ResolveTo(frame_view), - } - } else { - gpu::RenderTarget { - view: frame_view, - init_op: gpu::InitOp::Clear(gpu::TextureColor::OpaqueBlack), - finish_op: gpu::FinishOp::Store, - } - }; - - if let mut pass = self.command_encoder.render( + let mut pass = self.command_encoder.render( "paths", gpu::RenderTargetSet { - colors: &[color_target], + colors: &[gpu::RenderTarget { + view: tex_info.raw_view, + init_op: gpu::InitOp::Clear(gpu::TextureColor::OpaqueBlack), + finish_op: gpu::FinishOp::Store, + }], depth_stencil: None, }, - ) { - let mut encoder = pass.with(&self.pipelines.path_rasterization); - encoder.bind( - 0, - &ShaderPathRasterizationData { - globals, - b_path_vertices: vertex_buf, - }, - ); - encoder.draw(0, vertices.len() as u32, 0, 1); - } + ); + + let mut encoder = pass.with(&self.pipelines.path_rasterization); + encoder.bind( + 0, + &ShaderPathRasterizationData { + globals, + b_path_vertices: vertex_buf, + }, + ); + encoder.draw(0, vertices.len() as u32, 0, 1); } } diff --git a/crates/gpui/src/platform/mac/metal_atlas.rs b/crates/gpui/src/platform/mac/metal_atlas.rs index 4662761a7d32a26b5b22111f8eb00cc71d2c90c6..ca595c5ce3475356e47ac886f90704ceb6fdecfd 100644 --- a/crates/gpui/src/platform/mac/metal_atlas.rs +++ b/crates/gpui/src/platform/mac/metal_atlas.rs @@ -13,14 +13,13 @@ use std::borrow::Cow; pub(crate) struct MetalAtlas(Mutex); impl MetalAtlas { - pub(crate) fn new(device: Device, path_sample_count: u32) -> Self { + pub(crate) fn new(device: Device) -> Self { MetalAtlas(Mutex::new(MetalAtlasState { device: AssertSend(device), monochrome_textures: Default::default(), polychrome_textures: Default::default(), path_textures: Default::default(), tiles_by_key: Default::default(), - path_sample_count, })) } @@ -28,10 +27,6 @@ impl MetalAtlas { self.0.lock().texture(id).metal_texture.clone() } - pub(crate) fn msaa_texture(&self, id: AtlasTextureId) -> Option { - self.0.lock().texture(id).msaa_texture.clone() - } - pub(crate) fn allocate( &self, size: Size, @@ -59,7 +54,6 @@ struct MetalAtlasState { polychrome_textures: AtlasTextureList, path_textures: AtlasTextureList, tiles_by_key: FxHashMap, - path_sample_count: u32, } impl PlatformAtlas for MetalAtlas { @@ -182,18 +176,6 @@ impl MetalAtlasState 
{ texture_descriptor.set_usage(usage); let metal_texture = self.device.new_texture(&texture_descriptor); - // We currently only enable MSAA for path textures. - let msaa_texture = if self.path_sample_count > 1 && kind == AtlasTextureKind::Path { - let mut descriptor = texture_descriptor.clone(); - descriptor.set_texture_type(metal::MTLTextureType::D2Multisample); - descriptor.set_storage_mode(metal::MTLStorageMode::Private); - descriptor.set_sample_count(self.path_sample_count as _); - let msaa_texture = self.device.new_texture(&descriptor); - Some(msaa_texture) - } else { - None - }; - let texture_list = match kind { AtlasTextureKind::Monochrome => &mut self.monochrome_textures, AtlasTextureKind::Polychrome => &mut self.polychrome_textures, @@ -209,7 +191,6 @@ impl MetalAtlasState { }, allocator: etagere::BucketedAtlasAllocator::new(size.into()), metal_texture: AssertSend(metal_texture), - msaa_texture: AssertSend(msaa_texture), live_atlas_keys: 0, }; @@ -236,7 +217,6 @@ struct MetalAtlasTexture { id: AtlasTextureId, allocator: BucketedAtlasAllocator, metal_texture: AssertSend, - msaa_texture: AssertSend>, live_atlas_keys: u32, } diff --git a/crates/gpui/src/platform/mac/metal_renderer.rs b/crates/gpui/src/platform/mac/metal_renderer.rs index 56109d2ff6e5cd9f39a02413f6dea26195522cfc..c290d12f7e752191f64aa4dbd5f89fd246b10639 100644 --- a/crates/gpui/src/platform/mac/metal_renderer.rs +++ b/crates/gpui/src/platform/mac/metal_renderer.rs @@ -28,9 +28,6 @@ pub(crate) type PointF = crate::Point; const SHADERS_METALLIB: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib")); #[cfg(feature = "runtime_shaders")] const SHADERS_SOURCE_FILE: &str = include_str!(concat!(env!("OUT_DIR"), "/stitched_shaders.metal")); -// Use 4x MSAA, all devices support it. 
-// https://developer.apple.com/documentation/metal/mtldevice/1433355-supportstexturesamplecount -const PATH_SAMPLE_COUNT: u32 = 4; pub type Context = Arc>; pub type Renderer = MetalRenderer; @@ -173,7 +170,6 @@ impl MetalRenderer { "path_rasterization_vertex", "path_rasterization_fragment", MTLPixelFormat::R16Float, - PATH_SAMPLE_COUNT, ); let path_sprites_pipeline_state = build_pipeline_state( &device, @@ -233,7 +229,7 @@ impl MetalRenderer { ); let command_queue = device.new_command_queue(); - let sprite_atlas = Arc::new(MetalAtlas::new(device.clone(), PATH_SAMPLE_COUNT)); + let sprite_atlas = Arc::new(MetalAtlas::new(device.clone())); let core_video_texture_cache = unsafe { CVMetalTextureCache::new(device.as_ptr()).unwrap() }; @@ -535,20 +531,10 @@ impl MetalRenderer { .unwrap(); let texture = self.sprite_atlas.metal_texture(texture_id); - let msaa_texture = self.sprite_atlas.msaa_texture(texture_id); - - if let Some(msaa_texture) = msaa_texture { - color_attachment.set_texture(Some(&msaa_texture)); - color_attachment.set_resolve_texture(Some(&texture)); - color_attachment.set_load_action(metal::MTLLoadAction::Clear); - color_attachment.set_store_action(metal::MTLStoreAction::MultisampleResolve); - } else { - color_attachment.set_texture(Some(&texture)); - color_attachment.set_load_action(metal::MTLLoadAction::Clear); - color_attachment.set_store_action(metal::MTLStoreAction::Store); - } + color_attachment.set_texture(Some(&texture)); + color_attachment.set_load_action(metal::MTLLoadAction::Clear); + color_attachment.set_store_action(metal::MTLStoreAction::Store); color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.)); - let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor); command_encoder.set_render_pipeline_state(&self.paths_rasterization_pipeline_state); command_encoder.set_vertex_buffer( @@ -1174,7 +1160,6 @@ fn build_path_rasterization_pipeline_state( vertex_fn_name: &str, fragment_fn_name: &str, pixel_format: metal::MTLPixelFormat, - path_sample_count: u32, ) -> metal::RenderPipelineState { let vertex_fn = library .get_function(vertex_fn_name, None) @@ -1187,10 +1172,6 @@ fn build_path_rasterization_pipeline_state( descriptor.set_label(label); descriptor.set_vertex_function(Some(vertex_fn.as_ref())); descriptor.set_fragment_function(Some(fragment_fn.as_ref())); - if path_sample_count > 1 { - descriptor.set_raster_sample_count(path_sample_count as _); - descriptor.set_alpha_to_coverage_enabled(true); - } let color_attachment = descriptor.color_attachments().object_at(0).unwrap(); color_attachment.set_pixel_format(pixel_format); color_attachment.set_blending_enabled(true); diff --git a/crates/gpui/src/scene.rs b/crates/gpui/src/scene.rs index b837f2ad9131b65e9f4db3d7bdc1bc3719fae991..778a5d1f2734181cf9a7320f347ef041666f05d6 100644 --- a/crates/gpui/src/scene.rs +++ b/crates/gpui/src/scene.rs @@ -715,13 +715,6 @@ impl Path { } } - /// Move the start, current point to the given point. - pub fn move_to(&mut self, to: Point) { - self.contour_count += 1; - self.start = to; - self.current = to; - } - /// Draw a straight line from the current point to the given point. pub fn line_to(&mut self, to: Point) { self.contour_count += 1; @@ -751,8 +744,7 @@ impl Path { self.current = to; } - /// Push a triangle to the Path. 
- pub fn push_triangle( + fn push_triangle( &mut self, xy: (Point, Point, Point), st: (Point, Point, Point), From 5a25751521d3f6e9da51eb5d0af34dc25d9b7ce8 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 5 Feb 2025 13:17:19 -0500 Subject: [PATCH 033/130] extension_cli: Include the list of what an extension provides in the generated manifest (#24295) This PR updates the Zed extension CLI with support for populating the `provides` field in the generated extension manifest. This field will contain the set of features that the extension provides. For example: ``` "provides": ["themes", "icon-themes"] ``` Release Notes: - N/A --- crates/collab/src/db/queries/extensions.rs | 2 + crates/collab/src/db/tests/extension_tests.rs | 12 +++- crates/extension_cli/src/main.rs | 61 +++++++++++++++---- crates/rpc/src/extension.rs | 18 ++++++ 4 files changed, 81 insertions(+), 12 deletions(-) diff --git a/crates/collab/src/db/queries/extensions.rs b/crates/collab/src/db/queries/extensions.rs index 93604868fae0e3d2e4e5f9694df3d7e9da1b32c6..51c63956323b9390ddc6413239058a644f2d7556 100644 --- a/crates/collab/src/db/queries/extensions.rs +++ b/crates/collab/src/db/queries/extensions.rs @@ -1,3 +1,4 @@ +use std::collections::BTreeSet; use std::str::FromStr; use chrono::Utc; @@ -370,6 +371,7 @@ fn metadata_from_extension_and_version( repository: version.repository, schema_version: Some(version.schema_version), wasm_api_version: version.wasm_api_version, + provides: BTreeSet::default(), }, published_at: convert_time_to_chrono(version.published_at), diff --git a/crates/collab/src/db/tests/extension_tests.rs b/crates/collab/src/db/tests/extension_tests.rs index b91570c49405c16ef9308bd63f352b8177403327..84e53c5cab2b816cca16109b10a7f43652f9498e 100644 --- a/crates/collab/src/db/tests/extension_tests.rs +++ b/crates/collab/src/db/tests/extension_tests.rs @@ -1,10 +1,12 @@ +use std::collections::BTreeSet; +use std::sync::Arc; + use super::Database; use crate::db::ExtensionVersionConstraints; use crate::{ db::{queries::extensions::convert_time_to_chrono, ExtensionMetadata, NewExtensionVersion}, test_both_dbs, }; -use std::sync::Arc; test_both_dbs!( test_extensions, @@ -97,6 +99,7 @@ async fn test_extensions(db: &Arc) { repository: "ext1/repo".into(), schema_version: Some(1), wasm_api_version: None, + provides: BTreeSet::default(), }, published_at: t0_chrono, download_count: 0, @@ -111,6 +114,7 @@ async fn test_extensions(db: &Arc) { repository: "ext2/repo".into(), schema_version: Some(0), wasm_api_version: None, + provides: BTreeSet::default(), }, published_at: t0_chrono, download_count: 0 @@ -132,6 +136,7 @@ async fn test_extensions(db: &Arc) { repository: "ext2/repo".into(), schema_version: Some(0), wasm_api_version: None, + provides: BTreeSet::default(), }, published_at: t0_chrono, download_count: 0 @@ -172,6 +177,7 @@ async fn test_extensions(db: &Arc) { repository: "ext2/repo".into(), schema_version: Some(0), wasm_api_version: None, + provides: BTreeSet::default(), }, published_at: t0_chrono, download_count: 7 @@ -186,6 +192,7 @@ async fn test_extensions(db: &Arc) { repository: "ext1/repo".into(), schema_version: Some(1), wasm_api_version: None, + provides: BTreeSet::default(), }, published_at: t0_chrono, download_count: 5, @@ -258,6 +265,7 @@ async fn test_extensions(db: &Arc) { repository: "ext2/repo".into(), schema_version: Some(0), wasm_api_version: None, + provides: BTreeSet::default(), }, published_at: t0_chrono, download_count: 7 @@ -272,6 +280,7 @@ async fn test_extensions(db: &Arc) { repository: 
"ext1/repo".into(), schema_version: Some(1), wasm_api_version: None, + provides: BTreeSet::default(), }, published_at: t0_chrono, download_count: 5, @@ -378,6 +387,7 @@ async fn test_extensions_by_id(db: &Arc) { repository: "ext1/repo".into(), schema_version: Some(1), wasm_api_version: Some("0.0.4".into()), + provides: BTreeSet::default(), }, published_at: t0_chrono, download_count: 0, diff --git a/crates/extension_cli/src/main.rs b/crates/extension_cli/src/main.rs index 917eca96db900c9683ebb46d8b11a5ec95974331..442884912928ce09cbd6fe0f4ea09de41ff28775 100644 --- a/crates/extension_cli/src/main.rs +++ b/crates/extension_cli/src/main.rs @@ -1,20 +1,18 @@ -use std::{ - collections::HashMap, - env, fs, - path::{Path, PathBuf}, - process::Command, - sync::Arc, -}; +use std::collections::{BTreeSet, HashMap}; +use std::env; +use std::fs; +use std::path::{Path, PathBuf}; +use std::process::Command; +use std::sync::Arc; use ::fs::{copy_recursive, CopyOptions, Fs, RealFs}; use anyhow::{anyhow, bail, Context, Result}; use clap::Parser; -use extension::{ - extension_builder::{CompileExtensionOptions, ExtensionBuilder}, - ExtensionManifest, -}; +use extension::extension_builder::{CompileExtensionOptions, ExtensionBuilder}; +use extension::ExtensionManifest; use language::LanguageConfig; use reqwest_client::ReqwestClient; +use rpc::ExtensionProvides; use tree_sitter::{Language, Query, WasmStore}; #[derive(Parser, Debug)] @@ -99,6 +97,8 @@ async fn main() -> Result<()> { ); } + let extension_provides = extension_provides(&manifest); + let manifest_json = serde_json::to_string(&rpc::ExtensionApiManifest { name: manifest.name, version: manifest.version, @@ -109,6 +109,7 @@ async fn main() -> Result<()> { .repository .ok_or_else(|| anyhow!("missing repository in extension manifest"))?, wasm_api_version: manifest.lib.version.map(|version| version.to_string()), + provides: extension_provides, })?; fs::remove_dir_all(&archive_dir)?; fs::write(output_dir.join("manifest.json"), manifest_json.as_bytes())?; @@ -116,6 +117,44 @@ async fn main() -> Result<()> { Ok(()) } +/// Returns the set of features provided by the extension. 
+fn extension_provides(manifest: &ExtensionManifest) -> BTreeSet { + let mut provides = BTreeSet::default(); + if !manifest.themes.is_empty() { + provides.insert(ExtensionProvides::Themes); + } + + if !manifest.icon_themes.is_empty() { + provides.insert(ExtensionProvides::IconThemes); + } + + if !manifest.languages.is_empty() { + provides.insert(ExtensionProvides::Languages); + } + + if !manifest.grammars.is_empty() { + provides.insert(ExtensionProvides::Grammars); + } + + if !manifest.language_servers.is_empty() { + provides.insert(ExtensionProvides::LanguageServers); + } + + if !manifest.context_servers.is_empty() { + provides.insert(ExtensionProvides::ContextServers); + } + + if !manifest.indexed_docs_providers.is_empty() { + provides.insert(ExtensionProvides::IndexedDocsProviders); + } + + if manifest.snippets.is_some() { + provides.insert(ExtensionProvides::Snippets); + } + + provides +} + async fn copy_extension_resources( manifest: &ExtensionManifest, extension_path: &Path, diff --git a/crates/rpc/src/extension.rs b/crates/rpc/src/extension.rs index f64e9feedd4b17d01be942aa37227832de0edcfc..67b9116b83b73cdac50c002f7b61de7c6682ca6c 100644 --- a/crates/rpc/src/extension.rs +++ b/crates/rpc/src/extension.rs @@ -1,3 +1,5 @@ +use std::collections::BTreeSet; + use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use std::sync::Arc; @@ -11,6 +13,22 @@ pub struct ExtensionApiManifest { pub repository: String, pub schema_version: Option, pub wasm_api_version: Option, + #[serde(default)] + pub provides: BTreeSet, +} + +#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub enum ExtensionProvides { + Themes, + IconThemes, + Languages, + Grammars, + LanguageServers, + ContextServers, + SlashCommands, + IndexedDocsProviders, + Snippets, } #[derive(Clone, Serialize, Deserialize, Debug, PartialEq)] From ffe503d77c523ea6a91a890245e2a1092a56161d Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Wed, 5 Feb 2025 13:41:08 -0500 Subject: [PATCH 034/130] Fix spurious addition hunks in files with no git repo (#24288) Release Notes: - N/A --- crates/editor/src/editor.rs | 6 ++---- crates/worktree/src/worktree.rs | 4 ++-- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index dab3ef5d7187805c8ce783c40a40c0c68ac69392..6304a56b66585f48898d1bbabf625e1fbee68754 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -14478,10 +14478,8 @@ fn get_uncommitted_changes_for_buffer( let change_sets = futures::future::join_all(tasks).await; buffer .update(&mut cx, |buffer, cx| { - for change_set in change_sets { - if let Some(change_set) = change_set.log_err() { - buffer.add_change_set(change_set, cx); - } + for change_set in change_sets.into_iter().flatten() { + buffer.add_change_set(change_set, cx); } }) .ok(); diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index cc6075f07a68afbae66dfcabcd97c92bb8a00a80..c87187a32e38c0f3de356e95e1886d054ab2329e 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -886,7 +886,7 @@ impl Worktree { } } } - Ok(None) + Err(anyhow!("No repository found for {path:?}")) }) } Worktree::Remote(_) => { @@ -910,7 +910,7 @@ impl Worktree { } } } - Ok(None) + Err(anyhow!("No repository found for {path:?}")) }) } Worktree::Remote(_) => Task::ready(Err(anyhow!( From 70b1e0eec0f4118c15d908636b9a38591dcf43ea Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: 
Wed, 5 Feb 2025 11:48:33 -0700 Subject: [PATCH 035/130] Fix expand buttons adjacent to folded hunks (#24297) Release Notes: - Fix expand buttons adjacent to folded hunks --- crates/editor/src/element.rs | 292 ++++++++++++++--------------------- 1 file changed, 116 insertions(+), 176 deletions(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index f0b0730ad5e9f75b87dff2e850535b4864476722..4669f5d57fbba7d5e62f030f9345e0288f8c1977 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -2403,35 +2403,18 @@ impl EditorElement { height, } => { let selected = selected_buffer_ids.contains(&first_excerpt.buffer_id); - let icon_offset = gutter_dimensions.width - - (gutter_dimensions.left_padding + gutter_dimensions.margin); let mut result = v_flex().id(block_id).w_full(); if let Some(prev_excerpt) = prev_excerpt { if *show_excerpt_controls { - result = - result.child( - h_flex() - .id("expand_down_hit_area") - .w(icon_offset) - .h(MULTI_BUFFER_EXCERPT_HEADER_HEIGHT as f32 - * window.line_height()) - .flex_none() - .justify_end() - .child(self.render_expand_excerpt_button( - IconName::ArrowDownFromLine, - None, - cx, - )) - .on_click(window.listener_for(&self.editor, { - let excerpt_id = prev_excerpt.id; - let direction = ExpandExcerptDirection::Down; - move |editor, _, _, cx| { - editor.expand_excerpt(excerpt_id, direction, cx); - cx.stop_propagation(); - } - })), - ); + result = result.child(self.render_expand_excerpt_control( + block_id, + ExpandExcerptDirection::Down, + prev_excerpt.id, + gutter_dimensions, + window, + cx, + )); } } @@ -2455,65 +2438,19 @@ impl EditorElement { height, starts_new_buffer, } => { - let icon_offset = gutter_dimensions.width - - (gutter_dimensions.left_padding + gutter_dimensions.margin); - let header_height = - MULTI_BUFFER_EXCERPT_HEADER_HEIGHT as f32 * window.line_height(); let color = cx.theme().colors().clone(); - let hover_color = color.border_variant.opacity(0.5); - let focus_handle = self.editor.focus_handle(cx).clone(); - let mut result = v_flex().id(block_id).w_full(); - let expand_area = |id: SharedString| { - h_flex() - .id(id) - .w_full() - .cursor_pointer() - .block_mouse_down() - .on_mouse_move(|_, _, cx| cx.stop_propagation()) - .hover(|style| style.bg(hover_color)) - .tooltip({ - let focus_handle = focus_handle.clone(); - move |window, cx| { - Tooltip::for_action_in( - "Expand Excerpt", - &ExpandExcerpts { lines: 0 }, - &focus_handle, - window, - cx, - ) - } - }) - }; if let Some(prev_excerpt) = prev_excerpt { if *show_excerpt_controls { - let group_name = "expand-down"; - - result = result.child( - expand_area(format!("block-{}-down", block_id).into()) - .group(group_name) - .child( - h_flex() - .w(icon_offset) - .h(header_height) - .flex_none() - .justify_end() - .child(self.render_expand_excerpt_button( - IconName::ArrowDownFromLine, - Some(group_name.to_string()), - cx, - )), - ) - .on_click(window.listener_for(&self.editor, { - let excerpt_id = prev_excerpt.id; - let direction = ExpandExcerptDirection::Down; - move |editor, _, _, cx| { - editor.expand_excerpt(excerpt_id, direction, cx); - cx.stop_propagation(); - } - })), - ); + result = result.child(self.render_expand_excerpt_control( + block_id, + ExpandExcerptDirection::Down, + prev_excerpt.id, + gutter_dimensions, + window, + cx, + )); } } @@ -2539,43 +2476,20 @@ impl EditorElement { } if *show_excerpt_controls { - let group_name = "expand-up-first"; - - result = result.child( - h_flex().group(group_name).child( - 
expand_area(format!("block-{}-up-first", block_id).into()) - .h(header_height) - .child( - h_flex() - .w(icon_offset) - .h(header_height) - .flex_none() - .justify_end() - .child(self.render_expand_excerpt_button( - IconName::ArrowUpFromLine, - Some(group_name.to_string()), - cx, - )), - ) - .on_click(window.listener_for(&self.editor, { - let excerpt_id = next_excerpt.id; - let direction = ExpandExcerptDirection::Up; - move |editor, _, _, cx| { - editor.expand_excerpt(excerpt_id, direction, cx); - cx.stop_propagation(); - } - })), - ), - ); + result = result.child(self.render_expand_excerpt_control( + block_id, + ExpandExcerptDirection::Up, + next_excerpt.id, + gutter_dimensions, + window, + cx, + )); } } else { - let group_name = "expand-up-subsequent"; - if *show_excerpt_controls { result = result.child( h_flex() .relative() - .group(group_name) .child( div() .top(px(0.)) @@ -2584,55 +2498,14 @@ impl EditorElement { .h_px() .bg(color.border_variant), ) - .child( - expand_area(format!("block-{}-up", block_id).into()) - .h(header_height) - .child( - h_flex() - .w(icon_offset) - .h(header_height) - .flex_none() - .justify_end() - .child(if *show_excerpt_controls { - self.render_expand_excerpt_button( - IconName::ArrowUpFromLine, - Some(group_name.to_string()), - cx, - ) - } else { - ButtonLike::new("jump-icon") - .style(ButtonStyle::Transparent) - .child( - svg() - .path( - IconName::ArrowUpRight - .path(), - ) - .size(IconSize::XSmall.rems()) - .text_color( - color.border_variant, - ) - .group_hover( - group_name, - |style| { - style.text_color( - color.border, - ) - }, - ), - ) - }), - ) - .on_click(window.listener_for(&self.editor, { - let excerpt_id = next_excerpt.id; - let direction = ExpandExcerptDirection::Up; - move |editor, _, _, cx| { - editor - .expand_excerpt(excerpt_id, direction, cx); - cx.stop_propagation(); - } - })), - ), + .child(self.render_expand_excerpt_control( + block_id, + ExpandExcerptDirection::Up, + next_excerpt.id, + gutter_dimensions, + window, + cx, + )), ); } }; @@ -2809,26 +2682,93 @@ impl EditorElement { ) } - fn render_expand_excerpt_button( + fn render_expand_excerpt_control( &self, - icon: IconName, - group_name: impl Into>, + block_id: BlockId, + direction: ExpandExcerptDirection, + excerpt_id: ExcerptId, + gutter_dimensions: &GutterDimensions, + window: &Window, cx: &mut App, - ) -> ButtonLike { - let group_name = group_name.into(); - ButtonLike::new("expand-icon") - .style(ButtonStyle::Transparent) - .child( - svg() - .path(icon.path()) - .size(IconSize::XSmall.rems()) - .text_color(cx.theme().colors().editor_line_number) - .when_some(group_name, |svg, group_name| { - svg.group_hover(group_name, |style| { - style.text_color(cx.theme().colors().editor_active_line_number) - }) - }), + ) -> impl IntoElement { + let color = cx.theme().colors().clone(); + let hover_color = color.border_variant.opacity(0.5); + let focus_handle = self.editor.focus_handle(cx).clone(); + + let icon_offset = + gutter_dimensions.width - (gutter_dimensions.left_padding + gutter_dimensions.margin); + let header_height = MULTI_BUFFER_EXCERPT_HEADER_HEIGHT as f32 * window.line_height(); + let group_name = if direction == ExpandExcerptDirection::Down { + "expand-down" + } else { + "expand-up" + }; + + let expand_area = |id: SharedString| { + h_flex() + .id(id) + .w_full() + .cursor_pointer() + .block_mouse_down() + .on_mouse_move(|_, _, cx| cx.stop_propagation()) + .hover(|style| style.bg(hover_color)) + .tooltip({ + let focus_handle = focus_handle.clone(); + move |window, cx| { + 
Tooltip::for_action_in( + "Expand Excerpt", + &ExpandExcerpts { lines: 0 }, + &focus_handle, + window, + cx, + ) + } + }) + }; + + expand_area( + format!( + "block-{}-{}", + block_id, + if direction == ExpandExcerptDirection::Down { + "down" + } else { + "up" + } ) + .into(), + ) + .group(group_name) + .child( + h_flex() + .w(icon_offset) + .h(header_height) + .flex_none() + .justify_end() + .child( + ButtonLike::new("expand-icon") + .style(ButtonStyle::Transparent) + .child( + svg() + .path(if direction == ExpandExcerptDirection::Down { + IconName::ArrowDownFromLine.path() + } else { + IconName::ArrowUpFromLine.path() + }) + .size(IconSize::XSmall.rems()) + .text_color(cx.theme().colors().editor_line_number) + .group_hover(group_name, |style| { + style.text_color(cx.theme().colors().editor_active_line_number) + }), + ), + ), + ) + .on_click(window.listener_for(&self.editor, { + move |editor, _, _, cx| { + editor.expand_excerpt(excerpt_id, direction, cx); + cx.stop_propagation(); + } + })) } #[allow(clippy::too_many_arguments)] From 6d81ad1e0b5ed395d47a3f221f8a185a522c7375 Mon Sep 17 00:00:00 2001 From: Nate Butler Date: Wed, 5 Feb 2025 13:54:14 -0500 Subject: [PATCH 036/130] git_ui: Start unifying panel style with other panels (#24296) - Adds the `panel` crate for defining UI shared between panels, like common button and header designs, etc - Starts to update the git ui to be more consistent with other panels Release Notes: - N/A --- Cargo.lock | 10 ++++ Cargo.toml | 5 +- crates/git_ui/Cargo.toml | 5 +- crates/git_ui/src/git_panel.rs | 62 +++++++++++++--------- crates/panel/Cargo.toml | 21 ++++++++ crates/panel/LICENSE-GPL | 1 + crates/panel/src/panel.rs | 66 +++++++++++++++++++++++ crates/workspace/src/workspace.rs | 3 +- script/new-crate | 87 +++++++++++++++++++++++++++++++ 9 files changed, 230 insertions(+), 30 deletions(-) create mode 100644 crates/panel/Cargo.toml create mode 120000 crates/panel/LICENSE-GPL create mode 100644 crates/panel/src/panel.rs create mode 100755 script/new-crate diff --git a/Cargo.lock b/Cargo.lock index dadaeeaba15cbc724281e886deae738c6bcbeba4..c679633fae3f97d50f6e514535e35c294542a611 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5245,6 +5245,7 @@ dependencies = [ "language", "menu", "multi_buffer", + "panel", "picker", "postage", "project", @@ -8821,6 +8822,15 @@ dependencies = [ "syn 2.0.90", ] +[[package]] +name = "panel" +version = "0.1.0" +dependencies = [ + "gpui", + "ui", + "workspace", +] + [[package]] name = "parity-tokio-ipc" version = "0.9.0" diff --git a/Cargo.toml b/Cargo.toml index a0e80d7392a0ad53d198880cb3958b13ecab1091..17e865c05431153dc9f43a777dea9f3df7308c1c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -87,6 +87,7 @@ members = [ "crates/open_ai", "crates/outline", "crates/outline_panel", + "crates/panel", "crates/paths", "crates/picker", "crates/prettier", @@ -103,7 +104,6 @@ members = [ "crates/remote_server", "crates/repl", "crates/reqwest_client", - "crates/reqwest_client", "crates/rich_text", "crates/rope", "crates/rpc", @@ -243,8 +243,8 @@ fs = { path = "crates/fs" } fsevent = { path = "crates/fsevent" } fuzzy = { path = "crates/fuzzy" } git = { path = "crates/git" } -git_ui = { path = "crates/git_ui" } git_hosting_providers = { path = "crates/git_hosting_providers" } +git_ui = { path = "crates/git_ui" } go_to_line = { path = "crates/go_to_line" } google_ai = { path = "crates/google_ai" } gpui = { path = "crates/gpui", default-features = false, features = [ @@ -285,6 +285,7 @@ open_ai = { path = "crates/open_ai" } outline = { path = 
"crates/outline" } outline_panel = { path = "crates/outline_panel" } paths = { path = "crates/paths" } +panel = { path = "crates/panel" } picker = { path = "crates/picker" } plugin = { path = "crates/plugin" } plugin_macros = { path = "crates/plugin_macros" } diff --git a/crates/git_ui/Cargo.toml b/crates/git_ui/Cargo.toml index 701f9a01d7014ee159d774cefcc33d6e15d76167..8a2519b8c007f41b9111fbbb1fe6f85c3b3d9ff7 100644 --- a/crates/git_ui/Cargo.toml +++ b/crates/git_ui/Cargo.toml @@ -22,8 +22,10 @@ futures.workspace = true git.workspace = true gpui.workspace = true language.workspace = true -multi_buffer.workspace = true menu.workspace = true +multi_buffer.workspace = true +panel.workspace = true +picker.workspace = true postage.workspace = true project.workspace = true schemars.workspace = true @@ -35,7 +37,6 @@ theme.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true -picker.workspace = true [target.'cfg(windows)'.dependencies] windows.workspace = true diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 9cf054467d86561ad0d2a38bf7f0c3b043a963ba..49b12c0a3f70dc690483f68a6624cfeef314a3a2 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -16,6 +16,7 @@ use git::{CommitAllChanges, CommitChanges, ToggleStaged}; use gpui::*; use language::Buffer; use menu::{SelectFirst, SelectLast, SelectNext, SelectPrev}; +use panel::PanelHeader; use project::git::{GitEvent, Repository}; use project::{Fs, Project, ProjectPath}; use serde::{Deserialize, Serialize}; @@ -1060,6 +1061,10 @@ impl GitPanel { .style(ButtonStyle::Filled) } + pub fn indent_size(&self, window: &Window, cx: &mut Context) -> Pixels { + Checkbox::container_size(cx).to_pixels(window.rem_size()) + } + pub fn render_divider(&self, _cx: &mut Context) -> impl IntoElement { h_flex() .items_center() @@ -1069,7 +1074,7 @@ impl GitPanel { pub fn render_panel_header( &self, - _window: &mut Window, + window: &mut Window, cx: &mut Context, ) -> impl IntoElement { let all_repositories = self @@ -1089,11 +1094,7 @@ impl GitPanel { n => format!("{} changes", n), }; - h_flex() - .h(px(32.)) - .items_center() - .px_2() - .bg(ElevationIndex::Surface.bg(cx)) + self.panel_header_container(window, cx) .child(h_flex().gap_2().child(if all_repositories.len() <= 1 { div() .id("changes-label") @@ -1304,7 +1305,12 @@ impl GitPanel { ) } - fn render_entries(&self, has_write_access: bool, cx: &mut Context) -> impl IntoElement { + fn render_entries( + &self, + has_write_access: bool, + window: &Window, + cx: &mut Context, + ) -> impl IntoElement { let entry_count = self.entries.len(); v_flex() @@ -1312,19 +1318,26 @@ impl GitPanel { .overflow_hidden() .child( uniform_list(cx.entity().clone(), "entries", entry_count, { - move |this, range, _window, cx| { + move |this, range, window, cx| { let mut items = Vec::with_capacity(range.end - range.start); for ix in range { match &this.entries.get(ix) { Some(GitListEntry::GitStatusEntry(entry)) => { - items.push(this.render_entry(ix, entry, has_write_access, cx)); + items.push(this.render_entry( + ix, + entry, + has_write_access, + window, + cx, + )); } Some(GitListEntry::Header(header)) => { - items.push(this.render_header( + items.push(this.render_list_header( ix, header, has_write_access, + window, cx, )); } @@ -1338,7 +1351,7 @@ impl GitPanel { .with_decoration( ui::indent_guides( cx.entity().clone(), - px(10.0), + self.indent_size(window, cx), IndentGuideColors::panel(cx), |this, range, _windows, _cx| { this.entries @@ 
-1353,12 +1366,9 @@ impl GitPanel { ) .with_render_fn( cx.entity().clone(), - move |_, params, window, cx| { - let left_offset = Checkbox::container_size(cx) - .to_pixels(window.rem_size()) - .half(); - const PADDING_Y: f32 = 4.; + move |_, params, _, _| { let indent_size = params.indent_size; + let left_offset = indent_size - px(3.0); let item_height = params.item_height; params @@ -1369,7 +1379,7 @@ impl GitPanel { let offset = if layout.continues_offscreen { px(0.) } else { - px(PADDING_Y) + px(4.0) }; let bounds = Bounds::new( point( @@ -1405,11 +1415,12 @@ impl GitPanel { Label::new(label.into()).color(color).single_line() } - fn render_header( + fn render_list_header( &self, ix: usize, header: &GitHeaderEntry, has_write_access: bool, + _window: &Window, cx: &Context, ) -> AnyElement { let checkbox = Checkbox::new(header.title(), self.header_state(header.header)) @@ -1420,7 +1431,6 @@ impl GitPanel { div() .w_full() - .px_0p5() .child( ListHeader::new(header.title()) .start_slot(checkbox) @@ -1438,7 +1448,8 @@ impl GitPanel { cx, ) }) - }), + }) + .inset(true), ) .into_any_element() } @@ -1448,6 +1459,7 @@ impl GitPanel { ix: usize, entry: &GitStatusEntry, has_write_access: bool, + window: &Window, cx: &Context, ) -> AnyElement { let display_name = entry @@ -1534,7 +1546,7 @@ impl GitPanel { .child( ListItem::new(id) .indent_level(1) - .indent_step_size(px(10.0)) + .indent_step_size(Checkbox::container_size(cx).to_pixels(window.rem_size())) .spacing(ListItemSpacing::Sparse) .start_slot(start_slot) .toggle_state(selected) @@ -1689,16 +1701,14 @@ impl Render for GitPanel { })) .size_full() .overflow_hidden() - .py_1() .bg(ElevationIndex::Surface.bg(cx)) .child(self.render_panel_header(window, cx)) - .child(self.render_divider(cx)) .child(if has_entries { - self.render_entries(has_write_access, cx).into_any_element() + self.render_entries(has_write_access, window, cx) + .into_any_element() } else { self.render_empty_state(cx).into_any_element() }) - .child(self.render_divider(cx)) .child(self.render_commit_editor(name_and_email, cx)) } } @@ -1761,3 +1771,5 @@ impl Panel for GitPanel { 2 } } + +impl PanelHeader for GitPanel {} diff --git a/crates/panel/Cargo.toml b/crates/panel/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..03db05bb0bff359f94ad0c04b4c4346e6b840b72 --- /dev/null +++ b/crates/panel/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "panel" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +name = "panel" +path = "src/panel.rs" + +[dependencies] +gpui.workspace = true +ui.workspace = true +workspace.workspace = true + +[features] +default = [] diff --git a/crates/panel/LICENSE-GPL b/crates/panel/LICENSE-GPL new file mode 120000 index 0000000000000000000000000000000000000000..89e542f750cd3860a0598eff0dc34b56d7336dc4 --- /dev/null +++ b/crates/panel/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/panel/src/panel.rs b/crates/panel/src/panel.rs new file mode 100644 index 0000000000000000000000000000000000000000..017a362b0ef15f6bcb7adc9dbeaca7541c62d30a --- /dev/null +++ b/crates/panel/src/panel.rs @@ -0,0 +1,66 @@ +//! 
# panel +use gpui::actions; +use ui::{prelude::*, Tab}; + +actions!(panel, [NextPanelTab, PreviousPanelTab]); + +pub trait PanelHeader: workspace::Panel { + fn header_height(&self, cx: &mut App) -> Pixels { + Tab::container_height(cx) + } + + fn panel_header_container(&self, _window: &mut Window, cx: &mut App) -> Div { + h_flex() + .h(self.header_height(cx)) + .w_full() + .px_1() + .flex_none() + .border_b_1() + .border_color(cx.theme().colors().border) + } +} + +/// Implement this trait to enable a panel to have tabs. +pub trait PanelTabs: PanelHeader { + /// Returns the index of the currently selected tab. + fn selected_tab(&self, cx: &mut App) -> usize; + /// Selects the tab at the given index. + fn select_tab(&self, cx: &mut App, index: usize); + /// Moves to the next tab. + fn next_tab(&self, _: NextPanelTab, cx: &mut App) -> Self; + /// Moves to the previous tab. + fn previous_tab(&self, _: PreviousPanelTab, cx: &mut App) -> Self; +} + +#[derive(IntoElement)] +pub struct PanelTab {} + +impl RenderOnce for PanelTab { + fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement { + div() + } +} + +pub fn panel_button(label: impl Into) -> ui::Button { + let label = label.into(); + let id = ElementId::Name(label.clone().to_lowercase().replace(' ', "_").into()); + ui::Button::new(id, label) + .label_size(ui::LabelSize::Small) + .layer(ui::ElevationIndex::Surface) + .size(ui::ButtonSize::Compact) +} + +pub fn panel_filled_button(label: impl Into) -> ui::Button { + panel_button(label).style(ui::ButtonStyle::Filled) +} + +pub fn panel_icon_button(id: impl Into, icon: IconName) -> ui::IconButton { + let id = ElementId::Name(id.into()); + ui::IconButton::new(id, icon) + .layer(ui::ElevationIndex::Surface) + .size(ui::ButtonSize::Compact) +} + +pub fn panel_filled_icon_button(id: impl Into, icon: IconName) -> ui::IconButton { + panel_icon_button(id, icon).style(ui::ButtonStyle::Filled) +} diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 8fd0873d03fd2ee95cea7c7be365509c25d09309..0472d1ce9806695562fecc089293c25cbc5a7b63 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -21,7 +21,8 @@ use client::{ }; use collections::{hash_map, HashMap, HashSet}; use derive_more::{Deref, DerefMut}; -use dock::{Dock, DockPosition, Panel, PanelButtons, PanelHandle, RESIZE_HANDLE_SIZE}; +pub use dock::Panel; +use dock::{Dock, DockPosition, PanelButtons, PanelHandle, RESIZE_HANDLE_SIZE}; use futures::{ channel::{ mpsc::{self, UnboundedReceiver, UnboundedSender}, diff --git a/script/new-crate b/script/new-crate new file mode 100755 index 0000000000000000000000000000000000000000..459384d7aea44b7c8fff3d1b303ea542bbd56a09 --- /dev/null +++ b/script/new-crate @@ -0,0 +1,87 @@ +#!/bin/bash + +# Try to make sure we are in the zed repo root +if [ ! -d "crates" ] || [ ! -d "script" ]; then + echo "Error: Run from the \`zed\` repo root" + exit 1 +fi + +if [ ! -f "Cargo.toml" ]; then + echo "Error: Run from the \`zed\` repo root" + exit 1 +fi + +if [ $# -eq 0 ]; then + echo "Usage: $0 [optional_license_flag]" + exit 1 +fi + +CRATE_NAME="$1" + +LICENSE_FLAG=$(echo "${2}" | tr '[:upper:]' '[:lower:]') +if [[ "$LICENSE_FLAG" == *"apache"* ]]; then + LICENSE_MODE="Apache-2.0" + LICENSE_FILE="LICENSE-APACHE" +elif [[ "$LICENSE_FLAG" == *"agpl"* ]]; then + LICENSE_MODE="AGPL-3.0-or-later" + LICENSE_FILE="LICENSE-AGPL" +else + LICENSE_MODE="GPL-3.0-or-later" + LICENSE_FILE="LICENSE" +fi + +if [[ ! 
"$CRATE_NAME" =~ ^[a-z0-9_]+$ ]]; then + echo "Error: Crate name must be lowercase and contain only alphanumeric characters and underscores" + exit 1 +fi + +CRATE_PATH="crates/$CRATE_NAME" +mkdir -p "$CRATE_PATH/src" + +# Symlink the license +ln -sf "../../../$LICENSE_FILE" "$CRATE_PATH/LICENSE" + +CARGO_TOML_TEMPLATE=$(cat << 'EOF' +[package] +name = "$CRATE_NAME" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "$LICENSE_MODE" + +[lints] +workspace = true + +[lib] +name = "$CRATE_NAME" +path = "src/$CRATE_NAME.rs" + +[dependencies] +anyhow.workspace = true +gpui.workspace = true +ui.workspace = true +util.workspace = true + +# Uncomment other workspace dependencies as needed +# assistant.workspace = true +# client.workspace = true +# project.workspace = true +# settings.workspace = true + +[features] +default = [] +EOF +) + +# Populate template +CARGO_TOML_CONTENT=$(echo "$CARGO_TOML_TEMPLATE" | sed \ + -e "s/\$CRATE_NAME/$CRATE_NAME/g" \ + -e "s/\$LICENSE_MODE/$LICENSE_MODE/g") + +echo "$CARGO_TOML_CONTENT" > "$CRATE_PATH/Cargo.toml" + +echo "//! # $CRATE_NAME" > "$CRATE_PATH/src/$CRATE_NAME.rs" + +echo "Created new crate: $CRATE_NAME in $CRATE_PATH" +echo "License: $LICENSE_MODE (symlinked from $LICENSE_FILE)" +echo "Don't forget to add the new crate to the workspace!" From 971a91ced789f45d0196350863f7ad442dd6947a Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 5 Feb 2025 12:13:32 -0700 Subject: [PATCH 037/130] Commit All Mode (#24293) - **Base diffs on uncommitted changes** - **Show added files in project diff view** - **Fix git panel optimism** - **boop** - **Co-Authored-By: Cole ** - **Fix commit (all) buttons state** - **WIP** - **WIP: commit all mode** Closes #ISSUE Release Notes: - N/A --- assets/icons/circle.svg | 1 + crates/git_ui/src/git_panel.rs | 199 +++++++++++++++-------------- crates/ui/src/components/icon.rs | 1 + crates/ui/src/components/toggle.rs | 41 +++--- crates/ui/src/traits/toggleable.rs | 18 +-- 5 files changed, 142 insertions(+), 118 deletions(-) create mode 100644 assets/icons/circle.svg diff --git a/assets/icons/circle.svg b/assets/icons/circle.svg new file mode 100644 index 0000000000000000000000000000000000000000..67306cb12a7133d0b6bcf8c589bc9f8bec994998 --- /dev/null +++ b/assets/icons/circle.svg @@ -0,0 +1 @@ + diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 49b12c0a3f70dc690483f68a6624cfeef314a3a2..19028168dd21ba745175a1842cf4b3145429293e 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -75,15 +75,15 @@ struct SerializedGitPanel { #[derive(Debug, PartialEq, Eq, Clone, Copy)] enum Section { - Changed, - Created, + Tracked, + New, } impl Section { pub fn contains(&self, status: FileStatus) -> bool { match self { - Section::Changed => !status.is_created(), - Section::Created => status.is_created(), + Section::Tracked => !status.is_created(), + Section::New => status.is_created(), } } } @@ -99,8 +99,8 @@ impl GitHeaderEntry { } pub fn title(&self) -> &'static str { match self.header { - Section::Changed => "Changed", - Section::Created => "New", + Section::Tracked => "Changed", + Section::New => "New", } } } @@ -112,9 +112,9 @@ enum GitListEntry { } impl GitListEntry { - fn status_entry(&self) -> Option { + fn status_entry(&self) -> Option<&GitStatusEntry> { match self { - GitListEntry::GitStatusEntry(entry) => Some(entry.clone()), + GitListEntry::GitStatusEntry(entry) => Some(entry), _ => None, } } @@ -129,7 +129,7 @@ pub struct GitStatusEntry { 
pub(crate) is_staged: Option, } -pub struct PendingOperation { +struct PendingOperation { finished: bool, will_become_staged: bool, repo_paths: HashSet, @@ -158,8 +158,11 @@ pub struct GitPanel { pending: Vec, commit_task: Task>, commit_pending: bool, - can_commit: bool, - can_commit_all: bool, + + tracked_staged_count: usize, + tracked_count: usize, + new_staged_count: usize, + new_count: usize, } fn commit_message_editor( @@ -272,8 +275,10 @@ impl GitPanel { commit_editor, project, workspace, - can_commit: false, - can_commit_all: false, + tracked_staged_count: 0, + tracked_count: 0, + new_staged_count: 0, + new_count: 0, }; git_panel.schedule_update(false, window, cx); git_panel.show_scrollbar = git_panel.should_show_scrollbar(cx); @@ -579,7 +584,7 @@ impl GitPanel { section.contains(&status_entry) && status_entry.is_staged != Some(goal_staged_state) }) - .map(|status_entry| status_entry.repo_path) + .map(|status_entry| status_entry.repo_path.clone()) .collect::>(); (goal_staged_state, entries) @@ -593,10 +598,12 @@ impl GitPanel { repo_paths: repo_paths.iter().cloned().collect(), finished: false, }); + let repo_paths = repo_paths.clone(); + let active_repository = active_repository.clone(); + self.update_counts(); + cx.notify(); cx.spawn({ - let repo_paths = repo_paths.clone(); - let active_repository = active_repository.clone(); |this, mut cx| async move { let result = cx .update(|cx| { @@ -673,7 +680,8 @@ impl GitPanel { let Some(active_repository) = self.active_repository.clone() else { return; }; - if !self.can_commit { + if !self.has_staged_changes() { + self.commit_tracked_changes(&Default::default(), name_and_email, window, cx); return; } let message = self.commit_editor.read(cx).text(cx); @@ -716,7 +724,7 @@ impl GitPanel { let Some(active_repository) = self.active_repository.clone() else { return; }; - if !self.can_commit_all { + if !self.has_staged_changes() || !self.has_tracked_changes() { return; } @@ -731,10 +739,10 @@ impl GitPanel { .iter() .filter_map(|entry| entry.status_entry()) .filter(|status_entry| { - Section::Changed.contains(status_entry.status) + Section::Tracked.contains(status_entry.status) && !status_entry.is_staged.unwrap_or(false) }) - .map(|status_entry| status_entry.repo_path) + .map(|status_entry| status_entry.repo_path.clone()) .collect::>(); self.commit_task = cx.spawn_in(window, |git_panel, mut cx| async move { @@ -911,10 +919,6 @@ impl GitPanel { let repo = repo.read(cx); let path_set = HashSet::from_iter(repo.status().map(|entry| entry.repo_path)); - let mut has_changed_checked_boxes = false; - let mut has_changed = false; - let mut has_added_checked_boxes = false; - // Second pass - create entries with proper depth calculation for entry in repo.status() { let (depth, difference) = @@ -951,15 +955,8 @@ impl GitPanel { }; if is_new { - if entry.is_staged != Some(false) { - has_added_checked_boxes = true - } new_entries.push(entry); } else { - has_changed = true; - if entry.is_staged != Some(false) { - has_changed_checked_boxes = true - } changed_entries.push(entry); } } @@ -970,7 +967,7 @@ impl GitPanel { if changed_entries.len() > 0 { self.entries.push(GitListEntry::Header(GitHeaderEntry { - header: Section::Changed, + header: Section::Tracked, })); self.entries.extend( changed_entries @@ -980,7 +977,7 @@ impl GitPanel { } if new_entries.len() > 0 { self.entries.push(GitListEntry::Header(GitHeaderEntry { - header: Section::Created, + header: Section::New, })); self.entries .extend(new_entries.into_iter().map(GitListEntry::GitStatusEntry)); @@ 
-988,39 +985,62 @@ impl GitPanel { for (ix, entry) in self.entries.iter().enumerate() { if let Some(status_entry) = entry.status_entry() { - self.entries_by_path.insert(status_entry.repo_path, ix); + self.entries_by_path + .insert(status_entry.repo_path.clone(), ix); } } - self.can_commit = has_changed_checked_boxes || has_added_checked_boxes; - self.can_commit_all = has_changed || has_added_checked_boxes; + self.update_counts(); self.select_first_entry_if_none(cx); cx.notify(); } - fn header_state(&self, header_type: Section) -> ToggleState { - let mut count = 0; - let mut staged_count = 0; - 'outer: for entry in &self.entries { - let Some(entry) = entry.status_entry() else { + fn update_counts(&mut self) { + self.new_count = 0; + self.tracked_count = 0; + self.new_staged_count = 0; + self.tracked_staged_count = 0; + for entry in &self.entries { + let Some(status_entry) = entry.status_entry() else { continue; }; - if entry.status.is_created() != (header_type == Section::Created) { - continue; - } - count += 1; - for pending in self.pending.iter().rev() { - if pending.repo_paths.contains(&entry.repo_path) { - if pending.will_become_staged { - staged_count += 1; - } - continue 'outer; + if status_entry.status.is_created() { + self.new_count += 1; + if self.entry_appears_staged(status_entry) != Some(false) { + self.new_staged_count += 1; + } + } else { + self.tracked_count += 1; + if self.entry_appears_staged(status_entry) != Some(false) { + self.tracked_staged_count += 1; } } - staged_count += entry.status.is_staged().unwrap_or(false) as usize; } + } + fn entry_appears_staged(&self, entry: &GitStatusEntry) -> Option { + for pending in self.pending.iter().rev() { + if pending.repo_paths.contains(&entry.repo_path) { + return Some(pending.will_become_staged); + } + } + entry.is_staged + } + + fn has_staged_changes(&self) -> bool { + self.tracked_staged_count > 0 || self.new_staged_count > 0 + } + + fn has_tracked_changes(&self) -> bool { + self.tracked_count > 0 + } + + fn header_state(&self, header_type: Section) -> ToggleState { + let (staged_count, count) = match header_type { + Section::New => (self.new_staged_count, self.new_count), + Section::Tracked => (self.tracked_staged_count, self.tracked_count), + }; if staged_count == 0 { ToggleState::Unselected } else if count == staged_count { @@ -1157,25 +1177,27 @@ impl GitPanel { cx: &Context, ) -> impl IntoElement { let editor = self.commit_editor.clone(); - let can_commit = !self.commit_pending && self.can_commit && !editor.read(cx).is_empty(cx); - let can_commit_all = - !self.commit_pending && self.can_commit_all && !editor.read(cx).is_empty(cx); + let can_commit = + (self.has_staged_changes() || self.has_tracked_changes()) && !self.commit_pending; let editor_focus_handle = editor.read(cx).focus_handle(cx).clone(); let focus_handle_1 = self.focus_handle(cx).clone(); - let focus_handle_2 = self.focus_handle(cx).clone(); + let tooltip = if self.has_staged_changes() { + "Commit staged changes" + } else { + "Commit changes to tracked files" + }; + let title = if self.has_staged_changes() { + "Commit" + } else { + "Commit All" + }; - let commit_staged_button = self - .panel_button("commit-staged-changes", "Commit") + let commit_button = self + .panel_button("commit-changes", title) .tooltip(move |window, cx| { let focus_handle = focus_handle_1.clone(); - Tooltip::for_action_in( - "Commit all staged changes", - &CommitChanges, - &focus_handle, - window, - cx, - ) + Tooltip::for_action_in(tooltip, &CommitChanges, &focus_handle, window, cx) }) 
.disabled(!can_commit) .on_click({ @@ -1185,31 +1207,6 @@ impl GitPanel { }) }); - let commit_all_button = self - .panel_button("commit-all-changes", "Commit All") - .tooltip(move |window, cx| { - let focus_handle = focus_handle_2.clone(); - Tooltip::for_action_in( - "Commit all changes, including unstaged changes", - &CommitAllChanges, - &focus_handle, - window, - cx, - ) - }) - .disabled(!can_commit_all) - .on_click({ - let name_and_email = name_and_email.clone(); - cx.listener(move |this, _: &ClickEvent, window, cx| { - this.commit_tracked_changes( - &CommitAllChanges, - name_and_email.clone(), - window, - cx, - ) - }) - }); - div().w_full().h(px(140.)).px_2().pt_1().pb_2().child( v_flex() .id("commit-editor-container") @@ -1229,8 +1226,7 @@ impl GitPanel { .right_3() .gap_1p5() .child(div().gap_1().flex_grow()) - .child(commit_all_button) - .child(commit_staged_button), + .child(commit_button), ), ) } @@ -1423,8 +1419,17 @@ impl GitPanel { _window: &Window, cx: &Context, ) -> AnyElement { - let checkbox = Checkbox::new(header.title(), self.header_state(header.header)) + let header_state = if self.has_staged_changes() { + self.header_state(header.header) + } else { + match header.header { + Section::Tracked => ToggleState::Selected, + Section::New => ToggleState::Unselected, + } + }; + let checkbox = Checkbox::new(header.title(), header_state) .disabled(!has_write_access) + .placeholder(!self.has_staged_changes()) .fill() .elevation(ElevationIndex::Surface); let selected = self.selected_entry == Some(ix); @@ -1507,14 +1512,19 @@ impl GitPanel { let id: ElementId = ElementId::Name(format!("entry_{}", display_name).into()); - let is_staged = pending + let mut is_staged = pending .or_else(|| entry.is_staged) .map(ToggleState::from) .unwrap_or(ToggleState::Indeterminate); + if !self.has_staged_changes() && !entry.status.is_created() { + is_staged = ToggleState::Selected; + } + let checkbox = Checkbox::new(id, is_staged) .disabled(!has_write_access) .fill() + .placeholder(!self.has_staged_changes()) .elevation(ElevationIndex::Surface) .on_click({ let entry = entry.clone(); @@ -1532,6 +1542,7 @@ impl GitPanel { .id(("start-slot", ix)) .gap(DynamicSpacing::Base04.rems(cx)) .child(checkbox) + .tooltip(|window, cx| Tooltip::for_action("Stage File", &ToggleStaged, window, cx)) .child(git_status_icon(status, cx)) .on_mouse_down(MouseButton::Left, |_, _, cx| { // prevent the list item active state triggering when toggling checkbox diff --git a/crates/ui/src/components/icon.rs b/crates/ui/src/components/icon.rs index a3e2c1897af3ebccab530122bc649e1cbf08839b..ba4878898b140029e7962c1bfc1679f2dffb3a01 100644 --- a/crates/ui/src/components/icon.rs +++ b/crates/ui/src/components/icon.rs @@ -164,6 +164,7 @@ pub enum IconName { ChevronRight, ChevronUp, ChevronUpDown, + Circle, Close, Code, Command, diff --git a/crates/ui/src/components/toggle.rs b/crates/ui/src/components/toggle.rs index 008edd1f070f65f8e52395450adae880ea85a7e5..0413891811d4baf07d243233db50d2a42b8d3cff 100644 --- a/crates/ui/src/components/toggle.rs +++ b/crates/ui/src/components/toggle.rs @@ -43,6 +43,7 @@ pub struct Checkbox { id: ElementId, toggle_state: ToggleState, disabled: bool, + placeholder: bool, on_click: Option>, filled: bool, style: ToggleStyle, @@ -62,6 +63,7 @@ impl Checkbox { style: ToggleStyle::default(), tooltip: None, label: None, + placeholder: false, } } @@ -71,6 +73,12 @@ impl Checkbox { self } + /// Sets the disabled state of the [`Checkbox`]. 
+ pub fn placeholder(mut self, placeholder: bool) -> Self { + self.placeholder = placeholder; + self + } + /// Binds a handler to the [`Checkbox`] that will be called when clicked. pub fn on_click( mut self, @@ -145,23 +153,26 @@ impl Checkbox { impl RenderOnce for Checkbox { fn render(self, _: &mut Window, cx: &mut App) -> impl IntoElement { let group_id = format!("checkbox_group_{:?}", self.id); + let color = if self.disabled { + Color::Disabled + } else if self.placeholder { + Color::Placeholder + } else { + Color::Selected + }; let icon = match self.toggle_state { - ToggleState::Selected => Some(Icon::new(IconName::Check).size(IconSize::Small).color( - if self.disabled { - Color::Disabled - } else { - Color::Selected - }, - )), - ToggleState::Indeterminate => Some( - Icon::new(IconName::Dash) + ToggleState::Selected => Some(if self.placeholder { + Icon::new(IconName::Circle) + .size(IconSize::XSmall) + .color(color) + } else { + Icon::new(IconName::Check) .size(IconSize::Small) - .color(if self.disabled { - Color::Disabled - } else { - Color::Selected - }), - ), + .color(color) + }), + ToggleState::Indeterminate => { + Some(Icon::new(IconName::Dash).size(IconSize::Small).color(color)) + } ToggleState::Unselected => None, }; diff --git a/crates/ui/src/traits/toggleable.rs b/crates/ui/src/traits/toggleable.rs index e73a6cd65897971e50e00cdf225b079d08ab6d1a..8771cedaa70e73ef0b8da4c92b9767afd3ab16f5 100644 --- a/crates/ui/src/traits/toggleable.rs +++ b/crates/ui/src/traits/toggleable.rs @@ -58,12 +58,12 @@ impl From for ToggleState { } } -impl From> for ToggleState { - fn from(selected: Option) -> Self { - match selected { - Some(true) => Self::Selected, - Some(false) => Self::Unselected, - None => Self::Unselected, - } - } -} +// impl From> for ToggleState { +// fn from(selected: Option) -> Self { +// match selected { +// Some(true) => Self::Selected, +// Some(false) => Self::Unselected, +// None => Self::Unselected, +// } +// } +// } From 9369b724757b92169a3ed98c6715d7670071d1d8 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 5 Feb 2025 12:13:54 -0700 Subject: [PATCH 038/130] Delete old project diff code (#24299) Closes #ISSUE Co-Authored-By: Mikayla Release Notes: - N/A --- crates/editor/src/git/project_diff.rs | 1296 ------------------------- 1 file changed, 1296 deletions(-) delete mode 100644 crates/editor/src/git/project_diff.rs diff --git a/crates/editor/src/git/project_diff.rs b/crates/editor/src/git/project_diff.rs deleted file mode 100644 index 8420aa99806a655b4e67e9aa59bf8dba39c167e9..0000000000000000000000000000000000000000 --- a/crates/editor/src/git/project_diff.rs +++ /dev/null @@ -1,1296 +0,0 @@ -use std::{ - any::{Any, TypeId}, - cmp::Ordering, - collections::HashSet, - ops::Range, - time::Duration, -}; - -use anyhow::{anyhow, Context as _}; -use collections::{BTreeMap, HashMap}; -use feature_flags::FeatureFlagAppExt; -use git::diff::{BufferDiff, DiffHunk}; -use gpui::{ - actions, AnyElement, AnyView, App, Entity, EventEmitter, FocusHandle, Focusable, - InteractiveElement, Render, Subscription, Task, WeakEntity, -}; -use language::{Buffer, BufferRow}; -use multi_buffer::{ExcerptId, ExcerptRange, ExpandExcerptDirection, MultiBuffer}; -use project::{Project, ProjectEntryId, ProjectPath, WorktreeId}; -use text::{OffsetRangeExt, ToPoint}; -use theme::ActiveTheme; -use ui::prelude::*; -use util::{paths::compare_paths, ResultExt}; -use workspace::{ - item::{BreadcrumbText, Item, ItemEvent, ItemHandle, TabContentParams}, - ItemNavHistory, ToolbarItemLocation, Workspace, 
-}; - -use crate::{Editor, EditorEvent, DEFAULT_MULTIBUFFER_CONTEXT}; - -actions!(project_diff, [Deploy]); - -pub fn init(cx: &mut App) { - cx.observe_new(ProjectDiffEditor::register).detach(); -} - -const UPDATE_DEBOUNCE: Duration = Duration::from_millis(50); - -struct ProjectDiffEditor { - buffer_changes: BTreeMap>, - entry_order: HashMap>, - excerpts: Entity, - editor: Entity, - - project: Entity, - workspace: WeakEntity, - focus_handle: FocusHandle, - worktree_rescans: HashMap>, - _subscriptions: Vec, -} - -#[derive(Debug)] -struct Changes { - buffer: Entity, - hunks: Vec, -} - -impl ProjectDiffEditor { - fn register( - workspace: &mut Workspace, - _window: Option<&mut Window>, - _: &mut Context, - ) { - workspace.register_action(Self::deploy); - } - - fn deploy( - workspace: &mut Workspace, - _: &Deploy, - window: &mut Window, - cx: &mut Context, - ) { - if !cx.is_staff() { - return; - } - - if let Some(existing) = workspace.item_of_type::(cx) { - workspace.activate_item(&existing, true, true, window, cx); - } else { - let workspace_handle = cx.entity().downgrade(); - let project_diff = - cx.new(|cx| Self::new(workspace.project().clone(), workspace_handle, window, cx)); - workspace.add_item_to_active_pane(Box::new(project_diff), None, true, window, cx); - } - } - - fn new( - project: Entity, - workspace: WeakEntity, - window: &mut Window, - cx: &mut Context, - ) -> Self { - // TODO diff change subscriptions. For that, needed: - // * `-20/+50` stats retrieval: some background process that reacts on file changes - let focus_handle = cx.focus_handle(); - let changed_entries_subscription = - cx.subscribe_in(&project, window, |project_diff_editor, _, e, window, cx| { - let mut worktree_to_rescan = None; - match e { - project::Event::WorktreeAdded(id) => { - worktree_to_rescan = Some(*id); - // project_diff_editor - // .buffer_changes - // .insert(*id, HashMap::default()); - } - project::Event::WorktreeRemoved(id) => { - project_diff_editor.buffer_changes.remove(id); - } - project::Event::WorktreeUpdatedEntries(id, _updated_entries) => { - // TODO cannot invalidate buffer entries without invalidating the corresponding excerpts and order entries. - worktree_to_rescan = Some(*id); - // let entry_changes = - // project_diff_editor.buffer_changes.entry(*id).or_default(); - // for (_, entry_id, change) in updated_entries.iter() { - // let changes = entry_changes.entry(*entry_id); - // match change { - // project::PathChange::Removed => { - // if let hash_map::Entry::Occupied(entry) = changes { - // entry.remove(); - // } - // } - // // TODO understand the invalidation case better: now, we do that but still rescan the entire worktree - // // What if we already have the buffer loaded inside the diff multi buffer and it was edited there? We should not do anything. 
- // _ => match changes { - // hash_map::Entry::Occupied(mut o) => o.get_mut().invalidate(), - // hash_map::Entry::Vacant(v) => { - // v.insert(None); - // } - // }, - // } - // } - } - project::Event::WorktreeUpdatedGitRepositories(id) => { - worktree_to_rescan = Some(*id); - // project_diff_editor.buffer_changes.clear(); - } - project::Event::DeletedEntry(id, _entry_id) => { - worktree_to_rescan = Some(*id); - // if let Some(entries) = project_diff_editor.buffer_changes.get_mut(id) { - // entries.remove(entry_id); - // } - } - project::Event::Closed => { - project_diff_editor.buffer_changes.clear(); - } - _ => {} - } - - if let Some(worktree_to_rescan) = worktree_to_rescan { - project_diff_editor.schedule_worktree_rescan(worktree_to_rescan, window, cx); - } - }); - - let excerpts = cx.new(|cx| MultiBuffer::new(project.read(cx).capability())); - - let editor = cx.new(|cx| { - let mut diff_display_editor = - Editor::for_multibuffer(excerpts.clone(), Some(project.clone()), true, window, cx); - diff_display_editor.set_expand_all_diff_hunks(cx); - diff_display_editor - }); - - let mut new_self = Self { - project, - workspace, - buffer_changes: BTreeMap::default(), - entry_order: HashMap::default(), - worktree_rescans: HashMap::default(), - focus_handle, - editor, - excerpts, - _subscriptions: vec![changed_entries_subscription], - }; - new_self.schedule_rescan_all(window, cx); - new_self - } - - fn schedule_rescan_all(&mut self, window: &mut Window, cx: &mut Context) { - let mut current_worktrees = HashSet::::default(); - for worktree in self.project.read(cx).worktrees(cx).collect::>() { - let worktree_id = worktree.read(cx).id(); - current_worktrees.insert(worktree_id); - self.schedule_worktree_rescan(worktree_id, window, cx); - } - - self.worktree_rescans - .retain(|worktree_id, _| current_worktrees.contains(worktree_id)); - self.buffer_changes - .retain(|worktree_id, _| current_worktrees.contains(worktree_id)); - self.entry_order - .retain(|worktree_id, _| current_worktrees.contains(worktree_id)); - } - - fn schedule_worktree_rescan( - &mut self, - id: WorktreeId, - window: &mut Window, - cx: &mut Context, - ) { - let project = self.project.clone(); - self.worktree_rescans.insert( - id, - cx.spawn_in(window, |project_diff_editor, mut cx| async move { - cx.background_executor().timer(UPDATE_DEBOUNCE).await; - let open_tasks = project - .update(&mut cx, |project, cx| { - let worktree = project.worktree_for_id(id, cx)?; - let snapshot = worktree.read(cx).snapshot(); - let applicable_entries = snapshot - .repositories() - .iter() - .flat_map(|entry| { - entry - .status() - .map(|git_entry| entry.join(git_entry.repo_path)) - }) - .filter_map(|path| { - let id = snapshot.entry_for_path(&path)?.id; - Some(( - id, - ProjectPath { - worktree_id: snapshot.id(), - path: path.into(), - }, - )) - }) - .collect::>(); - Some( - applicable_entries - .into_iter() - .map(|(entry_id, entry_path)| { - let open_task = project.open_path(entry_path.clone(), cx); - (entry_id, entry_path, open_task) - }) - .collect::>(), - ) - }) - .ok() - .flatten() - .unwrap_or_default(); - - let Some((buffers, mut new_entries, change_sets)) = cx - .spawn(|mut cx| async move { - let mut new_entries = Vec::new(); - let mut buffers = HashMap::< - ProjectEntryId, - (text::BufferSnapshot, Entity, BufferDiff), - >::default(); - let mut change_sets = Vec::new(); - for (entry_id, entry_path, open_task) in open_tasks { - let Some(buffer) = open_task - .await - .and_then(|(_, opened_model)| { - opened_model - .downcast::() - .map_err(|_| 
anyhow!("Unexpected non-buffer")) - }) - .with_context(|| { - format!("loading {:?} for git diff", entry_path.path) - }) - .log_err() - else { - continue; - }; - - let Some(change_set) = project - .update(&mut cx, |project, cx| { - project.open_unstaged_changes(buffer.clone(), cx) - })? - .await - .log_err() - else { - continue; - }; - - cx.update(|_, cx| { - buffers.insert( - entry_id, - ( - buffer.read(cx).text_snapshot(), - buffer, - change_set.read(cx).diff_to_buffer.clone(), - ), - ); - })?; - change_sets.push(change_set); - new_entries.push((entry_path, entry_id)); - } - - anyhow::Ok((buffers, new_entries, change_sets)) - }) - .await - .log_err() - else { - return; - }; - - let (new_changes, new_entry_order) = cx - .background_executor() - .spawn(async move { - let mut new_changes = HashMap::::default(); - for (entry_id, (buffer_snapshot, buffer, buffer_diff)) in buffers { - new_changes.insert( - entry_id, - Changes { - buffer, - hunks: buffer_diff - .hunks_in_row_range(0..BufferRow::MAX, &buffer_snapshot) - .collect::>(), - }, - ); - } - - new_entries.sort_by(|(project_path_a, _), (project_path_b, _)| { - compare_paths( - (project_path_a.path.as_ref(), true), - (project_path_b.path.as_ref(), true), - ) - }); - (new_changes, new_entries) - }) - .await; - - project_diff_editor - .update_in(&mut cx, |project_diff_editor, _window, cx| { - project_diff_editor.update_excerpts(id, new_changes, new_entry_order, cx); - project_diff_editor.editor.update(cx, |editor, cx| { - editor.buffer.update(cx, |buffer, cx| { - for change_set in change_sets { - buffer.add_change_set(change_set, cx) - } - }); - }); - }) - .ok(); - }), - ); - } - - fn update_excerpts( - &mut self, - worktree_id: WorktreeId, - new_changes: HashMap, - new_entry_order: Vec<(ProjectPath, ProjectEntryId)>, - - cx: &mut Context, - ) { - if let Some(current_order) = self.entry_order.get(&worktree_id) { - let current_entries = self.buffer_changes.entry(worktree_id).or_default(); - let mut new_order_entries = new_entry_order.iter().fuse().peekable(); - let mut excerpts_to_remove = Vec::new(); - let mut new_excerpt_hunks = BTreeMap::< - ExcerptId, - Vec<(ProjectPath, Entity, Vec>)>, - >::new(); - let mut excerpt_to_expand = - HashMap::<(u32, ExpandExcerptDirection), Vec>::default(); - let mut latest_excerpt_id = ExcerptId::min(); - - for (current_path, current_entry_id) in current_order { - let current_changes = match current_entries.get(current_entry_id) { - Some(current_changes) => { - if current_changes.hunks.is_empty() { - continue; - } - current_changes - } - None => continue, - }; - let buffer_excerpts = self - .excerpts - .read(cx) - .excerpts_for_buffer(¤t_changes.buffer, cx); - let last_current_excerpt_id = - buffer_excerpts.last().map(|(excerpt_id, _)| *excerpt_id); - let mut current_excerpts = buffer_excerpts.into_iter().fuse().peekable(); - loop { - match new_order_entries.peek() { - Some((new_path, new_entry)) => { - match compare_paths( - (current_path.path.as_ref(), true), - (new_path.path.as_ref(), true), - ) { - Ordering::Less => { - excerpts_to_remove - .extend(current_excerpts.map(|(excerpt_id, _)| excerpt_id)); - break; - } - Ordering::Greater => { - if let Some(new_changes) = new_changes.get(new_entry) { - if !new_changes.hunks.is_empty() { - let hunks = new_excerpt_hunks - .entry(latest_excerpt_id) - .or_default(); - match hunks.binary_search_by(|(probe, ..)| { - compare_paths( - (new_path.path.as_ref(), true), - (probe.path.as_ref(), true), - ) - }) { - Ok(i) => hunks[i].2.extend( - new_changes - .hunks - 
.iter() - .map(|hunk| hunk.buffer_range.clone()), - ), - Err(i) => hunks.insert( - i, - ( - new_path.clone(), - new_changes.buffer.clone(), - new_changes - .hunks - .iter() - .map(|hunk| hunk.buffer_range.clone()) - .collect(), - ), - ), - } - } - }; - let _ = new_order_entries.next(); - } - Ordering::Equal => { - match new_changes.get(new_entry) { - Some(new_changes) => { - let buffer_snapshot = - new_changes.buffer.read(cx).snapshot(); - let mut current_hunks = - current_changes.hunks.iter().fuse().peekable(); - let mut new_hunks_unchanged = - Vec::with_capacity(new_changes.hunks.len()); - let mut new_hunks_with_updates = - Vec::with_capacity(new_changes.hunks.len()); - 'new_changes: for new_hunk in &new_changes.hunks { - loop { - match current_hunks.peek() { - Some(current_hunk) => { - match ( - current_hunk - .buffer_range - .start - .cmp( - &new_hunk - .buffer_range - .start, - &buffer_snapshot, - ), - current_hunk.buffer_range.end.cmp( - &new_hunk.buffer_range.end, - &buffer_snapshot, - ), - ) { - ( - Ordering::Equal, - Ordering::Equal, - ) => { - new_hunks_unchanged - .push(new_hunk); - let _ = current_hunks.next(); - continue 'new_changes; - } - (Ordering::Equal, _) - | (_, Ordering::Equal) => { - new_hunks_with_updates - .push(new_hunk); - continue 'new_changes; - } - ( - Ordering::Less, - Ordering::Greater, - ) - | ( - Ordering::Greater, - Ordering::Less, - ) => { - new_hunks_with_updates - .push(new_hunk); - continue 'new_changes; - } - ( - Ordering::Less, - Ordering::Less, - ) => { - if current_hunk - .buffer_range - .start - .cmp( - &new_hunk - .buffer_range - .end, - &buffer_snapshot, - ) - .is_le() - { - new_hunks_with_updates - .push(new_hunk); - continue 'new_changes; - } else { - let _ = - current_hunks.next(); - } - } - ( - Ordering::Greater, - Ordering::Greater, - ) => { - if current_hunk - .buffer_range - .end - .cmp( - &new_hunk - .buffer_range - .start, - &buffer_snapshot, - ) - .is_ge() - { - new_hunks_with_updates - .push(new_hunk); - continue 'new_changes; - } else { - let _ = - current_hunks.next(); - } - } - } - } - None => { - new_hunks_with_updates.push(new_hunk); - continue 'new_changes; - } - } - } - } - - let mut excerpts_with_new_changes = - HashSet::::default(); - 'new_hunks: for new_hunk in new_hunks_with_updates { - loop { - match current_excerpts.peek() { - Some(( - current_excerpt_id, - current_excerpt_range, - )) => { - match ( - current_excerpt_range - .context - .start - .cmp( - &new_hunk - .buffer_range - .start, - &buffer_snapshot, - ), - current_excerpt_range - .context - .end - .cmp( - &new_hunk.buffer_range.end, - &buffer_snapshot, - ), - ) { - ( - Ordering::Less - | Ordering::Equal, - Ordering::Greater - | Ordering::Equal, - ) => { - excerpts_with_new_changes - .insert( - *current_excerpt_id, - ); - continue 'new_hunks; - } - ( - Ordering::Greater - | Ordering::Equal, - Ordering::Less - | Ordering::Equal, - ) => { - let expand_up = current_excerpt_range - .context - .start - .to_point(&buffer_snapshot) - .row - .saturating_sub( - new_hunk - .buffer_range - .start - .to_point(&buffer_snapshot) - .row, - ); - let expand_down = new_hunk - .buffer_range - .end - .to_point(&buffer_snapshot) - .row - .saturating_sub( - current_excerpt_range - .context - .end - .to_point( - &buffer_snapshot, - ) - .row, - ); - excerpt_to_expand.entry((expand_up.max(expand_down).max(DEFAULT_MULTIBUFFER_CONTEXT), ExpandExcerptDirection::UpAndDown)).or_default().push(*current_excerpt_id); - excerpts_with_new_changes - .insert( - *current_excerpt_id, - ); - continue 
'new_hunks; - } - ( - Ordering::Less, - Ordering::Less, - ) => { - if current_excerpt_range - .context - .start - .cmp( - &new_hunk - .buffer_range - .end, - &buffer_snapshot, - ) - .is_le() - { - let expand_up = current_excerpt_range - .context - .start - .to_point(&buffer_snapshot) - .row - .saturating_sub( - new_hunk.buffer_range - .start - .to_point( - &buffer_snapshot, - ) - .row, - ); - excerpt_to_expand.entry((expand_up.max(DEFAULT_MULTIBUFFER_CONTEXT), ExpandExcerptDirection::Up)).or_default().push(*current_excerpt_id); - excerpts_with_new_changes - .insert( - *current_excerpt_id, - ); - continue 'new_hunks; - } else { - if !new_changes - .hunks - .is_empty() - { - let hunks = new_excerpt_hunks - .entry(latest_excerpt_id) - .or_default(); - match hunks.binary_search_by(|(probe, ..)| { - compare_paths( - (new_path.path.as_ref(), true), - (probe.path.as_ref(), true), - ) - }) { - Ok(i) => hunks[i].2.extend( - new_changes - .hunks - .iter() - .map(|hunk| hunk.buffer_range.clone()), - ), - Err(i) => hunks.insert( - i, - ( - new_path.clone(), - new_changes.buffer.clone(), - new_changes - .hunks - .iter() - .map(|hunk| hunk.buffer_range.clone()) - .collect(), - ), - ), - } - } - continue 'new_hunks; - } - } - /* TODO remove or leave? - [ ><<<<<<<--]----<-- - cur_s > cur_e < - > < - new_s>>>>>>>>< - */ - ( - Ordering::Greater, - Ordering::Greater, - ) => { - if current_excerpt_range - .context - .end - .cmp( - &new_hunk - .buffer_range - .start, - &buffer_snapshot, - ) - .is_ge() - { - let expand_down = new_hunk - .buffer_range - .end - .to_point(&buffer_snapshot) - .row - .saturating_sub( - current_excerpt_range - .context - .end - .to_point( - &buffer_snapshot, - ) - .row, - ); - excerpt_to_expand.entry((expand_down.max(DEFAULT_MULTIBUFFER_CONTEXT), ExpandExcerptDirection::Down)).or_default().push(*current_excerpt_id); - excerpts_with_new_changes - .insert( - *current_excerpt_id, - ); - continue 'new_hunks; - } else { - latest_excerpt_id = - *current_excerpt_id; - let _ = - current_excerpts.next(); - } - } - } - } - None => { - let hunks = new_excerpt_hunks - .entry(latest_excerpt_id) - .or_default(); - match hunks.binary_search_by( - |(probe, ..)| { - compare_paths( - ( - new_path.path.as_ref(), - true, - ), - (probe.path.as_ref(), true), - ) - }, - ) { - Ok(i) => hunks[i].2.extend( - new_changes.hunks.iter().map( - |hunk| { - hunk.buffer_range - .clone() - }, - ), - ), - Err(i) => hunks.insert( - i, - ( - new_path.clone(), - new_changes.buffer.clone(), - new_changes - .hunks - .iter() - .map(|hunk| { - hunk.buffer_range - .clone() - }) - .collect(), - ), - ), - } - continue 'new_hunks; - } - } - } - } - - for (excerpt_id, excerpt_range) in current_excerpts { - if !excerpts_with_new_changes.contains(&excerpt_id) - && !new_hunks_unchanged.iter().any(|hunk| { - excerpt_range - .context - .start - .cmp( - &hunk.buffer_range.end, - &buffer_snapshot, - ) - .is_le() - && excerpt_range - .context - .end - .cmp( - &hunk.buffer_range.start, - &buffer_snapshot, - ) - .is_ge() - }) - { - excerpts_to_remove.push(excerpt_id); - } - latest_excerpt_id = excerpt_id; - } - } - None => excerpts_to_remove.extend( - current_excerpts.map(|(excerpt_id, _)| excerpt_id), - ), - } - let _ = new_order_entries.next(); - break; - } - } - } - None => { - excerpts_to_remove - .extend(current_excerpts.map(|(excerpt_id, _)| excerpt_id)); - break; - } - } - } - latest_excerpt_id = last_current_excerpt_id.unwrap_or(latest_excerpt_id); - } - - for (path, project_entry_id) in new_order_entries { - if let Some(changes) = 
new_changes.get(project_entry_id) { - if !changes.hunks.is_empty() { - let hunks = new_excerpt_hunks.entry(latest_excerpt_id).or_default(); - match hunks.binary_search_by(|(probe, ..)| { - compare_paths((path.path.as_ref(), true), (probe.path.as_ref(), true)) - }) { - Ok(i) => hunks[i] - .2 - .extend(changes.hunks.iter().map(|hunk| hunk.buffer_range.clone())), - Err(i) => hunks.insert( - i, - ( - path.clone(), - changes.buffer.clone(), - changes - .hunks - .iter() - .map(|hunk| hunk.buffer_range.clone()) - .collect(), - ), - ), - } - } - } - } - - self.excerpts.update(cx, |multi_buffer, cx| { - for (mut after_excerpt_id, excerpts_to_add) in new_excerpt_hunks { - for (_, buffer, hunk_ranges) in excerpts_to_add { - let buffer_snapshot = buffer.read(cx).snapshot(); - let max_point = buffer_snapshot.max_point(); - let new_excerpts = multi_buffer.insert_excerpts_after( - after_excerpt_id, - buffer, - hunk_ranges.into_iter().map(|range| { - let mut extended_point_range = range.to_point(&buffer_snapshot); - extended_point_range.start.row = extended_point_range - .start - .row - .saturating_sub(DEFAULT_MULTIBUFFER_CONTEXT); - extended_point_range.end.row = (extended_point_range.end.row - + DEFAULT_MULTIBUFFER_CONTEXT) - .min(max_point.row); - ExcerptRange { - context: extended_point_range, - primary: None, - } - }), - cx, - ); - after_excerpt_id = new_excerpts.last().copied().unwrap_or(after_excerpt_id); - } - } - multi_buffer.remove_excerpts(excerpts_to_remove, cx); - for ((line_count, direction), excerpts) in excerpt_to_expand { - multi_buffer.expand_excerpts(excerpts, line_count, direction, cx); - } - }); - } else { - self.excerpts.update(cx, |multi_buffer, cx| { - for new_changes in new_entry_order - .iter() - .filter_map(|(_, entry_id)| new_changes.get(entry_id)) - { - multi_buffer.push_excerpts_with_context_lines( - new_changes.buffer.clone(), - new_changes - .hunks - .iter() - .map(|hunk| hunk.buffer_range.clone()) - .collect(), - DEFAULT_MULTIBUFFER_CONTEXT, - cx, - ); - } - }); - }; - - let mut new_changes = new_changes; - let mut new_entry_order = new_entry_order; - std::mem::swap( - self.buffer_changes.entry(worktree_id).or_default(), - &mut new_changes, - ); - std::mem::swap( - self.entry_order.entry(worktree_id).or_default(), - &mut new_entry_order, - ); - } -} - -impl EventEmitter for ProjectDiffEditor {} - -impl Focusable for ProjectDiffEditor { - fn focus_handle(&self, _: &App) -> FocusHandle { - self.focus_handle.clone() - } -} - -impl Item for ProjectDiffEditor { - type Event = EditorEvent; - - fn to_item_events(event: &EditorEvent, f: impl FnMut(ItemEvent)) { - Editor::to_item_events(event, f) - } - - fn deactivated(&mut self, window: &mut Window, cx: &mut Context) { - self.editor - .update(cx, |editor, cx| editor.deactivated(window, cx)); - } - - fn navigate( - &mut self, - data: Box, - window: &mut Window, - cx: &mut Context, - ) -> bool { - self.editor - .update(cx, |editor, cx| editor.navigate(data, window, cx)) - } - - fn tab_tooltip_text(&self, _: &App) -> Option { - Some("Project Diff".into()) - } - - fn tab_content(&self, params: TabContentParams, _window: &Window, _: &App) -> AnyElement { - if self.buffer_changes.is_empty() { - Label::new("No changes") - .color(if params.selected { - Color::Default - } else { - Color::Muted - }) - .into_any_element() - } else { - h_flex() - .gap_1() - .when(true, |then| { - then.child( - h_flex() - .gap_1() - .child(Icon::new(IconName::XCircle).color(Color::Error)) - .child(Label::new(self.buffer_changes.len().to_string()).color( - if 
params.selected { - Color::Default - } else { - Color::Muted - }, - )), - ) - }) - .when(true, |then| { - then.child( - h_flex() - .gap_1() - .child(Icon::new(IconName::Indicator).color(Color::Warning)) - .child(Label::new(self.buffer_changes.len().to_string()).color( - if params.selected { - Color::Default - } else { - Color::Muted - }, - )), - ) - }) - .into_any_element() - } - } - - fn telemetry_event_text(&self) -> Option<&'static str> { - Some("Project Diagnostics Opened") - } - - fn for_each_project_item( - &self, - cx: &App, - f: &mut dyn FnMut(gpui::EntityId, &dyn project::ProjectItem), - ) { - self.editor.for_each_project_item(cx, f) - } - - fn is_singleton(&self, _: &App) -> bool { - false - } - - fn set_nav_history( - &mut self, - nav_history: ItemNavHistory, - _: &mut Window, - cx: &mut Context, - ) { - self.editor.update(cx, |editor, _| { - editor.set_nav_history(Some(nav_history)); - }); - } - - fn clone_on_split( - &self, - _workspace_id: Option, - window: &mut Window, - cx: &mut Context, - ) -> Option> - where - Self: Sized, - { - Some(cx.new(|cx| { - ProjectDiffEditor::new(self.project.clone(), self.workspace.clone(), window, cx) - })) - } - - fn is_dirty(&self, cx: &App) -> bool { - self.excerpts.read(cx).is_dirty(cx) - } - - fn has_conflict(&self, cx: &App) -> bool { - self.excerpts.read(cx).has_conflict(cx) - } - - fn can_save(&self, _: &App) -> bool { - true - } - - fn save( - &mut self, - format: bool, - project: Entity, - window: &mut Window, - cx: &mut Context, - ) -> Task> { - self.editor.save(format, project, window, cx) - } - - fn save_as( - &mut self, - _: Entity, - _: ProjectPath, - _window: &mut Window, - _: &mut Context, - ) -> Task> { - unreachable!() - } - - fn reload( - &mut self, - project: Entity, - window: &mut Window, - cx: &mut Context, - ) -> Task> { - self.editor.reload(project, window, cx) - } - - fn act_as_type<'a>( - &'a self, - type_id: TypeId, - self_handle: &'a Entity, - _: &'a App, - ) -> Option { - if type_id == TypeId::of::() { - Some(self_handle.to_any()) - } else if type_id == TypeId::of::() { - Some(self.editor.to_any()) - } else { - None - } - } - - fn breadcrumb_location(&self, _: &App) -> ToolbarItemLocation { - ToolbarItemLocation::PrimaryLeft - } - - fn breadcrumbs(&self, theme: &theme::Theme, cx: &App) -> Option> { - self.editor.breadcrumbs(theme, cx) - } - - fn added_to_workspace( - &mut self, - workspace: &mut Workspace, - window: &mut Window, - cx: &mut Context, - ) { - self.editor.update(cx, |editor, cx| { - editor.added_to_workspace(workspace, window, cx) - }); - } -} - -impl Render for ProjectDiffEditor { - fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { - let child = if self.buffer_changes.is_empty() { - div() - .bg(cx.theme().colors().editor_background) - .flex() - .items_center() - .justify_center() - .size_full() - .child(Label::new("No changes in the workspace")) - } else { - div().size_full().child(self.editor.clone()) - }; - - div() - .track_focus(&self.focus_handle) - .size_full() - .child(child) - } -} - -#[cfg(test)] -mod tests { - use git::status::{StatusCode, TrackedStatus}; - use gpui::{SemanticVersion, TestAppContext, VisualTestContext}; - use project::buffer_store::BufferChangeSet; - use serde_json::json; - use settings::SettingsStore; - use std::{ - ops::Deref as _, - path::{Path, PathBuf}, - }; - - use crate::test::editor_test_context::assert_state_with_diff; - - use super::*; - - // TODO finish - // #[gpui::test] - // async fn randomized_tests(cx: &mut TestAppContext) { - // // 
Create a new project (how?? temp fs?), - // let fs = FakeFs::new(cx.executor()); - // let project = Project::test(fs, [], cx).await; - - // // create random files with random content - - // // Commit it into git somehow (technically can do with "real" fs in a temp dir) - // // - // // Apply randomized changes to the project: select a random file, random change and apply to buffers - // } - - #[gpui::test(iterations = 30)] - async fn simple_edit_test(cx: &mut TestAppContext) { - cx.executor().allow_parking(); - init_test(cx); - - let fs = fs::FakeFs::new(cx.executor().clone()); - fs.insert_tree( - "/root", - json!({ - ".git": {}, - "file_a": "This is file_a", - "file_b": "This is file_b", - }), - ) - .await; - - let project = Project::test(fs.clone(), [Path::new("/root")], cx).await; - let workspace = - cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); - let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx); - - let file_a_editor = workspace - .update(cx, |workspace, window, cx| { - let file_a_editor = - workspace.open_abs_path(PathBuf::from("/root/file_a"), true, window, cx); - ProjectDiffEditor::deploy(workspace, &Deploy, window, cx); - file_a_editor - }) - .unwrap() - .await - .expect("did not open an item at all") - .downcast::() - .expect("did not open an editor for file_a"); - let project_diff_editor = workspace - .update(cx, |workspace, _, cx| { - workspace - .active_pane() - .read(cx) - .items() - .find_map(|item| item.downcast::()) - }) - .unwrap() - .expect("did not find a ProjectDiffEditor"); - project_diff_editor.update(cx, |project_diff_editor, cx| { - assert!( - project_diff_editor.editor.read(cx).text(cx).is_empty(), - "Should have no changes after opening the diff on no git changes" - ); - }); - - let old_text = file_a_editor.update(cx, |editor, cx| editor.text(cx)); - let change = "an edit after git add"; - file_a_editor - .update_in(cx, |file_a_editor, window, cx| { - file_a_editor.insert(change, window, cx); - file_a_editor.save(false, project.clone(), window, cx) - }) - .await - .expect("failed to save a file"); - file_a_editor.update_in(cx, |file_a_editor, _window, cx| { - let change_set = cx.new(|cx| { - BufferChangeSet::new_with_base_text( - &old_text, - &file_a_editor.buffer().read(cx).as_singleton().unwrap(), - cx, - ) - }); - file_a_editor.buffer.update(cx, |buffer, cx| { - buffer.add_change_set(change_set.clone(), cx) - }); - project.update(cx, |project, cx| { - project.buffer_store().update(cx, |buffer_store, cx| { - buffer_store.set_unstaged_change_set( - file_a_editor - .buffer() - .read(cx) - .as_singleton() - .unwrap() - .read(cx) - .remote_id(), - change_set, - ); - }); - }); - }); - fs.set_status_for_repo_via_git_operation( - Path::new("/root/.git"), - &[( - Path::new("file_a"), - TrackedStatus { - worktree_status: StatusCode::Modified, - index_status: StatusCode::Unmodified, - } - .into(), - )], - ); - cx.executor() - .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); - cx.run_until_parked(); - let editor = project_diff_editor.update(cx, |diff_editor, _| diff_editor.editor.clone()); - - assert_state_with_diff( - &editor, - cx, - indoc::indoc! 
{ - " - - This is file_a - + an edit after git addThis is file_aˇ", - }, - ); - } - - fn init_test(cx: &mut gpui::TestAppContext) { - if std::env::var("RUST_LOG").is_ok() { - env_logger::try_init().ok(); - } - - cx.update(|cx| { - assets::Assets.load_test_fonts(cx); - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - theme::init(theme::LoadThemes::JustBase, cx); - release_channel::init(SemanticVersion::default(), cx); - client::init_settings(cx); - language::init(cx); - Project::init_settings(cx); - workspace::init_settings(cx); - crate::init(cx); - cx.set_staff(true); - }); - } -} From 44a7614a7430f9983d16927045e2b7ee727cbf35 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 5 Feb 2025 12:14:02 -0700 Subject: [PATCH 039/130] Fix panic when editing diff (#24298) Release Notes: - N/A --- crates/multi_buffer/src/multi_buffer.rs | 24 ++--- crates/multi_buffer/src/multi_buffer_tests.rs | 101 ++++++++++++++++++ 2 files changed, 110 insertions(+), 15 deletions(-) diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index b523bbb92160108b2658c232214f591b882e36c4..d00c161a8633a1fe304008246cddcca778c394c9 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -1424,24 +1424,19 @@ impl MultiBuffer { cx: &mut Context, ) { let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot()); - let (mut insert_after, excerpt_ids) = - if let Some(existing) = self.buffers_by_path.get(&path) { - (*existing.last().unwrap(), existing.clone()) - } else { - ( - self.buffers_by_path - .range(..path.clone()) - .next_back() - .map(|(_, value)| *value.last().unwrap()) - .unwrap_or(ExcerptId::min()), - Vec::default(), - ) - }; + + let mut insert_after = self + .buffers_by_path + .range(..path.clone()) + .next_back() + .map(|(_, value)| *value.last().unwrap()) + .unwrap_or(ExcerptId::min()); + let existing = self.buffers_by_path.get(&path).cloned().unwrap_or_default(); let (new, _) = build_excerpt_ranges(&buffer_snapshot, &ranges, context_line_count); let mut new_iter = new.into_iter().peekable(); - let mut existing_iter = excerpt_ids.into_iter().peekable(); + let mut existing_iter = existing.into_iter().peekable(); let mut new_excerpt_ids = Vec::new(); let mut to_remove = Vec::new(); @@ -1495,7 +1490,6 @@ impl MultiBuffer { // maybe merge overlapping excerpts? // it's hard to distinguish between a manually expanded excerpt, and one that // got smaller because of a missing diff. - // if existing_start == new.context.start && existing_end == new.context.end { new_excerpt_ids.append(&mut self.insert_excerpts_after( insert_after, diff --git a/crates/multi_buffer/src/multi_buffer_tests.rs b/crates/multi_buffer/src/multi_buffer_tests.rs index 2e13fa4558892f987dc5110cbfc6c18007c3d59f..25c3a4cf9124674b9409dc9789c96297a9978dfa 100644 --- a/crates/multi_buffer/src/multi_buffer_tests.rs +++ b/crates/multi_buffer/src/multi_buffer_tests.rs @@ -1578,6 +1578,107 @@ fn test_repeatedly_expand_a_diff_hunk(cx: &mut TestAppContext) { ); } +#[gpui::test] +fn test_set_excerpts_for_buffer_ordering(cx: &mut TestAppContext) { + let buf1 = cx.new(|cx| { + Buffer::local( + indoc! 
{ + "zero + one + two + two.five + three + four + five + six + seven + eight + nine + ten + eleven + ", + }, + cx, + ) + }); + let path1: PathKey = PathKey::namespaced("0", Path::new("/")); + + let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + path1.clone(), + buf1.clone(), + vec![ + Point::row_range(1..2), + Point::row_range(6..7), + Point::row_range(11..12), + ], + 1, + cx, + ); + }); + + assert_excerpts_match( + &multibuffer, + cx, + indoc! { + "----- + zero + one + two + two.five + ----- + four + five + six + seven + ----- + nine + ten + eleven + " + }, + ); + + buf1.update(cx, |buffer, cx| buffer.edit([(0..5, "")], None, cx)); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + path1.clone(), + buf1.clone(), + vec![ + Point::row_range(0..2), + Point::row_range(5..6), + Point::row_range(10..11), + ], + 1, + cx, + ); + }); + + assert_excerpts_match( + &multibuffer, + cx, + indoc! { + "----- + one + two + two.five + three + ----- + four + five + six + seven + ----- + nine + ten + eleven + " + }, + ); +} + #[gpui::test] fn test_set_excerpts_for_buffer(cx: &mut TestAppContext) { let buf1 = cx.new(|cx| { From 2f5abe2b5a79875bcc847930fd910cf6e5b8bfbf Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 5 Feb 2025 14:30:06 -0500 Subject: [PATCH 040/130] panel: Remove unneeded `lib.name` field in `Cargo.toml` (#24301) This PR removes the `name` field from under `lib` in the `Cargo.toml` file for the `panel` crate, as it isn't necessary. Also removed it from `script/new-crate`. Release Notes: - N/A --- crates/panel/Cargo.toml | 4 ---- script/new-crate | 7 +++---- 2 files changed, 3 insertions(+), 8 deletions(-) diff --git a/crates/panel/Cargo.toml b/crates/panel/Cargo.toml index 03db05bb0bff359f94ad0c04b4c4346e6b840b72..4e7c81804d32b329bbc701b5e068777ab24d4a5b 100644 --- a/crates/panel/Cargo.toml +++ b/crates/panel/Cargo.toml @@ -9,13 +9,9 @@ license = "GPL-3.0-or-later" workspace = true [lib] -name = "panel" path = "src/panel.rs" [dependencies] gpui.workspace = true ui.workspace = true workspace.workspace = true - -[features] -default = [] diff --git a/script/new-crate b/script/new-crate index 459384d7aea44b7c8fff3d1b303ea542bbd56a09..099a9870b4ecfa82441edca4a6ed4afc724c2c89 100755 --- a/script/new-crate +++ b/script/new-crate @@ -53,9 +53,11 @@ license = "$LICENSE_MODE" workspace = true [lib] -name = "$CRATE_NAME" path = "src/$CRATE_NAME.rs" +[features] +default = [] + [dependencies] anyhow.workspace = true gpui.workspace = true @@ -67,9 +69,6 @@ util.workspace = true # client.workspace = true # project.workspace = true # settings.workspace = true - -[features] -default = [] EOF ) From 59738f88c299665fe4e239d3d8d25edad8ee5eb9 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 5 Feb 2025 14:50:24 -0500 Subject: [PATCH 041/130] collab: Store features provided by extensions in the database (#24303) This PR adds new columns to the `extension_versions` table to record which features an extension provides. These `provides_*` columns are populated from the `provides` field on the extension manifest. We'll be able to leverage this data in the future for showing what an extension provides in the extensions UI, as well as allowing to filter by extensions that provide a certain feature. 
Release Notes: - N/A --- .../20221109000000_test_schema.sql | 9 ++++ ..._provides_fields_to_extension_versions.sql | 10 ++++ crates/collab/src/api/extensions.rs | 1 + crates/collab/src/db.rs | 4 +- crates/collab/src/db/queries/extensions.rs | 38 ++++++++++++- .../collab/src/db/tables/extension_version.rs | 54 +++++++++++++++++++ crates/collab/src/db/tests/extension_tests.rs | 28 +++++++++- 7 files changed, 140 insertions(+), 4 deletions(-) create mode 100644 crates/collab/migrations/20250205192813_add_provides_fields_to_extension_versions.sql diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index 185bd45cd3235dcde7d21f83e4234a76cb7979d8..dbd63cf679d666f966c033305d3a17a9e6b45520 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -401,6 +401,15 @@ CREATE TABLE extension_versions ( schema_version INTEGER NOT NULL DEFAULT 0, wasm_api_version TEXT, download_count INTEGER NOT NULL DEFAULT 0, + provides_themes BOOLEAN NOT NULL DEFAULT FALSE, + provides_icon_themes BOOLEAN NOT NULL DEFAULT FALSE, + provides_languages BOOLEAN NOT NULL DEFAULT FALSE, + provides_grammars BOOLEAN NOT NULL DEFAULT FALSE, + provides_language_servers BOOLEAN NOT NULL DEFAULT FALSE, + provides_context_servers BOOLEAN NOT NULL DEFAULT FALSE, + provides_slash_commands BOOLEAN NOT NULL DEFAULT FALSE, + provides_indexed_docs_providers BOOLEAN NOT NULL DEFAULT FALSE, + provides_snippets BOOLEAN NOT NULL DEFAULT FALSE, PRIMARY KEY (extension_id, version) ); diff --git a/crates/collab/migrations/20250205192813_add_provides_fields_to_extension_versions.sql b/crates/collab/migrations/20250205192813_add_provides_fields_to_extension_versions.sql new file mode 100644 index 0000000000000000000000000000000000000000..50dcb0508f35cc69a87bbcbf83fed9809f539b41 --- /dev/null +++ b/crates/collab/migrations/20250205192813_add_provides_fields_to_extension_versions.sql @@ -0,0 +1,10 @@ +alter table extension_versions +add column provides_themes bool not null default false, +add column provides_icon_themes bool not null default false, +add column provides_languages bool not null default false, +add column provides_grammars bool not null default false, +add column provides_language_servers bool not null default false, +add column provides_context_servers bool not null default false, +add column provides_slash_commands bool not null default false, +add column provides_indexed_docs_providers bool not null default false, +add column provides_snippets bool not null default false; diff --git a/crates/collab/src/api/extensions.rs b/crates/collab/src/api/extensions.rs index deeb0bb56e8cce300521e6d73672a8a01ac5c40c..e132acaf0b106c4c93012e968e9aa74f4f51b718 100644 --- a/crates/collab/src/api/extensions.rs +++ b/crates/collab/src/api/extensions.rs @@ -391,6 +391,7 @@ async fn fetch_extension_manifest( repository: manifest.repository, schema_version: manifest.schema_version.unwrap_or(0), wasm_api_version: manifest.wasm_api_version, + provides: manifest.provides, published_at, }) } diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 857c54ac99c73c7f7d2c74006ed11b33a967b3c0..908e488af6dd52686bcf659e4e8e61db6eb4a33a 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -6,10 +6,11 @@ pub mod tests; use crate::{executor::Executor, Error, Result}; use anyhow::anyhow; -use collections::{BTreeMap, HashMap, HashSet}; +use collections::{BTreeMap, 
BTreeSet, HashMap, HashSet}; use dashmap::DashMap; use futures::StreamExt; use rand::{prelude::StdRng, Rng, SeedableRng}; +use rpc::ExtensionProvides; use rpc::{ proto::{self}, ConnectionId, ExtensionMetadata, @@ -781,6 +782,7 @@ pub struct NewExtensionVersion { pub repository: String, pub schema_version: i32, pub wasm_api_version: Option, + pub provides: BTreeSet, pub published_at: PrimitiveDateTime, } diff --git a/crates/collab/src/db/queries/extensions.rs b/crates/collab/src/db/queries/extensions.rs index 51c63956323b9390ddc6413239058a644f2d7556..54f47ae45ee015e80cf916c0d06703f0faff2e01 100644 --- a/crates/collab/src/db/queries/extensions.rs +++ b/crates/collab/src/db/queries/extensions.rs @@ -1,4 +1,3 @@ -use std::collections::BTreeSet; use std::str::FromStr; use chrono::Utc; @@ -283,6 +282,39 @@ impl Database { description: ActiveValue::Set(version.description.clone()), schema_version: ActiveValue::Set(version.schema_version), wasm_api_version: ActiveValue::Set(version.wasm_api_version.clone()), + provides_themes: ActiveValue::Set( + version.provides.contains(&ExtensionProvides::Themes), + ), + provides_icon_themes: ActiveValue::Set( + version.provides.contains(&ExtensionProvides::IconThemes), + ), + provides_languages: ActiveValue::Set( + version.provides.contains(&ExtensionProvides::Languages), + ), + provides_grammars: ActiveValue::Set( + version.provides.contains(&ExtensionProvides::Grammars), + ), + provides_language_servers: ActiveValue::Set( + version + .provides + .contains(&ExtensionProvides::LanguageServers), + ), + provides_context_servers: ActiveValue::Set( + version + .provides + .contains(&ExtensionProvides::ContextServers), + ), + provides_slash_commands: ActiveValue::Set( + version.provides.contains(&ExtensionProvides::SlashCommands), + ), + provides_indexed_docs_providers: ActiveValue::Set( + version + .provides + .contains(&ExtensionProvides::IndexedDocsProviders), + ), + provides_snippets: ActiveValue::Set( + version.provides.contains(&ExtensionProvides::Snippets), + ), download_count: ActiveValue::NotSet, } })) @@ -357,6 +389,8 @@ fn metadata_from_extension_and_version( extension: extension::Model, version: extension_version::Model, ) -> ExtensionMetadata { + let provides = version.provides(); + ExtensionMetadata { id: extension.external_id.into(), manifest: rpc::ExtensionApiManifest { @@ -371,7 +405,7 @@ fn metadata_from_extension_and_version( repository: version.repository, schema_version: Some(version.schema_version), wasm_api_version: version.wasm_api_version, - provides: BTreeSet::default(), + provides, }, published_at: convert_time_to_chrono(version.published_at), diff --git a/crates/collab/src/db/tables/extension_version.rs b/crates/collab/src/db/tables/extension_version.rs index 60e3e5c7daba65eb623602763d026cbf8c376662..8c41c41a65d6d847340643be736329ec64dfc2aa 100644 --- a/crates/collab/src/db/tables/extension_version.rs +++ b/crates/collab/src/db/tables/extension_version.rs @@ -1,4 +1,6 @@ use crate::db::ExtensionId; +use collections::BTreeSet; +use rpc::ExtensionProvides; use sea_orm::entity::prelude::*; use time::PrimitiveDateTime; @@ -16,6 +18,58 @@ pub struct Model { pub schema_version: i32, pub wasm_api_version: Option, pub download_count: i64, + pub provides_themes: bool, + pub provides_icon_themes: bool, + pub provides_languages: bool, + pub provides_grammars: bool, + pub provides_language_servers: bool, + pub provides_context_servers: bool, + pub provides_slash_commands: bool, + pub provides_indexed_docs_providers: bool, + pub provides_snippets: 
bool, +} + +impl Model { + pub fn provides(&self) -> BTreeSet { + let mut provides = BTreeSet::default(); + if self.provides_themes { + provides.insert(ExtensionProvides::Themes); + } + + if self.provides_icon_themes { + provides.insert(ExtensionProvides::IconThemes); + } + + if self.provides_languages { + provides.insert(ExtensionProvides::Languages); + } + + if self.provides_grammars { + provides.insert(ExtensionProvides::Grammars); + } + + if self.provides_language_servers { + provides.insert(ExtensionProvides::LanguageServers); + } + + if self.provides_context_servers { + provides.insert(ExtensionProvides::ContextServers); + } + + if self.provides_slash_commands { + provides.insert(ExtensionProvides::SlashCommands); + } + + if self.provides_indexed_docs_providers { + provides.insert(ExtensionProvides::IndexedDocsProviders); + } + + if self.provides_snippets { + provides.insert(ExtensionProvides::Snippets); + } + + provides + } } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] diff --git a/crates/collab/src/db/tests/extension_tests.rs b/crates/collab/src/db/tests/extension_tests.rs index 84e53c5cab2b816cca16109b10a7f43652f9498e..f7a5398d3c1849c0bdb689186fcd50f914bc31bb 100644 --- a/crates/collab/src/db/tests/extension_tests.rs +++ b/crates/collab/src/db/tests/extension_tests.rs @@ -1,6 +1,8 @@ use std::collections::BTreeSet; use std::sync::Arc; +use rpc::ExtensionProvides; + use super::Database; use crate::db::ExtensionVersionConstraints; use crate::{ @@ -39,6 +41,7 @@ async fn test_extensions(db: &Arc) { repository: "ext1/repo".into(), schema_version: 1, wasm_api_version: None, + provides: BTreeSet::default(), published_at: t0, }, NewExtensionVersion { @@ -49,6 +52,7 @@ async fn test_extensions(db: &Arc) { repository: "ext1/repo".into(), schema_version: 1, wasm_api_version: None, + provides: BTreeSet::default(), published_at: t0, }, ], @@ -63,6 +67,7 @@ async fn test_extensions(db: &Arc) { repository: "ext2/repo".into(), schema_version: 0, wasm_api_version: None, + provides: BTreeSet::default(), published_at: t0, }], ), @@ -214,6 +219,7 @@ async fn test_extensions(db: &Arc) { repository: "ext1/repo".into(), schema_version: 1, wasm_api_version: None, + provides: BTreeSet::default(), published_at: t0, }], ), @@ -227,6 +233,7 @@ async fn test_extensions(db: &Arc) { repository: "ext2/repo".into(), schema_version: 0, wasm_api_version: None, + provides: BTreeSet::default(), published_at: t0, }], ), @@ -320,6 +327,10 @@ async fn test_extensions_by_id(db: &Arc) { repository: "ext1/repo".into(), schema_version: 1, wasm_api_version: Some("0.0.4".into()), + provides: BTreeSet::from_iter([ + ExtensionProvides::Grammars, + ExtensionProvides::Languages, + ]), published_at: t0, }, NewExtensionVersion { @@ -330,6 +341,11 @@ async fn test_extensions_by_id(db: &Arc) { repository: "ext1/repo".into(), schema_version: 1, wasm_api_version: Some("0.0.4".into()), + provides: BTreeSet::from_iter([ + ExtensionProvides::Grammars, + ExtensionProvides::Languages, + ExtensionProvides::LanguageServers, + ]), published_at: t0, }, NewExtensionVersion { @@ -340,6 +356,11 @@ async fn test_extensions_by_id(db: &Arc) { repository: "ext1/repo".into(), schema_version: 1, wasm_api_version: Some("0.0.5".into()), + provides: BTreeSet::from_iter([ + ExtensionProvides::Grammars, + ExtensionProvides::Languages, + ExtensionProvides::LanguageServers, + ]), published_at: t0, }, ], @@ -354,6 +375,7 @@ async fn test_extensions_by_id(db: &Arc) { repository: "ext2/repo".into(), schema_version: 0, wasm_api_version: None, + 
provides: BTreeSet::default(), published_at: t0, }], ), @@ -387,7 +409,11 @@ async fn test_extensions_by_id(db: &Arc) { repository: "ext1/repo".into(), schema_version: Some(1), wasm_api_version: Some("0.0.4".into()), - provides: BTreeSet::default(), + provides: BTreeSet::from_iter([ + ExtensionProvides::Grammars, + ExtensionProvides::Languages, + ExtensionProvides::LanguageServers, + ]), }, published_at: t0_chrono, download_count: 0, From b710945949ed19559779e11332bda39cba76093b Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 5 Feb 2025 12:37:32 -0800 Subject: [PATCH 042/130] Fix replication of head text when head matches index (#24306) Release Notes: - N/A --------- Co-authored-by: cole-miller --- Cargo.lock | 1 + crates/project/src/buffer_store.rs | 8 +- crates/remote_server/Cargo.toml | 2 +- .../remote_server/src/remote_editing_tests.rs | 120 ++++++++++++++++++ 4 files changed, 126 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c679633fae3f97d50f6e514535e35c294542a611..de606b70d99f14e221187f5671ba675c246387e2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10700,6 +10700,7 @@ dependencies = [ "sysinfo", "telemetry_events", "toml 0.8.19", + "unindent", "util", "worktree", ] diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index 10bc83da7a330bbf2288fab263663b5e663610e4..d3831dcce30f6234ac993ca094d82f638186e47d 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -131,7 +131,7 @@ impl BufferChangeSetState { let diff_bases_change = match mode { Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text), Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text), - Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.staged_text), + Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text), Mode::IndexAndHead => DiffBasesChange::SetEach { index: message.staged_text, head: message.committed_text, @@ -402,7 +402,7 @@ impl RemoteBufferStore { .await?; let mode = Mode::from_i32(response.mode).ok_or_else(|| anyhow!("Invalid mode"))?; let bases = match mode { - Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.staged_text), + Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text), Mode::IndexAndHead => DiffBasesChange::SetEach { head: response.committed_text, index: response.staged_text, @@ -896,7 +896,7 @@ impl LocalBufferStore { let diff_bases_change = match (needs_staged_text, needs_committed_text) { (true, true) => Some(if staged_text == committed_text { - DiffBasesChange::SetBoth(staged_text) + DiffBasesChange::SetBoth(committed_text) } else { DiffBasesChange::SetEach { index: staged_text, @@ -944,7 +944,7 @@ impl LocalBufferStore { (index, head, Mode::IndexAndHead) } DiffBasesChange::SetBoth(text) => { - (text, None, Mode::IndexMatchesHead) + (None, text, Mode::IndexMatchesHead) } }; let message = proto::UpdateDiffBases { diff --git a/crates/remote_server/Cargo.toml b/crates/remote_server/Cargo.toml index f2238d91fa2d3956a469cdf3d2acbc79cfba77a7..5286f4574a7aed5aa13d7f7d33f98a05a1f942e1 100644 --- a/crates/remote_server/Cargo.toml +++ b/crates/remote_server/Cargo.toml @@ -77,7 +77,7 @@ node_runtime = { workspace = true, features = ["test-support"] } project = { workspace = true, features = ["test-support"] } remote = { workspace = true, features = ["test-support"] } lsp = { workspace = true, features=["test-support"] } - +unindent.workspace = true serde_json.workspace = true [build-dependencies] diff --git 
a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index b39df8edce3f8c9ae210d00565ddf006f4ef19b6..4e34953ea26417866cf603a31bd7ae013d327ca5 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -27,6 +27,7 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; +use unindent::Unindent as _; use util::{path, separator}; #[gpui::test] @@ -1183,6 +1184,125 @@ async fn test_remote_rename_entry(cx: &mut TestAppContext, server_cx: &mut TestA assert_eq!(worktree.entry_for_path("README.rst").unwrap().id, entry.id) }); } + +#[gpui::test] +async fn test_remote_git_diffs(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { + let text_2 = " + fn one() -> usize { + 1 + } + " + .unindent(); + let text_1 = " + fn one() -> usize { + 0 + } + " + .unindent(); + + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "src": { + "lib.rs": text_2 + }, + "README.md": "# project 1", + }, + }), + ) + .await; + fs.set_index_for_repo( + Path::new("/code/project1/.git"), + &[("src/lib.rs".into(), text_1.clone())], + ); + fs.set_head_for_repo( + Path::new("/code/project1/.git"), + &[("src/lib.rs".into(), text_1.clone())], + ); + + let (project, _headless) = init_test(&fs, cx, server_cx).await; + let (worktree, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/project1", true, cx) + }) + .await + .unwrap(); + let worktree_id = cx.update(|cx| worktree.read(cx).id()); + cx.executor().run_until_parked(); + + let buffer = project + .update(cx, |project, cx| { + project.open_buffer((worktree_id, Path::new("src/lib.rs")), cx) + }) + .await + .unwrap(); + let change_set = project + .update(cx, |project, cx| { + project.open_uncommitted_changes(buffer.clone(), cx) + }) + .await + .unwrap(); + + change_set.read_with(cx, |change_set, cx| { + assert_eq!(change_set.base_text_string().unwrap(), text_1); + assert_eq!( + change_set + .unstaged_change_set + .as_ref() + .unwrap() + .read(cx) + .base_text_string() + .unwrap(), + text_1 + ); + }); + + // stage the current buffer's contents + fs.set_index_for_repo( + Path::new("/code/project1/.git"), + &[("src/lib.rs".into(), text_2.clone())], + ); + + cx.executor().run_until_parked(); + change_set.read_with(cx, |change_set, cx| { + assert_eq!(change_set.base_text_string().unwrap(), text_1); + assert_eq!( + change_set + .unstaged_change_set + .as_ref() + .unwrap() + .read(cx) + .base_text_string() + .unwrap(), + text_2 + ); + }); + + // commit the current buffer's contents + fs.set_head_for_repo( + Path::new("/code/project1/.git"), + &[("src/lib.rs".into(), text_2.clone())], + ); + + cx.executor().run_until_parked(); + change_set.read_with(cx, |change_set, cx| { + assert_eq!(change_set.base_text_string().unwrap(), text_2); + assert_eq!( + change_set + .unstaged_change_set + .as_ref() + .unwrap() + .read(cx) + .base_text_string() + .unwrap(), + text_2 + ); + }); +} + #[gpui::test] async fn test_remote_git_branches(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { let fs = FakeFs::new(server_cx.executor()); From f0239c0a89bb4d9fce6938aa39170e3bfed948a9 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 15:48:47 -0500 Subject: [PATCH 043/130] Update actions/setup-node digest to 1d0ff46 (#24304) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the 
following updates: | Package | Type | Update | Change | |---|---|---|---| | [actions/setup-node](https://redirect.github.com/actions/setup-node) | action | digest | `39370e3` -> `1d0ff46` | --- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/actions/run_tests_windows/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/run_tests_windows/action.yml b/.github/actions/run_tests_windows/action.yml index c4be7f6d6db0a51753df0e300ee0de6d49376501..e562f7a48e1bc993f635fe1c9bda7f731592ac40 100644 --- a/.github/actions/run_tests_windows/action.yml +++ b/.github/actions/run_tests_windows/action.yml @@ -16,7 +16,7 @@ runs: run: cargo install cargo-nextest --locked - name: Install Node - uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4 + uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4 with: node-version: "18" From e506efa9bf8bbf52ebf7ca17ffbe1601fa92e832 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 15:53:25 -0500 Subject: [PATCH 044/130] Update Rust crate async-trait to v0.1.86 (#24305) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [async-trait](https://redirect.github.com/dtolnay/async-trait) | workspace.dependencies | patch | `0.1.85` -> `0.1.86` | --- ### Release Notes
dtolnay/async-trait (async-trait)

### [`v0.1.86`](https://redirect.github.com/dtolnay/async-trait/releases/tag/0.1.86)

[Compare Source](https://redirect.github.com/dtolnay/async-trait/compare/0.1.85...0.1.86)

- Documentation improvements
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index de606b70d99f14e221187f5671ba675c246387e2..cc7cadabbbf7c4aeb8e6b4c1ad409ca377b7129f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1013,9 +1013,9 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.85" +version = "0.1.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f934833b4b7233644e5848f235df3f57ed8c80f1528a26c3dfa13d2147fa056" +checksum = "644dd749086bf3771a2fbc5f256fdb982d53f011c7d5d560304eafeecebce79d" dependencies = [ "proc-macro2", "quote", From 9114ca973c49e6008252661c6897612877b7ff60 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 5 Feb 2025 14:24:21 -0700 Subject: [PATCH 045/130] Revert "Revert "Upgrade to rustls v0.23.22" (#24197)" (#24210) try to reland rustls without breaking linux arm builders See: - #24197 - #24138 Release Notes: - N/A --- Cargo.lock | 128 +++++++++++++++++++++++++----------- Cargo.toml | 3 +- crates/client/src/client.rs | 11 +--- script/bundle-linux | 2 + script/linux | 1 + 5 files changed, 98 insertions(+), 47 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cc7cadabbbf7c4aeb8e6b4c1ad409ca377b7129f..651ca95a2ae87b775693994bcd2d925a54225678 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1001,14 +1001,13 @@ checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" [[package]] name = "async-tls" version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2ae3c9eba89d472a0e4fe1dea433df78fbbe63d2b764addaf2ba3a6bde89a5e" +source = "git+https://github.com/zed-industries/async-tls?rev=1e759a4b5e370f87dc15e40756ac4f8815b61d9d#1e759a4b5e370f87dc15e40756ac4f8815b61d9d" dependencies = [ "futures-core", "futures-io", - "rustls 0.21.12", - "rustls-pemfile 1.0.4", - "webpki-roots 0.22.6", + "rustls 0.23.22", + "rustls-pemfile 2.2.0", + "webpki-roots", ] [[package]] @@ -1222,6 +1221,31 @@ dependencies = [ "zeroize", ] +[[package]] +name = "aws-lc-rs" +version = "1.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c2b7ddaa2c56a367ad27a094ad8ef4faacf8a617c2575acb2ba88949df999ca" +dependencies = [ + "aws-lc-sys", + "paste", + "zeroize", +] + +[[package]] +name = "aws-lc-sys" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71b2ddd3ada61a305e1d8bb6c005d1eaa7d14d903681edfc400406d523a9b491" +dependencies = [ + "bindgen 0.69.5", + "cc", + "cmake", + "dunce", + "fs_extra", + "paste", +] + [[package]] name = "aws-runtime" version = "1.5.4" @@ -1729,6 +1753,29 @@ dependencies = [ "serde", ] +[[package]] +name = "bindgen" +version = "0.69.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "271383c67ccabffb7381723dea0672a673f292304fcb45c01cc648c7a8d58088" +dependencies = [ + "bitflags 2.8.0", + "cexpr", + "clang-sys", + "itertools 0.12.1", + "lazy_static", + "lazycell", + 
"log", + "prettyplease", + "proc-macro2", + "quote", + "regex", + "rustc-hash 1.1.0", + "shlex", + "syn 2.0.90", + "which 4.4.2", +] + [[package]] name = "bindgen" version = "0.70.1" @@ -2560,7 +2607,7 @@ dependencies = [ "rand 0.8.5", "release_channel", "rpc", - "rustls 0.21.12", + "rustls 0.23.22", "rustls-native-certs 0.8.1", "schemars", "serde", @@ -2590,6 +2637,15 @@ dependencies = [ "smallvec", ] +[[package]] +name = "cmake" +version = "0.1.53" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e24a03c8b52922d68a1589ad61032f2c1aa5a8158d2aa0d93c6e9534944bbad6" +dependencies = [ + "cc", +] + [[package]] name = "cobs" version = "0.2.3" @@ -3162,7 +3218,7 @@ version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2ce857aa0b77d77287acc1ac3e37a05a8c95a2af3647d23b15f263bdaeb7562b" dependencies = [ - "bindgen", + "bindgen 0.70.1", ] [[package]] @@ -4873,6 +4929,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "fs_extra" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" + [[package]] name = "fsevent" version = "0.1.0" @@ -5381,7 +5443,7 @@ dependencies = [ "ashpd", "async-task", "backtrace", - "bindgen", + "bindgen 0.70.1", "blade-graphics", "blade-macros", "blade-util", @@ -6005,7 +6067,7 @@ dependencies = [ "http 1.2.0", "hyper 1.5.1", "hyper-util", - "rustls 0.23.20", + "rustls 0.23.22", "rustls-native-certs 0.8.1", "rustls-pki-types", "tokio", @@ -7028,6 +7090,12 @@ dependencies = [ "spin", ] +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + [[package]] name = "leb128" version = "0.2.5" @@ -7084,7 +7152,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" dependencies = [ "cfg-if", - "windows-targets 0.52.6", + "windows-targets 0.48.5", ] [[package]] @@ -7609,7 +7677,7 @@ name = "media" version = "0.1.0" dependencies = [ "anyhow", - "bindgen", + "bindgen 0.70.1", "core-foundation 0.9.4", "ctor", "foreign-types 0.5.0", @@ -10254,7 +10322,7 @@ dependencies = [ "quinn-proto", "quinn-udp", "rustc-hash 2.1.0", - "rustls 0.23.20", + "rustls 0.23.22", "socket2", "thiserror 2.0.6", "tokio", @@ -10272,7 +10340,7 @@ dependencies = [ "rand 0.8.5", "ring", "rustc-hash 2.1.0", - "rustls 0.23.20", + "rustls 0.23.22", "rustls-pki-types", "slab", "thiserror 2.0.6", @@ -10834,7 +10902,7 @@ dependencies = [ "percent-encoding", "pin-project-lite", "quinn", - "rustls 0.23.20", + "rustls 0.23.22", "rustls-native-certs 0.8.1", "rustls-pemfile 2.2.0", "rustls-pki-types", @@ -11208,10 +11276,12 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.20" +version = "0.23.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5065c3f250cbd332cd894be57c40fa52387247659b14a2d6041d121547903b1b" +checksum = "9fb9263ab4eb695e42321db096e3b8fbd715a59b154d5c88d82db2175b681ba7" dependencies = [ + "aws-lc-rs", + "log", "once_cell", "ring", "rustls-pki-types", @@ -11287,6 +11357,7 @@ version = "0.102.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" dependencies = [ + "aws-lc-rs", "ring", "rustls-pki-types", "untrusted", @@ -12285,7 +12356,7 @@ dependencies = [ 
"once_cell", "percent-encoding", "rust_decimal", - "rustls 0.23.20", + "rustls 0.23.22", "rustls-pemfile 2.2.0", "serde", "serde_json", @@ -12298,7 +12369,7 @@ dependencies = [ "tracing", "url", "uuid", - "webpki-roots 0.26.7", + "webpki-roots", ] [[package]] @@ -13530,7 +13601,7 @@ version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f6d0975eaace0cf0fcadee4e4aaa5da15b5c079146f2cffb67c113be122bf37" dependencies = [ - "rustls 0.23.20", + "rustls 0.23.22", "tokio", ] @@ -15178,25 +15249,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "webpki" -version = "0.22.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" -dependencies = [ - "ring", - "untrusted", -] - -[[package]] -name = "webpki-roots" -version = "0.22.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" -dependencies = [ - "webpki", -] - [[package]] name = "webpki-roots" version = "0.26.7" diff --git a/Cargo.toml b/Cargo.toml index 17e865c05431153dc9f43a777dea9f3df7308c1c..9f77103e625841da6dff11dc7aa6dd4bea65023d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -474,7 +474,7 @@ runtimelib = { version = "0.25.0", default-features = false, features = [ rustc-demangle = "0.1.23" rust-embed = { version = "8.4", features = ["include-exclude"] } rustc-hash = "2.1.0" -rustls = "0.21.12" +rustls = { version = "0.23.22" } rustls-native-certs = "0.8.0" schemars = { version = "0.8", features = ["impl_json_schema", "indexmap2"] } semver = "1.0" @@ -618,6 +618,7 @@ features = [ # TODO livekit https://github.com/RustAudio/cpal/pull/891 [patch.crates-io] cpal = { git = "https://github.com/zed-industries/cpal", rev = "fd8bc2fd39f1f5fdee5a0690656caff9a26d9d50" } +real-async-tls = { git = "https://github.com/zed-industries/async-tls", rev = "1e759a4b5e370f87dc15e40756ac4f8815b61d9d", package = "async-tls"} [profile.dev] split-debuginfo = "unpacked" diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index e6b86a4e52faca4fc7a5aeab94177ff6e6154411..f6cb04c86612b39821cb64886451b9ffcc6ecc16 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -146,6 +146,8 @@ pub fn init_settings(cx: &mut App) { } pub fn init(client: &Arc, cx: &mut App) { + let _ = rustls::crypto::aws_lc_rs::default_provider().install_default(); + let client = Arc::downgrade(client); cx.on_action({ let client = client.clone(); @@ -1131,15 +1133,8 @@ impl Client { for error in root_certs.errors { log::warn!("error loading native certs: {:?}", error); } - root_store.add_parsable_certificates( - &root_certs - .certs - .into_iter() - .map(|cert| cert.as_ref().to_owned()) - .collect::>(), - ); + root_store.add_parsable_certificates(root_certs.certs); rustls::ClientConfig::builder() - .with_safe_defaults() .with_root_certificates(root_store) .with_no_client_auth() }; diff --git a/script/bundle-linux b/script/bundle-linux index 4d32988608fb39a6edc8f641482a3a617955e9da..e40eb33c93a8fff3aa099a089a71d23b76811360 100755 --- a/script/bundle-linux +++ b/script/bundle-linux @@ -52,6 +52,8 @@ if "$rustup_installed"; then rustup target add "$remote_server_triple" fi +export CC=$(which clang) + # Build binary in release mode export RUSTFLAGS="${RUSTFLAGS:-} -C link-args=-Wl,--disable-new-dtags,-rpath,\$ORIGIN/../lib" cargo build --release --target "${target_triple}" --package zed --package cli diff --git a/script/linux 
b/script/linux index d599d9cce79345d0a4ec9f47bc52012e4bf285a8..943c9d61b4677b58f2fb36f59082b73180ada54d 100755 --- a/script/linux +++ b/script/linux @@ -45,6 +45,7 @@ if [[ -n $apt ]]; then libsqlite3-dev musl-tools musl-dev + build-essential ) if (grep -qP 'PRETTY_NAME="(Linux Mint 22|.+24\.(04|10))' /etc/os-release); then deps+=( mold libstdc++-14-dev ) From ca01a8b9cb58c90efb29475b5fa4b001a6146392 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 5 Feb 2025 13:29:39 -0800 Subject: [PATCH 046/130] Fix two issues with diff highlights (#24309) * fix syntax highlighting of deleted text when buffer language changes * do not highlight entire untracked files as created, except in the project diff view Release Notes: - N/A Co-authored-by: ConradIrwin Co-authored-by: cole-miller --- crates/git/src/diff.rs | 12 ++++ crates/git_ui/src/project_diff.rs | 16 +++-- crates/language/src/buffer.rs | 17 ++++++ crates/multi_buffer/src/multi_buffer.rs | 77 +++++++++++++++++-------- crates/project/src/buffer_store.rs | 56 +++++++++++------- 5 files changed, 126 insertions(+), 52 deletions(-) diff --git a/crates/git/src/diff.rs b/crates/git/src/diff.rs index 7fd6628a89efc8677de400bf9a28b33d86173112..764c254119321847260275372d1d9c4c7e16f836 100644 --- a/crates/git/src/diff.rs +++ b/crates/git/src/diff.rs @@ -74,6 +74,18 @@ impl BufferDiff { } } + pub fn new_with_single_insertion(buffer: &BufferSnapshot) -> Self { + Self { + tree: SumTree::from_item( + InternalDiffHunk { + buffer_range: Anchor::MIN..Anchor::MAX, + diff_base_byte_range: 0..0, + }, + buffer, + ), + } + } + pub fn build(diff_base: Option<&str>, buffer: &text::BufferSnapshot) -> Self { let mut tree = SumTree::new(buffer); diff --git a/crates/git_ui/src/project_diff.rs b/crates/git_ui/src/project_diff.rs index a78f097e244d331fbfca1547be1381e35afd83ea..d35bb59d6a79e4b0e111259f0f707952ad94ed95 100644 --- a/crates/git_ui/src/project_diff.rs +++ b/crates/git_ui/src/project_diff.rs @@ -9,7 +9,7 @@ use gpui::{ actions, AnyElement, AnyView, App, AppContext, AsyncWindowContext, Entity, EventEmitter, FocusHandle, Focusable, Render, Subscription, Task, WeakEntity, }; -use language::{Anchor, Buffer, Capability, OffsetRangeExt}; +use language::{Anchor, Buffer, Capability, OffsetRangeExt, Point}; use multi_buffer::{MultiBuffer, PathKey}; use project::{buffer_store::BufferChangeSet, git::GitState, Project, ProjectPath}; use theme::ActiveTheme; @@ -293,11 +293,15 @@ impl ProjectDiff { let change_set = diff_buffer.change_set; let snapshot = buffer.read(cx).snapshot(); - let diff_hunk_ranges = change_set - .read(cx) - .diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot) - .map(|diff_hunk| diff_hunk.buffer_range.to_point(&snapshot)) - .collect::>(); + let change_set = change_set.read(cx); + let diff_hunk_ranges = if change_set.base_text.is_none() { + vec![Point::zero()..snapshot.max_point()] + } else { + change_set + .diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot) + .map(|diff_hunk| diff_hunk.buffer_range.to_point(&snapshot)) + .collect::>() + }; self.multibuffer.update(cx, |multibuffer, cx| { multibuffer.set_excerpts_for_path( diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index ceb387d2e10ce5b5658aa44bd704e20e4ec8b214..b2112f31c5ae5b40044d6a9167dc3df2c1a6c49b 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1001,6 +1001,23 @@ impl Buffer { } } + pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot { + let entity_id = cx.reserve_entity::().entity_id(); + let 
buffer_id = entity_id.as_non_zero_u64().into(); + let text = + TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot(); + let syntax = SyntaxMap::new(&text).snapshot(); + BufferSnapshot { + text, + syntax, + file: None, + diagnostics: Default::default(), + remote_selections: Default::default(), + language: None, + non_text_state_update_count: 0, + } + } + #[cfg(any(test, feature = "test-support"))] pub fn build_snapshot_sync( text: Rope, diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index d00c161a8633a1fe304008246cddcca778c394c9..1986944a1d6e95f2b7dfd6a28d0c58ac54ca2e53 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -220,6 +220,22 @@ struct ChangeSetState { _subscription: gpui::Subscription, } +impl ChangeSetState { + fn new(change_set: Entity, cx: &mut Context) -> Self { + ChangeSetState { + _subscription: cx.subscribe(&change_set, |this, change_set, event, cx| match event { + BufferChangeSetEvent::DiffChanged { changed_range } => { + this.buffer_diff_changed(change_set, changed_range.clone(), cx) + } + BufferChangeSetEvent::LanguageChanged => { + this.buffer_diff_language_changed(change_set, cx) + } + }), + change_set, + } + } +} + /// The contents of a [`MultiBuffer`] at a single point in time. #[derive(Clone, Default)] pub struct MultiBufferSnapshot { @@ -560,17 +576,7 @@ impl MultiBuffer { for (buffer_id, change_set_state) in self.diff_bases.iter() { diff_bases.insert( *buffer_id, - ChangeSetState { - _subscription: new_cx.subscribe( - &change_set_state.change_set, - |this, change_set, event, cx| match event { - BufferChangeSetEvent::DiffChanged { changed_range } => { - this.buffer_diff_changed(change_set, changed_range.clone(), cx) - } - }, - ), - change_set: change_set_state.change_set.clone(), - }, + ChangeSetState::new(change_set_state.change_set.clone(), new_cx), ); } Self { @@ -2146,6 +2152,30 @@ impl MultiBuffer { }); } + fn buffer_diff_language_changed( + &mut self, + change_set: Entity, + cx: &mut Context, + ) { + self.sync(cx); + let mut snapshot = self.snapshot.borrow_mut(); + let change_set = change_set.read(cx); + let buffer_id = change_set.buffer_id; + let base_text = change_set.base_text.clone(); + let diff = change_set.diff_to_buffer.clone(); + if let Some(base_text) = base_text { + snapshot.diffs.insert( + buffer_id, + DiffSnapshot { + diff: diff.clone(), + base_text, + }, + ); + } else { + snapshot.diffs.remove(&buffer_id); + } + } + fn buffer_diff_changed( &mut self, change_set: Entity, @@ -2175,6 +2205,15 @@ impl MultiBuffer { base_text, }, ); + } else if self.all_diff_hunks_expanded { + let base_text = Buffer::build_empty_snapshot(cx); + snapshot.diffs.insert( + buffer_id, + DiffSnapshot { + diff: git::diff::BufferDiff::new_with_single_insertion(&base_text), + base_text, + }, + ); } else { snapshot.diffs.remove(&buffer_id); } @@ -2316,20 +2355,8 @@ impl MultiBuffer { pub fn add_change_set(&mut self, change_set: Entity, cx: &mut Context) { let buffer_id = change_set.read(cx).buffer_id; self.buffer_diff_changed(change_set.clone(), text::Anchor::MIN..text::Anchor::MAX, cx); - self.diff_bases.insert( - buffer_id, - ChangeSetState { - _subscription: cx.subscribe( - &change_set, - |this, change_set, event, cx| match event { - BufferChangeSetEvent::DiffChanged { changed_range } => { - this.buffer_diff_changed(change_set, changed_range.clone(), cx); - } - }, - ), - change_set, - }, - ); + self.diff_bases + .insert(buffer_id, 
ChangeSetState::new(change_set, cx)); } pub fn change_set_for(&self, buffer_id: BufferId) -> Option> { diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index d3831dcce30f6234ac993ca094d82f638186e47d..82bfca95b7216f1c81d3e9f6f7ac93e8dc90b99f 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -85,6 +85,7 @@ struct BufferChangeSetState { index_text: Option>, head_changed: bool, index_changed: bool, + language_changed: bool, } #[derive(Clone, Debug)] @@ -101,8 +102,7 @@ enum DiffBasesChange { impl BufferChangeSetState { fn buffer_language_changed(&mut self, buffer: Entity, cx: &mut Context) { self.language = buffer.read(cx).language().cloned(); - self.index_changed = self.index_text.is_some(); - self.head_changed = self.head_text.is_some(); + self.language_changed = true; let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx); } @@ -149,34 +149,40 @@ impl BufferChangeSetState { ) -> oneshot::Receiver<()> { match diff_bases_change { DiffBasesChange::SetIndex(index) => { - let mut index = index.unwrap_or_default(); - text::LineEnding::normalize(&mut index); - self.index_text = Some(Arc::new(index)); + self.index_text = index.map(|mut index| { + text::LineEnding::normalize(&mut index); + Arc::new(index) + }); self.index_changed = true; } DiffBasesChange::SetHead(head) => { - let mut head = head.unwrap_or_default(); - text::LineEnding::normalize(&mut head); - self.head_text = Some(Arc::new(head)); + self.head_text = head.map(|mut head| { + text::LineEnding::normalize(&mut head); + Arc::new(head) + }); self.head_changed = true; } DiffBasesChange::SetBoth(text) => { - let mut text = text.unwrap_or_default(); - text::LineEnding::normalize(&mut text); - self.head_text = Some(Arc::new(text)); - self.index_text = self.head_text.clone(); + let text = text.map(|mut text| { + text::LineEnding::normalize(&mut text); + Arc::new(text) + }); + self.head_text = text.clone(); + self.index_text = text; self.head_changed = true; self.index_changed = true; } DiffBasesChange::SetEach { index, head } => { - let mut index = index.unwrap_or_default(); - text::LineEnding::normalize(&mut index); - let mut head = head.unwrap_or_default(); - text::LineEnding::normalize(&mut head); - self.index_text = Some(Arc::new(index)); - self.head_text = Some(Arc::new(head)); - self.head_changed = true; + self.index_text = index.map(|mut index| { + text::LineEnding::normalize(&mut index); + Arc::new(index) + }); self.index_changed = true; + self.head_text = head.map(|mut head| { + text::LineEnding::normalize(&mut head); + Arc::new(head) + }); + self.head_changed = true; } } @@ -199,6 +205,7 @@ impl BufferChangeSetState { let index = self.index_text.clone(); let index_changed = self.index_changed; let head_changed = self.head_changed; + let language_changed = self.language_changed; let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) { (Some(index), Some(head)) => Arc::ptr_eq(index, head), (None, None) => true, @@ -206,7 +213,7 @@ impl BufferChangeSetState { }; self.recalculate_diff_task = Some(cx.spawn(|this, mut cx| async move { if let Some(unstaged_changes) = &unstaged_changes { - let staged_snapshot = if index_changed { + let staged_snapshot = if index_changed || language_changed { let staged_snapshot = cx.update(|cx| { index.as_ref().map(|head| { language::Buffer::build_snapshot( @@ -238,6 +245,9 @@ impl BufferChangeSetState { unstaged_changes.update(&mut cx, |unstaged_changes, cx| { 
unstaged_changes.set_state(staged_snapshot.clone(), diff, &buffer, cx); + if language_changed { + cx.emit(BufferChangeSetEvent::LanguageChanged); + } })?; } @@ -252,7 +262,7 @@ impl BufferChangeSetState { ) })? } else { - let committed_snapshot = if head_changed { + let committed_snapshot = if head_changed || language_changed { let committed_snapshot = cx.update(|cx| { head.as_ref().map(|head| { language::Buffer::build_snapshot( @@ -284,6 +294,9 @@ impl BufferChangeSetState { uncommitted_changes.update(&mut cx, |change_set, cx| { change_set.set_state(snapshot, diff, &buffer, cx); + if language_changed { + cx.emit(BufferChangeSetEvent::LanguageChanged); + } })?; } @@ -323,6 +336,7 @@ impl std::fmt::Debug for BufferChangeSet { pub enum BufferChangeSetEvent { DiffChanged { changed_range: Range }, + LanguageChanged, } enum BufferStoreState { From b7244af0938bf8eb85c55301aae347f6eb73fbb1 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 5 Feb 2025 16:31:52 -0500 Subject: [PATCH 047/130] extensions_ui: Show extension features on cards (#24310) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR updates the extensions list to display the features that an extension provides. Screenshot 2025-02-05 at 4 12 07 PM Note that this will only show up for extensions that have this data (which will be extensions published/updated on or after now). Here's the view with some mocked data: Screenshot 2025-02-05 at 4 01 56 PM Release Notes: - N/A --- crates/extensions_ui/src/extensions_ui.rs | 50 +++++++++++++++++++++-- 1 file changed, 47 insertions(+), 3 deletions(-) diff --git a/crates/extensions_ui/src/extensions_ui.rs b/crates/extensions_ui/src/extensions_ui.rs index 15fee8e630cbb29b587ada6dda83474e23e35ef1..6d9cfa3f4b353b58f8f4a876c683e71e318300a8 100644 --- a/crates/extensions_ui/src/extensions_ui.rs +++ b/crates/extensions_ui/src/extensions_ui.rs @@ -6,7 +6,7 @@ use std::sync::OnceLock; use std::time::Duration; use std::{ops::Range, sync::Arc}; -use client::ExtensionMetadata; +use client::{ExtensionMetadata, ExtensionProvides}; use collections::{BTreeMap, BTreeSet}; use editor::{Editor, EditorElement, EditorStyle}; use extension_host::{ExtensionManifest, ExtensionOperation, ExtensionStore}; @@ -575,7 +575,6 @@ impl ExtensionsPage { .child( h_flex() .gap_2() - .items_end() .child( Headline::new(extension.manifest.name.clone()) .size(HeadlineSize::Medium), @@ -588,7 +587,52 @@ impl ExtensionsPage { Headline::new(format!("(v{installed_version} installed)",)) .size(HeadlineSize::XSmall) }), - ), + ) + .map(|parent| { + if extension.manifest.provides.is_empty() { + return parent; + } + + parent.child( + h_flex().gap_2().children( + extension + .manifest + .provides + .iter() + .map(|provides| { + let label = match provides { + ExtensionProvides::Themes => "Themes", + ExtensionProvides::IconThemes => "Icon Themes", + ExtensionProvides::Languages => "Languages", + ExtensionProvides::Grammars => "Grammars", + ExtensionProvides::LanguageServers => { + "Language Servers" + } + ExtensionProvides::ContextServers => { + "Context Servers" + } + ExtensionProvides::SlashCommands => { + "Slash Commands" + } + ExtensionProvides::IndexedDocsProviders => { + "Indexed Docs Providers" + } + ExtensionProvides::Snippets => "Snippets", + }; + div() + .bg(cx.theme().colors().element_background) + .px_0p5() + .border_1() + .border_color(cx.theme().colors().border) + .rounded_md() + .child( + Label::new(label).size(LabelSize::XSmall), + ) + }) + .collect::>(), + ), + ) + }), ) 
.child( h_flex() From c0dd7e8367fc9540685d86183c7370374413f324 Mon Sep 17 00:00:00 2001 From: Patrick Detlefsen Date: Wed, 5 Feb 2025 22:45:38 +0100 Subject: [PATCH 048/130] open_ai: Include o3-mini in `Model::from_id` (#24261) --- crates/open_ai/src/open_ai.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/open_ai/src/open_ai.rs b/crates/open_ai/src/open_ai.rs index f6ce348352cb4c7b5ec6590525f2f48d174ba059..b200da8ce0a534faf877b9bd19fb6f3083ba7f3c 100644 --- a/crates/open_ai/src/open_ai.rs +++ b/crates/open_ai/src/open_ai.rs @@ -103,6 +103,7 @@ impl Model { "o1" => Ok(Self::O1), "o1-preview" => Ok(Self::O1Preview), "o1-mini" => Ok(Self::O1Mini), + "o3-mini" => Ok(Self::O3Mini), _ => Err(anyhow!("invalid model id")), } } From e1919b41215f2db23c1ff79edae435191f3fd510 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 5 Feb 2025 17:12:18 -0500 Subject: [PATCH 049/130] collab: Add the ability to filter extensions by what they provide (#24315) This PR adds the ability to filter extension results from the extension API by the features that they provide. For instance, to filter down just to extensions that provide icon themes: ``` https://api.zed.dev/extensions?provides=icon-themes ``` Release Notes: - N/A --- crates/collab/src/api/extensions.rs | 28 +++++++++-- crates/collab/src/db/queries/extensions.rs | 48 +++++++++++++++++++ crates/collab/src/db/tests/extension_tests.rs | 12 ++--- crates/rpc/src/extension.rs | 8 +++- 4 files changed, 85 insertions(+), 11 deletions(-) diff --git a/crates/collab/src/api/extensions.rs b/crates/collab/src/api/extensions.rs index e132acaf0b106c4c93012e968e9aa74f4f51b718..73aea4534067f4f6e557c98fac3b27d3260c5d3a 100644 --- a/crates/collab/src/api/extensions.rs +++ b/crates/collab/src/api/extensions.rs @@ -9,10 +9,11 @@ use axum::{ routing::get, Extension, Json, Router, }; -use collections::HashMap; -use rpc::{ExtensionApiManifest, GetExtensionsResponse}; +use collections::{BTreeSet, HashMap}; +use rpc::{ExtensionApiManifest, ExtensionProvides, GetExtensionsResponse}; use semantic_version::SemanticVersion; use serde::Deserialize; +use std::str::FromStr; use std::{sync::Arc, time::Duration}; use time::PrimitiveDateTime; use util::{maybe, ResultExt}; @@ -35,6 +36,14 @@ pub fn router() -> Router { #[derive(Debug, Deserialize)] struct GetExtensionsParams { filter: Option, + /// A comma-delimited list of features that the extension must provide. 
+ /// + /// For example: + /// - `themes` + /// - `themes,icon-themes` + /// - `languages,language-servers` + #[serde(default)] + provides: Option, #[serde(default)] max_schema_version: i32, } @@ -43,9 +52,22 @@ async fn get_extensions( Extension(app): Extension>, Query(params): Query, ) -> Result> { + let provides_filter = params.provides.map(|provides| { + provides + .split(',') + .map(|value| value.trim()) + .filter_map(|value| ExtensionProvides::from_str(value).ok()) + .collect::>() + }); + let mut extensions = app .db - .get_extensions(params.filter.as_deref(), params.max_schema_version, 500) + .get_extensions( + params.filter.as_deref(), + provides_filter.as_ref(), + params.max_schema_version, + 500, + ) .await?; if let Some(filter) = params.filter.as_deref() { diff --git a/crates/collab/src/db/queries/extensions.rs b/crates/collab/src/db/queries/extensions.rs index 54f47ae45ee015e80cf916c0d06703f0faff2e01..2b76e12335108a6f57c24b6ba17dd19c2d998708 100644 --- a/crates/collab/src/db/queries/extensions.rs +++ b/crates/collab/src/db/queries/extensions.rs @@ -10,6 +10,7 @@ impl Database { pub async fn get_extensions( &self, filter: Option<&str>, + provides_filter: Option<&BTreeSet>, max_schema_version: i32, limit: usize, ) -> Result> { @@ -26,6 +27,10 @@ impl Database { condition = condition.add(Expr::cust_with_expr("name ILIKE $1", fuzzy_name_filter)); } + if let Some(provides_filter) = provides_filter { + condition = apply_provides_filter(condition, provides_filter); + } + self.get_extensions_where(condition, Some(limit as u64), &tx) .await }) @@ -385,6 +390,49 @@ impl Database { } } +fn apply_provides_filter( + mut condition: Condition, + provides_filter: &BTreeSet, +) -> Condition { + if provides_filter.contains(&ExtensionProvides::Themes) { + condition = condition.add(extension_version::Column::ProvidesThemes.eq(true)); + } + + if provides_filter.contains(&ExtensionProvides::IconThemes) { + condition = condition.add(extension_version::Column::ProvidesIconThemes.eq(true)); + } + + if provides_filter.contains(&ExtensionProvides::Languages) { + condition = condition.add(extension_version::Column::ProvidesLanguages.eq(true)); + } + + if provides_filter.contains(&ExtensionProvides::Grammars) { + condition = condition.add(extension_version::Column::ProvidesGrammars.eq(true)); + } + + if provides_filter.contains(&ExtensionProvides::LanguageServers) { + condition = condition.add(extension_version::Column::ProvidesLanguageServers.eq(true)); + } + + if provides_filter.contains(&ExtensionProvides::ContextServers) { + condition = condition.add(extension_version::Column::ProvidesContextServers.eq(true)); + } + + if provides_filter.contains(&ExtensionProvides::SlashCommands) { + condition = condition.add(extension_version::Column::ProvidesSlashCommands.eq(true)); + } + + if provides_filter.contains(&ExtensionProvides::IndexedDocsProviders) { + condition = condition.add(extension_version::Column::ProvidesIndexedDocsProviders.eq(true)); + } + + if provides_filter.contains(&ExtensionProvides::Snippets) { + condition = condition.add(extension_version::Column::ProvidesSnippets.eq(true)); + } + + condition +} + fn metadata_from_extension_and_version( extension: extension::Model, version: extension_version::Model, diff --git a/crates/collab/src/db/tests/extension_tests.rs b/crates/collab/src/db/tests/extension_tests.rs index f7a5398d3c1849c0bdb689186fcd50f914bc31bb..460d74ffc0588c8243962a1a2b5e9d4bf9006fe8 100644 --- a/crates/collab/src/db/tests/extension_tests.rs +++ 
b/crates/collab/src/db/tests/extension_tests.rs @@ -20,7 +20,7 @@ async fn test_extensions(db: &Arc) { let versions = db.get_known_extension_versions().await.unwrap(); assert!(versions.is_empty()); - let extensions = db.get_extensions(None, 1, 5).await.unwrap(); + let extensions = db.get_extensions(None, None, 1, 5).await.unwrap(); assert!(extensions.is_empty()); let t0 = time::OffsetDateTime::from_unix_timestamp_nanos(0).unwrap(); @@ -90,7 +90,7 @@ async fn test_extensions(db: &Arc) { ); // The latest version of each extension is returned. - let extensions = db.get_extensions(None, 1, 5).await.unwrap(); + let extensions = db.get_extensions(None, None, 1, 5).await.unwrap(); assert_eq!( extensions, &[ @@ -128,7 +128,7 @@ async fn test_extensions(db: &Arc) { ); // Extensions with too new of a schema version are excluded. - let extensions = db.get_extensions(None, 0, 5).await.unwrap(); + let extensions = db.get_extensions(None, None, 0, 5).await.unwrap(); assert_eq!( extensions, &[ExtensionMetadata { @@ -168,7 +168,7 @@ async fn test_extensions(db: &Arc) { .unwrap()); // Extensions are returned in descending order of total downloads. - let extensions = db.get_extensions(None, 1, 5).await.unwrap(); + let extensions = db.get_extensions(None, None, 1, 5).await.unwrap(); assert_eq!( extensions, &[ @@ -258,7 +258,7 @@ async fn test_extensions(db: &Arc) { .collect() ); - let extensions = db.get_extensions(None, 1, 5).await.unwrap(); + let extensions = db.get_extensions(None, None, 1, 5).await.unwrap(); assert_eq!( extensions, &[ @@ -306,7 +306,7 @@ async fn test_extensions_by_id(db: &Arc) { let versions = db.get_known_extension_versions().await.unwrap(); assert!(versions.is_empty()); - let extensions = db.get_extensions(None, 1, 5).await.unwrap(); + let extensions = db.get_extensions(None, None, 1, 5).await.unwrap(); assert!(extensions.is_empty()); let t0 = time::OffsetDateTime::from_unix_timestamp_nanos(0).unwrap(); diff --git a/crates/rpc/src/extension.rs b/crates/rpc/src/extension.rs index 67b9116b83b73cdac50c002f7b61de7c6682ca6c..f1dcdc28d669251e811a15d8fa7cafb35d7eebcf 100644 --- a/crates/rpc/src/extension.rs +++ b/crates/rpc/src/extension.rs @@ -1,8 +1,9 @@ use std::collections::BTreeSet; +use std::sync::Arc; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; -use std::sync::Arc; +use strum::EnumString; #[derive(Clone, Serialize, Deserialize, Debug, PartialEq)] pub struct ExtensionApiManifest { @@ -17,8 +18,11 @@ pub struct ExtensionApiManifest { pub provides: BTreeSet, } -#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize)] +#[derive( + Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize, EnumString, +)] #[serde(rename_all = "kebab-case")] +#[strum(serialize_all = "kebab-case")] pub enum ExtensionProvides { Themes, IconThemes, From 1dbca5d9a09cbcf719d3123d20e8191707d1f3a9 Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Wed, 5 Feb 2025 16:08:56 -0700 Subject: [PATCH 050/130] Mostly fix hover tooltips not respecting occlusion (#24319) Regression in #22644 Unfortunately not a full fix, In the case where a tooltip gets displayed and then gets occluded after display, it will stick around until the mouse exits the hover bounds. 
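To make the distinction concrete, below is a minimal, self-contained sketch using simplified stand-in types (`Point`, `Bounds`, `Window`, `Hitbox`) rather than gpui's actual API: a plain bounds-containment check, which is all that is available during window prepaint, cannot tell whether another element is drawn on top of the tooltip source, while an occlusion-aware hitbox check can.

```rust
// Illustrative only: simplified stand-ins, not gpui's real Window/Hitbox types.
#[derive(Clone, Copy)]
struct Point {
    x: f32,
    y: f32,
}

#[derive(Clone, Copy)]
struct Bounds {
    origin: Point,
    size: Point,
}

impl Bounds {
    fn contains(&self, p: &Point) -> bool {
        p.x >= self.origin.x
            && p.y >= self.origin.y
            && p.x <= self.origin.x + self.size.x
            && p.y <= self.origin.y + self.size.y
    }
}

struct Window {
    mouse_position: Point,
    // Id of the topmost hitbox currently under the mouse, if any.
    hovered_hitbox: Option<u64>,
}

struct Hitbox {
    id: u64,
}

impl Hitbox {
    // Occlusion-aware check: true only when this hitbox is the topmost
    // element under the mouse.
    fn is_hovered(&self, window: &Window) -> bool {
        window.hovered_hitbox == Some(self.id)
    }
}

// During window prepaint no hitbox information is available, so only the
// weaker bounds-containment test can run; it can report "hovered" even when
// another element is drawn on top of the source bounds.
fn is_hovered_during_prepaint(bounds: Bounds, window: &Window) -> bool {
    bounds.contains(&window.mouse_position)
}

fn main() {
    let bounds = Bounds {
        origin: Point { x: 0.0, y: 0.0 },
        size: Point { x: 10.0, y: 10.0 },
    };
    let tooltip_source = Hitbox { id: 1 };
    // The mouse is inside the source bounds, but a different element (id 2)
    // is the topmost hitbox, i.e. the source is occluded.
    let window = Window {
        mouse_position: Point { x: 5.0, y: 5.0 },
        hovered_hitbox: Some(2),
    };
    assert!(is_hovered_during_prepaint(bounds, &window)); // bounds say "hovered"
    assert!(!tooltip_source.is_hovered(&window)); // hitbox says "occluded"
}
```

This is why the remaining issue is limited to tooltips that are already visible: the weaker bounds-based check is only used where hitbox data is unavailable.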
Release Notes: - N/A Co-authored-by: Ben --- crates/gpui/src/elements/div.rs | 39 +++++++++++++++++++++++++------- crates/gpui/src/elements/text.rs | 25 ++++++++++++++++---- 2 files changed, 51 insertions(+), 13 deletions(-) diff --git a/crates/gpui/src/elements/div.rs b/crates/gpui/src/elements/div.rs index f9ff17ea3c8af066c9f98035b4f17169f041164f..6460172168c255b80661a76f78e651db3d65f56b 100644 --- a/crates/gpui/src/elements/div.rs +++ b/crates/gpui/src/elements/div.rs @@ -2008,18 +2008,27 @@ impl Interactivity { let build_tooltip = Rc::new(move |window: &mut Window, cx: &mut App| { Some(((tooltip_builder.build)(window, cx), tooltip_is_hoverable)) }); - // Use bounds instead of testing hitbox since check_is_hovered is also called - // during prepaint. - let source_bounds = hitbox.bounds; - let check_is_hovered = Rc::new(move |window: &Window| { - pending_mouse_down.borrow().is_none() - && source_bounds.contains(&window.mouse_position()) + // Use bounds instead of testing hitbox since this is called during prepaint. + let check_is_hovered_during_prepaint = Rc::new({ + let pending_mouse_down = pending_mouse_down.clone(); + let source_bounds = hitbox.bounds; + move |window: &Window| { + pending_mouse_down.borrow().is_none() + && source_bounds.contains(&window.mouse_position()) + } + }); + let check_is_hovered = Rc::new({ + let hitbox = hitbox.clone(); + move |window: &Window| { + pending_mouse_down.borrow().is_none() && hitbox.is_hovered(window) + } }); register_tooltip_mouse_handlers( &active_tooltip, self.tooltip_id, build_tooltip, check_is_hovered, + check_is_hovered_during_prepaint, window, ); } @@ -2361,6 +2370,7 @@ pub(crate) fn register_tooltip_mouse_handlers( tooltip_id: Option, build_tooltip: Rc Option<(AnyView, bool)>>, check_is_hovered: Rc bool>, + check_is_hovered_during_prepaint: Rc bool>, window: &mut Window, ) { window.on_mouse_event({ @@ -2372,6 +2382,7 @@ pub(crate) fn register_tooltip_mouse_handlers( &active_tooltip, &build_tooltip, &check_is_hovered, + &check_is_hovered_during_prepaint, phase, window, cx, @@ -2398,10 +2409,22 @@ pub(crate) fn register_tooltip_mouse_handlers( }); } +/// Handles displaying tooltips when an element is hovered. +/// +/// The mouse hovering logic also relies on being called from window prepaint in order to handle the +/// case where the element the tooltip is on is not rendered - in that case its mouse listeners are +/// also not registered. During window prepaint, the hitbox information is not available, so +/// `check_is_hovered_during_prepaint` is used which bases the check off of the absolute bounds of +/// the element. +/// +/// TODO: There's a minor bug due to the use of absolute bounds while checking during prepaint - it +/// does not know if the hitbox is occluded. In the case where a tooltip gets displayed and then +/// gets occluded after display, it will stick around until the mouse exits the hover bounds. 
fn handle_tooltip_mouse_move( active_tooltip: &Rc>>, build_tooltip: &Rc Option<(AnyView, bool)>>, check_is_hovered: &Rc bool>, + check_is_hovered_during_prepaint: &Rc bool>, phase: DispatchPhase, window: &mut Window, cx: &mut App, @@ -2447,7 +2470,7 @@ fn handle_tooltip_mouse_move( let delayed_show_task = window.spawn(cx, { let active_tooltip = active_tooltip.clone(); let build_tooltip = build_tooltip.clone(); - let check_is_hovered = check_is_hovered.clone(); + let check_is_hovered_during_prepaint = check_is_hovered_during_prepaint.clone(); move |mut cx| async move { cx.background_executor().timer(TOOLTIP_SHOW_DELAY).await; cx.update(|window, cx| { @@ -2463,7 +2486,7 @@ fn handle_tooltip_mouse_move( handle_tooltip_check_visible_and_update( &active_tooltip, tooltip_is_hoverable, - &check_is_hovered, + &check_is_hovered_during_prepaint, tooltip_bounds, window, cx, diff --git a/crates/gpui/src/elements/text.rs b/crates/gpui/src/elements/text.rs index 36771ed5ecc567c82fad6a569369f7cdf1d482c0..21913af93f2770d69629a107afb4a6a00d31d80b 100644 --- a/crates/gpui/src/elements/text.rs +++ b/crates/gpui/src/elements/text.rs @@ -734,7 +734,6 @@ impl Element for InteractiveText { if let Some(tooltip_builder) = self.tooltip_builder.clone() { let active_tooltip = interactive_state.active_tooltip.clone(); - let pending_mouse_down = interactive_state.mouse_down_index.clone(); let build_tooltip = Rc::new({ let tooltip_is_hoverable = false; let text_layout = text_layout.clone(); @@ -746,11 +745,12 @@ impl Element for InteractiveText { .map(|view| (view, tooltip_is_hoverable)) } }); - // Use bounds instead of testing hitbox since check_is_hovered is also - // called during prepaint. - let source_bounds = hitbox.bounds; - let check_is_hovered = Rc::new({ + + // Use bounds instead of testing hitbox since this is called during prepaint. + let check_is_hovered_during_prepaint = Rc::new({ + let source_bounds = hitbox.bounds; let text_layout = text_layout.clone(); + let pending_mouse_down = interactive_state.mouse_down_index.clone(); move |window: &Window| { text_layout .index_for_position(window.mouse_position()) @@ -759,11 +759,26 @@ impl Element for InteractiveText { && pending_mouse_down.get().is_none() } }); + + let check_is_hovered = Rc::new({ + let hitbox = hitbox.clone(); + let text_layout = text_layout.clone(); + let pending_mouse_down = interactive_state.mouse_down_index.clone(); + move |window: &Window| { + text_layout + .index_for_position(window.mouse_position()) + .is_ok() + && hitbox.is_hovered(window) + && pending_mouse_down.get().is_none() + } + }); + register_tooltip_mouse_handlers( &active_tooltip, self.tooltip_id, build_tooltip, check_is_hovered, + check_is_hovered_during_prepaint, window, ); } From 980ce5fbf2d0de7e954c32dd982268d3b58dfccc Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Thu, 6 Feb 2025 01:14:26 +0200 Subject: [PATCH 051/130] Move git status updates to a background thread (#24307) Part of https://github.com/zed-industries/zed/issues/24099 Part of https://github.com/zed-industries/zed/issues/23025 Git status checks & updates are still slow for such repos, but those are now not blocking FS entry population and rescans. 
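As a rough sketch of the scheduling pattern (using generic `futures` primitives and plain threads, not Zed's actual scanner, executor, or git types): each repository's status scan runs as its own background task and reports completion over a oneshot channel, so the filesystem scan can continue and only awaits the batch when it needs a consistent snapshot.

```rust
// Illustrative only. Standalone example; requires the `futures` crate
// (futures = "0.3") and uses plain threads for the "background" work.
use futures::channel::oneshot;
use futures::executor::block_on;
use futures::future::join_all;
use std::thread;
use std::time::Duration;

// Stand-in for the slow part: computing git statuses for one repository.
fn compute_git_statuses(repo: &str) -> Vec<(String, char)> {
    thread::sleep(Duration::from_millis(50)); // simulate a slow `git status`
    vec![(format!("{repo}/a.txt"), 'M')]
}

// Schedule one status scan per repository as an independent background task.
// Each task signals completion over a oneshot channel, so the caller can keep
// scanning the filesystem and only wait for statuses when it needs them.
fn schedule_status_scans(repos: Vec<String>) -> Vec<oneshot::Receiver<usize>> {
    repos
        .into_iter()
        .map(|repo| {
            let (tx, rx) = oneshot::channel();
            // A thread here; an executor task in the real code.
            thread::spawn(move || {
                let statuses = compute_git_statuses(&repo);
                let _ = tx.send(statuses.len()); // receiver may already be gone
            });
            rx
        })
        .collect()
}

fn main() {
    let scans = schedule_status_scans(vec!["repo_a".into(), "repo_b".into()]);
    // The foreground scan keeps going here; when a consistent snapshot is
    // needed, await every pending status update at once.
    let results = block_on(join_all(scans));
    println!("{results:?}"); // e.g. [Ok(1), Ok(1)]
}
```

In this shape, statuses are applied as each scan finishes instead of inline with directory scanning.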
Release Notes: - Improved project panel's speed in large projects --- crates/worktree/src/worktree.rs | 513 +++++++++++++------------- crates/worktree/src/worktree_tests.rs | 2 + 2 files changed, 258 insertions(+), 257 deletions(-) diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index c87187a32e38c0f3de356e95e1886d054ab2329e..3033a6e9faf47a1182a04e737954ad90f66c22c3 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -13,6 +13,7 @@ use futures::{ mpsc::{self, UnboundedSender}, oneshot, }, + future::join_all, select_biased, task::Poll, FutureExt as _, Stream, StreamExt, @@ -450,6 +451,7 @@ struct BackgroundScannerState { changed_paths: Vec>, prev_snapshot: Snapshot, git_hosting_provider_registry: Option>, + repository_scans: HashMap, Task<()>>, } #[derive(Debug, Clone)] @@ -1336,7 +1338,7 @@ impl LocalWorktree { scan_requests_rx, path_prefixes_to_scan_rx, next_entry_id, - state: Mutex::new(BackgroundScannerState { + state: Arc::new(Mutex::new(BackgroundScannerState { prev_snapshot: snapshot.snapshot.clone(), snapshot, scanned_dirs: Default::default(), @@ -1344,8 +1346,9 @@ impl LocalWorktree { paths_to_scan: Default::default(), removed_entries: Default::default(), changed_paths: Default::default(), + repository_scans: HashMap::default(), git_hosting_provider_registry, - }), + })), phase: BackgroundScannerPhase::InitialScan, share_private_files, settings, @@ -4083,7 +4086,7 @@ impl<'a> sum_tree::Dimension<'a, EntrySummary> for PathKey { } struct BackgroundScanner { - state: Mutex, + state: Arc>, fs: Arc, fs_case_sensitive: bool, status_updates_tx: UnboundedSender, @@ -4097,7 +4100,7 @@ struct BackgroundScanner { share_private_files: bool, } -#[derive(PartialEq)] +#[derive(Copy, Clone, PartialEq)] enum BackgroundScannerPhase { InitialScan, EventsReceivedDuringInitialScan, @@ -4106,8 +4109,6 @@ enum BackgroundScannerPhase { impl BackgroundScanner { async fn run(&mut self, mut fs_events_rx: Pin>>>) { - use futures::FutureExt as _; - // If the worktree root does not contain a git repository, then find // the git repository in an ancestor directory. Find any gitignore files // in ancestor directories. 
@@ -4418,22 +4419,33 @@ impl BackgroundScanner { self.update_ignore_statuses(scan_job_tx).await; self.scan_dirs(false, scan_job_rx).await; - if !dot_git_abs_paths.is_empty() { - self.update_git_repositories(dot_git_abs_paths).await; - } - - { - let mut state = self.state.lock(); - state.snapshot.completed_scan_id = state.snapshot.scan_id; - for (_, entry) in mem::take(&mut state.removed_entries) { - state.scanned_dirs.remove(&entry.id); - } - } + let status_update = if !dot_git_abs_paths.is_empty() { + Some(self.schedule_git_repositories_update(dot_git_abs_paths)) + } else { + None + }; - #[cfg(test)] - self.state.lock().snapshot.check_git_invariants(); + let phase = self.phase; + let status_update_tx = self.status_updates_tx.clone(); + let state = self.state.clone(); + self.executor + .spawn(async move { + if let Some(status_update) = status_update { + status_update.await; + } - self.send_status_update(false, SmallVec::new()); + { + let mut state = state.lock(); + state.snapshot.completed_scan_id = state.snapshot.scan_id; + for (_, entry) in mem::take(&mut state.removed_entries) { + state.scanned_dirs.remove(&entry.id); + } + #[cfg(test)] + state.snapshot.check_git_invariants(); + } + send_status_update_inner(phase, state, status_update_tx, false, SmallVec::new()); + }) + .detach(); } async fn forcibly_load_paths(&self, paths: &[Arc]) -> bool { @@ -4467,8 +4479,6 @@ impl BackgroundScanner { enable_progress_updates: bool, scan_jobs_rx: channel::Receiver, ) { - use futures::FutureExt as _; - if self .status_updates_tx .unbounded_send(ScanState::Started) @@ -4536,24 +4546,13 @@ impl BackgroundScanner { } fn send_status_update(&self, scanning: bool, barrier: SmallVec<[barrier::Sender; 1]>) -> bool { - let mut state = self.state.lock(); - if state.changed_paths.is_empty() && scanning { - return true; - } - - let new_snapshot = state.snapshot.clone(); - let old_snapshot = mem::replace(&mut state.prev_snapshot, new_snapshot.snapshot.clone()); - let changes = self.build_change_set(&old_snapshot, &new_snapshot, &state.changed_paths); - state.changed_paths.clear(); - - self.status_updates_tx - .unbounded_send(ScanState::Updated { - snapshot: new_snapshot, - changes, - scanning, - barrier, - }) - .is_ok() + send_status_update_inner( + self.phase, + self.state.clone(), + self.status_updates_tx.clone(), + scanning, + barrier, + ) } async fn scan_dir(&self, job: &ScanJob) -> Result<()> { @@ -4609,9 +4608,7 @@ impl BackgroundScanner { ); if let Some(local_repo) = repo { - self.update_git_statuses(UpdateGitStatusesJob { - local_repository: local_repo, - }); + let _ = self.schedule_git_statuses_update(local_repo); } } else if child_name == *GITIGNORE { match build_gitignore(&child_abs_path, self.fs.as_ref()).await { @@ -4968,8 +4965,6 @@ impl BackgroundScanner { } async fn update_ignore_statuses(&self, scan_job_tx: Sender) { - use futures::FutureExt as _; - let mut ignores_to_update = Vec::new(); let (ignore_queue_tx, ignore_queue_rx) = channel::unbounded(); let prev_snapshot; @@ -5119,10 +5114,10 @@ impl BackgroundScanner { state.snapshot.entries_by_id.edit(entries_by_id_edits, &()); } - async fn update_git_repositories(&self, dot_git_paths: Vec) { + fn schedule_git_repositories_update(&self, dot_git_paths: Vec) -> Task<()> { log::debug!("reloading repositories: {dot_git_paths:?}"); - let mut repo_updates = Vec::new(); + let mut repos_to_update = Vec::new(); { let mut state = self.state.lock(); let scan_id = state.snapshot.scan_id; @@ -5182,7 +5177,7 @@ impl BackgroundScanner { } }; - 
repo_updates.push(UpdateGitStatusesJob { local_repository }); + repos_to_update.push(local_repository); } // Remove any git repositories whose .git entry no longer exists. @@ -5213,223 +5208,98 @@ impl BackgroundScanner { }); } - let (mut updates_done_tx, mut updates_done_rx) = barrier::channel(); - self.executor - .scoped(|scope| { - scope.spawn(async { - for repo_update in repo_updates { - self.update_git_statuses(repo_update); - } - updates_done_tx.blocking_send(()).ok(); - }); - - scope.spawn(async { - loop { - select_biased! { - // Process any path refresh requests before moving on to process - // the queue of git statuses. - request = self.next_scan_request().fuse() => { - let Ok(request) = request else { break }; - if !self.process_scan_request(request, true).await { - return; - } - } - _ = updates_done_rx.recv().fuse() => break, - } - } - }); - }) - .await; + let mut status_updates = Vec::new(); + for local_repository in repos_to_update { + status_updates.push(self.schedule_git_statuses_update(local_repository)); + } + self.executor.spawn(async move { + let _updates_finished: Vec> = + join_all(status_updates).await; + }) } /// Update the git statuses for a given batch of entries. - fn update_git_statuses(&self, job: UpdateGitStatusesJob) { - log::trace!( - "updating git statuses for repo {:?}", - job.local_repository.work_directory.path - ); - let t0 = Instant::now(); - - let Some(statuses) = job - .local_repository - .repo() - .status(&[git::WORK_DIRECTORY_REPO_PATH.clone()]) - .log_err() - else { - return; - }; - log::trace!( - "computed git statuses for repo {:?} in {:?}", - job.local_repository.work_directory.path, - t0.elapsed() - ); - - let t0 = Instant::now(); - let mut changed_paths = Vec::new(); - let snapshot = self.state.lock().snapshot.snapshot.clone(); - - let Some(mut repository) = - snapshot.repository(job.local_repository.work_directory.path_key()) - else { - log::error!("Got an UpdateGitStatusesJob for a repository that isn't in the snapshot"); - debug_assert!(false); - return; - }; - - let mut new_entries_by_path = SumTree::new(&()); - for (repo_path, status) in statuses.entries.iter() { - let project_path = repository.work_directory.unrelativize(repo_path); - - new_entries_by_path.insert_or_replace( - StatusEntry { - repo_path: repo_path.clone(), - status: *status, - }, - &(), - ); + fn schedule_git_statuses_update( + &self, + local_repository: LocalRepositoryEntry, + ) -> oneshot::Receiver<()> { + let repository_path = local_repository.work_directory.path.clone(); + let state = self.state.clone(); + let (tx, rx) = oneshot::channel(); - if let Some(path) = project_path { - changed_paths.push(path); - } - } + self.state.lock().repository_scans.insert( + repository_path.clone(), + self.executor.spawn(async move { + log::trace!("updating git statuses for repo {repository_path:?}",); + let t0 = Instant::now(); + + let Some(statuses) = local_repository + .repo() + .status(&[git::WORK_DIRECTORY_REPO_PATH.clone()]) + .log_err() + else { + return; + }; + log::trace!( + "computed git statuses for repo {:?} in {:?}", + repository_path, + t0.elapsed() + ); - repository.statuses_by_path = new_entries_by_path; - let mut state = self.state.lock(); - state - .snapshot - .repositories - .insert_or_replace(repository, &()); + let t0 = Instant::now(); + let mut changed_paths = Vec::new(); + let snapshot = state.lock().snapshot.snapshot.clone(); - util::extend_sorted( - &mut state.changed_paths, - changed_paths, - usize::MAX, - Ord::cmp, - ); + let Some(mut repository) = + 
snapshot.repository(local_repository.work_directory.path_key()) + else { + log::error!( + "Tried to update git statuses for a repository that isn't in the snapshot" + ); + debug_assert!(false); + return; + }; - log::trace!( - "applied git status updates for repo {:?} in {:?}", - job.local_repository.work_directory.path, - t0.elapsed(), - ); - } + let mut new_entries_by_path = SumTree::new(&()); + for (repo_path, status) in statuses.entries.iter() { + let project_path = repository.work_directory.unrelativize(repo_path); - fn build_change_set( - &self, - old_snapshot: &Snapshot, - new_snapshot: &Snapshot, - event_paths: &[Arc], - ) -> UpdatedEntriesSet { - use BackgroundScannerPhase::*; - use PathChange::{Added, AddedOrUpdated, Loaded, Removed, Updated}; - - // Identify which paths have changed. Use the known set of changed - // parent paths to optimize the search. - let mut changes = Vec::new(); - let mut old_paths = old_snapshot.entries_by_path.cursor::(&()); - let mut new_paths = new_snapshot.entries_by_path.cursor::(&()); - let mut last_newly_loaded_dir_path = None; - old_paths.next(&()); - new_paths.next(&()); - for path in event_paths { - let path = PathKey(path.clone()); - if old_paths.item().map_or(false, |e| e.path < path.0) { - old_paths.seek_forward(&path, Bias::Left, &()); - } - if new_paths.item().map_or(false, |e| e.path < path.0) { - new_paths.seek_forward(&path, Bias::Left, &()); - } - loop { - match (old_paths.item(), new_paths.item()) { - (Some(old_entry), Some(new_entry)) => { - if old_entry.path > path.0 - && new_entry.path > path.0 - && !old_entry.path.starts_with(&path.0) - && !new_entry.path.starts_with(&path.0) - { - break; - } + new_entries_by_path.insert_or_replace( + StatusEntry { + repo_path: repo_path.clone(), + status: *status, + }, + &(), + ); - match Ord::cmp(&old_entry.path, &new_entry.path) { - Ordering::Less => { - changes.push((old_entry.path.clone(), old_entry.id, Removed)); - old_paths.next(&()); - } - Ordering::Equal => { - if self.phase == EventsReceivedDuringInitialScan { - if old_entry.id != new_entry.id { - changes.push(( - old_entry.path.clone(), - old_entry.id, - Removed, - )); - } - // If the worktree was not fully initialized when this event was generated, - // we can't know whether this entry was added during the scan or whether - // it was merely updated. 
- changes.push(( - new_entry.path.clone(), - new_entry.id, - AddedOrUpdated, - )); - } else if old_entry.id != new_entry.id { - changes.push((old_entry.path.clone(), old_entry.id, Removed)); - changes.push((new_entry.path.clone(), new_entry.id, Added)); - } else if old_entry != new_entry { - if old_entry.kind.is_unloaded() { - last_newly_loaded_dir_path = Some(&new_entry.path); - changes.push(( - new_entry.path.clone(), - new_entry.id, - Loaded, - )); - } else { - changes.push(( - new_entry.path.clone(), - new_entry.id, - Updated, - )); - } - } - old_paths.next(&()); - new_paths.next(&()); - } - Ordering::Greater => { - let is_newly_loaded = self.phase == InitialScan - || last_newly_loaded_dir_path - .as_ref() - .map_or(false, |dir| new_entry.path.starts_with(dir)); - changes.push(( - new_entry.path.clone(), - new_entry.id, - if is_newly_loaded { Loaded } else { Added }, - )); - new_paths.next(&()); - } - } - } - (Some(old_entry), None) => { - changes.push((old_entry.path.clone(), old_entry.id, Removed)); - old_paths.next(&()); - } - (None, Some(new_entry)) => { - let is_newly_loaded = self.phase == InitialScan - || last_newly_loaded_dir_path - .as_ref() - .map_or(false, |dir| new_entry.path.starts_with(dir)); - changes.push(( - new_entry.path.clone(), - new_entry.id, - if is_newly_loaded { Loaded } else { Added }, - )); - new_paths.next(&()); + if let Some(path) = project_path { + changed_paths.push(path); } - (None, None) => break, } - } - } - changes.into() + repository.statuses_by_path = new_entries_by_path; + let mut state = state.lock(); + state + .snapshot + .repositories + .insert_or_replace(repository, &()); + + util::extend_sorted( + &mut state.changed_paths, + changed_paths, + usize::MAX, + Ord::cmp, + ); + + log::trace!( + "applied git status updates for repo {:?} in {:?}", + repository_path, + t0.elapsed(), + ); + tx.send(()).ok(); + }), + ); + rx } async fn progress_timer(&self, running: bool) { @@ -5459,6 +5329,139 @@ impl BackgroundScanner { } } +fn send_status_update_inner( + phase: BackgroundScannerPhase, + state: Arc>, + status_updates_tx: UnboundedSender, + scanning: bool, + barrier: SmallVec<[barrier::Sender; 1]>, +) -> bool { + let mut state = state.lock(); + if state.changed_paths.is_empty() && scanning { + return true; + } + + let new_snapshot = state.snapshot.clone(); + let old_snapshot = mem::replace(&mut state.prev_snapshot, new_snapshot.snapshot.clone()); + let changes = build_change_set(phase, &old_snapshot, &new_snapshot, &state.changed_paths); + state.changed_paths.clear(); + + status_updates_tx + .unbounded_send(ScanState::Updated { + snapshot: new_snapshot, + changes, + scanning, + barrier, + }) + .is_ok() +} + +fn build_change_set( + phase: BackgroundScannerPhase, + old_snapshot: &Snapshot, + new_snapshot: &Snapshot, + event_paths: &[Arc], +) -> UpdatedEntriesSet { + use BackgroundScannerPhase::*; + use PathChange::{Added, AddedOrUpdated, Loaded, Removed, Updated}; + + // Identify which paths have changed. Use the known set of changed + // parent paths to optimize the search. 
+ let mut changes = Vec::new(); + let mut old_paths = old_snapshot.entries_by_path.cursor::(&()); + let mut new_paths = new_snapshot.entries_by_path.cursor::(&()); + let mut last_newly_loaded_dir_path = None; + old_paths.next(&()); + new_paths.next(&()); + for path in event_paths { + let path = PathKey(path.clone()); + if old_paths.item().map_or(false, |e| e.path < path.0) { + old_paths.seek_forward(&path, Bias::Left, &()); + } + if new_paths.item().map_or(false, |e| e.path < path.0) { + new_paths.seek_forward(&path, Bias::Left, &()); + } + loop { + match (old_paths.item(), new_paths.item()) { + (Some(old_entry), Some(new_entry)) => { + if old_entry.path > path.0 + && new_entry.path > path.0 + && !old_entry.path.starts_with(&path.0) + && !new_entry.path.starts_with(&path.0) + { + break; + } + + match Ord::cmp(&old_entry.path, &new_entry.path) { + Ordering::Less => { + changes.push((old_entry.path.clone(), old_entry.id, Removed)); + old_paths.next(&()); + } + Ordering::Equal => { + if phase == EventsReceivedDuringInitialScan { + if old_entry.id != new_entry.id { + changes.push((old_entry.path.clone(), old_entry.id, Removed)); + } + // If the worktree was not fully initialized when this event was generated, + // we can't know whether this entry was added during the scan or whether + // it was merely updated. + changes.push(( + new_entry.path.clone(), + new_entry.id, + AddedOrUpdated, + )); + } else if old_entry.id != new_entry.id { + changes.push((old_entry.path.clone(), old_entry.id, Removed)); + changes.push((new_entry.path.clone(), new_entry.id, Added)); + } else if old_entry != new_entry { + if old_entry.kind.is_unloaded() { + last_newly_loaded_dir_path = Some(&new_entry.path); + changes.push((new_entry.path.clone(), new_entry.id, Loaded)); + } else { + changes.push((new_entry.path.clone(), new_entry.id, Updated)); + } + } + old_paths.next(&()); + new_paths.next(&()); + } + Ordering::Greater => { + let is_newly_loaded = phase == InitialScan + || last_newly_loaded_dir_path + .as_ref() + .map_or(false, |dir| new_entry.path.starts_with(dir)); + changes.push(( + new_entry.path.clone(), + new_entry.id, + if is_newly_loaded { Loaded } else { Added }, + )); + new_paths.next(&()); + } + } + } + (Some(old_entry), None) => { + changes.push((old_entry.path.clone(), old_entry.id, Removed)); + old_paths.next(&()); + } + (None, Some(new_entry)) => { + let is_newly_loaded = phase == InitialScan + || last_newly_loaded_dir_path + .as_ref() + .map_or(false, |dir| new_entry.path.starts_with(dir)); + changes.push(( + new_entry.path.clone(), + new_entry.id, + if is_newly_loaded { Loaded } else { Added }, + )); + new_paths.next(&()); + } + (None, None) => break, + } + } + } + + changes.into() +} + fn swap_to_front(child_paths: &mut Vec, file: &OsStr) { let position = child_paths .iter() @@ -5521,10 +5524,6 @@ struct UpdateIgnoreStatusJob { scan_queue: Sender, } -struct UpdateGitStatusesJob { - local_repository: LocalRepositoryEntry, -} - pub trait WorktreeModelHandle { #[cfg(any(test, feature = "test-support"))] fn flush_fs_events<'a>( diff --git a/crates/worktree/src/worktree_tests.rs b/crates/worktree/src/worktree_tests.rs index 2cee728aec89e40500700c182ed617400085739e..34e1f0063e102b751a85f3e86ece2eaac2bd9d3b 100644 --- a/crates/worktree/src/worktree_tests.rs +++ b/crates/worktree/src/worktree_tests.rs @@ -24,6 +24,7 @@ use std::{ mem, path::{Path, PathBuf}, sync::Arc, + time::Duration, }; use util::{test::TempTree, ResultExt}; @@ -1504,6 +1505,7 @@ async fn test_bump_mtime_of_git_repo_workdir(cx: &mut 
TestAppContext) { &[(Path::new("b/c.txt"), StatusCode::Modified.index())], ); cx.executor().run_until_parked(); + cx.executor().advance_clock(Duration::from_secs(1)); let snapshot = tree.read_with(cx, |tree, _| tree.snapshot()); From d81a4ec7ecffb2c1f3b710bc08ce5f7ffa46c90d Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 5 Feb 2025 18:35:27 -0500 Subject: [PATCH 052/130] file_icons: Use a separate icon key for HTML files (#24323) This PR updates the file icon mappings such that HTML (`.html` and `.htm`) files map to the `html` key. This allows for the HTML file icons to be replaced in icon themes. Release Notes: - Icon themes: Added the ability to change the file icon for HTML (`.html`, `.htm`) files. --- assets/icons/file_icons/file_types.json | 4 ++-- crates/theme/src/icon_theme.rs | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/assets/icons/file_icons/file_types.json b/assets/icons/file_icons/file_types.json index 222b056bfc2309c81b68df84d79a6f916e5a63a3..9580c8fd4ec5b7820ee9661f1b2ec0257b194f2f 100644 --- a/assets/icons/file_icons/file_types.json +++ b/assets/icons/file_icons/file_types.json @@ -86,8 +86,8 @@ "hpp": "cpp", "hrl": "erlang", "hs": "haskell", - "htm": "template", - "html": "template", + "htm": "html", + "html": "html", "hxx": "cpp", "ib": "storage", "ico": "image", diff --git a/crates/theme/src/icon_theme.rs b/crates/theme/src/icon_theme.rs index 170b51a1f928fc465b5251b5a3b2247eef510215..1a99e42ed817aafd606a2efaa95baedb3697dfa9 100644 --- a/crates/theme/src/icon_theme.rs +++ b/crates/theme/src/icon_theme.rs @@ -84,6 +84,7 @@ const FILE_ICONS: &[(&str, &str)] = &[ ("haskell", "icons/file_icons/haskell.svg"), ("hcl", "icons/file_icons/hcl.svg"), ("heroku", "icons/file_icons/heroku.svg"), + ("html", "icons/file_icons/html.svg"), ("image", "icons/file_icons/image.svg"), ("java", "icons/file_icons/java.svg"), ("javascript", "icons/file_icons/javascript.svg"), From 4e5b11a0a70d05aa4324afa7d5193a000f8a53b7 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 5 Feb 2025 19:09:37 -0500 Subject: [PATCH 053/130] extensions_ui: Add general structure for filtering extensions by what they provide (#24325) This PR adds the general structure for filtering the extensions list by what the extensions provide. Currently flagged for Zed staff until we get some design direction on how best to present the filter. 
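For reference, a standalone sketch of how a set of selected capabilities can be serialized into the comma-delimited `provides` query parameter; the enum and helper below are illustrative (the real type has more variants and lives alongside the extension API code):

```rust
// Illustrative only: a standalone version of turning a set of "provides"
// capabilities into the comma-delimited `provides=` query parameter.
use std::collections::BTreeSet;
use std::fmt;

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum ExtensionProvides {
    Themes,
    IconThemes,
    Languages,
    LanguageServers,
}

impl fmt::Display for ExtensionProvides {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // kebab-case, matching the wire format shown for the extensions API.
        f.write_str(match self {
            Self::Themes => "themes",
            Self::IconThemes => "icon-themes",
            Self::Languages => "languages",
            Self::LanguageServers => "language-servers",
        })
    }
}

fn provides_query(filter: &BTreeSet<ExtensionProvides>) -> Option<String> {
    if filter.is_empty() {
        return None;
    }
    let joined = filter
        .iter()
        .map(|p| p.to_string())
        .collect::<Vec<_>>()
        .join(",");
    Some(format!("provides={joined}"))
}

fn main() {
    let filter = BTreeSet::from([ExtensionProvides::Themes, ExtensionProvides::IconThemes]);
    assert_eq!(
        provides_query(&filter).as_deref(),
        Some("provides=themes,icon-themes")
    );
    assert_eq!(provides_query(&BTreeSet::new()), None);
}
```

On the server side the parameter is split on commas and each entry is parsed back into the enum, with unrecognized values ignored.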
Release Notes: - N/A --- Cargo.lock | 1 + crates/extension_host/src/extension_host.rs | 15 +++++- crates/extensions_ui/Cargo.toml | 1 + crates/extensions_ui/src/extensions_ui.rs | 60 ++++++++++++++++++--- crates/rpc/src/extension.rs | 13 ++++- 5 files changed, 81 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 651ca95a2ae87b775693994bcd2d925a54225678..06a48f8de738eb003aa9dbb99c71a1ae20ac3615 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4490,6 +4490,7 @@ dependencies = [ "db", "editor", "extension_host", + "feature_flags", "fs", "fuzzy", "gpui", diff --git a/crates/extension_host/src/extension_host.rs b/crates/extension_host/src/extension_host.rs index 35a58af2e4ca2c913913adb760aa958c5e354c9d..69c26d44a46b61ed41906498337d0899670cd1fa 100644 --- a/crates/extension_host/src/extension_host.rs +++ b/crates/extension_host/src/extension_host.rs @@ -8,8 +8,9 @@ mod extension_store_test; use anyhow::{anyhow, bail, Context as _, Result}; use async_compression::futures::bufread::GzipDecoder; use async_tar::Archive; +use client::ExtensionProvides; use client::{proto, telemetry::Telemetry, Client, ExtensionMetadata, GetExtensionsResponse}; -use collections::{btree_map, BTreeMap, HashMap, HashSet}; +use collections::{btree_map, BTreeMap, BTreeSet, HashMap, HashSet}; use extension::extension_builder::{CompileExtensionOptions, ExtensionBuilder}; pub use extension::ExtensionManifest; use extension::{ @@ -464,6 +465,7 @@ impl ExtensionStore { pub fn fetch_extensions( &self, search: Option<&str>, + provides_filter: Option<&BTreeSet>, cx: &mut Context, ) -> Task>> { let version = CURRENT_SCHEMA_VERSION.to_string(); @@ -472,6 +474,17 @@ impl ExtensionStore { query.push(("filter", search)); } + let provides_filter = provides_filter.map(|provides_filter| { + provides_filter + .iter() + .map(|provides| provides.to_string()) + .collect::>() + .join(",") + }); + if let Some(provides_filter) = provides_filter.as_deref() { + query.push(("provides", provides_filter)); + } + self.fetch_extensions_from_api("/extensions", &query, cx) } diff --git a/crates/extensions_ui/Cargo.toml b/crates/extensions_ui/Cargo.toml index 1df235c82209b5d8e45b5d8104ba5cd08b50482c..afdb3bf0a359d97640c905f4d7e6955d2d0b02d0 100644 --- a/crates/extensions_ui/Cargo.toml +++ b/crates/extensions_ui/Cargo.toml @@ -18,6 +18,7 @@ collections.workspace = true db.workspace = true editor.workspace = true extension_host.workspace = true +feature_flags.workspace = true fs.workspace = true fuzzy.workspace = true gpui.workspace = true diff --git a/crates/extensions_ui/src/extensions_ui.rs b/crates/extensions_ui/src/extensions_ui.rs index 6d9cfa3f4b353b58f8f4a876c683e71e318300a8..afca5616db7f0d3867300ec31893f945b9cd660c 100644 --- a/crates/extensions_ui/src/extensions_ui.rs +++ b/crates/extensions_ui/src/extensions_ui.rs @@ -10,6 +10,7 @@ use client::{ExtensionMetadata, ExtensionProvides}; use collections::{BTreeMap, BTreeSet}; use editor::{Editor, EditorElement, EditorStyle}; use extension_host::{ExtensionManifest, ExtensionOperation, ExtensionStore}; +use feature_flags::FeatureFlagAppExt as _; use fuzzy::{match_strings, StringMatchCandidate}; use gpui::{ actions, uniform_list, Action, App, ClipboardItem, Context, Entity, EventEmitter, Flatten, @@ -210,6 +211,7 @@ pub struct ExtensionsPage { filtered_remote_extension_indices: Vec, query_editor: Entity, query_contains_error: bool, + provides_filter: Option, _subscriptions: [gpui::Subscription; 2], extension_fetch_task: Option>, upsells: BTreeSet, @@ -261,12 +263,13 @@ impl 
ExtensionsPage { filtered_remote_extension_indices: Vec::new(), remote_extension_entries: Vec::new(), query_contains_error: false, + provides_filter: None, extension_fetch_task: None, _subscriptions: subscriptions, query_editor, upsells: BTreeSet::default(), }; - this.fetch_extensions(None, cx); + this.fetch_extensions(None, None, cx); this }) } @@ -363,7 +366,12 @@ impl ExtensionsPage { cx.notify(); } - fn fetch_extensions(&mut self, search: Option, cx: &mut Context) { + fn fetch_extensions( + &mut self, + search: Option, + provides_filter: Option>, + cx: &mut Context, + ) { self.is_fetching_extensions = true; cx.notify(); @@ -374,7 +382,7 @@ impl ExtensionsPage { }); let remote_extensions = extension_store.update(cx, |store, cx| { - store.fetch_extensions(search.as_deref(), cx) + store.fetch_extensions(search.as_deref(), provides_filter.as_ref(), cx) }); cx.spawn(move |this, mut cx| async move { @@ -953,11 +961,15 @@ impl ExtensionsPage { ) { if let editor::EditorEvent::Edited { .. } = event { self.query_contains_error = false; - self.fetch_extensions_debounced(cx); - self.refresh_feature_upsells(cx); + self.refresh_search(cx); } } + fn refresh_search(&mut self, cx: &mut Context) { + self.fetch_extensions_debounced(cx); + self.refresh_feature_upsells(cx); + } + fn fetch_extensions_debounced(&mut self, cx: &mut Context) { self.extension_fetch_task = Some(cx.spawn(|this, mut cx| async move { let search = this @@ -978,7 +990,7 @@ impl ExtensionsPage { }; this.update(&mut cx, |this, cx| { - this.fetch_extensions(search, cx); + this.fetch_extensions(search, Some(BTreeSet::from_iter(this.provides_filter)), cx); }) .ok(); })); @@ -1162,7 +1174,41 @@ impl Render for ExtensionsPage { .w_full() .gap_2() .justify_between() - .child(h_flex().child(self.render_search(cx))) + .child( + h_flex() + .gap_2() + .child(self.render_search(cx)) + .map(|parent| { + // Note: Staff-only until this gets design input. 
+ if !cx.is_staff() { + return parent; + } + + parent.child(CheckboxWithLabel::new( + "icon-themes-filter", + Label::new("Icon themes"), + match self.provides_filter { + Some(ExtensionProvides::IconThemes) => { + ToggleState::Selected + } + _ => ToggleState::Unselected, + }, + cx.listener(|this, checked, _window, cx| { + match checked { + ToggleState::Unselected + | ToggleState::Indeterminate => { + this.provides_filter = None + } + ToggleState::Selected => { + this.provides_filter = + Some(ExtensionProvides::IconThemes) + } + }; + this.refresh_search(cx); + }), + )) + }), + ) .child( h_flex() .child( diff --git a/crates/rpc/src/extension.rs b/crates/rpc/src/extension.rs index f1dcdc28d669251e811a15d8fa7cafb35d7eebcf..e8dd22b1bbae970d60f41e508ec4272637783604 100644 --- a/crates/rpc/src/extension.rs +++ b/crates/rpc/src/extension.rs @@ -19,7 +19,18 @@ pub struct ExtensionApiManifest { } #[derive( - Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize, EnumString, + Debug, + PartialEq, + Eq, + PartialOrd, + Ord, + Hash, + Clone, + Copy, + Serialize, + Deserialize, + EnumString, + strum::Display, )] #[serde(rename_all = "kebab-case")] #[strum(serialize_all = "kebab-case")] From 3374514f82778da0f816ec48862d363e9d448350 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 6 Feb 2025 00:26:24 +0000 Subject: [PATCH 054/130] Update Rust crate zed_llm_client to v0.1.2 (#24329) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [zed_llm_client](https://crates.io/crates/zed_llm_client) | workspace.dependencies | patch | `0.1.1` -> `0.1.2` | --- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 06a48f8de738eb003aa9dbb99c71a1ae20ac3615..ebb8f7fffb93f0db5efca3d2d1b68eec2e9d20b3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -16650,9 +16650,9 @@ dependencies = [ [[package]] name = "zed_llm_client" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54ca07d631d9d758f1820c7a7e7854ca00619b9783a5b6b3b6057fef06c786cb" +checksum = "4ab9496dc5c80b2c5fb9654a76d7208d31b53130fb282085fcdde07653831843" dependencies = [ "serde", ] From 0671be215fe89ff9e3e03e69badadc804adb8b87 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 5 Feb 2025 20:05:43 -0500 Subject: [PATCH 055/130] gpui: Render SVGs at 2x size when rendered in an `img` (#24332) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR adjusts the rendering of SVGs when used with the `img` element such that they are rendered at 2x their displayed size. 
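Roughly, the sizing rule this patch centralizes looks like the sketch below: the rasterization size is the laid-out size times the window scale factor times an extra supersampling factor of 2. Plain `f32` tuples stand in for gpui's `Pixels`/`DevicePixels` types; the constant name matches the one the diff introduces.

```rust
/// SVGs are rasterized at twice their displayed size so they stay crisp,
/// especially on high-DPI surfaces.
const SMOOTH_SVG_SCALE_FACTOR: f32 = 2.0;

/// Given a layout size in logical pixels and the window's scale factor,
/// returns the pixel dimensions at which to rasterize the SVG.
fn svg_raster_size(logical: (f32, f32), window_scale_factor: f32) -> (i32, i32) {
    let scale = window_scale_factor * SMOOTH_SVG_SCALE_FACTOR;
    (
        (logical.0 * scale).ceil() as i32,
        (logical.1 * scale).ceil() as i32,
    )
}

fn main() {
    // A 16x16 icon on a 2x (Retina) display is rasterized at 64x64.
    assert_eq!(svg_raster_size((16.0, 16.0), 2.0), (64, 64));
}
```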
This results in much crisper icons for icons loaded by icon themes: Screenshot 2025-02-05 at 7 39 48 PM Screenshot 2025-02-05 at 7 40 01 PM Release Notes: - Improved the resolution of icons rendered by icon themes. --- crates/gpui/src/elements/img.rs | 3 ++- crates/gpui/src/svg_renderer.rs | 3 +++ crates/gpui/src/window.rs | 9 ++++----- 3 files changed, 9 insertions(+), 6 deletions(-) diff --git a/crates/gpui/src/elements/img.rs b/crates/gpui/src/elements/img.rs index 961429b826c56c99f20428ea2ffd2bd169bb7049..2ce63d7e30f9c22bff7b50f7cf9e0c5062133814 100644 --- a/crates/gpui/src/elements/img.rs +++ b/crates/gpui/src/elements/img.rs @@ -3,6 +3,7 @@ use crate::{ DefiniteLength, Element, ElementId, GlobalElementId, Hitbox, Image, InteractiveElement, Interactivity, IntoElement, LayoutId, Length, ObjectFit, Pixels, RenderImage, Resource, SharedString, SharedUri, StyleRefinement, Styled, SvgSize, Task, Window, + SMOOTH_SVG_SCALE_FACTOR, }; use anyhow::{anyhow, Result}; @@ -610,7 +611,7 @@ impl Asset for ImageAssetLoader { } else { let pixmap = // TODO: Can we make svgs always rescale? - svg_renderer.render_pixmap(&bytes, SvgSize::ScaleFactor(1.0))?; + svg_renderer.render_pixmap(&bytes, SvgSize::ScaleFactor(SMOOTH_SVG_SCALE_FACTOR))?; let mut buffer = ImageBuffer::from_raw(pixmap.width(), pixmap.height(), pixmap.take()).unwrap(); diff --git a/crates/gpui/src/svg_renderer.rs b/crates/gpui/src/svg_renderer.rs index f99880ec5e31cc947047f8027ae73f590571fd55..306f8ed6518157f9bc6de6721da94e9befaaac6c 100644 --- a/crates/gpui/src/svg_renderer.rs +++ b/crates/gpui/src/svg_renderer.rs @@ -3,6 +3,9 @@ use anyhow::anyhow; use resvg::tiny_skia::Pixmap; use std::{hash::Hash, sync::Arc}; +/// When rendering SVGs, we render them at twice the size to get a higher-quality result. +pub const SMOOTH_SVG_SCALE_FACTOR: f32 = 2.; + #[derive(Clone, PartialEq, Hash, Eq)] pub(crate) struct RenderSvgParams { pub(crate) path: SharedString, diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 1b5b6bc71788f77b2ac3a87aa1aa7366389f1dea..e0e473974db2b08694c3eeb7a9574037b0d2ecbc 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -13,7 +13,7 @@ use crate::{ Subscription, TaffyLayoutEngine, Task, TextStyle, TextStyleRefinement, TransformationMatrix, Underline, UnderlineStyle, WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowControls, WindowDecorations, WindowOptions, WindowParams, WindowTextSystem, - SUBPIXEL_VARIANTS, + SMOOTH_SVG_SCALE_FACTOR, SUBPIXEL_VARIANTS, }; use anyhow::{anyhow, Context as _, Result}; use collections::{FxHashMap, FxHashSet}; @@ -2553,12 +2553,11 @@ impl Window { let element_opacity = self.element_opacity(); let scale_factor = self.scale_factor(); let bounds = bounds.scale(scale_factor); - // Render the SVG at twice the size to get a higher quality result. 
let params = RenderSvgParams { path, - size: bounds - .size - .map(|pixels| DevicePixels::from((pixels.0 * 2.).ceil() as i32)), + size: bounds.size.map(|pixels| { + DevicePixels::from((pixels.0 * SMOOTH_SVG_SCALE_FACTOR).ceil() as i32) + }), }; let Some(tile) = From 5d1c56829a797a57ee69c46677eb80a85f4eb302 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 5 Feb 2025 18:32:07 -0700 Subject: [PATCH 056/130] Add staged checkboxes to multibuffer headers (#24308) Co-authored-by: Mikayla Release Notes: - N/A --------- Co-authored-by: Mikayla --- crates/editor/src/editor.rs | 12 +++- crates/editor/src/element.rs | 10 +++ crates/git_ui/src/git_panel.rs | 97 +++++++++++++++++++++++++----- crates/git_ui/src/project_diff.rs | 38 +++++++++--- crates/project/src/git.rs | 13 +++- crates/ui/src/traits/toggleable.rs | 18 +++--- 6 files changed, 151 insertions(+), 37 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 6304a56b66585f48898d1bbabf625e1fbee68754..170371164f532e8c069eaa87615ffc9f19ed804b 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -124,7 +124,8 @@ pub use multi_buffer::{ ToOffset, ToPoint, }; use multi_buffer::{ - ExpandExcerptDirection, MultiBufferDiffHunk, MultiBufferPoint, MultiBufferRow, ToOffsetUtf16, + ExcerptInfo, ExpandExcerptDirection, MultiBufferDiffHunk, MultiBufferPoint, MultiBufferRow, + ToOffsetUtf16, }; use project::{ lsp_store::{FormatTrigger, LspFormatTarget, OpenLspBufferHandle}, @@ -580,6 +581,15 @@ struct BufferOffset(usize); pub trait Addon: 'static { fn extend_key_context(&self, _: &mut KeyContext, _: &App) {} + fn render_buffer_header_controls( + &self, + _: &ExcerptInfo, + _: &Window, + _: &App, + ) -> Option { + None + } + fn to_any(&self) -> &dyn std::any::Any; } diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 4669f5d57fbba7d5e62f030f9345e0288f8c1977..9d0c50b89668dac66a8c040b64803731596d7709 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -2633,6 +2633,16 @@ impl EditorElement { ), ) }) + .children( + self.editor + .read(cx) + .addons + .values() + .filter_map(|addon| { + addon.render_buffer_header_controls(for_excerpt, window, cx) + }) + .take(1), + ) .child( h_flex() .cursor_pointer() diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 19028168dd21ba745175a1842cf4b3145429293e..1af585bafa150c7abd778bbc134c73452dd78d36 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -14,8 +14,9 @@ use git::repository::RepoPath; use git::status::FileStatus; use git::{CommitAllChanges, CommitChanges, ToggleStaged}; use gpui::*; -use language::Buffer; +use language::{Buffer, File}; use menu::{SelectFirst, SelectLast, SelectNext, SelectPrev}; +use multi_buffer::ExcerptInfo; use panel::PanelHeader; use project::git::{GitEvent, Repository}; use project::{Fs, Project, ProjectPath}; @@ -1007,19 +1008,19 @@ impl GitPanel { }; if status_entry.status.is_created() { self.new_count += 1; - if self.entry_appears_staged(status_entry) != Some(false) { + if self.entry_is_staged(status_entry) != Some(false) { self.new_staged_count += 1; } } else { self.tracked_count += 1; - if self.entry_appears_staged(status_entry) != Some(false) { + if self.entry_is_staged(status_entry) != Some(false) { self.tracked_staged_count += 1; } } } } - fn entry_appears_staged(&self, entry: &GitStatusEntry) -> Option { + fn entry_is_staged(&self, entry: &GitStatusEntry) -> Option { for pending in self.pending.iter().rev() { 
if pending.repo_paths.contains(&entry.repo_path) { return Some(pending.will_become_staged); @@ -1301,6 +1302,49 @@ impl GitPanel { ) } + pub fn render_buffer_header_controls( + &self, + entity: &Entity, + file: &Arc, + _: &Window, + cx: &App, + ) -> Option { + let repo = self.active_repository.as_ref()?.read(cx); + let repo_path = repo.worktree_id_path_to_repo_path(file.worktree_id(cx), file.path())?; + let ix = self.entries_by_path.get(&repo_path)?; + let entry = self.entries.get(*ix)?; + + let is_staged = self.entry_is_staged(entry.status_entry()?); + + let checkbox = Checkbox::new("stage-file", is_staged.into()) + .disabled(!self.has_write_access(cx)) + .fill() + .elevation(ElevationIndex::Surface) + .on_click({ + let entry = entry.clone(); + let git_panel = entity.downgrade(); + move |_, window, cx| { + git_panel + .update(cx, |this, cx| { + this.toggle_staged_for_entry(&entry, window, cx); + cx.stop_propagation(); + }) + .ok(); + } + }); + Some( + h_flex() + .id("start-slot") + .child(checkbox) + .child(git_status_icon(entry.status_entry()?.status, cx)) + .on_mouse_down(MouseButton::Left, |_, _, cx| { + // prevent the list item active state triggering when toggling checkbox + cx.stop_propagation(); + }) + .into_any_element(), + ) + } + fn render_entries( &self, has_write_access: bool, @@ -1473,14 +1517,6 @@ impl GitPanel { .map(|name| name.to_string_lossy().into_owned()) .unwrap_or_else(|| entry.repo_path.to_string_lossy().into_owned()); - let pending = self.pending.iter().rev().find_map(|pending| { - if pending.repo_paths.contains(&entry.repo_path) { - Some(pending.will_become_staged) - } else { - None - } - }); - let repo_path = entry.repo_path.clone(); let selected = self.selected_entry == Some(ix); let status_style = GitPanelSettings::get_global(cx).status_style; @@ -1512,10 +1548,7 @@ impl GitPanel { let id: ElementId = ElementId::Name(format!("entry_{}", display_name).into()); - let mut is_staged = pending - .or_else(|| entry.is_staged) - .map(ToggleState::from) - .unwrap_or(ToggleState::Indeterminate); + let mut is_staged: ToggleState = self.entry_is_staged(entry).into(); if !self.has_staged_changes() && !entry.status.is_created() { is_staged = ToggleState::Selected; @@ -1597,6 +1630,16 @@ impl GitPanel { ) .into_any_element() } + + fn has_write_access(&self, cx: &App) -> bool { + let room = self + .workspace + .upgrade() + .and_then(|workspace| workspace.read(cx).active_call()?.read(cx).room().cloned()); + + room.as_ref() + .map_or(true, |room| room.read(cx).local_participant().can_write()) + } } impl Render for GitPanel { @@ -1734,6 +1777,28 @@ impl EventEmitter for GitPanel {} impl EventEmitter for GitPanel {} +pub(crate) struct GitPanelAddon { + pub(crate) git_panel: Entity, +} + +impl editor::Addon for GitPanelAddon { + fn to_any(&self) -> &dyn std::any::Any { + self + } + + fn render_buffer_header_controls( + &self, + excerpt_info: &ExcerptInfo, + window: &Window, + cx: &App, + ) -> Option { + let file = excerpt_info.buffer.file()?; + let git_panel = self.git_panel.read(cx); + + git_panel.render_buffer_header_controls(&self.git_panel, &file, window, cx) + } +} + impl Panel for GitPanel { fn persistent_name() -> &'static str { "GitPanel" diff --git a/crates/git_ui/src/project_diff.rs b/crates/git_ui/src/project_diff.rs index d35bb59d6a79e4b0e111259f0f707952ad94ed95..1581d0fc8ad943a6723ba5ce7ba938b53708a775 100644 --- a/crates/git_ui/src/project_diff.rs +++ b/crates/git_ui/src/project_diff.rs @@ -21,7 +21,7 @@ use workspace::{ ItemNavHistory, ToolbarItemLocation, Workspace, 
}; -use crate::git_panel::{GitPanel, GitStatusEntry}; +use crate::git_panel::{GitPanel, GitPanelAddon, GitStatusEntry}; actions!(git, [Diff]); @@ -29,6 +29,7 @@ pub(crate) struct ProjectDiff { multibuffer: Entity, editor: Entity, project: Entity, + git_panel: Entity, git_state: Entity, workspace: WeakEntity, focus_handle: FocusHandle, @@ -79,9 +80,16 @@ impl ProjectDiff { workspace.activate_item(&existing, true, true, window, cx); existing } else { - let workspace_handle = cx.entity().downgrade(); - let project_diff = - cx.new(|cx| Self::new(workspace.project().clone(), workspace_handle, window, cx)); + let workspace_handle = cx.entity(); + let project_diff = cx.new(|cx| { + Self::new( + workspace.project().clone(), + workspace_handle, + workspace.panel::(cx).unwrap(), + window, + cx, + ) + }); workspace.add_item_to_active_pane( Box::new(project_diff.clone()), None, @@ -100,7 +108,8 @@ impl ProjectDiff { fn new( project: Entity, - workspace: WeakEntity, + workspace: Entity, + git_panel: Entity, window: &mut Window, cx: &mut Context, ) -> Self { @@ -116,6 +125,9 @@ impl ProjectDiff { cx, ); diff_display_editor.set_expand_all_diff_hunks(cx); + diff_display_editor.register_addon(GitPanelAddon { + git_panel: git_panel.clone(), + }); diff_display_editor }); cx.subscribe_in(&editor, window, Self::handle_editor_event) @@ -141,7 +153,8 @@ impl ProjectDiff { Self { project, git_state: git_state.clone(), - workspace, + git_panel: git_panel.clone(), + workspace: workspace.downgrade(), focus_handle, editor, multibuffer, @@ -423,9 +436,16 @@ impl Item for ProjectDiff { where Self: Sized, { - Some( - cx.new(|cx| ProjectDiff::new(self.project.clone(), self.workspace.clone(), window, cx)), - ) + let workspace = self.workspace.upgrade()?; + Some(cx.new(|cx| { + ProjectDiff::new( + self.project.clone(), + workspace, + self.git_panel.clone(), + window, + cx, + ) + })) } fn is_dirty(&self, cx: &App) -> bool { diff --git a/crates/project/src/git.rs b/crates/project/src/git.rs index 38a891005916fba97dccc55fef45b04577555ec6..f4ab1791a7a1aa8180ab6b6e2234799898ddb879 100644 --- a/crates/project/src/git.rs +++ b/crates/project/src/git.rs @@ -15,6 +15,7 @@ use gpui::{ use language::{Buffer, LanguageRegistry}; use rpc::{proto, AnyProtoClient}; use settings::WorktreeId; +use std::path::Path; use std::sync::Arc; use text::BufferId; use util::{maybe, ResultExt}; @@ -341,10 +342,18 @@ impl Repository { } pub fn project_path_to_repo_path(&self, path: &ProjectPath) -> Option { - if path.worktree_id != self.worktree_id { + self.worktree_id_path_to_repo_path(path.worktree_id, &path.path) + } + + pub fn worktree_id_path_to_repo_path( + &self, + worktree_id: WorktreeId, + path: &Path, + ) -> Option { + if worktree_id != self.worktree_id { return None; } - self.repository_entry.relativize(&path.path).log_err() + self.repository_entry.relativize(path).log_err() } pub fn open_commit_buffer( diff --git a/crates/ui/src/traits/toggleable.rs b/crates/ui/src/traits/toggleable.rs index 8771cedaa70e73ef0b8da4c92b9767afd3ab16f5..f731f9965e9c18f6e60cfadf986c68f2f9a3f122 100644 --- a/crates/ui/src/traits/toggleable.rs +++ b/crates/ui/src/traits/toggleable.rs @@ -58,12 +58,12 @@ impl From for ToggleState { } } -// impl From> for ToggleState { -// fn from(selected: Option) -> Self { -// match selected { -// Some(true) => Self::Selected, -// Some(false) => Self::Unselected, -// None => Self::Unselected, -// } -// } -// } +impl From> for ToggleState { + fn from(selected: Option) -> Self { + match selected { + Some(true) => Self::Selected, + 
Some(false) => Self::Unselected, + None => Self::Indeterminate, + } + } +} From 0a70627f005a1b070dbe495ffd9ea0e4a3817da1 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 5 Feb 2025 18:34:14 -0700 Subject: [PATCH 057/130] Split conflicts into their own section (#24324) Co-Authored-By: Mikayla Release Notes: - N/A --- .../20221109000000_test_schema.sql | 1 + ...05232017_add_conflicts_to_repositories.sql | 2 + crates/collab/src/db/queries/projects.rs | 11 +++ crates/collab/src/db/queries/rooms.rs | 8 +++ .../src/db/tables/worktree_repository.rs | 2 + crates/git/src/repository.rs | 18 +++++ crates/git/src/status.rs | 6 +- crates/git_ui/src/git_panel.rs | 68 +++++++++++++------ crates/git_ui/src/project_diff.rs | 31 ++++++--- crates/project/src/git.rs | 6 ++ crates/proto/proto/zed.proto | 1 + crates/sum_tree/src/tree_map.rs | 2 +- crates/worktree/src/worktree.rs | 57 +++++++++++++--- 13 files changed, 171 insertions(+), 42 deletions(-) create mode 100644 crates/collab/migrations/20250205232017_add_conflicts_to_repositories.sql diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index dbd63cf679d666f966c033305d3a17a9e6b45520..ba6fdeb9290d38412665868d8b18e5fd719c61c2 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -100,6 +100,7 @@ CREATE TABLE "worktree_repositories" ( "branch" VARCHAR, "scan_id" INTEGER NOT NULL, "is_deleted" BOOL NOT NULL, + "current_merge_conflicts" VARCHAR, PRIMARY KEY(project_id, worktree_id, work_directory_id), FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE, FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE diff --git a/crates/collab/migrations/20250205232017_add_conflicts_to_repositories.sql b/crates/collab/migrations/20250205232017_add_conflicts_to_repositories.sql new file mode 100644 index 0000000000000000000000000000000000000000..e6e0770bba8cbbb7649689705c526ead9629518d --- /dev/null +++ b/crates/collab/migrations/20250205232017_add_conflicts_to_repositories.sql @@ -0,0 +1,2 @@ +ALTER TABLE worktree_repositories +ADD COLUMN current_merge_conflicts VARCHAR NULL; diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index fd83cd3da8f9dd08bebbadd9a27684202bc27241..2755f1223037b1f5204e1ade3a0aa199780d5ac5 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -333,6 +333,9 @@ impl Database { scan_id: ActiveValue::set(update.scan_id as i64), branch: ActiveValue::set(repository.branch.clone()), is_deleted: ActiveValue::set(false), + current_merge_conflicts: ActiveValue::Set(Some( + serde_json::to_string(&repository.current_merge_conflicts).unwrap(), + )), }, )) .on_conflict( @@ -769,6 +772,13 @@ impl Database { updated_statuses.push(db_status_to_proto(status_entry)?); } + let current_merge_conflicts = db_repository_entry + .current_merge_conflicts + .as_ref() + .map(|conflicts| serde_json::from_str(&conflicts)) + .transpose()? 
+ .unwrap_or_default(); + worktree.repository_entries.insert( db_repository_entry.work_directory_id as u64, proto::RepositoryEntry { @@ -776,6 +786,7 @@ impl Database { branch: db_repository_entry.branch, updated_statuses, removed_statuses: Vec::new(), + current_merge_conflicts, }, ); } diff --git a/crates/collab/src/db/queries/rooms.rs b/crates/collab/src/db/queries/rooms.rs index 4a46e79fa27d80c3b3b987064d9529dedcdedfb7..8c9089dd756019a9c2d0f71616f63ea693cf559d 100644 --- a/crates/collab/src/db/queries/rooms.rs +++ b/crates/collab/src/db/queries/rooms.rs @@ -736,11 +736,19 @@ impl Database { } } + let current_merge_conflicts = db_repository + .current_merge_conflicts + .as_ref() + .map(|conflicts| serde_json::from_str(&conflicts)) + .transpose()? + .unwrap_or_default(); + worktree.updated_repositories.push(proto::RepositoryEntry { work_directory_id: db_repository.work_directory_id as u64, branch: db_repository.branch, updated_statuses, removed_statuses, + current_merge_conflicts, }); } } diff --git a/crates/collab/src/db/tables/worktree_repository.rs b/crates/collab/src/db/tables/worktree_repository.rs index 6f86ff0c2dda3e4ce310b9da056857e6ada98686..66ff7b76430ef0cc0d4681c223ebdd83355f6a90 100644 --- a/crates/collab/src/db/tables/worktree_repository.rs +++ b/crates/collab/src/db/tables/worktree_repository.rs @@ -13,6 +13,8 @@ pub struct Model { pub scan_id: i64, pub branch: Option, pub is_deleted: bool, + // JSON array typed string + pub current_merge_conflicts: Option, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 50191ea6836dc33a7c3f7d084e2871e1b0877255..58dc9a9dce724167e242165a4dae6e6f6369efdb 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -46,6 +46,8 @@ pub trait GitRepository: Send + Sync { /// Returns the SHA of the current HEAD. 
fn head_sha(&self) -> Option; + fn merge_head_shas(&self) -> Vec; + /// Returns the list of git statuses, sorted by path fn status(&self, path_prefixes: &[RepoPath]) -> Result; @@ -162,6 +164,18 @@ impl GitRepository for RealGitRepository { Some(self.repository.lock().head().ok()?.target()?.to_string()) } + fn merge_head_shas(&self) -> Vec { + let mut shas = Vec::default(); + self.repository + .lock() + .mergehead_foreach(|oid| { + shas.push(oid.to_string()); + true + }) + .ok(); + shas + } + fn status(&self, path_prefixes: &[RepoPath]) -> Result { let working_directory = self .repository @@ -387,6 +401,10 @@ impl GitRepository for FakeGitRepository { None } + fn merge_head_shas(&self) -> Vec { + vec![] + } + fn dot_git_dir(&self) -> PathBuf { let state = self.state.lock(); state.dot_git_dir.clone() diff --git a/crates/git/src/status.rs b/crates/git/src/status.rs index a882a5a14bf4bfa86422f027593ef3d33c5615c6..5cd7310b23672148bd3346a3a31b67e77b6a1e12 100644 --- a/crates/git/src/status.rs +++ b/crates/git/src/status.rs @@ -134,7 +134,11 @@ impl FileStatus { } pub fn has_changes(&self) -> bool { - self.is_modified() || self.is_created() || self.is_deleted() || self.is_untracked() + self.is_modified() + || self.is_created() + || self.is_deleted() + || self.is_untracked() + || self.is_conflicted() } pub fn is_modified(self) -> bool { diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 1af585bafa150c7abd778bbc134c73452dd78d36..d47e066f33729792e1c5af50544a403a6d5dfd73 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -76,30 +76,29 @@ struct SerializedGitPanel { #[derive(Debug, PartialEq, Eq, Clone, Copy)] enum Section { + Conflict, Tracked, New, } -impl Section { - pub fn contains(&self, status: FileStatus) -> bool { - match self { - Section::Tracked => !status.is_created(), - Section::New => status.is_created(), - } - } -} - #[derive(Debug, PartialEq, Eq, Clone)] struct GitHeaderEntry { header: Section, } impl GitHeaderEntry { - pub fn contains(&self, status_entry: &GitStatusEntry) -> bool { - self.header.contains(status_entry.status) + pub fn contains(&self, status_entry: &GitStatusEntry, repo: &Repository) -> bool { + let this = &self.header; + let status = status_entry.status; + match this { + Section::Conflict => repo.has_conflict(&status_entry.repo_path), + Section::Tracked => !status.is_created(), + Section::New => status.is_created(), + } } pub fn title(&self) -> &'static str { match self.header { + Section::Conflict => "Conflicts", Section::Tracked => "Changed", Section::New => "New", } @@ -160,6 +159,8 @@ pub struct GitPanel { commit_task: Task>, commit_pending: bool, + conflicted_staged_count: usize, + conflicted_count: usize, tracked_staged_count: usize, tracked_count: usize, new_staged_count: usize, @@ -276,6 +277,8 @@ impl GitPanel { commit_editor, project, workspace, + conflicted_count: 0, + conflicted_staged_count: 0, tracked_staged_count: 0, tracked_count: 0, new_staged_count: 0, @@ -577,12 +580,13 @@ impl GitPanel { } GitListEntry::Header(section) => { let goal_staged_state = !self.header_state(section.header).selected(); + let repository = active_repository.read(cx); let entries = self .entries .iter() .filter_map(|entry| entry.status_entry()) .filter(|status_entry| { - section.contains(&status_entry) + section.contains(&status_entry, repository) && status_entry.is_staged != Some(goal_staged_state) }) .map(|status_entry| status_entry.repo_path.clone()) @@ -601,7 +605,8 @@ impl GitPanel { }); let repo_paths = 
repo_paths.clone(); let active_repository = active_repository.clone(); - self.update_counts(); + let repository = active_repository.read(cx); + self.update_counts(repository); cx.notify(); cx.spawn({ @@ -740,8 +745,7 @@ impl GitPanel { .iter() .filter_map(|entry| entry.status_entry()) .filter(|status_entry| { - Section::Tracked.contains(status_entry.status) - && !status_entry.is_staged.unwrap_or(false) + !status_entry.status.is_created() && !status_entry.is_staged.unwrap_or(false) }) .map(|status_entry| status_entry.repo_path.clone()) .collect::>(); @@ -909,6 +913,7 @@ impl GitPanel { self.entries_by_path.clear(); let mut changed_entries = Vec::new(); let mut new_entries = Vec::new(); + let mut conflict_entries = Vec::new(); let Some(repo) = self.active_repository.as_ref() else { // Just clear entries if no repository is active. @@ -925,6 +930,7 @@ impl GitPanel { let (depth, difference) = Self::calculate_depth_and_difference(&entry.repo_path, &path_set); + let is_conflict = repo.has_conflict(&entry.repo_path); let is_new = entry.status.is_created(); let is_staged = entry.status.is_staged(); @@ -955,7 +961,9 @@ impl GitPanel { is_staged, }; - if is_new { + if is_conflict { + conflict_entries.push(entry); + } else if is_new { new_entries.push(entry); } else { changed_entries.push(entry); @@ -963,9 +971,21 @@ impl GitPanel { } // Sort entries by path to maintain consistent order + conflict_entries.sort_by(|a, b| a.repo_path.cmp(&b.repo_path)); changed_entries.sort_by(|a, b| a.repo_path.cmp(&b.repo_path)); new_entries.sort_by(|a, b| a.repo_path.cmp(&b.repo_path)); + if conflict_entries.len() > 0 { + self.entries.push(GitListEntry::Header(GitHeaderEntry { + header: Section::Conflict, + })); + self.entries.extend( + conflict_entries + .into_iter() + .map(GitListEntry::GitStatusEntry), + ); + } + if changed_entries.len() > 0 { self.entries.push(GitListEntry::Header(GitHeaderEntry { header: Section::Tracked, @@ -990,14 +1010,16 @@ impl GitPanel { .insert(status_entry.repo_path.clone(), ix); } } - self.update_counts(); + self.update_counts(repo); self.select_first_entry_if_none(cx); cx.notify(); } - fn update_counts(&mut self) { + fn update_counts(&mut self, repo: &Repository) { + self.conflicted_count = 0; + self.conflicted_staged_count = 0; self.new_count = 0; self.tracked_count = 0; self.new_staged_count = 0; @@ -1006,7 +1028,12 @@ impl GitPanel { let Some(status_entry) = entry.status_entry() else { continue; }; - if status_entry.status.is_created() { + if repo.has_conflict(&status_entry.repo_path) { + self.conflicted_count += 1; + if self.entry_appears_staged(status_entry) != Some(false) { + self.conflicted_staged_count += 1; + } + } else if status_entry.status.is_created() { self.new_count += 1; if self.entry_is_staged(status_entry) != Some(false) { self.new_staged_count += 1; @@ -1041,6 +1068,7 @@ impl GitPanel { let (staged_count, count) = match header_type { Section::New => (self.new_staged_count, self.new_count), Section::Tracked => (self.tracked_staged_count, self.tracked_count), + Section::Conflict => (self.conflicted_staged_count, self.conflicted_count), }; if staged_count == 0 { ToggleState::Unselected @@ -1467,7 +1495,7 @@ impl GitPanel { self.header_state(header.header) } else { match header.header { - Section::Tracked => ToggleState::Selected, + Section::Tracked | Section::Conflict => ToggleState::Selected, Section::New => ToggleState::Unselected, } }; diff --git a/crates/git_ui/src/project_diff.rs b/crates/git_ui/src/project_diff.rs index 
1581d0fc8ad943a6723ba5ce7ba938b53708a775..5d2689ed4cfa59c15023f394a3c7380f2ab34aa4 100644 --- a/crates/git_ui/src/project_diff.rs +++ b/crates/git_ui/src/project_diff.rs @@ -46,8 +46,9 @@ struct DiffBuffer { change_set: Entity, } -const CHANGED_NAMESPACE: &'static str = "0"; -const ADDED_NAMESPACE: &'static str = "1"; +const CONFLICT_NAMESPACE: &'static str = "0"; +const TRACKED_NAMESPACE: &'static str = "1"; +const NEW_NAMESPACE: &'static str = "2"; impl ProjectDiff { pub(crate) fn register( @@ -174,19 +175,25 @@ impl ProjectDiff { let Some(git_repo) = self.git_state.read(cx).active_repository() else { return; }; + let repo = git_repo.read(cx); - let Some(path) = git_repo - .read(cx) + let Some(abs_path) = repo .repo_path_to_project_path(&entry.repo_path) .and_then(|project_path| self.project.read(cx).absolute_path(&project_path, cx)) else { return; }; - let path_key = if entry.status.is_created() { - PathKey::namespaced(ADDED_NAMESPACE, &path) + + let namespace = if repo.has_conflict(&entry.repo_path) { + CONFLICT_NAMESPACE + } else if entry.status.is_created() { + NEW_NAMESPACE } else { - PathKey::namespaced(CHANGED_NAMESPACE, &path) + TRACKED_NAMESPACE }; + + let path_key = PathKey::namespaced(namespace, &abs_path); + self.scroll_to_path(path_key, window, cx) } @@ -259,12 +266,14 @@ impl ProjectDiff { let Some(abs_path) = self.project.read(cx).absolute_path(&project_path, cx) else { continue; }; - // Craft some artificial paths so that created entries will appear last. - let path_key = if entry.status.is_created() { - PathKey::namespaced(ADDED_NAMESPACE, &abs_path) + let namespace = if repo.has_conflict(&entry.repo_path) { + CONFLICT_NAMESPACE + } else if entry.status.is_created() { + NEW_NAMESPACE } else { - PathKey::namespaced(CHANGED_NAMESPACE, &abs_path) + TRACKED_NAMESPACE }; + let path_key = PathKey::namespaced(namespace, &abs_path); previous_paths.remove(&path_key); let load_buffer = self diff --git a/crates/project/src/git.rs b/crates/project/src/git.rs index f4ab1791a7a1aa8180ab6b6e2234799898ddb879..fad10f1ba4c5131beeaf03864ae90e0099af7b70 100644 --- a/crates/project/src/git.rs +++ b/crates/project/src/git.rs @@ -336,6 +336,12 @@ impl Repository { self.repository_entry.status() } + pub fn has_conflict(&self, path: &RepoPath) -> bool { + self.repository_entry + .current_merge_conflicts + .contains(&path) + } + pub fn repo_path_to_project_path(&self, path: &RepoPath) -> Option { let path = self.repository_entry.unrelativize(path)?; Some((self.worktree_id, path).into()) diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 195294fe68a015845447c73ad8ab38313f6d020b..fd10f0d113412619bb2e0ebdccc51753913fdcdc 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -1800,6 +1800,7 @@ message RepositoryEntry { optional string branch = 2; repeated StatusEntry updated_statuses = 3; repeated string removed_statuses = 4; + repeated string current_merge_conflicts = 5; } message StatusEntry { diff --git a/crates/sum_tree/src/tree_map.rs b/crates/sum_tree/src/tree_map.rs index 9a4d952e93f2264cdc1c32fe98cc1f377296c63b..09274c37c217362f1eaed301db659084638b4208 100644 --- a/crates/sum_tree/src/tree_map.rs +++ b/crates/sum_tree/src/tree_map.rs @@ -32,7 +32,7 @@ impl<'a, K> Default for MapKeyRef<'a, K> { } } -#[derive(Clone)] +#[derive(Clone, Debug, PartialEq, Eq)] pub struct TreeSet(TreeMap) where K: Clone + Ord; diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 
3033a6e9faf47a1182a04e737954ad90f66c22c3..8c3bd04657ac99d37670a485aa65fd15631498d5 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -178,7 +178,7 @@ pub struct Snapshot { completed_scan_id: usize, } -#[derive(Clone, Debug, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq)] pub struct RepositoryEntry { /// The git status entries for this repository. /// Note that the paths on this repository are relative to the git work directory. @@ -203,6 +203,7 @@ pub struct RepositoryEntry { work_directory_id: ProjectEntryId, pub work_directory: WorkDirectory, pub(crate) branch: Option>, + pub current_merge_conflicts: TreeSet, } impl Deref for RepositoryEntry { @@ -256,6 +257,11 @@ impl RepositoryEntry { .map(|entry| entry.to_proto()) .collect(), removed_statuses: Default::default(), + current_merge_conflicts: self + .current_merge_conflicts + .iter() + .map(|repo_path| repo_path.to_proto()) + .collect(), } } @@ -306,6 +312,11 @@ impl RepositoryEntry { branch: self.branch.as_ref().map(|branch| branch.to_string()), updated_statuses, removed_statuses, + current_merge_conflicts: self + .current_merge_conflicts + .iter() + .map(RepoPath::to_proto) + .collect(), } } } @@ -456,6 +467,7 @@ struct BackgroundScannerState { #[derive(Debug, Clone)] pub struct LocalRepositoryEntry { + pub(crate) work_directory_id: ProjectEntryId, pub(crate) work_directory: WorkDirectory, pub(crate) git_dir_scan_id: usize, pub(crate) status_scan_id: usize, @@ -465,6 +477,7 @@ pub struct LocalRepositoryEntry { pub(crate) dot_git_dir_abs_path: Arc, /// Absolute path to the .git file, if we're in a git worktree. pub(crate) dot_git_worktree_abs_path: Option>, + pub current_merge_head_shas: Vec, } impl sum_tree::Item for LocalRepositoryEntry { @@ -2520,6 +2533,13 @@ impl Snapshot { for repository in update.updated_repositories { let work_directory_id = ProjectEntryId::from_proto(repository.work_directory_id); if let Some(work_dir_entry) = self.entry_for_id(work_directory_id) { + let conflicted_paths = TreeSet::from_ordered_entries( + repository + .current_merge_conflicts + .into_iter() + .map(|path| RepoPath(Path::new(&path).into())), + ); + if self .repositories .contains(&PathKey(work_dir_entry.path.clone()), &()) @@ -2539,6 +2559,7 @@ impl Snapshot { .update(&PathKey(work_dir_entry.path.clone()), &(), |repo| { repo.branch = repository.branch.map(Into::into); repo.statuses_by_path.edit(edits, &()); + repo.current_merge_conflicts = conflicted_paths }); } else { let statuses = SumTree::from_iter( @@ -2561,6 +2582,7 @@ impl Snapshot { }, branch: repository.branch.map(Into::into), statuses_by_path: statuses, + current_merge_conflicts: conflicted_paths, }, &(), ); @@ -3363,17 +3385,20 @@ impl BackgroundScannerState { work_directory: work_directory.clone(), branch: repository.branch_name().map(Into::into), statuses_by_path: Default::default(), + current_merge_conflicts: Default::default(), }, &(), ); let local_repository = LocalRepositoryEntry { + work_directory_id: work_dir_id, work_directory: work_directory.clone(), git_dir_scan_id: 0, status_scan_id: 0, repo_ptr: repository.clone(), dot_git_dir_abs_path: actual_dot_git_dir_abs_path, dot_git_worktree_abs_path, + current_merge_head_shas: Default::default(), }; self.snapshot @@ -5127,11 +5152,11 @@ impl BackgroundScanner { .snapshot .git_repositories .iter() - .find_map(|(entry_id, repo)| { + .find_map(|(_, repo)| { if repo.dot_git_dir_abs_path.as_ref() == &dot_git_dir || repo.dot_git_worktree_abs_path.as_deref() == Some(&dot_git_dir) { - 
Some((*entry_id, repo.clone())) + Some(repo.clone()) } else { None } @@ -5148,13 +5173,13 @@ impl BackgroundScanner { None => continue, } } - Some((entry_id, local_repository)) => { + Some(local_repository) => { if local_repository.git_dir_scan_id == scan_id { continue; } let Some(work_dir) = state .snapshot - .entry_for_id(entry_id) + .entry_for_id(local_repository.work_directory_id) .map(|entry| entry.path.clone()) else { continue; @@ -5163,10 +5188,13 @@ impl BackgroundScanner { let branch = local_repository.repo_ptr.branch_name(); local_repository.repo_ptr.reload_index(); - state.snapshot.git_repositories.update(&entry_id, |entry| { - entry.git_dir_scan_id = scan_id; - entry.status_scan_id = scan_id; - }); + state.snapshot.git_repositories.update( + &local_repository.work_directory_id, + |entry| { + entry.git_dir_scan_id = scan_id; + entry.status_scan_id = scan_id; + }, + ); state.snapshot.snapshot.repositories.update( &PathKey(work_dir.clone()), &(), @@ -5260,6 +5288,11 @@ impl BackgroundScanner { return; }; + let merge_head_shas = local_repository.repo().merge_head_shas(); + if merge_head_shas != local_repository.current_merge_head_shas { + mem::take(&mut repository.current_merge_conflicts); + } + let mut new_entries_by_path = SumTree::new(&()); for (repo_path, status) in statuses.entries.iter() { let project_path = repository.work_directory.unrelativize(repo_path); @@ -5283,6 +5316,12 @@ impl BackgroundScanner { .snapshot .repositories .insert_or_replace(repository, &()); + state.snapshot.git_repositories.update( + &local_repository.work_directory_id, + |entry| { + entry.current_merge_head_shas = merge_head_shas; + }, + ); util::extend_sorted( &mut state.changed_paths, From 4270f89956b573512984c6bcbfb9544879381461 Mon Sep 17 00:00:00 2001 From: Amr Bashir Date: Thu, 6 Feb 2025 03:55:17 +0200 Subject: [PATCH 058/130] gpui: Implement `HasWindowHandle` on `Window` (#24327) Implement `raw_window_handle::HasWindowHandle` for `gpui::Window` This opens a lot of possibility of using gpui with platform specific APIs. Edit: With this exposed, we can use crates like `window-vibrancy`, `muda` (menus crate) or even use `wry` (a webview renderer) to create a child `WebView` inside the gpui window. Release Notes: - N/A --- crates/gpui/src/window.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index e0e473974db2b08694c3eeb7a9574037b0d2ecbc..7639f5e676c25a33aac90083f99f97dfae583625 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -23,6 +23,7 @@ use futures::FutureExt; #[cfg(target_os = "macos")] use media::core_video::CVImageBuffer; use parking_lot::RwLock; +use raw_window_handle::{HandleError, HasWindowHandle}; use refineable::Refineable; use slotmap::SlotMap; use smallvec::SmallVec; @@ -3943,6 +3944,12 @@ impl AnyWindowHandle { } } +impl HasWindowHandle for Window { + fn window_handle(&self) -> Result, HandleError> { + self.platform_window.window_handle() + } +} + /// An identifier for an [`Element`](crate::Element). 
/// /// Can be constructed with a string, a number, or both, as well From 10b6bc2508a7ef6041a035ef2cc507e73e0ccd47 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Wed, 5 Feb 2025 18:21:42 -0800 Subject: [PATCH 059/130] Fix broken merge (#24341) Release Notes: - N/A --- crates/git_ui/src/git_panel.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index d47e066f33729792e1c5af50544a403a6d5dfd73..a65b55bcb7fa52c4f1eadb6a55343b491a3a9f12 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -1030,7 +1030,7 @@ impl GitPanel { }; if repo.has_conflict(&status_entry.repo_path) { self.conflicted_count += 1; - if self.entry_appears_staged(status_entry) != Some(false) { + if self.entry_is_staged(status_entry) != Some(false) { self.conflicted_staged_count += 1; } } else if status_entry.status.is_created() { From 8b3d315e406eb121b7b0030a7f39a4c9777a2e08 Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Wed, 5 Feb 2025 21:23:46 -0600 Subject: [PATCH 060/130] Fix #24081 - lsp diagnostic code type conversion (#24347) - **store `buffer::Diagnostic`as NumberOrString instead of assuming String** - **update zed-industries/lsp-types rev** Closes #24081 Release Notes: - Fixed an issue where language server diagnostic codes would be converted to strings leading to errors with some language servers --- crates/diagnostics/src/diagnostics.rs | 2 +- crates/language/src/buffer.rs | 4 ++-- crates/language/src/diagnostic_set.rs | 8 +------- crates/language/src/proto.rs | 4 ++-- crates/lsp/Cargo.toml | 2 +- crates/project/src/lsp_store.rs | 14 +++++--------- 6 files changed, 12 insertions(+), 22 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 586e82bf8f34bdeb136f0a793ed9e35c0a21dfe4..6f936f2c8c4536ee8ee41c65f0e9d705afa3472f 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -933,7 +933,7 @@ fn diagnostic_header_renderer(diagnostic: Diagnostic) -> RenderBlock { .when_some(diagnostic.code.as_ref(), |stack, code| { stack.child( div() - .child(SharedString::from(format!("({code})"))) + .child(SharedString::from(format!("({code:?})"))) .text_color(color.text_muted), ) }), diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index b2112f31c5ae5b40044d6a9167dc3df2c1a6c49b..5c9798c9b199b02a9fe409a32dc20553c318a7f1 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -28,7 +28,7 @@ use gpui::{ AnyElement, App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, Pixels, SharedString, StyledText, Task, TaskLabel, TextStyle, Window, }; -use lsp::LanguageServerId; +use lsp::{LanguageServerId, NumberOrString}; use parking_lot::Mutex; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; @@ -202,7 +202,7 @@ pub struct Diagnostic { /// The name of the service that produced this diagnostic. pub source: Option, /// A machine-readable code that identifies this diagnostic. - pub code: Option, + pub code: Option, /// Whether this diagnostic is a hint, warning, or error. pub severity: DiagnosticSeverity, /// The human-readable message associated with this diagnostic. 
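For context on the `Diagnostic.code` change above: the LSP spec types `code` as `integer | string`, which is why this patch stores `lsp::NumberOrString` instead of flattening to `String`. Below is a minimal stand-in sketch (not the real `lsp_types` enum) of why preserving the variant matters when a code is echoed back to a language server; the Debug formatting here only approximates JSON string quoting for simple codes.

```rust
// Illustrative stand-in for an LSP `integer | string` diagnostic code.
#[derive(Clone, Debug, PartialEq)]
enum Code {
    Number(i32),
    String(String),
}

impl Code {
    /// How the code should appear in JSON sent back to the server.
    fn to_json_fragment(&self) -> String {
        match self {
            Code::Number(n) => n.to_string(),    // e.g. 2304 stays a JSON number
            Code::String(s) => format!("{s:?}"), // e.g. "unused-variable" stays quoted
        }
    }
}

fn main() {
    // Always stringifying would turn 2304 into "2304", which a server that
    // issued a numeric code may reject when the diagnostic is round-tripped.
    assert_eq!(Code::Number(2304).to_json_fragment(), "2304");
    assert_eq!(
        Code::String("unused-variable".into()).to_json_fragment(),
        "\"unused-variable\""
    );
}
```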
diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs index 2319cb1bfb6caaa2063d1e4a51e03c4c6b772639..0f2e39275cd6f2d6ddd31d7fc5bc9b3836c3166b 100644 --- a/crates/language/src/diagnostic_set.rs +++ b/crates/language/src/diagnostic_set.rs @@ -56,17 +56,11 @@ impl DiagnosticEntry { /// Returns a raw LSP diagnostic used to provide diagnostic context to LSP /// codeAction request pub fn to_lsp_diagnostic_stub(&self) -> Result { - let code = self - .diagnostic - .code - .clone() - .map(lsp::NumberOrString::String); - let range = range_to_lsp(self.range.clone())?; Ok(lsp::Diagnostic { - code, range, + code: self.diagnostic.code.clone(), severity: Some(self.diagnostic.severity), source: self.diagnostic.source.clone(), message: self.diagnostic.message.clone(), diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index ec864a9519eae154c81ef003a7f8c05bfac59a6c..672703f6ee338a4928aaa9fe70dcd84853ef383e 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -213,7 +213,7 @@ pub fn serialize_diagnostics<'a>( group_id: entry.diagnostic.group_id as u64, is_primary: entry.diagnostic.is_primary, is_valid: true, - code: entry.diagnostic.code.clone(), + code: entry.diagnostic.code.as_ref().map(|s| s.to_string()), is_disk_based: entry.diagnostic.is_disk_based, is_unnecessary: entry.diagnostic.is_unnecessary, data: entry.diagnostic.data.as_ref().map(|data| data.to_string()), @@ -419,7 +419,7 @@ pub fn deserialize_diagnostics( }, message: diagnostic.message, group_id: diagnostic.group_id as usize, - code: diagnostic.code, + code: diagnostic.code.map(lsp::NumberOrString::from_string), is_primary: diagnostic.is_primary, is_disk_based: diagnostic.is_disk_based, is_unnecessary: diagnostic.is_unnecessary, diff --git a/crates/lsp/Cargo.toml b/crates/lsp/Cargo.toml index 0937b47217c09fc74b54f3ab1ebd4ae5aaa61fa8..e973fd32c586d31ed2a2524292c65b7db9fa8095 100644 --- a/crates/lsp/Cargo.toml +++ b/crates/lsp/Cargo.toml @@ -22,7 +22,7 @@ collections.workspace = true futures.workspace = true gpui.workspace = true log.workspace = true -lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "72357d6f6d212bdffba3b5ef4b31d8ca856058e7" } +lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "1fff0dd12e2071c5667327394cfec163d2a466ab" } parking_lot.workspace = true postage.workspace = true serde.workspace = true diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 7966bf6d95144d58ec9f909955272c55d56800ff..e73bef795b59a0d4e6739ef1006c246a2d84cdd6 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -7366,10 +7366,6 @@ impl LspStore { for diagnostic in ¶ms.diagnostics { let source = diagnostic.source.as_ref(); - let code = diagnostic.code.as_ref().map(|code| match code { - lsp::NumberOrString::Number(code) => code.to_string(), - lsp::NumberOrString::String(code) => code.clone(), - }); let range = range_from_lsp(diagnostic.range); let is_supporting = diagnostic .related_information @@ -7378,7 +7374,7 @@ impl LspStore { infos.iter().any(|info| { primary_diagnostic_group_ids.contains_key(&( source, - code.clone(), + diagnostic.code.clone(), range_from_lsp(info.location.range), )) }) @@ -7390,7 +7386,7 @@ impl LspStore { if is_supporting { supporting_diagnostics.insert( - (source, code.clone(), range), + (source, diagnostic.code.clone(), range), (diagnostic.severity, is_unnecessary), ); } else { @@ -7400,13 +7396,13 @@ impl LspStore { 
sources_by_group_id.insert(group_id, source); primary_diagnostic_group_ids - .insert((source, code.clone(), range.clone()), group_id); + .insert((source, diagnostic.code.clone(), range.clone()), group_id); diagnostics.push(DiagnosticEntry { range, diagnostic: Diagnostic { source: diagnostic.source.clone(), - code: code.clone(), + code: diagnostic.code.clone(), severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR), message: diagnostic.message.trim().to_string(), group_id, @@ -7424,7 +7420,7 @@ impl LspStore { range, diagnostic: Diagnostic { source: diagnostic.source.clone(), - code: code.clone(), + code: diagnostic.code.clone(), severity: DiagnosticSeverity::INFORMATION, message: info.message.trim().to_string(), group_id, From 5931af810e2dd42ec2038d7304a92d1d92e8db69 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Marcos?= Date: Thu, 6 Feb 2025 00:54:39 -0300 Subject: [PATCH 061/130] Update Cargo.lock according to changes on #24347 (#24350) Release Notes: - N/A --- Cargo.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index ebb8f7fffb93f0db5efca3d2d1b68eec2e9d20b3..78afbb7a2a691cf5bc66bfcfbd15599dd48c5ef4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7489,7 +7489,7 @@ dependencies = [ [[package]] name = "lsp-types" version = "0.95.1" -source = "git+https://github.com/zed-industries/lsp-types?rev=72357d6f6d212bdffba3b5ef4b31d8ca856058e7#72357d6f6d212bdffba3b5ef4b31d8ca856058e7" +source = "git+https://github.com/zed-industries/lsp-types?rev=1fff0dd12e2071c5667327394cfec163d2a466ab#1fff0dd12e2071c5667327394cfec163d2a466ab" dependencies = [ "bitflags 1.3.2", "serde", From fa0261e3add8ad16671f62a5ff31140c793c8a43 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Marcos?= Date: Thu, 6 Feb 2025 00:58:21 -0300 Subject: [PATCH 062/130] Add more info to `CONTRIBUTING.md` (#24348) mention the crates: - `cli` - `zed` and add a section for packaging Zed, which links to our website docs Release Notes: - N/A --- CONTRIBUTING.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 4a0a632413911faecb24ddac2c1837aaba3e2299..6cd18ce4217066de4756c15e1f0ff7e4967acb0c 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -52,3 +52,9 @@ Zed is made up of several smaller crates - let's go over those you're most likel - [`rpc`](/crates/rpc) defines messages to be exchanged with collaboration server. - [`theme`](/crates/theme) defines the theme system and provides a default theme. - [`ui`](/crates/ui) is a collection of UI components and common patterns used throughout Zed. +- [`cli`](/crates/cli) is the CLI crate which invokes the Zed binary. +- [`zed`](/crates/zed) is where all things come together, and the `main` entry point for Zed. + +## Packaging Zed + +Check our [notes for packaging Zed](https://zed.dev/docs/development/linux#notes-for-packaging-zed). From 1cdfbe2d5f060321f6536b99713831e5cd49c78b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Marcos?= Date: Thu, 6 Feb 2025 01:04:35 -0300 Subject: [PATCH 063/130] License detection: also check `LICENSE.txt` and `LICENCE.txt` (#24351) and move the list of files to `crates/zeta/src/license_detection.rs` for better visibility. 
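As a rough illustration of the lookup this enables, here is a synchronous, std-only sketch; the real watcher loads the files through the worktree asynchronously, and the eligibility check below is a simplified stand-in for the regex-based one in `license_detection.rs`. The `LICENSE_FILES_TO_CHECK` constant is the one added by this patch.

```rust
use std::fs;
use std::path::Path;

/// The most common license locations, with US and UK English spelling.
const LICENSE_FILES_TO_CHECK: &[&str] = &["LICENSE", "LICENCE", "LICENSE.txt", "LICENCE.txt"];

/// Simplified stand-in for the real check, which matches the text against
/// known permissive-license patterns (e.g. MIT, ISC).
fn is_license_eligible_for_data_collection(text: &str) -> bool {
    text.contains("Permission is hereby granted, free of charge")
}

/// Returns true if any candidate file under `root` contains an eligible license.
fn detect_open_source(root: &Path) -> bool {
    LICENSE_FILES_TO_CHECK.iter().any(|name| {
        fs::read_to_string(root.join(name))
            .map(|text| is_license_eligible_for_data_collection(&text))
            .unwrap_or(false)
    })
}

fn main() {
    println!("open source: {}", detect_open_source(Path::new(".")));
}
```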
Release Notes: - N/A --- crates/zeta/src/license_detection.rs | 3 +++ crates/zeta/src/zeta.rs | 3 +-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/crates/zeta/src/license_detection.rs b/crates/zeta/src/license_detection.rs index edba7178b6032c4e524cf891a74e826b78032a94..a6b1de928faf1d6c01ac7b9ed08199772d4088ec 100644 --- a/crates/zeta/src/license_detection.rs +++ b/crates/zeta/src/license_detection.rs @@ -1,5 +1,8 @@ use regex::Regex; +/// The most common license locations, with US and UK English spelling. +pub const LICENSE_FILES_TO_CHECK: &[&str] = &["LICENSE", "LICENCE", "LICENSE.txt", "LICENCE.txt"]; + pub fn is_license_eligible_for_data_collection(license: &str) -> bool { // TODO: Include more licenses later (namely, Apache) for pattern in [MIT_LICENSE_REGEX, ISC_LICENSE_REGEX] { diff --git a/crates/zeta/src/zeta.rs b/crates/zeta/src/zeta.rs index 7ef46959008e14065cfe27a5302151630c2ca322..c287a30677d33c93f7176d591c7674294ad254da 100644 --- a/crates/zeta/src/zeta.rs +++ b/crates/zeta/src/zeta.rs @@ -11,6 +11,7 @@ use db::kvp::KEY_VALUE_STORE; pub use init::*; use inline_completion::DataCollectionState; pub use license_detection::is_license_eligible_for_data_collection; +use license_detection::LICENSE_FILES_TO_CHECK; pub use onboarding_banner::*; pub use rate_completion_modal::*; @@ -952,8 +953,6 @@ impl LicenseDetectionWatcher { pub fn new(worktree: &Worktree, cx: &mut Context) -> Self { let (mut is_open_source_tx, is_open_source_rx) = watch::channel_with::(false); - const LICENSE_FILES_TO_CHECK: [&'static str; 2] = ["LICENSE", "LICENCE"]; // US and UK English spelling - // Check if worktree is a single file, if so we do not need to check for a LICENSE file let task = if worktree.abs_path().is_file() { Task::ready(()) From c61f12dd225ffabd3913aa29adfdf3feb1a62522 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Marcos?= Date: Thu, 6 Feb 2025 03:07:05 -0300 Subject: [PATCH 064/130] Zeta: Skip opening files redundantly if a license was found (#24357) Release Notes: - N/A --- crates/zeta/src/zeta.rs | 32 +++++++++++++++++++++----------- 1 file changed, 21 insertions(+), 11 deletions(-) diff --git a/crates/zeta/src/zeta.rs b/crates/zeta/src/zeta.rs index c287a30677d33c93f7176d591c7674294ad254da..c20522b00b8610dc3b6dc4c20fc653712a9c0f5b 100644 --- a/crates/zeta/src/zeta.rs +++ b/crates/zeta/src/zeta.rs @@ -957,21 +957,31 @@ impl LicenseDetectionWatcher { let task = if worktree.abs_path().is_file() { Task::ready(()) } else { - let loaded_files_task = futures::future::join_all( - LICENSE_FILES_TO_CHECK - .iter() - .map(|file| worktree.load_file(Path::new(file), cx)), - ); + let loaded_files = LICENSE_FILES_TO_CHECK + .iter() + .map(Path::new) + .map(|file| worktree.load_file(file, cx)) + .collect::>(); cx.background_executor().spawn(async move { - for loaded_file in loaded_files_task.await { - if let Some(content) = loaded_file.log_err() { - if is_license_eligible_for_data_collection(&content.text) { - *is_open_source_tx.borrow_mut() = true; - break; - } + for loaded_file in loaded_files.into_iter() { + let Ok(loaded_file) = loaded_file.await else { + continue; + }; + + let path = &loaded_file.file.path; + if is_license_eligible_for_data_collection(&loaded_file.text) { + log::info!("detected '{path:?}' as open source license"); + *is_open_source_tx.borrow_mut() = true; + } else { + log::info!("didn't detect '{path:?}' as open source license"); } + + // stop on the first license that successfully read + return; } + + log::debug!("didn't find a license file to check, 
assuming closed source"); }) }; From 10792ee0ad3b58c24f9977c903d2b8a84dfbdd15 Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Wed, 5 Feb 2025 23:46:23 -0700 Subject: [PATCH 065/130] First check if menu visible in `layout_gutter_menu` (#24259) Also uses an expect instead of unwrap for result of `render_context_menu` Release Notes: - N/A --- crates/editor/src/element.rs | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 9d0c50b89668dac66a8c040b64803731596d7709..99c103934870eb15bcbba4b2fc5ecf015ba7bad7 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -3170,7 +3170,7 @@ impl EditorElement { }; let mut element = self .render_context_menu(line_height, menu_height, y_flipped, window, cx) - .unwrap(); + .expect("Visible context menu should always render."); let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); Some((CursorPopoverType::CodeContextMenu, element, size)) } else { @@ -3328,8 +3328,12 @@ impl EditorElement { window: &mut Window, cx: &mut App, ) { + let editor = self.editor.read(cx); + if !editor.context_menu_visible() { + return; + } let Some(crate::ContextMenuOrigin::GutterIndicator(gutter_row)) = - self.editor.read(cx).context_menu_origin() + editor.context_menu_origin() else { return; }; @@ -3357,11 +3361,9 @@ impl EditorElement { window, cx, move |height, _max_width_for_stable_x, y_flipped, window, cx| { - let Some(mut element) = - self.render_context_menu(line_height, height, y_flipped, window, cx) - else { - return vec![]; - }; + let mut element = self + .render_context_menu(line_height, height, y_flipped, window, cx) + .expect("Visible context menu should always render."); let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); vec![(CursorPopoverType::CodeContextMenu, element, size)] }, From 7cbcca2881eae8d9aa7b363f03f999933cbde2bd Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 6 Feb 2025 10:01:56 +0200 Subject: [PATCH 066/130] Update Rust crate wayland-cursor to v0.31.8 (#24328) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [wayland-cursor](https://redirect.github.com/smithay/wayland-rs) | dependencies | patch | `0.31.7` -> `0.31.8` | --- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. 
--- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 50 +++++++++++++++++++++++++------------------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 78afbb7a2a691cf5bc66bfcfbd15599dd48c5ef4..84543fb36281c66b6ae9fba1889fbb00f629d540 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2213,7 +2213,7 @@ dependencies = [ "cap-primitives", "cap-std", "io-lifetimes", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -2241,7 +2241,7 @@ dependencies = [ "ipnet", "maybe-owned", "rustix", - "windows-sys 0.59.0", + "windows-sys 0.52.0", "winx", ] @@ -4259,7 +4259,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -4917,7 +4917,7 @@ checksum = "5e2e6123af26f0f2c51cc66869137080199406754903cc926a7690401ce09cb4" dependencies = [ "io-lifetimes", "rustix", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -6540,7 +6540,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2285ddfe3054097ef4b2fe909ef8c3bcd1ea52a8f0d274416caebeef39f04a65" dependencies = [ "io-lifetimes", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -7153,7 +7153,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" dependencies = [ "cfg-if", - "windows-targets 0.48.5", + "windows-targets 0.52.6", ] [[package]] @@ -10305,9 +10305,9 @@ dependencies = [ [[package]] name = "quick-xml" -version = "0.36.2" +version = "0.37.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7649a7b4df05aed9ea7ec6f628c67c9953a43869b8bc50929569b2999d443fe" +checksum = "165859e9e55f79d67b96c5d96f4e88b6f2695a1972849c15a6a3f5c59fc2c003" dependencies = [ "memchr", ] @@ -10361,7 +10361,7 @@ dependencies = [ "once_cell", "socket2", "tracing", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -11249,7 +11249,7 @@ dependencies = [ "libc", "linux-raw-sys", "once_cell", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -12974,7 +12974,7 @@ dependencies = [ "fd-lock", "io-lifetimes", "rustix", - "windows-sys 0.59.0", + "windows-sys 0.52.0", "winx", ] @@ -13108,7 +13108,7 @@ dependencies = [ "getrandom 0.3.1", "once_cell", "rustix", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -15147,9 +15147,9 @@ dependencies = [ [[package]] name = "wayland-backend" -version = "0.3.7" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "056535ced7a150d45159d3a8dc30f91a2e2d588ca0b23f70e56033622b8016f6" +checksum = "b7208998eaa3870dad37ec8836979581506e0c5c64c20c9e79e9d2a10d6f47bf" dependencies = [ "cc", "downcast-rs", @@ -15161,9 +15161,9 @@ dependencies = [ [[package]] name = "wayland-client" -version = "0.31.7" +version = "0.31.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b66249d3fc69f76fd74c82cc319300faa554e9d865dab1f7cd66cc20db10b280" +checksum = "c2120de3d33638aaef5b9f4472bff75f07c56379cf76ea320bd3a3d65ecaf73f" dependencies = [ "bitflags 2.8.0", "rustix", @@ -15173,9 +15173,9 @@ dependencies = [ [[package]] name = "wayland-cursor" -version = "0.31.7" +version = "0.31.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "32b08bc3aafdb0035e7fe0fdf17ba0c09c268732707dca4ae098f60cb28c9e4c" +checksum = "a93029cbb6650748881a00e4922b076092a6a08c11e7fbdb923f064b23968c5d" dependencies = [ "rustix", "wayland-client", @@ -15209,20 +15209,20 @@ dependencies = [ [[package]] name = "wayland-scanner" -version = "0.31.5" +version = "0.31.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597f2001b2e5fc1121e3d5b9791d3e78f05ba6bfa4641053846248e3a13661c3" +checksum = "896fdafd5d28145fce7958917d69f2fd44469b1d4e861cb5961bcbeebc6d1484" dependencies = [ "proc-macro2", - "quick-xml 0.36.2", + "quick-xml 0.37.2", "quote", ] [[package]] name = "wayland-sys" -version = "0.31.5" +version = "0.31.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efa8ac0d8e8ed3e3b5c9fc92c7881406a268e11555abe36493efabe649a29e09" +checksum = "dbcebb399c77d5aa9fa5db874806ee7b4eba4e73650948e8f93963f128896615" dependencies = [ "dlib", "log", @@ -15414,7 +15414,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.48.0", ] [[package]] @@ -15863,7 +15863,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f3fd376f71958b862e7afb20cfe5a22830e1963462f3a17f49d82a6c1d1f42d" dependencies = [ "bitflags 2.8.0", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] From 97cda3f4104d093d8ecff6a860098aee74850fa5 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 6 Feb 2025 10:02:14 +0200 Subject: [PATCH 067/130] Update aws-sdk-rust monorepo (#24334) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [aws-config](https://redirect.github.com/smithy-lang/smithy-rs) | dependencies | patch | `1.5.15` -> `1.5.16` | | [aws-sdk-kinesis](https://redirect.github.com/awslabs/aws-sdk-rust) | dependencies | minor | `1.59.0` -> `1.60.0` | | [aws-sdk-s3](https://redirect.github.com/awslabs/aws-sdk-rust) | dependencies | minor | `1.72.0` -> `1.73.0` | --- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 👻 **Immortal**: This PR will be recreated if closed unmerged. Get [config help](https://redirect.github.com/renovatebot/renovate/discussions) if that's undesired. 
--- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 44 ++++++++++++++++++++++---------------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 84543fb36281c66b6ae9fba1889fbb00f629d540..6f066f6b790266dda53c6360ce7de66553540b99 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1181,9 +1181,9 @@ dependencies = [ [[package]] name = "aws-config" -version = "1.5.15" +version = "1.5.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc47e70fc35d054c8fcd296d47a61711f043ac80534a10b4f741904f81e73a90" +checksum = "50236e4d60fe8458de90a71c0922c761e41755adf091b1b03de1cef537179915" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1248,9 +1248,9 @@ dependencies = [ [[package]] name = "aws-runtime" -version = "1.5.4" +version = "1.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bee7643696e7fdd74c10f9eb42848a87fe469d35eae9c3323f80aa98f350baac" +checksum = "76dd04d39cc12844c0994f2c9c5a6f5184c22e9188ec1ff723de41910a21dcad" dependencies = [ "aws-credential-types", "aws-sigv4", @@ -1274,9 +1274,9 @@ dependencies = [ [[package]] name = "aws-sdk-kinesis" -version = "1.59.0" +version = "1.60.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7963cf7a0f49ba4f8351044751f4d42c003c4a5f31d9e084f0d0e68b6fb8b8cf" +checksum = "9b8052335b6ba19b08ba2b363c7505f8ed34074ac23fa14a652ff6a0a02a4c06" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1296,9 +1296,9 @@ dependencies = [ [[package]] name = "aws-sdk-s3" -version = "1.72.0" +version = "1.73.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c7ce6d85596c4bcb3aba8ad5bb134b08e204c8a475c9999c1af9290f80aa8ad" +checksum = "3978e0a211bdc5cddecfd91fb468665a662a27fbdaef39ddf36a2a18fef12cb4" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1330,9 +1330,9 @@ dependencies = [ [[package]] name = "aws-sdk-sso" -version = "1.57.0" +version = "1.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c54bab121fe1881a74c338c5f723d1592bf3b53167f80268a1274f404e1acc38" +checksum = "16ff718c9ee45cc1ebd4774a0e086bb80a6ab752b4902edf1c9f56b86ee1f770" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1352,9 +1352,9 @@ dependencies = [ [[package]] name = "aws-sdk-ssooidc" -version = "1.58.0" +version = "1.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c8234fd024f7ac61c4e44ea008029bde934250f371efe7d4a39708397b1080c" +checksum = "5183e088715cc135d8d396fdd3bc02f018f0da4c511f53cb8d795b6a31c55809" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1374,9 +1374,9 @@ dependencies = [ [[package]] name = "aws-sdk-sts" -version = "1.58.0" +version = "1.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba60e1d519d6f23a9df712c04fdeadd7872ac911c84b2f62a8bda92e129b7962" +checksum = "c9f944ef032717596639cea4a2118a3a457268ef51bbb5fde9637e54c465da00" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1397,9 +1397,9 @@ dependencies = [ [[package]] name = "aws-sigv4" -version = "1.2.7" +version = "1.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "690118821e46967b3c4501d67d7d52dd75106a9c54cf36cefa1985cedbe94e05" +checksum = "0bc5bbd1e4a2648fd8c5982af03935972c24a2f9846b396de661d351ee3ce837" dependencies = [ 
"aws-credential-types", "aws-smithy-eventstream", @@ -1510,9 +1510,9 @@ dependencies = [ [[package]] name = "aws-smithy-runtime" -version = "1.7.7" +version = "1.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "865f7050bbc7107a6c98a397a9fcd9413690c27fa718446967cf03b2d3ac517e" +checksum = "d526a12d9ed61fadefda24abe2e682892ba288c2018bcb38b1b4c111d13f6d92" dependencies = [ "aws-smithy-async", "aws-smithy-http", @@ -1554,9 +1554,9 @@ dependencies = [ [[package]] name = "aws-smithy-types" -version = "1.2.12" +version = "1.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a28f6feb647fb5e0d5b50f0472c19a7db9462b74e2fec01bb0b44eedcc834e97" +checksum = "c7b8a53819e42f10d0821f56da995e1470b199686a1809168db6ca485665f042" dependencies = [ "base64-simd", "bytes 1.9.0", @@ -1589,9 +1589,9 @@ dependencies = [ [[package]] name = "aws-types" -version = "1.3.4" +version = "1.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0df5a18c4f951c645300d365fec53a61418bcf4650f604f85fe2a665bfaa0c2" +checksum = "dfbd0a668309ec1f66c0f6bda4840dd6d4796ae26d699ebc266d7cc95c6d040f" dependencies = [ "aws-credential-types", "aws-smithy-async", From a6f83c283ce131840c8a9c9a27ee4d10902c1ba1 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 6 Feb 2025 10:02:36 +0200 Subject: [PATCH 068/130] Update Rust crate bytes to v1.10.0 (#24335) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [bytes](https://redirect.github.com/tokio-rs/bytes) | workspace.dependencies | minor | `1.9.0` -> `1.10.0` | --- ### Release Notes
tokio-rs/bytes (bytes) ### [`v1.10.0`](https://redirect.github.com/tokio-rs/bytes/blob/HEAD/CHANGELOG.md#1100-February-3rd-2025) [Compare Source](https://redirect.github.com/tokio-rs/bytes/compare/v1.9.0...v1.10.0) ##### Added - Add feature to support platforms without atomic CAS ([#​467](https://redirect.github.com/tokio-rs/bytes/issues/467)) - `try_get_*` methods for `Buf` trait ([#​753](https://redirect.github.com/tokio-rs/bytes/issues/753)) - Implement `Buf::chunks_vectored` for `Take` ([#​617](https://redirect.github.com/tokio-rs/bytes/issues/617)) - Implement `Buf::chunks_vectored` for `VecDeque` ([#​708](https://redirect.github.com/tokio-rs/bytes/issues/708)) ##### Fixed - Remove incorrect guarantee for `chunks_vectored` ([#​754](https://redirect.github.com/tokio-rs/bytes/issues/754)) - Ensure that tests pass under `panic=abort` ([#​749](https://redirect.github.com/tokio-rs/bytes/issues/749))
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 122 ++++++++++++++++++++++++++--------------------------- 1 file changed, 61 insertions(+), 61 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6f066f6b790266dda53c6360ce7de66553540b99..522d76c99888c8e9bb5245d26641245520603d99 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1067,7 +1067,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a860072022177f903e59730004fb5dc13db9275b79bb2aef7ba8ce831956c233" dependencies = [ - "bytes 1.9.0", + "bytes 1.10.0", "futures-sink", "futures-util", "memchr", @@ -1197,7 +1197,7 @@ dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", "aws-types", - "bytes 1.9.0", + "bytes 1.10.0", "fastrand 2.3.0", "hex", "http 0.2.12", @@ -1261,7 +1261,7 @@ dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", "aws-types", - "bytes 1.9.0", + "bytes 1.10.0", "fastrand 2.3.0", "http 0.2.12", "http-body 0.4.6", @@ -1287,7 +1287,7 @@ dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", "aws-types", - "bytes 1.9.0", + "bytes 1.10.0", "http 0.2.12", "once_cell", "regex-lite", @@ -1313,7 +1313,7 @@ dependencies = [ "aws-smithy-types", "aws-smithy-xml", "aws-types", - "bytes 1.9.0", + "bytes 1.10.0", "fastrand 2.3.0", "hex", "hmac", @@ -1343,7 +1343,7 @@ dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", "aws-types", - "bytes 1.9.0", + "bytes 1.10.0", "http 0.2.12", "once_cell", "regex-lite", @@ -1365,7 +1365,7 @@ dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", "aws-types", - "bytes 1.9.0", + "bytes 1.10.0", "http 0.2.12", "once_cell", "regex-lite", @@ -1406,7 +1406,7 @@ dependencies = [ "aws-smithy-http", "aws-smithy-runtime-api", "aws-smithy-types", - "bytes 1.9.0", + "bytes 1.10.0", "crypto-bigint 0.5.5", "form_urlencoded", "hex", @@ -1443,7 +1443,7 @@ checksum = "f2f45a1c384d7a393026bc5f5c177105aa9fa68e4749653b985707ac27d77295" dependencies = [ "aws-smithy-http", "aws-smithy-types", - "bytes 1.9.0", + "bytes 1.10.0", "crc32c", "crc32fast", "crc64fast-nvme", @@ -1464,7 +1464,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b18559a41e0c909b77625adf2b8c50de480a8041e5e4a3f5f7d177db70abc5a" dependencies = [ "aws-smithy-types", - "bytes 1.9.0", + "bytes 1.10.0", "crc32fast", ] @@ -1477,7 +1477,7 @@ dependencies = [ "aws-smithy-eventstream", "aws-smithy-runtime-api", "aws-smithy-types", - "bytes 1.9.0", + "bytes 1.10.0", "bytes-utils", "futures-core", "http 0.2.12", @@ -1518,7 +1518,7 @@ dependencies = [ "aws-smithy-http", "aws-smithy-runtime-api", "aws-smithy-types", - "bytes 1.9.0", + "bytes 1.10.0", "fastrand 2.3.0", "h2 0.3.26", "http 0.2.12", @@ -1543,7 +1543,7 @@ checksum = "92165296a47a812b267b4f41032ff8069ab7ff783696d217f0994a0d7ab585cd" dependencies = [ "aws-smithy-async", "aws-smithy-types", - "bytes 1.9.0", + "bytes 1.10.0", "http 0.2.12", "http 1.2.0", "pin-project-lite", @@ -1559,7 +1559,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "c7b8a53819e42f10d0821f56da995e1470b199686a1809168db6ca485665f042" dependencies = [ "base64-simd", - "bytes 1.9.0", + "bytes 1.10.0", "bytes-utils", "futures-core", "http 0.2.12", @@ -1611,7 +1611,7 @@ dependencies = [ "axum-core", "base64 0.21.7", "bitflags 1.3.2", - "bytes 1.9.0", + "bytes 1.10.0", "futures-util", "headers", "http 0.2.12", @@ -1644,7 +1644,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" dependencies = [ "async-trait", - "bytes 1.9.0", + "bytes 1.10.0", "futures-util", "http 0.2.12", "http-body 0.4.6", @@ -1661,7 +1661,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9a320103719de37b7b4da4c8eb629d4573f6bcfd3dfe80d3208806895ccf81d" dependencies = [ "axum", - "bytes 1.9.0", + "bytes 1.10.0", "futures-util", "http 0.2.12", "mime", @@ -2108,9 +2108,9 @@ dependencies = [ [[package]] name = "bytes" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" +checksum = "f61dac84819c6588b558454b194026eb1f09c293b9036ae9b159e74e73ab6cf9" [[package]] name = "bytes-utils" @@ -2118,7 +2118,7 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7dafe3a8757b027e2be6e4e5601ed563c55989fcf1546e933c66c8eb3a058d35" dependencies = [ - "bytes 1.9.0", + "bytes 1.10.0", "either", ] @@ -2899,7 +2899,7 @@ version = "4.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" dependencies = [ - "bytes 1.9.0", + "bytes 1.10.0", "memchr", ] @@ -5567,7 +5567,7 @@ version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" dependencies = [ - "bytes 1.9.0", + "bytes 1.10.0", "fnv", "futures-core", "futures-sink", @@ -5587,7 +5587,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" dependencies = [ "atomic-waker", - "bytes 1.9.0", + "bytes 1.10.0", "fnv", "futures-core", "futures-sink", @@ -5703,7 +5703,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06683b93020a07e3dbcf5f8c0f6d40080d725bea7936fc01ad345c01b97dc270" dependencies = [ "base64 0.21.7", - "bytes 1.9.0", + "bytes 1.10.0", "headers-core", "http 0.2.12", "httpdate", @@ -5882,7 +5882,7 @@ version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" dependencies = [ - "bytes 1.9.0", + "bytes 1.10.0", "fnv", "itoa", ] @@ -5893,7 +5893,7 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" dependencies = [ - "bytes 1.9.0", + "bytes 1.10.0", "fnv", "itoa", ] @@ -5904,7 +5904,7 @@ version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ - "bytes 1.9.0", + "bytes 1.10.0", "http 0.2.12", "pin-project-lite", ] @@ -5915,7 +5915,7 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ - "bytes 1.9.0", + "bytes 1.10.0", "http 1.2.0", ] @@ -5925,7 +5925,7 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" dependencies = [ - "bytes 1.9.0", + "bytes 1.10.0", "futures-util", "http 1.2.0", "http-body 1.0.1", @@ -5964,7 +5964,7 @@ name = "http_client" version = "0.1.0" dependencies = [ "anyhow", - "bytes 1.9.0", + "bytes 1.10.0", "derive_more", "futures 0.3.31", "http 1.2.0", @@ -6004,7 +6004,7 @@ version = "0.14.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" dependencies = [ - "bytes 1.9.0", + "bytes 1.10.0", "futures-channel", "futures-core", "futures-util", @@ -6028,7 +6028,7 @@ version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97818827ef4f364230e16705d4706e2897df2bb60617d6ca15d598025a3c481f" dependencies = [ - "bytes 1.9.0", + "bytes 1.10.0", "futures-channel", "futures-util", "h2 0.4.7", @@ -6082,7 +6082,7 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ - "bytes 1.9.0", + "bytes 1.10.0", "hyper 0.14.32", "native-tls", "tokio", @@ -6095,7 +6095,7 @@ version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" dependencies = [ - "bytes 1.9.0", + "bytes 1.10.0", "futures-channel", "futures-util", "http 1.2.0", @@ -6747,7 +6747,7 @@ checksum = "c9ae6296f9476658b3550293c113996daf75fa542cd8d078abb4c60207bded14" dependencies = [ "anyhow", "async-trait", - "bytes 1.9.0", + "bytes 1.10.0", "chrono", "futures 0.3.31", "serde", @@ -9032,7 +9032,7 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "18f596653ba4ac51bdecbb4ef6773bc7f56042dc13927910de1684ad3d32aa12" dependencies = [ - "bytes 1.9.0", + "bytes 1.10.0", "chrono", "pbjson", "pbjson-build", @@ -10104,7 +10104,7 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "444879275cb4fd84958b1a1d5420d15e6fcf7c235fe47f053c9c2a80aceb6001" dependencies = [ - "bytes 1.9.0", + "bytes 1.10.0", "prost-derive 0.9.0", ] @@ -10114,7 +10114,7 @@ version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "deb1435c188b76130da55f17a466d252ff7b1418b2ad3e037d127b94e3411f29" dependencies = [ - "bytes 1.9.0", + "bytes 1.10.0", "prost-derive 0.12.6", ] @@ -10124,7 +10124,7 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62941722fb675d463659e49c4f3fe1fe792ff24fe5bbaa9c08cd3b98a1c354f5" dependencies = [ - "bytes 1.9.0", + "bytes 1.10.0", "heck 0.3.3", "itertools 0.10.5", "lazy_static", @@ -10144,7 +10144,7 @@ version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4" dependencies = [ - "bytes 1.9.0", + "bytes 1.10.0", "heck 0.5.0", "itertools 0.12.1", "log", @@ -10191,7 +10191,7 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "534b7a0e836e3c482d2693070f982e39e7611da9695d4d1f5a4b186b51faef0a" dependencies = [ - "bytes 1.9.0", + "bytes 1.10.0", "prost 0.9.0", ] @@ 
-10318,7 +10318,7 @@ version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62e96808277ec6f97351a2380e6c25114bc9e67037775464979f3037c92d05ef" dependencies = [ - "bytes 1.9.0", + "bytes 1.10.0", "pin-project-lite", "quinn-proto", "quinn-udp", @@ -10336,7 +10336,7 @@ version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2fe5ef3495d7d2e377ff17b1a8ce2ee2ec2a18cde8b6ad6619d65d0701c135d" dependencies = [ - "bytes 1.9.0", + "bytes 1.10.0", "getrandom 0.2.15", "rand 0.8.5", "ring", @@ -10841,7 +10841,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" dependencies = [ "base64 0.21.7", - "bytes 1.9.0", + "bytes 1.10.0", "encoding_rs", "futures-core", "futures-util", @@ -10884,7 +10884,7 @@ version = "0.12.8" source = "git+https://github.com/zed-industries/reqwest.git?rev=fd110f6998da16bbca97b6dddda9be7827c50e29#fd110f6998da16bbca97b6dddda9be7827c50e29" dependencies = [ "base64 0.22.1", - "bytes 1.9.0", + "bytes 1.10.0", "encoding_rs", "futures-core", "futures-util", @@ -10930,7 +10930,7 @@ name = "reqwest_client" version = "0.1.0" dependencies = [ "anyhow", - "bytes 1.9.0", + "bytes 1.10.0", "futures 0.3.31", "gpui", "http_client", @@ -11012,7 +11012,7 @@ checksum = "9008cd6385b9e161d8229e1f6549dd23c3d022f132a2ea37ac3a10ac4935779b" dependencies = [ "bitvec", "bytecheck", - "bytes 1.9.0", + "bytes 1.10.0", "hashbrown 0.12.3", "ptr_meta", "rend", @@ -11143,7 +11143,7 @@ dependencies = [ "async-dispatcher", "async-std", "base64 0.22.1", - "bytes 1.9.0", + "bytes 1.10.0", "chrono", "data-encoding", "dirs 5.0.1", @@ -11202,7 +11202,7 @@ checksum = "b082d80e3e3cc52b2ed634388d436fe1f4de6af5786cc2de9ba9737527bdf555" dependencies = [ "arrayvec", "borsh", - "bytes 1.9.0", + "bytes 1.10.0", "num-traits", "rand 0.8.5", "rkyv", @@ -12339,7 +12339,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a007b6936676aa9ab40207cde35daab0a04b823be8ae004368c0793b96a61e0" dependencies = [ "bigdecimal", - "bytes 1.9.0", + "bytes 1.10.0", "chrono", "crc", "crossbeam-queue", @@ -12423,7 +12423,7 @@ dependencies = [ "bigdecimal", "bitflags 2.8.0", "byteorder", - "bytes 1.9.0", + "bytes 1.10.0", "chrono", "crc", "digest", @@ -13543,7 +13543,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d61fa4ffa3de412bfea335c6ecff681de2b609ba3c77ef3e00e521813a9ed9e" dependencies = [ "backtrace", - "bytes 1.9.0", + "bytes 1.10.0", "libc", "mio 1.0.3", "parking_lot", @@ -13663,7 +13663,7 @@ version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078" dependencies = [ - "bytes 1.9.0", + "bytes 1.10.0", "futures-core", "futures-io", "futures-sink", @@ -13758,7 +13758,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f873044bf02dd1e8239e9c1293ea39dad76dc594ec16185d0a1bf31d8dc8d858" dependencies = [ "bitflags 1.3.2", - "bytes 1.9.0", + "bytes 1.10.0", "futures-core", "futures-util", "http 0.2.12", @@ -13776,7 +13776,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61c5bb1d698276a2443e5ecfabc1008bf15a36c12e6a7176e7bf089ea9131140" dependencies = [ "bitflags 2.8.0", - "bytes 1.9.0", + "bytes 1.10.0", "futures-core", "futures-util", "http 0.2.12", @@ -14127,7 +14127,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9" dependencies = [ "byteorder", - "bytes 1.9.0", + "bytes 1.10.0", "data-encoding", "http 0.2.12", "httparse", @@ -14147,7 +14147,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ef1a641ea34f399a848dea702823bbecfb4c486f911735368f1f137cb8257e1" dependencies = [ "byteorder", - "bytes 1.9.0", + "bytes 1.10.0", "data-encoding", "http 1.2.0", "httparse", @@ -14166,7 +14166,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "18e5b8366ee7a95b16d32197d0b2604b43a0be89dc5fac9f8e96ccafbaedda8a" dependencies = [ "byteorder", - "bytes 1.9.0", + "bytes 1.10.0", "data-encoding", "http 1.2.0", "httparse", @@ -14668,7 +14668,7 @@ version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4378d202ff965b011c64817db11d5829506d3404edeadb61f190d111da3f231c" dependencies = [ - "bytes 1.9.0", + "bytes 1.10.0", "futures-channel", "futures-util", "headers", @@ -15085,7 +15085,7 @@ dependencies = [ "anyhow", "async-trait", "bitflags 2.8.0", - "bytes 1.9.0", + "bytes 1.10.0", "cap-fs-ext", "cap-net-ext", "cap-rand", @@ -16813,7 +16813,7 @@ dependencies = [ "async-std", "async-trait", "asynchronous-codec", - "bytes 1.9.0", + "bytes 1.10.0", "crossbeam-queue", "dashmap 5.5.3", "futures 0.3.31", From 00b1964940ce385e154bc55f626b83bc6ba31041 Mon Sep 17 00:00:00 2001 From: James Roberts <82052595+contrast-jproberts@users.noreply.github.com> Date: Thu, 6 Feb 2025 03:05:41 -0500 Subject: [PATCH 069/130] auto_update_ui: Show update notification across workspaces (#23458) When Zed reopens after an auto-update is installed, a notification was previously displayed in the first window opened. If there were multiple windows open, the notification could be hidden because Zed reopens the last session's window stack in order from back to front. Now, the notification is opened in every workspace, and dismissing the notification in any workspace will dismisses it everywhere. Closes #23236 Release Notes: - Improved notification after Zed is updated to be visible in all workspaces. 
--------- Co-authored-by: Michael Sloan --- crates/auto_update_ui/src/auto_update_ui.rs | 27 ++++++++++++--------- crates/zed/src/zed.rs | 2 -- 2 files changed, 16 insertions(+), 13 deletions(-) diff --git a/crates/auto_update_ui/src/auto_update_ui.rs b/crates/auto_update_ui/src/auto_update_ui.rs index a54ad7ac2287afbdd8a021115e5f4fdc7808b8dc..f0fbd6f404a0fb953a22916a263b64104b87c457 100644 --- a/crates/auto_update_ui/src/auto_update_ui.rs +++ b/crates/auto_update_ui/src/auto_update_ui.rs @@ -9,7 +9,7 @@ use release_channel::{AppVersion, ReleaseChannel}; use serde::Deserialize; use smol::io::AsyncReadExt; use util::ResultExt as _; -use workspace::notifications::NotificationId; +use workspace::notifications::{show_app_notification, NotificationId}; use workspace::Workspace; use crate::update_notification::UpdateNotification; @@ -17,6 +17,7 @@ use crate::update_notification::UpdateNotification; actions!(auto_update, [ViewReleaseNotesLocally]); pub fn init(cx: &mut App) { + notify_if_app_was_updated(cx); cx.observe_new(|workspace: &mut Workspace, _window, _cx| { workspace.register_action(|workspace, _: &ViewReleaseNotesLocally, window, cx| { view_release_notes_locally(workspace, window, cx); @@ -124,31 +125,35 @@ fn view_release_notes_locally( .detach(); } -pub fn notify_of_any_new_update(window: &mut Window, cx: &mut Context) -> Option<()> { - let updater = AutoUpdater::get(cx)?; +/// Shows a notification across all workspaces if an update was previously automatically installed +/// and this notification had not yet been shown. +pub fn notify_if_app_was_updated(cx: &mut App) { + let Some(updater) = AutoUpdater::get(cx) else { + return; + }; let version = updater.read(cx).current_version(); let should_show_notification = updater.read(cx).should_show_update_notification(cx); - cx.spawn_in(window, |workspace, mut cx| async move { + cx.spawn(|cx| async move { let should_show_notification = should_show_notification.await?; if should_show_notification { - workspace.update(&mut cx, |workspace, cx| { - let workspace_handle = workspace.weak_handle(); - workspace.show_notification( + cx.update(|cx| { + show_app_notification( NotificationId::unique::(), cx, - |cx| cx.new(|_| UpdateNotification::new(version, workspace_handle)), + move |cx| { + let workspace_handle = cx.entity().downgrade(); + cx.new(|_| UpdateNotification::new(version, workspace_handle)) + }, ); updater.update(cx, |updater, cx| { updater .set_should_show_update_notification(false, cx) .detach_and_log_err(cx); - }); + }) })?; } anyhow::Ok(()) }) .detach(); - - None } diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 11d12f394ef31c29f451f3e7d01471cb24e11742..3b20a19fb4e17e86e9fe8f2054ca05e25bfa2f9a 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -213,8 +213,6 @@ pub fn initialize_workspace( status_bar.add_right_item(cursor_position, window, cx); }); - auto_update_ui::notify_of_any_new_update(window, cx); - let handle = cx.entity().downgrade(); window.on_window_should_close(cx, move |window, cx| { handle From b1b2e812b155a742a2a486e2b9c04aa86cd1b49a Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 6 Feb 2025 10:26:48 +0200 Subject: [PATCH 070/130] Update Rust crate toml to v0.8.20 (#24318) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [toml](https://redirect.github.com/toml-rs/toml) | workspace.dependencies | 
patch | `0.8.19` -> `0.8.20` | --- ### Release Notes
toml-rs/toml (toml) ### [`v0.8.20`](https://redirect.github.com/toml-rs/toml/compare/toml-v0.8.19...toml-v0.8.20) [Compare Source](https://redirect.github.com/toml-rs/toml/compare/toml-v0.8.19...toml-v0.8.20)
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 57 +++++++++++++++++++++++++++++++----------------------- 1 file changed, 33 insertions(+), 24 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 522d76c99888c8e9bb5245d26641245520603d99..18acc81bf4ddba529b0df2fbd3e22a08060eb53d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -631,7 +631,7 @@ dependencies = [ "smol", "terminal_view", "text", - "toml 0.8.19", + "toml 0.8.20", "ui", "util", "workspace", @@ -2311,7 +2311,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5fbd1fe9db3ebf71b89060adaf7b0504c2d6a425cf061313099547e382c2e472" dependencies = [ "serde", - "toml 0.8.19", + "toml 0.8.20", ] [[package]] @@ -2345,7 +2345,7 @@ dependencies = [ "serde_json", "syn 2.0.90", "tempfile", - "toml 0.8.19", + "toml 0.8.20", ] [[package]] @@ -2363,7 +2363,7 @@ dependencies = [ "serde_json", "syn 2.0.90", "tempfile", - "toml 0.8.19", + "toml 0.8.20", ] [[package]] @@ -2817,7 +2817,7 @@ dependencies = [ "thiserror 1.0.69", "time", "tokio", - "toml 0.8.19", + "toml 0.8.20", "tower", "tower-http 0.4.4", "tracing", @@ -4112,7 +4112,7 @@ dependencies = [ "cc", "memchr", "rustc_version", - "toml 0.8.19", + "toml 0.8.20", "vswhom", "winreg 0.52.0", ] @@ -4403,7 +4403,7 @@ dependencies = [ "semantic_version", "serde", "serde_json", - "toml 0.8.19", + "toml 0.8.20", "util", "wasm-encoder 0.215.0", "wasmparser 0.215.0", @@ -4427,7 +4427,7 @@ dependencies = [ "serde_json", "theme", "tokio", - "toml 0.8.19", + "toml 0.8.20", "tree-sitter", "wasmtime", ] @@ -4472,7 +4472,7 @@ dependencies = [ "tempfile", "theme", "theme_extension", - "toml 0.8.19", + "toml 0.8.20", "url", "util", "wasmparser 0.215.0", @@ -7058,7 +7058,7 @@ dependencies = [ "task", "text", "theme", - "toml 0.8.19", + "toml 0.8.20", "tree-sitter", "tree-sitter-bash", "tree-sitter-c", @@ -9376,7 +9376,7 @@ dependencies = [ "serde", "serde_json", "sha2", - "toml 0.8.19", + "toml 0.8.20", ] [[package]] @@ -9986,7 +9986,7 @@ dependencies = [ "tempfile", "terminal", "text", - "toml 0.8.19", + "toml 0.8.20", "unindent", "url", "util", @@ -10768,7 +10768,7 @@ dependencies = [ "smol", "sysinfo", "telemetry_events", - "toml 0.8.19", + "toml 0.8.20", "unindent", "util", "worktree", @@ -12958,7 +12958,7 @@ dependencies = [ "cfg-expr", "heck 0.5.0", "pkg-config", - "toml 0.8.19", + "toml 0.8.20", "version-compare", ] @@ -13682,9 +13682,9 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.19" +version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" +checksum = "cd87a5cdd6ffab733b2f74bc4fd7ee5fff6634124999ac278c35fc78c6120148" dependencies = [ "serde", "serde_spanned", @@ -13703,15 +13703,15 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.22.22" +version = "0.22.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" 
+checksum = "02a8b472d1a3d7c18e2d61a489aee3453fd9031c33e4f55bd533f4a7adca1bee" dependencies = [ "indexmap", "serde", "serde_spanned", "toml_datetime", - "winnow", + "winnow 0.7.1", ] [[package]] @@ -15820,6 +15820,15 @@ dependencies = [ "memchr", ] +[[package]] +name = "winnow" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86e376c75f4f43f44db463cf729e0d3acbf954d13e22c51e26e4c264b4ab545f" +dependencies = [ + "memchr", +] + [[package]] name = "winreg" version = "0.50.0" @@ -15846,7 +15855,7 @@ version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7276691b353ad4547af8c3268488d1311f4be791ffdc0c65b8cfa8f41eed693b" dependencies = [ - "toml 0.8.19", + "toml 0.8.20", "version_check", ] @@ -16365,7 +16374,7 @@ dependencies = [ "tracing", "uds_windows", "windows-sys 0.59.0", - "winnow", + "winnow 0.6.20", "xdg-home", "zbus_macros 5.1.1", "zbus_names 4.1.0", @@ -16419,7 +16428,7 @@ checksum = "856b7a38811f71846fd47856ceee8bccaec8399ff53fb370247e66081ace647b" dependencies = [ "serde", "static_assertions", - "winnow", + "winnow 0.6.20", "zvariant 5.1.0", ] @@ -16996,7 +17005,7 @@ dependencies = [ "serde", "static_assertions", "url", - "winnow", + "winnow 0.6.20", "zvariant_derive 5.1.0", "zvariant_utils 3.0.2", ] @@ -17049,5 +17058,5 @@ dependencies = [ "serde", "static_assertions", "syn 2.0.90", - "winnow", + "winnow 0.6.20", ] From b96f62f4c90e0cf251dc44e434f4fff7bf65ecde Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 6 Feb 2025 10:30:15 +0200 Subject: [PATCH 071/130] Update Rust crate derive_more to v0.99.19 (#24312) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [derive_more](https://redirect.github.com/JelteF/derive_more) | workspace.dependencies | patch | `0.99.18` -> `0.99.19` | --- ### Release Notes
JelteF/derive_more (derive_more) ### [`v0.99.19`](https://redirect.github.com/JelteF/derive_more/blob/HEAD/CHANGELOG.md#09919---2025-02-03) [Compare Source](https://redirect.github.com/JelteF/derive_more/compare/v0.99.18...v0.99.19) - Add crate metadata for the Rust Playground.
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 18acc81bf4ddba529b0df2fbd3e22a08060eb53d..ed0d020b4e40035c35bd3822946d5f4c9aa3c46f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3775,9 +3775,9 @@ dependencies = [ [[package]] name = "derive_more" -version = "0.99.18" +version = "0.99.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" +checksum = "3da29a38df43d6f156149c9b43ded5e018ddff2a855cf2cfd62e8cd7d079c69f" dependencies = [ "convert_case 0.4.0", "proc-macro2", From 88ff44f2e8bf9d090b17f2355c2fb5200861132e Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 6 Feb 2025 10:30:51 +0200 Subject: [PATCH 072/130] Update Rust crate rustc-hash to v2.1.1 (#24317) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [rustc-hash](https://redirect.github.com/rust-lang/rustc-hash) | workspace.dependencies | patch | `2.1.0` -> `2.1.1` | --- ### Release Notes
rust-lang/rustc-hash (rustc-hash) ### [`v2.1.1`](https://redirect.github.com/rust-lang/rustc-hash/blob/HEAD/CHANGELOG.md#211) [Compare Source](https://redirect.github.com/rust-lang/rustc-hash/compare/v2.1.0...v2.1.1) - Change the internal algorithm to better accomodate large hashmaps. This mitigates a [regression with 2.0 in rustc](https://redirect.github.com/rust-lang/rust/issues/135477). See [PR#55](https://redirect.github.com/rust-lang/rustc-hash/pull/55) for more details on the change (this PR was not merged). This problem might be improved with changes to hashbrown in the future. #### 2.1.0 - Implement `Clone` for `FxRandomState` - Implement `Clone` for `FxSeededState` - Use SPDX license expression in license field #### 2.0.0 - Replace hash with faster and better finalized hash. This replaces the previous "fxhash" algorithm originating in Firefox with a custom hasher designed and implemented by Orson Peters ([`@orlp`](https://redirect.github.com/orlp)). It was measured to have slightly better performance for rustc, has better theoretical properties and also includes a significantly better string hasher. - Fix `no_std` builds #### 1.2.0 (**YANKED**) **Note: This version has been yanked due to issues with the `no_std` feature!** - Add a `FxBuildHasher` unit struct - Improve documentation - Add seed API for supplying custom seeds other than 0 - Add `FxRandomState` based on `rand` (behind the `rand` feature) for random seeds - Make many functions `const fn` - Implement `Clone` for `FxHasher` struct
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ed0d020b4e40035c35bd3822946d5f4c9aa3c46f..a6a40f260a1ad9af7f8c56555c4d51918aa51593 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2878,7 +2878,7 @@ name = "collections" version = "0.1.0" dependencies = [ "indexmap", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", ] [[package]] @@ -10322,7 +10322,7 @@ dependencies = [ "pin-project-lite", "quinn-proto", "quinn-udp", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "rustls 0.23.22", "socket2", "thiserror 2.0.6", @@ -10340,7 +10340,7 @@ dependencies = [ "getrandom 0.2.15", "rand 0.8.5", "ring", - "rustc-hash 2.1.0", + "rustc-hash 2.1.1", "rustls 0.23.22", "rustls-pki-types", "slab", @@ -11224,9 +11224,9 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustc-hash" -version = "2.1.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7fb8039b3032c191086b10f11f319a6e99e1e82889c5cc6046f515c9db1d497" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" [[package]] name = "rustc_version" From 53fcd7cc922e767df383a7011fe80b46d12c74cd Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 6 Feb 2025 10:31:37 +0200 Subject: [PATCH 073/130] Update Rust crate clap to v4.5.28 (#24311) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [clap](https://redirect.github.com/clap-rs/clap) | workspace.dependencies | patch | `4.5.23` -> `4.5.28` | --- ### Release Notes
clap-rs/clap (clap) ### [`v4.5.28`](https://redirect.github.com/clap-rs/clap/blob/HEAD/CHANGELOG.md#4528---2025-02-03) [Compare Source](https://redirect.github.com/clap-rs/clap/compare/v4.5.27...v4.5.28) ##### Features - *(derive)* Unstable support for full markdown syntax for doc comments, enabled with `unstable-markdown` ### [`v4.5.27`](https://redirect.github.com/clap-rs/clap/blob/HEAD/CHANGELOG.md#4527---2025-01-20) [Compare Source](https://redirect.github.com/clap-rs/clap/compare/v4.5.26...v4.5.27) ##### Documentation - Iterate on tutorials and reference based on feedback ### [`v4.5.26`](https://redirect.github.com/clap-rs/clap/blob/HEAD/CHANGELOG.md#4526---2025-01-09) [Compare Source](https://redirect.github.com/clap-rs/clap/compare/v4.5.25...v4.5.26) ##### Fixes - *(error)* Reduce binary size with the `suggestions` feature ### [`v4.5.25`](https://redirect.github.com/clap-rs/clap/blob/HEAD/CHANGELOG.md#4525---2025-01-09) [Compare Source](https://redirect.github.com/clap-rs/clap/compare/v4.5.24...v4.5.25) ##### Fixes - *(help)* Reduce binary size ### [`v4.5.24`](https://redirect.github.com/clap-rs/clap/blob/HEAD/CHANGELOG.md#4524---2025-01-07) [Compare Source](https://redirect.github.com/clap-rs/clap/compare/v4.5.23...v4.5.24) ##### Fixes - *(parser)* Correctly handle defaults with `ignore_errors(true)` and when a suggestion is provided for an unknown argument
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a6a40f260a1ad9af7f8c56555c4d51918aa51593..851c2b50ee510d7384e2b67bb8f060d5c1b16545 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2213,7 +2213,7 @@ dependencies = [ "cap-primitives", "cap-std", "io-lifetimes", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -2241,7 +2241,7 @@ dependencies = [ "ipnet", "maybe-owned", "rustix", - "windows-sys 0.52.0", + "windows-sys 0.59.0", "winx", ] @@ -2515,9 +2515,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.23" +version = "4.5.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3135e7ec2ef7b10c6ed8950f0f792ed96ee093fa088608f1c76e569722700c84" +checksum = "3e77c3243bd94243c03672cb5154667347c457ca271254724f9f393aee1c05ff" dependencies = [ "clap_builder", "clap_derive", @@ -2525,9 +2525,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.23" +version = "4.5.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30582fc632330df2bd26877bde0c1f4470d57c582bbc070376afcd04d8cb4838" +checksum = "1b26884eb4b57140e4d2d93652abfa49498b938b3c9179f9fc487b0acc3edad7" dependencies = [ "anstream", "anstyle", @@ -2547,9 +2547,9 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.18" +version = "4.5.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" +checksum = "bf4ced95c6f4a675af3da73304b9ac4ed991640c36374e4b46795c49e17cf1ed" dependencies = [ "heck 0.5.0", "proc-macro2", @@ -4259,7 +4259,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -4917,7 +4917,7 @@ checksum = "5e2e6123af26f0f2c51cc66869137080199406754903cc926a7690401ce09cb4" dependencies = [ "io-lifetimes", "rustix", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -6540,7 +6540,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2285ddfe3054097ef4b2fe909ef8c3bcd1ea52a8f0d274416caebeef39f04a65" dependencies = [ "io-lifetimes", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -7153,7 +7153,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" dependencies = [ "cfg-if", - "windows-targets 0.52.6", + "windows-targets 0.48.5", ] [[package]] @@ -10145,7 +10145,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4" dependencies = [ "bytes 1.10.0", - "heck 0.5.0", + "heck 0.4.1", "itertools 0.12.1", "log", "multimap 0.10.0", @@ -10361,7 +10361,7 @@ 
dependencies = [ "once_cell", "socket2", "tracing", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -11249,7 +11249,7 @@ dependencies = [ "libc", "linux-raw-sys", "once_cell", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -12974,7 +12974,7 @@ dependencies = [ "fd-lock", "io-lifetimes", "rustix", - "windows-sys 0.52.0", + "windows-sys 0.59.0", "winx", ] @@ -13108,7 +13108,7 @@ dependencies = [ "getrandom 0.3.1", "once_cell", "rustix", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -15872,7 +15872,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f3fd376f71958b862e7afb20cfe5a22830e1963462f3a17f49d82a6c1d1f42d" dependencies = [ "bitflags 2.8.0", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] From 1f2205d75ccbb2d2c53e4484317f7425755b5d8b Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Thu, 6 Feb 2025 01:37:46 -0700 Subject: [PATCH 074/130] Wrap AnyView.cached_style in an Rc to make the struct much smaller (#24363) Byte size before was 672, now is 56. The `cached` method is only used in two places, so this was a lot of extra bytes being shuffled around for every `AnyView` not using this. Release Notes: - N/A --- crates/gpui/src/view.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/crates/gpui/src/view.rs b/crates/gpui/src/view.rs index 86d6def7cc77c719d758c9e2eb2fadb73443f382..c6483ba6ae6f0938ff6aed9b20018b13f92a32a1 100644 --- a/crates/gpui/src/view.rs +++ b/crates/gpui/src/view.rs @@ -8,6 +8,7 @@ use anyhow::Result; use collections::FxHashSet; use refineable::Refineable; use std::mem; +use std::rc::Rc; use std::{any::TypeId, fmt, ops::Range}; struct AnyViewState { @@ -73,7 +74,7 @@ impl Element for Entity { pub struct AnyView { entity: AnyEntity, render: fn(&AnyView, &mut Window, &mut App) -> AnyElement, - cached_style: Option, + cached_style: Option>, } impl From> for AnyView { @@ -91,7 +92,7 @@ impl AnyView { /// When using this method, the view's previous layout and paint will be recycled from the previous frame if [Context::notify] has not been called since it was rendered. /// The one exception is when [Window::refresh] is called, in which case caching is ignored. pub fn cached(mut self, style: StyleRefinement) -> Self { - self.cached_style = Some(style); + self.cached_style = Some(style.into()); self } From f08b1d78ec1655a56b07c5efcd74d225d2a4b7d5 Mon Sep 17 00:00:00 2001 From: Jason Lee Date: Thu, 6 Feb 2025 17:03:23 +0800 Subject: [PATCH 075/130] Revert "Revert recent anti-aliasing improvements (#24289)" and fix selection top right corner radius issue (#24342) Release Notes: - N/A ---- To fix #24289 mention issue and revert PathBuilder and MSAA. I'm sorry about of this, in #22808 I was forgotten this bit of detail. ![image](https://github.com/user-attachments/assets/112afda2-088c-41d0-83bd-808f6cd2f9d5) So, add `move_to` here, we can fix the selection top right corner radius issue. 
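For clarity, here is a condensed sketch of the call sequence (taken from the `element.rs` hunk in this diff; the surrounding geometry values — `first_top_right`, `top_curve_width`, `curve_height` — are assumed from that context rather than defined here, so treat this as illustrative rather than standalone code):

```rust
// Anchor the outline with an explicit move_to so the first curve_to starts at
// the intended point on the top edge; previously the path's implicit start
// point left the top-right corner with the wrong radius.
let mut builder = gpui::PathBuilder::fill();
builder.move_to(first_top_right - top_curve_width);
builder.curve_to(first_top_right + curve_height, first_top_right);
// ...the remaining line_to/curve_to calls trace the other corners of the selection...
if let Ok(path) = builder.build() {
    window.paint_path(path, self.color);
}
```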
## After change image --- Cargo.lock | 76 +++++- Cargo.toml | 6 +- crates/editor/src/element.rs | 49 ++-- crates/gpui/Cargo.toml | 2 + crates/gpui/examples/gradient.rs | 14 +- crates/gpui/examples/painting.rs | 141 ++++++---- crates/gpui/src/gpui.rs | 2 + crates/gpui/src/path_builder.rs | 241 ++++++++++++++++++ crates/gpui/src/platform/blade/blade_atlas.rs | 49 +++- .../gpui/src/platform/blade/blade_renderer.rs | 60 +++-- crates/gpui/src/platform/mac/metal_atlas.rs | 22 +- .../gpui/src/platform/mac/metal_renderer.rs | 27 +- crates/gpui/src/scene.rs | 10 +- 13 files changed, 586 insertions(+), 113 deletions(-) create mode 100644 crates/gpui/src/path_builder.rs diff --git a/Cargo.lock b/Cargo.lock index 851c2b50ee510d7384e2b67bb8f060d5c1b16545..3065f2f68d9296c6aa587807f12467b29763ed09 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1868,7 +1868,7 @@ dependencies = [ [[package]] name = "blade-graphics" version = "0.6.0" -source = "git+https://github.com/kvark/blade?rev=091a8401033847bb9b6ace3fcf70448d069621c5#091a8401033847bb9b6ace3fcf70448d069621c5" +source = "git+https://github.com/kvark/blade?rev=b16f5c7bd873c7126f48c82c39e7ae64602ae74f#b16f5c7bd873c7126f48c82c39e7ae64602ae74f" dependencies = [ "ash", "ash-window", @@ -1900,7 +1900,7 @@ dependencies = [ [[package]] name = "blade-macros" version = "0.3.0" -source = "git+https://github.com/kvark/blade?rev=091a8401033847bb9b6ace3fcf70448d069621c5#091a8401033847bb9b6ace3fcf70448d069621c5" +source = "git+https://github.com/kvark/blade?rev=b16f5c7bd873c7126f48c82c39e7ae64602ae74f#b16f5c7bd873c7126f48c82c39e7ae64602ae74f" dependencies = [ "proc-macro2", "quote", @@ -1910,7 +1910,7 @@ dependencies = [ [[package]] name = "blade-util" version = "0.2.0" -source = "git+https://github.com/kvark/blade?rev=091a8401033847bb9b6ace3fcf70448d069621c5#091a8401033847bb9b6ace3fcf70448d069621c5" +source = "git+https://github.com/kvark/blade?rev=b16f5c7bd873c7126f48c82c39e7ae64602ae74f#b16f5c7bd873c7126f48c82c39e7ae64602ae74f" dependencies = [ "blade-graphics", "bytemuck", @@ -4725,6 +4725,12 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ce81f49ae8a0482e4c55ea62ebbd7e5a686af544c00b9d090bba3ff9be97b3d" +[[package]] +name = "float_next_after" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8bf7cc16383c4b8d58b9905a8509f02926ce3058053c056376248d958c9df1e8" + [[package]] name = "flume" version = "0.11.1" @@ -5476,6 +5482,7 @@ dependencies = [ "inventory", "itertools 0.14.0", "log", + "lyon", "media", "metal", "naga", @@ -7498,6 +7505,69 @@ dependencies = [ "url", ] +[[package]] +name = "lyon" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e7f9cda98b5430809e63ca5197b06c7d191bf7e26dfc467d5a3f0290e2a74f" +dependencies = [ + "lyon_algorithms", + "lyon_extra", + "lyon_tessellation", +] + +[[package]] +name = "lyon_algorithms" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f13c9be19d257c7d37e70608ed858e8eab4b2afcea2e3c9a622e892acbf43c08" +dependencies = [ + "lyon_path", + "num-traits", +] + +[[package]] +name = "lyon_extra" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ca94c7bf1e2557c2798989c43416822c12fc5dcc5e17cc3307ef0e71894a955" +dependencies = [ + "lyon_path", + "thiserror 1.0.69", +] + +[[package]] +name = "lyon_geom" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8af69edc087272df438b3ee436c4bb6d7c04aa8af665cfd398feae627dbd8570" +dependencies = [ + "arrayvec", + "euclid", + "num-traits", +] + +[[package]] +name = "lyon_path" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e0b8aec2f58586f6eef237985b9a9b7cb3a3aff4417c575075cf95bf925252e" +dependencies = [ + "lyon_geom", + "num-traits", +] + +[[package]] +name = "lyon_tessellation" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "579d42360a4b09846eff2feef28f538696c7d6c7439bfa65874ff3cbe0951b2c" +dependencies = [ + "float_next_after", + "lyon_path", + "num-traits", +] + [[package]] name = "mac" version = "0.1.1" diff --git a/Cargo.toml b/Cargo.toml index 9f77103e625841da6dff11dc7aa6dd4bea65023d..73160b0cd17179b57ee839a78b27833ce992be19 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -376,9 +376,9 @@ async-watch = "0.3.1" async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] } base64 = "0.22" bitflags = "2.6.0" -blade-graphics = { git = "https://github.com/kvark/blade", rev = "091a8401033847bb9b6ace3fcf70448d069621c5" } -blade-macros = { git = "https://github.com/kvark/blade", rev = "091a8401033847bb9b6ace3fcf70448d069621c5" } -blade-util = { git = "https://github.com/kvark/blade", rev = "091a8401033847bb9b6ace3fcf70448d069621c5" } +blade-graphics = { git = "https://github.com/kvark/blade", rev = "b16f5c7bd873c7126f48c82c39e7ae64602ae74f" } +blade-macros = { git = "https://github.com/kvark/blade", rev = "b16f5c7bd873c7126f48c82c39e7ae64602ae74f" } +blade-util = { git = "https://github.com/kvark/blade", rev = "b16f5c7bd873c7126f48c82c39e7ae64602ae74f" } naga = { version = "23.1.0", features = ["wgsl-in"] } blake3 = "1.5.3" bytes = "1.0" diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 99c103934870eb15bcbba4b2fc5ecf015ba7bad7..32c2d4a5de2d4ecf6cceb2149cbe1f6f7356501c 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -8149,8 +8149,9 @@ impl HighlightedRange { }; let top_curve_width = curve_width(first_line.start_x, first_line.end_x); - let mut path = gpui::Path::new(first_top_right - top_curve_width); - path.curve_to(first_top_right + curve_height, first_top_right); + let mut builder = gpui::PathBuilder::fill(); + builder.move_to(first_top_right - top_curve_width); + builder.curve_to(first_top_right + curve_height, first_top_right); let mut iter = lines.iter().enumerate().peekable(); while let Some((ix, line)) = iter.next() { @@ -8161,42 +8162,42 @@ impl HighlightedRange { match next_top_right.x.partial_cmp(&bottom_right.x).unwrap() { Ordering::Equal => { - path.line_to(bottom_right); + builder.line_to(bottom_right); } Ordering::Less => { let curve_width = curve_width(next_top_right.x, bottom_right.x); - path.line_to(bottom_right - curve_height); + builder.line_to(bottom_right - curve_height); if self.corner_radius > Pixels::ZERO { - path.curve_to(bottom_right - curve_width, bottom_right); + builder.curve_to(bottom_right - curve_width, bottom_right); } - path.line_to(next_top_right + curve_width); + builder.line_to(next_top_right + curve_width); if self.corner_radius > Pixels::ZERO { - path.curve_to(next_top_right + curve_height, next_top_right); + builder.curve_to(next_top_right + curve_height, next_top_right); } } Ordering::Greater => { let curve_width = curve_width(bottom_right.x, next_top_right.x); - path.line_to(bottom_right - curve_height); + builder.line_to(bottom_right - curve_height); if self.corner_radius > Pixels::ZERO { - 
path.curve_to(bottom_right + curve_width, bottom_right); + builder.curve_to(bottom_right + curve_width, bottom_right); } - path.line_to(next_top_right - curve_width); + builder.line_to(next_top_right - curve_width); if self.corner_radius > Pixels::ZERO { - path.curve_to(next_top_right + curve_height, next_top_right); + builder.curve_to(next_top_right + curve_height, next_top_right); } } } } else { let curve_width = curve_width(line.start_x, line.end_x); - path.line_to(bottom_right - curve_height); + builder.line_to(bottom_right - curve_height); if self.corner_radius > Pixels::ZERO { - path.curve_to(bottom_right - curve_width, bottom_right); + builder.curve_to(bottom_right - curve_width, bottom_right); } let bottom_left = point(line.start_x, bottom_right.y); - path.line_to(bottom_left + curve_width); + builder.line_to(bottom_left + curve_width); if self.corner_radius > Pixels::ZERO { - path.curve_to(bottom_left - curve_height, bottom_left); + builder.curve_to(bottom_left - curve_height, bottom_left); } } } @@ -8204,24 +8205,26 @@ impl HighlightedRange { if first_line.start_x > last_line.start_x { let curve_width = curve_width(last_line.start_x, first_line.start_x); let second_top_left = point(last_line.start_x, start_y + self.line_height); - path.line_to(second_top_left + curve_height); + builder.line_to(second_top_left + curve_height); if self.corner_radius > Pixels::ZERO { - path.curve_to(second_top_left + curve_width, second_top_left); + builder.curve_to(second_top_left + curve_width, second_top_left); } let first_bottom_left = point(first_line.start_x, second_top_left.y); - path.line_to(first_bottom_left - curve_width); + builder.line_to(first_bottom_left - curve_width); if self.corner_radius > Pixels::ZERO { - path.curve_to(first_bottom_left - curve_height, first_bottom_left); + builder.curve_to(first_bottom_left - curve_height, first_bottom_left); } } - path.line_to(first_top_left + curve_height); + builder.line_to(first_top_left + curve_height); if self.corner_radius > Pixels::ZERO { - path.curve_to(first_top_left + top_curve_width, first_top_left); + builder.curve_to(first_top_left + top_curve_width, first_top_left); } - path.line_to(first_top_right - top_curve_width); + builder.line_to(first_top_right - top_curve_width); - window.paint_path(path, self.color); + if let Ok(path) = builder.build() { + window.paint_path(path, self.color); + } } } diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index a0220cd572c32dcf29524f2c86a7e52950d23ece..05a5b28e764dc56486858f87ee87bdced7b3a53d 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -108,6 +108,7 @@ thiserror.workspace = true util.workspace = true uuid.workspace = true waker-fn = "1.2.0" +lyon = "1.0" [target.'cfg(target_os = "macos")'.dependencies] block = "0.1" @@ -205,6 +206,7 @@ rand.workspace = true util = { workspace = true, features = ["test-support"] } http_client = { workspace = true, features = ["test-support"] } unicode-segmentation.workspace = true +lyon = { version = "1.0", features = ["extra"] } [target.'cfg(target_os = "windows")'.build-dependencies] embed-resource = "3.0" diff --git a/crates/gpui/examples/gradient.rs b/crates/gpui/examples/gradient.rs index 45de8cdd0afa3d190d046947a5308d5618ec22aa..ec4cdf9bfcdf97f8e0bc3287b94bc3e5a5e198d9 100644 --- a/crates/gpui/examples/gradient.rs +++ b/crates/gpui/examples/gradient.rs @@ -218,13 +218,17 @@ impl Render for GradientViewer { let height = square_bounds.size.height; let horizontal_offset = height; let vertical_offset = px(30.); - let mut path 
= gpui::Path::new(square_bounds.bottom_left()); - path.line_to(square_bounds.origin + point(horizontal_offset, vertical_offset)); - path.line_to( + let mut builder = gpui::PathBuilder::fill(); + builder.move_to(square_bounds.bottom_left()); + builder + .line_to(square_bounds.origin + point(horizontal_offset, vertical_offset)); + builder.line_to( square_bounds.top_right() + point(-horizontal_offset, vertical_offset), ); - path.line_to(square_bounds.bottom_right()); - path.line_to(square_bounds.bottom_left()); + + builder.line_to(square_bounds.bottom_right()); + builder.line_to(square_bounds.bottom_left()); + let path = builder.build().unwrap(); window.paint_path( path, linear_gradient( diff --git a/crates/gpui/examples/painting.rs b/crates/gpui/examples/painting.rs index 9a8ab790650130efeb43fbcdc8306963678e50ec..7c1a6a367d1335f58ed1ba4f09f3385ecb452980 100644 --- a/crates/gpui/examples/painting.rs +++ b/crates/gpui/examples/painting.rs @@ -1,46 +1,62 @@ use gpui::{ - canvas, div, point, prelude::*, px, size, App, Application, Bounds, Context, MouseDownEvent, - Path, Pixels, Point, Render, Window, WindowOptions, + canvas, div, linear_color_stop, linear_gradient, point, prelude::*, px, rgb, size, Application, + Background, Bounds, ColorSpace, Context, MouseDownEvent, Path, PathBuilder, PathStyle, Pixels, + Point, Render, StrokeOptions, Window, WindowOptions, }; + struct PaintingViewer { - default_lines: Vec>, + default_lines: Vec<(Path, Background)>, lines: Vec>>, start: Point, _painting: bool, } impl PaintingViewer { - fn new() -> Self { + fn new(_window: &mut Window, _cx: &mut Context) -> Self { let mut lines = vec![]; - // draw a line - let mut path = Path::new(point(px(50.), px(180.))); - path.line_to(point(px(100.), px(120.))); - // go back to close the path - path.line_to(point(px(100.), px(121.))); - path.line_to(point(px(50.), px(181.))); - lines.push(path); + // draw a Rust logo + let mut builder = lyon::path::Path::svg_builder(); + lyon::extra::rust_logo::build_logo_path(&mut builder); + // move down the Path + let mut builder: PathBuilder = builder.into(); + builder.translate(point(px(10.), px(100.))); + builder.scale(0.9); + let path = builder.build().unwrap(); + lines.push((path, gpui::black().into())); // draw a lightening bolt ⚡ - let mut path = Path::new(point(px(150.), px(200.))); - path.line_to(point(px(200.), px(125.))); - path.line_to(point(px(200.), px(175.))); - path.line_to(point(px(250.), px(100.))); - lines.push(path); + let mut builder = PathBuilder::fill(); + builder.move_to(point(px(150.), px(200.))); + builder.line_to(point(px(200.), px(125.))); + builder.line_to(point(px(200.), px(175.))); + builder.line_to(point(px(250.), px(100.))); + let path = builder.build().unwrap(); + lines.push((path, rgb(0x1d4ed8).into())); // draw a ⭐ - let mut path = Path::new(point(px(350.), px(100.))); - path.line_to(point(px(370.), px(160.))); - path.line_to(point(px(430.), px(160.))); - path.line_to(point(px(380.), px(200.))); - path.line_to(point(px(400.), px(260.))); - path.line_to(point(px(350.), px(220.))); - path.line_to(point(px(300.), px(260.))); - path.line_to(point(px(320.), px(200.))); - path.line_to(point(px(270.), px(160.))); - path.line_to(point(px(330.), px(160.))); - path.line_to(point(px(350.), px(100.))); - lines.push(path); + let mut builder = PathBuilder::fill(); + builder.move_to(point(px(350.), px(100.))); + builder.line_to(point(px(370.), px(160.))); + builder.line_to(point(px(430.), px(160.))); + builder.line_to(point(px(380.), px(200.))); + 
builder.line_to(point(px(400.), px(260.))); + builder.line_to(point(px(350.), px(220.))); + builder.line_to(point(px(300.), px(260.))); + builder.line_to(point(px(320.), px(200.))); + builder.line_to(point(px(270.), px(160.))); + builder.line_to(point(px(330.), px(160.))); + builder.line_to(point(px(350.), px(100.))); + let path = builder.build().unwrap(); + lines.push(( + path, + linear_gradient( + 180., + linear_color_stop(rgb(0xFACC15), 0.7), + linear_color_stop(rgb(0xD56D0C), 1.), + ) + .color_space(ColorSpace::Oklab), + )); let square_bounds = Bounds { origin: point(px(450.), px(100.)), @@ -49,18 +65,42 @@ impl PaintingViewer { let height = square_bounds.size.height; let horizontal_offset = height; let vertical_offset = px(30.); - let mut path = Path::new(square_bounds.bottom_left()); - path.curve_to( + let mut builder = PathBuilder::fill(); + builder.move_to(square_bounds.bottom_left()); + builder.curve_to( square_bounds.origin + point(horizontal_offset, vertical_offset), square_bounds.origin + point(px(0.0), vertical_offset), ); - path.line_to(square_bounds.top_right() + point(-horizontal_offset, vertical_offset)); - path.curve_to( + builder.line_to(square_bounds.top_right() + point(-horizontal_offset, vertical_offset)); + builder.curve_to( square_bounds.bottom_right(), square_bounds.top_right() + point(px(0.0), vertical_offset), ); - path.line_to(square_bounds.bottom_left()); - lines.push(path); + builder.line_to(square_bounds.bottom_left()); + let path = builder.build().unwrap(); + lines.push(( + path, + linear_gradient( + 180., + linear_color_stop(gpui::blue(), 0.4), + linear_color_stop(gpui::red(), 1.), + ), + )); + + // draw a wave + let options = StrokeOptions::default() + .with_line_width(1.) + .with_line_join(lyon::path::LineJoin::Bevel); + let mut builder = PathBuilder::stroke(px(1.)).with_style(PathStyle::Stroke(options)); + builder.move_to(point(px(40.), px(320.))); + for i in 0..50 { + builder.line_to(point( + px(40.0 + i as f32 * 10.0), + px(320.0 + (i as f32 * 10.0).sin() * 40.0), + )); + } + let path = builder.build().unwrap(); + lines.push((path, gpui::green().into())); Self { default_lines: lines.clone(), @@ -115,27 +155,28 @@ impl Render for PaintingViewer { canvas( move |_, _, _| {}, move |_, _, window, _| { - const STROKE_WIDTH: Pixels = px(2.0); - for path in default_lines { - window.paint_path(path, gpui::black()); + + for (path, color) in default_lines { + window.paint_path(path, color); } + for points in lines { - let mut path = Path::new(points[0]); - for p in points.iter().skip(1) { - path.line_to(*p); + if points.len() < 2 { + continue; } - let mut last = points.last().unwrap(); - for p in points.iter().rev() { - let mut offset_x = px(0.); - if last.x == p.x { - offset_x = STROKE_WIDTH; + let mut builder = PathBuilder::stroke(px(1.)); + for (i, p) in points.into_iter().enumerate() { + if i == 0 { + builder.move_to(p); + } else { + builder.line_to(p); } - path.line_to(point(p.x + offset_x, p.y + STROKE_WIDTH)); - last = p; } - window.paint_path(path, gpui::black()); + if let Ok(path) = builder.build() { + window.paint_path(path, gpui::black()); + } } }, ) @@ -185,13 +226,13 @@ impl Render for PaintingViewer { } fn main() { - Application::new().run(|cx: &mut App| { + Application::new().run(|cx| { cx.open_window( WindowOptions { focus: true, ..Default::default() }, - |_, cx| cx.new(|_| PaintingViewer::new()), + |window, cx| cx.new(|cx| PaintingViewer::new(window, cx)), ) .unwrap(); cx.activate(true); diff --git a/crates/gpui/src/gpui.rs 
b/crates/gpui/src/gpui.rs index db33bfca2e512671d58252341265ed62d02fd06d..1ebfc643ee57ccaaa23fe4cfd77a15187a06a146 100644 --- a/crates/gpui/src/gpui.rs +++ b/crates/gpui/src/gpui.rs @@ -82,6 +82,7 @@ mod input; mod interactive; mod key_dispatch; mod keymap; +mod path_builder; mod platform; pub mod prelude; mod scene; @@ -135,6 +136,7 @@ pub use input::*; pub use interactive::*; use key_dispatch::*; pub use keymap::*; +pub use path_builder::*; pub use platform::*; pub use refineable::*; pub use scene::*; diff --git a/crates/gpui/src/path_builder.rs b/crates/gpui/src/path_builder.rs new file mode 100644 index 0000000000000000000000000000000000000000..0fd8eb6fa5458315483db45dfb03bbc3a42d5800 --- /dev/null +++ b/crates/gpui/src/path_builder.rs @@ -0,0 +1,241 @@ +use anyhow::Error; +use etagere::euclid::Vector2D; +use lyon::geom::Angle; +use lyon::tessellation::{ + BuffersBuilder, FillTessellator, FillVertex, StrokeTessellator, StrokeVertex, VertexBuffers, +}; + +pub use lyon::math::Transform; +pub use lyon::tessellation::{FillOptions, FillRule, StrokeOptions}; + +use crate::{point, px, Path, Pixels, Point}; + +/// Style of the PathBuilder +pub enum PathStyle { + /// Stroke style + Stroke(StrokeOptions), + /// Fill style + Fill(FillOptions), +} + +/// A [`Path`] builder. +pub struct PathBuilder { + raw: lyon::path::builder::WithSvg, + transform: Option, + /// PathStyle of the PathBuilder + pub style: PathStyle, +} + +impl From for PathBuilder { + fn from(builder: lyon::path::Builder) -> Self { + Self { + raw: builder.with_svg(), + ..Default::default() + } + } +} + +impl From> for PathBuilder { + fn from(raw: lyon::path::builder::WithSvg) -> Self { + Self { + raw, + ..Default::default() + } + } +} + +impl From for Point { + fn from(p: lyon::math::Point) -> Self { + point(px(p.x), px(p.y)) + } +} + +impl From> for lyon::math::Point { + fn from(p: Point) -> Self { + lyon::math::point(p.x.0, p.y.0) + } +} + +impl Default for PathBuilder { + fn default() -> Self { + Self { + raw: lyon::path::Path::builder().with_svg(), + style: PathStyle::Fill(FillOptions::default()), + transform: None, + } + } +} + +impl PathBuilder { + /// Creates a new [`PathBuilder`] to build a Stroke path. + pub fn stroke(width: Pixels) -> Self { + Self { + style: PathStyle::Stroke(StrokeOptions::default().with_line_width(width.0)), + ..Self::default() + } + } + + /// Creates a new [`PathBuilder`] to build a Fill path. + pub fn fill() -> Self { + Self::default() + } + + /// Sets the style of the [`PathBuilder`]. + pub fn with_style(self, style: PathStyle) -> Self { + Self { style, ..self } + } + + /// Move the current point to the given point. + #[inline] + pub fn move_to(&mut self, to: Point) { + self.raw.move_to(to.into()); + } + + /// Draw a straight line from the current point to the given point. + #[inline] + pub fn line_to(&mut self, to: Point) { + self.raw.line_to(to.into()); + } + + /// Draw a curve from the current point to the given point, using the given control point. + #[inline] + pub fn curve_to(&mut self, to: Point, ctrl: Point) { + self.raw.quadratic_bezier_to(ctrl.into(), to.into()); + } + + /// Adds a cubic Bézier to the [`Path`] given its two control points + /// and its end point. + #[inline] + pub fn cubic_bezier_to( + &mut self, + to: Point, + control_a: Point, + control_b: Point, + ) { + self.raw + .cubic_bezier_to(control_a.into(), control_b.into(), to.into()); + } + + /// Close the current sub-path. + #[inline] + pub fn close(&mut self) { + self.raw.close(); + } + + /// Applies a transform to the path. 
+ #[inline] + pub fn transform(&mut self, transform: Transform) { + self.transform = Some(transform); + } + + /// Applies a translation to the path. + #[inline] + pub fn translate(&mut self, to: Point) { + if let Some(transform) = self.transform { + self.transform = Some(transform.then_translate(Vector2D::new(to.x.0, to.y.0))); + } else { + self.transform = Some(Transform::translation(to.x.0, to.y.0)) + } + } + + /// Applies a scale to the path. + #[inline] + pub fn scale(&mut self, scale: f32) { + if let Some(transform) = self.transform { + self.transform = Some(transform.then_scale(scale, scale)); + } else { + self.transform = Some(Transform::scale(scale, scale)); + } + } + + /// Applies a rotation to the path. + /// + /// The `angle` is in degrees value in the range 0.0 to 360.0. + #[inline] + pub fn rotate(&mut self, angle: f32) { + let radians = angle.to_radians(); + if let Some(transform) = self.transform { + self.transform = Some(transform.then_rotate(Angle::radians(radians))); + } else { + self.transform = Some(Transform::rotation(Angle::radians(radians))); + } + } + + /// Builds into a [`Path`]. + #[inline] + pub fn build(self) -> Result, Error> { + let path = if let Some(transform) = self.transform { + self.raw.build().transformed(&transform) + } else { + self.raw.build() + }; + + match self.style { + PathStyle::Stroke(options) => Self::tessellate_stroke(&path, &options), + PathStyle::Fill(options) => Self::tessellate_fill(&path, &options), + } + } + + fn tessellate_fill( + path: &lyon::path::Path, + options: &FillOptions, + ) -> Result, Error> { + // Will contain the result of the tessellation. + let mut buf: VertexBuffers = VertexBuffers::new(); + let mut tessellator = FillTessellator::new(); + + // Compute the tessellation. + tessellator.tessellate_path( + path, + options, + &mut BuffersBuilder::new(&mut buf, |vertex: FillVertex| vertex.position()), + )?; + + Ok(Self::build_path(buf)) + } + + fn tessellate_stroke( + path: &lyon::path::Path, + options: &StrokeOptions, + ) -> Result, Error> { + // Will contain the result of the tessellation. + let mut buf: VertexBuffers = VertexBuffers::new(); + let mut tessellator = StrokeTessellator::new(); + + // Compute the tessellation. + tessellator.tessellate_path( + path, + options, + &mut BuffersBuilder::new(&mut buf, |vertex: StrokeVertex| vertex.position()), + )?; + + Ok(Self::build_path(buf)) + } + + /// Builds a [`Path`] from a [`lyon::VertexBuffers`]. 
+ pub fn build_path(buf: VertexBuffers) -> Path { + if buf.vertices.is_empty() { + return Path::new(Point::default()); + } + + let first_point = buf.vertices[0]; + + let mut path = Path::new(first_point.into()); + for i in 0..buf.indices.len() / 3 { + let i0 = buf.indices[i * 3] as usize; + let i1 = buf.indices[i * 3 + 1] as usize; + let i2 = buf.indices[i * 3 + 2] as usize; + + let v0 = buf.vertices[i0]; + let v1 = buf.vertices[i1]; + let v2 = buf.vertices[i2]; + + path.push_triangle( + (v0.into(), v1.into(), v2.into()), + (point(0., 1.), point(0., 1.), point(0., 1.)), + ); + } + + path + } +} diff --git a/crates/gpui/src/platform/blade/blade_atlas.rs b/crates/gpui/src/platform/blade/blade_atlas.rs index fb703f2a411c078b7e2a52b861868976481e69c9..2783d57127e6c8d5e0b4bf6e2d5978ad1b8ff955 100644 --- a/crates/gpui/src/platform/blade/blade_atlas.rs +++ b/crates/gpui/src/platform/blade/blade_atlas.rs @@ -27,6 +27,7 @@ struct BladeAtlasState { tiles_by_key: FxHashMap, initializations: Vec, uploads: Vec, + path_sample_count: u32, } #[cfg(gles)] @@ -42,10 +43,11 @@ impl BladeAtlasState { pub struct BladeTextureInfo { pub size: gpu::Extent, pub raw_view: gpu::TextureView, + pub msaa_view: Option, } impl BladeAtlas { - pub(crate) fn new(gpu: &Arc) -> Self { + pub(crate) fn new(gpu: &Arc, path_sample_count: u32) -> Self { BladeAtlas(Mutex::new(BladeAtlasState { gpu: Arc::clone(gpu), upload_belt: BufferBelt::new(BufferBeltDescriptor { @@ -57,6 +59,7 @@ impl BladeAtlas { tiles_by_key: Default::default(), initializations: Vec::new(), uploads: Vec::new(), + path_sample_count, })) } @@ -106,6 +109,7 @@ impl BladeAtlas { depth: 1, }, raw_view: texture.raw_view, + msaa_view: texture.msaa_view, } } } @@ -204,6 +208,39 @@ impl BladeAtlasState { } } + // We currently only enable MSAA for path textures. 
+ let (msaa, msaa_view) = if self.path_sample_count > 1 && kind == AtlasTextureKind::Path { + let msaa = self.gpu.create_texture(gpu::TextureDesc { + name: "msaa path texture", + format, + size: gpu::Extent { + width: size.width.into(), + height: size.height.into(), + depth: 1, + }, + array_layer_count: 1, + mip_level_count: 1, + sample_count: self.path_sample_count, + dimension: gpu::TextureDimension::D2, + usage: gpu::TextureUsage::TARGET, + }); + + ( + Some(msaa), + Some(self.gpu.create_texture_view( + msaa, + gpu::TextureViewDesc { + name: "msaa texture view", + format, + dimension: gpu::ViewDimension::D2, + subresources: &Default::default(), + }, + )), + ) + } else { + (None, None) + }; + let raw = self.gpu.create_texture(gpu::TextureDesc { name: "atlas", format, @@ -240,6 +277,8 @@ impl BladeAtlasState { format, raw, raw_view, + msaa, + msaa_view, live_atlas_keys: 0, }; @@ -354,6 +393,8 @@ struct BladeAtlasTexture { allocator: BucketedAtlasAllocator, raw: gpu::Texture, raw_view: gpu::TextureView, + msaa: Option, + msaa_view: Option, format: gpu::TextureFormat, live_atlas_keys: u32, } @@ -381,6 +422,12 @@ impl BladeAtlasTexture { fn destroy(&mut self, gpu: &gpu::Context) { gpu.destroy_texture(self.raw); gpu.destroy_texture_view(self.raw_view); + if let Some(msaa) = self.msaa { + gpu.destroy_texture(msaa); + } + if let Some(msaa_view) = self.msaa_view { + gpu.destroy_texture_view(msaa_view); + } } fn bytes_per_pixel(&self) -> u8 { diff --git a/crates/gpui/src/platform/blade/blade_renderer.rs b/crates/gpui/src/platform/blade/blade_renderer.rs index ee8ffdfda7fa26b6a4edb970c343ac5c0f26d3c7..200ebaaf07fd2b275fbc4b3510df264ecc404949 100644 --- a/crates/gpui/src/platform/blade/blade_renderer.rs +++ b/crates/gpui/src/platform/blade/blade_renderer.rs @@ -7,16 +7,18 @@ use crate::{ MonochromeSprite, Path, PathId, PathVertex, PolychromeSprite, PrimitiveBatch, Quad, ScaledPixels, Scene, Shadow, Size, Underline, }; +use blade_graphics as gpu; +use blade_util::{BufferBelt, BufferBeltDescriptor}; use bytemuck::{Pod, Zeroable}; use collections::HashMap; #[cfg(target_os = "macos")] use media::core_video::CVMetalTextureCache; - -use blade_graphics as gpu; -use blade_util::{BufferBelt, BufferBeltDescriptor}; use std::{mem, sync::Arc}; const MAX_FRAME_TIME_MS: u32 = 10000; +// Use 4x MSAA, all devices support it. 
+// https://developer.apple.com/documentation/metal/mtldevice/1433355-supportstexturesamplecount +const PATH_SAMPLE_COUNT: u32 = 4; #[repr(C)] #[derive(Clone, Copy, Pod, Zeroable)] @@ -208,7 +210,10 @@ impl BladePipelines { blend: Some(gpu::BlendState::ADDITIVE), write_mask: gpu::ColorWrites::default(), }], - multisample_state: gpu::MultisampleState::default(), + multisample_state: gpu::MultisampleState { + sample_count: PATH_SAMPLE_COUNT, + ..Default::default() + }, }), paths: gpu.create_render_pipeline(gpu::RenderPipelineDesc { name: "paths", @@ -348,7 +353,7 @@ impl BladeRenderer { min_chunk_size: 0x1000, alignment: 0x40, // Vulkan `minStorageBufferOffsetAlignment` on Intel Xe }); - let atlas = Arc::new(BladeAtlas::new(&context.gpu)); + let atlas = Arc::new(BladeAtlas::new(&context.gpu, PATH_SAMPLE_COUNT)); let atlas_sampler = context.gpu.create_sampler(gpu::SamplerDesc { name: "atlas", mag_filter: gpu::FilterMode::Linear, @@ -497,27 +502,38 @@ impl BladeRenderer { }; let vertex_buf = unsafe { self.instance_belt.alloc_typed(&vertices, &self.gpu) }; - let mut pass = self.command_encoder.render( + let frame_view = tex_info.raw_view; + let color_target = if let Some(msaa_view) = tex_info.msaa_view { + gpu::RenderTarget { + view: msaa_view, + init_op: gpu::InitOp::Clear(gpu::TextureColor::OpaqueBlack), + finish_op: gpu::FinishOp::ResolveTo(frame_view), + } + } else { + gpu::RenderTarget { + view: frame_view, + init_op: gpu::InitOp::Clear(gpu::TextureColor::OpaqueBlack), + finish_op: gpu::FinishOp::Store, + } + }; + + if let mut pass = self.command_encoder.render( "paths", gpu::RenderTargetSet { - colors: &[gpu::RenderTarget { - view: tex_info.raw_view, - init_op: gpu::InitOp::Clear(gpu::TextureColor::OpaqueBlack), - finish_op: gpu::FinishOp::Store, - }], + colors: &[color_target], depth_stencil: None, }, - ); - - let mut encoder = pass.with(&self.pipelines.path_rasterization); - encoder.bind( - 0, - &ShaderPathRasterizationData { - globals, - b_path_vertices: vertex_buf, - }, - ); - encoder.draw(0, vertices.len() as u32, 0, 1); + ) { + let mut encoder = pass.with(&self.pipelines.path_rasterization); + encoder.bind( + 0, + &ShaderPathRasterizationData { + globals, + b_path_vertices: vertex_buf, + }, + ); + encoder.draw(0, vertices.len() as u32, 0, 1); + } } } diff --git a/crates/gpui/src/platform/mac/metal_atlas.rs b/crates/gpui/src/platform/mac/metal_atlas.rs index ca595c5ce3475356e47ac886f90704ceb6fdecfd..4662761a7d32a26b5b22111f8eb00cc71d2c90c6 100644 --- a/crates/gpui/src/platform/mac/metal_atlas.rs +++ b/crates/gpui/src/platform/mac/metal_atlas.rs @@ -13,13 +13,14 @@ use std::borrow::Cow; pub(crate) struct MetalAtlas(Mutex); impl MetalAtlas { - pub(crate) fn new(device: Device) -> Self { + pub(crate) fn new(device: Device, path_sample_count: u32) -> Self { MetalAtlas(Mutex::new(MetalAtlasState { device: AssertSend(device), monochrome_textures: Default::default(), polychrome_textures: Default::default(), path_textures: Default::default(), tiles_by_key: Default::default(), + path_sample_count, })) } @@ -27,6 +28,10 @@ impl MetalAtlas { self.0.lock().texture(id).metal_texture.clone() } + pub(crate) fn msaa_texture(&self, id: AtlasTextureId) -> Option { + self.0.lock().texture(id).msaa_texture.clone() + } + pub(crate) fn allocate( &self, size: Size, @@ -54,6 +59,7 @@ struct MetalAtlasState { polychrome_textures: AtlasTextureList, path_textures: AtlasTextureList, tiles_by_key: FxHashMap, + path_sample_count: u32, } impl PlatformAtlas for MetalAtlas { @@ -176,6 +182,18 @@ impl MetalAtlasState 
{ texture_descriptor.set_usage(usage); let metal_texture = self.device.new_texture(&texture_descriptor); + // We currently only enable MSAA for path textures. + let msaa_texture = if self.path_sample_count > 1 && kind == AtlasTextureKind::Path { + let mut descriptor = texture_descriptor.clone(); + descriptor.set_texture_type(metal::MTLTextureType::D2Multisample); + descriptor.set_storage_mode(metal::MTLStorageMode::Private); + descriptor.set_sample_count(self.path_sample_count as _); + let msaa_texture = self.device.new_texture(&descriptor); + Some(msaa_texture) + } else { + None + }; + let texture_list = match kind { AtlasTextureKind::Monochrome => &mut self.monochrome_textures, AtlasTextureKind::Polychrome => &mut self.polychrome_textures, @@ -191,6 +209,7 @@ impl MetalAtlasState { }, allocator: etagere::BucketedAtlasAllocator::new(size.into()), metal_texture: AssertSend(metal_texture), + msaa_texture: AssertSend(msaa_texture), live_atlas_keys: 0, }; @@ -217,6 +236,7 @@ struct MetalAtlasTexture { id: AtlasTextureId, allocator: BucketedAtlasAllocator, metal_texture: AssertSend, + msaa_texture: AssertSend>, live_atlas_keys: u32, } diff --git a/crates/gpui/src/platform/mac/metal_renderer.rs b/crates/gpui/src/platform/mac/metal_renderer.rs index c290d12f7e752191f64aa4dbd5f89fd246b10639..56109d2ff6e5cd9f39a02413f6dea26195522cfc 100644 --- a/crates/gpui/src/platform/mac/metal_renderer.rs +++ b/crates/gpui/src/platform/mac/metal_renderer.rs @@ -28,6 +28,9 @@ pub(crate) type PointF = crate::Point; const SHADERS_METALLIB: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib")); #[cfg(feature = "runtime_shaders")] const SHADERS_SOURCE_FILE: &str = include_str!(concat!(env!("OUT_DIR"), "/stitched_shaders.metal")); +// Use 4x MSAA, all devices support it. 
+// https://developer.apple.com/documentation/metal/mtldevice/1433355-supportstexturesamplecount +const PATH_SAMPLE_COUNT: u32 = 4; pub type Context = Arc>; pub type Renderer = MetalRenderer; @@ -170,6 +173,7 @@ impl MetalRenderer { "path_rasterization_vertex", "path_rasterization_fragment", MTLPixelFormat::R16Float, + PATH_SAMPLE_COUNT, ); let path_sprites_pipeline_state = build_pipeline_state( &device, @@ -229,7 +233,7 @@ impl MetalRenderer { ); let command_queue = device.new_command_queue(); - let sprite_atlas = Arc::new(MetalAtlas::new(device.clone())); + let sprite_atlas = Arc::new(MetalAtlas::new(device.clone(), PATH_SAMPLE_COUNT)); let core_video_texture_cache = unsafe { CVMetalTextureCache::new(device.as_ptr()).unwrap() }; @@ -531,10 +535,20 @@ impl MetalRenderer { .unwrap(); let texture = self.sprite_atlas.metal_texture(texture_id); - color_attachment.set_texture(Some(&texture)); - color_attachment.set_load_action(metal::MTLLoadAction::Clear); - color_attachment.set_store_action(metal::MTLStoreAction::Store); + let msaa_texture = self.sprite_atlas.msaa_texture(texture_id); + + if let Some(msaa_texture) = msaa_texture { + color_attachment.set_texture(Some(&msaa_texture)); + color_attachment.set_resolve_texture(Some(&texture)); + color_attachment.set_load_action(metal::MTLLoadAction::Clear); + color_attachment.set_store_action(metal::MTLStoreAction::MultisampleResolve); + } else { + color_attachment.set_texture(Some(&texture)); + color_attachment.set_load_action(metal::MTLLoadAction::Clear); + color_attachment.set_store_action(metal::MTLStoreAction::Store); + } color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.)); + let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor); command_encoder.set_render_pipeline_state(&self.paths_rasterization_pipeline_state); command_encoder.set_vertex_buffer( @@ -1160,6 +1174,7 @@ fn build_path_rasterization_pipeline_state( vertex_fn_name: &str, fragment_fn_name: &str, pixel_format: metal::MTLPixelFormat, + path_sample_count: u32, ) -> metal::RenderPipelineState { let vertex_fn = library .get_function(vertex_fn_name, None) @@ -1172,6 +1187,10 @@ fn build_path_rasterization_pipeline_state( descriptor.set_label(label); descriptor.set_vertex_function(Some(vertex_fn.as_ref())); descriptor.set_fragment_function(Some(fragment_fn.as_ref())); + if path_sample_count > 1 { + descriptor.set_raster_sample_count(path_sample_count as _); + descriptor.set_alpha_to_coverage_enabled(true); + } let color_attachment = descriptor.color_attachments().object_at(0).unwrap(); color_attachment.set_pixel_format(pixel_format); color_attachment.set_blending_enabled(true); diff --git a/crates/gpui/src/scene.rs b/crates/gpui/src/scene.rs index 778a5d1f2734181cf9a7320f347ef041666f05d6..b837f2ad9131b65e9f4db3d7bdc1bc3719fae991 100644 --- a/crates/gpui/src/scene.rs +++ b/crates/gpui/src/scene.rs @@ -715,6 +715,13 @@ impl Path { } } + /// Move the start, current point to the given point. + pub fn move_to(&mut self, to: Point) { + self.contour_count += 1; + self.start = to; + self.current = to; + } + /// Draw a straight line from the current point to the given point. pub fn line_to(&mut self, to: Point) { self.contour_count += 1; @@ -744,7 +751,8 @@ impl Path { self.current = to; } - fn push_triangle( + /// Push a triangle to the Path. 
+ pub fn push_triangle( &mut self, xy: (Point, Point, Point), st: (Point, Point, Point), From 69e6910c9ccef13b0ac5cc083e1a52313847bccf Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Thu, 6 Feb 2025 02:09:24 -0700 Subject: [PATCH 076/130] Add build SHA to panic reports and `zed --version` (on nightly/dev) (#24258) Release Notes: - N/A --- crates/cli/build.rs | 16 +++++++++++ crates/cli/src/main.rs | 6 ++++- crates/remote_server/build.rs | 27 ++++++++----------- crates/remote_server/src/main.rs | 16 ++++++----- crates/remote_server/src/unix.rs | 18 ++++++++----- .../telemetry_events/src/telemetry_events.rs | 3 +++ crates/zed/src/main.rs | 7 +++-- crates/zed/src/reliability.rs | 16 +++++++++-- 8 files changed, 76 insertions(+), 33 deletions(-) diff --git a/crates/cli/build.rs b/crates/cli/build.rs index 54f1f587bc7bc7ce04df86d9aee02daffbeb8f3d..f07d12546a58254edafcb7b269b241785f427bb5 100644 --- a/crates/cli/build.rs +++ b/crates/cli/build.rs @@ -1,3 +1,5 @@ +use std::process::Command; + fn main() { if std::env::var("ZED_UPDATE_EXPLANATION").is_ok() { println!(r#"cargo:rustc-cfg=feature="no-bundled-uninstall""#); @@ -8,4 +10,18 @@ fn main() { // Weakly link ScreenCaptureKit to ensure can be used on macOS 10.15+. println!("cargo:rustc-link-arg=-Wl,-weak_framework,ScreenCaptureKit"); } + + // Populate git sha environment variable if git is available + println!("cargo:rerun-if-changed=../../.git/logs/HEAD"); + if let Some(output) = Command::new("git") + .args(["rev-parse", "HEAD"]) + .output() + .ok() + .filter(|output| output.status.success()) + { + let git_sha = String::from_utf8_lossy(&output.stdout); + let git_sha = git_sha.trim(); + + println!("cargo:rustc-env=ZED_COMMIT_SHA={git_sha}"); + } } diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs index b3436bc4e5310f83b1e05c06859f5d2eaa50e3ca..4698cee157fd1bc94679429d399b1a964d968639 100644 --- a/crates/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -339,13 +339,17 @@ mod linux { impl InstalledApp for App { fn zed_version_string(&self) -> String { format!( - "Zed {}{} – {}", + "Zed {}{}{} – {}", if *RELEASE_CHANNEL == "stable" { "".to_string() } else { format!("{} ", *RELEASE_CHANNEL) }, option_env!("RELEASE_VERSION").unwrap_or_default(), + match option_env!("ZED_COMMIT_SHA") { + Some(commit_sha) => format!(" {commit_sha} "), + None => "".to_string(), + }, self.0.display(), ) } diff --git a/crates/remote_server/build.rs b/crates/remote_server/build.rs index 34abdacef70363e6205021927ec3f0f9a58c27d8..0573d9b666061027a60319bacb59dca60a2f03c8 100644 --- a/crates/remote_server/build.rs +++ b/crates/remote_server/build.rs @@ -14,22 +14,17 @@ fn main() { std::env::var("TARGET").unwrap() ); - // If we're building this for nightly, we want to set the ZED_COMMIT_SHA - if let Some(release_channel) = std::env::var("ZED_RELEASE_CHANNEL").ok() { - if release_channel.as_str() == "nightly" { - // Populate git sha environment variable if git is available - println!("cargo:rerun-if-changed=../../.git/logs/HEAD"); - if let Some(output) = Command::new("git") - .args(["rev-parse", "HEAD"]) - .output() - .ok() - .filter(|output| output.status.success()) - { - let git_sha = String::from_utf8_lossy(&output.stdout); - let git_sha = git_sha.trim(); + // Populate git sha environment variable if git is available + println!("cargo:rerun-if-changed=../../.git/logs/HEAD"); + if let Some(output) = Command::new("git") + .args(["rev-parse", "HEAD"]) + .output() + .ok() + .filter(|output| output.status.success()) + { + let git_sha = 
String::from_utf8_lossy(&output.stdout); + let git_sha = git_sha.trim(); - println!("cargo:rustc-env=ZED_COMMIT_SHA={git_sha}"); - } - } + println!("cargo:rustc-env=ZED_COMMIT_SHA={git_sha}"); } } diff --git a/crates/remote_server/src/main.rs b/crates/remote_server/src/main.rs index 132bd36b7b2c6d7744cc4ac75b29ae8572094f44..080b7e1af062f097048b6215d7b351c720f7ed1b 100644 --- a/crates/remote_server/src/main.rs +++ b/crates/remote_server/src/main.rs @@ -40,6 +40,7 @@ fn main() { #[cfg(not(windows))] fn main() { + use release_channel::{ReleaseChannel, RELEASE_CHANNEL}; use remote::proxy::ProxyLaunchError; use remote_server::unix::{execute_proxy, execute_run}; @@ -72,12 +73,15 @@ fn main() { } }, Some(Commands::Version) => { - if let Some(build_sha) = option_env!("ZED_COMMIT_SHA") { - println!("{}", build_sha); - } else { - println!("{}", env!("ZED_PKG_VERSION")); - } - + let release_channel = *RELEASE_CHANNEL; + match release_channel { + ReleaseChannel::Stable | ReleaseChannel::Preview => { + println!("{}", env!("ZED_PKG_VERSION")) + } + ReleaseChannel::Nightly | ReleaseChannel::Dev => { + println!("{}", env!("ZED_COMMIT_SHA")) + } + }; std::process::exit(0); } None => { diff --git a/crates/remote_server/src/unix.rs b/crates/remote_server/src/unix.rs index 10ac263cc4e06818744ab1eee7b7c36d365a9b7d..71a770908abe7869c516c0da67567db80283ff66 100644 --- a/crates/remote_server/src/unix.rs +++ b/crates/remote_server/src/unix.rs @@ -16,7 +16,7 @@ use node_runtime::{NodeBinaryOptions, NodeRuntime}; use paths::logs_dir; use project::project_settings::ProjectSettings; -use release_channel::AppVersion; +use release_channel::{AppVersion, ReleaseChannel, RELEASE_CHANNEL}; use remote::proxy::ProxyLaunchError; use remote::ssh_session::ChannelClient; use remote::{ @@ -149,6 +149,14 @@ fn init_panic_hook() { (&backtrace).join("\n") ); + let release_channel = *RELEASE_CHANNEL; + let version = match release_channel { + ReleaseChannel::Stable | ReleaseChannel::Preview => env!("ZED_PKG_VERSION"), + ReleaseChannel::Nightly | ReleaseChannel::Dev => { + option_env!("ZED_COMMIT_SHA").unwrap_or("missing-zed-commit-sha") + } + }; + let panic_data = telemetry_events::Panic { thread: thread_name.into(), payload: payload.clone(), @@ -156,11 +164,9 @@ fn init_panic_hook() { file: location.file().into(), line: location.line(), }), - app_version: format!( - "remote-server-{}", - option_env!("ZED_COMMIT_SHA").unwrap_or(&env!("ZED_PKG_VERSION")) - ), - release_channel: release_channel::RELEASE_CHANNEL.display_name().into(), + app_version: format!("remote-server-{version}"), + app_commit_sha: option_env!("ZED_COMMIT_SHA").map(|sha| sha.into()), + release_channel: release_channel.display_name().into(), target: env!("TARGET").to_owned().into(), os_name: telemetry::os_name(), os_version: Some(telemetry::os_version()), diff --git a/crates/telemetry_events/src/telemetry_events.rs b/crates/telemetry_events/src/telemetry_events.rs index a39c514ced7a3cd83abb9ed6c42093c36f2b9762..81106b89da9ed06d664c715844562e01894582c3 100644 --- a/crates/telemetry_events/src/telemetry_events.rs +++ b/crates/telemetry_events/src/telemetry_events.rs @@ -267,6 +267,9 @@ pub struct Panic { pub backtrace: Vec, /// Zed version number pub app_version: String, + /// The Git commit SHA that Zed was built at. 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub app_commit_sha: Option, /// Zed release channel (stable, preview, dev) pub release_channel: String, pub target: Option, diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index ce4edc73b22833c24d8072edb0413225e808c7ed..019af54c541a0d42b90de72c19af624b9d7c26ed 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -188,9 +188,12 @@ fn main() { let session_id = Uuid::new_v4().to_string(); let session = app.background_executor().block(Session::new()); let app_version = AppVersion::init(env!("CARGO_PKG_VERSION")); + let app_commit_sha = + option_env!("ZED_COMMIT_SHA").map(|commit_sha| AppCommitSha(commit_sha.to_string())); reliability::init_panic_hook( app_version, + app_commit_sha.clone(), system_id.as_ref().map(|id| id.to_string()), installation_id.as_ref().map(|id| id.to_string()), session_id.clone(), @@ -281,8 +284,8 @@ fn main() { app.run(move |cx| { release_channel::init(app_version, cx); gpui_tokio::init(cx); - if let Some(build_sha) = option_env!("ZED_COMMIT_SHA") { - AppCommitSha::set_global(AppCommitSha(build_sha.into()), cx); + if let Some(app_commit_sha) = app_commit_sha { + AppCommitSha::set_global(app_commit_sha, cx); } settings::init(cx); handle_settings_file_changes(user_settings_file_rx, cx, handle_settings_changed); diff --git a/crates/zed/src/reliability.rs b/crates/zed/src/reliability.rs index 6c6f425654313dcd91f16fec78622cae366c54cd..dfd1ba8064ada68d0cf53bc1ef3e8c1c407f85a2 100644 --- a/crates/zed/src/reliability.rs +++ b/crates/zed/src/reliability.rs @@ -8,7 +8,7 @@ use gpui::{App, SemanticVersion}; use http_client::{self, HttpClient, HttpClientWithUrl, HttpRequestExt, Method}; use paths::{crashes_dir, crashes_retired_dir}; use project::Project; -use release_channel::{ReleaseChannel, RELEASE_CHANNEL}; +use release_channel::{AppCommitSha, ReleaseChannel, RELEASE_CHANNEL}; use settings::Settings; use smol::stream::StreamExt; use std::{ @@ -25,6 +25,7 @@ static PANIC_COUNT: AtomicU32 = AtomicU32::new(0); pub fn init_panic_hook( app_version: SemanticVersion, + app_commit_sha: Option, system_id: Option, installation_id: Option, session_id: String, @@ -54,12 +55,22 @@ pub fn init_panic_hook( let location = info.location().unwrap(); let backtrace = Backtrace::new(); eprintln!( - "Thread {:?} panicked with {:?} at {}:{}:{}\n{:?}", + "Thread {:?} panicked with {:?} at {}:{}:{}\n{}{:?}", thread_name, payload, location.file(), location.line(), location.column(), + match app_commit_sha.as_ref() { + Some(commit_sha) => format!( + "https://github.com/zed-industries/zed/blob/{}/src/{}#L{} \ + (may not be uploaded, line may be incorrect if files modified)\n", + commit_sha.0, + location.file(), + location.line() + ), + None => "".to_string(), + }, backtrace, ); std::process::exit(-1); @@ -103,6 +114,7 @@ pub fn init_panic_hook( line: location.line(), }), app_version: app_version.to_string(), + app_commit_sha: app_commit_sha.as_ref().map(|sha| sha.0.clone()), release_channel: RELEASE_CHANNEL.dev_name().into(), target: env!("TARGET").to_owned().into(), os_name: telemetry::os_name(), From d459cd517eb940f020b3cae384d409b5db5bd7b4 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Thu, 6 Feb 2025 11:53:25 +0200 Subject: [PATCH 077/130] Alter Windows CI disk limits (#24368) An attempt to fix https://github.com/zed-industries/zed/actions/runs/13174780143/job/36771552892 Release Notes: - N/A --- .github/workflows/ci.yml | 3 ++- script/setup-dev-driver.ps1 | 6 +++--- 2 files changed, 5 insertions(+), 4 deletions(-) 
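An aside on the `ZED_COMMIT_SHA` plumbing in the build-SHA patch above: stripped of the Zed specifics, the pattern is a build script that exports an environment variable to rustc, plus `option_env!` with a fallback on the consumer side. The following is a minimal, hedged sketch with illustrative names, not the exact Zed code.

```rust
// Minimal sketch of the build-time commit SHA pattern; names are illustrative.
//
// In build.rs (not compiled as part of this snippet), the SHA is exported to rustc
// roughly like this:
//
//     let output = std::process::Command::new("git").args(["rev-parse", "HEAD"]).output()?;
//     println!("cargo:rustc-env=ZED_COMMIT_SHA={}", String::from_utf8_lossy(&output.stdout).trim());
//
// On the consumer side, `option_env!` is evaluated at compile time and yields `None`
// when the variable was not set (for example, when git was unavailable), so a
// fallback to the package version keeps the build working.
fn version_string() -> String {
    match option_env!("ZED_COMMIT_SHA") {
        Some(sha) => format!("{} ({sha})", env!("CARGO_PKG_VERSION")),
        None => env!("CARGO_PKG_VERSION").to_string(),
    }
}

fn main() {
    println!("{}", version_string());
}
```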
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f20495baeb423f47021c1cc8fa6e5fd510bbe680..aa6145c7f29eccd6813cf20ec9e2d7ef106b0252 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -279,7 +279,8 @@ jobs: - name: Check dev drive space working-directory: ${{ env.ZED_WORKSPACE }} - run: ./script/exit-ci-if-dev-drive-is-full.ps1 55 + # `setup-dev-driver.ps1` creates a 100GB drive, with CI taking up ~45GB of the drive. + run: ./script/exit-ci-if-dev-drive-is-full.ps1 95 # Since the Windows runners are stateful, so we need to remove the config file to prevent potential bug. - name: Clean CI config file diff --git a/script/setup-dev-driver.ps1 b/script/setup-dev-driver.ps1 index 51aa17c2678ac93879a409ecd52004520561fc85..2302ecfbfe3e6f9e74678f936a3848e186edd106 100644 --- a/script/setup-dev-driver.ps1 +++ b/script/setup-dev-driver.ps1 @@ -3,8 +3,8 @@ # The current version of the Windows runner is 10.0.20348 which does not support DevDrive option. # Ref: https://learn.microsoft.com/en-us/windows/dev-drive/ -# Currently, total CI requires almost 45GB of space, here we are creating a 60GB drive. -$Volume = New-VHD -Path C:/zed_dev_drive.vhdx -SizeBytes 60GB | +# Currently, total CI requires almost 45GB of space, here we are creating a 100GB drive. +$Volume = New-VHD -Path C:/zed_dev_drive.vhdx -SizeBytes 100GB | Mount-VHD -Passthru | Initialize-Disk -Passthru | New-Partition -AssignDriveLetter -UseMaximumSize | @@ -15,7 +15,7 @@ $Drive = "$($Volume.DriveLetter):" # Show some debug information Write-Output $Volume Write-Output "Using Dev Drive at $Drive" - + # Move Cargo to the dev drive New-Item -Path "$($Drive)/.cargo/bin" -ItemType Directory -Force Copy-Item -Path "C:/Users/runneradmin/.cargo/*" -Destination "$($Drive)/.cargo/" -Recurse -Force From b4d8b1be3f1eb2d1eea4bd9585affbd437986de8 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Thu, 6 Feb 2025 12:12:22 +0200 Subject: [PATCH 078/130] Preserve Wrangler logs during docs deployment CI runs (#24371) Adds a log collection step to debug errors like https://github.com/zed-industries/zed/actions/runs/13175284280/job/36773129216#step:8:29 During testing though, the CI had passed, so 500 seems to be unrelated to Zed changes: https://github.com/zed-industries/zed/actions/runs/13175800537/job/36774702686 Release Notes: - N/A --- .github/workflows/deploy_cloudflare.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.github/workflows/deploy_cloudflare.yml b/.github/workflows/deploy_cloudflare.yml index 284c10915eb57a8e81d32e9c9147df176055b7b0..b48b4c9a7af01ead14bc05bd053fbaab7ac9adfe 100644 --- a/.github/workflows/deploy_cloudflare.yml +++ b/.github/workflows/deploy_cloudflare.yml @@ -63,3 +63,10 @@ jobs: apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} command: deploy .cloudflare/docs-proxy/src/worker.js + + - name: Preserve Wrangler logs + uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4 + if: always() + with: + name: wrangler_logs + path: /home/runner/.config/.wrangler/logs/ From 8ed8b4d2ec87f299e6cf1ed536a8960720295205 Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Thu, 6 Feb 2025 09:58:19 -0300 Subject: [PATCH 079/130] edit predictions: Preview while holding modifier mode (#24316) This PR adds a new `inline_completions.inline_preview` config which can be set to `auto` (current behavior) or to `when_holding_modifier`. 
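For concreteness, here is a minimal sketch of how those two string values map onto the enum this patch adds, using illustrative, self-contained types rather than the exact Zed definitions (assumes `serde` with the derive feature and `serde_json`):

```rust
// Illustrative sketch only — mirrors the `InlineCompletionPreviewMode` enum added in
// `language_settings.rs` in this patch, but with local names.
use serde::Deserialize;

#[derive(Debug, Default, PartialEq, Deserialize)]
#[serde(rename_all = "snake_case")]
enum PreviewMode {
    // "inline_preview": "auto" — the current behavior.
    #[default]
    Auto,
    // "inline_preview": "when_holding_modifier" — preview only while the modifier is held.
    WhenHoldingModifier,
}

fn main() {
    // The raw JSON value a user would put in settings.json under
    // "inline_completions": { "inline_preview": ... }.
    let mode: PreviewMode = serde_json::from_str("\"when_holding_modifier\"").unwrap();
    assert_eq!(mode, PreviewMode::WhenHoldingModifier);
}
```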
When set to the latter, instead of showing edit prediction previews inline in the buffer, we'll show it in a popover (even when there's no LSP completion) so your isn't constantly moving as completions arrive. https://github.com/user-attachments/assets/3615d151-3633-4ee4-98b9-66ee0aa735b8 Release Notes: - N/A --------- Co-authored-by: Danilo --- assets/settings/default.json | 9 +- .../src/context_editor.rs | 2 +- crates/editor/src/code_context_menus.rs | 13 +-- crates/editor/src/editor.rs | 91 +++++++++++-------- crates/editor/src/element.rs | 14 ++- crates/editor/src/signature_help.rs | 2 +- crates/language/src/language.rs | 1 + crates/language/src/language_settings.rs | 41 +++++++-- 8 files changed, 113 insertions(+), 60 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 732ee3e0abe03f3eb88e19484f96df7db0f348fa..f67f2fa965ef4fdb5b54ab9d7ce63227e8189766 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -783,7 +783,14 @@ "**/*.cert", "**/*.crt", "**/secrets.yml" - ] + ], + // When to show edit predictions previews in buffer. + // This setting takes two possible values: + // 1. Display inline when there are no language server completions available. + // "inline_preview": "auto" + // 2. Display inline when holding modifier key (alt by default). + // "inline_preview": "when_holding_modifier" + "inline_preview": "auto" }, // Settings specific to journaling "journal": { diff --git a/crates/assistant_context_editor/src/context_editor.rs b/crates/assistant_context_editor/src/context_editor.rs index 86febdecc4dc6acb823a587ba3327e18e0d70aa4..d1b9fbbda725ea3a77bdd00b91146945840d4bbf 100644 --- a/crates/assistant_context_editor/src/context_editor.rs +++ b/crates/assistant_context_editor/src/context_editor.rs @@ -459,7 +459,7 @@ impl ContextEditor { window: &mut Window, cx: &mut Context, ) { - if self.editor.read(cx).has_active_completions_menu() { + if self.editor.read(cx).has_visible_completions_menu() { return; } diff --git a/crates/editor/src/code_context_menus.rs b/crates/editor/src/code_context_menus.rs index a6f0e5c0f06ccc21eab301b7f8d0fae011c21ea3..f2752fb53262ed0d3ba089727d28fe5d32c4714b 100644 --- a/crates/editor/src/code_context_menus.rs +++ b/crates/editor/src/code_context_menus.rs @@ -169,7 +169,6 @@ pub struct CompletionsMenu { resolve_completions: bool, show_completion_documentation: bool, last_rendered_range: Rc>>>, - pub previewing_inline_completion: bool, } impl CompletionsMenu { @@ -200,7 +199,6 @@ impl CompletionsMenu { scroll_handle: UniformListScrollHandle::new(), resolve_completions: true, last_rendered_range: RefCell::new(None).into(), - previewing_inline_completion: false, } } @@ -257,7 +255,6 @@ impl CompletionsMenu { resolve_completions: false, show_completion_documentation: false, last_rendered_range: RefCell::new(None).into(), - previewing_inline_completion: false, } } @@ -410,12 +407,8 @@ impl CompletionsMenu { .detach(); } - pub fn is_empty(&self) -> bool { - self.entries.borrow().is_empty() - } - pub fn visible(&self) -> bool { - !self.is_empty() && !self.previewing_inline_completion + !self.entries.borrow().is_empty() } fn origin(&self) -> ContextMenuOrigin { @@ -709,10 +702,6 @@ impl CompletionsMenu { // This keeps the display consistent when y_flipped. 
self.scroll_handle.scroll_to_item(0, ScrollStrategy::Top); } - - pub fn set_previewing_inline_completion(&mut self, value: bool) { - self.previewing_inline_completion = value; - } } #[derive(Clone)] diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 170371164f532e8c069eaa87615ffc9f19ed804b..9fa54c5f065d8b8fd14063a26f0f0d7d8370d009 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -97,8 +97,8 @@ use language::{ language_settings::{self, all_language_settings, language_settings, InlayHintSettings}, markdown, point_from_lsp, AutoindentMode, BracketPair, Buffer, Capability, CharKind, CodeLabel, CompletionDocumentation, CursorShape, Diagnostic, EditPreview, HighlightedText, IndentKind, - IndentSize, Language, OffsetRangeExt, Point, Selection, SelectionGoal, TextObject, - TransactionId, TreeSitterOptions, + IndentSize, InlineCompletionPreviewMode, Language, OffsetRangeExt, Point, Selection, + SelectionGoal, TextObject, TransactionId, TreeSitterOptions, }; use language::{point_to_lsp, BufferRow, CharClassifier, Runnable, RunnableRange}; use linked_editing_ranges::refresh_linked_ranges; @@ -693,6 +693,7 @@ pub struct Editor { show_inline_completions: bool, show_inline_completions_override: Option, menu_inline_completions_policy: MenuInlineCompletionsPolicy, + previewing_inline_completion: bool, inlay_hint_cache: InlayHintCache, next_inlay_id: usize, _subscriptions: Vec, @@ -1384,6 +1385,7 @@ impl Editor { inline_completion_provider: None, active_inline_completion: None, stale_inline_completion_in_menu: None, + previewing_inline_completion: false, inlay_hint_cache: InlayHintCache::new(inlay_hint_settings), gutter_hovered: false, @@ -4662,6 +4664,18 @@ impl Editor { } } + fn inline_completion_preview_mode(&self, cx: &App) -> language::InlineCompletionPreviewMode { + let cursor = self.selections.newest_anchor().head(); + + self.buffer + .read(cx) + .text_anchor_for_position(cursor, cx) + .map(|(buffer, _)| { + all_language_settings(buffer.read(cx).file(), cx).inline_completions_preview_mode() + }) + .unwrap_or_default() + } + fn should_show_inline_completions_in_buffer( &self, buffer: &Entity, @@ -5009,11 +5023,28 @@ impl Editor { true } - pub fn is_previewing_inline_completion(&self) -> bool { - matches!( - self.context_menu.borrow().as_ref(), - Some(CodeContextMenu::Completions(menu)) if !menu.is_empty() && menu.previewing_inline_completion - ) + /// Returns true when we're displaying the inline completion popover below the cursor + /// like we are not previewing and the LSP autocomplete menu is visible + /// or we are in `when_holding_modifier` mode. 
+ pub fn inline_completion_visible_in_cursor_popover( + &self, + has_completion: bool, + cx: &App, + ) -> bool { + if self.previewing_inline_completion + || !self.show_inline_completions_in_menu(cx) + || !self.should_show_inline_completions(cx) + { + return false; + } + + if self.has_visible_completions_menu() { + return true; + } + + has_completion + && self.inline_completion_preview_mode(cx) + == InlineCompletionPreviewMode::WhenHoldingModifier } fn update_inline_completion_preview( @@ -5022,13 +5053,13 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - // Moves jump directly with a preview step - + // Moves jump directly without a preview step if self .active_inline_completion .as_ref() .map_or(true, |c| c.is_move()) { + self.previewing_inline_completion = false; cx.notify(); return; } @@ -5037,20 +5068,7 @@ impl Editor { return; } - let mut menu_borrow = self.context_menu.borrow_mut(); - - let Some(CodeContextMenu::Completions(completions_menu)) = menu_borrow.as_mut() else { - return; - }; - - if completions_menu.is_empty() - || completions_menu.previewing_inline_completion == modifiers.alt - { - return; - } - - completions_menu.set_previewing_inline_completion(modifiers.alt); - drop(menu_borrow); + self.previewing_inline_completion = modifiers.alt; self.update_visible_inline_completion(window, cx); } @@ -5146,7 +5164,7 @@ impl Editor { snapshot, } } else { - if !show_in_menu || !self.has_active_completions_menu() { + if !self.inline_completion_visible_in_cursor_popover(true, cx) { if edits .iter() .all(|(range, _)| range.to_offset(&multibuffer).is_empty()) @@ -5180,7 +5198,7 @@ impl Editor { let display_mode = if all_edits_insertions_or_deletions(&edits, &multibuffer) { if provider.show_tab_accept_marker() { - EditDisplayMode::TabAccept(self.is_previewing_inline_completion()) + EditDisplayMode::TabAccept(self.previewing_inline_completion) } else { EditDisplayMode::Inline } @@ -5443,10 +5461,12 @@ impl Editor { } pub fn context_menu_visible(&self) -> bool { - self.context_menu - .borrow() - .as_ref() - .map_or(false, |menu| menu.visible()) + !self.previewing_inline_completion + && self + .context_menu + .borrow() + .as_ref() + .map_or(false, |menu| menu.visible()) } fn context_menu_origin(&self) -> Option { @@ -5848,9 +5868,7 @@ impl Editor { self.completion_tasks.clear(); let context_menu = self.context_menu.borrow_mut().take(); self.stale_inline_completion_in_menu.take(); - if context_menu.is_some() { - self.update_visible_inline_completion(window, cx); - } + self.update_visible_inline_completion(window, cx); context_menu } @@ -14438,10 +14456,11 @@ impl Editor { Some(gpui::Point::new(source_x, source_y)) } - pub fn has_active_completions_menu(&self) -> bool { - self.context_menu.borrow().as_ref().map_or(false, |menu| { - menu.visible() && matches!(menu, CodeContextMenu::Completions(_)) - }) + pub fn has_visible_completions_menu(&self) -> bool { + !self.previewing_inline_completion + && self.context_menu.borrow().as_ref().map_or(false, |menu| { + menu.visible() && matches!(menu, CodeContextMenu::Completions(_)) + }) } pub fn register_addon(&mut self, instance: T) { diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 32c2d4a5de2d4ecf6cceb2149cbe1f6f7356501c..3fce0fa36c904dd91058706c2406c97de111ae20 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -3109,7 +3109,10 @@ impl EditorElement { { let editor = self.editor.read(cx); - if editor.has_active_completions_menu() && editor.show_inline_completions_in_menu(cx) { + if 
editor.inline_completion_visible_in_cursor_popover( + editor.has_active_inline_completion(), + cx, + ) { height_above_menu += editor.edit_prediction_cursor_popover_height() + POPOVER_Y_PADDING; edit_prediction_popover_visible = true; @@ -3615,7 +3618,12 @@ impl EditorElement { const PADDING_X: Pixels = Pixels(24.); const PADDING_Y: Pixels = Pixels(2.); - let active_inline_completion = self.editor.read(cx).active_inline_completion.as_ref()?; + let editor = self.editor.read(cx); + let active_inline_completion = editor.active_inline_completion.as_ref()?; + + if editor.inline_completion_visible_in_cursor_popover(true, cx) { + return None; + } match &active_inline_completion.completion { InlineCompletion::Move { target, .. } => { @@ -3682,7 +3690,7 @@ impl EditorElement { display_mode, snapshot, } => { - if self.editor.read(cx).has_active_completions_menu() { + if self.editor.read(cx).has_visible_completions_menu() { return None; } diff --git a/crates/editor/src/signature_help.rs b/crates/editor/src/signature_help.rs index 7874df967c8f4f7087b6b54e4bfcd00cc161f186..f2e479f877ef353d8ae87efea46d4a43b8c2ae2a 100644 --- a/crates/editor/src/signature_help.rs +++ b/crates/editor/src/signature_help.rs @@ -158,7 +158,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - if self.pending_rename.is_some() || self.has_active_completions_menu() { + if self.pending_rename.is_some() || self.has_visible_completions_menu() { return; } diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index beef29355809a03671913bc795601b51573c155d..f7aa838c7f096bb3ecf30a70e411ff53c32a5533 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -21,6 +21,7 @@ mod toolchain; pub mod buffer_tests; pub mod markdown; +pub use crate::language_settings::InlineCompletionPreviewMode; use crate::language_settings::SoftWrap; use anyhow::{anyhow, Context as _, Result}; use async_trait::async_trait; diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index ac57e566f4c36aabf505101843ffa35e4342c304..3f5d5ef8a2761cf0d5fb92f2cd4a54f4c4a8f484 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -214,6 +214,19 @@ pub struct InlineCompletionSettings { pub provider: InlineCompletionProvider, /// A list of globs representing files that edit predictions should be disabled for. pub disabled_globs: Vec, + /// When to show edit predictions previews in buffer. + pub inline_preview: InlineCompletionPreviewMode, +} + +/// The mode in which edit predictions should be displayed. +#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub enum InlineCompletionPreviewMode { + /// Display inline when there are no language server completions available. + #[default] + Auto, + /// Display inline when holding modifier key (alt by default). + WhenHoldingModifier, } /// The settings for all languages. @@ -406,6 +419,9 @@ pub struct InlineCompletionSettingsContent { /// A list of globs representing files that edit predictions should be disabled for. #[serde(default)] pub disabled_globs: Option>, + /// When to show edit predictions previews in buffer. + #[serde(default)] + pub inline_preview: InlineCompletionPreviewMode, } /// The settings for enabling/disabling features. 
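As a brief aside between these hunks: the `load` changes in the following hunks resolve the effective preview mode by starting from the packaged default and letting user settings override it. A minimal sketch of that resolution shape, with hypothetical types rather than the real settings structs:

```rust
// Hedged sketch of the defaults-then-user-override resolution; types are hypothetical.
#[derive(Clone, Copy, Debug, PartialEq)]
enum PreviewMode {
    Auto,
    WhenHoldingModifier,
}

// User settings may or may not specify the field; the packaged default fills the gap.
struct UserInlineCompletions {
    inline_preview: Option<PreviewMode>,
}

fn resolve(default_mode: PreviewMode, user: Option<&UserInlineCompletions>) -> PreviewMode {
    user.and_then(|u| u.inline_preview).unwrap_or(default_mode)
}

fn main() {
    let user = UserInlineCompletions {
        inline_preview: Some(PreviewMode::WhenHoldingModifier),
    };
    assert_eq!(resolve(PreviewMode::Auto, Some(&user)), PreviewMode::WhenHoldingModifier);
    assert_eq!(resolve(PreviewMode::Auto, None), PreviewMode::Auto);
}
```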
@@ -890,6 +906,11 @@ impl AllLanguageSettings { self.language(None, language.map(|l| l.name()).as_ref(), cx) .show_inline_completions } + + /// Returns the edit predictions preview mode for the given language and path. + pub fn inline_completions_preview_mode(&self) -> InlineCompletionPreviewMode { + self.inline_completions.inline_preview + } } fn merge_with_editorconfig(settings: &mut LanguageSettings, cfg: &EditorconfigProperties) { @@ -987,6 +1008,12 @@ impl settings::Settings for AllLanguageSettings { .features .as_ref() .and_then(|f| f.inline_completion_provider); + let mut inline_completions_preview = default_value + .inline_completions + .as_ref() + .map(|inline_completions| inline_completions.inline_preview) + .ok_or_else(Self::missing_default)?; + let mut completion_globs: HashSet<&String> = default_value .inline_completions .as_ref() @@ -1017,12 +1044,13 @@ impl settings::Settings for AllLanguageSettings { { inline_completion_provider = Some(provider); } - if let Some(globs) = user_settings - .inline_completions - .as_ref() - .and_then(|f| f.disabled_globs.as_ref()) - { - completion_globs.extend(globs.iter()); + + if let Some(inline_completions) = user_settings.inline_completions.as_ref() { + inline_completions_preview = inline_completions.inline_preview; + + if let Some(disabled_globs) = inline_completions.disabled_globs.as_ref() { + completion_globs.extend(disabled_globs.iter()); + } } // A user's global settings override the default global settings and @@ -1075,6 +1103,7 @@ impl settings::Settings for AllLanguageSettings { .iter() .filter_map(|g| Some(globset::Glob::new(g).ok()?.compile_matcher())) .collect(), + inline_preview: inline_completions_preview, }, defaults, languages, From 35886e38e58ab2a69d3f41a45c9535ad95dcf8e8 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 6 Feb 2025 11:00:09 -0300 Subject: [PATCH 080/130] edit prediction: Add minor UI tweaks to the preview bar (#24174) Just little nudges of spacing, alignment, and treatment for overflowing content. Release Notes: - N/A --- crates/editor/src/editor.rs | 66 ++++++++++++++++++++++--------------- 1 file changed, 40 insertions(+), 26 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 9fa54c5f065d8b8fd14063a26f0f0d7d8370d009..54ba81cf610932f32263db84ba0bcb57333ed5bf 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -5477,7 +5477,7 @@ impl Editor { } fn edit_prediction_cursor_popover_height(&self) -> Pixels { - px(32.) + px(30.) 
} fn current_user_player_color(&self, cx: &mut App) -> PlayerColor { @@ -5531,7 +5531,11 @@ impl Editor { .child(Icon::new(IconName::ZedPredict)) .child(Label::new("Accept Terms of Service")) .child(div().w_full()) - .child(Icon::new(IconName::ArrowUpRight)) + .child( + Icon::new(IconName::ArrowUpRight) + .color(Color::Muted) + .size(IconSize::Small), + ) .into_any_element(), ) .into_any(), @@ -5542,8 +5546,9 @@ impl Editor { fn pending_completion_container() -> Div { h_flex() + .h_full() .flex_1() - .gap_3() + .gap_2() .child(Icon::new(IconName::ZedPredict)) } @@ -5599,6 +5604,14 @@ impl Editor { .as_ref() .map_or(false, |c| c.is_move()); + let modifier_color = if !has_completion { + Color::Muted + } else if window.modifiers() == accept_keystroke.modifiers { + Color::Accent + } else { + Color::Default + }; + Some( h_flex() .h(self.edit_prediction_cursor_popover_height()) @@ -5606,39 +5619,31 @@ impl Editor { .max_w(max_width) .flex_1() .px_2() - .gap_3() .elevation_2(cx) .child(completion) + .child(ui::Divider::vertical()) .child( h_flex() - .border_l_1() - .border_color(cx.theme().colors().border_variant) + .h_full() + .gap_1() .pl_2() - .child( - h_flex() - .font(buffer_font.clone()) - .p_1() - .rounded_sm() - .children(ui::render_modifiers( - &accept_keystroke.modifiers, - PlatformStyle::platform(), - if window.modifiers() == accept_keystroke.modifiers { - Some(Color::Accent) - } else { - None - }, - !is_move, - )), - ) - .opacity(if has_completion { 1.0 } else { 0.1 }) + .child(h_flex().font(buffer_font.clone()).gap_1().children( + ui::render_modifiers( + &accept_keystroke.modifiers, + PlatformStyle::platform(), + Some(modifier_color), + !is_move, + ), + )) .child(if is_move { div() .child(ui::Key::new(&accept_keystroke.key, None)) .font(buffer_font.clone()) .into_any() } else { - Label::new("Preview").color(Color::Muted).into_any_element() - }), + Label::new("Preview").into_any_element() + }) + .opacity(if has_completion { 1.0 } else { 0.4 }), ) .into_any(), ) @@ -5737,7 +5742,16 @@ impl Editor { Icon::new(IconName::ZedPredict).into_any_element() }; - Some(h_flex().flex_1().gap_3().child(left).child(preview)) + Some( + h_flex() + .h_full() + .flex_1() + .gap_2() + .pr_1() + .overflow_x_hidden() + .child(left) + .child(preview), + ) } InlineCompletion::Move { From 592642fbfcb91396b55524d677282626438d69de Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 6 Feb 2025 11:28:25 -0300 Subject: [PATCH 081/130] edit predictions: Tweak status bar menu copywriting (#24376) Just fine-tuning words on items of the status bar menu. 
Release Notes: - N/A --- .../src/inline_completion_button.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/inline_completion_button/src/inline_completion_button.rs b/crates/inline_completion_button/src/inline_completion_button.rs index 447141864688bf5757737e450c778fa508181ee8..c6a616dc1af71cd2f9e070c3ef12e081b9e35a9f 100644 --- a/crates/inline_completion_button/src/inline_completion_button.rs +++ b/crates/inline_completion_button/src/inline_completion_button.rs @@ -387,7 +387,7 @@ impl InlineCompletionButton { pub fn build_language_settings_menu(&self, mut menu: ContextMenu, cx: &mut App) -> ContextMenu { let fs = self.fs.clone(); - menu = menu.header("Show Predict Edits For"); + menu = menu.header("Show Edit Predictions For"); if let Some(language) = self.language.clone() { let fs = fs.clone(); @@ -451,9 +451,9 @@ impl InlineCompletionButton { } menu = menu.item( - ContextMenuEntry::new("Exclude Files") + ContextMenuEntry::new("Configure Excluded Files") .documentation_aside(|_| { - Label::new("This item takes you to the settings where you can specify files that will never be captured by any edit prediction model. You can list both specific file extensions and individual file names.").into_any_element() + Label::new("This item takes you to the settings where you can specify files that will never be captured by any edit prediction model. List both specific file extensions and individual file names.").into_any_element() }) .handler(move |window, cx| { if let Some(workspace) = window.root().flatten() { From 01bcbf3b0d0c50022c7a2db1759aa56d402f19c6 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Thu, 6 Feb 2025 10:13:56 -0500 Subject: [PATCH 082/130] Fix missing diff hunks in single-file worktrees (#24377) Release Notes: - Fixed diff hunks not appearing when opening a single file within a larger repository --- crates/project/src/project_tests.rs | 65 +++++++++++++++++++++++++++++ crates/worktree/src/worktree.rs | 20 +++++---- 2 files changed, 77 insertions(+), 8 deletions(-) diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index d2f36ae960ce70caf6f3a78b4b17c46081a95512..dfd5a5dc56fa2205cee56bd1e140bfe2539adf4c 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -5832,6 +5832,71 @@ async fn test_uncommitted_changes_for_buffer(cx: &mut gpui::TestAppContext) { }); } +#[gpui::test] +async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let committed_contents = r#" + fn main() { + println!("hello from HEAD"); + } + "# + .unindent(); + let file_contents = r#" + fn main() { + println!("hello from the working copy"); + } + "# + .unindent(); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/dir", + json!({ + ".git": {}, + "src": { + "main.rs": file_contents, + } + }), + ) + .await; + + fs.set_head_for_repo( + Path::new("/dir/.git"), + &[("src/main.rs".into(), committed_contents)], + ); + + let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await; + + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer("/dir/src/main.rs", cx) + }) + .await + .unwrap(); + let uncommitted_changes = project + .update(cx, |project, cx| { + project.open_uncommitted_changes(buffer.clone(), cx) + }) + .await + .unwrap(); + + cx.run_until_parked(); + uncommitted_changes.update(cx, |uncommitted_changes, cx| { + let snapshot = buffer.read(cx).snapshot(); + assert_hunks( + 
uncommitted_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + &snapshot, + &uncommitted_changes.base_text.as_ref().unwrap().text(), + &[( + 1..2, + " println!(\"hello from HEAD\");\n", + " println!(\"hello from the working copy\");\n", + )], + ); + }); +} + async fn search( project: &Entity, query: SearchQuery, diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 8c3bd04657ac99d37670a485aa65fd15631498d5..da48baf0958245d390b25f47076d83c4afd9d53c 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -372,15 +372,19 @@ impl WorkDirectory { /// of the project root folder, then the returned RepoPath is relative to the root /// of the repository and not a valid path inside the project. pub fn relativize(&self, path: &Path) -> Result { - if let Some(location_in_repo) = &self.location_in_repo { - Ok(location_in_repo.join(path).into()) + let repo_path = if let Some(location_in_repo) = &self.location_in_repo { + // Avoid joining a `/` to location_in_repo in the case of a single-file worktree. + if path == Path::new("") { + RepoPath(location_in_repo.clone()) + } else { + location_in_repo.join(path).into() + } } else { - let relativized_path = path - .strip_prefix(&self.path) - .map_err(|_| anyhow!("could not relativize {:?} against {:?}", path, self.path))?; - - Ok(relativized_path.into()) - } + path.strip_prefix(&self.path) + .map_err(|_| anyhow!("could not relativize {:?} against {:?}", path, self.path))? + .into() + }; + Ok(repo_path) } /// This is the opposite operation to `relativize` above From 8fc5d227a40250164c168ec4b3deb07c5daed61d Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 6 Feb 2025 10:19:03 -0500 Subject: [PATCH 083/130] copilot: Sort dev dependencies in `Cargo.toml` (#24378) This PR sorts the dev dependencies in `copilot`'s `Cargo.toml`. 
Release Notes: - N/A --- crates/copilot/Cargo.toml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/copilot/Cargo.toml b/crates/copilot/Cargo.toml index bc2aea9b8095018ece2bd10ba68355e044f41894..d3c21084002d89696c5d84d56a8782a70d3b3c08 100644 --- a/crates/copilot/Cargo.toml +++ b/crates/copilot/Cargo.toml @@ -59,20 +59,20 @@ workspace.workspace = true async-std = { version = "1.12.0", features = ["unstable"] } [dev-dependencies] -indoc.workspace = true -serde_json.workspace = true -clock = { workspace = true, features = ["test-support"] } client = { workspace = true, features = ["test-support"] } +clock = { workspace = true, features = ["test-support"] } collections = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } fs = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } http_client = { workspace = true, features = ["test-support"] } +indoc.workspace = true language = { workspace = true, features = ["test-support"] } lsp = { workspace = true, features = ["test-support"] } node_runtime = { workspace = true, features = ["test-support"] } project = { workspace = true, features = ["test-support"] } rpc = { workspace = true, features = ["test-support"] } +serde_json.workspace = true settings = { workspace = true, features = ["test-support"] } theme = { workspace = true, features = ["test-support"] } util = { workspace = true, features = ["test-support"] } From c24f22cd14bb60b6588662805533ff19a4d6e613 Mon Sep 17 00:00:00 2001 From: 0x2CA <2478557459@qq.com> Date: Thu, 6 Feb 2025 23:54:04 +0800 Subject: [PATCH 084/130] vim: Fix Around Subword not including whitespace (#24356) Closes #24271 Release Notes: - Fixed Around Subword No Include Whitespace --- crates/vim/src/object.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/vim/src/object.rs b/crates/vim/src/object.rs index 4131c02589ed1e7961dc2c26e444de6dc3a38790..cd1269e2641ffb43f5dd0da65e8b847a9f195b8f 100644 --- a/crates/vim/src/object.rs +++ b/crates/vim/src/object.rs @@ -671,7 +671,7 @@ fn around_subword( is_word_end || is_subword_end }); - Some(start..end) + Some(start..end).map(|range| expand_to_include_whitespace(map, range, true)) } fn around_containing_word( From 13089d7ec6a7f5a8deca565015af2eb26905bba0 Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Thu, 6 Feb 2025 13:13:21 -0300 Subject: [PATCH 085/130] =?UTF-8?q?edit=20predictions:=20Polish=20up=20?= =?UTF-8?q?=E2=8C=A5=20preview=20experience=20(#24380)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Do not accept with just `tab` in `when_holding_modifer` mode - Fix fake cursor for jumps when destination row is outside viewport - Use current preview state for deciding whether to show modifiers in popovers - Stay in preview state if ⌥ isn't released after accepting a jump Release Notes: - N/A --- assets/keymaps/default-linux.json | 2 +- assets/keymaps/default-macos.json | 2 +- crates/editor/src/editor.rs | 208 ++++++++++++++---------------- crates/editor/src/element.rs | 23 ++-- 4 files changed, 110 insertions(+), 125 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index ac4d604feea50dfadece8fa73157cdb57a2f16a5..fb7104f4d4c4022aa70ae877f6b6b39928de5507 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -510,7 +510,7 @@ } }, { - "context": "Editor && inline_completion && 
!showing_completions", + "context": "Editor && inline_completion && !inline_completion_requires_modifier", "use_key_equivalents": true, "bindings": { "tab": "editor::AcceptInlineCompletion" diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index e865bc14ad44caddd559cd8b2778e329520e3a9a..0a7bad1406a6943a03249da05c236ff2a9ef48ce 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -587,7 +587,7 @@ } }, { - "context": "Editor && inline_completion && !showing_completions", + "context": "Editor && inline_completion && !inline_completion_requires_modifier", "use_key_equivalents": true, "bindings": { "tab": "editor::AcceptInlineCompletion" diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 54ba81cf610932f32263db84ba0bcb57333ed5bf..95373616f00eddee8a80ce57af96bbc921de6b19 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -82,8 +82,8 @@ use gpui::{ Entity, EntityInputHandler, EventEmitter, FocusHandle, FocusOutEvent, Focusable, FontId, FontWeight, Global, HighlightStyle, Hsla, InteractiveText, KeyContext, Modifiers, MouseButton, MouseDownEvent, PaintQuad, ParentElement, Pixels, Render, SharedString, Size, Styled, - StyledText, Subscription, Task, TextStyle, TextStyleRefinement, UTF16Selection, UnderlineStyle, - UniformListScrollHandle, WeakEntity, WeakFocusHandle, Window, + StyledText, Subscription, Task, TextRun, TextStyle, TextStyleRefinement, UTF16Selection, + UnderlineStyle, UniformListScrollHandle, WeakEntity, WeakFocusHandle, Window, }; use highlight_matching_bracket::refresh_matching_bracket_highlights; use hover_popover::{hide_hover, HoverState}; @@ -468,7 +468,7 @@ pub fn make_suggestion_styles(cx: &mut App) -> InlineCompletionStyles { type CompletionId = usize; pub(crate) enum EditDisplayMode { - TabAccept(bool), + TabAccept, DiffPopover, Inline, } @@ -493,15 +493,6 @@ struct InlineCompletionState { invalidation_range: Range, } -impl InlineCompletionState { - pub fn is_move(&self) -> bool { - match &self.completion { - InlineCompletion::Move { .. 
} => true, - _ => false, - } - } -} - enum InlineCompletionHighlight {} pub enum MenuInlineCompletionsPolicy { @@ -1499,10 +1490,14 @@ impl Editor { if self.pending_rename.is_some() { key_context.add("renaming"); } + + let mut showing_completions = false; + match self.context_menu.borrow().as_ref() { Some(CodeContextMenu::Completions(_)) => { key_context.add("menu"); key_context.add("showing_completions"); + showing_completions = true; } Some(CodeContextMenu::CodeActions(_)) => { key_context.add("menu"); @@ -1532,6 +1527,10 @@ impl Editor { if self.has_active_inline_completion() { key_context.add("copilot_suggestion"); key_context.add("inline_completion"); + + if showing_completions || self.inline_completion_requires_modifier(cx) { + key_context.add("inline_completion_requires_modifier"); + } } if self.selection_mark_mode { @@ -4664,7 +4663,7 @@ impl Editor { } } - fn inline_completion_preview_mode(&self, cx: &App) -> language::InlineCompletionPreviewMode { + fn inline_completion_requires_modifier(&self, cx: &App) -> bool { let cursor = self.selections.newest_anchor().head(); self.buffer @@ -4672,8 +4671,9 @@ impl Editor { .text_anchor_for_position(cursor, cx) .map(|(buffer, _)| { all_language_settings(buffer.read(cx).file(), cx).inline_completions_preview_mode() + == InlineCompletionPreviewMode::WhenHoldingModifier }) - .unwrap_or_default() + .unwrap_or(false) } fn should_show_inline_completions_in_buffer( @@ -5042,9 +5042,7 @@ impl Editor { return true; } - has_completion - && self.inline_completion_preview_mode(cx) - == InlineCompletionPreviewMode::WhenHoldingModifier + has_completion && self.inline_completion_requires_modifier(cx) } fn update_inline_completion_preview( @@ -5053,23 +5051,13 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - // Moves jump directly without a preview step - if self - .active_inline_completion - .as_ref() - .map_or(true, |c| c.is_move()) - { - self.previewing_inline_completion = false; - cx.notify(); - return; - } - if !self.show_inline_completions_in_menu(cx) { return; } self.previewing_inline_completion = modifiers.alt; self.update_visible_inline_completion(window, cx); + cx.notify(); } fn update_visible_inline_completion( @@ -5198,7 +5186,7 @@ impl Editor { let display_mode = if all_edits_insertions_or_deletions(&edits, &multibuffer) { if provider.show_tab_accept_marker() { - EditDisplayMode::TabAccept(self.previewing_inline_completion) + EditDisplayMode::TabAccept } else { EditDisplayMode::Inline } @@ -5494,8 +5482,6 @@ impl Editor { min_width: Pixels, max_width: Pixels, cursor_point: Point, - start_row: DisplayRow, - line_layouts: &[LineWithInvisibles], style: &EditorStyle, accept_keystroke: &gpui::Keystroke, window: &Window, @@ -5556,9 +5542,8 @@ impl Editor { Some(completion) => self.render_edit_prediction_cursor_popover_preview( completion, cursor_point, - start_row, - line_layouts, style, + window, cx, )?, @@ -5566,9 +5551,8 @@ impl Editor { Some(stale_completion) => self.render_edit_prediction_cursor_popover_preview( stale_completion, cursor_point, - start_row, - line_layouts, style, + window, cx, )?, @@ -5599,19 +5583,6 @@ impl Editor { let has_completion = self.active_inline_completion.is_some(); - let is_move = self - .active_inline_completion - .as_ref() - .map_or(false, |c| c.is_move()); - - let modifier_color = if !has_completion { - Color::Muted - } else if window.modifiers() == accept_keystroke.modifiers { - Color::Accent - } else { - Color::Default - }; - Some( h_flex() .h(self.edit_prediction_cursor_popover_height()) @@ 
-5631,18 +5602,15 @@ impl Editor { ui::render_modifiers( &accept_keystroke.modifiers, PlatformStyle::platform(), - Some(modifier_color), - !is_move, + Some(if !has_completion { + Color::Muted + } else { + Color::Default + }), + true, ), )) - .child(if is_move { - div() - .child(ui::Key::new(&accept_keystroke.key, None)) - .font(buffer_font.clone()) - .into_any() - } else { - Label::new("Preview").into_any_element() - }) + .child(Label::new("Preview").into_any_element()) .opacity(if has_completion { 1.0 } else { 0.4 }), ) .into_any(), @@ -5653,9 +5621,8 @@ impl Editor { &self, completion: &InlineCompletionState, cursor_point: Point, - start_row: DisplayRow, - line_layouts: &[LineWithInvisibles], style: &EditorStyle, + window: &Window, cx: &mut Context, ) -> Option
{ use text::ToPoint as _; @@ -5732,6 +5699,7 @@ impl Editor { let preview = h_flex() .gap_1() + .min_w_16() .child(styled_text) .when(len_total > first_line_len, |parent| parent.child("…")); @@ -5764,18 +5732,46 @@ impl Editor { None, &style.syntax, ); + let base = h_flex().gap_3().flex_1().child(render_relative_row_jump( + "Jump ", + cursor_point.row, + target.text_anchor.to_point(&snapshot).row, + )); + + if highlighted_text.text.is_empty() { + return Some(base); + } + let cursor_color = self.current_user_player_color(cx).cursor; let start_point = range_around_target.start.to_point(&snapshot); let end_point = range_around_target.end.to_point(&snapshot); let target_point = target.text_anchor.to_point(&snapshot); - let cursor_relative_position = line_layouts - .get(start_point.row.saturating_sub(start_row.0) as usize) + let styled_text = highlighted_text.to_styled_text(&style.text); + let text_len = highlighted_text.text.len(); + + let cursor_relative_position = window + .text_system() + .layout_line( + highlighted_text.text, + style.text.font_size.to_pixels(window.rem_size()), + // We don't need to include highlights + // because we are only using this for the cursor position + &[TextRun { + len: text_len, + font: style.text.font(), + color: style.text.color, + background_color: None, + underline: None, + strikethrough: None, + }], + ) + .log_err() .map(|line| { - let start_column_x = line.x_for_index(start_point.column as usize); - let target_column_x = line.x_for_index(target_point.column as usize); - target_column_x - start_column_x + line.x_for_index( + target_point.column.saturating_sub(start_point.column) as usize + ) }); let fade_before = start_point.column > 0; @@ -5783,56 +5779,40 @@ impl Editor { let background = cx.theme().colors().elevated_surface_background; - Some( - h_flex() - .gap_3() - .flex_1() - .child(render_relative_row_jump( - "Jump ", - cursor_point.row, - target.text_anchor.to_point(&snapshot).row, + let preview = h_flex() + .relative() + .child(styled_text) + .when(fade_before, |parent| { + parent.child(div().absolute().top_0().left_0().w_4().h_full().bg( + linear_gradient( + 90., + linear_color_stop(background, 0.), + linear_color_stop(background.opacity(0.), 1.), + ), )) - .when(!highlighted_text.text.is_empty(), |parent| { - parent.child( - h_flex() - .relative() - .child(highlighted_text.to_styled_text(&style.text)) - .when(fade_before, |parent| { - parent.child( - div().absolute().top_0().left_0().w_4().h_full().bg( - linear_gradient( - 90., - linear_color_stop(background, 0.), - linear_color_stop(background.opacity(0.), 1.), - ), - ), - ) - }) - .when(fade_after, |parent| { - parent.child( - div().absolute().top_0().right_0().w_4().h_full().bg( - linear_gradient( - -90., - linear_color_stop(background, 0.), - linear_color_stop(background.opacity(0.), 1.), - ), - ), - ) - }) - .when_some(cursor_relative_position, |parent, position| { - parent.child( - div() - .w(px(2.)) - .h_full() - .bg(cursor_color) - .absolute() - .top_0() - .left(position), - ) - }), - ) - }), - ) + }) + .when(fade_after, |parent| { + parent.child(div().absolute().top_0().right_0().w_4().h_full().bg( + linear_gradient( + -90., + linear_color_stop(background, 0.), + linear_color_stop(background.opacity(0.), 1.), + ), + )) + }) + .when_some(cursor_relative_position, |parent, position| { + parent.child( + div() + .w(px(2.)) + .h_full() + .bg(cursor_color) + .absolute() + .top_0() + .left(position), + ) + }); + + Some(base.child(preview)) } } } diff --git a/crates/editor/src/element.rs 
b/crates/editor/src/element.rs index 3fce0fa36c904dd91058706c2406c97de111ae20..4412426c82744d86748b04c2881285dac1312c2b 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -1653,7 +1653,7 @@ impl EditorElement { if let Some(inline_completion) = editor.active_inline_completion.as_ref() { match &inline_completion.completion { InlineCompletion::Edit { - display_mode: EditDisplayMode::TabAccept(_), + display_mode: EditDisplayMode::TabAccept, .. } => padding += INLINE_ACCEPT_SUGGESTION_EM_WIDTHS, _ => {} @@ -3238,8 +3238,6 @@ impl EditorElement { min_width, max_width, cursor_point, - start_row, - &line_layouts, style, accept_keystroke.as_ref()?, window, @@ -3714,8 +3712,7 @@ impl EditorElement { } match display_mode { - EditDisplayMode::TabAccept(previewing) => { - let previewing = *previewing; + EditDisplayMode::TabAccept => { let range = &edits.first()?.0; let target_display_point = range.end.to_display_point(editor_snapshot); @@ -3723,14 +3720,22 @@ impl EditorElement { target_display_point.row(), editor_snapshot.line_len(target_display_point.row()), ); - let origin = self.editor.update(cx, |editor, _cx| { - editor.display_to_pixel_point(target_line_end, editor_snapshot, window) - })?; + let (previewing_inline_completion, origin) = + self.editor.update(cx, |editor, _cx| { + Some(( + editor.previewing_inline_completion, + editor.display_to_pixel_point( + target_line_end, + editor_snapshot, + window, + )?, + )) + })?; let mut element = inline_completion_accept_indicator( "Accept", None, - previewing, + previewing_inline_completion, self.editor.focus_handle(cx), window, cx, From 09967ac3d0a50a8afc821ceb8756b18b960ebaa9 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 6 Feb 2025 13:07:26 -0500 Subject: [PATCH 086/130] zeta: Send up diagnostics with prediction requests (#24384) This PR makes it so we send up the diagnostic groups as additional data with the edit prediction request. We're not yet making use of them, but we are recording them so we can use them later (e.g., to train the model). 
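For context, a rough standalone sketch of the shape being attached to the request
(hypothetical minimal types standing in for Zed's `DiagnosticGroup` and the request body;
only `serde` and `serde_json` are assumed): each group is resolved against the buffer
snapshot and then serialized as a `(language server name, JSON value)` pair, mirroring
the `serde_json::to_value` call in the `zeta.rs` hunk below.

```rust
use serde::Serialize;

// Hypothetical stand-ins for the diagnostic types serialized in this patch.
#[derive(Serialize)]
struct DiagnosticEntry {
    range: std::ops::Range<usize>,
    message: String,
}

#[derive(Serialize)]
struct DiagnosticGroup {
    entries: Vec<DiagnosticEntry>,
    primary_ix: usize,
}

// Turn (server name, group) pairs into the `(String, serde_json::Value)` pairs that
// ride along with the edit prediction request.
fn to_request_field(
    groups: Vec<(String, DiagnosticGroup)>,
) -> serde_json::Result<Vec<(String, serde_json::Value)>> {
    groups
        .into_iter()
        .map(|(server_name, group)| Ok((server_name, serde_json::to_value(group)?)))
        .collect()
}

fn main() -> serde_json::Result<()> {
    let groups = vec![(
        "rust-analyzer".to_string(),
        DiagnosticGroup {
            entries: vec![DiagnosticEntry {
                range: 10..14,
                message: "unused variable: `x`".to_string(),
            }],
            primary_ix: 0,
        },
    )];
    println!("{}", serde_json::to_string_pretty(&to_request_field(groups)?)?);
    Ok(())
}
```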
Release Notes: - N/A --------- Co-authored-by: Nathan --- Cargo.lock | 13 ++- Cargo.toml | 2 +- .../src/copilot_completion_provider.rs | 2 + crates/editor/src/editor.rs | 8 +- crates/editor/src/inline_completion_tests.rs | 2 + crates/inline_completion/Cargo.toml | 1 + .../src/inline_completion.rs | 6 +- crates/language/src/buffer.rs | 2 +- crates/language/src/diagnostic_set.rs | 19 +++- crates/language/src/language.rs | 9 +- crates/languages/src/lib.rs | 1 - crates/project/src/lsp_store.rs | 13 +++ crates/supermaven/Cargo.toml | 1 + .../src/supermaven_completion_provider.rs | 2 + crates/zeta/Cargo.toml | 1 + crates/zeta/src/zeta.rs | 94 ++++++++++++++++--- 16 files changed, 145 insertions(+), 31 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3065f2f68d9296c6aa587807f12467b29763ed09..5ad5d048bc88a23e547ff8eb62f38951d135f3fb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6428,6 +6428,7 @@ version = "0.1.0" dependencies = [ "gpui", "language", + "project", ] [[package]] @@ -7160,7 +7161,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" dependencies = [ "cfg-if", - "windows-targets 0.48.5", + "windows-targets 0.52.6", ] [[package]] @@ -10215,7 +10216,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4" dependencies = [ "bytes 1.10.0", - "heck 0.4.1", + "heck 0.5.0", "itertools 0.12.1", "log", "multimap 0.10.0", @@ -15484,7 +15485,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.59.0", ] [[package]] @@ -16729,11 +16730,12 @@ dependencies = [ [[package]] name = "zed_llm_client" -version = "0.1.2" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ab9496dc5c80b2c5fb9654a76d7208d31b53130fb282085fcdde07653831843" +checksum = "9ea4d8ead1e1158e5ebdd6735df25973781da70de5c8008e3a13595865ca4f31" dependencies = [ "serde", + "serde_json", ] [[package]] @@ -16956,6 +16958,7 @@ dependencies = [ "log", "menu", "postage", + "project", "regex", "reqwest_client", "rpc", diff --git a/Cargo.toml b/Cargo.toml index 73160b0cd17179b57ee839a78b27833ce992be19..ff50372ed342d2c4d6a60b6e8c1df06ffc89f5c9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -557,7 +557,7 @@ wasmtime = { version = "24", default-features = false, features = [ wasmtime-wasi = "24" which = "6.0.0" wit-component = "0.201" -zed_llm_client = "0.1.1" +zed_llm_client = "0.2" zstd = "0.11" metal = "0.31" diff --git a/crates/copilot/src/copilot_completion_provider.rs b/crates/copilot/src/copilot_completion_provider.rs index f953e5a1100371c6990e71e1208bb6e33b15d8bd..93ffeaf2e2d92164a4fd40062ba69aa2802d0b00 100644 --- a/crates/copilot/src/copilot_completion_provider.rs +++ b/crates/copilot/src/copilot_completion_provider.rs @@ -3,6 +3,7 @@ use anyhow::Result; use gpui::{App, Context, Entity, EntityId, Task}; use inline_completion::{Direction, InlineCompletion, InlineCompletionProvider}; use language::{language_settings::AllLanguageSettings, Buffer, OffsetRangeExt, ToOffset}; +use project::Project; use settings::Settings; use std::{path::Path, time::Duration}; @@ -79,6 +80,7 @@ impl InlineCompletionProvider for CopilotCompletionProvider { fn refresh( &mut self, + _project: Option>, buffer: Entity, cursor_position: language::Anchor, debounce: bool, 
diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 95373616f00eddee8a80ce57af96bbc921de6b19..b533345e66aa9abfaa66a93c8795e8f2582ad958 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -4648,7 +4648,13 @@ impl Editor { } self.update_visible_inline_completion(window, cx); - provider.refresh(buffer, cursor_buffer_position, debounce, cx); + provider.refresh( + self.project.clone(), + buffer, + cursor_buffer_position, + debounce, + cx, + ); Some(()) } diff --git a/crates/editor/src/inline_completion_tests.rs b/crates/editor/src/inline_completion_tests.rs index 40f77bd35b97f1b049e684282132d69762b76dbc..c0ad941b7a67e06ac7697ac94d91d71306a17035 100644 --- a/crates/editor/src/inline_completion_tests.rs +++ b/crates/editor/src/inline_completion_tests.rs @@ -3,6 +3,7 @@ use indoc::indoc; use inline_completion::InlineCompletionProvider; use language::{Language, LanguageConfig}; use multi_buffer::{Anchor, MultiBufferSnapshot, ToPoint}; +use project::Project; use std::{num::NonZeroU32, ops::Range, sync::Arc}; use text::{Point, ToOffset}; @@ -394,6 +395,7 @@ impl InlineCompletionProvider for FakeInlineCompletionProvider { fn refresh( &mut self, + _project: Option>, _buffer: gpui::Entity, _cursor_position: language::Anchor, _debounce: bool, diff --git a/crates/inline_completion/Cargo.toml b/crates/inline_completion/Cargo.toml index b6b5e2a92ec84d08b333ccb177458787b4a77d95..b478db6f948ad139e127fc0e9ebf7f332c0d8547 100644 --- a/crates/inline_completion/Cargo.toml +++ b/crates/inline_completion/Cargo.toml @@ -14,3 +14,4 @@ path = "src/inline_completion.rs" [dependencies] gpui.workspace = true language.workspace = true +project.workspace = true diff --git a/crates/inline_completion/src/inline_completion.rs b/crates/inline_completion/src/inline_completion.rs index 7c1d89f097e9250a54f2ce5f26306f21ce3644b8..d262112e0380da4b9d352e9e54a9d95da8b31160 100644 --- a/crates/inline_completion/src/inline_completion.rs +++ b/crates/inline_completion/src/inline_completion.rs @@ -1,5 +1,6 @@ use gpui::{App, Context, Entity}; use language::Buffer; +use project::Project; use std::ops::Range; // TODO: Find a better home for `Direction`. @@ -58,6 +59,7 @@ pub trait InlineCompletionProvider: 'static + Sized { fn is_refreshing(&self) -> bool; fn refresh( &mut self, + project: Option>, buffer: Entity, cursor_position: language::Anchor, debounce: bool, @@ -101,6 +103,7 @@ pub trait InlineCompletionProviderHandle { fn is_refreshing(&self, cx: &App) -> bool; fn refresh( &self, + project: Option>, buffer: Entity, cursor_position: language::Anchor, debounce: bool, @@ -174,13 +177,14 @@ where fn refresh( &self, + project: Option>, buffer: Entity, cursor_position: language::Anchor, debounce: bool, cx: &mut App, ) { self.update(cx, |this, cx| { - this.refresh(buffer, cursor_position, debounce, cx) + this.refresh(project, buffer, cursor_position, debounce, cx) }) } diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 5c9798c9b199b02a9fe409a32dc20553c318a7f1..defa935c2827152437b00d29e3bccbd35621403e 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -197,7 +197,7 @@ struct SelectionSet { } /// A diagnostic associated with a certain range of a buffer. -#[derive(Clone, Debug, PartialEq, Eq)] +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub struct Diagnostic { /// The name of the service that produced this diagnostic. 
pub source: Option, diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs index 0f2e39275cd6f2d6ddd31d7fc5bc9b3836c3166b..cff59c8004ce47f28c99ed542dbfb146cbb1041a 100644 --- a/crates/language/src/diagnostic_set.rs +++ b/crates/language/src/diagnostic_set.rs @@ -2,6 +2,7 @@ use crate::{range_to_lsp, Diagnostic}; use anyhow::Result; use collections::HashMap; use lsp::LanguageServerId; +use serde::Serialize; use std::{ cmp::{Ordering, Reverse}, iter, @@ -25,7 +26,7 @@ pub struct DiagnosticSet { /// the diagnostics are stored internally as [`Anchor`]s, but can be /// resolved to different coordinates types like [`usize`] byte offsets or /// [`Point`](gpui::Point)s. -#[derive(Clone, Debug, PartialEq, Eq)] +#[derive(Clone, Debug, PartialEq, Eq, Serialize)] pub struct DiagnosticEntry { /// The range of the buffer where the diagnostic applies. pub range: Range, @@ -35,7 +36,7 @@ pub struct DiagnosticEntry { /// A group of related diagnostics, ordered by their start position /// in the buffer. -#[derive(Debug)] +#[derive(Debug, Serialize)] pub struct DiagnosticGroup { /// The diagnostics. pub entries: Vec>, @@ -43,6 +44,20 @@ pub struct DiagnosticGroup { pub primary_ix: usize, } +impl DiagnosticGroup { + /// Converts the entries in this [`DiagnosticGroup`] to a different buffer coordinate type. + pub fn resolve(&self, buffer: &text::BufferSnapshot) -> DiagnosticGroup { + DiagnosticGroup { + entries: self + .entries + .iter() + .map(|entry| entry.resolve(buffer)) + .collect(), + primary_ix: self.primary_ix, + } + } +} + #[derive(Clone, Debug)] pub struct Summary { start: Anchor, diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index f7aa838c7f096bb3ecf30a70e411ff53c32a5533..48438757fbf6cc143465db63190c0e76d37a8ba2 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -32,10 +32,7 @@ use gpui::{App, AsyncApp, Entity, SharedString, Task}; pub use highlight_map::HighlightMap; use http_client::HttpClient; pub use language_registry::{LanguageName, LoadedLanguage}; -use lsp::{ - CodeActionKind, InitializeParams, LanguageServerBinary, LanguageServerBinaryOptions, - LanguageServerName, -}; +use lsp::{CodeActionKind, InitializeParams, LanguageServerBinary, LanguageServerBinaryOptions}; use parking_lot::Mutex; use regex::Regex; use schemars::{ @@ -73,12 +70,12 @@ use util::serde::default_true; pub use buffer::Operation; pub use buffer::*; -pub use diagnostic_set::DiagnosticEntry; +pub use diagnostic_set::{DiagnosticEntry, DiagnosticGroup}; pub use language_registry::{ AvailableLanguage, LanguageNotFound, LanguageQueries, LanguageRegistry, LanguageServerBinaryStatus, QUERY_FILENAME_PREFIXES, }; -pub use lsp::LanguageServerId; +pub use lsp::{LanguageServerId, LanguageServerName}; pub use outline::*; pub use syntax_map::{OwnedSyntaxLayer, SyntaxLayer, ToTreeSitterPoint, TreeSitterOptions}; pub use text::{AnchorRangeExt, LineEnding}; diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index fbfe7b371ce1fc3b26a41b464064a342d8d9f34b..fc14962720853a30baad8c76f3e1a65cab8f6a12 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -2,7 +2,6 @@ use anyhow::Context as _; use gpui::{App, UpdateGlobal}; use json::json_task_context; pub use language::*; -use lsp::LanguageServerName; use node_runtime::NodeRuntime; use python::{PythonContextProvider, PythonToolchainProvider}; use rust_embed::RustEmbed; diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 
e73bef795b59a0d4e6739ef1006c246a2d84cdd6..f85ba369f7590b3b1af20ee21ca85086dc76046f 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -166,6 +166,19 @@ pub struct LocalLspStore { } impl LocalLspStore { + /// Returns the running language server for the given ID. Note if the language server is starting, it will not be returned. + pub fn running_language_server_for_id( + &self, + id: LanguageServerId, + ) -> Option<&Arc> { + let language_server_state = self.language_servers.get(&id)?; + + match language_server_state { + LanguageServerState::Running { server, .. } => Some(server), + LanguageServerState::Starting(_) => None, + } + } + fn start_language_server( &mut self, worktree_handle: &Entity, diff --git a/crates/supermaven/Cargo.toml b/crates/supermaven/Cargo.toml index a4748754bcaa673a447f460d27884ad16b5250bf..aa173266fe5367e7d3fd7e86f1ac888bd601971d 100644 --- a/crates/supermaven/Cargo.toml +++ b/crates/supermaven/Cargo.toml @@ -22,6 +22,7 @@ inline_completion.workspace = true language.workspace = true log.workspace = true postage.workspace = true +project.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true diff --git a/crates/supermaven/src/supermaven_completion_provider.rs b/crates/supermaven/src/supermaven_completion_provider.rs index f80551a3f39d3f3a417bded1f4affa1bce46253b..c17053ca5514bf10600b88098c1c802a13edb879 100644 --- a/crates/supermaven/src/supermaven_completion_provider.rs +++ b/crates/supermaven/src/supermaven_completion_provider.rs @@ -4,6 +4,7 @@ use futures::StreamExt as _; use gpui::{App, Context, Entity, EntityId, Task}; use inline_completion::{Direction, InlineCompletion, InlineCompletionProvider}; use language::{Anchor, Buffer, BufferSnapshot}; +use project::Project; use std::{ ops::{AddAssign, Range}, path::Path, @@ -123,6 +124,7 @@ impl InlineCompletionProvider for SupermavenCompletionProvider { fn refresh( &mut self, + _project: Option>, buffer_handle: Entity, cursor_position: Anchor, debounce: bool, diff --git a/crates/zeta/Cargo.toml b/crates/zeta/Cargo.toml index e981460256eb977e9c166b1a89a89cd4fde9def7..1904a4d2bac484394e07ce3f708358c78a79e81d 100644 --- a/crates/zeta/Cargo.toml +++ b/crates/zeta/Cargo.toml @@ -37,6 +37,7 @@ language_models.workspace = true log.workspace = true menu.workspace = true postage.workspace = true +project.workspace = true regex.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/crates/zeta/src/zeta.rs b/crates/zeta/src/zeta.rs index c20522b00b8610dc3b6dc4c20fc653712a9c0f5b..a2d660134294e33c86ae028a40da6cb9c3c0e973 100644 --- a/crates/zeta/src/zeta.rs +++ b/crates/zeta/src/zeta.rs @@ -30,6 +30,7 @@ use language::{ }; use language_models::LlmApiToken; use postage::watch; +use project::Project; use settings::WorktreeId; use std::{ borrow::Cow, @@ -363,6 +364,7 @@ impl Zeta { pub fn request_completion_impl( &mut self, + project: Option<&Entity>, buffer: &Entity, cursor: language::Anchor, can_collect_data: bool, @@ -374,6 +376,7 @@ impl Zeta { R: Future> + Send + 'static, { let snapshot = self.report_changes_for_buffer(&buffer, cx); + let diagnostic_groups = snapshot.diagnostic_groups(None); let cursor_point = cursor.to_point(&snapshot); let cursor_offset = cursor_point.to_offset(&snapshot); let events = self.events.clone(); @@ -387,10 +390,39 @@ impl Zeta { let is_staff = cx.is_staff(); let buffer = buffer.clone(); + + let local_lsp_store = + project.and_then(|project| project.read(cx).lsp_store().read(cx).as_local()); + let 
diagnostic_groups = if let Some(local_lsp_store) = local_lsp_store { + Some( + diagnostic_groups + .into_iter() + .filter_map(|(language_server_id, diagnostic_group)| { + let language_server = + local_lsp_store.running_language_server_for_id(language_server_id)?; + + Some(( + language_server.name(), + diagnostic_group.resolve::(&snapshot), + )) + }) + .collect::>(), + ) + } else { + None + }; + cx.spawn(|_, cx| async move { let request_sent_at = Instant::now(); - let (input_events, input_excerpt, excerpt_range, input_outline) = cx + struct BackgroundValues { + input_events: String, + input_excerpt: String, + excerpt_range: Range, + input_outline: String, + } + + let values = cx .background_executor() .spawn({ let snapshot = snapshot.clone(); @@ -419,18 +451,36 @@ impl Zeta { // is not counted towards TOTAL_BYTE_LIMIT. let input_outline = prompt_for_outline(&snapshot); - anyhow::Ok((input_events, input_excerpt, excerpt_range, input_outline)) + anyhow::Ok(BackgroundValues { + input_events, + input_excerpt, + excerpt_range, + input_outline, + }) } }) .await?; - log::debug!("Events:\n{}\nExcerpt:\n{}", input_events, input_excerpt); + log::debug!( + "Events:\n{}\nExcerpt:\n{}", + values.input_events, + values.input_excerpt + ); let body = PredictEditsBody { - input_events: input_events.clone(), - input_excerpt: input_excerpt.clone(), - outline: Some(input_outline.clone()), + input_events: values.input_events.clone(), + input_excerpt: values.input_excerpt.clone(), + outline: Some(values.input_outline.clone()), can_collect_data, + diagnostic_groups: diagnostic_groups.and_then(|diagnostic_groups| { + diagnostic_groups + .into_iter() + .map(|(name, diagnostic_group)| { + Ok((name.to_string(), serde_json::to_value(diagnostic_group)?)) + }) + .collect::>>() + .log_err() + }), }; let response = perform_predict_edits(client, llm_token, is_staff, body).await?; @@ -442,12 +492,12 @@ impl Zeta { output_excerpt, buffer, &snapshot, - excerpt_range, + values.excerpt_range, cursor_offset, path, - input_outline, - input_events, - input_excerpt, + values.input_outline, + values.input_events, + values.input_excerpt, request_sent_at, &cx, ) @@ -466,11 +516,13 @@ impl Zeta { and then another "#}; + let project = None; let buffer = cx.new(|cx| Buffer::local(test_buffer_text, cx)); let position = buffer.read(cx).anchor_before(Point::new(1, 0)); let completion_tasks = vec![ self.fake_completion( + project, &buffer, position, PredictEditsResponse { @@ -486,6 +538,7 @@ and then another cx, ), self.fake_completion( + project, &buffer, position, PredictEditsResponse { @@ -501,6 +554,7 @@ and then another cx, ), self.fake_completion( + project, &buffer, position, PredictEditsResponse { @@ -517,6 +571,7 @@ and then another cx, ), self.fake_completion( + project, &buffer, position, PredictEditsResponse { @@ -533,6 +588,7 @@ and then another cx, ), self.fake_completion( + project, &buffer, position, PredictEditsResponse { @@ -548,6 +604,7 @@ and then another cx, ), self.fake_completion( + project, &buffer, position, PredictEditsResponse { @@ -562,6 +619,7 @@ and then another cx, ), self.fake_completion( + project, &buffer, position, PredictEditsResponse { @@ -594,6 +652,7 @@ and then another #[cfg(any(test, feature = "test-support"))] pub fn fake_completion( &mut self, + project: Option<&Entity>, buffer: &Entity, position: language::Anchor, response: PredictEditsResponse, @@ -601,19 +660,21 @@ and then another ) -> Task>> { use std::future::ready; - self.request_completion_impl(buffer, position, false, cx, |_, _, _, _| { + 
self.request_completion_impl(project, buffer, position, false, cx, |_, _, _, _| { ready(Ok(response)) }) } pub fn request_completion( &mut self, + project: Option<&Entity>, buffer: &Entity, position: language::Anchor, can_collect_data: bool, cx: &mut Context, ) -> Task>> { self.request_completion_impl( + project, buffer, position, can_collect_data, @@ -1494,6 +1555,7 @@ impl inline_completion::InlineCompletionProvider for ZetaInlineCompletionProvide fn refresh( &mut self, + project: Option>, buffer: Entity, position: language::Anchor, _debounce: bool, @@ -1529,7 +1591,13 @@ impl inline_completion::InlineCompletionProvider for ZetaInlineCompletionProvide let completion_request = this.update(&mut cx, |this, cx| { this.last_request_timestamp = Instant::now(); this.zeta.update(cx, |zeta, cx| { - zeta.request_completion(&buffer, position, can_collect_data, cx) + zeta.request_completion( + project.as_ref(), + &buffer, + position, + can_collect_data, + cx, + ) }) }); @@ -1858,7 +1926,7 @@ mod tests { let buffer = cx.new(|cx| Buffer::local(buffer_content, cx)); let cursor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(1, 0))); let completion_task = zeta.update(cx, |zeta, cx| { - zeta.request_completion(&buffer, cursor, false, cx) + zeta.request_completion(None, &buffer, cursor, false, cx) }); let token_request = server.receive::().await.unwrap(); From e1bb9570dfe9a2a717065432ecb5db5e25dfe16c Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Thu, 6 Feb 2025 16:07:27 -0300 Subject: [PATCH 087/130] edit predictions: Reset onboarding action (#24387) https://github.com/user-attachments/assets/bb597b93-a616-4f8a-8608-013b8202799c Release Notes: - N/A --- crates/language/src/language_settings.rs | 11 ++++++ crates/zeta/src/init.rs | 19 ++++++++++ crates/zeta/src/onboarding_banner.rs | 44 +++++++++++------------- 3 files changed, 50 insertions(+), 24 deletions(-) diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 3f5d5ef8a2761cf0d5fb92f2cd4a54f4c4a8f484..fb8eb28a61194ce4e780453d462106cc34cff591 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -206,6 +206,17 @@ pub enum InlineCompletionProvider { Zed, } +impl InlineCompletionProvider { + pub fn is_zed(&self) -> bool { + match self { + InlineCompletionProvider::Zed => true, + InlineCompletionProvider::None + | InlineCompletionProvider::Copilot + | InlineCompletionProvider::Supermaven => false, + } + } +} + /// The settings for edit predictions, such as [GitHub Copilot](https://github.com/features/copilot) /// or [Supermaven](https://supermaven.com). 
#[derive(Clone, Debug, Default)] diff --git a/crates/zeta/src/init.rs b/crates/zeta/src/init.rs index 1559250cbc9befd3ab6ca5db95227ef426061388..a45f5a58a221ad7d504d1ab1e2ce3fa1ef6bf6b1 100644 --- a/crates/zeta/src/init.rs +++ b/crates/zeta/src/init.rs @@ -4,11 +4,16 @@ use command_palette_hooks::CommandPaletteFilter; use feature_flags::{ FeatureFlagAppExt as _, PredictEditsFeatureFlag, PredictEditsRateCompletionsFeatureFlag, }; +use gpui::actions; +use language::language_settings::{AllLanguageSettings, InlineCompletionProvider}; +use settings::update_settings_file; use ui::App; use workspace::Workspace; use crate::{onboarding_modal::ZedPredictModal, RateCompletionModal, RateCompletions}; +actions!(edit_predictions, [ResetOnboarding]); + pub fn init(cx: &mut App) { cx.observe_new(move |workspace: &mut Workspace, _, _cx| { workspace.register_action(|workspace, _: &RateCompletions, window, cx| { @@ -31,6 +36,20 @@ pub fn init(cx: &mut App) { } }, ); + + workspace.register_action(|workspace, _: &ResetOnboarding, _window, cx| { + update_settings_file::( + workspace.app_state().fs.clone(), + cx, + move |file, _| { + file.features + .get_or_insert(Default::default()) + .inline_completion_provider = Some(InlineCompletionProvider::None) + }, + ); + + crate::onboarding_banner::clear_dismissed(cx); + }); }) .detach(); diff --git a/crates/zeta/src/onboarding_banner.rs b/crates/zeta/src/onboarding_banner.rs index 54a6939d6292c505fcbd9a5419c3085b60366331..713b84604abc7176342c0f6a9465824994df8691 100644 --- a/crates/zeta/src/onboarding_banner.rs +++ b/crates/zeta/src/onboarding_banner.rs @@ -11,6 +11,7 @@ use crate::onboarding_event; /// Prompts the user to try Zed's Edit Prediction feature pub struct ZedPredictBanner { dismissed: bool, + provider: InlineCompletionProvider, _subscription: Subscription, } @@ -18,40 +19,30 @@ impl ZedPredictBanner { pub fn new(cx: &mut Context) -> Self { Self { dismissed: get_dismissed(), + provider: all_language_settings(None, cx).inline_completions.provider, _subscription: cx.observe_global::(Self::handle_settings_changed), } } fn should_show(&self, cx: &mut App) -> bool { - if !cx.has_flag::() || self.dismissed { - return false; - } - - let provider = all_language_settings(None, cx).inline_completions.provider; - - match provider { - InlineCompletionProvider::None - | InlineCompletionProvider::Copilot - | InlineCompletionProvider::Supermaven => true, - InlineCompletionProvider::Zed => false, - } + cx.has_flag::() && !self.dismissed && !self.provider.is_zed() } fn handle_settings_changed(&mut self, cx: &mut Context) { - if self.dismissed { + let new_provider = all_language_settings(None, cx).inline_completions.provider; + + if new_provider == self.provider { return; } - let provider = all_language_settings(None, cx).inline_completions.provider; - - match provider { - InlineCompletionProvider::None - | InlineCompletionProvider::Copilot - | InlineCompletionProvider::Supermaven => {} - InlineCompletionProvider::Zed => { - self.dismiss(cx); - } + if new_provider.is_zed() { + self.dismiss(cx); + } else { + self.dismissed = get_dismissed(); } + + self.provider = new_provider; + cx.notify(); } fn dismiss(&mut self, cx: &mut Context) { @@ -64,14 +55,14 @@ impl ZedPredictBanner { const DISMISSED_AT_KEY: &str = "zed_predict_banner_dismissed_at"; -pub(crate) fn get_dismissed() -> bool { +fn get_dismissed() -> bool { db::kvp::KEY_VALUE_STORE .read_kvp(DISMISSED_AT_KEY) .log_err() .map_or(false, |dismissed| dismissed.is_some()) } -pub(crate) fn persist_dismissed(cx: &mut App) { +fn 
persist_dismissed(cx: &mut App) { cx.spawn(|_| { let time = Utc::now().to_rfc3339(); db::kvp::KEY_VALUE_STORE.write_kvp(DISMISSED_AT_KEY.into(), time) @@ -79,6 +70,11 @@ pub(crate) fn persist_dismissed(cx: &mut App) { .detach_and_log_err(cx); } +pub(crate) fn clear_dismissed(cx: &mut App) { + cx.spawn(|_| db::kvp::KEY_VALUE_STORE.delete_kvp(DISMISSED_AT_KEY.into())) + .detach_and_log_err(cx); +} + impl Render for ZedPredictBanner { fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { if !self.should_show(cx) { From c28c767b40291539e552b2f885b86bcc18b9fdde Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 6 Feb 2025 16:49:13 -0300 Subject: [PATCH 088/130] edit prediction: Wordsmith status bar menu docs aside (#24390) Refining writing in the status bar menu docs aside. Release Notes: - N/A --------- Co-authored-by: Nathan Sobo <1789+nathansobo@users.noreply.github.com> --- crates/inline_completion_button/Cargo.toml | 1 + .../src/inline_completion_button.rs | 9 +++++++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/crates/inline_completion_button/Cargo.toml b/crates/inline_completion_button/Cargo.toml index 973e7d327301d17a1830b816e9623ef0eab89083..93d7a3e80ea65de82753a1e743f0e40bd1a900d1 100644 --- a/crates/inline_completion_button/Cargo.toml +++ b/crates/inline_completion_button/Cargo.toml @@ -20,6 +20,7 @@ editor.workspace = true feature_flags.workspace = true fs.workspace = true gpui.workspace = true +indoc.workspace = true inline_completion.workspace = true language.workspace = true paths.workspace = true diff --git a/crates/inline_completion_button/src/inline_completion_button.rs b/crates/inline_completion_button/src/inline_completion_button.rs index c6a616dc1af71cd2f9e070c3ef12e081b9e35a9f..19cb8b4843ca01ce2d7f197901161aeecb2a3c11 100644 --- a/crates/inline_completion_button/src/inline_completion_button.rs +++ b/crates/inline_completion_button/src/inline_completion_button.rs @@ -11,6 +11,7 @@ use gpui::{ Corner, Entity, FocusHandle, Focusable, IntoElement, ParentElement, Render, Subscription, WeakEntity, }; +use indoc::indoc; use language::{ language_settings::{ self, all_language_settings, AllLanguageSettings, InlineCompletionProvider, @@ -429,7 +430,10 @@ impl InlineCompletionButton { ContextMenuEntry::new("Share Training Data") .toggleable(IconPosition::End, data_collection.is_enabled()) .documentation_aside(|_| { - Label::new("Zed automatically detects if your project is open-source. This setting is only applicable in such cases.").into_any_element() + Label::new(indoc!{" + Help us improve our open model by sharing data from open source repositories. \ + Zed must detect a license file in your repo for this setting to take effect.\ + "}).into_any_element() }) .handler(move |_, cx| { provider.toggle_data_collection(cx); @@ -453,7 +457,8 @@ impl InlineCompletionButton { menu = menu.item( ContextMenuEntry::new("Configure Excluded Files") .documentation_aside(|_| { - Label::new("This item takes you to the settings where you can specify files that will never be captured by any edit prediction model. 
List both specific file extensions and individual file names.").into_any_element() + Label::new(indoc!{" + Open your settings to add sensitive paths for which Zed will never predict edits."}).into_any_element() }) .handler(move |window, cx| { if let Some(workspace) = window.root().flatten() { From ad46c5b567edef734ad372170d1282160c719fa0 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 6 Feb 2025 16:58:53 -0300 Subject: [PATCH 089/130] edit prediction: Improve the onboarding modal (#24386) --- crates/zeta/src/onboarding_modal.rs | 38 +++++++++++++++++++++-------- 1 file changed, 28 insertions(+), 10 deletions(-) diff --git a/crates/zeta/src/onboarding_modal.rs b/crates/zeta/src/onboarding_modal.rs index c17289b78f13db3b6964e8b3bccefb7011d8345b..41fc289a5f786ab9fffe1209da335aee27e30726 100644 --- a/crates/zeta/src/onboarding_modal.rs +++ b/crates/zeta/src/onboarding_modal.rs @@ -160,15 +160,25 @@ impl Focusable for ZedPredictModal { impl ModalView for ZedPredictModal {} impl Render for ZedPredictModal { - fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let window_height = window.viewport_size().height; + let max_height = window_height - px(200.); + let base = v_flex() - .id("zed predict tos") + .id("edit-prediction-onboarding") .key_context("ZedPredictModal") + .relative() .w(px(440.)) + .h_full() + .max_h(max_height) .p_4() - .relative() .gap_2() - .overflow_hidden() + .when(self.data_collection_expanded, |element| { + element.overflow_y_scroll() + }) + .when(!self.data_collection_expanded, |element| { + element.overflow_hidden() + }) .elevation_3(cx) .track_focus(&self.focus_handle(cx)) .on_action(cx.listener(Self::cancel)) @@ -184,7 +194,7 @@ impl Render for ZedPredictModal { .p_1p5() .absolute() .top_1() - .left_1p5() + .left_1() .right_0() .h(px(200.)) .child( @@ -247,7 +257,7 @@ impl Render for ZedPredictModal { v_flex() .gap_2() .items_center() - .pr_4() + .pr_2p5() .child(tab(0).ml_neg_20()) .child(tab(1)) .child(tab(2).ml_20()) @@ -295,9 +305,14 @@ impl Render for ZedPredictModal { fn info_item(label_text: impl Into) -> impl Element { h_flex() + .items_start() .gap_2() - .child(Icon::new(IconName::Check).size(IconSize::XSmall)) - .child(label_item(label_text)) + .child( + div() + .mt_1p5() + .child(Icon::new(IconName::Check).size(IconSize::XSmall)), + ) + .child(div().w_full().child(label_item(label_text))) } fn multiline_info_item, E2: IntoElement>( @@ -333,6 +348,7 @@ impl Render for ZedPredictModal { v_flex() .child( h_flex() + .flex_wrap() .child( Checkbox::new( "training-data-checkbox", @@ -390,9 +406,11 @@ impl Render for ZedPredictModal { )) .child(info_item("Toggle it anytime via the status bar menu.")) .child(multiline_info_item( - "Files that can contain sensitive data, like `.env`, are", + "Files with sensitive data, like `.env`, are excluded", h_flex() - .child(label_item("excluded by default via the")) + .w_full() + .flex_wrap() + .child(label_item("by default via the")) .child( Button::new("doc-link", "disabled_globs").on_click( cx.listener(Self::inline_completions_doc), From 9c132fece533ff0359faa8df8e58eb6ac1426019 Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Thu, 6 Feb 2025 14:26:42 -0600 Subject: [PATCH 090/130] Fix #24314 - File icons missing for hidden files (#24391) - **fix ignoring ignored files when matching icons** - **remove poorly named and confusing method 
`PathExt.icon_stem_or_suffix` and refactor `PathExt.extension_or_hidden_file_name` to actually do what it says it does** Closes #24314 Release Notes: - Fixed an issue where hidden files would have the default icon instead of the correct one - Fixed an issue where files with specific icons (such as `eslint.config.js`) would not have the their specific icon without a leading `.` (`.eslint.config.js`) --- crates/file_icons/src/file_icons.rs | 45 ++++++++++++++++++------ crates/language/src/language_registry.rs | 4 +-- crates/util/src/paths.rs | 43 ++-------------------- 3 files changed, 40 insertions(+), 52 deletions(-) diff --git a/crates/file_icons/src/file_icons.rs b/crates/file_icons/src/file_icons.rs index 01fed04243a6cc8c2c998203b6b267d5d1d55a4a..88b200363528af67a018c05cb05309967e299b4f 100644 --- a/crates/file_icons/src/file_icons.rs +++ b/crates/file_icons/src/file_icons.rs @@ -7,7 +7,7 @@ use gpui::{App, AssetSource, Global, SharedString}; use serde_derive::Deserialize; use settings::Settings; use theme::{IconTheme, ThemeRegistry, ThemeSettings}; -use util::{maybe, paths::PathExt}; +use util::paths::PathExt; #[derive(Deserialize, Debug)] pub struct FileIcons { @@ -43,20 +43,45 @@ impl FileIcons { pub fn get_icon(path: &Path, cx: &App) -> Option { let this = cx.try_global::()?; + let get_icon_from_suffix = |suffix: &str| -> Option { + this.stems + .get(suffix) + .or_else(|| this.suffixes.get(suffix)) + .and_then(|typ| this.get_icon_for_type(typ, cx)) + }; // TODO: Associate a type with the languages and have the file's language // override these associations - maybe!({ - let suffix = path.icon_stem_or_suffix()?; - if let Some(type_str) = this.stems.get(suffix) { - return this.get_icon_for_type(type_str, cx); + // check if file name is in suffixes + // e.g. catch file named `eslint.config.js` instead of `.eslint.config.js` + if let Some(typ) = path.to_str().and_then(|typ| this.suffixes.get(typ)) { + let maybe_path = get_icon_from_suffix(typ); + if maybe_path.is_some() { + return maybe_path; } + } - this.suffixes - .get(suffix) - .and_then(|type_str| this.get_icon_for_type(type_str, cx)) - }) - .or_else(|| this.get_icon_for_type("default", cx)) + // primary case: check if the files extension or the hidden file name + // matches some icon path + if let Some(suffix) = path.extension_or_hidden_file_name() { + let maybe_path = get_icon_from_suffix(suffix); + if maybe_path.is_some() { + return maybe_path; + } + } + + // this _should_ only happen when the file is hidden (has leading '.') + // and is not a "special" file we have an icon (e.g. not `.eslint.config.js`) + // that should be caught above. In the remaining cases, we want to check + // for a normal supported extension e.g. 
`.data.json` -> `json` + let extension = path.extension().and_then(|ext| ext.to_str()); + if let Some(extension) = extension { + let maybe_path = get_icon_from_suffix(extension); + if maybe_path.is_some() { + return maybe_path; + } + } + return this.get_icon_for_type("default", cx); } fn default_icon_theme(cx: &App) -> Option> { diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index f0d314f87deb24c69da39de26ea1bf66604ad438..9d80b2609c62b57af86c9acb8d820365c06b00cc 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -31,7 +31,7 @@ use sum_tree::Bias; use text::{Point, Rope}; use theme::Theme; use unicase::UniCase; -use util::{maybe, paths::PathExt, post_inc, ResultExt}; +use util::{maybe, post_inc, ResultExt}; #[derive( Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, JsonSchema, @@ -659,7 +659,7 @@ impl LanguageRegistry { user_file_types: Option<&HashMap, GlobSet>>, ) -> Option { let filename = path.file_name().and_then(|name| name.to_str()); - let extension = path.extension_or_hidden_file_name(); + let extension = path.extension().and_then(|ext| ext.to_str()); let path_suffixes = [extension, filename, path.to_str()]; let empty = GlobSet::empty(); diff --git a/crates/util/src/paths.rs b/crates/util/src/paths.rs index 275895d228440ebe320e44ed0511e245b89eeed3..b3d0c28bbba40dddbc5049858d0a152093637b7c 100644 --- a/crates/util/src/paths.rs +++ b/crates/util/src/paths.rs @@ -21,7 +21,6 @@ pub fn home_dir() -> &'static PathBuf { pub trait PathExt { fn compact(&self) -> PathBuf; - fn icon_stem_or_suffix(&self) -> Option<&str>; fn extension_or_hidden_file_name(&self) -> Option<&str>; fn to_sanitized_string(&self) -> String; fn try_from_bytes<'a>(bytes: &'a [u8]) -> anyhow::Result @@ -74,8 +73,8 @@ impl> PathExt for T { } } - /// Returns either the suffix if available, or the file stem otherwise to determine which file icon to use - fn icon_stem_or_suffix(&self) -> Option<&str> { + /// Returns a file's extension or, if the file is hidden, its name without the leading dot + fn extension_or_hidden_file_name(&self) -> Option<&str> { let path = self.as_ref(); let file_name = path.file_name()?.to_str()?; if file_name.starts_with('.') { @@ -87,15 +86,6 @@ impl> PathExt for T { .or_else(|| path.file_stem()?.to_str()) } - /// Returns a file's extension or, if the file is hidden, its name without the leading dot - fn extension_or_hidden_file_name(&self) -> Option<&str> { - if let Some(extension) = self.as_ref().extension() { - return extension.to_str(); - } - - self.as_ref().file_name()?.to_str()?.split('.').last() - } - /// Returns a sanitized string representation of the path. /// Note, on Windows, this assumes that the path is a valid UTF-8 string and /// is not a UNC path. 
@@ -811,33 +801,6 @@ mod tests { } } - #[test] - fn test_icon_stem_or_suffix() { - // No dots in name - let path = Path::new("/a/b/c/file_name.rs"); - assert_eq!(path.icon_stem_or_suffix(), Some("rs")); - - // Single dot in name - let path = Path::new("/a/b/c/file.name.rs"); - assert_eq!(path.icon_stem_or_suffix(), Some("rs")); - - // No suffix - let path = Path::new("/a/b/c/file"); - assert_eq!(path.icon_stem_or_suffix(), Some("file")); - - // Multiple dots in name - let path = Path::new("/a/b/c/long.file.name.rs"); - assert_eq!(path.icon_stem_or_suffix(), Some("rs")); - - // Hidden file, no extension - let path = Path::new("/a/b/c/.gitignore"); - assert_eq!(path.icon_stem_or_suffix(), Some("gitignore")); - - // Hidden file, with extension - let path = Path::new("/a/b/c/.eslintrc.js"); - assert_eq!(path.icon_stem_or_suffix(), Some("eslintrc.js")); - } - #[test] fn test_extension_or_hidden_file_name() { // No dots in name @@ -858,7 +821,7 @@ mod tests { // Hidden file, with extension let path = Path::new("/a/b/c/.eslintrc.js"); - assert_eq!(path.extension_or_hidden_file_name(), Some("js")); + assert_eq!(path.extension_or_hidden_file_name(), Some("eslintrc.js")); } #[test] From 00971fbe415fdc4695307f192134093c7bcd138c Mon Sep 17 00:00:00 2001 From: Nate Butler Date: Thu, 6 Feb 2025 16:45:03 -0500 Subject: [PATCH 091/130] Introduce KeybindingHint (#24397) - Implements scaling for `ui::Keybinding` and its component parts - Adds the `ui::KeybindingHint` component for creating keybinding hints easily: ![CleanShot 2025-02-04 at 16 59 38@2x](https://github.com/user-attachments/assets/d781e401-8875-4edc-a4b0-5f8750777d86) Release Notes: - N/A --- crates/editor/src/editor.rs | 1 + crates/editor/src/element.rs | 1 + crates/ui/src/components.rs | 2 + crates/ui/src/components/button/button.rs | 18 +- .../ui/src/components/button/button_like.rs | 7 + crates/ui/src/components/icon.rs | 4 + crates/ui/src/components/keybinding.rs | 63 +++- crates/ui/src/components/keybinding_hint.rs | 307 ++++++++++++++++++ crates/workspace/src/theme_preview.rs | 3 +- 9 files changed, 390 insertions(+), 16 deletions(-) create mode 100644 crates/ui/src/components/keybinding_hint.rs diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index b533345e66aa9abfaa66a93c8795e8f2582ad958..8ac797b72cbbd5c496c68bda204c065c1b320a68 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -5613,6 +5613,7 @@ impl Editor { } else { Color::Default }), + None, true, ), )) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 4412426c82744d86748b04c2881285dac1312c2b..96d736888e82c69ab014ffdef4035161d60a8c45 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -5784,6 +5784,7 @@ fn inline_completion_accept_indicator( &accept_keystroke.modifiers, PlatformStyle::platform(), Some(Color::Default), + None, false, )) }) diff --git a/crates/ui/src/components.rs b/crates/ui/src/components.rs index f6626c745b2f317cca3d6e84821cb8d19306bea8..94ace5632c664bbd04dc2fa7be58b3c2dff2bcc0 100644 --- a/crates/ui/src/components.rs +++ b/crates/ui/src/components.rs @@ -11,6 +11,7 @@ mod image; mod indent_guides; mod indicator; mod keybinding; +mod keybinding_hint; mod label; mod list; mod modal; @@ -47,6 +48,7 @@ pub use image::*; pub use indent_guides::*; pub use indicator::*; pub use keybinding::*; +pub use keybinding_hint::*; pub use label::*; pub use list::*; pub use modal::*; diff --git a/crates/ui/src/components/button/button.rs 
b/crates/ui/src/components/button/button.rs index 46f181f3859aa52ff8ba3524707a0470a528ca14..c9b61866617731191a53cb03ca7ef5e470ecb1cf 100644 --- a/crates/ui/src/components/button/button.rs +++ b/crates/ui/src/components/button/button.rs @@ -2,7 +2,8 @@ use gpui::{AnyView, DefiniteLength}; use crate::{ - prelude::*, Color, DynamicSpacing, ElevationIndex, IconPosition, KeyBinding, TintColor, + prelude::*, Color, DynamicSpacing, ElevationIndex, IconPosition, KeyBinding, + KeybindingPosition, TintColor, }; use crate::{ ButtonCommon, ButtonLike, ButtonSize, ButtonStyle, IconName, IconSize, Label, LineHeightStyle, @@ -92,6 +93,7 @@ pub struct Button { selected_icon: Option, selected_icon_color: Option, key_binding: Option, + keybinding_position: KeybindingPosition, alpha: Option, } @@ -117,6 +119,7 @@ impl Button { selected_icon: None, selected_icon_color: None, key_binding: None, + keybinding_position: KeybindingPosition::default(), alpha: None, } } @@ -187,6 +190,15 @@ impl Button { self } + /// Sets the position of the keybinding relative to the button label. + /// + /// This method allows you to specify where the keybinding should be displayed + /// in relation to the button's label. + pub fn key_binding_position(mut self, position: KeybindingPosition) -> Self { + self.keybinding_position = position; + self + } + /// Sets the alpha property of the color of label. pub fn alpha(mut self, alpha: f32) -> Self { self.alpha = Some(alpha); @@ -412,6 +424,10 @@ impl RenderOnce for Button { }) .child( h_flex() + .when( + self.keybinding_position == KeybindingPosition::Start, + |this| this.flex_row_reverse(), + ) .gap(DynamicSpacing::Base06.rems(cx)) .justify_between() .child( diff --git a/crates/ui/src/components/button/button_like.rs b/crates/ui/src/components/button/button_like.rs index 75af3e3a0fab45e01963ac54df291892b05594c4..0b78be078669aeffc8d44342cc0a34a250a756b4 100644 --- a/crates/ui/src/components/button/button_like.rs +++ b/crates/ui/src/components/button/button_like.rs @@ -45,6 +45,13 @@ pub enum IconPosition { End, } +#[derive(Default, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] +pub enum KeybindingPosition { + Start, + #[default] + End, +} + #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Default)] pub enum TintColor { #[default] diff --git a/crates/ui/src/components/icon.rs b/crates/ui/src/components/icon.rs index ba4878898b140029e7962c1bfc1679f2dffb3a01..12346026e81000cc820932ec668b56d10369f52f 100644 --- a/crates/ui/src/components/icon.rs +++ b/crates/ui/src/components/icon.rs @@ -70,6 +70,7 @@ pub enum IconSize { Medium, /// 48px XLarge, + Custom(Pixels), } impl IconSize { @@ -80,6 +81,7 @@ impl IconSize { IconSize::Small => rems_from_px(14.), IconSize::Medium => rems_from_px(16.), IconSize::XLarge => rems_from_px(48.), + IconSize::Custom(size) => rems_from_px(size.into()), } } @@ -96,6 +98,8 @@ impl IconSize { IconSize::Small => DynamicSpacing::Base02.px(cx), IconSize::Medium => DynamicSpacing::Base02.px(cx), IconSize::XLarge => DynamicSpacing::Base02.px(cx), + // TODO: Wire into dynamic spacing + IconSize::Custom(size) => px(size.into()), }; (icon_size, padding) diff --git a/crates/ui/src/components/keybinding.rs b/crates/ui/src/components/keybinding.rs index c78fe1524f7ca13ec9f7d9aa8358e9c629025aba..c488e7999b00102cfca08e18eb5bb83deacc2c03 100644 --- a/crates/ui/src/components/keybinding.rs +++ b/crates/ui/src/components/keybinding.rs @@ -15,6 +15,7 @@ pub struct KeyBinding { /// The [`PlatformStyle`] to use when displaying this keybinding. 
platform_style: PlatformStyle, + size: Option, } impl KeyBinding { @@ -47,6 +48,7 @@ impl KeyBinding { Self { key_binding, platform_style: PlatformStyle::platform(), + size: None, } } @@ -55,6 +57,12 @@ impl KeyBinding { self.platform_style = platform_style; self } + + /// Sets the size for this [`KeyBinding`]. + pub fn size(mut self, size: Pixels) -> Self { + self.size = Some(size); + self + } } impl RenderOnce for KeyBinding { @@ -83,9 +91,12 @@ impl RenderOnce for KeyBinding { &keystroke.modifiers, self.platform_style, None, + self.size, false, )) - .map(|el| el.child(render_key(&keystroke, self.platform_style, None))) + .map(|el| { + el.child(render_key(&keystroke, self.platform_style, None, self.size)) + }) })) } } @@ -94,11 +105,14 @@ pub fn render_key( keystroke: &Keystroke, platform_style: PlatformStyle, color: Option, + size: Option, ) -> AnyElement { let key_icon = icon_for_key(keystroke, platform_style); match key_icon { - Some(icon) => KeyIcon::new(icon, color).into_any_element(), - None => Key::new(capitalize(&keystroke.key), color).into_any_element(), + Some(icon) => KeyIcon::new(icon, color).size(size).into_any_element(), + None => Key::new(capitalize(&keystroke.key), color) + .size(size) + .into_any_element(), } } @@ -130,6 +144,7 @@ pub fn render_modifiers( modifiers: &Modifiers, platform_style: PlatformStyle, color: Option, + size: Option, standalone: bool, ) -> impl Iterator { enum KeyOrIcon { @@ -200,8 +215,8 @@ pub fn render_modifiers( PlatformStyle::Windows => vec![modifier.windows, KeyOrIcon::Key("+")], }) .map(move |key_or_icon| match key_or_icon { - KeyOrIcon::Key(key) => Key::new(key, color).into_any_element(), - KeyOrIcon::Icon(icon) => KeyIcon::new(icon, color).into_any_element(), + KeyOrIcon::Key(key) => Key::new(key, color).size(size).into_any_element(), + KeyOrIcon::Icon(icon) => KeyIcon::new(icon, color).size(size).into_any_element(), }) } @@ -209,26 +224,26 @@ pub fn render_modifiers( pub struct Key { key: SharedString, color: Option, + size: Option, } impl RenderOnce for Key { fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { let single_char = self.key.len() == 1; + let size = self.size.unwrap_or(px(14.)); + let size_f32: f32 = size.into(); div() .py_0() .map(|this| { if single_char { - this.w(rems_from_px(14.)) - .flex() - .flex_none() - .justify_center() + this.w(size).flex().flex_none().justify_center() } else { this.px_0p5() } }) - .h(rems_from_px(14.)) - .text_ui(cx) + .h(rems_from_px(size_f32)) + .text_size(size) .line_height(relative(1.)) .text_color(self.color.unwrap_or(Color::Muted).color(cx)) .child(self.key.clone()) @@ -240,27 +255,47 @@ impl Key { Self { key: key.into(), color, + size: None, } } + + pub fn size(mut self, size: impl Into>) -> Self { + self.size = size.into(); + self + } } #[derive(IntoElement)] pub struct KeyIcon { icon: IconName, color: Option, + size: Option, } impl RenderOnce for KeyIcon { - fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement { + fn render(self, window: &mut Window, _cx: &mut App) -> impl IntoElement { + let size = self + .size + .unwrap_or(IconSize::Small.rems().to_pixels(window.rem_size())); + Icon::new(self.icon) - .size(IconSize::XSmall) + .size(IconSize::Custom(size)) .color(self.color.unwrap_or(Color::Muted)) } } impl KeyIcon { pub fn new(icon: IconName, color: Option) -> Self { - Self { icon, color } + Self { + icon, + color, + size: None, + } + } + + pub fn size(mut self, size: impl Into>) -> Self { + self.size = size.into(); + self } } diff --git 
a/crates/ui/src/components/keybinding_hint.rs b/crates/ui/src/components/keybinding_hint.rs new file mode 100644 index 0000000000000000000000000000000000000000..2239cf0790608e5fb7953a0fb631543ad11147ec --- /dev/null +++ b/crates/ui/src/components/keybinding_hint.rs @@ -0,0 +1,307 @@ +use crate::{h_flex, prelude::*}; +use crate::{ElevationIndex, KeyBinding}; +use gpui::{point, App, BoxShadow, IntoElement, Window}; +use smallvec::smallvec; + +/// Represents a hint for a keybinding, optionally with a prefix and suffix. +/// +/// This struct allows for the creation and customization of a keybinding hint, +/// which can be used to display keyboard shortcuts or commands in a user interface. +/// +/// # Examples +/// +/// ``` +/// use ui::prelude::*; +/// +/// let hint = KeybindingHint::new(KeyBinding::from_str("Ctrl+S")) +/// .prefix("Save:") +/// .size(Pixels::from(14.0)); +/// ``` +#[derive(Debug, IntoElement, Clone)] +pub struct KeybindingHint { + prefix: Option, + suffix: Option, + keybinding: KeyBinding, + size: Option, + elevation: Option, +} + +impl KeybindingHint { + /// Creates a new `KeybindingHint` with the specified keybinding. + /// + /// This method initializes a new `KeybindingHint` instance with the given keybinding, + /// setting all other fields to their default values. + /// + /// # Examples + /// + /// ``` + /// use ui::prelude::*; + /// + /// let hint = KeybindingHint::new(KeyBinding::from_str("Ctrl+C")); + /// ``` + pub fn new(keybinding: KeyBinding) -> Self { + Self { + prefix: None, + suffix: None, + keybinding, + size: None, + elevation: None, + } + } + + /// Creates a new `KeybindingHint` with a prefix and keybinding. + /// + /// This method initializes a new `KeybindingHint` instance with the given prefix and keybinding, + /// setting all other fields to their default values. + /// + /// # Examples + /// + /// ``` + /// use ui::prelude::*; + /// + /// let hint = KeybindingHint::with_prefix("Copy:", KeyBinding::from_str("Ctrl+C")); + /// ``` + pub fn with_prefix(prefix: impl Into, keybinding: KeyBinding) -> Self { + Self { + prefix: Some(prefix.into()), + suffix: None, + keybinding, + size: None, + elevation: None, + } + } + + /// Creates a new `KeybindingHint` with a keybinding and suffix. + /// + /// This method initializes a new `KeybindingHint` instance with the given keybinding and suffix, + /// setting all other fields to their default values. + /// + /// # Examples + /// + /// ``` + /// use ui::prelude::*; + /// + /// let hint = KeybindingHint::with_suffix(KeyBinding::from_str("Ctrl+V"), "Paste"); + /// ``` + pub fn with_suffix(keybinding: KeyBinding, suffix: impl Into) -> Self { + Self { + prefix: None, + suffix: Some(suffix.into()), + keybinding, + size: None, + elevation: None, + } + } + + /// Sets the prefix for the keybinding hint. + /// + /// This method allows adding or changing the prefix text that appears before the keybinding. + /// + /// # Examples + /// + /// ``` + /// use ui::prelude::*; + /// + /// let hint = KeybindingHint::new(KeyBinding::from_str("Ctrl+X")) + /// .prefix("Cut:"); + /// ``` + pub fn prefix(mut self, prefix: impl Into) -> Self { + self.prefix = Some(prefix.into()); + self + } + + /// Sets the suffix for the keybinding hint. + /// + /// This method allows adding or changing the suffix text that appears after the keybinding. 
+ /// + /// # Examples + /// + /// ``` + /// use ui::prelude::*; + /// + /// let hint = KeybindingHint::new(KeyBinding::from_str("Ctrl+F")) + /// .suffix("Find"); + /// ``` + pub fn suffix(mut self, suffix: impl Into) -> Self { + self.suffix = Some(suffix.into()); + self + } + + /// Sets the size of the keybinding hint. + /// + /// This method allows specifying the size of the keybinding hint in pixels. + /// + /// # Examples + /// + /// ``` + /// use ui::prelude::*; + /// + /// let hint = KeybindingHint::new(KeyBinding::from_str("Ctrl+Z")) + /// .size(Pixels::from(16.0)); + /// ``` + pub fn size(mut self, size: impl Into>) -> Self { + self.size = size.into(); + self + } + + /// Sets the elevation of the keybinding hint. + /// + /// This method allows specifying the elevation index for the keybinding hint, + /// which affects its visual appearance in terms of depth or layering. + /// + /// # Examples + /// + /// ``` + /// use ui::prelude::*; + /// + /// let hint = KeybindingHint::new(KeyBinding::from_str("Ctrl+A")) + /// .elevation(ElevationIndex::new(1)); + /// ``` + pub fn elevation(mut self, elevation: impl Into>) -> Self { + self.elevation = elevation.into(); + self + } +} + +impl RenderOnce for KeybindingHint { + fn render(self, window: &mut Window, cx: &mut App) -> impl IntoElement { + let colors = cx.theme().colors().clone(); + + let size = self + .size + .unwrap_or(TextSize::Small.rems(cx).to_pixels(window.rem_size())); + let kb_size = size - px(2.0); + let kb_bg = if let Some(elevation) = self.elevation { + elevation.on_elevation_bg(cx) + } else { + theme::color_alpha(colors.element_background, 0.6) + }; + + h_flex() + .items_center() + .gap_0p5() + .font_buffer(cx) + .text_size(size) + .text_color(colors.text_muted) + .children(self.prefix) + .child( + h_flex() + .items_center() + .rounded_md() + .px_0p5() + .mr_0p5() + .border_1() + .border_color(kb_bg) + .bg(kb_bg.opacity(0.8)) + .shadow(smallvec![BoxShadow { + color: cx.theme().colors().editor_background.opacity(0.8), + offset: point(px(0.), px(1.)), + blur_radius: px(0.), + spread_radius: px(0.), + }]) + .child(self.keybinding.size(kb_size)), + ) + .children(self.suffix) + } +} + +impl ComponentPreview for KeybindingHint { + fn description() -> impl Into> { + "Used to display hint text for keyboard shortcuts. Can have a prefix and suffix." 
+ } + + fn examples(window: &mut Window, _cx: &mut App) -> Vec> { + let home_fallback = gpui::KeyBinding::new("home", menu::SelectFirst, None); + let home = KeyBinding::for_action(&menu::SelectFirst, window) + .unwrap_or(KeyBinding::new(home_fallback)); + + let end_fallback = gpui::KeyBinding::new("end", menu::SelectLast, None); + let end = KeyBinding::for_action(&menu::SelectLast, window) + .unwrap_or(KeyBinding::new(end_fallback)); + + let enter_fallback = gpui::KeyBinding::new("enter", menu::Confirm, None); + let enter = KeyBinding::for_action(&menu::Confirm, window) + .unwrap_or(KeyBinding::new(enter_fallback)); + + let escape_fallback = gpui::KeyBinding::new("escape", menu::Cancel, None); + let escape = KeyBinding::for_action(&menu::Cancel, window) + .unwrap_or(KeyBinding::new(escape_fallback)); + + vec![ + example_group_with_title( + "Basic", + vec![ + single_example( + "With Prefix", + KeybindingHint::with_prefix("Go to Start:", home.clone()), + ), + single_example( + "With Suffix", + KeybindingHint::with_suffix(end.clone(), "Go to End"), + ), + single_example( + "With Prefix and Suffix", + KeybindingHint::new(enter.clone()) + .prefix("Confirm:") + .suffix("Execute selected action"), + ), + ], + ), + example_group_with_title( + "Sizes", + vec![ + single_example( + "Small", + KeybindingHint::new(home.clone()) + .size(Pixels::from(12.0)) + .prefix("Small:"), + ), + single_example( + "Medium", + KeybindingHint::new(end.clone()) + .size(Pixels::from(16.0)) + .suffix("Medium"), + ), + single_example( + "Large", + KeybindingHint::new(enter.clone()) + .size(Pixels::from(20.0)) + .prefix("Large:") + .suffix("Size"), + ), + ], + ), + example_group_with_title( + "Elevations", + vec![ + single_example( + "Surface", + KeybindingHint::new(home.clone()) + .elevation(ElevationIndex::Surface) + .prefix("Surface:"), + ), + single_example( + "Elevated Surface", + KeybindingHint::new(end.clone()) + .elevation(ElevationIndex::ElevatedSurface) + .suffix("Elevated"), + ), + single_example( + "Editor Surface", + KeybindingHint::new(enter.clone()) + .elevation(ElevationIndex::EditorSurface) + .prefix("Editor:") + .suffix("Surface"), + ), + single_example( + "Modal Surface", + KeybindingHint::new(escape.clone()) + .elevation(ElevationIndex::ModalSurface) + .prefix("Modal:") + .suffix("Escape"), + ), + ], + ), + ] + } +} diff --git a/crates/workspace/src/theme_preview.rs b/crates/workspace/src/theme_preview.rs index 5062446fe52859f2751bca1f01d1af1b29ff0d80..656fb9a4aca2c8957290a10982068c398811c19b 100644 --- a/crates/workspace/src/theme_preview.rs +++ b/crates/workspace/src/theme_preview.rs @@ -6,7 +6,7 @@ use ui::{ element_cell, prelude::*, string_cell, utils::calculate_contrast_ratio, AudioStatus, Availability, Avatar, AvatarAudioStatusIndicator, AvatarAvailabilityIndicator, ButtonLike, Checkbox, CheckboxWithLabel, ContentGroup, DecoratedIcon, ElevationIndex, Facepile, - IconDecoration, Indicator, Switch, Table, TintColor, Tooltip, + IconDecoration, Indicator, KeybindingHint, Switch, Table, TintColor, Tooltip, }; use crate::{Item, Workspace}; @@ -408,6 +408,7 @@ impl ThemePreview { .child(Facepile::render_component_previews(window, cx)) .child(Icon::render_component_previews(window, cx)) .child(IconDecoration::render_component_previews(window, cx)) + .child(KeybindingHint::render_component_previews(window, cx)) .child(Indicator::render_component_previews(window, cx)) .child(Switch::render_component_previews(window, cx)) .child(Table::render_component_previews(window, cx)) From 
3ab48b31a1fc26c5ed861d017cb835b057c80a24 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 6 Feb 2025 17:38:35 -0500 Subject: [PATCH 092/130] image_viewer: Fix `cargo test` errors (#24404) This PR fixes the errors when running `cargo test` in the `image_viewer` crate. Release Notes: - N/A --- crates/image_viewer/Cargo.toml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/crates/image_viewer/Cargo.toml b/crates/image_viewer/Cargo.toml index cd10ade67b668c75212793e1b161ec5abe723bf6..7e97775a5ad243f30b4ff6cbea510b3d7e150dbb 100644 --- a/crates/image_viewer/Cargo.toml +++ b/crates/image_viewer/Cargo.toml @@ -12,6 +12,9 @@ workspace = true path = "src/image_viewer.rs" doctest = false +[features] +test-support = ["gpui/test-support"] + [dependencies] anyhow.workspace = true db.workspace = true @@ -25,5 +28,5 @@ ui.workspace = true util.workspace = true workspace.workspace = true -[features] -test-support = ["gpui/test-support"] +[dev-dependencies] +editor = { workspace = true, features = ["test-support"] } From ffcad71bfabefb82daa02592e2c3658bfdcca3b9 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Fri, 7 Feb 2025 00:14:39 +0100 Subject: [PATCH 093/130] file_icons: Resolve icon properly for exact file name match (#24396) Follow-up to #24391 The current approach has two issues: - For the described case of `eslint.config.js`, for which a mapping exists in `suffixes`, this would get mapped from `eslint.config.js` to `eslint`. However, for `eslint`, there is no mapping within `suffixes`, thus currently `get_icon_from_suffix` would return `None` and a wrong item would be returned at a later step. - Paths passed to this method are relative to the worktree root, thus e.g. `eslint.config.js` files in subdirectories would still be assigned the wrong icon. --- Behaviour on `main`: (screenshot: main) Behaviour with this change: (screenshot: pr) CC @probably-neb Release Notes: - N/A --- crates/file_icons/src/file_icons.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/file_icons/src/file_icons.rs b/crates/file_icons/src/file_icons.rs index 88b200363528af67a018c05cb05309967e299b4f..37b0a6b225a7587c6ef9cb9c5fae9e7b6450de47 100644 --- a/crates/file_icons/src/file_icons.rs +++ b/crates/file_icons/src/file_icons.rs @@ -54,7 +54,7 @@ impl FileIcons { // check if file name is in suffixes // e.g. catch file named `eslint.config.js` instead of `.eslint.config.js` - if let Some(typ) = path.to_str().and_then(|typ| this.suffixes.get(typ)) { + if let Some(typ) = path.file_name().and_then(|typ| typ.to_str()) { let maybe_path = get_icon_from_suffix(typ); if maybe_path.is_some() { return maybe_path; From 73c487c2227660c6d745ef6bf59e0392bfd28c62 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Thu, 6 Feb 2025 18:52:32 -0500 Subject: [PATCH 094/130] Introduce diff crate to unite BufferDiff and BufferChangeSet (#24392) This is a refactoring PR that does three things: - First, it introduces a new `diff` crate that holds the previous contents of the `git::diff` module, plus the `BufferChangeSet` type formerly of `project::buffer_store`. The new crate is necessary since simply moving `BufferChangeSet` into `git::diff` results in a dependency cycle due to the use of `language::Buffer` to represent the diff base in `BufferChangeSet`. - Second, it renames the two main types in the new diff crate: `BufferDiff` becomes `BufferDiffSnapshot`, and `BufferChangeSet` becomes `BufferDiff`. 
This reflects that the relationship between these two types (immutable cheaply-cloneable "value" type + stateful "resource type" with subscriptions) mirrors existing pairs like `Buffer`/`BufferSnapshot`. References to "change sets" throughout the codebase are updated to refer to "diffs" instead. - Finally, it moves the base_text field of the new BufferDiff type to BufferDiffSnapshot. Release Notes: - N/A --------- Co-authored-by: maxbrunsfeld --- Cargo.lock | 26 +- Cargo.toml | 2 + crates/collab/Cargo.toml | 1 + crates/collab/src/rpc.rs | 4 +- crates/collab/src/tests/integration_tests.rs | 114 ++-- .../random_project_collaboration_tests.rs | 4 +- crates/diff/Cargo.toml | 32 + crates/diff/LICENSE-GPL | 1 + crates/{git => diff}/src/diff.rs | 366 +++++++++-- crates/editor/Cargo.toml | 1 + crates/editor/src/editor.rs | 23 +- crates/editor/src/editor_tests.rs | 22 +- crates/editor/src/element.rs | 3 +- crates/editor/src/hunk_diff.rs | 51 +- crates/editor/src/proposed_changes_editor.rs | 27 +- crates/editor/src/test/editor_test_context.rs | 3 +- crates/git/src/git.rs | 1 - crates/git_ui/Cargo.toml | 1 + crates/git_ui/src/project_diff.rs | 18 +- crates/multi_buffer/Cargo.toml | 13 +- crates/multi_buffer/src/anchor.rs | 36 +- crates/multi_buffer/src/multi_buffer.rs | 221 +++---- crates/multi_buffer/src/multi_buffer_tests.rs | 100 ++- crates/project/Cargo.toml | 2 + crates/project/src/buffer_store.rs | 586 ++++++------------ crates/project/src/project.rs | 15 +- crates/project/src/project_tests.rs | 48 +- crates/proto/proto/zed.proto | 16 +- crates/proto/src/proto.rs | 16 +- .../remote_server/src/remote_editing_tests.rs | 40 +- crates/worktree/src/worktree.rs | 4 +- 31 files changed, 922 insertions(+), 875 deletions(-) create mode 100644 crates/diff/Cargo.toml create mode 120000 crates/diff/LICENSE-GPL rename crates/{git => diff}/src/diff.rs (62%) diff --git a/Cargo.lock b/Cargo.lock index 5ad5d048bc88a23e547ff8eb62f38951d135f3fb..96a8df761bb8cab563cf66f1acfc36e2b2280f60 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2753,6 +2753,7 @@ dependencies = [ "ctor", "dashmap 6.1.0", "derive_more", + "diff 0.1.0", "editor", "env_logger 0.11.6", "envy", @@ -3837,6 +3838,24 @@ dependencies = [ "zeroize", ] +[[package]] +name = "diff" +version = "0.1.0" +dependencies = [ + "futures 0.3.31", + "git2", + "gpui", + "language", + "log", + "pretty_assertions", + "rope", + "serde_json", + "sum_tree", + "text", + "unindent", + "util", +] + [[package]] name = "diff" version = "0.1.13" @@ -4007,6 +4026,7 @@ dependencies = [ "convert_case 0.7.1", "ctor", "db", + "diff 0.1.0", "emojis", "env_logger 0.11.6", "file_icons", @@ -5306,6 +5326,7 @@ dependencies = [ "anyhow", "collections", "db", + "diff 0.1.0", "editor", "feature_flags", "futures 0.3.31", @@ -7910,9 +7931,9 @@ dependencies = [ "clock", "collections", "ctor", + "diff 0.1.0", "env_logger 0.11.6", "futures 0.3.31", - "git", "gpui", "indoc", "itertools 0.14.0", @@ -9919,7 +9940,7 @@ version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d" dependencies = [ - "diff", + "diff 0.1.13", "yansi", ] @@ -10015,6 +10036,7 @@ dependencies = [ "client", "clock", "collections", + "diff 0.1.0", "env_logger 0.11.6", "fancy-regex 0.14.0", "fs", diff --git a/Cargo.toml b/Cargo.toml index ff50372ed342d2c4d6a60b6e8c1df06ffc89f5c9..e108955b785c63a52007ef07a09c249f658aa2ba 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -32,6 +32,7 @@ members = [ "crates/db", "crates/deepseek", 
"crates/diagnostics", + "crates/diff", "crates/docs_preprocessor", "crates/editor", "crates/evals", @@ -231,6 +232,7 @@ copilot = { path = "crates/copilot" } db = { path = "crates/db" } deepseek = { path = "crates/deepseek" } diagnostics = { path = "crates/diagnostics" } +diff = { path = "crates/diff" } editor = { path = "crates/editor" } extension = { path = "crates/extension" } extension_host = { path = "crates/extension_host" } diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index db293c5173806c804c52bb7d2ddc336801c93853..7d61621c955a9c06d7f68b2e45988c7db5b5faa5 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -33,6 +33,7 @@ clock.workspace = true collections.workspace = true dashmap.workspace = true derive_more.workspace = true +diff.workspace = true envy = "0.4.2" futures.workspace = true google_ai.workspace = true diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index eea17d45fb3ba9c4b39644c95d1971cf23964105..2fa325b6a77576b515809ae3772ae2d3ee46ae22 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -309,8 +309,8 @@ impl Server { .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) - .add_request_handler(forward_read_only_project_request::) - .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_read_only_project_request::) .add_request_handler( forward_mutating_project_request::, ) diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index a512a9f10cb6a68556f14f1de4f119f18a98b6a0..e9c8ab39f1217c373ddc6a2e5184cd9f55caa6c7 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -2598,25 +2598,25 @@ async fn test_git_diff_base_change( .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) .await .unwrap(); - let local_unstaged_changes_a = project_local + let local_unstaged_diff_a = project_local .update(cx_a, |p, cx| { - p.open_unstaged_changes(buffer_local_a.clone(), cx) + p.open_unstaged_diff(buffer_local_a.clone(), cx) }) .await .unwrap(); // Wait for it to catch up to the new diff executor.run_until_parked(); - local_unstaged_changes_a.read_with(cx_a, |change_set, cx| { + local_unstaged_diff_a.read_with(cx_a, |diff, cx| { let buffer = buffer_local_a.read(cx); assert_eq!( - change_set.base_text_string().as_deref(), + diff.base_text_string().as_deref(), Some(staged_text.as_str()) ); - git::diff::assert_hunks( - change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), + diff::assert_hunks( + diff.snapshot.hunks_in_row_range(0..4, buffer), buffer, - &change_set.base_text_string().unwrap(), + &diff.base_text_string().unwrap(), &[(1..2, "", "two\n")], ); }); @@ -2626,47 +2626,47 @@ async fn test_git_diff_base_change( .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) .await .unwrap(); - let remote_unstaged_changes_a = project_remote + let remote_unstaged_diff_a = project_remote .update(cx_b, |p, cx| { - p.open_unstaged_changes(buffer_remote_a.clone(), cx) + p.open_unstaged_diff(buffer_remote_a.clone(), cx) }) .await .unwrap(); // Wait remote buffer to catch up to the new diff executor.run_until_parked(); - remote_unstaged_changes_a.read_with(cx_b, |change_set, cx| { + remote_unstaged_diff_a.read_with(cx_b, |diff, cx| { let buffer = buffer_remote_a.read(cx); 
assert_eq!( - change_set.base_text_string().as_deref(), + diff.base_text_string().as_deref(), Some(staged_text.as_str()) ); - git::diff::assert_hunks( - change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), + diff::assert_hunks( + diff.snapshot.hunks_in_row_range(0..4, buffer), buffer, - &change_set.base_text_string().unwrap(), + &diff.base_text_string().unwrap(), &[(1..2, "", "two\n")], ); }); // Open uncommitted changes on the guest, without opening them on the host first - let remote_uncommitted_changes_a = project_remote + let remote_uncommitted_diff_a = project_remote .update(cx_b, |p, cx| { - p.open_uncommitted_changes(buffer_remote_a.clone(), cx) + p.open_uncommitted_diff(buffer_remote_a.clone(), cx) }) .await .unwrap(); executor.run_until_parked(); - remote_uncommitted_changes_a.read_with(cx_b, |change_set, cx| { + remote_uncommitted_diff_a.read_with(cx_b, |diff, cx| { let buffer = buffer_remote_a.read(cx); assert_eq!( - change_set.base_text_string().as_deref(), + diff.base_text_string().as_deref(), Some(committed_text.as_str()) ); - git::diff::assert_hunks( - change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), + diff::assert_hunks( + diff.snapshot.hunks_in_row_range(0..4, buffer), buffer, - &change_set.base_text_string().unwrap(), + &diff.base_text_string().unwrap(), &[(1..2, "TWO\n", "two\n")], ); }); @@ -2683,44 +2683,44 @@ async fn test_git_diff_base_change( // Wait for buffer_local_a to receive it executor.run_until_parked(); - local_unstaged_changes_a.read_with(cx_a, |change_set, cx| { + local_unstaged_diff_a.read_with(cx_a, |diff, cx| { let buffer = buffer_local_a.read(cx); assert_eq!( - change_set.base_text_string().as_deref(), + diff.base_text_string().as_deref(), Some(new_staged_text.as_str()) ); - git::diff::assert_hunks( - change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), + diff::assert_hunks( + diff.snapshot.hunks_in_row_range(0..4, buffer), buffer, - &change_set.base_text_string().unwrap(), + &diff.base_text_string().unwrap(), &[(2..3, "", "three\n")], ); }); - remote_unstaged_changes_a.read_with(cx_b, |change_set, cx| { + remote_unstaged_diff_a.read_with(cx_b, |diff, cx| { let buffer = buffer_remote_a.read(cx); assert_eq!( - change_set.base_text_string().as_deref(), + diff.base_text_string().as_deref(), Some(new_staged_text.as_str()) ); - git::diff::assert_hunks( - change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), + diff::assert_hunks( + diff.snapshot.hunks_in_row_range(0..4, buffer), buffer, - &change_set.base_text_string().unwrap(), + &diff.base_text_string().unwrap(), &[(2..3, "", "three\n")], ); }); - remote_uncommitted_changes_a.read_with(cx_b, |change_set, cx| { + remote_uncommitted_diff_a.read_with(cx_b, |diff, cx| { let buffer = buffer_remote_a.read(cx); assert_eq!( - change_set.base_text_string().as_deref(), + diff.base_text_string().as_deref(), Some(new_committed_text.as_str()) ); - git::diff::assert_hunks( - change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), + diff::assert_hunks( + diff.snapshot.hunks_in_row_range(0..4, buffer), buffer, - &change_set.base_text_string().unwrap(), + &diff.base_text_string().unwrap(), &[(1..2, "TWO_HUNDRED\n", "two\n")], ); }); @@ -2748,25 +2748,25 @@ async fn test_git_diff_base_change( .update(cx_a, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx)) .await .unwrap(); - let local_unstaged_changes_b = project_local + let local_unstaged_diff_b = project_local .update(cx_a, |p, cx| { - p.open_unstaged_changes(buffer_local_b.clone(), cx) + p.open_unstaged_diff(buffer_local_b.clone(), 
cx) }) .await .unwrap(); // Wait for it to catch up to the new diff executor.run_until_parked(); - local_unstaged_changes_b.read_with(cx_a, |change_set, cx| { + local_unstaged_diff_b.read_with(cx_a, |diff, cx| { let buffer = buffer_local_b.read(cx); assert_eq!( - change_set.base_text_string().as_deref(), + diff.base_text_string().as_deref(), Some(staged_text.as_str()) ); - git::diff::assert_hunks( - change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), + diff::assert_hunks( + diff.snapshot.hunks_in_row_range(0..4, buffer), buffer, - &change_set.base_text_string().unwrap(), + &diff.base_text_string().unwrap(), &[(1..2, "", "two\n")], ); }); @@ -2776,22 +2776,22 @@ async fn test_git_diff_base_change( .update(cx_b, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx)) .await .unwrap(); - let remote_unstaged_changes_b = project_remote + let remote_unstaged_diff_b = project_remote .update(cx_b, |p, cx| { - p.open_unstaged_changes(buffer_remote_b.clone(), cx) + p.open_unstaged_diff(buffer_remote_b.clone(), cx) }) .await .unwrap(); executor.run_until_parked(); - remote_unstaged_changes_b.read_with(cx_b, |change_set, cx| { + remote_unstaged_diff_b.read_with(cx_b, |diff, cx| { let buffer = buffer_remote_b.read(cx); assert_eq!( - change_set.base_text_string().as_deref(), + diff.base_text_string().as_deref(), Some(staged_text.as_str()) ); - git::diff::assert_hunks( - change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), + diff::assert_hunks( + diff.snapshot.hunks_in_row_range(0..4, buffer), buffer, &staged_text, &[(1..2, "", "two\n")], @@ -2806,28 +2806,28 @@ async fn test_git_diff_base_change( // Wait for buffer_local_b to receive it executor.run_until_parked(); - local_unstaged_changes_b.read_with(cx_a, |change_set, cx| { + local_unstaged_diff_b.read_with(cx_a, |diff, cx| { let buffer = buffer_local_b.read(cx); assert_eq!( - change_set.base_text_string().as_deref(), + diff.base_text_string().as_deref(), Some(new_staged_text.as_str()) ); - git::diff::assert_hunks( - change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), + diff::assert_hunks( + diff.snapshot.hunks_in_row_range(0..4, buffer), buffer, &new_staged_text, &[(2..3, "", "three\n")], ); }); - remote_unstaged_changes_b.read_with(cx_b, |change_set, cx| { + remote_unstaged_diff_b.read_with(cx_b, |diff, cx| { let buffer = buffer_remote_b.read(cx); assert_eq!( - change_set.base_text_string().as_deref(), + diff.base_text_string().as_deref(), Some(new_staged_text.as_str()) ); - git::diff::assert_hunks( - change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), + diff::assert_hunks( + diff.snapshot.hunks_in_row_range(0..4, buffer), buffer, &new_staged_text, &[(2..3, "", "three\n")], diff --git a/crates/collab/src/tests/random_project_collaboration_tests.rs b/crates/collab/src/tests/random_project_collaboration_tests.rs index e4d1ae79a5c36d833484592429a947c23a8ad96a..655def73b90c2fdc05fa3b258050d3bda9ae0b74 100644 --- a/crates/collab/src/tests/random_project_collaboration_tests.rs +++ b/crates/collab/src/tests/random_project_collaboration_tests.rs @@ -1339,7 +1339,7 @@ impl RandomizedTest for ProjectCollaborationTest { project .buffer_store() .read(cx) - .get_unstaged_changes(host_buffer.read(cx).remote_id(), cx) + .get_unstaged_diff(host_buffer.read(cx).remote_id(), cx) .unwrap() .read(cx) .base_text_string() @@ -1348,7 +1348,7 @@ impl RandomizedTest for ProjectCollaborationTest { project .buffer_store() .read(cx) - .get_unstaged_changes(guest_buffer.read(cx).remote_id(), cx) + 
.get_unstaged_diff(guest_buffer.read(cx).remote_id(), cx) .unwrap() .read(cx) .base_text_string() diff --git a/crates/diff/Cargo.toml b/crates/diff/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..6641fdf1cb67ff579eec4cf62ee3b2e0b902119a --- /dev/null +++ b/crates/diff/Cargo.toml @@ -0,0 +1,32 @@ +[package] +name = "diff" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/diff.rs" + +[dependencies] +futures.workspace = true +git2.workspace = true +gpui.workspace = true +language.workspace = true +log.workspace = true +rope.workspace = true +sum_tree.workspace = true +text.workspace = true +util.workspace = true + +[dev-dependencies] +unindent.workspace = true +serde_json.workspace = true +pretty_assertions.workspace = true +text = {workspace = true, features = ["test-support"]} + +[features] +test-support = [] diff --git a/crates/diff/LICENSE-GPL b/crates/diff/LICENSE-GPL new file mode 120000 index 0000000000000000000000000000000000000000..89e542f750cd3860a0598eff0dc34b56d7336dc4 --- /dev/null +++ b/crates/diff/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/git/src/diff.rs b/crates/diff/src/diff.rs similarity index 62% rename from crates/git/src/diff.rs rename to crates/diff/src/diff.rs index 764c254119321847260275372d1d9c4c7e16f836..adb25417a713048715df7bfc85ac4f09ab651458 100644 --- a/crates/git/src/diff.rs +++ b/crates/diff/src/diff.rs @@ -1,10 +1,12 @@ +use futures::{channel::oneshot, future::OptionFuture}; +use git2::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch}; +use gpui::{App, Context, Entity, EventEmitter}; +use language::{Language, LanguageRegistry}; use rope::Rope; -use std::{cmp, iter, ops::Range}; +use std::{cmp, future::Future, iter, ops::Range, sync::Arc}; use sum_tree::SumTree; -use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point}; - -pub use git2 as libgit; -use libgit::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch}; +use text::{Anchor, BufferId, OffsetRangeExt, Point}; +use util::ResultExt; #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum DiffHunkStatus { @@ -62,36 +64,110 @@ impl sum_tree::Summary for DiffHunkSummary { } } -#[derive(Debug, Clone)] -pub struct BufferDiff { - tree: SumTree, +#[derive(Clone)] +pub struct BufferDiffSnapshot { + hunks: SumTree, + pub base_text: Option, } -impl BufferDiff { - pub fn new(buffer: &BufferSnapshot) -> BufferDiff { - BufferDiff { - tree: SumTree::new(buffer), +impl std::fmt::Debug for BufferDiffSnapshot { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("BufferDiffSnapshot") + .field("hunks", &self.hunks) + .finish() + } +} + +impl BufferDiffSnapshot { + pub fn new(buffer: &text::BufferSnapshot) -> BufferDiffSnapshot { + BufferDiffSnapshot { + hunks: SumTree::new(buffer), + base_text: None, } } - pub fn new_with_single_insertion(buffer: &BufferSnapshot) -> Self { + pub fn new_with_single_insertion(cx: &mut App) -> Self { + let base_text = language::Buffer::build_empty_snapshot(cx); Self { - tree: SumTree::from_item( + hunks: SumTree::from_item( InternalDiffHunk { buffer_range: Anchor::MIN..Anchor::MAX, diff_base_byte_range: 0..0, }, - buffer, + &base_text, ), + base_text: Some(base_text), + } + } + + #[cfg(any(test, feature = "test-support"))] + pub fn build_sync( + buffer: text::BufferSnapshot, + diff_base: String, + 
cx: &mut gpui::TestAppContext, + ) -> Self { + let snapshot = + cx.update(|cx| Self::build(buffer, Some(Arc::new(diff_base)), None, None, cx)); + cx.executor().block(snapshot) + } + + pub fn build( + buffer: text::BufferSnapshot, + diff_base: Option>, + language: Option>, + language_registry: Option>, + cx: &mut App, + ) -> impl Future { + let base_text_snapshot = diff_base.as_ref().map(|base_text| { + language::Buffer::build_snapshot( + Rope::from(base_text.as_str()), + language.clone(), + language_registry.clone(), + cx, + ) + }); + let base_text_snapshot = cx + .background_executor() + .spawn(OptionFuture::from(base_text_snapshot)); + + let hunks = cx.background_executor().spawn({ + let buffer = buffer.clone(); + async move { Self::recalculate_hunks(diff_base, buffer) } + }); + + async move { + let (base_text, hunks) = futures::join!(base_text_snapshot, hunks); + Self { base_text, hunks } } } - pub fn build(diff_base: Option<&str>, buffer: &text::BufferSnapshot) -> Self { - let mut tree = SumTree::new(buffer); + pub fn build_with_base_buffer( + buffer: text::BufferSnapshot, + diff_base: Option>, + diff_base_buffer: Option, + cx: &App, + ) -> impl Future { + cx.background_executor().spawn({ + let buffer = buffer.clone(); + async move { + let hunks = Self::recalculate_hunks(diff_base, buffer); + Self { + hunks, + base_text: diff_base_buffer, + } + } + }) + } + + fn recalculate_hunks( + diff_base: Option>, + buffer: text::BufferSnapshot, + ) -> SumTree { + let mut tree = SumTree::new(&buffer); if let Some(diff_base) = diff_base { let buffer_text = buffer.as_rope().to_string(); - let patch = Self::diff(diff_base, &buffer_text); + let patch = Self::diff(&diff_base, &buffer_text); // A common case in Zed is that the empty buffer is represented as just a newline, // but if we just compute a naive diff you get a "preserved" line in the middle, @@ -102,32 +178,32 @@ impl BufferDiff { buffer_range: buffer.anchor_before(0)..buffer.anchor_before(0), diff_base_byte_range: 0..diff_base.len() - 1, }, - buffer, + &buffer, ); - return Self { tree }; + return tree; } if let Some(patch) = patch { let mut divergence = 0; for hunk_index in 0..patch.num_hunks() { let hunk = - Self::process_patch_hunk(&patch, hunk_index, buffer, &mut divergence); - tree.push(hunk, buffer); + Self::process_patch_hunk(&patch, hunk_index, &buffer, &mut divergence); + tree.push(hunk, &buffer); } } } - Self { tree } + tree } pub fn is_empty(&self) -> bool { - self.tree.is_empty() + self.hunks.is_empty() } pub fn hunks_in_row_range<'a>( &'a self, range: Range, - buffer: &'a BufferSnapshot, + buffer: &'a text::BufferSnapshot, ) -> impl 'a + Iterator { let start = buffer.anchor_before(Point::new(range.start, 0)); let end = buffer.anchor_after(Point::new(range.end, 0)); @@ -138,12 +214,12 @@ impl BufferDiff { pub fn hunks_intersecting_range<'a>( &'a self, range: Range, - buffer: &'a BufferSnapshot, + buffer: &'a text::BufferSnapshot, ) -> impl 'a + Iterator { let range = range.to_offset(buffer); let mut cursor = self - .tree + .hunks .filter::<_, DiffHunkSummary>(buffer, move |summary| { let summary_range = summary.buffer_range.to_offset(buffer); let before_start = summary_range.end < range.start; @@ -194,10 +270,10 @@ impl BufferDiff { pub fn hunks_intersecting_range_rev<'a>( &'a self, range: Range, - buffer: &'a BufferSnapshot, + buffer: &'a text::BufferSnapshot, ) -> impl 'a + Iterator { let mut cursor = self - .tree + .hunks .filter::<_, DiffHunkSummary>(buffer, move |summary| { let before_start = 
summary.buffer_range.end.cmp(&range.start, buffer).is_lt(); let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt(); @@ -223,9 +299,13 @@ impl BufferDiff { }) } - pub fn compare(&self, old: &Self, new_snapshot: &BufferSnapshot) -> Option> { - let mut new_cursor = self.tree.cursor::<()>(new_snapshot); - let mut old_cursor = old.tree.cursor::<()>(new_snapshot); + pub fn compare( + &self, + old: &Self, + new_snapshot: &text::BufferSnapshot, + ) -> Option> { + let mut new_cursor = self.hunks.cursor::<()>(new_snapshot); + let mut old_cursor = old.hunks.cursor::<()>(new_snapshot); old_cursor.next(new_snapshot); new_cursor.next(new_snapshot); let mut start = None; @@ -288,15 +368,11 @@ impl BufferDiff { #[cfg(test)] fn clear(&mut self, buffer: &text::BufferSnapshot) { - self.tree = SumTree::new(buffer); - } - - pub fn update(&mut self, diff_base: &Rope, buffer: &text::BufferSnapshot) { - *self = Self::build(Some(&diff_base.to_string()), buffer); + self.hunks = SumTree::new(buffer); } #[cfg(test)] - fn hunks<'a>(&'a self, text: &'a BufferSnapshot) -> impl 'a + Iterator { + fn hunks<'a>(&'a self, text: &'a text::BufferSnapshot) -> impl 'a + Iterator { let start = text.anchor_before(Point::new(0, 0)); let end = text.anchor_after(Point::new(u32::MAX, u32::MAX)); self.hunks_intersecting_range(start..end, text) @@ -391,12 +467,171 @@ impl BufferDiff { } } +pub struct BufferDiff { + pub buffer_id: BufferId, + pub snapshot: BufferDiffSnapshot, + pub unstaged_diff: Option>, +} + +impl std::fmt::Debug for BufferDiff { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("BufferChangeSet") + .field("buffer_id", &self.buffer_id) + .field("snapshot", &self.snapshot) + .finish() + } +} + +pub enum BufferDiffEvent { + DiffChanged { changed_range: Range }, + LanguageChanged, +} + +impl EventEmitter for BufferDiff {} + +impl BufferDiff { + pub fn set_state( + &mut self, + snapshot: BufferDiffSnapshot, + buffer: &text::BufferSnapshot, + cx: &mut Context, + ) { + if let Some(base_text) = snapshot.base_text.as_ref() { + let changed_range = if Some(base_text.remote_id()) + != self + .snapshot + .base_text + .as_ref() + .map(|buffer| buffer.remote_id()) + { + Some(text::Anchor::MIN..text::Anchor::MAX) + } else { + snapshot.compare(&self.snapshot, buffer) + }; + if let Some(changed_range) = changed_range { + cx.emit(BufferDiffEvent::DiffChanged { changed_range }); + } + } + self.snapshot = snapshot; + } + + pub fn diff_hunks_intersecting_range<'a>( + &'a self, + range: Range, + buffer_snapshot: &'a text::BufferSnapshot, + ) -> impl 'a + Iterator { + self.snapshot + .hunks_intersecting_range(range, buffer_snapshot) + } + + pub fn diff_hunks_intersecting_range_rev<'a>( + &'a self, + range: Range, + buffer_snapshot: &'a text::BufferSnapshot, + ) -> impl 'a + Iterator { + self.snapshot + .hunks_intersecting_range_rev(range, buffer_snapshot) + } + + /// Used in cases where the change set isn't derived from git. 
+ pub fn set_base_text( + &mut self, + base_buffer: Entity, + buffer: text::BufferSnapshot, + cx: &mut Context, + ) -> oneshot::Receiver<()> { + let (tx, rx) = oneshot::channel(); + let this = cx.weak_entity(); + let base_buffer = base_buffer.read(cx); + let language_registry = base_buffer.language_registry(); + let base_buffer = base_buffer.snapshot(); + let base_text = Arc::new(base_buffer.text()); + + let snapshot = BufferDiffSnapshot::build( + buffer.clone(), + Some(base_text), + base_buffer.language().cloned(), + language_registry, + cx, + ); + let complete_on_drop = util::defer(|| { + tx.send(()).ok(); + }); + cx.spawn(|_, mut cx| async move { + let snapshot = snapshot.await; + let Some(this) = this.upgrade() else { + return; + }; + this.update(&mut cx, |this, cx| { + this.set_state(snapshot, &buffer, cx); + }) + .log_err(); + drop(complete_on_drop) + }) + .detach(); + rx + } + + #[cfg(any(test, feature = "test-support"))] + pub fn base_text_string(&self) -> Option { + self.snapshot.base_text.as_ref().map(|buffer| buffer.text()) + } + + pub fn new(buffer: &Entity, cx: &mut App) -> Self { + BufferDiff { + buffer_id: buffer.read(cx).remote_id(), + snapshot: BufferDiffSnapshot::new(&buffer.read(cx)), + unstaged_diff: None, + } + } + + #[cfg(any(test, feature = "test-support"))] + pub fn new_with_base_text( + base_text: &str, + buffer: &Entity, + cx: &mut App, + ) -> Self { + let mut base_text = base_text.to_owned(); + text::LineEnding::normalize(&mut base_text); + let snapshot = BufferDiffSnapshot::build( + buffer.read(cx).text_snapshot(), + Some(base_text.into()), + None, + None, + cx, + ); + let snapshot = cx.background_executor().block(snapshot); + BufferDiff { + buffer_id: buffer.read(cx).remote_id(), + snapshot, + unstaged_diff: None, + } + } + + #[cfg(any(test, feature = "test-support"))] + pub fn recalculate_diff_sync(&mut self, buffer: text::BufferSnapshot, cx: &mut Context) { + let base_text = self + .snapshot + .base_text + .as_ref() + .map(|base_text| base_text.text()); + let snapshot = BufferDiffSnapshot::build_with_base_buffer( + buffer.clone(), + base_text.clone().map(Arc::new), + self.snapshot.base_text.clone(), + cx, + ); + let snapshot = cx.background_executor().block(snapshot); + self.set_state(snapshot, &buffer, cx); + } +} + /// Range (crossing new lines), old, new #[cfg(any(test, feature = "test-support"))] #[track_caller] pub fn assert_hunks( diff_hunks: Iter, - buffer: &BufferSnapshot, + buffer: &text::BufferSnapshot, diff_base: &str, expected_hunks: &[(Range, &str, &str)], ) where @@ -429,18 +664,18 @@ mod tests { use std::assert_eq; use super::*; + use gpui::TestAppContext; use text::{Buffer, BufferId}; use unindent::Unindent as _; - #[test] - fn test_buffer_diff_simple() { + #[gpui::test] + async fn test_buffer_diff_simple(cx: &mut gpui::TestAppContext) { let diff_base = " one two three " .unindent(); - let diff_base_rope = Rope::from(diff_base.clone()); let buffer_text = " one @@ -450,8 +685,7 @@ mod tests { .unindent(); let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text); - let mut diff = BufferDiff::new(&buffer); - diff.update(&diff_base_rope, &buffer); + let mut diff = BufferDiffSnapshot::build_sync(buffer.clone(), diff_base.clone(), cx); assert_hunks( diff.hunks(&buffer), &buffer, @@ -460,7 +694,7 @@ mod tests { ); buffer.edit([(0..0, "point five\n")]); - diff.update(&diff_base_rope, &buffer); + diff = BufferDiffSnapshot::build_sync(buffer.clone(), diff_base.clone(), cx); assert_hunks( diff.hunks(&buffer), &buffer, @@ -472,9 +706,10 @@ 
mod tests { assert_hunks(diff.hunks(&buffer), &buffer, &diff_base, &[]); } - #[test] - fn test_buffer_diff_range() { - let diff_base = " + #[gpui::test] + async fn test_buffer_diff_range(cx: &mut TestAppContext) { + let diff_base = Arc::new( + " one two three @@ -486,8 +721,8 @@ mod tests { nine ten " - .unindent(); - let diff_base_rope = Rope::from(diff_base.clone()); + .unindent(), + ); let buffer_text = " A @@ -511,8 +746,17 @@ mod tests { .unindent(); let buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text); - let mut diff = BufferDiff::new(&buffer); - diff.update(&diff_base_rope, &buffer); + let diff = cx + .update(|cx| { + BufferDiffSnapshot::build( + buffer.snapshot(), + Some(diff_base.clone()), + None, + None, + cx, + ) + }) + .await; assert_eq!(diff.hunks(&buffer).count(), 8); assert_hunks( @@ -527,8 +771,8 @@ mod tests { ); } - #[test] - fn test_buffer_diff_compare() { + #[gpui::test] + async fn test_buffer_diff_compare(cx: &mut TestAppContext) { let base_text = " zero one @@ -557,8 +801,8 @@ mod tests { let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text_1); - let empty_diff = BufferDiff::new(&buffer); - let diff_1 = BufferDiff::build(Some(&base_text), &buffer); + let empty_diff = BufferDiffSnapshot::new(&buffer); + let diff_1 = BufferDiffSnapshot::build_sync(buffer.clone(), base_text.clone(), cx); let range = diff_1.compare(&empty_diff, &buffer).unwrap(); assert_eq!(range.to_point(&buffer), Point::new(0, 0)..Point::new(8, 0)); @@ -576,7 +820,7 @@ mod tests { " .unindent(), ); - let diff_2 = BufferDiff::build(Some(&base_text), &buffer); + let diff_2 = BufferDiffSnapshot::build_sync(buffer.clone(), base_text.clone(), cx); assert_eq!(None, diff_2.compare(&diff_1, &buffer)); // Edit turns a deletion hunk into a modification. 
@@ -593,7 +837,7 @@ mod tests { " .unindent(), ); - let diff_3 = BufferDiff::build(Some(&base_text), &buffer); + let diff_3 = BufferDiffSnapshot::build_sync(buffer.clone(), base_text.clone(), cx); let range = diff_3.compare(&diff_2, &buffer).unwrap(); assert_eq!(range.to_point(&buffer), Point::new(1, 0)..Point::new(2, 0)); @@ -610,7 +854,7 @@ mod tests { " .unindent(), ); - let diff_4 = BufferDiff::build(Some(&base_text), &buffer); + let diff_4 = BufferDiffSnapshot::build_sync(buffer.clone(), base_text.clone(), cx); let range = diff_4.compare(&diff_3, &buffer).unwrap(); assert_eq!(range.to_point(&buffer), Point::new(3, 4)..Point::new(4, 0)); @@ -628,7 +872,7 @@ mod tests { " .unindent(), ); - let diff_5 = BufferDiff::build(Some(&base_text), &buffer); + let diff_5 = BufferDiffSnapshot::build_sync(buffer.snapshot(), base_text.clone(), cx); let range = diff_5.compare(&diff_4, &buffer).unwrap(); assert_eq!(range.to_point(&buffer), Point::new(3, 0)..Point::new(4, 0)); @@ -646,7 +890,7 @@ mod tests { " .unindent(), ); - let diff_6 = BufferDiff::build(Some(&base_text), &buffer); + let diff_6 = BufferDiffSnapshot::build_sync(buffer.snapshot(), base_text, cx); let range = diff_6.compare(&diff_5, &buffer).unwrap(); assert_eq!(range.to_point(&buffer), Point::new(7, 0)..Point::new(8, 0)); } diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index 756fb9da7f962a98405c644e823a2e92720ec0af..d78dff8d2a1a0585b180d40346469bbee167d831 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -38,6 +38,7 @@ clock.workspace = true collections.workspace = true convert_case.workspace = true db.workspace = true +diff.workspace = true emojis.workspace = true file_icons.workspace = true futures.workspace = true diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 8ac797b72cbbd5c496c68bda204c065c1b320a68..d6ed7d27d9366ab0e1e37304ab8a1b0116462a2d 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -47,7 +47,6 @@ mod signature_help; #[cfg(any(test, feature = "test-support"))] pub mod test; -use ::git::diff::DiffHunkStatus; pub(crate) use actions::*; pub use actions::{OpenExcerpts, OpenExcerptsSplit}; use aho_corasick::AhoCorasick; @@ -74,6 +73,7 @@ use code_context_menus::{ AvailableCodeAction, CodeActionContents, CodeActionsItem, CodeActionsMenu, CodeContextMenu, CompletionsMenu, ContextMenuOrigin, }; +use diff::DiffHunkStatus; use git::blame::GitBlame; use gpui::{ div, impl_actions, linear_color_stop, linear_gradient, point, prelude::*, pulsating_between, @@ -1287,7 +1287,7 @@ impl Editor { let mut code_action_providers = Vec::new(); if let Some(project) = project.clone() { - get_uncommitted_changes_for_buffer( + get_uncommitted_diff_for_buffer( &project, buffer.read(cx).all_buffers(), buffer.clone(), @@ -6773,11 +6773,12 @@ impl Editor { cx: &mut App, ) -> Option<()> { let buffer = self.buffer.read(cx); - let change_set = buffer.change_set_for(hunk.buffer_id)?; + let diff = buffer.diff_for(hunk.buffer_id)?; let buffer = buffer.buffer(hunk.buffer_id)?; let buffer = buffer.read(cx); - let original_text = change_set + let original_text = diff .read(cx) + .snapshot .base_text .as_ref()? 
.as_rope() @@ -13731,9 +13732,9 @@ impl Editor { } => { self.tasks_update_task = Some(self.refresh_runnables(window, cx)); let buffer_id = buffer.read(cx).remote_id(); - if self.buffer.read(cx).change_set_for(buffer_id).is_none() { + if self.buffer.read(cx).diff_for(buffer_id).is_none() { if let Some(project) = &self.project { - get_uncommitted_changes_for_buffer( + get_uncommitted_diff_for_buffer( project, [buffer.clone()], self.buffer.clone(), @@ -14492,7 +14493,7 @@ impl Editor { } } -fn get_uncommitted_changes_for_buffer( +fn get_uncommitted_diff_for_buffer( project: &Entity, buffers: impl IntoIterator>, buffer: Entity, @@ -14501,15 +14502,15 @@ fn get_uncommitted_changes_for_buffer( let mut tasks = Vec::new(); project.update(cx, |project, cx| { for buffer in buffers { - tasks.push(project.open_uncommitted_changes(buffer.clone(), cx)) + tasks.push(project.open_uncommitted_diff(buffer.clone(), cx)) } }); cx.spawn(|mut cx| async move { - let change_sets = futures::future::join_all(tasks).await; + let diffs = futures::future::join_all(tasks).await; buffer .update(&mut cx, |buffer, cx| { - for change_set in change_sets.into_iter().flatten() { - buffer.add_change_set(change_set, cx); + for diff in diffs.into_iter().flatten() { + buffer.add_diff(diff, cx); } }) .ok(); diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 491510ed32b2b8efdef05ea0908a8c654e37e172..1c4839f4f9db57265d01958ed3d216b8bcb88fb2 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -7,6 +7,7 @@ use crate::{ }, JoinLines, }; +use diff::{BufferDiff, DiffHunkStatus}; use futures::StreamExt; use gpui::{ div, BackgroundExecutor, SemanticVersion, TestAppContext, UpdateGlobal, VisualTestContext, @@ -26,7 +27,7 @@ use language_settings::{Formatter, FormatterList, IndentGuideSettings}; use multi_buffer::IndentGuide; use parking_lot::Mutex; use pretty_assertions::{assert_eq, assert_ne}; -use project::{buffer_store::BufferChangeSet, FakeFs}; +use project::FakeFs; use project::{ lsp_command::SIGNATURE_HELP_HIGHLIGHT_CURRENT, project_settings::{LspSettings, ProjectSettings}, @@ -12440,11 +12441,10 @@ async fn test_multibuffer_reverts(cx: &mut gpui::TestAppContext) { (buffer_2.clone(), base_text_2), (buffer_3.clone(), base_text_3), ] { - let change_set = - cx.new(|cx| BufferChangeSet::new_with_base_text(&diff_base, &buffer, cx)); + let diff = cx.new(|cx| BufferDiff::new_with_base_text(&diff_base, &buffer, cx)); editor .buffer - .update(cx, |buffer, cx| buffer.add_change_set(change_set, cx)); + .update(cx, |buffer, cx| buffer.add_diff(diff, cx)); } }); cx.executor().run_until_parked(); @@ -13134,11 +13134,10 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) (buffer_2.clone(), file_2_old), (buffer_3.clone(), file_3_old), ] { - let change_set = - cx.new(|cx| BufferChangeSet::new_with_base_text(&diff_base, &buffer, cx)); + let diff = cx.new(|cx| BufferDiff::new_with_base_text(&diff_base, &buffer, cx)); editor .buffer - .update(cx, |buffer, cx| buffer.add_change_set(change_set, cx)); + .update(cx, |buffer, cx| buffer.add_diff(diff, cx)); } }) .unwrap(); @@ -13251,10 +13250,10 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext }); editor .update(cx, |editor, _window, cx| { - let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base, &buffer, cx)); + let diff = cx.new(|cx| BufferDiff::new_with_base_text(base, &buffer, cx)); editor .buffer - .update(cx, |buffer, cx| 
buffer.add_change_set(change_set, cx)) + .update(cx, |buffer, cx| buffer.add_diff(diff, cx)) }) .unwrap(); @@ -14420,11 +14419,10 @@ async fn test_indent_guide_with_expanded_diff_hunks(cx: &mut gpui::TestAppContex editor.buffer().update(cx, |multibuffer, cx| { let buffer = multibuffer.as_singleton().unwrap(); - let change_set = - cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx)); + let diff = cx.new(|cx| BufferDiff::new_with_base_text(base_text, &buffer, cx)); multibuffer.set_all_diff_hunks_expanded(cx); - multibuffer.add_change_set(change_set, cx); + multibuffer.add_diff(diff, cx); buffer.read(cx).remote_id() }) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 96d736888e82c69ab014ffdef4035161d60a8c45..c95b70b26338643671c63307549155304977c1ac 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -26,8 +26,9 @@ use crate::{ }; use client::ParticipantIndex; use collections::{BTreeMap, HashMap, HashSet}; +use diff::DiffHunkStatus; use file_icons::FileIcons; -use git::{blame::BlameEntry, diff::DiffHunkStatus, Oid}; +use git::{blame::BlameEntry, Oid}; use gpui::{ anchored, deferred, div, fill, linear_color_stop, linear_gradient, outline, point, px, quad, relative, size, svg, transparent_black, Action, AnyElement, App, AvailableSpace, Axis, Bounds, diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index d6e6be1c863ead9f50e44e73b237a72b219a3552..8bed3e2ccb8054427e35db0ba264c23152cafcef 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -56,7 +56,7 @@ pub(super) struct ExpandedHunk { pub(crate) struct DiffMapSnapshot(TreeMap); pub(crate) struct DiffBaseState { - pub(crate) change_set: Model, + pub(crate) diff: Model, pub(crate) last_version: Option, _subscription: Subscription, } @@ -80,38 +80,29 @@ impl DiffMap { self.snapshot.clone() } - pub fn add_change_set( + pub fn add_diff( &mut self, - change_set: Model, + diff: Model, window: &mut Window, cx: &mut Context, ) { - let buffer_id = change_set.read(cx).buffer_id; + let buffer_id = diff.read(cx).buffer_id; self.snapshot .0 - .insert(buffer_id, change_set.read(cx).diff_to_buffer.clone()); + .insert(buffer_id, diff.read(cx).diff_to_buffer.clone()); self.diff_bases.insert( buffer_id, DiffBaseState { last_version: None, - _subscription: cx.observe_in( - &change_set, - window, - move |editor, change_set, window, cx| { - editor - .diff_map - .snapshot - .0 - .insert(buffer_id, change_set.read(cx).diff_to_buffer.clone()); - Editor::sync_expanded_diff_hunks( - &mut editor.diff_map, - buffer_id, - window, - cx, - ); - }, - ), - change_set, + _subscription: cx.observe_in(&diff, window, move |editor, diff, window, cx| { + editor + .diff_map + .snapshot + .0 + .insert(buffer_id, diff.read(cx).diff_to_buffer.clone()); + Editor::sync_expanded_diff_hunks(&mut editor.diff_map, buffer_id, window, cx); + }), + diff, }, ); Editor::sync_expanded_diff_hunks(self, buffer_id, window, cx); @@ -399,7 +390,7 @@ impl Editor { self.diff_map .diff_bases .get(&buffer_id)? 
- .change_set + .diff .read(cx) .base_text .clone() @@ -953,12 +944,12 @@ impl Editor { let mut diff_base_buffer = None; let mut diff_base_buffer_unchanged = true; if let Some(diff_base_state) = diff_base_state { - diff_base_state.change_set.update(cx, |change_set, _| { - if diff_base_state.last_version != Some(change_set.base_text_version) { - diff_base_state.last_version = Some(change_set.base_text_version); + diff_base_state.diff.update(cx, |diff, _| { + if diff_base_state.last_version != Some(diff.base_text_version) { + diff_base_state.last_version = Some(diff.base_text_version); diff_base_buffer_unchanged = false; } - diff_base_buffer = change_set.base_text.clone(); + diff_base_buffer = diff.base_text.clone(); }) } @@ -1498,14 +1489,14 @@ mod tests { (buffer_1.clone(), diff_base_1), (buffer_2.clone(), diff_base_2), ] { - let change_set = cx.new(|cx| { + let diff = cx.new(|cx| { BufferChangeSet::new_with_base_text( diff_base.to_string(), buffer.read(cx).text_snapshot(), cx, ) }); - editor.diff_map.add_change_set(change_set, window, cx) + editor.diff_map.add_diff(diff, window, cx) } }) .unwrap(); diff --git a/crates/editor/src/proposed_changes_editor.rs b/crates/editor/src/proposed_changes_editor.rs index 9a61656a58c34f1e0777b047adc3e6165b665a0a..2c7903296b13cc4372f48cb624512323b30bb228 100644 --- a/crates/editor/src/proposed_changes_editor.rs +++ b/crates/editor/src/proposed_changes_editor.rs @@ -1,10 +1,11 @@ use crate::{ApplyAllDiffHunks, Editor, EditorEvent, SemanticsProvider}; use collections::HashSet; +use diff::BufferDiff; use futures::{channel::mpsc, future::join_all}; use gpui::{App, Entity, EventEmitter, Focusable, Render, Subscription, Task}; use language::{Buffer, BufferEvent, Capability}; use multi_buffer::{ExcerptRange, MultiBuffer}; -use project::{buffer_store::BufferChangeSet, Project}; +use project::Project; use smol::stream::StreamExt; use std::{any::TypeId, ops::Range, rc::Rc, time::Duration}; use text::ToOffset; @@ -106,12 +107,10 @@ impl ProposedChangesEditor { let buffer = buffer.read(cx); let base_buffer = buffer.base_buffer()?; let buffer = buffer.text_snapshot(); - let change_set = this - .multibuffer - .read(cx) - .change_set_for(buffer.remote_id())?; - Some(change_set.update(cx, |change_set, cx| { - change_set.set_base_text(base_buffer.clone(), buffer, cx) + let diff = + this.multibuffer.read(cx).diff_for(buffer.remote_id())?; + Some(diff.update(cx, |diff, cx| { + diff.set_base_text(base_buffer.clone(), buffer, cx) })) }) .collect::>() @@ -172,7 +171,7 @@ impl ProposedChangesEditor { }); let mut buffer_entries = Vec::new(); - let mut new_change_sets = Vec::new(); + let mut new_diffs = Vec::new(); for location in locations { let branch_buffer; if let Some(ix) = self @@ -185,14 +184,14 @@ impl ProposedChangesEditor { buffer_entries.push(entry); } else { branch_buffer = location.buffer.update(cx, |buffer, cx| buffer.branch(cx)); - new_change_sets.push(cx.new(|cx| { - let mut change_set = BufferChangeSet::new(&branch_buffer, cx); - let _ = change_set.set_base_text( + new_diffs.push(cx.new(|cx| { + let mut diff = BufferDiff::new(&branch_buffer, cx); + let _ = diff.set_base_text( location.buffer.clone(), branch_buffer.read(cx).text_snapshot(), cx, ); - change_set + diff })); buffer_entries.push(BufferEntry { branch: branch_buffer.clone(), @@ -217,8 +216,8 @@ impl ProposedChangesEditor { self.editor.update(cx, |editor, cx| { editor.change_selections(None, window, cx, |selections| selections.refresh()); editor.buffer.update(cx, |buffer, cx| { - for change_set in 
new_change_sets { - buffer.add_change_set(change_set, cx) + for diff in new_diffs { + buffer.add_diff(diff, cx) } }) }); diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index c51dc0d6a62e5da2f4fc46a7e9b5ecdec1e303d1..7cfaf5622404000da500b90ea73a02a61e804f08 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ b/crates/editor/src/test/editor_test_context.rs @@ -3,8 +3,9 @@ use crate::{ RowExt, }; use collections::BTreeMap; +use diff::DiffHunkStatus; use futures::Future; -use git::diff::DiffHunkStatus; + use gpui::{ prelude::*, AnyWindowHandle, App, Context, Entity, Focusable as _, Keystroke, Pixels, Point, VisualTestContext, Window, WindowHandle, diff --git a/crates/git/src/git.rs b/crates/git/src/git.rs index 3ee0d23be7b15078b5c8a4fbc9bed1f49773e522..1d0c11e813a5fe49f58c3f708a34ef016c71a8cc 100644 --- a/crates/git/src/git.rs +++ b/crates/git/src/git.rs @@ -1,6 +1,5 @@ pub mod blame; pub mod commit; -pub mod diff; mod hosting_provider; mod remote; pub mod repository; diff --git a/crates/git_ui/Cargo.toml b/crates/git_ui/Cargo.toml index 8a2519b8c007f41b9111fbbb1fe6f85c3b3d9ff7..ad4dbdf9905e40e7667738c591a3ae6b47bc1664 100644 --- a/crates/git_ui/Cargo.toml +++ b/crates/git_ui/Cargo.toml @@ -16,6 +16,7 @@ path = "src/git_ui.rs" anyhow.workspace = true collections.workspace = true db.workspace = true +diff.workspace = true editor.workspace = true feature_flags.workspace = true futures.workspace = true diff --git a/crates/git_ui/src/project_diff.rs b/crates/git_ui/src/project_diff.rs index 5d2689ed4cfa59c15023f394a3c7380f2ab34aa4..74d7c26c485c273e36bef6c67fa9447a4e870a62 100644 --- a/crates/git_ui/src/project_diff.rs +++ b/crates/git_ui/src/project_diff.rs @@ -2,6 +2,7 @@ use std::any::{Any, TypeId}; use anyhow::Result; use collections::HashSet; +use diff::BufferDiff; use editor::{scroll::Autoscroll, Editor, EditorEvent}; use feature_flags::FeatureFlagViewExt; use futures::StreamExt; @@ -11,7 +12,7 @@ use gpui::{ }; use language::{Anchor, Buffer, Capability, OffsetRangeExt, Point}; use multi_buffer::{MultiBuffer, PathKey}; -use project::{buffer_store::BufferChangeSet, git::GitState, Project, ProjectPath}; +use project::{git::GitState, Project, ProjectPath}; use theme::ActiveTheme; use ui::prelude::*; use util::ResultExt as _; @@ -43,7 +44,7 @@ pub(crate) struct ProjectDiff { struct DiffBuffer { path_key: PathKey, buffer: Entity, - change_set: Entity, + diff: Entity, } const CONFLICT_NAMESPACE: &'static str = "0"; @@ -285,13 +286,13 @@ impl ProjectDiff { let buffer = load_buffer.await?; let changes = project .update(&mut cx, |project, cx| { - project.open_uncommitted_changes(buffer.clone(), cx) + project.open_uncommitted_diff(buffer.clone(), cx) })? 
.await?; Ok(DiffBuffer { path_key, buffer, - change_set: changes, + diff: changes, }) })); } @@ -312,15 +313,14 @@ impl ProjectDiff { ) { let path_key = diff_buffer.path_key; let buffer = diff_buffer.buffer; - let change_set = diff_buffer.change_set; + let diff = diff_buffer.diff; let snapshot = buffer.read(cx).snapshot(); - let change_set = change_set.read(cx); - let diff_hunk_ranges = if change_set.base_text.is_none() { + let diff = diff.read(cx); + let diff_hunk_ranges = if diff.snapshot.base_text.is_none() { vec![Point::zero()..snapshot.max_point()] } else { - change_set - .diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot) + diff.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot) .map(|diff_hunk| diff_hunk.buffer_range.to_point(&snapshot)) .collect::>() }; diff --git a/crates/multi_buffer/Cargo.toml b/crates/multi_buffer/Cargo.toml index b8b625378d78592d4f6c4619344883f2fbd250f9..c9e1be241ea619b37e0c7fc9fa912df34b658e49 100644 --- a/crates/multi_buffer/Cargo.toml +++ b/crates/multi_buffer/Cargo.toml @@ -14,9 +14,10 @@ doctest = false [features] test-support = [ - "text/test-support", - "language/test-support", + "diff/test-support", "gpui/test-support", + "language/test-support", + "text/test-support", "util/test-support", ] @@ -25,15 +26,14 @@ anyhow.workspace = true clock.workspace = true collections.workspace = true ctor.workspace = true +diff.workspace = true env_logger.workspace = true futures.workspace = true -git.workspace = true gpui.workspace = true itertools.workspace = true language.workspace = true log.workspace = true parking_lot.workspace = true -project.workspace = true rand.workspace = true rope.workspace = true smol.workspace = true @@ -47,12 +47,13 @@ tree-sitter.workspace = true util.workspace = true [dev-dependencies] +diff = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } +indoc.workspace = true language = { workspace = true, features = ["test-support"] } +pretty_assertions.workspace = true project = { workspace = true, features = ["test-support"] } rand.workspace = true settings = { workspace = true, features = ["test-support"] } text = { workspace = true, features = ["test-support"] } util = { workspace = true, features = ["test-support"] } -pretty_assertions.workspace = true -indoc.workspace = true diff --git a/crates/multi_buffer/src/anchor.rs b/crates/multi_buffer/src/anchor.rs index 423f4af31f48a3ba97578c569429e1c1b355641d..ca6bc8cbf65c46cf5dba9f836fe6d74dd4a6fd97 100644 --- a/crates/multi_buffer/src/anchor.rs +++ b/crates/multi_buffer/src/anchor.rs @@ -70,15 +70,15 @@ impl Anchor { return text_cmp; } if self.diff_base_anchor.is_some() || other.diff_base_anchor.is_some() { - if let Some(diff_base) = snapshot.diffs.get(&excerpt.buffer_id) { - let self_anchor = self - .diff_base_anchor - .filter(|a| diff_base.base_text.can_resolve(a)); - let other_anchor = other - .diff_base_anchor - .filter(|a| diff_base.base_text.can_resolve(a)); + if let Some(base_text) = snapshot + .diffs + .get(&excerpt.buffer_id) + .and_then(|diff| diff.base_text.as_ref()) + { + let self_anchor = self.diff_base_anchor.filter(|a| base_text.can_resolve(a)); + let other_anchor = other.diff_base_anchor.filter(|a| base_text.can_resolve(a)); return match (self_anchor, other_anchor) { - (Some(a), Some(b)) => a.cmp(&b, &diff_base.base_text), + (Some(a), Some(b)) => a.cmp(&b, base_text), (Some(_), None) => match other.text_anchor.bias { Bias::Left => Ordering::Greater, Bias::Right => Ordering::Less, @@ 
-107,9 +107,13 @@ impl Anchor { excerpt_id: self.excerpt_id, text_anchor: self.text_anchor.bias_left(&excerpt.buffer), diff_base_anchor: self.diff_base_anchor.map(|a| { - if let Some(base) = snapshot.diffs.get(&excerpt.buffer_id) { - if a.buffer_id == Some(base.base_text.remote_id()) { - return a.bias_left(&base.base_text); + if let Some(base_text) = snapshot + .diffs + .get(&excerpt.buffer_id) + .and_then(|diff| diff.base_text.as_ref()) + { + if a.buffer_id == Some(base_text.remote_id()) { + return a.bias_left(base_text); } } a @@ -128,9 +132,13 @@ impl Anchor { excerpt_id: self.excerpt_id, text_anchor: self.text_anchor.bias_right(&excerpt.buffer), diff_base_anchor: self.diff_base_anchor.map(|a| { - if let Some(base) = snapshot.diffs.get(&excerpt.buffer_id) { - if a.buffer_id == Some(base.base_text.remote_id()) { - return a.bias_right(&base.base_text); + if let Some(base_text) = snapshot + .diffs + .get(&excerpt.buffer_id) + .and_then(|diff| diff.base_text.as_ref()) + { + if a.buffer_id == Some(base_text.remote_id()) { + return a.bias_right(&base_text); } } a diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 1986944a1d6e95f2b7dfd6a28d0c58ac54ca2e53..4f0f3a18bddb6c725c9ccbbdf3c6441d7594dffa 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -9,8 +9,8 @@ pub use position::{TypedOffset, TypedPoint, TypedRow}; use anyhow::{anyhow, Result}; use clock::ReplicaId; use collections::{BTreeMap, Bound, HashMap, HashSet}; +use diff::{BufferDiff, BufferDiffEvent, BufferDiffSnapshot, DiffHunkStatus}; use futures::{channel::mpsc, SinkExt}; -use git::diff::DiffHunkStatus; use gpui::{App, Context, Entity, EntityId, EventEmitter, Task}; use itertools::Itertools; use language::{ @@ -21,7 +21,7 @@ use language::{ TextDimension, TextObject, ToOffset as _, ToPoint as _, TransactionId, TreeSitterOptions, Unclipped, }; -use project::buffer_store::{BufferChangeSet, BufferChangeSetEvent}; + use rope::DimensionPair; use smallvec::SmallVec; use smol::future::yield_now; @@ -68,7 +68,7 @@ pub struct MultiBuffer { buffers: RefCell>, // only used by consumers using `set_excerpts_for_buffer` buffers_by_path: BTreeMap>, - diff_bases: HashMap, + diffs: HashMap, all_diff_hunks_expanded: bool, subscriptions: Topic, /// If true, the multi-buffer only contains a single [`Buffer`] and a single [`Excerpt`] @@ -215,23 +215,21 @@ struct BufferState { _subscriptions: [gpui::Subscription; 2], } -struct ChangeSetState { - change_set: Entity, +struct DiffState { + diff: Entity, _subscription: gpui::Subscription, } -impl ChangeSetState { - fn new(change_set: Entity, cx: &mut Context) -> Self { - ChangeSetState { - _subscription: cx.subscribe(&change_set, |this, change_set, event, cx| match event { - BufferChangeSetEvent::DiffChanged { changed_range } => { - this.buffer_diff_changed(change_set, changed_range.clone(), cx) - } - BufferChangeSetEvent::LanguageChanged => { - this.buffer_diff_language_changed(change_set, cx) +impl DiffState { + fn new(diff: Entity, cx: &mut Context) -> Self { + DiffState { + _subscription: cx.subscribe(&diff, |this, diff, event, cx| match event { + BufferDiffEvent::DiffChanged { changed_range } => { + this.buffer_diff_changed(diff, changed_range.clone(), cx) } + BufferDiffEvent::LanguageChanged => this.buffer_diff_language_changed(diff, cx), }), - change_set, + diff, } } } @@ -242,7 +240,7 @@ pub struct MultiBufferSnapshot { singleton: bool, excerpts: SumTree, excerpt_ids: SumTree, - diffs: TreeMap, + diffs: 
TreeMap, pub diff_transforms: SumTree, trailing_excerpt_update_count: usize, non_text_state_update_count: usize, @@ -268,12 +266,6 @@ pub enum DiffTransform { }, } -#[derive(Clone)] -struct DiffSnapshot { - diff: git::diff::BufferDiff, - base_text: language::BufferSnapshot, -} - #[derive(Clone)] pub struct ExcerptInfo { pub id: ExcerptId, @@ -318,7 +310,7 @@ pub struct RowInfo { pub buffer_id: Option, pub buffer_row: Option, pub multibuffer_row: Option, - pub diff_status: Option, + pub diff_status: Option, } /// A slice into a [`Buffer`] that is being edited in a [`MultiBuffer`]. @@ -397,7 +389,7 @@ pub struct MultiBufferRows<'a> { pub struct MultiBufferChunks<'a> { excerpts: Cursor<'a, Excerpt, ExcerptOffset>, diff_transforms: Cursor<'a, DiffTransform, (usize, ExcerptOffset)>, - diffs: &'a TreeMap, + diffs: &'a TreeMap, diff_base_chunks: Option<(BufferId, BufferChunks<'a>)>, buffer_chunk: Option>, range: Range, @@ -431,7 +423,7 @@ pub struct ReversedMultiBufferBytes<'a> { struct MultiBufferCursor<'a, D: TextDimension> { excerpts: Cursor<'a, Excerpt, ExcerptDimension>, diff_transforms: Cursor<'a, DiffTransform, (OutputDimension, ExcerptDimension)>, - diffs: &'a TreeMap, + diffs: &'a TreeMap, cached_region: Option>, } @@ -517,7 +509,7 @@ impl MultiBuffer { ..MultiBufferSnapshot::default() }), buffers: RefCell::default(), - diff_bases: HashMap::default(), + diffs: HashMap::default(), all_diff_hunks_expanded: false, subscriptions: Topic::default(), singleton: false, @@ -539,7 +531,7 @@ impl MultiBuffer { snapshot: Default::default(), buffers: Default::default(), buffers_by_path: Default::default(), - diff_bases: HashMap::default(), + diffs: HashMap::default(), all_diff_hunks_expanded: false, subscriptions: Default::default(), singleton: false, @@ -573,17 +565,14 @@ impl MultiBuffer { ); } let mut diff_bases = HashMap::default(); - for (buffer_id, change_set_state) in self.diff_bases.iter() { - diff_bases.insert( - *buffer_id, - ChangeSetState::new(change_set_state.change_set.clone(), new_cx), - ); + for (buffer_id, diff) in self.diffs.iter() { + diff_bases.insert(*buffer_id, DiffState::new(diff.diff.clone(), new_cx)); } Self { snapshot: RefCell::new(self.snapshot.borrow().clone()), buffers: RefCell::new(buffers), buffers_by_path: Default::default(), - diff_bases, + diffs: diff_bases, all_diff_hunks_expanded: self.all_diff_hunks_expanded, subscriptions: Default::default(), singleton: self.singleton, @@ -2152,71 +2141,49 @@ impl MultiBuffer { }); } - fn buffer_diff_language_changed( - &mut self, - change_set: Entity, - cx: &mut Context, - ) { + fn buffer_diff_language_changed(&mut self, diff: Entity, cx: &mut Context) { self.sync(cx); let mut snapshot = self.snapshot.borrow_mut(); - let change_set = change_set.read(cx); - let buffer_id = change_set.buffer_id; - let base_text = change_set.base_text.clone(); - let diff = change_set.diff_to_buffer.clone(); - if let Some(base_text) = base_text { - snapshot.diffs.insert( - buffer_id, - DiffSnapshot { - diff: diff.clone(), - base_text, - }, - ); - } else { - snapshot.diffs.remove(&buffer_id); - } + let diff = diff.read(cx); + let buffer_id = diff.buffer_id; + let diff = diff.snapshot.clone(); + snapshot.diffs.insert(buffer_id, diff); } fn buffer_diff_changed( &mut self, - change_set: Entity, + diff: Entity, range: Range, cx: &mut Context, ) { - let change_set = change_set.read(cx); - let buffer_id = change_set.buffer_id; - let diff = change_set.diff_to_buffer.clone(); - let base_text = change_set.base_text.clone(); self.sync(cx); - let mut snapshot = 
self.snapshot.borrow_mut(); - let base_text_changed = snapshot - .diffs - .get(&buffer_id) - .map_or(true, |diff_snapshot| { - change_set.base_text.as_ref().map_or(true, |base_text| { - base_text.remote_id() != diff_snapshot.base_text.remote_id() - }) - }); - if let Some(base_text) = base_text { - snapshot.diffs.insert( - buffer_id, - DiffSnapshot { - diff: diff.clone(), - base_text, - }, - ); - } else if self.all_diff_hunks_expanded { - let base_text = Buffer::build_empty_snapshot(cx); - snapshot.diffs.insert( - buffer_id, - DiffSnapshot { - diff: git::diff::BufferDiff::new_with_single_insertion(&base_text), - base_text, - }, - ); - } else { - snapshot.diffs.remove(&buffer_id); + let diff = diff.read(cx); + let buffer_id = diff.buffer_id; + let mut diff = diff.snapshot.clone(); + if diff.base_text.is_none() && self.all_diff_hunks_expanded { + diff = BufferDiffSnapshot::new_with_single_insertion(cx); } + + let mut snapshot = self.snapshot.borrow_mut(); + let base_text_changed = + snapshot + .diffs + .get(&buffer_id) + .map_or(true, |diff_snapshot| { + match (&diff_snapshot.base_text, &diff.base_text) { + (None, None) => false, + (None, Some(_)) => true, + (Some(_), None) => true, + (Some(old), Some(new)) => { + let (old_id, old_empty) = (old.remote_id(), old.is_empty()); + let (new_id, new_empty) = (new.remote_id(), new.is_empty()); + new_id != old_id && (!new_empty || !old_empty) + } + } + }); + snapshot.diffs.insert(buffer_id, diff); + let buffers = self.buffers.borrow(); let Some(buffer_state) = buffers.get(&buffer_id) else { return; @@ -2352,17 +2319,14 @@ impl MultiBuffer { self.as_singleton().unwrap().read(cx).is_parsing() } - pub fn add_change_set(&mut self, change_set: Entity, cx: &mut Context) { - let buffer_id = change_set.read(cx).buffer_id; - self.buffer_diff_changed(change_set.clone(), text::Anchor::MIN..text::Anchor::MAX, cx); - self.diff_bases - .insert(buffer_id, ChangeSetState::new(change_set, cx)); + pub fn add_diff(&mut self, diff: Entity, cx: &mut Context) { + let buffer_id = diff.read(cx).buffer_id; + self.buffer_diff_changed(diff.clone(), text::Anchor::MIN..text::Anchor::MAX, cx); + self.diffs.insert(buffer_id, DiffState::new(diff, cx)); } - pub fn change_set_for(&self, buffer_id: BufferId) -> Option> { - self.diff_bases - .get(&buffer_id) - .map(|state| state.change_set.clone()) + pub fn diff_for(&self, buffer_id: BufferId) -> Option> { + self.diffs.get(&buffer_id).map(|state| state.diff.clone()) } pub fn expand_diff_hunks(&mut self, ranges: Vec>, cx: &mut Context) { @@ -2920,9 +2884,11 @@ impl MultiBuffer { while let Some(excerpt) = excerpts.item() { // Recompute the expanded hunks in the portion of the excerpt that // intersects the edit. 
- if let Some(diff_state) = snapshot.diffs.get(&excerpt.buffer_id) { - let diff = &diff_state.diff; - let base_text = &diff_state.base_text; + if let Some((diff, base_text)) = snapshot + .diffs + .get(&excerpt.buffer_id) + .and_then(|diff| Some((diff, diff.base_text.as_ref()?))) + { let buffer = &excerpt.buffer; let excerpt_start = *excerpts.start(); let excerpt_end = excerpt_start + ExcerptOffset::new(excerpt.text_summary.len); @@ -3445,8 +3411,7 @@ impl MultiBufferSnapshot { let buffer_start = buffer.anchor_before(buffer_range.start); let buffer_end = buffer.anchor_after(buffer_range.end); Some( - diff.diff - .hunks_intersecting_range(buffer_start..buffer_end, buffer) + diff.hunks_intersecting_range(buffer_start..buffer_end, buffer) .map(|hunk| { ( Point::new(hunk.row_range.start, 0)..Point::new(hunk.row_range.end, 0), @@ -3782,8 +3747,8 @@ impl MultiBufferSnapshot { let buffer_end = excerpt.buffer.anchor_before(buffer_offset); let buffer_end_row = buffer_end.to_point(&excerpt.buffer).row; - if let Some(diff_state) = self.diffs.get(&excerpt.buffer_id) { - for hunk in diff_state.diff.hunks_intersecting_range_rev( + if let Some(diff) = self.diffs.get(&excerpt.buffer_id) { + for hunk in diff.hunks_intersecting_range_rev( excerpt.range.context.start..buffer_end, &excerpt.buffer, ) { @@ -3851,7 +3816,7 @@ impl MultiBufferSnapshot { } pub fn has_diff_hunks(&self) -> bool { - self.diffs.values().any(|diff| !diff.diff.is_empty()) + self.diffs.values().any(|diff| !diff.is_empty()) } pub fn surrounding_word( @@ -4313,7 +4278,11 @@ impl MultiBufferSnapshot { } => { let buffer_start = base_text_byte_range.start + start_overshoot; let mut buffer_end = base_text_byte_range.start + end_overshoot; - let Some(buffer_diff) = self.diffs.get(buffer_id) else { + let Some(base_text) = self + .diffs + .get(buffer_id) + .and_then(|diff| diff.base_text.as_ref()) + else { panic!("{:?} is in non-existent deleted hunk", range.start) }; @@ -4323,9 +4292,8 @@ impl MultiBufferSnapshot { buffer_end -= 1; } - let mut summary = buffer_diff - .base_text - .text_summary_for_range::(buffer_start..buffer_end); + let mut summary = + base_text.text_summary_for_range::(buffer_start..buffer_end); if include_trailing_newline { summary.add_assign(&D::from_text_summary(&TextSummary::newline())) @@ -4362,12 +4330,15 @@ impl MultiBufferSnapshot { .. 
} => { let buffer_end = base_text_byte_range.start + overshoot; - let Some(buffer_diff) = self.diffs.get(buffer_id) else { - panic!("{:?} is in non-extant deleted hunk", range.end) + let Some(base_text) = self + .diffs + .get(buffer_id) + .and_then(|diff| diff.base_text.as_ref()) + else { + panic!("{:?} is in non-existent deleted hunk", range.end) }; - let mut suffix = buffer_diff - .base_text + let mut suffix = base_text .text_summary_for_range::(base_text_byte_range.start..buffer_end); if *has_trailing_newline && buffer_end == base_text_byte_range.end + 1 { suffix.add_assign(&D::from_text_summary(&TextSummary::newline())) @@ -4467,14 +4438,18 @@ impl MultiBufferSnapshot { }) => { let mut in_deleted_hunk = false; if let Some(diff_base_anchor) = &anchor.diff_base_anchor { - if let Some(diff) = self.diffs.get(buffer_id) { - if diff.base_text.can_resolve(&diff_base_anchor) { - let base_text_offset = diff_base_anchor.to_offset(&diff.base_text); + if let Some(base_text) = self + .diffs + .get(buffer_id) + .and_then(|diff| diff.base_text.as_ref()) + { + if base_text.can_resolve(&diff_base_anchor) { + let base_text_offset = diff_base_anchor.to_offset(&base_text); if base_text_offset >= base_text_byte_range.start && base_text_offset <= base_text_byte_range.end { - let position_in_hunk = - diff.base_text.text_summary_for_range::( + let position_in_hunk = base_text + .text_summary_for_range::( base_text_byte_range.start..base_text_offset, ); position.add_assign(&position_in_hunk); @@ -4800,15 +4775,17 @@ impl MultiBufferSnapshot { .. }) = diff_transforms.item() { - let diff_base = self.diffs.get(buffer_id).expect("missing diff base"); + let base_text = self + .diffs + .get(buffer_id) + .and_then(|diff| diff.base_text.as_ref()) + .expect("missing diff base"); if offset_in_transform > base_text_byte_range.len() { debug_assert!(*has_trailing_newline); bias = Bias::Right; } else { diff_base_anchor = Some( - diff_base - .base_text - .anchor_at(base_text_byte_range.start + offset_in_transform, bias), + base_text.anchor_at(base_text_byte_range.start + offset_in_transform, bias), ); bias = Bias::Left; } @@ -6144,7 +6121,7 @@ where .. 
} => { let diff = self.diffs.get(&buffer_id)?; - let buffer = &diff.base_text; + let buffer = diff.base_text.as_ref()?; let mut rope_cursor = buffer.as_rope().cursor(0); let buffer_start = rope_cursor.summary::(base_text_byte_range.start); let buffer_range_len = rope_cursor.summary::(base_text_byte_range.end); @@ -7186,7 +7163,7 @@ impl<'a> Iterator for MultiBufferChunks<'a> { } chunks } else { - let base_buffer = &self.diffs.get(&buffer_id)?.base_text; + let base_buffer = &self.diffs.get(&buffer_id)?.base_text.as_ref()?; base_buffer.chunks(base_text_start..base_text_end, self.language_aware) }; diff --git a/crates/multi_buffer/src/multi_buffer_tests.rs b/crates/multi_buffer/src/multi_buffer_tests.rs index 25c3a4cf9124674b9409dc9789c96297a9978dfa..d98a9db30a5272b3e9498b86fb6345692fe96928 100644 --- a/crates/multi_buffer/src/multi_buffer_tests.rs +++ b/crates/multi_buffer/src/multi_buffer_tests.rs @@ -1,5 +1,5 @@ use super::*; -use git::diff::DiffHunkStatus; +use diff::DiffHunkStatus; use gpui::{App, TestAppContext}; use indoc::indoc; use language::{Buffer, Rope}; @@ -361,11 +361,9 @@ fn test_diff_boundary_anchors(cx: &mut TestAppContext) { let base_text = "one\ntwo\nthree\n"; let text = "one\nthree\n"; let buffer = cx.new(|cx| Buffer::local(text, cx)); - let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx)); + let diff = cx.new(|cx| BufferDiff::new_with_base_text(base_text, &buffer, cx)); let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); - multibuffer.update(cx, |multibuffer, cx| { - multibuffer.add_change_set(change_set, cx) - }); + multibuffer.update(cx, |multibuffer, cx| multibuffer.add_diff(diff, cx)); let (before, after) = multibuffer.update(cx, |multibuffer, cx| { let before = multibuffer.snapshot(cx).anchor_before(Point::new(1, 0)); @@ -405,14 +403,14 @@ fn test_diff_hunks_in_range(cx: &mut TestAppContext) { let base_text = "one\ntwo\nthree\nfour\nfive\nsix\nseven\neight\n"; let text = "one\nfour\nseven\n"; let buffer = cx.new(|cx| Buffer::local(text, cx)); - let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx)); + let diff = cx.new(|cx| BufferDiff::new_with_base_text(base_text, &buffer, cx)); let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); let (mut snapshot, mut subscription) = multibuffer.update(cx, |multibuffer, cx| { (multibuffer.snapshot(cx), multibuffer.subscribe()) }); multibuffer.update(cx, |multibuffer, cx| { - multibuffer.add_change_set(change_set, cx); + multibuffer.add_diff(diff, cx); multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); }); @@ -498,11 +496,11 @@ fn test_editing_text_in_diff_hunks(cx: &mut TestAppContext) { let base_text = "one\ntwo\nfour\nfive\nsix\nseven\n"; let text = "one\ntwo\nTHREE\nfour\nfive\nseven\n"; let buffer = cx.new(|cx| Buffer::local(text, cx)); - let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(&base_text, &buffer, cx)); + let diff = cx.new(|cx| BufferDiff::new_with_base_text(&base_text, &buffer, cx)); let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx)); let (mut snapshot, mut subscription) = multibuffer.update(cx, |multibuffer, cx| { - multibuffer.add_change_set(change_set.clone(), cx); + multibuffer.add_diff(diff.clone(), cx); (multibuffer.snapshot(cx), multibuffer.subscribe()) }); @@ -979,10 +977,10 @@ fn test_empty_diff_excerpt(cx: &mut TestAppContext) { let buffer = cx.new(|cx| Buffer::local("", cx)); let base_text = "a\nb\nc"; - let change_set = cx.new(|cx| 
BufferChangeSet::new_with_base_text(base_text, &buffer, cx)); + let diff = cx.new(|cx| BufferDiff::new_with_base_text(base_text, &buffer, cx)); multibuffer.update(cx, |multibuffer, cx| { multibuffer.set_all_diff_hunks_expanded(cx); - multibuffer.add_change_set(change_set.clone(), cx); + multibuffer.add_diff(diff.clone(), cx); multibuffer.push_excerpts( buffer.clone(), [ExcerptRange { @@ -1018,8 +1016,8 @@ fn test_empty_diff_excerpt(cx: &mut TestAppContext) { buffer.update(cx, |buffer, cx| { buffer.edit([(0..0, "a\nb\nc")], None, cx); - change_set.update(cx, |change_set, cx| { - change_set.recalculate_diff_sync(buffer.snapshot().text, cx); + diff.update(cx, |diff, cx| { + diff.recalculate_diff_sync(buffer.snapshot().text, cx); }); assert_eq!(buffer.text(), "a\nb\nc") }); @@ -1030,8 +1028,8 @@ fn test_empty_diff_excerpt(cx: &mut TestAppContext) { buffer.update(cx, |buffer, cx| { buffer.undo(cx); - change_set.update(cx, |change_set, cx| { - change_set.recalculate_diff_sync(buffer.snapshot().text, cx); + diff.update(cx, |diff, cx| { + diff.recalculate_diff_sync(buffer.snapshot().text, cx); }); assert_eq!(buffer.text(), "") }); @@ -1273,12 +1271,12 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) { ); let buffer = cx.new(|cx| Buffer::local(text, cx)); - let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx)); + let diff = cx.new(|cx| BufferDiff::new_with_base_text(base_text, &buffer, cx)); cx.run_until_parked(); let multibuffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::singleton(buffer.clone(), cx); - multibuffer.add_change_set(change_set.clone(), cx); + multibuffer.add_diff(diff.clone(), cx); multibuffer }); @@ -1463,8 +1461,8 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) { assert_line_indents(&snapshot); // Recalculate the diff, changing the first diff hunk. 
- change_set.update(cx, |change_set, cx| { - change_set.recalculate_diff_sync(buffer.read(cx).text_snapshot(), cx); + diff.update(cx, |diff, cx| { + diff.recalculate_diff_sync(buffer.read(cx).text_snapshot(), cx); }); cx.run_until_parked(); assert_new_snapshot( @@ -1516,12 +1514,12 @@ fn test_repeatedly_expand_a_diff_hunk(cx: &mut TestAppContext) { ); let buffer = cx.new(|cx| Buffer::local(text, cx)); - let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx)); + let diff = cx.new(|cx| BufferDiff::new_with_base_text(base_text, &buffer, cx)); cx.run_until_parked(); let multibuffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::singleton(buffer.clone(), cx); - multibuffer.add_change_set(change_set.clone(), cx); + multibuffer.add_diff(diff.clone(), cx); multibuffer }); @@ -1918,8 +1916,8 @@ fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) { let buffer_1 = cx.new(|cx| Buffer::local(text_1, cx)); let buffer_2 = cx.new(|cx| Buffer::local(text_2, cx)); - let change_set_1 = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text_1, &buffer_1, cx)); - let change_set_2 = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text_2, &buffer_2, cx)); + let diff_1 = cx.new(|cx| BufferDiff::new_with_base_text(base_text_1, &buffer_1, cx)); + let diff_2 = cx.new(|cx| BufferDiff::new_with_base_text(base_text_2, &buffer_2, cx)); cx.run_until_parked(); let multibuffer = cx.new(|cx| { @@ -1940,8 +1938,8 @@ fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) { }], cx, ); - multibuffer.add_change_set(change_set_1.clone(), cx); - multibuffer.add_change_set(change_set_2.clone(), cx); + multibuffer.add_diff(diff_1.clone(), cx); + multibuffer.add_diff(diff_2.clone(), cx); multibuffer }); @@ -2001,11 +1999,11 @@ fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) { let id_1 = buffer_1.read_with(cx, |buffer, _| buffer.remote_id()); let id_2 = buffer_2.read_with(cx, |buffer, _| buffer.remote_id()); - let base_id_1 = change_set_1.read_with(cx, |change_set, _| { - change_set.base_text.as_ref().unwrap().remote_id() + let base_id_1 = diff_1.read_with(cx, |diff, _| { + diff.snapshot.base_text.as_ref().unwrap().remote_id() }); - let base_id_2 = change_set_2.read_with(cx, |change_set, _| { - change_set.base_text.as_ref().unwrap().remote_id() + let base_id_2 = diff_2.read_with(cx, |diff, _| { + diff.snapshot.base_text.as_ref().unwrap().remote_id() }); let buffer_lines = (0..=snapshot.max_row().0) @@ -2101,7 +2099,7 @@ fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) { #[derive(Default)] struct ReferenceMultibuffer { excerpts: Vec, - change_sets: HashMap>, + diffs: HashMap>, } #[derive(Debug)] @@ -2190,10 +2188,10 @@ impl ReferenceMultibuffer { .unwrap(); let buffer = excerpt.buffer.read(cx).snapshot(); let buffer_id = buffer.remote_id(); - let Some(change_set) = self.change_sets.get(&buffer_id) else { + let Some(diff) = self.diffs.get(&buffer_id) else { return; }; - let diff = change_set.read(cx).diff_to_buffer.clone(); + let diff = diff.read(cx).snapshot.clone(); let excerpt_range = excerpt.range.to_offset(&buffer); for hunk in diff.hunks_intersecting_range(range, &buffer) { let hunk_range = hunk.buffer_range.to_offset(&buffer); @@ -2227,9 +2225,9 @@ impl ReferenceMultibuffer { excerpt_boundary_rows.insert(MultiBufferRow(text.matches('\n').count() as u32)); let buffer = excerpt.buffer.read(cx); let buffer_range = excerpt.range.to_offset(buffer); - let change_set = self.change_sets.get(&buffer.remote_id()).unwrap().read(cx); 
- let diff = change_set.diff_to_buffer.clone(); - let base_buffer = change_set.base_text.as_ref().unwrap(); + let diff = self.diffs.get(&buffer.remote_id()).unwrap().read(cx); + let diff = diff.snapshot.clone(); + let base_buffer = diff.base_text.as_ref().unwrap(); let mut offset = buffer_range.start; let mut hunks = diff @@ -2367,12 +2365,7 @@ impl ReferenceMultibuffer { let buffer = excerpt.buffer.read(cx).snapshot(); let excerpt_range = excerpt.range.to_offset(&buffer); let buffer_id = buffer.remote_id(); - let diff = &self - .change_sets - .get(&buffer_id) - .unwrap() - .read(cx) - .diff_to_buffer; + let diff = &self.diffs.get(&buffer_id).unwrap().read(cx).snapshot; let mut hunks = diff.hunks_in_row_range(0..u32::MAX, &buffer).peekable(); excerpt.expanded_diff_hunks.retain(|hunk_anchor| { if !hunk_anchor.is_valid(&buffer) { @@ -2396,9 +2389,9 @@ impl ReferenceMultibuffer { } } - fn add_change_set(&mut self, change_set: Entity, cx: &mut App) { - let buffer_id = change_set.read(cx).buffer_id; - self.change_sets.insert(buffer_id, change_set); + fn add_diff(&mut self, diff: Entity, cx: &mut App) { + let buffer_id = diff.read(cx).buffer_id; + self.diffs.insert(buffer_id, diff); } } @@ -2528,16 +2521,16 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { multibuffer.update(cx, |multibuffer, cx| { for buffer in multibuffer.all_buffers() { let snapshot = buffer.read(cx).snapshot(); - let _ = multibuffer - .change_set_for(snapshot.remote_id()) - .unwrap() - .update(cx, |change_set, cx| { + let _ = multibuffer.diff_for(snapshot.remote_id()).unwrap().update( + cx, + |diff, cx| { log::info!( "recalculating diff for buffer {:?}", snapshot.remote_id(), ); - change_set.recalculate_diff_sync(snapshot.text, cx); - }); + diff.recalculate_diff_sync(snapshot.text, cx); + }, + ); } reference.diffs_updated(cx); needs_diff_calculation = false; @@ -2550,12 +2543,11 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { .collect::(); let buffer = cx.new(|cx| Buffer::local(base_text.clone(), cx)); - let change_set = - cx.new(|cx| BufferChangeSet::new_with_base_text(&base_text, &buffer, cx)); + let diff = cx.new(|cx| BufferDiff::new_with_base_text(&base_text, &buffer, cx)); multibuffer.update(cx, |multibuffer, cx| { - reference.add_change_set(change_set.clone(), cx); - multibuffer.add_change_set(change_set, cx) + reference.add_diff(diff.clone(), cx); + multibuffer.add_diff(diff, cx) }); buffers.push(buffer); buffers.last().unwrap() diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index 5149a818cf55787e20f2f26af00f49bd0980187c..bb96d1b518cc6821068533a2f45022207770fd3e 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -30,6 +30,7 @@ async-trait.workspace = true client.workspace = true clock.workspace = true collections.workspace = true +diff.workspace = true fs.workspace = true futures.workspace = true fuzzy.workspace = true @@ -77,6 +78,7 @@ fancy-regex.workspace = true [dev-dependencies] client = { workspace = true, features = ["test-support"] } collections = { workspace = true, features = ["test-support"] } +diff = { workspace = true, features = ["test-support"] } env_logger.workspace = true fs = { workspace = true, features = ["test-support"] } git2.workspace = true diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index 82bfca95b7216f1c81d3e9f6f7ac93e8dc90b99f..57702120d9f9ac12f09b8bbbba4a2e63b0b8e9e4 100644 --- a/crates/project/src/buffer_store.rs +++ 
b/crates/project/src/buffer_store.rs @@ -8,13 +8,10 @@ use ::git::{parse_git_remote_url, BuildPermalinkParams, GitHostingProviderRegist use anyhow::{anyhow, bail, Context as _, Result}; use client::Client; use collections::{hash_map, HashMap, HashSet}; +use diff::{BufferDiff, BufferDiffEvent, BufferDiffSnapshot}; use fs::Fs; -use futures::{ - channel::oneshot, - future::{OptionFuture, Shared}, - Future, FutureExt as _, StreamExt, -}; -use git::{blame::Blame, diff::BufferDiff, repository::RepoPath}; +use futures::{channel::oneshot, future::Shared, Future, FutureExt as _, StreamExt}; +use git::{blame::Blame, repository::RepoPath}; use gpui::{ App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity, }; @@ -38,12 +35,12 @@ use std::{ sync::Arc, time::Instant, }; -use text::{BufferId, Rope}; +use text::BufferId; use util::{debug_panic, maybe, ResultExt as _, TryFutureExt}; use worktree::{File, PathChange, ProjectEntryId, UpdatedGitRepositoriesSet, Worktree, WorktreeId}; #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -enum ChangeSetKind { +enum DiffKind { Unstaged, Uncommitted, } @@ -54,10 +51,8 @@ pub struct BufferStore { #[allow(clippy::type_complexity)] loading_buffers: HashMap, Arc>>>>, #[allow(clippy::type_complexity)] - loading_change_sets: HashMap< - (BufferId, ChangeSetKind), - Shared, Arc>>>, - >, + loading_diffs: + HashMap<(BufferId, DiffKind), Shared, Arc>>>>, worktree_store: Entity, opened_buffers: HashMap, downstream_client: Option<(AnyProtoClient, u64)>, @@ -67,14 +62,14 @@ pub struct BufferStore { #[derive(Hash, Eq, PartialEq, Clone)] struct SharedBuffer { buffer: Entity, - change_set: Option>, + diff: Option>, lsp_handle: Option, } #[derive(Default)] -struct BufferChangeSetState { - unstaged_changes: Option>, - uncommitted_changes: Option>, +struct BufferDiffState { + unstaged_diff: Option>, + uncommitted_diff: Option>, recalculate_diff_task: Option>>, language: Option>, language_registry: Option>, @@ -99,21 +94,19 @@ enum DiffBasesChange { SetBoth(Option), } -impl BufferChangeSetState { +impl BufferDiffState { fn buffer_language_changed(&mut self, buffer: Entity, cx: &mut Context) { self.language = buffer.read(cx).language().cloned(); self.language_changed = true; let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx); } - fn unstaged_changes(&self) -> Option> { - self.unstaged_changes.as_ref().and_then(|set| set.upgrade()) + fn unstaged_diff(&self) -> Option> { + self.unstaged_diff.as_ref().and_then(|set| set.upgrade()) } - fn uncommitted_changes(&self) -> Option> { - self.uncommitted_changes - .as_ref() - .and_then(|set| set.upgrade()) + fn uncommitted_diff(&self) -> Option> { + self.uncommitted_diff.as_ref().and_then(|set| set.upgrade()) } fn handle_base_texts_updated( @@ -199,8 +192,8 @@ impl BufferChangeSetState { let language = self.language.clone(); let language_registry = self.language_registry.clone(); - let unstaged_changes = self.unstaged_changes(); - let uncommitted_changes = self.uncommitted_changes(); + let unstaged_diff = self.unstaged_diff(); + let uncommitted_diff = self.uncommitted_diff(); let head = self.head_text.clone(); let index = self.index_text.clone(); let index_changed = self.index_changed; @@ -212,90 +205,71 @@ impl BufferChangeSetState { _ => false, }; self.recalculate_diff_task = Some(cx.spawn(|this, mut cx| async move { - if let Some(unstaged_changes) = &unstaged_changes { - let staged_snapshot = if index_changed || language_changed { - let staged_snapshot = cx.update(|cx| { - 
index.as_ref().map(|head| { - language::Buffer::build_snapshot( - Rope::from(head.as_str()), - language.clone(), - language_registry.clone(), + if let Some(unstaged_diff) = &unstaged_diff { + let snapshot = if index_changed || language_changed { + cx.update(|cx| { + BufferDiffSnapshot::build( + buffer.clone(), + index, + language.clone(), + language_registry.clone(), + cx, + ) + })? + .await + } else { + unstaged_diff + .read_with(&cx, |changes, cx| { + BufferDiffSnapshot::build_with_base_buffer( + buffer.clone(), + index, + changes.snapshot.base_text.clone(), cx, ) - }) - })?; - cx.background_executor() - .spawn(OptionFuture::from(staged_snapshot)) - } else { - Task::ready( - unstaged_changes - .read_with(&cx, |change_set, _| change_set.base_text.clone())?, - ) + })? + .await }; - let diff = - cx.background_executor().spawn({ - let buffer = buffer.clone(); - async move { - BufferDiff::build(index.as_ref().map(|index| index.as_str()), &buffer) - } - }); - - let (staged_snapshot, diff) = futures::join!(staged_snapshot, diff); - - unstaged_changes.update(&mut cx, |unstaged_changes, cx| { - unstaged_changes.set_state(staged_snapshot.clone(), diff, &buffer, cx); + unstaged_diff.update(&mut cx, |unstaged_diff, cx| { + unstaged_diff.set_state(snapshot, &buffer, cx); if language_changed { - cx.emit(BufferChangeSetEvent::LanguageChanged); + cx.emit(BufferDiffEvent::LanguageChanged); } })?; } - if let Some(uncommitted_changes) = &uncommitted_changes { - let (snapshot, diff) = if let (Some(unstaged_changes), true) = - (&unstaged_changes, index_matches_head) - { - unstaged_changes.read_with(&cx, |change_set, _| { - ( - change_set.base_text.clone(), - change_set.diff_to_buffer.clone(), - ) - })? - } else { - let committed_snapshot = if head_changed || language_changed { - let committed_snapshot = cx.update(|cx| { - head.as_ref().map(|head| { - language::Buffer::build_snapshot( - Rope::from(head.as_str()), - language.clone(), - language_registry.clone(), + if let Some(uncommitted_diff) = &uncommitted_diff { + let snapshot = + if let (Some(unstaged_diff), true) = (&unstaged_diff, index_matches_head) { + unstaged_diff.read_with(&cx, |diff, _| diff.snapshot.clone())? + } else if head_changed || language_changed { + cx.update(|cx| { + BufferDiffSnapshot::build( + buffer.clone(), + head, + language.clone(), + language_registry.clone(), + cx, + ) + })? + .await + } else { + uncommitted_diff + .read_with(&cx, |changes, cx| { + BufferDiffSnapshot::build_with_base_buffer( + buffer.clone(), + head, + changes.snapshot.base_text.clone(), cx, ) - }) - })?; - cx.background_executor() - .spawn(OptionFuture::from(committed_snapshot)) - } else { - Task::ready( - uncommitted_changes - .read_with(&cx, |change_set, _| change_set.base_text.clone())?, - ) + })? 
+ .await }; - let diff = cx.background_executor().spawn({ - let buffer = buffer.clone(); - let head = head.clone(); - async move { - BufferDiff::build(head.as_ref().map(|head| head.as_str()), &buffer) - } - }); - futures::join!(committed_snapshot, diff) - }; - - uncommitted_changes.update(&mut cx, |change_set, cx| { - change_set.set_state(snapshot, diff, &buffer, cx); + uncommitted_diff.update(&mut cx, |diff, cx| { + diff.set_state(snapshot, &buffer, cx); if language_changed { - cx.emit(BufferChangeSetEvent::LanguageChanged); + cx.emit(BufferDiffEvent::LanguageChanged); } })?; } @@ -317,28 +291,6 @@ impl BufferChangeSetState { } } -pub struct BufferChangeSet { - pub buffer_id: BufferId, - pub base_text: Option, - pub diff_to_buffer: BufferDiff, - pub unstaged_change_set: Option>, -} - -impl std::fmt::Debug for BufferChangeSet { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("BufferChangeSet") - .field("buffer_id", &self.buffer_id) - .field("base_text", &self.base_text.as_ref().map(|s| s.text())) - .field("diff_to_buffer", &self.diff_to_buffer) - .finish() - } -} - -pub enum BufferChangeSetEvent { - DiffChanged { changed_range: Range }, - LanguageChanged, -} - enum BufferStoreState { Local(LocalBufferStore), Remote(RemoteBufferStore), @@ -364,7 +316,7 @@ struct LocalBufferStore { enum OpenBuffer { Complete { buffer: WeakEntity, - change_set_state: Entity, + diff_state: Entity, }, Operations(Vec), } @@ -384,12 +336,12 @@ pub struct ProjectTransaction(pub HashMap, language::Transaction> impl EventEmitter for BufferStore {} impl RemoteBufferStore { - fn open_unstaged_changes(&self, buffer_id: BufferId, cx: &App) -> Task>> { + fn open_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Task>> { let project_id = self.project_id; let client = self.upstream_client.clone(); cx.background_executor().spawn(async move { let response = client - .request(proto::OpenUnstagedChanges { + .request(proto::OpenUnstagedDiff { project_id, buffer_id: buffer_id.to_proto(), }) @@ -398,18 +350,18 @@ impl RemoteBufferStore { }) } - fn open_uncommitted_changes( + fn open_uncommitted_diff( &self, buffer_id: BufferId, cx: &App, ) -> Task> { - use proto::open_uncommitted_changes_response::Mode; + use proto::open_uncommitted_diff_response::Mode; let project_id = self.project_id; let client = self.upstream_client.clone(); cx.background_executor().spawn(async move { let response = client - .request(proto::OpenUncommittedChanges { + .request(proto::OpenUncommittedDiff { project_id, buffer_id: buffer_id.to_proto(), }) @@ -839,13 +791,9 @@ impl LocalBufferStore { ) { debug_assert!(worktree_handle.read(cx).is_local()); - let mut change_set_state_updates = Vec::new(); + let mut diff_state_updates = Vec::new(); for buffer in this.opened_buffers.values() { - let OpenBuffer::Complete { - buffer, - change_set_state, - } = buffer - else { + let OpenBuffer::Complete { buffer, diff_state } = buffer else { continue; }; let Some(buffer) = buffer.upgrade() else { @@ -858,22 +806,22 @@ impl LocalBufferStore { if file.worktree != worktree_handle { continue; } - let change_set_state = change_set_state.read(cx); + let diff_state = diff_state.read(cx); if changed_repos .iter() .any(|(work_dir, _)| file.path.starts_with(work_dir)) { let snapshot = buffer.text_snapshot(); - change_set_state_updates.push(( + diff_state_updates.push(( snapshot.clone(), file.path.clone(), - change_set_state - .unstaged_changes + diff_state + .unstaged_diff .as_ref() .and_then(|set| set.upgrade()) .is_some(), - 
change_set_state - .uncommitted_changes + diff_state + .uncommitted_diff .as_ref() .and_then(|set| set.upgrade()) .is_some(), @@ -881,7 +829,7 @@ impl LocalBufferStore { } } - if change_set_state_updates.is_empty() { + if diff_state_updates.is_empty() { return; } @@ -891,7 +839,7 @@ impl LocalBufferStore { let diff_bases_changes_by_buffer = cx .background_executor() .spawn(async move { - change_set_state_updates + diff_state_updates .into_iter() .filter_map( |(buffer_snapshot, path, needs_staged_text, needs_committed_text)| { @@ -934,9 +882,8 @@ impl LocalBufferStore { this.update(&mut cx, |this, cx| { for (buffer_snapshot, diff_bases_change) in diff_bases_changes_by_buffer { - let Some(OpenBuffer::Complete { - change_set_state, .. - }) = this.opened_buffers.get_mut(&buffer_snapshot.remote_id()) + let Some(OpenBuffer::Complete { diff_state, .. }) = + this.opened_buffers.get_mut(&buffer_snapshot.remote_id()) else { continue; }; @@ -944,7 +891,7 @@ impl LocalBufferStore { continue; }; - change_set_state.update(cx, |change_set_state, cx| { + diff_state.update(cx, |diff_state, cx| { use proto::update_diff_bases::Mode; if let Some((client, project_id)) = this.downstream_client.as_ref() { @@ -972,11 +919,8 @@ impl LocalBufferStore { client.send(message).log_err(); } - let _ = change_set_state.diff_bases_changed( - buffer_snapshot, - diff_bases_change, - cx, - ); + let _ = + diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx); }); } }) @@ -1282,8 +1226,8 @@ impl BufferStore { client.add_entity_request_handler(Self::handle_blame_buffer); client.add_entity_request_handler(Self::handle_reload_buffers); client.add_entity_request_handler(Self::handle_get_permalink_to_line); - client.add_entity_request_handler(Self::handle_open_unstaged_changes); - client.add_entity_request_handler(Self::handle_open_uncommitted_changes); + client.add_entity_request_handler(Self::handle_open_unstaged_diff); + client.add_entity_request_handler(Self::handle_open_uncommitted_diff); client.add_entity_message_handler(Self::handle_update_diff_bases); } @@ -1305,7 +1249,7 @@ impl BufferStore { opened_buffers: Default::default(), shared_buffers: Default::default(), loading_buffers: Default::default(), - loading_change_sets: Default::default(), + loading_diffs: Default::default(), worktree_store, } } @@ -1328,7 +1272,7 @@ impl BufferStore { downstream_client: None, opened_buffers: Default::default(), loading_buffers: Default::default(), - loading_change_sets: Default::default(), + loading_diffs: Default::default(), shared_buffers: Default::default(), worktree_store, } @@ -1401,33 +1345,30 @@ impl BufferStore { .spawn(async move { task.await.map_err(|e| anyhow!("{e}")) }) } - pub fn open_unstaged_changes( + pub fn open_unstaged_diff( &mut self, buffer: Entity, cx: &mut Context, - ) -> Task>> { + ) -> Task>> { let buffer_id = buffer.read(cx).remote_id(); - if let Some(change_set) = self.get_unstaged_changes(buffer_id, cx) { - return Task::ready(Ok(change_set)); + if let Some(diff) = self.get_unstaged_diff(buffer_id, cx) { + return Task::ready(Ok(diff)); } - let task = match self - .loading_change_sets - .entry((buffer_id, ChangeSetKind::Unstaged)) - { + let task = match self.loading_diffs.entry((buffer_id, DiffKind::Unstaged)) { hash_map::Entry::Occupied(e) => e.get().clone(), hash_map::Entry::Vacant(entry) => { let staged_text = match &self.state { BufferStoreState::Local(this) => this.load_staged_text(&buffer, cx), - BufferStoreState::Remote(this) => this.open_unstaged_changes(buffer_id, cx), + 
BufferStoreState::Remote(this) => this.open_unstaged_diff(buffer_id, cx), }; entry .insert( cx.spawn(move |this, cx| async move { - Self::open_change_set_internal( + Self::open_diff_internal( this, - ChangeSetKind::Unstaged, + DiffKind::Unstaged, staged_text.await.map(DiffBasesChange::SetIndex), buffer, cx, @@ -1445,20 +1386,17 @@ impl BufferStore { .spawn(async move { task.await.map_err(|e| anyhow!("{e}")) }) } - pub fn open_uncommitted_changes( + pub fn open_uncommitted_diff( &mut self, buffer: Entity, cx: &mut Context, - ) -> Task>> { + ) -> Task>> { let buffer_id = buffer.read(cx).remote_id(); - if let Some(change_set) = self.get_uncommitted_changes(buffer_id, cx) { - return Task::ready(Ok(change_set)); + if let Some(diff) = self.get_uncommitted_diff(buffer_id, cx) { + return Task::ready(Ok(diff)); } - let task = match self - .loading_change_sets - .entry((buffer_id, ChangeSetKind::Uncommitted)) - { + let task = match self.loading_diffs.entry((buffer_id, DiffKind::Uncommitted)) { hash_map::Entry::Occupied(e) => e.get().clone(), hash_map::Entry::Vacant(entry) => { let changes = match &self.state { @@ -1479,15 +1417,15 @@ impl BufferStore { Ok(diff_bases_change) }) } - BufferStoreState::Remote(this) => this.open_uncommitted_changes(buffer_id, cx), + BufferStoreState::Remote(this) => this.open_uncommitted_diff(buffer_id, cx), }; entry .insert( cx.spawn(move |this, cx| async move { - Self::open_change_set_internal( + Self::open_diff_internal( this, - ChangeSetKind::Uncommitted, + DiffKind::Uncommitted, changes.await, buffer, cx, @@ -1505,30 +1443,18 @@ impl BufferStore { .spawn(async move { task.await.map_err(|e| anyhow!("{e}")) }) } - #[cfg(any(test, feature = "test-support"))] - pub fn set_unstaged_change_set( - &mut self, - buffer_id: BufferId, - change_set: Entity, - ) { - self.loading_change_sets.insert( - (buffer_id, ChangeSetKind::Unstaged), - Task::ready(Ok(change_set)).shared(), - ); - } - - async fn open_change_set_internal( + async fn open_diff_internal( this: WeakEntity, - kind: ChangeSetKind, + kind: DiffKind, texts: Result, buffer: Entity, mut cx: AsyncApp, - ) -> Result> { + ) -> Result> { let diff_bases_change = match texts { Err(e) => { this.update(&mut cx, |this, cx| { let buffer_id = buffer.read(cx).remote_id(); - this.loading_change_sets.remove(&(buffer_id, kind)); + this.loading_diffs.remove(&(buffer_id, kind)); })?; return Err(e); } @@ -1537,15 +1463,14 @@ impl BufferStore { this.update(&mut cx, |this, cx| { let buffer_id = buffer.read(cx).remote_id(); - this.loading_change_sets.remove(&(buffer_id, kind)); + this.loading_diffs.remove(&(buffer_id, kind)); - if let Some(OpenBuffer::Complete { - change_set_state, .. - }) = this.opened_buffers.get_mut(&buffer.read(cx).remote_id()) + if let Some(OpenBuffer::Complete { diff_state, .. 
}) = + this.opened_buffers.get_mut(&buffer.read(cx).remote_id()) { - change_set_state.update(cx, |change_set_state, cx| { + diff_state.update(cx, |diff_state, cx| { let buffer_id = buffer.read(cx).remote_id(); - change_set_state.buffer_subscription.get_or_insert_with(|| { + diff_state.buffer_subscription.get_or_insert_with(|| { cx.subscribe(&buffer, |this, buffer, event, cx| match event { BufferEvent::LanguageChanged => { this.buffer_language_changed(buffer, cx) @@ -1554,47 +1479,41 @@ impl BufferStore { }) }); - let change_set = cx.new(|cx| BufferChangeSet { + let diff = cx.new(|cx| BufferDiff { buffer_id, - base_text: None, - diff_to_buffer: BufferDiff::new(&buffer.read(cx).text_snapshot()), - unstaged_change_set: None, + snapshot: BufferDiffSnapshot::new(&buffer.read(cx).text_snapshot()), + unstaged_diff: None, }); match kind { - ChangeSetKind::Unstaged => { - change_set_state.unstaged_changes = Some(change_set.downgrade()) - } - ChangeSetKind::Uncommitted => { - let unstaged_change_set = - if let Some(change_set) = change_set_state.unstaged_changes() { - change_set - } else { - let unstaged_change_set = cx.new(|cx| BufferChangeSet { - buffer_id, - base_text: None, - diff_to_buffer: BufferDiff::new( - &buffer.read(cx).text_snapshot(), - ), - unstaged_change_set: None, - }); - change_set_state.unstaged_changes = - Some(unstaged_change_set.downgrade()); - unstaged_change_set - }; + DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()), + DiffKind::Uncommitted => { + let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() { + diff + } else { + let unstaged_diff = cx.new(|cx| BufferDiff { + buffer_id, + snapshot: BufferDiffSnapshot::new( + &buffer.read(cx).text_snapshot(), + ), + unstaged_diff: None, + }); + diff_state.unstaged_diff = Some(unstaged_diff.downgrade()); + unstaged_diff + }; - change_set.update(cx, |change_set, _| { - change_set.unstaged_change_set = Some(unstaged_change_set); + diff.update(cx, |diff, _| { + diff.unstaged_diff = Some(unstaged_diff); }); - change_set_state.uncommitted_changes = Some(change_set.downgrade()) + diff_state.uncommitted_diff = Some(diff.downgrade()) } }; let buffer = buffer.read(cx).text_snapshot(); - let rx = change_set_state.diff_bases_changed(buffer, diff_bases_change, cx); + let rx = diff_state.diff_bases_changed(buffer, diff_bases_change, cx); Ok(async move { rx.await.ok(); - Ok(change_set) + Ok(diff) }) }) } else { @@ -1807,7 +1726,7 @@ impl BufferStore { let is_remote = buffer.read(cx).replica_id() != 0; let open_buffer = OpenBuffer::Complete { buffer: buffer.downgrade(), - change_set_state: cx.new(|_| BufferChangeSetState::default()), + diff_state: cx.new(|_| BufferDiffState::default()), }; let handle = cx.entity().downgrade(); @@ -1888,39 +1807,21 @@ impl BufferStore { }) } - pub fn get_unstaged_changes( - &self, - buffer_id: BufferId, - cx: &App, - ) -> Option> { - if let OpenBuffer::Complete { - change_set_state, .. - } = self.opened_buffers.get(&buffer_id)? - { - change_set_state - .read(cx) - .unstaged_changes - .as_ref()? - .upgrade() + pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option> { + if let OpenBuffer::Complete { diff_state, .. } = self.opened_buffers.get(&buffer_id)? { + diff_state.read(cx).unstaged_diff.as_ref()?.upgrade() } else { None } } - pub fn get_uncommitted_changes( + pub fn get_uncommitted_diff( &self, buffer_id: BufferId, cx: &App, - ) -> Option> { - if let OpenBuffer::Complete { - change_set_state, .. - } = self.opened_buffers.get(&buffer_id)? 
- { - change_set_state - .read(cx) - .uncommitted_changes - .as_ref()? - .upgrade() + ) -> Option> { + if let OpenBuffer::Complete { diff_state, .. } = self.opened_buffers.get(&buffer_id)? { + diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade() } else { None } @@ -2040,13 +1941,12 @@ impl BufferStore { ) -> impl Future { let mut futures = Vec::new(); for buffer in buffers { - if let Some(OpenBuffer::Complete { - change_set_state, .. - }) = self.opened_buffers.get_mut(&buffer.read(cx).remote_id()) + if let Some(OpenBuffer::Complete { diff_state, .. }) = + self.opened_buffers.get_mut(&buffer.read(cx).remote_id()) { let buffer = buffer.read(cx).text_snapshot(); - futures.push(change_set_state.update(cx, |change_set_state, cx| { - change_set_state.recalculate_diffs(buffer, cx) + futures.push(diff_state.update(cx, |diff_state, cx| { + diff_state.recalculate_diffs(buffer, cx) })); } } @@ -2156,7 +2056,7 @@ impl BufferStore { .entry(buffer_id) .or_insert_with(|| SharedBuffer { buffer: buffer.clone(), - change_set: None, + diff: None, lsp_handle: None, }); @@ -2461,16 +2361,16 @@ impl BufferStore { }) } - pub async fn handle_open_unstaged_changes( + pub async fn handle_open_unstaged_diff( this: Entity, - request: TypedEnvelope, + request: TypedEnvelope, mut cx: AsyncApp, - ) -> Result { + ) -> Result { let buffer_id = BufferId::new(request.payload.buffer_id)?; - let change_set = this + let diff = this .update(&mut cx, |this, cx| { let buffer = this.get(buffer_id)?; - Some(this.open_unstaged_changes(buffer, cx)) + Some(this.open_unstaged_diff(buffer, cx)) })? .ok_or_else(|| anyhow!("no such buffer"))? .await?; @@ -2481,25 +2381,25 @@ impl BufferStore { .or_default(); debug_assert!(shared_buffers.contains_key(&buffer_id)); if let Some(shared) = shared_buffers.get_mut(&buffer_id) { - shared.change_set = Some(change_set.clone()); + shared.diff = Some(diff.clone()); } })?; - let staged_text = change_set.read_with(&cx, |change_set, _| { - change_set.base_text.as_ref().map(|buffer| buffer.text()) + let staged_text = diff.read_with(&cx, |diff, _| { + diff.snapshot.base_text.as_ref().map(|buffer| buffer.text()) })?; - Ok(proto::OpenUnstagedChangesResponse { staged_text }) + Ok(proto::OpenUnstagedDiffResponse { staged_text }) } - pub async fn handle_open_uncommitted_changes( + pub async fn handle_open_uncommitted_diff( this: Entity, - request: TypedEnvelope, + request: TypedEnvelope, mut cx: AsyncApp, - ) -> Result { + ) -> Result { let buffer_id = BufferId::new(request.payload.buffer_id)?; - let change_set = this + let diff = this .update(&mut cx, |this, cx| { let buffer = this.get(buffer_id)?; - Some(this.open_uncommitted_changes(buffer, cx)) + Some(this.open_uncommitted_diff(buffer, cx)) })? .ok_or_else(|| anyhow!("no such buffer"))? 
.await?; @@ -2510,21 +2410,21 @@ impl BufferStore { .or_default(); debug_assert!(shared_buffers.contains_key(&buffer_id)); if let Some(shared) = shared_buffers.get_mut(&buffer_id) { - shared.change_set = Some(change_set.clone()); + shared.diff = Some(diff.clone()); } })?; - change_set.read_with(&cx, |change_set, cx| { - use proto::open_uncommitted_changes_response::Mode; + diff.read_with(&cx, |diff, cx| { + use proto::open_uncommitted_diff_response::Mode; - let staged_buffer = change_set - .unstaged_change_set + let staged_buffer = diff + .unstaged_diff .as_ref() - .and_then(|change_set| change_set.read(cx).base_text.as_ref()); + .and_then(|diff| diff.read(cx).snapshot.base_text.as_ref()); let mode; let staged_text; let committed_text; - if let Some(committed_buffer) = &change_set.base_text { + if let Some(committed_buffer) = &diff.snapshot.base_text { committed_text = Some(committed_buffer.text()); if let Some(staged_buffer) = staged_buffer { if staged_buffer.remote_id() == committed_buffer.remote_id() { @@ -2544,7 +2444,7 @@ impl BufferStore { staged_text = staged_buffer.as_ref().map(|buffer| buffer.text()); } - proto::OpenUncommittedChangesResponse { + proto::OpenUncommittedDiffResponse { committed_text, staged_text, mode: mode.into(), @@ -2559,15 +2459,13 @@ impl BufferStore { ) -> Result<()> { let buffer_id = BufferId::new(request.payload.buffer_id)?; this.update(&mut cx, |this, cx| { - if let Some(OpenBuffer::Complete { - change_set_state, - buffer, - }) = this.opened_buffers.get_mut(&buffer_id) + if let Some(OpenBuffer::Complete { diff_state, buffer }) = + this.opened_buffers.get_mut(&buffer_id) { if let Some(buffer) = buffer.upgrade() { let buffer = buffer.read(cx).text_snapshot(); - change_set_state.update(cx, |change_set_state, cx| { - change_set_state.handle_base_texts_updated(buffer, request.payload, cx); + diff_state.update(cx, |diff_state, cx| { + diff_state.handle_base_texts_updated(buffer, request.payload, cx); }) } } @@ -2628,7 +2526,7 @@ impl BufferStore { buffer_id, SharedBuffer { buffer: buffer.clone(), - change_set: None, + diff: None, lsp_handle: None, }, ); @@ -2783,126 +2681,6 @@ impl BufferStore { } } -impl EventEmitter for BufferChangeSet {} - -impl BufferChangeSet { - fn set_state( - &mut self, - base_text: Option, - diff: BufferDiff, - buffer: &text::BufferSnapshot, - cx: &mut Context, - ) { - if let Some(base_text) = base_text.as_ref() { - let changed_range = if Some(base_text.remote_id()) - != self.base_text.as_ref().map(|buffer| buffer.remote_id()) - { - Some(text::Anchor::MIN..text::Anchor::MAX) - } else { - diff.compare(&self.diff_to_buffer, buffer) - }; - if let Some(changed_range) = changed_range { - cx.emit(BufferChangeSetEvent::DiffChanged { changed_range }); - } - } - self.base_text = base_text; - self.diff_to_buffer = diff; - } - - pub fn diff_hunks_intersecting_range<'a>( - &'a self, - range: Range, - buffer_snapshot: &'a text::BufferSnapshot, - ) -> impl 'a + Iterator { - self.diff_to_buffer - .hunks_intersecting_range(range, buffer_snapshot) - } - - pub fn diff_hunks_intersecting_range_rev<'a>( - &'a self, - range: Range, - buffer_snapshot: &'a text::BufferSnapshot, - ) -> impl 'a + Iterator { - self.diff_to_buffer - .hunks_intersecting_range_rev(range, buffer_snapshot) - } - - /// Used in cases where the change set isn't derived from git. 
- pub fn set_base_text( - &mut self, - base_buffer: Entity, - buffer: text::BufferSnapshot, - cx: &mut Context, - ) -> oneshot::Receiver<()> { - let (tx, rx) = oneshot::channel(); - let this = cx.weak_entity(); - let base_buffer = base_buffer.read(cx).snapshot(); - cx.spawn(|_, mut cx| async move { - let diff = cx - .background_executor() - .spawn({ - let base_buffer = base_buffer.clone(); - let buffer = buffer.clone(); - async move { BufferDiff::build(Some(&base_buffer.text()), &buffer) } - }) - .await; - let Some(this) = this.upgrade() else { - tx.send(()).ok(); - return; - }; - this.update(&mut cx, |this, cx| { - this.set_state(Some(base_buffer), diff, &buffer, cx); - }) - .log_err(); - tx.send(()).ok(); - }) - .detach(); - rx - } - - #[cfg(any(test, feature = "test-support"))] - pub fn base_text_string(&self) -> Option { - self.base_text.as_ref().map(|buffer| buffer.text()) - } - - pub fn new(buffer: &Entity, cx: &mut App) -> Self { - BufferChangeSet { - buffer_id: buffer.read(cx).remote_id(), - base_text: None, - diff_to_buffer: BufferDiff::new(&buffer.read(cx).text_snapshot()), - unstaged_change_set: None, - } - } - - #[cfg(any(test, feature = "test-support"))] - pub fn new_with_base_text(base_text: &str, buffer: &Entity, cx: &mut App) -> Self { - let mut base_text = base_text.to_owned(); - text::LineEnding::normalize(&mut base_text); - let diff_to_buffer = BufferDiff::build(Some(&base_text), &buffer.read(cx).text_snapshot()); - let base_text = language::Buffer::build_snapshot_sync(base_text.into(), None, None, cx); - BufferChangeSet { - buffer_id: buffer.read(cx).remote_id(), - base_text: Some(base_text), - diff_to_buffer, - unstaged_change_set: None, - } - } - - #[cfg(any(test, feature = "test-support"))] - pub fn recalculate_diff_sync( - &mut self, - snapshot: text::BufferSnapshot, - cx: &mut Context, - ) { - let mut base_text = self.base_text.as_ref().map(|buffer| buffer.text()); - if let Some(base_text) = base_text.as_mut() { - text::LineEnding::normalize(base_text); - } - let diff_to_buffer = BufferDiff::build(base_text.as_deref(), &snapshot); - self.set_state(self.base_text.clone(), diff_to_buffer, &snapshot, cx); - } -} - impl OpenBuffer { fn upgrade(&self) -> Option> { match self { diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 2a7759daa4e556d7681d4c98332422323b0ea109..7f2f52aee63a2cc701150230693b60a91e9d2cdb 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -21,6 +21,7 @@ mod project_tests; mod direnv; mod environment; +use diff::BufferDiff; pub use environment::EnvironmentErrorMessage; use git::Repository; pub mod search_history; @@ -28,7 +29,7 @@ mod yarn; use crate::git::GitState; use anyhow::{anyhow, Context as _, Result}; -use buffer_store::{BufferChangeSet, BufferStore, BufferStoreEvent}; +use buffer_store::{BufferStore, BufferStoreEvent}; use client::{ proto, Client, Collaborator, PendingEntitySubscription, ProjectId, TypedEnvelope, UserStore, }; @@ -1955,31 +1956,31 @@ impl Project { }) } - pub fn open_unstaged_changes( + pub fn open_unstaged_diff( &mut self, buffer: Entity, cx: &mut Context, - ) -> Task>> { + ) -> Task>> { if self.is_disconnected(cx) { return Task::ready(Err(anyhow!(ErrorCode::Disconnected))); } self.buffer_store.update(cx, |buffer_store, cx| { - buffer_store.open_unstaged_changes(buffer, cx) + buffer_store.open_unstaged_diff(buffer, cx) }) } - pub fn open_uncommitted_changes( + pub fn open_uncommitted_diff( &mut self, buffer: Entity, cx: &mut Context, - ) -> Task>> { + ) -> Task>> { 
if self.is_disconnected(cx) { return Task::ready(Err(anyhow!(ErrorCode::Disconnected))); } self.buffer_store.update(cx, |buffer_store, cx| { - buffer_store.open_uncommitted_changes(buffer, cx) + buffer_store.open_uncommitted_diff(buffer, cx) }) } diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index dfd5a5dc56fa2205cee56bd1e140bfe2539adf4c..9d93a68783b5d869f1f82c460e22e3bc19ae982f 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -1,5 +1,5 @@ use crate::{Event, *}; -use ::git::diff::assert_hunks; +use diff::assert_hunks; use fs::FakeFs; use futures::{future, StreamExt}; use gpui::{App, SemanticVersion, UpdateGlobal}; @@ -5639,7 +5639,7 @@ async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) { } #[gpui::test] -async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) { +async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) { init_test(cx); let staged_contents = r#" @@ -5681,20 +5681,20 @@ async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) { }) .await .unwrap(); - let unstaged_changes = project + let unstaged_diff = project .update(cx, |project, cx| { - project.open_unstaged_changes(buffer.clone(), cx) + project.open_unstaged_diff(buffer.clone(), cx) }) .await .unwrap(); cx.run_until_parked(); - unstaged_changes.update(cx, |unstaged_changes, cx| { + unstaged_diff.update(cx, |unstaged_diff, cx| { let snapshot = buffer.read(cx).snapshot(); assert_hunks( - unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + unstaged_diff.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), &snapshot, - &unstaged_changes.base_text.as_ref().unwrap().text(), + &unstaged_diff.base_text_string().unwrap(), &[ (0..1, "", "// print goodbye\n"), ( @@ -5719,19 +5719,19 @@ async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) { ); cx.run_until_parked(); - unstaged_changes.update(cx, |unstaged_changes, cx| { + unstaged_diff.update(cx, |unstaged_diff, cx| { let snapshot = buffer.read(cx).snapshot(); assert_hunks( - unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + unstaged_diff.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), &snapshot, - &unstaged_changes.base_text.as_ref().unwrap().text(), + &unstaged_diff.snapshot.base_text.as_ref().unwrap().text(), &[(2..3, "", " println!(\"goodbye world\");\n")], ); }); } #[gpui::test] -async fn test_uncommitted_changes_for_buffer(cx: &mut gpui::TestAppContext) { +async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) { init_test(cx); let committed_contents = r#" @@ -5783,20 +5783,20 @@ async fn test_uncommitted_changes_for_buffer(cx: &mut gpui::TestAppContext) { }) .await .unwrap(); - let uncommitted_changes = project + let uncommitted_diff = project .update(cx, |project, cx| { - project.open_uncommitted_changes(buffer.clone(), cx) + project.open_uncommitted_diff(buffer.clone(), cx) }) .await .unwrap(); cx.run_until_parked(); - uncommitted_changes.update(cx, |uncommitted_changes, cx| { + uncommitted_diff.update(cx, |uncommitted_diff, cx| { let snapshot = buffer.read(cx).snapshot(); assert_hunks( - uncommitted_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + uncommitted_diff.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), &snapshot, - &uncommitted_changes.base_text.as_ref().unwrap().text(), + 
&uncommitted_diff.snapshot.base_text.as_ref().unwrap().text(), &[ (0..1, "", "// print goodbye\n"), ( @@ -5821,12 +5821,12 @@ async fn test_uncommitted_changes_for_buffer(cx: &mut gpui::TestAppContext) { ); cx.run_until_parked(); - uncommitted_changes.update(cx, |uncommitted_changes, cx| { + uncommitted_diff.update(cx, |uncommitted_diff, cx| { let snapshot = buffer.read(cx).snapshot(); assert_hunks( - uncommitted_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + uncommitted_diff.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), &snapshot, - &uncommitted_changes.base_text.as_ref().unwrap().text(), + &uncommitted_diff.snapshot.base_text.as_ref().unwrap().text(), &[(2..3, "", " println!(\"goodbye world\");\n")], ); }); @@ -5874,20 +5874,20 @@ async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) { }) .await .unwrap(); - let uncommitted_changes = project + let uncommitted_diff = project .update(cx, |project, cx| { - project.open_uncommitted_changes(buffer.clone(), cx) + project.open_uncommitted_diff(buffer.clone(), cx) }) .await .unwrap(); cx.run_until_parked(); - uncommitted_changes.update(cx, |uncommitted_changes, cx| { + uncommitted_diff.update(cx, |uncommitted_diff, cx| { let snapshot = buffer.read(cx).snapshot(); assert_hunks( - uncommitted_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + uncommitted_diff.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), &snapshot, - &uncommitted_changes.base_text.as_ref().unwrap().text(), + &uncommitted_diff.snapshot.base_text.as_ref().unwrap().text(), &[( 1..2, " println!(\"hello from HEAD\");\n", diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index fd10f0d113412619bb2e0ebdccc51753913fdcdc..1799f507292ec2b0a2332afed6254a654c9e5742 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -304,8 +304,8 @@ message Envelope { SyncExtensionsResponse sync_extensions_response = 286; InstallExtension install_extension = 287; - OpenUnstagedChanges open_unstaged_changes = 288; - OpenUnstagedChangesResponse open_unstaged_changes_response = 289; + OpenUnstagedDiff open_unstaged_diff = 288; + OpenUnstagedDiffResponse open_unstaged_diff_response = 289; RegisterBufferWithLanguageServers register_buffer_with_language_servers = 290; @@ -314,8 +314,8 @@ message Envelope { Commit commit = 295; OpenCommitMessageBuffer open_commit_message_buffer = 296; - OpenUncommittedChanges open_uncommitted_changes = 297; - OpenUncommittedChangesResponse open_uncommitted_changes_response = 298; // current max + OpenUncommittedDiff open_uncommitted_diff = 297; + OpenUncommittedDiffResponse open_uncommitted_diff_response = 298; // current max } reserved 87 to 88; @@ -2062,21 +2062,21 @@ message UpdateDiffBases { Mode mode = 5; } -message OpenUnstagedChanges { +message OpenUnstagedDiff { uint64 project_id = 1; uint64 buffer_id = 2; } -message OpenUnstagedChangesResponse { +message OpenUnstagedDiffResponse { optional string staged_text = 1; } -message OpenUncommittedChanges { +message OpenUncommittedDiff { uint64 project_id = 1; uint64 buffer_id = 2; } -message OpenUncommittedChangesResponse { +message OpenUncommittedDiffResponse { enum Mode { INDEX_MATCHES_HEAD = 0; INDEX_AND_HEAD = 1; diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index ec35aef44ed7670dc7efb2091df8c5f277af4452..0ba9b6ef19a8e71315e596bfc263bde8073d6ec9 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -219,10 +219,10 @@ messages!( 
(GetImplementationResponse, Background), (GetLlmToken, Background), (GetLlmTokenResponse, Background), - (OpenUnstagedChanges, Foreground), - (OpenUnstagedChangesResponse, Foreground), - (OpenUncommittedChanges, Foreground), - (OpenUncommittedChangesResponse, Foreground), + (OpenUnstagedDiff, Foreground), + (OpenUnstagedDiffResponse, Foreground), + (OpenUncommittedDiff, Foreground), + (OpenUncommittedDiffResponse, Foreground), (GetUsers, Foreground), (Hello, Foreground), (IncomingCall, Foreground), @@ -424,8 +424,8 @@ request_messages!( (GetProjectSymbols, GetProjectSymbolsResponse), (GetReferences, GetReferencesResponse), (GetSignatureHelp, GetSignatureHelpResponse), - (OpenUnstagedChanges, OpenUnstagedChangesResponse), - (OpenUncommittedChanges, OpenUncommittedChangesResponse), + (OpenUnstagedDiff, OpenUnstagedDiffResponse), + (OpenUncommittedDiff, OpenUncommittedDiffResponse), (GetSupermavenApiKey, GetSupermavenApiKeyResponse), (GetTypeDefinition, GetTypeDefinitionResponse), (LinkedEditingRange, LinkedEditingRangeResponse), @@ -546,8 +546,8 @@ entity_messages!( GetProjectSymbols, GetReferences, GetSignatureHelp, - OpenUnstagedChanges, - OpenUncommittedChanges, + OpenUnstagedDiff, + OpenUncommittedDiff, GetTypeDefinition, InlayHints, JoinProject, diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 4e34953ea26417866cf603a31bd7ae013d327ca5..7552e950aaca64a7a5a6ba78f70a5725671543aa 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -84,18 +84,15 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test }) .await .unwrap(); - let change_set = project + let diff = project .update(cx, |project, cx| { - project.open_unstaged_changes(buffer.clone(), cx) + project.open_unstaged_diff(buffer.clone(), cx) }) .await .unwrap(); - change_set.update(cx, |change_set, _| { - assert_eq!( - change_set.base_text_string().unwrap(), - "fn one() -> usize { 0 }" - ); + diff.update(cx, |diff, _| { + assert_eq!(diff.base_text_string().unwrap(), "fn one() -> usize { 0 }"); }); buffer.update(cx, |buffer, cx| { @@ -155,9 +152,9 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test &[("src/lib2.rs".into(), "fn one() -> usize { 100 }".into())], ); cx.executor().run_until_parked(); - change_set.update(cx, |change_set, _| { + diff.update(cx, |diff, _| { assert_eq!( - change_set.base_text_string().unwrap(), + diff.base_text_string().unwrap(), "fn one() -> usize { 100 }" ); }); @@ -1239,18 +1236,17 @@ async fn test_remote_git_diffs(cx: &mut TestAppContext, server_cx: &mut TestAppC }) .await .unwrap(); - let change_set = project + let diff = project .update(cx, |project, cx| { - project.open_uncommitted_changes(buffer.clone(), cx) + project.open_uncommitted_diff(buffer.clone(), cx) }) .await .unwrap(); - change_set.read_with(cx, |change_set, cx| { - assert_eq!(change_set.base_text_string().unwrap(), text_1); + diff.read_with(cx, |diff, cx| { + assert_eq!(diff.base_text_string().unwrap(), text_1); assert_eq!( - change_set - .unstaged_change_set + diff.unstaged_diff .as_ref() .unwrap() .read(cx) @@ -1267,11 +1263,10 @@ async fn test_remote_git_diffs(cx: &mut TestAppContext, server_cx: &mut TestAppC ); cx.executor().run_until_parked(); - change_set.read_with(cx, |change_set, cx| { - assert_eq!(change_set.base_text_string().unwrap(), text_1); + diff.read_with(cx, |diff, cx| { + assert_eq!(diff.base_text_string().unwrap(), text_1); 
assert_eq!( - change_set - .unstaged_change_set + diff.unstaged_diff .as_ref() .unwrap() .read(cx) @@ -1288,11 +1283,10 @@ async fn test_remote_git_diffs(cx: &mut TestAppContext, server_cx: &mut TestAppC ); cx.executor().run_until_parked(); - change_set.read_with(cx, |change_set, cx| { - assert_eq!(change_set.base_text_string().unwrap(), text_2); + diff.read_with(cx, |diff, cx| { + assert_eq!(diff.base_text_string().unwrap(), text_2); assert_eq!( - change_set - .unstaged_change_set + diff.unstaged_diff .as_ref() .unwrap() .read(cx) diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index da48baf0958245d390b25f47076d83c4afd9d53c..fc2603bd442f40261799f915b15b94c3e3c94cd6 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -5386,7 +5386,7 @@ fn send_status_update_inner( let new_snapshot = state.snapshot.clone(); let old_snapshot = mem::replace(&mut state.prev_snapshot, new_snapshot.snapshot.clone()); - let changes = build_change_set(phase, &old_snapshot, &new_snapshot, &state.changed_paths); + let changes = build_diff(phase, &old_snapshot, &new_snapshot, &state.changed_paths); state.changed_paths.clear(); status_updates_tx @@ -5399,7 +5399,7 @@ fn send_status_update_inner( .is_ok() } -fn build_change_set( +fn build_diff( phase: BackgroundScannerPhase, old_snapshot: &Snapshot, new_snapshot: &Snapshot, From 337b9e62d252225a056d369b992f8dd1044a5b3b Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Thu, 6 Feb 2025 17:57:24 -0600 Subject: [PATCH 095/130] Fix vim full line operations failing when no trailing newline (#24409) Closes #24270 Release Notes: - Fixed an issue where doing line-wise operations in vim mode on the last line of a file with no trailing newline would not work properly --- crates/vim/src/normal.rs | 36 +++++++++++++++++++ crates/vim/src/normal/yank.rs | 15 ++++---- ...d_then_paste_without_trailing_newline.json | 7 ++++ ...st_increment_bin_wrapping_and_padding.json | 4 +-- ...st_increment_hex_wrapping_and_padding.json | 4 +-- .../vim/test_data/test_increment_inline.json | 4 +-- .../test_data/test_increment_sign_change.json | 2 +- .../test_data/test_increment_wrapping.json | 8 ++--- .../test_yank_line_with_trailing_newline.json | 5 +++ ...st_yank_line_without_trailing_newline.json | 5 +++ ...nk_multiline_without_trailing_newline.json | 6 ++++ 11 files changed, 79 insertions(+), 17 deletions(-) create mode 100644 crates/vim/test_data/test_dd_then_paste_without_trailing_newline.json create mode 100644 crates/vim/test_data/test_yank_line_with_trailing_newline.json create mode 100644 crates/vim/test_data/test_yank_line_without_trailing_newline.json create mode 100644 crates/vim/test_data/test_yank_multiline_without_trailing_newline.json diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index 7ea8ee899298075947c858b95675cac8e9d4a00a..4d537f8fd7dff14ca0b0050fa107500f31254ad4 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -1545,4 +1545,40 @@ mod test { cx.simulate_shared_keystrokes("x escape shift-o").await; cx.shared_state().await.assert_eq("// hello\n// ˇ\n// x\n"); } + + #[gpui::test] + async fn test_yank_line_with_trailing_newline(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_shared_state("heˇllo\n").await; + cx.simulate_shared_keystrokes("y y p").await; + cx.shared_state().await.assert_eq("hello\nˇhello\n"); + } + + #[gpui::test] + async fn test_yank_line_without_trailing_newline(cx: &mut gpui::TestAppContext) { + let mut cx = 
NeovimBackedTestContext::new(cx).await; + cx.set_shared_state("heˇllo").await; + cx.simulate_shared_keystrokes("y y p").await; + cx.shared_state().await.assert_eq("hello\nˇhello"); + } + + #[gpui::test] + async fn test_yank_multiline_without_trailing_newline(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_shared_state("heˇllo\nhello").await; + cx.simulate_shared_keystrokes("2 y y p").await; + cx.shared_state() + .await + .assert_eq("hello\nˇhello\nhello\nhello"); + } + + #[gpui::test] + async fn test_dd_then_paste_without_trailing_newline(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_shared_state("heˇllo").await; + cx.simulate_shared_keystrokes("d d").await; + cx.shared_state().await.assert_eq("ˇ"); + cx.simulate_shared_keystrokes("p p").await; + cx.shared_state().await.assert_eq("\nhello\nˇhello"); + } } diff --git a/crates/vim/src/normal/yank.rs b/crates/vim/src/normal/yank.rs index 0e248037d58e9a83f76358ee5fd3f59b6b24f750..aa521ab8728ed88d823d980d00441f11a700992e 100644 --- a/crates/vim/src/normal/yank.rs +++ b/crates/vim/src/normal/yank.rs @@ -162,13 +162,16 @@ impl Vim { // that line, we will have expanded the start of the selection to ensure it // contains a newline (so that delete works as expected). We undo that change // here. - let is_last_line = linewise - && end.row == buffer.max_row().0 - && buffer.max_point().column > 0 - && start.row < buffer.max_row().0 + let max_point = buffer.max_point(); + let should_adjust_start = linewise + && end.row == max_point.row + && max_point.column > 0 + && start.row < max_point.row && start == Point::new(start.row, buffer.line_len(MultiBufferRow(start.row))); + let should_add_newline = + should_adjust_start || (end == max_point && max_point.column > 0 && linewise); - if is_last_line { + if should_adjust_start { start = Point::new(start.row + 1, 0); } @@ -179,7 +182,7 @@ impl Vim { for chunk in buffer.text_for_range(start..end) { text.push_str(chunk); } - if is_last_line { + if should_add_newline { text.push('\n'); } clipboard_selections.push(ClipboardSelection { diff --git a/crates/vim/test_data/test_dd_then_paste_without_trailing_newline.json b/crates/vim/test_data/test_dd_then_paste_without_trailing_newline.json new file mode 100644 index 0000000000000000000000000000000000000000..5b10a2fe2879b621bca28ce73e7d86e9abe227eb --- /dev/null +++ b/crates/vim/test_data/test_dd_then_paste_without_trailing_newline.json @@ -0,0 +1,7 @@ +{"Put":{"state":"heˇllo"}} +{"Key":"d"} +{"Key":"d"} +{"Get":{"state":"ˇ","mode":"Normal"}} +{"Key":"p"} +{"Key":"p"} +{"Get":{"state":"\nhello\nˇhello","mode":"Normal"}} diff --git a/crates/vim/test_data/test_increment_bin_wrapping_and_padding.json b/crates/vim/test_data/test_increment_bin_wrapping_and_padding.json index 4f1a6aa1d364b7abe19d6b058e6b7053e7aadc2d..69c118c0adc0dc9f2e927de5892d363686728b06 100644 --- a/crates/vim/test_data/test_increment_bin_wrapping_and_padding.json +++ b/crates/vim/test_data/test_increment_bin_wrapping_and_padding.json @@ -1,10 +1,10 @@ {"Put":{"state":"0b111111111111111111111111111111111111111111111111111111111111111111111ˇ1\n"}} {"Key":"ctrl-a"} -{"Get":{"state":"0b000000111111111111111111111111111111111111111111111111111111111111111ˇ1\n", "mode":"Normal"}} +{"Get":{"state":"0b000000111111111111111111111111111111111111111111111111111111111111111ˇ1\n","mode":"Normal"}} {"Key":"ctrl-a"} 
{"Get":{"state":"0b000000000000000000000000000000000000000000000000000000000000000000000ˇ0\n","mode":"Normal"}} {"Key":"ctrl-a"} {"Get":{"state":"0b000000000000000000000000000000000000000000000000000000000000000000000ˇ1\n","mode":"Normal"}} {"Key":"2"} {"Key":"ctrl-x"} -{"Get":{"state":"0b000000111111111111111111111111111111111111111111111111111111111111111ˇ1\n", "mode":"Normal"}} +{"Get":{"state":"0b000000111111111111111111111111111111111111111111111111111111111111111ˇ1\n","mode":"Normal"}} diff --git a/crates/vim/test_data/test_increment_hex_wrapping_and_padding.json b/crates/vim/test_data/test_increment_hex_wrapping_and_padding.json index 23a561126487c6a20736cedbf156269c50ec411a..562b368812c03354af245b309ce5f39abb34eb82 100644 --- a/crates/vim/test_data/test_increment_hex_wrapping_and_padding.json +++ b/crates/vim/test_data/test_increment_hex_wrapping_and_padding.json @@ -1,10 +1,10 @@ {"Put":{"state":"0xfffffffffffffffffffˇf\n"}} {"Key":"ctrl-a"} -{"Get":{"state":"0x0000fffffffffffffffˇf\n", "mode":"Normal"}} +{"Get":{"state":"0x0000fffffffffffffffˇf\n","mode":"Normal"}} {"Key":"ctrl-a"} {"Get":{"state":"0x0000000000000000000ˇ0\n","mode":"Normal"}} {"Key":"ctrl-a"} {"Get":{"state":"0x0000000000000000000ˇ1\n","mode":"Normal"}} {"Key":"2"} {"Key":"ctrl-x"} -{"Get":{"state":"0x0000fffffffffffffffˇf\n", "mode":"Normal"}} +{"Get":{"state":"0x0000fffffffffffffffˇf\n","mode":"Normal"}} diff --git a/crates/vim/test_data/test_increment_inline.json b/crates/vim/test_data/test_increment_inline.json index 98c4fc280527249f3adc0138fa58791abd2b7390..1e3d8fbd90efd5ee1f9c2c92ff1fafb972e12140 100644 --- a/crates/vim/test_data/test_increment_inline.json +++ b/crates/vim/test_data/test_increment_inline.json @@ -2,9 +2,9 @@ {"Key":"ctrl-a"} {"Get":{"state":"inline0x3ˇau32\n","mode":"Normal"}} {"Key":"ctrl-a"} -{"Get":{"state":"inline0x3ˇbu32\n", "mode":"Normal"}} +{"Get":{"state":"inline0x3ˇbu32\n","mode":"Normal"}} {"Key":"l"} {"Key":"l"} {"Key":"l"} {"Key":"ctrl-a"} -{"Get":{"state":"inline0x3bu3ˇ3\n", "mode":"Normal"}} +{"Get":{"state":"inline0x3bu3ˇ3\n","mode":"Normal"}} diff --git a/crates/vim/test_data/test_increment_sign_change.json b/crates/vim/test_data/test_increment_sign_change.json index 1f4edd57b456af740153636adfba99efc8ab739f..8f2ee7f2f3d513bff143b81d1940a4f5801b3bf0 100644 --- a/crates/vim/test_data/test_increment_sign_change.json +++ b/crates/vim/test_data/test_increment_sign_change.json @@ -3,4 +3,4 @@ {"Get":{"state":"-ˇ1\n","mode":"Normal"}} {"Key":"2"} {"Key":"ctrl-a"} -{"Get":{"state":"ˇ1\n", "mode":"Normal"}} +{"Get":{"state":"ˇ1\n","mode":"Normal"}} diff --git a/crates/vim/test_data/test_increment_wrapping.json b/crates/vim/test_data/test_increment_wrapping.json index 9f84c8cb1145d4cbb0c0de62763f73f4fb55571a..9f189991a6ee67c68a37b2712686ab1108f05a85 100644 --- a/crates/vim/test_data/test_increment_wrapping.json +++ b/crates/vim/test_data/test_increment_wrapping.json @@ -2,12 +2,12 @@ {"Key":"ctrl-a"} {"Get":{"state":"1844674407370955161ˇ5\n","mode":"Normal"}} {"Key":"ctrl-a"} -{"Get":{"state":"-1844674407370955161ˇ5\n", "mode":"Normal"}} +{"Get":{"state":"-1844674407370955161ˇ5\n","mode":"Normal"}} {"Key":"ctrl-a"} -{"Get":{"state":"-1844674407370955161ˇ4\n", "mode":"Normal"}} +{"Get":{"state":"-1844674407370955161ˇ4\n","mode":"Normal"}} {"Key":"3"} {"Key":"ctrl-x"} -{"Get":{"state":"1844674407370955161ˇ4\n", "mode":"Normal"}} +{"Get":{"state":"1844674407370955161ˇ4\n","mode":"Normal"}} {"Key":"2"} {"Key":"ctrl-a"} -{"Get":{"state":"-1844674407370955161ˇ5\n", "mode":"Normal"}} 
+{"Get":{"state":"-1844674407370955161ˇ5\n","mode":"Normal"}} diff --git a/crates/vim/test_data/test_yank_line_with_trailing_newline.json b/crates/vim/test_data/test_yank_line_with_trailing_newline.json new file mode 100644 index 0000000000000000000000000000000000000000..8b4438737aaa476eb4f3afb817a7494bb464b058 --- /dev/null +++ b/crates/vim/test_data/test_yank_line_with_trailing_newline.json @@ -0,0 +1,5 @@ +{"Put":{"state":"heˇllo\n"}} +{"Key":"y"} +{"Key":"y"} +{"Key":"p"} +{"Get":{"state":"hello\nˇhello\n","mode":"Normal"}} diff --git a/crates/vim/test_data/test_yank_line_without_trailing_newline.json b/crates/vim/test_data/test_yank_line_without_trailing_newline.json new file mode 100644 index 0000000000000000000000000000000000000000..a1158ff2d5b80ff3ee457faa3be9176370475b20 --- /dev/null +++ b/crates/vim/test_data/test_yank_line_without_trailing_newline.json @@ -0,0 +1,5 @@ +{"Put":{"state":"heˇllo"}} +{"Key":"y"} +{"Key":"y"} +{"Key":"p"} +{"Get":{"state":"hello\nˇhello","mode":"Normal"}} diff --git a/crates/vim/test_data/test_yank_multiline_without_trailing_newline.json b/crates/vim/test_data/test_yank_multiline_without_trailing_newline.json new file mode 100644 index 0000000000000000000000000000000000000000..ec38e81f2ed29c2b1f6ac796d06b466a88817a8b --- /dev/null +++ b/crates/vim/test_data/test_yank_multiline_without_trailing_newline.json @@ -0,0 +1,6 @@ +{"Put":{"state":"heˇllo\nhello"}} +{"Key":"2"} +{"Key":"y"} +{"Key":"y"} +{"Key":"p"} +{"Get":{"state":"hello\nˇhello\nhello\nhello","mode":"Normal"}} From 5315d38cf4038d6e7f4e9fa8b0646302377b6256 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Thu, 6 Feb 2025 19:01:32 -0500 Subject: [PATCH 096/130] Update extension extraction docs (#24079) - Fixed a regex for finding tags. - Templatize the instructions with `$LANGNAME` to prevent manual errors from failing to edit commands (this bit me) - Ran formatting through Prettier --- docs/src/extensions/developing-extensions.md | 6 ++ extensions/EXTRACTION.md | 90 +++++++++++++++++--- 2 files changed, 84 insertions(+), 12 deletions(-) diff --git a/docs/src/extensions/developing-extensions.md b/docs/src/extensions/developing-extensions.md index d1009c8ed97c8219f0fbdc32ab48f81dee6e0afe..29906a7ae43c7a4a664da7b16e864225677a61b8 100644 --- a/docs/src/extensions/developing-extensions.md +++ b/docs/src/extensions/developing-extensions.md @@ -95,6 +95,12 @@ To publish an extension, open a PR to [the `zed-industries/extensions` repo](htt In your PR, do the following: 1. Add your extension as a Git submodule within the `extensions/` directory + +```sh +git submodule add https://github.com/your-username/foobar-zed.git extensions/foobar +git add extensions/foobar +``` + 2. Add a new entry to the top-level `extensions.toml` file containing your extension: ```toml diff --git a/extensions/EXTRACTION.md b/extensions/EXTRACTION.md index fe112e5b41d0fafbf0ca8d069df1dee44d8300dc..e5ff27bb680d91002d77c71957b83aa31ddf767b 100644 --- a/extensions/EXTRACTION.md +++ b/extensions/EXTRACTION.md @@ -1,6 +1,6 @@ # Extracting an extension to dedicated repo -These are some notes of how to extract an extension from the main zed repository and generate a new repository which preserves the history as best as possible. In the this example we will be extracting the `ruby` extension, substitute as appropriate. +These are some notes of how to extract an extension from the main zed repository and generate a new repository which preserves the history as best as possible. 
In the this example we will be extracting the `ruby` extension, substitute as appropriate. ## Pre-requisites @@ -23,7 +23,7 @@ regex:(?zed-industries/zed\1 ``` This file takes the form of `patern==>replacement`, where the replacement is optional. -Note whitespace matters so `ruby: ==>` is removing the `ruby:` prefix from a commit messages and adding a space after `==> ` means the replacement begins with a space. Regex capture groups are numbered `\1`, `\2`, etc. +Note whitespace matters so `ruby: ==>` is removing the `ruby:` prefix from a commit messages and adding a space after `==> ` means the replacement begins with a space. Regex capture groups are numbered `\1`, `\2`, etc. See: [Git Filter Repo Docs](https://htmlpreview.github.io/?https://github.com/newren/git-filter-repo/blob/docs/html/git-filter-repo.html) for more. @@ -34,16 +34,17 @@ See: [Git Filter Repo Docs](https://htmlpreview.github.io/?https://github.com/ne > `setopt interactive_comments && echo "setopt interactive_comments" >> ~/.zshrc` ```sh -rm -rf zed3 -git clone --single-branch --no-tags git@github.com:zed-industries/zed.git zed3 -cd zed3 +LANGNAME=ruby +rm -rf $LANGNAME +git clone --single-branch --no-tags git@github.com:zed-industries/zed.git $LANGNAME +cd $LANGNAME # This removes the LICENSE symlink -git filter-repo --invert-paths --path extensions/ruby/LICENSE-APACHE +git filter-repo --invert-paths --path extensions/$LANGNAME/LICENSE-APACHE git filter-repo \ --use-mailmap \ - --subdirectory-filter extensions/ruby/ \ + --subdirectory-filter extensions/$LANGNAME/ \ --path LICENSE-APACHE \ --replace-message ~/projects/expressions.txt ``` @@ -65,10 +66,11 @@ You can always add tags later, but it's a nice touch. Show you all commits that mention a version number: ```sh -git log --grep="(\d+\.\d+\.\d+\.)" --perl-regexp --oneline --reverse +git log --grep="(\d+\.\d+\.\d+)" --perl-regexp --oneline --reverse ``` Then just: + ``` git tag v0.0.2 abcd1234 git tag v0.0.3 deadbeef @@ -76,13 +78,77 @@ git tag v0.0.3 deadbeef Usually the initial extraction didn't mention a version number so you can just do that one manually. -4. Push to the new repo +4. [Optional] Add a README.md and commit. + +5. Push to the new repo -Create a new empty repo on github under the [zed-extensions](https://github.com/zed-extensions) organization. +Create a new empty repo on github under the [zed-extensions](https://github.com/organizations/zed-extensions/repositories/new) organization. ``` -git remote add origin git@github.com:zed-extensions/ruby +git remote add origin git@github.com:zed-extensions/$LANGNAME git push origin main --tags +git branch --set-upstream-to=origin/main main +``` + +6. Setup the new repository: + +- Go to the repository settings: + - Disable Wikis + - Uncheck "Allow Merge Commits" + - Check "Allow Squash Merging" + - Default commit message: "Pull request title and description" + +7. Publish a new version of the extension. + +``` +OLD_VERSION=$(grep '^version = ' extension.toml | cut -d'"' -f2) +NEW_VERSION=$(echo "$OLD_VERSION" | awk -F. '{$NF = $NF + 1;} 1' OFS=.) +echo $OLD_VERSION $NEW_VERSION +perl -i -pe "s/$OLD_VERSION/$NEW_VERSION/" extension.toml + +# if there's rust code, update this too. 
+test -f Cargo.toml && perl -i -pe "s/$OLD_VERSION/$NEW_VERSION/" cargo.toml +test -f Cargo.toml && cargo check + +# commit and push +git add -u +git checkout -b "bump_${NEW_VERSION}" +git commit -m "Bump to v${NEW_VERSION}" +git push +gh pr create --title "Bump to v${NEW_VERSION}" --web + +# merge PR in web interface +git checkout main +git pull +git tag v${NEW_VERSION} +git push origin v${NEW_VERSION} +``` + +7. In zed repository, `rm -rf extension/langname` and push a PR. + +8. Update extensions repository: + +```sh +cd ../extensions +git checkout main +git pull +git submodule init +git submodule update +git status + +git checkout -b ${LANGNAME}_v${NEW_VERSION} +git submodule add https://github.com/zed-extensions/${LANGNAME}.git extensions/${LANGNAME} +pnpm sort-extensions + +# edit extensions.toml: +# - bump version +# - change `submodule` from `extensions/zed` to new path +# - remove `path` line all together + +git add extensions.toml .gitmodules extensions/${LANGNAME} +git diff --cached +git commit -m "Bump ${LANGNAME} to v${NEW_VERSION}" +git push ``` -5. [Optional] +Create PR and reference the Zed PR with removal from tree. \ No newline at end of file From 59bba2a98c2e38a34caff5acb20f4a3fb43a4708 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Thu, 6 Feb 2025 19:14:47 -0500 Subject: [PATCH 097/130] Assign base text language earlier to fix missing highlighting in deletion hunks (#24413) Release Notes: - Fixed deletion diff hunks not being syntax highlighted in some cases Co-authored-by: Max --- crates/project/src/buffer_store.rs | 73 +++++++++++++++++------------ crates/project/src/project_tests.rs | 15 +++++- 2 files changed, 56 insertions(+), 32 deletions(-) diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index 57702120d9f9ac12f09b8bbbba4a2e63b0b8e9e4..eed41f1f6b0e0167cfc246dfd617c4f533c58a85 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -74,7 +74,6 @@ struct BufferDiffState { language: Option>, language_registry: Option>, diff_updated_futures: Vec>, - buffer_subscription: Option, head_text: Option>, index_text: Option>, @@ -1447,13 +1446,14 @@ impl BufferStore { this: WeakEntity, kind: DiffKind, texts: Result, - buffer: Entity, + buffer_entity: Entity, mut cx: AsyncApp, ) -> Result> { let diff_bases_change = match texts { Err(e) => { this.update(&mut cx, |this, cx| { - let buffer_id = buffer.read(cx).remote_id(); + let buffer = buffer_entity.read(cx); + let buffer_id = buffer.remote_id(); this.loading_diffs.remove(&(buffer_id, kind)); })?; return Err(e); @@ -1462,26 +1462,23 @@ impl BufferStore { }; this.update(&mut cx, |this, cx| { - let buffer_id = buffer.read(cx).remote_id(); + let buffer = buffer_entity.read(cx); + let buffer_id = buffer.remote_id(); + let language = buffer.language().cloned(); + let language_registry = buffer.language_registry(); + let text_snapshot = buffer.text_snapshot(); this.loading_diffs.remove(&(buffer_id, kind)); if let Some(OpenBuffer::Complete { diff_state, .. 
}) = - this.opened_buffers.get_mut(&buffer.read(cx).remote_id()) + this.opened_buffers.get_mut(&buffer_id) { diff_state.update(cx, |diff_state, cx| { - let buffer_id = buffer.read(cx).remote_id(); - diff_state.buffer_subscription.get_or_insert_with(|| { - cx.subscribe(&buffer, |this, buffer, event, cx| match event { - BufferEvent::LanguageChanged => { - this.buffer_language_changed(buffer, cx) - } - _ => {} - }) - }); + diff_state.language = language; + diff_state.language_registry = language_registry; - let diff = cx.new(|cx| BufferDiff { + let diff = cx.new(|_| BufferDiff { buffer_id, - snapshot: BufferDiffSnapshot::new(&buffer.read(cx).text_snapshot()), + snapshot: BufferDiffSnapshot::new(&text_snapshot), unstaged_diff: None, }); match kind { @@ -1490,11 +1487,9 @@ impl BufferStore { let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() { diff } else { - let unstaged_diff = cx.new(|cx| BufferDiff { + let unstaged_diff = cx.new(|_| BufferDiff { buffer_id, - snapshot: BufferDiffSnapshot::new( - &buffer.read(cx).text_snapshot(), - ), + snapshot: BufferDiffSnapshot::new(&text_snapshot), unstaged_diff: None, }); diff_state.unstaged_diff = Some(unstaged_diff.downgrade()); @@ -1508,8 +1503,7 @@ impl BufferStore { } }; - let buffer = buffer.read(cx).text_snapshot(); - let rx = diff_state.diff_bases_changed(buffer, diff_bases_change, cx); + let rx = diff_state.diff_bases_changed(text_snapshot, diff_bases_change, cx); Ok(async move { rx.await.ok(); @@ -1721,16 +1715,23 @@ impl BufferStore { } } - fn add_buffer(&mut self, buffer: Entity, cx: &mut Context) -> Result<()> { - let remote_id = buffer.read(cx).remote_id(); - let is_remote = buffer.read(cx).replica_id() != 0; + fn add_buffer(&mut self, buffer_entity: Entity, cx: &mut Context) -> Result<()> { + let buffer = buffer_entity.read(cx); + let language = buffer.language().cloned(); + let language_registry = buffer.language_registry(); + let remote_id = buffer.remote_id(); + let is_remote = buffer.replica_id() != 0; let open_buffer = OpenBuffer::Complete { - buffer: buffer.downgrade(), - diff_state: cx.new(|_| BufferDiffState::default()), + buffer: buffer_entity.downgrade(), + diff_state: cx.new(|_| BufferDiffState { + language, + language_registry, + ..Default::default() + }), }; let handle = cx.entity().downgrade(); - buffer.update(cx, move |_, cx| { + buffer_entity.update(cx, move |_, cx| { cx.on_release(move |buffer, cx| { handle .update(cx, |_, cx| { @@ -1747,7 +1748,7 @@ impl BufferStore { } hash_map::Entry::Occupied(mut entry) => { if let OpenBuffer::Operations(operations) = entry.get_mut() { - buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx)); + buffer_entity.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx)); } else if entry.get().upgrade().is_some() { if is_remote { return Ok(()); @@ -1760,8 +1761,8 @@ impl BufferStore { } } - cx.subscribe(&buffer, Self::on_buffer_event).detach(); - cx.emit(BufferStoreEvent::BufferAdded(buffer)); + cx.subscribe(&buffer_entity, Self::on_buffer_event).detach(); + cx.emit(BufferStoreEvent::BufferAdded(buffer_entity)); Ok(()) } @@ -1982,6 +1983,16 @@ impl BufferStore { }) .log_err(); } + BufferEvent::LanguageChanged => { + let buffer_id = buffer.read(cx).remote_id(); + if let Some(OpenBuffer::Complete { diff_state, .. 
}) = + self.opened_buffers.get(&buffer_id) + { + diff_state.update(cx, |diff_state, cx| { + diff_state.buffer_language_changed(buffer, cx); + }); + } + } _ => {} } } diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 9d93a68783b5d869f1f82c460e22e3bc19ae982f..32fff6f1aa1520a9bcd0fa43687fafb6eb63be99 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -5776,6 +5776,9 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) { ); let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + let language = rust_lang(); + language_registry.add(language.clone()); let buffer = project .update(cx, |project, cx| { @@ -5790,13 +5793,23 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) { .await .unwrap(); + uncommitted_diff.read_with(cx, |diff, _| { + assert_eq!( + diff.snapshot + .base_text + .as_ref() + .and_then(|base| base.language().cloned()), + Some(language) + ) + }); + cx.run_until_parked(); uncommitted_diff.update(cx, |uncommitted_diff, cx| { let snapshot = buffer.read(cx).snapshot(); assert_hunks( uncommitted_diff.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), &snapshot, - &uncommitted_diff.snapshot.base_text.as_ref().unwrap().text(), + &uncommitted_diff.base_text_string().unwrap(), &[ (0..1, "", "// print goodbye\n"), ( From a190f42ccc23323d723f5c44cfee8984527a0d9a Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Thu, 6 Feb 2025 19:33:41 -0500 Subject: [PATCH 098/130] Fix double-lease panic in Repository::activate (#24414) Release Notes: - N/A Co-authored-by: maxbrunsfeld --- crates/project/src/git.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/project/src/git.rs b/crates/project/src/git.rs index fad10f1ba4c5131beeaf03864ae90e0099af7b70..2c24a63079d9e2e4b19b37f33da335b0c634efdd 100644 --- a/crates/project/src/git.rs +++ b/crates/project/src/git.rs @@ -314,16 +314,16 @@ impl Repository { .unwrap_or("".into()) } - pub fn activate(&self, cx: &mut App) { + pub fn activate(&self, cx: &mut Context) { let Some(git_state) = self.git_state.upgrade() else { return; }; + let entity = cx.entity(); git_state.update(cx, |git_state, cx| { - let Some((index, _)) = git_state + let Some(index) = git_state .repositories .iter() - .enumerate() - .find(|(_, handle)| handle.read(cx).id() == self.id()) + .position(|handle| *handle == entity) else { return; }; From a1ed1a00b39d55ded92c88c67694aa2fc1035ddc Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Thu, 6 Feb 2025 18:43:30 -0600 Subject: [PATCH 099/130] Fix issue with Vim test instead of cheating (#24411) Appears this test was failing, and someone edited the expected test output instead of fixing it. Well no longer! Release Notes: - N/A Co-authored-by: Conrad --- crates/vim/src/command.rs | 3 ++- crates/vim/test_data/test_command_replace.json | 8 +++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/crates/vim/src/command.rs b/crates/vim/src/command.rs index bc1b4ba2017da6561404c32a0f3d1e183c32aff7..f73734d0e2fd4348be0ce4560b7d27fa75ff36cb 100644 --- a/crates/vim/src/command.rs +++ b/crates/vim/src/command.rs @@ -1517,7 +1517,8 @@ mod test { dd dd ˇcc"}); - cx.simulate_shared_keystrokes("k : s / dd / ee enter").await; + cx.simulate_shared_keystrokes("k : s / d d / e e enter") + .await; cx.shared_state().await.assert_eq(indoc! 
{" aa dd diff --git a/crates/vim/test_data/test_command_replace.json b/crates/vim/test_data/test_command_replace.json index 698ef2a3bff1620cb467c25906dc0357d5bea6df..d14a8a78ce688c61c4693c8fef505d2a06de5a4a 100644 --- a/crates/vim/test_data/test_command_replace.json +++ b/crates/vim/test_data/test_command_replace.json @@ -24,8 +24,10 @@ {"Key":":"} {"Key":"s"} {"Key":"/"} -{"Key":"dd"} +{"Key":"d"} +{"Key":"d"} {"Key":"/"} -{"Key":"ee"} +{"Key":"e"} +{"Key":"e"} {"Key":"enter"} -{"Get":{"state":"aa\ndd\nˇee\ncc", "mode":"Normal"}} \ No newline at end of file +{"Get":{"state":"aa\ndd\nˇee\ncc","mode":"Normal"}} From d6d0d7d3e4cdb20c678a52df431c38519ed663a0 Mon Sep 17 00:00:00 2001 From: Caleb! <48127194+kaf-lamed-beyt@users.noreply.github.com> Date: Fri, 7 Feb 2025 01:56:34 +0100 Subject: [PATCH 100/130] Add image dimension and file size information (#21675) Closes https://github.com/zed-industries/zed/issues/21281 @jansol, kindly take a look when you're free. ![image](https://github.com/user-attachments/assets/da9a54fa-6284-4012-a243-7e355a5290d3) Release Notes: - Added dimensions and file size information for images. --------- Co-authored-by: tims <0xtimsb@gmail.com> Co-authored-by: Marshall Bowers --- Cargo.lock | 2 + assets/settings/default.json | 7 + crates/image_viewer/Cargo.toml | 4 +- crates/image_viewer/src/image_info.rs | 124 ++++++++++++++++++ crates/image_viewer/src/image_viewer.rs | 16 ++- .../image_viewer/src/image_viewer_settings.rs | 42 ++++++ crates/project/src/image_store.rs | 102 +++++++++++++- crates/project/src/project.rs | 19 ++- crates/settings/src/settings_store.rs | 2 +- crates/zed/src/zed.rs | 4 + 10 files changed, 312 insertions(+), 10 deletions(-) create mode 100644 crates/image_viewer/src/image_info.rs create mode 100644 crates/image_viewer/src/image_viewer_settings.rs diff --git a/Cargo.lock b/Cargo.lock index 96a8df761bb8cab563cf66f1acfc36e2b2280f60..205ffc66937390a9d3cc9e182dea49f9377c0125 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6363,6 +6363,8 @@ dependencies = [ "file_icons", "gpui", "project", + "schemars", + "serde", "settings", "theme", "ui", diff --git a/assets/settings/default.json b/assets/settings/default.json index f67f2fa965ef4fdb5b54ab9d7ce63227e8189766..ca5247d19bc43103d4ad411f37479000d2c3cc27 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -93,6 +93,13 @@ // workspace when the centered layout is used. "right_padding": 0.2 }, + // All settings related to the image viewer. + "image_viewer": { + // The unit for image file sizes. + // By default we're setting it to binary. + // The second option is decimal. + "unit": "binary" + }, // The key to use for adding multiple cursors // Currently "alt" or "cmd_or_ctrl" (also aliased as // "cmd" and "ctrl") are supported. 
diff --git a/crates/image_viewer/Cargo.toml b/crates/image_viewer/Cargo.toml index 7e97775a5ad243f30b4ff6cbea510b3d7e150dbb..5648006042b61394fc7e3ee1c2a87b882b46817a 100644 --- a/crates/image_viewer/Cargo.toml +++ b/crates/image_viewer/Cargo.toml @@ -13,7 +13,7 @@ path = "src/image_viewer.rs" doctest = false [features] -test-support = ["gpui/test-support"] +test-support = ["gpui/test-support", "editor/test-support"] [dependencies] anyhow.workspace = true @@ -22,6 +22,8 @@ editor.workspace = true file_icons.workspace = true gpui.workspace = true project.workspace = true +schemars.workspace = true +serde.workspace = true settings.workspace = true theme.workspace = true ui.workspace = true diff --git a/crates/image_viewer/src/image_info.rs b/crates/image_viewer/src/image_info.rs new file mode 100644 index 0000000000000000000000000000000000000000..dfee77a22ff638f93302689142352919e9a09f1b --- /dev/null +++ b/crates/image_viewer/src/image_info.rs @@ -0,0 +1,124 @@ +use gpui::{div, Context, Entity, IntoElement, ParentElement, Render, Subscription}; +use project::image_store::{ImageFormat, ImageMetadata}; +use settings::Settings; +use ui::prelude::*; +use workspace::{ItemHandle, StatusItemView, Workspace}; + +use crate::{ImageFileSizeUnit, ImageView, ImageViewerSettings}; + +pub struct ImageInfo { + metadata: Option, + _observe_active_image: Option, + observe_image_item: Option, +} + +impl ImageInfo { + pub fn new(_workspace: &Workspace) -> Self { + Self { + metadata: None, + _observe_active_image: None, + observe_image_item: None, + } + } + + fn update_metadata(&mut self, image_view: &Entity, cx: &mut Context) { + let image_item = image_view.read(cx).image_item.clone(); + let current_metadata = image_item.read(cx).image_metadata; + if current_metadata.is_some() { + self.metadata = current_metadata; + cx.notify(); + } else { + self.observe_image_item = Some(cx.observe(&image_item, |this, item, cx| { + this.metadata = item.read(cx).image_metadata; + cx.notify(); + })); + } + } +} + +fn format_file_size(size: u64, image_unit_type: ImageFileSizeUnit) -> String { + match image_unit_type { + ImageFileSizeUnit::Binary => { + if size < 1024 { + format!("{size}B") + } else if size < 1024 * 1024 { + format!("{:.1}KiB", size as f64 / 1024.0) + } else { + format!("{:.1}MiB", size as f64 / (1024.0 * 1024.0)) + } + } + ImageFileSizeUnit::Decimal => { + if size < 1000 { + format!("{size}B") + } else if size < 1000 * 1000 { + format!("{:.1}KB", size as f64 / 1000.0) + } else { + format!("{:.1}MB", size as f64 / (1000.0 * 1000.0)) + } + } + } +} + +impl Render for ImageInfo { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + let settings = ImageViewerSettings::get_global(cx); + + let Some(metadata) = self.metadata.as_ref() else { + return div(); + }; + + let mut components = Vec::new(); + components.push(format!("{}x{}", metadata.width, metadata.height)); + components.push(format_file_size(metadata.file_size, settings.unit)); + + if let Some(colors) = metadata.colors { + components.push(format!( + "{} channels, {} bits per pixel", + colors.channels, + colors.bits_per_pixel() + )); + } + + components.push( + match metadata.format { + ImageFormat::Png => "PNG", + ImageFormat::Jpeg => "JPEG", + ImageFormat::Gif => "GIF", + ImageFormat::WebP => "WebP", + ImageFormat::Tiff => "TIFF", + ImageFormat::Bmp => "BMP", + ImageFormat::Ico => "ICO", + ImageFormat::Avif => "Avif", + _ => "Unknown", + } + .to_string(), + ); + + div().child( + Button::new("image-metadata", components.join(" • 
")).label_size(LabelSize::Small), + ) + } +} + +impl StatusItemView for ImageInfo { + fn set_active_pane_item( + &mut self, + active_pane_item: Option<&dyn ItemHandle>, + _window: &mut Window, + cx: &mut Context, + ) { + self._observe_active_image = None; + self.observe_image_item = None; + + if let Some(image_view) = active_pane_item.and_then(|item| item.act_as::(cx)) { + self.update_metadata(&image_view, cx); + + self._observe_active_image = Some(cx.observe(&image_view, |this, view, cx| { + this.update_metadata(&view, cx); + })); + } else { + self.metadata = None; + } + cx.notify(); + } +} diff --git a/crates/image_viewer/src/image_viewer.rs b/crates/image_viewer/src/image_viewer.rs index c57c24e31942bf810ea15dad1871d60dda380fc3..178200933411263c3df4540b296520edb4816f83 100644 --- a/crates/image_viewer/src/image_viewer.rs +++ b/crates/image_viewer/src/image_viewer.rs @@ -1,3 +1,6 @@ +mod image_info; +mod image_viewer_settings; + use std::path::PathBuf; use anyhow::Context as _; @@ -19,7 +22,8 @@ use workspace::{ ItemId, ItemSettings, ToolbarItemLocation, Workspace, WorkspaceId, }; -const IMAGE_VIEWER_KIND: &str = "ImageView"; +pub use crate::image_info::*; +pub use crate::image_viewer_settings::*; pub struct ImageView { image_item: Entity, @@ -31,7 +35,6 @@ impl ImageView { pub fn new( image_item: Entity, project: Entity, - cx: &mut Context, ) -> Self { cx.subscribe(&image_item, Self::on_image_event).detach(); @@ -49,7 +52,9 @@ impl ImageView { cx: &mut Context, ) { match event { - ImageItemEvent::FileHandleChanged | ImageItemEvent::Reloaded => { + ImageItemEvent::MetadataUpdated + | ImageItemEvent::FileHandleChanged + | ImageItemEvent::Reloaded => { cx.emit(ImageViewEvent::TitleChanged); cx.notify(); } @@ -188,7 +193,7 @@ fn breadcrumbs_text_for_image(project: &Project, image: &ImageItem, cx: &App) -> impl SerializableItem for ImageView { fn serialized_item_kind() -> &'static str { - IMAGE_VIEWER_KIND + "ImageView" } fn deserialize( @@ -357,8 +362,9 @@ impl ProjectItem for ImageView { } pub fn init(cx: &mut App) { + ImageViewerSettings::register(cx); workspace::register_project_item::(cx); - workspace::register_serializable_item::(cx) + workspace::register_serializable_item::(cx); } mod persistence { diff --git a/crates/image_viewer/src/image_viewer_settings.rs b/crates/image_viewer/src/image_viewer_settings.rs new file mode 100644 index 0000000000000000000000000000000000000000..165e3c4a44b0e76a96c19503640e3d6e2d4d853a --- /dev/null +++ b/crates/image_viewer/src/image_viewer_settings.rs @@ -0,0 +1,42 @@ +use gpui::App; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use settings::{Settings, SettingsSources}; + +/// The settings for the image viewer. +#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, Default)] +pub struct ImageViewerSettings { + /// The unit to use for displaying image file sizes. + /// + /// Default: "binary" + #[serde(default)] + pub unit: ImageFileSizeUnit, +} + +#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema, Default)] +#[serde(rename_all = "snake_case")] +pub enum ImageFileSizeUnit { + /// Displays file size in binary units (e.g., KiB, MiB). + #[default] + Binary, + /// Displays file size in decimal units (e.g., KB, MB). 
+ Decimal, +} + +impl Settings for ImageViewerSettings { + const KEY: Option<&'static str> = Some("image_viewer"); + + type FileContent = Self; + + fn load( + sources: SettingsSources, + _: &mut App, + ) -> Result { + SettingsSources::::json_merge_with( + [sources.default] + .into_iter() + .chain(sources.user) + .chain(sources.server), + ) + } +} diff --git a/crates/project/src/image_store.rs b/crates/project/src/image_store.rs index 84f6c7cd94f28c85ea2e9c67124f18502dfb40ac..4aa42e57ddac4b5118b30a2b363c9fdf927a593d 100644 --- a/crates/project/src/image_store.rs +++ b/crates/project/src/image_store.rs @@ -2,12 +2,15 @@ use crate::{ worktree_store::{WorktreeStore, WorktreeStoreEvent}, Project, ProjectEntryId, ProjectItem, ProjectPath, }; -use anyhow::{Context as _, Result}; +use anyhow::{anyhow, Context as _, Result}; use collections::{hash_map, HashMap, HashSet}; use futures::{channel::oneshot, StreamExt}; use gpui::{ - hash, prelude::*, App, Context, Entity, EventEmitter, Img, Subscription, Task, WeakEntity, + hash, prelude::*, App, AsyncApp, Context, Entity, EventEmitter, Img, Subscription, Task, + WeakEntity, }; +pub use image::ImageFormat; +use image::{ExtendedColorType, GenericImageView, ImageReader}; use language::{DiskState, File}; use rpc::{AnyProtoClient, ErrorExt as _}; use std::ffi::OsStr; @@ -32,10 +35,12 @@ impl From for ImageId { } } +#[derive(Debug)] pub enum ImageItemEvent { ReloadNeeded, Reloaded, FileHandleChanged, + MetadataUpdated, } impl EventEmitter for ImageItem {} @@ -46,14 +51,106 @@ pub enum ImageStoreEvent { impl EventEmitter for ImageStore {} +#[derive(Debug, Clone, Copy)] +pub struct ImageMetadata { + pub width: u32, + pub height: u32, + pub file_size: u64, + pub colors: Option, + pub format: ImageFormat, +} + +#[derive(Debug, Clone, Copy)] +pub struct ImageColorInfo { + pub channels: u8, + pub bits_per_channel: u8, +} + +impl ImageColorInfo { + pub fn from_color_type(color_type: impl Into) -> Option { + let (channels, bits_per_channel) = match color_type.into() { + ExtendedColorType::L8 => (1, 8), + ExtendedColorType::L16 => (1, 16), + ExtendedColorType::La8 => (2, 8), + ExtendedColorType::La16 => (2, 16), + ExtendedColorType::Rgb8 => (3, 8), + ExtendedColorType::Rgb16 => (3, 16), + ExtendedColorType::Rgba8 => (4, 8), + ExtendedColorType::Rgba16 => (4, 16), + ExtendedColorType::A8 => (1, 8), + ExtendedColorType::Bgr8 => (3, 8), + ExtendedColorType::Bgra8 => (4, 8), + ExtendedColorType::Cmyk8 => (4, 8), + _ => return None, + }; + + Some(Self { + channels, + bits_per_channel, + }) + } + + pub const fn bits_per_pixel(&self) -> u8 { + self.channels * self.bits_per_channel + } +} + pub struct ImageItem { pub id: ImageId, pub file: Arc, pub image: Arc, reload_task: Option>, + pub image_metadata: Option, } impl ImageItem { + pub async fn load_image_metadata( + image: Entity, + project: Entity, + cx: &mut AsyncApp, + ) -> Result { + let (fs, image_path) = cx.update(|cx| { + let project_path = image.read(cx).project_path(cx); + + let worktree = project + .read(cx) + .worktree_for_id(project_path.worktree_id, cx) + .ok_or_else(|| anyhow!("worktree not found"))?; + let worktree_root = worktree.read(cx).abs_path(); + let image_path = image.read(cx).path(); + let image_path = if image_path.is_absolute() { + image_path.to_path_buf() + } else { + worktree_root.join(image_path) + }; + + let fs = project.read(cx).fs().clone(); + + anyhow::Ok((fs, image_path)) + })??; + + let image_bytes = fs.load_bytes(&image_path).await?; + let image_format = 
image::guess_format(&image_bytes)?; + + let mut image_reader = ImageReader::new(std::io::Cursor::new(image_bytes)); + image_reader.set_format(image_format); + let image = image_reader.decode()?; + + let (width, height) = image.dimensions(); + let file_metadata = fs + .metadata(image_path.as_path()) + .await? + .ok_or_else(|| anyhow!("failed to load image metadata"))?; + + Ok(ImageMetadata { + width, + height, + file_size: file_metadata.len, + format: image_format, + colors: ImageColorInfo::from_color_type(image.color()), + }) + } + pub fn project_path(&self, cx: &App) -> ProjectPath { ProjectPath { worktree_id: self.file.worktree_id(cx), @@ -391,6 +488,7 @@ impl ImageStoreImpl for Entity { id: cx.entity_id().as_non_zero_u64().into(), file: file.clone(), image, + image_metadata: None, reload_task: None, })?; diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 7f2f52aee63a2cc701150230693b60a91e9d2cdb..9d670291b6d7d29649a249b09b83193fb6237115 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -2075,8 +2075,25 @@ impl Project { return Task::ready(Err(anyhow!(ErrorCode::Disconnected))); } - self.image_store.update(cx, |image_store, cx| { + let open_image_task = self.image_store.update(cx, |image_store, cx| { image_store.open_image(path.into(), cx) + }); + + let weak_project = cx.entity().downgrade(); + cx.spawn(move |_, mut cx| async move { + let image_item = open_image_task.await?; + let project = weak_project + .upgrade() + .ok_or_else(|| anyhow!("Project dropped"))?; + + let metadata = + ImageItem::load_image_metadata(image_item.clone(), project, &mut cx).await?; + image_item.update(&mut cx, |image_item, cx| { + image_item.image_metadata = Some(metadata); + cx.emit(ImageItemEvent::MetadataUpdated); + })?; + + Ok(image_item) }) } diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index fa86f0d50ee6ec0e4ce24fcf84dd0fea144d242b..b0ecd056ccbae3915791ff3b2661b8d7f25122b9 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -6,7 +6,7 @@ use futures::{channel::mpsc, future::LocalBoxFuture, FutureExt, StreamExt}; use gpui::{App, AsyncApp, BorrowAppContext, Global, Task, UpdateGlobal}; use paths::{local_settings_file_relative_path, EDITORCONFIG_NAME}; use schemars::{gen::SchemaGenerator, schema::RootSchema, JsonSchema}; -use serde::{de::DeserializeOwned, Deserialize as _, Serialize}; +use serde::{de::DeserializeOwned, Deserialize, Serialize}; use smallvec::SmallVec; use std::{ any::{type_name, Any, TypeId}, diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 3b20a19fb4e17e86e9fe8f2054ca05e25bfa2f9a..3aa578de48615c77dce725df9b3f609f3c6d3678 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -26,6 +26,7 @@ use gpui::{ Entity, Focusable, KeyBinding, MenuItem, ParentElement, PathPromptOptions, PromptLevel, ReadGlobal, SharedString, Styled, Task, TitlebarOptions, Window, WindowKind, WindowOptions, }; +use image_viewer::ImageInfo; pub use open_listener::*; use outline_panel::OutlinePanel; use paths::{local_settings_file_relative_path, local_tasks_file_relative_path}; @@ -201,6 +202,7 @@ pub fn initialize_workspace( let active_toolchain_language = cx.new(|cx| toolchain_selector::ActiveToolchain::new(workspace, window, cx)); let vim_mode_indicator = cx.new(|cx| vim::ModeIndicator::new(window, cx)); + let image_info = cx.new(|_cx| ImageInfo::new(workspace)); let cursor_position = cx.new(|_| 
go_to_line::cursor_position::CursorPosition::new(workspace)); workspace.status_bar().update(cx, |status_bar, cx| { @@ -211,6 +213,7 @@ pub fn initialize_workspace( status_bar.add_right_item(active_toolchain_language, window, cx); status_bar.add_right_item(vim_mode_indicator, window, cx); status_bar.add_right_item(cursor_position, window, cx); + status_bar.add_right_item(image_info, window, cx); }); let handle = cx.entity().downgrade(); @@ -4053,6 +4056,7 @@ mod tests { app_state.client.http_client().clone(), cx, ); + image_viewer::init(cx); language_model::init(cx); language_models::init( app_state.user_store.clone(), From 888a2df3f0786f34e30ca86ff76c13cfd639b1d2 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 6 Feb 2025 21:14:57 -0500 Subject: [PATCH 101/130] Sort `Cargo.toml`s (#24417) This PR sorts the dependencies in a number of `Cargo.toml` files. Release Notes: - N/A --- crates/collab/Cargo.toml | 2 +- crates/diff/Cargo.toml | 12 ++++++------ crates/git/Cargo.toml | 12 ++++++------ crates/language_model/Cargo.toml | 4 ++-- crates/language_models/Cargo.toml | 4 ++-- crates/languages/Cargo.toml | 2 +- 6 files changed, 18 insertions(+), 18 deletions(-) diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index 7d61621c955a9c06d7f68b2e45988c7db5b5faa5..8f133affcbd9e1f56e735c235afb08c486b6484d 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -131,7 +131,7 @@ worktree = { workspace = true, features = ["test-support"] } livekit_client_macos = { workspace = true, features = ["test-support"] } [target.'cfg(not(target_os = "macos"))'.dev-dependencies] -livekit_client = {workspace = true, features = ["test-support"] } +livekit_client = { workspace = true, features = ["test-support"] } [package.metadata.cargo-machete] ignored = ["async-stripe"] diff --git a/crates/diff/Cargo.toml b/crates/diff/Cargo.toml index 6641fdf1cb67ff579eec4cf62ee3b2e0b902119a..7a4186f6e5aaf29dfa58a0b39b765c5f57dcd796 100644 --- a/crates/diff/Cargo.toml +++ b/crates/diff/Cargo.toml @@ -11,6 +11,9 @@ workspace = true [lib] path = "src/diff.rs" +[features] +test-support = [] + [dependencies] futures.workspace = true git2.workspace = true @@ -23,10 +26,7 @@ text.workspace = true util.workspace = true [dev-dependencies] -unindent.workspace = true -serde_json.workspace = true pretty_assertions.workspace = true -text = {workspace = true, features = ["test-support"]} - -[features] -test-support = [] +serde_json.workspace = true +text = { workspace = true, features = ["test-support"] } +unindent.workspace = true diff --git a/crates/git/Cargo.toml b/crates/git/Cargo.toml index 77b44ef23092617fc9c83b0f0654358e2f96e140..4eefe6c262fe59901cc020b70d084745e224691e 100644 --- a/crates/git/Cargo.toml +++ b/crates/git/Cargo.toml @@ -11,6 +11,9 @@ workspace = true [lib] path = "src/git.rs" +[features] +test-support = [] + [dependencies] anyhow.workspace = true async-trait.workspace = true @@ -32,10 +35,7 @@ url.workspace = true util.workspace = true [dev-dependencies] -unindent.workspace = true -serde_json.workspace = true pretty_assertions.workspace = true -text = {workspace = true, features = ["test-support"]} - -[features] -test-support = [] +serde_json.workspace = true +text = { workspace = true, features = ["test-support"] } +unindent.workspace = true diff --git a/crates/language_model/Cargo.toml b/crates/language_model/Cargo.toml index 74505b1780fd545fd3abde9d823bf2c278241212..8b4bc518f8c94e10937bb99e08c4018db5309356 100644 --- a/crates/language_model/Cargo.toml +++ 
b/crates/language_model/Cargo.toml @@ -20,16 +20,16 @@ anthropic = { workspace = true, features = ["schemars"] } anyhow.workspace = true base64.workspace = true collections.workspace = true +deepseek = { workspace = true, features = ["schemars"] } futures.workspace = true google_ai = { workspace = true, features = ["schemars"] } gpui.workspace = true http_client.workspace = true image.workspace = true +lmstudio = { workspace = true, features = ["schemars"] } log.workspace = true ollama = { workspace = true, features = ["schemars"] } open_ai = { workspace = true, features = ["schemars"] } -lmstudio = { workspace = true, features = ["schemars"] } -deepseek = { workspace = true, features = ["schemars"] } parking_lot.workspace = true proto.workspace = true schemars.workspace = true diff --git a/crates/language_models/Cargo.toml b/crates/language_models/Cargo.toml index 4d7590e40e031cdf6f45ad19d54c8a735ab73034..700ed739ace300ca22c782c5c8b73c574e917331 100644 --- a/crates/language_models/Cargo.toml +++ b/crates/language_models/Cargo.toml @@ -17,6 +17,7 @@ anyhow.workspace = true client.workspace = true collections.workspace = true copilot = { workspace = true, features = ["schemars"] } +deepseek = { workspace = true, features = ["schemars"] } editor.workspace = true feature_flags.workspace = true fs.workspace = true @@ -25,11 +26,10 @@ google_ai = { workspace = true, features = ["schemars"] } gpui.workspace = true http_client.workspace = true language_model.workspace = true +lmstudio = { workspace = true, features = ["schemars"] } menu.workspace = true ollama = { workspace = true, features = ["schemars"] } -lmstudio = { workspace = true, features = ["schemars"] } open_ai = { workspace = true, features = ["schemars"] } -deepseek = { workspace = true, features = ["schemars"] } project.workspace = true proto.workspace = true schemars.workspace = true diff --git a/crates/languages/Cargo.toml b/crates/languages/Cargo.toml index 99ee6997fda822b20d11c360cc3e6fbc9f0ba3a7..86ce3e5b1e41abd5eb1c8db834cb449697d84711 100644 --- a/crates/languages/Cargo.toml +++ b/crates/languages/Cargo.toml @@ -70,7 +70,7 @@ tree-sitter-c = { workspace = true, optional = true } tree-sitter-cpp = { workspace = true, optional = true } tree-sitter-css = { workspace = true, optional = true } tree-sitter-diff = { workspace = true, optional = true } -tree-sitter-gitcommit = {workspace = true, optional = true } +tree-sitter-gitcommit = { workspace = true, optional = true } tree-sitter-go = { workspace = true, optional = true } tree-sitter-go-mod = { workspace = true, optional = true } tree-sitter-gowork = { workspace = true, optional = true } From e689c8c01b2b04e076242e0748a4033fbe81730e Mon Sep 17 00:00:00 2001 From: Stanislav Alekseev <43210583+WeetHet@users.noreply.github.com> Date: Fri, 7 Feb 2025 04:37:50 +0200 Subject: [PATCH 102/130] markdown: Use parsed text (#24388) Fixes #15463 Release Notes: - Fixed display of symbols such as ` ` in hover popovers --- crates/editor/src/hover_popover.rs | 8 ++-- crates/markdown/examples/markdown_as_child.rs | 1 - crates/markdown/src/markdown.rs | 14 ++----- crates/markdown/src/parser.rs | 41 ++++++++++++++----- 4 files changed, 40 insertions(+), 24 deletions(-) diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index dd37c34afe65b1d66767f4bfb3d763b4d063b47a..128ee45341682e4dbf5a3f0561337d46c13c642d 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -598,7 +598,7 @@ async fn parse_blocks( }, syntax: 
cx.theme().syntax().clone(), selection_background_color: { cx.theme().players().local().selection }, - break_style: Default::default(), + heading: StyleRefinement::default() .font_weight(FontWeight::BOLD) .text_base() @@ -885,8 +885,10 @@ mod tests { let slice = data; for (range, event) in slice.iter() { - if [MarkdownEvent::Text, MarkdownEvent::Code].contains(event) { - rendered_text.push_str(&text[range.clone()]) + match event { + MarkdownEvent::Text(parsed) => rendered_text.push_str(parsed), + MarkdownEvent::Code => rendered_text.push_str(&text[range.clone()]), + _ => {} } } } diff --git a/crates/markdown/examples/markdown_as_child.rs b/crates/markdown/examples/markdown_as_child.rs index e2a919ae519e192fa7081fec8766ba60d6a97381..5aa543a4fcd8e220002963922920d9d3fa1b82a7 100644 --- a/crates/markdown/examples/markdown_as_child.rs +++ b/crates/markdown/examples/markdown_as_child.rs @@ -83,7 +83,6 @@ pub fn main() { selection.fade_out(0.7); selection }, - break_style: Default::default(), heading: Default::default(), }; let markdown = cx.new(|cx| { diff --git a/crates/markdown/src/markdown.rs b/crates/markdown/src/markdown.rs index cc525e4b596d892c38ec1c5279a2fa2f983bc38b..d6190c43dbed3a99091705218bc7f68993754c97 100644 --- a/crates/markdown/src/markdown.rs +++ b/crates/markdown/src/markdown.rs @@ -28,7 +28,6 @@ pub struct MarkdownStyle { pub block_quote_border_color: Hsla, pub syntax: Arc, pub selection_background_color: Hsla, - pub break_style: StyleRefinement, pub heading: StyleRefinement, } @@ -44,11 +43,11 @@ impl Default for MarkdownStyle { block_quote_border_color: Default::default(), syntax: Arc::new(SyntaxTheme::default()), selection_background_color: Default::default(), - break_style: Default::default(), heading: Default::default(), } } } + pub struct Markdown { source: String, selection: Selection, @@ -751,8 +750,8 @@ impl Element for MarkdownElement { } _ => log::error!("unsupported markdown tag end: {:?}", tag), }, - MarkdownEvent::Text => { - builder.push_text(&parsed_markdown.source[range.clone()], range.start); + MarkdownEvent::Text(parsed) => { + builder.push_text(parsed, range.start); } MarkdownEvent::Code => { builder.push_text_style(self.style.inline_code.clone()); @@ -777,12 +776,7 @@ impl Element for MarkdownElement { builder.pop_div() } MarkdownEvent::SoftBreak => builder.push_text(" ", range.start), - MarkdownEvent::HardBreak => { - let mut d = div().py_3(); - d.style().refine(&self.style.break_style); - builder.push_div(d, range, markdown_end); - builder.pop_div() - } + MarkdownEvent::HardBreak => builder.push_text("\n", range.start), _ => log::error!("unsupported markdown event {:?}", event), } } diff --git a/crates/markdown/src/parser.rs b/crates/markdown/src/parser.rs index b605a35af5ee6e8e8b3a4582b216f7eac90ff82b..9e69f3192e76c2134077e2c25503db7e4dbed341 100644 --- a/crates/markdown/src/parser.rs +++ b/crates/markdown/src/parser.rs @@ -37,9 +37,10 @@ pub fn parse_markdown(text: &str) -> Vec<(Range, MarkdownEvent)> { } events.push((range, MarkdownEvent::End(tag))); } - pulldown_cmark::Event::Text(_) => { + pulldown_cmark::Event::Text(parsed) => { // Automatically detect links in text if we're not already within a markdown // link. 
+ let mut parsed = parsed.as_ref(); if !within_link { let mut finder = LinkFinder::new(); finder.kinds(&[linkify::LinkKind::Url]); @@ -49,7 +50,12 @@ pub fn parse_markdown(text: &str) -> Vec<(Range, MarkdownEvent)> { text_range.start + link.start()..text_range.start + link.end(); if link_range.start > range.start { - events.push((range.start..link_range.start, MarkdownEvent::Text)); + let (text, tail) = parsed.split_at(link_range.start - range.start); + events.push(( + range.start..link_range.start, + MarkdownEvent::Text(SharedString::new(text)), + )); + parsed = tail; } events.push(( @@ -61,15 +67,20 @@ pub fn parse_markdown(text: &str) -> Vec<(Range, MarkdownEvent)> { id: SharedString::default(), }), )); - events.push((link_range.clone(), MarkdownEvent::Text)); + + let (link_text, tail) = parsed.split_at(link_range.end - link_range.start); + events.push(( + link_range.clone(), + MarkdownEvent::Text(SharedString::new(link_text)), + )); events.push((link_range.clone(), MarkdownEvent::End(MarkdownTagEnd::Link))); range.start = link_range.end; + parsed = tail; } } - if range.start < range.end { - events.push((range, MarkdownEvent::Text)); + events.push((range, MarkdownEvent::Text(SharedString::new(parsed)))); } } pulldown_cmark::Event::Code(_) => { @@ -94,7 +105,7 @@ pub fn parse_markdown(text: &str) -> Vec<(Range, MarkdownEvent)> { events } -pub fn parse_links_only(text: &str) -> Vec<(Range, MarkdownEvent)> { +pub fn parse_links_only(mut text: &str) -> Vec<(Range, MarkdownEvent)> { let mut events = Vec::new(); let mut finder = LinkFinder::new(); finder.kinds(&[linkify::LinkKind::Url]); @@ -106,9 +117,15 @@ pub fn parse_links_only(text: &str) -> Vec<(Range, MarkdownEvent)> { let link_range = link.start()..link.end(); if link_range.start > text_range.start { - events.push((text_range.start..link_range.start, MarkdownEvent::Text)); + let (head, tail) = text.split_at(link_range.start - text_range.start); + events.push(( + text_range.start..link_range.start, + MarkdownEvent::Text(SharedString::new(head)), + )); + text = tail; } + let (link_text, tail) = text.split_at(link_range.end - link_range.start); events.push(( link_range.clone(), MarkdownEvent::Start(MarkdownTag::Link { @@ -118,14 +135,18 @@ pub fn parse_links_only(text: &str) -> Vec<(Range, MarkdownEvent)> { id: SharedString::default(), }), )); - events.push((link_range.clone(), MarkdownEvent::Text)); + events.push(( + link_range.clone(), + MarkdownEvent::Text(SharedString::new(link_text)), + )); events.push((link_range.clone(), MarkdownEvent::End(MarkdownTagEnd::Link))); text_range.start = link_range.end; + text = tail; } if text_range.end > text_range.start { - events.push((text_range, MarkdownEvent::Text)); + events.push((text_range, MarkdownEvent::Text(SharedString::new(text)))); } events @@ -142,7 +163,7 @@ pub enum MarkdownEvent { /// End of a tagged element. End(MarkdownTagEnd), /// A text node. - Text, + Text(SharedString), /// An inline code node. Code, /// An HTML node. From c5913899d9d776ecb3e5abe563b609216257f0f1 Mon Sep 17 00:00:00 2001 From: Jason Lee Date: Fri, 7 Feb 2025 10:51:00 +0800 Subject: [PATCH 103/130] gpui: Fix `text-align` with nowrap mode (#24116) Release Notes: - N/A ------ - Continue #24090 to fix text align for when used `whitespace_nowrap`. - Fix wrapped line length calculation. 
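The underlying change is to pass the element's bounds into `WrappedLine::paint`, so that alignment for non-wrapping text is computed against the container width rather than the line's wrap width. As a rough illustration of the offset math only — not gpui's actual code; `aligned_x_offset` and the plain `f32` widths are simplified stand-ins for the real `Pixels`-based layout:

```rust
// Illustrative sketch: the x offset a line needs for a given alignment, given the
// width it is laid out against and its own measured width.
#[derive(Clone, Copy)]
enum TextAlign {
    Left,
    Center,
    Right,
}

fn aligned_x_offset(available_width: f32, line_width: f32, align: TextAlign) -> f32 {
    match align {
        TextAlign::Left => 0.0,
        TextAlign::Center => (available_width - line_width) / 2.0,
        TextAlign::Right => available_width - line_width,
    }
}

fn main() {
    // A 100px line painted right-aligned inside a 400px nowrap container starts 300px in.
    assert_eq!(aligned_x_offset(400.0, 100.0, TextAlign::Right), 300.0);
}
```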
And add example ``` cargo run -p gpui --example text_layout ``` image --------- Co-authored-by: Owen Law --- crates/gpui/examples/text_layout.rs | 64 +++++++++++++++++++++++++++++ crates/gpui/src/elements/div.rs | 2 +- crates/gpui/src/elements/text.rs | 11 ++++- crates/gpui/src/text_system/line.rs | 22 ++++------ 4 files changed, 83 insertions(+), 16 deletions(-) create mode 100644 crates/gpui/examples/text_layout.rs diff --git a/crates/gpui/examples/text_layout.rs b/crates/gpui/examples/text_layout.rs new file mode 100644 index 0000000000000000000000000000000000000000..d855b6434857754a366cb39e6c81a92d4e76d0a4 --- /dev/null +++ b/crates/gpui/examples/text_layout.rs @@ -0,0 +1,64 @@ +use gpui::{ + div, prelude::*, px, size, App, Application, Bounds, Context, Window, WindowBounds, + WindowOptions, +}; + +struct HelloWorld {} + +impl Render for HelloWorld { + fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { + div() + .bg(gpui::white()) + .flex() + .flex_col() + .gap_3() + .p_4() + .size_full() + .child(div().child("Text left")) + .child(div().text_center().child("Text center")) + .child(div().text_right().child("Text right")) + .child( + div() + .flex() + .gap_2() + .justify_between() + .child( + div() + .w(px(400.)) + .border_1() + .border_color(gpui::blue()) + .p_1() + .whitespace_nowrap() + .overflow_hidden() + .text_center() + .child("A long non-wrapping text align center"), + ) + .child( + div() + .w_32() + .border_1() + .border_color(gpui::blue()) + .p_1() + .whitespace_nowrap() + .overflow_hidden() + .text_right() + .child("100%"), + ), + ) + } +} + +fn main() { + Application::new().run(|cx: &mut App| { + let bounds = Bounds::centered(None, size(px(800.0), px(600.0)), cx); + cx.open_window( + WindowOptions { + window_bounds: Some(WindowBounds::Windowed(bounds)), + ..Default::default() + }, + |_, cx| cx.new(|_| HelloWorld {}), + ) + .unwrap(); + cx.activate(true); + }); +} diff --git a/crates/gpui/src/elements/div.rs b/crates/gpui/src/elements/div.rs index 6460172168c255b80661a76f78e651db3d65f56b..26c43df702bd8efbadab59544ae877d53dc22394 100644 --- a/crates/gpui/src/elements/div.rs +++ b/crates/gpui/src/elements/div.rs @@ -1684,7 +1684,7 @@ impl Interactivity { .ok() .and_then(|mut text| text.pop()) { - text.paint(hitbox.origin, FONT_SIZE, TextAlign::Left, window, cx) + text.paint(hitbox.origin, FONT_SIZE, TextAlign::Left, None, window, cx) .ok(); let text_bounds = crate::Bounds { diff --git a/crates/gpui/src/elements/text.rs b/crates/gpui/src/elements/text.rs index 21913af93f2770d69629a107afb4a6a00d31d80b..132135d4d604bb7927025c4df4f54a6fe31cfe88 100644 --- a/crates/gpui/src/elements/text.rs +++ b/crates/gpui/src/elements/text.rs @@ -392,8 +392,15 @@ impl TextLayout { let mut line_origin = bounds.origin; let text_style = window.text_style(); for line in &element_state.lines { - line.paint(line_origin, line_height, text_style.text_align, window, cx) - .log_err(); + line.paint( + line_origin, + line_height, + text_style.text_align, + Some(bounds), + window, + cx, + ) + .log_err(); line_origin.y += line.size(line_height).height; } } diff --git a/crates/gpui/src/text_system/line.rs b/crates/gpui/src/text_system/line.rs index 5c14565c1572f75c188c5610e9f9d3e23d2a604f..27da7d66c59a87f896fc18591f0d8d26deb84e6f 100644 --- a/crates/gpui/src/text_system/line.rs +++ b/crates/gpui/src/text_system/line.rs @@ -107,15 +107,21 @@ impl WrappedLine { origin: Point, line_height: Pixels, align: TextAlign, + bounds: Option>, window: &mut Window, cx: &mut App, ) -> 
Result<()> { + let align_width = match bounds { + Some(bounds) => Some(bounds.size.width), + None => self.layout.wrap_width, + }; + paint_line( origin, &self.layout.unwrapped_layout, line_height, align, - self.layout.wrap_width, + align_width, &self.decoration_runs, &self.wrap_boundaries, window, @@ -222,7 +228,7 @@ fn paint_line( glyph_origin.x = aligned_origin_x( origin, align_width.unwrap_or(layout.width), - prev_glyph_position.x, + glyph.position.x, &align, layout, wraps.peek(), @@ -426,17 +432,7 @@ fn aligned_origin_x( wrap_boundary: Option<&&WrapBoundary>, ) -> Pixels { let end_of_line = if let Some(WrapBoundary { run_ix, glyph_ix }) = wrap_boundary { - if layout.runs[*run_ix].glyphs.len() == glyph_ix + 1 { - // Next glyph is in next run - layout - .runs - .get(run_ix + 1) - .and_then(|run| run.glyphs.first()) - .map_or(layout.width, |glyph| glyph.position.x) - } else { - // Get next glyph - layout.runs[*run_ix].glyphs[*glyph_ix + 1].position.x - } + layout.runs[*run_ix].glyphs[*glyph_ix].position.x } else { layout.width }; From d83c316e6d37130ac353e552a7430e432eaa033c Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Thu, 6 Feb 2025 22:04:02 -0500 Subject: [PATCH 104/130] Fix Project Panel `select_next_git_entry` action (#24217) ## Context I noticed that the project panel `select_next_git_entry` wasn't behaving correctly. Turns out it was searching in reverse, which caused the action to select itself or the last entry. This PR corrects the behavior and adds a unit test that should stop regressions. Note: Since select next/prev git entry uses the same function as select next/prev diagnostic, the test partially works for that as well. Release Notes: - Fix bug where `select_next_git_entry` project panel action would only select a previous entry or the currently selected entry. 
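Going by the diff, the fix flips the boolean passed to `find_entry` in `select_next_git_entry` (the flag that, per the description above, drove the reverse search), so the scan now walks forward from the current selection and wraps around. A standalone sketch of that forward-with-wraparound selection — illustrative only; the real `find_entry` works over worktree-aware panel state rather than a plain slice:

```rust
// Illustrative sketch: pick the next matching entry after `current`, wrapping around,
// and never re-selecting the current entry itself.
fn select_next_matching(
    entries: &[&str],
    current: Option<usize>,
    is_match: impl Fn(&str) -> bool,
) -> Option<usize> {
    let len = entries.len();
    let start = current.map_or(0, |i| i + 1);
    (0..len)
        .map(|offset| (start + offset) % len)
        .find(|&ix| Some(ix) != current && is_match(entries[ix]))
}

fn main() {
    let entries = ["a.txt", "modified1.txt", "b.txt", "modified2.txt"];
    let is_modified = |name: &str| name.starts_with("modified");
    // From modified1.txt the next git entry is modified2.txt, not itself.
    assert_eq!(select_next_matching(&entries, Some(1), &is_modified), Some(3));
    // From the last modified entry the search wraps back around to the first one.
    assert_eq!(select_next_matching(&entries, Some(3), &is_modified), Some(1));
}
```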
--------- Co-authored-by: Mikayla Maki --- Cargo.lock | 2 +- crates/project_panel/src/project_panel.rs | 282 +++++++++++++++++++++- 2 files changed, 282 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 205ffc66937390a9d3cc9e182dea49f9377c0125..eae4c7bea9f75d92fdc6ae531a2c414d5399c5bb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7184,7 +7184,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" dependencies = [ "cfg-if", - "windows-targets 0.52.6", + "windows-targets 0.48.5", ] [[package]] diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index b2c0ab89236b018f88e67969208537658179df71..c4af13f6cbd7f8b13db0a9723abe3a346ebabb24 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -1866,7 +1866,7 @@ impl ProjectPanel { ) { let selection = self.find_entry( self.selection.as_ref(), - true, + false, |entry, worktree_id| { (self.selection.is_none() || self.selection.is_some_and(|selection| { @@ -6726,6 +6726,286 @@ mod tests { ); } + #[gpui::test] + async fn test_select_git_entry(cx: &mut gpui::TestAppContext) { + use git::status::{FileStatus, StatusCode, TrackedStatus}; + use std::path::Path; + + init_test_with_editor(cx); + + let fs = FakeFs::new(cx.executor().clone()); + fs.insert_tree( + "/root", + json!({ + "tree1": { + ".git": {}, + "dir1": { + "modified1.txt": "", + "unmodified1.txt": "", + "modified2.txt": "", + }, + "dir2": { + "modified3.txt": "", + "unmodified2.txt": "", + }, + "modified4.txt": "", + "unmodified3.txt": "", + }, + "tree2": { + ".git": {}, + "dir3": { + "modified5.txt": "", + "unmodified4.txt": "", + }, + "modified6.txt": "", + "unmodified5.txt": "", + } + }), + ) + .await; + + // Mark files as git modified + let tree1_modified_files = [ + "dir1/modified1.txt", + "dir1/modified2.txt", + "modified4.txt", + "dir2/modified3.txt", + ]; + + let tree2_modified_files = ["dir3/modified5.txt", "modified6.txt"]; + + let root1_dot_git = Path::new("/root/tree1/.git"); + let root2_dot_git = Path::new("/root/tree2/.git"); + let set_value = FileStatus::Tracked(TrackedStatus { + index_status: StatusCode::Modified, + worktree_status: StatusCode::Modified, + }); + + fs.with_git_state(&root1_dot_git, true, |git_repo_state| { + for file_path in tree1_modified_files { + git_repo_state.statuses.insert(file_path.into(), set_value); + } + }); + + fs.with_git_state(&root2_dot_git, true, |git_repo_state| { + for file_path in tree2_modified_files { + git_repo_state.statuses.insert(file_path.into(), set_value); + } + }); + + let project = Project::test( + fs.clone(), + ["/root/tree1".as_ref(), "/root/tree2".as_ref()], + cx, + ) + .await; + let workspace = + cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); + let cx = &mut VisualTestContext::from_window(*workspace, cx); + let panel = workspace.update(cx, ProjectPanel::new).unwrap(); + + // Check initial state + assert_eq!( + visible_entries_as_strings(&panel, 0..15, cx), + &[ + "v tree1", + " > .git", + " > dir1", + " > dir2", + " modified4.txt", + " unmodified3.txt", + "v tree2", + " > .git", + " > dir3", + " modified6.txt", + " unmodified5.txt" + ], + ); + + // Test selecting next modified entry + panel.update_in(cx, |panel, window, cx| { + panel.select_next_git_entry(&SelectNextGitEntry, window, cx); + }); + + assert_eq!( + visible_entries_as_strings(&panel, 0..6, cx), + &[ + "v tree1", + " > .git", + " 
v dir1", + " modified1.txt <== selected", + " modified2.txt", + " unmodified1.txt", + ], + ); + + panel.update_in(cx, |panel, window, cx| { + panel.select_next_git_entry(&SelectNextGitEntry, window, cx); + }); + + assert_eq!( + visible_entries_as_strings(&panel, 0..6, cx), + &[ + "v tree1", + " > .git", + " v dir1", + " modified1.txt", + " modified2.txt <== selected", + " unmodified1.txt", + ], + ); + + panel.update_in(cx, |panel, window, cx| { + panel.select_next_git_entry(&SelectNextGitEntry, window, cx); + }); + + assert_eq!( + visible_entries_as_strings(&panel, 6..9, cx), + &[ + " v dir2", + " modified3.txt <== selected", + " unmodified2.txt", + ], + ); + + panel.update_in(cx, |panel, window, cx| { + panel.select_next_git_entry(&SelectNextGitEntry, window, cx); + }); + + assert_eq!( + visible_entries_as_strings(&panel, 9..11, cx), + &[" modified4.txt <== selected", " unmodified3.txt",], + ); + + panel.update_in(cx, |panel, window, cx| { + panel.select_next_git_entry(&SelectNextGitEntry, window, cx); + }); + + assert_eq!( + visible_entries_as_strings(&panel, 13..16, cx), + &[ + " v dir3", + " modified5.txt <== selected", + " unmodified4.txt", + ], + ); + + panel.update_in(cx, |panel, window, cx| { + panel.select_next_git_entry(&SelectNextGitEntry, window, cx); + }); + + assert_eq!( + visible_entries_as_strings(&panel, 16..18, cx), + &[" modified6.txt <== selected", " unmodified5.txt",], + ); + + // Wraps around to first modified file + panel.update_in(cx, |panel, window, cx| { + panel.select_next_git_entry(&SelectNextGitEntry, window, cx); + }); + + assert_eq!( + visible_entries_as_strings(&panel, 0..18, cx), + &[ + "v tree1", + " > .git", + " v dir1", + " modified1.txt <== selected", + " modified2.txt", + " unmodified1.txt", + " v dir2", + " modified3.txt", + " unmodified2.txt", + " modified4.txt", + " unmodified3.txt", + "v tree2", + " > .git", + " v dir3", + " modified5.txt", + " unmodified4.txt", + " modified6.txt", + " unmodified5.txt", + ], + ); + + // Wraps around again to last modified file + panel.update_in(cx, |panel, window, cx| { + panel.select_prev_git_entry(&SelectPrevGitEntry, window, cx); + }); + + assert_eq!( + visible_entries_as_strings(&panel, 16..18, cx), + &[" modified6.txt <== selected", " unmodified5.txt",], + ); + + panel.update_in(cx, |panel, window, cx| { + panel.select_prev_git_entry(&SelectPrevGitEntry, window, cx); + }); + + assert_eq!( + visible_entries_as_strings(&panel, 13..16, cx), + &[ + " v dir3", + " modified5.txt <== selected", + " unmodified4.txt", + ], + ); + + panel.update_in(cx, |panel, window, cx| { + panel.select_prev_git_entry(&SelectPrevGitEntry, window, cx); + }); + + assert_eq!( + visible_entries_as_strings(&panel, 9..11, cx), + &[" modified4.txt <== selected", " unmodified3.txt",], + ); + + panel.update_in(cx, |panel, window, cx| { + panel.select_prev_git_entry(&SelectPrevGitEntry, window, cx); + }); + + assert_eq!( + visible_entries_as_strings(&panel, 6..9, cx), + &[ + " v dir2", + " modified3.txt <== selected", + " unmodified2.txt", + ], + ); + + panel.update_in(cx, |panel, window, cx| { + panel.select_prev_git_entry(&SelectPrevGitEntry, window, cx); + }); + + assert_eq!( + visible_entries_as_strings(&panel, 0..6, cx), + &[ + "v tree1", + " > .git", + " v dir1", + " modified1.txt", + " modified2.txt <== selected", + " unmodified1.txt", + ], + ); + + panel.update_in(cx, |panel, window, cx| { + panel.select_prev_git_entry(&SelectPrevGitEntry, window, cx); + }); + + assert_eq!( + visible_entries_as_strings(&panel, 0..6, cx), + &[ + "v 
tree1", + " > .git", + " v dir1", + " modified1.txt <== selected", + " modified2.txt", + " unmodified1.txt", + ], + ); + } + #[gpui::test] async fn test_select_directory(cx: &mut gpui::TestAppContext) { init_test_with_editor(cx); From 8646d37c0c2afcc5a877f46cee30b5e5c07a41e4 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 6 Feb 2025 20:24:41 -0700 Subject: [PATCH 105/130] vim: Replace with Register (#24326) Closes #18813 Release Notes: - vim: Add `gr` for [replace with register](https://github.com/vim-scripts/ReplaceWithRegister) --- assets/keymaps/vim.json | 1 + crates/vim/src/normal.rs | 6 ++ crates/vim/src/normal/paste.rs | 110 ++++++++++++++++++++++++++++++++- crates/vim/src/state.rs | 3 + docs/src/vim.md | 1 + 5 files changed, 120 insertions(+), 1 deletion(-) diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 4b6aa72b68441a5b17360b2932b6af20c6f8a8c6..22bc32cec8548b580eedfacd250847c382467837 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -102,6 +102,7 @@ "ctrl-e": "vim::LineDown", "ctrl-y": "vim::LineUp", // "g" commands + "g r": ["vim::PushOperator", "ReplaceWithRegister"], "g g": "vim::StartOfDocument", "g h": "editor::Hover", "g t": "pane::ActivateNextItem", diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index 4d537f8fd7dff14ca0b0050fa107500f31254ad4..d84285fad67114417c02815c08cfbe1dd6acd767 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -182,6 +182,9 @@ impl Vim { Some(Operator::ToggleComments) => { self.toggle_comments_motion(motion, times, window, cx) } + Some(Operator::ReplaceWithRegister) => { + self.replace_with_register_motion(motion, times, window, cx) + } Some(operator) => { // Can't do anything for text objects, Ignoring error!("Unexpected normal mode motion operator: {:?}", operator) @@ -228,6 +231,9 @@ impl Vim { Some(Operator::ToggleComments) => { self.toggle_comments_object(object, around, window, cx) } + Some(Operator::ReplaceWithRegister) => { + self.replace_with_register_object(object, around, window, cx) + } _ => { // Can't do anything for namespace operators. Ignoring } diff --git a/crates/vim/src/normal/paste.rs b/crates/vim/src/normal/paste.rs index e47e48a6b496f4853bec780506791e2b7cae4f27..eefd92d0d14023bde230228235ee3007689db626 100644 --- a/crates/vim/src/normal/paste.rs +++ b/crates/vim/src/normal/paste.rs @@ -6,6 +6,8 @@ use serde::Deserialize; use std::cmp; use crate::{ + motion::Motion, + object::Object, state::{Mode, Register}, Vim, }; @@ -192,12 +194,85 @@ impl Vim { }); self.switch_mode(Mode::Normal, true, window, cx); } + + pub fn replace_with_register_object( + &mut self, + object: Object, + around: bool, + window: &mut Window, + cx: &mut Context, + ) { + self.stop_recording(cx); + let selected_register = self.selected_register.take(); + self.update_editor(window, cx, |_, editor, window, cx| { + editor.transact(window, cx, |editor, window, cx| { + editor.set_clip_at_line_ends(false, cx); + editor.change_selections(None, window, cx, |s| { + s.move_with(|map, selection| { + object.expand_selection(map, selection, around); + }); + }); + + let Some(Register { text, .. 
}) = Vim::update_globals(cx, |globals, cx| { + globals.read_register(selected_register, Some(editor), cx) + }) + .filter(|reg| !reg.text.is_empty()) else { + return; + }; + editor.insert(&text, window, cx); + editor.set_clip_at_line_ends(true, cx); + editor.change_selections(None, window, cx, |s| { + s.move_with(|map, selection| { + selection.start = map.clip_point(selection.start, Bias::Left); + selection.end = selection.start + }) + }) + }); + }); + } + + pub fn replace_with_register_motion( + &mut self, + motion: Motion, + times: Option, + window: &mut Window, + cx: &mut Context, + ) { + self.stop_recording(cx); + let selected_register = self.selected_register.take(); + self.update_editor(window, cx, |_, editor, window, cx| { + let text_layout_details = editor.text_layout_details(window); + editor.transact(window, cx, |editor, window, cx| { + editor.set_clip_at_line_ends(false, cx); + editor.change_selections(None, window, cx, |s| { + s.move_with(|map, selection| { + motion.expand_selection(map, selection, times, false, &text_layout_details); + }); + }); + + let Some(Register { text, .. }) = Vim::update_globals(cx, |globals, cx| { + globals.read_register(selected_register, Some(editor), cx) + }) + .filter(|reg| !reg.text.is_empty()) else { + return; + }; + editor.insert(&text, window, cx); + editor.set_clip_at_line_ends(true, cx); + editor.change_selections(None, window, cx, |s| { + s.move_with(|map, selection| { + selection.start = map.clip_point(selection.start, Bias::Left); + selection.end = selection.start + }) + }) + }); + }); + } } #[cfg(test)] mod test { use crate::{ - state::Mode, + state::{Mode, Register}, test::{NeovimBackedTestContext, VimTestContext}, UseSystemClipboard, VimSettings, }; @@ -742,4 +817,37 @@ mod test { Mode::Normal, ); } + + #[gpui::test] + async fn test_replace_with_register(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + cx.set_state( + indoc! {" + ˇfish one + two three + "}, + Mode::Normal, + ); + cx.simulate_keystrokes("y i w"); + cx.simulate_keystrokes("w"); + cx.simulate_keystrokes("g r i w"); + cx.assert_state( + indoc! {" + fish fisˇh + two three + "}, + Mode::Normal, + ); + cx.simulate_keystrokes("j b g r e"); + cx.assert_state( + indoc! {" + fish fish + two fisˇh + "}, + Mode::Normal, + ); + let clipboard: Register = cx.read_from_clipboard().unwrap().into(); + assert_eq!(clipboard.text, "fish"); + } } diff --git a/crates/vim/src/state.rs b/crates/vim/src/state.rs index ddd83f4666b759335481732b5a6172d3015bac1e..a4994fb4d3db07c02841ea7c12dccf4234377d63 100644 --- a/crates/vim/src/state.rs +++ b/crates/vim/src/state.rs @@ -111,6 +111,7 @@ pub enum Operator { RecordRegister, ReplayRegister, ToggleComments, + ReplaceWithRegister, } #[derive(Default, Clone, Debug)] @@ -499,6 +500,7 @@ impl Operator { Operator::AutoIndent => "eq", Operator::ShellCommand => "sh", Operator::Rewrap => "gq", + Operator::ReplaceWithRegister => "gr", Operator::Outdent => "<", Operator::Uppercase => "gU", Operator::Lowercase => "gu", @@ -551,6 +553,7 @@ impl Operator { | Operator::ShellCommand | Operator::Lowercase | Operator::Uppercase + | Operator::ReplaceWithRegister | Operator::Object { .. 
} | Operator::ChangeSurrounds { target: None } | Operator::OppositeCase diff --git a/docs/src/vim.md b/docs/src/vim.md index 96f124897f25ecf7d93715cfceaf7ebf137ca3a6..25e44dda3f0efcc1487f562da0f54b715f05afda 100644 --- a/docs/src/vim.md +++ b/docs/src/vim.md @@ -159,6 +159,7 @@ Zed's vim mode includes some features that are usually provided by very popular - You can comment and uncomment selections with `gc` in visual mode and `gcc` in normal mode. - The project panel supports many shortcuts modeled after the Vim plugin `netrw`: navigation with `hjkl`, open file with `o`, open file in a new tab with `t`, etc. - You can add key bindings to your keymap to navigate "camelCase" names. [Head down to the Optional key bindings](#optional-key-bindings) section to learn how. +- You can use `gr` to do [ReplaceWithRegister](https://github.com/vim-scripts/ReplaceWithRegister). ## Command palette From a42e04066012b93aa9e17a7d450f32e057875345 Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Thu, 6 Feb 2025 20:46:15 -0700 Subject: [PATCH 106/130] Remove use of `use_key_equivalents` from linux keymap as it does nothing (#24422) `use_key_equivalents` does nothing on linux, as key equivalents are only supported on mac. While it could be sensible to anticipate support, right now it is only used in these few spots, so removing it. Release Notes: - N/A --- assets/keymaps/default-linux.json | 8 -------- 1 file changed, 8 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index fb7104f4d4c4022aa70ae877f6b6b39928de5507..ae858b6a029aaf3b6179370322581bdbf2c19a2f 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -496,7 +496,6 @@ }, { "context": "Editor && showing_completions", - "use_key_equivalents": true, "bindings": { "enter": "editor::ConfirmCompletion", "tab": "editor::ComposeCompletion" @@ -511,7 +510,6 @@ }, { "context": "Editor && inline_completion && !inline_completion_requires_modifier", - "use_key_equivalents": true, "bindings": { "tab": "editor::AcceptInlineCompletion" } @@ -602,14 +600,12 @@ }, { "context": "MessageEditor > Editor", - "use_key_equivalents": true, "bindings": { "enter": "assistant2::Chat" } }, { "context": "ContextStrip", - "use_key_equivalents": true, "bindings": { "up": "assistant2::FocusUp", "right": "assistant2::FocusRight", @@ -702,14 +698,12 @@ }, { "context": "GitPanel && !CommitEditor", - "use_key_equivalents": true, "bindings": { "escape": "git_panel::Close" } }, { "context": "GitPanel && ChangesList", - "use_key_equivalents": true, "bindings": { "up": "menu::SelectPrev", "down": "menu::SelectNext", @@ -721,7 +715,6 @@ }, { "context": "GitPanel && CommitEditor > Editor", - "use_key_equivalents": true, "bindings": { "escape": "git_panel::FocusChanges", "ctrl-enter": "git::CommitChanges", @@ -833,7 +826,6 @@ }, { "context": "ZedPredictModal", - "use_key_equivalents": true, "bindings": { "escape": "menu::Cancel" } From d97adfc540afd3f4947c6641aa63bc13759dac7f Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Thu, 6 Feb 2025 23:18:59 -0500 Subject: [PATCH 107/130] Fix pairs of almost-adjacent hunks toggling together (#24355) Release Notes: - Fixed a bug where toggling a diff hunk that immediately precedes another hunk would act on both hunks --- crates/editor/src/editor.rs | 18 ++++++--- crates/editor/src/element.rs | 2 +- crates/multi_buffer/src/multi_buffer.rs | 53 ++++++++++++++++++++----- 3 files changed, 56 insertions(+), 17 deletions(-) diff --git a/crates/editor/src/editor.rs 
b/crates/editor/src/editor.rs index d6ed7d27d9366ab0e1e37304ab8a1b0116462a2d..b2c87d63b5cb96f264f8e2367df121c2d8e267c1 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -12237,11 +12237,19 @@ impl Editor { cx: &mut Context<'_, Editor>, ) { self.buffer.update(cx, |buffer, cx| { - if buffer.has_expanded_diff_hunks_in_ranges(&ranges, cx) { - buffer.collapse_diff_hunks(ranges, cx) - } else { - buffer.expand_diff_hunks(ranges, cx) - } + let expand = !buffer.has_expanded_diff_hunks_in_ranges(&ranges, cx); + buffer.expand_or_collapse_diff_hunks(ranges, expand, cx); + }) + } + + fn toggle_diff_hunks_in_ranges_narrow( + &mut self, + ranges: Vec>, + cx: &mut Context<'_, Editor>, + ) { + self.buffer.update(cx, |buffer, cx| { + let expand = !buffer.has_expanded_diff_hunks_in_ranges(&ranges, cx); + buffer.expand_or_collapse_diff_hunks_narrow(ranges, expand, cx); }) } diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index c95b70b26338643671c63307549155304977c1ac..1a56c8954abaa93a3ac525151a8865b1b67afe84 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -559,7 +559,7 @@ impl EditorElement { let mut modifiers = event.modifiers; if let Some(hovered_hunk) = hovered_hunk { - editor.toggle_diff_hunks_in_ranges(vec![hovered_hunk], cx); + editor.toggle_diff_hunks_in_ranges_narrow(vec![hovered_hunk], cx); cx.notify(); return; } else if gutter_hitbox.is_hovered(window) { diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 4f0f3a18bddb6c725c9ccbbdf3c6441d7594dffa..9c80fcedd3ebbaf443d1cce3c5c7ae81a59763d5 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -2376,24 +2376,17 @@ impl MultiBuffer { false } - fn expand_or_collapse_diff_hunks( + fn expand_or_collapse_diff_hunks_internal( &mut self, - ranges: Vec>, + ranges: impl Iterator, ExcerptId)>, expand: bool, cx: &mut Context, ) { self.sync(cx); let mut snapshot = self.snapshot.borrow_mut(); let mut excerpt_edits = Vec::new(); - for range in ranges.iter() { - let end_excerpt_id = range.end.excerpt_id; - let range = range.to_point(&snapshot); - let mut peek_end = range.end; - if range.end.row < snapshot.max_row().0 { - peek_end = Point::new(range.end.row + 1, 0); - }; - - for diff_hunk in snapshot.diff_hunks_in_range(range.start..peek_end) { + for (range, end_excerpt_id) in ranges { + for diff_hunk in snapshot.diff_hunks_in_range(range) { if diff_hunk.excerpt_id.cmp(&end_excerpt_id, &snapshot).is_gt() { continue; } @@ -2428,6 +2421,44 @@ impl MultiBuffer { }); } + pub fn expand_or_collapse_diff_hunks_narrow( + &mut self, + ranges: Vec>, + expand: bool, + cx: &mut Context, + ) { + let snapshot = self.snapshot.borrow().clone(); + self.expand_or_collapse_diff_hunks_internal( + ranges + .iter() + .map(move |range| (range.to_point(&snapshot), range.end.excerpt_id)), + expand, + cx, + ); + } + + pub fn expand_or_collapse_diff_hunks( + &mut self, + ranges: Vec>, + expand: bool, + cx: &mut Context, + ) { + let snapshot = self.snapshot.borrow().clone(); + self.expand_or_collapse_diff_hunks_internal( + ranges.iter().map(move |range| { + let end_excerpt_id = range.end.excerpt_id; + let range = range.to_point(&snapshot); + let mut peek_end = range.end; + if range.end.row < snapshot.max_row().0 { + peek_end = Point::new(range.end.row + 1, 0); + }; + (range.start..peek_end, end_excerpt_id) + }), + expand, + cx, + ); + } + pub fn resize_excerpt( &mut self, id: ExcerptId, From 
35ef269233007cb1c0542323976aa5c68a6b1794 Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Thu, 6 Feb 2025 21:35:22 -0700 Subject: [PATCH 108/130] Fix build of remote_server when not in git repo (#24424) Followup to #24258 Release Notes: - N/A --- crates/remote_server/src/main.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/crates/remote_server/src/main.rs b/crates/remote_server/src/main.rs index 080b7e1af062f097048b6215d7b351c720f7ed1b..715380dbf9a9b8539767f97159efb889bfed77bf 100644 --- a/crates/remote_server/src/main.rs +++ b/crates/remote_server/src/main.rs @@ -79,7 +79,10 @@ fn main() { println!("{}", env!("ZED_PKG_VERSION")) } ReleaseChannel::Nightly | ReleaseChannel::Dev => { - println!("{}", env!("ZED_COMMIT_SHA")) + println!( + "{}", + option_env!("ZED_COMMIT_SHA").unwrap_or(release_channel.dev_name()) + ) } }; std::process::exit(0); From 864c1ff00c9be8368d1cb567171558ca72b2a7e4 Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Thu, 6 Feb 2025 21:38:09 -0700 Subject: [PATCH 109/130] Use `commondir` from libgit2 instead of walking fs (#22028) Release Notes: - N/A --- crates/git/src/repository.rs | 38 ++++++++++++++++++++++++--------- crates/worktree/src/worktree.rs | 32 ++++++++++++--------------- 2 files changed, 42 insertions(+), 28 deletions(-) diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 58dc9a9dce724167e242165a4dae6e6f6369efdb..9bf4c4da12d60d5aa0533c54c69a279a29172c66 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -58,8 +58,17 @@ pub trait GitRepository: Send + Sync { fn blame(&self, path: &Path, content: Rope) -> Result; - /// Returns the path to the repository, typically the `.git` folder. - fn dot_git_dir(&self) -> PathBuf; + /// Returns the absolute path to the repository. For worktrees, this will be the path to the + /// worktree's gitdir within the main repository (typically `.git/worktrees/`). + fn path(&self) -> PathBuf; + + /// Returns the absolute path to the ".git" dir for the main repository, typically a `.git` + /// folder. For worktrees, this will be the path to the repository the worktree was created + /// from. Otherwise, this is the same value as `path()`. + /// + /// Git documentation calls this the "commondir", and for git CLI is overridden by + /// `GIT_COMMON_DIR`. + fn main_repository_path(&self) -> PathBuf; /// Updates the index to match the worktree at the given paths. 
/// @@ -109,11 +118,16 @@ impl GitRepository for RealGitRepository { } } - fn dot_git_dir(&self) -> PathBuf { + fn path(&self) -> PathBuf { let repo = self.repository.lock(); repo.path().into() } + fn main_repository_path(&self) -> PathBuf { + let repo = self.repository.lock(); + repo.commondir().into() + } + fn load_index_text(&self, path: &RepoPath) -> Option { fn logic(repo: &git2::Repository, path: &RepoPath) -> Result> { const STAGE_NORMAL: i32 = 0; @@ -344,7 +358,7 @@ pub struct FakeGitRepository { #[derive(Debug, Clone)] pub struct FakeGitRepositoryState { - pub dot_git_dir: PathBuf, + pub path: PathBuf, pub event_emitter: smol::channel::Sender, pub head_contents: HashMap, pub index_contents: HashMap, @@ -361,9 +375,9 @@ impl FakeGitRepository { } impl FakeGitRepositoryState { - pub fn new(dot_git_dir: PathBuf, event_emitter: smol::channel::Sender) -> Self { + pub fn new(path: PathBuf, event_emitter: smol::channel::Sender) -> Self { FakeGitRepositoryState { - dot_git_dir, + path, event_emitter, head_contents: Default::default(), index_contents: Default::default(), @@ -405,9 +419,13 @@ impl GitRepository for FakeGitRepository { vec![] } - fn dot_git_dir(&self) -> PathBuf { + fn path(&self) -> PathBuf { let state = self.state.lock(); - state.dot_git_dir.clone() + state.path.clone() + } + + fn main_repository_path(&self) -> PathBuf { + self.path() } fn status(&self, path_prefixes: &[RepoPath]) -> Result { @@ -458,7 +476,7 @@ impl GitRepository for FakeGitRepository { state.current_branch_name = Some(name.to_owned()); state .event_emitter - .try_send(state.dot_git_dir.clone()) + .try_send(state.path.clone()) .expect("Dropped repo change event"); Ok(()) } @@ -468,7 +486,7 @@ impl GitRepository for FakeGitRepository { state.branches.insert(name.to_owned()); state .event_emitter - .try_send(state.dot_git_dir.clone()) + .try_send(state.path.clone()) .expect("Dropped repo change event"); Ok(()) } diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index fc2603bd442f40261799f915b15b94c3e3c94cd6..915e4e9954a723361800de27e5faaef05b0c0e1c 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -3354,18 +3354,23 @@ impl BackgroundScannerState { let t0 = Instant::now(); let repository = fs.open_repo(&dot_git_abs_path)?; - let actual_repo_path = repository.dot_git_dir(); + let repository_path = repository.path(); + watcher.add(&repository_path).log_err()?; - let actual_dot_git_dir_abs_path = smol::block_on(find_git_dir(&actual_repo_path, fs))?; - watcher.add(&actual_repo_path).log_err()?; - - let dot_git_worktree_abs_path = if actual_dot_git_dir_abs_path.as_ref() == dot_git_abs_path - { + let actual_dot_git_dir_abs_path = repository.main_repository_path(); + let dot_git_worktree_abs_path = if actual_dot_git_dir_abs_path == dot_git_abs_path { None } else { // The two paths could be different because we opened a git worktree. - // When that happens, the .git path in the worktree (`dot_git_abs_path`) is a file that - // points to the worktree-subdirectory in the actual .git directory (`git_dir_path`) + // When that happens: + // + // * `dot_git_abs_path` is a file that points to the worktree-subdirectory in the actual + // .git directory. + // + // * `repository_path` is the worktree-subdirectory. + // + // * `actual_dot_git_dir_abs_path` is the path to the actual .git directory. In git + // documentation this is called the "commondir". 
watcher.add(&dot_git_abs_path).log_err()?; Some(Arc::from(dot_git_abs_path)) }; @@ -3400,7 +3405,7 @@ impl BackgroundScannerState { git_dir_scan_id: 0, status_scan_id: 0, repo_ptr: repository.clone(), - dot_git_dir_abs_path: actual_dot_git_dir_abs_path, + dot_git_dir_abs_path: actual_dot_git_dir_abs_path.into(), dot_git_worktree_abs_path, current_merge_head_shas: Default::default(), }; @@ -3429,15 +3434,6 @@ async fn is_git_dir(path: &Path, fs: &dyn Fs) -> bool { matches!(config_metadata, Ok(Some(_))) } -async fn find_git_dir(path: &Path, fs: &dyn Fs) -> Option> { - for ancestor in path.ancestors() { - if is_git_dir(ancestor, fs).await { - return Some(Arc::from(ancestor)); - } - } - None -} - async fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result { let contents = fs.load(abs_path).await?; let parent = abs_path.parent().unwrap_or_else(|| Path::new("/")); From 5ffacb9ca5aed6a27e1913a861156250098777c2 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Thu, 6 Feb 2025 23:46:43 -0500 Subject: [PATCH 110/130] Revert "Move git status updates to a background thread (#24307)" (#24415) This reverts commit 980ce5fbf2d0de7e954c32dd982268d3b58dfccc. Release Notes: - N/A --------- Co-authored-by: Conrad Irwin --- crates/worktree/src/worktree.rs | 535 +++++++++++++------------- crates/worktree/src/worktree_tests.rs | 2 - 2 files changed, 270 insertions(+), 267 deletions(-) diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 915e4e9954a723361800de27e5faaef05b0c0e1c..c2e0a1551e717c9ff9ed7a7b515334d3a9acaea8 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -13,7 +13,6 @@ use futures::{ mpsc::{self, UnboundedSender}, oneshot, }, - future::join_all, select_biased, task::Poll, FutureExt as _, Stream, StreamExt, @@ -466,7 +465,6 @@ struct BackgroundScannerState { changed_paths: Vec>, prev_snapshot: Snapshot, git_hosting_provider_registry: Option>, - repository_scans: HashMap, Task<()>>, } #[derive(Debug, Clone)] @@ -1355,7 +1353,7 @@ impl LocalWorktree { scan_requests_rx, path_prefixes_to_scan_rx, next_entry_id, - state: Arc::new(Mutex::new(BackgroundScannerState { + state: Mutex::new(BackgroundScannerState { prev_snapshot: snapshot.snapshot.clone(), snapshot, scanned_dirs: Default::default(), @@ -1363,9 +1361,8 @@ impl LocalWorktree { paths_to_scan: Default::default(), removed_entries: Default::default(), changed_paths: Default::default(), - repository_scans: HashMap::default(), git_hosting_provider_registry, - })), + }), phase: BackgroundScannerPhase::InitialScan, share_private_files, settings, @@ -4111,7 +4108,7 @@ impl<'a> sum_tree::Dimension<'a, EntrySummary> for PathKey { } struct BackgroundScanner { - state: Arc>, + state: Mutex, fs: Arc, fs_case_sensitive: bool, status_updates_tx: UnboundedSender, @@ -4125,7 +4122,7 @@ struct BackgroundScanner { share_private_files: bool, } -#[derive(Copy, Clone, PartialEq)] +#[derive(PartialEq)] enum BackgroundScannerPhase { InitialScan, EventsReceivedDuringInitialScan, @@ -4134,6 +4131,8 @@ enum BackgroundScannerPhase { impl BackgroundScanner { async fn run(&mut self, mut fs_events_rx: Pin>>>) { + use futures::FutureExt as _; + // If the worktree root does not contain a git repository, then find // the git repository in an ancestor directory. Find any gitignore files // in ancestor directories. 
@@ -4444,33 +4443,22 @@ impl BackgroundScanner { self.update_ignore_statuses(scan_job_tx).await; self.scan_dirs(false, scan_job_rx).await; - let status_update = if !dot_git_abs_paths.is_empty() { - Some(self.schedule_git_repositories_update(dot_git_abs_paths)) - } else { - None - }; + if !dot_git_abs_paths.is_empty() { + self.update_git_repositories(dot_git_abs_paths).await; + } - let phase = self.phase; - let status_update_tx = self.status_updates_tx.clone(); - let state = self.state.clone(); - self.executor - .spawn(async move { - if let Some(status_update) = status_update { - status_update.await; - } + { + let mut state = self.state.lock(); + state.snapshot.completed_scan_id = state.snapshot.scan_id; + for (_, entry) in mem::take(&mut state.removed_entries) { + state.scanned_dirs.remove(&entry.id); + } + } - { - let mut state = state.lock(); - state.snapshot.completed_scan_id = state.snapshot.scan_id; - for (_, entry) in mem::take(&mut state.removed_entries) { - state.scanned_dirs.remove(&entry.id); - } - #[cfg(test)] - state.snapshot.check_git_invariants(); - } - send_status_update_inner(phase, state, status_update_tx, false, SmallVec::new()); - }) - .detach(); + #[cfg(test)] + self.state.lock().snapshot.check_git_invariants(); + + self.send_status_update(false, SmallVec::new()); } async fn forcibly_load_paths(&self, paths: &[Arc]) -> bool { @@ -4504,6 +4492,8 @@ impl BackgroundScanner { enable_progress_updates: bool, scan_jobs_rx: channel::Receiver, ) { + use futures::FutureExt as _; + if self .status_updates_tx .unbounded_send(ScanState::Started) @@ -4571,13 +4561,24 @@ impl BackgroundScanner { } fn send_status_update(&self, scanning: bool, barrier: SmallVec<[barrier::Sender; 1]>) -> bool { - send_status_update_inner( - self.phase, - self.state.clone(), - self.status_updates_tx.clone(), - scanning, - barrier, - ) + let mut state = self.state.lock(); + if state.changed_paths.is_empty() && scanning { + return true; + } + + let new_snapshot = state.snapshot.clone(); + let old_snapshot = mem::replace(&mut state.prev_snapshot, new_snapshot.snapshot.clone()); + let changes = self.build_change_set(&old_snapshot, &new_snapshot, &state.changed_paths); + state.changed_paths.clear(); + + self.status_updates_tx + .unbounded_send(ScanState::Updated { + snapshot: new_snapshot, + changes, + scanning, + barrier, + }) + .is_ok() } async fn scan_dir(&self, job: &ScanJob) -> Result<()> { @@ -4633,7 +4634,9 @@ impl BackgroundScanner { ); if let Some(local_repo) = repo { - let _ = self.schedule_git_statuses_update(local_repo); + self.update_git_statuses(UpdateGitStatusesJob { + local_repository: local_repo, + }); } } else if child_name == *GITIGNORE { match build_gitignore(&child_abs_path, self.fs.as_ref()).await { @@ -4990,6 +4993,8 @@ impl BackgroundScanner { } async fn update_ignore_statuses(&self, scan_job_tx: Sender) { + use futures::FutureExt as _; + let mut ignores_to_update = Vec::new(); let (ignore_queue_tx, ignore_queue_rx) = channel::unbounded(); let prev_snapshot; @@ -5139,10 +5144,10 @@ impl BackgroundScanner { state.snapshot.entries_by_id.edit(entries_by_id_edits, &()); } - fn schedule_git_repositories_update(&self, dot_git_paths: Vec) -> Task<()> { + async fn update_git_repositories(&self, dot_git_paths: Vec) { log::debug!("reloading repositories: {dot_git_paths:?}"); - let mut repos_to_update = Vec::new(); + let mut repo_updates = Vec::new(); { let mut state = self.state.lock(); let scan_id = state.snapshot.scan_id; @@ -5205,7 +5210,7 @@ impl BackgroundScanner { } }; - 
repos_to_update.push(local_repository); + repo_updates.push(UpdateGitStatusesJob { local_repository }); } // Remove any git repositories whose .git entry no longer exists. @@ -5236,109 +5241,238 @@ impl BackgroundScanner { }); } - let mut status_updates = Vec::new(); - for local_repository in repos_to_update { - status_updates.push(self.schedule_git_statuses_update(local_repository)); - } - self.executor.spawn(async move { - let _updates_finished: Vec> = - join_all(status_updates).await; - }) + let (mut updates_done_tx, mut updates_done_rx) = barrier::channel(); + self.executor + .scoped(|scope| { + scope.spawn(async { + for repo_update in repo_updates { + self.update_git_statuses(repo_update); + } + updates_done_tx.blocking_send(()).ok(); + }); + + scope.spawn(async { + loop { + select_biased! { + // Process any path refresh requests before moving on to process + // the queue of git statuses. + request = self.next_scan_request().fuse() => { + let Ok(request) = request else { break }; + if !self.process_scan_request(request, true).await { + return; + } + } + _ = updates_done_rx.recv().fuse() => break, + } + } + }); + }) + .await; } /// Update the git statuses for a given batch of entries. - fn schedule_git_statuses_update( - &self, - local_repository: LocalRepositoryEntry, - ) -> oneshot::Receiver<()> { - let repository_path = local_repository.work_directory.path.clone(); - let state = self.state.clone(); - let (tx, rx) = oneshot::channel(); + fn update_git_statuses(&self, job: UpdateGitStatusesJob) { + log::trace!( + "updating git statuses for repo {:?}", + job.local_repository.work_directory.path + ); + let t0 = Instant::now(); - self.state.lock().repository_scans.insert( - repository_path.clone(), - self.executor.spawn(async move { - log::trace!("updating git statuses for repo {repository_path:?}",); - let t0 = Instant::now(); - - let Some(statuses) = local_repository - .repo() - .status(&[git::WORK_DIRECTORY_REPO_PATH.clone()]) - .log_err() - else { - return; - }; - log::trace!( - "computed git statuses for repo {:?} in {:?}", - repository_path, - t0.elapsed() - ); + let Some(statuses) = job + .local_repository + .repo() + .status(&[git::WORK_DIRECTORY_REPO_PATH.clone()]) + .log_err() + else { + return; + }; + log::trace!( + "computed git statuses for repo {:?} in {:?}", + job.local_repository.work_directory.path, + t0.elapsed() + ); - let t0 = Instant::now(); - let mut changed_paths = Vec::new(); - let snapshot = state.lock().snapshot.snapshot.clone(); + let t0 = Instant::now(); + let mut changed_paths = Vec::new(); + let snapshot = self.state.lock().snapshot.snapshot.clone(); + + let Some(mut repository) = + snapshot.repository(job.local_repository.work_directory.path_key()) + else { + log::error!("Got an UpdateGitStatusesJob for a repository that isn't in the snapshot"); + debug_assert!(false); + return; + }; - let Some(mut repository) = - snapshot.repository(local_repository.work_directory.path_key()) - else { - log::error!( - "Tried to update git statuses for a repository that isn't in the snapshot" - ); - debug_assert!(false); - return; - }; + let merge_head_shas = job.local_repository.repo().merge_head_shas(); + if merge_head_shas != job.local_repository.current_merge_head_shas { + mem::take(&mut repository.current_merge_conflicts); + } - let merge_head_shas = local_repository.repo().merge_head_shas(); - if merge_head_shas != local_repository.current_merge_head_shas { - mem::take(&mut repository.current_merge_conflicts); - } + let mut new_entries_by_path = SumTree::new(&()); + 
for (repo_path, status) in statuses.entries.iter() { + let project_path = repository.work_directory.unrelativize(repo_path); - let mut new_entries_by_path = SumTree::new(&()); - for (repo_path, status) in statuses.entries.iter() { - let project_path = repository.work_directory.unrelativize(repo_path); + new_entries_by_path.insert_or_replace( + StatusEntry { + repo_path: repo_path.clone(), + status: *status, + }, + &(), + ); + if status.is_conflicted() { + repository.current_merge_conflicts.insert(repo_path.clone()); + } - new_entries_by_path.insert_or_replace( - StatusEntry { - repo_path: repo_path.clone(), - status: *status, - }, - &(), - ); + if let Some(path) = project_path { + changed_paths.push(path); + } + } - if let Some(path) = project_path { - changed_paths.push(path); - } - } + repository.statuses_by_path = new_entries_by_path; + let mut state = self.state.lock(); + state + .snapshot + .repositories + .insert_or_replace(repository, &()); - repository.statuses_by_path = new_entries_by_path; - let mut state = state.lock(); - state - .snapshot - .repositories - .insert_or_replace(repository, &()); - state.snapshot.git_repositories.update( - &local_repository.work_directory_id, - |entry| { - entry.current_merge_head_shas = merge_head_shas; - }, - ); + state + .snapshot + .git_repositories + .update(&job.local_repository.work_directory_id, |entry| { + entry.current_merge_head_shas = merge_head_shas; + }); - util::extend_sorted( - &mut state.changed_paths, - changed_paths, - usize::MAX, - Ord::cmp, - ); + util::extend_sorted( + &mut state.changed_paths, + changed_paths, + usize::MAX, + Ord::cmp, + ); - log::trace!( - "applied git status updates for repo {:?} in {:?}", - repository_path, - t0.elapsed(), - ); - tx.send(()).ok(); - }), + log::trace!( + "applied git status updates for repo {:?} in {:?}", + job.local_repository.work_directory.path, + t0.elapsed(), ); - rx + } + + fn build_change_set( + &self, + old_snapshot: &Snapshot, + new_snapshot: &Snapshot, + event_paths: &[Arc], + ) -> UpdatedEntriesSet { + use BackgroundScannerPhase::*; + use PathChange::{Added, AddedOrUpdated, Loaded, Removed, Updated}; + + // Identify which paths have changed. Use the known set of changed + // parent paths to optimize the search. 
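+ // The loop below walks two cursors over the old and new entry trees in parallel: + // for each changed path, both cursors seek to that path and then advance together, + // and each entry is classified as Added, Removed, Updated, or Loaded (or + // AddedOrUpdated while the initial scan is still in progress) by comparing the + // old and new entries.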
+ let mut changes = Vec::new(); + let mut old_paths = old_snapshot.entries_by_path.cursor::(&()); + let mut new_paths = new_snapshot.entries_by_path.cursor::(&()); + let mut last_newly_loaded_dir_path = None; + old_paths.next(&()); + new_paths.next(&()); + for path in event_paths { + let path = PathKey(path.clone()); + if old_paths.item().map_or(false, |e| e.path < path.0) { + old_paths.seek_forward(&path, Bias::Left, &()); + } + if new_paths.item().map_or(false, |e| e.path < path.0) { + new_paths.seek_forward(&path, Bias::Left, &()); + } + loop { + match (old_paths.item(), new_paths.item()) { + (Some(old_entry), Some(new_entry)) => { + if old_entry.path > path.0 + && new_entry.path > path.0 + && !old_entry.path.starts_with(&path.0) + && !new_entry.path.starts_with(&path.0) + { + break; + } + + match Ord::cmp(&old_entry.path, &new_entry.path) { + Ordering::Less => { + changes.push((old_entry.path.clone(), old_entry.id, Removed)); + old_paths.next(&()); + } + Ordering::Equal => { + if self.phase == EventsReceivedDuringInitialScan { + if old_entry.id != new_entry.id { + changes.push(( + old_entry.path.clone(), + old_entry.id, + Removed, + )); + } + // If the worktree was not fully initialized when this event was generated, + // we can't know whether this entry was added during the scan or whether + // it was merely updated. + changes.push(( + new_entry.path.clone(), + new_entry.id, + AddedOrUpdated, + )); + } else if old_entry.id != new_entry.id { + changes.push((old_entry.path.clone(), old_entry.id, Removed)); + changes.push((new_entry.path.clone(), new_entry.id, Added)); + } else if old_entry != new_entry { + if old_entry.kind.is_unloaded() { + last_newly_loaded_dir_path = Some(&new_entry.path); + changes.push(( + new_entry.path.clone(), + new_entry.id, + Loaded, + )); + } else { + changes.push(( + new_entry.path.clone(), + new_entry.id, + Updated, + )); + } + } + old_paths.next(&()); + new_paths.next(&()); + } + Ordering::Greater => { + let is_newly_loaded = self.phase == InitialScan + || last_newly_loaded_dir_path + .as_ref() + .map_or(false, |dir| new_entry.path.starts_with(dir)); + changes.push(( + new_entry.path.clone(), + new_entry.id, + if is_newly_loaded { Loaded } else { Added }, + )); + new_paths.next(&()); + } + } + } + (Some(old_entry), None) => { + changes.push((old_entry.path.clone(), old_entry.id, Removed)); + old_paths.next(&()); + } + (None, Some(new_entry)) => { + let is_newly_loaded = self.phase == InitialScan + || last_newly_loaded_dir_path + .as_ref() + .map_or(false, |dir| new_entry.path.starts_with(dir)); + changes.push(( + new_entry.path.clone(), + new_entry.id, + if is_newly_loaded { Loaded } else { Added }, + )); + new_paths.next(&()); + } + (None, None) => break, + } + } + } + + changes.into() } async fn progress_timer(&self, running: bool) { @@ -5368,139 +5502,6 @@ impl BackgroundScanner { } } -fn send_status_update_inner( - phase: BackgroundScannerPhase, - state: Arc>, - status_updates_tx: UnboundedSender, - scanning: bool, - barrier: SmallVec<[barrier::Sender; 1]>, -) -> bool { - let mut state = state.lock(); - if state.changed_paths.is_empty() && scanning { - return true; - } - - let new_snapshot = state.snapshot.clone(); - let old_snapshot = mem::replace(&mut state.prev_snapshot, new_snapshot.snapshot.clone()); - let changes = build_diff(phase, &old_snapshot, &new_snapshot, &state.changed_paths); - state.changed_paths.clear(); - - status_updates_tx - .unbounded_send(ScanState::Updated { - snapshot: new_snapshot, - changes, - scanning, - barrier, - }) - 
.is_ok() -} - -fn build_diff( - phase: BackgroundScannerPhase, - old_snapshot: &Snapshot, - new_snapshot: &Snapshot, - event_paths: &[Arc], -) -> UpdatedEntriesSet { - use BackgroundScannerPhase::*; - use PathChange::{Added, AddedOrUpdated, Loaded, Removed, Updated}; - - // Identify which paths have changed. Use the known set of changed - // parent paths to optimize the search. - let mut changes = Vec::new(); - let mut old_paths = old_snapshot.entries_by_path.cursor::(&()); - let mut new_paths = new_snapshot.entries_by_path.cursor::(&()); - let mut last_newly_loaded_dir_path = None; - old_paths.next(&()); - new_paths.next(&()); - for path in event_paths { - let path = PathKey(path.clone()); - if old_paths.item().map_or(false, |e| e.path < path.0) { - old_paths.seek_forward(&path, Bias::Left, &()); - } - if new_paths.item().map_or(false, |e| e.path < path.0) { - new_paths.seek_forward(&path, Bias::Left, &()); - } - loop { - match (old_paths.item(), new_paths.item()) { - (Some(old_entry), Some(new_entry)) => { - if old_entry.path > path.0 - && new_entry.path > path.0 - && !old_entry.path.starts_with(&path.0) - && !new_entry.path.starts_with(&path.0) - { - break; - } - - match Ord::cmp(&old_entry.path, &new_entry.path) { - Ordering::Less => { - changes.push((old_entry.path.clone(), old_entry.id, Removed)); - old_paths.next(&()); - } - Ordering::Equal => { - if phase == EventsReceivedDuringInitialScan { - if old_entry.id != new_entry.id { - changes.push((old_entry.path.clone(), old_entry.id, Removed)); - } - // If the worktree was not fully initialized when this event was generated, - // we can't know whether this entry was added during the scan or whether - // it was merely updated. - changes.push(( - new_entry.path.clone(), - new_entry.id, - AddedOrUpdated, - )); - } else if old_entry.id != new_entry.id { - changes.push((old_entry.path.clone(), old_entry.id, Removed)); - changes.push((new_entry.path.clone(), new_entry.id, Added)); - } else if old_entry != new_entry { - if old_entry.kind.is_unloaded() { - last_newly_loaded_dir_path = Some(&new_entry.path); - changes.push((new_entry.path.clone(), new_entry.id, Loaded)); - } else { - changes.push((new_entry.path.clone(), new_entry.id, Updated)); - } - } - old_paths.next(&()); - new_paths.next(&()); - } - Ordering::Greater => { - let is_newly_loaded = phase == InitialScan - || last_newly_loaded_dir_path - .as_ref() - .map_or(false, |dir| new_entry.path.starts_with(dir)); - changes.push(( - new_entry.path.clone(), - new_entry.id, - if is_newly_loaded { Loaded } else { Added }, - )); - new_paths.next(&()); - } - } - } - (Some(old_entry), None) => { - changes.push((old_entry.path.clone(), old_entry.id, Removed)); - old_paths.next(&()); - } - (None, Some(new_entry)) => { - let is_newly_loaded = phase == InitialScan - || last_newly_loaded_dir_path - .as_ref() - .map_or(false, |dir| new_entry.path.starts_with(dir)); - changes.push(( - new_entry.path.clone(), - new_entry.id, - if is_newly_loaded { Loaded } else { Added }, - )); - new_paths.next(&()); - } - (None, None) => break, - } - } - } - - changes.into() -} - fn swap_to_front(child_paths: &mut Vec, file: &OsStr) { let position = child_paths .iter() @@ -5563,6 +5564,10 @@ struct UpdateIgnoreStatusJob { scan_queue: Sender, } +struct UpdateGitStatusesJob { + local_repository: LocalRepositoryEntry, +} + pub trait WorktreeModelHandle { #[cfg(any(test, feature = "test-support"))] fn flush_fs_events<'a>( diff --git a/crates/worktree/src/worktree_tests.rs b/crates/worktree/src/worktree_tests.rs index 
34e1f0063e102b751a85f3e86ece2eaac2bd9d3b..2cee728aec89e40500700c182ed617400085739e 100644 --- a/crates/worktree/src/worktree_tests.rs +++ b/crates/worktree/src/worktree_tests.rs @@ -24,7 +24,6 @@ use std::{ mem, path::{Path, PathBuf}, sync::Arc, - time::Duration, }; use util::{test::TempTree, ResultExt}; @@ -1505,7 +1504,6 @@ async fn test_bump_mtime_of_git_repo_workdir(cx: &mut TestAppContext) { &[(Path::new("b/c.txt"), StatusCode::Modified.index())], ); cx.executor().run_until_parked(); - cx.executor().advance_clock(Duration::from_secs(1)); let snapshot = tree.read_with(cx, |tree, _| tree.snapshot()); From 6534e0bafda9d119a0a28fcda8dbe76afb7016a4 Mon Sep 17 00:00:00 2001 From: smit <0xtimsb@gmail.com> Date: Fri, 7 Feb 2025 07:01:46 +0000 Subject: [PATCH 111/130] linux: Fix crash when NoKeymap event is received on Wayland (#24379) Closes #24139 For reasons that are not entirely clear, Sway on a few Linux distros sends a `NoKeymap` event when switching windows. Zed crashed because it asserted that this event's keymap format was `XkbV1`. To fix this, we now ignore the `NoKeymap` event instead of crashing Zed. Release Notes: - Fixed a crash in Wayland-based compositors like Sway when switching windows via the keyboard. --- crates/gpui/src/platform/linux/wayland/client.rs | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/crates/gpui/src/platform/linux/wayland/client.rs b/crates/gpui/src/platform/linux/wayland/client.rs index 624114b089bcf2cc9e1cf7b2646ed99c9613a9f4..5c5ab5a3929ab5a6396abdc781a8ce1f44f90b1f 100644 --- a/crates/gpui/src/platform/linux/wayland/client.rs +++ b/crates/gpui/src/platform/linux/wayland/client.rs @@ -1132,11 +1132,10 @@ impl Dispatch for WaylandClientStatePtr { size, .. } => { - assert_eq!( - format, - wl_keyboard::KeymapFormat::XkbV1, - "Unsupported keymap format" - ); + if format != wl_keyboard::KeymapFormat::XkbV1 { + log::error!("Received keymap format {:?}, expected XkbV1", format); + return; + } let xkb_context = xkb::Context::new(xkb::CONTEXT_NO_FLAGS); let keymap = unsafe { xkb::Keymap::new_from_fd( From 1f9d02607b46fe9b91521e531c7f4af14b85eea4 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 7 Feb 2025 00:21:28 -0700 Subject: [PATCH 112/130] Fixes to commit button in Git Panel (#24425) Git Panel updates: * Fixes the commit/commit all button to work (and to be disabled correctly when there is a merge conflict) * Updates keyboard shortcuts and sets focus on the button (enter now does the same as click; tab cycles between editor and change list) Closes #ISSUE Release Notes: - N/A
--------- Co-authored-by: Cole Miller --- assets/keymaps/default-linux.json | 13 +- assets/keymaps/default-macos.json | 22 +- assets/keymaps/vim.json | 4 +- crates/git/src/git.rs | 3 +- crates/git/src/repository.rs | 34 ++- crates/git_ui/src/git_panel.rs | 393 ++++++++++++------------------ crates/git_ui/src/project_diff.rs | 2 +- crates/proto/proto/zed.proto | 1 + 8 files changed, 203 insertions(+), 269 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index ae858b6a029aaf3b6179370322581bdbf2c19a2f..7fd499f204713bf1682834e7d4c890f8bf3de247 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -710,15 +710,20 @@ "enter": "menu::Confirm", "space": "git::ToggleStaged", "ctrl-space": "git::StageAll", - "ctrl-shift-space": "git::UnstageAll" + "ctrl-shift-space": "git::UnstageAll", + "tab": "git_panel::FocusEditor", + "shift-tab": "git_panel::FocusEditor", + "escape": "git_panel::ToggleFocus" } }, { - "context": "GitPanel && CommitEditor > Editor", + "context": "GitPanel > Editor", "bindings": { "escape": "git_panel::FocusChanges", - "ctrl-enter": "git::CommitChanges", - "ctrl-shift-enter": "git::CommitAllChanges" + "ctrl-enter": "git::Commit", + "tab": "git_panel::FocusChanges", + "shift-tab": "git_panel::FocusChanges", + "alt-up": "git_panel::FocusChanges" } }, { diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 0a7bad1406a6943a03249da05c236ff2a9ef48ce..1ddd31e0535b131a785695016dd47206067404fa 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -715,13 +715,6 @@ "space": "project_panel::Open" } }, - { - "context": "GitPanel && !CommitEditor", - "use_key_equivalents": true, - "bindings": { - "escape": "git_panel::Close" - } - }, { "context": "GitPanel && ChangesList", "use_key_equivalents": true, @@ -734,17 +727,20 @@ "space": "git::ToggleStaged", "cmd-shift-space": "git::StageAll", "ctrl-shift-space": "git::UnstageAll", - "alt-down": "git_panel::FocusEditor" + "alt-down": "git_panel::FocusEditor", + "tab": "git_panel::FocusEditor", + "shift-tab": "git_panel::FocusEditor", + "escape": "git_panel::ToggleFocus" } }, { - "context": "GitPanel && CommitEditor > Editor", + "context": "GitPanel > Editor", "use_key_equivalents": true, "bindings": { - "alt-up": "git_panel::FocusChanges", - "escape": "git_panel::FocusChanges", - "cmd-enter": "git::CommitChanges", - "cmd-alt-enter": "git::CommitAllChanges" + "cmd-enter": "git::Commit", + "tab": "git_panel::FocusChanges", + "shift-tab": "git_panel::FocusChanges", + "alt-up": "git_panel::FocusChanges" } }, { diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 22bc32cec8548b580eedfacd250847c382467837..bad0b4e604909584dc8c9ae4f54ab1476730cea3 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -567,7 +567,7 @@ } }, { - "context": "ProjectPanel || CollabPanel || OutlinePanel || ChatPanel || VimControl || EmptyPane || SharedScreen || MarkdownPreview || KeyContextView", + "context": "GitPanel || ProjectPanel || CollabPanel || OutlinePanel || ChatPanel || VimControl || EmptyPane || SharedScreen || MarkdownPreview || KeyContextView", "bindings": { // window related commands (ctrl-w X) "ctrl-w": null, @@ -625,7 +625,7 @@ } }, { - "context": "EmptyPane || SharedScreen || MarkdownPreview || KeyContextView || Welcome", + "context": "GitPanel || EmptyPane || SharedScreen || MarkdownPreview || KeyContextView || Welcome", "bindings": { ":": "command_palette::Toggle", "g /": 
"pane::DeploySearch" diff --git a/crates/git/src/git.rs b/crates/git/src/git.rs index 1d0c11e813a5fe49f58c3f708a34ef016c71a8cc..42da2e917083d1efef03363591b9fbc3cf51fc4e 100644 --- a/crates/git/src/git.rs +++ b/crates/git/src/git.rs @@ -38,8 +38,7 @@ actions!( StageAll, UnstageAll, RevertAll, - CommitChanges, - CommitAllChanges, + Commit, ClearCommitMessage ] ); diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 9bf4c4da12d60d5aa0533c54c69a279a29172c66..39f244f12535ecbc58dd260630ca6d526b58455a 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -293,13 +293,16 @@ impl GitRepository for RealGitRepository { .to_path_buf(); if !paths.is_empty() { - let status = new_std_command(&self.git_binary_path) + let output = new_std_command(&self.git_binary_path) .current_dir(&working_directory) .args(["update-index", "--add", "--remove", "--"]) .args(paths.iter().map(|p| p.as_ref())) - .status()?; - if !status.success() { - return Err(anyhow!("Failed to stage paths: {status}")); + .output()?; + if !output.status.success() { + return Err(anyhow!( + "Failed to stage paths:\n{}", + String::from_utf8_lossy(&output.stderr) + )); } } Ok(()) @@ -314,13 +317,16 @@ impl GitRepository for RealGitRepository { .to_path_buf(); if !paths.is_empty() { - let cmd = new_std_command(&self.git_binary_path) + let output = new_std_command(&self.git_binary_path) .current_dir(&working_directory) .args(["reset", "--quiet", "--"]) .args(paths.iter().map(|p| p.as_ref())) - .status()?; - if !cmd.success() { - return Err(anyhow!("Failed to unstage paths: {cmd}")); + .output()?; + if !output.status.success() { + return Err(anyhow!( + "Failed to unstage:\n{}", + String::from_utf8_lossy(&output.stderr) + )); } } Ok(()) @@ -340,12 +346,16 @@ impl GitRepository for RealGitRepository { args.push(author); } - let cmd = new_std_command(&self.git_binary_path) + let output = new_std_command(&self.git_binary_path) .current_dir(&working_directory) .args(args) - .status()?; - if !cmd.success() { - return Err(anyhow!("Failed to commit: {cmd}")); + .output()?; + + if !output.status.success() { + return Err(anyhow!( + "Failed to commit:\n{}", + String::from_utf8_lossy(&output.stderr) + )); } Ok(()) } diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index a65b55bcb7fa52c4f1eadb6a55343b491a3a9f12..7071504cff286e4844171a252517a965a082d89e 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -4,7 +4,6 @@ use crate::ProjectDiff; use crate::{ git_panel_settings::GitPanelSettings, git_status_icon, repository_selector::RepositorySelector, }; -use anyhow::Result; use collections::HashMap; use db::kvp::KEY_VALUE_STORE; use editor::actions::MoveToEnd; @@ -12,7 +11,7 @@ use editor::scroll::ScrollbarAutoHide; use editor::{Editor, EditorMode, EditorSettings, MultiBuffer, ShowScrollbar}; use git::repository::RepoPath; use git::status::FileStatus; -use git::{CommitAllChanges, CommitChanges, ToggleStaged}; +use git::{Commit, ToggleStaged}; use gpui::*; use language::{Buffer, File}; use menu::{SelectFirst, SelectLast, SelectNext, SelectPrev}; @@ -26,7 +25,7 @@ use std::{collections::HashSet, path::PathBuf, sync::Arc, time::Duration, usize} use theme::ThemeSettings; use ui::{ prelude::*, ButtonLike, Checkbox, Divider, DividerColor, ElevationIndex, IndentGuideColors, - ListHeader, ListItem, ListItemSpacing, Scrollbar, ScrollbarState, Tooltip, + ListItem, ListItemSpacing, Scrollbar, ScrollbarState, Tooltip, }; use util::{maybe, ResultExt, TryFutureExt}; 
use workspace::notifications::{DetachAndPromptErr, NotificationId}; @@ -58,6 +57,17 @@ pub fn init(cx: &mut App) { workspace.register_action(|workspace, _: &ToggleFocus, window, cx| { workspace.toggle_panel_focus::(window, cx); }); + + workspace.register_action(|workspace, _: &Commit, window, cx| { + workspace.open_panel::(window, cx); + if let Some(git_panel) = workspace.panel::(cx) { + git_panel + .read(cx) + .commit_editor + .focus_handle(cx) + .focus(window); + } + }); }, ) .detach(); @@ -156,8 +166,7 @@ pub struct GitPanel { entries_by_path: collections::HashMap, width: Option, pending: Vec, - commit_task: Task>, - commit_pending: bool, + pending_commit: Option>, conflicted_staged_count: usize, conflicted_count: usize, @@ -269,8 +278,7 @@ impl GitPanel { show_scrollbar: false, hide_scrollbar_task: None, update_visible_entries_task: Task::ready(()), - commit_task: Task::ready(Ok(())), - commit_pending: false, + pending_commit: None, active_repository, scroll_handle, fs, @@ -308,7 +316,12 @@ impl GitPanel { git_panel } - pub fn set_focused_path(&mut self, path: ProjectPath, _: &mut Window, cx: &mut Context) { + pub fn select_entry_by_path( + &mut self, + path: ProjectPath, + _: &mut Window, + cx: &mut Context, + ) { let Some(git_repo) = self.active_repository.as_ref() else { return; }; @@ -318,7 +331,6 @@ impl GitPanel { let Some(ix) = self.entries_by_path.get(&repo_path) else { return; }; - self.selected_entry = Some(*ix); cx.notify(); } @@ -555,10 +567,17 @@ impl GitPanel { self.selected_entry.and_then(|i| self.entries.get(i)) } - fn open_selected(&mut self, _: &menu::Confirm, _window: &mut Window, cx: &mut Context) { - if let Some(entry) = self.selected_entry.and_then(|i| self.entries.get(i)) { - self.open_entry(entry, cx); - } + fn open_selected(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context) { + maybe!({ + let entry = self.entries.get(self.selected_entry?)?.status_entry()?; + + self.workspace + .update(cx, |workspace, cx| { + ProjectDiff::deploy_at(workspace, Some(entry.clone()), window, cx); + }) + .ok() + }); + self.focus_handle.focus(window); } fn toggle_staged_for_entry( @@ -652,135 +671,89 @@ impl GitPanel { } } - fn open_entry(&self, entry: &GitListEntry, cx: &mut Context) { - let Some(status_entry) = entry.status_entry() else { - return; - }; - let Some(active_repository) = self.active_repository.as_ref() else { - return; - }; - let Some(path) = active_repository - .read(cx) - .repo_path_to_project_path(&status_entry.repo_path) - else { - return; - }; - let path_exists = self.project.update(cx, |project, cx| { - project.entry_for_path(&path, cx).is_some() - }); - if !path_exists { - return; - } - // TODO maybe move all of this into project? 
- cx.emit(Event::OpenedEntry { path }); - } - /// Commit all staged changes - fn commit_changes( - &mut self, - _: &git::CommitChanges, - name_and_email: Option<(SharedString, SharedString)>, - window: &mut Window, - cx: &mut Context, - ) { - let Some(active_repository) = self.active_repository.clone() else { - return; - }; - if !self.has_staged_changes() { - self.commit_tracked_changes(&Default::default(), name_and_email, window, cx); - return; - } - let message = self.commit_editor.read(cx).text(cx); - if message.trim().is_empty() { - return; + fn commit(&mut self, _: &git::Commit, window: &mut Window, cx: &mut Context) { + let editor = self.commit_editor.read(cx); + if editor.is_empty(cx) { + if !editor.focus_handle(cx).contains_focused(window, cx) { + editor.focus_handle(cx).focus(window); + return; + } } - self.commit_pending = true; - let commit_editor = self.commit_editor.clone(); - self.commit_task = cx.spawn_in(window, |git_panel, mut cx| async move { - let commit = active_repository.update(&mut cx, |active_repository, _| { - active_repository.commit(SharedString::from(message), name_and_email) - })?; - let result = maybe!(async { - commit.await??; - cx.update(|window, cx| { - commit_editor.update(cx, |editor, cx| editor.clear(window, cx)); - }) - }) - .await; - git_panel.update(&mut cx, |git_panel, cx| { - git_panel.commit_pending = false; - result - .map_err(|e| { - git_panel.show_err_toast(e, cx); - }) - .ok(); - }) - }); + self.commit_changes(window, cx) } - /// Commit all changes, regardless of whether they are staged or not - fn commit_tracked_changes( - &mut self, - _: &git::CommitAllChanges, - name_and_email: Option<(SharedString, SharedString)>, - window: &mut Window, - cx: &mut Context, - ) { + fn commit_changes(&mut self, window: &mut Window, cx: &mut Context) { let Some(active_repository) = self.active_repository.clone() else { return; }; - if !self.has_staged_changes() || !self.has_tracked_changes() { + let error_spawn = |message, window: &mut Window, cx: &mut App| { + let prompt = window.prompt(PromptLevel::Warning, message, None, &["Ok"], cx); + cx.spawn(|_| async move { + prompt.await.ok(); + }) + .detach(); + }; + + if self.has_unstaged_conflicts() { + error_spawn( + "There are still conflicts. You must stage these before committing", + window, + cx, + ); return; } let message = self.commit_editor.read(cx).text(cx); if message.trim().is_empty() { + self.commit_editor.read(cx).focus_handle(cx).focus(window); return; } - self.commit_pending = true; - let commit_editor = self.commit_editor.clone(); - let tracked_files = self - .entries - .iter() - .filter_map(|entry| entry.status_entry()) - .filter(|status_entry| { - !status_entry.status.is_created() && !status_entry.is_staged.unwrap_or(false) - }) - .map(|status_entry| status_entry.repo_path.clone()) - .collect::>(); - self.commit_task = cx.spawn_in(window, |git_panel, mut cx| async move { - let result = maybe!(async { - cx.update(|_, cx| active_repository.read(cx).stage_entries(tracked_files))? - .await??; - cx.update(|_, cx| { - active_repository - .read(cx) - .commit(SharedString::from(message), name_and_email) - })? 
- .await??; - Ok(()) - }) - .await; - cx.update(|window, cx| match result { - Ok(_) => commit_editor.update(cx, |editor, cx| { - editor.clear(window, cx); - }), + let task = if self.has_staged_changes() { + // Repository serializes all git operations, so we can just send a commit immediately + let commit_task = active_repository.read(cx).commit(message.into(), None); + cx.background_executor() + .spawn(async move { commit_task.await? }) + } else { + let changed_files = self + .entries + .iter() + .filter_map(|entry| entry.status_entry()) + .filter(|status_entry| !status_entry.status.is_created()) + .map(|status_entry| status_entry.repo_path.clone()) + .collect::>(); + + if changed_files.is_empty() { + error_spawn("No changes to commit", window, cx); + return; + } - Err(e) => { - git_panel - .update(cx, |git_panel, cx| { - git_panel.show_err_toast(e, cx); - }) - .ok(); + let stage_task = active_repository.read(cx).stage_entries(changed_files); + cx.spawn(|_, mut cx| async move { + stage_task.await??; + let commit_task = active_repository + .update(&mut cx, |repo, _| repo.commit(message.into(), None))?; + commit_task.await? + }) + }; + let task = cx.spawn_in(window, |this, mut cx| async move { + let result = task.await; + this.update_in(&mut cx, |this, window, cx| { + this.pending_commit.take(); + match result { + Ok(()) => { + this.commit_editor + .update(cx, |editor, cx| editor.clear(window, cx)); + } + Err(e) => this.show_err_toast(e, cx), } - })?; - - git_panel.update(&mut cx, |git_panel, _| { - git_panel.commit_pending = false; }) + .ok(); }); + + self.pending_commit = Some(task); } fn fill_co_authors(&mut self, _: &FillCoAuthors, window: &mut Window, cx: &mut Context) { @@ -1057,13 +1030,19 @@ impl GitPanel { } fn has_staged_changes(&self) -> bool { - self.tracked_staged_count > 0 || self.new_staged_count > 0 + self.tracked_staged_count > 0 + || self.new_staged_count > 0 + || self.conflicted_staged_count > 0 } fn has_tracked_changes(&self) -> bool { self.tracked_count > 0 } + fn has_unstaged_conflicts(&self) -> bool { + self.conflicted_count > 0 && self.conflicted_count != self.conflicted_staged_count + } + fn header_state(&self, header_type: Section) -> ToggleState { let (staged_count, count) = match header_type { Section::New => (self.new_staged_count, self.new_count), @@ -1084,6 +1063,7 @@ impl GitPanel { return; }; let notif_id = NotificationId::Named("git-operation-error".into()); + let message = e.to_string(); workspace.update(cx, |workspace, cx| { let toast = Toast::new(notif_id, message).on_click("Open Zed Log", |window, cx| { @@ -1092,9 +1072,7 @@ impl GitPanel { workspace.show_toast(toast, cx); }); } -} -impl GitPanel { pub fn panel_button( &self, id: impl Into, @@ -1200,14 +1178,13 @@ impl GitPanel { ) } - pub fn render_commit_editor( - &self, - name_and_email: Option<(SharedString, SharedString)>, - cx: &Context, - ) -> impl IntoElement { + pub fn render_commit_editor(&self, cx: &Context) -> impl IntoElement { let editor = self.commit_editor.clone(); - let can_commit = - (self.has_staged_changes() || self.has_tracked_changes()) && !self.commit_pending; + let can_commit = (self.has_staged_changes() || self.has_tracked_changes()) + && self.pending_commit.is_none() + && !editor.read(cx).is_empty(cx) + && !self.has_unstaged_conflicts() + && self.has_write_access(cx); let editor_focus_handle = editor.read(cx).focus_handle(cx).clone(); let focus_handle_1 = self.focus_handle(cx).clone(); @@ -1226,14 +1203,11 @@ impl GitPanel { .panel_button("commit-changes", title) .tooltip(move 
|window, cx| { let focus_handle = focus_handle_1.clone(); - Tooltip::for_action_in(tooltip, &CommitChanges, &focus_handle, window, cx) + Tooltip::for_action_in(tooltip, &Commit, &focus_handle, window, cx) }) .disabled(!can_commit) .on_click({ - let name_and_email = name_and_email.clone(); - cx.listener(move |this, _: &ClickEvent, window, cx| { - this.commit_changes(&CommitChanges, name_and_email.clone(), window, cx) - }) + cx.listener(move |this, _: &ClickEvent, window, cx| this.commit_changes(window, cx)) }); div().w_full().h(px(140.)).px_2().pt_1().pb_2().child( @@ -1488,9 +1462,10 @@ impl GitPanel { ix: usize, header: &GitHeaderEntry, has_write_access: bool, - _window: &Window, + window: &Window, cx: &Context, ) -> AnyElement { + let selected = self.selected_entry == Some(ix); let header_state = if self.has_staged_changes() { self.header_state(header.header) } else { @@ -1499,34 +1474,46 @@ impl GitPanel { Section::New => ToggleState::Unselected, } }; - let checkbox = Checkbox::new(header.title(), header_state) + + let checkbox = Checkbox::new(("checkbox", ix), header_state) .disabled(!has_write_access) - .placeholder(!self.has_staged_changes()) .fill() - .elevation(ElevationIndex::Surface); - let selected = self.selected_entry == Some(ix); + .placeholder(!self.has_staged_changes()) + .elevation(ElevationIndex::Surface) + .on_click({ + let header = header.clone(); + cx.listener(move |this, _, window, cx| { + this.toggle_staged_for_entry(&GitListEntry::Header(header.clone()), window, cx); + cx.stop_propagation(); + }) + }); + + let start_slot = h_flex() + .id(("start-slot", ix)) + .gap(DynamicSpacing::Base04.rems(cx)) + .child(checkbox) + .tooltip(|window, cx| Tooltip::for_action("Stage File", &ToggleStaged, window, cx)) + .on_mouse_down(MouseButton::Left, |_, _, cx| { + // prevent the list item active state triggering when toggling checkbox + cx.stop_propagation(); + }); div() .w_full() .child( - ListHeader::new(header.title()) - .start_slot(checkbox) + ListItem::new(ix) + .spacing(ListItemSpacing::Sparse) + .start_slot(start_slot) .toggle_state(selected) - .on_toggle({ - let header = header.clone(); - cx.listener(move |this, _, window, cx| { - if !has_write_access { - return; - } + .focused(selected && self.focus_handle.is_focused(window)) + .disabled(!has_write_access) + .on_click({ + cx.listener(move |this, _, _, cx| { this.selected_entry = Some(ix); - this.toggle_staged_for_entry( - &GitListEntry::Header(header.clone()), - window, - cx, - ) + cx.notify(); }) }) - .inset(true), + .child(h_flex().child(self.entry_label(header.title(), Color::Muted))), ) .into_any_element() } @@ -1614,7 +1601,6 @@ impl GitPanel { div() .w_full() - .px_0p5() .child( ListItem::new(id) .indent_level(1) @@ -1622,17 +1608,13 @@ impl GitPanel { .spacing(ListItemSpacing::Sparse) .start_slot(start_slot) .toggle_state(selected) + .focused(selected && self.focus_handle.is_focused(window)) .disabled(!has_write_access) .on_click({ - let entry = entry.clone(); cx.listener(move |this, _, window, cx| { this.selected_entry = Some(ix); - let Some(workspace) = this.workspace.upgrade() else { - return; - }; - workspace.update(cx, |workspace, cx| { - ProjectDiff::deploy_at(workspace, Some(entry.clone()), window, cx); - }) + cx.notify(); + this.open_selected(&Default::default(), window, cx); }) }) .child( @@ -1660,13 +1642,7 @@ impl GitPanel { } fn has_write_access(&self, cx: &App) -> bool { - let room = self - .workspace - .upgrade() - .and_then(|workspace| workspace.read(cx).active_call()?.read(cx).room().cloned()); - - 
room.as_ref() - .map_or(true, |room| room.read(cx).local_participant().can_write()) + !self.project.read(cx).is_read_only(cx) } } @@ -1684,43 +1660,14 @@ impl Render for GitPanel { .upgrade() .and_then(|workspace| workspace.read(cx).active_call()?.read(cx).room().cloned()); - let has_write_access = room - .as_ref() - .map_or(true, |room| room.read(cx).local_participant().can_write()); - let (can_commit, name_and_email) = match &room { - Some(room) => { - if project.is_via_collab() { - if has_write_access { - let name_and_email = - room.read(cx).local_participant_user(cx).and_then(|user| { - let email = SharedString::from(user.email.clone()?); - let name = user - .name - .clone() - .map(SharedString::from) - .unwrap_or(SharedString::from(user.github_login.clone())); - Some((name, email)) - }); - (name_and_email.is_some(), name_and_email) - } else { - (false, None) - } - } else { - (has_write_access, None) - } - } - None => (has_write_access, None), - }; - let can_commit = !self.commit_pending && can_commit; - - let has_co_authors = can_commit - && has_write_access - && room.map_or(false, |room| { - room.read(cx) - .remote_participants() - .values() - .any(|remote_participant| remote_participant.can_write()) - }); + let has_write_access = self.has_write_access(cx); + + let has_co_authors = room.map_or(false, |room| { + room.read(cx) + .remote_participants() + .values() + .any(|remote_participant| remote_participant.can_write()) + }); v_flex() .id("git_panel") @@ -1731,31 +1678,7 @@ impl Render for GitPanel { this.on_action(cx.listener(|this, &ToggleStaged, window, cx| { this.toggle_staged_for_selected(&ToggleStaged, window, cx) })) - .when(can_commit, |git_panel| { - git_panel - .on_action({ - let name_and_email = name_and_email.clone(); - cx.listener(move |git_panel, &CommitChanges, window, cx| { - git_panel.commit_changes( - &CommitChanges, - name_and_email.clone(), - window, - cx, - ) - }) - }) - .on_action({ - let name_and_email = name_and_email.clone(); - cx.listener(move |git_panel, &CommitAllChanges, window, cx| { - git_panel.commit_tracked_changes( - &CommitAllChanges, - name_and_email.clone(), - window, - cx, - ) - }) - }) - }) + .on_action(cx.listener(GitPanel::commit)) }) .when(self.is_focused(window, cx), |this| { this.on_action(cx.listener(Self::select_first)) @@ -1768,7 +1691,7 @@ impl Render for GitPanel { .on_action(cx.listener(Self::focus_changes_list)) .on_action(cx.listener(Self::focus_editor)) .on_action(cx.listener(Self::toggle_staged_for_selected)) - .when(has_co_authors, |git_panel| { + .when(has_write_access && has_co_authors, |git_panel| { git_panel.on_action(cx.listener(Self::fill_co_authors)) }) // .on_action(cx.listener(|this, &OpenSelected, cx| this.open_selected(&OpenSelected, cx))) @@ -1791,7 +1714,7 @@ impl Render for GitPanel { } else { self.render_empty_state(cx).into_any_element() }) - .child(self.render_commit_editor(name_and_email, cx)) + .child(self.render_commit_editor(cx)) } } diff --git a/crates/git_ui/src/project_diff.rs b/crates/git_ui/src/project_diff.rs index 74d7c26c485c273e36bef6c67fa9447a4e870a62..ea54fb1bfdf140098e26dbeea150e75cad12b6df 100644 --- a/crates/git_ui/src/project_diff.rs +++ b/crates/git_ui/src/project_diff.rs @@ -235,7 +235,7 @@ impl ProjectDiff { .update(cx, |workspace, cx| { if let Some(git_panel) = workspace.panel::(cx) { git_panel.update(cx, |git_panel, cx| { - git_panel.set_focused_path(project_path.into(), window, cx) + git_panel.select_entry_by_path(project_path.into(), window, cx) }) } }) diff --git 
a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 1799f507292ec2b0a2332afed6254a654c9e5742..e94bd1479df3960ff8ab97454a999c205d8f9ba3 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -368,6 +368,7 @@ enum ErrorCode { DevServerProjectPathDoesNotExist = 16; RemoteUpgradeRequired = 17; RateLimitExceeded = 18; + CommitFailed = 19; reserved 6; reserved 14 to 15; } From 92c21a28147c8337e9541aecdb8cd4e6c2394c25 Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Fri, 7 Feb 2025 03:29:05 -0700 Subject: [PATCH 113/130] Fix undismissed app notifications appearing on new workspaces (#24437) Bug in #23817 Release Notes: - N/A --- crates/workspace/src/notifications.rs | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/crates/workspace/src/notifications.rs b/crates/workspace/src/notifications.rs index dacca6067ef03a9cc9f9b5a6db333e869ef322ed..ad491910d06fc0e4fc5cf2cddd46ca2739b16c2e 100644 --- a/crates/workspace/src/notifications.rs +++ b/crates/workspace/src/notifications.rs @@ -1,7 +1,7 @@ use crate::{Toast, Workspace}; use gpui::{ svg, AnyView, App, AppContext as _, AsyncWindowContext, ClipboardItem, Context, DismissEvent, - Entity, EventEmitter, Global, PromptLevel, Render, ScrollHandle, Task, + Entity, EventEmitter, PromptLevel, Render, ScrollHandle, Task, }; use parking_lot::Mutex; use std::sync::{Arc, LazyLock}; @@ -156,10 +156,11 @@ impl Workspace { pub fn show_initial_notifications(&mut self, cx: &mut Context) { // Allow absence of the global so that tests don't need to initialize it. - let app_notifications = cx - .try_global::() + let app_notifications = GLOBAL_APP_NOTIFICATIONS + .lock() + .app_notifications .iter() - .flat_map(|global| global.app_notifications.iter().cloned()) + .cloned() .collect::>(); for (id, build_notification) in app_notifications { self.show_notification_without_handling_dismiss_events(&id, cx, |cx| { @@ -614,8 +615,6 @@ struct AppNotifications { )>, } -impl Global for AppNotifications {} - impl AppNotifications { pub fn insert( &mut self, From f700268029163c683fe8f62320e1ffc4a346d616 Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Fri, 7 Feb 2025 03:53:38 -0700 Subject: [PATCH 114/130] Improve vim interactions with edit predictions (#24418) * When an edit prediction is present in non-insertion modes, hide it but show `tab Jump to edit`. * Removes discarding of edit predictions when going from insert mode to normal mode, instead just hide them in non-insertion modes. * Removes zeta-specific showing of predictions in normal mode. This behavior was only happening in special cases anyway - where the discard of completions wasn't happening due to some other thing taking precedence in `dismiss_menus_and_popups`. 
Release Notes: - N/A --------- Co-authored-by: Conrad Co-authored-by: Mikayla --- .../src/copilot_completion_provider.rs | 4 - crates/editor/src/editor.rs | 60 +++++++++----- crates/editor/src/element.rs | 80 +++++++++++-------- crates/editor/src/inline_completion_tests.rs | 4 - .../src/inline_completion.rs | 6 -- .../src/supermaven_completion_provider.rs | 4 - crates/vim/src/vim.rs | 28 ++++--- crates/zeta/src/zeta.rs | 4 - 8 files changed, 102 insertions(+), 88 deletions(-) diff --git a/crates/copilot/src/copilot_completion_provider.rs b/crates/copilot/src/copilot_completion_provider.rs index 93ffeaf2e2d92164a4fd40062ba69aa2802d0b00..51aa112849d9c3459916bd6f542ab75a4704f996 100644 --- a/crates/copilot/src/copilot_completion_provider.rs +++ b/crates/copilot/src/copilot_completion_provider.rs @@ -61,10 +61,6 @@ impl InlineCompletionProvider for CopilotCompletionProvider { false } - fn show_completions_in_normal_mode() -> bool { - false - } - fn is_refreshing(&self) -> bool { self.pending_refresh.is_some() } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index b2c87d63b5cb96f264f8e2367df121c2d8e267c1..29b266d970093b739d9bd57eae2145aa47d637fb 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -679,9 +679,7 @@ pub struct Editor { active_inline_completion: Option, /// Used to prevent flickering as the user types while the menu is open stale_inline_completion_in_menu: Option, - // enable_inline_completions is a switch that Vim can use to disable - // edit predictions based on its mode. - show_inline_completions: bool, + inline_completions_hidden_for_vim_mode: bool, show_inline_completions_override: Option, menu_inline_completions_policy: MenuInlineCompletionsPolicy, previewing_inline_completion: bool, @@ -1390,8 +1388,8 @@ impl Editor { hovered_cursors: Default::default(), next_editor_action_id: EditorActionId::default(), editor_actions: Rc::default(), + inline_completions_hidden_for_vim_mode: false, show_inline_completions_override: None, - show_inline_completions: true, menu_inline_completions_policy: MenuInlineCompletionsPolicy::ByProvider, custom_context_menu: None, show_git_blame_gutter: false, @@ -1829,11 +1827,19 @@ impl Editor { self.input_enabled = input_enabled; } - pub fn set_show_inline_completions_enabled(&mut self, enabled: bool, cx: &mut Context) { - self.show_inline_completions = enabled; - if !self.show_inline_completions { - self.take_active_inline_completion(cx); - cx.notify(); + pub fn set_inline_completions_hidden_for_vim_mode( + &mut self, + hidden: bool, + window: &mut Window, + cx: &mut Context, + ) { + if hidden != self.inline_completions_hidden_for_vim_mode { + self.inline_completions_hidden_for_vim_mode = hidden; + if hidden { + self.update_visible_inline_completion(window, cx); + } else { + self.refresh_inline_completion(true, false, window, cx); + } } } @@ -1902,6 +1908,15 @@ impl Editor { self.refresh_inline_completion(false, true, window, cx); } + pub fn inline_completion_start_anchor(&self) -> Option { + let active_completion = self.active_inline_completion.as_ref()?; + let result = match &active_completion.completion { + InlineCompletion::Edit { edits, .. } => edits.first()?.0.start, + InlineCompletion::Move { target, .. 
} => *target, + }; + Some(result) + } + fn inline_completions_disabled_in_scope( &self, buffer: &Entity, @@ -2566,7 +2581,7 @@ impl Editor { pub fn dismiss_menus_and_popups( &mut self, - should_report_inline_completion_event: bool, + is_user_requested: bool, window: &mut Window, cx: &mut Context, ) -> bool { @@ -2590,7 +2605,7 @@ impl Editor { return true; } - if self.discard_inline_completion(should_report_inline_completion_event, cx) { + if is_user_requested && self.discard_inline_completion(true, cx) { return true; } @@ -4634,12 +4649,7 @@ impl Editor { } if !user_requested - && (!self.show_inline_completions - || !self.should_show_inline_completions_in_buffer( - &buffer, - cursor_buffer_position, - cx, - ) + && (!self.should_show_inline_completions_in_buffer(&buffer, cursor_buffer_position, cx) || !self.is_focused(window) || buffer.read(cx).is_empty()) { @@ -4752,7 +4762,7 @@ impl Editor { let cursor = self.selections.newest_anchor().head(); let (buffer, cursor_buffer_position) = self.buffer.read(cx).text_anchor_for_position(cursor, cx)?; - if !self.show_inline_completions + if self.inline_completions_hidden_for_vim_mode || !self.should_show_inline_completions_in_buffer(&buffer, cursor_buffer_position, cx) { return None; @@ -4873,6 +4883,7 @@ impl Editor { match &active_inline_completion.completion { InlineCompletion::Move { target, .. } => { let target = *target; + // Note that this is also done in vim's handler of the Tab action. self.change_selections(Some(Autoscroll::newest()), window, cx, |selections| { selections.select_anchor_ranges([target..target]); }); @@ -5083,7 +5094,6 @@ impl Editor { || (!self.completion_tasks.is_empty() && !self.has_active_inline_completion())); if completions_menu_has_precedence || !offset_selection.is_empty() - || !self.show_inline_completions || self .active_inline_completion .as_ref() @@ -5138,8 +5148,11 @@ impl Editor { } else { None }; - let completion = if let Some(move_invalidation_row_range) = move_invalidation_row_range { - invalidation_row_range = move_invalidation_row_range; + let is_move = + move_invalidation_row_range.is_some() || self.inline_completions_hidden_for_vim_mode; + let completion = if is_move { + invalidation_row_range = + move_invalidation_row_range.unwrap_or(edit_start_row..edit_end_row); let target = first_edit_start; let target_point = text::ToPoint::to_point(&target.text_anchor, &snapshot); // TODO: Base this off of TreeSitter or word boundaries? 
@@ -5158,7 +5171,10 @@ impl Editor { snapshot, } } else { - if !self.inline_completion_visible_in_cursor_popover(true, cx) { + let show_completions_in_buffer = !self + .inline_completion_visible_in_cursor_popover(true, cx) + && !self.inline_completions_hidden_for_vim_mode; + if show_completions_in_buffer { if edits .iter() .all(|(range, _)| range.to_offset(&multibuffer).is_empty()) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 1a56c8954abaa93a3ac525151a8865b1b67afe84..ab211f3d3e38d592f540f861466492c6ac0546c3 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -3636,7 +3636,7 @@ impl EditorElement { self.editor.focus_handle(cx), window, cx, - ); + )?; let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); let offset = point((text_bounds.size.width - size.width) / 2., PADDING_Y); element.prepaint_at(text_bounds.origin + offset, window, cx); @@ -3649,7 +3649,7 @@ impl EditorElement { self.editor.focus_handle(cx), window, cx, - ); + )?; let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); let offset = point( (text_bounds.size.width - size.width) / 2., @@ -3665,7 +3665,7 @@ impl EditorElement { self.editor.focus_handle(cx), window, cx, - ); + )?; let target_line_end = DisplayPoint::new( target_display_point.row(), @@ -3740,7 +3740,7 @@ impl EditorElement { self.editor.focus_handle(cx), window, cx, - ); + )?; element.prepaint_as_root( text_bounds.origin + origin + point(PADDING_X, px(0.)), @@ -5742,24 +5742,32 @@ fn inline_completion_accept_indicator( focus_handle: FocusHandle, window: &Window, cx: &App, -) -> AnyElement { - let use_hardcoded_linux_preview_binding; +) -> Option { + let use_hardcoded_linux_bindings; #[cfg(target_os = "macos")] { - use_hardcoded_linux_preview_binding = false; + use_hardcoded_linux_bindings = false; } #[cfg(not(target_os = "macos"))] { - use_hardcoded_linux_preview_binding = previewing; + use_hardcoded_linux_bindings = true; } - let accept_keystroke = if use_hardcoded_linux_preview_binding { - Keystroke { - modifiers: Default::default(), - key: "enter".to_string(), - key_char: None, + let accept_keystroke = if use_hardcoded_linux_bindings { + if previewing { + Keystroke { + modifiers: Default::default(), + key: "enter".to_string(), + key_char: None, + } + } else { + Keystroke { + modifiers: Default::default(), + key: "tab".to_string(), + key_char: None, + } } } else { let bindings = window.bindings_for_action_in(&crate::AcceptInlineCompletion, &focus_handle); @@ -5767,10 +5775,10 @@ fn inline_completion_accept_indicator( .last() .and_then(|binding| binding.keystrokes().first()) { - // TODO: clone unnecessary once `use_hardcoded_linux_preview_binding` is removed. + // TODO: clone unnecessary once `use_hardcoded_linux_bindings` is removed. keystroke.clone() } else { - return div().into_any(); + return None; } }; @@ -5793,26 +5801,28 @@ fn inline_completion_accept_indicator( let padding_right = if icon.is_some() { px(4.) } else { px(8.) 
}; - h_flex() - .py_0p5() - .pl_1() - .pr(padding_right) - .gap_1() - .bg(cx.theme().colors().text_accent.opacity(0.15)) - .border_1() - .border_color(cx.theme().colors().text_accent.opacity(0.8)) - .rounded_md() - .shadow_sm() - .child(accept_key) - .child(Label::new(label).size(LabelSize::Small)) - .when_some(icon, |element, icon| { - element.child( - div() - .mt(px(1.5)) - .child(Icon::new(icon).size(IconSize::Small)), - ) - }) - .into_any() + Some( + h_flex() + .py_0p5() + .pl_1() + .pr(padding_right) + .gap_1() + .bg(cx.theme().colors().text_accent.opacity(0.15)) + .border_1() + .border_color(cx.theme().colors().text_accent.opacity(0.8)) + .rounded_md() + .shadow_sm() + .child(accept_key) + .child(Label::new(label).size(LabelSize::Small)) + .when_some(icon, |element, icon| { + element.child( + div() + .mt(px(1.5)) + .child(Icon::new(icon).size(IconSize::Small)), + ) + }) + .into_any(), + ) } #[allow(clippy::too_many_arguments)] diff --git a/crates/editor/src/inline_completion_tests.rs b/crates/editor/src/inline_completion_tests.rs index c0ad941b7a67e06ac7697ac94d91d71306a17035..a5b9e8a3e06d5397bf6502c6239c948fa405de78 100644 --- a/crates/editor/src/inline_completion_tests.rs +++ b/crates/editor/src/inline_completion_tests.rs @@ -376,10 +376,6 @@ impl InlineCompletionProvider for FakeInlineCompletionProvider { false } - fn show_completions_in_normal_mode() -> bool { - false - } - fn is_enabled( &self, _buffer: &gpui::Entity, diff --git a/crates/inline_completion/src/inline_completion.rs b/crates/inline_completion/src/inline_completion.rs index d262112e0380da4b9d352e9e54a9d95da8b31160..7810d9978510bc5297127d4125e6366c79d6cb1f 100644 --- a/crates/inline_completion/src/inline_completion.rs +++ b/crates/inline_completion/src/inline_completion.rs @@ -42,7 +42,6 @@ pub trait InlineCompletionProvider: 'static + Sized { fn name() -> &'static str; fn display_name() -> &'static str; fn show_completions_in_menu() -> bool; - fn show_completions_in_normal_mode() -> bool; fn show_tab_accept_marker() -> bool { false } @@ -95,7 +94,6 @@ pub trait InlineCompletionProviderHandle { cx: &App, ) -> bool; fn show_completions_in_menu(&self) -> bool; - fn show_completions_in_normal_mode(&self) -> bool; fn show_tab_accept_marker(&self) -> bool; fn data_collection_state(&self, cx: &App) -> DataCollectionState; fn toggle_data_collection(&self, cx: &mut App); @@ -142,10 +140,6 @@ where T::show_completions_in_menu() } - fn show_completions_in_normal_mode(&self) -> bool { - T::show_completions_in_normal_mode() - } - fn show_tab_accept_marker(&self) -> bool { T::show_tab_accept_marker() } diff --git a/crates/supermaven/src/supermaven_completion_provider.rs b/crates/supermaven/src/supermaven_completion_provider.rs index c17053ca5514bf10600b88098c1c802a13edb879..b14d7d54c2bff99981e364dd4ebf7c22a33d2b7f 100644 --- a/crates/supermaven/src/supermaven_completion_provider.rs +++ b/crates/supermaven/src/supermaven_completion_provider.rs @@ -110,10 +110,6 @@ impl InlineCompletionProvider for SupermavenCompletionProvider { false } - fn show_completions_in_normal_mode() -> bool { - false - } - fn is_enabled(&self, _buffer: &Entity, _cursor_position: Anchor, cx: &App) -> bool { self.supermaven.read(cx).is_enabled() } diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index e331260faa22db5c87eb4c084cd2fd528cf2e58b..f479acee316638f35419a5c63e9be2263e050897 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -23,6 +23,7 @@ use anyhow::Result; use collections::HashMap; use editor::{ movement::{self, 
FindRange}, + scroll::Autoscroll, Anchor, Bias, Editor, EditorEvent, EditorMode, ToPoint, }; use gpui::{ @@ -344,7 +345,19 @@ impl Vim { vim.push_count_digit(n.0, window, cx); }); Vim::action(editor, cx, |vim, _: &Tab, window, cx| { - vim.input_ignored(" ".into(), window, cx) + let Some(anchor) = vim + .editor() + .and_then(|editor| editor.read(cx).inline_completion_start_anchor()) + else { + return; + }; + + vim.update_editor(window, cx, |_, editor, window, cx| { + editor.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.select_anchor_ranges([anchor..anchor]) + }); + }); + vim.switch_mode(Mode::Insert, true, window, cx); }); Vim::action(editor, cx, |vim, _: &Enter, window, cx| { vim.input_ignored("\n".into(), window, cx) @@ -1274,7 +1287,7 @@ impl Vim { } fn sync_vim_settings(&mut self, window: &mut Window, cx: &mut Context) { - self.update_editor(window, cx, |vim, editor, _, cx| { + self.update_editor(window, cx, |vim, editor, window, cx| { editor.set_cursor_shape(vim.cursor_shape(), cx); editor.set_clip_at_line_ends(vim.clip_at_line_ends(), cx); editor.set_collapse_matches(true); @@ -1282,14 +1295,11 @@ impl Vim { editor.set_autoindent(vim.should_autoindent()); editor.selections.line_mode = matches!(vim.mode, Mode::VisualLine); - let enable_inline_completions = match vim.mode { - Mode::Insert | Mode::Replace => true, - Mode::Normal => editor - .inline_completion_provider() - .map_or(false, |provider| provider.show_completions_in_normal_mode()), - _ => false, + let hide_inline_completions = match vim.mode { + Mode::Insert | Mode::Replace => false, + _ => true, }; - editor.set_show_inline_completions_enabled(enable_inline_completions, cx); + editor.set_inline_completions_hidden_for_vim_mode(hide_inline_completions, window, cx); }); cx.notify() } diff --git a/crates/zeta/src/zeta.rs b/crates/zeta/src/zeta.rs index a2d660134294e33c86ae028a40da6cb9c3c0e973..a2be4811fa6cf70ad8197b944b54f30c3b19c866 100644 --- a/crates/zeta/src/zeta.rs +++ b/crates/zeta/src/zeta.rs @@ -1513,10 +1513,6 @@ impl inline_completion::InlineCompletionProvider for ZetaInlineCompletionProvide true } - fn show_completions_in_normal_mode() -> bool { - true - } - fn show_tab_accept_marker() -> bool { true } From b6b06cf6d8379ef3f8b02bd2867ee8d66cf73c1d Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Fri, 7 Feb 2025 12:33:35 +0100 Subject: [PATCH 115/130] lsp: Send DidOpen notifications when changing selections in multi buffer (#22958) Fixes #22773 Release Notes: - Fixed an edge case with multibuffers that could break language features within them. 
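
In short, the editor now registers a buffer with its language servers lazily, the first time the cursor lands in it, so excerpts inside a multibuffer still produce a `textDocument/didOpen` even if they were never opened as standalone items. Below is a minimal, self-contained sketch of that idea; the names `registered_buffers` and `register_buffer_with_language_servers` mirror the diff that follows, while every type here is a simplified stand-in rather than the editor's real API:

```rust
// Minimal sketch of lazy buffer registration on selection change.
// All types are simplified stand-ins for the editor's real ones.
use std::collections::HashMap;

type BufferId = u64;

/// Stand-in for the guard the LSP store hands back on registration;
/// in the real editor, dropping it unregisters the buffer again.
struct RegistrationHandle;

struct LspStore;

impl LspStore {
    /// Pretend to send `textDocument/didOpen` for this buffer and return a guard.
    fn register_buffer_with_language_servers(&mut self, buffer_id: BufferId) -> RegistrationHandle {
        println!("didOpen sent for buffer {buffer_id}");
        RegistrationHandle
    }
}

struct Editor {
    registered_buffers: HashMap<BufferId, RegistrationHandle>,
    lsp_store: LspStore,
}

impl Editor {
    /// Called whenever the cursor moves; registers the target buffer on first visit only.
    fn selections_did_change(&mut self, cursor_buffer: Option<BufferId>) {
        if let Some(buffer_id) = cursor_buffer {
            if !self.registered_buffers.contains_key(&buffer_id) {
                let handle = self
                    .lsp_store
                    .register_buffer_with_language_servers(buffer_id);
                self.registered_buffers.insert(buffer_id, handle);
            }
        }
    }
}

fn main() {
    let mut editor = Editor {
        registered_buffers: HashMap::new(),
        lsp_store: LspStore,
    };
    editor.selections_did_change(Some(7)); // first visit: didOpen fires
    editor.selections_did_change(Some(7)); // second visit: already registered, no-op
}
```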
--- crates/diagnostics/src/diagnostics_tests.rs | 34 ++++++++++----------- crates/editor/src/editor.rs | 17 ++++++++++- crates/editor/src/editor_tests.rs | 12 ++++---- crates/project/src/lsp_store.rs | 7 ++++- crates/search/src/project_search.rs | 18 +++++------ 5 files changed, 54 insertions(+), 34 deletions(-) diff --git a/crates/diagnostics/src/diagnostics_tests.rs b/crates/diagnostics/src/diagnostics_tests.rs index af46b1b07c5600e6dbc1058f2a8fa132ecb0994f..550aea56a30b6e9eef42fb22102701ef205d987d 100644 --- a/crates/diagnostics/src/diagnostics_tests.rs +++ b/crates/diagnostics/src/diagnostics_tests.rs @@ -18,7 +18,7 @@ use std::{ path::{Path, PathBuf}, }; use unindent::Unindent as _; -use util::{post_inc, RandomCharIter}; +use util::{path, post_inc, RandomCharIter}; #[ctor::ctor] fn init_logger() { @@ -33,7 +33,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/test", + path!("/test"), json!({ "consts.rs": " const a: i32 = 'a'; @@ -59,7 +59,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) { .await; let language_server_id = LanguageServerId(0); - let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await; let lsp_store = project.read_with(cx, |project, _| project.lsp_store()); let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); let cx = &mut VisualTestContext::from_window(*window, cx); @@ -70,7 +70,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) { lsp_store .update_diagnostic_entries( language_server_id, - PathBuf::from("/test/main.rs"), + PathBuf::from(path!("/test/main.rs")), None, vec![ DiagnosticEntry { @@ -234,7 +234,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) { lsp_store .update_diagnostic_entries( language_server_id, - PathBuf::from("/test/consts.rs"), + PathBuf::from(path!("/test/consts.rs")), None, vec![DiagnosticEntry { range: Unclipped(PointUtf16::new(0, 15))..Unclipped(PointUtf16::new(0, 15)), @@ -341,7 +341,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) { lsp_store .update_diagnostic_entries( language_server_id, - PathBuf::from("/test/consts.rs"), + PathBuf::from(path!("/test/consts.rs")), None, vec![ DiagnosticEntry { @@ -464,7 +464,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/test", + path!("/test"), json!({ "main.js": " a(); @@ -479,7 +479,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) { let server_id_1 = LanguageServerId(100); let server_id_2 = LanguageServerId(101); - let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await; let lsp_store = project.read_with(cx, |project, _| project.lsp_store()); let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); let cx = &mut VisualTestContext::from_window(*window, cx); @@ -504,7 +504,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) { lsp_store .update_diagnostic_entries( server_id_1, - PathBuf::from("/test/main.js"), + PathBuf::from(path!("/test/main.js")), None, vec![DiagnosticEntry { range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 1)), @@ -557,7 +557,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) { lsp_store .update_diagnostic_entries( server_id_2, - PathBuf::from("/test/main.js"), + 
PathBuf::from(path!("/test/main.js")), None, vec![DiagnosticEntry { range: Unclipped(PointUtf16::new(1, 0))..Unclipped(PointUtf16::new(1, 1)), @@ -619,7 +619,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) { lsp_store .update_diagnostic_entries( server_id_1, - PathBuf::from("/test/main.js"), + PathBuf::from(path!("/test/main.js")), None, vec![DiagnosticEntry { range: Unclipped(PointUtf16::new(2, 0))..Unclipped(PointUtf16::new(2, 1)), @@ -638,7 +638,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) { lsp_store .update_diagnostic_entries( server_id_2, - PathBuf::from("/test/main.rs"), + PathBuf::from(path!("/test/main.rs")), None, vec![], cx, @@ -689,7 +689,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) { lsp_store .update_diagnostic_entries( server_id_2, - PathBuf::from("/test/main.js"), + PathBuf::from(path!("/test/main.js")), None, vec![DiagnosticEntry { range: Unclipped(PointUtf16::new(3, 0))..Unclipped(PointUtf16::new(3, 1)), @@ -755,9 +755,9 @@ async fn test_random_diagnostics(cx: &mut TestAppContext, mut rng: StdRng) { .unwrap_or(10); let fs = FakeFs::new(cx.executor()); - fs.insert_tree("/test", json!({})).await; + fs.insert_tree(path!("/test"), json!({})).await; - let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await; let lsp_store = project.read_with(cx, |project, _| project.lsp_store()); let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); let cx = &mut VisualTestContext::from_window(*window, cx); @@ -817,7 +817,7 @@ async fn test_random_diagnostics(cx: &mut TestAppContext, mut rng: StdRng) { // insert a set of diagnostics for a new path _ => { let path: PathBuf = - format!("/test/{}.rs", post_inc(&mut next_filename)).into(); + format!(path!("/test/{}.rs"), post_inc(&mut next_filename)).into(); let len = rng.gen_range(128..256); let content = RandomCharIter::new(&mut rng).take(len).collect::(); @@ -891,7 +891,7 @@ async fn test_random_diagnostics(cx: &mut TestAppContext, mut rng: StdRng) { for diagnostic in diagnostics { let found_excerpt = reference_excerpts.iter().any(|info| { let row_range = info.range.context.start.row..info.range.context.end.row; - info.path == path.strip_prefix("/test").unwrap() + info.path == path.strip_prefix(path!("/test")).unwrap() && info.language_server == language_server_id && row_range.contains(&diagnostic.range.start.0.row) }); diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 29b266d970093b739d9bd57eae2145aa47d637fb..680171413de3625775932d2894989a9884cd67d4 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -1793,7 +1793,7 @@ impl Editor { self.collapse_matches = collapse_matches; } - pub fn register_buffers_with_language_servers(&mut self, cx: &mut Context) { + fn register_buffers_with_language_servers(&mut self, cx: &mut Context) { let buffers = self.buffer.read(cx).all_buffers(); let Some(lsp_store) = self.lsp_store(cx) else { return; @@ -2020,6 +2020,21 @@ impl Editor { None } }; + if let Some(buffer_id) = new_cursor_position.buffer_id { + if !self.registered_buffers.contains_key(&buffer_id) { + if let Some(lsp_store) = self.lsp_store(cx) { + lsp_store.update(cx, |lsp_store, cx| { + let Some(buffer) = self.buffer.read(cx).buffer(buffer_id) else { + return; + }; + self.registered_buffers.insert( + buffer_id, + lsp_store.register_buffer_with_language_servers(&buffer, cx), + ); + }) + } + } 
+ } if let Some(completion_menu) = completion_menu { let cursor_position = new_cursor_position.to_offset(buffer); diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 1c4839f4f9db57265d01958ed3d216b8bcb88fb2..8fa37aaf7bb69c050f0a57e9490bcd7c6330cea5 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -14875,7 +14875,7 @@ async fn test_multi_buffer_folding(cx: &mut gpui::TestAppContext) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/a", + path!("/a"), json!({ "first.rs": sample_text_1, "second.rs": sample_text_2, @@ -14883,7 +14883,7 @@ async fn test_multi_buffer_folding(cx: &mut gpui::TestAppContext) { }), ) .await; - let project = Project::test(fs, ["/a".as_ref()], cx).await; + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx); let worktree = project.update(cx, |project, cx| { @@ -15059,7 +15059,7 @@ async fn test_multi_buffer_single_excerpts_folding(cx: &mut gpui::TestAppContext let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/a", + path!("/a"), json!({ "first.rs": sample_text_1, "second.rs": sample_text_2, @@ -15067,7 +15067,7 @@ async fn test_multi_buffer_single_excerpts_folding(cx: &mut gpui::TestAppContext }), ) .await; - let project = Project::test(fs, ["/a".as_ref()], cx).await; + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx); let worktree = project.update(cx, |project, cx| { @@ -15206,13 +15206,13 @@ async fn test_multi_buffer_with_single_excerpt_folding(cx: &mut gpui::TestAppCon let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/a", + path!("/a"), json!({ "main.rs": sample_text, }), ) .await; - let project = Project::test(fs, ["/a".as_ref()], cx).await; + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx); let worktree = project.update(cx, |project, cx| { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index f85ba369f7590b3b1af20ee21ca85086dc76046f..df0c833ab019cd750e0d175e3af5f23600b5c1b6 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -1977,7 +1977,12 @@ impl LocalLspStore { Some(local) => local.abs_path(cx), None => return, }; - let file_url = lsp::Url::from_file_path(old_path).unwrap(); + let file_url = lsp::Url::from_file_path(old_path.as_path()).unwrap_or_else(|_| { + panic!( + "`{}` is not parseable as an URI", + old_path.to_string_lossy() + ) + }); self.unregister_buffer_from_language_servers(buffer, file_url, cx); } diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 3fe85f13f20ad78209b675071ec28a72b676ed4d..71887c067ddb623ac65bf3ed327ae0225b8a80ef 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -2197,7 +2197,7 @@ pub mod tests { let fs = FakeFs::new(cx.background_executor.clone()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "one.rs": "const ONE: usize = 1;", "two.rs": "const TWO: usize = one::ONE + one::ONE;", @@ -2206,7 +2206,7 @@ pub mod tests { }), ) .await; - let project = 
Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); let workspace = window.root(cx).unwrap(); let search = cx.new(|cx| ProjectSearch::new(project.clone(), cx)); @@ -2564,7 +2564,7 @@ pub mod tests { let fs = FakeFs::new(cx.background_executor.clone()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "one.rs": "const ONE: usize = 1;", "two.rs": "const TWO: usize = one::ONE + one::ONE;", @@ -2573,7 +2573,7 @@ pub mod tests { }), ) .await; - let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let window = cx.add_window(|window, cx| Workspace::test_new(project, window, cx)); let workspace = window; let search_bar = window.build_entity(cx, |_, _| ProjectSearchBar::new()); @@ -2859,7 +2859,7 @@ pub mod tests { let fs = FakeFs::new(cx.background_executor.clone()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "a": { "one.rs": "const ONE: usize = 1;", @@ -2984,7 +2984,7 @@ pub mod tests { let fs = FakeFs::new(cx.background_executor.clone()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "one.rs": "const ONE: usize = 1;", "two.rs": "const TWO: usize = one::ONE + one::ONE;", @@ -2993,7 +2993,7 @@ pub mod tests { }), ) .await; - let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let window = cx.add_window(|window, cx| Workspace::test_new(project, window, cx)); let workspace = window.root(cx).unwrap(); let search_bar = window.build_entity(cx, |_, _| ProjectSearchBar::new()); @@ -3693,7 +3693,7 @@ pub mod tests { // We need many lines in the search results to be able to scroll the window let fs = FakeFs::new(cx.background_executor.clone()); fs.insert_tree( - "/dir", + path!("/dir"), json!({ "1.txt": "\n\n\n\n\n A \n\n\n\n\n", "2.txt": "\n\n\n\n\n A \n\n\n\n\n", @@ -3718,7 +3718,7 @@ pub mod tests { }), ) .await; - let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); let workspace = window.root(cx).unwrap(); let search = cx.new(|cx| ProjectSearch::new(project, cx)); From 4f65cfa93d183295289a4313e550c434b8ab7fc7 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 7 Feb 2025 16:49:13 +0200 Subject: [PATCH 116/130] Fix `editor::GoToDiagnostics` cycle (#24446) https://github.com/user-attachments/assets/45f665f0-473a-49bd-b013-b9d1bdb902bd After activating 2nd diagnostics group, `find_map` code for next diagnostics did not skip the previous group for the same place. 
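
The fix boils down to the predicate used while scanning for the next group: rather than only excluding the currently active group, the scan also has to skip every group on the already-visited side of it, respecting the traversal direction (with group 0 acting as the wrap-around point). A self-contained sketch of just that predicate, with the editor's diagnostic entries and active-diagnostics state reduced to bare group ids, is below; the real change in `editor.rs` follows in the diff:

```rust
/// Simplified stand-in for the editor's traversal direction.
#[derive(Clone, Copy, PartialEq)]
enum Direction {
    Prev,
    Next,
}

/// Should a candidate diagnostic group become the next active one?
/// `active_group` is the id of the currently active group, if any.
fn in_next_group(active_group: Option<usize>, entry_group: usize, direction: Direction) -> bool {
    match active_group {
        // Nothing active yet: any group qualifies.
        None => true,
        Some(active) => match direction {
            // Backwards: only strictly earlier groups, unless we are on group 0
            // and need to wrap around to the end.
            Direction::Prev => entry_group != active && (active == 0 || entry_group < active),
            // Forwards: only strictly later groups, or group 0 when wrapping around.
            Direction::Next => entry_group != active && (entry_group == 0 || entry_group > active),
        },
    }
}

fn main() {
    // Two diagnostic groups (1 and 2) anchored at the same position:
    assert!(in_next_group(Some(1), 2, Direction::Next)); // forward moves on to group 2
    assert!(!in_next_group(Some(1), 1, Direction::Next)); // the active group itself is skipped
    assert!(in_next_group(Some(2), 1, Direction::Prev)); // backward returns to group 1
    assert!(in_next_group(Some(2), 0, Direction::Next)); // group 0 is reachable via wrap-around
    println!("predicate cycles through same-place diagnostics");
}
```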
Release Notes: - Fixed `editor::GoToDiagnostics` action stuck when multiple diagnostics groups belong to the same place --- crates/editor/src/editor.rs | 24 +++-- crates/editor/src/editor_tests.rs | 170 ++++++++++++++++++++++++++++++ 2 files changed, 188 insertions(+), 6 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 680171413de3625775932d2894989a9884cd67d4..16fb9b23e435649468cf504b7fb1ab9c9c1e464b 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -10283,14 +10283,26 @@ impl Editor { if entry.diagnostic.is_primary && entry.diagnostic.severity <= DiagnosticSeverity::WARNING && entry.range.start != entry.range.end - // if we match with the active diagnostic, skip it - && Some(entry.diagnostic.group_id) - != self.active_diagnostics.as_ref().map(|d| d.group_id) { - Some((entry.range, entry.diagnostic.group_id)) - } else { - None + let entry_group = entry.diagnostic.group_id; + let in_next_group = self.active_diagnostics.as_ref().map_or( + true, + |active| match direction { + Direction::Prev => { + entry_group != active.group_id + && (active.group_id == 0 || entry_group < active.group_id) + } + Direction::Next => { + entry_group != active.group_id + && (entry_group == 0 || entry_group > active.group_id) + } + }, + ); + if in_next_group { + return Some((entry.range, entry.diagnostic.group_id)); + } } + None }); if let Some((primary_range, group_id)) = group { diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 8fa37aaf7bb69c050f0a57e9490bcd7c6330cea5..5fed6d16434bf1cccaf987270b57b2d8e93825de 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -10653,6 +10653,176 @@ async fn go_to_prev_overlapping_diagnostic( "}); } +#[gpui::test] +async fn cycle_through_same_place_diagnostics( + executor: BackgroundExecutor, + cx: &mut gpui::TestAppContext, +) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + let lsp_store = + cx.update_editor(|editor, _, cx| editor.project.as_ref().unwrap().read(cx).lsp_store()); + + cx.set_state(indoc! {" + ˇfn func(abc def: i32) -> u32 { + } + "}); + + cx.update(|_, cx| { + lsp_store.update(cx, |lsp_store, cx| { + lsp_store + .update_diagnostics( + LanguageServerId(0), + lsp::PublishDiagnosticsParams { + uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(), + version: None, + diagnostics: vec![ + lsp::Diagnostic { + range: lsp::Range::new( + lsp::Position::new(0, 11), + lsp::Position::new(0, 12), + ), + severity: Some(lsp::DiagnosticSeverity::ERROR), + ..Default::default() + }, + lsp::Diagnostic { + range: lsp::Range::new( + lsp::Position::new(0, 12), + lsp::Position::new(0, 15), + ), + severity: Some(lsp::DiagnosticSeverity::ERROR), + ..Default::default() + }, + lsp::Diagnostic { + range: lsp::Range::new( + lsp::Position::new(0, 12), + lsp::Position::new(0, 15), + ), + severity: Some(lsp::DiagnosticSeverity::ERROR), + ..Default::default() + }, + lsp::Diagnostic { + range: lsp::Range::new( + lsp::Position::new(0, 25), + lsp::Position::new(0, 28), + ), + severity: Some(lsp::DiagnosticSeverity::ERROR), + ..Default::default() + }, + ], + }, + &[], + cx, + ) + .unwrap() + }); + }); + executor.run_until_parked(); + + //// Backward + + // Fourth diagnostic + cx.update_editor(|editor, window, cx| { + editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, window, cx); + }); + cx.assert_editor_state(indoc! 
{" + fn func(abc def: i32) -> ˇu32 { + } + "}); + + // Third diagnostic + cx.update_editor(|editor, window, cx| { + editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, window, cx); + }); + cx.assert_editor_state(indoc! {" + fn func(abc ˇdef: i32) -> u32 { + } + "}); + + // Second diagnostic, same place + cx.update_editor(|editor, window, cx| { + editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, window, cx); + }); + cx.assert_editor_state(indoc! {" + fn func(abc ˇdef: i32) -> u32 { + } + "}); + + // First diagnostic + cx.update_editor(|editor, window, cx| { + editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, window, cx); + }); + cx.assert_editor_state(indoc! {" + fn func(abcˇ def: i32) -> u32 { + } + "}); + + // Wrapped over, fourth diagnostic + cx.update_editor(|editor, window, cx| { + editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, window, cx); + }); + cx.assert_editor_state(indoc! {" + fn func(abc def: i32) -> ˇu32 { + } + "}); + + cx.update_editor(|editor, window, cx| { + editor.move_to_beginning(&MoveToBeginning, window, cx); + }); + cx.assert_editor_state(indoc! {" + ˇfn func(abc def: i32) -> u32 { + } + "}); + + //// Forward + + // First diagnostic + cx.update_editor(|editor, window, cx| { + editor.go_to_diagnostic(&GoToDiagnostic, window, cx); + }); + cx.assert_editor_state(indoc! {" + fn func(abcˇ def: i32) -> u32 { + } + "}); + + // Second diagnostic + cx.update_editor(|editor, window, cx| { + editor.go_to_diagnostic(&GoToDiagnostic, window, cx); + }); + cx.assert_editor_state(indoc! {" + fn func(abc ˇdef: i32) -> u32 { + } + "}); + + // Third diagnostic, same place + cx.update_editor(|editor, window, cx| { + editor.go_to_diagnostic(&GoToDiagnostic, window, cx); + }); + cx.assert_editor_state(indoc! {" + fn func(abc ˇdef: i32) -> u32 { + } + "}); + + // Fourth diagnostic + cx.update_editor(|editor, window, cx| { + editor.go_to_diagnostic(&GoToDiagnostic, window, cx); + }); + cx.assert_editor_state(indoc! {" + fn func(abc def: i32) -> ˇu32 { + } + "}); + + // Wrapped around, first diagnostic + cx.update_editor(|editor, window, cx| { + editor.go_to_diagnostic(&GoToDiagnostic, window, cx); + }); + cx.assert_editor_state(indoc! {" + fn func(abcˇ def: i32) -> u32 { + } + "}); +} + #[gpui::test] async fn test_diagnostics_with_links(cx: &mut TestAppContext) { init_test(cx, |_| {}); From a1544f47ad101d0c054a4044ccee325b5d4f9986 Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Fri, 7 Feb 2025 09:37:07 -0600 Subject: [PATCH 117/130] Fix incorrect assumption about `Path.extension()` (#24443) Release Notes: - N/A --- crates/language/src/language_registry.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index 9d80b2609c62b57af86c9acb8d820365c06b00cc..21c083696f65c53451dbc66bea54d0197a652390 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -659,7 +659,10 @@ impl LanguageRegistry { user_file_types: Option<&HashMap, GlobSet>>, ) -> Option { let filename = path.file_name().and_then(|name| name.to_str()); - let extension = path.extension().and_then(|ext| ext.to_str()); + // `Path.extension()` returns None for files with a leading '.' 
+ // and no other extension which is not the desired behavior here, + // as we want `.zshrc` to result in extension being `Some("zshrc")` + let extension = filename.and_then(|filename| filename.split('.').last()); let path_suffixes = [extension, filename, path.to_str()]; let empty = GlobSet::empty(); From 00c2a300592a38e33b9e03000c1ba67d232bebee Mon Sep 17 00:00:00 2001 From: smit <0xtimsb@gmail.com> Date: Fri, 7 Feb 2025 21:17:07 +0530 Subject: [PATCH 118/130] Migrate keymap and settings + edit predictions rename (#23834) - [x] snake case keymap properties - [x] flatten actions - [x] keymap migration + notfication - [x] settings migration + notification - [x] inline completions -> edit predictions ### future: - keymap notification doesn't show up on start up, only on keymap save. this is existing bug in zed, will be addressed in seperate PR. Release Notes: - Added a notification for deprecated settings and keymaps, allowing you to migrate them with a single click. A backup of your existing keymap and settings will be created in your home directory. - Modified some keymap actions and settings for consistency. --------- Co-authored-by: Piotr Osiewicz Co-authored-by: Max Brunsfeld --- Cargo.lock | 12 + Cargo.toml | 2 + assets/keymaps/default-linux.json | 60 +- assets/keymaps/default-macos.json | 56 +- assets/keymaps/vim.json | 196 ++-- assets/settings/default.json | 12 +- crates/copilot/src/copilot.rs | 6 +- .../src/copilot_completion_provider.rs | 34 +- crates/editor/src/actions.rs | 43 +- crates/editor/src/code_context_menus.rs | 1 - crates/editor/src/editor.rs | 100 +- crates/editor/src/editor_settings.rs | 4 +- crates/editor/src/editor_tests.rs | 6 +- crates/editor/src/element.rs | 10 +- crates/editor/src/inline_completion_tests.rs | 8 +- .../src/inline_completion.rs | 4 +- .../src/inline_completion_button.rs | 58 +- crates/language/src/language_settings.rs | 70 +- crates/migrator/Cargo.toml | 22 + crates/migrator/LICENSE-GPL | 1 + crates/migrator/src/migrator.rs | 863 ++++++++++++++++++ crates/picker/src/picker.rs | 1 + crates/project_panel/src/project_panel.rs | 2 + crates/search/src/buffer_search.rs | 1 + crates/settings/Cargo.toml | 1 + crates/settings/src/keymap_file.rs | 85 +- crates/settings/src/settings_file.rs | 6 +- crates/settings/src/settings_store.rs | 65 +- crates/supermaven/src/supermaven.rs | 8 +- .../src/supermaven_completion_provider.rs | 4 +- crates/tab_switcher/src/tab_switcher.rs | 1 + crates/terminal_view/src/terminal_panel.rs | 104 ++- crates/title_bar/src/application_menu.rs | 35 +- crates/title_bar/src/title_bar.rs | 50 +- crates/vim/src/motion.rs | 30 +- crates/vim/src/normal/increment.rs | 4 +- crates/vim/src/normal/paste.rs | 2 +- crates/vim/src/normal/search.rs | 6 +- crates/vim/src/object.rs | 7 +- crates/vim/src/state.rs | 6 +- crates/vim/src/surrounds.rs | 8 +- crates/vim/src/test.rs | 13 +- crates/vim/src/vim.rs | 370 +++++++- crates/workspace/src/pane.rs | 17 +- crates/workspace/src/pane_group.rs | 9 +- crates/workspace/src/workspace.rs | 73 +- crates/zed/src/zed.rs | 101 +- .../zed/src/zed/inline_completion_registry.rs | 54 +- crates/zed/src/zed/quick_action_bar.rs | 4 +- crates/zed_actions/src/lib.rs | 7 + crates/zeta/src/init.rs | 4 +- crates/zeta/src/onboarding_banner.rs | 8 +- crates/zeta/src/onboarding_modal.rs | 4 +- crates/zeta/src/zeta.rs | 2 +- docs/src/completions.md | 24 +- docs/src/configuring-zed.md | 14 +- docs/src/key-bindings.md | 2 +- docs/src/vim.md | 19 +- 58 files changed, 2104 insertions(+), 615 deletions(-) create mode 100644 
crates/migrator/Cargo.toml create mode 120000 crates/migrator/LICENSE-GPL create mode 100644 crates/migrator/src/migrator.rs diff --git a/Cargo.lock b/Cargo.lock index eae4c7bea9f75d92fdc6ae531a2c414d5399c5bb..3888da0dd0161121ac3ccd1164279575fe8a78de 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7836,6 +7836,17 @@ dependencies = [ "paste", ] +[[package]] +name = "migrator" +version = "0.1.0" +dependencies = [ + "collections", + "convert_case 0.7.1", + "pretty_assertions", + "tree-sitter", + "tree-sitter-json", +] + [[package]] name = "mimalloc" version = "0.1.43" @@ -11980,6 +11991,7 @@ dependencies = [ "gpui", "indoc", "log", + "migrator", "paths", "pretty_assertions", "release_channel", diff --git a/Cargo.toml b/Cargo.toml index e108955b785c63a52007ef07a09c249f658aa2ba..217cdd9d1f27072e1188de71b7063ddbb9aaa176 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -81,6 +81,7 @@ members = [ "crates/markdown_preview", "crates/media", "crates/menu", + "crates/migrator", "crates/multi_buffer", "crates/node_runtime", "crates/notifications", @@ -279,6 +280,7 @@ markdown = { path = "crates/markdown" } markdown_preview = { path = "crates/markdown_preview" } media = { path = "crates/media" } menu = { path = "crates/menu" } +migrator = { path = "crates/migrator" } multi_buffer = { path = "crates/multi_buffer" } node_runtime = { path = "crates/node_runtime" } notifications = { path = "crates/notifications" } diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 7fd499f204713bf1682834e7d4c890f8bf3de247..217af10cc6a61e0b5f0718eda3b73eb4595b1408 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -32,7 +32,7 @@ "ctrl-q": "zed::Quit", "f11": "zed::ToggleFullScreen", "ctrl-alt-z": "zeta::RateCompletions", - "ctrl-shift-i": "inline_completion::ToggleMenu" + "ctrl-shift-i": "edit_prediction::ToggleMenu" } }, { @@ -145,17 +145,17 @@ } }, { - "context": "Editor && mode == full && inline_completion", + "context": "Editor && mode == full && edit_prediction", "bindings": { - "alt-]": "editor::NextInlineCompletion", - "alt-[": "editor::PreviousInlineCompletion", - "alt-right": "editor::AcceptPartialInlineCompletion" + "alt-]": "editor::NextEditPrediction", + "alt-[": "editor::PreviousEditPrediction", + "alt-right": "editor::AcceptPartialEditPrediction" } }, { - "context": "Editor && !inline_completion", + "context": "Editor && !edit_prediction", "bindings": { - "alt-\\": "editor::ShowInlineCompletion" + "alt-\\": "editor::ShowEditPrediction" } }, { @@ -348,15 +348,15 @@ "ctrl-k ctrl-l": "editor::ToggleFold", "ctrl-k ctrl-[": "editor::FoldRecursive", "ctrl-k ctrl-]": "editor::UnfoldRecursive", - "ctrl-k ctrl-1": ["editor::FoldAtLevel", { "level": 1 }], - "ctrl-k ctrl-2": ["editor::FoldAtLevel", { "level": 2 }], - "ctrl-k ctrl-3": ["editor::FoldAtLevel", { "level": 3 }], - "ctrl-k ctrl-4": ["editor::FoldAtLevel", { "level": 4 }], - "ctrl-k ctrl-5": ["editor::FoldAtLevel", { "level": 5 }], - "ctrl-k ctrl-6": ["editor::FoldAtLevel", { "level": 6 }], - "ctrl-k ctrl-7": ["editor::FoldAtLevel", { "level": 7 }], - "ctrl-k ctrl-8": ["editor::FoldAtLevel", { "level": 8 }], - "ctrl-k ctrl-9": ["editor::FoldAtLevel", { "level": 9 }], + "ctrl-k ctrl-1": ["editor::FoldAtLevel", 1], + "ctrl-k ctrl-2": ["editor::FoldAtLevel", 2], + "ctrl-k ctrl-3": ["editor::FoldAtLevel", 3], + "ctrl-k ctrl-4": ["editor::FoldAtLevel", 4], + "ctrl-k ctrl-5": ["editor::FoldAtLevel", 5], + "ctrl-k ctrl-6": ["editor::FoldAtLevel", 6], + "ctrl-k ctrl-7": ["editor::FoldAtLevel", 7], + 
"ctrl-k ctrl-8": ["editor::FoldAtLevel", 8], + "ctrl-k ctrl-9": ["editor::FoldAtLevel", 9], "ctrl-k ctrl-0": "editor::FoldAll", "ctrl-k ctrl-j": "editor::UnfoldAll", "ctrl-space": "editor::ShowCompletions", @@ -432,14 +432,14 @@ "ctrl-alt-s": "workspace::SaveAll", "ctrl-k m": "language_selector::Toggle", "escape": "workspace::Unfollow", - "ctrl-k ctrl-left": ["workspace::ActivatePaneInDirection", "Left"], - "ctrl-k ctrl-right": ["workspace::ActivatePaneInDirection", "Right"], - "ctrl-k ctrl-up": ["workspace::ActivatePaneInDirection", "Up"], - "ctrl-k ctrl-down": ["workspace::ActivatePaneInDirection", "Down"], - "ctrl-k shift-left": ["workspace::SwapPaneInDirection", "Left"], - "ctrl-k shift-right": ["workspace::SwapPaneInDirection", "Right"], - "ctrl-k shift-up": ["workspace::SwapPaneInDirection", "Up"], - "ctrl-k shift-down": ["workspace::SwapPaneInDirection", "Down"], + "ctrl-k ctrl-left": "workspace::ActivatePaneLeft", + "ctrl-k ctrl-right": "workspace::ActivatePaneRight", + "ctrl-k ctrl-up": "workspace::ActivatePaneUp", + "ctrl-k ctrl-down": "workspace::ActivatePaneDown", + "ctrl-k shift-left": "workspace::SwapPaneLeft", + "ctrl-k shift-right": "workspace::SwapPaneRight", + "ctrl-k shift-up": "workspace::SwapPaneUp", + "ctrl-k shift-down": "workspace::SwapPaneDown", "ctrl-shift-x": "zed::Extensions", "ctrl-shift-r": "task::Rerun", "ctrl-alt-r": "task::Rerun", @@ -453,8 +453,8 @@ { "context": "ApplicationMenu", "bindings": { - "left": ["app_menu::NavigateApplicationMenuInDirection", "Left"], - "right": ["app_menu::NavigateApplicationMenuInDirection", "Right"] + "left": "app_menu::ActivateMenuLeft", + "right": "app_menu::ActivateMenuRight" } }, // Bindings from Sublime Text @@ -502,16 +502,16 @@ } }, { - "context": "Editor && inline_completion", + "context": "Editor && edit_prediction", "bindings": { // Changing the modifier currently breaks accepting while you also an LSP completions menu open - "alt-enter": "editor::AcceptInlineCompletion" + "alt-enter": "editor::AcceptEditPrediction" } }, { - "context": "Editor && inline_completion && !inline_completion_requires_modifier", + "context": "Editor && edit_prediction && !edit_prediction_requires_modifier", "bindings": { - "tab": "editor::AcceptInlineCompletion" + "tab": "editor::AcceptEditPrediction" } }, { diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 1ddd31e0535b131a785695016dd47206067404fa..fe3d7c413ea00988d6653364fad9e0edea33efb4 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -40,7 +40,7 @@ "fn-f": "zed::ToggleFullScreen", "ctrl-cmd-f": "zed::ToggleFullScreen", "ctrl-shift-z": "zeta::RateCompletions", - "ctrl-shift-i": "inline_completion::ToggleMenu" + "ctrl-shift-i": "edit_prediction::ToggleMenu" } }, { @@ -155,19 +155,19 @@ } }, { - "context": "Editor && mode == full && inline_completion", + "context": "Editor && mode == full && edit_prediction", "use_key_equivalents": true, "bindings": { - "alt-tab": "editor::NextInlineCompletion", - "alt-shift-tab": "editor::PreviousInlineCompletion", - "ctrl-cmd-right": "editor::AcceptPartialInlineCompletion" + "alt-tab": "editor::NextEditPrediction", + "alt-shift-tab": "editor::PreviousEditPrediction", + "ctrl-cmd-right": "editor::AcceptPartialEditPrediction" } }, { - "context": "Editor && !inline_completion", + "context": "Editor && !edit_prediction", "use_key_equivalents": true, "bindings": { - "alt-tab": "editor::ShowInlineCompletion" + "alt-tab": "editor::ShowEditPrediction" } }, { @@ -413,15 +413,15 @@ 
"cmd-k cmd-l": "editor::ToggleFold", "cmd-k cmd-[": "editor::FoldRecursive", "cmd-k cmd-]": "editor::UnfoldRecursive", - "cmd-k cmd-1": ["editor::FoldAtLevel", { "level": 1 }], - "cmd-k cmd-2": ["editor::FoldAtLevel", { "level": 2 }], - "cmd-k cmd-3": ["editor::FoldAtLevel", { "level": 3 }], - "cmd-k cmd-4": ["editor::FoldAtLevel", { "level": 4 }], - "cmd-k cmd-5": ["editor::FoldAtLevel", { "level": 5 }], - "cmd-k cmd-6": ["editor::FoldAtLevel", { "level": 6 }], - "cmd-k cmd-7": ["editor::FoldAtLevel", { "level": 7 }], - "cmd-k cmd-8": ["editor::FoldAtLevel", { "level": 8 }], - "cmd-k cmd-9": ["editor::FoldAtLevel", { "level": 9 }], + "cmd-k cmd-1": ["editor::FoldAtLevel", 1], + "cmd-k cmd-2": ["editor::FoldAtLevel", 2], + "cmd-k cmd-3": ["editor::FoldAtLevel", 3], + "cmd-k cmd-4": ["editor::FoldAtLevel", 4], + "cmd-k cmd-5": ["editor::FoldAtLevel", 5], + "cmd-k cmd-6": ["editor::FoldAtLevel", 6], + "cmd-k cmd-7": ["editor::FoldAtLevel", 7], + "cmd-k cmd-8": ["editor::FoldAtLevel", 8], + "cmd-k cmd-9": ["editor::FoldAtLevel", 9], "cmd-k cmd-0": "editor::FoldAll", "cmd-k cmd-j": "editor::UnfoldAll", // Using `ctrl-space` in Zed requires disabling the macOS global shortcut. @@ -509,14 +509,14 @@ "cmd-alt-s": "workspace::SaveAll", "cmd-k m": "language_selector::Toggle", "escape": "workspace::Unfollow", - "cmd-k cmd-left": ["workspace::ActivatePaneInDirection", "Left"], - "cmd-k cmd-right": ["workspace::ActivatePaneInDirection", "Right"], - "cmd-k cmd-up": ["workspace::ActivatePaneInDirection", "Up"], - "cmd-k cmd-down": ["workspace::ActivatePaneInDirection", "Down"], - "cmd-k shift-left": ["workspace::SwapPaneInDirection", "Left"], - "cmd-k shift-right": ["workspace::SwapPaneInDirection", "Right"], - "cmd-k shift-up": ["workspace::SwapPaneInDirection", "Up"], - "cmd-k shift-down": ["workspace::SwapPaneInDirection", "Down"], + "cmd-k cmd-left": "workspace::ActivatePaneLeft", + "cmd-k cmd-right": "workspace::ActivatePaneRight", + "cmd-k cmd-up": "workspace::ActivatePaneUp", + "cmd-k cmd-down": "workspace::ActivatePaneDown", + "cmd-k shift-left": "workspace::SwapPaneLeft", + "cmd-k shift-right": "workspace::SwapPaneRight", + "cmd-k shift-up": "workspace::SwapPaneUp", + "cmd-k shift-down": "workspace::SwapPaneDown", "cmd-shift-x": "zed::Extensions" } }, @@ -580,17 +580,17 @@ } }, { - "context": "Editor && inline_completion", + "context": "Editor && edit_prediction", "bindings": { // Changing the modifier currently breaks accepting while you also an LSP completions menu open - "alt-tab": "editor::AcceptInlineCompletion" + "alt-tab": "editor::AcceptEditPrediction" } }, { - "context": "Editor && inline_completion && !inline_completion_requires_modifier", + "context": "Editor && edit_prediction && !edit_prediction_requires_modifier", "use_key_equivalents": true, "bindings": { - "tab": "editor::AcceptInlineCompletion" + "tab": "editor::AcceptEditPrediction" } }, { diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index bad0b4e604909584dc8c9ae4f54ab1476730cea3..af1822d706aee72f84e73981758c5c7b2c8328e0 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -2,8 +2,8 @@ { "context": "VimControl && !menu", "bindings": { - "i": ["vim::PushOperator", { "Object": { "around": false } }], - "a": ["vim::PushOperator", { "Object": { "around": true } }], + "i": ["vim::PushObject", { "around": false }], + "a": ["vim::PushObject", { "around": true }], "left": "vim::Left", "h": "vim::Left", "backspace": "vim::Backspace", @@ -54,10 +54,10 @@ // "b": "vim::PreviousSubwordStart", // "e": 
"vim::NextSubwordEnd", // "g e": "vim::PreviousSubwordEnd", - "shift-w": ["vim::NextWordStart", { "ignorePunctuation": true }], - "shift-e": ["vim::NextWordEnd", { "ignorePunctuation": true }], - "shift-b": ["vim::PreviousWordStart", { "ignorePunctuation": true }], - "g shift-e": ["vim::PreviousWordEnd", { "ignorePunctuation": true }], + "shift-w": ["vim::NextWordStart", { "ignore_punctuation": true }], + "shift-e": ["vim::NextWordEnd", { "ignore_punctuation": true }], + "shift-b": ["vim::PreviousWordStart", { "ignore_punctuation": true }], + "g shift-e": ["vim::PreviousWordEnd", { "ignore_punctuation": true }], "/": "vim::Search", "g /": "pane::DeploySearch", "?": ["vim::Search", { "backwards": true }], @@ -70,20 +70,20 @@ "[ {": ["vim::UnmatchedBackward", { "char": "{" }], "] )": ["vim::UnmatchedForward", { "char": ")" }], "[ (": ["vim::UnmatchedBackward", { "char": "(" }], - "f": ["vim::PushOperator", { "FindForward": { "before": false } }], - "t": ["vim::PushOperator", { "FindForward": { "before": true } }], - "shift-f": ["vim::PushOperator", { "FindBackward": { "after": false } }], - "shift-t": ["vim::PushOperator", { "FindBackward": { "after": true } }], - "m": ["vim::PushOperator", "Mark"], - "'": ["vim::PushOperator", { "Jump": { "line": true } }], - "`": ["vim::PushOperator", { "Jump": { "line": false } }], + "f": ["vim::PushFindForward", { "before": false }], + "t": ["vim::PushFindForward", { "before": true }], + "shift-f": ["vim::PushFindBackward", { "after": false }], + "shift-t": ["vim::PushFindBackward", { "after": true }], + "m": "vim::PushMark", + "'": ["vim::PushJump", { "line": true }], + "`": ["vim::PushJump", { "line": false }], ";": "vim::RepeatFind", ",": "vim::RepeatFindReversed", "ctrl-o": "pane::GoBack", "ctrl-i": "pane::GoForward", "ctrl-]": "editor::GoToDefinition", - "escape": ["vim::SwitchMode", "Normal"], - "ctrl-[": ["vim::SwitchMode", "Normal"], + "escape": "vim::SwitchToNormalMode", + "ctrl-[": "vim::SwitchToNormalMode", "v": "vim::ToggleVisual", "shift-v": "vim::ToggleVisualLine", "ctrl-g": "vim::ShowLocation", @@ -102,7 +102,7 @@ "ctrl-e": "vim::LineDown", "ctrl-y": "vim::LineUp", // "g" commands - "g r": ["vim::PushOperator", "ReplaceWithRegister"], + "g r": "vim::PushReplaceWithRegister", "g g": "vim::StartOfDocument", "g h": "editor::Hover", "g t": "pane::ActivateNextItem", @@ -125,17 +125,17 @@ "g .": "editor::ToggleCodeActions", // zed specific "g shift-a": "editor::FindAllReferences", // zed specific "g space": "editor::OpenExcerpts", // zed specific - "g *": ["vim::MoveToNext", { "partialWord": true }], - "g #": ["vim::MoveToPrev", { "partialWord": true }], - "g j": ["vim::Down", { "displayLines": true }], - "g down": ["vim::Down", { "displayLines": true }], - "g k": ["vim::Up", { "displayLines": true }], - "g up": ["vim::Up", { "displayLines": true }], - "g $": ["vim::EndOfLine", { "displayLines": true }], - "g end": ["vim::EndOfLine", { "displayLines": true }], - "g 0": ["vim::StartOfLine", { "displayLines": true }], - "g home": ["vim::StartOfLine", { "displayLines": true }], - "g ^": ["vim::FirstNonWhitespace", { "displayLines": true }], + "g *": ["vim::MoveToNext", { "partial_word": true }], + "g #": ["vim::MoveToPrev", { "partial_word": true }], + "g j": ["vim::Down", { "display_lines": true }], + "g down": ["vim::Down", { "display_lines": true }], + "g k": ["vim::Up", { "display_lines": true }], + "g up": ["vim::Up", { "display_lines": true }], + "g $": ["vim::EndOfLine", { "display_lines": true }], + "g end": ["vim::EndOfLine", { 
"display_lines": true }], + "g 0": ["vim::StartOfLine", { "display_lines": true }], + "g home": ["vim::StartOfLine", { "display_lines": true }], + "g ^": ["vim::FirstNonWhitespace", { "display_lines": true }], "g v": "vim::RestoreVisualSelection", "g ]": "editor::GoToDiagnostic", "g [": "editor::GoToPrevDiagnostic", @@ -147,7 +147,7 @@ "shift-l": "vim::WindowBottom", "q": "vim::ToggleRecord", "shift-q": "vim::ReplayLastRecording", - "@": ["vim::PushOperator", "ReplayRegister"], + "@": "vim::PushReplayRegister", // z commands "z enter": ["workspace::SendKeystrokes", "z t ^"], "z -": ["workspace::SendKeystrokes", "z b ^"], @@ -166,8 +166,8 @@ "z f": "editor::FoldSelectedRanges", "z shift-m": "editor::FoldAll", "z shift-r": "editor::UnfoldAll", - "shift-z shift-q": ["pane::CloseActiveItem", { "saveIntent": "skip" }], - "shift-z shift-z": ["pane::CloseActiveItem", { "saveIntent": "saveAll" }], + "shift-z shift-q": ["pane::CloseActiveItem", { "save_intent": "skip" }], + "shift-z shift-z": ["pane::CloseActiveItem", { "save_intent": "save_all" }], // Count support "1": ["vim::Number", 1], "2": ["vim::Number", 2], @@ -194,13 +194,13 @@ "escape": "editor::Cancel", ":": "command_palette::Toggle", ".": "vim::Repeat", - "c": ["vim::PushOperator", "Change"], + "c": "vim::PushChange", "shift-c": "vim::ChangeToEndOfLine", - "d": ["vim::PushOperator", "Delete"], + "d": "vim::PushDelete", "shift-d": "vim::DeleteToEndOfLine", "shift-j": "vim::JoinLines", "g shift-j": "vim::JoinLinesNoWhitespace", - "y": ["vim::PushOperator", "Yank"], + "y": "vim::PushYank", "shift-y": "vim::YankLine", "i": "vim::InsertBefore", "shift-i": "vim::InsertFirstNonWhitespace", @@ -217,19 +217,19 @@ "shift-p": ["vim::Paste", { "before": true }], "u": "vim::Undo", "ctrl-r": "vim::Redo", - "r": ["vim::PushOperator", "Replace"], + "r": "vim::PushReplace", "s": "vim::Substitute", "shift-s": "vim::SubstituteLine", - ">": ["vim::PushOperator", "Indent"], - "<": ["vim::PushOperator", "Outdent"], - "=": ["vim::PushOperator", "AutoIndent"], - "!": ["vim::PushOperator", "ShellCommand"], - "g u": ["vim::PushOperator", "Lowercase"], - "g shift-u": ["vim::PushOperator", "Uppercase"], - "g ~": ["vim::PushOperator", "OppositeCase"], - "\"": ["vim::PushOperator", "Register"], - "g w": ["vim::PushOperator", "Rewrap"], - "g q": ["vim::PushOperator", "Rewrap"], + ">": "vim::PushIndent", + "<": "vim::PushOutdent", + "=": "vim::PushAutoIndent", + "!": "vim::PushShellCommand", + "g u": "vim::PushLowercase", + "g shift-u": "vim::PushUppercase", + "g ~": "vim::PushOppositeCase", + "\"": "vim::PushRegister", + "g w": "vim::PushRewrap", + "g q": "vim::PushRewrap", "ctrl-pagedown": "pane::ActivateNextItem", "ctrl-pageup": "pane::ActivatePrevItem", "insert": "vim::InsertBefore", @@ -240,7 +240,7 @@ "[ d": "editor::GoToPrevDiagnostic", "] c": "editor::GoToHunk", "[ c": "editor::GoToPrevHunk", - "g c": ["vim::PushOperator", "ToggleComments"] + "g c": "vim::PushToggleComments" } }, { @@ -265,14 +265,14 @@ "y": "vim::VisualYank", "shift-y": "vim::VisualYankLine", "p": "vim::Paste", - "shift-p": ["vim::Paste", { "preserveClipboard": true }], + "shift-p": ["vim::Paste", { "preserve_clipboard": true }], "c": "vim::Substitute", "s": "vim::Substitute", "shift-r": "vim::SubstituteLine", "shift-s": "vim::SubstituteLine", "~": "vim::ChangeCase", - "*": ["vim::MoveToNext", { "partialWord": true }], - "#": ["vim::MoveToPrev", { "partialWord": true }], + "*": ["vim::MoveToNext", { "partial_word": true }], + "#": ["vim::MoveToPrev", { "partial_word": true }], "ctrl-a": 
"vim::Increment", "ctrl-x": "vim::Decrement", "g ctrl-a": ["vim::Increment", { "step": true }], @@ -283,19 +283,19 @@ "g shift-a": "vim::VisualInsertEndOfLine", "shift-j": "vim::JoinLines", "g shift-j": "vim::JoinLinesNoWhitespace", - "r": ["vim::PushOperator", "Replace"], - "ctrl-c": ["vim::SwitchMode", "Normal"], - "ctrl-[": ["vim::SwitchMode", "Normal"], - "escape": ["vim::SwitchMode", "Normal"], + "r": "vim::PushReplace", + "ctrl-c": "vim::SwitchToNormalMode", + "ctrl-[": "vim::SwitchToNormalMode", + "escape": "vim::SwitchToNormalMode", ">": "vim::Indent", "<": "vim::Outdent", "=": "vim::AutoIndent", "!": "vim::ShellCommand", - "i": ["vim::PushOperator", { "Object": { "around": false } }], - "a": ["vim::PushOperator", { "Object": { "around": true } }], + "i": ["vim::PushObject", { "around": false }], + "a": ["vim::PushObject", { "around": true }], "g c": "vim::ToggleComments", "g q": "vim::Rewrap", - "\"": ["vim::PushOperator", "Register"], + "\"": "vim::PushRegister", // tree-sitter related commands "[ x": "editor::SelectLargerSyntaxNode", "] x": "editor::SelectSmallerSyntaxNode" @@ -310,19 +310,19 @@ "ctrl-x": null, "ctrl-x ctrl-o": "editor::ShowCompletions", "ctrl-x ctrl-a": "assistant::InlineAssist", // zed specific - "ctrl-x ctrl-c": "editor::ShowInlineCompletion", // zed specific + "ctrl-x ctrl-c": "editor::ShowEditPrediction", // zed specific "ctrl-x ctrl-l": "editor::ToggleCodeActions", // zed specific "ctrl-x ctrl-z": "editor::Cancel", "ctrl-w": "editor::DeleteToPreviousWordStart", "ctrl-u": "editor::DeleteToBeginningOfLine", "ctrl-t": "vim::Indent", "ctrl-d": "vim::Outdent", - "ctrl-k": ["vim::PushOperator", { "Digraph": {} }], - "ctrl-v": ["vim::PushOperator", { "Literal": {} }], + "ctrl-k": ["vim::PushDigraph", {}], + "ctrl-v": ["vim::PushLiteral", {}], "ctrl-shift-v": "editor::Paste", // note: this is *very* similar to ctrl-v in vim, but ctrl-shift-v on linux is the typical shortcut for paste when ctrl-v is already in use. - "ctrl-q": ["vim::PushOperator", { "Literal": {} }], - "ctrl-shift-q": ["vim::PushOperator", { "Literal": {} }], - "ctrl-r": ["vim::PushOperator", "Register"], + "ctrl-q": ["vim::PushLiteral", {}], + "ctrl-shift-q": ["vim::PushLiteral", {}], + "ctrl-r": "vim::PushRegister", "insert": "vim::ToggleReplace", "ctrl-o": "vim::TemporaryNormal" } @@ -357,11 +357,11 @@ "ctrl-c": "vim::NormalBefore", "ctrl-[": "vim::NormalBefore", "escape": "vim::NormalBefore", - "ctrl-k": ["vim::PushOperator", { "Digraph": {} }], - "ctrl-v": ["vim::PushOperator", { "Literal": {} }], + "ctrl-k": ["vim::PushDigraph", {}], + "ctrl-v": ["vim::PushLiteral", {}], "ctrl-shift-v": "editor::Paste", // note: this is *very* similar to ctrl-v in vim, but ctrl-shift-v on linux is the typical shortcut for paste when ctrl-v is already in use. 
- "ctrl-q": ["vim::PushOperator", { "Literal": {} }], - "ctrl-shift-q": ["vim::PushOperator", { "Literal": {} }], + "ctrl-q": ["vim::PushLiteral", {}], + "ctrl-shift-q": ["vim::PushLiteral", {}], "backspace": "vim::UndoReplace", "tab": "vim::Tab", "enter": "vim::Enter", @@ -376,9 +376,9 @@ "ctrl-c": "vim::ClearOperators", "ctrl-[": "vim::ClearOperators", "escape": "vim::ClearOperators", - "ctrl-k": ["vim::PushOperator", { "Digraph": {} }], - "ctrl-v": ["vim::PushOperator", { "Literal": {} }], - "ctrl-q": ["vim::PushOperator", { "Literal": {} }] + "ctrl-k": ["vim::PushDigraph", {}], + "ctrl-v": ["vim::PushLiteral", {}], + "ctrl-q": ["vim::PushLiteral", {}] } }, { @@ -394,10 +394,10 @@ "context": "vim_operator == a || vim_operator == i || vim_operator == cs", "bindings": { "w": "vim::Word", - "shift-w": ["vim::Word", { "ignorePunctuation": true }], + "shift-w": ["vim::Word", { "ignore_punctuation": true }], // Subword TextObject // "w": "vim::Subword", - // "shift-w": ["vim::Subword", { "ignorePunctuation": true }], + // "shift-w": ["vim::Subword", { "ignore_punctuation": true }], "t": "vim::Tag", "s": "vim::Sentence", "p": "vim::Paragraph", @@ -420,7 +420,7 @@ ">": "vim::AngleBrackets", "a": "vim::Argument", "i": "vim::IndentObj", - "shift-i": ["vim::IndentObj", { "includeBelow": true }], + "shift-i": ["vim::IndentObj", { "include_below": true }], "f": "vim::Method", "c": "vim::Class", "e": "vim::EntireFile" @@ -431,14 +431,14 @@ "bindings": { "c": "vim::CurrentLine", "d": "editor::Rename", // zed specific - "s": ["vim::PushOperator", { "ChangeSurrounds": {} }] + "s": ["vim::PushChangeSurrounds", {}] } }, { "context": "vim_operator == d", "bindings": { "d": "vim::CurrentLine", - "s": ["vim::PushOperator", "DeleteSurrounds"], + "s": "vim::PushDeleteSurrounds", "o": "editor::ToggleSelectedDiffHunks", // "d o" "p": "editor::RevertSelectedHunks" // "d p" } @@ -477,7 +477,7 @@ "context": "vim_operator == y", "bindings": { "y": "vim::CurrentLine", - "s": ["vim::PushOperator", { "AddSurrounds": {} }] + "s": ["vim::PushAddSurrounds", {}] } }, { @@ -571,30 +571,30 @@ "bindings": { // window related commands (ctrl-w X) "ctrl-w": null, - "ctrl-w left": ["workspace::ActivatePaneInDirection", "Left"], - "ctrl-w right": ["workspace::ActivatePaneInDirection", "Right"], - "ctrl-w up": ["workspace::ActivatePaneInDirection", "Up"], - "ctrl-w down": ["workspace::ActivatePaneInDirection", "Down"], - "ctrl-w ctrl-h": ["workspace::ActivatePaneInDirection", "Left"], - "ctrl-w ctrl-l": ["workspace::ActivatePaneInDirection", "Right"], - "ctrl-w ctrl-k": ["workspace::ActivatePaneInDirection", "Up"], - "ctrl-w ctrl-j": ["workspace::ActivatePaneInDirection", "Down"], - "ctrl-w h": ["workspace::ActivatePaneInDirection", "Left"], - "ctrl-w l": ["workspace::ActivatePaneInDirection", "Right"], - "ctrl-w k": ["workspace::ActivatePaneInDirection", "Up"], - "ctrl-w j": ["workspace::ActivatePaneInDirection", "Down"], - "ctrl-w shift-left": ["workspace::SwapPaneInDirection", "Left"], - "ctrl-w shift-right": ["workspace::SwapPaneInDirection", "Right"], - "ctrl-w shift-up": ["workspace::SwapPaneInDirection", "Up"], - "ctrl-w shift-down": ["workspace::SwapPaneInDirection", "Down"], - "ctrl-w shift-h": ["workspace::SwapPaneInDirection", "Left"], - "ctrl-w shift-l": ["workspace::SwapPaneInDirection", "Right"], - "ctrl-w shift-k": ["workspace::SwapPaneInDirection", "Up"], - "ctrl-w shift-j": ["workspace::SwapPaneInDirection", "Down"], - "ctrl-w >": ["vim::ResizePane", "Widen"], - "ctrl-w <": ["vim::ResizePane", "Narrow"], - "ctrl-w 
-": ["vim::ResizePane", "Shorten"], - "ctrl-w +": ["vim::ResizePane", "Lengthen"], + "ctrl-w left": "workspace::ActivatePaneLeft", + "ctrl-w right": "workspace::ActivatePaneRight", + "ctrl-w up": "workspace::ActivatePaneUp", + "ctrl-w down": "workspace::ActivatePaneDown", + "ctrl-w ctrl-h": "workspace::ActivatePaneLeft", + "ctrl-w ctrl-l": "workspace::ActivatePaneRight", + "ctrl-w ctrl-k": "workspace::ActivatePaneUp", + "ctrl-w ctrl-j": "workspace::ActivatePaneDown", + "ctrl-w h": "workspace::ActivatePaneLeft", + "ctrl-w l": "workspace::ActivatePaneRight", + "ctrl-w k": "workspace::ActivatePaneUp", + "ctrl-w j": "workspace::ActivatePaneDown", + "ctrl-w shift-left": "workspace::SwapPaneLeft", + "ctrl-w shift-right": "workspace::SwapPaneRight", + "ctrl-w shift-up": "workspace::SwapPaneUp", + "ctrl-w shift-down": "workspace::SwapPaneDown", + "ctrl-w shift-h": "workspace::SwapPaneLeft", + "ctrl-w shift-l": "workspace::SwapPaneRight", + "ctrl-w shift-k": "workspace::SwapPaneUp", + "ctrl-w shift-j": "workspace::SwapPaneDown", + "ctrl-w >": "vim::ResizePaneRight", + "ctrl-w <": "vim::ResizePaneLeft", + "ctrl-w -": "vim::ResizePaneDown", + "ctrl-w +": "vim::ResizePaneUp", "ctrl-w _": "vim::MaximizePane", "ctrl-w =": "vim::ResetPaneSizes", "ctrl-w g t": "pane::ActivateNextItem", diff --git a/assets/settings/default.json b/assets/settings/default.json index ca5247d19bc43103d4ad411f37479000d2c3cc27..3369807b324a8f4e15979e1f0d950179c5406b25 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -25,7 +25,7 @@ // Features that can be globally enabled or disabled "features": { // Which edit prediction provider to use. - "inline_completion_provider": "copilot" + "edit_prediction_provider": "copilot" }, // The name of a font to use for rendering text in the editor "buffer_font_family": "Zed Plex Mono", @@ -170,7 +170,7 @@ "show_signature_help_after_edits": false, /// Whether to show the edit predictions next to the completions provided by a language server. /// Only has an effect if edit prediction provider supports it. - "show_inline_completions_in_menu": true, + "show_edit_predictions_in_menu": true, // Whether to show wrap guides (vertical rulers) in the editor. // Setting this to true will show a guide at the 'preferred_line_length' value // if 'soft_wrap' is set to 'preferred_line_length', and will show any @@ -204,11 +204,11 @@ // no matter how they were inserted. "always_treat_brackets_as_autoclosed": false, // Controls whether edit predictions are shown immediately (true) - // or manually by triggering `editor::ShowInlineCompletion` (false). - "show_inline_completions": true, + // or manually by triggering `editor::ShowEditPrediction` (false). + "show_edit_predictions": true, // Controls whether edit predictions are shown in a given language scope. // Example: ["string", "comment"] - "inline_completions_disabled_in": [], + "edit_predictions_disabled_in": [], // Whether to show tabs and spaces in the editor. // This setting can take four values: // @@ -781,7 +781,7 @@ // 2. Load direnv configuration through the shell hook, works for POSIX shells and fish. // "load_direnv": "shell_hook" "load_direnv": "direct", - "inline_completions": { + "edit_predictions": { // A list of globs representing files that edit predictions should be disabled for. 
"disabled_globs": [ "**/.env*", diff --git a/crates/copilot/src/copilot.rs b/crates/copilot/src/copilot.rs index 6b65b8057c009127cd1e24de438e87730bdf0eef..497f36f6034e11e1983f8e1b914fa9f2a2b79647 100644 --- a/crates/copilot/src/copilot.rs +++ b/crates/copilot/src/copilot.rs @@ -17,7 +17,7 @@ use gpui::{ use http_client::github::get_release_by_tag_name; use http_client::HttpClient; use language::{ - language_settings::{all_language_settings, language_settings, InlineCompletionProvider}, + language_settings::{all_language_settings, language_settings, EditPredictionProvider}, point_from_lsp, point_to_lsp, Anchor, Bias, Buffer, BufferSnapshot, Language, PointUtf16, ToPointUtf16, }; @@ -368,8 +368,8 @@ impl Copilot { let server_id = self.server_id; let http = self.http.clone(); let node_runtime = self.node_runtime.clone(); - if all_language_settings(None, cx).inline_completions.provider - == InlineCompletionProvider::Copilot + if all_language_settings(None, cx).edit_predictions.provider + == EditPredictionProvider::Copilot { if matches!(self.server, CopilotServer::Disabled) { let start_task = cx diff --git a/crates/copilot/src/copilot_completion_provider.rs b/crates/copilot/src/copilot_completion_provider.rs index 51aa112849d9c3459916bd6f542ab75a4704f996..0e494056ec4521e1fe24e31e68258812d501ba38 100644 --- a/crates/copilot/src/copilot_completion_provider.rs +++ b/crates/copilot/src/copilot_completion_provider.rs @@ -1,7 +1,7 @@ use crate::{Completion, Copilot}; use anyhow::Result; use gpui::{App, Context, Entity, EntityId, Task}; -use inline_completion::{Direction, InlineCompletion, InlineCompletionProvider}; +use inline_completion::{Direction, EditPredictionProvider, InlineCompletion}; use language::{language_settings::AllLanguageSettings, Buffer, OffsetRangeExt, ToOffset}; use project::Project; use settings::Settings; @@ -48,7 +48,7 @@ impl CopilotCompletionProvider { } } -impl InlineCompletionProvider for CopilotCompletionProvider { +impl EditPredictionProvider for CopilotCompletionProvider { fn name() -> &'static str { "copilot" } @@ -301,7 +301,7 @@ mod tests { .await; let copilot_provider = cx.new(|_| CopilotCompletionProvider::new(copilot)); cx.update_editor(|editor, window, cx| { - editor.set_inline_completion_provider(Some(copilot_provider), window, cx) + editor.set_edit_prediction_provider(Some(copilot_provider), window, cx) }); cx.set_state(indoc! {" @@ -436,8 +436,8 @@ mod tests { assert_eq!(editor.display_text(cx), "one.copilot2\ntwo\nthree\n"); assert_eq!(editor.text(cx), "one.co\ntwo\nthree\n"); - // AcceptInlineCompletion when there is an active suggestion inserts it. - editor.accept_inline_completion(&Default::default(), window, cx); + // AcceptEditPrediction when there is an active suggestion inserts it. + editor.accept_edit_prediction(&Default::default(), window, cx); assert!(!editor.has_active_inline_completion()); assert_eq!(editor.display_text(cx), "one.copilot2\ntwo\nthree\n"); assert_eq!(editor.text(cx), "one.copilot2\ntwo\nthree\n"); @@ -482,7 +482,7 @@ mod tests { ); cx.update_editor(|editor, window, cx| { - editor.next_inline_completion(&Default::default(), window, cx) + editor.next_edit_prediction(&Default::default(), window, cx) }); executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); cx.update_editor(|editor, window, cx| { @@ -496,8 +496,8 @@ mod tests { assert_eq!(editor.text(cx), "fn foo() {\n \n}"); assert_eq!(editor.display_text(cx), "fn foo() {\n let x = 4;\n}"); - // Using AcceptInlineCompletion again accepts the suggestion. 
- editor.accept_inline_completion(&Default::default(), window, cx); + // Using AcceptEditPrediction again accepts the suggestion. + editor.accept_edit_prediction(&Default::default(), window, cx); assert!(!editor.has_active_inline_completion()); assert_eq!(editor.text(cx), "fn foo() {\n let x = 4;\n}"); assert_eq!(editor.display_text(cx), "fn foo() {\n let x = 4;\n}"); @@ -526,7 +526,7 @@ mod tests { .await; let copilot_provider = cx.new(|_| CopilotCompletionProvider::new(copilot)); cx.update_editor(|editor, window, cx| { - editor.set_inline_completion_provider(Some(copilot_provider), window, cx) + editor.set_edit_prediction_provider(Some(copilot_provider), window, cx) }); // Setup the editor with a completion request. @@ -650,7 +650,7 @@ mod tests { .await; let copilot_provider = cx.new(|_| CopilotCompletionProvider::new(copilot)); cx.update_editor(|editor, window, cx| { - editor.set_inline_completion_provider(Some(copilot_provider), window, cx) + editor.set_edit_prediction_provider(Some(copilot_provider), window, cx) }); cx.set_state(indoc! {" @@ -669,7 +669,7 @@ mod tests { vec![], ); cx.update_editor(|editor, window, cx| { - editor.next_inline_completion(&Default::default(), window, cx) + editor.next_edit_prediction(&Default::default(), window, cx) }); executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); cx.update_editor(|editor, window, cx| { @@ -740,7 +740,7 @@ mod tests { let copilot_provider = cx.new(|_| CopilotCompletionProvider::new(copilot)); editor .update(cx, |editor, window, cx| { - editor.set_inline_completion_provider(Some(copilot_provider), window, cx) + editor.set_edit_prediction_provider(Some(copilot_provider), window, cx) }) .unwrap(); @@ -758,7 +758,7 @@ mod tests { editor.change_selections(None, window, cx, |s| { s.select_ranges([Point::new(1, 5)..Point::new(1, 5)]) }); - editor.next_inline_completion(&Default::default(), window, cx); + editor.next_edit_prediction(&Default::default(), window, cx); }); executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); _ = editor.update(cx, |editor, _, cx| { @@ -834,7 +834,7 @@ mod tests { .await; let copilot_provider = cx.new(|_| CopilotCompletionProvider::new(copilot)); cx.update_editor(|editor, window, cx| { - editor.set_inline_completion_provider(Some(copilot_provider), window, cx) + editor.set_edit_prediction_provider(Some(copilot_provider), window, cx) }); cx.set_state(indoc! 
{" @@ -862,7 +862,7 @@ mod tests { vec![], ); cx.update_editor(|editor, window, cx| { - editor.next_inline_completion(&Default::default(), window, cx) + editor.next_edit_prediction(&Default::default(), window, cx) }); executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); cx.update_editor(|editor, _, cx| { @@ -930,7 +930,7 @@ mod tests { async fn test_copilot_disabled_globs(executor: BackgroundExecutor, cx: &mut TestAppContext) { init_test(cx, |settings| { settings - .inline_completions + .edit_predictions .get_or_insert(Default::default()) .disabled_globs = Some(vec![".env*".to_string()]); }); @@ -992,7 +992,7 @@ mod tests { let copilot_provider = cx.new(|_| CopilotCompletionProvider::new(copilot)); editor .update(cx, |editor, window, cx| { - editor.set_inline_completion_provider(Some(copilot_provider), window, cx) + editor.set_edit_prediction_provider(Some(copilot_provider), window, cx) }) .unwrap(); diff --git a/crates/editor/src/actions.rs b/crates/editor/src/actions.rs index fe2ae0be49ed074886dc9a366a113961277afbc6..27266f3dd6ee89d4ef425059c29e6527932184f5 100644 --- a/crates/editor/src/actions.rs +++ b/crates/editor/src/actions.rs @@ -3,56 +3,64 @@ use super::*; use gpui::{action_as, action_with_deprecated_aliases}; use schemars::JsonSchema; use util::serde::default_true; - #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct SelectNext { #[serde(default)] pub replace_newest: bool, } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct SelectPrevious { #[serde(default)] pub replace_newest: bool, } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct MoveToBeginningOfLine { #[serde(default = "default_true")] pub stop_at_soft_wraps: bool, } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct SelectToBeginningOfLine { #[serde(default)] pub(super) stop_at_soft_wraps: bool, } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct MovePageUp { #[serde(default)] pub(super) center_cursor: bool, } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct MovePageDown { #[serde(default)] pub(super) center_cursor: bool, } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct MoveToEndOfLine { #[serde(default = "default_true")] pub stop_at_soft_wraps: bool, } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct SelectToEndOfLine { #[serde(default)] pub(super) stop_at_soft_wraps: bool, } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct ToggleCodeActions { // Display row from which the action was deployed. 
#[serde(default)] @@ -61,24 +69,28 @@ pub struct ToggleCodeActions { } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct ConfirmCompletion { #[serde(default)] pub item_ix: Option, } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct ComposeCompletion { #[serde(default)] pub item_ix: Option, } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct ConfirmCodeAction { #[serde(default)] pub item_ix: Option, } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct ToggleComments { #[serde(default)] pub advance_downwards: bool, @@ -87,60 +99,70 @@ pub struct ToggleComments { } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct FoldAt { #[serde(skip)] pub buffer_row: MultiBufferRow, } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct UnfoldAt { #[serde(skip)] pub buffer_row: MultiBufferRow, } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct MoveUpByLines { #[serde(default)] pub(super) lines: u32, } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct MoveDownByLines { #[serde(default)] pub(super) lines: u32, } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct SelectUpByLines { #[serde(default)] pub(super) lines: u32, } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct SelectDownByLines { #[serde(default)] pub(super) lines: u32, } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct ExpandExcerpts { #[serde(default)] pub(super) lines: u32, } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct ExpandExcerptsUp { #[serde(default)] pub(super) lines: u32, } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct ExpandExcerptsDown { #[serde(default)] pub(super) lines: u32, } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct ShowCompletions { #[serde(default)] pub(super) trigger: Option, @@ -150,23 +172,24 @@ pub struct ShowCompletions { pub struct HandleInput(pub String); #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct DeleteToNextWordEnd { #[serde(default)] pub ignore_newlines: bool, } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct DeleteToPreviousWordStart { #[serde(default)] pub ignore_newlines: bool, } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] -pub struct FoldAtLevel { - pub level: u32, -} +pub struct FoldAtLevel(pub u32); #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct SpawnNearestTask { #[serde(default)] pub reveal: task::RevealStrategy, @@ -216,9 +239,9 @@ impl_actions!( gpui::actions!( editor, [ - AcceptInlineCompletion, + AcceptEditPrediction, AcceptPartialCopilotSuggestion, - AcceptPartialInlineCompletion, + AcceptPartialEditPrediction, AddSelectionAbove, AddSelectionBelow, ApplyAllDiffHunks, @@ -310,7 +333,7 @@ gpui::actions!( Newline, NewlineAbove, NewlineBelow, - NextInlineCompletion, 
+ NextEditPrediction, NextScreen, OpenContextMenu, OpenExcerpts, @@ -325,7 +348,7 @@ gpui::actions!( PageDown, PageUp, Paste, - PreviousInlineCompletion, + PreviousEditPrediction, Redo, RedoSelection, Rename, @@ -361,7 +384,7 @@ gpui::actions!( SelectToStartOfParagraph, SelectUp, ShowCharacterPalette, - ShowInlineCompletion, + ShowEditPrediction, ShowSignatureHelp, ShuffleLines, SortLinesCaseInsensitive, @@ -375,7 +398,7 @@ gpui::actions!( ToggleGitBlameInline, ToggleIndentGuides, ToggleInlayHints, - ToggleInlineCompletions, + ToggleEditPrediction, ToggleLineNumbers, SwapSelectionEnds, SetMark, diff --git a/crates/editor/src/code_context_menus.rs b/crates/editor/src/code_context_menus.rs index f2752fb53262ed0d3ba089727d28fe5d32c4714b..a967750cccbbb63893ca1ede25d81068de024ae0 100644 --- a/crates/editor/src/code_context_menus.rs +++ b/crates/editor/src/code_context_menus.rs @@ -517,7 +517,6 @@ impl CompletionsMenu { } else { None }; - let color_swatch = completion .color() .map(|color| div().size_4().bg(color).rounded_sm()); diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 16fb9b23e435649468cf504b7fb1ab9c9c1e464b..24ec46ca61bba155db16921ad01ebc436062d6af 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -90,7 +90,7 @@ use hover_popover::{hide_hover, HoverState}; use indent_guides::ActiveIndentGuidesState; use inlay_hint_cache::{InlayHintCache, InlaySplice, InvalidationStrategy}; pub use inline_completion::Direction; -use inline_completion::{InlineCompletionProvider, InlineCompletionProviderHandle}; +use inline_completion::{EditPredictionProvider, InlineCompletionProviderHandle}; pub use items::MAX_TAB_TITLE_LEN; use itertools::Itertools; use language::{ @@ -674,7 +674,7 @@ pub struct Editor { pending_mouse_down: Option>>>, gutter_hovered: bool, hovered_link_state: Option, - inline_completion_provider: Option, + edit_prediction_provider: Option, code_action_providers: Vec>, active_inline_completion: Option, /// Used to prevent flickering as the user types while the menu is open @@ -1371,7 +1371,7 @@ impl Editor { hover_state: Default::default(), pending_mouse_down: None, hovered_link_state: Default::default(), - inline_completion_provider: None, + edit_prediction_provider: None, active_inline_completion: None, stale_inline_completion_in_menu: None, previewing_inline_completion: false, @@ -1524,10 +1524,10 @@ impl Editor { if self.has_active_inline_completion() { key_context.add("copilot_suggestion"); - key_context.add("inline_completion"); + key_context.add("edit_prediction"); - if showing_completions || self.inline_completion_requires_modifier(cx) { - key_context.add("inline_completion_requires_modifier"); + if showing_completions || self.edit_prediction_requires_modifier(cx) { + key_context.add("edit_prediction_requires_modifier"); } } @@ -1737,15 +1737,15 @@ impl Editor { self.semantics_provider = provider; } - pub fn set_inline_completion_provider( + pub fn set_edit_prediction_provider( &mut self, provider: Option>, window: &mut Window, cx: &mut Context, ) where - T: InlineCompletionProvider, + T: EditPredictionProvider, { - self.inline_completion_provider = + self.edit_prediction_provider = provider.map(|provider| RegisteredInlineCompletionProvider { _subscription: cx.observe_in(&provider, window, |this, _, window, cx| { if this.focus_handle.is_focused(window) { @@ -1877,7 +1877,7 @@ impl Editor { pub fn toggle_inline_completions( &mut self, - _: &ToggleInlineCompletions, + _: &ToggleEditPrediction, window: &mut Window, cx: &mut 
Context, ) { @@ -1900,11 +1900,11 @@ impl Editor { pub fn set_show_inline_completions( &mut self, - show_inline_completions: Option, + show_edit_predictions: Option, window: &mut Window, cx: &mut Context, ) { - self.show_inline_completions_override = show_inline_completions; + self.show_inline_completions_override = show_edit_predictions; self.refresh_inline_completion(false, true, window, cx); } @@ -1932,7 +1932,7 @@ impl Editor { scope.override_name().map_or(false, |scope_name| { settings - .inline_completions_disabled_in + .edit_predictions_disabled_in .iter() .any(|s| s == scope_name) }) @@ -3015,7 +3015,7 @@ impl Editor { } let trigger_in_words = - this.show_inline_completions_in_menu(cx) || !had_active_inline_completion; + this.show_edit_predictions_in_menu(cx) || !had_active_inline_completion; this.trigger_completion_on_input(&text, trigger_in_words, window, cx); linked_editing_ranges::refresh_linked_ranges(this, window, cx); this.refresh_inline_completion(true, false, window, cx); @@ -3908,7 +3908,7 @@ impl Editor { *editor.context_menu.borrow_mut() = Some(CodeContextMenu::Completions(menu)); - if editor.show_inline_completions_in_menu(cx) { + if editor.show_edit_predictions_in_menu(cx) { editor.update_visible_inline_completion(window, cx); } else { editor.discard_inline_completion(false, cx); @@ -3922,7 +3922,7 @@ impl Editor { // If it was already hidden and we don't show inline // completions in the menu, we should also show the // inline-completion when available. - if was_hidden && editor.show_inline_completions_in_menu(cx) { + if was_hidden && editor.show_edit_predictions_in_menu(cx) { editor.update_visible_inline_completion(window, cx); } } @@ -3972,7 +3972,7 @@ impl Editor { let entries = completions_menu.entries.borrow(); let mat = entries.get(item_ix.unwrap_or(completions_menu.selected_item))?; - if self.show_inline_completions_in_menu(cx) { + if self.show_edit_predictions_in_menu(cx) { self.discard_inline_completion(true, cx); } let candidate_id = mat.candidate_id; @@ -4653,7 +4653,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) -> Option<()> { - let provider = self.inline_completion_provider()?; + let provider = self.edit_prediction_provider()?; let cursor = self.selections.newest_anchor().head(); let (buffer, cursor_buffer_position) = self.buffer.read(cx).text_anchor_for_position(cursor, cx)?; @@ -4694,7 +4694,7 @@ impl Editor { } } - fn inline_completion_requires_modifier(&self, cx: &App) -> bool { + fn edit_prediction_requires_modifier(&self, cx: &App) -> bool { let cursor = self.selections.newest_anchor().head(); self.buffer @@ -4731,7 +4731,7 @@ impl Editor { buffer.file(), cx, ) - .show_inline_completions + .show_edit_predictions } } @@ -4753,7 +4753,7 @@ impl Editor { cx: &App, ) -> bool { maybe!({ - let provider = self.inline_completion_provider()?; + let provider = self.edit_prediction_provider()?; if !provider.is_enabled(&buffer, buffer_position, cx) { return Some(false); } @@ -4773,7 +4773,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) -> Option<()> { - let provider = self.inline_completion_provider()?; + let provider = self.edit_prediction_provider()?; let cursor = self.selections.newest_anchor().head(); let (buffer, cursor_buffer_position) = self.buffer.read(cx).text_anchor_for_position(cursor, cx)?; @@ -4791,7 +4791,7 @@ impl Editor { pub fn show_inline_completion( &mut self, - _: &ShowInlineCompletion, + _: &ShowEditPrediction, window: &mut Window, cx: &mut Context, ) { @@ -4826,9 +4826,9 @@ impl Editor { .detach(); } - pub fn 
next_inline_completion( + pub fn next_edit_prediction( &mut self, - _: &NextInlineCompletion, + _: &NextEditPrediction, window: &mut Window, cx: &mut Context, ) { @@ -4844,9 +4844,9 @@ impl Editor { } } - pub fn previous_inline_completion( + pub fn previous_edit_prediction( &mut self, - _: &PreviousInlineCompletion, + _: &PreviousEditPrediction, window: &mut Window, cx: &mut Context, ) { @@ -4862,9 +4862,9 @@ impl Editor { } } - pub fn accept_inline_completion( + pub fn accept_edit_prediction( &mut self, - _: &AcceptInlineCompletion, + _: &AcceptEditPrediction, window: &mut Window, cx: &mut Context, ) { @@ -4885,7 +4885,7 @@ impl Editor { } } - if self.show_inline_completions_in_menu(cx) { + if self.show_edit_predictions_in_menu(cx) { self.hide_context_menu(window, cx); } @@ -4904,7 +4904,7 @@ impl Editor { }); } InlineCompletion::Edit { edits, .. } => { - if let Some(provider) = self.inline_completion_provider() { + if let Some(provider) = self.edit_prediction_provider() { provider.accept(cx); } @@ -4931,7 +4931,7 @@ impl Editor { pub fn accept_partial_inline_completion( &mut self, - _: &AcceptPartialInlineCompletion, + _: &AcceptPartialEditPrediction, window: &mut Window, cx: &mut Context, ) { @@ -4988,7 +4988,7 @@ impl Editor { self.refresh_inline_completion(true, true, window, cx); cx.notify(); } else { - self.accept_inline_completion(&Default::default(), window, cx); + self.accept_edit_prediction(&Default::default(), window, cx); } } } @@ -5003,7 +5003,7 @@ impl Editor { self.report_inline_completion_event(false, cx); } - if let Some(provider) = self.inline_completion_provider() { + if let Some(provider) = self.edit_prediction_provider() { provider.discard(cx); } @@ -5011,7 +5011,7 @@ impl Editor { } fn report_inline_completion_event(&self, accepted: bool, cx: &App) { - let Some(provider) = self.inline_completion_provider() else { + let Some(provider) = self.edit_prediction_provider() else { return; }; @@ -5064,7 +5064,7 @@ impl Editor { cx: &App, ) -> bool { if self.previewing_inline_completion - || !self.show_inline_completions_in_menu(cx) + || !self.show_edit_predictions_in_menu(cx) || !self.should_show_inline_completions(cx) { return false; @@ -5074,7 +5074,7 @@ impl Editor { return true; } - has_completion && self.inline_completion_requires_modifier(cx) + has_completion && self.edit_prediction_requires_modifier(cx) } fn update_inline_completion_preview( @@ -5083,7 +5083,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - if !self.show_inline_completions_in_menu(cx) { + if !self.show_edit_predictions_in_menu(cx) { return; } @@ -5103,7 +5103,7 @@ impl Editor { let offset_selection = selection.map(|endpoint| endpoint.to_offset(&multibuffer)); let excerpt_id = cursor.excerpt_id; - let show_in_menu = self.show_inline_completions_in_menu(cx); + let show_in_menu = self.show_edit_predictions_in_menu(cx); let completions_menu_has_precedence = !show_in_menu && (self.context_menu.borrow().is_some() || (!self.completion_tasks.is_empty() && !self.has_active_inline_completion())); @@ -5123,7 +5123,7 @@ impl Editor { } self.take_active_inline_completion(cx); - let provider = self.inline_completion_provider()?; + let provider = self.edit_prediction_provider()?; let (buffer, cursor_buffer_position) = self.buffer.read(cx).text_anchor_for_position(cursor, cx)?; @@ -5258,20 +5258,20 @@ impl Editor { Some(()) } - pub fn inline_completion_provider(&self) -> Option> { - Some(self.inline_completion_provider.as_ref()?.provider.clone()) + pub fn edit_prediction_provider(&self) -> Option> { + 
Some(self.edit_prediction_provider.as_ref()?.provider.clone()) } - fn show_inline_completions_in_menu(&self, cx: &App) -> bool { + fn show_edit_predictions_in_menu(&self, cx: &App) -> bool { let by_provider = matches!( self.menu_inline_completions_policy, MenuInlineCompletionsPolicy::ByProvider ); by_provider - && EditorSettings::get_global(cx).show_inline_completions_in_menu + && EditorSettings::get_global(cx).show_edit_predictions_in_menu && self - .inline_completion_provider() + .edit_prediction_provider() .map_or(false, |provider| provider.show_completions_in_menu()) } @@ -5524,7 +5524,7 @@ impl Editor { window: &Window, cx: &mut Context, ) -> Option { - let provider = self.inline_completion_provider.as_ref()?; + let provider = self.edit_prediction_provider.as_ref()?; if provider.provider.needs_terms_acceptance(cx) { return Some( @@ -11808,7 +11808,7 @@ impl Editor { return; } - let fold_at_level = fold_at.level; + let fold_at_level = fold_at.0; let snapshot = self.buffer.read(cx).snapshot(cx); let mut to_fold = Vec::new(); let mut stack = vec![(0, snapshot.max_row().0, 1)]; @@ -14202,14 +14202,14 @@ impl Editor { .get("vim_mode") == Some(&serde_json::Value::Bool(true)); - let edit_predictions_provider = all_language_settings(file, cx).inline_completions.provider; + let edit_predictions_provider = all_language_settings(file, cx).edit_predictions.provider; let copilot_enabled = edit_predictions_provider - == language::language_settings::InlineCompletionProvider::Copilot; + == language::language_settings::EditPredictionProvider::Copilot; let copilot_enabled_for_language = self .buffer .read(cx) .settings_at(0, cx) - .show_inline_completions; + .show_edit_predictions; let project = project.read(cx); telemetry::event!( diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index 098ff62dae5f5d03642fdb943a3d3273348aab5b..9203a8f95366dbd3bde7cb95f63d2bd78c7ed7a6 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -35,7 +35,7 @@ pub struct EditorSettings { pub auto_signature_help: bool, pub show_signature_help_after_edits: bool, pub jupyter: Jupyter, - pub show_inline_completions_in_menu: bool, + pub show_edit_predictions_in_menu: bool, } #[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] @@ -372,7 +372,7 @@ pub struct EditorSettingsContent { /// Only has an effect if edit prediction provider supports it. /// /// Default: true - pub show_inline_completions_in_menu: Option, + pub show_edit_predictions_in_menu: Option, /// Jupyter REPL settings. 
pub jupyter: Option, diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 5fed6d16434bf1cccaf987270b57b2d8e93825de..5247c629c06919ce503c02545193de7143e7cafc 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -1159,7 +1159,7 @@ fn test_fold_at_level(cx: &mut TestAppContext) { }); _ = editor.update(cx, |editor, window, cx| { - editor.fold_at_level(&FoldAtLevel { level: 2 }, window, cx); + editor.fold_at_level(&FoldAtLevel(2), window, cx); assert_eq!( editor.display_text(cx), " @@ -1183,7 +1183,7 @@ fn test_fold_at_level(cx: &mut TestAppContext) { .unindent(), ); - editor.fold_at_level(&FoldAtLevel { level: 1 }, window, cx); + editor.fold_at_level(&FoldAtLevel(1), window, cx); assert_eq!( editor.display_text(cx), " @@ -1198,7 +1198,7 @@ fn test_fold_at_level(cx: &mut TestAppContext) { ); editor.unfold_all(&UnfoldAll, window, cx); - editor.fold_at_level(&FoldAtLevel { level: 0 }, window, cx); + editor.fold_at_level(&FoldAtLevel(0), window, cx); assert_eq!( editor.display_text(cx), " diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index ab211f3d3e38d592f540f861466492c6ac0546c3..04bcf722625bfb40016f4d7b16b0b246f97c877e 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -475,8 +475,8 @@ impl EditorElement { } }); register_action(editor, window, Editor::show_signature_help); - register_action(editor, window, Editor::next_inline_completion); - register_action(editor, window, Editor::previous_inline_completion); + register_action(editor, window, Editor::next_edit_prediction); + register_action(editor, window, Editor::previous_edit_prediction); register_action(editor, window, Editor::show_inline_completion); register_action(editor, window, Editor::context_menu_first); register_action(editor, window, Editor::context_menu_prev); @@ -486,7 +486,7 @@ impl EditorElement { register_action(editor, window, Editor::unique_lines_case_insensitive); register_action(editor, window, Editor::unique_lines_case_sensitive); register_action(editor, window, Editor::accept_partial_inline_completion); - register_action(editor, window, Editor::accept_inline_completion); + register_action(editor, window, Editor::accept_edit_prediction); register_action(editor, window, Editor::revert_file); register_action(editor, window, Editor::revert_selected_hunks); register_action(editor, window, Editor::apply_all_diff_hunks); @@ -3197,7 +3197,7 @@ impl EditorElement { #[cfg(target_os = "macos")] { // let bindings = window.bindings_for_action_in( - // &crate::AcceptInlineCompletion, + // &crate::AcceptEditPrediction, // &self.editor.focus_handle(cx), // ); @@ -5770,7 +5770,7 @@ fn inline_completion_accept_indicator( } } } else { - let bindings = window.bindings_for_action_in(&crate::AcceptInlineCompletion, &focus_handle); + let bindings = window.bindings_for_action_in(&crate::AcceptEditPrediction, &focus_handle); if let Some(keystroke) = bindings .last() .and_then(|binding| binding.keystrokes().first()) diff --git a/crates/editor/src/inline_completion_tests.rs b/crates/editor/src/inline_completion_tests.rs index a5b9e8a3e06d5397bf6502c6239c948fa405de78..258a8780944052a1c398fba33cce843cc585de6d 100644 --- a/crates/editor/src/inline_completion_tests.rs +++ b/crates/editor/src/inline_completion_tests.rs @@ -1,6 +1,6 @@ use gpui::{prelude::*, Entity}; use indoc::indoc; -use inline_completion::InlineCompletionProvider; +use inline_completion::EditPredictionProvider; use language::{Language, LanguageConfig}; use 
multi_buffer::{Anchor, MultiBufferSnapshot, ToPoint}; use project::Project; @@ -315,7 +315,7 @@ fn assert_editor_active_move_completion( fn accept_completion(cx: &mut EditorTestContext) { cx.update_editor(|editor, window, cx| { - editor.accept_inline_completion(&crate::AcceptInlineCompletion, window, cx) + editor.accept_edit_prediction(&crate::AcceptEditPrediction, window, cx) }) } @@ -345,7 +345,7 @@ fn assign_editor_completion_provider( cx: &mut EditorTestContext, ) { cx.update_editor(|editor, window, cx| { - editor.set_inline_completion_provider(Some(provider), window, cx); + editor.set_edit_prediction_provider(Some(provider), window, cx); }) } @@ -363,7 +363,7 @@ impl FakeInlineCompletionProvider { } } -impl InlineCompletionProvider for FakeInlineCompletionProvider { +impl EditPredictionProvider for FakeInlineCompletionProvider { fn name() -> &'static str { "fake-completion-provider" } diff --git a/crates/inline_completion/src/inline_completion.rs b/crates/inline_completion/src/inline_completion.rs index 7810d9978510bc5297127d4125e6366c79d6cb1f..6a1754c3773e5f1361930d81e6c9be9fc0c3e8d9 100644 --- a/crates/inline_completion/src/inline_completion.rs +++ b/crates/inline_completion/src/inline_completion.rs @@ -38,7 +38,7 @@ impl DataCollectionState { } } -pub trait InlineCompletionProvider: 'static + Sized { +pub trait EditPredictionProvider: 'static + Sized { fn name() -> &'static str; fn display_name() -> &'static str; fn show_completions_in_menu() -> bool; @@ -126,7 +126,7 @@ pub trait InlineCompletionProviderHandle { impl InlineCompletionProviderHandle for Entity where - T: InlineCompletionProvider, + T: EditPredictionProvider, { fn name(&self) -> &'static str { T::name() diff --git a/crates/inline_completion_button/src/inline_completion_button.rs b/crates/inline_completion_button/src/inline_completion_button.rs index 19cb8b4843ca01ce2d7f197901161aeecb2a3c11..5ba3d527722e52a9d3006e240fa5a705a5927928 100644 --- a/crates/inline_completion_button/src/inline_completion_button.rs +++ b/crates/inline_completion_button/src/inline_completion_button.rs @@ -1,7 +1,7 @@ use anyhow::Result; use client::UserStore; use copilot::{Copilot, Status}; -use editor::{actions::ShowInlineCompletion, scroll::Autoscroll, Editor}; +use editor::{actions::ShowEditPrediction, scroll::Autoscroll, Editor}; use feature_flags::{ FeatureFlagAppExt, PredictEditsFeatureFlag, PredictEditsRateCompletionsFeatureFlag, }; @@ -13,9 +13,7 @@ use gpui::{ }; use indoc::indoc; use language::{ - language_settings::{ - self, all_language_settings, AllLanguageSettings, InlineCompletionProvider, - }, + language_settings::{self, all_language_settings, AllLanguageSettings, EditPredictionProvider}, File, Language, }; use regex::Regex; @@ -37,7 +35,7 @@ use zed_actions::OpenBrowser; use zeta::RateCompletionModal; actions!(zeta, [RateCompletions]); -actions!(inline_completion, [ToggleMenu]); +actions!(edit_prediction, [ToggleMenu]); const COPILOT_SETTINGS_URL: &str = "https://github.com/settings/copilot"; @@ -49,7 +47,7 @@ pub struct InlineCompletionButton { editor_focus_handle: Option, language: Option>, file: Option>, - inline_completion_provider: Option>, + edit_prediction_provider: Option>, fs: Arc, workspace: WeakEntity, user_store: Entity, @@ -67,10 +65,10 @@ impl Render for InlineCompletionButton { fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { let all_language_settings = all_language_settings(None, cx); - match all_language_settings.inline_completions.provider { - InlineCompletionProvider::None => 
div(), + match all_language_settings.edit_predictions.provider { + EditPredictionProvider::None => div(), - InlineCompletionProvider::Copilot => { + EditPredictionProvider::Copilot => { let Some(copilot) = Copilot::global(cx) else { return div(); }; @@ -146,7 +144,7 @@ impl Render for InlineCompletionButton { ) } - InlineCompletionProvider::Supermaven => { + EditPredictionProvider::Supermaven => { let Some(supermaven) = Supermaven::global(cx) else { return div(); }; @@ -196,7 +194,7 @@ impl Render for InlineCompletionButton { set_completion_provider( fs.clone(), cx, - InlineCompletionProvider::Copilot, + EditPredictionProvider::Copilot, ) }, ) @@ -226,7 +224,7 @@ impl Render for InlineCompletionButton { ); } - InlineCompletionProvider::Zed => { + EditPredictionProvider::Zed => { if !cx.has_flag::() { return div(); } @@ -307,7 +305,7 @@ impl Render for InlineCompletionButton { .with_handle(self.popover_menu_handle.clone()); let is_refreshing = self - .inline_completion_provider + .edit_prediction_provider .as_ref() .map_or(false, |provider| provider.is_refreshing(cx)); @@ -352,7 +350,7 @@ impl InlineCompletionButton { editor_focus_handle: None, language: None, file: None, - inline_completion_provider: None, + edit_prediction_provider: None, popover_menu_handle, workspace, fs, @@ -375,11 +373,7 @@ impl InlineCompletionButton { .entry("Use Supermaven", None, { let fs = fs.clone(); move |_window, cx| { - set_completion_provider( - fs.clone(), - cx, - InlineCompletionProvider::Supermaven, - ) + set_completion_provider(fs.clone(), cx, EditPredictionProvider::Supermaven) } }) }) @@ -394,7 +388,7 @@ impl InlineCompletionButton { let fs = fs.clone(); let language_enabled = language_settings::language_settings(Some(language.name()), None, cx) - .show_inline_completions; + .show_edit_predictions; menu = menu.toggleable_entry( language.name(), @@ -418,7 +412,7 @@ impl InlineCompletionButton { ); menu = menu.separator().header("Privacy Settings"); - if let Some(provider) = &self.inline_completion_provider { + if let Some(provider) = &self.edit_prediction_provider { let data_collection = provider.data_collection_state(cx); if data_collection.is_supported() { let provider = provider.clone(); @@ -491,12 +485,12 @@ impl InlineCompletionButton { .separator() .entry( "Predict Edit at Cursor", - Some(Box::new(ShowInlineCompletion)), + Some(Box::new(ShowEditPrediction)), { let editor_focus_handle = editor_focus_handle.clone(); move |window, cx| { - editor_focus_handle.dispatch_action(&ShowInlineCompletion, window, cx); + editor_focus_handle.dispatch_action(&ShowEditPrediction, window, cx); } }, ) @@ -579,7 +573,7 @@ impl InlineCompletionButton { .unwrap_or(true), ) }; - self.inline_completion_provider = editor.inline_completion_provider(); + self.edit_prediction_provider = editor.edit_prediction_provider(); self.language = language.cloned(); self.file = file; self.editor_focus_handle = Some(editor.focus_handle(cx)); @@ -664,7 +658,7 @@ async fn open_disabled_globs_setting_in_editor( // Ensure that we always have "inline_completions { "disabled_globs": [] }" let edits = settings.edits_for_update::(&text, |file| { - file.inline_completions + file.edit_predictions .get_or_insert_with(Default::default) .disabled_globs .get_or_insert_with(Vec::new); @@ -696,17 +690,17 @@ async fn open_disabled_globs_setting_in_editor( } fn toggle_inline_completions_globally(fs: Arc, cx: &mut App) { - let show_inline_completions = all_language_settings(None, cx).show_inline_completions(None, cx); + let show_edit_predictions = 
all_language_settings(None, cx).show_inline_completions(None, cx); update_settings_file::(fs, cx, move |file, _| { - file.defaults.show_inline_completions = Some(!show_inline_completions) + file.defaults.show_edit_predictions = Some(!show_edit_predictions) }); } -fn set_completion_provider(fs: Arc, cx: &mut App, provider: InlineCompletionProvider) { +fn set_completion_provider(fs: Arc, cx: &mut App, provider: EditPredictionProvider) { update_settings_file::(fs, cx, move |file, _| { file.features .get_or_insert(Default::default()) - .inline_completion_provider = Some(provider); + .edit_prediction_provider = Some(provider); }); } @@ -715,13 +709,13 @@ fn toggle_show_inline_completions_for_language( fs: Arc, cx: &mut App, ) { - let show_inline_completions = + let show_edit_predictions = all_language_settings(None, cx).show_inline_completions(Some(&language), cx); update_settings_file::(fs, cx, move |file, _| { file.languages .entry(language.name()) .or_default() - .show_inline_completions = Some(!show_inline_completions); + .show_edit_predictions = Some(!show_edit_predictions); }); } @@ -729,6 +723,6 @@ fn hide_copilot(fs: Arc, cx: &mut App) { update_settings_file::(fs, cx, move |file, _| { file.features .get_or_insert(Default::default()) - .inline_completion_provider = Some(InlineCompletionProvider::None); + .edit_prediction_provider = Some(EditPredictionProvider::None); }); } diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index fb8eb28a61194ce4e780453d462106cc34cff591..c8f47f7c8362221610d3369e25ea7a447bb450dd 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -60,7 +60,7 @@ pub fn all_language_settings<'a>( #[derive(Debug, Clone)] pub struct AllLanguageSettings { /// The edit prediction settings. - pub inline_completions: InlineCompletionSettings, + pub edit_predictions: EditPredictionSettings, defaults: LanguageSettings, languages: HashMap, pub(crate) file_types: HashMap, GlobSet>, @@ -110,11 +110,11 @@ pub struct LanguageSettings { /// - `"..."` - A placeholder to refer to the **rest** of the registered language servers for this language. pub language_servers: Vec, /// Controls whether edit predictions are shown immediately (true) - /// or manually by triggering `editor::ShowInlineCompletion` (false). - pub show_inline_completions: bool, + /// or manually by triggering `editor::ShowEditPrediction` (false). + pub show_edit_predictions: bool, /// Controls whether edit predictions are shown in the given language /// scopes. - pub inline_completions_disabled_in: Vec, + pub edit_predictions_disabled_in: Vec, /// Whether to show tabs and spaces in the editor. pub show_whitespaces: ShowWhitespaceSetting, /// Whether to start a new line with a comment when a previous line is a comment as well. @@ -198,7 +198,7 @@ impl LanguageSettings { /// The provider that supplies edit predictions. 
#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] -pub enum InlineCompletionProvider { +pub enum EditPredictionProvider { None, #[default] Copilot, @@ -206,13 +206,13 @@ pub enum InlineCompletionProvider { Zed, } -impl InlineCompletionProvider { +impl EditPredictionProvider { pub fn is_zed(&self) -> bool { match self { - InlineCompletionProvider::Zed => true, - InlineCompletionProvider::None - | InlineCompletionProvider::Copilot - | InlineCompletionProvider::Supermaven => false, + EditPredictionProvider::Zed => true, + EditPredictionProvider::None + | EditPredictionProvider::Copilot + | EditPredictionProvider::Supermaven => false, } } } @@ -220,9 +220,9 @@ impl InlineCompletionProvider { /// The settings for edit predictions, such as [GitHub Copilot](https://github.com/features/copilot) /// or [Supermaven](https://supermaven.com). #[derive(Clone, Debug, Default)] -pub struct InlineCompletionSettings { +pub struct EditPredictionSettings { /// The provider that supplies edit predictions. - pub provider: InlineCompletionProvider, + pub provider: EditPredictionProvider, /// A list of globs representing files that edit predictions should be disabled for. pub disabled_globs: Vec, /// When to show edit predictions previews in buffer. @@ -248,7 +248,7 @@ pub struct AllLanguageSettingsContent { pub features: Option, /// The edit prediction settings. #[serde(default)] - pub inline_completions: Option, + pub edit_predictions: Option, /// The default language settings. #[serde(flatten)] pub defaults: LanguageSettingsContent, @@ -347,11 +347,11 @@ pub struct LanguageSettingsContent { #[serde(default)] pub language_servers: Option>, /// Controls whether edit predictions are shown immediately (true) - /// or manually by triggering `editor::ShowInlineCompletion` (false). + /// or manually by triggering `editor::ShowEditPrediction` (false). /// /// Default: true #[serde(default)] - pub show_inline_completions: Option, + pub show_edit_predictions: Option, /// Controls whether edit predictions are shown in the given language /// scopes. /// @@ -359,7 +359,7 @@ pub struct LanguageSettingsContent { /// /// Default: [] #[serde(default)] - pub inline_completions_disabled_in: Option>, + pub edit_predictions_disabled_in: Option>, /// Whether to show tabs and spaces in the editor. #[serde(default)] pub show_whitespaces: Option, @@ -442,7 +442,7 @@ pub struct FeaturesContent { /// Whether the GitHub Copilot feature is enabled. pub copilot: Option, /// Determines which edit prediction provider to use. - pub inline_completion_provider: Option, + pub edit_prediction_provider: Option, } /// Controls the soft-wrapping behavior in the editor. @@ -906,7 +906,7 @@ impl AllLanguageSettings { /// Returns whether edit predictions are enabled for the given path. pub fn inline_completions_enabled_for_path(&self, path: &Path) -> bool { !self - .inline_completions + .edit_predictions .disabled_globs .iter() .any(|glob| glob.is_match(path)) @@ -915,12 +915,12 @@ impl AllLanguageSettings { /// Returns whether edit predictions are enabled for the given language and path. pub fn show_inline_completions(&self, language: Option<&Arc>, cx: &App) -> bool { self.language(None, language.map(|l| l.name()).as_ref(), cx) - .show_inline_completions + .show_edit_predictions } /// Returns the edit predictions preview mode for the given language and path. 
pub fn inline_completions_preview_mode(&self) -> InlineCompletionPreviewMode { - self.inline_completions.inline_preview + self.edit_predictions.inline_preview } } @@ -1015,18 +1015,18 @@ impl settings::Settings for AllLanguageSettings { } let mut copilot_enabled = default_value.features.as_ref().and_then(|f| f.copilot); - let mut inline_completion_provider = default_value + let mut edit_prediction_provider = default_value .features .as_ref() - .and_then(|f| f.inline_completion_provider); + .and_then(|f| f.edit_prediction_provider); let mut inline_completions_preview = default_value - .inline_completions + .edit_predictions .as_ref() .map(|inline_completions| inline_completions.inline_preview) .ok_or_else(Self::missing_default)?; let mut completion_globs: HashSet<&String> = default_value - .inline_completions + .edit_predictions .as_ref() .and_then(|c| c.disabled_globs.as_ref()) .map(|globs| globs.iter().collect()) @@ -1051,12 +1051,12 @@ impl settings::Settings for AllLanguageSettings { if let Some(provider) = user_settings .features .as_ref() - .and_then(|f| f.inline_completion_provider) + .and_then(|f| f.edit_prediction_provider) { - inline_completion_provider = Some(provider); + edit_prediction_provider = Some(provider); } - if let Some(inline_completions) = user_settings.inline_completions.as_ref() { + if let Some(inline_completions) = user_settings.edit_predictions.as_ref() { inline_completions_preview = inline_completions.inline_preview; if let Some(disabled_globs) = inline_completions.disabled_globs.as_ref() { @@ -1102,13 +1102,13 @@ impl settings::Settings for AllLanguageSettings { } Ok(Self { - inline_completions: InlineCompletionSettings { - provider: if let Some(provider) = inline_completion_provider { + edit_predictions: EditPredictionSettings { + provider: if let Some(provider) = edit_prediction_provider { provider } else if copilot_enabled.unwrap_or(true) { - InlineCompletionProvider::Copilot + EditPredictionProvider::Copilot } else { - InlineCompletionProvider::None + EditPredictionProvider::None }, disabled_globs: completion_globs .iter() @@ -1219,12 +1219,12 @@ fn merge_settings(settings: &mut LanguageSettings, src: &LanguageSettingsContent ); merge(&mut settings.language_servers, src.language_servers.clone()); merge( - &mut settings.show_inline_completions, - src.show_inline_completions, + &mut settings.show_edit_predictions, + src.show_edit_predictions, ); merge( - &mut settings.inline_completions_disabled_in, - src.inline_completions_disabled_in.clone(), + &mut settings.edit_predictions_disabled_in, + src.edit_predictions_disabled_in.clone(), ); merge(&mut settings.show_whitespaces, src.show_whitespaces); merge( diff --git a/crates/migrator/Cargo.toml b/crates/migrator/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..f5be671960f5530eb475da583067fc298f4c4f01 --- /dev/null +++ b/crates/migrator/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "migrator" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/migrator.rs" +doctest = false + +[dependencies] +collections.workspace = true +tree-sitter-json.workspace = true +tree-sitter.workspace = true +convert_case.workspace = true + +[dev-dependencies] +pretty_assertions.workspace = true diff --git a/crates/migrator/LICENSE-GPL b/crates/migrator/LICENSE-GPL new file mode 120000 index 0000000000000000000000000000000000000000..89e542f750cd3860a0598eff0dc34b56d7336dc4 --- /dev/null +++ 
b/crates/migrator/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/migrator/src/migrator.rs b/crates/migrator/src/migrator.rs new file mode 100644 index 0000000000000000000000000000000000000000..0d02a45fb890aaf787b4d750caa6a15c2b3f67fc --- /dev/null +++ b/crates/migrator/src/migrator.rs @@ -0,0 +1,863 @@ +use collections::HashMap; +use convert_case::{Case, Casing}; +use std::{cmp::Reverse, ops::Range, sync::LazyLock}; +use tree_sitter::{Query, QueryMatch}; + +fn migrate(text: &str, patterns: MigrationPatterns, query: &Query) -> Option { + let mut parser = tree_sitter::Parser::new(); + parser + .set_language(&tree_sitter_json::LANGUAGE.into()) + .unwrap(); + let syntax_tree = parser.parse(&text, None).unwrap(); + + let mut cursor = tree_sitter::QueryCursor::new(); + let matches = cursor.matches(query, syntax_tree.root_node(), text.as_bytes()); + + let mut edits = vec![]; + for mat in matches { + if let Some((_, callback)) = patterns.get(mat.pattern_index) { + edits.extend(callback(&text, &mat, query)); + } + } + + edits.sort_by_key(|(range, _)| (range.start, Reverse(range.end))); + edits.dedup_by(|(range_b, _), (range_a, _)| { + range_a.contains(&range_b.start) || range_a.contains(&range_b.end) + }); + + if edits.is_empty() { + None + } else { + let mut text = text.to_string(); + for (range, replacement) in edits.into_iter().rev() { + text.replace_range(range, &replacement); + } + Some(text) + } +} + +pub fn migrate_keymap(text: &str) -> Option { + let transformed_text = migrate( + text, + KEYMAP_MIGRATION_TRANSFORMATION_PATTERNS, + &KEYMAP_MIGRATION_TRANSFORMATION_QUERY, + ); + let replacement_text = migrate( + &transformed_text.as_ref().unwrap_or(&text.to_string()), + KEYMAP_MIGRATION_REPLACEMENT_PATTERNS, + &KEYMAP_MIGRATION_REPLACEMENT_QUERY, + ); + replacement_text.or(transformed_text) +} + +pub fn migrate_settings(text: &str) -> Option { + migrate( + &text, + SETTINGS_MIGRATION_PATTERNS, + &SETTINGS_MIGRATION_QUERY, + ) +} + +type MigrationPatterns = &'static [( + &'static str, + fn(&str, &QueryMatch, &Query) -> Option<(Range, String)>, +)]; + +static KEYMAP_MIGRATION_TRANSFORMATION_PATTERNS: MigrationPatterns = &[ + (ACTION_ARRAY_PATTERN, replace_array_with_single_string), + ( + ACTION_ARGUMENT_OBJECT_PATTERN, + replace_action_argument_object_with_single_value, + ), + (ACTION_STRING_PATTERN, rename_string_action), + (CONTEXT_PREDICATE_PATTERN, rename_context_key), +]; + +static KEYMAP_MIGRATION_TRANSFORMATION_QUERY: LazyLock = LazyLock::new(|| { + Query::new( + &tree_sitter_json::LANGUAGE.into(), + &KEYMAP_MIGRATION_TRANSFORMATION_PATTERNS + .iter() + .map(|pattern| pattern.0) + .collect::(), + ) + .unwrap() +}); + +const ACTION_ARRAY_PATTERN: &str = r#"(document + (array + (object + (pair + key: (string (string_content) @name) + value: ( + (object + (pair + key: (string) + value: ((array + . (string (string_content) @action_name) + . (string (string_content) @argument) + .)) @array + ) + ) + ) + ) + ) + ) + (#eq? @name "bindings") +)"#; + +fn replace_array_with_single_string( + contents: &str, + mat: &QueryMatch, + query: &Query, +) -> Option<(Range, String)> { + let array_ix = query.capture_index_for_name("array").unwrap(); + let action_name_ix = query.capture_index_for_name("action_name").unwrap(); + let argument_ix = query.capture_index_for_name("argument").unwrap(); + + let action_name = contents.get( + mat.nodes_for_capture_index(action_name_ix) + .next()? 
+ .byte_range(), + )?; + let argument = contents.get( + mat.nodes_for_capture_index(argument_ix) + .next()? + .byte_range(), + )?; + + let replacement = TRANSFORM_ARRAY.get(&(action_name, argument))?; + let replacement_as_string = format!("\"{replacement}\""); + let range_to_replace = mat.nodes_for_capture_index(array_ix).next()?.byte_range(); + + Some((range_to_replace, replacement_as_string)) +} + +#[rustfmt::skip] +static TRANSFORM_ARRAY: LazyLock> = LazyLock::new(|| { + HashMap::from_iter([ + // activate + (("workspace::ActivatePaneInDirection", "Up"), "workspace::ActivatePaneUp"), + (("workspace::ActivatePaneInDirection", "Down"), "workspace::ActivatePaneDown"), + (("workspace::ActivatePaneInDirection", "Left"), "workspace::ActivatePaneLeft"), + (("workspace::ActivatePaneInDirection", "Right"), "workspace::ActivatePaneRight"), + // swap + (("workspace::SwapPaneInDirection", "Up"), "workspace::SwapPaneUp"), + (("workspace::SwapPaneInDirection", "Down"), "workspace::SwapPaneDown"), + (("workspace::SwapPaneInDirection", "Left"), "workspace::SwapPaneLeft"), + (("workspace::SwapPaneInDirection", "Right"), "workspace::SwapPaneRight"), + // menu + (("app_menu::NavigateApplicationMenuInDirection", "Left"), "app_menu::ActivateMenuLeft"), + (("app_menu::NavigateApplicationMenuInDirection", "Right"), "app_menu::ActivateMenuRight"), + // vim push + (("vim::PushOperator", "Change"), "vim::PushChange"), + (("vim::PushOperator", "Delete"), "vim::PushDelete"), + (("vim::PushOperator", "Yank"), "vim::PushYank"), + (("vim::PushOperator", "Replace"), "vim::PushReplace"), + (("vim::PushOperator", "DeleteSurrounds"), "vim::PushDeleteSurrounds"), + (("vim::PushOperator", "Mark"), "vim::PushMark"), + (("vim::PushOperator", "Indent"), "vim::PushIndent"), + (("vim::PushOperator", "Outdent"), "vim::PushOutdent"), + (("vim::PushOperator", "AutoIndent"), "vim::PushAutoIndent"), + (("vim::PushOperator", "Rewrap"), "vim::PushRewrap"), + (("vim::PushOperator", "ShellCommand"), "vim::PushShellCommand"), + (("vim::PushOperator", "Lowercase"), "vim::PushLowercase"), + (("vim::PushOperator", "Uppercase"), "vim::PushUppercase"), + (("vim::PushOperator", "OppositeCase"), "vim::PushOppositeCase"), + (("vim::PushOperator", "Register"), "vim::PushRegister"), + (("vim::PushOperator", "RecordRegister"), "vim::PushRecordRegister"), + (("vim::PushOperator", "ReplayRegister"), "vim::PushReplayRegister"), + (("vim::PushOperator", "ReplaceWithRegister"), "vim::PushReplaceWithRegister"), + (("vim::PushOperator", "ToggleComments"), "vim::PushToggleComments"), + // vim switch + (("vim::SwitchMode", "Normal"), "vim::SwitchToNormalMode"), + (("vim::SwitchMode", "Insert"), "vim::SwitchToInsertMode"), + (("vim::SwitchMode", "Replace"), "vim::SwitchToReplaceMode"), + (("vim::SwitchMode", "Visual"), "vim::SwitchToVisualMode"), + (("vim::SwitchMode", "VisualLine"), "vim::SwitchToVisualLineMode"), + (("vim::SwitchMode", "VisualBlock"), "vim::SwitchToVisualBlockMode"), + (("vim::SwitchMode", "HelixNormal"), "vim::SwitchToHelixNormalMode"), + // vim resize + (("vim::ResizePane", "Widen"), "vim::ResizePaneRight"), + (("vim::ResizePane", "Narrow"), "vim::ResizePaneLeft"), + (("vim::ResizePane", "Shorten"), "vim::ResizePaneDown"), + (("vim::ResizePane", "Lengthen"), "vim::ResizePaneUp"), + ]) +}); + +const ACTION_ARGUMENT_OBJECT_PATTERN: &str = r#"(document + (array + (object + (pair + key: (string (string_content) @name) + value: ( + (object + (pair + key: (string) + value: ((array + . (string (string_content) @action_name) + . 
(object + (pair + key: (string (string_content) @action_key) + value: (_) @argument)) + . ) @array + )) + ) + ) + ) + ) + ) + (#eq? @name "bindings") +)"#; + +/// [ "editor::FoldAtLevel", { "level": 1 } ] -> [ "editor::FoldAtLevel", 1 ] +fn replace_action_argument_object_with_single_value( + contents: &str, + mat: &QueryMatch, + query: &Query, +) -> Option<(Range, String)> { + let array_ix = query.capture_index_for_name("array").unwrap(); + let action_name_ix = query.capture_index_for_name("action_name").unwrap(); + let action_key_ix = query.capture_index_for_name("action_key").unwrap(); + let argument_ix = query.capture_index_for_name("argument").unwrap(); + + let action_name = contents.get( + mat.nodes_for_capture_index(action_name_ix) + .next()? + .byte_range(), + )?; + let action_key = contents.get( + mat.nodes_for_capture_index(action_key_ix) + .next()? + .byte_range(), + )?; + let argument = contents.get( + mat.nodes_for_capture_index(argument_ix) + .next()? + .byte_range(), + )?; + + let new_action_name = UNWRAP_OBJECTS.get(&action_name)?.get(&action_key)?; + + let range_to_replace = mat.nodes_for_capture_index(array_ix).next()?.byte_range(); + let replacement = format!("[\"{}\", {}]", new_action_name, argument); + Some((range_to_replace, replacement)) +} + +// "ctrl-k ctrl-1": [ "editor::PushOperator", { "Object": {} } ] -> [ "editor::vim::PushObject", {} ] +static UNWRAP_OBJECTS: LazyLock>> = LazyLock::new(|| { + HashMap::from_iter([ + ( + "editor::FoldAtLevel", + HashMap::from_iter([("level", "editor::FoldAtLevel")]), + ), + ( + "vim::PushOperator", + HashMap::from_iter([ + ("Object", "vim::PushObject"), + ("FindForward", "vim::PushFindForward"), + ("FindBackward", "vim::PushFindBackward"), + ("Sneak", "vim::PushSneak"), + ("SneakBackward", "vim::PushSneakBackward"), + ("AddSurrounds", "vim::PushAddSurrounds"), + ("ChangeSurrounds", "vim::PushChangeSurrounds"), + ("Jump", "vim::PushJump"), + ("Digraph", "vim::PushDigraph"), + ("Literal", "vim::PushLiteral"), + ]), + ), + ]) +}); + +static KEYMAP_MIGRATION_REPLACEMENT_PATTERNS: MigrationPatterns = &[( + ACTION_ARGUMENT_SNAKE_CASE_PATTERN, + action_argument_snake_case, +)]; + +static KEYMAP_MIGRATION_REPLACEMENT_QUERY: LazyLock = LazyLock::new(|| { + Query::new( + &tree_sitter_json::LANGUAGE.into(), + &KEYMAP_MIGRATION_REPLACEMENT_PATTERNS + .iter() + .map(|pattern| pattern.0) + .collect::(), + ) + .unwrap() +}); + +const ACTION_STRING_PATTERN: &str = r#"(document + (array + (object + (pair + key: (string (string_content) @name) + value: ( + (object + (pair + key: (string) + value: (string (string_content) @action_name) + ) + ) + ) + ) + ) + ) + (#eq? @name "bindings") +)"#; + +fn rename_string_action( + contents: &str, + mat: &QueryMatch, + query: &Query, +) -> Option<(Range, String)> { + let action_name_ix = query.capture_index_for_name("action_name").unwrap(); + let action_name_range = mat + .nodes_for_capture_index(action_name_ix) + .next()? 
+ .byte_range(); + let action_name = contents.get(action_name_range.clone())?; + let new_action_name = STRING_REPLACE.get(&action_name)?; + Some((action_name_range, new_action_name.to_string())) +} + +// "ctrl-k ctrl-1": "inline_completion::ToggleMenu" -> "edit_prediction::ToggleMenu" +#[rustfmt::skip] +static STRING_REPLACE: LazyLock> = LazyLock::new(|| { + HashMap::from_iter([ + ("inline_completion::ToggleMenu", "edit_prediction::ToggleMenu"), + ("editor::NextInlineCompletion", "editor::NextEditPrediction"), + ("editor::PreviousInlineCompletion", "editor::PreviousEditPrediction"), + ("editor::AcceptPartialInlineCompletion", "editor::AcceptPartialEditPrediction"), + ("editor::ShowInlineCompletion", "editor::ShowEditPrediction"), + ("editor::AcceptInlineCompletion", "editor::AcceptEditPrediction"), + ("editor::ToggleInlineCompletions", "editor::ToggleEditPrediction"), + ]) +}); + +const CONTEXT_PREDICATE_PATTERN: &str = r#" +(array + (object + (pair + key: (string (string_content) @name) + value: (string (string_content) @context_predicate) + ) + ) +) +(#eq? @name "context") +"#; + +fn rename_context_key( + contents: &str, + mat: &QueryMatch, + query: &Query, +) -> Option<(Range, String)> { + let context_predicate_ix = query.capture_index_for_name("context_predicate").unwrap(); + let context_predicate_range = mat + .nodes_for_capture_index(context_predicate_ix) + .next()? + .byte_range(); + let old_predicate = contents.get(context_predicate_range.clone())?.to_string(); + let mut new_predicate = old_predicate.to_string(); + for (old_key, new_key) in CONTEXT_REPLACE.iter() { + new_predicate = new_predicate.replace(old_key, new_key); + } + if new_predicate != old_predicate { + Some((context_predicate_range, new_predicate.to_string())) + } else { + None + } +} + +const ACTION_ARGUMENT_SNAKE_CASE_PATTERN: &str = r#"(document + (array + (object + (pair + key: (string (string_content) @name) + value: ( + (object + (pair + key: (string) + value: ((array + . (string (string_content) @action_name) + . (object + (pair + key: (string (string_content) @argument_key) + value: (_) @argument_value)) + . ) @array + )) + ) + ) + ) + ) + ) + (#eq? @name "bindings") +)"#; + +fn is_snake_case(text: &str) -> bool { + text == text.to_case(Case::Snake) +} + +fn to_snake_case(text: &str) -> String { + text.to_case(Case::Snake) +} + +/// [ "editor::FoldAtLevel", { "SomeKey": "Value" } ] -> [ "editor::FoldAtLevel", { "some_key" : "value" } ] +fn action_argument_snake_case( + contents: &str, + mat: &QueryMatch, + query: &Query, +) -> Option<(Range, String)> { + let array_ix = query.capture_index_for_name("array").unwrap(); + let action_name_ix = query.capture_index_for_name("action_name").unwrap(); + let argument_key_ix = query.capture_index_for_name("argument_key").unwrap(); + let argument_value_ix = query.capture_index_for_name("argument_value").unwrap(); + let action_name = contents.get( + mat.nodes_for_capture_index(action_name_ix) + .next()? + .byte_range(), + )?; + + let argument_key = contents.get( + mat.nodes_for_capture_index(argument_key_ix) + .next()? 
+ .byte_range(), + )?; + + let argument_value_node = mat.nodes_for_capture_index(argument_value_ix).next()?; + let argument_value = contents.get(argument_value_node.byte_range())?; + + let mut needs_replacement = false; + let mut new_key = argument_key.to_string(); + if !is_snake_case(argument_key) { + new_key = to_snake_case(argument_key); + needs_replacement = true; + } + + let mut new_value = argument_value.to_string(); + if argument_value_node.kind() == "string" { + let inner_value = argument_value.trim_matches('"'); + if !is_snake_case(inner_value) { + new_value = format!("\"{}\"", to_snake_case(inner_value)); + needs_replacement = true; + } + } + + if !needs_replacement { + return None; + } + + let range_to_replace = mat.nodes_for_capture_index(array_ix).next()?.byte_range(); + let replacement = format!( + "[\"{}\", {{ \"{}\": {} }}]", + action_name, new_key, new_value + ); + + Some((range_to_replace, replacement)) +} + +// "context": "Editor && inline_completion && !showing_completions" -> "Editor && edit_prediction && !showing_completions" +pub static CONTEXT_REPLACE: LazyLock> = LazyLock::new(|| { + HashMap::from_iter([ + ("inline_completion", "edit_prediction"), + ( + "inline_completion_requires_modifier", + "edit_prediction_requires_modifier", + ), + ]) +}); + +static SETTINGS_MIGRATION_PATTERNS: MigrationPatterns = &[ + (SETTINGS_STRING_REPLACE_QUERY, replace_setting_name), + (SETTINGS_REPLACE_NESTED_KEY, replace_setting_nested_key), + ( + SETTINGS_REPLACE_IN_LANGUAGES_QUERY, + replace_setting_in_languages, + ), +]; + +static SETTINGS_MIGRATION_QUERY: LazyLock = LazyLock::new(|| { + Query::new( + &tree_sitter_json::LANGUAGE.into(), + &SETTINGS_MIGRATION_PATTERNS + .iter() + .map(|pattern| pattern.0) + .collect::(), + ) + .unwrap() +}); + +static SETTINGS_STRING_REPLACE_QUERY: &str = r#"(document + (object + (pair + key: (string (string_content) @name) + value: (_) + ) + ) +)"#; + +fn replace_setting_name( + contents: &str, + mat: &QueryMatch, + query: &Query, +) -> Option<(Range, String)> { + let setting_capture_ix = query.capture_index_for_name("name").unwrap(); + let setting_name_range = mat + .nodes_for_capture_index(setting_capture_ix) + .next()? + .byte_range(); + let setting_name = contents.get(setting_name_range.clone())?; + let new_setting_name = SETTINGS_STRING_REPLACE.get(&setting_name)?; + Some((setting_name_range, new_setting_name.to_string())) +} + +#[rustfmt::skip] +pub static SETTINGS_STRING_REPLACE: LazyLock> = LazyLock::new(|| { + HashMap::from_iter([ + ("show_inline_completions_in_menu", "show_edit_predictions_in_menu"), + ("show_inline_completions", "show_edit_predictions"), + ("inline_completions_disabled_in", "edit_predictions_disabled_in"), + ("inline_completions", "edit_predictions") + ]) +}); + +static SETTINGS_REPLACE_NESTED_KEY: &str = r#" +(object + (pair + key: (string (string_content) @parent_key) + value: (object + (pair + key: (string (string_content) @setting_name) + value: (_) @value + ) + ) + ) +) +"#; + +fn replace_setting_nested_key( + contents: &str, + mat: &QueryMatch, + query: &Query, +) -> Option<(Range, String)> { + let parent_object_capture_ix = query.capture_index_for_name("parent_key").unwrap(); + let parent_object_range = mat + .nodes_for_capture_index(parent_object_capture_ix) + .next()? + .byte_range(); + let parent_object_name = contents.get(parent_object_range.clone())?; + + let setting_name_ix = query.capture_index_for_name("setting_name").unwrap(); + let setting_range = mat + .nodes_for_capture_index(setting_name_ix) + .next()? 
+ .byte_range(); + let setting_name = contents.get(setting_range.clone())?; + + let new_setting_name = SETTINGS_NESTED_STRING_REPLACE + .get(&parent_object_name)? + .get(setting_name)?; + + Some((setting_range, new_setting_name.to_string())) +} + +// "features": { +// "inline_completion_provider": "copilot" +// }, +pub static SETTINGS_NESTED_STRING_REPLACE: LazyLock< + HashMap<&'static str, HashMap<&'static str, &'static str>>, +> = LazyLock::new(|| { + HashMap::from_iter([( + "features", + HashMap::from_iter([("inline_completion_provider", "edit_prediction_provider")]), + )]) +}); + +static SETTINGS_REPLACE_IN_LANGUAGES_QUERY: &str = r#" +(object + (pair + key: (string (string_content) @languages) + value: (object + (pair + key: (string) + value: (object + (pair + key: (string (string_content) @setting_name) + value: (_) @value + ) + ) + )) + ) +) +(#eq? @languages "languages") +"#; + +fn replace_setting_in_languages( + contents: &str, + mat: &QueryMatch, + query: &Query, +) -> Option<(Range, String)> { + let setting_capture_ix = query.capture_index_for_name("setting_name").unwrap(); + let setting_name_range = mat + .nodes_for_capture_index(setting_capture_ix) + .next()? + .byte_range(); + let setting_name = contents.get(setting_name_range.clone())?; + let new_setting_name = LANGUAGE_SETTINGS_REPLACE.get(&setting_name)?; + + Some((setting_name_range, new_setting_name.to_string())) +} + +#[rustfmt::skip] +static LANGUAGE_SETTINGS_REPLACE: LazyLock< + HashMap<&'static str, &'static str>, +> = LazyLock::new(|| { + HashMap::from_iter([ + ("show_inline_completions", "show_edit_predictions"), + ("inline_completions_disabled_in", "edit_predictions_disabled_in"), + ]) +}); + +#[cfg(test)] +mod tests { + use super::*; + + fn assert_migrate_keymap(input: &str, output: Option<&str>) { + let migrated = migrate_keymap(&input); + pretty_assertions::assert_eq!(migrated.as_deref(), output); + } + + fn assert_migrate_settings(input: &str, output: Option<&str>) { + let migrated = migrate_settings(&input); + pretty_assertions::assert_eq!(migrated.as_deref(), output); + } + + #[test] + fn test_replace_array_with_single_string() { + assert_migrate_keymap( + r#" + [ + { + "bindings": { + "cmd-1": ["workspace::ActivatePaneInDirection", "Up"] + } + } + ] + "#, + Some( + r#" + [ + { + "bindings": { + "cmd-1": "workspace::ActivatePaneUp" + } + } + ] + "#, + ), + ) + } + + #[test] + fn test_replace_action_argument_object_with_single_value() { + assert_migrate_keymap( + r#" + [ + { + "bindings": { + "cmd-1": ["editor::FoldAtLevel", { "level": 1 }] + } + } + ] + "#, + Some( + r#" + [ + { + "bindings": { + "cmd-1": ["editor::FoldAtLevel", 1] + } + } + ] + "#, + ), + ) + } + + #[test] + fn test_replace_action_argument_object_with_single_value_2() { + assert_migrate_keymap( + r#" + [ + { + "bindings": { + "cmd-1": ["vim::PushOperator", { "Object": { "some" : "value" } }] + } + } + ] + "#, + Some( + r#" + [ + { + "bindings": { + "cmd-1": ["vim::PushObject", { "some" : "value" }] + } + } + ] + "#, + ), + ) + } + + #[test] + fn test_rename_string_action() { + assert_migrate_keymap( + r#" + [ + { + "bindings": { + "cmd-1": "inline_completion::ToggleMenu" + } + } + ] + "#, + Some( + r#" + [ + { + "bindings": { + "cmd-1": "edit_prediction::ToggleMenu" + } + } + ] + "#, + ), + ) + } + + #[test] + fn test_rename_context_key() { + assert_migrate_keymap( + r#" + [ + { + "context": "Editor && inline_completion && !showing_completions" + } + ] + "#, + Some( + r#" + [ + { + "context": "Editor && edit_prediction && 
!showing_completions" + } + ] + "#, + ), + ) + } + + #[test] + fn test_action_argument_snake_case() { + // First performs transformations, then replacements + assert_migrate_keymap( + r#" + [ + { + "bindings": { + "cmd-1": ["vim::PushOperator", { "Object": { "SomeKey": "Value" } }], + "cmd-2": ["vim::SomeOtherAction", { "OtherKey": "Value" }], + "cmd-3": ["vim::SomeDifferentAction", { "OtherKey": true }], + "cmd-4": ["vim::OneMore", { "OtherKey": 4 }] + } + } + ] + "#, + Some( + r#" + [ + { + "bindings": { + "cmd-1": ["vim::PushObject", { "some_key": "value" }], + "cmd-2": ["vim::SomeOtherAction", { "other_key": "value" }], + "cmd-3": ["vim::SomeDifferentAction", { "other_key": true }], + "cmd-4": ["vim::OneMore", { "other_key": 4 }] + } + } + ] + "#, + ), + ) + } + + #[test] + fn test_replace_setting_name() { + assert_migrate_settings( + r#" + { + "show_inline_completions_in_menu": true, + "show_inline_completions": true, + "inline_completions_disabled_in": ["string"], + "inline_completions": { "some" : "value" } + } + "#, + Some( + r#" + { + "show_edit_predictions_in_menu": true, + "show_edit_predictions": true, + "edit_predictions_disabled_in": ["string"], + "edit_predictions": { "some" : "value" } + } + "#, + ), + ) + } + + #[test] + fn test_nested_string_replace_for_settings() { + assert_migrate_settings( + r#" + { + "features": { + "inline_completion_provider": "zed" + }, + } + "#, + Some( + r#" + { + "features": { + "edit_prediction_provider": "zed" + }, + } + "#, + ), + ) + } + + #[test] + fn test_replace_settings_in_languages() { + assert_migrate_settings( + r#" + { + "languages": { + "Astro": { + "show_inline_completions": true + } + } + } + "#, + Some( + r#" + { + "languages": { + "Astro": { + "show_edit_predictions": true + } + } + } + "#, + ), + ) + } +} diff --git a/crates/picker/src/picker.rs b/crates/picker/src/picker.rs index cf7793730c5848417848e6fd416ce85dea3873fb..f004e50787b4dbc24775a31b5a373e68185fcab5 100644 --- a/crates/picker/src/picker.rs +++ b/crates/picker/src/picker.rs @@ -26,6 +26,7 @@ actions!(picker, [ConfirmCompletion]); /// ConfirmInput is an alternative editor action which - instead of selecting active picker entry - treats pickers editor input literally, /// performing some kind of action on it. 
#[derive(Clone, PartialEq, Deserialize, JsonSchema, Default)] +#[serde(deny_unknown_fields)] pub struct ConfirmInput { pub secondary: bool, } diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index c4af13f6cbd7f8b13db0a9723abe3a346ebabb24..c308e8ca4eacd51ff234dfbac1d8481293d40653 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -162,12 +162,14 @@ struct EntryDetails { } #[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema)] +#[serde(deny_unknown_fields)] struct Delete { #[serde(default)] pub skip_prompt: bool, } #[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema)] +#[serde(deny_unknown_fields)] struct Trash { #[serde(default)] pub skip_prompt: bool, diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index d8cba0d0a60af2788c7aed5a938ccd69d84a83e7..e983682225d74d083d43b8695b8e662c83cd2c63 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -44,6 +44,7 @@ use registrar::{ForDeployed, ForDismissed, SearchActionsRegistrar, WithResults}; const MAX_BUFFER_SEARCH_HISTORY_SIZE: usize = 50; #[derive(PartialEq, Clone, Deserialize, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct Deploy { #[serde(default = "util::serde::default_true")] pub focus: bool, diff --git a/crates/settings/Cargo.toml b/crates/settings/Cargo.toml index 4ef3e27c94466d7d48e07cb22ce8d1d085377510..b2d46f6ee8e07ae2f5a8b4f83fbc55402a3fc4b9 100644 --- a/crates/settings/Cargo.toml +++ b/crates/settings/Cargo.toml @@ -35,6 +35,7 @@ smallvec.workspace = true tree-sitter-json.workspace = true tree-sitter.workspace = true util.workspace = true +migrator.workspace = true [dev-dependencies] fs = { workspace = true, features = ["test-support"] } diff --git a/crates/settings/src/keymap_file.rs b/crates/settings/src/keymap_file.rs index 3cb0d2c3a85e1e767ed0c84610b49c7ab2937cc0..58c7915b915d6971fd0567502a64d7d26b58dd1b 100644 --- a/crates/settings/src/keymap_file.rs +++ b/crates/settings/src/keymap_file.rs @@ -1,22 +1,24 @@ -use std::rc::Rc; - -use crate::{settings_store::parse_json_with_comments, SettingsAssets}; -use anyhow::anyhow; +use anyhow::{anyhow, Context as _, Result}; use collections::{HashMap, IndexMap}; +use fs::Fs; use gpui::{ Action, ActionBuildError, App, InvalidKeystrokeError, KeyBinding, KeyBindingContextPredicate, NoAction, SharedString, KEYSTROKE_PARSE_EXPECTED_MESSAGE, }; +use migrator::migrate_keymap; use schemars::{ gen::{SchemaGenerator, SchemaSettings}, schema::{ArrayValidation, InstanceType, Schema, SchemaObject, SubschemaValidation}, JsonSchema, }; -use serde::Deserialize; +use serde::{Deserialize, Serialize}; use serde_json::Value; -use std::fmt::Write; +use std::rc::Rc; +use std::{fmt::Write, sync::Arc}; use util::{asset_str, markdown::MarkdownString}; +use crate::{settings_store::parse_json_with_comments, SettingsAssets}; + // Note that the doc comments on these are shown by json-language-server when editing the keymap, so // they should be considered user-facing documentation. Documentation is not handled well with // schemars-0.8 - when there are newlines, it is rendered as plaintext (see @@ -28,12 +30,12 @@ use util::{asset_str, markdown::MarkdownString}; /// Keymap configuration consisting of sections. Each section may have a context predicate which /// determines whether its bindings are used. 
-#[derive(Debug, Deserialize, Default, Clone, JsonSchema)] +#[derive(Debug, Deserialize, Default, Clone, JsonSchema, Serialize)] #[serde(transparent)] pub struct KeymapFile(Vec); /// Keymap section which binds keystrokes to actions. -#[derive(Debug, Deserialize, Default, Clone, JsonSchema)] +#[derive(Debug, Deserialize, Default, Clone, JsonSchema, Serialize)] pub struct KeymapSection { /// Determines when these bindings are active. When just a name is provided, like `Editor` or /// `Workspace`, the bindings will be active in that context. Boolean expressions like `X && Y`, @@ -78,9 +80,9 @@ impl KeymapSection { /// Unlike the other json types involved in keymaps (including actions), this doc-comment will not /// be included in the generated JSON schema, as it manually defines its `JsonSchema` impl. The /// actual schema used for it is automatically generated in `KeymapFile::generate_json_schema`. -#[derive(Debug, Deserialize, Default, Clone)] +#[derive(Debug, Deserialize, Default, Clone, Serialize)] #[serde(transparent)] -pub struct KeymapAction(Value); +pub struct KeymapAction(pub(crate) Value); impl std::fmt::Display for KeymapAction { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { @@ -114,9 +116,11 @@ impl JsonSchema for KeymapAction { pub enum KeymapFileLoadResult { Success { key_bindings: Vec, + keymap_file: KeymapFile, }, SomeFailedToLoad { key_bindings: Vec, + keymap_file: KeymapFile, error_message: MarkdownString, }, JsonParseFailure { @@ -150,6 +154,7 @@ impl KeymapFile { KeymapFileLoadResult::SomeFailedToLoad { key_bindings, error_message, + .. } if key_bindings.is_empty() => Err(anyhow!( "Error loading built-in keymap \"{asset_path}\": {error_message}" )), @@ -164,7 +169,7 @@ impl KeymapFile { #[cfg(feature = "test-support")] pub fn load_panic_on_failure(content: &str, cx: &App) -> Vec { match Self::load(content, cx) { - KeymapFileLoadResult::Success { key_bindings } => key_bindings, + KeymapFileLoadResult::Success { key_bindings, .. } => key_bindings, KeymapFileLoadResult::SomeFailedToLoad { error_message, .. 
} => { panic!("{error_message}"); } @@ -180,6 +185,7 @@ impl KeymapFile { if content.is_empty() { return KeymapFileLoadResult::Success { key_bindings: Vec::new(), + keymap_file: KeymapFile(Vec::new()), }; } let keymap_file = match parse_json_with_comments::(content) { @@ -266,7 +272,10 @@ impl KeymapFile { } if errors.is_empty() { - KeymapFileLoadResult::Success { key_bindings } + KeymapFileLoadResult::Success { + key_bindings, + keymap_file, + } } else { let mut error_message = "Errors in user keymap file.\n".to_owned(); for (context, section_errors) in errors { @@ -284,6 +293,7 @@ impl KeymapFile { } KeymapFileLoadResult::SomeFailedToLoad { key_bindings, + keymap_file, error_message: MarkdownString(error_message), } } @@ -551,6 +561,55 @@ impl KeymapFile { pub fn sections(&self) -> impl DoubleEndedIterator { self.0.iter() } + + async fn load_keymap_file(fs: &Arc) -> Result { + match fs.load(paths::keymap_file()).await { + result @ Ok(_) => result, + Err(err) => { + if let Some(e) = err.downcast_ref::() { + if e.kind() == std::io::ErrorKind::NotFound { + return Ok(crate::initial_keymap_content().to_string()); + } + } + Err(err) + } + } + } + + pub fn should_migrate_keymap(keymap_file: Self) -> bool { + let Ok(old_text) = serde_json::to_string(&keymap_file) else { + return false; + }; + migrate_keymap(&old_text).is_some() + } + + pub async fn migrate_keymap(fs: Arc) -> Result<()> { + let old_text = Self::load_keymap_file(&fs).await?; + let Some(new_text) = migrate_keymap(&old_text) else { + return Ok(()); + }; + let initial_path = paths::keymap_file().as_path(); + if fs.is_file(initial_path).await { + let backup_path = paths::home_dir().join(".zed_keymap_backup"); + fs.atomic_write(backup_path, old_text) + .await + .with_context(|| { + "Failed to create settings backup in home directory".to_string() + })?; + let resolved_path = fs.canonicalize(initial_path).await.with_context(|| { + format!("Failed to canonicalize keymap path {:?}", initial_path) + })?; + fs.atomic_write(resolved_path.clone(), new_text) + .await + .with_context(|| format!("Failed to write keymap to file {:?}", resolved_path))?; + } else { + fs.atomic_write(initial_path.to_path_buf(), new_text) + .await + .with_context(|| format!("Failed to write keymap to file {:?}", initial_path))?; + } + + Ok(()) + } } // Double quotes a string and wraps it in backticks for markdown inline code.. 
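
The new KeymapFile::migrate_keymap above follows a check, back up, then atomically rewrite sequence. As a rough standalone sketch of that flow outside Zed's Fs trait, the program below uses std::fs, hypothetical file names ("keymap.json", "keymap.json.backup"), and a stand-in migrate function in place of migrator::migrate_keymap; it is illustrative only, not the patch's actual implementation.

use std::{fs, io, path::Path};

// Stand-in for migrator::migrate_keymap: return Some(new_text) only when
// something actually changed, None when the file is already up to date.
fn migrate(old_text: &str) -> Option<String> {
    let new_text = old_text.replace("inline_completion", "edit_prediction");
    (new_text != old_text).then_some(new_text)
}

fn backup_and_migrate(keymap_path: &Path, backup_path: &Path) -> io::Result<bool> {
    let old_text = fs::read_to_string(keymap_path)?;
    let Some(new_text) = migrate(&old_text) else {
        return Ok(false); // nothing to migrate
    };
    // Keep the original contents before touching the real file.
    fs::write(backup_path, &old_text)?;
    // Write to a temporary file and rename it into place so readers never
    // observe a half-written keymap.
    let tmp_path = keymap_path.with_extension("tmp");
    fs::write(&tmp_path, &new_text)?;
    fs::rename(&tmp_path, keymap_path)?;
    Ok(true)
}

fn main() -> io::Result<()> {
    let migrated = backup_and_migrate(Path::new("keymap.json"), Path::new("keymap.json.backup"))?;
    println!("migrated: {migrated}");
    Ok(())
}

The temp-file-plus-rename step mirrors the intent of the atomic_write calls in the patch: an interrupted migration should not leave a truncated keymap or settings file behind.
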
@@ -560,7 +619,7 @@ fn inline_code_string(text: &str) -> MarkdownString { #[cfg(test)] mod tests { - use crate::KeymapFile; + use super::KeymapFile; #[test] fn can_deserialize_keymap_with_trailing_comma() { diff --git a/crates/settings/src/settings_file.rs b/crates/settings/src/settings_file.rs index 622c42d006df7a0bf89280ad666afab20f60d035..ba93391804b548d6bec0703da45acec8eb0bb36e 100644 --- a/crates/settings/src/settings_file.rs +++ b/crates/settings/src/settings_file.rs @@ -81,7 +81,7 @@ pub fn watch_config_file( pub fn handle_settings_file_changes( mut user_settings_file_rx: mpsc::UnboundedReceiver, cx: &mut App, - settings_changed: impl Fn(Option, &mut App) + 'static, + settings_changed: impl Fn(Result, &mut App) + 'static, ) { let user_settings_content = cx .background_executor() @@ -92,7 +92,7 @@ pub fn handle_settings_file_changes( if let Err(err) = &result { log::error!("Failed to load user settings: {err}"); } - settings_changed(result.err(), cx); + settings_changed(result, cx); }); cx.spawn(move |cx| async move { while let Some(user_settings_content) = user_settings_file_rx.next().await { @@ -101,7 +101,7 @@ pub fn handle_settings_file_changes( if let Err(err) = &result { log::error!("Failed to load user settings: {err}"); } - settings_changed(result.err(), cx); + settings_changed(result, cx); cx.refresh_windows(); }); if result.is_err() { diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index b0ecd056ccbae3915791ff3b2661b8d7f25122b9..2337f7fef3d0b1190231bcd020466d9224364391 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -4,6 +4,7 @@ use ec4rs::{ConfigParser, PropertiesSource, Section}; use fs::Fs; use futures::{channel::mpsc, future::LocalBoxFuture, FutureExt, StreamExt}; use gpui::{App, AsyncApp, BorrowAppContext, Global, Task, UpdateGlobal}; +use migrator::migrate_settings; use paths::{local_settings_file_relative_path, EDITORCONFIG_NAME}; use schemars::{gen::SchemaGenerator, schema::RootSchema, JsonSchema}; use serde::{de::DeserializeOwned, Deserialize, Serialize}; @@ -17,7 +18,9 @@ use std::{ sync::{Arc, LazyLock}, }; use tree_sitter::Query; -use util::{merge_non_null_json_value_into, RangeExt, ResultExt as _}; +use util::RangeExt; + +use util::{merge_non_null_json_value_into, ResultExt as _}; pub type EditorconfigProperties = ec4rs::Properties; @@ -544,7 +547,11 @@ impl SettingsStore { } /// Sets the user settings via a JSON string. - pub fn set_user_settings(&mut self, user_settings_content: &str, cx: &mut App) -> Result<()> { + pub fn set_user_settings( + &mut self, + user_settings_content: &str, + cx: &mut App, + ) -> Result { let settings: serde_json::Value = if user_settings_content.is_empty() { parse_json_with_comments("{}")? 
} else { @@ -552,9 +559,9 @@ impl SettingsStore { }; anyhow::ensure!(settings.is_object(), "settings must be an object"); - self.raw_user_settings = settings; + self.raw_user_settings = settings.clone(); self.recompute_values(None, cx)?; - Ok(()) + Ok(settings) } pub fn set_server_settings( @@ -988,6 +995,52 @@ impl SettingsStore { properties.use_fallbacks(); Some(properties) } + + pub fn should_migrate_settings(settings: &serde_json::Value) -> bool { + let Ok(old_text) = serde_json::to_string(settings) else { + return false; + }; + migrate_settings(&old_text).is_some() + } + + pub fn migrate_settings(&self, fs: Arc) { + self.setting_file_updates_tx + .unbounded_send(Box::new(move |_: AsyncApp| { + async move { + let old_text = Self::load_settings(&fs).await?; + let Some(new_text) = migrate_settings(&old_text) else { + return anyhow::Ok(()); + }; + let initial_path = paths::settings_file().as_path(); + if fs.is_file(initial_path).await { + let backup_path = paths::home_dir().join(".zed_settings_backup"); + fs.atomic_write(backup_path, old_text) + .await + .with_context(|| { + "Failed to create settings backup in home directory".to_string() + })?; + let resolved_path = + fs.canonicalize(initial_path).await.with_context(|| { + format!("Failed to canonicalize settings path {:?}", initial_path) + })?; + fs.atomic_write(resolved_path.clone(), new_text) + .await + .with_context(|| { + format!("Failed to write settings to file {:?}", resolved_path) + })?; + } else { + fs.atomic_write(initial_path.to_path_buf(), new_text) + .await + .with_context(|| { + format!("Failed to write settings to file {:?}", initial_path) + })?; + } + anyhow::Ok(()) + } + .boxed_local() + })) + .ok(); + } } #[derive(Debug, Clone, PartialEq)] @@ -1235,7 +1288,9 @@ fn replace_value_in_json_text( let found_key = text .get(key_range.clone()) - .map(|key_text| key_text == format!("\"{}\"", key_path[depth])) + .map(|key_text| { + depth < key_path.len() && key_text == format!("\"{}\"", key_path[depth]) + }) .unwrap_or(false); if found_key { diff --git a/crates/supermaven/src/supermaven.rs b/crates/supermaven/src/supermaven.rs index f64ac48b48d36e7be839a2e7e8e4da1beb6ec318..a289c7f68b8d9c95bd7550712706d39e3b955156 100644 --- a/crates/supermaven/src/supermaven.rs +++ b/crates/supermaven/src/supermaven.rs @@ -31,16 +31,16 @@ pub fn init(client: Arc, cx: &mut App) { let supermaven = cx.new(|_| Supermaven::Starting); Supermaven::set_global(supermaven.clone(), cx); - let mut provider = all_language_settings(None, cx).inline_completions.provider; - if provider == language::language_settings::InlineCompletionProvider::Supermaven { + let mut provider = all_language_settings(None, cx).edit_predictions.provider; + if provider == language::language_settings::EditPredictionProvider::Supermaven { supermaven.update(cx, |supermaven, cx| supermaven.start(client.clone(), cx)); } cx.observe_global::(move |cx| { - let new_provider = all_language_settings(None, cx).inline_completions.provider; + let new_provider = all_language_settings(None, cx).edit_predictions.provider; if new_provider != provider { provider = new_provider; - if provider == language::language_settings::InlineCompletionProvider::Supermaven { + if provider == language::language_settings::EditPredictionProvider::Supermaven { supermaven.update(cx, |supermaven, cx| supermaven.start(client.clone(), cx)); } else { supermaven.update(cx, |supermaven, _cx| supermaven.stop()); diff --git a/crates/supermaven/src/supermaven_completion_provider.rs 
b/crates/supermaven/src/supermaven_completion_provider.rs index b14d7d54c2bff99981e364dd4ebf7c22a33d2b7f..3e70a1c57672e1e6908404ce5ff95ca55f02be0e 100644 --- a/crates/supermaven/src/supermaven_completion_provider.rs +++ b/crates/supermaven/src/supermaven_completion_provider.rs @@ -2,7 +2,7 @@ use crate::{Supermaven, SupermavenCompletionStateId}; use anyhow::Result; use futures::StreamExt as _; use gpui::{App, Context, Entity, EntityId, Task}; -use inline_completion::{Direction, InlineCompletion, InlineCompletionProvider}; +use inline_completion::{Direction, EditPredictionProvider, InlineCompletion}; use language::{Anchor, Buffer, BufferSnapshot}; use project::Project; use std::{ @@ -97,7 +97,7 @@ fn completion_from_diff( } } -impl InlineCompletionProvider for SupermavenCompletionProvider { +impl EditPredictionProvider for SupermavenCompletionProvider { fn name() -> &'static str { "supermaven" } diff --git a/crates/tab_switcher/src/tab_switcher.rs b/crates/tab_switcher/src/tab_switcher.rs index 5af89a92d2e8e107e0349fb5c74fade5d14d8767..0446444b88ea4ca46c14544eaafea48b5b826c98 100644 --- a/crates/tab_switcher/src/tab_switcher.rs +++ b/crates/tab_switcher/src/tab_switcher.rs @@ -25,6 +25,7 @@ use workspace::{ const PANEL_WIDTH_REMS: f32 = 28.; #[derive(PartialEq, Clone, Deserialize, JsonSchema, Default)] +#[serde(deny_unknown_fields)] pub struct Toggle { #[serde(default)] pub select_last: bool, diff --git a/crates/terminal_view/src/terminal_panel.rs b/crates/terminal_view/src/terminal_panel.rs index f2e74b71f4e732a861ce05560d015b383c8501be..14a3e111b304a8911f5dad0a3b47b0c028a1dc85 100644 --- a/crates/terminal_view/src/terminal_panel.rs +++ b/crates/terminal_view/src/terminal_panel.rs @@ -35,10 +35,11 @@ use workspace::{ item::SerializableItem, move_active_item, move_item, pane, ui::IconName, - ActivateNextPane, ActivatePane, ActivatePaneInDirection, ActivatePreviousPane, - DraggedSelection, DraggedTab, ItemId, MoveItemToPane, MoveItemToPaneInDirection, NewTerminal, - Pane, PaneGroup, SplitDirection, SplitDown, SplitLeft, SplitRight, SplitUp, - SwapPaneInDirection, ToggleZoom, Workspace, + ActivateNextPane, ActivatePane, ActivatePaneDown, ActivatePaneLeft, ActivatePaneRight, + ActivatePaneUp, ActivatePreviousPane, DraggedSelection, DraggedTab, ItemId, MoveItemToPane, + MoveItemToPaneInDirection, NewTerminal, Pane, PaneGroup, SplitDirection, SplitDown, SplitLeft, + SplitRight, SplitUp, SwapPaneDown, SwapPaneLeft, SwapPaneRight, SwapPaneUp, ToggleZoom, + Workspace, }; use anyhow::{anyhow, Context as _, Result}; @@ -889,6 +890,37 @@ impl TerminalPanel { is_enabled_in_workspace(workspace.read(cx), cx) }) } + + fn activate_pane_in_direction( + &mut self, + direction: SplitDirection, + window: &mut Window, + cx: &mut Context, + ) { + if let Some(pane) = self + .center + .find_pane_in_direction(&self.active_pane, direction, cx) + { + window.focus(&pane.focus_handle(cx)); + } else { + self.workspace + .update(cx, |workspace, cx| { + workspace.activate_pane_in_direction(direction, window, cx) + }) + .ok(); + } + } + + fn swap_pane_in_direction(&mut self, direction: SplitDirection, cx: &mut Context) { + if let Some(to) = self + .center + .find_pane_in_direction(&self.active_pane, direction, cx) + .cloned() + { + self.center.swap(&self.active_pane, &to); + cx.notify(); + } + } } fn is_enabled_in_workspace(workspace: &Workspace, cx: &App) -> bool { @@ -1145,24 +1177,28 @@ impl Render for TerminalPanel { .ok() .map(|div| { div.on_action({ - cx.listener( - |terminal_panel, action: &ActivatePaneInDirection, 
window, cx| { - if let Some(pane) = terminal_panel.center.find_pane_in_direction( - &terminal_panel.active_pane, - action.0, - cx, - ) { - window.focus(&pane.focus_handle(cx)); - } else { - terminal_panel - .workspace - .update(cx, |workspace, cx| { - workspace.activate_pane_in_direction(action.0, window, cx) - }) - .ok(); - } - }, - ) + cx.listener(|terminal_panel, _: &ActivatePaneLeft, window, cx| { + terminal_panel.activate_pane_in_direction(SplitDirection::Left, window, cx); + }) + }) + .on_action({ + cx.listener(|terminal_panel, _: &ActivatePaneRight, window, cx| { + terminal_panel.activate_pane_in_direction( + SplitDirection::Right, + window, + cx, + ); + }) + }) + .on_action({ + cx.listener(|terminal_panel, _: &ActivatePaneUp, window, cx| { + terminal_panel.activate_pane_in_direction(SplitDirection::Up, window, cx); + }) + }) + .on_action({ + cx.listener(|terminal_panel, _: &ActivatePaneDown, window, cx| { + terminal_panel.activate_pane_in_direction(SplitDirection::Down, window, cx); + }) }) .on_action( cx.listener(|terminal_panel, _action: &ActivateNextPane, window, cx| { @@ -1210,18 +1246,18 @@ impl Render for TerminalPanel { } }), ) - .on_action( - cx.listener(|terminal_panel, action: &SwapPaneInDirection, _, cx| { - if let Some(to) = terminal_panel - .center - .find_pane_in_direction(&terminal_panel.active_pane, action.0, cx) - .cloned() - { - terminal_panel.center.swap(&terminal_panel.active_pane, &to); - cx.notify(); - } - }), - ) + .on_action(cx.listener(|terminal_panel, _: &SwapPaneLeft, _, cx| { + terminal_panel.swap_pane_in_direction(SplitDirection::Left, cx); + })) + .on_action(cx.listener(|terminal_panel, _: &SwapPaneRight, _, cx| { + terminal_panel.swap_pane_in_direction(SplitDirection::Right, cx); + })) + .on_action(cx.listener(|terminal_panel, _: &SwapPaneUp, _, cx| { + terminal_panel.swap_pane_in_direction(SplitDirection::Up, cx); + })) + .on_action(cx.listener(|terminal_panel, _: &SwapPaneDown, _, cx| { + terminal_panel.swap_pane_in_direction(SplitDirection::Down, cx); + })) .on_action( cx.listener(|terminal_panel, action: &MoveItemToPane, window, cx| { let Some(&target_pane) = diff --git a/crates/title_bar/src/application_menu.rs b/crates/title_bar/src/application_menu.rs index eb4acfaf6732a3d8615496f77d35af5505819f58..955550596d69d31b1f50f17bc63bccd3d75147f1 100644 --- a/crates/title_bar/src/application_menu.rs +++ b/crates/title_bar/src/application_menu.rs @@ -1,19 +1,31 @@ -use gpui::{impl_actions, Entity, OwnedMenu, OwnedMenuItem}; +use gpui::{Entity, OwnedMenu, OwnedMenuItem}; + +#[cfg(not(target_os = "macos"))] +use gpui::{actions, impl_actions}; + +#[cfg(not(target_os = "macos"))] use schemars::JsonSchema; +#[cfg(not(target_os = "macos"))] use serde::Deserialize; + use smallvec::SmallVec; use ui::{prelude::*, ContextMenu, PopoverMenu, PopoverMenuHandle, Tooltip}; -impl_actions!( - app_menu, - [OpenApplicationMenu, NavigateApplicationMenuInDirection] -); +#[cfg(not(target_os = "macos"))] +impl_actions!(app_menu, [OpenApplicationMenu]); +#[cfg(not(target_os = "macos"))] +actions!(app_menu, [ActivateMenuRight, ActivateMenuLeft]); + +#[cfg(not(target_os = "macos"))] #[derive(Clone, Deserialize, JsonSchema, PartialEq, Default)] pub struct OpenApplicationMenu(String); -#[derive(Clone, Deserialize, JsonSchema, PartialEq, Default)] -pub struct NavigateApplicationMenuInDirection(String); +#[cfg(not(target_os = "macos"))] +pub enum ActivateDirection { + Left, + Right, +} #[derive(Clone)] struct MenuEntry { @@ -190,7 +202,7 @@ impl ApplicationMenu { #[cfg(not(target_os 
= "macos"))] pub fn navigate_menus_in_direction( &mut self, - action: &NavigateApplicationMenuInDirection, + direction: ActivateDirection, window: &mut Window, cx: &mut Context, ) { @@ -202,22 +214,21 @@ impl ApplicationMenu { return; }; - let next_index = match action.0.as_str() { - "Left" => { + let next_index = match direction { + ActivateDirection::Left => { if current_index == 0 { self.entries.len() - 1 } else { current_index - 1 } } - "Right" => { + ActivateDirection::Right => { if current_index == self.entries.len() - 1 { 0 } else { current_index + 1 } } - _ => return, }; self.entries[current_index].handle.hide(cx); diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index 4b3b89decbe5b442006b375038a820d2a0b4c842..801e701e785ab3ca5f95b2950d72d7fad7737ec2 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -9,7 +9,9 @@ mod stories; use crate::application_menu::ApplicationMenu; #[cfg(not(target_os = "macos"))] -use crate::application_menu::{NavigateApplicationMenuInDirection, OpenApplicationMenu}; +use crate::application_menu::{ + ActivateDirection, ActivateMenuLeft, ActivateMenuRight, OpenApplicationMenu, +}; use crate::platforms::{platform_linux, platform_mac, platform_windows}; use auto_update::AutoUpdateStatus; @@ -78,22 +80,36 @@ pub fn init(cx: &mut App) { }); #[cfg(not(target_os = "macos"))] - workspace.register_action( - |workspace, action: &NavigateApplicationMenuInDirection, window, cx| { - if let Some(titlebar) = workspace - .titlebar_item() - .and_then(|item| item.downcast::().ok()) - { - titlebar.update(cx, |titlebar, cx| { - if let Some(ref menu) = titlebar.application_menu { - menu.update(cx, |menu, cx| { - menu.navigate_menus_in_direction(action, window, cx) - }); - } - }); - } - }, - ); + workspace.register_action(|workspace, _: &ActivateMenuRight, window, cx| { + if let Some(titlebar) = workspace + .titlebar_item() + .and_then(|item| item.downcast::().ok()) + { + titlebar.update(cx, |titlebar, cx| { + if let Some(ref menu) = titlebar.application_menu { + menu.update(cx, |menu, cx| { + menu.navigate_menus_in_direction(ActivateDirection::Right, window, cx) + }); + } + }); + } + }); + + #[cfg(not(target_os = "macos"))] + workspace.register_action(|workspace, _: &ActivateMenuLeft, window, cx| { + if let Some(titlebar) = workspace + .titlebar_item() + .and_then(|item| item.downcast::().ok()) + { + titlebar.update(cx, |titlebar, cx| { + if let Some(ref menu) = titlebar.application_menu { + menu.update(cx, |menu, cx| { + menu.navigate_menus_in_direction(ActivateDirection::Left, window, cx) + }); + } + }); + } + }); }) .detach(); } diff --git a/crates/vim/src/motion.rs b/crates/vim/src/motion.rs index a097a6ebb2bd571cace78bc7d4d601effc84d6cf..b8a375860a146bb29fc78ad937be666357cb90b3 100644 --- a/crates/vim/src/motion.rs +++ b/crates/vim/src/motion.rs @@ -141,105 +141,105 @@ pub enum Motion { } #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] struct NextWordStart { #[serde(default)] ignore_punctuation: bool, } #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] struct NextWordEnd { #[serde(default)] ignore_punctuation: bool, } #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] struct PreviousWordStart { #[serde(default)] ignore_punctuation: bool, } #[derive(Clone, Deserialize, JsonSchema, PartialEq)] 
-#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] struct PreviousWordEnd { #[serde(default)] ignore_punctuation: bool, } #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] pub(crate) struct NextSubwordStart { #[serde(default)] pub(crate) ignore_punctuation: bool, } #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] pub(crate) struct NextSubwordEnd { #[serde(default)] pub(crate) ignore_punctuation: bool, } #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] pub(crate) struct PreviousSubwordStart { #[serde(default)] pub(crate) ignore_punctuation: bool, } #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] pub(crate) struct PreviousSubwordEnd { #[serde(default)] pub(crate) ignore_punctuation: bool, } #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] pub(crate) struct Up { #[serde(default)] pub(crate) display_lines: bool, } #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] pub(crate) struct Down { #[serde(default)] pub(crate) display_lines: bool, } #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] struct FirstNonWhitespace { #[serde(default)] display_lines: bool, } #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] struct EndOfLine { #[serde(default)] display_lines: bool, } #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] pub struct StartOfLine { #[serde(default)] pub(crate) display_lines: bool, } #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] struct UnmatchedForward { #[serde(default)] char: char, } #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] struct UnmatchedBackward { #[serde(default)] char: char, diff --git a/crates/vim/src/normal/increment.rs b/crates/vim/src/normal/increment.rs index 36bcce1b8e2dab5f2e377b8647b00118652b177d..56b91cdd1cebed12ff5d5872958d9437e9924b23 100644 --- a/crates/vim/src/normal/increment.rs +++ b/crates/vim/src/normal/increment.rs @@ -8,14 +8,14 @@ use std::ops::Range; use crate::{state::Mode, Vim}; #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] struct Increment { #[serde(default)] step: bool, } #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] struct Decrement { #[serde(default)] step: bool, diff --git a/crates/vim/src/normal/paste.rs b/crates/vim/src/normal/paste.rs index eefd92d0d14023bde230228235ee3007689db626..417a4aa67e372555ba5f3647eb015ae712ab5ab3 100644 --- a/crates/vim/src/normal/paste.rs +++ b/crates/vim/src/normal/paste.rs @@ -13,7 +13,7 @@ use crate::{ }; #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] pub struct Paste { #[serde(default)] before: bool, diff --git a/crates/vim/src/normal/search.rs b/crates/vim/src/normal/search.rs index 
948141c4e7577bf7a2bdb81b7b5eb5aa681b1031..89564af52e5c354ca79fa17662aa544b1380414f 100644 --- a/crates/vim/src/normal/search.rs +++ b/crates/vim/src/normal/search.rs @@ -16,7 +16,7 @@ use crate::{ }; #[derive(Clone, Debug, Deserialize, JsonSchema, PartialEq)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] pub(crate) struct MoveToNext { #[serde(default = "default_true")] case_sensitive: bool, @@ -27,7 +27,7 @@ pub(crate) struct MoveToNext { } #[derive(Clone, Debug, Deserialize, JsonSchema, PartialEq)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] pub(crate) struct MoveToPrev { #[serde(default = "default_true")] case_sensitive: bool, @@ -38,6 +38,7 @@ pub(crate) struct MoveToPrev { } #[derive(Clone, Debug, Deserialize, JsonSchema, PartialEq)] +#[serde(deny_unknown_fields)] pub(crate) struct Search { #[serde(default)] backwards: bool, @@ -46,6 +47,7 @@ pub(crate) struct Search { } #[derive(Clone, Debug, Deserialize, JsonSchema, PartialEq)] +#[serde(deny_unknown_fields)] pub struct FindCommand { pub query: String, pub backwards: bool, diff --git a/crates/vim/src/object.rs b/crates/vim/src/object.rs index cd1269e2641ffb43f5dd0da65e8b847a9f195b8f..285f79095a904e86cc361bc561e20a22f0b37a5f 100644 --- a/crates/vim/src/object.rs +++ b/crates/vim/src/object.rs @@ -19,6 +19,7 @@ use serde::Deserialize; use ui::Context; #[derive(Copy, Clone, Debug, PartialEq, Eq, Deserialize, JsonSchema)] +#[serde(rename_all = "snake_case")] pub enum Object { Word { ignore_punctuation: bool }, Subword { ignore_punctuation: bool }, @@ -44,20 +45,20 @@ pub enum Object { } #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] struct Word { #[serde(default)] ignore_punctuation: bool, } #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] struct Subword { #[serde(default)] ignore_punctuation: bool, } #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] struct IndentObj { #[serde(default)] include_below: bool, diff --git a/crates/vim/src/state.rs b/crates/vim/src/state.rs index a4994fb4d3db07c02841ea7c12dccf4234377d63..4c09984a753f466994a159d5c78b52f5b05801d4 100644 --- a/crates/vim/src/state.rs +++ b/crates/vim/src/state.rs @@ -10,7 +10,6 @@ use gpui::{ Action, App, BorrowAppContext, ClipboardEntry, ClipboardItem, Entity, Global, WeakEntity, }; use language::Point; -use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsStore}; use std::borrow::BorrowMut; @@ -18,7 +17,7 @@ use std::{fmt::Display, ops::Range, sync::Arc}; use ui::{Context, SharedString}; use workspace::searchable::Direction; -#[derive(Clone, Copy, Debug, PartialEq, Eq, Deserialize, JsonSchema, Serialize)] +#[derive(Clone, Copy, Debug, PartialEq, Serialize, Deserialize)] pub enum Mode { Normal, Insert, @@ -59,7 +58,7 @@ impl Default for Mode { } } -#[derive(Clone, Debug, PartialEq, Eq, Deserialize, JsonSchema)] +#[derive(Clone, Debug, PartialEq)] pub enum Operator { Change, Delete, @@ -82,7 +81,6 @@ pub enum Operator { }, AddSurrounds { // Typically no need to configure this as `SendKeystrokes` can be used - see #23088. 
- #[serde(skip)] target: Option, }, ChangeSurrounds { diff --git a/crates/vim/src/surrounds.rs b/crates/vim/src/surrounds.rs index fd774d6159a0874e1f5ce831c89311527d89f71b..fcf33d9f773228fe796dfd98d7b4979a19b87856 100644 --- a/crates/vim/src/surrounds.rs +++ b/crates/vim/src/surrounds.rs @@ -554,11 +554,7 @@ mod test { use gpui::KeyBinding; use indoc::indoc; - use crate::{ - state::{Mode, Operator}, - test::VimTestContext, - PushOperator, - }; + use crate::{state::Mode, test::VimTestContext, PushAddSurrounds}; #[gpui::test] async fn test_add_surrounds(cx: &mut gpui::TestAppContext) { @@ -749,7 +745,7 @@ mod test { cx.update(|_, cx| { cx.bind_keys([KeyBinding::new( "shift-s", - PushOperator(Operator::AddSurrounds { target: None }), + PushAddSurrounds {}, Some("vim_mode == visual"), )]) }); diff --git a/crates/vim/src/test.rs b/crates/vim/src/test.rs index 34ae719fbf22dfd16248638c11cb37ce30a7b372..6c336375ae65cb1c5d1449bea164e2c44e60b17b 100644 --- a/crates/vim/src/test.rs +++ b/crates/vim/src/test.rs @@ -17,12 +17,7 @@ use indoc::indoc; use search::BufferSearchBar; use workspace::WorkspaceSettings; -use crate::{ - insert::NormalBefore, - motion, - state::{Mode, Operator}, - PushOperator, -}; +use crate::{insert::NormalBefore, motion, state::Mode, PushSneak, PushSneakBackward}; #[gpui::test] async fn test_initially_disabled(cx: &mut gpui::TestAppContext) { @@ -1347,17 +1342,17 @@ async fn test_sneak(cx: &mut gpui::TestAppContext) { cx.bind_keys([ KeyBinding::new( "s", - PushOperator(Operator::Sneak { first_char: None }), + PushSneak { first_char: None }, Some("vim_mode == normal"), ), KeyBinding::new( "S", - PushOperator(Operator::SneakBackward { first_char: None }), + PushSneakBackward { first_char: None }, Some("vim_mode == normal"), ), KeyBinding::new( "S", - PushOperator(Operator::SneakBackward { first_char: None }), + PushSneakBackward { first_char: None }, Some("vim_mode == visual"), ), ]) diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index f479acee316638f35419a5c63e9be2263e050897..a3a3afc4438a1b28bb2ce6893a678c21f725354c 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -35,6 +35,7 @@ use language::{CursorShape, Point, Selection, SelectionGoal, TransactionId}; pub use mode_indicator::ModeIndicator; use motion::Motion; use normal::search::SearchSubmit; +use object::Object; use schemars::JsonSchema; use serde::Deserialize; use serde_derive::Serialize; @@ -45,55 +46,138 @@ use surrounds::SurroundsType; use theme::ThemeSettings; use ui::{px, IntoElement, SharedString}; use vim_mode_setting::VimModeSetting; -use workspace::{self, Pane, ResizeIntent, Workspace}; +use workspace::{self, Pane, Workspace}; use crate::state::ReplayableAction; -/// Used to resize the current pane +/// Number is used to manage vim's count. Pushing a digit +/// multiplies the current value by 10 and adds the digit. #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -pub struct ResizePane(pub ResizeIntent); +struct Number(usize); -/// An Action to Switch between modes #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -pub struct SwitchMode(pub Mode); +struct SelectRegister(String); -/// PushOperator is used to put vim into a "minor" mode, -/// where it's waiting for a specific next set of keystrokes. -/// For example 'd' needs a motion to complete. #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -pub struct PushOperator(pub Operator); +#[serde(deny_unknown_fields)] +struct PushObject { + around: bool, +} -/// Number is used to manage vim's count. 
Pushing a digit -/// multiplies the current value by 10 and adds the digit. #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -struct Number(usize); +#[serde(deny_unknown_fields)] +struct PushFindForward { + before: bool, +} #[derive(Clone, Deserialize, JsonSchema, PartialEq)] -struct SelectRegister(String); +#[serde(deny_unknown_fields)] +struct PushFindBackward { + after: bool, +} + +#[derive(Clone, Deserialize, JsonSchema, PartialEq)] +#[serde(deny_unknown_fields)] +struct PushSneak { + first_char: Option, +} + +#[derive(Clone, Deserialize, JsonSchema, PartialEq)] +#[serde(deny_unknown_fields)] +struct PushSneakBackward { + first_char: Option, +} + +#[derive(Clone, Deserialize, JsonSchema, PartialEq)] +#[serde(deny_unknown_fields)] +struct PushAddSurrounds {} + +#[derive(Clone, Deserialize, JsonSchema, PartialEq)] +#[serde(deny_unknown_fields)] +struct PushChangeSurrounds { + target: Option, +} + +#[derive(Clone, Deserialize, JsonSchema, PartialEq)] +#[serde(deny_unknown_fields)] +struct PushJump { + line: bool, +} + +#[derive(Clone, Deserialize, JsonSchema, PartialEq)] +#[serde(deny_unknown_fields)] +struct PushDigraph { + first_char: Option, +} + +#[derive(Clone, Deserialize, JsonSchema, PartialEq)] +#[serde(deny_unknown_fields)] +struct PushLiteral { + prefix: Option, +} actions!( vim, [ + SwitchToNormalMode, + SwitchToInsertMode, + SwitchToReplaceMode, + SwitchToVisualMode, + SwitchToVisualLineMode, + SwitchToVisualBlockMode, + SwitchToHelixNormalMode, ClearOperators, Tab, Enter, InnerObject, - FindForward, - FindBackward, MaximizePane, OpenDefaultKeymap, ResetPaneSizes, - Sneak, - SneakBackward, + ResizePaneRight, + ResizePaneLeft, + ResizePaneUp, + ResizePaneDown, + PushChange, + PushDelete, + PushYank, + PushReplace, + PushDeleteSurrounds, + PushMark, + PushIndent, + PushOutdent, + PushAutoIndent, + PushRewrap, + PushShellCommand, + PushLowercase, + PushUppercase, + PushOppositeCase, + PushRegister, + PushRecordRegister, + PushReplayRegister, + PushReplaceWithRegister, + PushToggleComments, ] ); // in the workspace namespace so it's not filtered out when vim is disabled. -actions!(workspace, [ToggleVimMode]); +actions!(workspace, [ToggleVimMode,]); impl_actions!( vim, - [ResizePane, SwitchMode, PushOperator, Number, SelectRegister] + [ + Number, + SelectRegister, + PushObject, + PushFindForward, + PushFindBackward, + PushSneak, + PushSneakBackward, + PushAddSurrounds, + PushChangeSurrounds, + PushJump, + PushDigraph, + PushLiteral + ] ); /// Initializes the `vim` crate. 
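
With PushOperator split into the dedicated Push* actions above, and the directional workspace actions introduced later in this patch, user keymaps refer to plain action names instead of an action wrapping an operator enum. The sketch below pairs an old-style entry with its post-migration equivalent; the key choices are arbitrary, and the "around" argument is modeled on the PushObject struct above and the migrator tests earlier in the patch, not on any default keymap.

fn main() {
    // Old-style entries that the keymap migrator rewrites.
    let before = r#"
    [
        {
            "bindings": {
                "cmd-1": ["workspace::ActivatePaneInDirection", "Up"],
                "cmd-2": ["vim::PushOperator", { "Object": { "around": true } }]
            }
        }
    ]
    "#;

    // The same bindings expressed against the new actions.
    let after = r#"
    [
        {
            "bindings": {
                "cmd-1": "workspace::ActivatePaneUp",
                "cmd-2": ["vim::PushObject", { "around": true }]
            }
        }
    ]
    "#;

    println!("before:{before}\nafter:{after}");
}

This shape is also what makes the migration mechanical: each new action either takes no arguments at all or takes exactly the inner object the old operator variant carried.
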
@@ -142,7 +226,7 @@ pub fn init(cx: &mut App) { workspace.resize_pane(Axis::Vertical, desired_size - size.size.height, window, cx) }); - workspace.register_action(|workspace, action: &ResizePane, window, cx| { + workspace.register_action(|workspace, _: &ResizePaneRight, window, cx| { let count = Vim::take_count(cx).unwrap_or(1) as f32; let theme = ThemeSettings::get_global(cx); let Ok(font_id) = window.text_system().font_id(&theme.buffer_font) else { @@ -154,16 +238,36 @@ pub fn init(cx: &mut App) { else { return; }; - let height = theme.buffer_font_size() * theme.buffer_line_height.value(); + workspace.resize_pane(Axis::Horizontal, width.width * count, window, cx); + }); - let (axis, amount) = match action.0 { - ResizeIntent::Lengthen => (Axis::Vertical, height), - ResizeIntent::Shorten => (Axis::Vertical, height * -1.), - ResizeIntent::Widen => (Axis::Horizontal, width.width), - ResizeIntent::Narrow => (Axis::Horizontal, width.width * -1.), + workspace.register_action(|workspace, _: &ResizePaneLeft, window, cx| { + let count = Vim::take_count(cx).unwrap_or(1) as f32; + let theme = ThemeSettings::get_global(cx); + let Ok(font_id) = window.text_system().font_id(&theme.buffer_font) else { + return; + }; + let Ok(width) = window + .text_system() + .advance(font_id, theme.buffer_font_size(), 'm') + else { + return; }; + workspace.resize_pane(Axis::Horizontal, -width.width * count, window, cx); + }); - workspace.resize_pane(axis, amount * count, window, cx); + workspace.register_action(|workspace, _: &ResizePaneUp, window, cx| { + let count = Vim::take_count(cx).unwrap_or(1) as f32; + let theme = ThemeSettings::get_global(cx); + let height = theme.buffer_font_size() * theme.buffer_line_height.value(); + workspace.resize_pane(Axis::Vertical, height * count, window, cx); + }); + + workspace.register_action(|workspace, _: &ResizePaneDown, window, cx| { + let count = Vim::take_count(cx).unwrap_or(1) as f32; + let theme = ThemeSettings::get_global(cx); + let height = theme.buffer_font_size() * theme.buffer_line_height.value(); + workspace.resize_pane(Axis::Vertical, -height * count, window, cx); }); workspace.register_action(|workspace, _: &SearchSubmit, window, cx| { @@ -330,12 +434,212 @@ impl Vim { }); vim.update(cx, |_, cx| { - Vim::action(editor, cx, |vim, action: &SwitchMode, window, cx| { - vim.switch_mode(action.0, false, window, cx) + Vim::action(editor, cx, |vim, _: &SwitchToNormalMode, window, cx| { + vim.switch_mode(Mode::Normal, false, window, cx) + }); + + Vim::action(editor, cx, |vim, _: &SwitchToInsertMode, window, cx| { + vim.switch_mode(Mode::Insert, false, window, cx) + }); + + Vim::action(editor, cx, |vim, _: &SwitchToReplaceMode, window, cx| { + vim.switch_mode(Mode::Replace, false, window, cx) + }); + + Vim::action(editor, cx, |vim, _: &SwitchToVisualMode, window, cx| { + vim.switch_mode(Mode::Visual, false, window, cx) + }); + + Vim::action(editor, cx, |vim, _: &SwitchToVisualLineMode, window, cx| { + vim.switch_mode(Mode::VisualLine, false, window, cx) + }); + + Vim::action( + editor, + cx, + |vim, _: &SwitchToVisualBlockMode, window, cx| { + vim.switch_mode(Mode::VisualBlock, false, window, cx) + }, + ); + + Vim::action( + editor, + cx, + |vim, _: &SwitchToHelixNormalMode, window, cx| { + vim.switch_mode(Mode::HelixNormal, false, window, cx) + }, + ); + + Vim::action(editor, cx, |vim, action: &PushObject, window, cx| { + vim.push_operator( + Operator::Object { + around: action.around, + }, + window, + cx, + ) + }); + + Vim::action(editor, cx, |vim, action: &PushFindForward, 
window, cx| { + vim.push_operator( + Operator::FindForward { + before: action.before, + }, + window, + cx, + ) + }); + + Vim::action(editor, cx, |vim, action: &PushFindBackward, window, cx| { + vim.push_operator( + Operator::FindBackward { + after: action.after, + }, + window, + cx, + ) + }); + + Vim::action(editor, cx, |vim, action: &PushSneak, window, cx| { + vim.push_operator( + Operator::Sneak { + first_char: action.first_char, + }, + window, + cx, + ) + }); + + Vim::action(editor, cx, |vim, action: &PushSneakBackward, window, cx| { + vim.push_operator( + Operator::SneakBackward { + first_char: action.first_char, + }, + window, + cx, + ) + }); + + Vim::action(editor, cx, |vim, _: &PushAddSurrounds, window, cx| { + vim.push_operator(Operator::AddSurrounds { target: None }, window, cx) + }); + + Vim::action( + editor, + cx, + |vim, action: &PushChangeSurrounds, window, cx| { + vim.push_operator( + Operator::ChangeSurrounds { + target: action.target, + }, + window, + cx, + ) + }, + ); + + Vim::action(editor, cx, |vim, action: &PushJump, window, cx| { + vim.push_operator(Operator::Jump { line: action.line }, window, cx) + }); + + Vim::action(editor, cx, |vim, action: &PushDigraph, window, cx| { + vim.push_operator( + Operator::Digraph { + first_char: action.first_char, + }, + window, + cx, + ) + }); + + Vim::action(editor, cx, |vim, action: &PushLiteral, window, cx| { + vim.push_operator( + Operator::Literal { + prefix: action.prefix.clone(), + }, + window, + cx, + ) + }); + + Vim::action(editor, cx, |vim, _: &PushChange, window, cx| { + vim.push_operator(Operator::Change, window, cx) + }); + + Vim::action(editor, cx, |vim, _: &PushDelete, window, cx| { + vim.push_operator(Operator::Delete, window, cx) }); - Vim::action(editor, cx, |vim, action: &PushOperator, window, cx| { - vim.push_operator(action.0.clone(), window, cx) + Vim::action(editor, cx, |vim, _: &PushYank, window, cx| { + vim.push_operator(Operator::Yank, window, cx) + }); + + Vim::action(editor, cx, |vim, _: &PushReplace, window, cx| { + vim.push_operator(Operator::Replace, window, cx) + }); + + Vim::action(editor, cx, |vim, _: &PushDeleteSurrounds, window, cx| { + vim.push_operator(Operator::DeleteSurrounds, window, cx) + }); + + Vim::action(editor, cx, |vim, _: &PushMark, window, cx| { + vim.push_operator(Operator::Mark, window, cx) + }); + + Vim::action(editor, cx, |vim, _: &PushIndent, window, cx| { + vim.push_operator(Operator::Indent, window, cx) + }); + + Vim::action(editor, cx, |vim, _: &PushOutdent, window, cx| { + vim.push_operator(Operator::Outdent, window, cx) + }); + + Vim::action(editor, cx, |vim, _: &PushAutoIndent, window, cx| { + vim.push_operator(Operator::AutoIndent, window, cx) + }); + + Vim::action(editor, cx, |vim, _: &PushRewrap, window, cx| { + vim.push_operator(Operator::Rewrap, window, cx) + }); + + Vim::action(editor, cx, |vim, _: &PushShellCommand, window, cx| { + vim.push_operator(Operator::ShellCommand, window, cx) + }); + + Vim::action(editor, cx, |vim, _: &PushLowercase, window, cx| { + vim.push_operator(Operator::Lowercase, window, cx) + }); + + Vim::action(editor, cx, |vim, _: &PushUppercase, window, cx| { + vim.push_operator(Operator::Uppercase, window, cx) + }); + + Vim::action(editor, cx, |vim, _: &PushOppositeCase, window, cx| { + vim.push_operator(Operator::OppositeCase, window, cx) + }); + + Vim::action(editor, cx, |vim, _: &PushRegister, window, cx| { + vim.push_operator(Operator::Register, window, cx) + }); + + Vim::action(editor, cx, |vim, _: &PushRecordRegister, window, cx| { + 
vim.push_operator(Operator::RecordRegister, window, cx) + }); + + Vim::action(editor, cx, |vim, _: &PushReplayRegister, window, cx| { + vim.push_operator(Operator::ReplayRegister, window, cx) + }); + + Vim::action( + editor, + cx, + |vim, _: &PushReplaceWithRegister, window, cx| { + vim.push_operator(Operator::ReplaceWithRegister, window, cx) + }, + ); + + Vim::action(editor, cx, |vim, _: &PushToggleComments, window, cx| { + vim.push_operator(Operator::ToggleComments, window, cx) }); Vim::action(editor, cx, |vim, _: &ClearOperators, window, cx| { @@ -1275,8 +1579,8 @@ impl Vim { if self.mode == Mode::Normal { self.update_editor(window, cx, |_, editor, window, cx| { - editor.accept_inline_completion( - &editor::actions::AcceptInlineCompletion {}, + editor.accept_edit_prediction( + &editor::actions::AcceptEditPrediction {}, window, cx, ); diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 4480f4acbfc835e29c0444cf3a19f0b4d3497a68..d687f3bfd71ee65df73014ae84184b88d54df62a 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -72,7 +72,7 @@ impl DraggedSelection { } #[derive(Clone, Copy, PartialEq, Debug, Deserialize, JsonSchema)] -#[serde(rename_all = "camelCase")] +#[serde(rename_all = "snake_case")] pub enum SaveIntent { /// write all files (even if unchanged) /// prompt before overwriting on-disk changes @@ -96,13 +96,13 @@ pub enum SaveIntent { pub struct ActivateItem(pub usize); #[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] pub struct CloseActiveItem { pub save_intent: Option, } #[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] pub struct CloseInactiveItems { pub save_intent: Option, #[serde(default)] @@ -110,7 +110,7 @@ pub struct CloseInactiveItems { } #[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] pub struct CloseAllItems { pub save_intent: Option, #[serde(default)] @@ -118,34 +118,35 @@ pub struct CloseAllItems { } #[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] pub struct CloseCleanItems { #[serde(default)] pub close_pinned: bool, } #[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] pub struct CloseItemsToTheRight { #[serde(default)] pub close_pinned: bool, } #[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] pub struct CloseItemsToTheLeft { #[serde(default)] pub close_pinned: bool, } #[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] pub struct RevealInProjectPanel { #[serde(skip)] pub entry_id: Option, } #[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default)] +#[serde(deny_unknown_fields)] pub struct DeploySearch { #[serde(default)] pub replace_enabled: bool, diff --git a/crates/workspace/src/pane_group.rs b/crates/workspace/src/pane_group.rs index 7409dead1451239d07af5151fd10f6a5eedeec19..0b2cc55e3bc33794d0a115baab3b6689936f4b4f 100644 --- a/crates/workspace/src/pane_group.rs +++ b/crates/workspace/src/pane_group.rs @@ -725,6 +725,7 @@ impl PaneAxis { } #[derive(Clone, Copy, Debug, Deserialize, PartialEq, JsonSchema)] 
+#[serde(rename_all = "snake_case")] pub enum SplitDirection { Up, Down, @@ -807,14 +808,6 @@ impl SplitDirection { } } -#[derive(Clone, Copy, Debug, Deserialize, JsonSchema, PartialEq)] -pub enum ResizeIntent { - Lengthen, - Shorten, - Widen, - Narrow, -} - mod element { use std::mem; use std::{cell::RefCell, iter, rc::Rc, sync::Arc}; diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 0472d1ce9806695562fecc089293c25cbc5a7b63..2778382f3efffc8c0e0c38e8f1f8917783a31ee8 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -170,12 +170,7 @@ pub struct OpenPaths { pub struct ActivatePane(pub usize); #[derive(Clone, Deserialize, PartialEq, JsonSchema)] -pub struct ActivatePaneInDirection(pub SplitDirection); - -#[derive(Clone, Deserialize, PartialEq, JsonSchema)] -pub struct SwapPaneInDirection(pub SplitDirection); - -#[derive(Clone, Deserialize, PartialEq, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct MoveItemToPane { pub destination: usize, #[serde(default = "default_true")] @@ -183,6 +178,7 @@ pub struct MoveItemToPane { } #[derive(Clone, Deserialize, PartialEq, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct MoveItemToPaneInDirection { pub direction: SplitDirection, #[serde(default = "default_true")] @@ -190,25 +186,25 @@ pub struct MoveItemToPaneInDirection { } #[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] pub struct SaveAll { pub save_intent: Option, } #[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] pub struct Save { pub save_intent: Option, } #[derive(Clone, PartialEq, Debug, Deserialize, Default, JsonSchema)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] pub struct CloseAllItemsAndPanes { pub save_intent: Option, } #[derive(Clone, PartialEq, Debug, Deserialize, Default, JsonSchema)] -#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] pub struct CloseInactiveTabsAndPanes { pub save_intent: Option, } @@ -217,6 +213,7 @@ pub struct CloseInactiveTabsAndPanes { pub struct SendKeystrokes(pub String); #[derive(Clone, Deserialize, PartialEq, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct Reload { pub binary_path: Option, } @@ -235,7 +232,6 @@ impl_actions!( workspace, [ ActivatePane, - ActivatePaneInDirection, CloseAllItemsAndPanes, CloseInactiveTabsAndPanes, MoveItemToPane, @@ -244,11 +240,24 @@ impl_actions!( Reload, Save, SaveAll, - SwapPaneInDirection, SendKeystrokes, ] ); +actions!( + workspace, + [ + ActivatePaneLeft, + ActivatePaneRight, + ActivatePaneUp, + ActivatePaneDown, + SwapPaneLeft, + SwapPaneRight, + SwapPaneUp, + SwapPaneDown, + ] +); + #[derive(PartialEq, Eq, Debug)] pub enum CloseIntent { /// Quit the program entirely. 
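
Two serde attribute changes recur throughout this patch: #[serde(deny_unknown_fields)] on action argument structs and #[serde(rename_all = "snake_case")] on enums such as SplitDirection and SaveIntent. The self-contained sketch below shows their user-visible effect using serde_json directly; the struct is a trimmed stand-in for MoveItemToPaneInDirection (the real field uses default = "default_true" and the real type is registered through gpui's impl_actions!), and it assumes serde with the derive feature and serde_json are available.

use serde::Deserialize;

#[derive(Debug, Deserialize, PartialEq)]
#[serde(rename_all = "snake_case")]
enum SplitDirection { Up, Down, Left, Right }

#[derive(Debug, Deserialize, PartialEq)]
#[serde(deny_unknown_fields)]
struct MoveItemToPaneInDirection {
    direction: SplitDirection,
    #[serde(default)] // simplified; the patch uses default = "default_true"
    focus: bool,
}

fn main() {
    // snake_case variant names are accepted for the direction argument...
    let ok: MoveItemToPaneInDirection =
        serde_json::from_str(r#"{ "direction": "left", "focus": true }"#).unwrap();
    assert_eq!(ok.direction, SplitDirection::Left);

    // ...while an unknown (misspelled) argument key is now rejected up front.
    let err = serde_json::from_str::<MoveItemToPaneInDirection>(
        r#"{ "direction": "left", "focuss": true }"#,
    );
    assert!(err.is_err());
    println!("unknown field error: {}", err.unwrap_err());
}

The practical upshot for keymaps: a misspelled argument key now surfaces as a load error in the keymap notification rather than being silently dropped, and enum-valued arguments are written as snake_case strings ("up", "left") instead of the previous PascalCase or camelCase forms.
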
@@ -301,6 +310,7 @@ impl PartialEq for Toast { } #[derive(Debug, Default, Clone, Deserialize, PartialEq, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct OpenTerminal { pub working_directory: PathBuf, } @@ -4821,29 +4831,38 @@ impl Workspace { workspace.activate_previous_window(cx) }), ) - .on_action( - cx.listener(|workspace, action: &ActivatePaneInDirection, window, cx| { - workspace.activate_pane_in_direction(action.0, window, cx) - }), - ) + .on_action(cx.listener(|workspace, _: &ActivatePaneLeft, window, cx| { + workspace.activate_pane_in_direction(SplitDirection::Left, window, cx) + })) + .on_action(cx.listener(|workspace, _: &ActivatePaneRight, window, cx| { + workspace.activate_pane_in_direction(SplitDirection::Right, window, cx) + })) + .on_action(cx.listener(|workspace, _: &ActivatePaneUp, window, cx| { + workspace.activate_pane_in_direction(SplitDirection::Up, window, cx) + })) + .on_action(cx.listener(|workspace, _: &ActivatePaneDown, window, cx| { + workspace.activate_pane_in_direction(SplitDirection::Down, window, cx) + })) .on_action(cx.listener(|workspace, _: &ActivateNextPane, window, cx| { workspace.activate_next_pane(window, cx) })) - .on_action( - cx.listener(|workspace, action: &ActivatePaneInDirection, window, cx| { - workspace.activate_pane_in_direction(action.0, window, cx) - }), - ) .on_action(cx.listener( |workspace, action: &MoveItemToPaneInDirection, window, cx| { workspace.move_item_to_pane_in_direction(action, window, cx) }, )) - .on_action( - cx.listener(|workspace, action: &SwapPaneInDirection, _, cx| { - workspace.swap_pane_in_direction(action.0, cx) - }), - ) + .on_action(cx.listener(|workspace, _: &SwapPaneLeft, _, cx| { + workspace.swap_pane_in_direction(SplitDirection::Left, cx) + })) + .on_action(cx.listener(|workspace, _: &SwapPaneRight, _, cx| { + workspace.swap_pane_in_direction(SplitDirection::Right, cx) + })) + .on_action(cx.listener(|workspace, _: &SwapPaneUp, _, cx| { + workspace.swap_pane_in_direction(SplitDirection::Up, cx) + })) + .on_action(cx.listener(|workspace, _: &SwapPaneDown, _, cx| { + workspace.swap_pane_in_direction(SplitDirection::Down, cx) + })) .on_action(cx.listener(|this, _: &ToggleLeftDock, window, cx| { this.toggle_dock(DockPosition::Left, window, cx); })) diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 3aa578de48615c77dce725df9b3f609f3c6d3678..56a493bed89de59e73918352ad8b5e8ecf0d25ce 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -20,6 +20,7 @@ use command_palette_hooks::CommandPaletteFilter; use editor::ProposedChangesEditorToolbar; use editor::{scroll::Autoscroll, Editor, MultiBuffer}; use feature_flags::{FeatureFlagAppExt, FeatureFlagViewExt, GitUiFeatureFlag}; +use fs::Fs; use futures::{channel::mpsc, select_biased, StreamExt}; use gpui::{ actions, point, px, Action, App, AppContext as _, AsyncApp, Context, DismissEvent, Element, @@ -1144,18 +1145,34 @@ pub fn handle_keymap_file_changes( cx.update(|cx| { let load_result = KeymapFile::load(&user_keymap_content, cx); match load_result { - KeymapFileLoadResult::Success { key_bindings } => { + KeymapFileLoadResult::Success { + key_bindings, + keymap_file, + } => { reload_keymaps(cx, key_bindings); dismiss_app_notification(¬ification_id, cx); + show_keymap_migration_notification_if_needed( + keymap_file, + notification_id.clone(), + cx, + ); } KeymapFileLoadResult::SomeFailedToLoad { key_bindings, + keymap_file, error_message, } => { if !key_bindings.is_empty() { reload_keymaps(cx, key_bindings); } - 
show_keymap_file_load_error(notification_id.clone(), error_message, cx) + dismiss_app_notification(¬ification_id, cx); + if !show_keymap_migration_notification_if_needed( + keymap_file, + notification_id.clone(), + cx, + ) { + show_keymap_file_load_error(notification_id.clone(), error_message, cx); + } } KeymapFileLoadResult::JsonParseFailure { error } => { show_keymap_file_json_error(notification_id.clone(), &error, cx) @@ -1187,6 +1204,61 @@ fn show_keymap_file_json_error( }); } +fn show_keymap_migration_notification_if_needed( + keymap_file: KeymapFile, + notification_id: NotificationId, + cx: &mut App, +) -> bool { + if !KeymapFile::should_migrate_keymap(keymap_file) { + return false; + } + show_app_notification(notification_id, cx, move |cx| { + cx.new(move |_cx| { + let message = "A newer version of Zed has simplified several keymaps. Your existing keymaps may be deprecated. You can migrate them by clicking below. A backup will be created in your home directory."; + let button_text = "Backup and Migrate Keymap"; + MessageNotification::new_from_builder(move |_, _| { + gpui::div().text_xs().child(message).into_any() + }) + .primary_message(button_text) + .primary_on_click(move |_, cx| { + let fs = ::global(cx); + cx.spawn(move |weak_notification, mut cx| async move { + KeymapFile::migrate_keymap(fs).await.ok(); + weak_notification.update(&mut cx, |_, cx| { + cx.emit(DismissEvent); + }).ok(); + }).detach(); + }) + }) + }); + return true; +} + +fn show_settings_migration_notification_if_needed( + notification_id: NotificationId, + settings: serde_json::Value, + cx: &mut App, +) { + if !SettingsStore::should_migrate_settings(&settings) { + return; + } + show_app_notification(notification_id, cx, move |cx| { + cx.new(move |_cx| { + let message = "A newer version of Zed has updated some settings. Your existing settings may be deprecated. You can migrate them by clicking below. A backup will be created in your home directory."; + let button_text = "Backup and Migrate Settings"; + MessageNotification::new_from_builder(move |_, _| { + gpui::div().text_xs().child(message).into_any() + }) + .primary_message(button_text) + .primary_on_click(move |_, cx| { + let fs = ::global(cx); + cx.update_global(|store: &mut SettingsStore, _| store.migrate_settings(fs)); + cx.emit(DismissEvent); + }) + }) + }); +} + fn show_keymap_file_load_error( notification_id: NotificationId, markdown_error_message: MarkdownString, @@ -1259,12 +1331,12 @@ pub fn load_default_keymap(cx: &mut App) { } } -pub fn handle_settings_changed(error: Option, cx: &mut App) { +pub fn handle_settings_changed(result: Result, cx: &mut App) { struct SettingsParseErrorNotification; let id = NotificationId::unique::(); - match error { - Some(error) => { + match result { + Err(error) => { if let Some(InvalidSettingsError::LocalSettings { .. 
}) = error.downcast_ref::() { @@ -1283,7 +1355,10 @@ pub fn handle_settings_changed(error: Option, cx: &mut App) { }) }); } - None => dismiss_app_notification(&id, cx), + Ok(settings) => { + dismiss_app_notification(&id, cx); + show_settings_migration_notification_if_needed(id, settings, cx); + } } } @@ -3925,24 +4000,28 @@ mod tests { "vim::FindCommand" | "vim::Literal" | "vim::ResizePane" - | "vim::SwitchMode" - | "vim::PushOperator" + | "vim::PushObject" + | "vim::PushFindForward" + | "vim::PushFindBackward" + | "vim::PushSneak" + | "vim::PushSneakBackward" + | "vim::PushChangeSurrounds" + | "vim::PushJump" + | "vim::PushDigraph" + | "vim::PushLiteral" | "vim::Number" | "vim::SelectRegister" | "terminal::SendText" | "terminal::SendKeystroke" | "app_menu::OpenApplicationMenu" - | "app_menu::NavigateApplicationMenuInDirection" | "picker::ConfirmInput" | "editor::HandleInput" | "editor::FoldAtLevel" | "pane::ActivateItem" | "workspace::ActivatePane" - | "workspace::ActivatePaneInDirection" | "workspace::MoveItemToPane" | "workspace::MoveItemToPaneInDirection" | "workspace::OpenTerminal" - | "workspace::SwapPaneInDirection" | "workspace::SendKeystrokes" | "zed::OpenBrowser" | "zed::OpenZedUrl" => {} diff --git a/crates/zed/src/zed/inline_completion_registry.rs b/crates/zed/src/zed/inline_completion_registry.rs index 6e2879a6c98a15c79965b6a78d49c03708a1003e..8639ad51f9948ffbc8d77b9cbf774c3e29f6123a 100644 --- a/crates/zed/src/zed/inline_completion_registry.rs +++ b/crates/zed/src/zed/inline_completion_registry.rs @@ -4,7 +4,7 @@ use copilot::{Copilot, CopilotCompletionProvider}; use editor::{Editor, EditorMode}; use feature_flags::{FeatureFlagAppExt, PredictEditsFeatureFlag}; use gpui::{AnyWindowHandle, App, AppContext, Context, Entity, WeakEntity}; -use language::language_settings::{all_language_settings, InlineCompletionProvider}; +use language::language_settings::{all_language_settings, EditPredictionProvider}; use settings::SettingsStore; use std::{cell::RefCell, rc::Rc, sync::Arc}; use supermaven::{Supermaven, SupermavenCompletionProvider}; @@ -41,8 +41,8 @@ pub fn init(client: Arc, user_store: Entity, cx: &mut App) { editors .borrow_mut() .insert(editor_handle, window.window_handle()); - let provider = all_language_settings(None, cx).inline_completions.provider; - assign_inline_completion_provider( + let provider = all_language_settings(None, cx).edit_predictions.provider; + assign_edit_prediction_provider( editor, provider, &client, @@ -54,11 +54,11 @@ pub fn init(client: Arc, user_store: Entity, cx: &mut App) { }) .detach(); - let mut provider = all_language_settings(None, cx).inline_completions.provider; + let mut provider = all_language_settings(None, cx).edit_predictions.provider; for (editor, window) in editors.borrow().iter() { _ = window.update(cx, |_window, window, cx| { _ = editor.update(cx, |editor, cx| { - assign_inline_completion_provider( + assign_edit_prediction_provider( editor, provider, &client, @@ -79,8 +79,8 @@ pub fn init(client: Arc, user_store: Entity, cx: &mut App) { let client = client.clone(); let user_store = user_store.clone(); move |active, cx| { - let provider = all_language_settings(None, cx).inline_completions.provider; - assign_inline_completion_providers(&editors, provider, &client, user_store.clone(), cx); + let provider = all_language_settings(None, cx).edit_predictions.provider; + assign_edit_prediction_providers(&editors, provider, &client, user_store.clone(), cx); if active && !cx.is_action_available(&zeta::ClearHistory) { 
cx.on_action(clear_zeta_edit_history); } @@ -93,7 +93,7 @@ pub fn init(client: Arc, user_store: Entity, cx: &mut App) { let client = client.clone(); let user_store = user_store.clone(); move |cx| { - let new_provider = all_language_settings(None, cx).inline_completions.provider; + let new_provider = all_language_settings(None, cx).edit_predictions.provider; if new_provider != provider { let tos_accepted = user_store @@ -109,7 +109,7 @@ pub fn init(client: Arc, user_store: Entity, cx: &mut App) { ); provider = new_provider; - assign_inline_completion_providers( + assign_edit_prediction_providers( &editors, provider, &client, @@ -119,7 +119,7 @@ pub fn init(client: Arc, user_store: Entity, cx: &mut App) { if !tos_accepted { match provider { - InlineCompletionProvider::Zed => { + EditPredictionProvider::Zed => { let Some(window) = cx.active_window() else { return; }; @@ -133,9 +133,9 @@ pub fn init(client: Arc, user_store: Entity, cx: &mut App) { }) .ok(); } - InlineCompletionProvider::None - | InlineCompletionProvider::Copilot - | InlineCompletionProvider::Supermaven => {} + EditPredictionProvider::None + | EditPredictionProvider::Copilot + | EditPredictionProvider::Supermaven => {} } } } @@ -150,9 +150,9 @@ fn clear_zeta_edit_history(_: &zeta::ClearHistory, cx: &mut App) { } } -fn assign_inline_completion_providers( +fn assign_edit_prediction_providers( editors: &Rc, AnyWindowHandle>>>, - provider: InlineCompletionProvider, + provider: EditPredictionProvider, client: &Arc, user_store: Entity, cx: &mut App, @@ -160,7 +160,7 @@ fn assign_inline_completion_providers( for (editor, window) in editors.borrow().iter() { _ = window.update(cx, |_window, window, cx| { _ = editor.update(cx, |editor, cx| { - assign_inline_completion_provider( + assign_edit_prediction_provider( editor, provider, &client, @@ -187,7 +187,7 @@ fn register_backward_compatible_actions(editor: &mut Editor, cx: &mut Context| { - editor.next_inline_completion(&Default::default(), window, cx); + editor.next_edit_prediction(&Default::default(), window, cx); }, )) .detach(); @@ -197,7 +197,7 @@ fn register_backward_compatible_actions(editor: &mut Editor, cx: &mut Context| { - editor.previous_inline_completion(&Default::default(), window, cx); + editor.previous_edit_prediction(&Default::default(), window, cx); }, )) .detach(); @@ -213,9 +213,9 @@ fn register_backward_compatible_actions(editor: &mut Editor, cx: &mut Context, user_store: Entity, window: &mut Window, @@ -225,8 +225,8 @@ fn assign_inline_completion_provider( let singleton_buffer = editor.buffer().read(cx).as_singleton(); match provider { - InlineCompletionProvider::None => {} - InlineCompletionProvider::Copilot => { + EditPredictionProvider::None => {} + EditPredictionProvider::Copilot => { if let Some(copilot) = Copilot::global(cx) { if let Some(buffer) = singleton_buffer { if buffer.read(cx).file().is_some() { @@ -236,16 +236,16 @@ fn assign_inline_completion_provider( } } let provider = cx.new(|_| CopilotCompletionProvider::new(copilot)); - editor.set_inline_completion_provider(Some(provider), window, cx); + editor.set_edit_prediction_provider(Some(provider), window, cx); } } - InlineCompletionProvider::Supermaven => { + EditPredictionProvider::Supermaven => { if let Some(supermaven) = Supermaven::global(cx) { let provider = cx.new(|_| SupermavenCompletionProvider::new(supermaven)); - editor.set_inline_completion_provider(Some(provider), window, cx); + editor.set_edit_prediction_provider(Some(provider), window, cx); } } - InlineCompletionProvider::Zed => { + 
EditPredictionProvider::Zed => { if cx.has_flag::() || (cfg!(debug_assertions) && client.status().borrow().is_connected()) { @@ -280,7 +280,7 @@ fn assign_inline_completion_provider( let provider = cx.new(|_| zeta::ZetaInlineCompletionProvider::new(zeta, data_collection)); - editor.set_inline_completion_provider(Some(provider), window, cx); + editor.set_edit_prediction_provider(Some(provider), window, cx); } } } diff --git a/crates/zed/src/zed/quick_action_bar.rs b/crates/zed/src/zed/quick_action_bar.rs index 96e839d523b506a8f0fe5d3fc7ca2498512c3f2e..67161de75f3e95dbd87ccd792736dd257d2d42f6 100644 --- a/crates/zed/src/zed/quick_action_bar.rs +++ b/crates/zed/src/zed/quick_action_bar.rs @@ -301,14 +301,14 @@ impl Render for QuickActionBar { .toggleable(IconPosition::Start, inline_completion_enabled && show_inline_completions) .disabled(!inline_completion_enabled) .action(Some( - editor::actions::ToggleInlineCompletions.boxed_clone(), + editor::actions::ToggleEditPrediction.boxed_clone(), )).handler({ let editor = editor.clone(); move |window, cx| { editor .update(cx, |editor, cx| { editor.toggle_inline_completions( - &editor::actions::ToggleInlineCompletions, + &editor::actions::ToggleEditPrediction, window, cx, ); diff --git a/crates/zed_actions/src/lib.rs b/crates/zed_actions/src/lib.rs index 10075e5e12129fd83384c44451b8577c314fdfb2..2299bf58bc2c7bd537f7af5bd65e1a6eec3ab870 100644 --- a/crates/zed_actions/src/lib.rs +++ b/crates/zed_actions/src/lib.rs @@ -12,11 +12,13 @@ use serde::{Deserialize, Serialize}; pub fn init() {} #[derive(Clone, PartialEq, Deserialize, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct OpenBrowser { pub url: String, } #[derive(Clone, PartialEq, Deserialize, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct OpenZedUrl { pub url: String, } @@ -69,6 +71,7 @@ pub mod theme_selector { use serde::Deserialize; #[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema)] + #[serde(deny_unknown_fields)] pub struct Toggle { /// A list of theme names to filter the theme selector down to. pub themes_filter: Option>, @@ -83,6 +86,7 @@ pub mod icon_theme_selector { use serde::Deserialize; #[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema)] + #[serde(deny_unknown_fields)] pub struct Toggle { /// A list of icon theme names to filter the theme selector down to. pub themes_filter: Option>, @@ -99,6 +103,7 @@ pub mod assistant { actions!(assistant, [ToggleFocus, DeployPromptLibrary]); #[derive(Clone, Default, Deserialize, PartialEq, JsonSchema)] + #[serde(deny_unknown_fields)] pub struct InlineAssist { pub prompt: Option, } @@ -107,6 +112,7 @@ pub mod assistant { } #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct OpenRecent { #[serde(default)] pub create_new_window: bool, @@ -154,6 +160,7 @@ impl Spawn { /// Rerun the last task. #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)] +#[serde(deny_unknown_fields)] pub struct Rerun { /// Controls whether the task context is reevaluated prior to execution of a task. 
/// If it is not, environment variables such as ZED_COLUMN, ZED_FILE are gonna be the same as in the last execution of a task diff --git a/crates/zeta/src/init.rs b/crates/zeta/src/init.rs index a45f5a58a221ad7d504d1ab1e2ce3fa1ef6bf6b1..ee0d810e5b10ca7f55413791b0a676b70fb19463 100644 --- a/crates/zeta/src/init.rs +++ b/crates/zeta/src/init.rs @@ -5,7 +5,7 @@ use feature_flags::{ FeatureFlagAppExt as _, PredictEditsFeatureFlag, PredictEditsRateCompletionsFeatureFlag, }; use gpui::actions; -use language::language_settings::{AllLanguageSettings, InlineCompletionProvider}; +use language::language_settings::{AllLanguageSettings, EditPredictionProvider}; use settings::update_settings_file; use ui::App; use workspace::Workspace; @@ -44,7 +44,7 @@ pub fn init(cx: &mut App) { move |file, _| { file.features .get_or_insert(Default::default()) - .inline_completion_provider = Some(InlineCompletionProvider::None) + .edit_prediction_provider = Some(EditPredictionProvider::None) }, ); diff --git a/crates/zeta/src/onboarding_banner.rs b/crates/zeta/src/onboarding_banner.rs index 713b84604abc7176342c0f6a9465824994df8691..4b5ef95c61495c369fd4807f9bf1a5afac8c094d 100644 --- a/crates/zeta/src/onboarding_banner.rs +++ b/crates/zeta/src/onboarding_banner.rs @@ -1,7 +1,7 @@ use chrono::Utc; use feature_flags::{FeatureFlagAppExt as _, PredictEditsFeatureFlag}; use gpui::Subscription; -use language::language_settings::{all_language_settings, InlineCompletionProvider}; +use language::language_settings::{all_language_settings, EditPredictionProvider}; use settings::SettingsStore; use ui::{prelude::*, ButtonLike, Tooltip}; use util::ResultExt; @@ -11,7 +11,7 @@ use crate::onboarding_event; /// Prompts the user to try Zed's Edit Prediction feature pub struct ZedPredictBanner { dismissed: bool, - provider: InlineCompletionProvider, + provider: EditPredictionProvider, _subscription: Subscription, } @@ -19,7 +19,7 @@ impl ZedPredictBanner { pub fn new(cx: &mut Context) -> Self { Self { dismissed: get_dismissed(), - provider: all_language_settings(None, cx).inline_completions.provider, + provider: all_language_settings(None, cx).edit_predictions.provider, _subscription: cx.observe_global::(Self::handle_settings_changed), } } @@ -29,7 +29,7 @@ impl ZedPredictBanner { } fn handle_settings_changed(&mut self, cx: &mut Context) { - let new_provider = all_language_settings(None, cx).inline_completions.provider; + let new_provider = all_language_settings(None, cx).edit_predictions.provider; if new_provider == self.provider { return; diff --git a/crates/zeta/src/onboarding_modal.rs b/crates/zeta/src/onboarding_modal.rs index 41fc289a5f786ab9fffe1209da335aee27e30726..7ba7f4b50bfa75d0d9ff38ac81ce2978350eaf4e 100644 --- a/crates/zeta/src/onboarding_modal.rs +++ b/crates/zeta/src/onboarding_modal.rs @@ -9,7 +9,7 @@ use gpui::{ ease_in_out, svg, Animation, AnimationExt as _, ClickEvent, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, MouseDownEvent, Render, }; -use language::language_settings::{AllLanguageSettings, InlineCompletionProvider}; +use language::language_settings::{AllLanguageSettings, EditPredictionProvider}; use settings::{update_settings_file, Settings}; use ui::{prelude::*, Checkbox, TintColor}; use util::ResultExt; @@ -105,7 +105,7 @@ impl ZedPredictModal { update_settings_file::(this.fs.clone(), cx, move |file, _| { file.features .get_or_insert(Default::default()) - .inline_completion_provider = Some(InlineCompletionProvider::Zed); + .edit_prediction_provider = Some(EditPredictionProvider::Zed); }); 
cx.emit(DismissEvent); diff --git a/crates/zeta/src/zeta.rs b/crates/zeta/src/zeta.rs index a2be4811fa6cf70ad8197b944b54f30c3b19c866..7741e52f3102be22d6452c21835e215e9d037a60 100644 --- a/crates/zeta/src/zeta.rs +++ b/crates/zeta/src/zeta.rs @@ -1500,7 +1500,7 @@ impl ZetaInlineCompletionProvider { } } -impl inline_completion::InlineCompletionProvider for ZetaInlineCompletionProvider { +impl inline_completion::EditPredictionProvider for ZetaInlineCompletionProvider { fn name() -> &'static str { "zed-predict" } diff --git a/docs/src/completions.md b/docs/src/completions.md index f7f0520092f0a7eee34a6004c1acce5445fe77a2..80c2d150c74228aab5c06a4524133b00b756fddc 100644 --- a/docs/src/completions.md +++ b/docs/src/completions.md @@ -29,7 +29,7 @@ To use GitHub Copilot (enabled by default), add the following to your `settings. ```json { "features": { - "inline_completion_provider": "copilot" + "edit_prediction_provider": "copilot" } } ``` @@ -43,7 +43,7 @@ To use Supermaven, add the following to your `settings.json`: ```json { "features": { - "inline_completion_provider": "supermaven" + "edit_prediction_provider": "supermaven" } } ``` @@ -56,23 +56,23 @@ Once you have configured an Edit Prediction provider, you can start using edit p There are a number of actions/shortcuts available to interact with edit predictions: -- `editor: accept inline completion` (`tab`): To accept the current edit prediction -- `editor: accept partial inline completion` (`ctrl-cmd-right`): To accept the current edit prediction up to the next word boundary -- `editor: show inline completion` (`alt-tab`): Trigger an edit prediction request manually -- `editor: next inline completion` (`alt-tab`): To cycle to the next edit prediction -- `editor: previous inline completion` (`alt-shift-tab`): To cycle to the previous edit prediction +- `editor: accept edit prediction` (`tab`): To accept the current edit prediction +- `editor: accept partial edit prediction` (`ctrl-cmd-right`): To accept the current edit prediction up to the next word boundary +- `editor: show edit prediction` (`alt-tab`): Trigger an edit prediction request manually +- `editor: next edit prediction` (`alt-tab`): To cycle to the next edit prediction +- `editor: previous edit prediction` (`alt-shift-tab`): To cycle to the previous edit prediction -### Disabling Inline-Completions +### Disabling Edit Prediction -To disable completions that appear automatically as you type, add the following to your `settings.json`: +To disable predictions that appear automatically as you type, add the following to your `settings.json`: ```json { - "show_inline_completions": false + "show_edit_predictions": false } ``` -You can trigger edit predictions manually by executing `editor: show inline completion` (`alt-tab`). +You can trigger edit predictions manually by executing `editor: show edit prediction` (`alt-tab`). 
You can also add this as a language-specific setting in your `settings.json` to disable edit predictions for a specific language: @@ -80,7 +80,7 @@ You can also add this as a language-specific setting in your `settings.json` to { "language": { "python": { - "show_inline_completions": false + "show_edit_predictions": false } } } diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 07374cc25c58705b8bf0d5abc6d85108c8c55df6..779c9e2a5930eceba83d79716ebc481e26963390 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -378,11 +378,11 @@ There are two options to choose from: ## Edit Predictions - Description: Settings for edit predictions. -- Setting: `inline_completions` +- Setting: `edit_predictions` - Default: ```json - "inline_completions": { + "edit_predictions": { "disabled_globs": [ "**/.env*", "**/*.pem", @@ -409,7 +409,7 @@ List of `string` values ## Edit Predictions Disabled in - Description: A list of language scopes in which edit predictions should be disabled. -- Setting: `inline_completions_disabled_in` +- Setting: `edit_predictions_disabled_in` - Default: `[]` **Options** @@ -434,7 +434,7 @@ List of `string` values { "languages": { "Go": { - "inline_completions_disabled_in": ["comment", "string"] + "edit_predictions_disabled_in": ["comment", "string"] } } } @@ -1478,7 +1478,7 @@ The following settings can be overridden for each specific language: - [`hard_tabs`](#hard-tabs) - [`preferred_line_length`](#preferred-line-length) - [`remove_trailing_whitespace_on_save`](#remove-trailing-whitespace-on-save) -- [`show_inline_completions`](#show-inline-completions) +- [`show_edit_predictions`](#show-edit-predictions) - [`show_whitespaces`](#show-whitespaces) - [`soft_wrap`](#soft-wrap) - [`tab_size`](#tab-size) @@ -1654,8 +1654,8 @@ Or to set a `socks5` proxy: ## Show Edit Predictions -- Description: Whether to show edit predictions as you type or manually by triggering `editor::ShowInlineCompletion`. -- Setting: `show_inline_completions` +- Description: Whether to show edit predictions as you type or manually by triggering `editor::ShowEditPrediction`. +- Setting: `show_edit_predictions` - Default: `true` **Options** diff --git a/docs/src/key-bindings.md b/docs/src/key-bindings.md index 4d0a33ce5507f8e46ffd1603af352a6e92c7faa8..7482c8563cc4114ec73ea3623f43b32994247403 100644 --- a/docs/src/key-bindings.md +++ b/docs/src/key-bindings.md @@ -119,7 +119,7 @@ command palette, by looking in the default keymaps for or [Linux](https://github.com/zed-industries/zed/blob/main/assets/keymaps/default-linux.json), or by using Zed's autocomplete in your keymap file. -Most actions do not require any arguments, and so you can bind them as strings: `"ctrl-a": "language_selector::Toggle"`. Some require a single argument, and must be bound as an array: `"ctrl-a": ["workspace::ActivatePaneInDirection", "down"]`. Some actions require multiple arguments, and are bound as an array of a string and an object: `"ctrl-a": ["pane::DeploySearch", { "replace_enabled": true }]`. +Most actions do not require any arguments, and so you can bind them as strings: `"ctrl-a": "language_selector::Toggle"`. Some require a single argument, and must be bound as an array: `"cmd-1": ["workspace::ActivatePane", 0]`. Some actions require multiple arguments, and are bound as an array of a string and an object: `"ctrl-a": ["pane::DeploySearch", { "replace_enabled": true }]`. 
### Precedence diff --git a/docs/src/vim.md b/docs/src/vim.md index 25e44dda3f0efcc1487f562da0f54b715f05afda..0555cd817e84c134c07ed158a8a9630800ed08f9 100644 --- a/docs/src/vim.md +++ b/docs/src/vim.md @@ -368,10 +368,10 @@ But you cannot use the same shortcuts to move between all the editor docks (the { "context": "Dock", "bindings": { - "ctrl-w h": ["workspace::ActivatePaneInDirection", "Left"], - "ctrl-w l": ["workspace::ActivatePaneInDirection", "Right"], - "ctrl-w k": ["workspace::ActivatePaneInDirection", "Up"], - "ctrl-w j": ["workspace::ActivatePaneInDirection", "Down"] + "ctrl-w h": "workspace::ActivatePaneLeft", + "ctrl-w l": "workspace::ActivatePaneRight", + "ctrl-w k": "workspace::ActivatePaneUp", + "ctrl-w j": "workspace::ActivatePaneDown" // ... or other keybindings } } @@ -399,12 +399,7 @@ Vim mode comes with shortcuts to surround the selection in normal mode (`ys`), b { "context": "vim_mode == visual", "bindings": { - "shift-s": [ - "vim::PushOperator", - { - "AddSurrounds": {} - } - ] + "shift-s": ["vim::PushAddSurrounds", {}] } } ``` @@ -416,8 +411,8 @@ The [Sneak motion](https://github.com/justinmk/vim-sneak) feature allows for qui { "context": "vim_mode == normal || vim_mode == visual", "bindings": { - "s": ["vim::PushOperator", { "Sneak": {} }], - "S": ["vim::PushOperator", { "SneakBackward": {} }] + "s": ["vim::PushSneak", {}], + "S": ["vim::PushSneakBackward", {}] } } ] From 07929229aeec4b0c911d168f05a6958be006f58d Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 7 Feb 2025 11:10:07 -0500 Subject: [PATCH 119/130] migrator: Sort dependencies in `Cargo.toml` (#24455) This PR sorts the dependencies in the `Cargo.toml` for the `migrator` crate. Release Notes: - N/A --- crates/migrator/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/migrator/Cargo.toml b/crates/migrator/Cargo.toml index f5be671960f5530eb475da583067fc298f4c4f01..7a65db5b9959941c5c916e67a67379ae7df58f9c 100644 --- a/crates/migrator/Cargo.toml +++ b/crates/migrator/Cargo.toml @@ -14,9 +14,9 @@ doctest = false [dependencies] collections.workspace = true +convert_case.workspace = true tree-sitter-json.workspace = true tree-sitter.workspace = true -convert_case.workspace = true [dev-dependencies] pretty_assertions.workspace = true From c7cd5b019b0497cab257f5095e2f4e18870acdd0 Mon Sep 17 00:00:00 2001 From: Sanjeev Shrestha Date: Fri, 7 Feb 2025 21:59:26 +0545 Subject: [PATCH 120/130] file_icons: Use separate icon key for JSON files (#24432) This PR updates the file icon mappings for JSON (`.json`) file map to the`json` key. Also, updates `.json` icon from `storage` to `code`. This allows for the JSON file icons to be replaced in icon themes. Release Notes: - Icon themes: Added the ability to change the file icon for JSON (`.json`) files. 
--------- Co-authored-by: Marshall Bowers --- assets/icons/file_icons/file_types.json | 2 +- crates/theme/src/icon_theme.rs | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/assets/icons/file_icons/file_types.json b/assets/icons/file_icons/file_types.json index 9580c8fd4ec5b7820ee9661f1b2ec0257b194f2f..f36ef2737ff4d246b8b73160e93a88a9902787ca 100644 --- a/assets/icons/file_icons/file_types.json +++ b/assets/icons/file_icons/file_types.json @@ -101,7 +101,7 @@ "jpeg": "image", "jpg": "image", "js": "javascript", - "json": "storage", + "json": "json", "jsonc": "storage", "jsx": "react", "jxl": "image", diff --git a/crates/theme/src/icon_theme.rs b/crates/theme/src/icon_theme.rs index 1a99e42ed817aafd606a2efaa95baedb3697dfa9..465391b26c8bb595dde6b6ae28ada5d55529e4fd 100644 --- a/crates/theme/src/icon_theme.rs +++ b/crates/theme/src/icon_theme.rs @@ -88,6 +88,7 @@ const FILE_ICONS: &[(&str, &str)] = &[ ("image", "icons/file_icons/image.svg"), ("java", "icons/file_icons/java.svg"), ("javascript", "icons/file_icons/javascript.svg"), + ("json", "icons/file_icons/code.svg"), ("julia", "icons/file_icons/julia.svg"), ("kotlin", "icons/file_icons/kotlin.svg"), ("lock", "icons/file_icons/lock.svg"), From 8114d17cba142a8e32dc72de300c06c082a9549f Mon Sep 17 00:00:00 2001 From: IaVashik <105387234+IaVashik@users.noreply.github.com> Date: Fri, 7 Feb 2025 19:18:18 +0300 Subject: [PATCH 121/130] google_ai: Add support for Gemini 2.0 models (#24448) Add support for the newly released Gemini 2.0 models from Google announced this new family of models earlier this week (2025-02-05). Release Notes: - Added support for Google's new Gemini 2.0 models. --- crates/google_ai/src/google_ai.rs | 19 +++++++++++++++++-- .../language_model/src/model/cloud_model.rs | 3 +++ 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/crates/google_ai/src/google_ai.rs b/crates/google_ai/src/google_ai.rs index b40c5714b87fbb4e445c3503ab5587fd815f1714..ace7ea22c4589d3b5dc5af6490d4a889883cfe43 100644 --- a/crates/google_ai/src/google_ai.rs +++ b/crates/google_ai/src/google_ai.rs @@ -305,8 +305,14 @@ pub enum Model { Gemini15Pro, #[serde(rename = "gemini-1.5-flash")] Gemini15Flash, - #[serde(rename = "gemini-2.0-flash-exp")] + #[serde(rename = "gemini-2.0-pro-exp")] + Gemini20Pro, + #[serde(rename = "gemini-2.0-flash")] Gemini20Flash, + #[serde(rename = "gemini-2.0-flash-thinking-exp")] + Gemini20FlashThinking, + #[serde(rename = "gemini-2.0-flash-lite-preview")] + Gemini20FlashLite, #[serde(rename = "custom")] Custom { name: String, @@ -321,7 +327,10 @@ impl Model { match self { Model::Gemini15Pro => "gemini-1.5-pro", Model::Gemini15Flash => "gemini-1.5-flash", - Model::Gemini20Flash => "gemini-2.0-flash-exp", + Model::Gemini20Pro => "gemini-2.0-pro-exp", + Model::Gemini20Flash => "gemini-2.0-flash", + Model::Gemini20FlashThinking => "gemini-2.0-flash-thinking-exp", + Model::Gemini20FlashLite => "gemini-2.0-flash-lite-preview", Model::Custom { name, .. } => name, } } @@ -330,7 +339,10 @@ impl Model { match self { Model::Gemini15Pro => "Gemini 1.5 Pro", Model::Gemini15Flash => "Gemini 1.5 Flash", + Model::Gemini20Pro => "Gemini 2.0 Pro", Model::Gemini20Flash => "Gemini 2.0 Flash", + Model::Gemini20FlashThinking => "Gemini 2.0 Flash Thinking", + Model::Gemini20FlashLite => "Gemini 2.0 Flash Lite", Self::Custom { name, display_name, .. 
} => display_name.as_ref().unwrap_or(name), @@ -341,7 +353,10 @@ impl Model { match self { Model::Gemini15Pro => 2_000_000, Model::Gemini15Flash => 1_000_000, + Model::Gemini20Pro => 2_000_000, Model::Gemini20Flash => 1_000_000, + Model::Gemini20FlashThinking => 1_000_000, + Model::Gemini20FlashLite => 1_000_000, Model::Custom { max_tokens, .. } => *max_tokens, } } diff --git a/crates/language_model/src/model/cloud_model.rs b/crates/language_model/src/model/cloud_model.rs index ec51f1f073654678d0218eb54c51d0bcc7e1aca1..ead33e7b9b34fa8ddaf59c6b51d20081d28d3045 100644 --- a/crates/language_model/src/model/cloud_model.rs +++ b/crates/language_model/src/model/cloud_model.rs @@ -90,7 +90,10 @@ impl CloudModel { Self::Google(model) => match model { google_ai::Model::Gemini15Pro | google_ai::Model::Gemini15Flash + | google_ai::Model::Gemini20Pro | google_ai::Model::Gemini20Flash + | google_ai::Model::Gemini20FlashThinking + | google_ai::Model::Gemini20FlashLite | google_ai::Model::Custom { .. } => { LanguageModelAvailability::RequiresPlan(Plan::ZedPro) } From c484374b2fb74cec46928c9d9b204466cee1ef3a Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Fri, 7 Feb 2025 11:20:27 -0500 Subject: [PATCH 122/130] Make OpenKeyContextView open to the right (#24452) Match the behavior of OpenSyntaxTreeView and OpenLanguageServerLogs. Release Notes: - Make `debug::OpenKeyContextView` automatically open in a split to the right --- crates/language_tools/src/key_context_view.rs | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/crates/language_tools/src/key_context_view.rs b/crates/language_tools/src/key_context_view.rs index 7961c894fb680b609bb540cdca8c8f153efbf8f4..c34663e3edd1a98bc1a5f8ce64eeda2984ff98c8 100644 --- a/crates/language_tools/src/key_context_view.rs +++ b/crates/language_tools/src/key_context_view.rs @@ -11,8 +11,7 @@ use ui::{ Window, }; use ui::{Button, ButtonStyle}; -use workspace::Item; -use workspace::Workspace; +use workspace::{Item, SplitDirection, Workspace}; actions!(debug, [OpenKeyContextView]); @@ -20,7 +19,12 @@ pub fn init(cx: &mut App) { cx.observe_new(|workspace: &mut Workspace, _, _| { workspace.register_action(|workspace, _: &OpenKeyContextView, window, cx| { let key_context_view = cx.new(|cx| KeyContextView::new(window, cx)); - workspace.add_item_to_active_pane(Box::new(key_context_view), None, true, window, cx) + workspace.split_item( + SplitDirection::Right, + Box::new(key_context_view), + window, + cx, + ) }); }) .detach(); From 2d57e43e34b71f25d140c84abc62500e7bb61a26 Mon Sep 17 00:00:00 2001 From: Wilhelm Klopp Date: Fri, 7 Feb 2025 17:29:45 +0100 Subject: [PATCH 123/130] docs: Emphasize that Rust must be installed via rustup (#24447) Just tried installing a dev extension and kept getting "error: failed to install dev extension". Turns out this was because I had Rust installed via homebrew and not rustup. Once I switched to rustup, it worked perfectly fine.
Release Notes: - N/A --- docs/src/extensions/developing-extensions.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/src/extensions/developing-extensions.md b/docs/src/extensions/developing-extensions.md index 29906a7ae43c7a4a664da7b16e864225677a61b8..75d094c9b4af5b157ce63fe1c3b852814afc5abb 100644 --- a/docs/src/extensions/developing-extensions.md +++ b/docs/src/extensions/developing-extensions.md @@ -14,6 +14,8 @@ Extensions can add the following capabilities to Zed: Before starting to develop an extension for Zed, be sure to [install Rust via rustup](https://www.rust-lang.org/tools/install). +> Rust must be installed via rustup. If you have Rust installed via homebrew or otherwise, installing dev extensions will not work. + When developing an extension, you can use it in Zed without needing to publish it by installing it as a _dev extension_. From the extensions page, click the `Install Dev Extension` button and select the directory containing your extension. From 144487bf1ac3b21e16f5ae03b715a3e13f88f1d7 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Fri, 7 Feb 2025 17:30:53 +0100 Subject: [PATCH 124/130] theme: Implement icon theme reloading (#24449) Closes #24353 This PR implements icon theme reload to ensure file icons are properly updated whenever an icon theme extension is upgraded or uninstalled. Currently, on both upgrade and uninstall of an icon theme extension, the file icons from the previously installed version will stay visible and will not be updated, as shown in the linked issue. With this change, file icons will properly be updated on extension upgrade or reinstall. The code is primarily a copy of the logic for reloading the current color theme, adapted to work for icon themes. Happy for any feedback! Release Notes: - Fixed file icons not being properly updated upon icon theme upgrade or uninstall.
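For readers skimming the diff below, here is a minimal, self-contained sketch of the reload path this commit adds. The types and the default-theme name are stand-ins rather than gpui's actual `ThemeSettings`/`ThemeRegistry` API; the only point is the fallback behavior: when the active icon theme is no longer registered (because its extension was upgraded or uninstalled), fall back to the default icon theme before switching, so stale file icons get replaced.

```rust
use std::collections::HashMap;

// Placeholder, not Zed's actual constant value.
const DEFAULT_ICON_THEME_NAME: &str = "default";

struct IconTheme;

struct IconThemeRegistry {
    themes: HashMap<String, IconTheme>,
}

impl IconThemeRegistry {
    fn get_icon_theme(&self, name: &str) -> Option<&IconTheme> {
        self.themes.get(name)
    }
}

// Mirrors the shape of the new reload path: if the active icon theme cannot
// be found in the registry, fall back to the default icon theme name.
fn reload_current_icon_theme(active_name: &str, registry: &IconThemeRegistry) -> String {
    let name = if registry.get_icon_theme(active_name).is_some() {
        active_name
    } else {
        DEFAULT_ICON_THEME_NAME
    };
    // The real implementation then switches to this theme and refreshes all
    // windows so file icons update immediately.
    name.to_string()
}

fn main() {
    let registry = IconThemeRegistry { themes: HashMap::new() };
    // The previously active theme's extension was uninstalled, so we fall
    // back to the default icon theme.
    assert_eq!(
        reload_current_icon_theme("Uninstalled Icon Theme", &registry),
        DEFAULT_ICON_THEME_NAME
    );
}
```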
--- crates/extension/src/extension_host_proxy.rs | 10 +++++ crates/extension_host/src/extension_host.rs | 1 + crates/theme/src/settings.rs | 42 +++++++++++++++++++ crates/theme_extension/src/theme_extension.rs | 4 ++ 4 files changed, 57 insertions(+) diff --git a/crates/extension/src/extension_host_proxy.rs b/crates/extension/src/extension_host_proxy.rs index 25d4a4e539cc1d18f49e73a7dc95c5953798ec7c..a692795e87f2f07d918b424cf0bb11cb71f67a78 100644 --- a/crates/extension/src/extension_host_proxy.rs +++ b/crates/extension/src/extension_host_proxy.rs @@ -118,6 +118,8 @@ pub trait ExtensionThemeProxy: Send + Sync + 'static { icons_root_dir: PathBuf, fs: Arc, ) -> Task>; + + fn reload_current_icon_theme(&self, cx: &mut App); } impl ExtensionThemeProxy for ExtensionHostProxy { @@ -185,6 +187,14 @@ impl ExtensionThemeProxy for ExtensionHostProxy { proxy.load_icon_theme(icon_theme_path, icons_root_dir, fs) } + + fn reload_current_icon_theme(&self, cx: &mut App) { + let Some(proxy) = self.theme_proxy.read().clone() else { + return; + }; + + proxy.reload_current_icon_theme(cx) + } } pub trait ExtensionGrammarProxy: Send + Sync + 'static { diff --git a/crates/extension_host/src/extension_host.rs b/crates/extension_host/src/extension_host.rs index 69c26d44a46b61ed41906498337d0899670cd1fa..e1e866705ac7447367df376c60ac1b03cd078dc1 100644 --- a/crates/extension_host/src/extension_host.rs +++ b/crates/extension_host/src/extension_host.rs @@ -1292,6 +1292,7 @@ impl ExtensionStore { this.wasm_extensions.extend(wasm_extensions); this.proxy.reload_current_theme(cx); + this.proxy.reload_current_icon_theme(cx); }) .ok(); }) diff --git a/crates/theme/src/settings.rs b/crates/theme/src/settings.rs index 2032b98983906da6fd0cf9ca06e9845a5f9a5cb2..f44e45d549dd4b0c7692c3c027047109884a9781 100644 --- a/crates/theme/src/settings.rs +++ b/crates/theme/src/settings.rs @@ -164,6 +164,30 @@ impl ThemeSettings { } } } + + /// Reloads the current icon theme. + /// + /// Reads the [`ThemeSettings`] to know which icon theme should be loaded. + pub fn reload_current_icon_theme(cx: &mut App) { + let mut theme_settings = ThemeSettings::get_global(cx).clone(); + + let active_theme = theme_settings.active_icon_theme.clone(); + let mut icon_theme_name = active_theme.name.as_ref(); + + // If the selected theme doesn't exist, fall back to the default theme. + let theme_registry = ThemeRegistry::global(cx); + if theme_registry + .get_icon_theme(icon_theme_name) + .ok() + .is_none() + { + icon_theme_name = DEFAULT_ICON_THEME_NAME; + }; + + if let Some(_theme) = theme_settings.switch_icon_theme(icon_theme_name, cx) { + ThemeSettings::override_global(theme_settings, cx); + } + } } /// The appearance of the system. @@ -487,6 +511,24 @@ impl ThemeSettings { self.active_theme = Arc::new(base_theme); } } + + /// Switches to the icon theme with the given name, if it exists. + /// + /// Returns a `Some` containing the new icon theme if it was successful. + /// Returns `None` otherwise. + pub fn switch_icon_theme(&mut self, icon_theme: &str, cx: &mut App) -> Option> { + let themes = ThemeRegistry::default_global(cx); + + let mut new_icon_theme = None; + + if let Some(icon_theme) = themes.get_icon_theme(icon_theme).log_err() { + self.active_icon_theme = icon_theme.clone(); + new_icon_theme = Some(icon_theme); + cx.refresh_windows(); + } + + new_icon_theme + } } // TODO: Make private, change usages to use `get_ui_font_size` instead. 
diff --git a/crates/theme_extension/src/theme_extension.rs b/crates/theme_extension/src/theme_extension.rs index bce271840c962598520aa05fe9eea5a91167b854..83903da6c69147af52f9d714c497f21674ed1adb 100644 --- a/crates/theme_extension/src/theme_extension.rs +++ b/crates/theme_extension/src/theme_extension.rs @@ -77,4 +77,8 @@ impl ExtensionThemeProxy for ThemeRegistryProxy { .await }) } + + fn reload_current_icon_theme(&self, cx: &mut App) { + ThemeSettings::reload_current_icon_theme(cx) + } } From a6e15dda4b034c5f8c49fbbad6c16794200f7be8 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 7 Feb 2025 09:57:37 -0700 Subject: [PATCH 125/130] Make it a bit clearer when people are running dev builds (#24457) Release Notes: - Include an indicator in About/CopySystemSpecs when running in debug mode --- crates/feedback/src/system_specs.rs | 9 +++++++-- crates/zed/src/zed.rs | 7 ++++++- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/crates/feedback/src/system_specs.rs b/crates/feedback/src/system_specs.rs index 38af3b633d14b2d2b99520b71dd485987d2ef054..0bd16d22addf4530b1e5f8138fdc06a77d955967 100644 --- a/crates/feedback/src/system_specs.rs +++ b/crates/feedback/src/system_specs.rs @@ -64,12 +64,17 @@ impl Display for SystemSpecs { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let os_information = format!("OS: {} {}", self.os_name, self.os_version); let app_version_information = format!( - "Zed: v{} ({})", + "Zed: v{} ({}) {}", self.app_version, match &self.commit_sha { Some(commit_sha) => format!("{} {}", self.release_channel, commit_sha), None => self.release_channel.to_string(), - } + }, + if cfg!(debug_assertions) { + "(Taylor's Version)" + } else { + "" + }, ); let system_specs = [ app_version_information, diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 56a493bed89de59e73918352ad8b5e8ecf0d25ce..c1244c42992341b12bf5024e8cc514ec91820bc5 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -881,7 +881,12 @@ fn about( ) { let release_channel = ReleaseChannel::global(cx).display_name(); let version = env!("CARGO_PKG_VERSION"); - let message = format!("{release_channel} {version}"); + let debug = if cfg!(debug_assertions) { + "(debug)" + } else { + "" + }; + let message = format!("{release_channel} {version} {debug}"); let detail = AppCommitSha::try_global(cx).map(|sha| sha.0.clone()); let prompt = window.prompt(PromptLevel::Info, &message, detail.as_deref(), &["OK"], cx); From f0565b4e2e739bdac77771da49eaacbc2f549d88 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Fri, 7 Feb 2025 18:02:14 +0100 Subject: [PATCH 126/130] edit prediction: Do not show icon as disabled when there is no buffer open (#24458) Release Notes: - N/A --- .../src/inline_completion_button.rs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/crates/inline_completion_button/src/inline_completion_button.rs b/crates/inline_completion_button/src/inline_completion_button.rs index 5ba3d527722e52a9d3006e240fa5a705a5927928..a28cfe99c1206acd16c8a262d00ff32f57d6f53d 100644 --- a/crates/inline_completion_button/src/inline_completion_button.rs +++ b/crates/inline_completion_button/src/inline_completion_button.rs @@ -229,7 +229,7 @@ impl Render for InlineCompletionButton { return div(); } - let enabled = self.editor_enabled.unwrap_or(false); + let enabled = self.editor_enabled.unwrap_or(true); let zeta_icon = if enabled { IconName::ZedPredict @@ -469,9 +469,7 @@ impl InlineCompletionButton { }), ); - if self.file.as_ref().map_or(false, |file| { - 
!all_language_settings(Some(file), cx).inline_completions_enabled_for_path(file.path()) - }) { + if !self.editor_enabled.unwrap_or(true) { menu = menu.item( ContextMenuEntry::new("This file is excluded.") .disabled(true) From 44c6a54f95628381ede4a7299247342a48617870 Mon Sep 17 00:00:00 2001 From: smit <0xtimsb@gmail.com> Date: Fri, 7 Feb 2025 22:54:57 +0530 Subject: [PATCH 127/130] pane: Improve close active item to better handle pinned tabs (#23488) Closes #22247 - [x] Do not close pinned tab on keyboard shortcuts like `ctrl+w` or `alt+f4` - [x] Close pinned tab on context menu action, menu bar action, or vim bang - [x] While closing pinned tab via shortcut (where it won't close), instead activate any other non-pinned tab in same pane - [x] Else, if any other pane contains non-pinned tab, activate that - [x] Tests Co-authored-by: uncenter <47499684+uncenter@users.noreply.github.com> Release Notes: - Pinned tab now stay open when using close shortcuts, auto focuses to any other non-pinned tab instead. --- assets/keymaps/default-linux.json | 4 +- assets/keymaps/default-macos.json | 2 +- crates/file_finder/src/file_finder_tests.rs | 10 +- crates/terminal_view/src/terminal_view.rs | 8 +- crates/vim/src/command.rs | 11 + crates/workspace/src/pane.rs | 256 ++++++++++++++++++-- crates/workspace/src/workspace.rs | 28 ++- crates/zed/src/zed.rs | 10 +- crates/zed/src/zed/app_menus.rs | 5 +- 9 files changed, 298 insertions(+), 36 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 217af10cc6a61e0b5f0718eda3b73eb4595b1408..42c879a534722699147a59d768791b07a8f66b5d 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -274,8 +274,8 @@ "ctrl-pagedown": "pane::ActivateNextItem", "ctrl-shift-pageup": "pane::SwapItemLeft", "ctrl-shift-pagedown": "pane::SwapItemRight", - "ctrl-f4": "pane::CloseActiveItem", - "ctrl-w": "pane::CloseActiveItem", + "ctrl-f4": ["pane::CloseActiveItem", { "close_pinned": false }], + "ctrl-w": ["pane::CloseActiveItem", { "close_pinned": false }], "alt-ctrl-t": ["pane::CloseInactiveItems", { "close_pinned": false }], "alt-ctrl-shift-w": "workspace::CloseInactiveTabsAndPanes", "ctrl-k e": ["pane::CloseItemsToTheLeft", { "close_pinned": false }], diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index fe3d7c413ea00988d6653364fad9e0edea33efb4..7f852ee4f76797dad7e56fb7d65278b45646e6b5 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -349,7 +349,7 @@ "cmd-}": "pane::ActivateNextItem", "ctrl-shift-pageup": "pane::SwapItemLeft", "ctrl-shift-pagedown": "pane::SwapItemRight", - "cmd-w": "pane::CloseActiveItem", + "cmd-w": ["pane::CloseActiveItem", { "close_pinned": false }], "alt-cmd-t": ["pane::CloseInactiveItems", { "close_pinned": false }], "ctrl-alt-cmd-w": "workspace::CloseInactiveTabsAndPanes", "cmd-k e": ["pane::CloseItemsToTheLeft", { "close_pinned": false }], diff --git a/crates/file_finder/src/file_finder_tests.rs b/crates/file_finder/src/file_finder_tests.rs index 8555da775e693b9c48068668b040ae84ebb30156..228a0799ee719ed3ca20cfe602cdfd4bcf03af2d 100644 --- a/crates/file_finder/src/file_finder_tests.rs +++ b/crates/file_finder/src/file_finder_tests.rs @@ -817,7 +817,10 @@ async fn test_external_files_history(cx: &mut gpui::TestAppContext) { .as_u64() as usize, ) }); - cx.dispatch_action(workspace::CloseActiveItem { save_intent: None }); + cx.dispatch_action(workspace::CloseActiveItem { + save_intent: None, + close_pinned: 
false, + }); let initial_history_items = open_close_queried_buffer("sec", 1, "second.rs", &workspace, cx).await; @@ -2000,7 +2003,10 @@ async fn open_close_queried_buffer( ) .await; - cx.dispatch_action(workspace::CloseActiveItem { save_intent: None }); + cx.dispatch_action(workspace::CloseActiveItem { + save_intent: None, + close_pinned: false, + }); history_items } diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index fb63c6f966958935958ffa62e66a72956a766f20..6d2540940ad595532b4bef32df1b04744967b3f4 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -257,7 +257,13 @@ impl TerminalView { .action("Inline Assist", Box::new(InlineAssist::default())) }) .separator() - .action("Close", Box::new(CloseActiveItem { save_intent: None })) + .action( + "Close", + Box::new(CloseActiveItem { + save_intent: None, + close_pinned: true, + }), + ) }); window.focus(&context_menu.focus_handle(cx)); diff --git a/crates/vim/src/command.rs b/crates/vim/src/command.rs index f73734d0e2fd4348be0ce4560b7d27fa75ff36cb..bbd579218a16e10e235ddea2039dc95b195fff77 100644 --- a/crates/vim/src/command.rs +++ b/crates/vim/src/command.rs @@ -567,37 +567,45 @@ fn generate_commands(_: &App) -> Vec { ("q", "uit"), workspace::CloseActiveItem { save_intent: Some(SaveIntent::Close), + close_pinned: false, }, ) .bang(workspace::CloseActiveItem { save_intent: Some(SaveIntent::Skip), + close_pinned: true, }), VimCommand::new( ("wq", ""), workspace::CloseActiveItem { save_intent: Some(SaveIntent::Save), + close_pinned: false, }, ) .bang(workspace::CloseActiveItem { save_intent: Some(SaveIntent::Overwrite), + close_pinned: true, }), VimCommand::new( ("x", "it"), workspace::CloseActiveItem { save_intent: Some(SaveIntent::SaveAll), + close_pinned: false, }, ) .bang(workspace::CloseActiveItem { save_intent: Some(SaveIntent::Overwrite), + close_pinned: true, }), VimCommand::new( ("ex", "it"), workspace::CloseActiveItem { save_intent: Some(SaveIntent::SaveAll), + close_pinned: false, }, ) .bang(workspace::CloseActiveItem { save_intent: Some(SaveIntent::Overwrite), + close_pinned: true, }), VimCommand::new( ("up", "date"), @@ -657,10 +665,12 @@ fn generate_commands(_: &App) -> Vec { ("bd", "elete"), workspace::CloseActiveItem { save_intent: Some(SaveIntent::Close), + close_pinned: false, }, ) .bang(workspace::CloseActiveItem { save_intent: Some(SaveIntent::Skip), + close_pinned: true, }), VimCommand::new(("bn", "ext"), workspace::ActivateNextItem).count(), VimCommand::new(("bN", "ext"), workspace::ActivatePrevItem).count(), @@ -679,6 +689,7 @@ fn generate_commands(_: &App) -> Vec { ("tabc", "lose"), workspace::CloseActiveItem { save_intent: Some(SaveIntent::Close), + close_pinned: false, }, ), VimCommand::new( diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index d687f3bfd71ee65df73014ae84184b88d54df62a..7f8596112016f4a56ae99f477d26763311cddbb1 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -99,6 +99,8 @@ pub struct ActivateItem(pub usize); #[serde(deny_unknown_fields)] pub struct CloseActiveItem { pub save_intent: Option, + #[serde(default)] + pub close_pinned: bool, } #[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default)] @@ -1224,6 +1226,37 @@ impl Pane { return None; } + if self.is_tab_pinned(self.active_item_index) && !action.close_pinned { + // Activate any non-pinned tab in same pane + let non_pinned_tab_index = self + .items() + .enumerate() + .find(|(index, 
_item)| !self.is_tab_pinned(*index)) + .map(|(index, _item)| index); + if let Some(index) = non_pinned_tab_index { + self.activate_item(index, false, false, window, cx); + return None; + } + + // Activate any non-pinned tab in different pane + let current_pane = cx.entity(); + self.workspace + .update(cx, |workspace, cx| { + let panes = workspace.center.panes(); + let pane_with_unpinned_tab = panes.iter().find(|pane| { + if **pane == ¤t_pane { + return false; + } + pane.read(cx).has_unpinned_tabs() + }); + if let Some(pane) = pane_with_unpinned_tab { + pane.update(cx, |pane, cx| pane.activate_unpinned_tab(window, cx)); + } + }) + .ok(); + + return None; + }; let active_item_id = self.items[self.active_item_index].item_id(); Some(self.close_item_by_id( active_item_id, @@ -2105,6 +2138,24 @@ impl Pane { self.pinned_tab_count != 0 } + fn has_unpinned_tabs(&self) -> bool { + self.pinned_tab_count < self.items.len() + } + + fn activate_unpinned_tab(&mut self, window: &mut Window, cx: &mut Context) { + if self.items.is_empty() { + return; + } + let Some(index) = self + .items() + .enumerate() + .find_map(|(index, _item)| (!self.is_tab_pinned(index)).then_some(index)) + else { + return; + }; + self.activate_item(index, true, true, window, cx); + } + fn render_tab( &self, ix: usize, @@ -2280,7 +2331,10 @@ impl Pane { pane.unpin_tab_at(ix, window, cx); })) } else { - end_slot_action = &CloseActiveItem { save_intent: None }; + end_slot_action = &CloseActiveItem { + save_intent: None, + close_pinned: false, + }; end_slot_tooltip_text = "Close Tab"; IconButton::new("close tab", IconName::Close) .when(!always_show_close_button, |button| { @@ -2350,7 +2404,10 @@ impl Pane { menu = menu .entry( "Close", - Some(Box::new(CloseActiveItem { save_intent: None })), + Some(Box::new(CloseActiveItem { + save_intent: None, + close_pinned: true, + })), window.handler_for(&pane, move |pane, window, cx| { pane.close_item_by_id(item_id, SaveIntent::Close, window, cx) .detach_and_log_err(cx); @@ -2991,14 +3048,9 @@ impl Pane { self.items .iter() - .map(|item| item.item_id()) - .filter(|item_id| { - if let Some(ix) = self.index_for_item_id(*item_id) { - self.is_tab_pinned(ix) - } else { - true - } - }) + .enumerate() + .filter(|(index, _item)| self.is_tab_pinned(*index)) + .map(|(_, item)| item.item_id()) .collect() } @@ -3561,7 +3613,14 @@ mod tests { pane.update_in(cx, |pane, window, cx| { assert!(pane - .close_active_item(&CloseActiveItem { save_intent: None }, window, cx) + .close_active_item( + &CloseActiveItem { + save_intent: None, + close_pinned: false + }, + window, + cx + ) .is_none()) }); } @@ -3902,7 +3961,14 @@ mod tests { assert_item_labels(&pane, ["A", "B", "1*", "C", "D"], cx); pane.update_in(cx, |pane, window, cx| { - pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx) + pane.close_active_item( + &CloseActiveItem { + save_intent: None, + close_pinned: false, + }, + window, + cx, + ) }) .unwrap() .await @@ -3915,7 +3981,14 @@ mod tests { assert_item_labels(&pane, ["A", "B", "C", "D*"], cx); pane.update_in(cx, |pane, window, cx| { - pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx) + pane.close_active_item( + &CloseActiveItem { + save_intent: None, + close_pinned: false, + }, + window, + cx, + ) }) .unwrap() .await @@ -3923,7 +3996,14 @@ mod tests { assert_item_labels(&pane, ["A", "B*", "C"], cx); pane.update_in(cx, |pane, window, cx| { - pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx) + pane.close_active_item( + &CloseActiveItem { 
+ save_intent: None, + close_pinned: false, + }, + window, + cx, + ) }) .unwrap() .await @@ -3931,7 +4011,14 @@ mod tests { assert_item_labels(&pane, ["A", "C*"], cx); pane.update_in(cx, |pane, window, cx| { - pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx) + pane.close_active_item( + &CloseActiveItem { + save_intent: None, + close_pinned: false, + }, + window, + cx, + ) }) .unwrap() .await @@ -3967,7 +4054,14 @@ mod tests { assert_item_labels(&pane, ["A", "B", "1*", "C", "D"], cx); pane.update_in(cx, |pane, window, cx| { - pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx) + pane.close_active_item( + &CloseActiveItem { + save_intent: None, + close_pinned: false, + }, + window, + cx, + ) }) .unwrap() .await @@ -3980,7 +4074,14 @@ mod tests { assert_item_labels(&pane, ["A", "B", "C", "D*"], cx); pane.update_in(cx, |pane, window, cx| { - pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx) + pane.close_active_item( + &CloseActiveItem { + save_intent: None, + close_pinned: false, + }, + window, + cx, + ) }) .unwrap() .await @@ -3988,7 +4089,14 @@ mod tests { assert_item_labels(&pane, ["A", "B", "C*"], cx); pane.update_in(cx, |pane, window, cx| { - pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx) + pane.close_active_item( + &CloseActiveItem { + save_intent: None, + close_pinned: false, + }, + window, + cx, + ) }) .unwrap() .await @@ -3996,7 +4104,14 @@ mod tests { assert_item_labels(&pane, ["A", "B*"], cx); pane.update_in(cx, |pane, window, cx| { - pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx) + pane.close_active_item( + &CloseActiveItem { + save_intent: None, + close_pinned: false, + }, + window, + cx, + ) }) .unwrap() .await @@ -4032,7 +4147,14 @@ mod tests { assert_item_labels(&pane, ["A", "B", "1*", "C", "D"], cx); pane.update_in(cx, |pane, window, cx| { - pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx) + pane.close_active_item( + &CloseActiveItem { + save_intent: None, + close_pinned: false, + }, + window, + cx, + ) }) .unwrap() .await @@ -4045,7 +4167,14 @@ mod tests { assert_item_labels(&pane, ["A", "B", "C", "D*"], cx); pane.update_in(cx, |pane, window, cx| { - pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx) + pane.close_active_item( + &CloseActiveItem { + save_intent: None, + close_pinned: false, + }, + window, + cx, + ) }) .unwrap() .await @@ -4058,7 +4187,14 @@ mod tests { assert_item_labels(&pane, ["A*", "B", "C"], cx); pane.update_in(cx, |pane, window, cx| { - pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx) + pane.close_active_item( + &CloseActiveItem { + save_intent: None, + close_pinned: false, + }, + window, + cx, + ) }) .unwrap() .await @@ -4066,7 +4202,14 @@ mod tests { assert_item_labels(&pane, ["B*", "C"], cx); pane.update_in(cx, |pane, window, cx| { - pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx) + pane.close_active_item( + &CloseActiveItem { + save_intent: None, + close_pinned: false, + }, + window, + cx, + ) }) .unwrap() .await @@ -4300,7 +4443,7 @@ mod tests { let project = Project::test(fs, None, cx).await; let (workspace, cx) = - cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); let pane = workspace.update(cx, |workspace, _| workspace.active_pane().clone()); let item_a = add_labeled_item(&pane, "A", false, cx); @@ -4326,6 
+4469,71 @@ mod tests { assert_item_labels(&pane, [], cx); } + #[gpui::test] + async fn test_close_pinned_tab_with_non_pinned_in_same_pane(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, None, cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + + // Non-pinned tabs in same pane + let pane = workspace.update(cx, |workspace, _| workspace.active_pane().clone()); + add_labeled_item(&pane, "A", false, cx); + add_labeled_item(&pane, "B", false, cx); + add_labeled_item(&pane, "C", false, cx); + pane.update_in(cx, |pane, window, cx| { + pane.pin_tab_at(0, window, cx); + }); + set_labeled_items(&pane, ["A*", "B", "C"], cx); + pane.update_in(cx, |pane, window, cx| { + pane.close_active_item( + &CloseActiveItem { + save_intent: None, + close_pinned: false, + }, + window, + cx, + ); + }); + // Non-pinned tab should be active + assert_item_labels(&pane, ["A", "B*", "C"], cx); + } + + #[gpui::test] + async fn test_close_pinned_tab_with_non_pinned_in_different_pane(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, None, cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + + // No non-pinned tabs in same pane, non-pinned tabs in another pane + let pane1 = workspace.update(cx, |workspace, _| workspace.active_pane().clone()); + let pane2 = workspace.update_in(cx, |workspace, window, cx| { + workspace.split_pane(pane1.clone(), SplitDirection::Right, window, cx) + }); + add_labeled_item(&pane1, "A", false, cx); + pane1.update_in(cx, |pane, window, cx| { + pane.pin_tab_at(0, window, cx); + }); + set_labeled_items(&pane1, ["A*"], cx); + add_labeled_item(&pane2, "B", false, cx); + set_labeled_items(&pane2, ["B"], cx); + pane1.update_in(cx, |pane, window, cx| { + pane.close_active_item( + &CloseActiveItem { + save_intent: None, + close_pinned: false, + }, + window, + cx, + ); + }); + // Non-pinned tab of other pane should be active + assert_item_labels(&pane2, ["B*"], cx); + } + fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { let settings_store = SettingsStore::test(cx); diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 2778382f3efffc8c0e0c38e8f1f8917783a31ee8..e4087fad4f50438e98bb89bf68b0e0087a10b707 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -8175,6 +8175,7 @@ mod tests { pane.close_active_item( &CloseActiveItem { save_intent: Some(SaveIntent::Close), + close_pinned: false, }, window, cx, @@ -8279,7 +8280,14 @@ mod tests { }); let close_singleton_buffer_task = pane .update_in(cx, |pane, window, cx| { - pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx) + pane.close_active_item( + &CloseActiveItem { + save_intent: None, + close_pinned: false, + }, + window, + cx, + ) }) .expect("should have active singleton buffer to close"); cx.background_executor.run_until_parked(); @@ -8385,7 +8393,14 @@ mod tests { }); let _close_multi_buffer_task = pane .update_in(cx, |pane, window, cx| { - pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx) + pane.close_active_item( + &CloseActiveItem { + save_intent: None, + close_pinned: false, + }, + window, + cx, + ) }) .expect("should have active multi buffer to close"); cx.background_executor.run_until_parked(); @@ -8476,7 +8491,14 @@ mod tests { }); let close_multi_buffer_task = pane 
.update_in(cx, |pane, window, cx| { - pane.close_active_item(&CloseActiveItem { save_intent: None }, window, cx) + pane.close_active_item( + &CloseActiveItem { + save_intent: None, + close_pinned: false, + }, + window, + cx, + ) }) .expect("should have active multi buffer to close"); cx.background_executor.run_until_parked(); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index c1244c42992341b12bf5024e8cc514ec91820bc5..9d4cb83e08bddcdb6c686e867baf8bce46bf7efa 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -3103,7 +3103,10 @@ mod tests { }); cx.dispatch_action( window.into(), - workspace::CloseActiveItem { save_intent: None }, + workspace::CloseActiveItem { + save_intent: None, + close_pinned: false, + }, ); cx.background_executor.run_until_parked(); @@ -3116,7 +3119,10 @@ mod tests { cx.dispatch_action( window.into(), - workspace::CloseActiveItem { save_intent: None }, + workspace::CloseActiveItem { + save_intent: None, + close_pinned: false, + }, ); cx.background_executor.run_until_parked(); cx.simulate_prompt_answer(1); diff --git a/crates/zed/src/zed/app_menus.rs b/crates/zed/src/zed/app_menus.rs index c50de09f3dd4f852916c76617ca72af82a751dd1..bd76fa77788aa33ba6bc7c499a3424cdf06fb9a6 100644 --- a/crates/zed/src/zed/app_menus.rs +++ b/crates/zed/src/zed/app_menus.rs @@ -76,7 +76,10 @@ pub fn app_menus() -> Vec { MenuItem::action("Save All", workspace::SaveAll { save_intent: None }), MenuItem::action( "Close Editor", - workspace::CloseActiveItem { save_intent: None }, + workspace::CloseActiveItem { + save_intent: None, + close_pinned: true, + }, ), MenuItem::action("Close Window", workspace::CloseWindow), ], From 8ff8dbdb2baa990ab2c3bcb202f1e8c2a070d76c Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 7 Feb 2025 13:03:19 -0500 Subject: [PATCH 128/130] assistant_context_editor: Fix patch block not rendering due to window reborrow (#24461) This PR fixes an issue where the Assistant patch block was not being rendered when using "Suggest Edits". The issue was that the `BlockContext` already has a borrow of the `Window`, so we can't use `update_in` to reborrow the window. The fix is to reuse the existing `&mut Window` reference from the `BlockContext` so we don't need to `update_in`. Closes #24169. Release Notes: - Assistant: Fixed an issue where the patch block was not being rendered when using "Suggest Edits". 
--------- Co-authored-by: Max --- crates/assistant_context_editor/src/context_editor.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/crates/assistant_context_editor/src/context_editor.rs b/crates/assistant_context_editor/src/context_editor.rs index d1b9fbbda725ea3a77bdd00b91146945840d4bbf..290cff13fae047039d32ab6987f2d4e2ab69e002 100644 --- a/crates/assistant_context_editor/src/context_editor.rs +++ b/crates/assistant_context_editor/src/context_editor.rs @@ -832,12 +832,13 @@ impl ContextEditor { let render_block: RenderBlock = Arc::new({ let this = this.clone(); let patch_range = range.clone(); - move |cx: &mut BlockContext<'_, '_>| { + move |cx: &mut BlockContext| { let max_width = cx.max_width; let gutter_width = cx.gutter_dimensions.full_width(); let block_id = cx.block_id; let selected = cx.selected; - this.update_in(cx, |this, window, cx| { + let window = &mut cx.window; + this.update(cx.app, |this, cx| { this.render_patch_block( patch_range.clone(), max_width, From a7a14e59bf792d38af3b4008c127459e956f58e1 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 7 Feb 2025 15:35:55 -0300 Subject: [PATCH 129/130] edit predictions: Clarify `disabled_globs` documentation (#24460) This PR clarifies how the `disabled_globs` work. Release Notes: - N/A --------- Co-authored-by: Bennet Bo Fenner <53836821+bennetbo@users.noreply.github.com> --- assets/settings/default.json | 2 ++ crates/language/src/language_settings.rs | 4 ++++ docs/src/configuring-zed.md | 6 +++--- 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 3369807b324a8f4e15979e1f0d950179c5406b25..884583167cd54a7d4835148885a623b628dbcfde 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -783,6 +783,8 @@ "load_direnv": "direct", "edit_predictions": { // A list of globs representing files that edit predictions should be disabled for. + // There's a sensible default list of globs already included. + // Any addition to this list will be merged with the default list. "disabled_globs": [ "**/.env*", "**/*.pem", diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index c8f47f7c8362221610d3369e25ea7a447bb450dd..b9c0821721bc15fbf5ba8b0fa286f9f25695731d 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -224,6 +224,8 @@ pub struct EditPredictionSettings { /// The provider that supplies edit predictions. pub provider: EditPredictionProvider, /// A list of globs representing files that edit predictions should be disabled for. + /// This list adds to a pre-existing, sensible default set of globs. + /// Any additional ones you add are combined with them. pub disabled_globs: Vec, /// When to show edit predictions previews in buffer. pub inline_preview: InlineCompletionPreviewMode, @@ -428,6 +430,8 @@ pub struct LanguageSettingsContent { #[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq)] pub struct InlineCompletionSettingsContent { /// A list of globs representing files that edit predictions should be disabled for. + /// This list adds to a pre-existing, sensible default set of globs. + /// Any additional ones you add are combined with them. #[serde(default)] pub disabled_globs: Option>, /// When to show edit predictions previews in buffer. 
diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 779c9e2a5930eceba83d79716ebc481e26963390..91cd144406937d67d78b3f72b604eb7a5d70ae9e 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -398,13 +398,13 @@ There are two options to choose from: ### Disabled Globs -- Description: A list of globs representing files that edit predictions should be disabled for. +- Description: A list of globs for which edit predictions should be disabled for. This list adds to a pre-existing, sensible default set of globs. Any additional ones you add are combined with them. - Setting: `disabled_globs` -- Default: `[".env"]` +- Default: `["**/.env*", "**/*.pem", "**/*.key", "**/*.cert", "**/*.crt", "**/secrets.yml"]` **Options** -List of `string` values +List of `string` values. ## Edit Predictions Disabled in From fd7fa87939a720d1e7f7041b9a62eb55df50ad2a Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Fri, 7 Feb 2025 15:42:27 -0300 Subject: [PATCH 130/130] edit predictions: Restore red dot in status buttons when pending ToS (#24408) In one of the recent changes to the edit predictions status bar menu, we lost the red dot that is displayed when the user has Zed as the provider but hasn't accepted terms of service. Note: All the checks were still in place, just the visual indicator was missing. ![CleanShot 2025-02-06 at 20 22 21@2x](https://github.com/user-attachments/assets/da8f25dd-5ed2-4bf9-8453-10b80f00bf63) Release Notes: - N/A --------- Co-authored-by: Danilo Leal --- crates/editor/src/editor.rs | 2 +- .../src/inline_completion_button.rs | 96 +++++++++---------- .../ui/src/components/button/button_icon.rs | 26 ++++- .../ui/src/components/button/icon_button.rs | 22 ++++- 4 files changed, 93 insertions(+), 53 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 24ec46ca61bba155db16921ad01ebc436062d6af..b5ef81bfbabaf1019a31a4aa2e2e41cb37617455 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -5549,7 +5549,7 @@ impl Editor { })) .child( h_flex() - .w_full() + .flex_1() .gap_2() .child(Icon::new(IconName::ZedPredict)) .child(Label::new("Accept Terms of Service")) diff --git a/crates/inline_completion_button/src/inline_completion_button.rs b/crates/inline_completion_button/src/inline_completion_button.rs index a28cfe99c1206acd16c8a262d00ff32f57d6f53d..1864e0c26603885738d73d6040b826a3435e62ef 100644 --- a/crates/inline_completion_button/src/inline_completion_button.rs +++ b/crates/inline_completion_button/src/inline_completion_button.rs @@ -24,8 +24,8 @@ use std::{ }; use supermaven::{AccountStatus, Supermaven}; use ui::{ - prelude::*, Clickable, ContextMenu, ContextMenuEntry, IconButton, IconButtonShape, PopoverMenu, - PopoverMenuHandle, Tooltip, + prelude::*, Clickable, ContextMenu, ContextMenuEntry, IconButton, IconButtonShape, Indicator, + PopoverMenu, PopoverMenuHandle, Tooltip, }; use workspace::{ create_and_open_local_file, item::ItemHandle, notifications::NotificationId, StatusItemView, @@ -240,24 +240,20 @@ impl Render for InlineCompletionButton { let current_user_terms_accepted = self.user_store.read(cx).current_user_has_accepted_terms(); - let icon_button = || { - let base = IconButton::new("zed-predict-pending-button", zeta_icon) - .shape(IconButtonShape::Square); - - match ( - current_user_terms_accepted, - self.popover_menu_handle.is_deployed(), - enabled, - ) { - (Some(false) | None, _, _) => { - let signed_in = current_user_terms_accepted.is_some(); - let tooltip_meta = if 
signed_in { - "Read Terms of Service" - } else { - "Sign in to use" - }; + if !current_user_terms_accepted.unwrap_or(false) { + let signed_in = current_user_terms_accepted.is_some(); + let tooltip_meta = if signed_in { + "Read Terms of Service" + } else { + "Sign in to use" + }; - base.tooltip(move |window, cx| { + return div().child( + IconButton::new("zed-predict-pending-button", zeta_icon) + .shape(IconButtonShape::Square) + .indicator(Indicator::dot().color(Color::Error)) + .indicator_border_color(Some(cx.theme().colors().status_bar_background)) + .tooltip(move |window, cx| { Tooltip::with_meta( "Edit Predictions", None, @@ -266,34 +262,38 @@ impl Render for InlineCompletionButton { cx, ) }) - .on_click(cx.listener( - move |_, _, window, cx| { - telemetry::event!( - "Pending ToS Clicked", - source = "Edit Prediction Status Button" - ); - window.dispatch_action( - zed_actions::OpenZedPredictOnboarding.boxed_clone(), - cx, - ); - }, - )) + .on_click(cx.listener(move |_, _, window, cx| { + telemetry::event!( + "Pending ToS Clicked", + source = "Edit Prediction Status Button" + ); + window.dispatch_action( + zed_actions::OpenZedPredictOnboarding.boxed_clone(), + cx, + ); + })), + ); + } + + let icon_button = IconButton::new("zed-predict-pending-button", zeta_icon) + .shape(IconButtonShape::Square) + .when(!self.popover_menu_handle.is_deployed(), |element| { + if enabled { + element.tooltip(|window, cx| { + Tooltip::for_action("Edit Prediction", &ToggleMenu, window, cx) + }) + } else { + element.tooltip(|window, cx| { + Tooltip::with_meta( + "Edit Prediction", + Some(&ToggleMenu), + "Disabled For This File", + window, + cx, + ) + }) } - (Some(true), true, _) => base, - (Some(true), false, true) => base.tooltip(|window, cx| { - Tooltip::for_action("Edit Prediction", &ToggleMenu, window, cx) - }), - (Some(true), false, false) => base.tooltip(|window, cx| { - Tooltip::with_meta( - "Edit Prediction", - Some(&ToggleMenu), - "Disabled For This File", - window, - cx, - ) - }), - } - }; + }); let this = cx.entity().clone(); @@ -311,7 +311,7 @@ impl Render for InlineCompletionButton { if is_refreshing { popover_menu = popover_menu.trigger( - icon_button().with_animation( + icon_button.with_animation( "pulsating-label", Animation::new(Duration::from_secs(2)) .repeat() @@ -320,7 +320,7 @@ impl Render for InlineCompletionButton { ), ); } else { - popover_menu = popover_menu.trigger(icon_button()); + popover_menu = popover_menu.trigger(icon_button); } div().child(popover_menu.into_any_element()) diff --git a/crates/ui/src/components/button/button_icon.rs b/crates/ui/src/components/button/button_icon.rs index a2a146ee76f478259ebbbd682706b608d694ef2d..adacd12f27039f5289074f5bb2664afd43113493 100644 --- a/crates/ui/src/components/button/button_icon.rs +++ b/crates/ui/src/components/button/button_icon.rs @@ -1,5 +1,6 @@ #![allow(missing_docs)] -use crate::{prelude::*, Icon, IconName, IconSize}; +use crate::{prelude::*, Icon, IconName, IconSize, IconWithIndicator, Indicator}; +use gpui::Hsla; /// An icon that appears within a button. 
/// @@ -15,6 +16,8 @@ pub(super) struct ButtonIcon { selected_icon: Option, selected_icon_color: Option, selected_style: Option, + indicator: Option, + indicator_border_color: Option, } impl ButtonIcon { @@ -28,6 +31,8 @@ impl ButtonIcon { selected_icon: None, selected_icon_color: None, selected_style: None, + indicator: None, + indicator_border_color: None, } } @@ -56,6 +61,16 @@ impl ButtonIcon { self.selected_icon_color = color.into(); self } + + pub fn indicator(mut self, indicator: Indicator) -> Self { + self.indicator = Some(indicator); + self + } + + pub fn indicator_border_color(mut self, color: Option) -> Self { + self.indicator_border_color = color; + self + } } impl Disableable for ButtonIcon { @@ -96,6 +111,13 @@ impl RenderOnce for ButtonIcon { self.color }; - Icon::new(icon).size(self.size).color(icon_color) + let icon = Icon::new(icon).size(self.size).color(icon_color); + + match self.indicator { + Some(indicator) => IconWithIndicator::new(icon, Some(indicator)) + .indicator_border_color(self.indicator_border_color) + .into_any_element(), + None => icon.into_any_element(), + } } } diff --git a/crates/ui/src/components/button/icon_button.rs b/crates/ui/src/components/button/icon_button.rs index 840f0fc394842c44780dab5125f890c37ee60539..c28c5ae9ac0dc1ff2f4e678a9b82b23a017b1cff 100644 --- a/crates/ui/src/components/button/icon_button.rs +++ b/crates/ui/src/components/button/icon_button.rs @@ -1,8 +1,8 @@ #![allow(missing_docs)] -use gpui::{AnyView, DefiniteLength}; +use gpui::{AnyView, DefiniteLength, Hsla}; use super::button_like::{ButtonCommon, ButtonLike, ButtonSize, ButtonStyle}; -use crate::{prelude::*, ElevationIndex, SelectableButton}; +use crate::{prelude::*, ElevationIndex, Indicator, SelectableButton}; use crate::{IconName, IconSize}; use super::button_icon::ButtonIcon; @@ -22,6 +22,8 @@ pub struct IconButton { icon_size: IconSize, icon_color: Color, selected_icon: Option, + indicator: Option, + indicator_border_color: Option, alpha: Option, } @@ -34,6 +36,8 @@ impl IconButton { icon_size: IconSize::default(), icon_color: Color::Default, selected_icon: None, + indicator: None, + indicator_border_color: None, alpha: None, }; this.base.base = this.base.base.debug_selector(|| format!("ICON-{:?}", icon)); @@ -64,6 +68,16 @@ impl IconButton { self.selected_icon = icon.into(); self } + + pub fn indicator(mut self, indicator: Indicator) -> Self { + self.indicator = Some(indicator); + self + } + + pub fn indicator_border_color(mut self, color: Option) -> Self { + self.indicator_border_color = color; + self + } } impl Disableable for IconButton { @@ -168,6 +182,10 @@ impl RenderOnce for IconButton { .toggle_state(is_selected) .selected_icon(self.selected_icon) .when_some(selected_style, |this, style| this.selected_style(style)) + .when_some(self.indicator, |this, indicator| { + this.indicator(indicator) + .indicator_border_color(self.indicator_border_color) + }) .size(self.icon_size) .color(Color::Custom(color)), )