From e5cea54cbbdb37c4e047a344d5fd245860ccd529 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 28 Aug 2025 20:09:20 -0600 Subject: [PATCH 01/54] acp: Load agent panel even if serialized config is bogus (#37134) Closes #ISSUE Release Notes: - N/A --- crates/agent_ui/src/agent_panel.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 232311c5b02cdaa9edad4c0e9053163f450378e8..3eb171054a2c4d529bbc4b89063bf58f69ce5c45 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -86,7 +86,7 @@ use zed_actions::{ const AGENT_PANEL_KEY: &str = "agent_panel"; -#[derive(Serialize, Deserialize)] +#[derive(Serialize, Deserialize, Debug)] struct SerializedAgentPanel { width: Option, selected_agent: Option, @@ -592,7 +592,7 @@ impl AgentPanel { .log_err() .flatten() { - Some(serde_json::from_str::(&panel)?) + serde_json::from_str::(&panel).log_err() } else { None }; From c3ccdc0b4421d34d541d592b0184345c2ac08f7e Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Thu, 28 Aug 2025 20:50:24 -0700 Subject: [PATCH 02/54] Add a setting to control the number of context lines in excerpts (#37138) Fixes https://github.com/zed-industries/zed/discussions/28739 Release Notes: - Added a setting, `excerpt_context_lines`, for setting the number of context lines shown in a multibuffer --- assets/settings/default.json | 2 ++ crates/acp_thread/src/diff.rs | 8 +++---- crates/agent_ui/src/agent_diff.rs | 3 ++- crates/assistant_tools/src/edit_file_tool.rs | 10 +++++---- crates/diagnostics/src/diagnostics.rs | 6 +++-- crates/editor/src/editor.rs | 12 +++++++--- crates/editor/src/editor_settings.rs | 6 +++++ crates/editor/src/editor_tests.rs | 2 +- crates/git_ui/src/commit_view.rs | 4 ++-- crates/git_ui/src/project_diff.rs | 3 ++- crates/search/src/project_search.rs | 3 ++- crates/settings/src/settings_store.rs | 23 ++++++++++++++++++++ docs/src/configuring-zed.md | 10 
+++++++++ docs/src/development/releases.md | 2 -- docs/src/visual-customization.md | 4 +++- 15 files changed, 76 insertions(+), 22 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 57a5d13eab281d6bffec2f299fbb1e2d5a3a01c5..297c932e5b54ca75eb34b2399c0a1f427dcc9f77 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -279,6 +279,8 @@ "redact_private_values": false, // The default number of lines to expand excerpts in the multibuffer by. "expand_excerpt_lines": 5, + // The default number of context lines shown in multibuffer excerpts. + "excerpt_context_lines": 2, // Globs to match against file paths to determine if a file is private. "private_files": ["**/.env*", "**/*.pem", "**/*.key", "**/*.cert", "**/*.crt", "**/secrets.yml"], // Whether to use additional LSP queries to format (and amend) the code after diff --git a/crates/acp_thread/src/diff.rs b/crates/acp_thread/src/diff.rs index 0fec6809e01ff3f85acc7ad80effe95197200d60..f75af0543e373b47b0c6de36760ba18b5d9da318 100644 --- a/crates/acp_thread/src/diff.rs +++ b/crates/acp_thread/src/diff.rs @@ -1,6 +1,6 @@ use anyhow::Result; use buffer_diff::{BufferDiff, BufferDiffSnapshot}; -use editor::{MultiBuffer, PathKey}; +use editor::{MultiBuffer, PathKey, multibuffer_context_lines}; use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task}; use itertools::Itertools; use language::{ @@ -64,7 +64,7 @@ impl Diff { PathKey::for_buffer(&buffer, cx), buffer.clone(), hunk_ranges, - editor::DEFAULT_MULTIBUFFER_CONTEXT, + multibuffer_context_lines(cx), cx, ); multibuffer.add_diff(diff, cx); @@ -279,7 +279,7 @@ impl PendingDiff { path_key, buffer, ranges, - editor::DEFAULT_MULTIBUFFER_CONTEXT, + multibuffer_context_lines(cx), cx, ); multibuffer.add_diff(buffer_diff.clone(), cx); @@ -305,7 +305,7 @@ impl PendingDiff { PathKey::for_buffer(&self.new_buffer, cx), self.new_buffer.clone(), ranges, - editor::DEFAULT_MULTIBUFFER_CONTEXT, + 
multibuffer_context_lines(cx), cx, ); let end = multibuffer.len(cx); diff --git a/crates/agent_ui/src/agent_diff.rs b/crates/agent_ui/src/agent_diff.rs index 1e1ff95178308e20988019305b0546a169acba8f..4bd525e9d0461a7a180cccc1748e7f8983c0b665 100644 --- a/crates/agent_ui/src/agent_diff.rs +++ b/crates/agent_ui/src/agent_diff.rs @@ -10,6 +10,7 @@ use editor::{ Direction, Editor, EditorEvent, EditorSettings, MultiBuffer, MultiBufferSnapshot, SelectionEffects, ToPoint, actions::{GoToHunk, GoToPreviousHunk}, + multibuffer_context_lines, scroll::Autoscroll, }; use gpui::{ @@ -257,7 +258,7 @@ impl AgentDiffPane { path_key.clone(), buffer.clone(), diff_hunk_ranges, - editor::DEFAULT_MULTIBUFFER_CONTEXT, + multibuffer_context_lines(cx), cx, ); multibuffer.add_diff(diff_handle, cx); diff --git a/crates/assistant_tools/src/edit_file_tool.rs b/crates/assistant_tools/src/edit_file_tool.rs index 95b01c40eb96472caf85f239b2212f25e06fe9e2..7b208ccc7768c9c0df2904573e2d47504a8eb61f 100644 --- a/crates/assistant_tools/src/edit_file_tool.rs +++ b/crates/assistant_tools/src/edit_file_tool.rs @@ -11,7 +11,9 @@ use assistant_tool::{ AnyToolCard, Tool, ToolCard, ToolResult, ToolResultContent, ToolResultOutput, ToolUseStatus, }; use buffer_diff::{BufferDiff, BufferDiffSnapshot}; -use editor::{Editor, EditorMode, MinimapVisibility, MultiBuffer, PathKey}; +use editor::{ + Editor, EditorMode, MinimapVisibility, MultiBuffer, PathKey, multibuffer_context_lines, +}; use futures::StreamExt; use gpui::{ Animation, AnimationExt, AnyWindowHandle, App, AppContext, AsyncApp, Entity, Task, @@ -474,7 +476,7 @@ impl Tool for EditFileTool { PathKey::for_buffer(&buffer, cx), buffer, diff_hunk_ranges, - editor::DEFAULT_MULTIBUFFER_CONTEXT, + multibuffer_context_lines(cx), cx, ); multibuffer.add_diff(buffer_diff, cx); @@ -703,7 +705,7 @@ impl EditFileToolCard { PathKey::for_buffer(buffer, cx), buffer.clone(), ranges, - editor::DEFAULT_MULTIBUFFER_CONTEXT, + multibuffer_context_lines(cx), cx, ); let end = 
multibuffer.len(cx); @@ -791,7 +793,7 @@ impl EditFileToolCard { path_key, buffer, ranges, - editor::DEFAULT_MULTIBUFFER_CONTEXT, + multibuffer_context_lines(cx), cx, ); multibuffer.add_diff(buffer_diff.clone(), cx); diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 1c27e820a0d8afb64c5c67e66e125caf8720593d..53d03718475da1eeaf2b6b3faa22baabb1695f2d 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -10,8 +10,9 @@ use anyhow::Result; use collections::{BTreeSet, HashMap}; use diagnostic_renderer::DiagnosticBlock; use editor::{ - DEFAULT_MULTIBUFFER_CONTEXT, Editor, EditorEvent, ExcerptRange, MultiBuffer, PathKey, + Editor, EditorEvent, ExcerptRange, MultiBuffer, PathKey, display_map::{BlockPlacement, BlockProperties, BlockStyle, CustomBlockId}, + multibuffer_context_lines, }; use gpui::{ AnyElement, AnyView, App, AsyncApp, Context, Entity, EventEmitter, FocusHandle, Focusable, @@ -493,10 +494,11 @@ impl ProjectDiagnosticsEditor { } let mut excerpt_ranges: Vec> = Vec::new(); + let context_lines = cx.update(|_, cx| multibuffer_context_lines(cx))?; for b in blocks.iter() { let excerpt_range = context_range_for_entry( b.initial_range.clone(), - DEFAULT_MULTIBUFFER_CONTEXT, + context_lines, buffer_snapshot.clone(), cx, ) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index ea7cce5d8b741268fe0d4182b66638c0495bb211..04780e79f84c6f762b246bfb662eb693675e5d38 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -219,7 +219,6 @@ use crate::{ pub const FILE_HEADER_HEIGHT: u32 = 2; pub const MULTI_BUFFER_EXCERPT_HEADER_HEIGHT: u32 = 1; -pub const DEFAULT_MULTIBUFFER_CONTEXT: u32 = 2; const CURSOR_BLINK_INTERVAL: Duration = Duration::from_millis(500); const MAX_LINE_LEN: usize = 1024; const MIN_NAVIGATION_HISTORY_ROW_DELTA: i64 = 10; @@ -6402,7 +6401,7 @@ impl Editor { PathKey::for_buffer(buffer_handle, cx), buffer_handle.clone(), 
edited_ranges, - DEFAULT_MULTIBUFFER_CONTEXT, + multibuffer_context_lines(cx), cx, ); @@ -16237,7 +16236,7 @@ impl Editor { PathKey::for_buffer(&location.buffer, cx), location.buffer.clone(), ranges_for_buffer, - DEFAULT_MULTIBUFFER_CONTEXT, + multibuffer_context_lines(cx), cx, ); ranges.extend(new_ranges) @@ -24078,3 +24077,10 @@ fn render_diff_hunk_controls( ) .into_any_element() } + +pub fn multibuffer_context_lines(cx: &App) -> u32 { + EditorSettings::try_get(cx) + .map(|settings| settings.excerpt_context_lines) + .unwrap_or(2) + .clamp(1, 32) +} diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index 1d7e04cae021dd7b755f1f80e78fd3ea83197539..9b110d782a0bbcf789791240ef42a935b7ecd47b 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -37,6 +37,7 @@ pub struct EditorSettings { pub multi_cursor_modifier: MultiCursorModifier, pub redact_private_values: bool, pub expand_excerpt_lines: u32, + pub excerpt_context_lines: u32, pub middle_click_paste: bool, #[serde(default)] pub double_click_in_multibuffer: DoubleClickInMultibuffer, @@ -515,6 +516,11 @@ pub struct EditorSettingsContent { /// Default: 3 pub expand_excerpt_lines: Option, + /// How many lines of context to provide in multibuffer excerpts by default + /// + /// Default: 2 + pub excerpt_context_lines: Option, + /// Whether to enable middle-click paste on Linux /// /// Default: true diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 85471c7ce96e172f7bd5ade399ed0ba1cd6d4a02..dfef8a92f064e3c8785f92d26e058fc43519dca2 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -19867,7 +19867,7 @@ async fn test_display_diff_hunks(cx: &mut TestAppContext) { PathKey::namespaced(0, buffer.read(cx).file().unwrap().path().clone()), buffer.clone(), vec![text::Anchor::MIN.to_point(&snapshot)..text::Anchor::MAX.to_point(&snapshot)], - DEFAULT_MULTIBUFFER_CONTEXT, + 2, cx, 
); } diff --git a/crates/git_ui/src/commit_view.rs b/crates/git_ui/src/commit_view.rs index d428ccbb0509702ee2535fb8c8e95b059fa24499..ac51cee8e42567a607891dd242c2bf103ae7fc0e 100644 --- a/crates/git_ui/src/commit_view.rs +++ b/crates/git_ui/src/commit_view.rs @@ -1,6 +1,6 @@ use anyhow::{Context as _, Result}; use buffer_diff::{BufferDiff, BufferDiffSnapshot}; -use editor::{Editor, EditorEvent, MultiBuffer, SelectionEffects}; +use editor::{Editor, EditorEvent, MultiBuffer, SelectionEffects, multibuffer_context_lines}; use git::repository::{CommitDetails, CommitDiff, CommitSummary, RepoPath}; use gpui::{ AnyElement, AnyView, App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, @@ -195,7 +195,7 @@ impl CommitView { PathKey::namespaced(FILE_NAMESPACE, path), buffer, diff_hunk_ranges, - editor::DEFAULT_MULTIBUFFER_CONTEXT, + multibuffer_context_lines(cx), cx, ); multibuffer.add_diff(buffer_diff, cx); diff --git a/crates/git_ui/src/project_diff.rs b/crates/git_ui/src/project_diff.rs index 524dbf13d30e4539dcc80ec37625333a37cc2206..69ebd83ea8c1a78f13f2218c020bd8654f2b4374 100644 --- a/crates/git_ui/src/project_diff.rs +++ b/crates/git_ui/src/project_diff.rs @@ -10,6 +10,7 @@ use collections::HashSet; use editor::{ Editor, EditorEvent, SelectionEffects, actions::{GoToHunk, GoToPreviousHunk}, + multibuffer_context_lines, scroll::Autoscroll, }; use futures::StreamExt; @@ -465,7 +466,7 @@ impl ProjectDiff { path_key.clone(), buffer, excerpt_ranges, - editor::DEFAULT_MULTIBUFFER_CONTEXT, + multibuffer_context_lines(cx), cx, ); (was_empty, is_newly_added) diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 1ee959f111bd5741a655551aa71030fd9d7c15c9..2668d270d7f008d49d6d067ba01d951d44a43a00 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -11,6 +11,7 @@ use editor::{ Anchor, Editor, EditorEvent, EditorSettings, MAX_TAB_TITLE_LEN, MultiBuffer, SelectionEffects, actions::{Backtab, 
SelectAll, Tab}, items::active_match_index, + multibuffer_context_lines, }; use futures::{StreamExt, stream::FuturesOrdered}; use gpui::{ @@ -345,7 +346,7 @@ impl ProjectSearch { excerpts.set_anchored_excerpts_for_path( buffer, ranges, - editor::DEFAULT_MULTIBUFFER_CONTEXT, + multibuffer_context_lines(cx), cx, ) }) diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index 3deaed8b9d0b9cba46a955409f6013d133a08358..c83719141067c8271e4d64344c957454740febea 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -103,6 +103,18 @@ pub trait Settings: 'static + Send + Sync { cx.global::().get(None) } + #[track_caller] + fn try_get(cx: &App) -> Option<&Self> + where + Self: Sized, + { + if cx.has_global::() { + cx.global::().try_get(None) + } else { + None + } + } + #[track_caller] fn try_read_global(cx: &AsyncApp, f: impl FnOnce(&Self) -> R) -> Option where @@ -407,6 +419,17 @@ impl SettingsStore { .expect("no default value for setting type") } + /// Get the value of a setting. + /// + /// Panics if the given setting type has not been registered, or if there is no + /// value for this setting. + pub fn try_get(&self, path: Option) -> Option<&T> { + self.setting_values + .get(&TypeId::of::()) + .map(|value| value.value_for_path(path)) + .and_then(|value| value.downcast_ref::()) + } + /// Get all values from project specific settings pub fn get_all_locals(&self) -> Vec<(WorktreeId, Arc, &T)> { self.setting_values diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index fb9306acc5a4b21b709904618a6438e58c30039f..2b1d801f8010c8ad00f1295c38803bd80df1c282 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -1461,6 +1461,16 @@ This setting enables integration with macOS’s native window tabbing feature. 
W Positive `integer` values +## Excerpt Context Lines + +- Description: The number of lines of context to provide when showing excerpts in the multibuffer. +- Setting: `excerpt_context_lines` +- Default: `2` + +**Options** + +Positive `integer` value between 1 and 32. Values outside of this range will be clamped to this range. + ## Extend Comment On Newline - Description: Whether to start a new line with a comment when a previous line is a comment as well. diff --git a/docs/src/development/releases.md b/docs/src/development/releases.md index d1f99401d6b78545c34a64b47a146cecacc7eec1..76432d93f002dc7dd9d9d119d24ed1348863c73e 100644 --- a/docs/src/development/releases.md +++ b/docs/src/development/releases.md @@ -51,7 +51,6 @@ Credentials for various services used in this process can be found in 1Password. - We sometimes correct things here and there that didn't translate from GitHub's renderer to Kit's. 1. Build social media posts based on the popular items in stable. - - You can use the [prior week's post chain](https://zed.dev/channel/tweets-23331) as your outline. - Stage the copy and assets using [Buffer](https://buffer.com), for both X and BlueSky. - Publish both, one at a time, ensuring both are posted to each respective platform. @@ -89,7 +88,6 @@ You will need write access to the Zed repository to do this: - Download the artifacts for each release draft and test that you can run them locally. 1. Publish stable / preview drafts, one at a time. - - Use [Vercel](https://vercel.com/zed-industries/zed-dev) to check the progress of the website rebuild. The release will be public once the rebuild has completed. 
diff --git a/docs/src/visual-customization.md b/docs/src/visual-customization.md index 4fc5a9ba8864bc3a721d4d7d101977d729082e59..1df76d17f026c9457b296230f93bec0e10c4aa19 100644 --- a/docs/src/visual-customization.md +++ b/docs/src/visual-customization.md @@ -334,7 +334,9 @@ TBD: Centered layout related settings ```json { // The default number of lines to expand excerpts in the multibuffer by. - "expand_excerpt_lines": 5 + "expand_excerpt_lines": 5, + // The default number of lines of context provided for excerpts in the multibuffer by. + "excerpt_context_lines": 2 } ``` From 384ffb883f472546609b3cc0513623b6bb223c01 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Thu, 28 Aug 2025 21:07:52 -0700 Subject: [PATCH 03/54] Fix method documentation (#37140) Release Notes: - N/A --- crates/settings/src/settings_store.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index c83719141067c8271e4d64344c957454740febea..fbd0f75aefc2173a3affbb7423d4ccc718679919 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -421,8 +421,7 @@ impl SettingsStore { /// Get the value of a setting. /// - /// Panics if the given setting type has not been registered, or if there is no - /// value for this setting. + /// Does not panic pub fn try_get(&self, path: Option) -> Option<&T> { self.setting_values .get(&TypeId::of::()) From 52da72d80af8a985db74e04c081fef0453e55e00 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Fri, 29 Aug 2025 00:16:49 -0400 Subject: [PATCH 04/54] acp: Install new versions of agent binaries in the background (#37141) Release Notes: - acp: New releases of external agents are now installed in the background. 
Co-authored-by: Conrad Irwin --- crates/agent_servers/src/agent_servers.rs | 186 ++++++++++++++++------ 1 file changed, 139 insertions(+), 47 deletions(-) diff --git a/crates/agent_servers/src/agent_servers.rs b/crates/agent_servers/src/agent_servers.rs index e1b4057b71b0b4aee84548df74935d9b0598f598..83b3be76ce709c9b8c4d9f13ca55632a79e7b677 100644 --- a/crates/agent_servers/src/agent_servers.rs +++ b/crates/agent_servers/src/agent_servers.rs @@ -7,20 +7,24 @@ mod settings; #[cfg(any(test, feature = "test-support"))] pub mod e2e_tests; +use anyhow::Context as _; pub use claude::*; pub use custom::*; +use fs::Fs; +use fs::RemoveOptions; +use fs::RenameOptions; +use futures::StreamExt as _; pub use gemini::*; +use gpui::AppContext; +use node_runtime::NodeRuntime; pub use settings::*; use acp_thread::AgentConnection; use acp_thread::LoadError; use anyhow::Result; use anyhow::anyhow; -use anyhow::bail; use collections::HashMap; -use gpui::AppContext as _; use gpui::{App, AsyncApp, Entity, SharedString, Task}; -use node_runtime::VersionStrategy; use project::Project; use schemars::JsonSchema; use semver::Version; @@ -64,70 +68,158 @@ impl AgentServerDelegate { let project = self.project; let fs = project.read(cx).fs().clone(); let Some(node_runtime) = project.read(cx).node_runtime().cloned() else { - return Task::ready(Err(anyhow!("Missing node runtime"))); + return Task::ready(Err(anyhow!( + "External agents are not yet available in remote projects." 
+ ))); }; let mut status_tx = self.status_tx; cx.spawn(async move |cx| { if !ignore_system_version { if let Some(bin) = find_bin_in_path(binary_name.clone(), &project, cx).await { - return Ok(AgentServerCommand { path: bin, args: Vec::new(), env: Default::default() }) + return Ok(AgentServerCommand { + path: bin, + args: Vec::new(), + env: Default::default(), + }); } } - cx.background_spawn(async move { + cx.spawn(async move |cx| { let node_path = node_runtime.binary_path().await?; - let dir = paths::data_dir().join("external_agents").join(binary_name.as_str()); + let dir = paths::data_dir() + .join("external_agents") + .join(binary_name.as_str()); fs.create_dir(&dir).await?; - let local_executable_path = dir.join(entrypoint_path); - let command = AgentServerCommand { - path: node_path, - args: vec![local_executable_path.to_string_lossy().to_string()], - env: Default::default(), - }; - let installed_version = node_runtime - .npm_package_installed_version(&dir, &package_name) - .await? - .filter(|version| { - Version::from_str(&version) - .is_ok_and(|version| Some(version) >= minimum_version) - }); + let mut stream = fs.read_dir(&dir).await?; + let mut versions = Vec::new(); + let mut to_delete = Vec::new(); + while let Some(entry) = stream.next().await { + let Ok(entry) = entry else { continue }; + let Some(file_name) = entry.file_name() else { + continue; + }; + + if let Some(version) = file_name + .to_str() + .and_then(|name| semver::Version::from_str(&name).ok()) + { + versions.push((file_name.to_owned(), version)); + } else { + to_delete.push(file_name.to_owned()) + } + } - status_tx.send("Checking for latest version…".into())?; - let latest_version = match node_runtime.npm_package_latest_version(&package_name).await + versions.sort(); + let newest_version = if let Some((file_name, version)) = versions.last().cloned() + && minimum_version.is_none_or(|minimum_version| version > minimum_version) { - Ok(latest_version) => latest_version, - Err(e) => { - if let 
Some(installed_version) = installed_version { - log::error!("{e}"); - log::warn!("failed to fetch latest version of {package_name}, falling back to cached version {installed_version}"); - return Ok(command); - } else { - bail!(e); + versions.pop(); + Some(file_name) + } else { + None + }; + to_delete.extend(versions.into_iter().map(|(file_name, _)| file_name)); + + cx.background_spawn({ + let fs = fs.clone(); + let dir = dir.clone(); + async move { + for file_name in to_delete { + fs.remove_dir( + &dir.join(file_name), + RemoveOptions { + recursive: true, + ignore_if_not_exists: false, + }, + ) + .await + .ok(); } } + }) + .detach(); + + let version = if let Some(file_name) = newest_version { + cx.background_spawn({ + let file_name = file_name.clone(); + let dir = dir.clone(); + async move { + let latest_version = + node_runtime.npm_package_latest_version(&package_name).await; + if let Ok(latest_version) = latest_version + && &latest_version != &file_name.to_string_lossy() + { + Self::download_latest_version( + fs, + dir.clone(), + node_runtime, + package_name, + ) + .await + .log_err(); + } + } + }) + .detach(); + file_name + } else { + status_tx.send("Installing…".into()).ok(); + let dir = dir.clone(); + cx.background_spawn(Self::download_latest_version( + fs, + dir.clone(), + node_runtime, + package_name, + )) + .await? 
+ .into() }; + anyhow::Ok(AgentServerCommand { + path: node_path, + args: vec![ + dir.join(version) + .join(entrypoint_path) + .to_string_lossy() + .to_string(), + ], + env: Default::default(), + }) + }) + .await + .map_err(|e| LoadError::FailedToInstall(e.to_string().into()).into()) + }) + } - let should_install = node_runtime - .should_install_npm_package( - &package_name, - &local_executable_path, - &dir, - VersionStrategy::Latest(&latest_version), - ) - .await; + async fn download_latest_version( + fs: Arc, + dir: PathBuf, + node_runtime: NodeRuntime, + package_name: SharedString, + ) -> Result { + let tmp_dir = tempfile::tempdir_in(&dir)?; - if should_install { - status_tx.send("Installing latest version…".into())?; - node_runtime - .npm_install_packages(&dir, &[(&package_name, &latest_version)]) - .await?; - } + node_runtime + .npm_install_packages(tmp_dir.path(), &[(&package_name, "latest")]) + .await?; - Ok(command) - }).await.map_err(|e| LoadError::FailedToInstall(e.to_string().into()).into()) - }) + let version = node_runtime + .npm_package_installed_version(tmp_dir.path(), &package_name) + .await? 
+ .context("expected package to be installed")?; + + fs.rename( + &tmp_dir.keep(), + &dir.join(&version), + RenameOptions { + ignore_if_exists: true, + overwrite: false, + }, + ) + .await?; + + anyhow::Ok(version) } } From 7403a4ba17d05e8ea02f80b5f4ea25d1d3c1cb71 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 29 Aug 2025 12:19:27 +0200 Subject: [PATCH 05/54] Add basic PyEnv and pixi support for python environments (#37156) cc https://github.com/zed-industries/zed/issues/29807 Release Notes: - Fixed terminals and tasks not respecting python pyenv and pixi environments --- crates/language/src/toolchain.rs | 8 +- crates/languages/src/python.rs | 71 ++++++++++--- crates/project/src/debugger/dap_store.rs | 1 - crates/project/src/project_tests.rs | 6 +- crates/project/src/terminals.rs | 125 +++++++++++++---------- crates/remote/src/remote_client.rs | 12 +-- crates/remote/src/transport/ssh.rs | 12 +-- crates/terminal/src/terminal.rs | 20 +++- 8 files changed, 155 insertions(+), 100 deletions(-) diff --git a/crates/language/src/toolchain.rs b/crates/language/src/toolchain.rs index 2a8dfd58418812b94c625845dce9724e145c7388..84b10c7961eddb130f88b24c9e3438ff2882f8d3 100644 --- a/crates/language/src/toolchain.rs +++ b/crates/language/src/toolchain.rs @@ -14,6 +14,7 @@ use collections::HashMap; use fs::Fs; use gpui::{AsyncApp, SharedString}; use settings::WorktreeId; +use task::ShellKind; use crate::{LanguageName, ManifestName}; @@ -68,7 +69,12 @@ pub trait ToolchainLister: Send + Sync { fn term(&self) -> SharedString; /// Returns the name of the manifest file for this toolchain. 
fn manifest_name(&self) -> ManifestName; - async fn activation_script(&self, toolchain: &Toolchain, fs: &dyn Fs) -> Option; + async fn activation_script( + &self, + toolchain: &Toolchain, + shell: ShellKind, + fs: &dyn Fs, + ) -> Vec; } #[async_trait(?Send)] diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 37d38de9dab6bb5968b446e7009a42c5f2e86e86..f76bd8e793d8e391654cb6391086ade528d56264 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -34,7 +34,7 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; -use task::{TaskTemplate, TaskTemplates, VariableName}; +use task::{ShellKind, TaskTemplate, TaskTemplates, VariableName}; use util::ResultExt; pub(crate) struct PyprojectTomlManifestProvider; @@ -894,20 +894,65 @@ impl ToolchainLister for PythonToolchainProvider { fn term(&self) -> SharedString { self.term.clone() } - async fn activation_script(&self, toolchain: &Toolchain, fs: &dyn Fs) -> Option { - let toolchain = serde_json::from_value::( + async fn activation_script( + &self, + toolchain: &Toolchain, + shell: ShellKind, + fs: &dyn Fs, + ) -> Vec { + let Ok(toolchain) = serde_json::from_value::( toolchain.as_json.clone(), - ) - .ok()?; - let mut activation_script = None; - if let Some(prefix) = &toolchain.prefix { - #[cfg(not(target_os = "windows"))] - let path = prefix.join(BINARY_DIR).join("activate"); - #[cfg(target_os = "windows")] - let path = prefix.join(BINARY_DIR).join("activate.ps1"); - if fs.is_file(&path).await { - activation_script = Some(format!(". 
{}", path.display())); + ) else { + return vec![]; + }; + let mut activation_script = vec![]; + + match toolchain.kind { + Some(PythonEnvironmentKind::Pixi) => { + let env = toolchain.name.as_deref().unwrap_or("default"); + activation_script.push(format!("pixi shell -e {env}")) + } + Some(PythonEnvironmentKind::Venv | PythonEnvironmentKind::VirtualEnv) => { + if let Some(prefix) = &toolchain.prefix { + let activate_keyword = match shell { + ShellKind::Cmd => ".", + ShellKind::Nushell => "overlay use", + ShellKind::Powershell => ".", + ShellKind::Fish => "source", + ShellKind::Csh => "source", + ShellKind::Posix => "source", + }; + let activate_script_name = match shell { + ShellKind::Posix => "activate", + ShellKind::Csh => "activate.csh", + ShellKind::Fish => "activate.fish", + ShellKind::Nushell => "activate.nu", + ShellKind::Powershell => "activate.ps1", + ShellKind::Cmd => "activate.bat", + }; + let path = prefix.join(BINARY_DIR).join(activate_script_name); + if fs.is_file(&path).await { + activation_script.push(format!("{activate_keyword} {}", path.display())); + } + } + } + Some(PythonEnvironmentKind::Pyenv) => { + let Some(manager) = toolchain.manager else { + return vec![]; + }; + let version = toolchain.version.as_deref().unwrap_or("system"); + let pyenv = manager.executable; + let pyenv = pyenv.display(); + activation_script.extend(match shell { + ShellKind::Fish => Some(format!("{pyenv} shell - fish {version}")), + ShellKind::Posix => Some(format!("{pyenv} shell - sh {version}")), + ShellKind::Nushell => Some(format!("{pyenv} shell - nu {version}")), + ShellKind::Powershell => None, + ShellKind::Csh => None, + ShellKind::Cmd => None, + }) } + _ => {} } activation_script } diff --git a/crates/project/src/debugger/dap_store.rs b/crates/project/src/debugger/dap_store.rs index 859574c82a5b4470d477df555b314498cbfcd0e0..d8c6d3acc1116e9a97b2f6ca3fc54ec098029cbe 100644 --- a/crates/project/src/debugger/dap_store.rs +++ b/crates/project/src/debugger/dap_store.rs 
@@ -276,7 +276,6 @@ impl DapStore { &binary.arguments, &binary.envs, binary.cwd.map(|path| path.display().to_string()), - None, port_forwarding, ) })??; diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index c814d6207e92608c13502a4da3a0781836acce0e..96f891d9c380fe6feec490627cd782955c833eda 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -40,7 +40,7 @@ use serde_json::json; #[cfg(not(windows))] use std::os; use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll}; -use task::{ResolvedTask, TaskContext}; +use task::{ResolvedTask, ShellKind, TaskContext}; use unindent::Unindent as _; use util::{ TryFutureExt as _, assert_set_eq, maybe, path, @@ -9222,8 +9222,8 @@ fn python_lang(fs: Arc) -> Arc { fn manifest_name(&self) -> ManifestName { SharedString::new_static("pyproject.toml").into() } - async fn activation_script(&self, _: &Toolchain, _: &dyn Fs) -> Option { - None + async fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &dyn Fs) -> Vec { + vec![] } } Arc::new( diff --git a/crates/project/src/terminals.rs b/crates/project/src/terminals.rs index aad5ce941125c2c747df3a76473a9dbffba0b80e..c189242fadc2948593186edb5dcd2c56879f07af 100644 --- a/crates/project/src/terminals.rs +++ b/crates/project/src/terminals.rs @@ -1,7 +1,8 @@ use anyhow::Result; use collections::HashMap; use gpui::{App, AppContext as _, Context, Entity, Task, WeakEntity}; -use itertools::Itertools; + +use itertools::Itertools as _; use language::LanguageName; use remote::RemoteClient; use settings::{Settings, SettingsLocation}; @@ -11,7 +12,7 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; -use task::{Shell, ShellBuilder, SpawnInTerminal}; +use task::{Shell, ShellBuilder, ShellKind, SpawnInTerminal}; use terminal::{ TaskState, TaskStatus, Terminal, TerminalBuilder, terminal_settings::TerminalSettings, }; @@ -131,33 +132,62 @@ impl Project { cx.spawn(async move |project, 
cx| { let activation_script = maybe!(async { let toolchain = toolchain?.await?; - lang_registry - .language_for_name(&toolchain.language_name.0) - .await - .ok()? - .toolchain_lister()? - .activation_script(&toolchain, fs.as_ref()) - .await + Some( + lang_registry + .language_for_name(&toolchain.language_name.0) + .await + .ok()? + .toolchain_lister()? + .activation_script(&toolchain, ShellKind::new(&shell), fs.as_ref()) + .await, + ) }) - .await; + .await + .unwrap_or_default(); project.update(cx, move |this, cx| { let shell = { env.extend(spawn_task.env); match remote_client { - Some(remote_client) => create_remote_shell( - spawn_task - .command - .as_ref() - .map(|command| (command, &spawn_task.args)), - &mut env, - path, - remote_client, - activation_script.clone(), - cx, - )?, + Some(remote_client) => match activation_script.clone() { + activation_script if !activation_script.is_empty() => { + let activation_script = activation_script.join("; "); + let to_run = if let Some(command) = spawn_task.command { + let command: Option> = shlex::try_quote(&command).ok(); + let args = spawn_task + .args + .iter() + .filter_map(|arg| shlex::try_quote(arg).ok()); + command.into_iter().chain(args).join(" ") + } else { + format!("exec {shell} -l") + }; + let args = vec![ + "-c".to_owned(), + format!("{activation_script}; {to_run}",), + ]; + create_remote_shell( + Some((&shell, &args)), + &mut env, + path, + remote_client, + cx, + )? 
+ } + _ => create_remote_shell( + spawn_task + .command + .as_ref() + .map(|command| (command, &spawn_task.args)), + &mut env, + path, + remote_client, + cx, + )?, + }, None => match activation_script.clone() { - Some(activation_script) => { + activation_script if !activation_script.is_empty() => { + let activation_script = activation_script.join("; "); let to_run = if let Some(command) = spawn_task.command { let command: Option> = shlex::try_quote(&command).ok(); let args = spawn_task @@ -169,7 +199,7 @@ impl Project { format!("exec {shell} -l") }; Shell::WithArguments { - program: get_default_system_shell(), + program: shell, args: vec![ "-c".to_owned(), format!("{activation_script}; {to_run}",), @@ -177,7 +207,7 @@ impl Project { title_override: None, } } - None => { + _ => { if let Some(program) = spawn_task.command { Shell::WithArguments { program, @@ -302,31 +332,21 @@ impl Project { .await .ok(); let lister = language?.toolchain_lister(); - lister?.activation_script(&toolchain, fs.as_ref()).await + Some( + lister? + .activation_script(&toolchain, ShellKind::new(&shell), fs.as_ref()) + .await, + ) }) - .await; + .await + .unwrap_or_default(); project.update(cx, move |this, cx| { let shell = { match remote_client { - Some(remote_client) => create_remote_shell( - None, - &mut env, - path, - remote_client, - activation_script.clone(), - cx, - )?, - None => match activation_script.clone() { - Some(activation_script) => Shell::WithArguments { - program: get_default_system_shell(), - args: vec![ - "-c".to_owned(), - format!("{activation_script}; exec {shell} -l",), - ], - title_override: Some(shell.into()), - }, - None => settings.shell, - }, + Some(remote_client) => { + create_remote_shell(None, &mut env, path, remote_client, cx)? 
+ } + None => settings.shell, } }; TerminalBuilder::new( @@ -437,15 +457,10 @@ impl Project { match remote_client { Some(remote_client) => { - let command_template = remote_client.read(cx).build_command( - Some(command), - &args, - &env, - None, - // todo - None, - None, - )?; + let command_template = + remote_client + .read(cx) + .build_command(Some(command), &args, &env, None, None)?; let mut command = std::process::Command::new(command_template.program); command.args(command_template.args); command.envs(command_template.env); @@ -473,7 +488,6 @@ fn create_remote_shell( env: &mut HashMap, working_directory: Option>, remote_client: Entity, - activation_script: Option, cx: &mut App, ) -> Result { // Alacritty sets its terminfo to `alacritty`, this requiring hosts to have it installed @@ -493,7 +507,6 @@ fn create_remote_shell( args.as_slice(), env, working_directory.map(|path| path.display().to_string()), - activation_script, None, )?; *env = command.env; diff --git a/crates/remote/src/remote_client.rs b/crates/remote/src/remote_client.rs index 2b8d9e4a94fb9988e801c5ef9202ee603959d36b..dd529ca87499b0daf2061fd990f7149828e3fce4 100644 --- a/crates/remote/src/remote_client.rs +++ b/crates/remote/src/remote_client.rs @@ -757,7 +757,6 @@ impl RemoteClient { args: &[String], env: &HashMap, working_dir: Option, - activation_script: Option, port_forward: Option<(u16, String, u16)>, ) -> Result { let Some(connection) = self @@ -767,14 +766,7 @@ impl RemoteClient { else { return Err(anyhow!("no connection")); }; - connection.build_command( - program, - args, - env, - working_dir, - activation_script, - port_forward, - ) + connection.build_command(program, args, env, working_dir, port_forward) } pub fn upload_directory( @@ -1006,7 +998,6 @@ pub(crate) trait RemoteConnection: Send + Sync { args: &[String], env: &HashMap, working_dir: Option, - activation_script: Option, port_forward: Option<(u16, String, u16)>, ) -> Result; fn connection_options(&self) -> SshConnectionOptions; 
@@ -1373,7 +1364,6 @@ mod fake { args: &[String], env: &HashMap, _: Option, - _: Option, _: Option<(u16, String, u16)>, ) -> Result { let ssh_program = program.unwrap_or_else(|| "sh".to_string()); diff --git a/crates/remote/src/transport/ssh.rs b/crates/remote/src/transport/ssh.rs index 0036a687a6f73b57723e8c3c9fcffc56cab626c2..b6698014024ab48d171631a190b421dcb614edae 100644 --- a/crates/remote/src/transport/ssh.rs +++ b/crates/remote/src/transport/ssh.rs @@ -30,10 +30,7 @@ use std::{ time::Instant, }; use tempfile::TempDir; -use util::{ - get_default_system_shell, - paths::{PathStyle, RemotePathBuf}, -}; +use util::paths::{PathStyle, RemotePathBuf}; pub(crate) struct SshRemoteConnection { socket: SshSocket, @@ -116,7 +113,6 @@ impl RemoteConnection for SshRemoteConnection { input_args: &[String], input_env: &HashMap, working_dir: Option, - activation_script: Option, port_forward: Option<(u16, String, u16)>, ) -> Result { use std::fmt::Write as _; @@ -138,9 +134,6 @@ impl RemoteConnection for SshRemoteConnection { } else { write!(&mut script, "cd; ").unwrap(); }; - if let Some(activation_script) = activation_script { - write!(&mut script, " {activation_script};").unwrap(); - } for (k, v) in input_env.iter() { if let Some((k, v)) = shlex::try_quote(k).ok().zip(shlex::try_quote(v).ok()) { @@ -162,8 +155,7 @@ impl RemoteConnection for SshRemoteConnection { write!(&mut script, "exec {shell} -l").unwrap(); }; - let sys_shell = get_default_system_shell(); - let shell_invocation = format!("{sys_shell} -c {}", shlex::try_quote(&script).unwrap()); + let shell_invocation = format!("{shell} -c {}", shlex::try_quote(&script).unwrap()); let mut args = Vec::new(); args.extend(self.socket.ssh_args()); diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index a5e0227533cf0e3ecbc9a8f2c6c55fa1254473e3..0f4f2ae97b67b9fd43a63b54088f66c74ca1c855 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -354,7 +354,7 @@ impl 
TerminalBuilder { window_id: u64, completion_tx: Option>>, cx: &App, - activation_script: Option, + activation_script: Vec, ) -> Result { // If the parent environment doesn't have a locale set // (As is the case when launched from a .app on MacOS), @@ -493,7 +493,9 @@ impl TerminalBuilder { let pty_tx = event_loop.channel(); let _io_thread = event_loop.spawn(); // DANGER - let terminal = Terminal { + let no_task = task.is_none(); + + let mut terminal = Terminal { task, pty_tx: Notifier(pty_tx), completion_tx, @@ -518,7 +520,7 @@ impl TerminalBuilder { last_hyperlink_search_position: None, #[cfg(windows)] shell_program, - activation_script, + activation_script: activation_script.clone(), template: CopyTemplate { shell, env, @@ -529,6 +531,14 @@ impl TerminalBuilder { }, }; + if !activation_script.is_empty() && no_task { + for activation_script in activation_script { + terminal.input(activation_script.into_bytes()); + terminal.write_to_pty(b"\n"); + } + terminal.clear(); + } + Ok(TerminalBuilder { terminal, events_rx, @@ -712,7 +722,7 @@ pub struct Terminal { #[cfg(windows)] shell_program: Option, template: CopyTemplate, - activation_script: Option, + activation_script: Vec, } struct CopyTemplate { @@ -2218,7 +2228,7 @@ mod tests { 0, Some(completion_tx), cx, - None, + vec![], ) .unwrap() .subscribe(cx) From d13ba0162ae5d6d200b3e4509e691b57e0a27dda Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 29 Aug 2025 12:44:47 +0200 Subject: [PATCH 06/54] Require authorization for MCP tools (#37155) Release Notes: - Fixed a regression that caused MCP tools to run without requesting authorization first. 
--- crates/agent2/src/tests/mod.rs | 1 + crates/agent2/src/tools/context_server_registry.rs | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/crates/agent2/src/tests/mod.rs b/crates/agent2/src/tests/mod.rs index fbeee46a484a71742dd4ce52b537bebb5da91924..4527cdb056164efa8e3bc81c19969a3fa02d7036 100644 --- a/crates/agent2/src/tests/mod.rs +++ b/crates/agent2/src/tests/mod.rs @@ -950,6 +950,7 @@ async fn test_mcp_tools(cx: &mut TestAppContext) { paths::settings_file(), json!({ "agent": { + "always_allow_tool_actions": true, "profiles": { "test": { "name": "Test Profile", diff --git a/crates/agent2/src/tools/context_server_registry.rs b/crates/agent2/src/tools/context_server_registry.rs index c7963fa6e6e14ffa34d076dc2ca5dbdc23c78cab..e13f47fb2399d7408c5047ff6491ce2d2e76d948 100644 --- a/crates/agent2/src/tools/context_server_registry.rs +++ b/crates/agent2/src/tools/context_server_registry.rs @@ -169,15 +169,18 @@ impl AnyAgentTool for ContextServerTool { fn run( self: Arc, input: serde_json::Value, - _event_stream: ToolCallEventStream, + event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { let Some(server) = self.store.read(cx).get_running_server(&self.server_id) else { return Task::ready(Err(anyhow!("Context server not found"))); }; let tool_name = self.tool.name.clone(); + let authorize = event_stream.authorize(self.initial_title(input.clone()), cx); cx.spawn(async move |_cx| { + authorize.await?; + let Some(protocol) = server.client() else { bail!("Context server not initialized"); }; From 4507f60b8d8b43be7770dfcd0ca52bcb655d5d66 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 29 Aug 2025 13:39:38 +0200 Subject: [PATCH 07/54] languages: Fix python activation scripts not being quoted (#37159) Release Notes: - N/A --- crates/languages/src/python.rs | 115 +++++++++++++---------------- crates/remote/src/transport/ssh.rs | 9 ++- 2 files changed, 57 insertions(+), 67 deletions(-) diff --git a/crates/languages/src/python.rs 
b/crates/languages/src/python.rs index f76bd8e793d8e391654cb6391086ade528d56264..5bdc4aa0d94a7355c60ab8912d9a328a657ad77f 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -328,41 +328,35 @@ impl LspAdapter for PythonLspAdapter { .unwrap_or_default(); // If we have a detected toolchain, configure Pyright to use it - if let Some(toolchain) = toolchain { + if let Some(toolchain) = toolchain + && let Ok(env) = serde_json::from_value::< + pet_core::python_environment::PythonEnvironment, + >(toolchain.as_json.clone()) + { if user_settings.is_null() { user_settings = Value::Object(serde_json::Map::default()); } let object = user_settings.as_object_mut().unwrap(); let interpreter_path = toolchain.path.to_string(); + if let Some(venv_dir) = env.prefix { + // Set venvPath and venv at the root level + // This matches the format of a pyrightconfig.json file + if let Some(parent) = venv_dir.parent() { + // Use relative path if the venv is inside the workspace + let venv_path = if parent == adapter.worktree_root_path() { + ".".to_string() + } else { + parent.to_string_lossy().into_owned() + }; + object.insert("venvPath".to_string(), Value::String(venv_path)); + } - // Detect if this is a virtual environment - if let Some(interpreter_dir) = Path::new(&interpreter_path).parent() - && let Some(venv_dir) = interpreter_dir.parent() - { - // Check if this looks like a virtual environment - if venv_dir.join("pyvenv.cfg").exists() - || venv_dir.join("bin/activate").exists() - || venv_dir.join("Scripts/activate.bat").exists() - { - // Set venvPath and venv at the root level - // This matches the format of a pyrightconfig.json file - if let Some(parent) = venv_dir.parent() { - // Use relative path if the venv is inside the workspace - let venv_path = if parent == adapter.worktree_root_path() { - ".".to_string() - } else { - parent.to_string_lossy().into_owned() - }; - object.insert("venvPath".to_string(), Value::String(venv_path)); - } - - if let 
Some(venv_name) = venv_dir.file_name() { - object.insert( - "venv".to_owned(), - Value::String(venv_name.to_string_lossy().into_owned()), - ); - } + if let Some(venv_name) = venv_dir.file_name() { + object.insert( + "venv".to_owned(), + Value::String(venv_name.to_string_lossy().into_owned()), + ); } } @@ -932,7 +926,8 @@ impl ToolchainLister for PythonToolchainProvider { }; let path = prefix.join(BINARY_DIR).join(activate_script_name); if fs.is_file(&path).await { - activation_script.push(format!("{activate_keyword} {}", path.display())); + activation_script + .push(format!("{activate_keyword} \"{}\"", path.display())); } } } @@ -944,9 +939,9 @@ impl ToolchainLister for PythonToolchainProvider { let pyenv = manager.executable; let pyenv = pyenv.display(); activation_script.extend(match shell { - ShellKind::Fish => Some(format!("{pyenv} shell - fish {version}")), - ShellKind::Posix => Some(format!("{pyenv} shell - sh {version}")), - ShellKind::Nushell => Some(format!("{pyenv} shell - nu {version}")), + ShellKind::Fish => Some(format!("\"{pyenv}\" shell - fish {version}")), + ShellKind::Posix => Some(format!("\"{pyenv}\" shell - sh {version}")), + ShellKind::Nushell => Some(format!("\"{pyenv}\" shell - nu {version}")), ShellKind::Powershell => None, ShellKind::Csh => None, ShellKind::Cmd => None, @@ -1108,10 +1103,10 @@ impl LspAdapter for PyLspAdapter { arguments: vec![], }) } else { - let venv = toolchain?; - let pylsp_path = Path::new(venv.path.as_ref()).parent()?.join("pylsp"); + let toolchain = toolchain?; + let pylsp_path = Path::new(toolchain.path.as_ref()).parent()?.join("pylsp"); pylsp_path.exists().then(|| LanguageServerBinary { - path: venv.path.to_string().into(), + path: toolchain.path.to_string().into(), arguments: vec![pylsp_path.into()], env: None, }) @@ -1575,41 +1570,35 @@ impl LspAdapter for BasedPyrightLspAdapter { .unwrap_or_default(); // If we have a detected toolchain, configure Pyright to use it - if let Some(toolchain) = toolchain { + if let 
Some(toolchain) = toolchain + && let Ok(env) = serde_json::from_value::< + pet_core::python_environment::PythonEnvironment, + >(toolchain.as_json.clone()) + { if user_settings.is_null() { user_settings = Value::Object(serde_json::Map::default()); } let object = user_settings.as_object_mut().unwrap(); let interpreter_path = toolchain.path.to_string(); + if let Some(venv_dir) = env.prefix { + // Set venvPath and venv at the root level + // This matches the format of a pyrightconfig.json file + if let Some(parent) = venv_dir.parent() { + // Use relative path if the venv is inside the workspace + let venv_path = if parent == adapter.worktree_root_path() { + ".".to_string() + } else { + parent.to_string_lossy().into_owned() + }; + object.insert("venvPath".to_string(), Value::String(venv_path)); + } - // Detect if this is a virtual environment - if let Some(interpreter_dir) = Path::new(&interpreter_path).parent() - && let Some(venv_dir) = interpreter_dir.parent() - { - // Check if this looks like a virtual environment - if venv_dir.join("pyvenv.cfg").exists() - || venv_dir.join("bin/activate").exists() - || venv_dir.join("Scripts/activate.bat").exists() - { - // Set venvPath and venv at the root level - // This matches the format of a pyrightconfig.json file - if let Some(parent) = venv_dir.parent() { - // Use relative path if the venv is inside the workspace - let venv_path = if parent == adapter.worktree_root_path() { - ".".to_string() - } else { - parent.to_string_lossy().into_owned() - }; - object.insert("venvPath".to_string(), Value::String(venv_path)); - } - - if let Some(venv_name) = venv_dir.file_name() { - object.insert( - "venv".to_owned(), - Value::String(venv_name.to_string_lossy().into_owned()), - ); - } + if let Some(venv_name) = venv_dir.file_name() { + object.insert( + "venv".to_owned(), + Value::String(venv_name.to_string_lossy().into_owned()), + ); } } diff --git a/crates/remote/src/transport/ssh.rs b/crates/remote/src/transport/ssh.rs index 
b6698014024ab48d171631a190b421dcb614edae..34f1ebf71c278538b57e486856f9b3315a41cf91 100644 --- a/crates/remote/src/transport/ssh.rs +++ b/crates/remote/src/transport/ssh.rs @@ -125,12 +125,13 @@ impl RemoteConnection for SshRemoteConnection { // shlex will wrap the command in single quotes (''), disabling ~ expansion, // replace ith with something that works const TILDE_PREFIX: &'static str = "~/"; - if working_dir.starts_with(TILDE_PREFIX) { + let working_dir = if working_dir.starts_with(TILDE_PREFIX) { let working_dir = working_dir.trim_start_matches("~").trim_start_matches("/"); - write!(&mut script, "cd \"$HOME/{working_dir}\"; ").unwrap(); + format!("$HOME/{working_dir}") } else { - write!(&mut script, "cd \"{working_dir}\"; ").unwrap(); - } + working_dir + }; + write!(&mut script, "cd \"{working_dir}\"; ",).unwrap(); } else { write!(&mut script, "cd; ").unwrap(); }; From 01266d10d60269723c6b8d41bbcbe6363bc38ca0 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 29 Aug 2025 15:23:45 +0300 Subject: [PATCH 08/54] Do not send any LSP logs by default to collab clients (#37163) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Follow-up https://github.com/zed-industries/zed/pull/37083 Noisy RPC LSP logs were functioning this way already, but to keep Collab loaded even less, do not send any kind of logs to the client if the client has a corresponding log tab not opened. This change is pretty raw and does not fully cover scenarious with multiple clients: if one client has a log tab open and another opens tab with another kind of log, the 2nd kind of logs will be streamed only. Also, it should be possible to forward the host logs to the client on enabling — that is not done to keep the change smaller. 
Release Notes: - N/A --- crates/language_tools/src/language_tools.rs | 2 +- crates/language_tools/src/lsp_log_view.rs | 96 ++++++++++++++----- .../language_tools/src/lsp_log_view_tests.rs | 2 +- crates/project/src/lsp_store/log_store.rs | 66 ++++++++----- crates/project/src/project.rs | 12 ++- crates/remote_server/src/headless_project.rs | 6 +- 6 files changed, 135 insertions(+), 49 deletions(-) diff --git a/crates/language_tools/src/language_tools.rs b/crates/language_tools/src/language_tools.rs index c784a67313a904df34c9f2ae071ed5b0e4c11751..aa1672806417493c0c5a877a28fc7906f3da6ff8 100644 --- a/crates/language_tools/src/language_tools.rs +++ b/crates/language_tools/src/language_tools.rs @@ -14,7 +14,7 @@ use ui::{Context, Window}; use workspace::{Item, ItemHandle, SplitDirection, Workspace}; pub fn init(cx: &mut App) { - lsp_log_view::init(true, cx); + lsp_log_view::init(false, cx); syntax_tree_view::init(cx); key_context_view::init(cx); } diff --git a/crates/language_tools/src/lsp_log_view.rs b/crates/language_tools/src/lsp_log_view.rs index e54411f1d43a6e99352b8ef4dfc48cca423badb6..b1f1e5c4f62b4c14b88cdd3de27a1624c7c7158f 100644 --- a/crates/language_tools/src/lsp_log_view.rs +++ b/crates/language_tools/src/lsp_log_view.rs @@ -16,6 +16,7 @@ use project::{ lsp_store::log_store::{self, Event, LanguageServerKind, LogKind, LogStore, Message}, search::SearchQuery, }; +use proto::toggle_lsp_logs::LogType; use std::{any::TypeId, borrow::Cow, sync::Arc}; use ui::{Button, Checkbox, ContextMenu, Label, PopoverMenu, ToggleState, prelude::*}; use util::ResultExt as _; @@ -111,8 +112,8 @@ actions!( ] ); -pub fn init(store_logs: bool, cx: &mut App) { - let log_store = log_store::init(store_logs, cx); +pub fn init(on_headless_host: bool, cx: &mut App) { + let log_store = log_store::init(on_headless_host, cx); log_store.update(cx, |_, cx| { Copilot::global(cx).map(|copilot| { @@ -266,6 +267,19 @@ impl LspLogView { window.focus(&log_view.editor.focus_handle(cx)); }); + 
cx.on_release(|log_view, cx| { + log_view.log_store.update(cx, |log_store, cx| { + for (server_id, state) in &log_store.language_servers { + if let Some(log_kind) = state.toggled_log_kind { + if let Some(log_type) = log_type(log_kind) { + send_toggle_log_message(state, *server_id, false, log_type, cx); + } + } + } + }); + }) + .detach(); + let mut lsp_log_view = Self { focus_handle, editor, @@ -436,6 +450,12 @@ impl LspLogView { cx.notify(); } self.editor.read(cx).focus_handle(cx).focus(window); + self.log_store.update(cx, |log_store, cx| { + let state = log_store.get_language_server_state(server_id)?; + state.toggled_log_kind = Some(LogKind::Logs); + send_toggle_log_message(state, server_id, true, LogType::Log, cx); + Some(()) + }); } fn update_log_level( @@ -472,8 +492,8 @@ impl LspLogView { ) { let trace_level = self .log_store - .update(cx, |this, _| { - Some(this.get_language_server_state(server_id)?.trace_level) + .update(cx, |log_store, _| { + Some(log_store.get_language_server_state(server_id)?.trace_level) }) .unwrap_or(TraceValue::Messages); let log_contents = self @@ -487,6 +507,12 @@ impl LspLogView { let (editor, editor_subscriptions) = Self::editor_for_logs(log_contents, window, cx); self.editor = editor; self.editor_subscriptions = editor_subscriptions; + self.log_store.update(cx, |log_store, cx| { + let state = log_store.get_language_server_state(server_id)?; + state.toggled_log_kind = Some(LogKind::Trace); + send_toggle_log_message(state, server_id, true, LogType::Trace, cx); + Some(()) + }); cx.notify(); } self.editor.read(cx).focus_handle(cx).focus(window); @@ -551,24 +577,7 @@ impl LspLogView { } if let Some(server_state) = log_store.language_servers.get(&server_id) { - if let LanguageServerKind::Remote { project } = &server_state.kind { - project - .update(cx, |project, cx| { - if let Some((client, project_id)) = - project.lsp_store().read(cx).upstream_client() - { - client - .send(proto::ToggleLspLogs { - project_id, - log_type: 
proto::toggle_lsp_logs::LogType::Rpc as i32, - server_id: server_id.to_proto(), - enabled, - }) - .log_err(); - } - }) - .ok(); - } + send_toggle_log_message(server_state, server_id, enabled, LogType::Rpc, cx); }; }); if !enabled && Some(server_id) == self.current_server_id { @@ -644,6 +653,49 @@ impl LspLogView { self.editor_subscriptions = editor_subscriptions; cx.notify(); self.editor.read(cx).focus_handle(cx).focus(window); + self.log_store.update(cx, |log_store, cx| { + let state = log_store.get_language_server_state(server_id)?; + if let Some(log_kind) = state.toggled_log_kind.take() { + if let Some(log_type) = log_type(log_kind) { + send_toggle_log_message(state, server_id, false, log_type, cx); + } + }; + Some(()) + }); + } +} + +fn log_type(log_kind: LogKind) -> Option { + match log_kind { + LogKind::Rpc => Some(LogType::Rpc), + LogKind::Trace => Some(LogType::Trace), + LogKind::Logs => Some(LogType::Log), + LogKind::ServerInfo => None, + } +} + +fn send_toggle_log_message( + server_state: &log_store::LanguageServerState, + server_id: LanguageServerId, + enabled: bool, + log_type: LogType, + cx: &mut App, +) { + if let LanguageServerKind::Remote { project } = &server_state.kind { + project + .update(cx, |project, cx| { + if let Some((client, project_id)) = project.lsp_store().read(cx).upstream_client() { + client + .send(proto::ToggleLspLogs { + project_id, + log_type: log_type as i32, + server_id: server_id.to_proto(), + enabled, + }) + .log_err(); + } + }) + .ok(); } } diff --git a/crates/language_tools/src/lsp_log_view_tests.rs b/crates/language_tools/src/lsp_log_view_tests.rs index bfd093e3db1c1bc0dc04b111d2072339f1314b8e..d572c4375ed09997dc57d6c58e6c90f3e55775b6 100644 --- a/crates/language_tools/src/lsp_log_view_tests.rs +++ b/crates/language_tools/src/lsp_log_view_tests.rs @@ -53,7 +53,7 @@ async fn test_lsp_log_view(cx: &mut TestAppContext) { }, ); - let log_store = cx.new(|cx| LogStore::new(true, cx)); + let log_store = cx.new(|cx| 
LogStore::new(false, cx)); log_store.update(cx, |store, cx| store.add_project(&project, cx)); let _rust_buffer = project diff --git a/crates/project/src/lsp_store/log_store.rs b/crates/project/src/lsp_store/log_store.rs index 1fbdb494a303b47bea181c5046e51f3c0b21c5c1..67a20dd6cd8b2f5d6ca48d7790fc0b2e60aff370 100644 --- a/crates/project/src/lsp_store/log_store.rs +++ b/crates/project/src/lsp_store/log_store.rs @@ -21,8 +21,8 @@ const SERVER_LOGS: &str = "Server Logs"; const SERVER_TRACE: &str = "Server Trace"; const SERVER_INFO: &str = "Server Info"; -pub fn init(store_logs: bool, cx: &mut App) -> Entity { - let log_store = cx.new(|cx| LogStore::new(store_logs, cx)); +pub fn init(on_headless_host: bool, cx: &mut App) -> Entity { + let log_store = cx.new(|cx| LogStore::new(on_headless_host, cx)); cx.set_global(GlobalLogStore(log_store.clone())); log_store } @@ -43,7 +43,7 @@ pub enum Event { impl EventEmitter for LogStore {} pub struct LogStore { - store_logs: bool, + on_headless_host: bool, projects: HashMap, ProjectState>, pub copilot_log_subscription: Option, pub language_servers: HashMap, @@ -138,6 +138,7 @@ pub struct LanguageServerState { pub trace_level: TraceValue, pub log_level: MessageType, io_logs_subscription: Option, + pub toggled_log_kind: Option, } impl std::fmt::Debug for LanguageServerState { @@ -151,6 +152,7 @@ impl std::fmt::Debug for LanguageServerState { .field("rpc_state", &self.rpc_state) .field("trace_level", &self.trace_level) .field("log_level", &self.log_level) + .field("toggled_log_kind", &self.toggled_log_kind) .finish_non_exhaustive() } } @@ -226,14 +228,14 @@ impl LogKind { } impl LogStore { - pub fn new(store_logs: bool, cx: &mut Context) -> Self { + pub fn new(on_headless_host: bool, cx: &mut Context) -> Self { let (io_tx, mut io_rx) = mpsc::unbounded(); let log_store = Self { projects: HashMap::default(), language_servers: HashMap::default(), copilot_log_subscription: None, - store_logs, + on_headless_host, io_tx, }; cx.spawn(async 
move |log_store, cx| { @@ -351,12 +353,26 @@ impl LogStore { } } } - crate::Event::ToggleLspLogs { server_id, enabled } => { - // we do not support any other log toggling yet - if *enabled { - log_store.enable_rpc_trace_for_language_server(*server_id); - } else { - log_store.disable_rpc_trace_for_language_server(*server_id); + crate::Event::ToggleLspLogs { + server_id, + enabled, + toggled_log_kind, + } => { + if let Some(server_state) = + log_store.get_language_server_state(*server_id) + { + if *enabled { + server_state.toggled_log_kind = Some(*toggled_log_kind); + } else { + server_state.toggled_log_kind = None; + } + } + if LogKind::Rpc == *toggled_log_kind { + if *enabled { + log_store.enable_rpc_trace_for_language_server(*server_id); + } else { + log_store.disable_rpc_trace_for_language_server(*server_id); + } } } _ => {} @@ -395,6 +411,7 @@ impl LogStore { trace_level: TraceValue::Off, log_level: MessageType::LOG, io_logs_subscription: None, + toggled_log_kind: None, } }); @@ -425,7 +442,7 @@ impl LogStore { message: &str, cx: &mut Context, ) -> Option<()> { - let store_logs = self.store_logs; + let store_logs = !self.on_headless_host; let language_server_state = self.get_language_server_state(id)?; let log_lines = &mut language_server_state.log_messages; @@ -464,7 +481,7 @@ impl LogStore { verbose_info: Option, cx: &mut Context, ) -> Option<()> { - let store_logs = self.store_logs; + let store_logs = !self.on_headless_host; let language_server_state = self.get_language_server_state(id)?; let log_lines = &mut language_server_state.trace_messages; @@ -530,7 +547,7 @@ impl LogStore { message: &str, cx: &mut Context<'_, Self>, ) { - let store_logs = self.store_logs; + let store_logs = !self.on_headless_host; let Some(state) = self .get_language_server_state(language_server_id) .and_then(|state| state.rpc_state.as_mut()) @@ -673,6 +690,7 @@ impl LogStore { } fn emit_event(&mut self, e: Event, cx: &mut Context) { + let on_headless_host = self.on_headless_host; 
match &e { Event::NewServerLogEntry { id, kind, text } => { if let Some(state) = self.get_language_server_state(*id) { @@ -686,14 +704,18 @@ impl LogStore { } .and_then(|lsp_store| lsp_store.read(cx).downstream_client()); if let Some((client, project_id)) = downstream_client { - client - .send(proto::LanguageServerLog { - project_id, - language_server_id: id.to_proto(), - message: text.clone(), - log_type: Some(kind.to_proto()), - }) - .ok(); + if on_headless_host + || Some(LogKind::from_server_log_type(kind)) == state.toggled_log_kind + { + client + .send(proto::LanguageServerLog { + project_id, + language_server_id: id.to_proto(), + message: text.clone(), + log_type: Some(kind.to_proto()), + }) + .ok(); + } } } } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 68e04cfd3bec25638964e8fccd675279c450795d..8c289c935cd2bc4ebb919d171f0a9e4f0334b334 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -33,7 +33,7 @@ mod yarn; use dap::inline_value::{InlineValueLocation, VariableLookupKind, VariableScope}; -use crate::git_store::GitStore; +use crate::{git_store::GitStore, lsp_store::log_store::LogKind}; pub use git_store::{ ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate, git_traversal::{ChildEntriesGitIter, GitEntry, GitEntryRef, GitTraversal}, @@ -285,6 +285,7 @@ pub enum Event { ToggleLspLogs { server_id: LanguageServerId, enabled: bool, + toggled_log_kind: LogKind, }, Toast { notification_id: SharedString, @@ -4719,10 +4720,19 @@ impl Project { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result<()> { + let toggled_log_kind = + match proto::toggle_lsp_logs::LogType::from_i32(envelope.payload.log_type) + .context("invalid log type")? 
+ { + proto::toggle_lsp_logs::LogType::Log => LogKind::Logs, + proto::toggle_lsp_logs::LogType::Trace => LogKind::Trace, + proto::toggle_lsp_logs::LogType::Rpc => LogKind::Rpc, + }; project.update(&mut cx, |_, cx| { cx.emit(Event::ToggleLspLogs { server_id: LanguageServerId::from_proto(envelope.payload.server_id), enabled: envelope.payload.enabled, + toggled_log_kind, }) })?; Ok(()) diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index c81a69c2b308d2623ada78b1f38df80f96f8fe14..f55826631b46b4f9eaaa17d8a9f4b0603a07fcc3 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -67,7 +67,7 @@ impl HeadlessProject { settings::init(cx); language::init(cx); project::Project::init_settings(cx); - log_store::init(false, cx); + log_store::init(true, cx); } pub fn new( @@ -546,7 +546,9 @@ impl HeadlessProject { .context("lsp logs store is missing")?; lsp_logs.update(&mut cx, |lsp_logs, _| { - // we do not support any other log toggling yet + // RPC logs are very noisy and we need to toggle it on the headless server too. + // The rest of the logs for the ssh project are very important to have toggled always, + // to e.g. send language server error logs to the client before anything is toggled. 
if envelope.payload.enabled { lsp_logs.enable_rpc_trace_for_language_server(server_id); } else { From ff035e8a22fb40ea29af97f974351151af226198 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 29 Aug 2025 09:26:52 -0300 Subject: [PATCH 09/54] agent: Add CC item in the settings view (#37164) Release Notes: - N/A --- crates/agent_ui/src/agent_configuration.rs | 49 ++++++++++++---------- 1 file changed, 27 insertions(+), 22 deletions(-) diff --git a/crates/agent_ui/src/agent_configuration.rs b/crates/agent_ui/src/agent_configuration.rs index 23b6e69a56886ca2e5d7c4bdbd27ee8fb1307629..5f0b6f33c38b0b064fcb8b287a901a33e9e7186b 100644 --- a/crates/agent_ui/src/agent_configuration.rs +++ b/crates/agent_ui/src/agent_configuration.rs @@ -331,6 +331,7 @@ impl AgentConfiguration { .gap_0p5() .child( h_flex() + .pr_1() .w_full() .gap_2() .justify_between() @@ -1022,6 +1023,7 @@ impl AgentConfiguration { .gap_0p5() .child( h_flex() + .pr_1() .w_full() .gap_2() .justify_between() @@ -1052,7 +1054,7 @@ impl AgentConfiguration { ) .child( Label::new( - "Bring the agent of your choice to Zed via our new Agent Client Protocol.", + "All agents connected through the Agent Client Protocol.", ) .color(Color::Muted), ), @@ -1063,7 +1065,12 @@ impl AgentConfiguration { ExternalAgent::Gemini, cx, )) - // TODO add CC + .child(self.render_agent_server( + IconName::AiClaude, + "Claude Code", + ExternalAgent::ClaudeCode, + cx, + )) .children(user_defined_agents), ) } @@ -1093,26 +1100,24 @@ impl AgentConfiguration { .child(Label::new(name.clone())), ) .child( - h_flex().gap_1().child( - Button::new( - SharedString::from(format!("start_acp_thread-{name}")), - "Start New Thread", - ) - .label_size(LabelSize::Small) - .icon(IconName::Thread) - .icon_position(IconPosition::Start) - .icon_size(IconSize::XSmall) - .icon_color(Color::Muted) - .on_click(move |_, window, cx| { - window.dispatch_action( - NewExternalAgentThread { - agent: 
Some(agent.clone()), - } - .boxed_clone(), - cx, - ); - }), - ), + Button::new( + SharedString::from(format!("start_acp_thread-{name}")), + "Start New Thread", + ) + .label_size(LabelSize::Small) + .icon(IconName::Thread) + .icon_position(IconPosition::Start) + .icon_size(IconSize::XSmall) + .icon_color(Color::Muted) + .on_click(move |_, window, cx| { + window.dispatch_action( + NewExternalAgentThread { + agent: Some(agent.clone()), + } + .boxed_clone(), + cx, + ); + }), ) } } From 20d32d111c15dfef49a1c9c1267d33250d58b67b Mon Sep 17 00:00:00 2001 From: Wouter Kayser Date: Fri, 29 Aug 2025 16:08:42 +0200 Subject: [PATCH 10/54] Update lsp-types to properly handle brackets (#37166) Closes #21062 See also this pull request: https://github.com/zed-industries/lsp-types/pull/6. Release Notes: - Fixed incorrect URL encoding of file paths with `[` `]` in them --- Cargo.lock | 2 +- Cargo.toml | 2 +- crates/agent_ui/src/acp/message_editor.rs | 2 +- crates/collab/src/tests/editor_tests.rs | 52 ++++++------ crates/collab/src/tests/integration_tests.rs | 26 +++--- .../random_project_collaboration_tests.rs | 2 +- crates/copilot/src/copilot.rs | 12 +-- crates/copilot/src/request.rs | 2 +- crates/diagnostics/src/diagnostics_tests.rs | 45 +++++----- crates/editor/src/editor_tests.rs | 36 ++++---- crates/editor/src/inlay_hint_cache.rs | 28 +++--- .../src/test/editor_lsp_test_context.rs | 6 +- crates/language/src/buffer.rs | 2 +- crates/language/src/proto.rs | 2 +- crates/languages/src/rust.rs | 2 +- crates/lsp/src/lsp.rs | 38 ++++----- crates/project/src/lsp_command.rs | 30 +++---- crates/project/src/lsp_store.rs | 54 +++++++----- .../project/src/lsp_store/lsp_ext_command.rs | 2 +- crates/project/src/project.rs | 4 +- crates/project/src/project_tests.rs | 85 +++++++++---------- crates/project_symbols/src/project_symbols.rs | 2 +- .../remote_server/src/remote_editing_tests.rs | 2 +- 23 files changed, 223 insertions(+), 215 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 
aeacdd899685e46ebd1d38df7cd58b19810de9c5..e493c99a2fc0f9514503b7cee8ef41cca582c387 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9770,7 +9770,7 @@ dependencies = [ [[package]] name = "lsp-types" version = "0.95.1" -source = "git+https://github.com/zed-industries/lsp-types?rev=39f629bdd03d59abd786ed9fc27e8bca02c0c0ec#39f629bdd03d59abd786ed9fc27e8bca02c0c0ec" +source = "git+https://github.com/zed-industries/lsp-types?rev=0874f8742fe55b4dc94308c1e3c0069710d8eeaf#0874f8742fe55b4dc94308c1e3c0069710d8eeaf" dependencies = [ "bitflags 1.3.2", "serde", diff --git a/Cargo.toml b/Cargo.toml index 974796a5e5ff4a3093fc8b492628e1c6d33a616a..d346043c0ef64b3cce0827c2553c5b3c254d66f7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -519,7 +519,7 @@ libc = "0.2" libsqlite3-sys = { version = "0.30.1", features = ["bundled"] } linkify = "0.10.0" log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] } -lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "39f629bdd03d59abd786ed9fc27e8bca02c0c0ec" } +lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "0874f8742fe55b4dc94308c1e3c0069710d8eeaf" } mach2 = "0.5" markup5ever_rcdom = "0.3.0" metal = "0.29" diff --git a/crates/agent_ui/src/acp/message_editor.rs b/crates/agent_ui/src/acp/message_editor.rs index f4ce2652d60c76848827967f8a34a23376e7406f..bd5e4faf7aedba4644206794a1c7a837517c52d6 100644 --- a/crates/agent_ui/src/acp/message_editor.rs +++ b/crates/agent_ui/src/acp/message_editor.rs @@ -2128,7 +2128,7 @@ mod tests { lsp::SymbolInformation { name: "MySymbol".into(), location: lsp::Location { - uri: lsp::Url::from_file_path(path!("/dir/a/one.txt")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/dir/a/one.txt")).unwrap(), range: lsp::Range::new( lsp::Position::new(0, 0), lsp::Position::new(0, 1), diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index 59d66f1821e60ecbf3a7550c1385fa6de7ae047d..bfea497e9b57d806af1f13bb3af7e88521d03816 
100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ -369,7 +369,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu .set_request_handler::(|params, _| async move { assert_eq!( params.text_document_position.text_document.uri, - lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), ); assert_eq!( params.text_document_position.position, @@ -488,7 +488,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu .set_request_handler::(|params, _| async move { assert_eq!( params.text_document_position.text_document.uri, - lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), ); assert_eq!( params.text_document_position.position, @@ -615,7 +615,7 @@ async fn test_collaborating_with_code_actions( .set_request_handler::(|params, _| async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), ); assert_eq!(params.range.start, lsp::Position::new(0, 0)); assert_eq!(params.range.end, lsp::Position::new(0, 0)); @@ -637,7 +637,7 @@ async fn test_collaborating_with_code_actions( .set_request_handler::(|params, _| async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), ); assert_eq!(params.range.start, lsp::Position::new(1, 31)); assert_eq!(params.range.end, lsp::Position::new(1, 31)); @@ -649,7 +649,7 @@ async fn test_collaborating_with_code_actions( changes: Some( [ ( - lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), vec![lsp::TextEdit::new( lsp::Range::new( lsp::Position::new(1, 22), @@ -659,7 +659,7 @@ async fn test_collaborating_with_code_actions( )], ), ( - 
lsp::Url::from_file_path(path!("/a/other.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/a/other.rs")).unwrap(), vec![lsp::TextEdit::new( lsp::Range::new( lsp::Position::new(0, 0), @@ -721,7 +721,7 @@ async fn test_collaborating_with_code_actions( changes: Some( [ ( - lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), vec![lsp::TextEdit::new( lsp::Range::new( lsp::Position::new(1, 22), @@ -731,7 +731,7 @@ async fn test_collaborating_with_code_actions( )], ), ( - lsp::Url::from_file_path(path!("/a/other.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/a/other.rs")).unwrap(), vec![lsp::TextEdit::new( lsp::Range::new( lsp::Position::new(0, 0), @@ -949,14 +949,14 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T changes: Some( [ ( - lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(), vec![lsp::TextEdit::new( lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)), "THREE".to_string(), )], ), ( - lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(), vec![ lsp::TextEdit::new( lsp::Range::new( @@ -1574,7 +1574,7 @@ async fn test_on_input_format_from_host_to_guest( |params, _| async move { assert_eq!( params.text_document_position.text_document.uri, - lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), ); assert_eq!( params.text_document_position.position, @@ -1717,7 +1717,7 @@ async fn test_on_input_format_from_guest_to_host( .set_request_handler::(|params, _| async move { assert_eq!( params.text_document_position.text_document.uri, - lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), ); assert_eq!( params.text_document_position.position, @@ -1901,7 +1901,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( 
async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), ); let edits_made = task_edits_made.load(atomic::Ordering::Acquire); Ok(Some(vec![lsp::InlayHint { @@ -2151,7 +2151,7 @@ async fn test_inlay_hint_refresh_is_forwarded( async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), ); let other_hints = task_other_hints.load(atomic::Ordering::Acquire); let character = if other_hints { 0 } else { 2 }; @@ -2332,7 +2332,7 @@ async fn test_lsp_document_color(cx_a: &mut TestAppContext, cx_b: &mut TestAppCo async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), ); requests_made.fetch_add(1, atomic::Ordering::Release); Ok(vec![lsp::ColorInformation { @@ -2621,11 +2621,11 @@ async fn test_lsp_pull_diagnostics( let requests_made = closure_diagnostics_pulls_made.clone(); let diagnostics_pulls_result_ids = closure_diagnostics_pulls_result_ids.clone(); async move { - let message = if lsp::Url::from_file_path(path!("/a/main.rs")).unwrap() + let message = if lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap() == params.text_document.uri { expected_pull_diagnostic_main_message.to_string() - } else if lsp::Url::from_file_path(path!("/a/lib.rs")).unwrap() + } else if lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap() == params.text_document.uri { expected_pull_diagnostic_lib_message.to_string() @@ -2717,7 +2717,7 @@ async fn test_lsp_pull_diagnostics( items: vec![ lsp::WorkspaceDocumentDiagnosticReport::Full( lsp::WorkspaceFullDocumentDiagnosticReport { - uri: lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), version: None, full_document_diagnostic_report: 
lsp::FullDocumentDiagnosticReport { @@ -2746,7 +2746,7 @@ async fn test_lsp_pull_diagnostics( ), lsp::WorkspaceDocumentDiagnosticReport::Full( lsp::WorkspaceFullDocumentDiagnosticReport { - uri: lsp::Url::from_file_path(path!("/a/lib.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap(), version: None, full_document_diagnostic_report: lsp::FullDocumentDiagnosticReport { @@ -2821,7 +2821,7 @@ async fn test_lsp_pull_diagnostics( fake_language_server.notify::( &lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), diagnostics: vec![lsp::Diagnostic { range: lsp::Range { start: lsp::Position { @@ -2842,7 +2842,7 @@ async fn test_lsp_pull_diagnostics( ); fake_language_server.notify::( &lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(path!("/a/lib.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap(), diagnostics: vec![lsp::Diagnostic { range: lsp::Range { start: lsp::Position { @@ -2870,7 +2870,7 @@ async fn test_lsp_pull_diagnostics( items: vec![ lsp::WorkspaceDocumentDiagnosticReport::Full( lsp::WorkspaceFullDocumentDiagnosticReport { - uri: lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), version: None, full_document_diagnostic_report: lsp::FullDocumentDiagnosticReport { @@ -2902,7 +2902,7 @@ async fn test_lsp_pull_diagnostics( ), lsp::WorkspaceDocumentDiagnosticReport::Full( lsp::WorkspaceFullDocumentDiagnosticReport { - uri: lsp::Url::from_file_path(path!("/a/lib.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap(), version: None, full_document_diagnostic_report: lsp::FullDocumentDiagnosticReport { @@ -3051,7 +3051,7 @@ async fn test_lsp_pull_diagnostics( lsp::WorkspaceDiagnosticReportResult::Report(lsp::WorkspaceDiagnosticReport { items: vec![lsp::WorkspaceDocumentDiagnosticReport::Full( 
lsp::WorkspaceFullDocumentDiagnosticReport { - uri: lsp::Url::from_file_path(path!("/a/lib.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap(), version: None, full_document_diagnostic_report: lsp::FullDocumentDiagnosticReport { result_id: Some(format!( @@ -4040,7 +4040,7 @@ async fn test_client_can_query_lsp_ext(cx_a: &mut TestAppContext, cx_b: &mut Tes |params, _| async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), ); assert_eq!(params.position, lsp::Position::new(0, 0)); Ok(Some(ExpandedMacro { @@ -4075,7 +4075,7 @@ async fn test_client_can_query_lsp_ext(cx_a: &mut TestAppContext, cx_b: &mut Tes |params, _| async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), ); assert_eq!( params.position, diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index 5c732530480a14ab28e231aa0fae1b79ef2703fb..6bb2db05201ea464053a758b390e84ccdfc6527a 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -4075,7 +4075,7 @@ async fn test_collaborating_with_diagnostics( .await; fake_language_server.notify::( &lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(path!("/a/a.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/a/a.rs")).unwrap(), version: None, diagnostics: vec![lsp::Diagnostic { severity: Some(lsp::DiagnosticSeverity::WARNING), @@ -4095,7 +4095,7 @@ async fn test_collaborating_with_diagnostics( .unwrap(); fake_language_server.notify::( &lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(path!("/a/a.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/a/a.rs")).unwrap(), version: None, diagnostics: vec![lsp::Diagnostic { severity: Some(lsp::DiagnosticSeverity::ERROR), @@ -4169,7 
+4169,7 @@ async fn test_collaborating_with_diagnostics( // Simulate a language server reporting more errors for a file. fake_language_server.notify::( &lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(path!("/a/a.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/a/a.rs")).unwrap(), version: None, diagnostics: vec![ lsp::Diagnostic { @@ -4265,7 +4265,7 @@ async fn test_collaborating_with_diagnostics( // Simulate a language server reporting no errors for a file. fake_language_server.notify::( &lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(path!("/a/a.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/a/a.rs")).unwrap(), version: None, diagnostics: Vec::new(), }, @@ -4372,7 +4372,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering( for file_name in file_names { fake_language_server.notify::( &lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(Path::new(path!("/test")).join(file_name)).unwrap(), + uri: lsp::Uri::from_file_path(Path::new(path!("/test")).join(file_name)).unwrap(), version: None, diagnostics: vec![lsp::Diagnostic { severity: Some(lsp::DiagnosticSeverity::WARNING), @@ -4838,7 +4838,7 @@ async fn test_definition( |_, _| async move { Ok(Some(lsp::GotoDefinitionResponse::Scalar( lsp::Location::new( - lsp::Url::from_file_path(path!("/root/dir-2/b.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/root/dir-2/b.rs")).unwrap(), lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)), ), ))) @@ -4876,7 +4876,7 @@ async fn test_definition( |_, _| async move { Ok(Some(lsp::GotoDefinitionResponse::Scalar( lsp::Location::new( - lsp::Url::from_file_path(path!("/root/dir-2/b.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/root/dir-2/b.rs")).unwrap(), lsp::Range::new(lsp::Position::new(1, 6), lsp::Position::new(1, 11)), ), ))) @@ -4914,7 +4914,7 @@ async fn test_definition( ); Ok(Some(lsp::GotoDefinitionResponse::Scalar( lsp::Location::new( - 
lsp::Url::from_file_path(path!("/root/dir-2/c.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/root/dir-2/c.rs")).unwrap(), lsp::Range::new(lsp::Position::new(0, 5), lsp::Position::new(0, 7)), ), ))) @@ -5049,15 +5049,15 @@ async fn test_references( lsp_response_tx .unbounded_send(Ok(Some(vec![ lsp::Location { - uri: lsp::Url::from_file_path(path!("/root/dir-1/two.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/root/dir-1/two.rs")).unwrap(), range: lsp::Range::new(lsp::Position::new(0, 24), lsp::Position::new(0, 27)), }, lsp::Location { - uri: lsp::Url::from_file_path(path!("/root/dir-1/two.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/root/dir-1/two.rs")).unwrap(), range: lsp::Range::new(lsp::Position::new(0, 35), lsp::Position::new(0, 38)), }, lsp::Location { - uri: lsp::Url::from_file_path(path!("/root/dir-2/three.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/root/dir-2/three.rs")).unwrap(), range: lsp::Range::new(lsp::Position::new(0, 37), lsp::Position::new(0, 40)), }, ]))) @@ -5625,7 +5625,7 @@ async fn test_project_symbols( lsp::SymbolInformation { name: "TWO".into(), location: lsp::Location { - uri: lsp::Url::from_file_path(path!("/code/crate-2/two.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/code/crate-2/two.rs")).unwrap(), range: lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)), }, kind: lsp::SymbolKind::CONSTANT, @@ -5737,7 +5737,7 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it( |_, _| async move { Ok(Some(lsp::GotoDefinitionResponse::Scalar( lsp::Location::new( - lsp::Url::from_file_path(path!("/root/b.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/root/b.rs")).unwrap(), lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)), ), ))) diff --git a/crates/collab/src/tests/random_project_collaboration_tests.rs b/crates/collab/src/tests/random_project_collaboration_tests.rs index ac5c4c54ca570bf5545505419cb20a021ca97202..bfe05c4a1d600bb280d3821350204d0b2d0d6e08 
100644 --- a/crates/collab/src/tests/random_project_collaboration_tests.rs +++ b/crates/collab/src/tests/random_project_collaboration_tests.rs @@ -1101,7 +1101,7 @@ impl RandomizedTest for ProjectCollaborationTest { files .into_iter() .map(|file| lsp::Location { - uri: lsp::Url::from_file_path(file).unwrap(), + uri: lsp::Uri::from_file_path(file).unwrap(), range: Default::default(), }) .collect(), diff --git a/crates/copilot/src/copilot.rs b/crates/copilot/src/copilot.rs index b7d8423fd7d4d601250172a5789cbe83620849af..d0a57735ab5a0342b245aa8db72e6b021b3943de 100644 --- a/crates/copilot/src/copilot.rs +++ b/crates/copilot/src/copilot.rs @@ -197,7 +197,7 @@ impl Status { } struct RegisteredBuffer { - uri: lsp::Url, + uri: lsp::Uri, language_id: String, snapshot: BufferSnapshot, snapshot_version: i32, @@ -1108,9 +1108,9 @@ fn id_for_language(language: Option<&Arc>) -> String { .unwrap_or_else(|| "plaintext".to_string()) } -fn uri_for_buffer(buffer: &Entity, cx: &App) -> Result { +fn uri_for_buffer(buffer: &Entity, cx: &App) -> Result { if let Some(file) = buffer.read(cx).file().and_then(|file| file.as_local()) { - lsp::Url::from_file_path(file.abs_path(cx)) + lsp::Uri::from_file_path(file.abs_path(cx)) } else { format!("buffer://{}", buffer.entity_id()) .parse() @@ -1201,7 +1201,7 @@ mod tests { let (copilot, mut lsp) = Copilot::fake(cx); let buffer_1 = cx.new(|cx| Buffer::local("Hello", cx)); - let buffer_1_uri: lsp::Url = format!("buffer://{}", buffer_1.entity_id().as_u64()) + let buffer_1_uri: lsp::Uri = format!("buffer://{}", buffer_1.entity_id().as_u64()) .parse() .unwrap(); copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_1, cx)); @@ -1219,7 +1219,7 @@ mod tests { ); let buffer_2 = cx.new(|cx| Buffer::local("Goodbye", cx)); - let buffer_2_uri: lsp::Url = format!("buffer://{}", buffer_2.entity_id().as_u64()) + let buffer_2_uri: lsp::Uri = format!("buffer://{}", buffer_2.entity_id().as_u64()) .parse() .unwrap(); copilot.update(cx, |copilot, cx| 
copilot.register_buffer(&buffer_2, cx)); @@ -1270,7 +1270,7 @@ mod tests { text_document: lsp::TextDocumentIdentifier::new(buffer_1_uri), } ); - let buffer_1_uri = lsp::Url::from_file_path(path!("/root/child/buffer-1")).unwrap(); + let buffer_1_uri = lsp::Uri::from_file_path(path!("/root/child/buffer-1")).unwrap(); assert_eq!( lsp.receive_notification::() .await, diff --git a/crates/copilot/src/request.rs b/crates/copilot/src/request.rs index 0deabe16d15c4a502b278c4a631720094ad18af7..85d6254dc060824a9b2686e8f53090fccb39980e 100644 --- a/crates/copilot/src/request.rs +++ b/crates/copilot/src/request.rs @@ -102,7 +102,7 @@ pub struct GetCompletionsDocument { pub tab_size: u32, pub indent_size: u32, pub insert_spaces: bool, - pub uri: lsp::Url, + pub uri: lsp::Uri, pub relative_path: String, pub position: lsp::Position, pub version: usize, diff --git a/crates/diagnostics/src/diagnostics_tests.rs b/crates/diagnostics/src/diagnostics_tests.rs index 4a544f9ea718f0df037fb3012c48efec1c804b43..fdca32520d1e08d562ac6f533968c146b5ec0673 100644 --- a/crates/diagnostics/src/diagnostics_tests.rs +++ b/crates/diagnostics/src/diagnostics_tests.rs @@ -24,6 +24,7 @@ use settings::SettingsStore; use std::{ env, path::{Path, PathBuf}, + str::FromStr, }; use unindent::Unindent as _; use util::{RandomCharIter, path, post_inc}; @@ -70,7 +71,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) { let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); let cx = &mut VisualTestContext::from_window(*window, cx); let workspace = window.root(cx).unwrap(); - let uri = lsp::Url::from_file_path(path!("/test/main.rs")).unwrap(); + let uri = lsp::Uri::from_file_path(path!("/test/main.rs")).unwrap(); // Create some diagnostics lsp_store.update(cx, |lsp_store, cx| { @@ -167,7 +168,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) { .update_diagnostics( language_server_id, lsp::PublishDiagnosticsParams { - uri: 
lsp::Url::from_file_path(path!("/test/consts.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/test/consts.rs")).unwrap(), diagnostics: vec![lsp::Diagnostic { range: lsp::Range::new( lsp::Position::new(0, 15), @@ -243,7 +244,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) { .update_diagnostics( language_server_id, lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(path!("/test/consts.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/test/consts.rs")).unwrap(), diagnostics: vec![ lsp::Diagnostic { range: lsp::Range::new( @@ -356,14 +357,14 @@ async fn test_diagnostics_with_folds(cx: &mut TestAppContext) { .update_diagnostics( server_id_1, lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(path!("/test/main.js")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/test/main.js")).unwrap(), diagnostics: vec![lsp::Diagnostic { range: lsp::Range::new(lsp::Position::new(4, 0), lsp::Position::new(4, 4)), severity: Some(lsp::DiagnosticSeverity::WARNING), message: "no method `tset`".to_string(), related_information: Some(vec![lsp::DiagnosticRelatedInformation { location: lsp::Location::new( - lsp::Url::from_file_path(path!("/test/main.js")).unwrap(), + lsp::Uri::from_file_path(path!("/test/main.js")).unwrap(), lsp::Range::new( lsp::Position::new(0, 9), lsp::Position::new(0, 13), @@ -465,7 +466,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) { .update_diagnostics( server_id_1, lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(path!("/test/main.js")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/test/main.js")).unwrap(), diagnostics: vec![lsp::Diagnostic { range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 1)), severity: Some(lsp::DiagnosticSeverity::WARNING), @@ -509,7 +510,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) { .update_diagnostics( server_id_2, lsp::PublishDiagnosticsParams { - uri: 
lsp::Url::from_file_path(path!("/test/main.js")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/test/main.js")).unwrap(), diagnostics: vec![lsp::Diagnostic { range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(1, 1)), severity: Some(lsp::DiagnosticSeverity::ERROR), @@ -552,7 +553,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) { .update_diagnostics( server_id_1, lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(path!("/test/main.js")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/test/main.js")).unwrap(), diagnostics: vec![lsp::Diagnostic { range: lsp::Range::new(lsp::Position::new(2, 0), lsp::Position::new(2, 1)), severity: Some(lsp::DiagnosticSeverity::WARNING), @@ -571,7 +572,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) { .update_diagnostics( server_id_2, lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(path!("/test/main.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/test/main.rs")).unwrap(), diagnostics: vec![], version: None, }, @@ -608,7 +609,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) { .update_diagnostics( server_id_2, lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(path!("/test/main.js")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/test/main.js")).unwrap(), diagnostics: vec![lsp::Diagnostic { range: lsp::Range::new(lsp::Position::new(3, 0), lsp::Position::new(3, 1)), severity: Some(lsp::DiagnosticSeverity::WARNING), @@ -745,8 +746,8 @@ async fn test_random_diagnostics_blocks(cx: &mut TestAppContext, mut rng: StdRng .update_diagnostics( server_id, lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(&path).unwrap_or_else(|_| { - lsp::Url::parse("file:///test/fallback.rs").unwrap() + uri: lsp::Uri::from_file_path(&path).unwrap_or_else(|_| { + lsp::Uri::from_str("file:///test/fallback.rs").unwrap() }), diagnostics: diagnostics.clone(), version: None, @@ -934,8 +935,8 @@ async fn 
test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S .update_diagnostics( server_id, lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(&path).unwrap_or_else(|_| { - lsp::Url::parse("file:///test/fallback.rs").unwrap() + uri: lsp::Uri::from_file_path(&path).unwrap_or_else(|_| { + lsp::Uri::from_str("file:///test/fallback.rs").unwrap() }), diagnostics: diagnostics.clone(), version: None, @@ -985,7 +986,7 @@ async fn active_diagnostics_dismiss_after_invalidation(cx: &mut TestAppContext) .update_diagnostics( LanguageServerId(0), lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/root/file")).unwrap(), version: None, diagnostics: vec![lsp::Diagnostic { range: lsp::Range::new( @@ -1028,7 +1029,7 @@ async fn active_diagnostics_dismiss_after_invalidation(cx: &mut TestAppContext) .update_diagnostics( LanguageServerId(0), lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/root/file")).unwrap(), version: None, diagnostics: Vec::new(), }, @@ -1078,7 +1079,7 @@ async fn cycle_through_same_place_diagnostics(cx: &mut TestAppContext) { .update_diagnostics( LanguageServerId(0), lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/root/file")).unwrap(), version: None, diagnostics: vec![ lsp::Diagnostic { @@ -1246,7 +1247,7 @@ async fn test_diagnostics_with_links(cx: &mut TestAppContext) { lsp_store.update_diagnostics( LanguageServerId(0), lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/root/file")).unwrap(), version: None, diagnostics: vec![lsp::Diagnostic { range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 12)), @@ -1299,7 +1300,7 @@ async fn test_hover_diagnostic_and_info_popovers(cx: &mut 
gpui::TestAppContext) lsp_store.update_diagnostics( LanguageServerId(0), lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(path!("/root/dir/file.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/root/dir/file.rs")).unwrap(), version: None, diagnostics: vec![lsp::Diagnostic { range, @@ -1376,7 +1377,7 @@ async fn test_diagnostics_with_code(cx: &mut TestAppContext) { let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); let cx = &mut VisualTestContext::from_window(*window, cx); let workspace = window.root(cx).unwrap(); - let uri = lsp::Url::from_file_path(path!("/root/main.js")).unwrap(); + let uri = lsp::Uri::from_file_path(path!("/root/main.js")).unwrap(); // Create diagnostics with code fields lsp_store.update(cx, |lsp_store, cx| { @@ -1460,7 +1461,7 @@ async fn go_to_diagnostic_with_severity(cx: &mut TestAppContext) { .update_diagnostics( LanguageServerId(0), lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/root/file")).unwrap(), version: None, diagnostics: vec![ lsp::Diagnostic { @@ -1673,7 +1674,7 @@ fn random_lsp_diagnostic( ); related_info.push(lsp::DiagnosticRelatedInformation { - location: lsp::Location::new(lsp::Url::from_file_path(path).unwrap(), info_range), + location: lsp::Location::new(lsp::Uri::from_file_path(path).unwrap(), info_range), message: format!("related info {i} for diagnostic {unique_id}"), }); } diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index dfef8a92f064e3c8785f92d26e058fc43519dca2..10ebae8e27a07115de1e202187f491026bd7f503 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -9909,7 +9909,7 @@ async fn test_document_format_during_save(cx: &mut TestAppContext) { move |params, _| async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(path!("/file.rs")).unwrap() + 
lsp::Uri::from_file_path(path!("/file.rs")).unwrap() ); assert_eq!(params.options.tab_size, 4); Ok(Some(vec![lsp::TextEdit::new( @@ -9952,7 +9952,7 @@ async fn test_document_format_during_save(cx: &mut TestAppContext) { move |params, _| async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(path!("/file.rs")).unwrap() + lsp::Uri::from_file_path(path!("/file.rs")).unwrap() ); futures::future::pending::<()>().await; unreachable!() @@ -10000,7 +10000,7 @@ async fn test_document_format_during_save(cx: &mut TestAppContext) { .set_request_handler::(move |params, _| async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(path!("/file.rs")).unwrap() + lsp::Uri::from_file_path(path!("/file.rs")).unwrap() ); assert_eq!(params.options.tab_size, 8); Ok(Some(vec![])) @@ -10548,7 +10548,7 @@ async fn test_range_format_on_save_success(cx: &mut TestAppContext) { .set_request_handler::(move |params, _| async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(path!("/file.rs")).unwrap() + lsp::Uri::from_file_path(path!("/file.rs")).unwrap() ); assert_eq!(params.options.tab_size, 4); Ok(Some(vec![lsp::TextEdit::new( @@ -10581,7 +10581,7 @@ async fn test_range_format_on_save_timeout(cx: &mut TestAppContext) { move |params, _| async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(path!("/file.rs")).unwrap() + lsp::Uri::from_file_path(path!("/file.rs")).unwrap() ); futures::future::pending::<()>().await; unreachable!() @@ -10674,7 +10674,7 @@ async fn test_range_format_respects_language_tab_size_override(cx: &mut TestAppC .set_request_handler::(move |params, _| async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(path!("/file.rs")).unwrap() + lsp::Uri::from_file_path(path!("/file.rs")).unwrap() ); assert_eq!(params.options.tab_size, 8); Ok(Some(Vec::new())) @@ -10761,7 +10761,7 @@ async fn test_document_format_manual_trigger(cx: &mut TestAppContext) { 
.set_request_handler::(move |params, _| async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(path!("/file.rs")).unwrap() + lsp::Uri::from_file_path(path!("/file.rs")).unwrap() ); assert_eq!(params.options.tab_size, 4); Ok(Some(vec![lsp::TextEdit::new( @@ -10786,7 +10786,7 @@ async fn test_document_format_manual_trigger(cx: &mut TestAppContext) { move |params, _| async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(path!("/file.rs")).unwrap() + lsp::Uri::from_file_path(path!("/file.rs")).unwrap() ); futures::future::pending::<()>().await; unreachable!() @@ -10882,7 +10882,7 @@ async fn test_multiple_formatters(cx: &mut TestAppContext) { params.context.only, Some(vec!["code-action-1".into(), "code-action-2".into()]) ); - let uri = lsp::Url::from_file_path(path!("/file.rs")).unwrap(); + let uri = lsp::Uri::from_file_path(path!("/file.rs")).unwrap(); Ok(Some(vec![ lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction { kind: Some("code-action-1".into()), @@ -10942,7 +10942,7 @@ async fn test_multiple_formatters(cx: &mut TestAppContext) { edit: lsp::WorkspaceEdit { changes: Some( [( - lsp::Url::from_file_path(path!("/file.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/file.rs")).unwrap(), vec![lsp::TextEdit { range: lsp::Range::new( lsp::Position::new(0, 0), @@ -11153,7 +11153,7 @@ async fn test_organize_imports_manual_trigger(cx: &mut TestAppContext) { .set_request_handler::(move |params, _| async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(path!("/file.ts")).unwrap() + lsp::Uri::from_file_path(path!("/file.ts")).unwrap() ); Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction( lsp::CodeAction { @@ -11201,7 +11201,7 @@ async fn test_organize_imports_manual_trigger(cx: &mut TestAppContext) { move |params, _| async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(path!("/file.ts")).unwrap() + lsp::Uri::from_file_path(path!("/file.ts")).unwrap() ); 
futures::future::pending::<()>().await; unreachable!() @@ -15478,7 +15478,7 @@ async fn go_to_prev_overlapping_diagnostic(executor: BackgroundExecutor, cx: &mu .update_diagnostics( LanguageServerId(0), lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/root/file")).unwrap(), version: None, diagnostics: vec![ lsp::Diagnostic { @@ -15874,7 +15874,7 @@ async fn test_on_type_formatting_not_triggered(cx: &mut TestAppContext) { |params, _| async move { assert_eq!( params.text_document_position.text_document.uri, - lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), ); assert_eq!( params.text_document_position.position, @@ -16399,7 +16399,7 @@ async fn test_context_menus_hide_hover_popover(cx: &mut gpui::TestAppContext) { edit: Some(lsp::WorkspaceEdit { changes: Some( [( - lsp::Url::from_file_path(path!("/file.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/file.rs")).unwrap(), vec![lsp::TextEdit { range: lsp::Range::new( lsp::Position::new(5, 4), @@ -22067,7 +22067,7 @@ async fn test_apply_code_lens_actions_with_commands(cx: &mut gpui::TestAppContex edit: lsp::WorkspaceEdit { changes: Some( [( - lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(), + lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(), vec![lsp::TextEdit { range: lsp::Range::new( lsp::Position::new(0, 0), @@ -24039,7 +24039,7 @@ async fn test_pulling_diagnostics(cx: &mut TestAppContext) { let result_id = Some(new_result_id.to_string()); assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(path!("/a/first.rs")).unwrap() + lsp::Uri::from_file_path(path!("/a/first.rs")).unwrap() ); async move { Ok(lsp::DocumentDiagnosticReportResult::Report( @@ -24254,7 +24254,7 @@ async fn test_document_colors(cx: &mut TestAppContext) { async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(path!("/a/first.rs")).unwrap() + 
lsp::Uri::from_file_path(path!("/a/first.rs")).unwrap() ); requests_made.fetch_add(1, atomic::Ordering::Release); Ok(vec![ diff --git a/crates/editor/src/inlay_hint_cache.rs b/crates/editor/src/inlay_hint_cache.rs index dbf5ac95b78433c9a67da110e804a8973e51dee1..c1b0a7640c155fff02f0b778e8996a9b68ea452e 100644 --- a/crates/editor/src/inlay_hint_cache.rs +++ b/crates/editor/src/inlay_hint_cache.rs @@ -1339,7 +1339,7 @@ pub mod tests { let i = task_lsp_request_count.fetch_add(1, Ordering::Release) + 1; assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(file_with_hints).unwrap(), + lsp::Uri::from_file_path(file_with_hints).unwrap(), ); Ok(Some(vec![lsp::InlayHint { position: lsp::Position::new(0, i), @@ -1449,7 +1449,7 @@ pub mod tests { async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(file_with_hints).unwrap(), + lsp::Uri::from_file_path(file_with_hints).unwrap(), ); let current_call_id = Arc::clone(&task_lsp_request_count).fetch_add(1, Ordering::SeqCst); @@ -1594,7 +1594,7 @@ pub mod tests { "Rust" => { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(path!("/a/main.rs")) + lsp::Uri::from_file_path(path!("/a/main.rs")) .unwrap(), ); rs_lsp_request_count.fetch_add(1, Ordering::Release) @@ -1603,7 +1603,7 @@ pub mod tests { "Markdown" => { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(path!("/a/other.md")) + lsp::Uri::from_file_path(path!("/a/other.md")) .unwrap(), ); md_lsp_request_count.fetch_add(1, Ordering::Release) @@ -1789,7 +1789,7 @@ pub mod tests { async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(file_with_hints).unwrap(), + lsp::Uri::from_file_path(file_with_hints).unwrap(), ); Ok(Some(vec![ lsp::InlayHint { @@ -2127,7 +2127,7 @@ pub mod tests { let i = lsp_request_count.fetch_add(1, Ordering::SeqCst) + 1; assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(file_with_hints).unwrap(), + lsp::Uri::from_file_path(file_with_hints).unwrap(), ); 
Ok(Some(vec![lsp::InlayHint { position: lsp::Position::new(0, i), @@ -2290,7 +2290,7 @@ pub mod tests { async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), ); task_lsp_request_ranges.lock().push(params.range); @@ -2633,11 +2633,11 @@ pub mod tests { let task_editor_edited = Arc::clone(&closure_editor_edited); async move { let hint_text = if params.text_document.uri - == lsp::Url::from_file_path(path!("/a/main.rs")).unwrap() + == lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap() { "main hint" } else if params.text_document.uri - == lsp::Url::from_file_path(path!("/a/other.rs")).unwrap() + == lsp::Uri::from_file_path(path!("/a/other.rs")).unwrap() { "other hint" } else { @@ -2944,11 +2944,11 @@ pub mod tests { let task_editor_edited = Arc::clone(&closure_editor_edited); async move { let hint_text = if params.text_document.uri - == lsp::Url::from_file_path(path!("/a/main.rs")).unwrap() + == lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap() { "main hint" } else if params.text_document.uri - == lsp::Url::from_file_path(path!("/a/other.rs")).unwrap() + == lsp::Uri::from_file_path(path!("/a/other.rs")).unwrap() { "other hint" } else { @@ -3116,7 +3116,7 @@ pub mod tests { async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), ); let query_start = params.range.start; Ok(Some(vec![lsp::InlayHint { @@ -3188,7 +3188,7 @@ pub mod tests { async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(file_with_hints).unwrap(), + lsp::Uri::from_file_path(file_with_hints).unwrap(), ); let i = lsp_request_count.fetch_add(1, Ordering::SeqCst) + 1; @@ -3351,7 +3351,7 @@ pub mod tests { move |params, _| async move { assert_eq!( params.text_document.uri, - lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(), + 
lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), ); Ok(Some( serde_json::from_value(json!([ diff --git a/crates/editor/src/test/editor_lsp_test_context.rs b/crates/editor/src/test/editor_lsp_test_context.rs index 3f78fa2f3e9bcd592ba5d2a9f29c42967a27c126..79935340358662350dbbc640d96f5d60ec8aaf6b 100644 --- a/crates/editor/src/test/editor_lsp_test_context.rs +++ b/crates/editor/src/test/editor_lsp_test_context.rs @@ -29,7 +29,7 @@ pub struct EditorLspTestContext { pub cx: EditorTestContext, pub lsp: lsp::FakeLanguageServer, pub workspace: Entity, - pub buffer_lsp_url: lsp::Url, + pub buffer_lsp_url: lsp::Uri, } pub(crate) fn rust_lang() -> Arc { @@ -189,7 +189,7 @@ impl EditorLspTestContext { }, lsp, workspace, - buffer_lsp_url: lsp::Url::from_file_path(root.join("dir").join(file_name)).unwrap(), + buffer_lsp_url: lsp::Uri::from_file_path(root.join("dir").join(file_name)).unwrap(), } } @@ -358,7 +358,7 @@ impl EditorLspTestContext { where T: 'static + request::Request, T::Params: 'static + Send, - F: 'static + Send + FnMut(lsp::Url, T::Params, gpui::AsyncApp) -> Fut, + F: 'static + Send + FnMut(lsp::Uri, T::Params, gpui::AsyncApp) -> Fut, Fut: 'static + Future>, { let url = self.buffer_lsp_url.clone(); diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 4ddc2b3018614f592beeb55aaa2cc9ed46b5522c..1a1d9fb4a7dc3a3d2a847cee3661361343a6871e 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -202,7 +202,7 @@ pub struct Diagnostic { pub source: Option, /// A machine-readable code that identifies this diagnostic. pub code: Option, - pub code_description: Option, + pub code_description: Option, /// Whether this diagnostic is a hint, warning, or error. pub severity: DiagnosticSeverity, /// The human-readable message associated with this diagnostic. 
diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index 3be189cea08f247f97d05e6b9714f07d17289a8a..0d5a8e916c8712733dcc7a26faa984453cdd30fd 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -431,7 +431,7 @@ pub fn deserialize_diagnostics( code: diagnostic.code.map(lsp::NumberOrString::from_string), code_description: diagnostic .code_description - .and_then(|s| lsp::Url::parse(&s).ok()), + .and_then(|s| lsp::Uri::from_str(&s).ok()), is_primary: diagnostic.is_primary, is_disk_based: diagnostic.is_disk_based, is_unnecessary: diagnostic.is_unnecessary, diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index 3e8dce756be42ca59d88d86404518e65cf54ff7e..a5acc0043298cab49264dac75d51e6a69e5149fe 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -1058,7 +1058,7 @@ mod tests { #[gpui::test] async fn test_process_rust_diagnostics() { let mut params = lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(path!("/a")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/a")).unwrap(), version: None, diagnostics: vec![ // no newlines diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index 1ad89db017bc9a0c6f9009cba8ad22f94a31c65d..943bdab5ff817da7819590679d19bbe522b47835 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -100,8 +100,8 @@ pub struct LanguageServer { io_tasks: Mutex>, Task>)>>, output_done_rx: Mutex>, server: Arc>>, - workspace_folders: Option>>>, - root_uri: Url, + workspace_folders: Option>>>, + root_uri: Uri, } #[derive(Clone, Debug, PartialEq, Eq, Hash)] @@ -310,7 +310,7 @@ impl LanguageServer { binary: LanguageServerBinary, root_path: &Path, code_action_kinds: Option>, - workspace_folders: Option>>>, + workspace_folders: Option>>>, cx: &mut AsyncApp, ) -> Result { let working_dir = if root_path.is_dir() { @@ -318,7 +318,7 @@ impl LanguageServer { } else { root_path.parent().unwrap_or_else(|| Path::new("/")) }; - let root_uri = 
Url::from_file_path(&working_dir) + let root_uri = Uri::from_file_path(&working_dir) .map_err(|()| anyhow!("{working_dir:?} is not a valid URI"))?; log::info!( @@ -384,8 +384,8 @@ impl LanguageServer { server: Option, code_action_kinds: Option>, binary: LanguageServerBinary, - root_uri: Url, - workspace_folders: Option>>>, + root_uri: Uri, + workspace_folders: Option>>>, cx: &mut AsyncApp, on_unhandled_notification: F, ) -> Self @@ -1350,7 +1350,7 @@ impl LanguageServer { } /// Add new workspace folder to the list. - pub fn add_workspace_folder(&self, uri: Url) { + pub fn add_workspace_folder(&self, uri: Uri) { if self .capabilities() .workspace @@ -1385,7 +1385,7 @@ impl LanguageServer { } /// Remove existing workspace folder from the list. - pub fn remove_workspace_folder(&self, uri: Url) { + pub fn remove_workspace_folder(&self, uri: Uri) { if self .capabilities() .workspace @@ -1417,7 +1417,7 @@ impl LanguageServer { self.notify::(¶ms).ok(); } } - pub fn set_workspace_folders(&self, folders: BTreeSet) { + pub fn set_workspace_folders(&self, folders: BTreeSet) { let Some(workspace_folders) = self.workspace_folders.as_ref() else { return; }; @@ -1450,7 +1450,7 @@ impl LanguageServer { } } - pub fn workspace_folders(&self) -> BTreeSet { + pub fn workspace_folders(&self) -> BTreeSet { self.workspace_folders.as_ref().map_or_else( || BTreeSet::from_iter([self.root_uri.clone()]), |folders| folders.lock().clone(), @@ -1459,7 +1459,7 @@ impl LanguageServer { pub fn register_buffer( &self, - uri: Url, + uri: Uri, language_id: String, version: i32, initial_text: String, @@ -1470,7 +1470,7 @@ impl LanguageServer { .ok(); } - pub fn unregister_buffer(&self, uri: Url) { + pub fn unregister_buffer(&self, uri: Uri) { self.notify::(&DidCloseTextDocumentParams { text_document: TextDocumentIdentifier::new(uri), }) @@ -1587,7 +1587,7 @@ impl FakeLanguageServer { let server_name = LanguageServerName(name.clone().into()); let process_name = Arc::from(name.as_str()); let root = 
Self::root_path(); - let workspace_folders: Arc>> = Default::default(); + let workspace_folders: Arc>> = Default::default(); let mut server = LanguageServer::new_internal( server_id, server_name.clone(), @@ -1657,13 +1657,13 @@ impl FakeLanguageServer { (server, fake) } #[cfg(target_os = "windows")] - fn root_path() -> Url { - Url::from_file_path("C:/").unwrap() + fn root_path() -> Uri { + Uri::from_file_path("C:/").unwrap() } #[cfg(not(target_os = "windows"))] - fn root_path() -> Url { - Url::from_file_path("/").unwrap() + fn root_path() -> Uri { + Uri::from_file_path("/").unwrap() } } @@ -1865,7 +1865,7 @@ mod tests { server .notify::(&DidOpenTextDocumentParams { text_document: TextDocumentItem::new( - Url::from_str("file://a/b").unwrap(), + Uri::from_str("file://a/b").unwrap(), "rust".to_string(), 0, "".to_string(), @@ -1886,7 +1886,7 @@ mod tests { message: "ok".to_string(), }); fake.notify::(&PublishDiagnosticsParams { - uri: Url::from_str("file://b/c").unwrap(), + uri: Uri::from_str("file://b/c").unwrap(), version: Some(5), diagnostics: vec![], }); diff --git a/crates/project/src/lsp_command.rs b/crates/project/src/lsp_command.rs index ce7a871d1a63107ab4908dccea68dd41d73a319f..a960e1183dd46537ef3aee829cd9753b28001480 100644 --- a/crates/project/src/lsp_command.rs +++ b/crates/project/src/lsp_command.rs @@ -50,8 +50,8 @@ pub fn lsp_formatting_options(settings: &LanguageSettings) -> lsp::FormattingOpt } } -pub fn file_path_to_lsp_url(path: &Path) -> Result { - match lsp::Url::from_file_path(path) { +pub fn file_path_to_lsp_url(path: &Path) -> Result { + match lsp::Uri::from_file_path(path) { Ok(url) => Ok(url), Err(()) => anyhow::bail!("Invalid file path provided to LSP request: {path:?}"), } @@ -3135,7 +3135,7 @@ impl InlayHints { Some(((uri, range), server_id)) => Some(( LanguageServerId(server_id as usize), lsp::Location { - uri: lsp::Url::parse(&uri) + uri: lsp::Uri::from_str(&uri) .context("invalid uri in hint part {part:?}")?, range: lsp::Range::new( 
point_to_lsp(PointUtf16::new( @@ -3733,7 +3733,7 @@ impl GetDocumentDiagnostics { .filter_map(|diagnostics| { Some(LspPullDiagnostics::Response { server_id: LanguageServerId::from_proto(diagnostics.server_id), - uri: lsp::Url::from_str(diagnostics.uri.as_str()).log_err()?, + uri: lsp::Uri::from_str(diagnostics.uri.as_str()).log_err()?, diagnostics: if diagnostics.changed { PulledDiagnostics::Unchanged { result_id: diagnostics.result_id?, @@ -3788,7 +3788,7 @@ impl GetDocumentDiagnostics { start: point_to_lsp(PointUtf16::new(start.row, start.column)), end: point_to_lsp(PointUtf16::new(end.row, end.column)), }, - uri: lsp::Url::parse(&info.location_url.unwrap()).unwrap(), + uri: lsp::Uri::from_str(&info.location_url.unwrap()).unwrap(), }, message: info.message, } @@ -3821,7 +3821,7 @@ impl GetDocumentDiagnostics { code_description: diagnostic .code_description .map(|code_description| CodeDescription { - href: Some(lsp::Url::parse(&code_description).unwrap()), + href: Some(lsp::Uri::from_str(&code_description).unwrap()), }), related_information: Some(related_information), tags: Some(tags), @@ -3961,7 +3961,7 @@ pub struct WorkspaceLspPullDiagnostics { } fn process_full_workspace_diagnostics_report( - diagnostics: &mut HashMap, + diagnostics: &mut HashMap, server_id: LanguageServerId, report: lsp::WorkspaceFullDocumentDiagnosticReport, ) { @@ -3984,7 +3984,7 @@ fn process_full_workspace_diagnostics_report( } fn process_unchanged_workspace_diagnostics_report( - diagnostics: &mut HashMap, + diagnostics: &mut HashMap, server_id: LanguageServerId, report: lsp::WorkspaceUnchangedDocumentDiagnosticReport, ) { @@ -4343,9 +4343,9 @@ impl LspCommand for GetDocumentColor { } fn process_related_documents( - diagnostics: &mut HashMap, + diagnostics: &mut HashMap, server_id: LanguageServerId, - documents: impl IntoIterator, + documents: impl IntoIterator, ) { for (url, report_kind) in documents { match report_kind { @@ -4360,9 +4360,9 @@ fn process_related_documents( } fn 
process_unchanged_diagnostics_report( - diagnostics: &mut HashMap, + diagnostics: &mut HashMap, server_id: LanguageServerId, - uri: lsp::Url, + uri: lsp::Uri, report: lsp::UnchangedDocumentDiagnosticReport, ) { let result_id = report.result_id; @@ -4404,9 +4404,9 @@ fn process_unchanged_diagnostics_report( } fn process_full_diagnostics_report( - diagnostics: &mut HashMap, + diagnostics: &mut HashMap, server_id: LanguageServerId, - uri: lsp::Url, + uri: lsp::Uri, report: lsp::FullDocumentDiagnosticReport, ) { let result_id = report.result_id; @@ -4540,7 +4540,7 @@ mod tests { fn test_related_information() { let related_info = lsp::DiagnosticRelatedInformation { location: lsp::Location { - uri: lsp::Url::parse("file:///test.rs").unwrap(), + uri: lsp::Uri::from_str("file:///test.rs").unwrap(), range: lsp::Range { start: lsp::Position::new(1, 1), end: lsp::Position::new(1, 5), diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index b4c7c0bc37fc0409570ece3c5e3df00b1b1cd89f..3f04f38607415e9678944c5546aa84abf4446597 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -79,7 +79,7 @@ use lsp::{ LSP_REQUEST_TIMEOUT, LanguageServer, LanguageServerBinary, LanguageServerBinaryOptions, LanguageServerId, LanguageServerName, LanguageServerSelector, LspRequestFuture, MessageActionItem, MessageType, OneOf, RenameFilesParams, SymbolKind, - TextDocumentSyncSaveOptions, TextEdit, WillRenameFiles, WorkDoneProgressCancelParams, + TextDocumentSyncSaveOptions, TextEdit, Uri, WillRenameFiles, WorkDoneProgressCancelParams, WorkspaceFolder, notification::DidRenameFiles, }; use node_runtime::read_package_installed_version; @@ -114,7 +114,7 @@ use std::{ }; use sum_tree::Dimensions; use text::{Anchor, BufferId, LineEnding, OffsetRangeExt}; -use url::Url; + use util::{ ConnectionResult, ResultExt as _, debug_panic, defer, maybe, merge_json_value_into, paths::{PathExt, SanitizedPath}, @@ -314,7 +314,7 @@ impl LocalLspStore { true, cx, 
); - let pending_workspace_folders: Arc>> = Default::default(); + let pending_workspace_folders: Arc>> = Default::default(); let pending_server = cx.spawn({ let adapter = adapter.clone(); @@ -2405,7 +2405,7 @@ impl LocalLspStore { { let uri = - Url::from_file_path(worktree.read(cx).abs_path().join(&path.path)); + Uri::from_file_path(worktree.read(cx).abs_path().join(&path.path)); let server_id = self.get_or_insert_language_server( &worktree, @@ -2565,7 +2565,7 @@ impl LocalLspStore { None => return, }; - let Ok(file_url) = lsp::Url::from_file_path(old_path.as_path()) else { + let Ok(file_url) = lsp::Uri::from_file_path(old_path.as_path()) else { debug_panic!( "`{}` is not parseable as an URI", old_path.to_string_lossy() @@ -2578,7 +2578,7 @@ impl LocalLspStore { pub(crate) fn unregister_buffer_from_language_servers( &mut self, buffer: &Entity, - file_url: &lsp::Url, + file_url: &lsp::Uri, cx: &mut App, ) { buffer.update(cx, |buffer, cx| { @@ -4694,7 +4694,7 @@ impl LspStore { for node in nodes { let server_id = node.server_id_or_init(|disposition| { let path = &disposition.path; - let uri = Url::from_file_path(worktree_root.join(&path.path)); + let uri = Uri::from_file_path(worktree_root.join(&path.path)); let key = LanguageServerSeed { worktree_id, name: disposition.server_name.clone(), @@ -6578,7 +6578,7 @@ impl LspStore { File::from_dyn(buffer.file()) .and_then(|file| { let abs_path = file.as_local()?.abs_path(cx); - lsp::Url::from_file_path(abs_path).ok() + lsp::Uri::from_file_path(abs_path).ok() }) .is_none_or(|buffer_uri| { unchanged_buffers.contains(&buffer_uri) @@ -7179,7 +7179,7 @@ impl LspStore { let buffer = buffer.read(cx); let file = File::from_dyn(buffer.file())?; let abs_path = file.as_local()?.abs_path(cx); - let uri = lsp::Url::from_file_path(abs_path).unwrap(); + let uri = lsp::Uri::from_file_path(abs_path).unwrap(); let next_snapshot = buffer.text_snapshot(); for language_server in language_servers { let language_server = language_server.clone(); 
@@ -7816,7 +7816,7 @@ impl LspStore { }; let symbol_abs_path = resolve_path(&worktree_abs_path, &symbol.path.path); - let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) { + let symbol_uri = if let Ok(uri) = lsp::Uri::from_file_path(symbol_abs_path) { uri } else { return Task::ready(Err(anyhow!("invalid symbol path"))); @@ -7830,14 +7830,14 @@ impl LspStore { pub(crate) fn open_local_buffer_via_lsp( &mut self, - mut abs_path: lsp::Url, + abs_path: lsp::Uri, language_server_id: LanguageServerId, cx: &mut Context, ) -> Task>> { cx.spawn(async move |lsp_store, cx| { // Escape percent-encoded string. let current_scheme = abs_path.scheme().to_owned(); - let _ = abs_path.set_scheme("file"); + // Uri is immutable, so we can't modify the scheme let abs_path = abs_path .to_file_path() @@ -9230,8 +9230,12 @@ impl LspStore { maybe!({ let local_store = self.as_local()?; - let old_uri = lsp::Url::from_file_path(old_path).ok().map(String::from)?; - let new_uri = lsp::Url::from_file_path(new_path).ok().map(String::from)?; + let old_uri = lsp::Uri::from_file_path(old_path) + .ok() + .map(|uri| uri.to_string())?; + let new_uri = lsp::Uri::from_file_path(new_path) + .ok() + .map(|uri| uri.to_string())?; for language_server in local_store.language_servers_for_worktree(worktree_id) { let Some(filter) = local_store @@ -9264,8 +9268,12 @@ impl LspStore { is_dir: bool, cx: AsyncApp, ) -> Task { - let old_uri = lsp::Url::from_file_path(old_path).ok().map(String::from); - let new_uri = lsp::Url::from_file_path(new_path).ok().map(String::from); + let old_uri = lsp::Uri::from_file_path(old_path) + .ok() + .map(|uri| uri.to_string()); + let new_uri = lsp::Uri::from_file_path(new_path) + .ok() + .map(|uri| uri.to_string()); cx.spawn(async move |cx| { let mut tasks = vec![]; this.update(cx, |this, cx| { @@ -10878,7 +10886,7 @@ impl LspStore { language_server: Arc, server_id: LanguageServerId, key: LanguageServerSeed, - workspace_folders: Arc>>, + workspace_folders: 
Arc>>, cx: &mut Context, ) { let Some(local) = self.as_local_mut() else { @@ -11038,7 +11046,7 @@ impl LspStore { let snapshot = versions.last().unwrap(); let version = snapshot.version; let initial_snapshot = &snapshot.snapshot; - let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap(); + let uri = lsp::Uri::from_file_path(file.abs_path(cx)).unwrap(); language_server.register_buffer( uri, adapter.language_id(&language.name()), @@ -11277,7 +11285,7 @@ impl LspStore { PathChange::AddedOrUpdated => lsp::FileChangeType::CHANGED, }; Some(lsp::FileEvent { - uri: lsp::Url::from_file_path(abs_path.join(path)).unwrap(), + uri: lsp::Uri::from_file_path(abs_path.join(path)).unwrap(), typ, }) }) @@ -11689,7 +11697,7 @@ impl LspStore { File::from_dyn(buffer.file()) .and_then(|file| { let abs_path = file.as_local()?.abs_path(cx); - lsp::Url::from_file_path(abs_path).ok() + lsp::Uri::from_file_path(abs_path).ok() }) .is_none_or(|buffer_uri| { unchanged_buffers.contains(&buffer_uri) @@ -12821,7 +12829,7 @@ pub enum LanguageServerState { Starting { startup: Task>>, /// List of language servers that will be added to the workspace once it's initialization completes. 
- pending_workspace_folders: Arc>>, + pending_workspace_folders: Arc>>, }, Running { @@ -12833,7 +12841,7 @@ pub enum LanguageServerState { } impl LanguageServerState { - fn add_workspace_folder(&self, uri: Url) { + fn add_workspace_folder(&self, uri: Uri) { match self { LanguageServerState::Starting { pending_workspace_folders, @@ -12846,7 +12854,7 @@ impl LanguageServerState { } } } - fn _remove_workspace_folder(&self, uri: Url) { + fn _remove_workspace_folder(&self, uri: Uri) { match self { LanguageServerState::Starting { pending_workspace_folders, diff --git a/crates/project/src/lsp_store/lsp_ext_command.rs b/crates/project/src/lsp_store/lsp_ext_command.rs index 1c969f8114eb7647e7c109baf2a7b70339997b41..0263946b25ed58969a3a7a98a9f537ce81d86ab1 100644 --- a/crates/project/src/lsp_store/lsp_ext_command.rs +++ b/crates/project/src/lsp_store/lsp_ext_command.rs @@ -213,7 +213,7 @@ impl LspCommand for OpenDocs { ) -> Result { Ok(OpenDocsParams { text_document: lsp::TextDocumentIdentifier { - uri: lsp::Url::from_file_path(path).unwrap(), + uri: lsp::Uri::from_file_path(path).unwrap(), }, position: point_to_lsp(self.position), }) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 8c289c935cd2bc4ebb919d171f0a9e4f0334b334..74ad08570a996a2dc9fc07bfb616f0edc0085b9f 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -930,7 +930,7 @@ pub enum LspPullDiagnostics { /// The id of the language server that produced diagnostics. server_id: LanguageServerId, /// URI of the resource, - uri: lsp::Url, + uri: lsp::Uri, /// The diagnostics produced by this language server. 
diagnostics: PulledDiagnostics, }, @@ -3599,7 +3599,7 @@ impl Project { pub fn open_local_buffer_via_lsp( &mut self, - abs_path: lsp::Url, + abs_path: lsp::Uri, language_server_id: LanguageServerId, cx: &mut Context, ) -> Task>> { diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 96f891d9c380fe6feec490627cd782955c833eda..a07f94fb737745b22bf6eaf685e1a4f2874a4dae 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -18,7 +18,6 @@ use git::{ }; use git2::RepositoryInitOptions; use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal}; -use http_client::Url; use itertools::Itertools; use language::{ Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSourceKind, DiskState, FakeLspAdapter, @@ -29,7 +28,7 @@ use language::{ }; use lsp::{ DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit, - WillRenameFiles, notification::DidRenameFiles, + Uri, WillRenameFiles, notification::DidRenameFiles, }; use parking_lot::Mutex; use paths::{config_dir, tasks_file}; @@ -701,7 +700,7 @@ async fn test_running_multiple_instances_of_a_single_server_in_one_worktree( assert_eq!( server.workspace_folders(), BTreeSet::from_iter( - [Url::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter() + [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter() ) ); @@ -891,7 +890,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .await .text_document, lsp::TextDocumentItem { - uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(), version: 0, text: "const A: i32 = 1;".to_string(), language_id: "rust".to_string(), @@ -921,7 +920,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .await .text_document, lsp::VersionedTextDocumentIdentifier::new( - lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(), + 
lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(), 1 ) ); @@ -942,7 +941,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .await .text_document, lsp::TextDocumentItem { - uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(), version: 0, text: "{\"a\": 1}".to_string(), language_id: "json".to_string(), @@ -992,7 +991,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .await .text_document, lsp::VersionedTextDocumentIdentifier::new( - lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(), 1 ) ); @@ -1008,7 +1007,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .await .text_document, lsp::TextDocumentIdentifier::new( - lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap() + lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap() ) ); assert_eq!( @@ -1017,7 +1016,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .await .text_document, lsp::TextDocumentIdentifier::new( - lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap() + lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap() ) ); @@ -1034,7 +1033,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .receive_notification::() .await .text_document, - lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()), + lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()), ); assert_eq!( fake_rust_server @@ -1042,7 +1041,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .await .text_document, lsp::TextDocumentItem { - uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(), version: 0, text: rust_buffer2.update(cx, |buffer, _| buffer.text()), 
language_id: "rust".to_string(), @@ -1084,7 +1083,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .receive_notification::() .await .text_document, - lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap()), + lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()), ); assert_eq!( fake_json_server @@ -1092,7 +1091,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .await .text_document, lsp::TextDocumentItem { - uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(), version: 0, text: rust_buffer2.update(cx, |buffer, _| buffer.text()), language_id: "json".to_string(), @@ -1118,7 +1117,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .await .text_document, lsp::VersionedTextDocumentIdentifier::new( - lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(), + lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(), 1 ) ); @@ -1148,7 +1147,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .await .text_document, lsp::TextDocumentItem { - uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(), version: 0, text: rust_buffer.update(cx, |buffer, _| buffer.text()), language_id: "rust".to_string(), @@ -1169,13 +1168,13 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { ], [ lsp::TextDocumentItem { - uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(), version: 0, text: json_buffer.update(cx, |buffer, _| buffer.text()), language_id: "json".to_string(), }, lsp::TextDocumentItem { - uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(), version: 0, text: 
rust_buffer2.update(cx, |buffer, _| buffer.text()), language_id: "json".to_string(), @@ -1187,7 +1186,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { cx.update(|_| drop(_json_handle)); let close_message = lsp::DidCloseTextDocumentParams { text_document: lsp::TextDocumentIdentifier::new( - lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(), + lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(), ), }; assert_eq!( @@ -1316,7 +1315,7 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon let _out_of_worktree_buffer = project .update(cx, |project, cx| { project.open_local_buffer_via_lsp( - lsp::Url::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(), server_id, cx, ) @@ -1476,23 +1475,23 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon &*file_changes.lock(), &[ lsp::FileEvent { - uri: lsp::Url::from_file_path(path!("/the-root/Cargo.lock")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(), typ: lsp::FileChangeType::CHANGED, }, lsp::FileEvent { - uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(), typ: lsp::FileChangeType::DELETED, }, lsp::FileEvent { - uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(), typ: lsp::FileChangeType::CREATED, }, lsp::FileEvent { - uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(), typ: lsp::FileChangeType::CREATED, }, lsp::FileEvent { - uri: lsp::Url::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(), typ: lsp::FileChangeType::CHANGED, }, ] 
@@ -1539,7 +1538,7 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) { .update_diagnostics( LanguageServerId(0), lsp::PublishDiagnosticsParams { - uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(), + uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(), version: None, diagnostics: vec![lsp::Diagnostic { range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)), @@ -1558,7 +1557,7 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) { .update_diagnostics( LanguageServerId(0), lsp::PublishDiagnosticsParams { - uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(), + uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(), version: None, diagnostics: vec![lsp::Diagnostic { range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)), @@ -1650,7 +1649,7 @@ async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) { .update_diagnostics( server_id, lsp::PublishDiagnosticsParams { - uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(), + uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(), version: None, diagnostics: vec![lsp::Diagnostic { range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)), @@ -1669,7 +1668,7 @@ async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) { .update_diagnostics( server_id, lsp::PublishDiagnosticsParams { - uri: Url::from_file_path(path!("/root/other.rs")).unwrap(), + uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(), version: None, diagnostics: vec![lsp::Diagnostic { range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)), @@ -1813,7 +1812,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) { ); fake_server.notify::(&lsp::PublishDiagnosticsParams { - uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(), + uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(), version: None, diagnostics: vec![lsp::Diagnostic { range: 
lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)), @@ -1866,7 +1865,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) { // Ensure publishing empty diagnostics twice only results in one update event. fake_server.notify::(&lsp::PublishDiagnosticsParams { - uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(), + uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(), version: None, diagnostics: Default::default(), }); @@ -1879,7 +1878,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) { ); fake_server.notify::(&lsp::PublishDiagnosticsParams { - uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(), + uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(), version: None, diagnostics: Default::default(), }); @@ -2011,7 +2010,7 @@ async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAp // Publish diagnostics let fake_server = fake_servers.next().await.unwrap(); fake_server.notify::(&lsp::PublishDiagnosticsParams { - uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(), + uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(), version: None, diagnostics: vec![lsp::Diagnostic { range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)), @@ -2092,7 +2091,7 @@ async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::T // Before restarting the server, report diagnostics with an unknown buffer version. 
let fake_server = fake_servers.next().await.unwrap(); fake_server.notify::(&lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(), version: Some(10000), diagnostics: Vec::new(), }); @@ -2343,7 +2342,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) { // Report some diagnostics for the initial version of the buffer fake_server.notify::(&lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(), version: Some(open_notification.text_document.version), diagnostics: vec![ lsp::Diagnostic { @@ -2431,7 +2430,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) { // Ensure overlapping diagnostics are highlighted correctly. fake_server.notify::(&lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(), version: Some(open_notification.text_document.version), diagnostics: vec![ lsp::Diagnostic { @@ -2525,7 +2524,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) { // Handle out-of-order diagnostics fake_server.notify::(&lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(), version: Some(change_notification_2.text_document.version), diagnostics: vec![ lsp::Diagnostic { @@ -3206,7 +3205,7 @@ async fn test_definition(cx: &mut gpui::TestAppContext) { Ok(Some(lsp::GotoDefinitionResponse::Scalar( lsp::Location::new( - lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(), lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)), ), ))) @@ -3765,7 +3764,7 @@ async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) { edit: 
lsp::WorkspaceEdit { changes: Some( [( - lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(), + lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(), vec![lsp::TextEdit { range: lsp::Range::new( lsp::Position::new(0, 0), @@ -3904,7 +3903,7 @@ async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) { .await .text_document, lsp::TextDocumentItem { - uri: lsp::Url::from_file_path(path!("/dir/file.rs")).unwrap(), + uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(), version: 0, text: "".to_string(), language_id: "rust".to_string(), @@ -4742,7 +4741,7 @@ async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) { .await .unwrap(); - let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap(); + let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap(); let message = lsp::PublishDiagnosticsParams { uri: buffer_uri.clone(), diagnostics: vec![ @@ -5064,7 +5063,7 @@ async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) { new_text: "This is not a drill".to_owned(), })], text_document: lsp::OptionalVersionedTextDocumentIdentifier { - uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(), + uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(), version: Some(1337), }, }] @@ -5189,14 +5188,14 @@ async fn test_rename(cx: &mut gpui::TestAppContext) { changes: Some( [ ( - lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(), vec![lsp::TextEdit::new( lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)), "THREE".to_string(), )], ), ( - lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(), vec![ lsp::TextEdit::new( lsp::Range::new( diff --git a/crates/project_symbols/src/project_symbols.rs b/crates/project_symbols/src/project_symbols.rs index 72029e55a0dad7e2070f4660b86b4b4d1eb4ffba..7f42f9e8efbda74bae52318d7353896e296ababc 100644 --- 
a/crates/project_symbols/src/project_symbols.rs +++ b/crates/project_symbols/src/project_symbols.rs @@ -437,7 +437,7 @@ mod tests { deprecated: None, container_name: None, location: lsp::Location::new( - lsp::Url::from_file_path(path.as_ref()).unwrap(), + lsp::Uri::from_file_path(path.as_ref()).unwrap(), lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)), ), } diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index e106a5ef18d59ebeb942564f24600635f78f89c7..353857f5871551a20315f638aa3d9653b3ed2848 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -533,7 +533,7 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext Ok(Some(lsp::WorkspaceEdit { changes: Some( [( - lsp::Url::from_file_path(path!("/code/project1/src/lib.rs")).unwrap(), + lsp::Uri::from_file_path(path!("/code/project1/src/lib.rs")).unwrap(), vec![lsp::TextEdit::new( lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(0, 6)), "two".to_string(), From 5001c037116386cb3f3316d5e4459fe78a4bd3fc Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 29 Aug 2025 17:14:27 +0300 Subject: [PATCH 11/54] Properly process files that cannot be opened for a reason (#37170) Follow-up of https://github.com/zed-industries/zed/pull/36764 * Fix `anyhow!({e})` conversion losing Collab error codes context when opening a buffer remotely * Use this context to only allow opening files that had no specific Collab error code Release Notes: - N/A --- crates/client/src/client.rs | 15 ++---------- crates/project/src/buffer_store.rs | 22 ++++++++++++++--- crates/remote/src/remote_client.rs | 2 +- crates/workspace/src/workspace.rs | 39 ++++++++++++++++++------------ 4 files changed, 45 insertions(+), 33 deletions(-) diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index
bdbf049b75ef1e0de351c65be7382a94d73448e6..1e735b0025f1e8a15809b096c5a462361d4ed8f3 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -1696,21 +1696,10 @@ impl Client { ); cx.spawn(async move |_| match future.await { Ok(()) => { - log::debug!( - "rpc message handled. client_id:{}, sender_id:{:?}, type:{}", - client_id, - original_sender_id, - type_name - ); + log::debug!("rpc message handled. client_id:{client_id}, sender_id:{original_sender_id:?}, type:{type_name}"); } Err(error) => { - log::error!( - "error handling message. client_id:{}, sender_id:{:?}, type:{}, error:{:?}", - client_id, - original_sender_id, - type_name, - error - ); + log::error!("error handling message. client_id:{client_id}, sender_id:{original_sender_id:?}, type:{type_name}, error:{error:#}"); } }) .detach(); diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index 295bad6e596252cbbeecb36b587b696ccbab32a0..89bd4b27c9c47470a781e0ff322f5ef4a29b4927 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -20,7 +20,7 @@ use language::{ }, }; use rpc::{ - AnyProtoClient, ErrorExt as _, TypedEnvelope, + AnyProtoClient, ErrorCode, ErrorExt as _, TypedEnvelope, proto::{self, ToProto}, }; use smol::channel::Receiver; @@ -837,7 +837,15 @@ impl BufferStore { } }; - cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) }) + cx.background_spawn(async move { + task.await.map_err(|e| { + if e.error_code() != ErrorCode::Internal { + anyhow!(e.error_code()) + } else { + anyhow!("{e}") + } + }) + }) } pub fn create_buffer(&mut self, cx: &mut Context) -> Task>> { @@ -944,7 +952,15 @@ impl BufferStore { ) -> impl Iterator>>)> { self.loading_buffers.iter().map(|(path, task)| { let task = task.clone(); - (path, async move { task.await.map_err(|e| anyhow!("{e}")) }) + (path, async move { + task.await.map_err(|e| { + if e.error_code() != ErrorCode::Internal { + anyhow!(e.error_code()) + } else { + 
anyhow!("{e}") + } + }) + }) }) } diff --git a/crates/remote/src/remote_client.rs b/crates/remote/src/remote_client.rs index dd529ca87499b0daf2061fd990f7149828e3fce4..7e231e622cb2336a113799f7087fc0e30a5f79ff 100644 --- a/crates/remote/src/remote_client.rs +++ b/crates/remote/src/remote_client.rs @@ -1117,7 +1117,7 @@ impl ChannelClient { } Err(error) => { log::error!( - "{}:error handling message. type:{}, error:{}", + "{}:error handling message. type:{}, error:{:#}", this.name, type_name, format!("{error:#}").lines().fold( diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 0f119c14003d0f54f2f3a5323cb5e9106716a24d..61442eb6348e6152a4ad8ba4d3f93c24d1887346 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -648,23 +648,30 @@ impl ProjectItemRegistry { ) as Box<_>; Ok((project_entry_id, build_workspace_item)) } - Err(e) => match entry_abs_path.as_deref().filter(|_| is_file) { - Some(abs_path) => match cx.update(|window, cx| { - T::for_broken_project_item(abs_path, is_local, &e, window, cx) - })? { - Some(broken_project_item_view) => { - let build_workspace_item = Box::new( - move |_: &mut Pane, _: &mut Window, cx: &mut Context| { - cx.new(|_| broken_project_item_view).boxed_clone() - }, - ) - as Box<_>; - Ok((None, build_workspace_item)) + Err(e) => { + if e.error_code() == ErrorCode::Internal { + if let Some(abs_path) = + entry_abs_path.as_deref().filter(|_| is_file) + { + if let Some(broken_project_item_view) = + cx.update(|window, cx| { + T::for_broken_project_item( + abs_path, is_local, &e, window, cx, + ) + })? 
+ { + let build_workspace_item = Box::new( + move |_: &mut Pane, _: &mut Window, cx: &mut Context| { + cx.new(|_| broken_project_item_view).boxed_clone() + }, + ) + as Box<_>; + return Ok((None, build_workspace_item)); + } } - None => Err(e)?, - }, - None => Err(e)?, - }, + } + Err(e) + } } })) }); From 11fb57a6d96f2133c492c6da18b6a976cb2429b2 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 29 Aug 2025 16:16:02 +0200 Subject: [PATCH 12/54] acp: Use the custom claude installation to perform login (#37169) Release Notes: - N/A --------- Co-authored-by: Bennet Bo Fenner Co-authored-by: Agus Zubiaga Co-authored-by: Nathan Sobo Co-authored-by: Cole Miller Co-authored-by: morgankrey --- Cargo.lock | 1 + crates/agent_servers/src/agent_servers.rs | 10 ++-- crates/agent_servers/src/claude.rs | 40 ++++++++++++- crates/agent_servers/src/e2e_tests.rs | 2 +- crates/agent_ui/Cargo.toml | 1 + crates/agent_ui/src/acp/message_editor.rs | 2 +- crates/agent_ui/src/acp/thread_view.rs | 69 +++++++++++++++-------- 7 files changed, 94 insertions(+), 31 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e493c99a2fc0f9514503b7cee8ef41cca582c387..aa1bcab9a68294baa4264916ef5a35adbeb20802 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -414,6 +414,7 @@ dependencies = [ "serde_json", "serde_json_lenient", "settings", + "shlex", "smol", "streaming_diff", "task", diff --git a/crates/agent_servers/src/agent_servers.rs b/crates/agent_servers/src/agent_servers.rs index 83b3be76ce709c9b8c4d9f13ca55632a79e7b677..c1fc7b91ae862a25eac8da998f4b848327a3dd3e 100644 --- a/crates/agent_servers/src/agent_servers.rs +++ b/crates/agent_servers/src/agent_servers.rs @@ -44,11 +44,11 @@ pub fn init(cx: &mut App) { pub struct AgentServerDelegate { project: Entity, - status_tx: watch::Sender, + status_tx: Option>, } impl AgentServerDelegate { - pub fn new(project: Entity, status_tx: watch::Sender) -> Self { + pub fn new(project: Entity, status_tx: Option>) -> Self { Self { project, status_tx } } @@ 
-72,7 +72,7 @@ impl AgentServerDelegate { "External agents are not yet available in remote projects." ))); }; - let mut status_tx = self.status_tx; + let status_tx = self.status_tx; cx.spawn(async move |cx| { if !ignore_system_version { @@ -165,7 +165,9 @@ impl AgentServerDelegate { .detach(); file_name } else { - status_tx.send("Installing…".into()).ok(); + if let Some(mut status_tx) = status_tx { + status_tx.send("Installing…".into()).ok(); + } let dir = dir.clone(); cx.background_spawn(Self::download_latest_version( fs, diff --git a/crates/agent_servers/src/claude.rs b/crates/agent_servers/src/claude.rs index db8853695ec798a8b146666292cd29f2c1fc145c..0a4f152e8afd991fed90af12aa5bbff909c8aa2d 100644 --- a/crates/agent_servers/src/claude.rs +++ b/crates/agent_servers/src/claude.rs @@ -1,8 +1,8 @@ use language_models::provider::anthropic::AnthropicLanguageModelProvider; use settings::SettingsStore; -use std::any::Any; use std::path::Path; use std::rc::Rc; +use std::{any::Any, path::PathBuf}; use anyhow::Result; use gpui::{App, AppContext as _, SharedString, Task}; @@ -13,9 +13,47 @@ use acp_thread::AgentConnection; #[derive(Clone)] pub struct ClaudeCode; +pub struct ClaudeCodeLoginCommand { + pub path: PathBuf, + pub arguments: Vec, +} + impl ClaudeCode { const BINARY_NAME: &'static str = "claude-code-acp"; const PACKAGE_NAME: &'static str = "@zed-industries/claude-code-acp"; + + pub fn login_command( + delegate: AgentServerDelegate, + cx: &mut App, + ) -> Task> { + let settings = cx.read_global(|settings: &SettingsStore, _| { + settings.get::(None).claude.clone() + }); + + cx.spawn(async move |cx| { + let mut command = if let Some(settings) = settings { + settings.command + } else { + cx.update(|cx| { + delegate.get_or_npm_install_builtin_agent( + Self::BINARY_NAME.into(), + Self::PACKAGE_NAME.into(), + "node_modules/@anthropic-ai/claude-code/cli.js".into(), + true, + None, + cx, + ) + })? + .await? 
+ }; + command.args.push("/login".into()); + + Ok(ClaudeCodeLoginCommand { + path: command.path, + arguments: command.args, + }) + }) + } } impl AgentServer for ClaudeCode { diff --git a/crates/agent_servers/src/e2e_tests.rs b/crates/agent_servers/src/e2e_tests.rs index 5d2becf0ccc4b30cfeca27f4eb5ee08c2d0bb7d1..7988b86081351b29c8a19b676498db26d0b83fc3 100644 --- a/crates/agent_servers/src/e2e_tests.rs +++ b/crates/agent_servers/src/e2e_tests.rs @@ -498,7 +498,7 @@ pub async fn new_test_thread( current_dir: impl AsRef, cx: &mut TestAppContext, ) -> Entity { - let delegate = AgentServerDelegate::new(project.clone(), watch::channel("".into()).0); + let delegate = AgentServerDelegate::new(project.clone(), None); let connection = cx .update(|cx| server.connect(current_dir.as_ref(), delegate, cx)) diff --git a/crates/agent_ui/Cargo.toml b/crates/agent_ui/Cargo.toml index 6b0979ee696571841a7ec620ca48de2880f66492..6c8b9528800041d8920d935a8f75867d03719a9d 100644 --- a/crates/agent_ui/Cargo.toml +++ b/crates/agent_ui/Cargo.toml @@ -80,6 +80,7 @@ serde.workspace = true serde_json.workspace = true serde_json_lenient.workspace = true settings.workspace = true +shlex.workspace = true smol.workspace = true streaming_diff.workspace = true task.workspace = true diff --git a/crates/agent_ui/src/acp/message_editor.rs b/crates/agent_ui/src/acp/message_editor.rs index bd5e4faf7aedba4644206794a1c7a837517c52d6..b9e85e0ee34b3dccd0dcd4a22c1fbaa05031e2d9 100644 --- a/crates/agent_ui/src/acp/message_editor.rs +++ b/crates/agent_ui/src/acp/message_editor.rs @@ -645,7 +645,7 @@ impl MessageEditor { self.project.read(cx).fs().clone(), self.history_store.clone(), )); - let delegate = AgentServerDelegate::new(self.project.clone(), watch::channel("".into()).0); + let delegate = AgentServerDelegate::new(self.project.clone(), None); let connection = server.connect(Path::new(""), delegate, cx); cx.spawn(async move |_, cx| { let agent = connection.await?; diff --git 
a/crates/agent_ui/src/acp/thread_view.rs b/crates/agent_ui/src/acp/thread_view.rs index c718540c217425c8987f4282d5990579d529779e..eff9ceedd433ea8beb833108fb9fea1eb3f706da 100644 --- a/crates/agent_ui/src/acp/thread_view.rs +++ b/crates/agent_ui/src/acp/thread_view.rs @@ -9,7 +9,7 @@ use agent_client_protocol::{self as acp, PromptCapabilities}; use agent_servers::{AgentServer, AgentServerDelegate, ClaudeCode}; use agent_settings::{AgentProfileId, AgentSettings, CompletionMode, NotifyWhenAgentWaiting}; use agent2::{DbThreadMetadata, HistoryEntry, HistoryEntryId, HistoryStore}; -use anyhow::{Result, anyhow, bail}; +use anyhow::{Context as _, Result, anyhow, bail}; use audio::{Audio, Sound}; use buffer_diff::BufferDiff; use client::zed_urls; @@ -423,7 +423,7 @@ impl AcpThreadView { .map(|worktree| worktree.read(cx).abs_path()) .unwrap_or_else(|| paths::home_dir().as_path().into()); let (tx, mut rx) = watch::channel("Loading…".into()); - let delegate = AgentServerDelegate::new(project.clone(), tx); + let delegate = AgentServerDelegate::new(project.clone(), Some(tx)); let connect_task = agent.connect(&root_dir, delegate, cx); let load_task = cx.spawn_in(window, async move |this, cx| { @@ -1386,31 +1386,52 @@ impl AcpThreadView { let Some(terminal_panel) = workspace.read(cx).panel::(cx) else { return Task::ready(Ok(())); }; - let project = workspace.read(cx).project().read(cx); + let project_entity = workspace.read(cx).project(); + let project = project_entity.read(cx); let cwd = project.first_project_directory(cx); let shell = project.terminal_settings(&cwd, cx).shell.clone(); - let terminal = terminal_panel.update(cx, |terminal_panel, cx| { - terminal_panel.spawn_task( - &SpawnInTerminal { - id: task::TaskId("claude-login".into()), - full_label: "claude /login".to_owned(), - label: "claude /login".to_owned(), - command: Some("claude".to_owned()), - args: vec!["/login".to_owned()], - command_label: "claude /login".to_owned(), - cwd, - use_new_terminal: true, - 
allow_concurrent_runs: true, - hide: task::HideStrategy::Always, - shell, - ..Default::default() - }, - window, - cx, - ) - }); - cx.spawn(async move |cx| { + let delegate = AgentServerDelegate::new(project_entity.clone(), None); + let command = ClaudeCode::login_command(delegate, cx); + + window.spawn(cx, async move |cx| { + let login_command = command.await?; + let command = login_command + .path + .to_str() + .with_context(|| format!("invalid login command: {:?}", login_command.path))?; + let command = shlex::try_quote(command)?; + let args = login_command + .arguments + .iter() + .map(|arg| { + Ok(shlex::try_quote(arg) + .context("Failed to quote argument")? + .to_string()) + }) + .collect::>>()?; + + let terminal = terminal_panel.update_in(cx, |terminal_panel, window, cx| { + terminal_panel.spawn_task( + &SpawnInTerminal { + id: task::TaskId("claude-login".into()), + full_label: "claude /login".to_owned(), + label: "claude /login".to_owned(), + command: Some(command.into()), + args, + command_label: "claude /login".to_owned(), + cwd, + use_new_terminal: true, + allow_concurrent_runs: true, + hide: task::HideStrategy::Always, + shell, + ..Default::default() + }, + window, + cx, + ) + })?; + let terminal = terminal.await?; let mut exit_status = terminal .read_with(cx, |terminal, cx| terminal.wait_for_completed_task(cx))? From a13881746a5ac5f5693f867016f2908b081090c3 Mon Sep 17 00:00:00 2001 From: Smit Barmase Date: Fri, 29 Aug 2025 22:22:43 +0530 Subject: [PATCH 13/54] editor: APCA contrast (#37165) Closes #35787 Closes #17890 Closes #28789 Closes #36495 How it works: For highlights (and selections) within the visible rows of the editor, we split them row by row. This is efficient since the number of visible rows is constant. For each row, all highlights and selections, which may overlap, are flattened using a line sweep. This produces non-overlapping consecutive segments for each row, each with a blended background color. 
Next, for each row, we split text runs into smaller runs to adjust its color using APCA contrast. Since both text runs and segment are non-overlapping and consecutive, we can use two-pointer on them to do this. For example, a text run for the variable red might be split into two runs if a highlight partially covers it. As a result, one part may appear as red, while the other appears as a lighter red, depending on the background behind it. Result: image image image Release Notes: - Improved text contrast when selected or highlighted in the editor. --- assets/settings/default.json | 14 + crates/editor/src/editor_settings.rs | 7 + crates/editor/src/element.rs | 538 ++++++++++++++++++++++++++- 3 files changed, 557 insertions(+), 2 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 297c932e5b54ca75eb34b2399c0a1f427dcc9f77..572193be4eecbeb63a19eab1811bff126638162b 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -226,6 +226,20 @@ // The debounce delay before querying highlights from the language // server based on the current cursor location. "lsp_highlight_debounce": 75, + // The minimum APCA perceptual contrast between foreground and background colors. + // APCA (Accessible Perceptual Contrast Algorithm) is more accurate than WCAG 2.x, + // especially for dark mode. Values range from 0 to 106. + // + // Based on APCA Readability Criterion (ARC) Bronze Simple Mode: + // https://readtech.org/ARC/tests/bronze-simple-mode/ + // - 0: No contrast adjustment + // - 45: Minimum for large fluent text (36px+) + // - 60: Minimum for other content text + // - 75: Minimum for body text + // - 90: Preferred for body text + // + // This only affects text drawn over highlight backgrounds in the editor. + "minimum_contrast_for_highlights": 45, // Whether to pop the completions menu while typing in an editor without // explicitly requesting it. 
"show_completions_on_input": true, diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index 9b110d782a0bbcf789791240ef42a935b7ecd47b..55c040428d7e73d9e6e9bf6cc66cc20d301038f2 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -56,6 +56,7 @@ pub struct EditorSettings { pub inline_code_actions: bool, pub drag_and_drop_selection: DragAndDropSelection, pub lsp_document_colors: DocumentColorsRenderMode, + pub minimum_contrast_for_highlights: f32, } /// How to render LSP `textDocument/documentColor` colors in the editor. @@ -550,6 +551,12 @@ pub struct EditorSettingsContent { /// /// Default: false pub show_signature_help_after_edits: Option, + /// The minimum APCA perceptual contrast to maintain when + /// rendering text over highlight backgrounds in the editor. + /// + /// Values range from 0 to 106. Set to 0 to disable adjustments. + /// Default: 45 + pub minimum_contrast_for_highlights: Option, /// Whether to follow-up empty go to definition responses from the language server or not. /// `FindAllReferences` allows to look up references of the same symbol instead. 
diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index a63c18e003907f16a1383bbfb12085e1044d9eb9..ca6eac080e6121880eae63b4dc60ca6d32c6da5d 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -82,6 +82,7 @@ use std::{ use sum_tree::Bias; use text::{BufferId, SelectionGoal}; use theme::{ActiveTheme, Appearance, BufferLineHeight, PlayerColor}; +use ui::utils::ensure_minimum_contrast; use ui::{ ButtonLike, ContextMenu, Indicator, KeyBinding, POPOVER_Y_PADDING, Tooltip, h_flex, prelude::*, right_click_menu, @@ -3260,12 +3261,161 @@ impl EditorElement { .collect() } + fn bg_segments_per_row( + rows: Range, + selections: &[(PlayerColor, Vec)], + highlight_ranges: &[(Range, Hsla)], + base_background: Hsla, + ) -> Vec, Hsla)>> { + if rows.start >= rows.end { + return Vec::new(); + } + let highlight_iter = highlight_ranges.iter().cloned(); + let selection_iter = selections.iter().flat_map(|(player_color, layouts)| { + let color = player_color.selection; + layouts.iter().filter_map(move |selection_layout| { + if selection_layout.range.start != selection_layout.range.end { + Some((selection_layout.range.clone(), color)) + } else { + None + } + }) + }); + let mut per_row_map = vec![Vec::new(); rows.len()]; + for (range, color) in highlight_iter.chain(selection_iter) { + let covered_rows = if range.end.column() == 0 { + cmp::max(range.start.row(), rows.start)..cmp::min(range.end.row(), rows.end) + } else { + cmp::max(range.start.row(), rows.start) + ..cmp::min(range.end.row().next_row(), rows.end) + }; + for row in covered_rows.iter_rows() { + let seg_start = if row == range.start.row() { + range.start + } else { + DisplayPoint::new(row, 0) + }; + let seg_end = if row == range.end.row() && range.end.column() != 0 { + range.end + } else { + DisplayPoint::new(row, u32::MAX) + }; + let ix = row.minus(rows.start) as usize; + debug_assert!(row >= rows.start && row < rows.end); + debug_assert!(ix < per_row_map.len()); + 
per_row_map[ix].push((seg_start..seg_end, color)); + } + } + for row_segments in per_row_map.iter_mut() { + if row_segments.is_empty() { + continue; + } + let segments = mem::take(row_segments); + let merged = Self::merge_overlapping_ranges(segments, base_background); + *row_segments = merged; + } + per_row_map + } + + /// Merge overlapping ranges by splitting at all range boundaries and blending colors where + /// multiple ranges overlap. The result contains non-overlapping ranges ordered from left to right. + /// + /// Expects `start.row() == end.row()` for each range. + fn merge_overlapping_ranges( + ranges: Vec<(Range, Hsla)>, + base_background: Hsla, + ) -> Vec<(Range, Hsla)> { + struct Boundary { + pos: DisplayPoint, + is_start: bool, + index: usize, + color: Hsla, + } + + let mut boundaries: SmallVec<[Boundary; 16]> = SmallVec::with_capacity(ranges.len() * 2); + for (index, (range, color)) in ranges.iter().enumerate() { + debug_assert!( + range.start.row() == range.end.row(), + "expects single-row ranges" + ); + if range.start < range.end { + boundaries.push(Boundary { + pos: range.start, + is_start: true, + index, + color: *color, + }); + boundaries.push(Boundary { + pos: range.end, + is_start: false, + index, + color: *color, + }); + } + } + + if boundaries.is_empty() { + return Vec::new(); + } + + boundaries + .sort_unstable_by(|a, b| a.pos.cmp(&b.pos).then_with(|| a.is_start.cmp(&b.is_start))); + + let mut processed_ranges: Vec<(Range, Hsla)> = Vec::new(); + let mut active_ranges: SmallVec<[(usize, Hsla); 8]> = SmallVec::new(); + + let mut i = 0; + let mut start_pos = boundaries[0].pos; + + let boundaries_len = boundaries.len(); + while i < boundaries_len { + let current_boundary_pos = boundaries[i].pos; + if start_pos < current_boundary_pos { + if !active_ranges.is_empty() { + let mut color = base_background; + for &(_, c) in &active_ranges { + color = Hsla::blend(color, c); + } + if let Some((last_range, last_color)) = processed_ranges.last_mut() { + 
if *last_color == color && last_range.end == start_pos { + last_range.end = current_boundary_pos; + } else { + processed_ranges.push((start_pos..current_boundary_pos, color)); + } + } else { + processed_ranges.push((start_pos..current_boundary_pos, color)); + } + } + } + while i < boundaries_len && boundaries[i].pos == current_boundary_pos { + let active_range = &boundaries[i]; + if active_range.is_start { + let idx = active_range.index; + let pos = active_ranges + .binary_search_by_key(&idx, |(i, _)| *i) + .unwrap_or_else(|p| p); + active_ranges.insert(pos, (idx, active_range.color)); + } else { + let idx = active_range.index; + if let Ok(pos) = active_ranges.binary_search_by_key(&idx, |(i, _)| *i) { + active_ranges.remove(pos); + } + } + i += 1; + } + start_pos = current_boundary_pos; + } + + processed_ranges + } + fn layout_lines( rows: Range, snapshot: &EditorSnapshot, style: &EditorStyle, editor_width: Pixels, is_row_soft_wrapped: impl Copy + Fn(usize) -> bool, + bg_segments_per_row: &[Vec<(Range, Hsla)>], window: &mut Window, cx: &mut App, ) -> Vec { @@ -3321,6 +3471,7 @@ impl EditorElement { &snapshot.mode, editor_width, is_row_soft_wrapped, + bg_segments_per_row, window, cx, ) @@ -7340,6 +7491,7 @@ impl LineWithInvisibles { editor_mode: &EditorMode, text_width: Pixels, is_row_soft_wrapped: impl Copy + Fn(usize) -> bool, + bg_segments_per_row: &[Vec<(Range, Hsla)>], window: &mut Window, cx: &mut App, ) -> Vec { @@ -7355,6 +7507,7 @@ impl LineWithInvisibles { let mut row = 0; let mut line_exceeded_max_len = false; let font_size = text_style.font_size.to_pixels(window.rem_size()); + let min_contrast = EditorSettings::get_global(cx).minimum_contrast_for_highlights; let ellipsis = SharedString::from("⋯"); @@ -7367,10 +7520,16 @@ impl LineWithInvisibles { }]) { if let Some(replacement) = highlighted_chunk.replacement { if !line.is_empty() { + let segments = bg_segments_per_row.get(row).map(|v| &v[..]).unwrap_or(&[]); + let text_runs: &[TextRun] = if 
segments.is_empty() { + &styles + } else { + &Self::split_runs_by_bg_segments(&styles, segments, min_contrast) + }; let shaped_line = window.text_system().shape_line( line.clone().into(), font_size, - &styles, + text_runs, None, ); width += shaped_line.width; @@ -7448,10 +7607,16 @@ impl LineWithInvisibles { } else { for (ix, mut line_chunk) in highlighted_chunk.text.split('\n').enumerate() { if ix > 0 { + let segments = bg_segments_per_row.get(row).map(|v| &v[..]).unwrap_or(&[]); + let text_runs = if segments.is_empty() { + &styles + } else { + &Self::split_runs_by_bg_segments(&styles, segments, min_contrast) + }; let shaped_line = window.text_system().shape_line( line.clone().into(), font_size, - &styles, + text_runs, None, ); width += shaped_line.width; @@ -7539,6 +7704,81 @@ impl LineWithInvisibles { layouts } + /// Takes text runs and non-overlapping left-to-right background ranges with color. + /// Returns new text runs with adjusted contrast as per background ranges. + fn split_runs_by_bg_segments( + text_runs: &[TextRun], + bg_segments: &[(Range, Hsla)], + min_contrast: f32, + ) -> Vec { + let mut output_runs: Vec = Vec::with_capacity(text_runs.len()); + let mut line_col = 0usize; + let mut segment_ix = 0usize; + + for text_run in text_runs.iter() { + let run_start_col = line_col; + let run_end_col = run_start_col + text_run.len; + while segment_ix < bg_segments.len() + && (bg_segments[segment_ix].0.end.column() as usize) <= run_start_col + { + segment_ix += 1; + } + let mut cursor_col = run_start_col; + let mut local_segment_ix = segment_ix; + while local_segment_ix < bg_segments.len() { + let (range, segment_color) = &bg_segments[local_segment_ix]; + let segment_start_col = range.start.column() as usize; + let segment_end_col = range.end.column() as usize; + if segment_start_col >= run_end_col { + break; + } + if segment_start_col > cursor_col { + let span_len = segment_start_col - cursor_col; + output_runs.push(TextRun { + len: span_len, + font: 
text_run.font.clone(), + color: text_run.color, + background_color: text_run.background_color, + underline: text_run.underline, + strikethrough: text_run.strikethrough, + }); + cursor_col = segment_start_col; + } + let segment_slice_end_col = segment_end_col.min(run_end_col); + if segment_slice_end_col > cursor_col { + let new_text_color = + ensure_minimum_contrast(text_run.color, *segment_color, min_contrast); + output_runs.push(TextRun { + len: segment_slice_end_col - cursor_col, + font: text_run.font.clone(), + color: new_text_color, + background_color: text_run.background_color, + underline: text_run.underline, + strikethrough: text_run.strikethrough, + }); + cursor_col = segment_slice_end_col; + } + if segment_end_col >= run_end_col { + break; + } + local_segment_ix += 1; + } + if cursor_col < run_end_col { + output_runs.push(TextRun { + len: run_end_col - cursor_col, + font: text_run.font.clone(), + color: text_run.color, + background_color: text_run.background_color, + underline: text_run.underline, + strikethrough: text_run.strikethrough, + }); + } + line_col = run_end_col; + segment_ix = local_segment_ix; + } + output_runs + } + fn prepaint( &mut self, line_height: Pixels, @@ -8452,12 +8692,20 @@ impl Element for EditorElement { cx, ); + let bg_segments_per_row = Self::bg_segments_per_row( + start_row..end_row, + &selections, + &highlighted_ranges, + self.style.background, + ); + let mut line_layouts = Self::layout_lines( start_row..end_row, &snapshot, &self.style, editor_width, is_row_soft_wrapped, + &bg_segments_per_row, window, cx, ); @@ -9817,6 +10065,7 @@ pub fn layout_line( &snapshot.mode, text_width, is_row_soft_wrapped, + &[], window, cx, ) @@ -10717,4 +10966,289 @@ mod tests { .cloned() .collect() } + + #[gpui::test] + fn test_merge_overlapping_ranges() { + let base_bg = Hsla::default(); + let color1 = Hsla { + h: 0.0, + s: 0.5, + l: 0.5, + a: 0.5, + }; + let color2 = Hsla { + h: 120.0, + s: 0.5, + l: 0.5, + a: 0.5, + }; + + let display_point = 
|col| DisplayPoint::new(DisplayRow(0), col); + let cols = |v: &Vec<(Range, Hsla)>| -> Vec<(u32, u32)> { + v.iter() + .map(|(r, _)| (r.start.column(), r.end.column())) + .collect() + }; + + // Test overlapping ranges blend colors + let overlapping = vec![ + (display_point(5)..display_point(15), color1), + (display_point(10)..display_point(20), color2), + ]; + let result = EditorElement::merge_overlapping_ranges(overlapping, base_bg); + assert_eq!(cols(&result), vec![(5, 10), (10, 15), (15, 20)]); + + // Test middle segment should have blended color + let blended = Hsla::blend(Hsla::blend(base_bg, color1), color2); + assert_eq!(result[1].1, blended); + + // Test adjacent same-color ranges merge + let adjacent_same = vec![ + (display_point(5)..display_point(10), color1), + (display_point(10)..display_point(15), color1), + ]; + let result = EditorElement::merge_overlapping_ranges(adjacent_same, base_bg); + assert_eq!(cols(&result), vec![(5, 15)]); + + // Test contained range splits + let contained = vec![ + (display_point(5)..display_point(20), color1), + (display_point(10)..display_point(15), color2), + ]; + let result = EditorElement::merge_overlapping_ranges(contained, base_bg); + assert_eq!(cols(&result), vec![(5, 10), (10, 15), (15, 20)]); + + // Test multiple overlaps split at every boundary + let color3 = Hsla { + h: 240.0, + s: 0.5, + l: 0.5, + a: 0.5, + }; + let complex = vec![ + (display_point(5)..display_point(12), color1), + (display_point(8)..display_point(16), color2), + (display_point(10)..display_point(14), color3), + ]; + let result = EditorElement::merge_overlapping_ranges(complex, base_bg); + assert_eq!( + cols(&result), + vec![(5, 8), (8, 10), (10, 12), (12, 14), (14, 16)] + ); + } + + #[gpui::test] + fn test_bg_segments_per_row() { + let base_bg = Hsla::default(); + + // Case A: selection spans three display rows: row 1 [5, end), full row 2, row 3 [0, 7) + { + let selection_color = Hsla { + h: 200.0, + s: 0.5, + l: 0.5, + a: 0.5, + }; + let 
player_color = PlayerColor { + cursor: selection_color, + background: selection_color, + selection: selection_color, + }; + + let spanning_selection = SelectionLayout { + head: DisplayPoint::new(DisplayRow(3), 7), + cursor_shape: CursorShape::Bar, + is_newest: true, + is_local: true, + range: DisplayPoint::new(DisplayRow(1), 5)..DisplayPoint::new(DisplayRow(3), 7), + active_rows: DisplayRow(1)..DisplayRow(4), + user_name: None, + }; + + let selections = vec![(player_color, vec![spanning_selection])]; + let result = EditorElement::bg_segments_per_row( + DisplayRow(0)..DisplayRow(5), + &selections, + &[], + base_bg, + ); + + assert_eq!(result.len(), 5); + assert!(result[0].is_empty()); + assert_eq!(result[1].len(), 1); + assert_eq!(result[2].len(), 1); + assert_eq!(result[3].len(), 1); + assert!(result[4].is_empty()); + + assert_eq!(result[1][0].0.start, DisplayPoint::new(DisplayRow(1), 5)); + assert_eq!(result[1][0].0.end.row(), DisplayRow(1)); + assert_eq!(result[1][0].0.end.column(), u32::MAX); + assert_eq!(result[2][0].0.start, DisplayPoint::new(DisplayRow(2), 0)); + assert_eq!(result[2][0].0.end.row(), DisplayRow(2)); + assert_eq!(result[2][0].0.end.column(), u32::MAX); + assert_eq!(result[3][0].0.start, DisplayPoint::new(DisplayRow(3), 0)); + assert_eq!(result[3][0].0.end, DisplayPoint::new(DisplayRow(3), 7)); + } + + // Case B: selection ends exactly at the start of row 3, excluding row 3 + { + let selection_color = Hsla { + h: 120.0, + s: 0.5, + l: 0.5, + a: 0.5, + }; + let player_color = PlayerColor { + cursor: selection_color, + background: selection_color, + selection: selection_color, + }; + + let selection = SelectionLayout { + head: DisplayPoint::new(DisplayRow(2), 0), + cursor_shape: CursorShape::Bar, + is_newest: true, + is_local: true, + range: DisplayPoint::new(DisplayRow(1), 5)..DisplayPoint::new(DisplayRow(3), 0), + active_rows: DisplayRow(1)..DisplayRow(3), + user_name: None, + }; + + let selections = vec![(player_color, vec![selection])]; + let 
result = EditorElement::bg_segments_per_row( + DisplayRow(0)..DisplayRow(4), + &selections, + &[], + base_bg, + ); + + assert_eq!(result.len(), 4); + assert!(result[0].is_empty()); + assert_eq!(result[1].len(), 1); + assert_eq!(result[2].len(), 1); + assert!(result[3].is_empty()); + + assert_eq!(result[1][0].0.start, DisplayPoint::new(DisplayRow(1), 5)); + assert_eq!(result[1][0].0.end.row(), DisplayRow(1)); + assert_eq!(result[1][0].0.end.column(), u32::MAX); + assert_eq!(result[2][0].0.start, DisplayPoint::new(DisplayRow(2), 0)); + assert_eq!(result[2][0].0.end.row(), DisplayRow(2)); + assert_eq!(result[2][0].0.end.column(), u32::MAX); + } + } + + #[cfg(test)] + fn generate_test_run(len: usize, color: Hsla) -> TextRun { + TextRun { + len, + font: gpui::font(".SystemUIFont"), + color, + background_color: None, + underline: None, + strikethrough: None, + } + } + + #[gpui::test] + fn test_split_runs_by_bg_segments(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let text_color = Hsla { + h: 210.0, + s: 0.1, + l: 0.4, + a: 1.0, + }; + let bg1 = Hsla { + h: 30.0, + s: 0.6, + l: 0.8, + a: 1.0, + }; + let bg2 = Hsla { + h: 200.0, + s: 0.6, + l: 0.2, + a: 1.0, + }; + let min_contrast = 45.0; + + // Case A: single run; disjoint segments inside the run + let runs = vec![generate_test_run(20, text_color)]; + let segs = vec![ + ( + DisplayPoint::new(DisplayRow(0), 5)..DisplayPoint::new(DisplayRow(0), 10), + bg1, + ), + ( + DisplayPoint::new(DisplayRow(0), 12)..DisplayPoint::new(DisplayRow(0), 16), + bg2, + ), + ]; + let out = LineWithInvisibles::split_runs_by_bg_segments(&runs, &segs, min_contrast); + // Expected slices: [0,5) [5,10) [10,12) [12,16) [16,20) + assert_eq!( + out.iter().map(|r| r.len).collect::>(), + vec![5, 5, 2, 4, 4] + ); + assert_eq!(out[0].color, text_color); + assert_eq!( + out[1].color, + ensure_minimum_contrast(text_color, bg1, min_contrast) + ); + assert_eq!(out[2].color, text_color); + assert_eq!( + out[3].color, + 
ensure_minimum_contrast(text_color, bg2, min_contrast) + ); + assert_eq!(out[4].color, text_color); + + // Case B: multiple runs; segment extends to end of line (u32::MAX) + let runs = vec![ + generate_test_run(8, text_color), + generate_test_run(7, text_color), + ]; + let segs = vec![( + DisplayPoint::new(DisplayRow(0), 6)..DisplayPoint::new(DisplayRow(0), u32::MAX), + bg1, + )]; + let out = LineWithInvisibles::split_runs_by_bg_segments(&runs, &segs, min_contrast); + // Expected slices across runs: [0,6) [6,8) | [0,7) + assert_eq!(out.iter().map(|r| r.len).collect::>(), vec![6, 2, 7]); + let adjusted = ensure_minimum_contrast(text_color, bg1, min_contrast); + assert_eq!(out[0].color, text_color); + assert_eq!(out[1].color, adjusted); + assert_eq!(out[2].color, adjusted); + + // Case C: multi-byte characters + // for text: "Hello 🌍 世界!" + let runs = vec![ + generate_test_run(5, text_color), // "Hello" + generate_test_run(6, text_color), // " 🌍 " + generate_test_run(6, text_color), // "世界" + generate_test_run(1, text_color), // "!" + ]; + // selecting "🌍 世" + let segs = vec![( + DisplayPoint::new(DisplayRow(0), 6)..DisplayPoint::new(DisplayRow(0), 14), + bg1, + )]; + let out = LineWithInvisibles::split_runs_by_bg_segments(&runs, &segs, min_contrast); + // "Hello" | " " | "🌍 " | "世" | "界" | "!" + assert_eq!( + out.iter().map(|r| r.len).collect::>(), + vec![5, 1, 5, 3, 3, 1] + ); + assert_eq!(out[0].color, text_color); // "Hello" + assert_eq!( + out[2].color, + ensure_minimum_contrast(text_color, bg1, min_contrast) + ); // "🌍 " + assert_eq!( + out[3].color, + ensure_minimum_contrast(text_color, bg1, min_contrast) + ); // "世" + assert_eq!(out[4].color, text_color); // "界" + assert_eq!(out[5].color, text_color); // "!" 
+ } } From 3d4f9172040eacc6cc8787524588506abef07c0a Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 29 Aug 2025 14:07:27 -0300 Subject: [PATCH 14/54] Make project symbols picker entry consistent with outline picker (#37176) Closes https://github.com/zed-industries/zed/issues/36383 The project symbols modal didn't use the buffer font and highlighted matches through modifying the font weight, which is inconsistent with the outline picker, which presents code in list items in a similar way, as well as project _and_ buffer search highlighting design. Release Notes: - N/A --- crates/project_symbols/src/project_symbols.rs | 57 +++++++++++-------- 1 file changed, 33 insertions(+), 24 deletions(-) diff --git a/crates/project_symbols/src/project_symbols.rs b/crates/project_symbols/src/project_symbols.rs index 7f42f9e8efbda74bae52318d7353896e296ababc..ea67499acb07fc7517028dcd43282b051d52c3eb 100644 --- a/crates/project_symbols/src/project_symbols.rs +++ b/crates/project_symbols/src/project_symbols.rs @@ -1,18 +1,19 @@ use editor::{Bias, Editor, SelectionEffects, scroll::Autoscroll, styled_runs_for_code_label}; use fuzzy::{StringMatch, StringMatchCandidate}; use gpui::{ - App, Context, DismissEvent, Entity, FontWeight, ParentElement, StyledText, Task, WeakEntity, - Window, rems, + App, Context, DismissEvent, Entity, HighlightStyle, ParentElement, StyledText, Task, TextStyle, + WeakEntity, Window, relative, rems, }; use ordered_float::OrderedFloat; use picker::{Picker, PickerDelegate}; use project::{Project, Symbol}; +use settings::Settings; use std::{borrow::Cow, cmp::Reverse, sync::Arc}; -use theme::ActiveTheme; +use theme::{ActiveTheme, ThemeSettings}; use util::ResultExt; use workspace::{ Workspace, - ui::{Color, Label, LabelCommon, LabelLike, ListItem, ListItemSpacing, Toggleable, v_flex}, + ui::{LabelLike, ListItem, ListItemSpacing, prelude::*}, }; pub fn init(cx: &mut App) { @@ -213,7 +214,7 @@ impl PickerDelegate 
for ProjectSymbolsDelegate { &self, ix: usize, selected: bool, - window: &mut Window, + _window: &mut Window, cx: &mut Context>, ) -> Option { let string_match = &self.matches[ix]; @@ -235,18 +236,29 @@ impl PickerDelegate for ProjectSymbolsDelegate { let label = symbol.label.text.clone(); let path = path.to_string(); - let highlights = gpui::combine_highlights( - string_match - .positions - .iter() - .map(|pos| (*pos..pos + 1, FontWeight::BOLD.into())), - syntax_runs.map(|(range, mut highlight)| { - // Ignore font weight for syntax highlighting, as we'll use it - // for fuzzy matches. - highlight.font_weight = None; - (range, highlight) - }), - ); + let settings = ThemeSettings::get_global(cx); + + let text_style = TextStyle { + color: cx.theme().colors().text, + font_family: settings.buffer_font.family.clone(), + font_features: settings.buffer_font.features.clone(), + font_fallbacks: settings.buffer_font.fallbacks.clone(), + font_size: settings.buffer_font_size(cx).into(), + font_weight: settings.buffer_font.weight, + line_height: relative(1.), + ..Default::default() + }; + + let highlight_style = HighlightStyle { + background_color: Some(cx.theme().colors().text_accent.alpha(0.3)), + ..Default::default() + }; + let custom_highlights = string_match + .positions + .iter() + .map(|pos| (*pos..pos + 1, highlight_style)); + + let highlights = gpui::combine_highlights(custom_highlights, syntax_runs); Some( ListItem::new(ix) @@ -255,13 +267,10 @@ impl PickerDelegate for ProjectSymbolsDelegate { .toggle_state(selected) .child( v_flex() - .child( - LabelLike::new().child( - StyledText::new(label) - .with_default_highlights(&window.text_style(), highlights), - ), - ) - .child(Label::new(path).color(Color::Muted)), + .child(LabelLike::new().child( + StyledText::new(label).with_default_highlights(&text_style, highlights), + )) + .child(Label::new(path).size(LabelSize::Small).color(Color::Muted)), ), ) } From 92f739dbb9f2b46a1d825b39c0ea2c521dae0dbc Mon Sep 17 00:00:00 2001 
From: Cole Miller Date: Fri, 29 Aug 2025 13:40:39 -0400 Subject: [PATCH 15/54] acp: Improve error reporting and log more information when failing to launch gemini (#37178) In the case where we fail to create an ACP connection to Gemini, only report the "unsupported version" error if the version for the found binary is at least our minimum version. That means we'll surface the real error in this situation. This also fixes incorrect sorting of downloaded Gemini versions--as @kpe pointed out we were effectively using the version string as a key. Now we'll correctly use the parsed semver::Version instead. Release Notes: - N/A --- crates/agent_servers/src/agent_servers.rs | 11 ++++--- crates/agent_servers/src/gemini.rs | 39 ++++++++++++++--------- 2 files changed, 31 insertions(+), 19 deletions(-) diff --git a/crates/agent_servers/src/agent_servers.rs b/crates/agent_servers/src/agent_servers.rs index c1fc7b91ae862a25eac8da998f4b848327a3dd3e..c610c53ea8d61d24ece6d3c80ec15505d259ea3b 100644 --- a/crates/agent_servers/src/agent_servers.rs +++ b/crates/agent_servers/src/agent_servers.rs @@ -105,22 +105,23 @@ impl AgentServerDelegate { .to_str() .and_then(|name| semver::Version::from_str(&name).ok()) { - versions.push((file_name.to_owned(), version)); + versions.push((version, file_name.to_owned())); } else { to_delete.push(file_name.to_owned()) } } versions.sort(); - let newest_version = if let Some((file_name, version)) = versions.last().cloned() - && minimum_version.is_none_or(|minimum_version| version > minimum_version) + let newest_version = if let Some((version, file_name)) = versions.last().cloned() + && minimum_version.is_none_or(|minimum_version| version >= minimum_version) { versions.pop(); Some(file_name) } else { None }; - to_delete.extend(versions.into_iter().map(|(file_name, _)| file_name)); + log::debug!("existing version of {package_name}: {newest_version:?}"); + to_delete.extend(versions.into_iter().map(|(_, file_name)| file_name)); cx.background_spawn({ let 
fs = fs.clone(); @@ -200,6 +201,8 @@ impl AgentServerDelegate { node_runtime: NodeRuntime, package_name: SharedString, ) -> Result { + log::debug!("downloading latest version of {package_name}"); + let tmp_dir = tempfile::tempdir_in(&dir)?; node_runtime diff --git a/crates/agent_servers/src/gemini.rs b/crates/agent_servers/src/gemini.rs index 5e958f686959d78e6ceaf8b8ea7d8404ffba166a..a1553d288ab44d96bdfe08723a092ce231ba005b 100644 --- a/crates/agent_servers/src/gemini.rs +++ b/crates/agent_servers/src/gemini.rs @@ -63,7 +63,9 @@ impl AgentServer for Gemini { })? .await? }; - command.args.push("--experimental-acp".into()); + if !command.args.contains(&ACP_ARG.into()) { + command.args.push(ACP_ARG.into()); + } if let Some(api_key) = cx.update(GoogleLanguageModelProvider::api_key)?.await.ok() { command @@ -86,17 +88,17 @@ impl AgentServer for Gemini { .await; let current_version = String::from_utf8(version_output?.stdout)?.trim().to_owned(); - if !connection.prompt_capabilities().image { - return Err(LoadError::Unsupported { - current_version: current_version.into(), - command: command.path.to_string_lossy().to_string().into(), - minimum_version: Self::MINIMUM_VERSION.into(), - } - .into()); + + log::error!("connected to gemini, but missing prompt_capabilities.image (version is {current_version})"); + return Err(LoadError::Unsupported { + current_version: current_version.into(), + command: command.path.to_string_lossy().to_string().into(), + minimum_version: Self::MINIMUM_VERSION.into(), } + .into()); } } - Err(_) => { + Err(e) => { let version_fut = util::command::new_smol_command(&command.path) .args(command.args.iter()) .arg("--version") @@ -111,12 +113,19 @@ impl AgentServer for Gemini { let (version_output, help_output) = futures::future::join(version_fut, help_fut).await; - - let current_version = std::str::from_utf8(&version_output?.stdout)? 
- .trim() - .to_string(); - let supported = String::from_utf8(help_output?.stdout)?.contains(ACP_ARG); - + let Some(version_output) = version_output.ok().and_then(|output| String::from_utf8(output.stdout).ok()) else { + return result; + }; + let Some((help_stdout, help_stderr)) = help_output.ok().and_then(|output| String::from_utf8(output.stdout).ok().zip(String::from_utf8(output.stderr).ok())) else { + return result; + }; + + let current_version = version_output.trim().to_string(); + let supported = help_stdout.contains(ACP_ARG) || current_version.parse::().is_ok_and(|version| version >= Self::MINIMUM_VERSION.parse::().unwrap()); + + log::error!("failed to create ACP connection to gemini (version is {current_version}, supported: {supported}): {e}"); + log::debug!("gemini --help stdout: {help_stdout:?}"); + log::debug!("gemini --help stderr: {help_stderr:?}"); if !supported { return Err(LoadError::Unsupported { current_version: current_version.into(), From a790e514af4d6957aa1a14cc8190b2ff24a0484c Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Fri, 29 Aug 2025 14:58:54 -0300 Subject: [PATCH 16/54] Fix ACP permission request with new tool calls (#37182) Release Notes: - Gemini integration: Fixed a bug with permission requests when `always_allow_tool_calls` is enabled --- Cargo.lock | 1 + crates/acp_thread/Cargo.toml | 1 + crates/acp_thread/src/acp_thread.rs | 35 ++++++++++++++++++++++++++-- crates/acp_thread/src/connection.rs | 17 +++++++------- crates/agent2/src/agent.rs | 13 ++++------- crates/agent_servers/src/acp.rs | 36 ++++------------------------- 6 files changed, 53 insertions(+), 50 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index aa1bcab9a68294baa4264916ef5a35adbeb20802..e201b4af804b0be95f100c34f93652b6ecf6f8e6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8,6 +8,7 @@ version = "0.1.0" dependencies = [ "action_log", "agent-client-protocol", + "agent_settings", "anyhow", "buffer_diff", "collections", diff --git a/crates/acp_thread/Cargo.toml 
b/crates/acp_thread/Cargo.toml index eab756db51885b8b2e2797bbf0303937f19fefb9..196614f731c6e330328e46eb75ba58cf928cf6cc 100644 --- a/crates/acp_thread/Cargo.toml +++ b/crates/acp_thread/Cargo.toml @@ -19,6 +19,7 @@ test-support = ["gpui/test-support", "project/test-support", "dep:parking_lot"] action_log.workspace = true agent-client-protocol.workspace = true anyhow.workspace = true +agent_settings.workspace = true buffer_diff.workspace = true collections.workspace = true editor.workspace = true diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index 04ff032ad40c600c80fed7cff9f48139b2307931..394619732a72c205b6c5c940cc8b2b7d3a6d3d38 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -3,6 +3,7 @@ mod diff; mod mention; mod terminal; +use agent_settings::AgentSettings; use collections::HashSet; pub use connection::*; pub use diff::*; @@ -10,6 +11,7 @@ use language::language_settings::FormatOnSave; pub use mention::*; use project::lsp_store::{FormatTrigger, LspFormatTarget}; use serde::{Deserialize, Serialize}; +use settings::Settings as _; pub use terminal::*; use action_log::ActionLog; @@ -1230,9 +1232,29 @@ impl AcpThread { tool_call: acp::ToolCallUpdate, options: Vec, cx: &mut Context, - ) -> Result, acp::Error> { + ) -> Result> { let (tx, rx) = oneshot::channel(); + if AgentSettings::get_global(cx).always_allow_tool_actions { + // Don't use AllowAlways, because then if you were to turn off always_allow_tool_actions, + // some tools would (incorrectly) continue to auto-accept. 
+ if let Some(allow_once_option) = options.iter().find_map(|option| { + if matches!(option.kind, acp::PermissionOptionKind::AllowOnce) { + Some(option.id.clone()) + } else { + None + } + }) { + self.upsert_tool_call_inner(tool_call, ToolCallStatus::Pending, cx)?; + return Ok(async { + acp::RequestPermissionOutcome::Selected { + option_id: allow_once_option, + } + } + .boxed()); + } + } + let status = ToolCallStatus::WaitingForConfirmation { options, respond_tx: tx, @@ -1240,7 +1262,16 @@ impl AcpThread { self.upsert_tool_call_inner(tool_call, status, cx)?; cx.emit(AcpThreadEvent::ToolAuthorizationRequired); - Ok(rx) + + let fut = async { + match rx.await { + Ok(option) => acp::RequestPermissionOutcome::Selected { option_id: option }, + Err(oneshot::Canceled) => acp::RequestPermissionOutcome::Cancelled, + } + } + .boxed(); + + Ok(fut) } pub fn authorize_tool_call( diff --git a/crates/acp_thread/src/connection.rs b/crates/acp_thread/src/connection.rs index af229b7545651c2f19f361afc7ea0abadcb5cc76..96abd1d2b4cf92698e7046cd4b7e24e6043280ff 100644 --- a/crates/acp_thread/src/connection.rs +++ b/crates/acp_thread/src/connection.rs @@ -393,14 +393,15 @@ mod test_support { }; let task = cx.spawn(async move |cx| { if let Some((tool_call, options)) = permission_request { - let permission = thread.update(cx, |thread, cx| { - thread.request_tool_call_authorization( - tool_call.clone().into(), - options.clone(), - cx, - ) - })?; - permission?.await?; + thread + .update(cx, |thread, cx| { + thread.request_tool_call_authorization( + tool_call.clone().into(), + options.clone(), + cx, + ) + })?? 
+ .await; } thread.update(cx, |thread, cx| { thread.handle_session_update(update.clone(), cx).unwrap(); diff --git a/crates/agent2/src/agent.rs b/crates/agent2/src/agent.rs index ea80df8fb52cffab80c8c64307b75de7f0954a56..bb6a3c097ca27d6103c1072986f6d3255bc6c69f 100644 --- a/crates/agent2/src/agent.rs +++ b/crates/agent2/src/agent.rs @@ -762,18 +762,15 @@ impl NativeAgentConnection { options, response, }) => { - let recv = acp_thread.update(cx, |thread, cx| { + let outcome_task = acp_thread.update(cx, |thread, cx| { thread.request_tool_call_authorization(tool_call, options, cx) - })?; + })??; cx.background_spawn(async move { - if let Some(recv) = recv.log_err() - && let Some(option) = recv - .await - .context("authorization sender was dropped") - .log_err() + if let acp::RequestPermissionOutcome::Selected { option_id } = + outcome_task.await { response - .send(option) + .send(option_id) .map(|_| anyhow!("authorization receiver was dropped")) .log_err(); } diff --git a/crates/agent_servers/src/acp.rs b/crates/agent_servers/src/acp.rs index d929d1fc501fb2093f47f8bdeb4d3695b7b87ebf..b1d4bea5c35c113277847690906dd2f21e12050c 100644 --- a/crates/agent_servers/src/acp.rs +++ b/crates/agent_servers/src/acp.rs @@ -3,15 +3,13 @@ use acp_thread::AgentConnection; use acp_tools::AcpConnectionRegistry; use action_log::ActionLog; use agent_client_protocol::{self as acp, Agent as _, ErrorCode}; -use agent_settings::AgentSettings; use anyhow::anyhow; use collections::HashMap; use futures::AsyncBufReadExt as _; -use futures::channel::oneshot; use futures::io::BufReader; use project::Project; use serde::Deserialize; -use settings::Settings as _; + use std::{any::Any, cell::RefCell}; use std::{path::Path, rc::Rc}; use thiserror::Error; @@ -345,28 +343,7 @@ impl acp::Client for ClientDelegate { ) -> Result { let cx = &mut self.cx.clone(); - // If always_allow_tool_actions is enabled, then auto-choose the first "Allow" button - if AgentSettings::try_read_global(cx, |settings| 
settings.always_allow_tool_actions) - .unwrap_or(false) - { - // Don't use AllowAlways, because then if you were to turn off always_allow_tool_actions, - // some tools would (incorrectly) continue to auto-accept. - if let Some(allow_once_option) = arguments.options.iter().find_map(|option| { - if matches!(option.kind, acp::PermissionOptionKind::AllowOnce) { - Some(option.id.clone()) - } else { - None - } - }) { - return Ok(acp::RequestPermissionResponse { - outcome: acp::RequestPermissionOutcome::Selected { - option_id: allow_once_option, - }, - }); - } - } - - let rx = self + let task = self .sessions .borrow() .get(&arguments.session_id) @@ -374,14 +351,9 @@ impl acp::Client for ClientDelegate { .thread .update(cx, |thread, cx| { thread.request_tool_call_authorization(arguments.tool_call, arguments.options, cx) - })?; + })??; - let result = rx?.await; - - let outcome = match result { - Ok(option) => acp::RequestPermissionOutcome::Selected { option_id: option }, - Err(oneshot::Canceled) => acp::RequestPermissionOutcome::Cancelled, - }; + let outcome = task.await; Ok(acp::RequestPermissionResponse { outcome }) } From fcc3d1092fc2c0323d6f06e93e06c5ed8fad2c0e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Raphael=20L=C3=BCthy?= Date: Fri, 29 Aug 2025 22:17:22 +0200 Subject: [PATCH 17/54] supermaven: Improve completion caching and position validation (#37047) Closes #36981 - Add completion text and position caching to reduce redundant API calls - Only trigger new completion requests on text changes, not cursor movement - Validate cursor position to ensure completions show at correct location - Improve end-of-line range calculation for more accurate deletions - Extract reset_completion_cache helper for cleaner code organization - Update completion diff algorithm documentation for clarity Edit: Sorry this is the 2nd PR, I forgot that the forks history was messy; I cherrypicked and cleaned it properly with this PR Release Notes: - supermaven: Improved caching of predictions - 
supermaven: Fixed an issue where changing cursor position would incorrectly trigger new completions --- .../src/supermaven_completion_provider.rs | 93 +++++++++++++++---- 1 file changed, 77 insertions(+), 16 deletions(-) diff --git a/crates/supermaven/src/supermaven_completion_provider.rs b/crates/supermaven/src/supermaven_completion_provider.rs index eb54c83f8126002a19728e51b282b98191707717..89c5129822d94229cd1644587f15f4a4de2bf86a 100644 --- a/crates/supermaven/src/supermaven_completion_provider.rs +++ b/crates/supermaven/src/supermaven_completion_provider.rs @@ -19,8 +19,10 @@ pub struct SupermavenCompletionProvider { supermaven: Entity, buffer_id: Option, completion_id: Option, + completion_text: Option, file_extension: Option, pending_refresh: Option>>, + completion_position: Option, } impl SupermavenCompletionProvider { @@ -29,16 +31,19 @@ impl SupermavenCompletionProvider { supermaven, buffer_id: None, completion_id: None, + completion_text: None, file_extension: None, pending_refresh: None, + completion_position: None, } } } // Computes the edit prediction from the difference between the completion text. -// this is defined by greedily matching the buffer text against the completion text, with any leftover buffer placed at the end. -// for example, given the completion text "moo cows are cool" and the buffer text "cowsre pool", the completion state would be -// the inlays "moo ", " a", and "cool" which will render as "[moo ]cows[ a]re [cool]pool" in the editor. +// This is defined by greedily matching the buffer text against the completion text. +// Inlays are inserted for parts of the completion text that are not present in the buffer text. +// For example, given the completion text "axbyc" and the buffer text "xy", the rendered output in the editor would be "[a]x[b]y[c]". +// The parts in brackets are the inlays. 
fn completion_from_diff( snapshot: BufferSnapshot, completion_text: &str, @@ -133,6 +138,14 @@ impl EditPredictionProvider for SupermavenCompletionProvider { debounce: bool, cx: &mut Context, ) { + // Only make new completion requests when debounce is true (i.e., when text is typed) + // When debounce is false (i.e., cursor movement), we should not make new requests + if !debounce { + return; + } + + reset_completion_cache(self, cx); + let Some(mut completion) = self.supermaven.update(cx, |supermaven, cx| { supermaven.complete(&buffer_handle, cursor_position, cx) }) else { @@ -146,6 +159,17 @@ impl EditPredictionProvider for SupermavenCompletionProvider { while let Some(()) = completion.updates.next().await { this.update(cx, |this, cx| { + // Get the completion text and cache it + if let Some(text) = + this.supermaven + .read(cx) + .completion(&buffer_handle, cursor_position, cx) + { + this.completion_text = Some(text.to_string()); + + this.completion_position = Some(cursor_position); + } + this.completion_id = Some(completion.id); this.buffer_id = Some(buffer_handle.entity_id()); this.file_extension = buffer_handle.read(cx).file().and_then(|file| { @@ -156,7 +180,6 @@ impl EditPredictionProvider for SupermavenCompletionProvider { .to_string(), ) }); - this.pending_refresh = None; cx.notify(); })?; } @@ -174,13 +197,11 @@ impl EditPredictionProvider for SupermavenCompletionProvider { } fn accept(&mut self, _cx: &mut Context) { - self.pending_refresh = None; - self.completion_id = None; + reset_completion_cache(self, _cx); } fn discard(&mut self, _cx: &mut Context) { - self.pending_refresh = None; - self.completion_id = None; + reset_completion_cache(self, _cx); } fn suggest( @@ -189,10 +210,34 @@ impl EditPredictionProvider for SupermavenCompletionProvider { cursor_position: Anchor, cx: &mut Context, ) -> Option { - let completion_text = self - .supermaven - .read(cx) - .completion(buffer, cursor_position, cx)?; + if self.buffer_id != Some(buffer.entity_id()) { + 
return None; + } + + if self.completion_id.is_none() { + return None; + } + + let completion_text = if let Some(cached_text) = &self.completion_text { + cached_text.as_str() + } else { + let text = self + .supermaven + .read(cx) + .completion(buffer, cursor_position, cx)?; + self.completion_text = Some(text.to_string()); + text + }; + + // Check if the cursor is still at the same position as the completion request + // If we don't have a completion position stored, don't show the completion + if let Some(completion_position) = self.completion_position { + if cursor_position != completion_position { + return None; + } + } else { + return None; + } let completion_text = trim_to_end_of_line_unless_leading_newline(completion_text); @@ -200,15 +245,20 @@ impl EditPredictionProvider for SupermavenCompletionProvider { if !completion_text.trim().is_empty() { let snapshot = buffer.read(cx).snapshot(); - let mut point = cursor_position.to_point(&snapshot); - point.column = snapshot.line_len(point.row); - let range = cursor_position..snapshot.anchor_after(point); + + // Calculate the range from cursor to end of line correctly + let cursor_point = cursor_position.to_point(&snapshot); + let end_of_line = snapshot.anchor_after(language::Point::new( + cursor_point.row, + snapshot.line_len(cursor_point.row), + )); + let delete_range = cursor_position..end_of_line; Some(completion_from_diff( snapshot, completion_text, cursor_position, - range, + delete_range, )) } else { None @@ -216,6 +266,17 @@ impl EditPredictionProvider for SupermavenCompletionProvider { } } +fn reset_completion_cache( + provider: &mut SupermavenCompletionProvider, + _cx: &mut Context, +) { + provider.pending_refresh = None; + provider.completion_id = None; + provider.completion_text = None; + provider.completion_position = None; + provider.buffer_id = None; +} + fn trim_to_end_of_line_unless_leading_newline(text: &str) -> &str { if has_leading_newline(text) { text From e9252a7a74b6af4002639405b32b6167da810fe6 
Mon Sep 17 00:00:00 2001 From: Dino Date: Fri, 29 Aug 2025 21:23:44 +0100 Subject: [PATCH 18/54] editor: Context menu aside scrolling (#35985) Add support for scrolling the contents rendered aside an `editor::code_context_menus::CodeContextMenu` by introducing the `scroll_aside` method. For now this method is only implemented for the `CodeContextMenu::Completions` variant, which will scroll the aside contents for an `editor::code_context_menus::CompletionsMenu` element, as a `ScrollHandle` is added to the aside content that is rendered. In order to be possible to trigger this via keybindings, a new editor action is introduced, `ContextMenuScrollAside`, which accepts a number of lines or pages to scroll the content by. Lastly, the default keymaps for both MacOS and Linux, as well as for Zed's vim mode, are updated to ensure that the following keybindings are supported when a completion menu is open and the completion item's documentation is rendered aside: - `ctrl-e` - `ctrl-y` - `ctrl-d` - `ctrl-u` ### Recording https://github.com/user-attachments/assets/02043763-87ea-46f5-9768-00e907127b69 --- Closes #13194 Release Notes: - Added support for scrolling the documentation panel shown alongside the completion menu in the editor with `ctrl-d`, `ctrl-u`, `ctrl-e` and `ctrl-y` --------- Co-authored-by: Conrad Irwin Co-authored-by: MrSubidubi --- assets/keymaps/vim.json | 11 ++- crates/editor/src/code_context_menus.rs | 43 ++++++++++- crates/editor/src/hover_links.rs | 18 +++-- crates/editor/src/hover_popover.rs | 2 +- crates/editor/src/scroll/scroll_amount.rs | 2 +- crates/vim/src/normal/scroll.rs | 6 +- crates/vim/src/test.rs | 87 ++++++++++++++++++++++- crates/vim/src/test/vim_test_context.rs | 4 ++ 8 files changed, 156 insertions(+), 17 deletions(-) diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 0a88baee027a3ae4d72409f5f142ceda3f4d9717..bd6eb3982cd9860b2635a3390d47484f1a6dbe55 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ 
-324,7 +324,7 @@ } }, { - "context": "vim_mode == insert", + "context": "vim_mode == insert && !menu", "bindings": { "ctrl-c": "vim::NormalBefore", "ctrl-[": "vim::NormalBefore", @@ -354,6 +354,15 @@ "ctrl-s": "editor::ShowSignatureHelp" } }, + { + "context": "showing_completions", + "bindings": { + "ctrl-d": "vim::ScrollDown", + "ctrl-u": "vim::ScrollUp", + "ctrl-e": "vim::LineDown", + "ctrl-y": "vim::LineUp" + } + }, { "context": "(vim_mode == normal || vim_mode == helix_normal) && !menu", "bindings": { diff --git a/crates/editor/src/code_context_menus.rs b/crates/editor/src/code_context_menus.rs index 96809d68777ca8d84623c308bb8b06eec493a5be..01e74284eff4cb140efe43202ef5dda9a002f94d 100644 --- a/crates/editor/src/code_context_menus.rs +++ b/crates/editor/src/code_context_menus.rs @@ -1,7 +1,9 @@ +use crate::scroll::ScrollAmount; use fuzzy::{StringMatch, StringMatchCandidate}; use gpui::{ - AnyElement, Entity, Focusable, FontWeight, ListSizingBehavior, ScrollStrategy, SharedString, - Size, StrikethroughStyle, StyledText, Task, UniformListScrollHandle, div, px, uniform_list, + AnyElement, Entity, Focusable, FontWeight, ListSizingBehavior, ScrollHandle, ScrollStrategy, + SharedString, Size, StrikethroughStyle, StyledText, Task, UniformListScrollHandle, div, px, + uniform_list, }; use itertools::Itertools; use language::CodeLabel; @@ -184,6 +186,20 @@ impl CodeContextMenu { CodeContextMenu::CodeActions(_) => false, } } + + pub fn scroll_aside( + &mut self, + scroll_amount: ScrollAmount, + window: &mut Window, + cx: &mut Context, + ) { + match self { + CodeContextMenu::Completions(completions_menu) => { + completions_menu.scroll_aside(scroll_amount, window, cx) + } + CodeContextMenu::CodeActions(_) => (), + } + } } pub enum ContextMenuOrigin { @@ -207,6 +223,9 @@ pub struct CompletionsMenu { filter_task: Task<()>, cancel_filter: Arc, scroll_handle: UniformListScrollHandle, + // The `ScrollHandle` used on the Markdown documentation rendered on the + // side of the 
completions menu. + pub scroll_handle_aside: ScrollHandle, resolve_completions: bool, show_completion_documentation: bool, last_rendered_range: Rc>>>, @@ -279,6 +298,7 @@ impl CompletionsMenu { filter_task: Task::ready(()), cancel_filter: Arc::new(AtomicBool::new(false)), scroll_handle: UniformListScrollHandle::new(), + scroll_handle_aside: ScrollHandle::new(), resolve_completions: true, last_rendered_range: RefCell::new(None).into(), markdown_cache: RefCell::new(VecDeque::new()).into(), @@ -348,6 +368,7 @@ impl CompletionsMenu { filter_task: Task::ready(()), cancel_filter: Arc::new(AtomicBool::new(false)), scroll_handle: UniformListScrollHandle::new(), + scroll_handle_aside: ScrollHandle::new(), resolve_completions: false, show_completion_documentation: false, last_rendered_range: RefCell::new(None).into(), @@ -911,6 +932,7 @@ impl CompletionsMenu { .max_w(max_size.width) .max_h(max_size.height) .overflow_y_scroll() + .track_scroll(&self.scroll_handle_aside) .occlude(), ) .into_any_element(), @@ -1175,6 +1197,23 @@ impl CompletionsMenu { } }); } + + pub fn scroll_aside( + &mut self, + amount: ScrollAmount, + window: &mut Window, + cx: &mut Context, + ) { + let mut offset = self.scroll_handle_aside.offset(); + + offset.y -= amount.pixels( + window.line_height(), + self.scroll_handle_aside.bounds().size.height - px(16.), + ) / 2.0; + + cx.notify(); + self.scroll_handle_aside.set_offset(offset); + } } #[derive(Clone)] diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs index 94f49f601a101cd8ca2556df9ec1568b5e7337fa..ba0b6f88683969aca3818a2795aa6b8454de3bb8 100644 --- a/crates/editor/src/hover_links.rs +++ b/crates/editor/src/hover_links.rs @@ -188,22 +188,26 @@ impl Editor { pub fn scroll_hover( &mut self, - amount: &ScrollAmount, + amount: ScrollAmount, window: &mut Window, cx: &mut Context, ) -> bool { let selection = self.selections.newest_anchor().head(); let snapshot = self.snapshot(window, cx); - let Some(popover) = 
self.hover_state.info_popovers.iter().find(|popover| { + if let Some(popover) = self.hover_state.info_popovers.iter().find(|popover| { popover .symbol_range .point_within_range(&TriggerPoint::Text(selection), &snapshot) - }) else { - return false; - }; - popover.scroll(amount, window, cx); - true + }) { + popover.scroll(amount, window, cx); + true + } else if let Some(context_menu) = self.context_menu.borrow_mut().as_mut() { + context_menu.scroll_aside(amount, window, cx); + true + } else { + false + } } fn cmd_click_reveal_task( diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index fab53457876866223be6b7d32f964cd1abd1dd28..6541f76a56e671fb414e28d83adc6b0459e288a8 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -896,7 +896,7 @@ impl InfoPopover { .into_any_element() } - pub fn scroll(&self, amount: &ScrollAmount, window: &mut Window, cx: &mut Context) { + pub fn scroll(&self, amount: ScrollAmount, window: &mut Window, cx: &mut Context) { let mut current = self.scroll_handle.offset(); current.y -= amount.pixels( window.line_height(), diff --git a/crates/editor/src/scroll/scroll_amount.rs b/crates/editor/src/scroll/scroll_amount.rs index 5992c9023c1f9d6eb7e7eb201099c6eef17a33d8..43f1aa128548597ee07cbb297ab5aaf0e8f79b6e 100644 --- a/crates/editor/src/scroll/scroll_amount.rs +++ b/crates/editor/src/scroll/scroll_amount.rs @@ -15,7 +15,7 @@ impl ScrollDirection { } } -#[derive(Debug, Clone, PartialEq, Deserialize)] +#[derive(Debug, Clone, Copy, PartialEq, Deserialize)] pub enum ScrollAmount { // Scroll N lines (positive is towards the end of the document) Line(f32), diff --git a/crates/vim/src/normal/scroll.rs b/crates/vim/src/normal/scroll.rs index 9eb8367f57ade6a7ccf090f0e16d87e73f4a9f25..eeb98692bc30c5c8c39c0be23ba17b3276b708df 100644 --- a/crates/vim/src/normal/scroll.rs +++ b/crates/vim/src/normal/scroll.rs @@ -98,7 +98,7 @@ impl Vim { Vim::take_forced_motion(cx); 
self.exit_temporary_normal(window, cx); self.update_editor(cx, |_, editor, cx| { - scroll_editor(editor, move_cursor, &amount, window, cx) + scroll_editor(editor, move_cursor, amount, window, cx) }); } } @@ -106,7 +106,7 @@ impl Vim { fn scroll_editor( editor: &mut Editor, preserve_cursor_position: bool, - amount: &ScrollAmount, + amount: ScrollAmount, window: &mut Window, cx: &mut Context, ) { @@ -126,7 +126,7 @@ fn scroll_editor( ScrollAmount::Line(amount.lines(visible_line_count) - 1.0) } } - _ => amount.clone(), + _ => amount, }; editor.scroll_screen(&amount, window, cx); diff --git a/crates/vim/src/test.rs b/crates/vim/src/test.rs index ce04b621cb91c7b6b7da57bd1e1b74e9c0e00bbc..84376719d141fa4862a3e7a1b0f6116dd809bfe5 100644 --- a/crates/vim/src/test.rs +++ b/crates/vim/src/test.rs @@ -8,13 +8,15 @@ use collections::HashMap; use command_palette::CommandPalette; use editor::{ AnchorRangeExt, DisplayPoint, Editor, EditorMode, MultiBuffer, actions::DeleteLine, - display_map::DisplayRow, test::editor_test_context::EditorTestContext, + code_context_menus::CodeContextMenu, display_map::DisplayRow, + test::editor_test_context::EditorTestContext, }; use futures::StreamExt; -use gpui::{KeyBinding, Modifiers, MouseButton, TestAppContext}; +use gpui::{KeyBinding, Modifiers, MouseButton, TestAppContext, px}; use language::Point; pub use neovim_backed_test_context::*; use settings::SettingsStore; +use ui::Pixels; use util::test::marked_text_ranges; pub use vim_test_context::*; @@ -971,6 +973,87 @@ async fn test_comma_w(cx: &mut gpui::TestAppContext) { .assert_eq("hellˇo hello\nhello hello"); } +#[gpui::test] +async fn test_completion_menu_scroll_aside(cx: &mut TestAppContext) { + let mut cx = VimTestContext::new_typescript(cx).await; + + cx.lsp + .set_request_handler::(move |_, _| async move { + Ok(Some(lsp::CompletionResponse::Array(vec![ + lsp::CompletionItem { + label: "Test Item".to_string(), + documentation: Some(lsp::Documentation::String( + "This is some very long 
documentation content that will be displayed in the aside panel for scrolling.\n".repeat(50) + )), + ..Default::default() + }, + ]))) + }); + + cx.set_state("variableˇ", Mode::Insert); + cx.simulate_keystroke("."); + cx.executor().run_until_parked(); + + let mut initial_offset: Pixels = px(0.0); + + cx.update_editor(|editor, _, _| { + let binding = editor.context_menu().borrow(); + let Some(CodeContextMenu::Completions(menu)) = binding.as_ref() else { + panic!("Should have completions menu open"); + }; + + initial_offset = menu.scroll_handle_aside.offset().y; + }); + + // The `ctrl-e` shortcut should scroll the completion menu's aside content + // down, so the updated offset should be lower than the initial offset. + cx.simulate_keystroke("ctrl-e"); + cx.update_editor(|editor, _, _| { + let binding = editor.context_menu().borrow(); + let Some(CodeContextMenu::Completions(menu)) = binding.as_ref() else { + panic!("Should have completions menu open"); + }; + + assert!(menu.scroll_handle_aside.offset().y < initial_offset); + }); + + // The `ctrl-y` shortcut should do the inverse scrolling as `ctrl-e`, so the + // offset should now be the same as the initial offset. + cx.simulate_keystroke("ctrl-y"); + cx.update_editor(|editor, _, _| { + let binding = editor.context_menu().borrow(); + let Some(CodeContextMenu::Completions(menu)) = binding.as_ref() else { + panic!("Should have completions menu open"); + }; + + assert_eq!(menu.scroll_handle_aside.offset().y, initial_offset); + }); + + // The `ctrl-d` shortcut should scroll the completion menu's aside content + // down, so the updated offset should be lower than the initial offset. 
+ cx.simulate_keystroke("ctrl-d"); + cx.update_editor(|editor, _, _| { + let binding = editor.context_menu().borrow(); + let Some(CodeContextMenu::Completions(menu)) = binding.as_ref() else { + panic!("Should have completions menu open"); + }; + + assert!(menu.scroll_handle_aside.offset().y < initial_offset); + }); + + // The `ctrl-u` shortcut should do the inverse scrolling as `ctrl-d`, so the + // offset should now be the same as the initial offset. + cx.simulate_keystroke("ctrl-u"); + cx.update_editor(|editor, _, _| { + let binding = editor.context_menu().borrow(); + let Some(CodeContextMenu::Completions(menu)) = binding.as_ref() else { + panic!("Should have completions menu open"); + }; + + assert_eq!(menu.scroll_handle_aside.offset().y, initial_offset); + }); +} + #[gpui::test] async fn test_rename(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new_typescript(cx).await; diff --git a/crates/vim/src/test/vim_test_context.rs b/crates/vim/src/test/vim_test_context.rs index e7ac692df14cb482656d930efa2313e85c27a4bc..ef9588acae181bad2b079d7c89458458bb851a64 100644 --- a/crates/vim/src/test/vim_test_context.rs +++ b/crates/vim/src/test/vim_test_context.rs @@ -49,6 +49,10 @@ impl VimTestContext { Self::new_with_lsp( EditorLspTestContext::new_typescript( lsp::ServerCapabilities { + completion_provider: Some(lsp::CompletionOptions { + trigger_characters: Some(vec![".".to_string()]), + ..Default::default() + }), rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions { prepare_provider: Some(true), work_done_progress_options: Default::default(), From f2c3f3b168bab7c808e3ce2c3392b0c692919f81 Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Fri, 29 Aug 2025 16:56:10 -0400 Subject: [PATCH 19/54] settings ui: Start work on creating the initial structure (#36904) ## Goal This PR creates the initial settings ui structure with the primary goal of making a settings UI that is
available through the UI - Correct: Easy to understand the underlying JSON file from the UI - Intuitive - Easy to implement per setting so that UI is not a hindrance to future settings changes ### Structure The overall structure is settings layer -> data layer -> ui layer. The settings layer is the pre-existing settings definitions, that implement the `Settings` trait. The data layer is constructed from settings primarily through the `SettingsUi` trait, and it's associated derive macro. The data layer tracks the grouping of the settings, the json path of the settings, and a data representation of how to render the controls for the setting in the UI, that is either a marker value for the component to use (avoiding a dependency on the `ui` crate) or a custom render function. Abstracting the data layer from the ui layer allows crates depending on `settings` to implement their own UI without having to add additional UI dependencies, thus avoiding circular dependencies. In cases where custom UI is desired, and a creating a custom render function in the same crate is infeasible due to circular dependencies, the current solution is to implement a marker for the component in the `settings` crate, and then handle the rendering of that component in `settings_ui`. ### Foundation This PR creates a macro and a trait both called `SettingsUi`. The `SettingsUi` trait is added as a new trait bound on the `Settings` trait, this allows the type system to guarantee that all settings implement UI functionality. The macro is used to derived the trait for most types, and can be modified through attributes for unique cases as well. A derive-macro is used to generate the settings UI trait impl, allowing it the UI generation to be generated from the static information in our code base (`default.json`, Struct/Enum names, field names, `serde` attributes, etc). This allows the UI to be auto-generated for the most part, and ensures consistency across the UI. 
#### Immediate Follow ups - Add a new `SettingsPath` trait that will be a trait bound on `SettingsUi` and `Settings` - This trait will replace the `Settings::key` value to enable `SettingsUi` to infer the json path of it's derived type - Figure out how to render `Option where T: SettingsUi` correctly - Handle `serde` attributes in the `SettingsUi` proc macro to correctly get json path from a type's field and identity Release Notes: - N/A --------- Co-authored-by: Ben Kunkle --- Cargo.lock | 75 ++- Cargo.toml | 10 +- assets/settings/default.json | 3 + crates/agent_servers/src/settings.rs | 4 +- crates/agent_settings/src/agent_settings.rs | 4 +- crates/agent_ui/src/slash_command_settings.rs | 4 +- crates/audio/src/audio_settings.rs | 4 +- crates/auto_update/src/auto_update.rs | 3 +- crates/call/src/call_settings.rs | 4 +- crates/client/src/client.rs | 8 +- crates/collab_ui/src/panel_settings.rs | 10 +- crates/dap/src/debugger_settings.rs | 8 +- crates/editor/src/editor_settings.rs | 4 +- .../extension_host/src/extension_settings.rs | 4 +- .../file_finder/src/file_finder_settings.rs | 4 +- crates/git_hosting_providers/src/settings.rs | 4 +- crates/git_ui/src/git_panel_settings.rs | 4 +- crates/go_to_line/src/cursor_position.rs | 4 +- crates/gpui_macros/src/derive_action.rs | 7 + .../image_viewer/src/image_viewer_settings.rs | 4 +- crates/journal/src/journal.rs | 4 +- crates/keymap_editor/Cargo.toml | 53 ++ crates/keymap_editor/LICENSE-GPL | 1 + .../src/keymap_editor.rs} | 6 +- .../src/ui_components/keystroke_input.rs | 0 .../src/ui_components/mod.rs | 0 .../src/ui_components/table.rs | 0 crates/language/src/language_settings.rs | 4 +- crates/language_models/src/settings.rs | 4 +- .../src/outline_panel_settings.rs | 4 +- crates/project/src/project.rs | 2 +- crates/project/src/project_settings.rs | 4 +- .../src/project_panel_settings.rs | 4 +- crates/recent_projects/src/ssh_connections.rs | 4 +- crates/repl/src/jupyter_settings.rs | 4 +- crates/settings/Cargo.toml | 1 + 
crates/settings/src/base_keymap_setting.rs | 8 +- crates/settings/src/settings.rs | 4 + crates/settings/src/settings_json.rs | 12 +- crates/settings/src/settings_store.rs | 144 +++-- crates/settings/src/settings_ui.rs | 118 +++++ crates/settings/src/vscode_import.rs | 4 +- crates/settings_ui/Cargo.toml | 41 +- crates/settings_ui/src/settings_ui.rs | 500 +++++++++++++++++- crates/settings_ui_macros/Cargo.toml | 22 + crates/settings_ui_macros/LICENSE-GPL | 1 + .../src/settings_ui_macros.rs | 201 +++++++ crates/terminal/src/terminal_settings.rs | 4 +- crates/theme/src/settings.rs | 4 +- crates/title_bar/Cargo.toml | 2 +- crates/title_bar/src/title_bar.rs | 6 +- crates/title_bar/src/title_bar_settings.rs | 5 +- crates/vim/src/vim.rs | 4 +- .../vim_mode_setting/src/vim_mode_setting.rs | 4 +- crates/workspace/src/item.rs | 6 +- crates/workspace/src/workspace_settings.rs | 6 +- crates/worktree/src/worktree_settings.rs | 4 +- crates/zed/Cargo.toml | 1 + crates/zed/src/main.rs | 1 + crates/zed/src/zed.rs | 2 +- crates/zed/src/zed/app_menus.rs | 3 +- crates/zlog_settings/src/zlog_settings.rs | 4 +- 62 files changed, 1149 insertions(+), 229 deletions(-) create mode 100644 crates/keymap_editor/Cargo.toml create mode 120000 crates/keymap_editor/LICENSE-GPL rename crates/{settings_ui/src/keybindings.rs => keymap_editor/src/keymap_editor.rs} (99%) rename crates/{settings_ui => keymap_editor}/src/ui_components/keystroke_input.rs (100%) rename crates/{settings_ui => keymap_editor}/src/ui_components/mod.rs (100%) rename crates/{settings_ui => keymap_editor}/src/ui_components/table.rs (100%) create mode 100644 crates/settings/src/settings_ui.rs create mode 100644 crates/settings_ui_macros/Cargo.toml create mode 120000 crates/settings_ui_macros/LICENSE-GPL create mode 100644 crates/settings_ui_macros/src/settings_ui_macros.rs diff --git a/Cargo.lock b/Cargo.lock index e201b4af804b0be95f100c34f93652b6ecf6f8e6..4c68280de25b878187b3a5627362f6373808734b 100644 --- a/Cargo.lock +++ 
b/Cargo.lock @@ -8951,6 +8951,44 @@ dependencies = [ "uuid", ] +[[package]] +name = "keymap_editor" +version = "0.1.0" +dependencies = [ + "anyhow", + "collections", + "command_palette", + "component", + "db", + "editor", + "fs", + "fuzzy", + "gpui", + "itertools 0.14.0", + "language", + "log", + "menu", + "notifications", + "paths", + "project", + "search", + "serde", + "serde_json", + "settings", + "telemetry", + "tempfile", + "theme", + "tree-sitter-json", + "tree-sitter-rust", + "ui", + "ui_input", + "util", + "vim", + "workspace", + "workspace-hack", + "zed_actions", +] + [[package]] name = "khronos-egl" version = "6.0.0" @@ -14856,6 +14894,7 @@ dependencies = [ "serde_derive", "serde_json", "serde_json_lenient", + "settings_ui_macros", "smallvec", "tree-sitter", "tree-sitter-json", @@ -14891,39 +14930,28 @@ name = "settings_ui" version = "0.1.0" dependencies = [ "anyhow", - "collections", - "command_palette", "command_palette_hooks", - "component", - "db", "editor", "feature_flags", - "fs", - "fuzzy", "gpui", - "itertools 0.14.0", - "language", - "log", - "menu", - "notifications", - "paths", - "project", - "search", "serde", "serde_json", "settings", - "telemetry", - "tempfile", + "smallvec", "theme", - "tree-sitter-json", - "tree-sitter-rust", "ui", - "ui_input", - "util", - "vim", "workspace", "workspace-hack", - "zed_actions", +] + +[[package]] +name = "settings_ui_macros" +version = "0.1.0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.101", + "workspace-hack", ] [[package]] @@ -16739,6 +16767,7 @@ dependencies = [ "db", "gpui", "http_client", + "keymap_editor", "notifications", "pretty_assertions", "project", @@ -16747,7 +16776,6 @@ dependencies = [ "schemars", "serde", "settings", - "settings_ui", "smallvec", "story", "telemetry", @@ -20458,6 +20486,7 @@ dependencies = [ "itertools 0.14.0", "jj_ui", "journal", + "keymap_editor", "language", "language_extension", "language_model", diff --git a/Cargo.toml b/Cargo.toml index 
d346043c0ef64b3cce0827c2553c5b3c254d66f7..b64113311adb2662562cc4ae488054f54d569c3e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -54,6 +54,8 @@ members = [ "crates/deepseek", "crates/diagnostics", "crates/docs_preprocessor", + "crates/edit_prediction", + "crates/edit_prediction_button", "crates/editor", "crates/eval", "crates/explorer_command_injector", @@ -82,13 +84,12 @@ members = [ "crates/http_client_tls", "crates/icons", "crates/image_viewer", - "crates/edit_prediction", - "crates/edit_prediction_button", "crates/inspector_ui", "crates/install_cli", "crates/jj", "crates/jj_ui", "crates/journal", + "crates/keymap_editor", "crates/language", "crates/language_extension", "crates/language_model", @@ -146,6 +147,7 @@ members = [ "crates/settings", "crates/settings_profile_selector", "crates/settings_ui", + "crates/settings_ui_macros", "crates/snippet", "crates/snippet_provider", "crates/snippets_ui", @@ -156,9 +158,9 @@ members = [ "crates/streaming_diff", "crates/sum_tree", "crates/supermaven", - "crates/system_specs", "crates/supermaven_api", "crates/svg_preview", + "crates/system_specs", "crates/tab_switcher", "crates/task", "crates/tasks_ui", @@ -314,6 +316,7 @@ install_cli = { path = "crates/install_cli" } jj = { path = "crates/jj" } jj_ui = { path = "crates/jj_ui" } journal = { path = "crates/journal" } +keymap_editor = { path = "crates/keymap_editor" } language = { path = "crates/language" } language_extension = { path = "crates/language_extension" } language_model = { path = "crates/language_model" } @@ -373,6 +376,7 @@ semantic_version = { path = "crates/semantic_version" } session = { path = "crates/session" } settings = { path = "crates/settings" } settings_ui = { path = "crates/settings_ui" } +settings_ui_macros = { path = "crates/settings_ui_macros" } snippet = { path = "crates/snippet" } snippet_provider = { path = "crates/snippet_provider" } snippets_ui = { path = "crates/snippets_ui" } diff --git a/assets/settings/default.json 
b/assets/settings/default.json index 572193be4eecbeb63a19eab1811bff126638162b..b15eb6e5ce8de85bb088108f065a31494b9087a1 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -1922,7 +1922,10 @@ "debugger": { "stepping_granularity": "line", "save_breakpoints": true, + "timeout": 2000, "dock": "bottom", + "log_dap_communications": true, + "format_dap_log_messages": true, "button": true }, // Configures any number of settings profiles that are temporarily applied on diff --git a/crates/agent_servers/src/settings.rs b/crates/agent_servers/src/settings.rs index 81f80a7d7d9581b8c1862ae3393c4a5d5e6706b6..693d7d7b7014b3abbecfbe592bac67210b336872 100644 --- a/crates/agent_servers/src/settings.rs +++ b/crates/agent_servers/src/settings.rs @@ -6,13 +6,13 @@ use collections::HashMap; use gpui::{App, SharedString}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources}; +use settings::{Settings, SettingsSources, SettingsUi}; pub fn init(cx: &mut App) { AllAgentServersSettings::register(cx); } -#[derive(Default, Deserialize, Serialize, Clone, JsonSchema, Debug)] +#[derive(Default, Deserialize, Serialize, Clone, JsonSchema, Debug, SettingsUi)] pub struct AllAgentServersSettings { pub gemini: Option, pub claude: Option, diff --git a/crates/agent_settings/src/agent_settings.rs b/crates/agent_settings/src/agent_settings.rs index ed1ed2b89879c18eceaab22843390a766e4f6c77..3808cc510f7941107f6e4ab90c9a5f8a2c3d920a 100644 --- a/crates/agent_settings/src/agent_settings.rs +++ b/crates/agent_settings/src/agent_settings.rs @@ -8,7 +8,7 @@ use gpui::{App, Pixels, SharedString}; use language_model::LanguageModel; use schemars::{JsonSchema, json_schema}; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources}; +use settings::{Settings, SettingsSources, SettingsUi}; use std::borrow::Cow; pub use crate::agent_profile::*; @@ -48,7 +48,7 @@ pub enum NotifyWhenAgentWaiting { Never, } 
-#[derive(Default, Clone, Debug)] +#[derive(Default, Clone, Debug, SettingsUi)] pub struct AgentSettings { pub enabled: bool, pub button: bool, diff --git a/crates/agent_ui/src/slash_command_settings.rs b/crates/agent_ui/src/slash_command_settings.rs index 73e5622aa921ccf03a3813717446e830c21079b8..c54a10ed49a77d395c4968e551b1cd30ad1c6e07 100644 --- a/crates/agent_ui/src/slash_command_settings.rs +++ b/crates/agent_ui/src/slash_command_settings.rs @@ -2,10 +2,10 @@ use anyhow::Result; use gpui::App; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources}; +use settings::{Settings, SettingsSources, SettingsUi}; /// Settings for slash commands. -#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)] +#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema, SettingsUi)] pub struct SlashCommandSettings { /// Settings for the `/cargo-workspace` slash command. #[serde(default)] diff --git a/crates/audio/src/audio_settings.rs b/crates/audio/src/audio_settings.rs index 807179881c7c3b27aad2e3142a84c730951eb709..e42918825cd3a25bb18d6f0b357801949520833f 100644 --- a/crates/audio/src/audio_settings.rs +++ b/crates/audio/src/audio_settings.rs @@ -2,9 +2,9 @@ use anyhow::Result; use gpui::App; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources}; +use settings::{Settings, SettingsSources, SettingsUi}; -#[derive(Deserialize, Debug)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi)] pub struct AudioSettings { /// Opt into the new audio system. 
#[serde(rename = "experimental.rodio_audio", default)] diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index 2150873cadd0a84b4a2894ebbe373d9bd0e007f0..71dcf25aeea9d8ebd4feb01db9161dc177fcdd26 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -10,7 +10,7 @@ use paths::remote_servers_dir; use release_channel::{AppCommitSha, ReleaseChannel}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsStore}; +use settings::{Settings, SettingsSources, SettingsStore, SettingsUi}; use smol::{fs, io::AsyncReadExt}; use smol::{fs::File, process::Command}; use std::{ @@ -113,6 +113,7 @@ impl Drop for MacOsUnmounter { } } +#[derive(SettingsUi)] struct AutoUpdateSetting(bool); /// Whether or not to automatically check for updates. diff --git a/crates/call/src/call_settings.rs b/crates/call/src/call_settings.rs index c8f51e0c1a2019dd2c266210e469989946ed8a35..64d11d0df64eedbbc29f06b8205f0318d999ea30 100644 --- a/crates/call/src/call_settings.rs +++ b/crates/call/src/call_settings.rs @@ -2,9 +2,9 @@ use anyhow::Result; use gpui::App; use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources}; +use settings::{Settings, SettingsSources, SettingsUi}; -#[derive(Deserialize, Debug)] +#[derive(Deserialize, Debug, SettingsUi)] pub struct CallSettings { pub mute_on_join: bool, pub share_on_join: bool, diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 1e735b0025f1e8a15809b096c5a462361d4ed8f3..c5bb1af0d7605cfcfc28d86bc389189d653e28ae 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -31,7 +31,7 @@ use release_channel::{AppVersion, ReleaseChannel}; use rpc::proto::{AnyTypedEnvelope, EnvelopedMessage, PeerId, RequestMessage}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources}; +use 
settings::{Settings, SettingsSources, SettingsUi}; use std::{ any::TypeId, convert::TryFrom, @@ -101,7 +101,7 @@ pub struct ClientSettingsContent { server_url: Option, } -#[derive(Deserialize)] +#[derive(Deserialize, SettingsUi)] pub struct ClientSettings { pub server_url: String, } @@ -127,7 +127,7 @@ pub struct ProxySettingsContent { proxy: Option, } -#[derive(Deserialize, Default)] +#[derive(Deserialize, Default, SettingsUi)] pub struct ProxySettings { pub proxy: Option, } @@ -520,7 +520,7 @@ impl Drop for PendingEntitySubscription { } } -#[derive(Copy, Clone, Deserialize, Debug)] +#[derive(Copy, Clone, Deserialize, Debug, SettingsUi)] pub struct TelemetrySettings { pub diagnostics: bool, pub metrics: bool, diff --git a/crates/collab_ui/src/panel_settings.rs b/crates/collab_ui/src/panel_settings.rs index 652d9eb67f6ce1f0ab583e20e4feab05cfb743e3..4e5c8ad8f005d00a8802ab0a1f79ff7fbb3d0861 100644 --- a/crates/collab_ui/src/panel_settings.rs +++ b/crates/collab_ui/src/panel_settings.rs @@ -1,10 +1,10 @@ use gpui::Pixels; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources}; +use settings::{Settings, SettingsSources, SettingsUi}; use workspace::dock::DockPosition; -#[derive(Deserialize, Debug)] +#[derive(Deserialize, Debug, SettingsUi)] pub struct CollaborationPanelSettings { pub button: bool, pub dock: DockPosition, @@ -20,7 +20,7 @@ pub enum ChatPanelButton { WhenInCall, } -#[derive(Deserialize, Debug)] +#[derive(Deserialize, Debug, SettingsUi)] pub struct ChatPanelSettings { pub button: ChatPanelButton, pub dock: DockPosition, @@ -43,7 +43,7 @@ pub struct ChatPanelSettingsContent { pub default_width: Option, } -#[derive(Deserialize, Debug)] +#[derive(Deserialize, Debug, SettingsUi)] pub struct NotificationPanelSettings { pub button: bool, pub dock: DockPosition, @@ -66,7 +66,7 @@ pub struct PanelSettingsContent { pub default_width: Option, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] 
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi)] pub struct MessageEditorSettings { /// Whether to automatically replace emoji shortcodes with emoji characters. /// For example: typing `:wave:` gets replaced with `👋`. diff --git a/crates/dap/src/debugger_settings.rs b/crates/dap/src/debugger_settings.rs index e1176633e5403116c2789161d654912337150e9a..6843f19e3811967084cc61a3874ec86451ab6faf 100644 --- a/crates/dap/src/debugger_settings.rs +++ b/crates/dap/src/debugger_settings.rs @@ -2,9 +2,9 @@ use dap_types::SteppingGranularity; use gpui::{App, Global}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources}; +use settings::{Settings, SettingsSources, SettingsUi}; -#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq, SettingsUi)] #[serde(rename_all = "snake_case")] pub enum DebugPanelDockPosition { Left, @@ -12,12 +12,14 @@ pub enum DebugPanelDockPosition { Right, } -#[derive(Serialize, Deserialize, JsonSchema, Clone, Copy)] +#[derive(Serialize, Deserialize, JsonSchema, Clone, Copy, SettingsUi)] #[serde(default)] +#[settings_ui(group = "Debugger", path = "debugger")] pub struct DebuggerSettings { /// Determines the stepping granularity. /// /// Default: line + #[settings_ui(skip)] pub stepping_granularity: SteppingGranularity, /// Whether the breakpoints should be reused across Zed sessions. 
/// diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index 55c040428d7e73d9e6e9bf6cc66cc20d301038f2..c2baa9de024b1988f9acb77a529936f947103f56 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -6,12 +6,12 @@ use language::CursorShape; use project::project_settings::DiagnosticSeverity; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, VsCodeSettings}; +use settings::{Settings, SettingsSources, SettingsUi, VsCodeSettings}; use util::serde::default_true; /// Imports from the VSCode settings at /// https://code.visualstudio.com/docs/reference/default-settings -#[derive(Deserialize, Clone)] +#[derive(Deserialize, Clone, SettingsUi)] pub struct EditorSettings { pub cursor_blink: bool, pub cursor_shape: Option, diff --git a/crates/extension_host/src/extension_settings.rs b/crates/extension_host/src/extension_settings.rs index cfa67990b09de9fda5bf0e26229a9b1b1410de46..6bd760795cec6d1c4208770f1355e8ac7a34eb95 100644 --- a/crates/extension_host/src/extension_settings.rs +++ b/crates/extension_host/src/extension_settings.rs @@ -3,10 +3,10 @@ use collections::HashMap; use gpui::App; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources}; +use settings::{Settings, SettingsSources, SettingsUi}; use std::sync::Arc; -#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)] +#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema, SettingsUi)] pub struct ExtensionSettings { /// The extensions that should be automatically installed by Zed. 
/// diff --git a/crates/file_finder/src/file_finder_settings.rs b/crates/file_finder/src/file_finder_settings.rs index 350e1de3b36c9073d137993ce4fbc50aa43bb36e..20057417a2ddbce7acd7fd5a8e09e54aab779638 100644 --- a/crates/file_finder/src/file_finder_settings.rs +++ b/crates/file_finder/src/file_finder_settings.rs @@ -1,9 +1,9 @@ use anyhow::Result; use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources}; +use settings::{Settings, SettingsSources, SettingsUi}; -#[derive(Deserialize, Debug, Clone, Copy, PartialEq)] +#[derive(Deserialize, Debug, Clone, Copy, PartialEq, SettingsUi)] pub struct FileFinderSettings { pub file_icons: bool, pub modal_max_width: Option, diff --git a/crates/git_hosting_providers/src/settings.rs b/crates/git_hosting_providers/src/settings.rs index 91179fea392bc38cfc2a513bfc391dd3eec6137d..34e3805a39ea8a13a6a2f79552a6a917c4597692 100644 --- a/crates/git_hosting_providers/src/settings.rs +++ b/crates/git_hosting_providers/src/settings.rs @@ -5,7 +5,7 @@ use git::GitHostingProviderRegistry; use gpui::App; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsStore}; +use settings::{Settings, SettingsStore, SettingsUi}; use url::Url; use util::ResultExt as _; @@ -78,7 +78,7 @@ pub struct GitHostingProviderConfig { pub name: String, } -#[derive(Default, Debug, Clone, Serialize, Deserialize, JsonSchema)] +#[derive(Default, Debug, Clone, Serialize, Deserialize, JsonSchema, SettingsUi)] pub struct GitHostingProviderSettings { /// The list of custom Git hosting providers. 
#[serde(default)] diff --git a/crates/git_ui/src/git_panel_settings.rs b/crates/git_ui/src/git_panel_settings.rs index b6891c7d256794b5b457669a20b17e6e41e4fd23..576949220405e408df1b23d189e661405c4c39e4 100644 --- a/crates/git_ui/src/git_panel_settings.rs +++ b/crates/git_ui/src/git_panel_settings.rs @@ -2,7 +2,7 @@ use editor::ShowScrollbar; use gpui::Pixels; use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources}; +use settings::{Settings, SettingsSources, SettingsUi}; use workspace::dock::DockPosition; #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] @@ -77,7 +77,7 @@ pub struct GitPanelSettingsContent { pub collapse_untracked_diff: Option, } -#[derive(Deserialize, Debug, Clone, PartialEq)] +#[derive(Deserialize, Debug, Clone, PartialEq, SettingsUi)] pub struct GitPanelSettings { pub button: bool, pub dock: DockPosition, diff --git a/crates/go_to_line/src/cursor_position.rs b/crates/go_to_line/src/cursor_position.rs index e60a3651aae3f062b16fdfa7aa01a28e5c845e85..345af8a867c6ff6c1790450d2b28cd275c04ebbb 100644 --- a/crates/go_to_line/src/cursor_position.rs +++ b/crates/go_to_line/src/cursor_position.rs @@ -2,7 +2,7 @@ use editor::{Editor, EditorSettings, MultiBufferSnapshot}; use gpui::{App, Entity, FocusHandle, Focusable, Subscription, Task, WeakEntity}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources}; +use settings::{Settings, SettingsSources, SettingsUi}; use std::{fmt::Write, num::NonZeroU32, time::Duration}; use text::{Point, Selection}; use ui::{ @@ -293,7 +293,7 @@ impl StatusItemView for CursorPosition { } } -#[derive(Clone, Copy, Default, PartialEq, JsonSchema, Deserialize, Serialize)] +#[derive(Clone, Copy, Default, PartialEq, JsonSchema, Deserialize, Serialize, SettingsUi)] #[serde(rename_all = "snake_case")] pub(crate) enum LineIndicatorFormat { Short, diff --git a/crates/gpui_macros/src/derive_action.rs 
b/crates/gpui_macros/src/derive_action.rs index 9c7f97371d86eecc29dc16902ba9e392d53b8660..4e6c6277e452189657b4725b4027780a54cfed1d 100644 --- a/crates/gpui_macros/src/derive_action.rs +++ b/crates/gpui_macros/src/derive_action.rs @@ -16,6 +16,13 @@ pub(crate) fn derive_action(input: TokenStream) -> TokenStream { let mut deprecated = None; let mut doc_str: Option = None; + /* + * Sketch of the input being parsed here: + * #[action()] + * struct Foo { + * bar: bool // NOTE(review): is a field like this treated as an action attribute? TODO confirm + * } + */ for attr in &input.attrs { if attr.path().is_ident("action") { attr.parse_nested_meta(|meta| { diff --git a/crates/image_viewer/src/image_viewer_settings.rs b/crates/image_viewer/src/image_viewer_settings.rs index 1dcf99c0afcb3f69f48e2e1a82351852a4bf1c22..4949b266b4e03c7089d4bc25e2a223a0ce64a081 100644 --- a/crates/image_viewer/src/image_viewer_settings.rs +++ b/crates/image_viewer/src/image_viewer_settings.rs @@ -1,10 +1,10 @@ use gpui::App; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources}; +use settings::{Settings, SettingsSources, SettingsUi}; /// The settings for the image viewer. -#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, Default)] +#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, Default, SettingsUi)] pub struct ImageViewerSettings { /// The unit to use for displaying image file sizes.
/// diff --git a/crates/journal/src/journal.rs b/crates/journal/src/journal.rs index c09ab6f764893589945f2c3cc00d71df84b8f77a..ffa24571c88a0f0e06252565261b1a6d285d098c 100644 --- a/crates/journal/src/journal.rs +++ b/crates/journal/src/journal.rs @@ -5,7 +5,7 @@ use editor::{Editor, SelectionEffects}; use gpui::{App, AppContext as _, Context, Window, actions}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources}; +use settings::{Settings, SettingsSources, SettingsUi}; use std::{ fs::OpenOptions, path::{Path, PathBuf}, @@ -22,7 +22,7 @@ actions!( ); /// Settings specific to journaling -#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] +#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, SettingsUi)] pub struct JournalSettings { /// The path of the directory where journal entries are stored. /// diff --git a/crates/keymap_editor/Cargo.toml b/crates/keymap_editor/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..ae3af21239f22a8d01ec9e792a3ab0daed6080bb --- /dev/null +++ b/crates/keymap_editor/Cargo.toml @@ -0,0 +1,53 @@ +[package] +name = "keymap_editor" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/keymap_editor.rs" + +[dependencies] +anyhow.workspace = true +collections.workspace = true +command_palette.workspace = true +component.workspace = true +db.workspace = true +editor.workspace = true +fs.workspace = true +fuzzy.workspace = true +gpui.workspace = true +itertools.workspace = true +language.workspace = true +log.workspace = true +menu.workspace = true +notifications.workspace = true +paths.workspace = true +project.workspace = true +search.workspace = true +serde.workspace = true +serde_json.workspace = true +settings.workspace = true +telemetry.workspace = true +tempfile.workspace = true +theme.workspace = true +tree-sitter-json.workspace = true 
+tree-sitter-rust.workspace = true +ui.workspace = true +ui_input.workspace = true +util.workspace = true +vim.workspace = true +workspace-hack.workspace = true +workspace.workspace = true +zed_actions.workspace = true + +[dev-dependencies] +db = { workspace = true, features = ["test-support"] } +fs = { workspace = true, features = ["test-support"] } +gpui = { workspace = true, features = ["test-support"] } +project = { workspace = true, features = ["test-support"] } +workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/keymap_editor/LICENSE-GPL b/crates/keymap_editor/LICENSE-GPL new file mode 120000 index 0000000000000000000000000000000000000000..89e542f750cd3860a0598eff0dc34b56d7336dc4 --- /dev/null +++ b/crates/keymap_editor/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/settings_ui/src/keybindings.rs b/crates/keymap_editor/src/keymap_editor.rs similarity index 99% rename from crates/settings_ui/src/keybindings.rs rename to crates/keymap_editor/src/keymap_editor.rs index 161e1e768ddd8a111e001198d8aad352169d1cef..12149061124d2b3144a32b7f54a65ce5af70d492 100644 --- a/crates/settings_ui/src/keybindings.rs +++ b/crates/keymap_editor/src/keymap_editor.rs @@ -5,6 +5,8 @@ use std::{ time::Duration, }; +mod ui_components; + use anyhow::{Context as _, anyhow}; use collections::{HashMap, HashSet}; use editor::{CompletionProvider, Editor, EditorEvent}; @@ -34,8 +36,10 @@ use workspace::{ register_serializable_item, }; +pub use ui_components::*; + use crate::{ - keybindings::persistence::KEYBINDING_EDITORS, + persistence::KEYBINDING_EDITORS, ui_components::{ keystroke_input::{ClearKeystrokes, KeystrokeInput, StartRecording, StopRecording}, table::{ColumnWidths, ResizeBehavior, Table, TableInteractionState}, diff --git a/crates/settings_ui/src/ui_components/keystroke_input.rs b/crates/keymap_editor/src/ui_components/keystroke_input.rs similarity index 100% rename from
crates/settings_ui/src/ui_components/keystroke_input.rs rename to crates/keymap_editor/src/ui_components/keystroke_input.rs diff --git a/crates/settings_ui/src/ui_components/mod.rs b/crates/keymap_editor/src/ui_components/mod.rs similarity index 100% rename from crates/settings_ui/src/ui_components/mod.rs rename to crates/keymap_editor/src/ui_components/mod.rs diff --git a/crates/settings_ui/src/ui_components/table.rs b/crates/keymap_editor/src/ui_components/table.rs similarity index 100% rename from crates/settings_ui/src/ui_components/table.rs rename to crates/keymap_editor/src/ui_components/table.rs diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 0f82d3997f981286c81dc18c29f8763b0402ddd2..a44df4993af5f29cbfce337d2c90dd8f840d97a6 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -17,7 +17,7 @@ use serde::{ }; use settings::{ - ParameterizedJsonSchema, Settings, SettingsLocation, SettingsSources, SettingsStore, + ParameterizedJsonSchema, Settings, SettingsLocation, SettingsSources, SettingsStore, SettingsUi, }; use shellexpand; use std::{borrow::Cow, num::NonZeroU32, path::Path, slice, sync::Arc}; @@ -55,7 +55,7 @@ pub fn all_language_settings<'a>( } /// The settings for all languages. -#[derive(Debug, Clone)] +#[derive(Debug, Clone, SettingsUi)] pub struct AllLanguageSettings { /// The edit prediction settings. 
pub edit_predictions: EditPredictionSettings, diff --git a/crates/language_models/src/settings.rs b/crates/language_models/src/settings.rs index b163585aa7b745447381aa62f710e8c5dbdf469c..1d03ab48f7de3ab9a20c1a099803e6b759b8ea81 100644 --- a/crates/language_models/src/settings.rs +++ b/crates/language_models/src/settings.rs @@ -5,7 +5,7 @@ use collections::HashMap; use gpui::App; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources}; +use settings::{Settings, SettingsSources, SettingsUi}; use crate::provider::{ self, @@ -29,7 +29,7 @@ pub fn init_settings(cx: &mut App) { AllLanguageModelSettings::register(cx); } -#[derive(Default)] +#[derive(Default, SettingsUi)] pub struct AllLanguageModelSettings { pub anthropic: AnthropicSettings, pub bedrock: AmazonBedrockSettings, diff --git a/crates/outline_panel/src/outline_panel_settings.rs b/crates/outline_panel/src/outline_panel_settings.rs index 133d28b748d2978e07a540b3c8c7517b03dc4767..c33125654f043022bfaa7a31200d43d1d6326607 100644 --- a/crates/outline_panel/src/outline_panel_settings.rs +++ b/crates/outline_panel/src/outline_panel_settings.rs @@ -2,7 +2,7 @@ use editor::ShowScrollbar; use gpui::Pixels; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources}; +use settings::{Settings, SettingsSources, SettingsUi}; #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, Copy, PartialEq)] #[serde(rename_all = "snake_case")] @@ -18,7 +18,7 @@ pub enum ShowIndentGuides { Never, } -#[derive(Deserialize, Debug, Clone, Copy, PartialEq)] +#[derive(Deserialize, Debug, Clone, Copy, PartialEq, SettingsUi)] pub struct OutlinePanelSettings { pub button: bool, pub default_width: Pixels, diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 74ad08570a996a2dc9fc07bfb616f0edc0085b9f..b32e95741f522650e5d20f80a6ba18c423805234 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ 
-952,7 +952,7 @@ pub enum PulledDiagnostics { /// Whether to disable all AI features in Zed. /// /// Default: false -#[derive(Copy, Clone, Debug)] +#[derive(Copy, Clone, Debug, settings::SettingsUi)] pub struct DisableAiSettings { pub disable_ai: bool, } diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 4447c2512943257b27a91fb1ac051bccde6e3f7f..30a71c4caeb676509239151a4766beb590fdb47e 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -19,7 +19,7 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{ InvalidSettingsError, LocalSettingsKind, Settings, SettingsLocation, SettingsSources, - SettingsStore, parse_json_with_comments, watch_config_file, + SettingsStore, SettingsUi, parse_json_with_comments, watch_config_file, }; use std::{ collections::BTreeMap, @@ -36,7 +36,7 @@ use crate::{ worktree_store::{WorktreeStore, WorktreeStoreEvent}, }; -#[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema)] +#[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi)] pub struct ProjectSettings { /// Configuration for language servers. 
/// diff --git a/crates/project_panel/src/project_panel_settings.rs b/crates/project_panel/src/project_panel_settings.rs index fc399d66a7b78e75a9e43a3e7bf0404624123685..9c7bd4fd66e9e5b884867bf13f88856c126974b6 100644 --- a/crates/project_panel/src/project_panel_settings.rs +++ b/crates/project_panel/src/project_panel_settings.rs @@ -2,7 +2,7 @@ use editor::ShowScrollbar; use gpui::Pixels; use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources}; +use settings::{Settings, SettingsSources, SettingsUi}; #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, Copy, PartialEq)] #[serde(rename_all = "snake_case")] @@ -28,7 +28,7 @@ pub enum EntrySpacing { Standard, } -#[derive(Deserialize, Debug, Clone, Copy, PartialEq)] +#[derive(Deserialize, Debug, Clone, Copy, PartialEq, SettingsUi)] pub struct ProjectPanelSettings { pub button: bool, pub hide_gitignore: bool, diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index e3fb249d1632a35d888996da2665d00ea98b2c26..29f6e75bbdebf72b36295b20295f0705b636214e 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -20,7 +20,7 @@ use remote::{ }; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources}; +use settings::{Settings, SettingsSources, SettingsUi}; use theme::ThemeSettings; use ui::{ ActiveTheme, Color, Context, Icon, IconName, IconSize, InteractiveElement, IntoElement, Label, @@ -29,7 +29,7 @@ use ui::{ use util::serde::default_true; use workspace::{AppState, ModalView, Workspace}; -#[derive(Deserialize)] +#[derive(Deserialize, SettingsUi)] pub struct SshSettings { pub ssh_connections: Option>, /// Whether to read ~/.ssh/config for ssh connection sources. 
diff --git a/crates/repl/src/jupyter_settings.rs b/crates/repl/src/jupyter_settings.rs index 8b00e0f75722e54766b3d7447894e73dfeb441f8..c3bfd2079dfae21c9b990b15faec4cf7d4ffaa68 100644 --- a/crates/repl/src/jupyter_settings.rs +++ b/crates/repl/src/jupyter_settings.rs @@ -4,9 +4,9 @@ use editor::EditorSettings; use gpui::App; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources}; +use settings::{Settings, SettingsSources, SettingsUi}; -#[derive(Debug, Default)] +#[derive(Debug, Default, SettingsUi)] pub struct JupyterSettings { pub kernel_selections: HashMap, } diff --git a/crates/settings/Cargo.toml b/crates/settings/Cargo.toml index 892d4dea8b2daac7395bcbe273635fbb535a0e53..8768b4073602461a5031b8d70d3a1e930ad2a41e 100644 --- a/crates/settings/Cargo.toml +++ b/crates/settings/Cargo.toml @@ -31,6 +31,7 @@ schemars.workspace = true serde.workspace = true serde_derive.workspace = true serde_json.workspace = true +settings_ui_macros.workspace = true serde_json_lenient.workspace = true smallvec.workspace = true tree-sitter-json.workspace = true diff --git a/crates/settings/src/base_keymap_setting.rs b/crates/settings/src/base_keymap_setting.rs index 91dda03d00ca282e5ccacde2c07f5359be1ebb16..087f25185a99cb927892e3ada22d92c1c319a390 100644 --- a/crates/settings/src/base_keymap_setting.rs +++ b/crates/settings/src/base_keymap_setting.rs @@ -1,13 +1,17 @@ use std::fmt::{Display, Formatter}; -use crate::{Settings, SettingsSources, VsCodeSettings}; +use crate as settings; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; +use settings::{Settings, SettingsSources, VsCodeSettings}; +use settings_ui_macros::SettingsUi; /// Base key bindings scheme. Base keymaps can be overridden with user keymaps. 
/// /// Default: VSCode -#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq, Default)] +#[derive( + Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq, Default, SettingsUi, +)] pub enum BaseKeymap { #[default] VSCode, diff --git a/crates/settings/src/settings.rs b/crates/settings/src/settings.rs index a0717333159e508ea42a1b95bd9f2226e6392871..983cd31dd31d6b9c2cd017568fffe0812f9ae4e5 100644 --- a/crates/settings/src/settings.rs +++ b/crates/settings/src/settings.rs @@ -4,6 +4,7 @@ mod keymap_file; mod settings_file; mod settings_json; mod settings_store; +mod settings_ui; mod vscode_import; use gpui::{App, Global}; @@ -23,6 +24,9 @@ pub use settings_store::{ InvalidSettingsError, LocalSettingsKind, Settings, SettingsLocation, SettingsSources, SettingsStore, }; +pub use settings_ui::*; +// Re-export the derive macro +pub use settings_ui_macros::SettingsUi; pub use vscode_import::{VsCodeSettings, VsCodeSettingsSource}; #[derive(Clone, Debug, PartialEq)] diff --git a/crates/settings/src/settings_json.rs b/crates/settings/src/settings_json.rs index f112ec811d2828350d41eeab63161c8e345d4d77..b916df6e5c205c7fc2c0c920d0ac8343cb986a5c 100644 --- a/crates/settings/src/settings_json.rs +++ b/crates/settings/src/settings_json.rs @@ -87,9 +87,9 @@ pub fn update_value_in_json_text<'a>( } /// * `replace_key` - When an exact key match according to `key_path` is found, replace the key with `replace_key` if `Some`. 
-fn replace_value_in_json_text( +pub fn replace_value_in_json_text>( text: &str, - key_path: &[&str], + key_path: &[T], tab_size: usize, new_value: Option<&Value>, replace_key: Option<&str>, @@ -141,7 +141,7 @@ fn replace_value_in_json_text( let found_key = text .get(key_range.clone()) .map(|key_text| { - depth < key_path.len() && key_text == format!("\"{}\"", key_path[depth]) + depth < key_path.len() && key_text == format!("\"{}\"", key_path[depth].as_ref()) }) .unwrap_or(false); @@ -226,13 +226,13 @@ fn replace_value_in_json_text( } } else { // We have key paths, construct the sub objects - let new_key = key_path[depth]; + let new_key = key_path[depth].as_ref(); // We don't have the key, construct the nested objects let mut new_value = serde_json::to_value(new_value.unwrap_or(&serde_json::Value::Null)).unwrap(); for key in key_path[(depth + 1)..].iter().rev() { - new_value = serde_json::json!({ key.to_string(): new_value }); + new_value = serde_json::json!({ key.as_ref().to_string(): new_value }); } if let Some(first_key_start) = first_key_start { @@ -465,7 +465,7 @@ pub fn append_top_level_array_value_in_json_text( } let (mut replace_range, mut replace_value) = - replace_value_in_json_text("", &[], tab_size, Some(new_value), None); + replace_value_in_json_text::<&str>("", &[], tab_size, Some(new_value), None); replace_range.start = close_bracket_start; replace_range.end = close_bracket_start; diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index fbd0f75aefc2173a3affbb7423d4ccc718679919..09ac6f9766e32e7a0d8765b09919cd0f8c09866c 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -7,7 +7,7 @@ use futures::{ channel::{mpsc, oneshot}, future::LocalBoxFuture, }; -use gpui::{App, AsyncApp, BorrowAppContext, Global, Task, UpdateGlobal}; +use gpui::{App, AsyncApp, BorrowAppContext, Global, SharedString, Task, UpdateGlobal}; use paths::{EDITORCONFIG_NAME, 
local_settings_file_relative_path, task_file_name}; use schemars::JsonSchema; @@ -31,14 +31,15 @@ use util::{ pub type EditorconfigProperties = ec4rs::Properties; use crate::{ - ActiveSettingsProfileName, ParameterizedJsonSchema, SettingsJsonSchemaParams, VsCodeSettings, - WorktreeId, parse_json_with_comments, update_value_in_json_text, + ActiveSettingsProfileName, ParameterizedJsonSchema, SettingsJsonSchemaParams, SettingsUiEntry, + VsCodeSettings, WorktreeId, parse_json_with_comments, replace_value_in_json_text, + settings_ui::SettingsUi, update_value_in_json_text, }; /// A value that can be defined as a user setting. /// /// Settings can be loaded from a combination of multiple JSON files. -pub trait Settings: 'static + Send + Sync { +pub trait Settings: SettingsUi + 'static + Send + Sync { /// The name of a key within the JSON file from which this setting should /// be deserialized. If this is `None`, then the setting will be deserialized /// from the root object. @@ -284,6 +285,7 @@ trait AnySettingValue: 'static + Send + Sync { text: &mut String, edits: &mut Vec<(Range, String)>, ); + fn settings_ui_item(&self) -> SettingsUiEntry; } struct DeserializedSetting(Box); @@ -480,6 +482,11 @@ impl SettingsStore { self.raw_global_settings.as_ref() } + /// Access the raw JSON value of the default settings. 
+ pub fn raw_default_settings(&self) -> &Value { + &self.raw_default_settings + } + #[cfg(any(test, feature = "test-support"))] pub fn test(cx: &mut App) -> Self { let mut this = Self::new(cx); @@ -532,49 +539,10 @@ impl SettingsStore { } } - pub fn update_settings_file( + fn update_settings_file_inner( &self, fs: Arc, - update: impl 'static + Send + FnOnce(&mut T::FileContent, &App), - ) { - self.setting_file_updates_tx - .unbounded_send(Box::new(move |cx: AsyncApp| { - async move { - let old_text = Self::load_settings(&fs).await?; - let new_text = cx.read_global(|store: &SettingsStore, cx| { - store.new_text_for_update::(old_text, |content| update(content, cx)) - })?; - let settings_path = paths::settings_file().as_path(); - if fs.is_file(settings_path).await { - let resolved_path = - fs.canonicalize(settings_path).await.with_context(|| { - format!("Failed to canonicalize settings path {:?}", settings_path) - })?; - - fs.atomic_write(resolved_path.clone(), new_text) - .await - .with_context(|| { - format!("Failed to write settings to file {:?}", resolved_path) - })?; - } else { - fs.atomic_write(settings_path.to_path_buf(), new_text) - .await - .with_context(|| { - format!("Failed to write settings to file {:?}", settings_path) - })?; - } - - anyhow::Ok(()) - } - .boxed_local() - })) - .ok(); - } - - pub fn import_vscode_settings( - &self, - fs: Arc, - vscode_settings: VsCodeSettings, + update: impl 'static + Send + FnOnce(String, AsyncApp) -> Result, ) -> oneshot::Receiver> { let (tx, rx) = oneshot::channel::>(); self.setting_file_updates_tx @@ -582,9 +550,7 @@ impl SettingsStore { async move { let res = async move { let old_text = Self::load_settings(&fs).await?; - let new_text = cx.read_global(|store: &SettingsStore, _cx| { - store.get_vscode_edits(old_text, &vscode_settings) - })?; + let new_text = update(old_text, cx)?; let settings_path = paths::settings_file().as_path(); if fs.is_file(settings_path).await { let resolved_path = @@ -607,7 +573,6 @@ impl 
SettingsStore { format!("Failed to write settings to file {:?}", settings_path) })?; } - anyhow::Ok(()) } .await; @@ -622,9 +587,67 @@ impl SettingsStore { } .boxed_local() })) - .ok(); + .map_err(|err| anyhow::format_err!("Failed to update settings file: {}", err)) + .log_with_level(log::Level::Warn); + return rx; + } + + pub fn update_settings_file_at_path( + &self, + fs: Arc, + path: &[&str], + new_value: serde_json::Value, + ) -> oneshot::Receiver> { + let key_path = path + .into_iter() + .cloned() + .map(SharedString::new) + .collect::>(); + let update = move |mut old_text: String, cx: AsyncApp| { + cx.read_global(|store: &SettingsStore, _cx| { + // todo(settings_ui) use `update_value_in_json_text` for merging new and old objects with comment preservation, needs old value though... + let (range, replacement) = replace_value_in_json_text( + &old_text, + key_path.as_slice(), + store.json_tab_size(), + Some(&new_value), + None, + ); + old_text.replace_range(range, &replacement); + old_text + }) + }; + self.update_settings_file_inner(fs, update) + } - rx + pub fn update_settings_file( + &self, + fs: Arc, + update: impl 'static + Send + FnOnce(&mut T::FileContent, &App), + ) { + _ = self.update_settings_file_inner(fs, move |old_text: String, cx: AsyncApp| { + cx.read_global(|store: &SettingsStore, cx| { + store.new_text_for_update::(old_text, |content| update(content, cx)) + }) + }); + } + + pub fn import_vscode_settings( + &self, + fs: Arc, + vscode_settings: VsCodeSettings, + ) -> oneshot::Receiver> { + self.update_settings_file_inner(fs, move |old_text: String, cx: AsyncApp| { + cx.read_global(|store: &SettingsStore, _cx| { + store.get_vscode_edits(old_text, &vscode_settings) + }) + }) + } + + pub fn settings_ui_items(&self) -> impl IntoIterator { + self.setting_values + .values() + .map(|item| item.settings_ui_item()) } } @@ -1520,6 +1543,10 @@ impl AnySettingValue for SettingValue { edits, ); } + + fn settings_ui_item(&self) -> SettingsUiEntry { + 
::settings_ui_entry() + } } #[cfg(test)] @@ -1527,7 +1554,10 @@ mod tests { use crate::VsCodeSettingsSource; use super::*; + // This is so the SettingsUi macro can still work properly + use crate as settings; use serde_derive::Deserialize; + use settings_ui_macros::SettingsUi; use unindent::Unindent; #[gpui::test] @@ -2070,14 +2100,14 @@ mod tests { pretty_assertions::assert_eq!(new, expected); } - #[derive(Debug, PartialEq, Deserialize)] + #[derive(Debug, PartialEq, Deserialize, SettingsUi)] struct UserSettings { name: String, age: u32, staff: bool, } - #[derive(Default, Clone, Serialize, Deserialize, JsonSchema)] + #[derive(Default, Clone, Serialize, Deserialize, JsonSchema, SettingsUi)] struct UserSettingsContent { name: Option, age: Option, @@ -2097,7 +2127,7 @@ mod tests { } } - #[derive(Debug, Deserialize, PartialEq)] + #[derive(Debug, Deserialize, PartialEq, SettingsUi)] struct TurboSetting(bool); impl Settings for TurboSetting { @@ -2111,7 +2141,7 @@ mod tests { fn import_from_vscode(_vscode: &VsCodeSettings, _current: &mut Self::FileContent) {} } - #[derive(Clone, Debug, PartialEq, Deserialize)] + #[derive(Clone, Debug, PartialEq, Deserialize, SettingsUi)] struct MultiKeySettings { #[serde(default)] key1: String, @@ -2144,7 +2174,7 @@ mod tests { } } - #[derive(Debug, Deserialize)] + #[derive(Debug, Deserialize, SettingsUi)] struct JournalSettings { pub path: String, pub hour_format: HourFormat, @@ -2245,7 +2275,7 @@ mod tests { ); } - #[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] + #[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema, SettingsUi)] struct LanguageSettings { #[serde(default)] languages: HashMap, diff --git a/crates/settings/src/settings_ui.rs b/crates/settings/src/settings_ui.rs new file mode 100644 index 0000000000000000000000000000000000000000..8b30ebc9d5968943d3814f7569d1367d389e386a --- /dev/null +++ b/crates/settings/src/settings_ui.rs @@ -0,0 +1,118 @@ +use anyhow::Context as _; +use fs::Fs; +use 
gpui::{AnyElement, App, AppContext as _, ReadGlobal as _, Window}; +use smallvec::SmallVec; + +use crate::SettingsStore; + +pub trait SettingsUi { + fn settings_ui_item() -> SettingsUiItem { + SettingsUiItem::None + } + fn settings_ui_entry() -> SettingsUiEntry; +} + +pub struct SettingsUiEntry { + // todo(settings_ui): move this back here once there isn't a None variant + // pub path: &'static str, + // pub title: &'static str, + pub item: SettingsUiEntryVariant, +} + +pub enum SettingsUiEntryVariant { + Group { + path: &'static str, + title: &'static str, + items: Vec, + }, + Item { + path: &'static str, + item: SettingsUiItemSingle, + }, + // todo(settings_ui): remove + None, +} + +pub enum SettingsUiItemSingle { + SwitchField, + NumericStepper, + ToggleGroup(&'static [&'static str]), + /// This should be used when toggle group size > 6 + DropDown(&'static [&'static str]), + Custom(Box, &mut Window, &mut App) -> AnyElement>), +} + +pub struct SettingsValue { + pub title: &'static str, + pub path: SmallVec<[&'static str; 1]>, + pub value: Option, + pub default_value: T, +} + +impl SettingsValue { + pub fn read(&self) -> &T { + match &self.value { + Some(value) => value, + None => &self.default_value, + } + } +} + +impl SettingsValue { + pub fn write_value(path: &SmallVec<[&'static str; 1]>, value: serde_json::Value, cx: &mut App) { + let settings_store = SettingsStore::global(cx); + let fs = ::global(cx); + + let rx = settings_store.update_settings_file_at_path(fs.clone(), path.as_slice(), value); + let path = path.clone(); + cx.background_spawn(async move { + rx.await? 
+ .with_context(|| format!("Failed to update setting at path `{:?}`", path.join("."))) + }) + .detach_and_log_err(cx); + } +} + +impl SettingsValue { + pub fn write( + path: &SmallVec<[&'static str; 1]>, + value: T, + cx: &mut App, + ) -> Result<(), serde_json::Error> { + SettingsValue::write_value(path, serde_json::to_value(value)?, cx); + Ok(()) + } +} + +pub enum SettingsUiItem { + Group { + title: &'static str, + items: Vec, + }, + Single(SettingsUiItemSingle), + None, +} + +impl SettingsUi for bool { + fn settings_ui_item() -> SettingsUiItem { + SettingsUiItem::Single(SettingsUiItemSingle::SwitchField) + } + + fn settings_ui_entry() -> SettingsUiEntry { + SettingsUiEntry { + item: SettingsUiEntryVariant::None, + } + } +} + +impl SettingsUi for u64 { + fn settings_ui_item() -> SettingsUiItem { + SettingsUiItem::Single(SettingsUiItemSingle::NumericStepper) + } + + fn settings_ui_entry() -> SettingsUiEntry { + SettingsUiEntry { + item: SettingsUiEntryVariant::None, + } + } +} diff --git a/crates/settings/src/vscode_import.rs b/crates/settings/src/vscode_import.rs index 4a48c18f7c2cbd19538257d51e8342c15c69f587..53fbf797c3d9e56e49b1d96e7dabcac19ddde8e2 100644 --- a/crates/settings/src/vscode_import.rs +++ b/crates/settings/src/vscode_import.rs @@ -2,7 +2,7 @@ use anyhow::{Context as _, Result, anyhow}; use fs::Fs; use paths::{cursor_settings_file_paths, vscode_settings_file_paths}; use serde_json::{Map, Value}; -use std::{path::Path, rc::Rc, sync::Arc}; +use std::{path::Path, sync::Arc}; #[derive(Clone, Copy, PartialEq, Eq, Debug)] pub enum VsCodeSettingsSource { @@ -21,7 +21,7 @@ impl std::fmt::Display for VsCodeSettingsSource { pub struct VsCodeSettings { pub source: VsCodeSettingsSource, - pub path: Rc, + pub path: Arc, content: Map, } diff --git a/crates/settings_ui/Cargo.toml b/crates/settings_ui/Cargo.toml index 8a151359ec4bb246e23c4a09fdbe63c23c69a98a..7c2b81aee0ecf48afb7131adf5ddb19a165ca351 100644 --- a/crates/settings_ui/Cargo.toml +++ 
b/crates/settings_ui/Cargo.toml @@ -11,45 +11,26 @@ workspace = true [lib] path = "src/settings_ui.rs" +[features] +default = [] + [dependencies] anyhow.workspace = true -collections.workspace = true -command_palette.workspace = true command_palette_hooks.workspace = true -component.workspace = true -db.workspace = true editor.workspace = true feature_flags.workspace = true -fs.workspace = true -fuzzy.workspace = true gpui.workspace = true -itertools.workspace = true -language.workspace = true -log.workspace = true -menu.workspace = true -notifications.workspace = true -paths.workspace = true -project.workspace = true -search.workspace = true -serde.workspace = true serde_json.workspace = true +serde.workspace = true settings.workspace = true -telemetry.workspace = true -tempfile.workspace = true +smallvec.workspace = true theme.workspace = true -tree-sitter-json.workspace = true -tree-sitter-rust.workspace = true ui.workspace = true -ui_input.workspace = true -util.workspace = true -vim.workspace = true -workspace-hack.workspace = true workspace.workspace = true -zed_actions.workspace = true +workspace-hack.workspace = true -[dev-dependencies] -db = {"workspace"= true, "features" = ["test-support"]} -fs = { workspace = true, features = ["test-support"] } -gpui = { workspace = true, features = ["test-support"] } -project = { workspace = true, features = ["test-support"] } -workspace = { workspace = true, features = ["test-support"] } +# Uncomment other workspace dependencies as needed +# assistant.workspace = true +# client.workspace = true +# project.workspace = true +# settings.workspace = true diff --git a/crates/settings_ui/src/settings_ui.rs b/crates/settings_ui/src/settings_ui.rs index 3022cc714268f641b7b6f30021b5e86d6072b7b6..ae03170a1a9a2cb3e53c67402c95c8e79e739ab9 100644 --- a/crates/settings_ui/src/settings_ui.rs +++ b/crates/settings_ui/src/settings_ui.rs @@ -1,20 +1,24 @@ mod appearance_settings_controls; use std::any::TypeId; +use std::ops::{Not, 
Range}; +use anyhow::Context as _; use command_palette_hooks::CommandPaletteFilter; use editor::EditorSettingsControls; use feature_flags::{FeatureFlag, FeatureFlagViewExt}; -use gpui::{App, Entity, EventEmitter, FocusHandle, Focusable, actions}; -use ui::prelude::*; -use workspace::item::{Item, ItemEvent}; -use workspace::{Workspace, with_active_or_new_workspace}; +use gpui::{App, Entity, EventEmitter, FocusHandle, Focusable, ReadGlobal, actions}; +use settings::{SettingsStore, SettingsUiEntryVariant, SettingsUiItemSingle, SettingsValue}; +use smallvec::SmallVec; +use ui::{NumericStepper, SwitchField, ToggleButtonGroup, ToggleButtonSimple, prelude::*}; +use workspace::{ + Workspace, + item::{Item, ItemEvent}, + with_active_or_new_workspace, +}; use crate::appearance_settings_controls::AppearanceSettingsControls; -pub mod keybindings; -pub mod ui_components; - pub struct SettingsUiFeatureFlag; impl FeatureFlag for SettingsUiFeatureFlag { @@ -75,18 +79,18 @@ pub fn init(cx: &mut App) { .detach(); }) .detach(); - - keybindings::init(cx); } pub struct SettingsPage { focus_handle: FocusHandle, + settings_tree: SettingsUiTree, } impl SettingsPage { pub fn new(_workspace: &Workspace, cx: &mut Context) -> Entity { cx.new(|cx| Self { focus_handle: cx.focus_handle(), + settings_tree: SettingsUiTree::new(cx), }) } } @@ -119,26 +123,472 @@ impl Item for SettingsPage { } } +// We want to iterate over the side bar with root groups +// - this is a loop over top level groups, and if any are expanded, recursively displaying their items +// - Should be able to get all items from a group (flatten a group) +// - Should be able to toggle/untoggle groups in UI (at least in sidebar) +// - Search should be available +// - there should be an index of text -> item mappings, for using fuzzy::match +// - Do we want to show the parent groups when a item is matched? 
+ +struct UIEntry { + title: &'static str, + path: &'static str, + _depth: usize, + // a + // b < a descendant range < a total descendant range + // f | | + // g | | + // c < | + // d | + // e < + descendant_range: Range, + total_descendant_range: Range, + next_sibling: Option, + // expanded: bool, + render: Option, +} + +struct SettingsUiTree { + root_entry_indices: Vec, + entries: Vec, + active_entry_index: usize, +} + +fn build_tree_item( + tree: &mut Vec, + group: SettingsUiEntryVariant, + depth: usize, + prev_index: Option, +) { + let index = tree.len(); + tree.push(UIEntry { + title: "", + path: "", + _depth: depth, + descendant_range: index + 1..index + 1, + total_descendant_range: index + 1..index + 1, + render: None, + next_sibling: None, + }); + if let Some(prev_index) = prev_index { + tree[prev_index].next_sibling = Some(index); + } + match group { + SettingsUiEntryVariant::Group { + path, + title, + items: group_items, + } => { + tree[index].path = path; + tree[index].title = title; + for group_item in group_items { + let prev_index = tree[index] + .descendant_range + .is_empty() + .not() + .then_some(tree[index].descendant_range.end - 1); + tree[index].descendant_range.end = tree.len() + 1; + build_tree_item(tree, group_item.item, depth + 1, prev_index); + tree[index].total_descendant_range.end = tree.len(); + } + } + SettingsUiEntryVariant::Item { path, item } => { + tree[index].path = path; + // todo(settings_ui) create title from path in macro, and use here + tree[index].title = path; + tree[index].render = Some(item); + } + SettingsUiEntryVariant::None => { + return; + } + } +} + +impl SettingsUiTree { + fn new(cx: &App) -> Self { + let settings_store = SettingsStore::global(cx); + let mut tree = vec![]; + let mut root_entry_indices = vec![]; + for item in settings_store.settings_ui_items() { + if matches!(item.item, SettingsUiEntryVariant::None) { + continue; + } + + assert!( + matches!(item.item, SettingsUiEntryVariant::Group { .. 
}), + "top level items must be groups: {:?}", + match item.item { + SettingsUiEntryVariant::Item { path, .. } => path, + _ => unreachable!(), + } + ); + let prev_root_entry_index = root_entry_indices.last().copied(); + root_entry_indices.push(tree.len()); + build_tree_item(&mut tree, item.item, 0, prev_root_entry_index); + } + + root_entry_indices.sort_by_key(|i| tree[*i].title); + + let active_entry_index = root_entry_indices[0]; + Self { + entries: tree, + root_entry_indices, + active_entry_index, + } + } +} + +fn render_nav(tree: &SettingsUiTree, _window: &mut Window, cx: &mut Context) -> Div { + let mut nav = v_flex().p_4().gap_2(); + for &index in &tree.root_entry_indices { + nav = nav.child( + div() + .id(index) + .on_click(cx.listener(move |settings, _, _, _| { + settings.settings_tree.active_entry_index = index; + })) + .child( + Label::new(SharedString::new_static(tree.entries[index].title)) + .size(LabelSize::Large) + .when(tree.active_entry_index == index, |this| { + this.color(Color::Selected) + }), + ), + ); + } + nav +} + +fn render_content( + tree: &SettingsUiTree, + window: &mut Window, + cx: &mut Context, +) -> impl IntoElement { + let Some(entry) = tree.entries.get(tree.active_entry_index) else { + return div() + .size_full() + .child(Label::new(SharedString::new_static("No settings found")).color(Color::Error)); + }; + let mut content = v_flex().size_full().gap_4(); + + let mut child_index = entry + .descendant_range + .is_empty() + .not() + .then_some(entry.descendant_range.start); + let mut path = smallvec::smallvec![entry.path]; + + while let Some(index) = child_index { + let child = &tree.entries[index]; + child_index = child.next_sibling; + if child.render.is_none() { + // todo(settings_ui): subgroups? + continue; + } + path.push(child.path); + let settings_value = settings_value_from_settings_and_path( + path.clone(), + // PERF: how to structure this better? 
There feels like there's a way to avoid the clone + // and every value lookup + SettingsStore::global(cx).raw_user_settings(), + SettingsStore::global(cx).raw_default_settings(), + ); + content = content.child( + div() + .child( + Label::new(SharedString::new_static(tree.entries[index].title)) + .size(LabelSize::Large) + .when(tree.active_entry_index == index, |this| { + this.color(Color::Selected) + }), + ) + .child(render_item_single( + settings_value, + child.render.as_ref().unwrap(), + window, + cx, + )), + ); + + path.pop(); + } + + return content; +} + impl Render for SettingsPage { - fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { - v_flex() + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + div() + .grid() + .grid_cols(16) .p_4() + .bg(cx.theme().colors().editor_background) .size_full() - .gap_4() - .child(Label::new("Settings").size(LabelSize::Large)) - .child( - v_flex().gap_1().child(Label::new("Appearance")).child( - v_flex() - .elevation_2(cx) - .child(AppearanceSettingsControls::new()), - ), - ) .child( - v_flex().gap_1().child(Label::new("Editor")).child( - v_flex() - .elevation_2(cx) - .child(EditorSettingsControls::new()), - ), + div() + .col_span(2) + .h_full() + .child(render_nav(&self.settings_tree, window, cx)), ) + .child(div().col_span(4).h_full().child(render_content( + &self.settings_tree, + window, + cx, + ))) } } + +// todo(settings_ui): remove, only here as inspiration +#[allow(dead_code)] +fn render_old_appearance_settings(cx: &mut App) -> impl IntoElement { + v_flex() + .p_4() + .size_full() + .gap_4() + .child(Label::new("Settings").size(LabelSize::Large)) + .child( + v_flex().gap_1().child(Label::new("Appearance")).child( + v_flex() + .elevation_2(cx) + .child(AppearanceSettingsControls::new()), + ), + ) + .child( + v_flex().gap_1().child(Label::new("Editor")).child( + v_flex() + .elevation_2(cx) + .child(EditorSettingsControls::new()), + ), + ) +} + +fn 
element_id_from_path(path: &[&'static str]) -> ElementId { + if path.len() == 0 { + panic!("Path length must not be zero"); + } else if path.len() == 1 { + ElementId::Name(SharedString::new_static(path[0])) + } else { + ElementId::from(( + ElementId::from(SharedString::new_static(path[path.len() - 2])), + SharedString::new_static(path[path.len() - 1]), + )) + } +} + +fn render_item_single( + settings_value: SettingsValue, + item: &SettingsUiItemSingle, + window: &mut Window, + cx: &mut App, +) -> AnyElement { + match item { + SettingsUiItemSingle::Custom(_) => div() + .child(format!("Item: {}", settings_value.path.join("."))) + .into_any_element(), + SettingsUiItemSingle::SwitchField => { + render_any_item(settings_value, render_switch_field, window, cx) + } + SettingsUiItemSingle::NumericStepper => { + render_any_item(settings_value, render_numeric_stepper, window, cx) + } + SettingsUiItemSingle::ToggleGroup(variants) => { + render_toggle_button_group(settings_value, variants, window, cx) + } + SettingsUiItemSingle::DropDown(_) => { + unimplemented!("This") + } + } +} + +fn read_settings_value_from_path<'a>( + settings_contents: &'a serde_json::Value, + path: &[&'static str], +) -> Option<&'a serde_json::Value> { + let Some((key, remaining)) = path.split_first() else { + return Some(settings_contents); + }; + let Some(value) = settings_contents.get(key) else { + return None; + }; + + read_settings_value_from_path(value, remaining) +} + +fn downcast_any_item( + settings_value: SettingsValue, +) -> SettingsValue { + let value = settings_value + .value + .map(|value| serde_json::from_value::(value).expect("value is not a T")); + // todo(settings_ui) Create test that constructs UI tree, and asserts that all elements have default values + let default_value = serde_json::from_value::(settings_value.default_value) + .expect("default value is not an Option"); + let deserialized_setting_value = SettingsValue { + title: settings_value.title, + path: settings_value.path, + 
value, + default_value, + }; + deserialized_setting_value +} + +fn render_any_item( + settings_value: SettingsValue, + render_fn: impl Fn(SettingsValue, &mut Window, &mut App) -> AnyElement + 'static, + window: &mut Window, + cx: &mut App, +) -> AnyElement { + let deserialized_setting_value = downcast_any_item(settings_value); + render_fn(deserialized_setting_value, window, cx) +} + +fn render_numeric_stepper( + value: SettingsValue, + _window: &mut Window, + _cx: &mut App, +) -> AnyElement { + let id = element_id_from_path(&value.path); + let path = value.path.clone(); + let num = value.value.unwrap_or_else(|| value.default_value); + + NumericStepper::new( + id, + num.to_string(), + { + let path = value.path.clone(); + move |_, _, cx| { + let Some(number) = serde_json::Number::from_u128(num.saturating_sub(1) as u128) + else { + return; + }; + let new_value = serde_json::Value::Number(number); + SettingsValue::write_value(&path, new_value, cx); + } + }, + move |_, _, cx| { + let Some(number) = serde_json::Number::from_u128(num.saturating_add(1) as u128) else { + return; + }; + + let new_value = serde_json::Value::Number(number); + + SettingsValue::write_value(&path, new_value, cx); + }, + ) + .style(ui::NumericStepperStyle::Outlined) + .into_any_element() +} + +fn render_switch_field( + value: SettingsValue, + _window: &mut Window, + _cx: &mut App, +) -> AnyElement { + let id = element_id_from_path(&value.path); + let path = value.path.clone(); + SwitchField::new( + id, + SharedString::new_static(value.title), + None, + match value.read() { + true => ToggleState::Selected, + false => ToggleState::Unselected, + }, + move |toggle_state, _, cx| { + let new_value = serde_json::Value::Bool(match toggle_state { + ToggleState::Indeterminate => { + return; + } + ToggleState::Selected => true, + ToggleState::Unselected => false, + }); + + SettingsValue::write_value(&path, new_value, cx); + }, + ) + .into_any_element() +} + +fn render_toggle_button_group( + value: 
SettingsValue, + variants: &'static [&'static str], + _: &mut Window, + _: &mut App, +) -> AnyElement { + let value = downcast_any_item::(value); + + fn make_toggle_group( + group_name: &'static str, + value: SettingsValue, + variants: &'static [&'static str], + ) -> AnyElement { + let mut variants_array: [&'static str; LEN] = ["default"; LEN]; + variants_array.copy_from_slice(variants); + let active_value = value.read(); + + let selected_idx = variants_array + .iter() + .enumerate() + .find_map(|(idx, variant)| { + if variant == &active_value { + Some(idx) + } else { + None + } + }); + + ToggleButtonGroup::single_row( + group_name, + variants_array.map(|variant| { + let path = value.path.clone(); + ToggleButtonSimple::new(variant, move |_, _, cx| { + SettingsValue::write_value( + &path, + serde_json::Value::String(variant.to_string()), + cx, + ); + }) + }), + ) + .when_some(selected_idx, |this, ix| this.selected_index(ix)) + .style(ui::ToggleButtonGroupStyle::Filled) + .into_any_element() + } + + macro_rules! 
templ_toggl_with_const_param { + ($len:expr) => { + if variants.len() == $len { + return make_toggle_group::<$len>(value.title, value, variants); + } + }; + } + templ_toggl_with_const_param!(1); + templ_toggl_with_const_param!(2); + templ_toggl_with_const_param!(3); + templ_toggl_with_const_param!(4); + templ_toggl_with_const_param!(5); + templ_toggl_with_const_param!(6); + unreachable!("Too many variants"); +} + +fn settings_value_from_settings_and_path( + path: SmallVec<[&'static str; 1]>, + user_settings: &serde_json::Value, + default_settings: &serde_json::Value, +) -> SettingsValue { + let default_value = read_settings_value_from_path(default_settings, &path) + .with_context(|| format!("No default value for item at path {:?}", path.join("."))) + .expect("Default value set for item") + .clone(); + + let value = read_settings_value_from_path(user_settings, &path).cloned(); + let settings_value = SettingsValue { + default_value, + value, + path: path.clone(), + // todo(settings_ui) title for items + title: path.last().expect("path non empty"), + }; + return settings_value; +} diff --git a/crates/settings_ui_macros/Cargo.toml b/crates/settings_ui_macros/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..e242e7546d1527632dba6eece9b17ccea27295f4 --- /dev/null +++ b/crates/settings_ui_macros/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "settings_ui_macros" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lib] +path = "src/settings_ui_macros.rs" +proc-macro = true + +[lints] +workspace = true + +[features] +default = [] + +[dependencies] +proc-macro2.workspace = true +quote.workspace = true +syn.workspace = true +workspace-hack.workspace = true diff --git a/crates/settings_ui_macros/LICENSE-GPL b/crates/settings_ui_macros/LICENSE-GPL new file mode 120000 index 0000000000000000000000000000000000000000..89e542f750cd3860a0598eff0dc34b56d7336dc4 --- /dev/null +++ 
b/crates/settings_ui_macros/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/settings_ui_macros/src/settings_ui_macros.rs b/crates/settings_ui_macros/src/settings_ui_macros.rs new file mode 100644 index 0000000000000000000000000000000000000000..6e37745a7c24155de631e47ffc8c265209ee24e8 --- /dev/null +++ b/crates/settings_ui_macros/src/settings_ui_macros.rs @@ -0,0 +1,201 @@ +use proc_macro2::TokenStream; +use quote::{ToTokens, quote}; +use syn::{Data, DeriveInput, LitStr, Token, parse_macro_input}; + +/// Derive macro for the `SettingsUi` marker trait. +/// +/// This macro automatically implements the `SettingsUi` trait for the annotated type. +/// The `SettingsUi` trait is a marker trait used to indicate that a type can be +/// displayed in the settings UI. +/// +/// # Example +/// +/// ``` +/// use settings::SettingsUi; +/// use settings_ui_macros::SettingsUi; +/// +/// #[derive(SettingsUi)] +/// #[settings_ui(group = "Standard")] +/// struct MySettings { +/// enabled: bool, +/// count: usize, +/// } +/// ``` +#[proc_macro_derive(SettingsUi, attributes(settings_ui))] +pub fn derive_settings_ui(input: proc_macro::TokenStream) -> proc_macro::TokenStream { + let input = parse_macro_input!(input as DeriveInput); + let name = &input.ident; + + // Handle generic parameters if present + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + + let mut group_name = Option::::None; + let mut path_name = Option::::None; + + for attr in &input.attrs { + if attr.path().is_ident("settings_ui") { + attr.parse_nested_meta(|meta| { + if meta.path.is_ident("group") { + if group_name.is_some() { + return Err(meta.error("Only one 'group' path can be specified")); + } + meta.input.parse::()?; + let lit: LitStr = meta.input.parse()?; + group_name = Some(lit.value()); + } else if meta.path.is_ident("path") { + // todo(settings_ui) try get KEY from Settings if possible, and once we do, + // if can get key from 
settings, throw error if path also passed + if path_name.is_some() { + return Err(meta.error("Only one 'path' can be specified")); + } + meta.input.parse::()?; + let lit: LitStr = meta.input.parse()?; + path_name = Some(lit.value()); + } + Ok(()) + }) + .unwrap_or_else(|e| panic!("in #[settings_ui] attribute: {}", e)); + } + } + + if path_name.is_none() && group_name.is_some() { + // todo(settings_ui) derive path from settings + panic!("path is required when group is specified"); + } + + let ui_render_fn_body = generate_ui_item_body(group_name.as_ref(), path_name.as_ref(), &input); + + let settings_ui_item_fn_body = path_name + .as_ref() + .map(|path_name| map_ui_item_to_render(path_name, quote! { Self })) + .unwrap_or(quote! { + settings::SettingsUiEntry { + item: settings::SettingsUiEntryVariant::None + } + }); + + let expanded = quote! { + impl #impl_generics settings::SettingsUi for #name #ty_generics #where_clause { + fn settings_ui_item() -> settings::SettingsUiItem { + #ui_render_fn_body + } + + fn settings_ui_entry() -> settings::SettingsUiEntry { + #settings_ui_item_fn_body + } + } + }; + + proc_macro::TokenStream::from(expanded) +} + +fn map_ui_item_to_render(path: &str, ty: TokenStream) -> TokenStream { + quote! { + settings::SettingsUiEntry { + item: match #ty::settings_ui_item() { + settings::SettingsUiItem::Group{title, items} => settings::SettingsUiEntryVariant::Group { + title, + path: #path, + items, + }, + settings::SettingsUiItem::Single(item) => settings::SettingsUiEntryVariant::Item { + path: #path, + item, + }, + settings::SettingsUiItem::None => settings::SettingsUiEntryVariant::None, + } + } + } +} + +fn generate_ui_item_body( + group_name: Option<&String>, + path_name: Option<&String>, + input: &syn::DeriveInput, +) -> TokenStream { + match (group_name, path_name, &input.data) { + (_, _, Data::Union(_)) => unimplemented!("Derive SettingsUi for Unions"), + (None, None, Data::Struct(_)) => quote! 
{ + settings::SettingsUiItem::None + }, + (Some(_), None, Data::Struct(_)) => quote! { + settings::SettingsUiItem::None + }, + (None, Some(_), Data::Struct(_)) => quote! { + settings::SettingsUiItem::None + }, + (Some(group_name), _, Data::Struct(data_struct)) => { + let fields = data_struct + .fields + .iter() + .filter(|field| { + !field.attrs.iter().any(|attr| { + let mut has_skip = false; + if attr.path().is_ident("settings_ui") { + let _ = attr.parse_nested_meta(|meta| { + if meta.path.is_ident("skip") { + has_skip = true; + } + Ok(()) + }); + } + + has_skip + }) + }) + .map(|field| { + ( + field.ident.clone().expect("tuple fields").to_string(), + field.ty.to_token_stream(), + ) + }) + .map(|(name, ty)| map_ui_item_to_render(&name, ty)); + + quote! { + settings::SettingsUiItem::Group{ title: #group_name, items: vec![#(#fields),*] } + } + } + (None, _, Data::Enum(data_enum)) => { + let mut lowercase = false; + for attr in &input.attrs { + if attr.path().is_ident("serde") { + attr.parse_nested_meta(|meta| { + if meta.path.is_ident("rename_all") { + meta.input.parse::()?; + let lit = meta.input.parse::()?.value(); + // todo(settings_ui) snake case + lowercase = lit == "lowercase" || lit == "snake_case"; + } + Ok(()) + }) + .ok(); + } + } + let length = data_enum.variants.len(); + + let variants = data_enum.variants.iter().map(|variant| { + let string = variant.ident.clone().to_string(); + + if lowercase { + string.to_lowercase() + } else { + string + } + }); + + if length > 6 { + quote! { + settings::SettingsUiItem::Single(settings::SettingsUiItemSingle::DropDown(&[#(#variants),*])) + } + } else { + quote! { + settings::SettingsUiItem::Single(settings::SettingsUiItemSingle::ToggleGroup(&[#(#variants),*])) + } + } + } + // todo(settings_ui) discriminated unions + (_, _, Data::Enum(_)) => quote! 
{ + settings::SettingsUiItem::None + }, + } +} diff --git a/crates/terminal/src/terminal_settings.rs b/crates/terminal/src/terminal_settings.rs index 635e3e2ca5895562c7981d89169bf6f0632a223f..01f2d85f09e416b6c8ac40d7fa283d1f1e296cd5 100644 --- a/crates/terminal/src/terminal_settings.rs +++ b/crates/terminal/src/terminal_settings.rs @@ -6,7 +6,7 @@ use gpui::{AbsoluteLength, App, FontFallbacks, FontFeatures, FontWeight, Pixels, use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; -use settings::SettingsSources; +use settings::{SettingsSources, SettingsUi}; use std::path::PathBuf; use task::Shell; use theme::FontFamilyName; @@ -24,7 +24,7 @@ pub struct Toolbar { pub breadcrumbs: bool, } -#[derive(Clone, Debug, Deserialize)] +#[derive(Clone, Debug, Deserialize, SettingsUi)] pub struct TerminalSettings { pub shell: Shell, pub working_directory: WorkingDirectory, diff --git a/crates/theme/src/settings.rs b/crates/theme/src/settings.rs index df147cfe92377962b135fed309ef0a7df68adcd8..61b41eba0642f10312a4c78df447ac7344f7e2dc 100644 --- a/crates/theme/src/settings.rs +++ b/crates/theme/src/settings.rs @@ -13,7 +13,7 @@ use gpui::{ use refineable::Refineable; use schemars::{JsonSchema, json_schema}; use serde::{Deserialize, Serialize}; -use settings::{ParameterizedJsonSchema, Settings, SettingsSources}; +use settings::{ParameterizedJsonSchema, Settings, SettingsSources, SettingsUi}; use std::sync::Arc; use util::ResultExt as _; use util::schemars::replace_subschema; @@ -87,7 +87,7 @@ impl From for String { } /// Customizable settings for the UI and theme system. -#[derive(Clone, PartialEq)] +#[derive(Clone, PartialEq, SettingsUi)] pub struct ThemeSettings { /// The UI font size. Determines the size of text in the UI, /// as well as the size of a [gpui::Rems] unit. 
diff --git a/crates/title_bar/Cargo.toml b/crates/title_bar/Cargo.toml index cf178e2850397c5a2398033a02addb73ab615ec9..f60ac7c301359d0bb0d3d8ee1d4115c5d815cf69 100644 --- a/crates/title_bar/Cargo.toml +++ b/crates/title_bar/Cargo.toml @@ -42,7 +42,7 @@ rpc.workspace = true schemars.workspace = true serde.workspace = true settings.workspace = true -settings_ui.workspace = true +keymap_editor.workspace = true smallvec.workspace = true story = { workspace = true, optional = true } telemetry.workspace = true diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index ac5e9201b3be083fef43e58c2e717cb59a0ba185..075b9fcd86276244d154be1aebe904fbfb4a7b6c 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -29,10 +29,10 @@ use gpui::{ IntoElement, MouseButton, ParentElement, Render, StatefulInteractiveElement, Styled, Subscription, WeakEntity, Window, actions, div, }; +use keymap_editor; use onboarding_banner::OnboardingBanner; use project::Project; use settings::Settings as _; -use settings_ui::keybindings; use std::sync::Arc; use theme::ActiveTheme; use title_bar_settings::TitleBarSettings; @@ -684,7 +684,7 @@ impl TitleBar { "Settings Profiles", zed_actions::settings_profile_selector::Toggle.boxed_clone(), ) - .action("Key Bindings", Box::new(keybindings::OpenKeymapEditor)) + .action("Key Bindings", Box::new(keymap_editor::OpenKeymapEditor)) .action( "Themes…", zed_actions::theme_selector::Toggle::default().boxed_clone(), @@ -732,7 +732,7 @@ impl TitleBar { "Settings Profiles", zed_actions::settings_profile_selector::Toggle.boxed_clone(), ) - .action("Key Bindings", Box::new(keybindings::OpenKeymapEditor)) + .action("Key Bindings", Box::new(keymap_editor::OpenKeymapEditor)) .action( "Themes…", zed_actions::theme_selector::Toggle::default().boxed_clone(), diff --git a/crates/title_bar/src/title_bar_settings.rs b/crates/title_bar/src/title_bar_settings.rs index 
a98e984d80e1dbf0c016b8d8e4c6dc609106c081..29d74c8590a63cd8aa75bdaa3655111d76fcf757 100644 --- a/crates/title_bar/src/title_bar_settings.rs +++ b/crates/title_bar/src/title_bar_settings.rs @@ -1,9 +1,10 @@ use db::anyhow; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources}; +use settings::{Settings, SettingsSources, SettingsUi}; -#[derive(Copy, Clone, Deserialize, Debug)] +#[derive(Copy, Clone, Deserialize, Debug, SettingsUi)] +#[settings_ui(group = "Title Bar", path = "title_bar")] pub struct TitleBarSettings { pub show_branch_icon: bool, pub show_onboarding_banner: bool, diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index 9da01e6f444d2284814282f9bf6eecfb0814953d..a5cd909d5b53079d1da49591a5eca21416ba415a 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -39,7 +39,7 @@ use object::Object; use schemars::JsonSchema; use serde::Deserialize; use serde_derive::Serialize; -use settings::{Settings, SettingsSources, SettingsStore, update_settings_file}; +use settings::{Settings, SettingsSources, SettingsStore, SettingsUi, update_settings_file}; use state::{Mode, Operator, RecordedSelection, SearchState, VimGlobals}; use std::{mem, ops::Range, sync::Arc}; use surrounds::SurroundsType; @@ -1774,7 +1774,7 @@ struct CursorShapeSettings { pub insert: Option, } -#[derive(Deserialize)] +#[derive(Deserialize, SettingsUi)] struct VimSettings { pub default_mode: Mode, pub toggle_relative_line_numbers: bool, diff --git a/crates/vim_mode_setting/src/vim_mode_setting.rs b/crates/vim_mode_setting/src/vim_mode_setting.rs index 6f60d3f21fc707abd981c34d7617f4e9bb563477..7fb39ef4f6f10370f1a0fb2cf83dcb3a88b80d81 100644 --- a/crates/vim_mode_setting/src/vim_mode_setting.rs +++ b/crates/vim_mode_setting/src/vim_mode_setting.rs @@ -6,7 +6,7 @@ use anyhow::Result; use gpui::App; -use settings::{Settings, SettingsSources}; +use settings::{Settings, SettingsSources, SettingsUi}; /// Initializes the `vim_mode_setting` 
crate. pub fn init(cx: &mut App) { @@ -17,6 +17,7 @@ pub fn init(cx: &mut App) { /// Whether or not to enable Vim mode. /// /// Default: false +#[derive(SettingsUi)] pub struct VimModeSetting(pub bool); impl Settings for VimModeSetting { @@ -43,6 +44,7 @@ impl Settings for VimModeSetting { /// Whether or not to enable Helix mode. /// /// Default: false +#[derive(SettingsUi)] pub struct HelixModeSetting(pub bool); impl Settings for HelixModeSetting { diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index db91bd82b904b40d0eaf2466689156f03d3723f3..a513f8c9317645469e5d5ca54c3b5351383c1ca3 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -17,7 +17,7 @@ use gpui::{ use project::{Project, ProjectEntryId, ProjectPath}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsLocation, SettingsSources}; +use settings::{Settings, SettingsLocation, SettingsSources, SettingsUi}; use smallvec::SmallVec; use std::{ any::{Any, TypeId}, @@ -49,7 +49,7 @@ impl Default for SaveOptions { } } -#[derive(Deserialize)] +#[derive(Deserialize, SettingsUi)] pub struct ItemSettings { pub git_status: bool, pub close_position: ClosePosition, @@ -59,7 +59,7 @@ pub struct ItemSettings { pub show_close_button: ShowCloseButton, } -#[derive(Deserialize)] +#[derive(Deserialize, SettingsUi)] pub struct PreviewTabsSettings { pub enabled: bool, pub enable_preview_from_file_finder: bool, diff --git a/crates/workspace/src/workspace_settings.rs b/crates/workspace/src/workspace_settings.rs index 0d7fb9bb9c1ae6f8ff4a6644132c4a347da4117d..419e33e54435779012207a024ea49e44a8acb1c2 100644 --- a/crates/workspace/src/workspace_settings.rs +++ b/crates/workspace/src/workspace_settings.rs @@ -6,9 +6,9 @@ use collections::HashMap; use gpui::App; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources}; +use settings::{Settings, SettingsSources, SettingsUi}; 
-#[derive(Deserialize)] +#[derive(Deserialize, SettingsUi)] pub struct WorkspaceSettings { pub active_pane_modifiers: ActivePanelModifiers, pub bottom_dock_layout: BottomDockLayout, @@ -216,7 +216,7 @@ pub struct WorkspaceSettingsContent { pub zoomed_padding: Option, } -#[derive(Deserialize)] +#[derive(Deserialize, SettingsUi)] pub struct TabBarSettings { pub show: bool, pub show_nav_history_buttons: bool, diff --git a/crates/worktree/src/worktree_settings.rs b/crates/worktree/src/worktree_settings.rs index b18d3509beb408c37beaf246a747248d2f17438a..df3a4d35570ad21b80f968539afbe681c58e2a06 100644 --- a/crates/worktree/src/worktree_settings.rs +++ b/crates/worktree/src/worktree_settings.rs @@ -4,10 +4,10 @@ use anyhow::Context as _; use gpui::App; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources}; +use settings::{Settings, SettingsSources, SettingsUi}; use util::paths::PathMatcher; -#[derive(Clone, PartialEq, Eq)] +#[derive(Clone, PartialEq, Eq, SettingsUi)] pub struct WorktreeSettings { pub file_scan_inclusions: PathMatcher, pub file_scan_exclusions: PathMatcher, diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 0ddfe3dde1b57de8f6fb5ae83d1bb3ccef8b12ff..bb46a5a4f65ac76a7cff2a5bc43525db30ed0930 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -131,6 +131,7 @@ serde_json.workspace = true session.workspace = true settings.workspace = true settings_ui.workspace = true +keymap_editor.workspace = true shellexpand.workspace = true smol.workspace = true snippet_provider.workspace = true diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 5e7934c3094755b39535ef054f077dbc9fb180af..e4438792045617498e5c8cd3b52117b1d0b752ef 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -632,6 +632,7 @@ pub fn main() { svg_preview::init(cx); onboarding::init(cx); settings_ui::init(cx); + keymap_editor::init(cx); extensions_ui::init(cx); zeta::init(cx); 
inspector_ui::init(app_state.clone(), cx); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 5a180e4b42705332bd51dffe43943d131a42907f..5797070a39c8a60dc760ac3b82341842bc11d63e 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -1491,7 +1491,7 @@ fn reload_keymaps(cx: &mut App, mut user_key_bindings: Vec) { workspace::NewWindow, )]); // todo: nicer api here? - settings_ui::keybindings::KeymapEventChannel::trigger_keymap_changed(cx); + keymap_editor::KeymapEventChannel::trigger_keymap_changed(cx); } pub fn load_default_keymap(cx: &mut App) { diff --git a/crates/zed/src/zed/app_menus.rs b/crates/zed/src/zed/app_menus.rs index 6c7ab0b37403ae941660da853a83e6c147e5869f..342fd26cb77aa08dcbc346609b3185f3263f0f1d 100644 --- a/crates/zed/src/zed/app_menus.rs +++ b/crates/zed/src/zed/app_menus.rs @@ -1,6 +1,5 @@ use collab_ui::collab_panel; use gpui::{Menu, MenuItem, OsAction}; -use settings_ui::keybindings; use terminal_view::terminal_panel; pub fn app_menus() -> Vec { @@ -17,7 +16,7 @@ pub fn app_menus() -> Vec { name: "Settings".into(), items: vec![ MenuItem::action("Open Settings", super::OpenSettings), - MenuItem::action("Open Key Bindings", keybindings::OpenKeymapEditor), + MenuItem::action("Open Key Bindings", keymap_editor::OpenKeymapEditor), MenuItem::action("Open Default Settings", super::OpenDefaultSettings), MenuItem::action( "Open Default Key Bindings", diff --git a/crates/zlog_settings/src/zlog_settings.rs b/crates/zlog_settings/src/zlog_settings.rs index b58cbcc1433d01ce865580111d1f92c98987bbea..0cdc784489b47d89388edc9ed20aed6f3c2f9959 100644 --- a/crates/zlog_settings/src/zlog_settings.rs +++ b/crates/zlog_settings/src/zlog_settings.rs @@ -3,7 +3,7 @@ use anyhow::Result; use gpui::App; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsStore}; +use settings::{Settings, SettingsStore, SettingsUi}; pub fn init(cx: &mut App) { ZlogSettings::register(cx); @@ -15,7 +15,7 @@ pub fn init(cx: 
&mut App) { .detach(); } -#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] +#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema, SettingsUi)] pub struct ZlogSettings { #[serde(default, flatten)] pub scopes: std::collections::HashMap, From 515282d719416a2b95bfb1461e5796f41e96eae4 Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Fri, 29 Aug 2025 15:16:42 -0600 Subject: [PATCH 20/54] zeta: Add detection of BSD licenses + efficiency improvements + more lenient whitespace handling (#37194) Closes #36564 Release Notes: - Edit Prediction: Added various BSD licenses to open-source licenses eligible for data collection. --- Cargo.lock | 1 + crates/zeta/Cargo.toml | 1 + crates/zeta/src/license_detection.rs | 450 ++++++++---------- crates/zeta/src/license_detection/0bsd.regex | 12 + crates/zeta/src/license_detection/0bsd.txt | 13 + .../{apache.regex => apache-2.0.regex} | 12 +- .../{apache-text => apache-2.0.txt} | 0 .../src/license_detection/bsd-1-clause.regex | 17 + .../src/license_detection/bsd-1-clause.txt | 20 + .../src/license_detection/bsd-2-clause.regex | 22 + .../src/license_detection/bsd-2-clause.txt | 26 + .../src/license_detection/bsd-3-clause.regex | 26 + .../src/license_detection/bsd-3-clause.txt | 29 ++ crates/zeta/src/license_detection/isc.regex | 4 +- crates/zeta/src/license_detection/isc.txt | 15 + crates/zeta/src/license_detection/mit.regex | 4 +- .../license_detection/{mit-text => mit.txt} | 0 .../{upl.regex => upl-1.0.regex} | 4 +- crates/zeta/src/license_detection/upl-1.0.txt | 35 ++ 19 files changed, 436 insertions(+), 255 deletions(-) create mode 100644 crates/zeta/src/license_detection/0bsd.regex create mode 100644 crates/zeta/src/license_detection/0bsd.txt rename crates/zeta/src/license_detection/{apache.regex => apache-2.0.regex} (98%) rename crates/zeta/src/license_detection/{apache-text => apache-2.0.txt} (100%) create mode 100644 
crates/zeta/src/license_detection/bsd-1-clause.regex create mode 100644 crates/zeta/src/license_detection/bsd-1-clause.txt create mode 100644 crates/zeta/src/license_detection/bsd-2-clause.regex create mode 100644 crates/zeta/src/license_detection/bsd-2-clause.txt create mode 100644 crates/zeta/src/license_detection/bsd-3-clause.regex create mode 100644 crates/zeta/src/license_detection/bsd-3-clause.txt create mode 100644 crates/zeta/src/license_detection/isc.txt rename crates/zeta/src/license_detection/{mit-text => mit.txt} (100%) rename crates/zeta/src/license_detection/{upl.regex => upl-1.0.regex} (96%) create mode 100644 crates/zeta/src/license_detection/upl-1.0.txt diff --git a/Cargo.lock b/Cargo.lock index 4c68280de25b878187b3a5627362f6373808734b..84d633dd6f126f1ce86cd73b83f9d1aac23c591e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -20830,6 +20830,7 @@ dependencies = [ "serde", "serde_json", "settings", + "strum 0.27.1", "telemetry", "telemetry_events", "theme", diff --git a/crates/zeta/Cargo.toml b/crates/zeta/Cargo.toml index ee76308ff38089b9553f9a6ba87998ce74480181..05eedd6015d47e0c020266f27da8d63850d162e3 100644 --- a/crates/zeta/Cargo.toml +++ b/crates/zeta/Cargo.toml @@ -46,6 +46,7 @@ release_channel.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true +strum.workspace = true telemetry.workspace = true telemetry_events.workspace = true theme.workspace = true diff --git a/crates/zeta/src/license_detection.rs b/crates/zeta/src/license_detection.rs index 022b2d19de433e9087454fec0874c0d1b31ae6c3..d6b8ef10a3363f49f92607e30c6059ffee573a65 100644 --- a/crates/zeta/src/license_detection.rs +++ b/crates/zeta/src/license_detection.rs @@ -1,5 +1,6 @@ use std::{ collections::BTreeSet, + fmt::{Display, Formatter}, path::{Path, PathBuf}, sync::{Arc, LazyLock}, }; @@ -10,6 +11,7 @@ use gpui::{App, AppContext as _, Entity, Subscription, Task}; use postage::watch; use project::Worktree; use regex::Regex; +use strum::VariantArray; 
use util::ResultExt as _; use worktree::ChildEntriesOptions; @@ -17,8 +19,14 @@ use worktree::ChildEntriesOptions; static LICENSE_FILE_NAME_REGEX: LazyLock = LazyLock::new(|| { regex::bytes::RegexBuilder::new( "^ \ - (?: license | licence) \ - (?: [\\-._] (?: apache | isc | mit | upl))? \ + (?: license | licence)? \ + (?: [\\-._]? \ + (?: apache (?: [\\-._] (?: 2.0 | 2 ))? | \ + 0? bsd (?: [\\-._] [0123])? (?: [\\-._] clause)? | \ + isc | \ + mit | \ + upl))? \ + (?: [\\-._]? (?: license | licence))? \ (?: \\.txt | \\.md)? \ $", ) @@ -28,40 +36,93 @@ static LICENSE_FILE_NAME_REGEX: LazyLock = LazyLock::new(|| .unwrap() }); -fn is_license_eligible_for_data_collection(license: &str) -> bool { - static LICENSE_REGEXES: LazyLock> = LazyLock::new(|| { - [ - include_str!("license_detection/apache.regex"), - include_str!("license_detection/isc.regex"), - include_str!("license_detection/mit.regex"), - include_str!("license_detection/upl.regex"), - ] - .into_iter() - .map(|pattern| Regex::new(&canonicalize_license_text(pattern)).unwrap()) - .collect() +#[derive(Debug, Clone, Copy, Eq, PartialEq, VariantArray)] +pub enum OpenSourceLicense { + Apache2_0, + BSD0Clause, + BSD1Clause, + BSD2Clause, + BSD3Clause, + ISC, + MIT, + UPL1_0, +} + +impl Display for OpenSourceLicense { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.spdx_identifier()) + } +} + +impl OpenSourceLicense { + pub fn spdx_identifier(&self) -> &'static str { + match self { + OpenSourceLicense::Apache2_0 => "apache-2.0", + OpenSourceLicense::BSD0Clause => "0bsd", + OpenSourceLicense::BSD1Clause => "bsd-1-clause", + OpenSourceLicense::BSD2Clause => "bsd-2-clause", + OpenSourceLicense::BSD3Clause => "bsd-3-clause", + OpenSourceLicense::ISC => "isc", + OpenSourceLicense::MIT => "mit", + OpenSourceLicense::UPL1_0 => "upl-1.0", + } + } + + pub fn regex(&self) -> &'static str { + match self { + OpenSourceLicense::Apache2_0 => include_str!("license_detection/apache-2.0.regex"), + 
OpenSourceLicense::BSD0Clause => include_str!("license_detection/0bsd.regex"), + OpenSourceLicense::BSD1Clause => include_str!("license_detection/bsd-1-clause.regex"), + OpenSourceLicense::BSD2Clause => include_str!("license_detection/bsd-2-clause.regex"), + OpenSourceLicense::BSD3Clause => include_str!("license_detection/bsd-3-clause.regex"), + OpenSourceLicense::ISC => include_str!("license_detection/isc.regex"), + OpenSourceLicense::MIT => include_str!("license_detection/mit.regex"), + OpenSourceLicense::UPL1_0 => include_str!("license_detection/upl-1.0.regex"), + } + } +} + +fn detect_license(license: &str) -> Option { + static LICENSE_REGEX: LazyLock = LazyLock::new(|| { + let mut regex_string = String::new(); + let mut is_first = true; + for license in OpenSourceLicense::VARIANTS { + if is_first { + regex_string.push_str("^(?:("); + is_first = false; + } else { + regex_string.push_str(")|("); + } + regex_string.push_str(&canonicalize_license_text(license.regex())); + } + regex_string.push_str("))$"); + let regex = Regex::new(®ex_string).unwrap(); + assert_eq!(regex.captures_len(), OpenSourceLicense::VARIANTS.len() + 1); + regex }); - let license = canonicalize_license_text(license); - LICENSE_REGEXES.iter().any(|regex| regex.is_match(&license)) + LICENSE_REGEX + .captures(&canonicalize_license_text(license)) + .and_then(|captures| { + let license = OpenSourceLicense::VARIANTS + .iter() + .enumerate() + .find(|(index, _)| captures.get(index + 1).is_some()) + .map(|(_, license)| *license); + if license.is_none() { + log::error!("bug: open source license regex matched without any capture groups"); + } + license + }) } /// Canonicalizes the whitespace of license text and license regexes. 
fn canonicalize_license_text(license: &str) -> String { - static PARAGRAPH_SEPARATOR_REGEX: LazyLock = - LazyLock::new(|| Regex::new(r"\s*\n\s*\n\s*").unwrap()); - - PARAGRAPH_SEPARATOR_REGEX - .split(license) - .filter(|paragraph| !paragraph.trim().is_empty()) - .map(|paragraph| { - paragraph - .trim() - .split_whitespace() - .collect::>() - .join(" ") - }) + license + .split_ascii_whitespace() .collect::>() - .join("\n\n") + .join(" ") + .to_ascii_lowercase() } pub enum LicenseDetectionWatcher { @@ -157,7 +218,7 @@ impl LicenseDetectionWatcher { return None; } let text = fs.load(&abs_path).await.log_err()?; - let is_eligible = is_license_eligible_for_data_collection(&text); + let is_eligible = detect_license(&text).is_some(); if is_eligible { log::debug!( "`{abs_path:?}` matches a license that is eligible for data collection (if enabled)" @@ -193,193 +254,47 @@ mod tests { use super::*; - const MIT_LICENSE: &str = include_str!("license_detection/mit-text"); - const APACHE_LICENSE: &str = include_str!("license_detection/apache-text"); - - #[test] - fn test_mit_positive_detection() { - assert!(is_license_eligible_for_data_collection(MIT_LICENSE)); - } - - #[test] - fn test_mit_negative_detection() { - let example_license = format!( - r#"{MIT_LICENSE} - - This project is dual licensed under the MIT License and the Apache License, Version 2.0."# - ); - assert!(!is_license_eligible_for_data_collection(&example_license)); - } - - #[test] - fn test_isc_positive_detection() { - let example_license = unindent( - r#" - ISC License - - Copyright (c) 2024, John Doe - - Permission to use, copy, modify, and/or distribute this software for any - purpose with or without fee is hereby granted, provided that the above - copyright notice and this permission notice appear in all copies. - - THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - "# - .trim(), - ); - - assert!(is_license_eligible_for_data_collection(&example_license)); - } - - #[test] - fn test_isc_negative_detection() { - let example_license = unindent( - r#" - ISC License - - Copyright (c) 2024, John Doe - - Permission to use, copy, modify, and/or distribute this software for any - purpose with or without fee is hereby granted, provided that the above - copyright notice and this permission notice appear in all copies. - - THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - - This project is dual licensed under the ISC License and the MIT License. 
- "# - .trim(), - ); - - assert!(!is_license_eligible_for_data_collection(&example_license)); - } - - #[test] - fn test_upl_positive_detection() { - let example_license = unindent( - r#" - Copyright (c) 2025, John Doe - - The Universal Permissive License (UPL), Version 1.0 - - Subject to the condition set forth below, permission is hereby granted to any person - obtaining a copy of this software, associated documentation and/or data (collectively - the "Software"), free of charge and under any and all copyright rights in the - Software, and any and all patent rights owned or freely licensable by each licensor - hereunder covering either (i) the unmodified Software as contributed to or provided - by such licensor, or (ii) the Larger Works (as defined below), to deal in both - - (a) the Software, and - - (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if one is - included with the Software (each a "Larger Work" to which the Software is - contributed by such licensors), - - without restriction, including without limitation the rights to copy, create - derivative works of, display, perform, and distribute the Software and make, use, - sell, offer for sale, import, export, have made, and have sold the Software and the - Larger Work(s), and to sublicense the foregoing rights on either these or other - terms. - - This license is subject to the following condition: - - The above copyright notice and either this complete permission notice or at a minimum - a reference to the UPL must be included in all copies or substantial portions of the - Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, - INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A - PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT - HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF - CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE - OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - "# - .trim(), - ); - - assert!(is_license_eligible_for_data_collection(&example_license)); + const APACHE_2_0_TXT: &str = include_str!("license_detection/apache-2.0.txt"); + const ISC_TXT: &str = include_str!("license_detection/isc.txt"); + const MIT_TXT: &str = include_str!("license_detection/mit.txt"); + const UPL_1_0_TXT: &str = include_str!("license_detection/upl-1.0.txt"); + const BSD_0_CLAUSE_TXT: &str = include_str!("license_detection/0bsd.txt"); + const BSD_1_CLAUSE_TXT: &str = include_str!("license_detection/bsd-1-clause.txt"); + const BSD_2_CLAUSE_TXT: &str = include_str!("license_detection/bsd-2-clause.txt"); + const BSD_3_CLAUSE_TXT: &str = include_str!("license_detection/bsd-3-clause.txt"); + + #[track_caller] + fn assert_matches_license(text: &str, license: OpenSourceLicense) { + let license_regex = + Regex::new(&format!("^{}$", canonicalize_license_text(license.regex()))).unwrap(); + assert!(license_regex.is_match(&canonicalize_license_text(text))); + assert_eq!(detect_license(text), Some(license)); } #[test] - fn test_upl_negative_detection() { - let example_license = unindent( - r#" - UPL License - - Copyright (c) 2024, John Doe - - The Universal Permissive License (UPL), Version 1.0 - - Subject to the condition set forth below, permission is hereby granted to any person - obtaining a copy of this software, associated documentation and/or data (collectively - the "Software"), free of charge and under any and all copyright rights in the - Software, and any and all patent rights owned or freely licensable by each licensor - hereunder covering either (i) the unmodified Software as contributed to or provided - by such licensor, or (ii) the Larger Works (as defined below), to deal in both - 
- (a) the Software, and - - (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if one is - included with the Software (each a "Larger Work" to which the Software is - contributed by such licensors), - - without restriction, including without limitation the rights to copy, create - derivative works of, display, perform, and distribute the Software and make, use, - sell, offer for sale, import, export, have made, and have sold the Software and the - Larger Work(s), and to sublicense the foregoing rights on either these or other - terms. - - This license is subject to the following condition: - - The above copyright notice and either this complete permission notice or at a minimum - a reference to the UPL must be included in all copies or substantial portions of the - Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, - INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A - PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT - HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF - CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE - OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - This project is dual licensed under the ISC License and the MIT License. - "# - .trim(), - ); - - assert!(!is_license_eligible_for_data_collection(&example_license)); + fn test_0bsd_positive_detection() { + assert_matches_license(BSD_0_CLAUSE_TXT, OpenSourceLicense::BSD0Clause); } #[test] fn test_apache_positive_detection() { - assert!(is_license_eligible_for_data_collection(APACHE_LICENSE)); + assert_matches_license(APACHE_2_0_TXT, OpenSourceLicense::Apache2_0); let license_with_appendix = format!( - r#"{APACHE_LICENSE} + r#"{APACHE_2_0_TXT} END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. 
- To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. Copyright [yyyy] [name of copyright owner] @@ -395,9 +310,7 @@ mod tests { See the License for the specific language governing permissions and limitations under the License."# ); - assert!(is_license_eligible_for_data_collection( - &license_with_appendix - )); + assert_matches_license(&license_with_appendix, OpenSourceLicense::Apache2_0); // Sometimes people fill in the appendix with copyright info. let license_with_copyright = license_with_appendix.replace( @@ -405,16 +318,79 @@ mod tests { "Copyright 2025 John Doe", ); assert!(license_with_copyright != license_with_appendix); - assert!(is_license_eligible_for_data_collection( - &license_with_copyright - )); + assert_matches_license(&license_with_copyright, OpenSourceLicense::Apache2_0); } #[test] fn test_apache_negative_detection() { - assert!(!is_license_eligible_for_data_collection(&format!( - "{APACHE_LICENSE}\n\nThe terms in this license are void if P=NP." 
- ))); + assert!( + detect_license(&format!( + "{APACHE_2_0_TXT}\n\nThe terms in this license are void if P=NP." + )) + .is_none() + ); + } + + #[test] + fn test_bsd_1_clause_positive_detection() { + assert_matches_license(BSD_1_CLAUSE_TXT, OpenSourceLicense::BSD1Clause); + } + + #[test] + fn test_bsd_2_clause_positive_detection() { + assert_matches_license(BSD_2_CLAUSE_TXT, OpenSourceLicense::BSD2Clause); + } + + #[test] + fn test_bsd_3_clause_positive_detection() { + assert_matches_license(BSD_3_CLAUSE_TXT, OpenSourceLicense::BSD3Clause); + } + + #[test] + fn test_isc_positive_detection() { + assert_matches_license(ISC_TXT, OpenSourceLicense::ISC); + } + + #[test] + fn test_isc_negative_detection() { + let license_text = format!( + r#"{ISC_TXT} + + This project is dual licensed under the ISC License and the MIT License."# + ); + + assert!(detect_license(&license_text).is_none()); + } + + #[test] + fn test_mit_positive_detection() { + assert_matches_license(MIT_TXT, OpenSourceLicense::MIT); + } + + #[test] + fn test_mit_negative_detection() { + let license_text = format!( + r#"{MIT_TXT} + + This project is dual licensed under the MIT License and the Apache License, Version 2.0."# + ); + assert!(detect_license(&license_text).is_none()); + } + + #[test] + fn test_upl_positive_detection() { + assert_matches_license(UPL_1_0_TXT, OpenSourceLicense::UPL1_0); + } + + #[test] + fn test_upl_negative_detection() { + let license_text = format!( + r#"{UPL_1_0_TXT} + + This project is dual licensed under the UPL License and the MIT License."# + ); + + assert!(detect_license(&license_text).is_none()); } #[test] @@ -439,10 +415,22 @@ mod tests { assert!(LICENSE_FILE_NAME_REGEX.is_match(b"LICENSE-ISC")); assert!(LICENSE_FILE_NAME_REGEX.is_match(b"LICENSE-UPL")); + // Test with "license" coming after + assert!(LICENSE_FILE_NAME_REGEX.is_match(b"APACHE-LICENSE")); + + // Test version numbers + assert!(LICENSE_FILE_NAME_REGEX.is_match(b"APACHE-2")); + 
assert!(LICENSE_FILE_NAME_REGEX.is_match(b"APACHE-2.0")); + assert!(LICENSE_FILE_NAME_REGEX.is_match(b"BSD-1")); + assert!(LICENSE_FILE_NAME_REGEX.is_match(b"BSD-2")); + assert!(LICENSE_FILE_NAME_REGEX.is_match(b"BSD-3")); + assert!(LICENSE_FILE_NAME_REGEX.is_match(b"BSD-3-CLAUSE")); + // Test combinations assert!(LICENSE_FILE_NAME_REGEX.is_match(b"LICENSE-MIT.txt")); assert!(LICENSE_FILE_NAME_REGEX.is_match(b"LICENCE.ISC.md")); assert!(LICENSE_FILE_NAME_REGEX.is_match(b"license_upl")); + assert!(LICENSE_FILE_NAME_REGEX.is_match(b"LICENSE.APACHE.2.0")); // Test case insensitive assert!(LICENSE_FILE_NAME_REGEX.is_match(b"License")); @@ -461,39 +449,17 @@ mod tests { assert!(!LICENSE_FILE_NAME_REGEX.is_match(b"LICENSE.old")); assert!(!LICENSE_FILE_NAME_REGEX.is_match(b"LICENSE-GPL")); assert!(!LICENSE_FILE_NAME_REGEX.is_match(b"LICENSEABC")); - assert!(!LICENSE_FILE_NAME_REGEX.is_match(b"")); } #[test] fn test_canonicalize_license_text() { - // Test basic whitespace normalization - let input = "Line 1\n Line 2 \n\n\n Line 3 "; - let expected = "Line 1 Line 2\n\nLine 3"; - assert_eq!(canonicalize_license_text(input), expected); - - // Test paragraph separation - let input = "Paragraph 1\nwith multiple lines\n\n\n\nParagraph 2\nwith more lines"; - let expected = "Paragraph 1 with multiple lines\n\nParagraph 2 with more lines"; - assert_eq!(canonicalize_license_text(input), expected); - - // Test empty paragraphs are filtered out - let input = "\n\n\nParagraph 1\n\n\n \n\n\nParagraph 2\n\n\n"; - let expected = "Paragraph 1\n\nParagraph 2"; - assert_eq!(canonicalize_license_text(input), expected); - - // Test single line - let input = " Single line with spaces "; - let expected = "Single line with spaces"; - assert_eq!(canonicalize_license_text(input), expected); - - // Test multiple consecutive spaces within lines - let input = "Word1 Word2\n\nWord3 Word4"; - let expected = "Word1 Word2\n\nWord3 Word4"; + let input = " Paragraph 1\nwith multiple lines\n\n\n\nParagraph 
2\nwith more lines\n "; + let expected = "paragraph 1 with multiple lines paragraph 2 with more lines"; assert_eq!(canonicalize_license_text(input), expected); // Test tabs and mixed whitespace let input = "Word1\t\tWord2\n\n Word3\r\n\r\n\r\nWord4 "; - let expected = "Word1 Word2\n\nWord3\n\nWord4"; + let expected = "word1 word2 word3 word4"; assert_eq!(canonicalize_license_text(input), expected); } @@ -532,9 +498,7 @@ mod tests { .trim(), ); - assert!(is_license_eligible_for_data_collection( - &mit_with_weird_spacing - )); + assert_matches_license(&mit_with_weird_spacing, OpenSourceLicense::MIT); } fn init_test(cx: &mut TestAppContext) { @@ -590,14 +554,14 @@ mod tests { assert!(matches!(watcher, LicenseDetectionWatcher::Local { .. })); assert!(!watcher.is_project_open_source()); - fs.write(Path::new("/root/LICENSE-MIT"), MIT_LICENSE.as_bytes()) + fs.write(Path::new("/root/LICENSE-MIT"), MIT_TXT.as_bytes()) .await .unwrap(); cx.background_executor.run_until_parked(); assert!(watcher.is_project_open_source()); - fs.write(Path::new("/root/LICENSE-APACHE"), APACHE_LICENSE.as_bytes()) + fs.write(Path::new("/root/LICENSE-APACHE"), APACHE_2_0_TXT.as_bytes()) .await .unwrap(); @@ -630,7 +594,7 @@ mod tests { let fs = FakeFs::new(cx.background_executor.clone()); fs.insert_tree( "/root", - json!({ "main.rs": "fn main() {}", "LICENSE-MIT": MIT_LICENSE }), + json!({ "main.rs": "fn main() {}", "LICENSE-MIT": MIT_TXT }), ) .await; diff --git a/crates/zeta/src/license_detection/0bsd.regex b/crates/zeta/src/license_detection/0bsd.regex new file mode 100644 index 0000000000000000000000000000000000000000..7928a8d181a48ad54bb825ac120aaa4ef53ba8ef --- /dev/null +++ b/crates/zeta/src/license_detection/0bsd.regex @@ -0,0 +1,12 @@ +.* + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted\. 
+ +THE SOFTWARE IS PROVIDED “AS IS” AND THE AUTHOR DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS\. IN NO EVENT SHALL THE AUTHOR BE LIABLE +FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY +DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN +AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE\. diff --git a/crates/zeta/src/license_detection/0bsd.txt b/crates/zeta/src/license_detection/0bsd.txt new file mode 100644 index 0000000000000000000000000000000000000000..d3061a372fda562b5a1d0a85bc56c67fc0d7d3fb --- /dev/null +++ b/crates/zeta/src/license_detection/0bsd.txt @@ -0,0 +1,13 @@ +Zero-Clause BSD +============= + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED “AS IS” AND THE AUTHOR DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE +FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY +DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN +AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/crates/zeta/src/license_detection/apache.regex b/crates/zeta/src/license_detection/apache-2.0.regex similarity index 98% rename from crates/zeta/src/license_detection/apache.regex rename to crates/zeta/src/license_detection/apache-2.0.regex index e200e063c9d35f6e56d6f808190fc4206e7ea02c..dcf12fe28915f94e1f5d8de81285ea49dcc10f8e 100644 --- a/crates/zeta/src/license_detection/apache.regex +++ b/crates/zeta/src/license_detection/apache-2.0.regex @@ -1,4 +1,4 @@ - ^Apache License + Apache License Version 2\.0, January 2004 http://www\.apache\.org/licenses/ @@ -171,9 +171,9 @@ of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability\.(:? + of your accepting any such warranty or additional liability\.(?: - END OF TERMS AND CONDITIONS)?(:? + END OF TERMS AND CONDITIONS)?(?: APPENDIX: How to apply the Apache License to your work\. @@ -184,9 +184,9 @@ comment syntax for the file format\. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier - identification within third\-party archives\.)?(:? + identification within third\-party archives\.)?(?: - Copyright .*)?(:? + Copyright .*)?(?: Licensed under the Apache License, Version 2\.0 \(the "License"\); you may not use this file except in compliance with the License\. @@ -198,4 +198,4 @@ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied\. See the License for the specific language governing permissions and - limitations under the License\.)?$ + limitations under the License\.)? 
diff --git a/crates/zeta/src/license_detection/apache-text b/crates/zeta/src/license_detection/apache-2.0.txt similarity index 100% rename from crates/zeta/src/license_detection/apache-text rename to crates/zeta/src/license_detection/apache-2.0.txt diff --git a/crates/zeta/src/license_detection/bsd-1-clause.regex b/crates/zeta/src/license_detection/bsd-1-clause.regex new file mode 100644 index 0000000000000000000000000000000000000000..5e73e5c6d0e67cd9e4899e1a44bd064f11f3e3dc --- /dev/null +++ b/crates/zeta/src/license_detection/bsd-1-clause.regex @@ -0,0 +1,17 @@ +.*Copyright.* + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +(?:1\.|\*)? Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer\. + +THIS SOFTWARE IS PROVIDED BY .* “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, +INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL .* BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES \(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION\) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR +TORT \(INCLUDING NEGLIGENCE OR OTHERWISE\) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE\. diff --git a/crates/zeta/src/license_detection/bsd-1-clause.txt b/crates/zeta/src/license_detection/bsd-1-clause.txt new file mode 100644 index 0000000000000000000000000000000000000000..1ae6f9d5ff16f1783ac1d62f438dc8e566414cd3 --- /dev/null +++ b/crates/zeta/src/license_detection/bsd-1-clause.txt @@ -0,0 +1,20 @@ +Copyright (c) 2024 John Doe +Some Organization +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this list +of conditions and the following disclaimer. + +THIS SOFTWARE IS PROVIDED BY [Name of Organization] “AS IS” AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT +SHALL [Name of Organisation] BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR +BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY +OF SUCH DAMAGE. diff --git a/crates/zeta/src/license_detection/bsd-2-clause.regex b/crates/zeta/src/license_detection/bsd-2-clause.regex new file mode 100644 index 0000000000000000000000000000000000000000..93d22652fb11ba81d55e7d2d38e1b42bdce243b6 --- /dev/null +++ b/crates/zeta/src/license_detection/bsd-2-clause.regex @@ -0,0 +1,22 @@ +.*Copyright.* + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +(?:1\.|\*)? Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer\. + +(?:2\.|\*)? Redistributions in binary form must reproduce the above copyright +notice, this list of conditions and the following disclaimer in the +documentation and/or other materials provided with the distribution\. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED\. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES \(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION\) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR +TORT \(INCLUDING NEGLIGENCE OR OTHERWISE\) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE\. diff --git a/crates/zeta/src/license_detection/bsd-2-clause.txt b/crates/zeta/src/license_detection/bsd-2-clause.txt new file mode 100644 index 0000000000000000000000000000000000000000..bbf946465e7f0f7f24b73bbf944bbd699f1b8e14 --- /dev/null +++ b/crates/zeta/src/license_detection/bsd-2-clause.txt @@ -0,0 +1,26 @@ +Copyright (c) 2024 + +John Doe (john.doe@gmail.com) + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/crates/zeta/src/license_detection/bsd-3-clause.regex b/crates/zeta/src/license_detection/bsd-3-clause.regex new file mode 100644 index 0000000000000000000000000000000000000000..b31443de64283d0d66135b73e57eaf9bd19b88a3 --- /dev/null +++ b/crates/zeta/src/license_detection/bsd-3-clause.regex @@ -0,0 +1,26 @@ +.*Copyright.* + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +(?:1\.|\*)? Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer\. + +(?:2\.|\*)? Redistributions in binary form must reproduce the above copyright +notice, this list of conditions and the following disclaimer in the +documentation and/or other materials provided with the distribution\. + +(?:3\.|\*)? Neither the name of the copyright holder nor the names of its +contributors may be used to endorse or promote products derived from this +software without specific prior written permission\. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED\. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES \(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION\) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR +TORT \(INCLUDING NEGLIGENCE OR OTHERWISE\) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE\. diff --git a/crates/zeta/src/license_detection/bsd-3-clause.txt b/crates/zeta/src/license_detection/bsd-3-clause.txt new file mode 100644 index 0000000000000000000000000000000000000000..0edcde7462648aaee95c558e1eec94dba303de16 --- /dev/null +++ b/crates/zeta/src/license_detection/bsd-3-clause.txt @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2025, John Doe +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its contributors +may be used to endorse or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/crates/zeta/src/license_detection/isc.regex b/crates/zeta/src/license_detection/isc.regex index 63c6126bcea79e5103656788cb28a5b2b6faec22..ddaece5375fc17455e8640bb47a807d5cd347f5b 100644 --- a/crates/zeta/src/license_detection/isc.regex +++ b/crates/zeta/src/license_detection/isc.regex @@ -1,4 +1,4 @@ -^.*ISC License.* +.*ISC License.* Copyright.* @@ -12,4 +12,4 @@ MERCHANTABILITY AND FITNESS\. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE\.$ +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE\. diff --git a/crates/zeta/src/license_detection/isc.txt b/crates/zeta/src/license_detection/isc.txt new file mode 100644 index 0000000000000000000000000000000000000000..97fda7f97515bf3f2010eaf5f93f07cda371a14c --- /dev/null +++ b/crates/zeta/src/license_detection/isc.txt @@ -0,0 +1,15 @@ +ISC License + +Copyright (c) 2024, John Doe + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/crates/zeta/src/license_detection/mit.regex b/crates/zeta/src/license_detection/mit.regex index deda8f0352270bb09ab4ad3631fd35246c89aa9a..43130424c5fe5f73d11ddda5d5c821bc6cb86afe 100644 --- a/crates/zeta/src/license_detection/mit.regex +++ b/crates/zeta/src/license_detection/mit.regex @@ -1,4 +1,4 @@ -^.*MIT License.* +.*MIT License.* Copyright.* @@ -18,4 +18,4 @@ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT\. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE\.$ +SOFTWARE\. 
diff --git a/crates/zeta/src/license_detection/mit-text b/crates/zeta/src/license_detection/mit.txt similarity index 100% rename from crates/zeta/src/license_detection/mit-text rename to crates/zeta/src/license_detection/mit.txt diff --git a/crates/zeta/src/license_detection/upl.regex b/crates/zeta/src/license_detection/upl-1.0.regex similarity index 96% rename from crates/zeta/src/license_detection/upl.regex rename to crates/zeta/src/license_detection/upl-1.0.regex index 34ba2a64c66abb553ca1721d52c3e1d2752f5076..0959f729716af4714ae9f41c92e1480d276cdeab 100644 --- a/crates/zeta/src/license_detection/upl.regex +++ b/crates/zeta/src/license_detection/upl-1.0.regex @@ -1,4 +1,4 @@ -^Copyright.* +Copyright.* The Universal Permissive License.* @@ -32,4 +32,4 @@ INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT\. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE -OR THE USE OR OTHER DEALINGS IN THE SOFTWARE\.$ +OR THE USE OR OTHER DEALINGS IN THE SOFTWARE\. 
diff --git a/crates/zeta/src/license_detection/upl-1.0.txt b/crates/zeta/src/license_detection/upl-1.0.txt new file mode 100644 index 0000000000000000000000000000000000000000..6193e80270967eee149b9ae7b3c392ed1d45cf15 --- /dev/null +++ b/crates/zeta/src/license_detection/upl-1.0.txt @@ -0,0 +1,35 @@ +Copyright (c) 2025, John Doe + +The Universal Permissive License (UPL), Version 1.0 + +Subject to the condition set forth below, permission is hereby granted to any person +obtaining a copy of this software, associated documentation and/or data (collectively +the "Software"), free of charge and under any and all copyright rights in the +Software, and any and all patent rights owned or freely licensable by each licensor +hereunder covering either (i) the unmodified Software as contributed to or provided +by such licensor, or (ii) the Larger Works (as defined below), to deal in both + +(a) the Software, and + +(b) any piece of software and/or hardware listed in the lrgrwrks.txt file if one is + included with the Software (each a "Larger Work" to which the Software is + contributed by such licensors), + +without restriction, including without limitation the rights to copy, create +derivative works of, display, perform, and distribute the Software and make, use, +sell, offer for sale, import, export, have made, and have sold the Software and the +Larger Work(s), and to sublicense the foregoing rights on either these or other +terms. + +This license is subject to the following condition: + +The above copyright notice and either this complete permission notice or at a minimum +a reference to the UPL must be included in all copies or substantial portions of the +Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF +CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE +OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. From db508bbbe2fe41507b2930d19effbecd25ea84c4 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Fri, 29 Aug 2025 17:29:58 -0400 Subject: [PATCH 21/54] docs: Remove MSYS2 instructions --- docs/src/development/windows.md | 15 +-------------- 1 file changed, 1 insertion(+), 14 deletions(-) diff --git a/docs/src/development/windows.md b/docs/src/development/windows.md index 551d5f9f21129d7fbe774c8265e385150ab0f0b0..45e8ea911bb2add362d2f53bc46e9674535663e2 100644 --- a/docs/src/development/windows.md +++ b/docs/src/development/windows.md @@ -114,20 +114,7 @@ cargo test --workspace ## Installing from msys2 -[MSYS2](https://msys2.org/) distribution provides Zed as a package [mingw-w64-zed](https://packages.msys2.org/base/mingw-w64-zed). The package is available for UCRT64, CLANG64 and CLANGARM64 repositories. To download it, run - -```sh -pacman -Syu -pacman -S $MINGW_PACKAGE_PREFIX-zed -``` - -You can see the [build script](https://github.com/msys2/MINGW-packages/blob/master/mingw-w64-zed/PKGBUILD) for more details on build process. - -> Please, report any issue in [msys2/MINGW-packages/issues](https://github.com/msys2/MINGW-packages/issues?q=is%3Aissue+is%3Aopen+zed) first. - -See also MSYS2 [documentation page](https://www.msys2.org/docs/ides-editors). - -Note that `collab` is not supported for MSYS2. +Zed does not support unofficial MSYS2 Zed packages built for Mingw-w64. Please report any issues you may have with [mingw-w64-zed](https://packages.msys2.org/base/mingw-w64-zed) to [msys2/MINGW-packages/issues](https://github.com/msys2/MINGW-packages/issues?q=is%3Aissue+is%3Aopen+zed). 
## Troubleshooting From bdedb18c300e71086a63dae1cacf3fe87c885fcf Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Fri, 29 Aug 2025 17:36:22 -0400 Subject: [PATCH 22/54] docs: Fix msys2 (#37199) I accidentally pushed https://github.com/zed-industries/zed/commit/db508bbbe2fe41507b2930d19effbecd25ea84c4 to main instead of to a branch. That broke tests. Release Notes: - N/A --- docs/src/development/windows.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/development/windows.md b/docs/src/development/windows.md index 45e8ea911bb2add362d2f53bc46e9674535663e2..a4ad220bcc859d7d49edec7f967537ee4de2418a 100644 --- a/docs/src/development/windows.md +++ b/docs/src/development/windows.md @@ -114,7 +114,7 @@ cargo test --workspace ## Installing from msys2 -Zed does not support unofficial MSYS2 Zed packages built for Mingw-w64. Please report any issues you may have with [mingw-w64-zed](https://packages.msys2.org/base/mingw-w64-zed) to [msys2/MINGW-packages/issues](https://github.com/msys2/MINGW-packages/issues?q=is%3Aissue+is%3Aopen+zed). +Zed does not support unofficial MSYS2 Zed packages built for Mingw-w64. Please report any issues you may have with [mingw-w64-zed](https://packages.msys2.org/base/mingw-w64-zed) to [msys2/MINGW-packages/issues](https://github.com/msys2/MINGW-packages/issues?q=is%3Aissue+is%3Aopen+zed). ## Troubleshooting From a70cf3f1d432462f164fbc4b4de187bc7b52e31d Mon Sep 17 00:00:00 2001 From: Shardul Vaidya <31039336+5herlocked@users.noreply.github.com> Date: Fri, 29 Aug 2025 18:13:06 -0400 Subject: [PATCH 23/54] bedrock: Inference Config updates (#35808) Fixes #36866 - Updated internal naming for Claude 4 models to be consistent. - Corrected max output tokens for Anthropic Bedrock models to match docs Shoutout to @tlehn for noticing the bug, and finding the resolution. 
Release Notes: - bedrock: Fixed inference config errors causing Opus 4 Thinking and Opus 4.1 Thinking to fail (thanks [@tlehn](https://github.com/tlehn) and [@5herlocked](https://github.com/5herlocked)) - bedrock: Fixed an issue which prevented Rules / System prompts from functioning with Bedrock models (thanks [@tlehn](https://github.com/tlehn) and [@5herlocked](https://github.com/5herlocked)) --- crates/bedrock/src/bedrock.rs | 18 +++++++++++++++++- crates/bedrock/src/models.rs | 28 +++++++++++++--------------- 2 files changed, 30 insertions(+), 16 deletions(-) diff --git a/crates/bedrock/src/bedrock.rs b/crates/bedrock/src/bedrock.rs index c8315d4201a46d5ac47825ff40aed3829f191d87..ec0b4070906fdfd31195668312b3e7b425cd28ee 100644 --- a/crates/bedrock/src/bedrock.rs +++ b/crates/bedrock/src/bedrock.rs @@ -3,6 +3,7 @@ mod models; use anyhow::{Context, Error, Result, anyhow}; use aws_sdk_bedrockruntime as bedrock; pub use aws_sdk_bedrockruntime as bedrock_client; +use aws_sdk_bedrockruntime::types::InferenceConfiguration; pub use aws_sdk_bedrockruntime::types::{ AnyToolChoice as BedrockAnyToolChoice, AutoToolChoice as BedrockAutoToolChoice, ContentBlock as BedrockInnerContent, Tool as BedrockTool, ToolChoice as BedrockToolChoice, @@ -17,7 +18,8 @@ pub use bedrock::types::{ ConverseOutput as BedrockResponse, ConverseStreamOutput as BedrockStreamingResponse, ImageBlock as BedrockImageBlock, Message as BedrockMessage, ReasoningContentBlock as BedrockThinkingBlock, ReasoningTextBlock as BedrockThinkingTextBlock, - ResponseStream as BedrockResponseStream, ToolResultBlock as BedrockToolResultBlock, + ResponseStream as BedrockResponseStream, SystemContentBlock as BedrockSystemContentBlock, + ToolResultBlock as BedrockToolResultBlock, ToolResultContentBlock as BedrockToolResultContentBlock, ToolResultStatus as BedrockToolResultStatus, ToolUseBlock as BedrockToolUseBlock, }; @@ -58,6 +60,20 @@ pub async fn stream_completion( response = response.set_tool_config(request.tools);
} + let inference_config = InferenceConfiguration::builder() + .max_tokens(request.max_tokens as i32) + .set_temperature(request.temperature) + .set_top_p(request.top_p) + .build(); + + response = response.inference_config(inference_config); + + if let Some(system) = request.system { + if !system.is_empty() { + response = response.system(BedrockSystemContentBlock::Text(system)); + } + } + let output = response .send() .await diff --git a/crates/bedrock/src/models.rs b/crates/bedrock/src/models.rs index 69d2ffb84569ef848f88de47f5394a6b25b18e02..c3a793d69d086a8a8c607d34debc5a7034f33f32 100644 --- a/crates/bedrock/src/models.rs +++ b/crates/bedrock/src/models.rs @@ -151,12 +151,12 @@ impl Model { pub fn id(&self) -> &str { match self { - Model::ClaudeSonnet4 => "claude-4-sonnet", - Model::ClaudeSonnet4Thinking => "claude-4-sonnet-thinking", - Model::ClaudeOpus4 => "claude-4-opus", - Model::ClaudeOpus4_1 => "claude-4-opus-1", - Model::ClaudeOpus4Thinking => "claude-4-opus-thinking", - Model::ClaudeOpus4_1Thinking => "claude-4-opus-1-thinking", + Model::ClaudeSonnet4 => "claude-sonnet-4", + Model::ClaudeSonnet4Thinking => "claude-sonnet-4-thinking", + Model::ClaudeOpus4 => "claude-opus-4", + Model::ClaudeOpus4_1 => "claude-opus-4-1", + Model::ClaudeOpus4Thinking => "claude-opus-4-thinking", + Model::ClaudeOpus4_1Thinking => "claude-opus-4-1-thinking", Model::Claude3_5SonnetV2 => "claude-3-5-sonnet-v2", Model::Claude3_5Sonnet => "claude-3-5-sonnet", Model::Claude3Opus => "claude-3-opus", @@ -359,14 +359,12 @@ impl Model { pub fn max_output_tokens(&self) -> u64 { match self { Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3_5Haiku => 4_096, - Self::Claude3_7Sonnet - | Self::Claude3_7SonnetThinking - | Self::ClaudeSonnet4 - | Self::ClaudeSonnet4Thinking - | Self::ClaudeOpus4 - | Model::ClaudeOpus4Thinking + Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking => 128_000, + Self::ClaudeSonnet4 | Self::ClaudeSonnet4Thinking => 64_000, + Self::ClaudeOpus4 + | 
Self::ClaudeOpus4Thinking | Self::ClaudeOpus4_1 - | Model::ClaudeOpus4_1Thinking => 128_000, + | Self::ClaudeOpus4_1Thinking => 32_000, Self::Claude3_5SonnetV2 | Self::PalmyraWriterX4 | Self::PalmyraWriterX5 => 8_192, Self::Custom { max_output_tokens, .. @@ -784,10 +782,10 @@ mod tests { ); // Test thinking models have different friendly IDs but same request IDs - assert_eq!(Model::ClaudeSonnet4.id(), "claude-4-sonnet"); + assert_eq!(Model::ClaudeSonnet4.id(), "claude-sonnet-4"); assert_eq!( Model::ClaudeSonnet4Thinking.id(), - "claude-4-sonnet-thinking" + "claude-sonnet-4-thinking" ); assert_eq!( Model::ClaudeSonnet4.request_id(), From 1c2e2a00fe87d8a9820d5d23f4828482f94c57f9 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Fri, 29 Aug 2025 18:26:11 -0400 Subject: [PATCH 24/54] agent: Re-add workaround for language model behavior with empty tool result (#37196) This is just copying over the same workaround here: https://github.com/zed-industries/zed/blob/a790e514af4d6957aa1a14cc8190b2ff24a0484c/crates/agent/src/thread.rs#L1455-L1459 Into the agent2 code. Release Notes: - agent: Fixed an issue where some tool calls in the Zed agent could return an error like "`tool_use` ids were found without `tool_result` blocks immediately after" --- crates/agent2/src/thread.rs | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/crates/agent2/src/thread.rs b/crates/agent2/src/thread.rs index 97ea1caf1d766be0314a16cc0f518ad701564569..8ff5b845066c8af90eb713aef2a0c87e6d114a85 100644 --- a/crates/agent2/src/thread.rs +++ b/crates/agent2/src/thread.rs @@ -484,11 +484,15 @@ impl AgentMessage { }; for tool_result in self.tool_results.values() { + let mut tool_result = tool_result.clone(); + // Surprisingly, the API fails if we return an empty string here. + // It thinks we are sending a tool use without a tool result. 
+ if tool_result.content.is_empty() { + tool_result.content = "<Tool returned an empty string>".into(); + } user_message .content - .push(language_model::MessageContent::ToolResult( - tool_result.clone(), - )); + .push(language_model::MessageContent::ToolResult(tool_result)); } let mut messages = Vec::new(); From f78f3e7729b6e505685ba20ef207c709f0229149 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 29 Aug 2025 17:18:52 -0700 Subject: [PATCH 25/54] Add initial support for WSL (#37035) Closes #36188 ## Todo * [x] CLI * [x] terminals * [x] tasks ## For future PRs * debugging * UI for opening WSL projects * fixing workspace state restoration Release Notes: - Windows alpha: Zed now supports editing folders in WSL. --------- Co-authored-by: Junkui Zhang <364772080@qq.com> --- crates/auto_update_helper/src/updater.rs | 26 +- crates/cli/src/cli.rs | 1 + crates/cli/src/main.rs | 74 ++- crates/extension_host/src/extension_host.rs | 11 +- crates/paths/src/paths.rs | 5 + crates/project/src/debugger/dap_store.rs | 10 +- crates/project/src/project.rs | 4 +- crates/project/src/terminals.rs | 2 +- .../src/disconnected_overlay.rs | 31 +- crates/recent_projects/src/recent_projects.rs | 33 +- ...h_connections.rs => remote_connections.rs} | 138 ++--- crates/recent_projects/src/remote_servers.rs | 123 +++-- crates/remote/src/remote.rs | 3 +- crates/remote/src/remote_client.rs | 117 ++++- crates/remote/src/transport.rs | 335 ++++++++++++ crates/remote/src/transport/ssh.rs | 341 +----------- crates/remote/src/transport/wsl.rs | 494 ++++++++++++++++++ crates/title_bar/src/title_bar.rs | 13 +- crates/workspace/src/persistence.rs | 438 +++++++++++----- crates/workspace/src/persistence/model.rs | 32 +- crates/workspace/src/workspace.rs | 59 +-- crates/zed/resources/windows/zed-wsl | 25 + crates/zed/src/main.rs | 75 +-- crates/zed/src/zed.rs | 4 +- crates/zed/src/zed/open_listener.rs | 91 ++-- crates/zed/src/zed/windows_only_instance.rs | 1 + script/bundle-windows.ps1 | 1 + 27 files changed, 1701 insertions(+),
786 deletions(-) rename crates/recent_projects/src/{ssh_connections.rs => remote_connections.rs} (85%) create mode 100644 crates/remote/src/transport/wsl.rs create mode 100644 crates/zed/resources/windows/zed-wsl diff --git a/crates/auto_update_helper/src/updater.rs b/crates/auto_update_helper/src/updater.rs index 762771617609e63996685d3d96fae69135355249..a48bbccec304a1b49bb0496c21b299f5dd176076 100644 --- a/crates/auto_update_helper/src/updater.rs +++ b/crates/auto_update_helper/src/updater.rs @@ -16,7 +16,7 @@ use crate::windows_impl::WM_JOB_UPDATED; type Job = fn(&Path) -> Result<()>; #[cfg(not(test))] -pub(crate) const JOBS: [Job; 6] = [ +pub(crate) const JOBS: &[Job] = &[ // Delete old files |app_dir| { let zed_executable = app_dir.join("Zed.exe"); @@ -32,6 +32,12 @@ pub(crate) const JOBS: [Job; 6] = [ std::fs::remove_file(&zed_cli) .context(format!("Failed to remove old file {}", zed_cli.display())) }, + |app_dir| { + let zed_wsl = app_dir.join("bin\\zed"); + log::info!("Removing old file: {}", zed_wsl.display()); + std::fs::remove_file(&zed_wsl) + .context(format!("Failed to remove old file {}", zed_wsl.display())) + }, // Copy new files |app_dir| { let zed_executable_source = app_dir.join("install\\Zed.exe"); @@ -65,6 +71,22 @@ pub(crate) const JOBS: [Job; 6] = [ zed_cli_dest.display() )) }, + |app_dir| { + let zed_wsl_source = app_dir.join("install\\bin\\zed"); + let zed_wsl_dest = app_dir.join("bin\\zed"); + log::info!( + "Copying new file {} to {}", + zed_wsl_source.display(), + zed_wsl_dest.display() + ); + std::fs::copy(&zed_wsl_source, &zed_wsl_dest) + .map(|_| ()) + .context(format!( + "Failed to copy new file {} to {}", + zed_wsl_source.display(), + zed_wsl_dest.display() + )) + }, // Clean up installer folder and updates folder |app_dir| { let updates_folder = app_dir.join("updates"); @@ -85,7 +107,7 @@ pub(crate) const JOBS: [Job; 6] = [ ]; #[cfg(test)] -pub(crate) const JOBS: [Job; 2] = [ +pub(crate) const JOBS: &[Job] = &[ |_| { 
std::thread::sleep(Duration::from_millis(1000)); if let Ok(config) = std::env::var("ZED_AUTO_UPDATE") { diff --git a/crates/cli/src/cli.rs b/crates/cli/src/cli.rs index 6274f69035a02bed20d1a85608371744395c951a..79a10fa2b0936b44d9500fd9990ffa4c6ac62e85 100644 --- a/crates/cli/src/cli.rs +++ b/crates/cli/src/cli.rs @@ -14,6 +14,7 @@ pub enum CliRequest { paths: Vec, urls: Vec, diff_paths: Vec<[String; 2]>, + wsl: Option, wait: bool, open_new_workspace: Option, env: Option>, diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs index b84e7a9f7a53a471bd854a15377c79f45003aaf4..151e96e3cf68ab94295a8386d2842539e6a986a2 100644 --- a/crates/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -6,7 +6,6 @@ use anyhow::{Context as _, Result}; use clap::Parser; use cli::{CliRequest, CliResponse, IpcHandshake, ipc::IpcOneShotServer}; -use collections::HashMap; use parking_lot::Mutex; use std::{ env, fs, io, @@ -85,6 +84,15 @@ struct Args { /// Run zed in dev-server mode #[arg(long)] dev_server_token: Option, + /// The username and WSL distribution to use when opening paths. If not specified, + /// Zed will attempt to open the paths directly. + /// + /// The username is optional, and if not specified, the default user for the distribution + /// will be used. + /// + /// Example: `me@Ubuntu` or `Ubuntu` for default distribution.
+ #[arg(long, value_name = "USER@DISTRO")] + wsl: Option, /// Not supported in Zed CLI, only supported on Zed binary /// Will attempt to give the correct command to run #[arg(long)] @@ -129,14 +137,41 @@ fn parse_path_with_position(argument_str: &str) -> anyhow::Result { Ok(canonicalized.to_string(|path| path.to_string_lossy().to_string())) } -fn main() -> Result<()> { - #[cfg(all(not(debug_assertions), target_os = "windows"))] - unsafe { - use ::windows::Win32::System::Console::{ATTACH_PARENT_PROCESS, AttachConsole}; +fn parse_path_in_wsl(source: &str, wsl: &str) -> Result { + let mut command = util::command::new_std_command("wsl.exe"); - let _ = AttachConsole(ATTACH_PARENT_PROCESS); + let (user, distro_name) = if let Some((user, distro)) = wsl.split_once('@') { + if user.is_empty() { + anyhow::bail!("user is empty in wsl argument"); + } + (Some(user), distro) + } else { + (None, wsl) + }; + + if let Some(user) = user { + command.arg("--user").arg(user); } + let output = command + .arg("--distribution") + .arg(distro_name) + .arg("wslpath") + .arg("-m") + .arg(source) + .output()?; + + let result = String::from_utf8_lossy(&output.stdout); + let prefix = format!("//wsl.localhost/{}", distro_name); + + Ok(result + .trim() + .strip_prefix(&prefix) + .unwrap_or(&result) + .to_string()) +} + +fn main() -> Result<()> { #[cfg(unix)] util::prevent_root_execution(); @@ -223,6 +258,8 @@ fn main() -> Result<()> { let env = { #[cfg(any(target_os = "linux", target_os = "freebsd"))] { + use collections::HashMap; + // On Linux, the desktop entry uses `cli` to spawn `zed`. // We need to handle env vars correctly since std::env::vars() may not contain // project-specific vars (e.g. those set by direnv). 
@@ -235,8 +272,19 @@ fn main() -> Result<()> { } } - #[cfg(not(any(target_os = "linux", target_os = "freebsd")))] - Some(std::env::vars().collect::>()) + #[cfg(target_os = "windows")] + { + // On Windows, by default, a child process inherits a copy of the environment block of the parent process. + // So we don't need to pass env vars explicitly. + None + } + + #[cfg(not(any(target_os = "linux", target_os = "freebsd", target_os = "windows")))] + { + use collections::HashMap; + + Some(std::env::vars().collect::>()) + } }; let exit_status = Arc::new(Mutex::new(None)); @@ -271,8 +319,10 @@ fn main() -> Result<()> { paths.push(tmp_file.path().to_string_lossy().to_string()); let (tmp_file, _) = tmp_file.keep()?; anonymous_fd_tmp_files.push((file, tmp_file)); + } else if let Some(wsl) = &args.wsl { + urls.push(format!("file://{}", parse_path_in_wsl(path, wsl)?)); } else { - paths.push(parse_path_with_position(path)?) + paths.push(parse_path_with_position(path)?); } } @@ -292,6 +342,7 @@ fn main() -> Result<()> { paths, urls, diff_paths, + wsl: args.wsl, wait: args.wait, open_new_workspace, env, @@ -644,15 +695,15 @@ mod windows { Storage::FileSystem::{ CreateFileW, FILE_FLAGS_AND_ATTRIBUTES, FILE_SHARE_MODE, OPEN_EXISTING, WriteFile, }, - System::Threading::CreateMutexW, + System::Threading::{CREATE_NEW_PROCESS_GROUP, CreateMutexW}, }, core::HSTRING, }; use crate::{Detect, InstalledApp}; - use std::io; use std::path::{Path, PathBuf}; use std::process::ExitStatus; + use std::{io, os::windows::process::CommandExt}; fn check_single_instance() -> bool { let mutex = unsafe { @@ -691,6 +742,7 @@ mod windows { fn launch(&self, ipc_url: String) -> anyhow::Result<()> { if check_single_instance() { std::process::Command::new(self.0.clone()) + .creation_flags(CREATE_NEW_PROCESS_GROUP.0) .arg(ipc_url) .spawn()?; } else { diff --git a/crates/extension_host/src/extension_host.rs b/crates/extension_host/src/extension_host.rs index 
b8189c36511a03f136e5e215549453947e888bb1..b114ad9f4c526f9c270681c55626455531becc2f 100644 --- a/crates/extension_host/src/extension_host.rs +++ b/crates/extension_host/src/extension_host.rs @@ -43,7 +43,7 @@ use language::{ use node_runtime::NodeRuntime; use project::ContextProviderWithTasks; use release_channel::ReleaseChannel; -use remote::RemoteClient; +use remote::{RemoteClient, RemoteConnectionOptions}; use semantic_version::SemanticVersion; use serde::{Deserialize, Serialize}; use settings::Settings; @@ -117,7 +117,7 @@ pub struct ExtensionStore { pub wasm_host: Arc, pub wasm_extensions: Vec<(Arc, WasmExtension)>, pub tasks: Vec>, - pub remote_clients: HashMap>, + pub remote_clients: HashMap>, pub ssh_registered_tx: UnboundedSender<()>, } @@ -1779,16 +1779,15 @@ impl ExtensionStore { } pub fn register_remote_client(&mut self, client: Entity, cx: &mut Context) { - let connection_options = client.read(cx).connection_options(); - let ssh_url = connection_options.ssh_url(); + let options = client.read(cx).connection_options(); - if let Some(existing_client) = self.remote_clients.get(&ssh_url) + if let Some(existing_client) = self.remote_clients.get(&options) && existing_client.upgrade().is_some() { return; } - self.remote_clients.insert(ssh_url, client.downgrade()); + self.remote_clients.insert(options, client.downgrade()); self.ssh_registered_tx.unbounded_send(()).ok(); } } diff --git a/crates/paths/src/paths.rs b/crates/paths/src/paths.rs index c2c3c89305939bc32c635549c23d64d565f8fbb0..ede42af0272902892afd2e9dfdafb5c5eae2f8f5 100644 --- a/crates/paths/src/paths.rs +++ b/crates/paths/src/paths.rs @@ -33,6 +33,11 @@ pub fn remote_server_dir_relative() -> &'static Path { Path::new(".zed_server") } +/// Returns the relative path to the zed_wsl_server directory on the wsl host. +pub fn remote_wsl_server_dir_relative() -> &'static Path { + Path::new(".zed_wsl_server") +} + /// Sets a custom directory for all user data, overriding the default data directory. 
/// This function must be called before any other path operations that depend on the data directory. /// The directory's path will be canonicalized to an absolute path by a blocking FS operation. diff --git a/crates/project/src/debugger/dap_store.rs b/crates/project/src/debugger/dap_store.rs index d8c6d3acc1116e9a97b2f6ca3fc54ec098029cbe..6c1449b728d3ee5b8c8b019d5e527e9adfb3bf25 100644 --- a/crates/project/src/debugger/dap_store.rs +++ b/crates/project/src/debugger/dap_store.rs @@ -258,8 +258,14 @@ impl DapStore { let connection; if let Some(c) = binary.connection { let host = Ipv4Addr::LOCALHOST; - let port = dap::transport::TcpTransport::unused_port(host).await?; - port_forwarding = Some((port, c.host.to_string(), c.port)); + let port; + if remote.read_with(cx, |remote, _cx| remote.shares_network_interface())? { + port = c.port; + port_forwarding = None; + } else { + port = dap::transport::TcpTransport::unused_port(host).await?; + port_forwarding = Some((port, c.host.to_string(), c.port)); + } connection = Some(TcpArguments { port, host, diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index b32e95741f522650e5d20f80a6ba18c423805234..557367edf522a103ee1a8b55f5264be561d1698e 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -87,7 +87,7 @@ use node_runtime::NodeRuntime; use parking_lot::Mutex; pub use prettier_store::PrettierStore; use project_settings::{ProjectSettings, SettingsObserver, SettingsObserverEvent}; -use remote::{RemoteClient, SshConnectionOptions}; +use remote::{RemoteClient, RemoteConnectionOptions}; use rpc::{ AnyProtoClient, ErrorCode, proto::{FromProto, LanguageServerPromptResponse, REMOTE_SERVER_PROJECT_ID, ToProto}, @@ -1916,7 +1916,7 @@ impl Project { .map(|remote| remote.read(cx).connection_state()) } - pub fn remote_connection_options(&self, cx: &App) -> Option { + pub fn remote_connection_options(&self, cx: &App) -> Option { self.remote_client .as_ref() .map(|remote| 
remote.read(cx).connection_options()) diff --git a/crates/project/src/terminals.rs b/crates/project/src/terminals.rs index c189242fadc2948593186edb5dcd2c56879f07af..597da04617e9670e623196ef21f02c366e49d392 100644 --- a/crates/project/src/terminals.rs +++ b/crates/project/src/terminals.rs @@ -512,7 +512,7 @@ fn create_remote_shell( *env = command.env; log::debug!("Connecting to a remote server: {:?}", command.program); - let host = remote_client.read(cx).connection_options().host; + let host = remote_client.read(cx).connection_options().display_name(); Ok(Shell::WithArguments { program: command.program, diff --git a/crates/recent_projects/src/disconnected_overlay.rs b/crates/recent_projects/src/disconnected_overlay.rs index 36da6897b92e4bc183aa7c0f51d5100e8836931e..c97f7062a8206052e7c63f6bec909dd5823dbedf 100644 --- a/crates/recent_projects/src/disconnected_overlay.rs +++ b/crates/recent_projects/src/disconnected_overlay.rs @@ -1,6 +1,6 @@ use gpui::{ClickEvent, DismissEvent, EventEmitter, FocusHandle, Focusable, Render, WeakEntity}; use project::project_settings::ProjectSettings; -use remote::SshConnectionOptions; +use remote::RemoteConnectionOptions; use settings::Settings; use ui::{ Button, ButtonCommon, ButtonStyle, Clickable, Context, ElevationIndex, FluentBuilder, Headline, @@ -9,11 +9,11 @@ use ui::{ }; use workspace::{ModalView, OpenOptions, Workspace, notifications::DetachAndPromptErr}; -use crate::open_ssh_project; +use crate::open_remote_project; enum Host { - RemoteProject, - SshRemoteProject(SshConnectionOptions), + CollabGuestProject, + RemoteServerProject(RemoteConnectionOptions), } pub struct DisconnectedOverlay { @@ -66,9 +66,9 @@ impl DisconnectedOverlay { let remote_connection_options = project.read(cx).remote_connection_options(cx); let host = if let Some(ssh_connection_options) = remote_connection_options { - Host::SshRemoteProject(ssh_connection_options) + Host::RemoteServerProject(ssh_connection_options) } else { - Host::RemoteProject + 
Host::CollabGuestProject }; workspace.toggle_modal(window, cx, |_, cx| DisconnectedOverlay { @@ -86,14 +86,14 @@ impl DisconnectedOverlay { self.finished = true; cx.emit(DismissEvent); - if let Host::SshRemoteProject(ssh_connection_options) = &self.host { - self.reconnect_to_ssh_remote(ssh_connection_options.clone(), window, cx); + if let Host::RemoteServerProject(ssh_connection_options) = &self.host { + self.reconnect_to_remote_project(ssh_connection_options.clone(), window, cx); } } - fn reconnect_to_ssh_remote( + fn reconnect_to_remote_project( &self, - connection_options: SshConnectionOptions, + connection_options: RemoteConnectionOptions, window: &mut Window, cx: &mut Context, ) { @@ -114,7 +114,7 @@ impl DisconnectedOverlay { .collect(); cx.spawn_in(window, async move |_, cx| { - open_ssh_project( + open_remote_project( connection_options, paths, app_state, @@ -138,13 +138,13 @@ impl DisconnectedOverlay { impl Render for DisconnectedOverlay { fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { - let can_reconnect = matches!(self.host, Host::SshRemoteProject(_)); + let can_reconnect = matches!(self.host, Host::RemoteServerProject(_)); let message = match &self.host { - Host::RemoteProject => { + Host::CollabGuestProject => { "Your connection to the remote project has been lost.".to_string() } - Host::SshRemoteProject(options) => { + Host::RemoteServerProject(options) => { let autosave = if ProjectSettings::get_global(cx) .session .restore_unsaved_buffers @@ -155,7 +155,8 @@ impl Render for DisconnectedOverlay { }; format!( "Your connection to {} has been lost.{}", - options.host, autosave + options.display_name(), + autosave ) } }; diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index fa57b588cd8457788adc0226264a4871c3305b85..aa0ce7661b29123c25fdf20cbde5f53e6525d2d6 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ 
-1,9 +1,10 @@ pub mod disconnected_overlay; +mod remote_connections; mod remote_servers; mod ssh_config; -mod ssh_connections; -pub use ssh_connections::{is_connecting_over_ssh, open_ssh_project}; +use remote::RemoteConnectionOptions; +pub use remote_connections::open_remote_project; use disconnected_overlay::DisconnectedOverlay; use fuzzy::{StringMatch, StringMatchCandidate}; @@ -16,9 +17,9 @@ use picker::{ Picker, PickerDelegate, highlighted_match_with_paths::{HighlightedMatch, HighlightedMatchWithPaths}, }; +pub use remote_connections::SshSettings; pub use remote_servers::RemoteServerProjects; use settings::Settings; -pub use ssh_connections::SshSettings; use std::{path::Path, sync::Arc}; use ui::{KeyBinding, ListItem, ListItemSpacing, Tooltip, prelude::*, tooltip_container}; use util::{ResultExt, paths::PathExt}; @@ -290,7 +291,7 @@ impl PickerDelegate for RecentProjectsDelegate { if workspace.database_id() == Some(*candidate_workspace_id) { Task::ready(Ok(())) } else { - match candidate_workspace_location { + match candidate_workspace_location.clone() { SerializedWorkspaceLocation::Local => { let paths = candidate_workspace_paths.paths().to_vec(); if replace_current_window { @@ -320,7 +321,7 @@ impl PickerDelegate for RecentProjectsDelegate { workspace.open_workspace_for_paths(false, paths, window, cx) } } - SerializedWorkspaceLocation::Ssh(connection) => { + SerializedWorkspaceLocation::Remote(mut connection) => { let app_state = workspace.app_state().clone(); let replace_window = if replace_current_window { @@ -334,18 +335,16 @@ impl PickerDelegate for RecentProjectsDelegate { ..Default::default() }; - let connection_options = SshSettings::get_global(cx) - .connection_options_for( - connection.host.clone(), - connection.port, - connection.user.clone(), - ); + if let RemoteConnectionOptions::Ssh(connection) = &mut connection { + SshSettings::get_global(cx) + .fill_connection_options_from_settings(connection); + }; let paths = 
candidate_workspace_paths.paths().to_vec(); cx.spawn_in(window, async move |_, cx| { - open_ssh_project( - connection_options, + open_remote_project( + connection.clone(), paths, app_state, open_options, @@ -418,9 +417,11 @@ impl PickerDelegate for RecentProjectsDelegate { SerializedWorkspaceLocation::Local => Icon::new(IconName::Screen) .color(Color::Muted) .into_any_element(), - SerializedWorkspaceLocation::Ssh(_) => Icon::new(IconName::Server) - .color(Color::Muted) - .into_any_element(), + SerializedWorkspaceLocation::Remote(_) => { + Icon::new(IconName::Server) + .color(Color::Muted) + .into_any_element() + } }) }) .child({ diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/remote_connections.rs similarity index 85% rename from crates/recent_projects/src/ssh_connections.rs rename to crates/recent_projects/src/remote_connections.rs index 29f6e75bbdebf72b36295b20295f0705b636214e..47607813b547e28b9b4a37449f8daaa6ec022764 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/remote_connections.rs @@ -16,7 +16,8 @@ use language::CursorShape; use markdown::{Markdown, MarkdownElement, MarkdownStyle}; use release_channel::ReleaseChannel; use remote::{ - ConnectionIdentifier, RemoteClient, RemotePlatform, SshConnectionOptions, SshPortForwardOption, + ConnectionIdentifier, RemoteClient, RemoteConnectionOptions, RemotePlatform, + SshConnectionOptions, SshPortForwardOption, }; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; @@ -42,32 +43,35 @@ impl SshSettings { self.ssh_connections.clone().into_iter().flatten() } + pub fn fill_connection_options_from_settings(&self, options: &mut SshConnectionOptions) { + for conn in self.ssh_connections() { + if conn.host == options.host + && conn.username == options.username + && conn.port == options.port + { + options.nickname = conn.nickname; + options.upload_binary_over_ssh = conn.upload_binary_over_ssh.unwrap_or_default(); + options.args = 
Some(conn.args); + options.port_forwards = conn.port_forwards; + break; + } + } + } + pub fn connection_options_for( &self, host: String, port: Option, username: Option, ) -> SshConnectionOptions { - for conn in self.ssh_connections() { - if conn.host == host && conn.username == username && conn.port == port { - return SshConnectionOptions { - nickname: conn.nickname, - upload_binary_over_ssh: conn.upload_binary_over_ssh.unwrap_or_default(), - args: Some(conn.args), - host, - port, - username, - port_forwards: conn.port_forwards, - password: None, - }; - } - } - SshConnectionOptions { + let mut options = SshConnectionOptions { host, port, username, ..Default::default() - } + }; + self.fill_connection_options_from_settings(&mut options); + options } } @@ -135,7 +139,7 @@ impl Settings for SshSettings { fn import_from_vscode(_vscode: &settings::VsCodeSettings, _current: &mut Self::FileContent) {} } -pub struct SshPrompt { +pub struct RemoteConnectionPrompt { connection_string: SharedString, nickname: Option, status_message: Option, @@ -144,7 +148,7 @@ pub struct SshPrompt { editor: Entity, } -impl Drop for SshPrompt { +impl Drop for RemoteConnectionPrompt { fn drop(&mut self) { if let Some(cancel) = self.cancellation.take() { cancel.send(()).ok(); @@ -152,24 +156,22 @@ impl Drop for SshPrompt { } } -pub struct SshConnectionModal { - pub(crate) prompt: Entity, +pub struct RemoteConnectionModal { + pub(crate) prompt: Entity, paths: Vec, finished: bool, } -impl SshPrompt { +impl RemoteConnectionPrompt { pub(crate) fn new( - connection_options: &SshConnectionOptions, + connection_string: String, + nickname: Option, window: &mut Window, cx: &mut Context, ) -> Self { - let connection_string = connection_options.connection_string().into(); - let nickname = connection_options.nickname.clone().map(|s| s.into()); - Self { - connection_string, - nickname, + connection_string: connection_string.into(), + nickname: nickname.map(|nickname| nickname.into()), editor: cx.new(|cx| 
Editor::single_line(window, cx)), status_message: None, cancellation: None, @@ -232,7 +234,7 @@ impl SshPrompt { } } -impl Render for SshPrompt { +impl Render for RemoteConnectionPrompt { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let theme = ThemeSettings::get_global(cx); @@ -297,15 +299,22 @@ impl Render for SshPrompt { } } -impl SshConnectionModal { +impl RemoteConnectionModal { pub(crate) fn new( - connection_options: &SshConnectionOptions, + connection_options: &RemoteConnectionOptions, paths: Vec, window: &mut Window, cx: &mut Context, ) -> Self { + let (connection_string, nickname) = match connection_options { + RemoteConnectionOptions::Ssh(options) => { + (options.connection_string(), options.nickname.clone()) + } + RemoteConnectionOptions::Wsl(options) => (options.distro_name.clone(), None), + }; Self { - prompt: cx.new(|cx| SshPrompt::new(connection_options, window, cx)), + prompt: cx + .new(|cx| RemoteConnectionPrompt::new(connection_string, nickname, window, cx)), finished: false, paths, } @@ -386,7 +395,7 @@ impl RenderOnce for SshConnectionHeader { } } -impl Render for SshConnectionModal { +impl Render for RemoteConnectionModal { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl ui::IntoElement { let nickname = self.prompt.read(cx).nickname.clone(); let connection_string = self.prompt.read(cx).connection_string.clone(); @@ -423,15 +432,15 @@ impl Render for SshConnectionModal { } } -impl Focusable for SshConnectionModal { +impl Focusable for RemoteConnectionModal { fn focus_handle(&self, cx: &gpui::App) -> gpui::FocusHandle { self.prompt.read(cx).editor.focus_handle(cx) } } -impl EventEmitter for SshConnectionModal {} +impl EventEmitter for RemoteConnectionModal {} -impl ModalView for SshConnectionModal { +impl ModalView for RemoteConnectionModal { fn on_before_dismiss( &mut self, _window: &mut Window, @@ -446,13 +455,13 @@ impl ModalView for SshConnectionModal { } #[derive(Clone)] -pub struct 
SshClientDelegate { +pub struct RemoteClientDelegate { window: AnyWindowHandle, - ui: WeakEntity, + ui: WeakEntity, known_password: Option, } -impl remote::RemoteClientDelegate for SshClientDelegate { +impl remote::RemoteClientDelegate for RemoteClientDelegate { fn ask_password(&self, prompt: String, tx: oneshot::Sender, cx: &mut AsyncApp) { let mut known_password = self.known_password.clone(); if let Some(password) = known_password.take() { @@ -522,7 +531,7 @@ impl remote::RemoteClientDelegate for SshClientDelegate { } } -impl SshClientDelegate { +impl RemoteClientDelegate { fn update_status(&self, status: Option<&str>, cx: &mut AsyncApp) { self.window .update(cx, |_, _, cx| { @@ -534,14 +543,10 @@ impl SshClientDelegate { } } -pub fn is_connecting_over_ssh(workspace: &Workspace, cx: &App) -> bool { - workspace.active_modal::(cx).is_some() -} - pub fn connect_over_ssh( unique_identifier: ConnectionIdentifier, connection_options: SshConnectionOptions, - ui: Entity, + ui: Entity, window: &mut Window, cx: &mut App, ) -> Task>>> { @@ -554,7 +559,7 @@ pub fn connect_over_ssh( unique_identifier, connection_options, rx, - Arc::new(SshClientDelegate { + Arc::new(RemoteClientDelegate { window, ui: ui.downgrade(), known_password, @@ -563,8 +568,8 @@ pub fn connect_over_ssh( ) } -pub async fn open_ssh_project( - connection_options: SshConnectionOptions, +pub async fn open_remote_project( + connection_options: RemoteConnectionOptions, paths: Vec, app_state: Arc, open_options: workspace::OpenOptions, @@ -575,13 +580,7 @@ pub async fn open_ssh_project( } else { let workspace_position = cx .update(|cx| { - workspace::ssh_workspace_position_from_db( - connection_options.host.clone(), - connection_options.port, - connection_options.username.clone(), - &paths, - cx, - ) + workspace::remote_workspace_position_from_db(connection_options.clone(), &paths, cx) })? 
.await .context("fetching ssh workspace position from db")?; @@ -611,16 +610,16 @@ pub async fn open_ssh_project( loop { let (cancel_tx, cancel_rx) = oneshot::channel(); let delegate = window.update(cx, { - let connection_options = connection_options.clone(); let paths = paths.clone(); + let connection_options = connection_options.clone(); move |workspace, window, cx| { window.activate_window(); workspace.toggle_modal(window, cx, |window, cx| { - SshConnectionModal::new(&connection_options, paths, window, cx) + RemoteConnectionModal::new(&connection_options, paths, window, cx) }); let ui = workspace - .active_modal::(cx)? + .active_modal::(cx)? .read(cx) .prompt .clone(); @@ -629,19 +628,25 @@ pub async fn open_ssh_project( ui.set_cancellation_tx(cancel_tx); }); - Some(Arc::new(SshClientDelegate { + Some(Arc::new(RemoteClientDelegate { window: window.window_handle(), ui: ui.downgrade(), - known_password: connection_options.password.clone(), + known_password: if let RemoteConnectionOptions::Ssh(options) = + &connection_options + { + options.password.clone() + } else { + None + }, })) } })?; let Some(delegate) = delegate else { break }; - let did_open_ssh_project = cx + let did_open_project = cx .update(|cx| { - workspace::open_ssh_project_with_new_connection( + workspace::open_remote_project_with_new_connection( window, connection_options.clone(), cancel_rx, @@ -655,19 +660,22 @@ pub async fn open_ssh_project( window .update(cx, |workspace, _, cx| { - if let Some(ui) = workspace.active_modal::(cx) { + if let Some(ui) = workspace.active_modal::(cx) { ui.update(cx, |modal, cx| modal.finished(cx)) } }) .ok(); - if let Err(e) = did_open_ssh_project { + if let Err(e) = did_open_project { log::error!("Failed to open project: {e:?}"); let response = window .update(cx, |_, window, cx| { window.prompt( PromptLevel::Critical, - "Failed to connect over SSH", + match connection_options { + RemoteConnectionOptions::Ssh(_) => "Failed to connect over SSH", + 
RemoteConnectionOptions::Wsl(_) => "Failed to connect to WSL", + }, Some(&e.to_string()), &["Retry", "Ok"], cx, diff --git a/crates/recent_projects/src/remote_servers.rs b/crates/recent_projects/src/remote_servers.rs index f4fd1f1c1bbb12e2fbf11088baf859b08bfbf310..3cf084bef76a56cf85973f67bb5713aee59fb1bc 100644 --- a/crates/recent_projects/src/remote_servers.rs +++ b/crates/recent_projects/src/remote_servers.rs @@ -1,70 +1,52 @@ -use std::any::Any; -use std::borrow::Cow; -use std::collections::BTreeSet; -use std::path::PathBuf; -use std::rc::Rc; -use std::sync::Arc; -use std::sync::atomic; -use std::sync::atomic::AtomicUsize; - +use crate::{ + remote_connections::{ + RemoteConnectionModal, RemoteConnectionPrompt, RemoteSettingsContent, SshConnection, + SshConnectionHeader, SshProject, SshSettings, connect_over_ssh, open_remote_project, + }, + ssh_config::parse_ssh_config_hosts, +}; use editor::Editor; use file_finder::OpenPathDelegate; -use futures::FutureExt; -use futures::channel::oneshot; -use futures::future::Shared; -use futures::select; -use gpui::ClickEvent; -use gpui::ClipboardItem; -use gpui::Subscription; -use gpui::Task; -use gpui::WeakEntity; -use gpui::canvas; +use futures::{FutureExt, channel::oneshot, future::Shared, select}; use gpui::{ - AnyElement, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, - PromptLevel, ScrollHandle, Window, + AnyElement, App, ClickEvent, ClipboardItem, Context, DismissEvent, Entity, EventEmitter, + FocusHandle, Focusable, PromptLevel, ScrollHandle, Subscription, Task, WeakEntity, Window, + canvas, }; -use paths::global_ssh_config_file; -use paths::user_ssh_config_file; +use paths::{global_ssh_config_file, user_ssh_config_file}; use picker::Picker; -use project::Fs; -use project::Project; -use remote::remote_client::ConnectionIdentifier; -use remote::{RemoteClient, SshConnectionOptions}; -use settings::Settings; -use settings::SettingsStore; -use settings::update_settings_file; -use 
settings::watch_config_file; +use project::{Fs, Project}; +use remote::{ + RemoteClient, RemoteConnectionOptions, SshConnectionOptions, + remote_client::ConnectionIdentifier, +}; +use settings::{Settings, SettingsStore, update_settings_file, watch_config_file}; use smol::stream::StreamExt as _; -use ui::Navigable; -use ui::NavigableEntry; +use std::{ + any::Any, + borrow::Cow, + collections::BTreeSet, + path::PathBuf, + rc::Rc, + sync::{ + Arc, + atomic::{self, AtomicUsize}, + }, +}; use ui::{ - IconButtonShape, List, ListItem, ListSeparator, Modal, ModalHeader, Scrollbar, ScrollbarState, - Section, Tooltip, prelude::*, + IconButtonShape, List, ListItem, ListSeparator, Modal, ModalHeader, Navigable, NavigableEntry, + Scrollbar, ScrollbarState, Section, Tooltip, prelude::*, }; use util::{ ResultExt, paths::{PathStyle, RemotePathBuf}, }; -use workspace::OpenOptions; -use workspace::Toast; -use workspace::notifications::NotificationId; use workspace::{ - ModalView, Workspace, notifications::DetachAndPromptErr, - open_ssh_project_with_existing_connection, + ModalView, OpenOptions, Toast, Workspace, + notifications::{DetachAndPromptErr, NotificationId}, + open_remote_project_with_existing_connection, }; -use crate::ssh_config::parse_ssh_config_hosts; -use crate::ssh_connections::RemoteSettingsContent; -use crate::ssh_connections::SshConnection; -use crate::ssh_connections::SshConnectionHeader; -use crate::ssh_connections::SshConnectionModal; -use crate::ssh_connections::SshProject; -use crate::ssh_connections::SshPrompt; -use crate::ssh_connections::SshSettings; -use crate::ssh_connections::connect_over_ssh; -use crate::ssh_connections::open_ssh_project; - -mod navigation_base {} pub struct RemoteServerProjects { mode: Mode, focus_handle: FocusHandle, @@ -79,7 +61,7 @@ pub struct RemoteServerProjects { struct CreateRemoteServer { address_editor: Entity, address_error: Option, - ssh_prompt: Option>, + ssh_prompt: Option>, _creating: Option>>, } @@ -222,8 +204,13 @@ impl 
ProjectPicker { }) .log_err()?; - open_ssh_project_with_existing_connection( - connection, project, paths, app_state, window, cx, + open_remote_project_with_existing_connection( + RemoteConnectionOptions::Ssh(connection), + project, + paths, + app_state, + window, + cx, ) .await .log_err(); @@ -472,7 +459,14 @@ impl RemoteServerProjects { return; } }; - let ssh_prompt = cx.new(|cx| SshPrompt::new(&connection_options, window, cx)); + let ssh_prompt = cx.new(|cx| { + RemoteConnectionPrompt::new( + connection_options.connection_string(), + connection_options.nickname.clone(), + window, + cx, + ) + }); let connection = connect_over_ssh( ConnectionIdentifier::setup(), @@ -552,15 +546,20 @@ impl RemoteServerProjects { }; let create_new_window = self.create_new_window; - let connection_options = ssh_connection.into(); + let connection_options: SshConnectionOptions = ssh_connection.into(); workspace.update(cx, |_, cx| { cx.defer_in(window, move |workspace, window, cx| { let app_state = workspace.app_state().clone(); workspace.toggle_modal(window, cx, |window, cx| { - SshConnectionModal::new(&connection_options, Vec::new(), window, cx) + RemoteConnectionModal::new( + &RemoteConnectionOptions::Ssh(connection_options.clone()), + Vec::new(), + window, + cx, + ) }); let prompt = workspace - .active_modal::(cx) + .active_modal::(cx) .unwrap() .read(cx) .prompt @@ -579,7 +578,7 @@ impl RemoteServerProjects { let session = connect.await; workspace.update(cx, |workspace, cx| { - if let Some(prompt) = workspace.active_modal::(cx) { + if let Some(prompt) = workspace.active_modal::(cx) { prompt.update(cx, |prompt, cx| prompt.finished(cx)) } })?; @@ -898,8 +897,8 @@ impl RemoteServerProjects { }; cx.spawn_in(window, async move |_, cx| { - let result = open_ssh_project( - server.into(), + let result = open_remote_project( + RemoteConnectionOptions::Ssh(server.into()), project.paths.into_iter().map(PathBuf::from).collect(), app_state, OpenOptions { diff --git 
a/crates/remote/src/remote.rs b/crates/remote/src/remote.rs index c698353d9edfc0d48c7039f321a2c88890e8c098..74d45b1a696ff1a02a9f2b4d9afc3844f82196cd 100644 --- a/crates/remote/src/remote.rs +++ b/crates/remote/src/remote.rs @@ -6,6 +6,7 @@ mod transport; pub use remote_client::{ ConnectionIdentifier, ConnectionState, RemoteClient, RemoteClientDelegate, RemoteClientEvent, - RemotePlatform, + RemoteConnectionOptions, RemotePlatform, }; pub use transport::ssh::{SshConnectionOptions, SshPortForwardOption}; +pub use transport::wsl::WslConnectionOptions; diff --git a/crates/remote/src/remote_client.rs b/crates/remote/src/remote_client.rs index 7e231e622cb2336a113799f7087fc0e30a5f79ff..501c6a8dd639630b1930cb32e804f8cca658a9ca 100644 --- a/crates/remote/src/remote_client.rs +++ b/crates/remote/src/remote_client.rs @@ -1,6 +1,11 @@ use crate::{ - SshConnectionOptions, protocol::MessageId, proxy::ProxyLaunchError, - transport::ssh::SshRemoteConnection, + SshConnectionOptions, + protocol::MessageId, + proxy::ProxyLaunchError, + transport::{ + ssh::SshRemoteConnection, + wsl::{WslConnectionOptions, WslRemoteConnection}, + }, }; use anyhow::{Context as _, Result, anyhow}; use async_trait::async_trait; @@ -237,7 +242,7 @@ impl From<&State> for ConnectionState { pub struct RemoteClient { client: Arc, unique_identifier: String, - connection_options: SshConnectionOptions, + connection_options: RemoteConnectionOptions, path_style: PathStyle, state: Option, } @@ -290,6 +295,22 @@ impl RemoteClient { cancellation: oneshot::Receiver<()>, delegate: Arc, cx: &mut App, + ) -> Task>>> { + Self::new( + unique_identifier, + RemoteConnectionOptions::Ssh(connection_options), + cancellation, + delegate, + cx, + ) + } + + pub fn new( + unique_identifier: ConnectionIdentifier, + connection_options: RemoteConnectionOptions, + cancellation: oneshot::Receiver<()>, + delegate: Arc, + cx: &mut App, ) -> Task>>> { let unique_identifier = unique_identifier.to_string(cx); cx.spawn(async move |cx| { @@ 
-424,7 +445,7 @@ impl RemoteClient { } let state = self.state.take().unwrap(); - let (attempts, ssh_connection, delegate) = match state { + let (attempts, remote_connection, delegate) = match state { State::Connected { ssh_connection, delegate, @@ -482,15 +503,15 @@ impl RemoteClient { }; } - if let Err(error) = ssh_connection + if let Err(error) = remote_connection .kill() .await .context("Failed to kill ssh process") { - failed!(error, attempts, ssh_connection, delegate); + failed!(error, attempts, remote_connection, delegate); }; - let connection_options = ssh_connection.connection_options(); + let connection_options = remote_connection.connection_options(); let (outgoing_tx, outgoing_rx) = mpsc::unbounded::(); let (incoming_tx, incoming_rx) = mpsc::unbounded::(); @@ -519,7 +540,7 @@ impl RemoteClient { { Ok((ssh_connection, io_task)) => (ssh_connection, io_task), Err(error) => { - failed!(error, attempts, ssh_connection, delegate); + failed!(error, attempts, remote_connection, delegate); } }; @@ -751,6 +772,13 @@ impl RemoteClient { Some(self.state.as_ref()?.remote_connection()?.shell()) } + pub fn shares_network_interface(&self) -> bool { + self.state + .as_ref() + .and_then(|state| state.remote_connection()) + .map_or(false, |connection| connection.shares_network_interface()) + } + pub fn build_command( &self, program: Option, @@ -789,11 +817,7 @@ impl RemoteClient { self.client.clone().into() } - pub fn host(&self) -> String { - self.connection_options.host.clone() - } - - pub fn connection_options(&self) -> SshConnectionOptions { + pub fn connection_options(&self) -> RemoteConnectionOptions { self.connection_options.clone() } @@ -836,14 +860,14 @@ impl RemoteClient { pub fn fake_server( client_cx: &mut gpui::TestAppContext, server_cx: &mut gpui::TestAppContext, - ) -> (SshConnectionOptions, AnyProtoClient) { + ) -> (RemoteConnectionOptions, AnyProtoClient) { let port = client_cx .update(|cx| cx.default_global::().connections.len() as u16 + 1); - let opts = 
SshConnectionOptions { + let opts = RemoteConnectionOptions::Ssh(SshConnectionOptions { host: "".to_string(), port: Some(port), ..Default::default() - }; + }); let (outgoing_tx, _) = mpsc::unbounded::(); let (_, incoming_rx) = mpsc::unbounded::(); let server_client = @@ -874,13 +898,13 @@ impl RemoteClient { #[cfg(any(test, feature = "test-support"))] pub async fn fake_client( - opts: SshConnectionOptions, + opts: RemoteConnectionOptions, client_cx: &mut gpui::TestAppContext, ) -> Entity { let (_tx, rx) = oneshot::channel(); client_cx .update(|cx| { - Self::ssh( + Self::new( ConnectionIdentifier::setup(), opts, rx, @@ -901,7 +925,7 @@ enum ConnectionPoolEntry { #[derive(Default)] struct ConnectionPool { - connections: HashMap, + connections: HashMap, } impl Global for ConnectionPool {} @@ -909,7 +933,7 @@ impl Global for ConnectionPool {} impl ConnectionPool { pub fn connect( &mut self, - opts: SshConnectionOptions, + opts: RemoteConnectionOptions, delegate: &Arc, cx: &mut App, ) -> Shared, Arc>>> { @@ -939,9 +963,18 @@ impl ConnectionPool { let opts = opts.clone(); let delegate = delegate.clone(); async move |cx| { - let connection = SshRemoteConnection::new(opts.clone(), delegate, cx) - .await - .map(|connection| Arc::new(connection) as Arc); + let connection = match opts.clone() { + RemoteConnectionOptions::Ssh(opts) => { + SshRemoteConnection::new(opts, delegate, cx) + .await + .map(|connection| Arc::new(connection) as Arc) + } + RemoteConnectionOptions::Wsl(opts) => { + WslRemoteConnection::new(opts, delegate, cx) + .await + .map(|connection| Arc::new(connection) as Arc) + } + }; cx.update_global(|pool: &mut Self, _| { debug_assert!(matches!( @@ -972,6 +1005,33 @@ impl ConnectionPool { } } +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum RemoteConnectionOptions { + Ssh(SshConnectionOptions), + Wsl(WslConnectionOptions), +} + +impl RemoteConnectionOptions { + pub fn display_name(&self) -> String { + match self { + RemoteConnectionOptions::Ssh(opts) => 
opts.host.clone(), + RemoteConnectionOptions::Wsl(opts) => opts.distro_name.clone(), + } + } +} + +impl From for RemoteConnectionOptions { + fn from(opts: SshConnectionOptions) -> Self { + RemoteConnectionOptions::Ssh(opts) + } +} + +impl From for RemoteConnectionOptions { + fn from(opts: WslConnectionOptions) -> Self { + RemoteConnectionOptions::Wsl(opts) + } +} + #[async_trait(?Send)] pub(crate) trait RemoteConnection: Send + Sync { fn start_proxy( @@ -992,6 +1052,9 @@ pub(crate) trait RemoteConnection: Send + Sync { ) -> Task>; async fn kill(&self) -> Result<()>; fn has_been_killed(&self) -> bool; + fn shares_network_interface(&self) -> bool { + false + } fn build_command( &self, program: Option, @@ -1000,7 +1063,7 @@ pub(crate) trait RemoteConnection: Send + Sync { working_dir: Option, port_forward: Option<(u16, String, u16)>, ) -> Result; - fn connection_options(&self) -> SshConnectionOptions; + fn connection_options(&self) -> RemoteConnectionOptions; fn path_style(&self) -> PathStyle; fn shell(&self) -> String; @@ -1307,7 +1370,7 @@ impl ProtoClient for ChannelClient { #[cfg(any(test, feature = "test-support"))] mod fake { use super::{ChannelClient, RemoteClientDelegate, RemoteConnection, RemotePlatform}; - use crate::{SshConnectionOptions, remote_client::CommandTemplate}; + use crate::remote_client::{CommandTemplate, RemoteConnectionOptions}; use anyhow::Result; use async_trait::async_trait; use collections::HashMap; @@ -1326,7 +1389,7 @@ mod fake { use util::paths::{PathStyle, RemotePathBuf}; pub(super) struct FakeRemoteConnection { - pub(super) connection_options: SshConnectionOptions, + pub(super) connection_options: RemoteConnectionOptions, pub(super) server_channel: Arc, pub(super) server_cx: SendableCx, } @@ -1386,7 +1449,7 @@ mod fake { unreachable!() } - fn connection_options(&self) -> SshConnectionOptions { + fn connection_options(&self) -> RemoteConnectionOptions { self.connection_options.clone() } diff --git a/crates/remote/src/transport.rs 
b/crates/remote/src/transport.rs index aa086fd3f56196e71224ef346c9810e8638c5c47..36525b7fcc1d91f106cffb6592a1ffd8e5e96fa9 100644 --- a/crates/remote/src/transport.rs +++ b/crates/remote/src/transport.rs @@ -1 +1,336 @@ +use crate::{ + json_log::LogRecord, + protocol::{MESSAGE_LEN_SIZE, message_len_from_buffer, read_message_with_len, write_message}, +}; +use anyhow::{Context as _, Result}; +use futures::{ + AsyncReadExt as _, FutureExt as _, StreamExt as _, + channel::mpsc::{Sender, UnboundedReceiver, UnboundedSender}, +}; +use gpui::{AppContext as _, AsyncApp, Task}; +use rpc::proto::Envelope; +use smol::process::Child; + pub mod ssh; +pub mod wsl; + +fn handle_rpc_messages_over_child_process_stdio( + mut ssh_proxy_process: Child, + incoming_tx: UnboundedSender, + mut outgoing_rx: UnboundedReceiver, + mut connection_activity_tx: Sender<()>, + cx: &AsyncApp, +) -> Task> { + let mut child_stderr = ssh_proxy_process.stderr.take().unwrap(); + let mut child_stdout = ssh_proxy_process.stdout.take().unwrap(); + let mut child_stdin = ssh_proxy_process.stdin.take().unwrap(); + + let mut stdin_buffer = Vec::new(); + let mut stdout_buffer = Vec::new(); + let mut stderr_buffer = Vec::new(); + let mut stderr_offset = 0; + + let stdin_task = cx.background_spawn(async move { + while let Some(outgoing) = outgoing_rx.next().await { + write_message(&mut child_stdin, &mut stdin_buffer, outgoing).await?; + } + anyhow::Ok(()) + }); + + let stdout_task = cx.background_spawn({ + let mut connection_activity_tx = connection_activity_tx.clone(); + async move { + loop { + stdout_buffer.resize(MESSAGE_LEN_SIZE, 0); + let len = child_stdout.read(&mut stdout_buffer).await?; + + if len == 0 { + return anyhow::Ok(()); + } + + if len < MESSAGE_LEN_SIZE { + child_stdout.read_exact(&mut stdout_buffer[len..]).await?; + } + + let message_len = message_len_from_buffer(&stdout_buffer); + let envelope = + read_message_with_len(&mut child_stdout, &mut stdout_buffer, message_len) + .await?; + 
connection_activity_tx.try_send(()).ok(); + incoming_tx.unbounded_send(envelope).ok(); + } + } + }); + + let stderr_task: Task> = cx.background_spawn(async move { + loop { + stderr_buffer.resize(stderr_offset + 1024, 0); + + let len = child_stderr + .read(&mut stderr_buffer[stderr_offset..]) + .await?; + if len == 0 { + return anyhow::Ok(()); + } + + stderr_offset += len; + let mut start_ix = 0; + while let Some(ix) = stderr_buffer[start_ix..stderr_offset] + .iter() + .position(|b| b == &b'\n') + { + let line_ix = start_ix + ix; + let content = &stderr_buffer[start_ix..line_ix]; + start_ix = line_ix + 1; + if let Ok(record) = serde_json::from_slice::(content) { + record.log(log::logger()) + } else { + eprintln!("(remote) {}", String::from_utf8_lossy(content)); + } + } + stderr_buffer.drain(0..start_ix); + stderr_offset -= start_ix; + + connection_activity_tx.try_send(()).ok(); + } + }); + + cx.background_spawn(async move { + let result = futures::select! { + result = stdin_task.fuse() => { + result.context("stdin") + } + result = stdout_task.fuse() => { + result.context("stdout") + } + result = stderr_task.fuse() => { + result.context("stderr") + } + }; + + let status = ssh_proxy_process.status().await?.code().unwrap_or(1); + match result { + Ok(_) => Ok(status), + Err(error) => Err(error), + } + }) +} + +#[cfg(debug_assertions)] +async fn build_remote_server_from_source( + platform: &crate::RemotePlatform, + delegate: &dyn crate::RemoteClientDelegate, + cx: &mut AsyncApp, +) -> Result> { + use std::path::Path; + + let Some(build_remote_server) = std::env::var("ZED_BUILD_REMOTE_SERVER").ok() else { + return Ok(None); + }; + + use smol::process::{Command, Stdio}; + use std::env::VarError; + + async fn run_cmd(command: &mut Command) -> Result<()> { + let output = command + .kill_on_drop(true) + .stderr(Stdio::inherit()) + .output() + .await?; + anyhow::ensure!( + output.status.success(), + "Failed to run command: {command:?}" + ); + Ok(()) + } + + let use_musl = 
!build_remote_server.contains("nomusl"); + let triple = format!( + "{}-{}", + platform.arch, + match platform.os { + "linux" => + if use_musl { + "unknown-linux-musl" + } else { + "unknown-linux-gnu" + }, + "macos" => "apple-darwin", + _ => anyhow::bail!("can't cross compile for: {:?}", platform), + } + ); + let mut rust_flags = match std::env::var("RUSTFLAGS") { + Ok(val) => val, + Err(VarError::NotPresent) => String::new(), + Err(e) => { + log::error!("Failed to get env var `RUSTFLAGS` value: {e}"); + String::new() + } + }; + if platform.os == "linux" && use_musl { + rust_flags.push_str(" -C target-feature=+crt-static"); + } + if build_remote_server.contains("mold") { + rust_flags.push_str(" -C link-arg=-fuse-ld=mold"); + } + + if platform.arch == std::env::consts::ARCH && platform.os == std::env::consts::OS { + delegate.set_status(Some("Building remote server binary from source"), cx); + log::info!("building remote server binary from source"); + run_cmd( + Command::new("cargo") + .args([ + "build", + "--package", + "remote_server", + "--features", + "debug-embed", + "--target-dir", + "target/remote_server", + "--target", + &triple, + ]) + .env("RUSTFLAGS", &rust_flags), + ) + .await?; + } else if build_remote_server.contains("cross") { + #[cfg(target_os = "windows")] + use util::paths::SanitizedPath; + + delegate.set_status(Some("Installing cross.rs for cross-compilation"), cx); + log::info!("installing cross"); + run_cmd(Command::new("cargo").args([ + "install", + "cross", + "--git", + "https://github.com/cross-rs/cross", + ])) + .await?; + + delegate.set_status( + Some(&format!( + "Building remote server binary from source for {} with Docker", + &triple + )), + cx, + ); + log::info!("building remote server binary from source for {}", &triple); + + // On Windows, the binding needs to be set to the canonical path + #[cfg(target_os = "windows")] + let src = SanitizedPath::new(&smol::fs::canonicalize("./target").await?).to_glob_string(); + #[cfg(not(target_os = 
"windows"))] + let src = "./target"; + + run_cmd( + Command::new("cross") + .args([ + "build", + "--package", + "remote_server", + "--features", + "debug-embed", + "--target-dir", + "target/remote_server", + "--target", + &triple, + ]) + .env( + "CROSS_CONTAINER_OPTS", + format!("--mount type=bind,src={src},dst=/app/target"), + ) + .env("RUSTFLAGS", &rust_flags), + ) + .await?; + } else { + let which = cx + .background_spawn(async move { which::which("zig") }) + .await; + + if which.is_err() { + #[cfg(not(target_os = "windows"))] + { + anyhow::bail!( + "zig not found on $PATH, install zig (see https://ziglang.org/learn/getting-started or use zigup) or pass ZED_BUILD_REMOTE_SERVER=cross to use cross" + ) + } + #[cfg(target_os = "windows")] + { + anyhow::bail!( + "zig not found on $PATH, install zig (use `winget install -e --id zig.zig` or see https://ziglang.org/learn/getting-started or use zigup) or pass ZED_BUILD_REMOTE_SERVER=cross to use cross" + ) + } + } + + delegate.set_status(Some("Adding rustup target for cross-compilation"), cx); + log::info!("adding rustup target"); + run_cmd(Command::new("rustup").args(["target", "add"]).arg(&triple)).await?; + + delegate.set_status(Some("Installing cargo-zigbuild for cross-compilation"), cx); + log::info!("installing cargo-zigbuild"); + run_cmd(Command::new("cargo").args(["install", "--locked", "cargo-zigbuild"])).await?; + + delegate.set_status( + Some(&format!( + "Building remote binary from source for {triple} with Zig" + )), + cx, + ); + log::info!("building remote binary from source for {triple} with Zig"); + run_cmd( + Command::new("cargo") + .args([ + "zigbuild", + "--package", + "remote_server", + "--features", + "debug-embed", + "--target-dir", + "target/remote_server", + "--target", + &triple, + ]) + .env("RUSTFLAGS", &rust_flags), + ) + .await?; + }; + let bin_path = Path::new("target") + .join("remote_server") + .join(&triple) + .join("debug") + .join("remote_server"); + + let path = if 
!build_remote_server.contains("nocompress") { + delegate.set_status(Some("Compressing binary"), cx); + + #[cfg(not(target_os = "windows"))] + { + run_cmd(Command::new("gzip").args(["-f", &bin_path.to_string_lossy()])).await?; + } + + #[cfg(target_os = "windows")] + { + // On Windows, we use 7z to compress the binary + let seven_zip = which::which("7z.exe").context("7z.exe not found on $PATH, install it (e.g. with `winget install -e --id 7zip.7zip`) or, if you don't want this behaviour, set $env:ZED_BUILD_REMOTE_SERVER=\"nocompress\"")?; + let gz_path = format!("target/remote_server/{}/debug/remote_server.gz", triple); + if smol::fs::metadata(&gz_path).await.is_ok() { + smol::fs::remove_file(&gz_path).await?; + } + run_cmd(Command::new(seven_zip).args([ + "a", + "-tgzip", + &gz_path, + &bin_path.to_string_lossy(), + ])) + .await?; + } + + let mut archive_path = bin_path; + archive_path.set_extension("gz"); + std::env::current_dir()?.join(archive_path) + } else { + bin_path + }; + + Ok(Some(path)) +} diff --git a/crates/remote/src/transport/ssh.rs b/crates/remote/src/transport/ssh.rs index 34f1ebf71c278538b57e486856f9b3315a41cf91..0995e0dd611ae667cc2e68638773c8b80bf2f22b 100644 --- a/crates/remote/src/transport/ssh.rs +++ b/crates/remote/src/transport/ssh.rs @@ -1,14 +1,12 @@ use crate::{ RemoteClientDelegate, RemotePlatform, - json_log::LogRecord, - protocol::{MESSAGE_LEN_SIZE, message_len_from_buffer, read_message_with_len, write_message}, - remote_client::{CommandTemplate, RemoteConnection}, + remote_client::{CommandTemplate, RemoteConnection, RemoteConnectionOptions}, }; use anyhow::{Context as _, Result, anyhow}; use async_trait::async_trait; use collections::HashMap; use futures::{ - AsyncReadExt as _, FutureExt as _, StreamExt as _, + AsyncReadExt as _, FutureExt as _, channel::mpsc::{Sender, UnboundedReceiver, UnboundedSender}, select_biased, }; @@ -99,8 +97,8 @@ impl RemoteConnection for SshRemoteConnection { self.master_process.lock().is_none() } - fn 
connection_options(&self) -> SshConnectionOptions { - self.socket.connection_options.clone() + fn connection_options(&self) -> RemoteConnectionOptions { + RemoteConnectionOptions::Ssh(self.socket.connection_options.clone()) } fn shell(&self) -> String { @@ -267,7 +265,7 @@ impl RemoteConnection for SshRemoteConnection { } }; - Self::multiplex( + super::handle_rpc_messages_over_child_process_stdio( ssh_proxy_process, incoming_tx, outgoing_rx, @@ -415,109 +413,6 @@ impl SshRemoteConnection { Ok(this) } - fn multiplex( - mut ssh_proxy_process: Child, - incoming_tx: UnboundedSender, - mut outgoing_rx: UnboundedReceiver, - mut connection_activity_tx: Sender<()>, - cx: &AsyncApp, - ) -> Task> { - let mut child_stderr = ssh_proxy_process.stderr.take().unwrap(); - let mut child_stdout = ssh_proxy_process.stdout.take().unwrap(); - let mut child_stdin = ssh_proxy_process.stdin.take().unwrap(); - - let mut stdin_buffer = Vec::new(); - let mut stdout_buffer = Vec::new(); - let mut stderr_buffer = Vec::new(); - let mut stderr_offset = 0; - - let stdin_task = cx.background_spawn(async move { - while let Some(outgoing) = outgoing_rx.next().await { - write_message(&mut child_stdin, &mut stdin_buffer, outgoing).await?; - } - anyhow::Ok(()) - }); - - let stdout_task = cx.background_spawn({ - let mut connection_activity_tx = connection_activity_tx.clone(); - async move { - loop { - stdout_buffer.resize(MESSAGE_LEN_SIZE, 0); - let len = child_stdout.read(&mut stdout_buffer).await?; - - if len == 0 { - return anyhow::Ok(()); - } - - if len < MESSAGE_LEN_SIZE { - child_stdout.read_exact(&mut stdout_buffer[len..]).await?; - } - - let message_len = message_len_from_buffer(&stdout_buffer); - let envelope = - read_message_with_len(&mut child_stdout, &mut stdout_buffer, message_len) - .await?; - connection_activity_tx.try_send(()).ok(); - incoming_tx.unbounded_send(envelope).ok(); - } - } - }); - - let stderr_task: Task> = cx.background_spawn(async move { - loop { - 
stderr_buffer.resize(stderr_offset + 1024, 0); - - let len = child_stderr - .read(&mut stderr_buffer[stderr_offset..]) - .await?; - if len == 0 { - return anyhow::Ok(()); - } - - stderr_offset += len; - let mut start_ix = 0; - while let Some(ix) = stderr_buffer[start_ix..stderr_offset] - .iter() - .position(|b| b == &b'\n') - { - let line_ix = start_ix + ix; - let content = &stderr_buffer[start_ix..line_ix]; - start_ix = line_ix + 1; - if let Ok(record) = serde_json::from_slice::(content) { - record.log(log::logger()) - } else { - eprintln!("(remote) {}", String::from_utf8_lossy(content)); - } - } - stderr_buffer.drain(0..start_ix); - stderr_offset -= start_ix; - - connection_activity_tx.try_send(()).ok(); - } - }); - - cx.background_spawn(async move { - let result = futures::select! { - result = stdin_task.fuse() => { - result.context("stdin") - } - result = stdout_task.fuse() => { - result.context("stdout") - } - result = stderr_task.fuse() => { - result.context("stderr") - } - }; - - let status = ssh_proxy_process.status().await?.code().unwrap_or(1); - match result { - Ok(_) => Ok(status), - Err(error) => Err(error), - } - }) - } - - #[allow(unused)] async fn ensure_server_binary( &self, delegate: &Arc, @@ -544,19 +439,20 @@ impl SshRemoteConnection { self.ssh_path_style, ); - let build_remote_server = std::env::var("ZED_BUILD_REMOTE_SERVER").ok(); #[cfg(debug_assertions)] - if let Some(build_remote_server) = build_remote_server { - let src_path = self.build_local(build_remote_server, delegate, cx).await?; + if let Some(remote_server_path) = + super::build_remote_server_from_source(&self.ssh_platform, delegate.as_ref(), cx) + .await? 
+ { let tmp_path = RemotePathBuf::new( paths::remote_server_dir_relative().join(format!( "download-{}-{}", std::process::id(), - src_path.file_name().unwrap().to_string_lossy() + remote_server_path.file_name().unwrap().to_string_lossy() )), self.ssh_path_style, ); - self.upload_local_server_binary(&src_path, &tmp_path, delegate, cx) + self.upload_local_server_binary(&remote_server_path, &tmp_path, delegate, cx) .await?; self.extract_server_binary(&dst_path, &tmp_path, delegate, cx) .await?; @@ -794,221 +690,6 @@ impl SshRemoteConnection { ); Ok(()) } - - #[cfg(debug_assertions)] - async fn build_local( - &self, - build_remote_server: String, - delegate: &Arc, - cx: &mut AsyncApp, - ) -> Result { - use smol::process::{Command, Stdio}; - use std::env::VarError; - - async fn run_cmd(command: &mut Command) -> Result<()> { - let output = command - .kill_on_drop(true) - .stderr(Stdio::inherit()) - .output() - .await?; - anyhow::ensure!( - output.status.success(), - "Failed to run command: {command:?}" - ); - Ok(()) - } - - let use_musl = !build_remote_server.contains("nomusl"); - let triple = format!( - "{}-{}", - self.ssh_platform.arch, - match self.ssh_platform.os { - "linux" => - if use_musl { - "unknown-linux-musl" - } else { - "unknown-linux-gnu" - }, - "macos" => "apple-darwin", - _ => anyhow::bail!("can't cross compile for: {:?}", self.ssh_platform), - } - ); - let mut rust_flags = match std::env::var("RUSTFLAGS") { - Ok(val) => val, - Err(VarError::NotPresent) => String::new(), - Err(e) => { - log::error!("Failed to get env var `RUSTFLAGS` value: {e}"); - String::new() - } - }; - if self.ssh_platform.os == "linux" && use_musl { - rust_flags.push_str(" -C target-feature=+crt-static"); - } - if build_remote_server.contains("mold") { - rust_flags.push_str(" -C link-arg=-fuse-ld=mold"); - } - - if self.ssh_platform.arch == std::env::consts::ARCH - && self.ssh_platform.os == std::env::consts::OS - { - delegate.set_status(Some("Building remote server binary from 
source"), cx); - log::info!("building remote server binary from source"); - run_cmd( - Command::new("cargo") - .args([ - "build", - "--package", - "remote_server", - "--features", - "debug-embed", - "--target-dir", - "target/remote_server", - "--target", - &triple, - ]) - .env("RUSTFLAGS", &rust_flags), - ) - .await?; - } else if build_remote_server.contains("cross") { - #[cfg(target_os = "windows")] - use util::paths::SanitizedPath; - - delegate.set_status(Some("Installing cross.rs for cross-compilation"), cx); - log::info!("installing cross"); - run_cmd(Command::new("cargo").args([ - "install", - "cross", - "--git", - "https://github.com/cross-rs/cross", - ])) - .await?; - - delegate.set_status( - Some(&format!( - "Building remote server binary from source for {} with Docker", - &triple - )), - cx, - ); - log::info!("building remote server binary from source for {}", &triple); - - // On Windows, the binding needs to be set to the canonical path - #[cfg(target_os = "windows")] - let src = - SanitizedPath::new(&smol::fs::canonicalize("./target").await?).to_glob_string(); - #[cfg(not(target_os = "windows"))] - let src = "./target"; - run_cmd( - Command::new("cross") - .args([ - "build", - "--package", - "remote_server", - "--features", - "debug-embed", - "--target-dir", - "target/remote_server", - "--target", - &triple, - ]) - .env( - "CROSS_CONTAINER_OPTS", - format!("--mount type=bind,src={src},dst=/app/target"), - ) - .env("RUSTFLAGS", &rust_flags), - ) - .await?; - } else { - let which = cx - .background_spawn(async move { which::which("zig") }) - .await; - - if which.is_err() { - #[cfg(not(target_os = "windows"))] - { - anyhow::bail!( - "zig not found on $PATH, install zig (see https://ziglang.org/learn/getting-started or use zigup) or pass ZED_BUILD_REMOTE_SERVER=cross to use cross" - ) - } - #[cfg(target_os = "windows")] - { - anyhow::bail!( - "zig not found on $PATH, install zig (use `winget install -e --id zig.zig` or see 
https://ziglang.org/learn/getting-started or use zigup) or pass ZED_BUILD_REMOTE_SERVER=cross to use cross" - ) - } - } - - delegate.set_status(Some("Adding rustup target for cross-compilation"), cx); - log::info!("adding rustup target"); - run_cmd(Command::new("rustup").args(["target", "add"]).arg(&triple)).await?; - - delegate.set_status(Some("Installing cargo-zigbuild for cross-compilation"), cx); - log::info!("installing cargo-zigbuild"); - run_cmd(Command::new("cargo").args(["install", "--locked", "cargo-zigbuild"])).await?; - - delegate.set_status( - Some(&format!( - "Building remote binary from source for {triple} with Zig" - )), - cx, - ); - log::info!("building remote binary from source for {triple} with Zig"); - run_cmd( - Command::new("cargo") - .args([ - "zigbuild", - "--package", - "remote_server", - "--features", - "debug-embed", - "--target-dir", - "target/remote_server", - "--target", - &triple, - ]) - .env("RUSTFLAGS", &rust_flags), - ) - .await?; - }; - let bin_path = Path::new("target") - .join("remote_server") - .join(&triple) - .join("debug") - .join("remote_server"); - - let path = if !build_remote_server.contains("nocompress") { - delegate.set_status(Some("Compressing binary"), cx); - - #[cfg(not(target_os = "windows"))] - { - run_cmd(Command::new("gzip").args(["-f", &bin_path.to_string_lossy()])).await?; - } - #[cfg(target_os = "windows")] - { - // On Windows, we use 7z to compress the binary - let seven_zip = which::which("7z.exe").context("7z.exe not found on $PATH, install it (e.g. 
with `winget install -e --id 7zip.7zip`) or, if you don't want this behaviour, set $env:ZED_BUILD_REMOTE_SERVER=\"nocompress\"")?; - let gz_path = format!("target/remote_server/{}/debug/remote_server.gz", triple); - if smol::fs::metadata(&gz_path).await.is_ok() { - smol::fs::remove_file(&gz_path).await?; - } - run_cmd(Command::new(seven_zip).args([ - "a", - "-tgzip", - &gz_path, - &bin_path.to_string_lossy(), - ])) - .await?; - } - - let mut archive_path = bin_path; - archive_path.set_extension("gz"); - std::env::current_dir()?.join(archive_path) - } else { - bin_path - }; - - Ok(path) - } } impl SshSocket { diff --git a/crates/remote/src/transport/wsl.rs b/crates/remote/src/transport/wsl.rs new file mode 100644 index 0000000000000000000000000000000000000000..ea8f2443d9a674492674bdc2fb19f2a021b03dcc --- /dev/null +++ b/crates/remote/src/transport/wsl.rs @@ -0,0 +1,494 @@ +use crate::{ + RemoteClientDelegate, RemotePlatform, + remote_client::{CommandTemplate, RemoteConnection, RemoteConnectionOptions}, +}; +use anyhow::{Result, anyhow, bail}; +use async_trait::async_trait; +use collections::HashMap; +use futures::channel::mpsc::{Sender, UnboundedReceiver, UnboundedSender}; +use gpui::{App, AppContext as _, AsyncApp, SemanticVersion, Task}; +use release_channel::{AppCommitSha, AppVersion, ReleaseChannel}; +use rpc::proto::Envelope; +use smol::{fs, process}; +use std::{ + fmt::Write as _, + path::{Path, PathBuf}, + process::Stdio, + sync::Arc, + time::Instant, +}; +use util::paths::{PathStyle, RemotePathBuf}; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct WslConnectionOptions { + pub distro_name: String, + pub user: Option, +} + +pub(crate) struct WslRemoteConnection { + remote_binary_path: Option, + platform: RemotePlatform, + shell: String, + connection_options: WslConnectionOptions, +} + +impl WslRemoteConnection { + pub(crate) async fn new( + connection_options: WslConnectionOptions, + delegate: Arc, + cx: &mut AsyncApp, + ) -> Result { + log::info!( 
+ "Connecting to WSL distro {} with user {:?}", + connection_options.distro_name, + connection_options.user + ); + let (release_channel, version, commit) = cx.update(|cx| { + ( + ReleaseChannel::global(cx), + AppVersion::global(cx), + AppCommitSha::try_global(cx), + ) + })?; + + let mut this = Self { + connection_options, + remote_binary_path: None, + platform: RemotePlatform { os: "", arch: "" }, + shell: String::new(), + }; + delegate.set_status(Some("Detecting WSL environment"), cx); + this.platform = this.detect_platform().await?; + this.shell = this.detect_shell().await?; + this.remote_binary_path = Some( + this.ensure_server_binary(&delegate, release_channel, version, commit, cx) + .await?, + ); + + Ok(this) + } + + async fn detect_platform(&self) -> Result { + let arch_str = self.run_wsl_command("uname", &["-m"]).await?; + let arch_str = arch_str.trim().to_string(); + let arch = match arch_str.as_str() { + "x86_64" => "x86_64", + "aarch64" | "arm64" => "aarch64", + _ => "x86_64", + }; + Ok(RemotePlatform { os: "linux", arch }) + } + + async fn detect_shell(&self) -> Result { + Ok(self + .run_wsl_command("sh", &["-c", "echo $SHELL"]) + .await + .ok() + .and_then(|shell_path| shell_path.trim().split('/').next_back().map(str::to_string)) + .unwrap_or_else(|| "bash".to_string())) + } + + async fn windows_path_to_wsl_path(&self, source: &Path) -> Result { + windows_path_to_wsl_path_impl(&self.connection_options, source).await + } + + fn wsl_command(&self, program: &str, args: &[&str]) -> process::Command { + wsl_command_impl(&self.connection_options, program, args) + } + + async fn run_wsl_command(&self, program: &str, args: &[&str]) -> Result { + run_wsl_command_impl(&self.connection_options, program, args).await + } + + async fn ensure_server_binary( + &self, + delegate: &Arc, + release_channel: ReleaseChannel, + version: SemanticVersion, + commit: Option, + cx: &mut AsyncApp, + ) -> Result { + let version_str = match release_channel { + ReleaseChannel::Nightly 
=> { + let commit = commit.map(|s| s.full()).unwrap_or_default(); + format!("{}-{}", version, commit) + } + ReleaseChannel::Dev => "build".to_string(), + _ => version.to_string(), + }; + + let binary_name = format!( + "zed-remote-server-{}-{}", + release_channel.dev_name(), + version_str + ); + + let dst_path = RemotePathBuf::new( + paths::remote_wsl_server_dir_relative().join(binary_name), + PathStyle::Posix, + ); + + if let Some(parent) = dst_path.parent() { + self.run_wsl_command("mkdir", &["-p", &parent.to_string()]) + .await + .map_err(|e| anyhow!("Failed to create directory: {}", e))?; + } + + #[cfg(debug_assertions)] + if let Some(remote_server_path) = + super::build_remote_server_from_source(&self.platform, delegate.as_ref(), cx).await? + { + let tmp_path = RemotePathBuf::new( + paths::remote_wsl_server_dir_relative().join(format!( + "download-{}-{}", + std::process::id(), + remote_server_path.file_name().unwrap().to_string_lossy() + )), + PathStyle::Posix, + ); + self.upload_file(&remote_server_path, &tmp_path, delegate, cx) + .await?; + self.extract_and_install(&tmp_path, &dst_path, delegate, cx) + .await?; + return Ok(dst_path); + } + + if self + .run_wsl_command(&dst_path.to_string(), &["version"]) + .await + .is_ok() + { + return Ok(dst_path); + } + + delegate.set_status(Some("Installing remote server"), cx); + + let wanted_version = match release_channel { + ReleaseChannel::Nightly => None, + ReleaseChannel::Dev => { + return Err(anyhow!("Dev builds require manual installation")); + } + _ => Some(cx.update(|cx| AppVersion::global(cx))?), + }; + + let src_path = delegate + .download_server_binary_locally(self.platform, release_channel, wanted_version, cx) + .await?; + + let tmp_path = RemotePathBuf::new( + PathBuf::from(format!("{}.{}.tmp", dst_path, std::process::id())), + PathStyle::Posix, + ); + + self.upload_file(&src_path, &tmp_path, delegate, cx).await?; + self.extract_and_install(&tmp_path, &dst_path, delegate, cx) + .await?; + + Ok(dst_path) + 
} + + async fn upload_file( + &self, + src_path: &Path, + dst_path: &RemotePathBuf, + delegate: &Arc, + cx: &mut AsyncApp, + ) -> Result<()> { + delegate.set_status(Some("Uploading remote server to WSL"), cx); + + if let Some(parent) = dst_path.parent() { + self.run_wsl_command("mkdir", &["-p", &parent.to_string()]) + .await + .map_err(|e| anyhow!("Failed to create directory when uploading file: {}", e))?; + } + + let t0 = Instant::now(); + let src_stat = fs::metadata(&src_path).await?; + let size = src_stat.len(); + log::info!( + "uploading remote server to WSL {:?} ({}kb)", + dst_path, + size / 1024 + ); + + let src_path_in_wsl = self.windows_path_to_wsl_path(src_path).await?; + self.run_wsl_command("cp", &["-f", &src_path_in_wsl, &dst_path.to_string()]) + .await + .map_err(|e| { + anyhow!( + "Failed to copy file {}({}) to WSL {:?}: {}", + src_path.display(), + src_path_in_wsl, + dst_path, + e + ) + })?; + + log::info!("uploaded remote server in {:?}", t0.elapsed()); + Ok(()) + } + + async fn extract_and_install( + &self, + tmp_path: &RemotePathBuf, + dst_path: &RemotePathBuf, + delegate: &Arc, + cx: &mut AsyncApp, + ) -> Result<()> { + delegate.set_status(Some("Extracting remote server"), cx); + + let tmp_path_str = tmp_path.to_string(); + let dst_path_str = dst_path.to_string(); + + // Build extraction script with proper error handling + let script = if tmp_path_str.ends_with(".gz") { + let uncompressed = tmp_path_str.trim_end_matches(".gz"); + format!( + "set -e; gunzip -f '{}' && chmod 755 '{}' && mv -f '{}' '{}'", + tmp_path_str, uncompressed, uncompressed, dst_path_str + ) + } else { + format!( + "set -e; chmod 755 '{}' && mv -f '{}' '{}'", + tmp_path_str, tmp_path_str, dst_path_str + ) + }; + + self.run_wsl_command("sh", &["-c", &script]) + .await + .map_err(|e| anyhow!("Failed to extract server binary: {}", e))?; + Ok(()) + } +} + +#[async_trait(?Send)] +impl RemoteConnection for WslRemoteConnection { + fn start_proxy( + &self, + unique_identifier: 
String, + reconnect: bool, + incoming_tx: UnboundedSender, + outgoing_rx: UnboundedReceiver, + connection_activity_tx: Sender<()>, + delegate: Arc, + cx: &mut AsyncApp, + ) -> Task> { + delegate.set_status(Some("Starting proxy"), cx); + + let Some(remote_binary_path) = &self.remote_binary_path else { + return Task::ready(Err(anyhow!("Remote binary path not set"))); + }; + + let mut proxy_command = format!( + "exec {} proxy --identifier {}", + remote_binary_path, unique_identifier + ); + + if reconnect { + proxy_command.push_str(" --reconnect"); + } + + for env_var in ["RUST_LOG", "RUST_BACKTRACE", "ZED_GENERATE_MINIDUMPS"] { + if let Some(value) = std::env::var(env_var).ok() { + proxy_command = format!("{}='{}' {}", env_var, value, proxy_command); + } + } + let proxy_process = match self + .wsl_command("sh", &["-lc", &proxy_command]) + .kill_on_drop(true) + .spawn() + { + Ok(process) => process, + Err(error) => { + return Task::ready(Err(anyhow!("failed to spawn remote server: {}", error))); + } + }; + + super::handle_rpc_messages_over_child_process_stdio( + proxy_process, + incoming_tx, + outgoing_rx, + connection_activity_tx, + cx, + ) + } + + fn upload_directory( + &self, + src_path: PathBuf, + dest_path: RemotePathBuf, + cx: &App, + ) -> Task> { + cx.background_spawn({ + let options = self.connection_options.clone(); + async move { + let wsl_src = windows_path_to_wsl_path_impl(&options, &src_path).await?; + + run_wsl_command_impl(&options, "cp", &["-r", &wsl_src, &dest_path.to_string()]) + .await + .map_err(|e| { + anyhow!( + "failed to upload directory {} -> {}: {}", + src_path.display(), + dest_path.to_string(), + e + ) + })?; + + Ok(()) + } + }) + } + + async fn kill(&self) -> Result<()> { + Ok(()) + } + + fn has_been_killed(&self) -> bool { + false + } + + fn shares_network_interface(&self) -> bool { + true + } + + fn build_command( + &self, + program: Option, + args: &[String], + env: &HashMap, + working_dir: Option, + port_forward: Option<(u16, String, 
u16)>, + ) -> Result { + if port_forward.is_some() { + bail!("WSL shares the network interface with the host system"); + } + + let working_dir = working_dir + .map(|working_dir| RemotePathBuf::new(working_dir.into(), PathStyle::Posix).to_string()) + .unwrap_or("~".to_string()); + + let mut script = String::new(); + + for (k, v) in env.iter() { + write!(&mut script, "{}='{}' ", k, v).unwrap(); + } + + if let Some(program) = program { + let command = shlex::try_quote(&program)?; + script.push_str(&command); + for arg in args { + let arg = shlex::try_quote(&arg)?; + script.push_str(" "); + script.push_str(&arg); + } + } else { + write!(&mut script, "exec {} -l", self.shell).unwrap(); + } + + let wsl_args = if let Some(user) = &self.connection_options.user { + vec![ + "--distribution".to_string(), + self.connection_options.distro_name.clone(), + "--user".to_string(), + user.clone(), + "--cd".to_string(), + working_dir, + "--".to_string(), + self.shell.clone(), + "-c".to_string(), + shlex::try_quote(&script)?.to_string(), + ] + } else { + vec![ + "--distribution".to_string(), + self.connection_options.distro_name.clone(), + "--cd".to_string(), + working_dir, + "--".to_string(), + self.shell.clone(), + "-c".to_string(), + shlex::try_quote(&script)?.to_string(), + ] + }; + + Ok(CommandTemplate { + program: "wsl.exe".to_string(), + args: wsl_args, + env: HashMap::default(), + }) + } + + fn connection_options(&self) -> RemoteConnectionOptions { + RemoteConnectionOptions::Wsl(self.connection_options.clone()) + } + + fn path_style(&self) -> PathStyle { + PathStyle::Posix + } + + fn shell(&self) -> String { + self.shell.clone() + } +} + +/// `wslpath` is a executable available in WSL, it's a linux binary. +/// So it doesn't support Windows style paths. 
+async fn sanitize_path(path: &Path) -> Result { + let path = smol::fs::canonicalize(path).await?; + let path_str = path.to_string_lossy(); + + let sanitized = path_str.strip_prefix(r"\\?\").unwrap_or(&path_str); + Ok(sanitized.replace('\\', "/")) +} + +async fn windows_path_to_wsl_path_impl( + options: &WslConnectionOptions, + source: &Path, +) -> Result { + let source = sanitize_path(source).await?; + run_wsl_command_impl(options, "wslpath", &["-u", &source]).await +} + +fn wsl_command_impl( + options: &WslConnectionOptions, + program: &str, + args: &[&str], +) -> process::Command { + let mut command = util::command::new_smol_command("wsl.exe"); + + if let Some(user) = &options.user { + command.arg("--user").arg(user); + } + + command + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .arg("--distribution") + .arg(&options.distro_name) + .arg("--cd") + .arg("~") + .arg(program) + .args(args); + + command +} + +async fn run_wsl_command_impl( + options: &WslConnectionOptions, + program: &str, + args: &[&str], +) -> Result { + let output = wsl_command_impl(options, program, args).output().await?; + + if !output.status.success() { + return Err(anyhow!( + "Command '{}' failed: {}", + program, + String::from_utf8_lossy(&output.stderr).trim() + )); + } + + Ok(String::from_utf8_lossy(&output.stdout).trim().to_string()) +} diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index 075b9fcd86276244d154be1aebe904fbfb4a7b6c..2b13ef58c3a8707b81d6870590efe5337ffef048 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -32,6 +32,7 @@ use gpui::{ use keymap_editor; use onboarding_banner::OnboardingBanner; use project::Project; +use remote::RemoteConnectionOptions; use settings::Settings as _; use std::sync::Arc; use theme::ActiveTheme; @@ -304,12 +305,14 @@ impl TitleBar { fn render_remote_project_connection(&self, cx: &mut Context) -> Option { let options = 
self.project.read(cx).remote_connection_options(cx)?; - let host: SharedString = options.connection_string().into(); + let host: SharedString = options.display_name().into(); - let nickname = options - .nickname - .map(|nick| nick.into()) - .unwrap_or_else(|| host.clone()); + let nickname = if let RemoteConnectionOptions::Ssh(options) = options { + options.nickname.map(|nick| nick.into()) + } else { + None + }; + let nickname = nickname.unwrap_or_else(|| host.clone()); let (indicator_color, meta) = match self.project.read(cx).remote_connection_state(cx)? { remote::ConnectionState::Connecting => (Color::Info, format!("Connecting to: {host}")), diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index 3ef9ff65eb0fe5aedfd5e72aa18f1481a011fce7..160823f547f3ab0019d4a631550aec70f1ca101e 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -20,6 +20,7 @@ use project::debugger::breakpoint_store::{BreakpointState, SourceBreakpoint}; use language::{LanguageName, Toolchain}; use project::WorktreeId; +use remote::{RemoteConnectionOptions, SshConnectionOptions, WslConnectionOptions}; use sqlez::{ bindable::{Bind, Column, StaticColumnCount}, statement::{SqlType, Statement}, @@ -33,11 +34,12 @@ use uuid::Uuid; use crate::{ WorkspaceId, path_list::{PathList, SerializedPathList}, + persistence::model::RemoteConnectionKind, }; use model::{ - GroupId, ItemId, PaneId, SerializedItem, SerializedPane, SerializedPaneGroup, - SerializedSshConnection, SerializedWorkspace, SshConnectionId, + GroupId, ItemId, PaneId, RemoteConnectionId, SerializedItem, SerializedPane, + SerializedPaneGroup, SerializedWorkspace, }; use self::model::{DockStructure, SerializedWorkspaceLocation}; @@ -627,6 +629,88 @@ impl Domain for WorkspaceDb { END WHERE paths IS NOT NULL ), + sql!( + CREATE TABLE remote_connections( + id INTEGER PRIMARY KEY, + kind TEXT NOT NULL, + host TEXT, + port INTEGER, + user TEXT, + distro TEXT + ); + + CREATE 
TABLE workspaces_2( + workspace_id INTEGER PRIMARY KEY, + paths TEXT, + paths_order TEXT, + remote_connection_id INTEGER REFERENCES remote_connections(id), + timestamp TEXT DEFAULT CURRENT_TIMESTAMP NOT NULL, + window_state TEXT, + window_x REAL, + window_y REAL, + window_width REAL, + window_height REAL, + display BLOB, + left_dock_visible INTEGER, + left_dock_active_panel TEXT, + right_dock_visible INTEGER, + right_dock_active_panel TEXT, + bottom_dock_visible INTEGER, + bottom_dock_active_panel TEXT, + left_dock_zoom INTEGER, + right_dock_zoom INTEGER, + bottom_dock_zoom INTEGER, + fullscreen INTEGER, + centered_layout INTEGER, + session_id TEXT, + window_id INTEGER + ) STRICT; + + INSERT INTO remote_connections + SELECT + id, + "ssh" as kind, + host, + port, + user, + NULL as distro + FROM ssh_connections; + + INSERT + INTO workspaces_2 + SELECT + workspace_id, + paths, + paths_order, + ssh_connection_id as remote_connection_id, + timestamp, + window_state, + window_x, + window_y, + window_width, + window_height, + display, + left_dock_visible, + left_dock_active_panel, + right_dock_visible, + right_dock_active_panel, + bottom_dock_visible, + bottom_dock_active_panel, + left_dock_zoom, + right_dock_zoom, + bottom_dock_zoom, + fullscreen, + centered_layout, + session_id, + window_id + FROM + workspaces; + + DROP TABLE workspaces; + ALTER TABLE workspaces_2 RENAME TO workspaces; + + CREATE UNIQUE INDEX ix_workspaces_location ON workspaces(remote_connection_id, paths); + ), ]; // Allow recovering from bad migration that was initially shipped to nightly @@ -650,10 +734,10 @@ impl WorkspaceDb { self.workspace_for_roots_internal(worktree_roots, None) } - pub(crate) fn ssh_workspace_for_roots>( + pub(crate) fn remote_workspace_for_roots>( &self, worktree_roots: &[P], - ssh_project_id: SshConnectionId, + ssh_project_id: RemoteConnectionId, ) -> Option { self.workspace_for_roots_internal(worktree_roots, Some(ssh_project_id)) } @@ -661,7 +745,7 @@ impl WorkspaceDb { 
pub(crate) fn workspace_for_roots_internal>( &self, worktree_roots: &[P], - ssh_connection_id: Option, + remote_connection_id: Option, ) -> Option { // paths are sorted before db interactions to ensure that the order of the paths // doesn't affect the workspace selection for existing workspaces @@ -713,13 +797,13 @@ impl WorkspaceDb { FROM workspaces WHERE paths IS ? AND - ssh_connection_id IS ? + remote_connection_id IS ? LIMIT 1 }) .map(|mut prepared_statement| { (prepared_statement)(( root_paths.serialize().paths, - ssh_connection_id.map(|id| id.0 as i32), + remote_connection_id.map(|id| id.0 as i32), )) .unwrap() }) @@ -803,14 +887,12 @@ impl WorkspaceDb { log::debug!("Saving workspace at location: {:?}", workspace.location); self.write(move |conn| { conn.with_savepoint("update_worktrees", || { - let ssh_connection_id = match &workspace.location { + let remote_connection_id = match workspace.location.clone() { SerializedWorkspaceLocation::Local => None, - SerializedWorkspaceLocation::Ssh(connection) => { - Some(Self::get_or_create_ssh_connection_query( + SerializedWorkspaceLocation::Remote(connection_options) => { + Some(Self::get_or_create_remote_connection_internal( conn, - connection.host.clone(), - connection.port, - connection.user.clone(), + connection_options )?.0) } }; @@ -860,11 +942,11 @@ impl WorkspaceDb { WHERE workspace_id != ?1 AND paths IS ?2 AND - ssh_connection_id IS ?3 + remote_connection_id IS ?3 ))?(( workspace.id, paths.paths.clone(), - ssh_connection_id, + remote_connection_id, )) .context("clearing out old locations")?; @@ -874,7 +956,7 @@ impl WorkspaceDb { workspace_id, paths, paths_order, - ssh_connection_id, + remote_connection_id, left_dock_visible, left_dock_active_panel, left_dock_zoom, @@ -893,7 +975,7 @@ impl WorkspaceDb { UPDATE SET paths = ?2, paths_order = ?3, - ssh_connection_id = ?4, + remote_connection_id = ?4, left_dock_visible = ?5, left_dock_active_panel = ?6, left_dock_zoom = ?7, @@ -912,7 +994,7 @@ impl WorkspaceDb { 
workspace.id, paths.paths.clone(), paths.order.clone(), - ssh_connection_id, + remote_connection_id, workspace.docks, workspace.session_id, workspace.window_id, @@ -931,39 +1013,78 @@ impl WorkspaceDb { .await; } - pub(crate) async fn get_or_create_ssh_connection( + pub(crate) async fn get_or_create_remote_connection( &self, - host: String, - port: Option, - user: Option, - ) -> Result { - self.write(move |conn| Self::get_or_create_ssh_connection_query(conn, host, port, user)) + options: RemoteConnectionOptions, + ) -> Result { + self.write(move |conn| Self::get_or_create_remote_connection_internal(conn, options)) .await } - fn get_or_create_ssh_connection_query( + fn get_or_create_remote_connection_internal( + this: &Connection, + options: RemoteConnectionOptions, + ) -> Result { + let kind; + let user; + let mut host = None; + let mut port = None; + let mut distro = None; + match options { + RemoteConnectionOptions::Ssh(options) => { + kind = RemoteConnectionKind::Ssh; + host = Some(options.host); + port = options.port; + user = options.username; + } + RemoteConnectionOptions::Wsl(options) => { + kind = RemoteConnectionKind::Wsl; + distro = Some(options.distro_name); + user = options.user; + } + } + Self::get_or_create_remote_connection_query(this, kind, host, port, user, distro) + } + + fn get_or_create_remote_connection_query( this: &Connection, - host: String, + kind: RemoteConnectionKind, + host: Option, port: Option, user: Option, - ) -> Result { + distro: Option, + ) -> Result { if let Some(id) = this.select_row_bound(sql!( - SELECT id FROM ssh_connections WHERE host IS ? AND port IS ? AND user IS ? LIMIT 1 - ))?((host.clone(), port, user.clone()))? - { - Ok(SshConnectionId(id)) + SELECT id + FROM remote_connections + WHERE + kind IS ? AND + host IS ? AND + port IS ? AND + user IS ? AND + distro IS ? + LIMIT 1 + ))?(( + kind.serialize(), + host.clone(), + port, + user.clone(), + distro.clone(), + ))? 
{ + Ok(RemoteConnectionId(id)) } else { - log::debug!("Inserting SSH project at host {host}"); let id = this.select_row_bound(sql!( - INSERT INTO ssh_connections ( + INSERT INTO remote_connections ( + kind, host, port, - user - ) VALUES (?1, ?2, ?3) + user, + distro + ) VALUES (?1, ?2, ?3, ?4, ?5) RETURNING id - ))?((host, port, user))? - .context("failed to insert ssh project")?; - Ok(SshConnectionId(id)) + ))?((kind.serialize(), host, port, user, distro))? + .context("failed to insert remote project")?; + Ok(RemoteConnectionId(id)) } } @@ -973,15 +1094,17 @@ impl WorkspaceDb { } } - fn recent_workspaces(&self) -> Result)>> { + fn recent_workspaces( + &self, + ) -> Result)>> { Ok(self .recent_workspaces_query()? .into_iter() - .map(|(id, paths, order, ssh_connection_id)| { + .map(|(id, paths, order, remote_connection_id)| { ( id, PathList::deserialize(&SerializedPathList { paths, order }), - ssh_connection_id, + remote_connection_id.map(RemoteConnectionId), ) }) .collect()) @@ -1001,7 +1124,7 @@ impl WorkspaceDb { fn session_workspaces( &self, session_id: String, - ) -> Result, Option)>> { + ) -> Result, Option)>> { Ok(self .session_workspaces_query(session_id)? .into_iter() @@ -1009,7 +1132,7 @@ impl WorkspaceDb { ( PathList::deserialize(&SerializedPathList { paths, order }), window_id, - ssh_connection_id.map(SshConnectionId), + ssh_connection_id.map(RemoteConnectionId), ) }) .collect()) @@ -1017,7 +1140,7 @@ impl WorkspaceDb { query! { fn session_workspaces_query(session_id: String) -> Result, Option)>> { - SELECT paths, paths_order, window_id, ssh_connection_id + SELECT paths, paths_order, window_id, remote_connection_id FROM workspaces WHERE session_id = ?1 ORDER BY timestamp DESC @@ -1039,40 +1162,55 @@ impl WorkspaceDb { } } - fn ssh_connections(&self) -> Result> { - Ok(self - .ssh_connections_query()? 
- .into_iter() - .map(|(id, host, port, user)| { - ( - SshConnectionId(id), - SerializedSshConnection { host, port, user }, - ) - }) - .collect()) - } - - query! { - pub fn ssh_connections_query() -> Result, Option)>> { - SELECT id, host, port, user - FROM ssh_connections - } - } - - pub(crate) fn ssh_connection(&self, id: SshConnectionId) -> Result { - let row = self.ssh_connection_query(id.0)?; - Ok(SerializedSshConnection { - host: row.0, - port: row.1, - user: row.2, + fn remote_connections(&self) -> Result> { + Ok(self.select(sql!( + SELECT + id, kind, host, port, user, distro + FROM + remote_connections + ))?()? + .into_iter() + .filter_map(|(id, kind, host, port, user, distro)| { + Some(( + RemoteConnectionId(id), + Self::remote_connection_from_row(kind, host, port, user, distro)?, + )) }) + .collect()) } - query! { - fn ssh_connection_query(id: u64) -> Result<(String, Option, Option)> { - SELECT host, port, user - FROM ssh_connections + pub(crate) fn remote_connection( + &self, + id: RemoteConnectionId, + ) -> Result { + let (kind, host, port, user, distro) = self.select_row_bound(sql!( + SELECT kind, host, port, user, distro + FROM remote_connections WHERE id = ? + ))?(id.0)? + .context("no such remote connection")?; + Self::remote_connection_from_row(kind, host, port, user, distro) + .context("invalid remote_connection row") + } + + fn remote_connection_from_row( + kind: String, + host: Option, + port: Option, + user: Option, + distro: Option, + ) -> Option { + match RemoteConnectionKind::deserialize(&kind)? 
{ + RemoteConnectionKind::Wsl => Some(RemoteConnectionOptions::Wsl(WslConnectionOptions { + distro_name: distro?, + user: user, + })), + RemoteConnectionKind::Ssh => Some(RemoteConnectionOptions::Ssh(SshConnectionOptions { + host: host?, + port, + username: user, + ..Default::default() + })), } } @@ -1108,14 +1246,14 @@ impl WorkspaceDb { ) -> Result> { let mut result = Vec::new(); let mut delete_tasks = Vec::new(); - let ssh_connections = self.ssh_connections()?; + let remote_connections = self.remote_connections()?; - for (id, paths, ssh_connection_id) in self.recent_workspaces()? { - if let Some(ssh_connection_id) = ssh_connection_id.map(SshConnectionId) { - if let Some(ssh_connection) = ssh_connections.get(&ssh_connection_id) { + for (id, paths, remote_connection_id) in self.recent_workspaces()? { + if let Some(remote_connection_id) = remote_connection_id { + if let Some(connection_options) = remote_connections.get(&remote_connection_id) { result.push(( id, - SerializedWorkspaceLocation::Ssh(ssh_connection.clone()), + SerializedWorkspaceLocation::Remote(connection_options.clone()), paths, )); } else { @@ -1157,12 +1295,14 @@ impl WorkspaceDb { ) -> Result> { let mut workspaces = Vec::new(); - for (paths, window_id, ssh_connection_id) in + for (paths, window_id, remote_connection_id) in self.session_workspaces(last_session_id.to_owned())? 
{ - if let Some(ssh_connection_id) = ssh_connection_id { + if let Some(remote_connection_id) = remote_connection_id { workspaces.push(( - SerializedWorkspaceLocation::Ssh(self.ssh_connection(ssh_connection_id)?), + SerializedWorkspaceLocation::Remote( + self.remote_connection(remote_connection_id)?, + ), paths, window_id.map(WindowId::from), )); @@ -1545,6 +1685,7 @@ mod tests { }; use gpui; use pretty_assertions::assert_eq; + use remote::SshConnectionOptions; use std::{thread, time::Duration}; #[gpui::test] @@ -2196,14 +2337,20 @@ mod tests { }; let connection_id = db - .get_or_create_ssh_connection("my-host".to_string(), Some(1234), None) + .get_or_create_remote_connection(RemoteConnectionOptions::Ssh(SshConnectionOptions { + host: "my-host".to_string(), + port: Some(1234), + ..Default::default() + })) .await .unwrap(); let workspace_5 = SerializedWorkspace { id: WorkspaceId(5), paths: PathList::default(), - location: SerializedWorkspaceLocation::Ssh(db.ssh_connection(connection_id).unwrap()), + location: SerializedWorkspaceLocation::Remote( + db.remote_connection(connection_id).unwrap(), + ), center_group: Default::default(), window_bounds: Default::default(), display: Default::default(), @@ -2362,13 +2509,12 @@ mod tests { } #[gpui::test] - async fn test_last_session_workspace_locations_ssh_projects() { - let db = WorkspaceDb::open_test_db( - "test_serializing_workspaces_last_session_workspaces_ssh_projects", - ) - .await; + async fn test_last_session_workspace_locations_remote() { + let db = + WorkspaceDb::open_test_db("test_serializing_workspaces_last_session_workspaces_remote") + .await; - let ssh_connections = [ + let remote_connections = [ ("host-1", "my-user-1"), ("host-2", "my-user-2"), ("host-3", "my-user-3"), @@ -2376,30 +2522,31 @@ mod tests { ] .into_iter() .map(|(host, user)| async { - db.get_or_create_ssh_connection(host.to_string(), None, Some(user.to_string())) + let options = RemoteConnectionOptions::Ssh(SshConnectionOptions { + host: 
host.to_string(), + username: Some(user.to_string()), + ..Default::default() + }); + db.get_or_create_remote_connection(options.clone()) .await .unwrap(); - SerializedSshConnection { - host: host.into(), - port: None, - user: Some(user.into()), - } + options }) .collect::>(); - let ssh_connections = futures::future::join_all(ssh_connections).await; + let remote_connections = futures::future::join_all(remote_connections).await; let workspaces = [ - (1, ssh_connections[0].clone(), 9), - (2, ssh_connections[1].clone(), 5), - (3, ssh_connections[2].clone(), 8), - (4, ssh_connections[3].clone(), 2), + (1, remote_connections[0].clone(), 9), + (2, remote_connections[1].clone(), 5), + (3, remote_connections[2].clone(), 8), + (4, remote_connections[3].clone(), 2), ] .into_iter() - .map(|(id, ssh_connection, window_id)| SerializedWorkspace { + .map(|(id, remote_connection, window_id)| SerializedWorkspace { id: WorkspaceId(id), paths: PathList::default(), - location: SerializedWorkspaceLocation::Ssh(ssh_connection), + location: SerializedWorkspaceLocation::Remote(remote_connection), center_group: Default::default(), window_bounds: Default::default(), display: Default::default(), @@ -2429,28 +2576,28 @@ mod tests { assert_eq!( have[0], ( - SerializedWorkspaceLocation::Ssh(ssh_connections[3].clone()), + SerializedWorkspaceLocation::Remote(remote_connections[3].clone()), PathList::default() ) ); assert_eq!( have[1], ( - SerializedWorkspaceLocation::Ssh(ssh_connections[2].clone()), + SerializedWorkspaceLocation::Remote(remote_connections[2].clone()), PathList::default() ) ); assert_eq!( have[2], ( - SerializedWorkspaceLocation::Ssh(ssh_connections[1].clone()), + SerializedWorkspaceLocation::Remote(remote_connections[1].clone()), PathList::default() ) ); assert_eq!( have[3], ( - SerializedWorkspaceLocation::Ssh(ssh_connections[0].clone()), + SerializedWorkspaceLocation::Remote(remote_connections[0].clone()), PathList::default() ) ); @@ -2465,13 +2612,23 @@ mod tests { let user = 
Some("user".to_string()); let connection_id = db - .get_or_create_ssh_connection(host.clone(), port, user.clone()) + .get_or_create_remote_connection(RemoteConnectionOptions::Ssh(SshConnectionOptions { + host: host.clone(), + port, + username: user.clone(), + ..Default::default() + })) .await .unwrap(); // Test that calling the function again with the same parameters returns the same project let same_connection = db - .get_or_create_ssh_connection(host.clone(), port, user.clone()) + .get_or_create_remote_connection(RemoteConnectionOptions::Ssh(SshConnectionOptions { + host: host.clone(), + port, + username: user.clone(), + ..Default::default() + })) .await .unwrap(); @@ -2483,7 +2640,12 @@ mod tests { let user2 = Some("otheruser".to_string()); let different_connection = db - .get_or_create_ssh_connection(host2.clone(), port2, user2.clone()) + .get_or_create_remote_connection(RemoteConnectionOptions::Ssh(SshConnectionOptions { + host: host2.clone(), + port: port2, + username: user2.clone(), + ..Default::default() + })) .await .unwrap(); @@ -2497,12 +2659,22 @@ mod tests { let (host, port, user) = ("example.com".to_string(), None, None); let connection_id = db - .get_or_create_ssh_connection(host.clone(), port, None) + .get_or_create_remote_connection(RemoteConnectionOptions::Ssh(SshConnectionOptions { + host: host.clone(), + port, + username: None, + ..Default::default() + })) .await .unwrap(); let same_connection_id = db - .get_or_create_ssh_connection(host.clone(), port, user.clone()) + .get_or_create_remote_connection(RemoteConnectionOptions::Ssh(SshConnectionOptions { + host: host.clone(), + port, + username: user.clone(), + ..Default::default() + })) .await .unwrap(); @@ -2510,8 +2682,8 @@ mod tests { } #[gpui::test] - async fn test_get_ssh_connections() { - let db = WorkspaceDb::open_test_db("test_get_ssh_connections").await; + async fn test_get_remote_connections() { + let db = WorkspaceDb::open_test_db("test_get_remote_connections").await; let connections = 
[ ("example.com".to_string(), None, None), @@ -2526,39 +2698,49 @@ mod tests { let mut ids = Vec::new(); for (host, port, user) in connections.iter() { ids.push( - db.get_or_create_ssh_connection(host.clone(), *port, user.clone()) - .await - .unwrap(), + db.get_or_create_remote_connection(RemoteConnectionOptions::Ssh( + SshConnectionOptions { + host: host.clone(), + port: *port, + username: user.clone(), + ..Default::default() + }, + )) + .await + .unwrap(), ); } - let stored_projects = db.ssh_connections().unwrap(); + let stored_connections = db.remote_connections().unwrap(); assert_eq!( - stored_projects, + stored_connections, [ ( ids[0], - SerializedSshConnection { + RemoteConnectionOptions::Ssh(SshConnectionOptions { host: "example.com".into(), port: None, - user: None, - } + username: None, + ..Default::default() + }), ), ( ids[1], - SerializedSshConnection { + RemoteConnectionOptions::Ssh(SshConnectionOptions { host: "anotherexample.com".into(), port: Some(123), - user: Some("user2".into()), - } + username: Some("user2".into()), + ..Default::default() + }), ), ( ids[2], - SerializedSshConnection { + RemoteConnectionOptions::Ssh(SshConnectionOptions { host: "yetanother.com".into(), port: Some(345), - user: None, - } + username: None, + ..Default::default() + }), ), ] .into_iter() diff --git a/crates/workspace/src/persistence/model.rs b/crates/workspace/src/persistence/model.rs index 04757d04950ac1ca200096d7b46d04abb18ce8f9..005a1ba2347f8ac3847199ad4564d8ca45420f4a 100644 --- a/crates/workspace/src/persistence/model.rs +++ b/crates/workspace/src/persistence/model.rs @@ -12,7 +12,7 @@ use db::sqlez::{ use gpui::{AsyncWindowContext, Entity, WeakEntity}; use project::{Project, debugger::breakpoint_store::SourceBreakpoint}; -use serde::{Deserialize, Serialize}; +use remote::RemoteConnectionOptions; use std::{ collections::BTreeMap, path::{Path, PathBuf}, @@ -24,19 +24,18 @@ use uuid::Uuid; #[derive( Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, 
serde::Serialize, serde::Deserialize, )] -pub(crate) struct SshConnectionId(pub u64); +pub(crate) struct RemoteConnectionId(pub u64); -#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] -pub struct SerializedSshConnection { - pub host: String, - pub port: Option, - pub user: Option, +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub(crate) enum RemoteConnectionKind { + Ssh, + Wsl, } #[derive(Debug, PartialEq, Clone)] pub enum SerializedWorkspaceLocation { Local, - Ssh(SerializedSshConnection), + Remote(RemoteConnectionOptions), } impl SerializedWorkspaceLocation { @@ -68,6 +67,23 @@ pub struct DockStructure { pub(crate) bottom: DockData, } +impl RemoteConnectionKind { + pub(crate) fn serialize(&self) -> &'static str { + match self { + RemoteConnectionKind::Ssh => "ssh", + RemoteConnectionKind::Wsl => "wsl", + } + } + + pub(crate) fn deserialize(text: &str) -> Option { + match text { + "ssh" => Some(Self::Ssh), + "wsl" => Some(Self::Wsl), + _ => None, + } + } +} + impl Column for DockStructure { fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> { let (left, next_index) = DockData::column(statement, start_index)?; diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 61442eb6348e6152a4ad8ba4d3f93c24d1887346..bd19f37c1e0fd8653f5d73dea365f1148fd2e91d 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -67,14 +67,14 @@ pub use pane_group::*; use persistence::{DB, SerializedWindowBounds, model::SerializedWorkspace}; pub use persistence::{ DB as WORKSPACE_DB, WorkspaceDb, delete_unloaded_items, - model::{ItemId, SerializedSshConnection, SerializedWorkspaceLocation}, + model::{ItemId, SerializedWorkspaceLocation}, }; use postage::stream::Stream; use project::{ DirectoryLister, Project, ProjectEntryId, ProjectPath, ResolvedPath, Worktree, WorktreeId, debugger::{breakpoint_store::BreakpointStoreEvent, session::ThreadStatus}, }; -use remote::{RemoteClientDelegate, 
SshConnectionOptions, remote_client::ConnectionIdentifier}; +use remote::{RemoteClientDelegate, RemoteConnectionOptions, remote_client::ConnectionIdentifier}; use schemars::JsonSchema; use serde::Deserialize; use session::AppSession; @@ -5262,14 +5262,7 @@ impl Workspace { fn serialize_workspace_location(&self, cx: &App) -> WorkspaceLocation { let paths = PathList::new(&self.root_paths(cx)); if let Some(connection) = self.project.read(cx).remote_connection_options(cx) { - WorkspaceLocation::Location( - SerializedWorkspaceLocation::Ssh(SerializedSshConnection { - host: connection.host, - port: connection.port, - user: connection.username, - }), - paths, - ) + WorkspaceLocation::Location(SerializedWorkspaceLocation::Remote(connection), paths) } else if self.project.read(cx).is_local() { if !paths.is_empty() { WorkspaceLocation::Location(SerializedWorkspaceLocation::Local, paths) @@ -7282,9 +7275,9 @@ pub fn create_and_open_local_file( }) } -pub fn open_ssh_project_with_new_connection( +pub fn open_remote_project_with_new_connection( window: WindowHandle, - connection_options: SshConnectionOptions, + connection_options: RemoteConnectionOptions, cancel_rx: oneshot::Receiver<()>, delegate: Arc, app_state: Arc, @@ -7293,11 +7286,11 @@ pub fn open_ssh_project_with_new_connection( ) -> Task> { cx.spawn(async move |cx| { let (workspace_id, serialized_workspace) = - serialize_ssh_project(connection_options.clone(), paths.clone(), cx).await?; + serialize_remote_project(connection_options.clone(), paths.clone(), cx).await?; let session = match cx .update(|cx| { - remote::RemoteClient::ssh( + remote::RemoteClient::new( ConnectionIdentifier::Workspace(workspace_id.0), connection_options, cancel_rx, @@ -7323,7 +7316,7 @@ pub fn open_ssh_project_with_new_connection( ) })?; - open_ssh_project_inner( + open_remote_project_inner( project, paths, workspace_id, @@ -7336,8 +7329,8 @@ pub fn open_ssh_project_with_new_connection( }) } -pub fn open_ssh_project_with_existing_connection( - 
connection_options: SshConnectionOptions, +pub fn open_remote_project_with_existing_connection( + connection_options: RemoteConnectionOptions, project: Entity, paths: Vec, app_state: Arc, @@ -7346,9 +7339,9 @@ pub fn open_ssh_project_with_existing_connection( ) -> Task> { cx.spawn(async move |cx| { let (workspace_id, serialized_workspace) = - serialize_ssh_project(connection_options.clone(), paths.clone(), cx).await?; + serialize_remote_project(connection_options.clone(), paths.clone(), cx).await?; - open_ssh_project_inner( + open_remote_project_inner( project, paths, workspace_id, @@ -7361,7 +7354,7 @@ pub fn open_ssh_project_with_existing_connection( }) } -async fn open_ssh_project_inner( +async fn open_remote_project_inner( project: Entity, paths: Vec, workspace_id: WorkspaceId, @@ -7448,22 +7441,18 @@ async fn open_ssh_project_inner( Ok(()) } -fn serialize_ssh_project( - connection_options: SshConnectionOptions, +fn serialize_remote_project( + connection_options: RemoteConnectionOptions, paths: Vec, cx: &AsyncApp, ) -> Task)>> { cx.background_spawn(async move { - let ssh_connection_id = persistence::DB - .get_or_create_ssh_connection( - connection_options.host.clone(), - connection_options.port, - connection_options.username.clone(), - ) + let remote_connection_id = persistence::DB + .get_or_create_remote_connection(connection_options) .await?; let serialized_workspace = - persistence::DB.ssh_workspace_for_roots(&paths, ssh_connection_id); + persistence::DB.remote_workspace_for_roots(&paths, remote_connection_id); let workspace_id = if let Some(workspace_id) = serialized_workspace.as_ref().map(|workspace| workspace.id) @@ -8013,22 +8002,20 @@ pub struct WorkspacePosition { pub centered_layout: bool, } -pub fn ssh_workspace_position_from_db( - host: String, - port: Option, - user: Option, +pub fn remote_workspace_position_from_db( + connection_options: RemoteConnectionOptions, paths_to_open: &[PathBuf], cx: &App, ) -> Task> { let paths = paths_to_open.to_vec(); 
cx.background_spawn(async move { - let ssh_connection_id = persistence::DB - .get_or_create_ssh_connection(host, port, user) + let remote_connection_id = persistence::DB + .get_or_create_remote_connection(connection_options) .await .context("fetching serialized ssh project")?; let serialized_workspace = - persistence::DB.ssh_workspace_for_roots(&paths, ssh_connection_id); + persistence::DB.remote_workspace_for_roots(&paths, remote_connection_id); let (window_bounds, display) = if let Some(bounds) = window_bounds_env_override() { (Some(WindowBounds::Windowed(bounds)), None) diff --git a/crates/zed/resources/windows/zed-wsl b/crates/zed/resources/windows/zed-wsl new file mode 100644 index 0000000000000000000000000000000000000000..d3cbb93af6f5979508229656deadeab0dbf21661 --- /dev/null +++ b/crates/zed/resources/windows/zed-wsl @@ -0,0 +1,25 @@ +#!/usr/bin/env sh + +if [ "$ZED_WSL_DEBUG_INFO" = true ]; then + set -x +fi + +ZED_PATH="$(dirname "$(realpath "$0")")" + +IN_WSL=false +if [ -n "$WSL_DISTRO_NAME" ]; then + # $WSL_DISTRO_NAME is available since WSL builds 18362, also for WSL2 + IN_WSL=true +fi + +if [ $IN_WSL = true ]; then + WSL_USER="$USER" + if [ -z "$WSL_USER" ]; then + WSL_USER="$USERNAME" + fi + "$ZED_PATH/zed.exe" --wsl "$WSL_USER@$WSL_DISTRO_NAME" "$@" + exit $? 
+else + echo "Only WSL is supported for now" >&2 + exit 1 +fi diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index e4438792045617498e5c8cd3b52117b1d0b752ef..79cf2bfa66fb217680dea86720eb46402f116958 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -23,13 +23,14 @@ use http_client::{Url, read_proxy_from_env}; use language::LanguageRegistry; use onboarding::{FIRST_OPEN, show_onboarding_view}; use prompt_store::PromptBuilder; +use remote::RemoteConnectionOptions; use reqwest_client::ReqwestClient; use assets::Assets; use node_runtime::{NodeBinaryOptions, NodeRuntime}; use parking_lot::Mutex; use project::project_settings::ProjectSettings; -use recent_projects::{SshSettings, open_ssh_project}; +use recent_projects::{SshSettings, open_remote_project}; use release_channel::{AppCommitSha, AppVersion, ReleaseChannel}; use session::{AppSession, Session}; use settings::{BaseKeymap, Settings, SettingsStore, watch_config_file}; @@ -360,6 +361,7 @@ pub fn main() { open_listener.open(RawOpenRequest { urls, diff_paths: Vec::new(), + ..Default::default() }) } }); @@ -696,7 +698,7 @@ pub fn main() { let urls: Vec<_> = args .paths_or_urls .iter() - .filter_map(|arg| parse_url_arg(arg, cx).log_err()) + .map(|arg| parse_url_arg(arg, cx)) .collect(); let diff_paths: Vec<[String; 2]> = args @@ -706,7 +708,11 @@ pub fn main() { .collect(); if !urls.is_empty() || !diff_paths.is_empty() { - open_listener.open(RawOpenRequest { urls, diff_paths }) + open_listener.open(RawOpenRequest { + urls, + diff_paths, + wsl: args.wsl, + }) } match open_rx @@ -792,10 +798,10 @@ fn handle_open_request(request: OpenRequest, app_state: Arc, cx: &mut return; } - if let Some(connection_options) = request.ssh_connection { + if let Some(connection_options) = request.remote_connection { cx.spawn(async move |cx| { let paths: Vec = request.open_paths.into_iter().map(PathBuf::from).collect(); - open_ssh_project( + open_remote_project( connection_options, paths, app_state, @@ -978,31 
+984,24 @@ async fn restore_or_create_workspace(app_state: Arc, cx: &mut AsyncApp tasks.push(task); } } - SerializedWorkspaceLocation::Ssh(ssh) => { + SerializedWorkspaceLocation::Remote(mut connection_options) => { let app_state = app_state.clone(); - let ssh_host = ssh.host.clone(); - let task = cx.spawn(async move |cx| { - let connection_options = cx.update(|cx| { + if let RemoteConnectionOptions::Ssh(options) = &mut connection_options { + cx.update(|cx| { SshSettings::get_global(cx) - .connection_options_for(ssh.host, ssh.port, ssh.user) - }); - - match connection_options { - Ok(connection_options) => recent_projects::open_ssh_project( - connection_options, - paths.paths().into_iter().map(PathBuf::from).collect(), - app_state, - workspace::OpenOptions::default(), - cx, - ) - .await - .map_err(|e| anyhow::anyhow!(e)), - Err(e) => Err(anyhow::anyhow!( - "Failed to get SSH connection options for {}: {}", - ssh_host, - e - )), - } + .fill_connection_options_from_settings(options) + })?; + } + let task = cx.spawn(async move |cx| { + recent_projects::open_remote_project( + connection_options, + paths.paths().into_iter().map(PathBuf::from).collect(), + app_state, + workspace::OpenOptions::default(), + cx, + ) + .await + .map_err(|e| anyhow::anyhow!(e)) }); tasks.push(task); } @@ -1184,6 +1183,16 @@ struct Args { #[arg(long, value_name = "DIR")] user_data_dir: Option, + /// The username and WSL distribution to use when opening paths. ,If not specified, + /// Zed will attempt to open the paths directly. + /// + /// The username is optional, and if not specified, the default user for the distribution + /// will be used. + /// + /// Example: `me@Ubuntu` or `Ubuntu` for default distribution. + #[arg(long, value_name = "USER@DISTRO")] + wsl: Option, + /// Instructs zed to run as a dev server on this machine. 
(not implemented) #[arg(long)] dev_server_token: Option, @@ -1242,18 +1251,18 @@ impl ToString for IdType { } } -fn parse_url_arg(arg: &str, cx: &App) -> Result { +fn parse_url_arg(arg: &str, cx: &App) -> String { match std::fs::canonicalize(Path::new(&arg)) { - Ok(path) => Ok(format!("file://{}", path.display())), - Err(error) => { + Ok(path) => format!("file://{}", path.display()), + Err(_) => { if arg.starts_with("file://") || arg.starts_with("zed-cli://") || arg.starts_with("ssh://") || parse_zed_link(arg, cx).is_some() { - Ok(arg.into()) + arg.into() } else { - anyhow::bail!("error parsing path argument: {error}") + format!("file://{arg}") } } } diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 5797070a39c8a60dc760ac3b82341842bc11d63e..d0e4687a132a85645cdbfe52e67ebb6afd894c0e 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -48,7 +48,7 @@ use project::{DirectoryLister, ProjectItem}; use project_panel::ProjectPanel; use prompt_store::PromptBuilder; use quick_action_bar::QuickActionBar; -use recent_projects::open_ssh_project; +use recent_projects::open_remote_project; use release_channel::{AppCommitSha, ReleaseChannel}; use rope::Rope; use search::project_search::ProjectSearchBar; @@ -1557,7 +1557,7 @@ pub fn open_new_ssh_project_from_project( }; let connection_options = ssh_client.read(cx).connection_options(); cx.spawn_in(window, async move |_, cx| { - open_ssh_project( + open_remote_project( connection_options, paths, app_state, diff --git a/crates/zed/src/zed/open_listener.rs b/crates/zed/src/zed/open_listener.rs index 2194fb7af5d48577a4316b99418df7dbce0a0375..f2d8cd46c301c0f688d36e17ed1b7d0dcd31ec00 100644 --- a/crates/zed/src/zed/open_listener.rs +++ b/crates/zed/src/zed/open_listener.rs @@ -17,8 +17,8 @@ use gpui::{App, AsyncApp, Global, WindowHandle}; use language::Point; use onboarding::FIRST_OPEN; use onboarding::show_onboarding_view; -use recent_projects::{SshSettings, open_ssh_project}; -use 
remote::SshConnectionOptions; +use recent_projects::{SshSettings, open_remote_project}; +use remote::{RemoteConnectionOptions, WslConnectionOptions}; use settings::Settings; use std::path::{Path, PathBuf}; use std::sync::Arc; @@ -37,7 +37,7 @@ pub struct OpenRequest { pub diff_paths: Vec<[String; 2]>, pub open_channel_notes: Vec<(u64, Option)>, pub join_channel: Option, - pub ssh_connection: Option, + pub remote_connection: Option, } #[derive(Debug)] @@ -51,6 +51,23 @@ pub enum OpenRequestKind { impl OpenRequest { pub fn parse(request: RawOpenRequest, cx: &App) -> Result { let mut this = Self::default(); + + this.diff_paths = request.diff_paths; + if let Some(wsl) = request.wsl { + let (user, distro_name) = if let Some((user, distro)) = wsl.split_once('@') { + if user.is_empty() { + anyhow::bail!("user is empty in wsl argument"); + } + (Some(user.to_string()), distro.to_string()) + } else { + (None, wsl) + }; + this.remote_connection = Some(RemoteConnectionOptions::Wsl(WslConnectionOptions { + distro_name, + user, + })); + } + for url in request.urls { if let Some(server_name) = url.strip_prefix("zed-cli://") { this.kind = Some(OpenRequestKind::CliConnection(connect_to_cli(server_name)?)); @@ -80,8 +97,6 @@ impl OpenRequest { } } - this.diff_paths = request.diff_paths; - Ok(this) } @@ -108,13 +123,15 @@ impl OpenRequest { if let Some(password) = url.password() { connection_options.password = Some(password.to_string()); } - if let Some(ssh_connection) = &self.ssh_connection { + + let connection_options = RemoteConnectionOptions::Ssh(connection_options); + if let Some(ssh_connection) = &self.remote_connection { anyhow::ensure!( *ssh_connection == connection_options, - "cannot open multiple ssh connections" + "cannot open multiple different remote connections" ); } - self.ssh_connection = Some(connection_options); + self.remote_connection = Some(connection_options); self.parse_file_path(url.path()); Ok(()) } @@ -152,6 +169,7 @@ pub struct 
OpenListener(UnboundedSender); pub struct RawOpenRequest { pub urls: Vec, pub diff_paths: Vec<[String; 2]>, + pub wsl: Option, } impl Global for OpenListener {} @@ -303,13 +321,21 @@ pub async fn handle_cli_connection( paths, diff_paths, wait, + wsl, open_new_workspace, env, user_data_dir: _, } => { if !urls.is_empty() { cx.update(|cx| { - match OpenRequest::parse(RawOpenRequest { urls, diff_paths }, cx) { + match OpenRequest::parse( + RawOpenRequest { + urls, + diff_paths, + wsl, + }, + cx, + ) { Ok(open_request) => { handle_open_request(open_request, app_state.clone(), cx); responses.send(CliResponse::Exit { status: 0 }).log_err(); @@ -422,30 +448,26 @@ async fn open_workspaces( errored = true } } - SerializedWorkspaceLocation::Ssh(ssh) => { + SerializedWorkspaceLocation::Remote(mut connection) => { let app_state = app_state.clone(); - let connection_options = cx.update(|cx| { - SshSettings::get_global(cx) - .connection_options_for(ssh.host, ssh.port, ssh.user) - }); - if let Ok(connection_options) = connection_options { - cx.spawn(async move |cx| { - open_ssh_project( - connection_options, - workspace_paths.paths().to_vec(), - app_state, - OpenOptions::default(), - cx, - ) - .await - .log_err(); - }) - .detach(); - // We don't set `errored` here if `open_ssh_project` fails, because for ssh projects, the - // error is displayed in the window. 
- } else { - errored = false; + if let RemoteConnectionOptions::Ssh(options) = &mut connection { + cx.update(|cx| { + SshSettings::get_global(cx) + .fill_connection_options_from_settings(options) + })?; } + cx.spawn(async move |cx| { + open_remote_project( + connection, + workspace_paths.paths().to_vec(), + app_state, + OpenOptions::default(), + cx, + ) + .await + .log_err(); + }) + .detach(); } } } @@ -587,6 +609,7 @@ mod tests { }; use editor::Editor; use gpui::TestAppContext; + use remote::SshConnectionOptions; use serde_json::json; use std::sync::Arc; use util::path; @@ -609,8 +632,8 @@ mod tests { .unwrap() }); assert_eq!( - request.ssh_connection.unwrap(), - SshConnectionOptions { + request.remote_connection.unwrap(), + RemoteConnectionOptions::Ssh(SshConnectionOptions { host: "localhost".into(), username: Some("me".into()), port: None, @@ -619,7 +642,7 @@ mod tests { port_forwards: None, nickname: None, upload_binary_over_ssh: false, - } + }) ); assert_eq!(request.open_paths, vec!["/"]); } diff --git a/crates/zed/src/zed/windows_only_instance.rs b/crates/zed/src/zed/windows_only_instance.rs index bd62dea75aac5ad2c4b01c4b17d8d6219b9110db..1dd51b5ffbd7c11cce0346142834581c022f512d 100644 --- a/crates/zed/src/zed/windows_only_instance.rs +++ b/crates/zed/src/zed/windows_only_instance.rs @@ -153,6 +153,7 @@ fn send_args_to_instance(args: &Args) -> anyhow::Result<()> { urls, diff_paths, wait: false, + wsl: args.wsl.clone(), open_new_workspace: None, env: None, user_data_dir: args.user_data_dir.clone(), diff --git a/script/bundle-windows.ps1 b/script/bundle-windows.ps1 index 8ae02124918a2f7f47a1c6204f5199f6eb4e6056..84ad39fb706f9d3e0e4af73a68b468e0bea33ee1 100644 --- a/script/bundle-windows.ps1 +++ b/script/bundle-windows.ps1 @@ -150,6 +150,7 @@ function CollectFiles { Move-Item -Path "$innoDir\zed_explorer_command_injector.appx" -Destination "$innoDir\appx\zed_explorer_command_injector.appx" -Force Move-Item -Path "$innoDir\zed_explorer_command_injector.dll" 
-Destination "$innoDir\appx\zed_explorer_command_injector.dll" -Force Move-Item -Path "$innoDir\cli.exe" -Destination "$innoDir\bin\zed.exe" -Force + Move-Item -Path "$innoDir\zed-wsl" -Destination "$innoDir\bin\zed" -Force Move-Item -Path "$innoDir\auto_update_helper.exe" -Destination "$innoDir\tools\auto_update_helper.exe" -Force Move-Item -Path ".\AGS_SDK-6.3.0\ags_lib\lib\amd_ags_x64.dll" -Destination "$innoDir\amd_ags_x64.dll" -Force } From 7d0a303785fd73677c255fb15657e6af8dc1e3e8 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Fri, 29 Aug 2025 23:03:47 -0400 Subject: [PATCH 26/54] Add xAI to supported language model providers (#37206) After setting a `grok` model via the agent panel, the settings complains that it doesn't recognize the language model provider: SCR-20250829-tqqd Also, sorted the list, in the follow-up commit. Release Notes: - N/A --- crates/agent_settings/src/agent_settings.rs | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/crates/agent_settings/src/agent_settings.rs b/crates/agent_settings/src/agent_settings.rs index 3808cc510f7941107f6e4ab90c9a5f8a2c3d920a..3e21e18a11ba68726f15d88bec93b95f01f89500 100644 --- a/crates/agent_settings/src/agent_settings.rs +++ b/crates/agent_settings/src/agent_settings.rs @@ -352,18 +352,19 @@ impl JsonSchema for LanguageModelProviderSetting { fn json_schema(_: &mut schemars::SchemaGenerator) -> schemars::Schema { json_schema!({ "enum": [ - "anthropic", "amazon-bedrock", + "anthropic", + "copilot_chat", + "deepseek", "google", "lmstudio", + "mistral", "ollama", "openai", - "zed.dev", - "copilot_chat", - "deepseek", "openrouter", - "mistral", - "vercel" + "vercel", + "x_ai", + "zed.dev" ] }) } From b473f4a1304bc1a4b2e911cc6063167ede3a281c Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Sat, 30 Aug 2025 15:13:23 +0200 Subject: [PATCH 27/54] Fix SQL error in recent projects query (#37220) Follow-up to https://github.com/zed-industries/zed/pull/37035 In the WSL PR, 
`ssh_connection_id` was renamed to `remote_connection_id`. However, that was not accounted for within the `recent_workspaces_query`. This caused the query to fail: ``` 2025-08-30T14:45:44+02:00 ERROR [recent_projects] Prepare call failed for query: SELECT workspace_id, paths, paths_order, ssh_connection_id FROM workspaces WHERE paths IS NOT NULL OR ssh_connection_id IS NOT NULL ORDER BY timestamp DESC Caused by: Sqlite call failed with code 1 and message: Some("no such column: ssh_connection_id") ``` and resulted in no recent workspaces being shown within the recent projects picker. This change updates the column name to the new name and thus fixes the error. Release Notes: - N/A --- crates/workspace/src/persistence.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index 160823f547f3ab0019d4a631550aec70f1ca101e..ef5a86a2762510fbea6f6a1a5172953a0ea20f7d 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -1112,11 +1112,11 @@ impl WorkspaceDb { query! { fn recent_workspaces_query() -> Result)>> { - SELECT workspace_id, paths, paths_order, ssh_connection_id + SELECT workspace_id, paths, paths_order, remote_connection_id FROM workspaces WHERE paths IS NOT NULL OR - ssh_connection_id IS NOT NULL + remote_connection_id IS NOT NULL ORDER BY timestamp DESC } } @@ -1128,11 +1128,11 @@ impl WorkspaceDb { Ok(self .session_workspaces_query(session_id)?
.into_iter() - .map(|(paths, order, window_id, ssh_connection_id)| { + .map(|(paths, order, window_id, remote_connection_id)| { ( PathList::deserialize(&SerializedPathList { paths, order }), window_id, - ssh_connection_id.map(RemoteConnectionId), + remote_connection_id.map(RemoteConnectionId), ) }) .collect()) From 0a32aa8db1c4bbd4ae8977b0923ece0f32537074 Mon Sep 17 00:00:00 2001 From: Umesh Yadav <23421535+imumesh18@users.noreply.github.com> Date: Sat, 30 Aug 2025 20:12:15 +0530 Subject: [PATCH 28/54] language_models: Fix GitHub Copilot thread summary by removing unnecessary noop tool logic (#37152) Closes #37025 This PR fixes GitHub Copilot thread summary failures by removing the unnecessary `noop` tool insertion logic. The code was originally added as a workaround in https://github.com/zed-industries/zed/pull/30007 for supposed GitHub Copilot API issues when tools were used previously in a conversation but no tools are provided in the current request. However, testing revealed that this scenario works fine without the workaround, and the `noop` tool insertion was actually causing "Invalid schema for function 'noop'" errors that prevented thread summarization from working. Removing this logic eliminates the errors and allows thread summarization to function correctly with GitHub Copilot models. The best way to see if removing that part of code works is just triggering thread summarisation. Error Log: ``` 2025-08-27T13:47:50-04:00 ERROR [workspace::notifications] "Failed to connect to API: 400 Bad Request {"error":{"message":"Invalid schema for function 'noop': In context=(), object schema missing properties.","code":"invalid_function_parameters"}}\n" ``` Release Notes: - Fixed GitHub Copilot thread summary failures by removing unnecessary noop tool insertion logic. 
--- .../src/provider/copilot_chat.rs | 20 +------------------ 1 file changed, 1 insertion(+), 19 deletions(-) diff --git a/crates/language_models/src/provider/copilot_chat.rs b/crates/language_models/src/provider/copilot_chat.rs index eb12c0056f871a4d9eb053c51455081572868aef..d48c12aa4b5de713c0130320f7c9e61a733dc33e 100644 --- a/crates/language_models/src/provider/copilot_chat.rs +++ b/crates/language_models/src/provider/copilot_chat.rs @@ -475,7 +475,6 @@ fn into_copilot_chat( } } - let mut tool_called = false; let mut messages: Vec = Vec::new(); for message in request_messages { match message.role { @@ -545,7 +544,6 @@ fn into_copilot_chat( let mut tool_calls = Vec::new(); for content in &message.content { if let MessageContent::ToolUse(tool_use) = content { - tool_called = true; tool_calls.push(ToolCall { id: tool_use.id.to_string(), content: copilot::copilot_chat::ToolCallContent::Function { @@ -590,7 +588,7 @@ fn into_copilot_chat( } } - let mut tools = request + let tools = request .tools .iter() .map(|tool| Tool::Function { @@ -602,22 +600,6 @@ fn into_copilot_chat( }) .collect::>(); - // The API will return a Bad Request (with no error message) when tools - // were used previously in the conversation but no tools are provided as - // part of this request. Inserting a dummy tool seems to circumvent this - // error. 
- if tool_called && tools.is_empty() { - tools.push(Tool::Function { - function: copilot::copilot_chat::Function { - name: "noop".to_string(), - description: "No operation".to_string(), - parameters: serde_json::json!({ - "type": "object" - }), - }, - }); - } - Ok(CopilotChatRequest { intent: true, n: 1, From af26b627bf8540edc6eea4146acc081183f6241e Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Sat, 30 Aug 2025 12:59:04 -0500 Subject: [PATCH 29/54] settings: Improve parse errors (#37234) Closes #ISSUE Adds a dependency on `serde_path_to_error` to the workspace allowing us to include the path to the setting that failed to parse on settings parse failure. Release Notes: - N/A *or* Added/Fixed/Improved ... --- Cargo.lock | 1 + Cargo.toml | 1 + crates/settings/Cargo.toml | 1 + crates/settings/src/settings_json.rs | 3 ++- crates/settings/src/settings_store.rs | 26 +++++++++++++++++++++++--- docs/src/visual-customization.md | 2 +- 6 files changed, 29 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 84d633dd6f126f1ce86cd73b83f9d1aac23c591e..a80809461e5135541b1223e7482310effa6cb50b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14894,6 +14894,7 @@ dependencies = [ "serde_derive", "serde_json", "serde_json_lenient", + "serde_path_to_error", "settings_ui_macros", "smallvec", "tree-sitter", diff --git a/Cargo.toml b/Cargo.toml index b64113311adb2662562cc4ae488054f54d569c3e..48017d9c6b4858fb7e5415b92bd993e534d1fabb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -592,6 +592,7 @@ serde_json_lenient = { version = "0.2", features = [ "preserve_order", "raw_value", ] } +serde_path_to_error = "0.1.17" serde_repr = "0.1" serde_urlencoded = "0.7" sha2 = "0.10" diff --git a/crates/settings/Cargo.toml b/crates/settings/Cargo.toml index 8768b4073602461a5031b8d70d3a1e930ad2a41e..d9b8d7275f9abdd60df85443988595f025bf26c0 100644 --- a/crates/settings/Cargo.toml +++ b/crates/settings/Cargo.toml @@ -33,6 +33,7 @@ serde_derive.workspace = true serde_json.workspace = true 
settings_ui_macros.workspace = true serde_json_lenient.workspace = true +serde_path_to_error.workspace = true smallvec.workspace = true tree-sitter-json.workspace = true tree-sitter.workspace = true diff --git a/crates/settings/src/settings_json.rs b/crates/settings/src/settings_json.rs index b916df6e5c205c7fc2c0c920d0ac8343cb986a5c..480fe057eacb8d96255a3bf2d7b5f96208f87ced 100644 --- a/crates/settings/src/settings_json.rs +++ b/crates/settings/src/settings_json.rs @@ -563,7 +563,8 @@ pub fn to_pretty_json( } pub fn parse_json_with_comments(content: &str) -> Result { - Ok(serde_json_lenient::from_str(content)?) + let mut deserializer = serde_json_lenient::Deserializer::from_str(content); + Ok(serde_path_to_error::deserialize(&mut deserializer)?) } #[cfg(test)] diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index 09ac6f9766e32e7a0d8765b09919cd0f8c09866c..023f8cbfba3d96b0a6cad2e1c6ebb930f0bcdf9e 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -11,7 +11,7 @@ use gpui::{App, AsyncApp, BorrowAppContext, Global, SharedString, Task, UpdateGl use paths::{EDITORCONFIG_NAME, local_settings_file_relative_path, task_file_name}; use schemars::JsonSchema; -use serde::{Deserialize, Serialize, de::DeserializeOwned}; +use serde::{Serialize, de::DeserializeOwned}; use serde_json::{Value, json}; use smallvec::SmallVec; use std::{ @@ -1464,9 +1464,29 @@ impl AnySettingValue for SettingValue { return (T::KEY, Ok(DeserializedSetting(Box::new(value)))); } } - let value = T::FileContent::deserialize(json) + let value = serde_path_to_error::deserialize::<_, T::FileContent>(json) .map(|value| DeserializedSetting(Box::new(value))) - .map_err(anyhow::Error::from); + .map_err(|err| { + // construct a path using the key and reported error path if possible. 
+ // Unfortunately, serde_path_to_error does not expose the necessary + // methods and data to simply add the key to the path + let mut path = String::new(); + if let Some(key) = key { + path.push_str(key); + } + let err_path = err.path().to_string(); + // when the path is empty, serde_path_to_error stringifies the path as ".", + // when the path is unknown, serde_path_to_error stringifies the path as an empty string + if !err_path.is_empty() && !err_path.starts_with(".") { + path.push('.'); + path.push_str(&err_path); + } + if path.is_empty() { + anyhow::Error::from(err.into_inner()) + } else { + anyhow::anyhow!("'{}': {}", err.into_inner(), path) + } + }); (key, value) } diff --git a/docs/src/visual-customization.md b/docs/src/visual-customization.md index 1df76d17f026c9457b296230f93bec0e10c4aa19..47c72e80f5ea0ca6ce8576e29c51ff9e44041eb5 100644 --- a/docs/src/visual-customization.md +++ b/docs/src/visual-customization.md @@ -8,7 +8,7 @@ See [Configuring Zed](./configuring-zed.md) for additional information and other Use may install zed extensions providing [Themes](./themes.md) and [Icon Themes](./icon-themes.md) via {#action zed::Extensions} from the command palette or menu. 
-You can preview/choose amongsts your installed themes and icon themes with {#action theme_selector::Toggle} ({#kb theme_selector::Toggle}) and ({#action icon_theme_selector::Toggle}) which will modify the following settings: +You can preview/choose amongst your installed themes and icon themes with {#action theme_selector::Toggle} ({#kb theme_selector::Toggle}) and ({#action icon_theme_selector::Toggle}) which will modify the following settings: ```json { From de576bd1b81cef5a8bc41506806ea44c92d9d9a5 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Sat, 30 Aug 2025 15:51:08 -0400 Subject: [PATCH 30/54] agent: Fix agent panel header not updating when opening a history entry (#37189) Closes #37171 Release Notes: - agent: Fixed a bug that caused the agent information in the panel header to be incorrect when opening a thread from history. --- crates/agent_ui/src/agent_panel.rs | 33 +++++++++++++++++++++++++----- 1 file changed, 28 insertions(+), 5 deletions(-) diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 3eb171054a2c4d529bbc4b89063bf58f69ce5c45..fac880b783271ffd8c9524464a8f0a178f276895 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -284,6 +284,17 @@ impl AgentType { } } +impl From for AgentType { + fn from(value: ExternalAgent) -> Self { + match value { + ExternalAgent::Gemini => Self::Gemini, + ExternalAgent::ClaudeCode => Self::ClaudeCode, + ExternalAgent::Custom { name, command } => Self::Custom { name, command }, + ExternalAgent::NativeAgent => Self::NativeAgent, + } + } +} + impl ActiveView { pub fn which_font_size_used(&self) -> WhichFontSize { match self { @@ -1049,6 +1060,11 @@ impl AgentPanel { editor }); + if self.selected_agent != AgentType::TextThread { + self.selected_agent = AgentType::TextThread; + self.serialize(cx); + } + self.set_active_view( ActiveView::prompt_editor( context_editor.clone(), @@ -1140,6 +1156,12 @@ impl AgentPanel { } } + let selected_agent = 
ext_agent.into(); + if this.selected_agent != selected_agent { + this.selected_agent = selected_agent; + this.serialize(cx); + } + let thread_view = cx.new(|cx| { crate::acp::AcpThreadView::new( server, @@ -1235,6 +1257,12 @@ impl AgentPanel { cx, ) }); + + if self.selected_agent != AgentType::TextThread { + self.selected_agent = AgentType::TextThread; + self.serialize(cx); + } + self.set_active_view( ActiveView::prompt_editor( editor, @@ -1860,11 +1888,6 @@ impl AgentPanel { window: &mut Window, cx: &mut Context, ) { - if self.selected_agent != agent { - self.selected_agent = agent.clone(); - this.serialize(cx); - } - match agent { AgentType::Zed => { window.dispatch_action( From ad746f25f268ef0ad3aeddc41bc05287c1d0d006 Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Sat, 30 Aug 2025 14:13:39 -0600 Subject: [PATCH 31/54] zeta: Add zlib to license detection + ignore symbol differences (#37238) See discussion on #36564. Makes the license regexes less fragile by not matching on symbols, while also excluding cases where a long file ends with a valid license. Also adds Zlib license, a commented out test to check all license-like files discovered in the homedir, and more testcases. Not too happy with the efficiency here, on my quite good computer it takes ~120ms to compile the regex and allocates ~8mb for it. This is just not a great use of regexes, I think something using eager substring matching would be much more efficient - hoping to followup with that. Release Notes: - Edit Prediction: Added Zlib license to open-source licenses eligible for data collection.
--- Cargo.lock | 1 + crates/zeta/Cargo.toml | 1 + .../0bsd.txt | 0 .../apache-2.0-ex0.txt} | 0 .../zeta/license_examples/apache-2.0-ex1.txt | 55 +++++ .../apache-2.0-ex2.txt} | 150 ++++++------ .../zeta/license_examples/apache-2.0-ex3.txt | 13 + .../bsd-1-clause.txt | 0 .../bsd-2-clause-ex0.txt} | 0 .../bsd-3-clause-ex0.txt} | 0 .../license_examples/bsd-3-clause-ex1.txt | 27 +++ .../license_examples/bsd-3-clause-ex2.txt | 31 +++ .../license_examples/bsd-3-clause-ex3.txt | 30 +++ .../license_examples/bsd-3-clause-ex4.txt | 27 +++ .../isc.txt | 0 .../mit.txt => license_examples/mit-ex0.txt} | 0 crates/zeta/license_examples/mit-ex1.txt | 26 ++ crates/zeta/license_examples/mit-ex2.txt | 22 ++ crates/zeta/license_examples/mit-ex3.txt | 21 ++ .../upl-1.0.txt | 0 crates/zeta/license_examples/zlib-ex0.txt | 19 ++ crates/zeta/license_regexes/0bsd.regex | 10 + crates/zeta/license_regexes/apache-2.0.regex | 223 +++++++++++++++++ crates/zeta/license_regexes/bsd.regex | 23 ++ crates/zeta/license_regexes/isc.regex | 12 + crates/zeta/license_regexes/mit.regex | 17 ++ crates/zeta/license_regexes/upl-1.0.regex | 32 +++ crates/zeta/license_regexes/zlib.regex | 18 ++ crates/zeta/src/license_detection.rs | 229 +++++++++++++++--- crates/zeta/src/license_detection/0bsd.regex | 12 - .../src/license_detection/bsd-1-clause.regex | 17 -- .../src/license_detection/bsd-2-clause.regex | 22 -- .../src/license_detection/bsd-3-clause.regex | 26 -- crates/zeta/src/license_detection/isc.regex | 15 -- crates/zeta/src/license_detection/mit.regex | 21 -- .../zeta/src/license_detection/upl-1.0.regex | 35 --- 36 files changed, 867 insertions(+), 268 deletions(-) rename crates/zeta/{src/license_detection => license_examples}/0bsd.txt (100%) rename crates/zeta/{src/license_detection/apache-2.0.txt => license_examples/apache-2.0-ex0.txt} (100%) create mode 100644 crates/zeta/license_examples/apache-2.0-ex1.txt rename crates/zeta/{src/license_detection/apache-2.0.regex => 
license_examples/apache-2.0-ex2.txt} (59%) create mode 100644 crates/zeta/license_examples/apache-2.0-ex3.txt rename crates/zeta/{src/license_detection => license_examples}/bsd-1-clause.txt (100%) rename crates/zeta/{src/license_detection/bsd-2-clause.txt => license_examples/bsd-2-clause-ex0.txt} (100%) rename crates/zeta/{src/license_detection/bsd-3-clause.txt => license_examples/bsd-3-clause-ex0.txt} (100%) create mode 100644 crates/zeta/license_examples/bsd-3-clause-ex1.txt create mode 100644 crates/zeta/license_examples/bsd-3-clause-ex2.txt create mode 100644 crates/zeta/license_examples/bsd-3-clause-ex3.txt create mode 100644 crates/zeta/license_examples/bsd-3-clause-ex4.txt rename crates/zeta/{src/license_detection => license_examples}/isc.txt (100%) rename crates/zeta/{src/license_detection/mit.txt => license_examples/mit-ex0.txt} (100%) create mode 100644 crates/zeta/license_examples/mit-ex1.txt create mode 100644 crates/zeta/license_examples/mit-ex2.txt create mode 100644 crates/zeta/license_examples/mit-ex3.txt rename crates/zeta/{src/license_detection => license_examples}/upl-1.0.txt (100%) create mode 100644 crates/zeta/license_examples/zlib-ex0.txt create mode 100644 crates/zeta/license_regexes/0bsd.regex create mode 100644 crates/zeta/license_regexes/apache-2.0.regex create mode 100644 crates/zeta/license_regexes/bsd.regex create mode 100644 crates/zeta/license_regexes/isc.regex create mode 100644 crates/zeta/license_regexes/mit.regex create mode 100644 crates/zeta/license_regexes/upl-1.0.regex create mode 100644 crates/zeta/license_regexes/zlib.regex delete mode 100644 crates/zeta/src/license_detection/0bsd.regex delete mode 100644 crates/zeta/src/license_detection/bsd-1-clause.regex delete mode 100644 crates/zeta/src/license_detection/bsd-2-clause.regex delete mode 100644 crates/zeta/src/license_detection/bsd-3-clause.regex delete mode 100644 crates/zeta/src/license_detection/isc.regex delete mode 100644 crates/zeta/src/license_detection/mit.regex 
delete mode 100644 crates/zeta/src/license_detection/upl-1.0.regex diff --git a/Cargo.lock b/Cargo.lock index a80809461e5135541b1223e7482310effa6cb50b..4ca45445e2ed26819c612381b682aa9d1bf35d07 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -20817,6 +20817,7 @@ dependencies = [ "gpui", "http_client", "indoc", + "itertools 0.14.0", "language", "language_model", "log", diff --git a/crates/zeta/Cargo.toml b/crates/zeta/Cargo.toml index 05eedd6015d47e0c020266f27da8d63850d162e3..a57781ee8ee4b97805935efc7943df9eff1a8958 100644 --- a/crates/zeta/Cargo.toml +++ b/crates/zeta/Cargo.toml @@ -34,6 +34,7 @@ futures.workspace = true gpui.workspace = true http_client.workspace = true indoc.workspace = true +itertools.workspace = true language.workspace = true language_model.workspace = true log.workspace = true diff --git a/crates/zeta/src/license_detection/0bsd.txt b/crates/zeta/license_examples/0bsd.txt similarity index 100% rename from crates/zeta/src/license_detection/0bsd.txt rename to crates/zeta/license_examples/0bsd.txt diff --git a/crates/zeta/src/license_detection/apache-2.0.txt b/crates/zeta/license_examples/apache-2.0-ex0.txt similarity index 100% rename from crates/zeta/src/license_detection/apache-2.0.txt rename to crates/zeta/license_examples/apache-2.0-ex0.txt diff --git a/crates/zeta/license_examples/apache-2.0-ex1.txt b/crates/zeta/license_examples/apache-2.0-ex1.txt new file mode 100644 index 0000000000000000000000000000000000000000..2df8c87fda4e00fd552766016915a97205de740d --- /dev/null +++ b/crates/zeta/license_examples/apache-2.0-ex1.txt @@ -0,0 +1,55 @@ +Apache License + +Version 2.0, January 2004 + +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. 
+ +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 
+ +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of this License; and + +You must cause any modified files to carry prominent notices stating that You changed the files; and + +You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + +If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the 
Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS diff --git a/crates/zeta/src/license_detection/apache-2.0.regex b/crates/zeta/license_examples/apache-2.0-ex2.txt similarity index 59% rename from crates/zeta/src/license_detection/apache-2.0.regex rename to crates/zeta/license_examples/apache-2.0-ex2.txt index dcf12fe28915f94e1f5d8de81285ea49dcc10f8e..016b1bc2e6136a367c59b13eecec27e605d13664 100644 --- a/crates/zeta/src/license_detection/apache-2.0.regex +++ b/crates/zeta/license_examples/apache-2.0-ex2.txt @@ -1,109 +1,110 @@ + Apache License - Version 2\.0, January 2004 - http://www\.apache\.org/licenses/ + Version 2.0, January 2004 + http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - 1\. Definitions\. + 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document\. + and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License\. + the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common - control with that entity\. For the purposes of this definition, - "control" means \(i\) the power, direct or indirect, to cause the + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or - otherwise, or \(ii\) ownership of fifty percent \(50%\) or more of the - outstanding shares, or \(iii\) beneficial ownership of such entity\. + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. - "You" \(or "Your"\) shall mean an individual or Legal Entity - exercising permissions granted by this License\. 
+ "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation - source, and configuration files\. + source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, - and conversions to other media types\. + and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work - \(an example is provided in the Appendix below\)\. + (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on \(or derived from\) the Work and for which the + form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship\. For the purposes + represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain - separable from, or merely link \(or bind by name\) to the interfaces of, - the Work and Derivative Works thereof\. + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner\. 
For the purposes of this definition, "submitted" + the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution\." + designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work\. + subsequently incorporated within the Work. - 2\. Grant of Copyright License\. Subject to the terms and conditions of + 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, - worldwide, non\-exclusive, no\-charge, royalty\-free, irrevocable + worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form\. + Work and such Derivative Works in Source or Object form. - 3\. Grant of Patent License\. Subject to the terms and conditions of + 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, - worldwide, non\-exclusive, no\-charge, royalty\-free, irrevocable - \(except as stated in this section\) patent license to make, have made, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their - Contribution\(s\) alone or by combination of their Contribution\(s\) - with the Work to which such Contribution\(s\) was submitted\. If You - institute patent litigation against any entity \(including a - cross\-claim or counterclaim in a lawsuit\) alleging that the Work + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate - as of the date such litigation is filed\. + as of the date such litigation is filed. - 4\. Redistribution\. You may reproduce and distribute copies of the + 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: - \(a\) You must give any other recipients of the Work or + (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and - \(b\) You must cause any modified files to carry prominent notices + (b) You must cause any modified files to carry prominent notices stating that You changed the files; and - \(c\) You must retain, in the Source form of any Derivative Works + (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and - \(d\) If the Work includes a "NOTICE" text file as part of its + (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not @@ -112,90 +113,77 @@ as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and - wherever such third\-party notices normally appear\. The contents + wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and - do not modify the License\. You may add Your own attribution + do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed - as modifying the License\. + as modifying the License. 
You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License\. + the conditions stated in this License. - 5\. Submission of Contributions\. Unless You explicitly state otherwise, + 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions\. + this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions\. + with Licensor regarding such Contributions. - 6\. Trademarks\. This License does not grant permission to use the trade + 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file\. + origin of the Work and reproducing the content of the NOTICE file. - 7\. Disclaimer of Warranty\. Unless required by applicable law or - agreed to in writing, Licensor provides the Work \(and each - Contributor provides its Contributions\) on an "AS IS" BASIS, + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions - of TITLE, NON\-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE\. You are solely responsible for determining the + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License\. + risks associated with Your exercise of permissions under this License. - 8\. Limitation of Liability\. In no event and under no legal theory, - whether in tort \(including negligence\), contract, or otherwise, - unless required by applicable law \(such as deliberate and grossly - negligent acts\) or agreed to in writing, shall any Contributor be + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the - Work \(including but not limited to damages for loss of goodwill, + Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses\), even if such Contributor - has been advised of the possibility of such damages\. + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. - 9\. 
Accepting Warranty or Additional Liability\. While redistributing + 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this - License\. However, in accepting such obligations, You may act only + License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability\.(?: - - END OF TERMS AND CONDITIONS)?(?: - - APPENDIX: How to apply the Apache License to your work\. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "\[\]" - replaced with your own identifying information\. \(Don't include - the brackets!\) The text should be enclosed in the appropriate - comment syntax for the file format\. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third\-party archives\.)?(?: + of your accepting any such warranty or additional liability. - Copyright .*)?(?: + END OF TERMS AND CONDITIONS - Licensed under the Apache License, Version 2\.0 \(the "License"\); - you may not use this file except in compliance with the License\. + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
You may obtain a copy of the License at - http://www\.apache\.org/licenses/LICENSE\-2\.0 + http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied\. + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and - limitations under the License\.)? + limitations under the License. diff --git a/crates/zeta/license_examples/apache-2.0-ex3.txt b/crates/zeta/license_examples/apache-2.0-ex3.txt new file mode 100644 index 0000000000000000000000000000000000000000..243448ceb5d4909dab3740d54d2541a4370d459f --- /dev/null +++ b/crates/zeta/license_examples/apache-2.0-ex3.txt @@ -0,0 +1,13 @@ +Copyright 2011 Someone + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/crates/zeta/src/license_detection/bsd-1-clause.txt b/crates/zeta/license_examples/bsd-1-clause.txt similarity index 100% rename from crates/zeta/src/license_detection/bsd-1-clause.txt rename to crates/zeta/license_examples/bsd-1-clause.txt diff --git a/crates/zeta/src/license_detection/bsd-2-clause.txt b/crates/zeta/license_examples/bsd-2-clause-ex0.txt similarity index 100% rename from crates/zeta/src/license_detection/bsd-2-clause.txt rename to crates/zeta/license_examples/bsd-2-clause-ex0.txt diff --git a/crates/zeta/src/license_detection/bsd-3-clause.txt b/crates/zeta/license_examples/bsd-3-clause-ex0.txt similarity index 100% rename from crates/zeta/src/license_detection/bsd-3-clause.txt rename to crates/zeta/license_examples/bsd-3-clause-ex0.txt diff --git a/crates/zeta/license_examples/bsd-3-clause-ex1.txt b/crates/zeta/license_examples/bsd-3-clause-ex1.txt new file mode 100644 index 0000000000000000000000000000000000000000..d460f673756539fb8f16db9a63968059db892027 --- /dev/null +++ b/crates/zeta/license_examples/bsd-3-clause-ex1.txt @@ -0,0 +1,27 @@ +// Copyright 2024 (this is copy modified from chromium) +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of da company nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/crates/zeta/license_examples/bsd-3-clause-ex2.txt b/crates/zeta/license_examples/bsd-3-clause-ex2.txt new file mode 100644 index 0000000000000000000000000000000000000000..99fa52679d8ceebed7ffbc0d75d37ca7cb1da41c --- /dev/null +++ b/crates/zeta/license_examples/bsd-3-clause-ex2.txt @@ -0,0 +1,31 @@ +The Glasgow Haskell Compiler License + +Copyright 2002, The University Court of the University of Glasgow. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +- Redistributions of source code must retain the above copyright notice, +this list of conditions and the following disclaimer. + +- Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation +and/or other materials provided with the distribution. + +- Neither name of the University nor the names of its contributors may be +used to endorse or promote products derived from this software without +specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY COURT OF THE UNIVERSITY OF +GLASGOW AND THE CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, +INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +UNIVERSITY COURT OF THE UNIVERSITY OF GLASGOW OR THE CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +DAMAGE. diff --git a/crates/zeta/license_examples/bsd-3-clause-ex3.txt b/crates/zeta/license_examples/bsd-3-clause-ex3.txt new file mode 100644 index 0000000000000000000000000000000000000000..68a181b5a71e991a80b8030d8a5094266092a432 --- /dev/null +++ b/crates/zeta/license_examples/bsd-3-clause-ex3.txt @@ -0,0 +1,30 @@ +Copyright (c) 2019 Someone + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of Someone nor the names of other + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/crates/zeta/license_examples/bsd-3-clause-ex4.txt b/crates/zeta/license_examples/bsd-3-clause-ex4.txt new file mode 100644 index 0000000000000000000000000000000000000000..259c59ff9062bbc208044264afb3fa83e0773968 --- /dev/null +++ b/crates/zeta/license_examples/bsd-3-clause-ex4.txt @@ -0,0 +1,27 @@ +Copyright (c) 2009-2011, Mozilla Foundation and contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the names of the Mozilla Foundation nor the names of project + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/crates/zeta/src/license_detection/isc.txt b/crates/zeta/license_examples/isc.txt similarity index 100% rename from crates/zeta/src/license_detection/isc.txt rename to crates/zeta/license_examples/isc.txt diff --git a/crates/zeta/src/license_detection/mit.txt b/crates/zeta/license_examples/mit-ex0.txt similarity index 100% rename from crates/zeta/src/license_detection/mit.txt rename to crates/zeta/license_examples/mit-ex0.txt diff --git a/crates/zeta/license_examples/mit-ex1.txt b/crates/zeta/license_examples/mit-ex1.txt new file mode 100644 index 0000000000000000000000000000000000000000..d3642458dc5c970492f7b021c0881b39654220b9 --- /dev/null +++ b/crates/zeta/license_examples/mit-ex1.txt @@ -0,0 +1,26 @@ +Copyright (c) 2006-2009 Someone +Copyright (c) 2009-2013 Some organization + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the 
following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/crates/zeta/license_examples/mit-ex2.txt b/crates/zeta/license_examples/mit-ex2.txt new file mode 100644 index 0000000000000000000000000000000000000000..31ec7bf0e8b1e5c57ebebe3c93f2ed2b0d24ed04 --- /dev/null +++ b/crates/zeta/license_examples/mit-ex2.txt @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) someone + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/crates/zeta/license_examples/mit-ex3.txt b/crates/zeta/license_examples/mit-ex3.txt new file mode 100644 index 0000000000000000000000000000000000000000..ed5c99140214039ed83a7a0179c587b63805c918 --- /dev/null +++ b/crates/zeta/license_examples/mit-ex3.txt @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Someone. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/crates/zeta/src/license_detection/upl-1.0.txt b/crates/zeta/license_examples/upl-1.0.txt similarity index 100% rename from crates/zeta/src/license_detection/upl-1.0.txt rename to crates/zeta/license_examples/upl-1.0.txt diff --git a/crates/zeta/license_examples/zlib-ex0.txt b/crates/zeta/license_examples/zlib-ex0.txt new file mode 100644 index 0000000000000000000000000000000000000000..84a3048c69a2e1ae4aa93b26945f1aff4b6888fe --- /dev/null +++ b/crates/zeta/license_examples/zlib-ex0.txt @@ -0,0 +1,19 @@ +Copyright (c) 2021 Someone + +This software is provided 'as-is', without any express or implied warranty. In +no event will the authors be held liable for any damages arising from the use of +this software. + +Permission is granted to anyone to use this software for any purpose, including +commercial applications, and to alter it and redistribute it freely, subject to +the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not claim + that you wrote the original software. If you use this software in a product, + an acknowledgment in the product documentation would be appreciated but is + not required. + + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + + 3. This notice may not be removed or altered from any source distribution. 
diff --git a/crates/zeta/license_regexes/0bsd.regex b/crates/zeta/license_regexes/0bsd.regex new file mode 100644 index 0000000000000000000000000000000000000000..15725f206a905fb0de1c2f03ec40dde25a1f01c4 --- /dev/null +++ b/crates/zeta/license_regexes/0bsd.regex @@ -0,0 +1,10 @@ +.{0,512}Permission to use copy modify andor distribute this software for any +purpose with or without fee is hereby granted + +THE SOFTWARE IS PROVIDED AS IS AND THE AUTHOR DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS IN NO EVENT SHALL THE AUTHOR BE LIABLE +FOR ANY SPECIAL DIRECT INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY +DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE DATA OR PROFITS WHETHER IN +AN ACTION OF CONTRACT NEGLIGENCE OR OTHER TORTIOUS ACTION ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE diff --git a/crates/zeta/license_regexes/apache-2.0.regex b/crates/zeta/license_regexes/apache-2.0.regex new file mode 100644 index 0000000000000000000000000000000000000000..26cbecf2ee299e957e18d9da5c467f7788874358 --- /dev/null +++ b/crates/zeta/license_regexes/apache-2.0.regex @@ -0,0 +1,223 @@ +.{0,512}Licensed under the Apache License Version 20 the License +you may not use this file except in compliance with the License +You may obtain a copy of the License at + + https?wwwapacheorglicensesLICENSE20 + +Unless required by applicable law or agreed to in writing software +distributed under the License is distributed on an AS IS BASIS +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND either express or implied +See the License for the specific language governing permissions and +limitations under the License|.{0,512}(?:Licensed under the Apache License Version 20 the License +you may not use this file except in compliance with the License +You may obtain a copy of the License at + + https?wwwapacheorglicensesLICENSE20 + +Unless required by applicable law or agreed to in writing software 
+distributed under the License is distributed on an AS IS BASIS +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND either express or implied +See the License for the specific language governing permissions and +limitations under the License)? + + ?Apache License + Version 20 January 2004 + https?wwwapacheorglicenses + + TERMS AND CONDITIONS FOR USE REPRODUCTION AND DISTRIBUTION + + 1 Definitions + + License shall mean the terms and conditions for use reproduction + and distribution as defined by Sections 1 through 9 of this document + + Licensor shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License + + Legal Entity shall mean the union of the acting entity and all + other entities that control are controlled by or are under common + control with that entity For the purposes of this definition + control means i the power direct or indirect to cause the + direction or management of such entity whether by contract or + otherwise or ii ownership of fifty percent 50 or more of the + outstanding shares or iii beneficial ownership of such entity + + You or Your shall mean an individual or Legal Entity + exercising permissions granted by this License + + Source form shall mean the preferred form for making modifications + including but not limited to software source code documentation + source and configuration files + + Object form shall mean any form resulting from mechanical + transformation or translation of a Source form including but + not limited to compiled object code generated documentation + and conversions to other media types + + Work shall mean the work of authorship whether in Source or + Object form made available under the License as indicated by a + copyright notice that is included in or attached to the work + an example is provided in the Appendix below + + Derivative Works shall mean any work whether in Source or Object + form that is based on or derived from the Work and for which the + editorial 
revisions annotations elaborations or other modifications + represent as a whole an original work of authorship For the purposes + of this License Derivative Works shall not include works that remain + separable from or merely link or bind by name to the interfaces of + the Work and Derivative Works thereof + + Contribution shall mean any work of authorship including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner For the purposes of this definition submitted + means any form of electronic verbal or written communication sent + to the Licensor or its representatives including but not limited to + communication on electronic mailing lists source code control systems + and issue tracking systems that are managed by or on behalf of the + Licensor for the purpose of discussing and improving the Work but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as Not a Contribution + + Contributor shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work + + 2 Grant of Copyright License Subject to the terms and conditions of + this License each Contributor hereby grants to You a perpetual + worldwide nonexclusive nocharge royaltyfree irrevocable + copyright license to reproduce prepare Derivative Works of + publicly display publicly perform sublicense and distribute the + Work and such Derivative Works in Source or Object form + + 3 Grant of Patent License Subject to the terms and conditions of + this License each Contributor hereby grants to You a perpetual + worldwide nonexclusive nocharge royaltyfree irrevocable + except as stated in this section patent 
license to make have made + use offer to sell sell import and otherwise transfer the Work + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contributions alone or by combination of their Contributions + with the Work to which such Contributions was submitted If You + institute patent litigation against any entity including a + crossclaim or counterclaim in a lawsuit alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed + + 4 Redistribution You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium with or without + modifications and in Source or Object form provided that You + meet the following conditions + + (?:a )?You must give any other recipients of the Work or + Derivative Works a copy of this License and + + (?:b )?You must cause any modified files to carry prominent notices + stating that You changed the files and + + (?:c )?You must retain in the Source form of any Derivative Works + that You distribute all copyright patent trademark and + attribution notices from the Source form of the Work + excluding those notices that do not pertain to any part of + the Derivative Works and + + (?:d )?If the Work includes a NOTICE text file as part of its + distribution then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file excluding those notices that do not + pertain to any part of the Derivative Works in at least one + of the following places within a NOTICE text file distributed + as part of the Derivative Works within the Source form or + documentation if provided along with the Derivative Works or + within a display generated by the Derivative Works if and + 
wherever such thirdparty notices normally appear The contents + of the NOTICE file are for informational purposes only and + do not modify the License You may add Your own attribution + notices within Derivative Works that You distribute alongside + or as an addendum to the NOTICE text from the Work provided + that such additional attribution notices cannot be construed + as modifying the License + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use reproduction or distribution of Your modifications or + for any such Derivative Works as a whole provided Your use + reproduction and distribution of the Work otherwise complies with + the conditions stated in this License + + 5 Submission of Contributions Unless You explicitly state otherwise + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License without any additional terms or conditions + Notwithstanding the above nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions + + 6 Trademarks This License does not grant permission to use the trade + names trademarks service marks or product names of the Licensor + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file + + 7 Disclaimer of Warranty Unless required by applicable law or + agreed to in writing Licensor provides the Work and each + Contributor provides its Contributions on an AS IS BASIS + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND either express or + implied including without limitation any warranties or conditions + of TITLE NONINFRINGEMENT MERCHANTABILITY or FITNESS FOR A + PARTICULAR PURPOSE You are solely responsible for determining the + appropriateness of using or redistributing the Work 
and assume any + risks associated with Your exercise of permissions under this License + + 8 Limitation of Liability In no event and under no legal theory + whether in tort including negligence contract or otherwise + unless required by applicable law such as deliberate and grossly + negligent acts or agreed to in writing shall any Contributor be + liable to You for damages including any direct indirect special + incidental or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work including but not limited to damages for loss of goodwill + work stoppage computer failure or malfunction or any and all + other commercial damages or losses even if such Contributor + has been advised of the possibility of such damages + + 9 Accepting Warranty or Additional Liability While redistributing + the Work or Derivative Works thereof You may choose to offer + and charge a fee for acceptance of support warranty indemnity + or other liability obligations andor rights consistent with this + License However in accepting such obligations You may act only + on Your own behalf and on Your sole responsibility not on behalf + of any other Contributor and only if You agree to indemnify + defend and hold each Contributor harmless for any liability + incurred by or claims asserted against such Contributor by reason + of your accepting any such warranty or additional liability(?: + + END OF TERMS AND CONDITIONS)?(?: + + APPENDIX How to apply the Apache License to your work + + To apply the Apache License to your work attach the following + boilerplate notice with the fields enclosed by brackets + replaced with your own identifying information Dont include + the brackets The text should be enclosed in the appropriate + comment syntax for the file format We also recommend that a + file or class name and description of purpose be included on the + same printed page as the copyright notice for easier + identification within 
thirdparty archives)?(?: + + Copyright.{0,512})?(?: + + Licensed under the Apache License Version 20 the License + you may not use this file except in compliance with the License + You may obtain a copy of the License at + + https?wwwapacheorglicensesLICENSE20 + + Unless required by applicable law or agreed to in writing software + distributed under the License is distributed on an AS IS BASIS + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND either express or implied + See the License for the specific language governing permissions and + limitations under the License)? diff --git a/crates/zeta/license_regexes/bsd.regex b/crates/zeta/license_regexes/bsd.regex new file mode 100644 index 0000000000000000000000000000000000000000..655e38fa4336a9e3580ec8a0fc29bba4e5bd68ab --- /dev/null +++ b/crates/zeta/license_regexes/bsd.regex @@ -0,0 +1,23 @@ +.{0,512}Redistribution and use in source and binary forms with or without +modification are permitted provided that the following conditions are met + +(?:1 )?Redistributions of source code must retain the above copyright +notice this list of conditions and the following disclaimer(?: + +(?:2 )?Redistributions in binary form must reproduce the above copyright +notice this list of conditions and the following disclaimer in the +documentation andor other materials provided with the distribution(?: + +(?:3 )?.{0,128} may be used to endorse or +promote products derived from this software without specific prior written +permission)?)? 
+ +THIS SOFTWARE IS PROVIDED BY .{0,128}AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES +INCLUDING BUT NOT LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED IN NO EVENT SHALL .{0,128}BE LIABLE +FOR ANY DIRECT INDIRECT INCIDENTAL SPECIAL EXEMPLARY OR CONSEQUENTIAL +DAMAGES INCLUDING BUT NOT LIMITED TO PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES LOSS OF USE DATA OR PROFITS OR BUSINESS INTERRUPTION HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY WHETHER IN CONTRACT STRICT LIABILITY OR +TORT INCLUDING NEGLIGENCE OR OTHERWISE ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE diff --git a/crates/zeta/license_regexes/isc.regex b/crates/zeta/license_regexes/isc.regex new file mode 100644 index 0000000000000000000000000000000000000000..ba3e3c9cbf8d1a5711485066c496fa89fb8f4c66 --- /dev/null +++ b/crates/zeta/license_regexes/isc.regex @@ -0,0 +1,12 @@ +.{0,512}Permission to use copy modify andor distribute +this software for any purpose with or without fee is hereby granted provided +that the above copyright notice and this permission notice appear in all +copies + +THE SOFTWARE IS PROVIDED AS IS AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL DIRECT INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE DATA OR PROFITS WHETHER IN AN +ACTION OF CONTRACT NEGLIGENCE OR OTHER TORTIOUS ACTION ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE diff --git a/crates/zeta/license_regexes/mit.regex b/crates/zeta/license_regexes/mit.regex new file mode 100644 index 0000000000000000000000000000000000000000..a8fa7b3ee7a5656d74012790344e1204e75cefa4 --- /dev/null +++ b/crates/zeta/license_regexes/mit.regex @@ -0,0 +1,17 @@ +.{0,512}Permission is hereby granted free of charge to any 
+person obtaining a copy of this software and associated documentation files +the Software to deal in the Software without restriction including +without limitation the rights to use copy modify merge publish distribute +sublicense andor sell copies of the Software and to permit persons to whom +the Software is furnished to do so subject to the following conditions + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software + +THE SOFTWARE IS PROVIDED AS IS WITHOUT WARRANTY OF ANY KIND EXPRESS OR +IMPLIED INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM DAMAGES OR OTHER +LIABILITY WHETHER IN AN ACTION OF CONTRACT TORT OR OTHERWISE ARISING FROM +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE diff --git a/crates/zeta/license_regexes/upl-1.0.regex b/crates/zeta/license_regexes/upl-1.0.regex new file mode 100644 index 0000000000000000000000000000000000000000..f86f5fa3ab76d4f99177923d16b0b5fe8d0f18c0 --- /dev/null +++ b/crates/zeta/license_regexes/upl-1.0.regex @@ -0,0 +1,32 @@ +.{0,512}Subject to the condition set forth below permission is hereby granted to any +person obtaining a copy of this software associated documentation andor data +collectively the Software free of charge and under any and all copyright +rights in the Software and any and all patent rights owned or freely licensable +by each licensor hereunder covering either i the unmodified Software as +contributed to or provided by such licensor or ii the Larger Works as +defined below to deal in both + +a the Software and + +b any piece of software andor hardware listed in the lrgrwrkstxt file if one is + included with the Software each a Larger Work to which the Software is + contributed by such licensors + +without restriction including without limitation 
the rights to copy create +derivative works of display perform and distribute the Software and make use +sell offer for sale import export have made and have sold the Software and the +Larger Works and to sublicense the foregoing rights on either these or other +terms + +This license is subject to the following condition + +The above copyright notice and either this complete permission notice or at a minimum +a reference to the UPL must be included in all copies or substantial portions of the +Software + +THE SOFTWARE IS PROVIDED AS IS WITHOUT WARRANTY OF ANY KIND EXPRESS OR IMPLIED +INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM DAMAGES OR OTHER LIABILITY WHETHER IN AN ACTION OF +CONTRACT TORT OR OTHERWISE ARISING FROM OUT OF OR IN CONNECTION WITH THE SOFTWARE +OR THE USE OR OTHER DEALINGS IN THE SOFTWARE diff --git a/crates/zeta/license_regexes/zlib.regex b/crates/zeta/license_regexes/zlib.regex new file mode 100644 index 0000000000000000000000000000000000000000..63a688d08c0fbc5a24783c1f8a2462979927ca6c --- /dev/null +++ b/crates/zeta/license_regexes/zlib.regex @@ -0,0 +1,18 @@ +.{0,512}This software is provided asis without any express or implied +warranty In no event will the authors be held liable for any damages +arising from the use of this software + +Permission is granted to anyone to use this software for any purpose +including commercial applications and to alter it and redistribute it +freely subject to the following restrictions + +1? The origin of this software must not be misrepresented you must not +claim that you wrote the original software If you use this software +in a product an acknowledgment in the product documentation would be +appreciated but is not required + +2? Altered source versions must be plainly marked as such and must not be +misrepresented as being the original software + +3? 
This notice may not be removed or altered from any source +distribution diff --git a/crates/zeta/src/license_detection.rs b/crates/zeta/src/license_detection.rs index d6b8ef10a3363f49f92607e30c6059ffee573a65..81314477e5383450c089be5291e02ba3f8478ac4 100644 --- a/crates/zeta/src/license_detection.rs +++ b/crates/zeta/src/license_detection.rs @@ -8,6 +8,7 @@ use std::{ use fs::Fs; use futures::StreamExt as _; use gpui::{App, AppContext as _, Entity, Subscription, Task}; +use itertools::Itertools; use postage::watch; use project::Worktree; use regex::Regex; @@ -25,7 +26,8 @@ static LICENSE_FILE_NAME_REGEX: LazyLock = LazyLock::new(|| 0? bsd (?: [\\-._] [0123])? (?: [\\-._] clause)? | \ isc | \ mit | \ - upl))? \ + upl | \ + zlib))? \ (?: [\\-._]? (?: license | licence))? \ (?: \\.txt | \\.md)? \ $", @@ -36,16 +38,15 @@ static LICENSE_FILE_NAME_REGEX: LazyLock = LazyLock::new(|| .unwrap() }); -#[derive(Debug, Clone, Copy, Eq, PartialEq, VariantArray)] +#[derive(Debug, Clone, Copy, Eq, Ord, PartialOrd, PartialEq, VariantArray)] pub enum OpenSourceLicense { Apache2_0, - BSD0Clause, - BSD1Clause, - BSD2Clause, - BSD3Clause, + BSDZero, + BSD, ISC, MIT, UPL1_0, + Zlib, } impl Display for OpenSourceLicense { @@ -55,29 +56,31 @@ impl Display for OpenSourceLicense { } impl OpenSourceLicense { + /// These are SPDX identifiers for the licenses, except for BSD, where the variants are not + /// distinguished. pub fn spdx_identifier(&self) -> &'static str { match self { OpenSourceLicense::Apache2_0 => "apache-2.0", - OpenSourceLicense::BSD0Clause => "0bsd", - OpenSourceLicense::BSD1Clause => "bsd-1-clause", - OpenSourceLicense::BSD2Clause => "bsd-2-clause", - OpenSourceLicense::BSD3Clause => "bsd-3-clause", + OpenSourceLicense::BSDZero => "0bsd", + OpenSourceLicense::BSD => "bsd", OpenSourceLicense::ISC => "isc", OpenSourceLicense::MIT => "mit", OpenSourceLicense::UPL1_0 => "upl-1.0", + OpenSourceLicense::Zlib => "zlib", } } + /// Regexes to match the license text. 
These regexes are expected to match the entire file. Also + /// note that `canonicalize_license_text` removes everything but alphanumeric ascii characters. pub fn regex(&self) -> &'static str { match self { - OpenSourceLicense::Apache2_0 => include_str!("license_detection/apache-2.0.regex"), - OpenSourceLicense::BSD0Clause => include_str!("license_detection/0bsd.regex"), - OpenSourceLicense::BSD1Clause => include_str!("license_detection/bsd-1-clause.regex"), - OpenSourceLicense::BSD2Clause => include_str!("license_detection/bsd-2-clause.regex"), - OpenSourceLicense::BSD3Clause => include_str!("license_detection/bsd-3-clause.regex"), - OpenSourceLicense::ISC => include_str!("license_detection/isc.regex"), - OpenSourceLicense::MIT => include_str!("license_detection/mit.regex"), - OpenSourceLicense::UPL1_0 => include_str!("license_detection/upl-1.0.regex"), + OpenSourceLicense::Apache2_0 => include_str!("../license_regexes/apache-2.0.regex"), + OpenSourceLicense::BSDZero => include_str!("../license_regexes/0bsd.regex"), + OpenSourceLicense::BSD => include_str!("../license_regexes/bsd.regex"), + OpenSourceLicense::ISC => include_str!("../license_regexes/isc.regex"), + OpenSourceLicense::MIT => include_str!("../license_regexes/mit.regex"), + OpenSourceLicense::UPL1_0 => include_str!("../license_regexes/upl-1.0.regex"), + OpenSourceLicense::Zlib => include_str!("../license_regexes/zlib.regex"), } } } @@ -93,7 +96,7 @@ fn detect_license(license: &str) -> Option { } else { regex_string.push_str(")|("); } - regex_string.push_str(&canonicalize_license_text(license.regex())); + regex_string.push_str(&canonicalize_license_regex(license.regex())); } regex_string.push_str("))$"); let regex = Regex::new(®ex_string).unwrap(); @@ -116,15 +119,25 @@ fn detect_license(license: &str) -> Option { }) } -/// Canonicalizes the whitespace of license text and license regexes. -fn canonicalize_license_text(license: &str) -> String { +/// Canonicalizes the whitespace of license text. 
+fn canonicalize_license_regex(license: &str) -> String { license .split_ascii_whitespace() - .collect::>() .join(" ") .to_ascii_lowercase() } +/// Canonicalizes the whitespace of license text. +fn canonicalize_license_text(license: &str) -> String { + license + .chars() + .filter(|c| c.is_ascii_alphanumeric() || c.is_ascii_whitespace()) + .map(|c| c.to_ascii_lowercase()) + .collect::() + .split_ascii_whitespace() + .join(" ") +} + pub enum LicenseDetectionWatcher { Local { is_open_source_rx: watch::Receiver, @@ -254,26 +267,96 @@ mod tests { use super::*; - const APACHE_2_0_TXT: &str = include_str!("license_detection/apache-2.0.txt"); - const ISC_TXT: &str = include_str!("license_detection/isc.txt"); - const MIT_TXT: &str = include_str!("license_detection/mit.txt"); - const UPL_1_0_TXT: &str = include_str!("license_detection/upl-1.0.txt"); - const BSD_0_CLAUSE_TXT: &str = include_str!("license_detection/0bsd.txt"); - const BSD_1_CLAUSE_TXT: &str = include_str!("license_detection/bsd-1-clause.txt"); - const BSD_2_CLAUSE_TXT: &str = include_str!("license_detection/bsd-2-clause.txt"); - const BSD_3_CLAUSE_TXT: &str = include_str!("license_detection/bsd-3-clause.txt"); + const APACHE_2_0_TXT: &str = include_str!("../license_examples/apache-2.0-ex0.txt"); + const ISC_TXT: &str = include_str!("../license_examples/isc.txt"); + const MIT_TXT: &str = include_str!("../license_examples/mit-ex0.txt"); + const UPL_1_0_TXT: &str = include_str!("../license_examples/upl-1.0.txt"); + const BSD_0_TXT: &str = include_str!("../license_examples/0bsd.txt"); #[track_caller] fn assert_matches_license(text: &str, license: OpenSourceLicense) { - let license_regex = - Regex::new(&format!("^{}$", canonicalize_license_text(license.regex()))).unwrap(); - assert!(license_regex.is_match(&canonicalize_license_text(text))); - assert_eq!(detect_license(text), Some(license)); + if detect_license(text) != Some(license) { + let license_regex_text = canonicalize_license_regex(license.regex()); + let 
license_regex = Regex::new(&format!("^{}$", license_regex_text)).unwrap(); + let text = canonicalize_license_text(text); + let matched_regex = license_regex.is_match(&text); + if matched_regex { + panic!( + "The following text matches the individual regex for {}, \ + but not the combined one:\n```license-text\n{}\n```\n", + license, text + ); + } else { + panic!( + "The following text doesn't match the regex for {}:\n\ + ```license-text\n{}\n```\n\n```regex\n{}\n```\n", + license, text, license_regex_text + ); + } + } } + /* + // Uncomment this and run with `cargo test -p zeta -- --no-capture &> licenses-output` to + // traverse your entire home directory and run license detection on every file that has a + // license-like name. #[test] - fn test_0bsd_positive_detection() { - assert_matches_license(BSD_0_CLAUSE_TXT, OpenSourceLicense::BSD0Clause); + fn test_check_all_licenses_in_home_dir() { + let mut detected = Vec::new(); + let mut unrecognized = Vec::new(); + let mut walked_entries = 0; + let homedir = std::env::home_dir().unwrap(); + for entry in walkdir::WalkDir::new(&homedir) { + walked_entries += 1; + if walked_entries % 10000 == 0 { + println!( + "So far visited {} files in {}", + walked_entries, + homedir.display() + ); + } + let Ok(entry) = entry else { + continue; + }; + if !LICENSE_FILE_NAME_REGEX.is_match(entry.file_name().as_encoded_bytes()) { + continue; + } + let Ok(contents) = std::fs::read_to_string(entry.path()) else { + continue; + }; + let path_string = entry.path().to_string_lossy().to_string(); + match detect_license(&contents) { + Some(license) => detected.push((license, path_string)), + None => unrecognized.push(path_string), + } + } + println!("\nDetected licenses:\n"); + detected.sort(); + for (license, path) in &detected { + println!("{}: {}", license.spdx_identifier(), path); + } + println!("\nUnrecognized licenses:\n"); + for path in &unrecognized { + println!("{}", path); + } + panic!( + "{} licenses detected, {} unrecognized", + 
detected.len(), + unrecognized.len() + ); + println!("This line has a warning to make sure this test is always commented out"); + } + */ + + #[test] + fn test_no_unicode_in_regexes() { + for license in OpenSourceLicense::VARIANTS { + assert!( + !license.regex().contains(|c: char| !c.is_ascii()), + "{}.regex contains unicode", + license.spdx_identifier() + ); + } } #[test] @@ -319,6 +402,24 @@ mod tests { ); assert!(license_with_copyright != license_with_appendix); assert_matches_license(&license_with_copyright, OpenSourceLicense::Apache2_0); + + assert_matches_license( + include_str!("../../../LICENSE-APACHE"), + OpenSourceLicense::Apache2_0, + ); + + assert_matches_license( + include_str!("../license_examples/apache-2.0-ex1.txt"), + OpenSourceLicense::Apache2_0, + ); + assert_matches_license( + include_str!("../license_examples/apache-2.0-ex2.txt"), + OpenSourceLicense::Apache2_0, + ); + assert_matches_license( + include_str!("../license_examples/apache-2.0-ex3.txt"), + OpenSourceLicense::Apache2_0, + ); } #[test] @@ -333,17 +434,47 @@ mod tests { #[test] fn test_bsd_1_clause_positive_detection() { - assert_matches_license(BSD_1_CLAUSE_TXT, OpenSourceLicense::BSD1Clause); + assert_matches_license( + include_str!("../license_examples/bsd-1-clause.txt"), + OpenSourceLicense::BSD, + ); } #[test] fn test_bsd_2_clause_positive_detection() { - assert_matches_license(BSD_2_CLAUSE_TXT, OpenSourceLicense::BSD2Clause); + assert_matches_license( + include_str!("../license_examples/bsd-2-clause-ex0.txt"), + OpenSourceLicense::BSD, + ); } #[test] fn test_bsd_3_clause_positive_detection() { - assert_matches_license(BSD_3_CLAUSE_TXT, OpenSourceLicense::BSD3Clause); + assert_matches_license( + include_str!("../license_examples/bsd-3-clause-ex0.txt"), + OpenSourceLicense::BSD, + ); + assert_matches_license( + include_str!("../license_examples/bsd-3-clause-ex1.txt"), + OpenSourceLicense::BSD, + ); + assert_matches_license( + include_str!("../license_examples/bsd-3-clause-ex2.txt"), 
+ OpenSourceLicense::BSD, + ); + assert_matches_license( + include_str!("../license_examples/bsd-3-clause-ex3.txt"), + OpenSourceLicense::BSD, + ); + assert_matches_license( + include_str!("../license_examples/bsd-3-clause-ex4.txt"), + OpenSourceLicense::BSD, + ); + } + + #[test] + fn test_bsd_0_positive_detection() { + assert_matches_license(BSD_0_TXT, OpenSourceLicense::BSDZero); } #[test] @@ -365,6 +496,18 @@ mod tests { #[test] fn test_mit_positive_detection() { assert_matches_license(MIT_TXT, OpenSourceLicense::MIT); + assert_matches_license( + include_str!("../license_examples/mit-ex1.txt"), + OpenSourceLicense::MIT, + ); + assert_matches_license( + include_str!("../license_examples/mit-ex2.txt"), + OpenSourceLicense::MIT, + ); + assert_matches_license( + include_str!("../license_examples/mit-ex3.txt"), + OpenSourceLicense::MIT, + ); } #[test] @@ -393,6 +536,14 @@ mod tests { assert!(detect_license(&license_text).is_none()); } + #[test] + fn test_zlib_positive_detection() { + assert_matches_license( + include_str!("../license_examples/zlib-ex0.txt"), + OpenSourceLicense::Zlib, + ); + } + #[test] fn test_license_file_name_regex() { // Test basic license file names diff --git a/crates/zeta/src/license_detection/0bsd.regex b/crates/zeta/src/license_detection/0bsd.regex deleted file mode 100644 index 7928a8d181a48ad54bb825ac120aaa4ef53ba8ef..0000000000000000000000000000000000000000 --- a/crates/zeta/src/license_detection/0bsd.regex +++ /dev/null @@ -1,12 +0,0 @@ -.* - -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted\. - -THE SOFTWARE IS PROVIDED “AS IS” AND THE AUTHOR DISCLAIMS ALL -WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES -OF MERCHANTABILITY AND FITNESS\. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE -FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY -DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN -AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE\. diff --git a/crates/zeta/src/license_detection/bsd-1-clause.regex b/crates/zeta/src/license_detection/bsd-1-clause.regex deleted file mode 100644 index 5e73e5c6d0e67cd9e4899e1a44bd064f11f3e3dc..0000000000000000000000000000000000000000 --- a/crates/zeta/src/license_detection/bsd-1-clause.regex +++ /dev/null @@ -1,17 +0,0 @@ -.*Copyright.* - -Redistribution and use in source and binary forms, with or without modification, -are permitted provided that the following conditions are met: - -(?:1\.|\*)? Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer\. - -THIS SOFTWARE IS PROVIDED BY .* “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, -INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL .* BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES \(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION\) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR -TORT \(INCLUDING NEGLIGENCE OR OTHERWISE\) ARISING IN ANY WAY OUT OF THE USE OF -THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE\. 
diff --git a/crates/zeta/src/license_detection/bsd-2-clause.regex b/crates/zeta/src/license_detection/bsd-2-clause.regex deleted file mode 100644 index 93d22652fb11ba81d55e7d2d38e1b42bdce243b6..0000000000000000000000000000000000000000 --- a/crates/zeta/src/license_detection/bsd-2-clause.regex +++ /dev/null @@ -1,22 +0,0 @@ -.*Copyright.* - -Redistribution and use in source and binary forms, with or without modification, -are permitted provided that the following conditions are met: - -(?:1\.|\*)? Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer\. - -(?:2\.|\*)? Redistributions in binary form must reproduce the above copyright -notice, this list of conditions and the following disclaimer in the -documentation and/or other materials provided with the distribution\. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED\. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES \(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION\) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR -TORT \(INCLUDING NEGLIGENCE OR OTHERWISE\) ARISING IN ANY WAY OUT OF THE USE OF -THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE\. 
diff --git a/crates/zeta/src/license_detection/bsd-3-clause.regex b/crates/zeta/src/license_detection/bsd-3-clause.regex deleted file mode 100644 index b31443de64283d0d66135b73e57eaf9bd19b88a3..0000000000000000000000000000000000000000 --- a/crates/zeta/src/license_detection/bsd-3-clause.regex +++ /dev/null @@ -1,26 +0,0 @@ -.*Copyright.* - -Redistribution and use in source and binary forms, with or without modification, -are permitted provided that the following conditions are met: - -(?:1\.|\*)? Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer\. - -(?:2\.|\*)? Redistributions in binary form must reproduce the above copyright -notice, this list of conditions and the following disclaimer in the -documentation and/or other materials provided with the distribution\. - -(?:3\.|\*)? Neither the name of the copyright holder nor the names of its -contributors may be used to endorse or promote products derived from this -software without specific prior written permission\. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED\. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES \(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION\) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR -TORT \(INCLUDING NEGLIGENCE OR OTHERWISE\) ARISING IN ANY WAY OUT OF THE USE OF -THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE\. 
diff --git a/crates/zeta/src/license_detection/isc.regex b/crates/zeta/src/license_detection/isc.regex deleted file mode 100644 index ddaece5375fc17455e8640bb47a807d5cd347f5b..0000000000000000000000000000000000000000 --- a/crates/zeta/src/license_detection/isc.regex +++ /dev/null @@ -1,15 +0,0 @@ -.*ISC License.* - -Copyright.* - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies\. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS\. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE\. diff --git a/crates/zeta/src/license_detection/mit.regex b/crates/zeta/src/license_detection/mit.regex deleted file mode 100644 index 43130424c5fe5f73d11ddda5d5c821bc6cb86afe..0000000000000000000000000000000000000000 --- a/crates/zeta/src/license_detection/mit.regex +++ /dev/null @@ -1,21 +0,0 @@ -.*MIT License.* - -Copyright.* - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files \(the "Software"\), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software\. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT\. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE\. diff --git a/crates/zeta/src/license_detection/upl-1.0.regex b/crates/zeta/src/license_detection/upl-1.0.regex deleted file mode 100644 index 0959f729716af4714ae9f41c92e1480d276cdeab..0000000000000000000000000000000000000000 --- a/crates/zeta/src/license_detection/upl-1.0.regex +++ /dev/null @@ -1,35 +0,0 @@ -Copyright.* - -The Universal Permissive License.* - -Subject to the condition set forth below, permission is hereby granted to any person -obtaining a copy of this software, associated documentation and/or data \(collectively -the "Software"\), free of charge and under any and all copyright rights in the -Software, and any and all patent rights owned or freely licensable by each licensor -hereunder covering either \(i\) the unmodified Software as contributed to or provided -by such licensor, or \(ii\) the Larger Works \(as defined below\), to deal in both - -\(a\) the Software, and - -\(b\) any piece of software and/or hardware listed in the lrgrwrks\.txt file if one is - included with the Software \(each a "Larger Work" to which the Software is - contributed by such licensors\), - -without restriction, including without limitation the rights to copy, create -derivative works of, display, perform, and distribute the Software and make, use, -sell, offer for sale, import, export, have made, and have sold the Software and the -Larger Work\(s\), and to sublicense the foregoing rights on either these or other -terms\. 
- -This license is subject to the following condition: - -The above copyright notice and either this complete permission notice or at a minimum -a reference to the UPL must be included in all copies or substantial portions of the -Software\. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, -INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -PARTICULAR PURPOSE AND NONINFRINGEMENT\. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF -CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE -OR THE USE OR OTHER DEALINGS IN THE SOFTWARE\. From 253765aaa1f15d45e2effe098222d07550fdf0e9 Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Sun, 31 Aug 2025 01:23:21 -0600 Subject: [PATCH 32/54] zeta: Improve efficiency and clarity of license detection patterns (#37242) See discussion on #36564 Adds a simple ad-hoc substring matching pattern language which allows skipping a bounded number of chars between matched substrings. Before this change compiling the regex was taking ~120ms on a fast machine and ~8mb of memory. This new version is way faster and uses minimal memory. Checked the behavior of this vs the regex implementation by running it against 10k licenses that happened to be in my home dir. There were only 4 differences of behavior with the regex implementation, and these were false negatives for the regex implementation that are true positives with the new one. Of the ~10k licenses in my home dir, ~1k do not match one of these licenses, usually because it's GPL/MPL/etc. 
Release Notes: - N/A --- Cargo.lock | 1 - crates/zeta/Cargo.toml | 1 - .../zeta/license_examples/apache-2.0-ex4.txt | 187 ++++++++++ crates/zeta/license_patterns/0bsd-pattern | 11 + .../zeta/license_patterns/apache-2.0-pattern | 109 ++++++ .../apache-2.0-reference-pattern | 14 + crates/zeta/license_patterns/bsd-pattern | 32 ++ crates/zeta/license_patterns/isc-pattern | 12 + crates/zeta/license_patterns/mit-pattern | 18 + crates/zeta/license_patterns/upl-1.0-pattern | 32 ++ crates/zeta/license_patterns/zlib-pattern | 21 ++ crates/zeta/license_regexes/0bsd.regex | 10 - crates/zeta/license_regexes/apache-2.0.regex | 223 ----------- crates/zeta/license_regexes/bsd.regex | 23 -- crates/zeta/license_regexes/isc.regex | 12 - crates/zeta/license_regexes/mit.regex | 17 - crates/zeta/license_regexes/upl-1.0.regex | 32 -- crates/zeta/license_regexes/zlib.regex | 18 - crates/zeta/src/license_detection.rs | 352 +++++++++--------- 19 files changed, 614 insertions(+), 511 deletions(-) create mode 100644 crates/zeta/license_examples/apache-2.0-ex4.txt create mode 100644 crates/zeta/license_patterns/0bsd-pattern create mode 100644 crates/zeta/license_patterns/apache-2.0-pattern create mode 100644 crates/zeta/license_patterns/apache-2.0-reference-pattern create mode 100644 crates/zeta/license_patterns/bsd-pattern create mode 100644 crates/zeta/license_patterns/isc-pattern create mode 100644 crates/zeta/license_patterns/mit-pattern create mode 100644 crates/zeta/license_patterns/upl-1.0-pattern create mode 100644 crates/zeta/license_patterns/zlib-pattern delete mode 100644 crates/zeta/license_regexes/0bsd.regex delete mode 100644 crates/zeta/license_regexes/apache-2.0.regex delete mode 100644 crates/zeta/license_regexes/bsd.regex delete mode 100644 crates/zeta/license_regexes/isc.regex delete mode 100644 crates/zeta/license_regexes/mit.regex delete mode 100644 crates/zeta/license_regexes/upl-1.0.regex delete mode 100644 crates/zeta/license_regexes/zlib.regex diff --git a/Cargo.lock 
b/Cargo.lock index 4ca45445e2ed26819c612381b682aa9d1bf35d07..ab3b713a113a95183e5f394bae0f1a31301da3f1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -20840,7 +20840,6 @@ dependencies = [ "tree-sitter-go", "tree-sitter-rust", "ui", - "unindent", "util", "uuid", "workspace", diff --git a/crates/zeta/Cargo.toml b/crates/zeta/Cargo.toml index a57781ee8ee4b97805935efc7943df9eff1a8958..a9c2a7619f4db22e51c014672aa2100b30a2539a 100644 --- a/crates/zeta/Cargo.toml +++ b/crates/zeta/Cargo.toml @@ -78,7 +78,6 @@ settings = { workspace = true, features = ["test-support"] } theme = { workspace = true, features = ["test-support"] } tree-sitter-go.workspace = true tree-sitter-rust.workspace = true -unindent.workspace = true workspace = { workspace = true, features = ["test-support"] } worktree = { workspace = true, features = ["test-support"] } zlog.workspace = true diff --git a/crates/zeta/license_examples/apache-2.0-ex4.txt b/crates/zeta/license_examples/apache-2.0-ex4.txt new file mode 100644 index 0000000000000000000000000000000000000000..8c004949ee1e3da9b0ba26cd35c57e61c243c3d1 --- /dev/null +++ b/crates/zeta/license_examples/apache-2.0-ex4.txt @@ -0,0 +1,187 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + Copyright (c) 2017, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/crates/zeta/license_patterns/0bsd-pattern b/crates/zeta/license_patterns/0bsd-pattern new file mode 100644 index 0000000000000000000000000000000000000000..8b7f6100424931fa957615c5326d347eb1825942 --- /dev/null +++ b/crates/zeta/license_patterns/0bsd-pattern @@ -0,0 +1,11 @@ +-- 0..512 +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED “AS IS” AND THE AUTHOR DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE +FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY +DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN +AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/crates/zeta/license_patterns/apache-2.0-pattern b/crates/zeta/license_patterns/apache-2.0-pattern new file mode 100644 index 0000000000000000000000000000000000000000..39e2d10c25800c1be612a3d43408025bcad9e74b --- /dev/null +++ b/crates/zeta/license_patterns/apache-2.0-pattern @@ -0,0 +1,109 @@ +-- 0..512 +-- 0..0 optional: +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http +-- 0..1 optional: +://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +-- 0..5 +Apache License + +Version 2.0, January 2004 + +http +-- 0..1 +://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. 
+ +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 
+ +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + +-- 1..5 +You must give any other recipients of the Work or Derivative Works a copy of this License; and + +-- 1..5 +You must cause any modified files to carry prominent notices stating that You changed the files; and + +-- 1..5 +You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + +-- 1..5 +If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text 
file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 
+ +-- 1..1 optional: +END OF TERMS AND CONDITIONS + +-- 1..1 optional: +APPENDIX How to apply the Apache License to your work + +To apply the Apache License to your work attach the following +boilerplate notice with the fields enclosed by brackets +replaced with your own identifying information Dont include +the brackets The text should be enclosed in the appropriate +comment syntax for the file format We also recommend that a +file or class name and description of purpose be included on the +same printed page as the copyright notice for easier +identification within thirdparty archives + +-- 1..512 optional: +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. + +-- 1..5 optional: +You may obtain a copy of the License at + +http +-- 0..1 optional: +://www.apache.org/licenses/LICENSE-2.0 + +-- 1..5 optional: +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/crates/zeta/license_patterns/apache-2.0-reference-pattern b/crates/zeta/license_patterns/apache-2.0-reference-pattern new file mode 100644 index 0000000000000000000000000000000000000000..192148fc7a774f563e421b84eb9dbdd39fe3f8cc --- /dev/null +++ b/crates/zeta/license_patterns/apache-2.0-reference-pattern @@ -0,0 +1,14 @@ +-- 0..512 +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http +-- 0..1 +://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. diff --git a/crates/zeta/license_patterns/bsd-pattern b/crates/zeta/license_patterns/bsd-pattern new file mode 100644 index 0000000000000000000000000000000000000000..917b7e3c443db4354f33c647be3a875eb9b6e9fa --- /dev/null +++ b/crates/zeta/license_patterns/bsd-pattern @@ -0,0 +1,32 @@ +-- 0..512 +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +-- 1..5 +Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +-- 1..5 optional: +Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +-- 1..128 optional: +may be used to endorse or promote products derived from this software without +specific prior written permission. + +-- 1..5 +THIS SOFTWARE IS PROVIDED +-- 1..128 +“AS IS” AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL +-- 1..128 +BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/crates/zeta/license_patterns/isc-pattern b/crates/zeta/license_patterns/isc-pattern new file mode 100644 index 0000000000000000000000000000000000000000..8a47a1339f5c8462b96e5c77312451e2b324dcf0 --- /dev/null +++ b/crates/zeta/license_patterns/isc-pattern @@ -0,0 +1,12 @@ +-- 0..512 +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/crates/zeta/license_patterns/mit-pattern b/crates/zeta/license_patterns/mit-pattern new file mode 100644 index 0000000000000000000000000000000000000000..6e21baa00cbeb24d24e0da6eac3dac61a5355000 --- /dev/null +++ b/crates/zeta/license_patterns/mit-pattern @@ -0,0 +1,18 @@ +-- 0..512 +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/crates/zeta/license_patterns/upl-1.0-pattern b/crates/zeta/license_patterns/upl-1.0-pattern new file mode 100644 index 0000000000000000000000000000000000000000..da9d26ca3faf51654b7d42a734c88c8958a9ad8e --- /dev/null +++ b/crates/zeta/license_patterns/upl-1.0-pattern @@ -0,0 +1,32 @@ +-- 0..512 +Subject to the condition set forth below, permission is hereby granted to any person +obtaining a copy of this software, associated documentation and/or data (collectively +the "Software"), free of charge and under any and all copyright rights in the +Software, and any and all patent rights owned or freely licensable by each licensor +hereunder covering either (i) the unmodified Software as contributed to or provided +by such licensor, or (ii) the Larger Works (as defined below), to deal in both + +(a) the Software, and + +(b) any piece of software and/or hardware listed in the lrgrwrks.txt file if one is + included with the Software (each a "Larger Work" to which the Software is + contributed by such licensors), + +without restriction, including without limitation the rights to copy, create +derivative works of, display, perform, and distribute the Software and make, use, +sell, offer for sale, import, export, have made, and have sold the Software and the +Larger Work(s), and to sublicense the foregoing rights on either these or other +terms. 
+ +This license is subject to the following condition: + +The above copyright notice and either this complete permission notice or at a minimum +a reference to the UPL must be included in all copies or substantial portions of the +Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF +CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE +OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/crates/zeta/license_patterns/zlib-pattern b/crates/zeta/license_patterns/zlib-pattern new file mode 100644 index 0000000000000000000000000000000000000000..121b5409ccd9be87eaed17673ee3ded74fcac76c --- /dev/null +++ b/crates/zeta/license_patterns/zlib-pattern @@ -0,0 +1,21 @@ +-- 0..512 +This software is provided 'as-is', without any express or implied warranty. In +no event will the authors be held liable for any damages arising from the use of +this software. + +Permission is granted to anyone to use this software for any purpose, including +commercial applications, and to alter it and redistribute it freely, subject to +the following restrictions: + +-- 1..5 +The origin of this software must not be misrepresented; you must not claim +that you wrote the original software. If you use this software in a product, +an acknowledgment in the product documentation would be appreciated but is +not required. + +-- 1..5 +Altered source versions must be plainly marked as such, and must not be +misrepresented as being the original software. + +-- 1..5 +This notice may not be removed or altered from any source distribution. 
diff --git a/crates/zeta/license_regexes/0bsd.regex b/crates/zeta/license_regexes/0bsd.regex deleted file mode 100644 index 15725f206a905fb0de1c2f03ec40dde25a1f01c4..0000000000000000000000000000000000000000 --- a/crates/zeta/license_regexes/0bsd.regex +++ /dev/null @@ -1,10 +0,0 @@ -.{0,512}Permission to use copy modify andor distribute this software for any -purpose with or without fee is hereby granted - -THE SOFTWARE IS PROVIDED AS IS AND THE AUTHOR DISCLAIMS ALL -WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES -OF MERCHANTABILITY AND FITNESS IN NO EVENT SHALL THE AUTHOR BE LIABLE -FOR ANY SPECIAL DIRECT INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY -DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE DATA OR PROFITS WHETHER IN -AN ACTION OF CONTRACT NEGLIGENCE OR OTHER TORTIOUS ACTION ARISING OUT -OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE diff --git a/crates/zeta/license_regexes/apache-2.0.regex b/crates/zeta/license_regexes/apache-2.0.regex deleted file mode 100644 index 26cbecf2ee299e957e18d9da5c467f7788874358..0000000000000000000000000000000000000000 --- a/crates/zeta/license_regexes/apache-2.0.regex +++ /dev/null @@ -1,223 +0,0 @@ -.{0,512}Licensed under the Apache License Version 20 the License -you may not use this file except in compliance with the License -You may obtain a copy of the License at - - https?wwwapacheorglicensesLICENSE20 - -Unless required by applicable law or agreed to in writing software -distributed under the License is distributed on an AS IS BASIS -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND either express or implied -See the License for the specific language governing permissions and -limitations under the License|.{0,512}(?:Licensed under the Apache License Version 20 the License -you may not use this file except in compliance with the License -You may obtain a copy of the License at - - https?wwwapacheorglicensesLICENSE20 - -Unless required by applicable law or agreed to in writing software 
-distributed under the License is distributed on an AS IS BASIS -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND either express or implied -See the License for the specific language governing permissions and -limitations under the License)? - - ?Apache License - Version 20 January 2004 - https?wwwapacheorglicenses - - TERMS AND CONDITIONS FOR USE REPRODUCTION AND DISTRIBUTION - - 1 Definitions - - License shall mean the terms and conditions for use reproduction - and distribution as defined by Sections 1 through 9 of this document - - Licensor shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License - - Legal Entity shall mean the union of the acting entity and all - other entities that control are controlled by or are under common - control with that entity For the purposes of this definition - control means i the power direct or indirect to cause the - direction or management of such entity whether by contract or - otherwise or ii ownership of fifty percent 50 or more of the - outstanding shares or iii beneficial ownership of such entity - - You or Your shall mean an individual or Legal Entity - exercising permissions granted by this License - - Source form shall mean the preferred form for making modifications - including but not limited to software source code documentation - source and configuration files - - Object form shall mean any form resulting from mechanical - transformation or translation of a Source form including but - not limited to compiled object code generated documentation - and conversions to other media types - - Work shall mean the work of authorship whether in Source or - Object form made available under the License as indicated by a - copyright notice that is included in or attached to the work - an example is provided in the Appendix below - - Derivative Works shall mean any work whether in Source or Object - form that is based on or derived from the Work and for which the - editorial 
revisions annotations elaborations or other modifications - represent as a whole an original work of authorship For the purposes - of this License Derivative Works shall not include works that remain - separable from or merely link or bind by name to the interfaces of - the Work and Derivative Works thereof - - Contribution shall mean any work of authorship including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner For the purposes of this definition submitted - means any form of electronic verbal or written communication sent - to the Licensor or its representatives including but not limited to - communication on electronic mailing lists source code control systems - and issue tracking systems that are managed by or on behalf of the - Licensor for the purpose of discussing and improving the Work but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as Not a Contribution - - Contributor shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work - - 2 Grant of Copyright License Subject to the terms and conditions of - this License each Contributor hereby grants to You a perpetual - worldwide nonexclusive nocharge royaltyfree irrevocable - copyright license to reproduce prepare Derivative Works of - publicly display publicly perform sublicense and distribute the - Work and such Derivative Works in Source or Object form - - 3 Grant of Patent License Subject to the terms and conditions of - this License each Contributor hereby grants to You a perpetual - worldwide nonexclusive nocharge royaltyfree irrevocable - except as stated in this section patent 
license to make have made - use offer to sell sell import and otherwise transfer the Work - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contributions alone or by combination of their Contributions - with the Work to which such Contributions was submitted If You - institute patent litigation against any entity including a - crossclaim or counterclaim in a lawsuit alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed - - 4 Redistribution You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium with or without - modifications and in Source or Object form provided that You - meet the following conditions - - (?:a )?You must give any other recipients of the Work or - Derivative Works a copy of this License and - - (?:b )?You must cause any modified files to carry prominent notices - stating that You changed the files and - - (?:c )?You must retain in the Source form of any Derivative Works - that You distribute all copyright patent trademark and - attribution notices from the Source form of the Work - excluding those notices that do not pertain to any part of - the Derivative Works and - - (?:d )?If the Work includes a NOTICE text file as part of its - distribution then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file excluding those notices that do not - pertain to any part of the Derivative Works in at least one - of the following places within a NOTICE text file distributed - as part of the Derivative Works within the Source form or - documentation if provided along with the Derivative Works or - within a display generated by the Derivative Works if and - 
wherever such thirdparty notices normally appear The contents - of the NOTICE file are for informational purposes only and - do not modify the License You may add Your own attribution - notices within Derivative Works that You distribute alongside - or as an addendum to the NOTICE text from the Work provided - that such additional attribution notices cannot be construed - as modifying the License - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use reproduction or distribution of Your modifications or - for any such Derivative Works as a whole provided Your use - reproduction and distribution of the Work otherwise complies with - the conditions stated in this License - - 5 Submission of Contributions Unless You explicitly state otherwise - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License without any additional terms or conditions - Notwithstanding the above nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions - - 6 Trademarks This License does not grant permission to use the trade - names trademarks service marks or product names of the Licensor - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file - - 7 Disclaimer of Warranty Unless required by applicable law or - agreed to in writing Licensor provides the Work and each - Contributor provides its Contributions on an AS IS BASIS - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND either express or - implied including without limitation any warranties or conditions - of TITLE NONINFRINGEMENT MERCHANTABILITY or FITNESS FOR A - PARTICULAR PURPOSE You are solely responsible for determining the - appropriateness of using or redistributing the Work 
and assume any - risks associated with Your exercise of permissions under this License - - 8 Limitation of Liability In no event and under no legal theory - whether in tort including negligence contract or otherwise - unless required by applicable law such as deliberate and grossly - negligent acts or agreed to in writing shall any Contributor be - liable to You for damages including any direct indirect special - incidental or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work including but not limited to damages for loss of goodwill - work stoppage computer failure or malfunction or any and all - other commercial damages or losses even if such Contributor - has been advised of the possibility of such damages - - 9 Accepting Warranty or Additional Liability While redistributing - the Work or Derivative Works thereof You may choose to offer - and charge a fee for acceptance of support warranty indemnity - or other liability obligations andor rights consistent with this - License However in accepting such obligations You may act only - on Your own behalf and on Your sole responsibility not on behalf - of any other Contributor and only if You agree to indemnify - defend and hold each Contributor harmless for any liability - incurred by or claims asserted against such Contributor by reason - of your accepting any such warranty or additional liability(?: - - END OF TERMS AND CONDITIONS)?(?: - - APPENDIX How to apply the Apache License to your work - - To apply the Apache License to your work attach the following - boilerplate notice with the fields enclosed by brackets - replaced with your own identifying information Dont include - the brackets The text should be enclosed in the appropriate - comment syntax for the file format We also recommend that a - file or class name and description of purpose be included on the - same printed page as the copyright notice for easier - identification within 
thirdparty archives)?(?: - - Copyright.{0,512})?(?: - - Licensed under the Apache License Version 20 the License - you may not use this file except in compliance with the License - You may obtain a copy of the License at - - https?wwwapacheorglicensesLICENSE20 - - Unless required by applicable law or agreed to in writing software - distributed under the License is distributed on an AS IS BASIS - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND either express or implied - See the License for the specific language governing permissions and - limitations under the License)? diff --git a/crates/zeta/license_regexes/bsd.regex b/crates/zeta/license_regexes/bsd.regex deleted file mode 100644 index 655e38fa4336a9e3580ec8a0fc29bba4e5bd68ab..0000000000000000000000000000000000000000 --- a/crates/zeta/license_regexes/bsd.regex +++ /dev/null @@ -1,23 +0,0 @@ -.{0,512}Redistribution and use in source and binary forms with or without -modification are permitted provided that the following conditions are met - -(?:1 )?Redistributions of source code must retain the above copyright -notice this list of conditions and the following disclaimer(?: - -(?:2 )?Redistributions in binary form must reproduce the above copyright -notice this list of conditions and the following disclaimer in the -documentation andor other materials provided with the distribution(?: - -(?:3 )?.{0,128} may be used to endorse or -promote products derived from this software without specific prior written -permission)?)? 
- -THIS SOFTWARE IS PROVIDED BY .{0,128}AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES -INCLUDING BUT NOT LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED IN NO EVENT SHALL .{0,128}BE LIABLE -FOR ANY DIRECT INDIRECT INCIDENTAL SPECIAL EXEMPLARY OR CONSEQUENTIAL -DAMAGES INCLUDING BUT NOT LIMITED TO PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES LOSS OF USE DATA OR PROFITS OR BUSINESS INTERRUPTION HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY WHETHER IN CONTRACT STRICT LIABILITY OR -TORT INCLUDING NEGLIGENCE OR OTHERWISE ARISING IN ANY WAY OUT OF THE USE OF -THIS SOFTWARE EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE diff --git a/crates/zeta/license_regexes/isc.regex b/crates/zeta/license_regexes/isc.regex deleted file mode 100644 index ba3e3c9cbf8d1a5711485066c496fa89fb8f4c66..0000000000000000000000000000000000000000 --- a/crates/zeta/license_regexes/isc.regex +++ /dev/null @@ -1,12 +0,0 @@ -.{0,512}Permission to use copy modify andor distribute -this software for any purpose with or without fee is hereby granted provided -that the above copyright notice and this permission notice appear in all -copies - -THE SOFTWARE IS PROVIDED AS IS AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL DIRECT INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE DATA OR PROFITS WHETHER IN AN -ACTION OF CONTRACT NEGLIGENCE OR OTHER TORTIOUS ACTION ARISING OUT OF -OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE diff --git a/crates/zeta/license_regexes/mit.regex b/crates/zeta/license_regexes/mit.regex deleted file mode 100644 index a8fa7b3ee7a5656d74012790344e1204e75cefa4..0000000000000000000000000000000000000000 --- a/crates/zeta/license_regexes/mit.regex +++ /dev/null @@ -1,17 +0,0 @@ -.{0,512}Permission is hereby granted free of charge to 
any -person obtaining a copy of this software and associated documentation files -the Software to deal in the Software without restriction including -without limitation the rights to use copy modify merge publish distribute -sublicense andor sell copies of the Software and to permit persons to whom -the Software is furnished to do so subject to the following conditions - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software - -THE SOFTWARE IS PROVIDED AS IS WITHOUT WARRANTY OF ANY KIND EXPRESS OR -IMPLIED INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM DAMAGES OR OTHER -LIABILITY WHETHER IN AN ACTION OF CONTRACT TORT OR OTHERWISE ARISING FROM -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE diff --git a/crates/zeta/license_regexes/upl-1.0.regex b/crates/zeta/license_regexes/upl-1.0.regex deleted file mode 100644 index f86f5fa3ab76d4f99177923d16b0b5fe8d0f18c0..0000000000000000000000000000000000000000 --- a/crates/zeta/license_regexes/upl-1.0.regex +++ /dev/null @@ -1,32 +0,0 @@ -.{0,512}Subject to the condition set forth below permission is hereby granted to any -person obtaining a copy of this software associated documentation andor data -collectively the Software free of charge and under any and all copyright -rights in the Software and any and all patent rights owned or freely licensable -by each licensor hereunder covering either i the unmodified Software as -contributed to or provided by such licensor or ii the Larger Works as -defined below to deal in both - -a the Software and - -b any piece of software andor hardware listed in the lrgrwrkstxt file if one is - included with the Software each a Larger Work to which the Software is - contributed by such licensors - -without restriction including without 
limitation the rights to copy create -derivative works of display perform and distribute the Software and make use -sell offer for sale import export have made and have sold the Software and the -Larger Works and to sublicense the foregoing rights on either these or other -terms - -This license is subject to the following condition - -The above copyright notice and either this complete permission notice or at a minimum -a reference to the UPL must be included in all copies or substantial portions of the -Software - -THE SOFTWARE IS PROVIDED AS IS WITHOUT WARRANTY OF ANY KIND EXPRESS OR IMPLIED -INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY FITNESS FOR A -PARTICULAR PURPOSE AND NONINFRINGEMENT IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM DAMAGES OR OTHER LIABILITY WHETHER IN AN ACTION OF -CONTRACT TORT OR OTHERWISE ARISING FROM OUT OF OR IN CONNECTION WITH THE SOFTWARE -OR THE USE OR OTHER DEALINGS IN THE SOFTWARE diff --git a/crates/zeta/license_regexes/zlib.regex b/crates/zeta/license_regexes/zlib.regex deleted file mode 100644 index 63a688d08c0fbc5a24783c1f8a2462979927ca6c..0000000000000000000000000000000000000000 --- a/crates/zeta/license_regexes/zlib.regex +++ /dev/null @@ -1,18 +0,0 @@ -.{0,512}This software is provided asis without any express or implied -warranty In no event will the authors be held liable for any damages -arising from the use of this software - -Permission is granted to anyone to use this software for any purpose -including commercial applications and to alter it and redistribute it -freely subject to the following restrictions - -1? The origin of this software must not be misrepresented you must not -claim that you wrote the original software If you use this software -in a product an acknowledgment in the product documentation would be -appreciated but is not required - -2? 
Altered source versions must be plainly marked as such and must not be -misrepresented as being the original software - -3? This notice may not be removed or altered from any source -distribution diff --git a/crates/zeta/src/license_detection.rs b/crates/zeta/src/license_detection.rs index 81314477e5383450c089be5291e02ba3f8478ac4..2939f8a0c491422099e14ae7cc76997a9031e7a0 100644 --- a/crates/zeta/src/license_detection.rs +++ b/crates/zeta/src/license_detection.rs @@ -1,19 +1,20 @@ use std::{ collections::BTreeSet, fmt::{Display, Formatter}, + ops::Range, path::{Path, PathBuf}, sync::{Arc, LazyLock}, }; +use anyhow::{Result, anyhow}; use fs::Fs; use futures::StreamExt as _; use gpui::{App, AppContext as _, Entity, Subscription, Task}; use itertools::Itertools; use postage::watch; use project::Worktree; -use regex::Regex; use strum::VariantArray; -use util::ResultExt as _; +use util::{ResultExt as _, maybe}; use worktree::ChildEntriesOptions; /// Matches the most common license locations, with US and UK English spelling. @@ -70,68 +71,170 @@ impl OpenSourceLicense { } } - /// Regexes to match the license text. These regexes are expected to match the entire file. Also - /// note that `canonicalize_license_text` removes everything but alphanumeric ascii characters. 
- pub fn regex(&self) -> &'static str { + pub fn patterns(&self) -> &'static [&'static str] { match self { - OpenSourceLicense::Apache2_0 => include_str!("../license_regexes/apache-2.0.regex"), - OpenSourceLicense::BSDZero => include_str!("../license_regexes/0bsd.regex"), - OpenSourceLicense::BSD => include_str!("../license_regexes/bsd.regex"), - OpenSourceLicense::ISC => include_str!("../license_regexes/isc.regex"), - OpenSourceLicense::MIT => include_str!("../license_regexes/mit.regex"), - OpenSourceLicense::UPL1_0 => include_str!("../license_regexes/upl-1.0.regex"), - OpenSourceLicense::Zlib => include_str!("../license_regexes/zlib.regex"), + OpenSourceLicense::Apache2_0 => &[ + include_str!("../license_patterns/apache-2.0-pattern"), + include_str!("../license_patterns/apache-2.0-reference-pattern"), + ], + OpenSourceLicense::BSDZero => &[include_str!("../license_patterns/0bsd-pattern")], + OpenSourceLicense::BSD => &[include_str!("../license_patterns/bsd-pattern")], + OpenSourceLicense::ISC => &[include_str!("../license_patterns/isc-pattern")], + OpenSourceLicense::MIT => &[include_str!("../license_patterns/mit-pattern")], + OpenSourceLicense::UPL1_0 => &[include_str!("../license_patterns/upl-1.0-pattern")], + OpenSourceLicense::Zlib => &[include_str!("../license_patterns/zlib-pattern")], } } } -fn detect_license(license: &str) -> Option { - static LICENSE_REGEX: LazyLock = LazyLock::new(|| { - let mut regex_string = String::new(); - let mut is_first = true; - for license in OpenSourceLicense::VARIANTS { - if is_first { - regex_string.push_str("^(?:("); - is_first = false; - } else { - regex_string.push_str(")|("); - } - regex_string.push_str(&canonicalize_license_regex(license.regex())); +// TODO: Consider using databake or similar to not parse at runtime. 
+static LICENSE_PATTERNS: LazyLock = LazyLock::new(|| { + let mut approximate_max_length = 0; + let mut patterns = Vec::new(); + for license in OpenSourceLicense::VARIANTS { + for pattern in license.patterns() { + let (pattern, length) = parse_pattern(pattern).unwrap(); + patterns.push((*license, pattern)); + approximate_max_length = approximate_max_length.max(length); + } + } + LicensePatterns { + patterns, + approximate_max_length, + } +}); + +fn detect_license(text: &str) -> Option { + let text = canonicalize_license_text(text); + for (license, pattern) in LICENSE_PATTERNS.patterns.iter() { + log::trace!("Checking if license is {}", license); + if check_pattern(&pattern, &text) { + return Some(*license); } - regex_string.push_str("))$"); - let regex = Regex::new(®ex_string).unwrap(); - assert_eq!(regex.captures_len(), OpenSourceLicense::VARIANTS.len() + 1); - regex - }); - - LICENSE_REGEX - .captures(&canonicalize_license_text(license)) - .and_then(|captures| { - let license = OpenSourceLicense::VARIANTS - .iter() - .enumerate() - .find(|(index, _)| captures.get(index + 1).is_some()) - .map(|(_, license)| *license); - if license.is_none() { - log::error!("bug: open source license regex matched without any capture groups"); + } + + None +} + +struct LicensePatterns { + patterns: Vec<(OpenSourceLicense, Vec)>, + approximate_max_length: usize, +} + +#[derive(Debug, Clone, Default, PartialEq, Eq)] +struct PatternPart { + /// Indicates that matching `text` is optional. Skipping `match_any_chars` is conditional on + /// matching `text`. + optional: bool, + /// Indicates the number of characters that can be skipped before matching `text`. + match_any_chars: Range, + /// The text to match, may be empty. + text: String, +} + +/// Lines that start with "-- " begin a `PatternPart`. `-- 1..10` specifies `match_any_chars: +/// 1..10`. `-- 1..10 optional:` additionally specifies `optional: true`. 
It's a parse error for a +/// line to start with `--` without matching this format. +/// +/// Text that does not have `--` prefixes participate in the `text` field and are canonicalized by +/// lowercasing, replacing all runs of whitespace with a single space, and otherwise only keeping +/// ascii alphanumeric characters. +fn parse_pattern(pattern_source: &str) -> Result<(Vec, usize)> { + let mut pattern = Vec::new(); + let mut part = PatternPart::default(); + let mut approximate_max_length = 0; + for line in pattern_source.lines() { + if let Some(directive) = line.trim().strip_prefix("--") { + if part != PatternPart::default() { + pattern.push(part); + part = PatternPart::default(); + } + let valid = maybe!({ + let directive_chunks = directive.split_whitespace().collect::>(); + if !(1..=2).contains(&directive_chunks.len()) { + return None; + } + if directive_chunks.len() == 2 { + part.optional = true; + } + let range_chunks = directive_chunks[0].split("..").collect::>(); + if range_chunks.len() != 2 { + return None; + } + part.match_any_chars.start = range_chunks[0].parse::().ok()?; + part.match_any_chars.end = range_chunks[1].parse::().ok()?; + if part.match_any_chars.start > part.match_any_chars.end { + return None; + } + approximate_max_length += part.match_any_chars.end; + Some(()) + }); + if valid.is_none() { + return Err(anyhow!("Invalid pattern directive: {}", line)); } - license - }) + continue; + } + approximate_max_length += line.len() + 1; + let line = canonicalize_license_text(line); + if line.is_empty() { + continue; + } + if !part.text.is_empty() { + part.text.push(' '); + } + part.text.push_str(&line); + } + if part != PatternPart::default() { + pattern.push(part); + } + Ok((pattern, approximate_max_length)) } -/// Canonicalizes the whitespace of license text. 
-fn canonicalize_license_regex(license: &str) -> String { - license - .split_ascii_whitespace() - .join(" ") - .to_ascii_lowercase() +/// Checks a pattern against text by iterating over the pattern parts in reverse order, and checking +/// matches with the end of a prefix of the input. Assumes that `canonicalize_license_text` has +/// already been applied to the input. +fn check_pattern(pattern: &[PatternPart], input: &str) -> bool { + let mut input_ix = input.len(); + let mut match_any_chars = 0..0; + for part in pattern.iter().rev() { + if part.text.is_empty() { + match_any_chars.start += part.match_any_chars.start; + match_any_chars.end += part.match_any_chars.end; + continue; + } + let mut matched = false; + for skip_count in match_any_chars.start..=match_any_chars.end { + let end_ix = input_ix.saturating_sub(skip_count); + if end_ix < part.text.len() { + break; + } + if input[..end_ix].ends_with(&part.text) { + matched = true; + input_ix = end_ix - part.text.len(); + match_any_chars = part.match_any_chars.clone(); + break; + } + } + if !matched && !part.optional { + log::trace!( + "Failed to match pattern `...{}` against input `...{}`", + &part.text[part.text.len().saturating_sub(128)..], + &input[input_ix.saturating_sub(128)..] + ); + return false; + } + } + match_any_chars.contains(&input_ix) } -/// Canonicalizes the whitespace of license text. +/// Canonicalizes license text by removing all non-alphanumeric characters, lowercasing, and turning +/// runs of whitespace into a single space. Unicode alphanumeric characters are intentionally +/// preserved since these should cause license mismatch when not within a portion of the license +/// where arbitrary text is allowed. 
fn canonicalize_license_text(license: &str) -> String { license .chars() - .filter(|c| c.is_ascii_alphanumeric() || c.is_ascii_whitespace()) + .filter(|c| c.is_ascii_whitespace() || c.is_alphanumeric()) .map(|c| c.to_ascii_lowercase()) .collect::() .split_ascii_whitespace() @@ -218,7 +321,7 @@ impl LicenseDetectionWatcher { async fn is_path_eligible(fs: &Arc, abs_path: PathBuf) -> Option { log::debug!("checking if `{abs_path:?}` is an open source license"); - // Resolve symlinks so that the file size from metadata is correct. + // resolve symlinks so that the file size from metadata is correct let Some(abs_path) = fs.canonicalize(&abs_path).await.ok() else { log::debug!( "`{abs_path:?}` license file probably deleted (error canonicalizing the path)" @@ -226,8 +329,13 @@ impl LicenseDetectionWatcher { return None; }; let metadata = fs.metadata(&abs_path).await.log_err()??; - // If the license file is >32kb it's unlikely to legitimately match any eligible license. - if metadata.len > 32768 { + if metadata.len > LICENSE_PATTERNS.approximate_max_length as u64 { + log::debug!( + "`{abs_path:?}` license file was skipped \ + because its size of {} bytes was larger than the max size of {} bytes", + metadata.len, + LICENSE_PATTERNS.approximate_max_length + ); return None; } let text = fs.load(&abs_path).await.log_err()?; @@ -262,7 +370,6 @@ mod tests { use gpui::TestAppContext; use serde_json::json; use settings::{Settings as _, SettingsStore}; - use unindent::unindent; use worktree::WorktreeSettings; use super::*; @@ -275,25 +382,8 @@ mod tests { #[track_caller] fn assert_matches_license(text: &str, license: OpenSourceLicense) { - if detect_license(text) != Some(license) { - let license_regex_text = canonicalize_license_regex(license.regex()); - let license_regex = Regex::new(&format!("^{}$", license_regex_text)).unwrap(); - let text = canonicalize_license_text(text); - let matched_regex = license_regex.is_match(&text); - if matched_regex { - panic!( - "The following text 
matches the individual regex for {}, \ - but not the combined one:\n```license-text\n{}\n```\n", - license, text - ); - } else { - panic!( - "The following text doesn't match the regex for {}:\n\ - ```license-text\n{}\n```\n\n```regex\n{}\n```\n", - license, text, license_regex_text - ); - } - } + assert_eq!(detect_license(text), Some(license)); + assert!(text.len() < LICENSE_PATTERNS.approximate_max_length); } /* @@ -325,7 +415,8 @@ mod tests { continue; }; let path_string = entry.path().to_string_lossy().to_string(); - match detect_license(&contents) { + let license = detect_license(&contents); + match license { Some(license) => detected.push((license, path_string)), None => unrecognized.push(path_string), } @@ -348,87 +439,38 @@ mod tests { } */ - #[test] - fn test_no_unicode_in_regexes() { - for license in OpenSourceLicense::VARIANTS { - assert!( - !license.regex().contains(|c: char| !c.is_ascii()), - "{}.regex contains unicode", - license.spdx_identifier() - ); - } - } - #[test] fn test_apache_positive_detection() { assert_matches_license(APACHE_2_0_TXT, OpenSourceLicense::Apache2_0); - - let license_with_appendix = format!( - r#"{APACHE_2_0_TXT} - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License."# - ); - assert_matches_license(&license_with_appendix, OpenSourceLicense::Apache2_0); - - // Sometimes people fill in the appendix with copyright info. - let license_with_copyright = license_with_appendix.replace( - "Copyright [yyyy] [name of copyright owner]", - "Copyright 2025 John Doe", + assert_matches_license( + include_str!("../license_examples/apache-2.0-ex1.txt"), + OpenSourceLicense::Apache2_0, ); - assert!(license_with_copyright != license_with_appendix); - assert_matches_license(&license_with_copyright, OpenSourceLicense::Apache2_0); - assert_matches_license( - include_str!("../../../LICENSE-APACHE"), + include_str!("../license_examples/apache-2.0-ex2.txt"), OpenSourceLicense::Apache2_0, ); - assert_matches_license( - include_str!("../license_examples/apache-2.0-ex1.txt"), + include_str!("../license_examples/apache-2.0-ex3.txt"), OpenSourceLicense::Apache2_0, ); assert_matches_license( - include_str!("../license_examples/apache-2.0-ex2.txt"), + include_str!("../license_examples/apache-2.0-ex4.txt"), OpenSourceLicense::Apache2_0, ); assert_matches_license( - include_str!("../license_examples/apache-2.0-ex3.txt"), + include_str!("../../../LICENSE-APACHE"), OpenSourceLicense::Apache2_0, ); } #[test] fn test_apache_negative_detection() { - assert!( + assert_eq!( detect_license(&format!( "{APACHE_2_0_TXT}\n\nThe terms in this license are void if P=NP." 
- )) - .is_none() + )), + None ); } @@ -490,7 +532,7 @@ mod tests { This project is dual licensed under the ISC License and the MIT License."# ); - assert!(detect_license(&license_text).is_none()); + assert_eq!(detect_license(&license_text), None); } #[test] @@ -517,7 +559,7 @@ mod tests { This project is dual licensed under the MIT License and the Apache License, Version 2.0."# ); - assert!(detect_license(&license_text).is_none()); + assert_eq!(detect_license(&license_text), None); } #[test] @@ -533,7 +575,7 @@ mod tests { This project is dual licensed under the UPL License and the MIT License."# ); - assert!(detect_license(&license_text).is_none()); + assert_eq!(detect_license(&license_text), None); } #[test] @@ -614,44 +656,6 @@ mod tests { assert_eq!(canonicalize_license_text(input), expected); } - #[test] - fn test_license_detection_canonicalizes_whitespace() { - let mit_with_weird_spacing = unindent( - r#" - MIT License - - - Copyright (c) 2024 John Doe - - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - - - The above copyright notice and this permission notice shall be included in all - copies or substantial portions of the Software. - - - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - SOFTWARE. - "# - .trim(), - ); - - assert_matches_license(&mit_with_weird_spacing, OpenSourceLicense::MIT); - } - fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { let settings_store = SettingsStore::test(cx); From fe0ab30e8fcdd66402131925c05fd38472d877ee Mon Sep 17 00:00:00 2001 From: Jason Lee Date: Sun, 31 Aug 2025 16:14:57 +0800 Subject: [PATCH 33/54] Fix auto size rendering of SVG images in Markdown (#36663) Release Notes: - Fixed auto size rendering of SVG images in Markdown. ## Before image image ## After image image For GPUI example ``` cargo run -p gpui --example image ``` SCR-20250821-ojoy --- crates/gpui/examples/image/image.rs | 88 +++++++++++++++-------------- crates/gpui/src/assets.rs | 11 +++- crates/gpui/src/elements/img.rs | 23 ++++---- 3 files changed, 68 insertions(+), 54 deletions(-) diff --git a/crates/gpui/examples/image/image.rs b/crates/gpui/examples/image/image.rs index bd1708e8c453656b2b7047b428f3dc63409eddec..34a510f76db396a91a225dffe21fcec986a62e20 100644 --- a/crates/gpui/examples/image/image.rs +++ b/crates/gpui/examples/image/image.rs @@ -75,65 +75,71 @@ impl Render for ImageShowcase { fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { div() .id("main") + .bg(gpui::white()) .overflow_y_scroll() .p_5() .size_full() - .flex() - .flex_col() - .justify_center() - .items_center() - .gap_8() - .bg(rgb(0xffffff)) .child( div() .flex() - .flex_row() + .flex_col() .justify_center() .items_center() .gap_8() - .child(ImageContainer::new( - "Image loaded from a local file", - self.local_resource.clone(), - )) - .child(ImageContainer::new( - "Image loaded from a remote resource", - self.remote_resource.clone(), + .child(img( + 
"https://github.com/zed-industries/zed/actions/workflows/ci.yml/badge.svg", )) - .child(ImageContainer::new( - "Image loaded from an asset", - self.asset_resource.clone(), - )), - ) - .child( - div() - .flex() - .flex_row() - .gap_8() .child( div() - .flex_col() - .child("Auto Width") - .child(img("https://picsum.photos/800/400").h(px(180.))), + .flex() + .flex_row() + .justify_center() + .items_center() + .gap_8() + .child(ImageContainer::new( + "Image loaded from a local file", + self.local_resource.clone(), + )) + .child(ImageContainer::new( + "Image loaded from a remote resource", + self.remote_resource.clone(), + )) + .child(ImageContainer::new( + "Image loaded from an asset", + self.asset_resource.clone(), + )), + ) + .child( + div() + .flex() + .flex_row() + .gap_8() + .child( + div() + .flex_col() + .child("Auto Width") + .child(img("https://picsum.photos/800/400").h(px(180.))), + ) + .child( + div() + .flex_col() + .child("Auto Height") + .child(img("https://picsum.photos/800/400").w(px(180.))), + ), ) .child( div() + .flex() .flex_col() - .child("Auto Height") - .child(img("https://picsum.photos/800/400").w(px(180.))), + .justify_center() + .items_center() + .w_full() + .border_1() + .border_color(rgb(0xC0C0C0)) + .child("image with max width 100%") + .child(img("https://picsum.photos/800/400").max_w_full()), ), ) - .child( - div() - .flex() - .flex_col() - .justify_center() - .items_center() - .w_full() - .border_1() - .border_color(rgb(0xC0C0C0)) - .child("image with max width 100%") - .child(img("https://picsum.photos/800/400").max_w_full()), - ) } } diff --git a/crates/gpui/src/assets.rs b/crates/gpui/src/assets.rs index 70a07c11e9239c048f9eaede8cae31a79acf779c..8930b58f8d4fc0423b7d6f41755189a03d8b8b84 100644 --- a/crates/gpui/src/assets.rs +++ b/crates/gpui/src/assets.rs @@ -1,4 +1,4 @@ -use crate::{DevicePixels, Result, SharedString, Size, size}; +use crate::{DevicePixels, Pixels, Result, SharedString, Size, size}; use smallvec::SmallVec; use 
image::{Delay, Frame}; @@ -42,6 +42,8 @@ pub(crate) struct RenderImageParams { pub struct RenderImage { /// The ID associated with this image pub id: ImageId, + /// The scale factor of this image on render. + pub(crate) scale_factor: f32, data: SmallVec<[Frame; 1]>, } @@ -60,6 +62,7 @@ impl RenderImage { Self { id: ImageId(NEXT_ID.fetch_add(1, SeqCst)), + scale_factor: 1.0, data: data.into(), } } @@ -77,6 +80,12 @@ impl RenderImage { size(width.into(), height.into()) } + /// Get the size of this image, in pixels for display, adjusted for the scale factor. + pub(crate) fn render_size(&self, frame_index: usize) -> Size { + self.size(frame_index) + .map(|v| (v.0 as f32 / self.scale_factor).into()) + } + /// Get the delay of this frame from the previous pub fn delay(&self, frame_index: usize) -> Delay { self.data[frame_index].delay() diff --git a/crates/gpui/src/elements/img.rs b/crates/gpui/src/elements/img.rs index 893860d7e1b781144b2d8de06ae2135420854ed7..40d1b5e44981b7cfd0de92ddbb10f2f715008c70 100644 --- a/crates/gpui/src/elements/img.rs +++ b/crates/gpui/src/elements/img.rs @@ -332,20 +332,18 @@ impl Element for Img { state.started_loading = None; } - let image_size = data.size(frame_index); - style.aspect_ratio = - Some(image_size.width.0 as f32 / image_size.height.0 as f32); + let image_size = data.render_size(frame_index); + style.aspect_ratio = Some(image_size.width / image_size.height); if let Length::Auto = style.size.width { style.size.width = match style.size.height { Length::Definite(DefiniteLength::Absolute( AbsoluteLength::Pixels(height), )) => Length::Definite( - px(image_size.width.0 as f32 * height.0 - / image_size.height.0 as f32) - .into(), + px(image_size.width.0 * height.0 / image_size.height.0) + .into(), ), - _ => Length::Definite(px(image_size.width.0 as f32).into()), + _ => Length::Definite(image_size.width.into()), }; } @@ -354,11 +352,10 @@ impl Element for Img { Length::Definite(DefiniteLength::Absolute( AbsoluteLength::Pixels(width), )) 
=> Length::Definite( - px(image_size.height.0 as f32 * width.0 - / image_size.width.0 as f32) - .into(), + px(image_size.height.0 * width.0 / image_size.width.0) + .into(), ), - _ => Length::Definite(px(image_size.height.0 as f32).into()), + _ => Length::Definite(image_size.height.into()), }; } @@ -701,7 +698,9 @@ impl Asset for ImageAssetLoader { swap_rgba_pa_to_bgra(pixel); } - RenderImage::new(SmallVec::from_elem(Frame::new(buffer), 1)) + let mut image = RenderImage::new(SmallVec::from_elem(Frame::new(buffer), 1)); + image.scale_factor = SMOOTH_SVG_SCALE_FACTOR; + image }; Ok(Arc::new(data)) From e1155848962bcd04ea3ab7b2493a63e468d0ac6f Mon Sep 17 00:00:00 2001 From: Dan Dascalescu Date: Sun, 31 Aug 2025 11:19:25 +0300 Subject: [PATCH 34/54] docs: Copyedit debugger.md and clarify settings location (#36996) Release Notes: - N/A --- docs/src/debugger.md | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/docs/src/debugger.md b/docs/src/debugger.md index 7cfbf63cd8266f7865e948d7da1997c1d81a1f95..b018ea904b2c480bfd5ae6b405d65fe355a5ec2e 100644 --- a/docs/src/debugger.md +++ b/docs/src/debugger.md @@ -78,11 +78,10 @@ While configuration fields are debug adapter-dependent, most adapters support th // The debug adapter that Zed should use to debug the program "adapter": "Example adapter name", // Request: - // - launch: Zed will launch the program if specified or shows a debug terminal with the right configuration - // - attach: Zed will attach to a running program to debug it or when the process_id is not specified we will show a process picker (only supported for node currently) + // - launch: Zed will launch the program if specified, or show a debug terminal with the right configuration + // - attach: Zed will attach to a running program to debug it, or when the process_id is not specified, will show a process picker (only supported for node currently) "request": "launch", - // program: The program that you want to debug - // This field 
supports path resolution with ~ or . symbols + // The program to debug. This field supports path resolution with ~ or . symbols. "program": "path_to_program", // cwd: defaults to the current working directory of your project ($ZED_WORKTREE_ROOT) "cwd": "$ZED_WORKTREE_ROOT" @@ -148,6 +147,8 @@ The debug adapter will then stop whenever an exception of a given kind occurs. W ## Settings +The settings for the debugger are grouped under the `debugger` key in `settings.json`: + - `dock`: Determines the position of the debug panel in the UI. - `stepping_granularity`: Determines the stepping granularity. - `save_breakpoints`: Whether the breakpoints should be reused across Zed sessions. From d80f13242b410af03e54a0bdbe9c6007b9feb6d8 Mon Sep 17 00:00:00 2001 From: Gerd Augsburg Date: Sun, 31 Aug 2025 10:26:28 +0200 Subject: [PATCH 35/54] Support for "Insert" from character key location (#37219) Release Notes: - Added support for the Insert-Key from a character key location for keyboard layouts like neo2 --- crates/gpui/src/platform/linux/platform.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/gpui/src/platform/linux/platform.rs b/crates/gpui/src/platform/linux/platform.rs index 8bd89fc399cb8215748467090b973f3f4ee00759..196e5b65d04125ca90c588212c140d3a63345c2e 100644 --- a/crates/gpui/src/platform/linux/platform.rs +++ b/crates/gpui/src/platform/linux/platform.rs @@ -848,6 +848,7 @@ impl crate::Keystroke { Keysym::Down => "down".to_owned(), Keysym::Home => "home".to_owned(), Keysym::End => "end".to_owned(), + Keysym::Insert => "insert".to_owned(), _ => { let name = xkb::keysym_get_name(key_sym).to_lowercase(); From 1ca5e84019a1541943863f281f9b1804bba7dee1 Mon Sep 17 00:00:00 2001 From: Remco Smits Date: Sun, 31 Aug 2025 10:43:24 +0200 Subject: [PATCH 36/54] markdown: Add HTML `img` tag support (#36700) Closes #21992 Screenshot 2025-08-21 at 18 09 24 Code example: ```markdown # Html Tag Description of image # Html Tag with width and height Description of image # 
Html Tag with style attribute with width and height Description of image # Normal Tag ![alt text](https://picsum.photos/200/300) ``` Release Notes: - Markdown: Added HTML `` tag support --- Cargo.lock | 2 + crates/markdown_preview/Cargo.toml | 6 +- .../markdown_preview/src/markdown_elements.rs | 17 +- .../markdown_preview/src/markdown_parser.rs | 372 +++++++++++++++++- .../markdown_preview/src/markdown_renderer.rs | 137 ++++--- 5 files changed, 456 insertions(+), 78 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ab3b713a113a95183e5f394bae0f1a31301da3f1..6fc771894f589d98dec459bf877e8fa35f56f2aa 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9953,9 +9953,11 @@ dependencies = [ "editor", "fs", "gpui", + "html5ever 0.27.0", "language", "linkify", "log", + "markup5ever_rcdom", "pretty_assertions", "pulldown-cmark 0.12.2", "settings", diff --git a/crates/markdown_preview/Cargo.toml b/crates/markdown_preview/Cargo.toml index ebdd8a9eb6c0ffbe99f7c14d1e97b13b3a95d8a3..55646cdcf43617223665e9dc48f13c55f966d99d 100644 --- a/crates/markdown_preview/Cargo.toml +++ b/crates/markdown_preview/Cargo.toml @@ -19,19 +19,21 @@ anyhow.workspace = true async-recursion.workspace = true collections.workspace = true editor.workspace = true +fs.workspace = true gpui.workspace = true +html5ever.workspace = true language.workspace = true linkify.workspace = true log.workspace = true +markup5ever_rcdom.workspace = true pretty_assertions.workspace = true pulldown-cmark.workspace = true settings.workspace = true theme.workspace = true ui.workspace = true util.workspace = true -workspace.workspace = true workspace-hack.workspace = true -fs.workspace = true +workspace.workspace = true [dev-dependencies] editor = { workspace = true, features = ["test-support"] } diff --git a/crates/markdown_preview/src/markdown_elements.rs b/crates/markdown_preview/src/markdown_elements.rs index a570e79f5344d0f35693072f82f947004e24ac65..560e468439efce22aa72d91054d68d491e125b23 100644 --- 
a/crates/markdown_preview/src/markdown_elements.rs +++ b/crates/markdown_preview/src/markdown_elements.rs @@ -1,5 +1,6 @@ use gpui::{ - FontStyle, FontWeight, HighlightStyle, SharedString, StrikethroughStyle, UnderlineStyle, px, + DefiniteLength, FontStyle, FontWeight, HighlightStyle, SharedString, StrikethroughStyle, + UnderlineStyle, px, }; use language::HighlightId; use std::{fmt::Display, ops::Range, path::PathBuf}; @@ -15,6 +16,7 @@ pub enum ParsedMarkdownElement { /// A paragraph of text and other inline elements. Paragraph(MarkdownParagraph), HorizontalRule(Range), + Image(Image), } impl ParsedMarkdownElement { @@ -30,6 +32,7 @@ impl ParsedMarkdownElement { MarkdownParagraphChunk::Image(image) => image.source_range.clone(), }, Self::HorizontalRule(range) => range.clone(), + Self::Image(image) => image.source_range.clone(), }) } @@ -290,6 +293,8 @@ pub struct Image { pub link: Link, pub source_range: Range, pub alt_text: Option, + pub width: Option, + pub height: Option, } impl Image { @@ -303,10 +308,20 @@ impl Image { source_range, link, alt_text: None, + width: None, + height: None, }) } pub fn set_alt_text(&mut self, alt_text: SharedString) { self.alt_text = Some(alt_text); } + + pub fn set_width(&mut self, width: DefiniteLength) { + self.width = Some(width); + } + + pub fn set_height(&mut self, height: DefiniteLength) { + self.height = Some(height); + } } diff --git a/crates/markdown_preview/src/markdown_parser.rs b/crates/markdown_preview/src/markdown_parser.rs index b51b98a2ed64c72d76a8ca6e7316b6866bdcd9fe..1b116c50d9820dc4fea9d6b2e5816543d75e7d52 100644 --- a/crates/markdown_preview/src/markdown_parser.rs +++ b/crates/markdown_preview/src/markdown_parser.rs @@ -1,10 +1,12 @@ use crate::markdown_elements::*; use async_recursion::async_recursion; use collections::FxHashMap; -use gpui::FontWeight; +use gpui::{DefiniteLength, FontWeight, px, relative}; +use html5ever::{ParseOpts, local_name, parse_document, tendril::TendrilSink}; use 
language::LanguageRegistry; +use markup5ever_rcdom::RcDom; use pulldown_cmark::{Alignment, Event, Options, Parser, Tag, TagEnd}; -use std::{ops::Range, path::PathBuf, sync::Arc, vec}; +use std::{cell::RefCell, collections::HashMap, ops::Range, path::PathBuf, rc::Rc, sync::Arc, vec}; pub async fn parse_markdown( markdown_input: &str, @@ -172,9 +174,14 @@ impl<'a> MarkdownParser<'a> { self.cursor += 1; - let code_block = self.parse_code_block(language).await; + let code_block = self.parse_code_block(language).await?; Some(vec![ParsedMarkdownElement::CodeBlock(code_block)]) } + Tag::HtmlBlock => { + self.cursor += 1; + + Some(self.parse_html_block().await) + } _ => None, }, Event::Rule => { @@ -378,7 +385,7 @@ impl<'a> MarkdownParser<'a> { TagEnd::Image => { if let Some(mut image) = image.take() { if !text.is_empty() { - image.alt_text = Some(std::mem::take(&mut text).into()); + image.set_alt_text(std::mem::take(&mut text).into()); } markdown_text_like.push(MarkdownParagraphChunk::Image(image)); } @@ -695,13 +702,22 @@ impl<'a> MarkdownParser<'a> { } } - async fn parse_code_block(&mut self, language: Option) -> ParsedMarkdownCodeBlock { - let (_event, source_range) = self.previous().unwrap(); + async fn parse_code_block( + &mut self, + language: Option, + ) -> Option { + let Some((_event, source_range)) = self.previous() else { + return None; + }; + let source_range = source_range.clone(); let mut code = String::new(); while !self.eof() { - let (current, _source_range) = self.current().unwrap(); + let Some((current, _source_range)) = self.current() else { + break; + }; + match current { Event::Text(text) => { code.push_str(text); @@ -734,23 +750,190 @@ impl<'a> MarkdownParser<'a> { None }; - ParsedMarkdownCodeBlock { + Some(ParsedMarkdownCodeBlock { source_range, contents: code.into(), language, highlights, + }) + } + + async fn parse_html_block(&mut self) -> Vec { + let mut elements = Vec::new(); + let Some((_event, _source_range)) = self.previous() else { + return 
elements; + }; + + while !self.eof() { + let Some((current, source_range)) = self.current() else { + break; + }; + let source_range = source_range.clone(); + match current { + Event::Html(html) => { + let mut cursor = std::io::Cursor::new(html.as_bytes()); + let Some(dom) = parse_document(RcDom::default(), ParseOpts::default()) + .from_utf8() + .read_from(&mut cursor) + .ok() + else { + self.cursor += 1; + continue; + }; + + self.cursor += 1; + + self.parse_html_node(source_range, &dom.document, &mut elements); + } + Event::End(TagEnd::CodeBlock) => { + self.cursor += 1; + break; + } + _ => { + break; + } + } + } + + elements + } + + fn parse_html_node( + &self, + source_range: Range, + node: &Rc, + elements: &mut Vec, + ) { + match &node.data { + markup5ever_rcdom::NodeData::Document => { + self.consume_children(source_range, node, elements); + } + markup5ever_rcdom::NodeData::Doctype { .. } => {} + markup5ever_rcdom::NodeData::Text { contents } => { + elements.push(ParsedMarkdownElement::Paragraph(vec![ + MarkdownParagraphChunk::Text(ParsedMarkdownText { + source_range, + contents: contents.borrow().to_string(), + highlights: Vec::default(), + region_ranges: Vec::default(), + regions: Vec::default(), + }), + ])); + } + markup5ever_rcdom::NodeData::Comment { .. } => {} + markup5ever_rcdom::NodeData::Element { name, attrs, .. } => { + if local_name!("img") == name.local { + if let Some(image) = self.extract_image(source_range, attrs) { + elements.push(ParsedMarkdownElement::Image(image)); + } + } else { + self.consume_children(source_range, node, elements); + } + } + markup5ever_rcdom::NodeData::ProcessingInstruction { .. 
} => {} + } + } + + fn consume_children( + &self, + source_range: Range, + node: &Rc, + elements: &mut Vec, + ) { + for node in node.children.borrow().iter() { + self.parse_html_node(source_range.clone(), node, elements); + } + } + + fn attr_value( + attrs: &RefCell>, + name: html5ever::LocalName, + ) -> Option { + attrs.borrow().iter().find_map(|attr| { + if attr.name.local == name { + Some(attr.value.to_string()) + } else { + None + } + }) + } + + fn extract_styles_from_attributes( + attrs: &RefCell>, + ) -> HashMap { + let mut styles = HashMap::new(); + + if let Some(style) = Self::attr_value(attrs, local_name!("style")) { + for decl in style.split(';') { + let mut parts = decl.splitn(2, ':'); + if let Some((key, value)) = parts.next().zip(parts.next()) { + styles.insert( + key.trim().to_lowercase().to_string(), + value.trim().to_string(), + ); + } + } + } + + styles + } + + fn extract_image( + &self, + source_range: Range, + attrs: &RefCell>, + ) -> Option { + let src = Self::attr_value(attrs, local_name!("src"))?; + + let mut image = Image::identify(src, source_range, self.file_location_directory.clone())?; + + if let Some(alt) = Self::attr_value(attrs, local_name!("alt")) { + image.set_alt_text(alt.into()); + } + + let styles = Self::extract_styles_from_attributes(attrs); + + if let Some(width) = Self::attr_value(attrs, local_name!("width")) + .or_else(|| styles.get("width").cloned()) + .and_then(|width| Self::parse_length(&width)) + { + image.set_width(width); + } + + if let Some(height) = Self::attr_value(attrs, local_name!("height")) + .or_else(|| styles.get("height").cloned()) + .and_then(|height| Self::parse_length(&height)) + { + image.set_height(height); + } + + Some(image) + } + + /// Parses the width/height attribute value of an html element (e.g. 
img element) + fn parse_length(value: &str) -> Option { + if value.ends_with("%") { + value + .trim_end_matches("%") + .parse::() + .ok() + .map(|value| relative(value / 100.)) + } else { + value + .trim_end_matches("px") + .parse() + .ok() + .map(|value| px(value).into()) } } } #[cfg(test)] mod tests { - use core::panic; - use super::*; - use ParsedMarkdownListItemType::*; - use gpui::BackgroundExecutor; + use core::panic; + use gpui::{AbsoluteLength, BackgroundExecutor, DefiniteLength}; use language::{ HighlightId, Language, LanguageConfig, LanguageMatcher, LanguageRegistry, tree_sitter_rust, }; @@ -925,6 +1108,8 @@ mod tests { url: "https://blog.logrocket.com/wp-content/uploads/2024/04/exploring-zed-open-source-code-editor-rust-2.png".to_string(), }, alt_text: Some("test".into()), + height: None, + width: None, },) ); } @@ -946,6 +1131,8 @@ mod tests { url: "http://example.com/foo.png".to_string(), }, alt_text: None, + height: None, + width: None, },) ); } @@ -965,6 +1152,8 @@ mod tests { url: "http://example.com/foo.png".to_string(), }, alt_text: Some("foo bar baz".into()), + height: None, + width: None, }),], ); } @@ -990,6 +1179,8 @@ mod tests { url: "http://example.com/foo.png".to_string(), }, alt_text: Some("foo".into()), + height: None, + width: None, }), MarkdownParagraphChunk::Text(ParsedMarkdownText { source_range: 0..81, @@ -1004,11 +1195,168 @@ mod tests { url: "http://example.com/bar.png".to_string(), }, alt_text: Some("bar".into()), + height: None, + width: None, }) ] ); } + #[test] + fn test_parse_length() { + // Test percentage values + assert_eq!( + MarkdownParser::parse_length("50%"), + Some(DefiniteLength::Fraction(0.5)) + ); + assert_eq!( + MarkdownParser::parse_length("100%"), + Some(DefiniteLength::Fraction(1.0)) + ); + assert_eq!( + MarkdownParser::parse_length("25%"), + Some(DefiniteLength::Fraction(0.25)) + ); + assert_eq!( + MarkdownParser::parse_length("0%"), + Some(DefiniteLength::Fraction(0.0)) + ); + + // Test pixel values + 
assert_eq!( + MarkdownParser::parse_length("100px"), + Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(100.0)))) + ); + assert_eq!( + MarkdownParser::parse_length("50px"), + Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(50.0)))) + ); + assert_eq!( + MarkdownParser::parse_length("0px"), + Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(0.0)))) + ); + + // Test values without units (should be treated as pixels) + assert_eq!( + MarkdownParser::parse_length("100"), + Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(100.0)))) + ); + assert_eq!( + MarkdownParser::parse_length("42"), + Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(42.0)))) + ); + + // Test invalid values + assert_eq!(MarkdownParser::parse_length("invalid"), None); + assert_eq!(MarkdownParser::parse_length("px"), None); + assert_eq!(MarkdownParser::parse_length("%"), None); + assert_eq!(MarkdownParser::parse_length(""), None); + assert_eq!(MarkdownParser::parse_length("abc%"), None); + assert_eq!(MarkdownParser::parse_length("abcpx"), None); + + // Test decimal values + assert_eq!( + MarkdownParser::parse_length("50.5%"), + Some(DefiniteLength::Fraction(0.505)) + ); + assert_eq!( + MarkdownParser::parse_length("100.25px"), + Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(100.25)))) + ); + assert_eq!( + MarkdownParser::parse_length("42.0"), + Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(42.0)))) + ); + } + + #[gpui::test] + async fn test_html_image_tag() { + let parsed = parse("").await; + + let ParsedMarkdownElement::Image(image) = &parsed.children[0] else { + panic!("Expected a image element"); + }; + assert_eq!( + image.clone(), + Image { + source_range: 0..40, + link: Link::Web { + url: "http://example.com/foo.png".to_string(), + }, + alt_text: None, + height: None, + width: None, + }, + ); + } + + #[gpui::test] + async fn test_html_image_tag_with_alt_text() { + let parsed = parse("\"Foo\"").await; + + let ParsedMarkdownElement::Image(image) 
= &parsed.children[0] else { + panic!("Expected a image element"); + }; + assert_eq!( + image.clone(), + Image { + source_range: 0..50, + link: Link::Web { + url: "http://example.com/foo.png".to_string(), + }, + alt_text: Some("Foo".into()), + height: None, + width: None, + }, + ); + } + + #[gpui::test] + async fn test_html_image_tag_with_height_and_width() { + let parsed = + parse("").await; + + let ParsedMarkdownElement::Image(image) = &parsed.children[0] else { + panic!("Expected a image element"); + }; + assert_eq!( + image.clone(), + Image { + source_range: 0..65, + link: Link::Web { + url: "http://example.com/foo.png".to_string(), + }, + alt_text: None, + height: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(100.)))), + width: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(200.)))), + }, + ); + } + + #[gpui::test] + async fn test_html_image_style_tag_with_height_and_width() { + let parsed = parse( + "", + ) + .await; + + let ParsedMarkdownElement::Image(image) = &parsed.children[0] else { + panic!("Expected a image element"); + }; + assert_eq!( + image.clone(), + Image { + source_range: 0..75, + link: Link::Web { + url: "http://example.com/foo.png".to_string(), + }, + alt_text: None, + height: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(100.)))), + width: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(200.)))), + }, + ); + } + #[gpui::test] async fn test_header_only_table() { let markdown = "\ diff --git a/crates/markdown_preview/src/markdown_renderer.rs b/crates/markdown_preview/src/markdown_renderer.rs index b0b10e927cb3bbc4f0b8366cc77b091c9df773d2..b07b4686a4eaebdfaef804ba903b6575f56ae479 100644 --- a/crates/markdown_preview/src/markdown_renderer.rs +++ b/crates/markdown_preview/src/markdown_renderer.rs @@ -1,5 +1,5 @@ use crate::markdown_elements::{ - HeadingLevel, Link, MarkdownParagraph, MarkdownParagraphChunk, ParsedMarkdown, + HeadingLevel, Image, Link, MarkdownParagraph, MarkdownParagraphChunk, ParsedMarkdown, 
ParsedMarkdownBlockQuote, ParsedMarkdownCodeBlock, ParsedMarkdownElement, ParsedMarkdownHeading, ParsedMarkdownListItem, ParsedMarkdownListItemType, ParsedMarkdownTable, ParsedMarkdownTableAlignment, ParsedMarkdownTableRow, @@ -164,6 +164,7 @@ pub fn render_markdown_block(block: &ParsedMarkdownElement, cx: &mut RenderConte BlockQuote(block_quote) => render_markdown_block_quote(block_quote, cx), CodeBlock(code_block) => render_markdown_code_block(code_block, cx), HorizontalRule(_) => render_markdown_rule(cx), + Image(image) => render_markdown_image(image, cx), } } @@ -722,65 +723,7 @@ fn render_markdown_text(parsed_new: &MarkdownParagraph, cx: &mut RenderContext) } MarkdownParagraphChunk::Image(image) => { - let image_resource = match image.link.clone() { - Link::Web { url } => Resource::Uri(url.into()), - Link::Path { path, .. } => Resource::Path(Arc::from(path)), - }; - - let element_id = cx.next_id(&image.source_range); - - let image_element = div() - .id(element_id) - .cursor_pointer() - .child( - img(ImageSource::Resource(image_resource)) - .max_w_full() - .with_fallback({ - let alt_text = image.alt_text.clone(); - move || div().children(alt_text.clone()).into_any_element() - }), - ) - .tooltip({ - let link = image.link.clone(); - move |_, cx| { - InteractiveMarkdownElementTooltip::new( - Some(link.to_string()), - "open image", - cx, - ) - .into() - } - }) - .on_click({ - let workspace = workspace_clone.clone(); - let link = image.link.clone(); - move |_, window, cx| { - if window.modifiers().secondary() { - match &link { - Link::Web { url } => cx.open_url(url), - Link::Path { path, .. 
} => { - if let Some(workspace) = &workspace { - _ = workspace.update(cx, |workspace, cx| { - workspace - .open_abs_path( - path.clone(), - OpenOptions { - visible: Some(OpenVisible::None), - ..Default::default() - }, - window, - cx, - ) - .detach(); - }); - } - } - } - } - } - }) - .into_any(); - any_element.push(image_element); + any_element.push(render_markdown_image(image, cx)); } } } @@ -793,18 +736,86 @@ fn render_markdown_rule(cx: &mut RenderContext) -> AnyElement { div().py(cx.scaled_rems(0.5)).child(rule).into_any() } +fn render_markdown_image(image: &Image, cx: &mut RenderContext) -> AnyElement { + let image_resource = match image.link.clone() { + Link::Web { url } => Resource::Uri(url.into()), + Link::Path { path, .. } => Resource::Path(Arc::from(path)), + }; + + let element_id = cx.next_id(&image.source_range); + let workspace = cx.workspace.clone(); + + div() + .id(element_id) + .cursor_pointer() + .child( + img(ImageSource::Resource(image_resource)) + .max_w_full() + .with_fallback({ + let alt_text = image.alt_text.clone(); + move || div().children(alt_text.clone()).into_any_element() + }) + .when_some(image.height, |this, height| this.h(height)) + .when_some(image.width, |this, width| this.w(width)), + ) + .tooltip({ + let link = image.link.clone(); + let alt_text = image.alt_text.clone(); + move |_, cx| { + InteractiveMarkdownElementTooltip::new( + Some(alt_text.clone().unwrap_or(link.to_string().into())), + "open image", + cx, + ) + .into() + } + }) + .on_click({ + let link = image.link.clone(); + move |_, window, cx| { + if window.modifiers().secondary() { + match &link { + Link::Web { url } => cx.open_url(url), + Link::Path { path, .. 
} => { + if let Some(workspace) = &workspace { + _ = workspace.update(cx, |workspace, cx| { + workspace + .open_abs_path( + path.clone(), + OpenOptions { + visible: Some(OpenVisible::None), + ..Default::default() + }, + window, + cx, + ) + .detach(); + }); + } + } + } + } + } + }) + .into_any() +} + struct InteractiveMarkdownElementTooltip { tooltip_text: Option, - action_text: String, + action_text: SharedString, } impl InteractiveMarkdownElementTooltip { - pub fn new(tooltip_text: Option, action_text: &str, cx: &mut App) -> Entity { + pub fn new( + tooltip_text: Option, + action_text: impl Into, + cx: &mut App, + ) -> Entity { let tooltip_text = tooltip_text.map(|t| util::truncate_and_trailoff(&t, 50).into()); cx.new(|_cx| Self { tooltip_text, - action_text: action_text.to_string(), + action_text: action_text.into(), }) } } From f348737e8cfac9da2b6579ee7ce86ae788cc09c6 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sun, 31 Aug 2025 08:54:22 +0000 Subject: [PATCH 37/54] Update Rust crate tracing-subscriber to v0.3.20 [SECURITY] (#37195) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [tracing-subscriber](https://tokio.rs) ([source](https://redirect.github.com/tokio-rs/tracing)) | dependencies | patch | `0.3.19` -> `0.3.20` | ### GitHub Vulnerability Alerts #### [CVE-2025-58160](https://redirect.github.com/tokio-rs/tracing/security/advisories/GHSA-xwfj-jgwm-7wp5) ### Impact Previous versions of tracing-subscriber were vulnerable to ANSI escape sequence injection attacks. 
Untrusted user input containing ANSI escape sequences could be injected into terminal output when logged, potentially allowing attackers to: - Manipulate terminal title bars - Clear screens or modify terminal display - Potentially mislead users through terminal manipulation In isolation, impact is minimal, however security issues have been found in terminal emulators that enabled an attacker to use ANSI escape sequences via logs to exploit vulnerabilities in the terminal emulator. ### Patches `tracing-subscriber` version 0.3.20 fixes this vulnerability by escaping ANSI control characters in when writing events to destinations that may be printed to the terminal. ### Workarounds Avoid printing logs to terminal emulators without escaping ANSI control sequences. ### References https://www.packetlabs.net/posts/weaponizing-ansi-escape-sequences/ ### Acknowledgments We would like to thank [zefr0x](http://github.com/zefr0x) who responsibly reported the issue at `security@tokio.rs`. If you believe you have found a security vulnerability in any tokio-rs project, please email us at `security@tokio.rs`. --- ### Release Notes
tokio-rs/tracing (tracing-subscriber) ### [`v0.3.20`](https://redirect.github.com/tokio-rs/tracing/releases/tag/tracing-subscriber-0.3.20): tracing-subscriber 0.3.20 [Compare Source](https://redirect.github.com/tokio-rs/tracing/compare/tracing-subscriber-0.3.19...tracing-subscriber-0.3.20) **Security Fix**: ANSI Escape Sequence Injection (CVE-TBD) #### Impact Previous versions of tracing-subscriber were vulnerable to ANSI escape sequence injection attacks. Untrusted user input containing ANSI escape sequences could be injected into terminal output when logged, potentially allowing attackers to: - Manipulate terminal title bars - Clear screens or modify terminal display - Potentially mislead users through terminal manipulation In isolation, impact is minimal, however security issues have been found in terminal emulators that enabled an attacker to use ANSI escape sequences via logs to exploit vulnerabilities in the terminal emulator. #### Solution Version 0.3.20 fixes this vulnerability by escaping ANSI control characters in when writing events to destinations that may be printed to the terminal. #### Affected Versions All versions of tracing-subscriber prior to 0.3.20 are affected by this vulnerability. #### Recommendations Immediate Action Required: We recommend upgrading to tracing-subscriber 0.3.20 immediately, especially if your application: - Logs user-provided input (form data, HTTP headers, query parameters, etc.) - Runs in environments where terminal output is displayed to users #### Migration This is a patch release with no breaking API changes. Simply update your Cargo.toml: ```toml [dependencies] tracing-subscriber = "0.3.20" ``` #### Acknowledgments We would like to thank [zefr0x](http://github.com/zefr0x) who responsibly reported the issue at `security@tokio.rs`. If you believe you have found a security vulnerability in any tokio-rs project, please email us at `security@tokio.rs`.
--- ### Configuration 📅 **Schedule**: Branch creation - "" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A --------- Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> Co-authored-by: Kirill Bulatov --- Cargo.lock | 81 ++++++++++--------------------- tooling/workspace-hack/Cargo.toml | 4 +- 2 files changed, 27 insertions(+), 58 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6fc771894f589d98dec459bf877e8fa35f56f2aa..fed7077281333f53f4a9ce7b746227e3369d663b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -507,7 +507,7 @@ dependencies = [ "parking_lot", "piper", "polling", - "regex-automata 0.4.9", + "regex-automata", "rustix-openpty", "serde", "signal-hook", @@ -2457,7 +2457,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "234113d19d0d7d613b40e86fb654acf958910802bcceab913a4f9e7cda03b1a4" dependencies = [ "memchr", - "regex-automata 0.4.9", + "regex-automata", "serde", ] @@ -4732,7 +4732,7 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b545b8c50194bdd008283985ab0b31dba153cfd5b3066a92770634fbc0d7d291" dependencies = [ - "nu-ansi-term 0.50.1", + "nu-ansi-term", ] [[package]] @@ -5631,8 +5631,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "531e46835a22af56d1e3b66f04844bed63158bc094a628bec1d321d9b4c44bf2" dependencies = [ "bit-set 0.5.3", - "regex-automata 0.4.9", - "regex-syntax 0.8.5", + "regex-automata", + "regex-syntax", ] [[package]] @@ -5642,8 +5642,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"6e24cb5a94bcae1e5408b0effca5cd7172ea3c5755049c5f3af4cd283a165298" dependencies = [ "bit-set 0.8.0", - "regex-automata 0.4.9", - "regex-syntax 0.8.5", + "regex-automata", + "regex-syntax", ] [[package]] @@ -7293,8 +7293,8 @@ dependencies = [ "aho-corasick", "bstr", "log", - "regex-automata 0.4.9", - "regex-syntax 0.8.5", + "regex-automata", + "regex-syntax", ] [[package]] @@ -8299,7 +8299,7 @@ dependencies = [ "globset", "log", "memchr", - "regex-automata 0.4.9", + "regex-automata", "same-file", "walkdir", "winapi-util", @@ -8898,7 +8898,7 @@ dependencies = [ "percent-encoding", "referencing", "regex", - "regex-syntax 0.8.5", + "regex-syntax", "reqwest 0.12.15 (registry+https://github.com/rust-lang/crates.io-index)", "serde", "serde_json", @@ -9738,7 +9738,7 @@ dependencies = [ "lazy_static", "proc-macro2", "quote", - "regex-syntax 0.8.5", + "regex-syntax", "rustc_version", "syn 2.0.101", ] @@ -10018,11 +10018,11 @@ dependencies = [ [[package]] name = "matchers" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9" dependencies = [ - "regex-automata 0.1.10", + "regex-automata", ] [[package]] @@ -10723,16 +10723,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "nu-ansi-term" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" -dependencies = [ - "overload", - "winapi", -] - [[package]] name = "nu-ansi-term" version = "0.50.1" @@ -11426,12 +11416,6 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a80800c0488c3a21695ea981a54918fbb37abf04f4d0720c453632255e2ff0e" -[[package]] -name = "overload" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" - [[package]] name = "p256" version = "0.11.1" @@ -13422,17 +13406,8 @@ checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.9", - "regex-syntax 0.8.5", -] - -[[package]] -name = "regex-automata" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" -dependencies = [ - "regex-syntax 0.6.29", + "regex-automata", + "regex-syntax", ] [[package]] @@ -13443,7 +13418,7 @@ checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.5", + "regex-syntax", ] [[package]] @@ -13452,12 +13427,6 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a" -[[package]] -name = "regex-syntax" -version = "0.6.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" - [[package]] name = "regex-syntax" version = "0.8.5" @@ -17147,14 +17116,14 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.19" +version = "0.3.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" +checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5" dependencies = [ "matchers", - "nu-ansi-term 0.46.0", + "nu-ansi-term", "once_cell", - "regex", + "regex-automata", "serde", "serde_json", "sharded-slab", @@ -17185,7 +17154,7 @@ checksum = "a7cf18d43cbf0bfca51f657132cc616a5097edc4424d538bae6fa60142eaf9f0" dependencies = [ "cc", "regex", - "regex-syntax 0.8.5", + "regex-syntax", "serde_json", "streaming-iterator", "tree-sitter-language", @@ 
-19983,8 +19952,8 @@ dependencies = [ "rand_core 0.6.4", "regalloc2", "regex", - "regex-automata 0.4.9", - "regex-syntax 0.8.5", + "regex-automata", + "regex-syntax", "ring", "rust_decimal", "rustc-hash 1.1.0", diff --git a/tooling/workspace-hack/Cargo.toml b/tooling/workspace-hack/Cargo.toml index 2f9a963abc8b09d2255a5229dd2e44e06b2e8c9f..9bcaabb8cc942818fab9b3a454a0858f70be6bf2 100644 --- a/tooling/workspace-hack/Cargo.toml +++ b/tooling/workspace-hack/Cargo.toml @@ -568,7 +568,7 @@ tokio-rustls = { version = "0.26", default-features = false, features = ["loggin tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } -winapi = { version = "0.3", default-features = false, features = ["cfg", "commapi", "consoleapi", "errhandlingapi", "evntrace", "fileapi", "handleapi", "impl-debug", "impl-default", "in6addr", "inaddr", "ioapiset", "knownfolders", "minwinbase", "minwindef", "namedpipeapi", "ntsecapi", "objbase", "processenv", "processthreadsapi", "shlobj", "std", "synchapi", "sysinfoapi", "timezoneapi", "winbase", "windef", "winerror", "winioctl", "winnt", "winreg", "winsock2", "winuser"] } +winapi = { version = "0.3", default-features = false, features = ["cfg", "commapi", "consoleapi", "evntrace", "fileapi", "handleapi", "impl-debug", "impl-default", "in6addr", "inaddr", "ioapiset", "knownfolders", "minwinbase", "minwindef", "namedpipeapi", "ntsecapi", "objbase", "processenv", "processthreadsapi", "shlobj", "std", "synchapi", "sysinfoapi", "timezoneapi", "winbase", "windef", "winerror", "winioctl", "winnt", "winreg", "winsock2", "winuser"] } windows-core = { version = "0.61" } windows-numerics = { version = "0.2" } windows-sys-73dcd821b1037cfd = { package = "windows-sys", version = "0.59", features = ["Wdk_Foundation", "Wdk_Storage_FileSystem", "Win32_Globalization", "Win32_NetworkManagement_IpHelper", 
"Win32_Networking_WinSock", "Win32_Security_Authentication_Identity", "Win32_Security_Credentials", "Win32_Security_Cryptography", "Win32_Storage_FileSystem", "Win32_System_Com", "Win32_System_Console", "Win32_System_Diagnostics_Debug", "Win32_System_IO", "Win32_System_Ioctl", "Win32_System_Kernel", "Win32_System_LibraryLoader", "Win32_System_Memory", "Win32_System_Performance", "Win32_System_Pipes", "Win32_System_Registry", "Win32_System_SystemInformation", "Win32_System_SystemServices", "Win32_System_Threading", "Win32_System_Time", "Win32_System_WindowsProgramming", "Win32_UI_Input_KeyboardAndMouse", "Win32_UI_Shell", "Win32_UI_WindowsAndMessaging"] } @@ -592,7 +592,7 @@ tokio-rustls = { version = "0.26", default-features = false, features = ["loggin tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } -winapi = { version = "0.3", default-features = false, features = ["cfg", "commapi", "consoleapi", "errhandlingapi", "evntrace", "fileapi", "handleapi", "impl-debug", "impl-default", "in6addr", "inaddr", "ioapiset", "knownfolders", "minwinbase", "minwindef", "namedpipeapi", "ntsecapi", "objbase", "processenv", "processthreadsapi", "shlobj", "std", "synchapi", "sysinfoapi", "timezoneapi", "winbase", "windef", "winerror", "winioctl", "winnt", "winreg", "winsock2", "winuser"] } +winapi = { version = "0.3", default-features = false, features = ["cfg", "commapi", "consoleapi", "evntrace", "fileapi", "handleapi", "impl-debug", "impl-default", "in6addr", "inaddr", "ioapiset", "knownfolders", "minwinbase", "minwindef", "namedpipeapi", "ntsecapi", "objbase", "processenv", "processthreadsapi", "shlobj", "std", "synchapi", "sysinfoapi", "timezoneapi", "winbase", "windef", "winerror", "winioctl", "winnt", "winreg", "winsock2", "winuser"] } windows-core = { version = "0.61" } windows-numerics = { version = "0.2" } 
windows-sys-73dcd821b1037cfd = { package = "windows-sys", version = "0.59", features = ["Wdk_Foundation", "Wdk_Storage_FileSystem", "Win32_Globalization", "Win32_NetworkManagement_IpHelper", "Win32_Networking_WinSock", "Win32_Security_Authentication_Identity", "Win32_Security_Credentials", "Win32_Security_Cryptography", "Win32_Storage_FileSystem", "Win32_System_Com", "Win32_System_Console", "Win32_System_Diagnostics_Debug", "Win32_System_IO", "Win32_System_Ioctl", "Win32_System_Kernel", "Win32_System_LibraryLoader", "Win32_System_Memory", "Win32_System_Performance", "Win32_System_Pipes", "Win32_System_Registry", "Win32_System_SystemInformation", "Win32_System_SystemServices", "Win32_System_Threading", "Win32_System_Time", "Win32_System_WindowsProgramming", "Win32_UI_Input_KeyboardAndMouse", "Win32_UI_Shell", "Win32_UI_WindowsAndMessaging"] } From b69ebbd7b797117ffcd36d45a856cf4c1705d197 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Sun, 31 Aug 2025 13:19:12 +0300 Subject: [PATCH 38/54] Bump pnpm dependencies (#37258) Takes care of https://github.com/zed-industries/zed/security/dependabot/64 Release Notes: - N/A --- script/danger/pnpm-lock.yaml | 64 ++-- script/issue_response/package.json | 10 +- script/issue_response/pnpm-lock.yaml | 418 +++++++++++++++------------ 3 files changed, 264 insertions(+), 228 deletions(-) diff --git a/script/danger/pnpm-lock.yaml b/script/danger/pnpm-lock.yaml index f2739779e2bb1ea71b2cf14cf8e0940458745330..fd6b3f66acb627d57520e4ca928cc8ce2793b4b9 100644 --- a/script/danger/pnpm-lock.yaml +++ b/script/danger/pnpm-lock.yaml @@ -33,8 +33,8 @@ packages: resolution: {integrity: sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==} engines: {node: '>= 18'} - '@octokit/core@5.2.1': - resolution: {integrity: sha512-dKYCMuPO1bmrpuogcjQ8z7ICCH3FP6WmxpwC03yjzGfZhj9fTJg6+bS1+UAplekbN2C+M61UNllGOOoAfGCrdQ==} + '@octokit/core@5.2.2': + resolution: {integrity: 
sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg==} engines: {node: '>= 18'} '@octokit/endpoint@9.0.6': @@ -131,8 +131,8 @@ packages: commander@2.20.3: resolution: {integrity: sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==} - core-js@3.41.0: - resolution: {integrity: sha512-SJ4/EHwS36QMJd6h/Rg+GyR4A5xE0FSI3eZ+iBVpfqf1x0eTSg1smWLHrA+2jQThZSh97fmSgFSU8B61nxosxA==} + core-js@3.45.1: + resolution: {integrity: sha512-L4NPsJlCfZsPeXukyzHFlg/i7IIVwHSItR0wg0FLNqYClJ4MQYTYLbC7EkjKYRLZF2iof2MUgN0EGy7MdQFChg==} danger-plugin-pr-hygiene@0.6.1: resolution: {integrity: sha512-nb+iUQvirE3BlKXI1WoOND6sujyGzHar590mJm5tt4RLi65HXFaU5hqONxgDoWFujJNHYnXse9yaZdxnxEi4QA==} @@ -142,8 +142,8 @@ packages: engines: {node: '>=18'} hasBin: true - debug@4.4.0: - resolution: {integrity: sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==} + debug@4.4.1: + resolution: {integrity: sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==} engines: {node: '>=6.0'} peerDependencies: supports-color: '*' @@ -252,8 +252,8 @@ packages: resolution: {integrity: sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==} engines: {node: '>=12', npm: '>=6'} - jwa@1.4.1: - resolution: {integrity: sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==} + jwa@1.4.2: + resolution: {integrity: sha512-eeH5JO+21J78qMvTIDdBXidBd6nG2kZjg5Ohz/1fpa28Z4CcsWUzJ1ZZyFq/3z3N17aZy+ZuBoHljASbL1WfOw==} jws@3.2.2: resolution: {integrity: sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==} @@ -385,8 +385,8 @@ packages: safe-buffer@5.2.1: resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} - semver@7.7.1: - resolution: {integrity: 
sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==} + semver@7.7.2: + resolution: {integrity: sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==} engines: {node: '>=10'} hasBin: true @@ -460,7 +460,7 @@ snapshots: '@octokit/auth-token@4.0.0': {} - '@octokit/core@5.2.1': + '@octokit/core@5.2.2': dependencies: '@octokit/auth-token': 4.0.0 '@octokit/graphql': 7.1.1 @@ -483,18 +483,18 @@ snapshots: '@octokit/openapi-types@24.2.0': {} - '@octokit/plugin-paginate-rest@11.4.4-cjs.2(@octokit/core@5.2.1)': + '@octokit/plugin-paginate-rest@11.4.4-cjs.2(@octokit/core@5.2.2)': dependencies: - '@octokit/core': 5.2.1 + '@octokit/core': 5.2.2 '@octokit/types': 13.10.0 - '@octokit/plugin-request-log@4.0.1(@octokit/core@5.2.1)': + '@octokit/plugin-request-log@4.0.1(@octokit/core@5.2.2)': dependencies: - '@octokit/core': 5.2.1 + '@octokit/core': 5.2.2 - '@octokit/plugin-rest-endpoint-methods@13.3.2-cjs.1(@octokit/core@5.2.1)': + '@octokit/plugin-rest-endpoint-methods@13.3.2-cjs.1(@octokit/core@5.2.2)': dependencies: - '@octokit/core': 5.2.1 + '@octokit/core': 5.2.2 '@octokit/types': 13.10.0 '@octokit/request-error@5.1.1': @@ -512,10 +512,10 @@ snapshots: '@octokit/rest@20.1.2': dependencies: - '@octokit/core': 5.2.1 - '@octokit/plugin-paginate-rest': 11.4.4-cjs.2(@octokit/core@5.2.1) - '@octokit/plugin-request-log': 4.0.1(@octokit/core@5.2.1) - '@octokit/plugin-rest-endpoint-methods': 13.3.2-cjs.1(@octokit/core@5.2.1) + '@octokit/core': 5.2.2 + '@octokit/plugin-paginate-rest': 11.4.4-cjs.2(@octokit/core@5.2.2) + '@octokit/plugin-request-log': 4.0.1(@octokit/core@5.2.2) + '@octokit/plugin-rest-endpoint-methods': 13.3.2-cjs.1(@octokit/core@5.2.2) '@octokit/types@13.10.0': dependencies: @@ -525,7 +525,7 @@ snapshots: agent-base@6.0.2: dependencies: - debug: 4.4.0 + debug: 4.4.1 transitivePeerDependencies: - supports-color @@ -571,7 +571,7 @@ snapshots: commander@2.20.3: {} - core-js@3.41.0: {} + 
core-js@3.45.1: {} danger-plugin-pr-hygiene@0.6.1: {} @@ -582,8 +582,8 @@ snapshots: async-retry: 1.2.3 chalk: 2.4.2 commander: 2.20.3 - core-js: 3.41.0 - debug: 4.4.0 + core-js: 3.45.1 + debug: 4.4.1 fast-json-patch: 3.1.1 get-stdin: 6.0.0 http-proxy-agent: 5.0.0 @@ -618,7 +618,7 @@ snapshots: - encoding - supports-color - debug@4.4.0: + debug@4.4.1: dependencies: ms: 2.1.3 @@ -688,14 +688,14 @@ snapshots: dependencies: '@tootallnate/once': 2.0.0 agent-base: 6.0.2 - debug: 4.4.0 + debug: 4.4.1 transitivePeerDependencies: - supports-color https-proxy-agent@5.0.1: dependencies: agent-base: 6.0.2 - debug: 4.4.0 + debug: 4.4.1 transitivePeerDependencies: - supports-color @@ -720,9 +720,9 @@ snapshots: lodash.isstring: 4.0.1 lodash.once: 4.1.1 ms: 2.1.3 - semver: 7.7.1 + semver: 7.7.2 - jwa@1.4.1: + jwa@1.4.2: dependencies: buffer-equal-constant-time: 1.0.1 ecdsa-sig-formatter: 1.0.11 @@ -730,7 +730,7 @@ snapshots: jws@3.2.2: dependencies: - jwa: 1.4.1 + jwa: 1.4.2 safe-buffer: 5.2.1 lodash.find@4.6.0: {} @@ -823,7 +823,7 @@ snapshots: safe-buffer@5.2.1: {} - semver@7.7.1: {} + semver@7.7.2: {} side-channel-list@1.0.0: dependencies: diff --git a/script/issue_response/package.json b/script/issue_response/package.json index 0f3715ef27eda3ff7a3e35b84e42adc7fbbf5e16..70696bc4b808868143ca2ffd94f782d24da7d05b 100644 --- a/script/issue_response/package.json +++ b/script/issue_response/package.json @@ -9,14 +9,14 @@ "start": "node main.js" }, "dependencies": { - "@octokit/rest": "^21.1.0", - "@slack/webhook": "^7.0.4", + "@octokit/rest": "^21.1.1", + "@slack/webhook": "^7.0.6", "date-fns": "^4.1.0", - "octokit": "^4.1.1" + "octokit": "^4.1.4" }, "devDependencies": { - "@octokit/types": "^13.8.0", - "@slack/types": "^2.14.0", + "@octokit/types": "^13.10.0", + "@slack/types": "^2.16.0", "@tsconfig/node20": "20.1.5", "@tsconfig/strictest": "2.0.5", "typescript": "5.7.3" diff --git a/script/issue_response/pnpm-lock.yaml b/script/issue_response/pnpm-lock.yaml index 
7286c36467cc0e02441925337c8b849102dd6a83..a42e2460758b4c28b68c24065916140edf2c8404 100644 --- a/script/issue_response/pnpm-lock.yaml +++ b/script/issue_response/pnpm-lock.yaml @@ -9,24 +9,24 @@ importers: .: dependencies: '@octokit/rest': - specifier: ^21.1.0 + specifier: ^21.1.1 version: 21.1.1 '@slack/webhook': - specifier: ^7.0.4 - version: 7.0.5 + specifier: ^7.0.6 + version: 7.0.6 date-fns: specifier: ^4.1.0 version: 4.1.0 octokit: - specifier: ^4.1.1 - version: 4.1.2 + specifier: ^4.1.4 + version: 4.1.4 devDependencies: '@octokit/types': - specifier: ^13.8.0 - version: 13.8.0 + specifier: ^13.10.0 + version: 13.10.0 '@slack/types': - specifier: ^2.14.0 - version: 2.14.0 + specifier: ^2.16.0 + version: 2.16.0 '@tsconfig/node20': specifier: 20.1.5 version: 20.1.5 @@ -39,44 +39,44 @@ importers: packages: - '@octokit/app@15.1.4': - resolution: {integrity: sha512-PM1MqlPAnItjQIKWRmSoJu02+m7Eif4Am3w5C+Ctkw0//QETWMbW2ejBZhcw3aS7wRcFSbS+lH3NoYm614aZVQ==} + '@octokit/app@15.1.6': + resolution: {integrity: sha512-WELCamoCJo9SN0lf3SWZccf68CF0sBNPQuLYmZ/n87p5qvBJDe9aBtr5dHkh7T9nxWZ608pizwsUbypSzZAiUw==} engines: {node: '>= 18'} - '@octokit/auth-app@7.1.5': - resolution: {integrity: sha512-boklS4E6LpbA3nRx+SU2fRKRGZJdOGoSZne/i3Y0B5rfHOcGwFgcXrwDLdtbv4igfDSnAkZaoNBv1GYjPDKRNw==} + '@octokit/auth-app@7.2.2': + resolution: {integrity: sha512-p6hJtEyQDCJEPN9ijjhEC/kpFHMHN4Gca9r+8S0S8EJi7NaWftaEmexjxxpT1DFBeJpN4u/5RE22ArnyypupJw==} engines: {node: '>= 18'} - '@octokit/auth-oauth-app@8.1.3': - resolution: {integrity: sha512-4e6OjVe5rZ8yBe8w7byBjpKtSXFuro7gqeGAAZc7QYltOF8wB93rJl2FE0a4U1Mt88xxPv/mS+25/0DuLk0Ewg==} + '@octokit/auth-oauth-app@8.1.4': + resolution: {integrity: sha512-71iBa5SflSXcclk/OL3lJzdt4iFs56OJdpBGEBl1wULp7C58uiswZLV6TdRaiAzHP1LT8ezpbHlKuxADb+4NkQ==} engines: {node: '>= 18'} - '@octokit/auth-oauth-device@7.1.3': - resolution: {integrity: sha512-BECO/N4B/Uikj0w3GCvjf/odMujtYTP3q82BJSjxC2J3rxTEiZIJ+z2xnRlDb0IE9dQSaTgRqUPVOieSbFcVzg==} + 
'@octokit/auth-oauth-device@7.1.5': + resolution: {integrity: sha512-lR00+k7+N6xeECj0JuXeULQ2TSBB/zjTAmNF2+vyGPDEFx1dgk1hTDmL13MjbSmzusuAmuJD8Pu39rjp9jH6yw==} engines: {node: '>= 18'} - '@octokit/auth-oauth-user@5.1.3': - resolution: {integrity: sha512-zNPByPn9K7TC+OOHKGxU+MxrE9SZAN11UHYEFLsK2NRn3akJN2LHRl85q+Eypr3tuB2GrKx3rfj2phJdkYCvzw==} + '@octokit/auth-oauth-user@5.1.6': + resolution: {integrity: sha512-/R8vgeoulp7rJs+wfJ2LtXEVC7pjQTIqDab7wPKwVG6+2v/lUnCOub6vaHmysQBbb45FknM3tbHW8TOVqYHxCw==} engines: {node: '>= 18'} '@octokit/auth-token@5.1.2': resolution: {integrity: sha512-JcQDsBdg49Yky2w2ld20IHAlwr8d/d8N6NiOXbtuoPCqzbsiJgF633mVUw3x4mo0H5ypataQIX7SFu3yy44Mpw==} engines: {node: '>= 18'} - '@octokit/auth-unauthenticated@6.1.2': - resolution: {integrity: sha512-07DlUGcz/AAVdzu3EYfi/dOyMSHp9YsOxPl/MPmtlVXWiD//GlV8HgZsPhud94DEyx+RfrW0wSl46Lx+AWbOlg==} + '@octokit/auth-unauthenticated@6.1.3': + resolution: {integrity: sha512-d5gWJla3WdSl1yjbfMpET+hUSFCE15qM0KVSB0H1shyuJihf/RL1KqWoZMIaonHvlNojkL9XtLFp8QeLe+1iwA==} engines: {node: '>= 18'} - '@octokit/core@6.1.4': - resolution: {integrity: sha512-lAS9k7d6I0MPN+gb9bKDt7X8SdxknYqAMh44S5L+lNqIN2NuV8nvv3g8rPp7MuRxcOpxpUIATWprO0C34a8Qmg==} + '@octokit/core@6.1.6': + resolution: {integrity: sha512-kIU8SLQkYWGp3pVKiYzA5OSaNF5EE03P/R8zEmmrG6XwOg5oBjXyQVVIauQ0dgau4zYhpZEhJrvIYt6oM+zZZA==} engines: {node: '>= 18'} - '@octokit/endpoint@10.1.3': - resolution: {integrity: sha512-nBRBMpKPhQUxCsQQeW+rCJ/OPSMcj3g0nfHn01zGYZXuNDvvXudF/TYY6APj5THlurerpFN4a/dQAIAaM6BYhA==} + '@octokit/endpoint@10.1.4': + resolution: {integrity: sha512-OlYOlZIsfEVZm5HCSR8aSg02T2lbUWOsCQoPKfTXJwDzcHQBrVBGdGXb89dv2Kw2ToZaRtudp8O3ZIYoaOjKlA==} engines: {node: '>= 18'} - '@octokit/graphql@8.2.1': - resolution: {integrity: sha512-n57hXtOoHrhwTWdvhVkdJHdhTv0JstjDbDRhJfwIRNfFqmSo1DaK/mD2syoNUoLCyqSjBpGAKOG0BuwF392slw==} + '@octokit/graphql@8.2.2': + resolution: {integrity: 
sha512-Yi8hcoqsrXGdt0yObxbebHXFOiUA+2v3n53epuOg1QUgOB6c4XzvisBNVXJSl8RYA5KrDuSL2yq9Qmqe5N0ryA==} engines: {node: '>= 18'} '@octokit/oauth-app@7.1.6': @@ -87,15 +87,18 @@ packages: resolution: {integrity: sha512-ooXV8GBSabSWyhLUowlMIVd9l1s2nsOGQdlP2SQ4LnkEsGXzeCvbSbCPdZThXhEFzleGPwbapT0Sb+YhXRyjCA==} engines: {node: '>= 18'} - '@octokit/oauth-methods@5.1.4': - resolution: {integrity: sha512-Jc/ycnePClOvO1WL7tlC+TRxOFtyJBGuTDsL4dzXNiVZvzZdrPuNw7zHI3qJSUX2n6RLXE5L0SkFmYyNaVUFoQ==} + '@octokit/oauth-methods@5.1.5': + resolution: {integrity: sha512-Ev7K8bkYrYLhoOSZGVAGsLEscZQyq7XQONCBBAl2JdMg7IT3PQn/y8P0KjloPoYpI5UylqYrLeUcScaYWXwDvw==} engines: {node: '>= 18'} - '@octokit/openapi-types@23.0.1': - resolution: {integrity: sha512-izFjMJ1sir0jn0ldEKhZ7xegCTj/ObmEDlEfpFrx4k/JyZSMRHbO3/rBwgE7f3m2DHt+RrNGIVw4wSmwnm3t/g==} + '@octokit/openapi-types@24.2.0': + resolution: {integrity: sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==} - '@octokit/openapi-webhooks-types@9.1.0': - resolution: {integrity: sha512-bO1D2jLdU8qEvqmbWjNxJzDYSFT4wesiYKIKP6f4LaM0XUGtn/0LBv/20hu9YqcnpdX38X5o/xANTMtIAqdwYw==} + '@octokit/openapi-types@25.1.0': + resolution: {integrity: sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA==} + + '@octokit/openapi-webhooks-types@11.0.0': + resolution: {integrity: sha512-ZBzCFj98v3SuRM7oBas6BHZMJRadlnDoeFfvm1olVxZnYeU6Vh97FhPxyS5aLh5pN51GYv2I51l/hVUAVkGBlA==} '@octokit/plugin-paginate-graphql@5.2.4': resolution: {integrity: sha512-pLZES1jWaOynXKHOqdnwZ5ULeVR6tVVCMm+AUbp0htdcyXDU95WbkYdU4R2ej1wKj5Tu94Mee2Ne0PjPO9cCyA==} @@ -103,8 +106,14 @@ packages: peerDependencies: '@octokit/core': '>=6' - '@octokit/plugin-paginate-rest@11.4.2': - resolution: {integrity: sha512-BXJ7XPCTDXFF+wxcg/zscfgw2O/iDPtNSkwwR1W1W5c4Mb3zav/M2XvxQ23nVmKj7jpweB4g8viMeCQdm7LMVA==} + '@octokit/plugin-paginate-rest@11.6.0': + resolution: {integrity: 
sha512-n5KPteiF7pWKgBIBJSk8qzoZWcUkza2O6A0za97pMGVrGfPdltxrfmfF5GucHYvHGZD8BdaZmmHGz5cX/3gdpw==} + engines: {node: '>= 18'} + peerDependencies: + '@octokit/core': '>=6' + + '@octokit/plugin-paginate-rest@12.0.0': + resolution: {integrity: sha512-MPd6WK1VtZ52lFrgZ0R2FlaoiWllzgqFHaSZxvp72NmoDeZ0m8GeJdg4oB6ctqMTYyrnDYp592Xma21mrgiyDA==} engines: {node: '>= 18'} peerDependencies: '@octokit/core': '>=6' @@ -115,53 +124,62 @@ packages: peerDependencies: '@octokit/core': '>=6' - '@octokit/plugin-rest-endpoint-methods@13.3.1': - resolution: {integrity: sha512-o8uOBdsyR+WR8MK9Cco8dCgvG13H1RlM1nWnK/W7TEACQBFux/vPREgKucxUfuDQ5yi1T3hGf4C5ZmZXAERgwQ==} + '@octokit/plugin-rest-endpoint-methods@13.5.0': + resolution: {integrity: sha512-9Pas60Iv9ejO3WlAX3maE1+38c5nqbJXV5GrncEfkndIpZrJ/WPMRd2xYDcPPEt5yzpxcjw9fWNoPhsSGzqKqw==} + engines: {node: '>= 18'} + peerDependencies: + '@octokit/core': '>=6' + + '@octokit/plugin-rest-endpoint-methods@14.0.0': + resolution: {integrity: sha512-iQt6ovem4b7zZYZQtdv+PwgbL5VPq37th1m2x2TdkgimIDJpsi2A6Q/OI/23i/hR6z5mL0EgisNR4dcbmckSZQ==} engines: {node: '>= 18'} peerDependencies: '@octokit/core': '>=6' - '@octokit/plugin-retry@7.1.4': - resolution: {integrity: sha512-7AIP4p9TttKN7ctygG4BtR7rrB0anZqoU9ThXFk8nETqIfvgPUANTSYHqWYknK7W3isw59LpZeLI8pcEwiJdRg==} + '@octokit/plugin-retry@7.2.1': + resolution: {integrity: sha512-wUc3gv0D6vNHpGxSaR3FlqJpTXGWgqmk607N9L3LvPL4QjaxDgX/1nY2mGpT37Khn+nlIXdljczkRnNdTTV3/A==} engines: {node: '>= 18'} peerDependencies: '@octokit/core': '>=6' - '@octokit/plugin-throttling@9.4.0': - resolution: {integrity: sha512-IOlXxXhZA4Z3m0EEYtrrACkuHiArHLZ3CvqWwOez/pURNqRuwfoFlTPbN5Muf28pzFuztxPyiUiNwz8KctdZaQ==} + '@octokit/plugin-throttling@10.0.0': + resolution: {integrity: sha512-Kuq5/qs0DVYTHZuBAzCZStCzo2nKvVRo/TDNhCcpC2TKiOGz/DisXMCvjt3/b5kr6SCI1Y8eeeJTHBxxpFvZEg==} engines: {node: '>= 18'} peerDependencies: '@octokit/core': ^6.1.3 - '@octokit/request-error@6.1.7': - resolution: {integrity: 
sha512-69NIppAwaauwZv6aOzb+VVLwt+0havz9GT5YplkeJv7fG7a40qpLt/yZKyiDxAhgz0EtgNdNcb96Z0u+Zyuy2g==} + '@octokit/request-error@6.1.8': + resolution: {integrity: sha512-WEi/R0Jmq+IJKydWlKDmryPcmdYSVjL3ekaiEL1L9eo1sUnqMJ+grqmC9cjk7CA7+b2/T397tO5d8YLOH3qYpQ==} engines: {node: '>= 18'} - '@octokit/request@9.2.2': - resolution: {integrity: sha512-dZl0ZHx6gOQGcffgm1/Sf6JfEpmh34v3Af2Uci02vzUYz6qEN6zepoRtmybWXIGXFIK8K9ylE3b+duCWqhArtg==} + '@octokit/request@9.2.4': + resolution: {integrity: sha512-q8ybdytBmxa6KogWlNa818r0k1wlqzNC+yNkcQDECHvQo8Vmstrg18JwqJHdJdUiHD2sjlwBgSm9kHkOKe2iyA==} engines: {node: '>= 18'} '@octokit/rest@21.1.1': resolution: {integrity: sha512-sTQV7va0IUVZcntzy1q3QqPm/r8rWtDCqpRAmb8eXXnKkjoQEtFe3Nt5GTVsHft+R6jJoHeSiVLcgcvhtue/rg==} engines: {node: '>= 18'} - '@octokit/types@13.8.0': - resolution: {integrity: sha512-x7DjTIbEpEWXK99DMd01QfWy0hd5h4EN+Q7shkdKds3otGQP+oWE/y0A76i1OvH9fygo4ddvNf7ZvF0t78P98A==} + '@octokit/types@13.10.0': + resolution: {integrity: sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==} + + '@octokit/types@14.1.0': + resolution: {integrity: sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==} '@octokit/webhooks-methods@5.1.1': resolution: {integrity: sha512-NGlEHZDseJTCj8TMMFehzwa9g7On4KJMPVHDSrHxCQumL6uSQR8wIkP/qesv52fXqV1BPf4pTxwtS31ldAt9Xg==} engines: {node: '>= 18'} - '@octokit/webhooks@13.6.1': - resolution: {integrity: sha512-vk0jnc5k0/mLMUI4IA9LfSYkLs3OHtfa7B3h4aRG6to912V3wIG8lS/wKwatwYxRkAug4oE8is0ERRI8pzoYTw==} + '@octokit/webhooks@13.9.1': + resolution: {integrity: sha512-Nss2b4Jyn4wB3EAqAPJypGuCJFalz/ZujKBQQ5934To7Xw9xjf4hkr/EAByxQY7hp7MKd790bWGz7XYSTsHmaw==} engines: {node: '>= 18'} - '@slack/types@2.14.0': - resolution: {integrity: sha512-n0EGm7ENQRxlXbgKSrQZL69grzg1gHLAVd+GlRVQJ1NSORo0FrApR7wql/gaKdu2n4TO83Sq/AmeUOqD60aXUA==} + '@slack/types@2.16.0': + resolution: {integrity: 
sha512-bICnyukvdklXhwxprR3uF1+ZFkTvWTZge4evlCS4G1H1HU6QLY68AcjqzQRymf7/5gNt6Y4OBb4NdviheyZcAg==} engines: {node: '>= 12.13.0', npm: '>= 6.12.0'} - '@slack/webhook@7.0.5': - resolution: {integrity: sha512-PmbZx89+SmH4zt78FUwe4If8hWX2MAIRmGXjmlF0A8PwyJb/H7CWaQYV6DDlZn1+7Zs6CEytKH0ejEE/idVSDw==} + '@slack/webhook@7.0.6': + resolution: {integrity: sha512-RvNCcOjNbzl5uQ2TZsbTJ+A+5ptoWMwnyd/W4lKzeXFToIwebeaZiuntcP0usmhZHj1LH9H1T9WN6Bt1B/DLyg==} engines: {node: '>= 18', npm: '>= 8.6.0'} '@tsconfig/node20@20.1.5': @@ -170,17 +188,17 @@ packages: '@tsconfig/strictest@2.0.5': resolution: {integrity: sha512-ec4tjL2Rr0pkZ5hww65c+EEPYwxOi4Ryv+0MtjeaSQRJyq322Q27eOQiFbuNgw2hpL4hB1/W/HBGk3VKS43osg==} - '@types/aws-lambda@8.10.147': - resolution: {integrity: sha512-nD0Z9fNIZcxYX5Mai2CTmFD7wX7UldCkW2ezCF8D1T5hdiLsnTWDGRpfRYntU6VjTdLQjOvyszru7I1c1oCQew==} + '@types/aws-lambda@8.10.152': + resolution: {integrity: sha512-soT/c2gYBnT5ygwiHPmd9a1bftj462NWVk2tKCc1PYHSIacB2UwbTS2zYG4jzag1mRDuzg/OjtxQjQ2NKRB6Rw==} - '@types/node@22.13.13': - resolution: {integrity: sha512-ClsL5nMwKaBRwPcCvH8E7+nU4GxHVx1axNvMZTFHMEfNI7oahimt26P5zjVCRrjiIWj6YFXfE1v3dEp94wLcGQ==} + '@types/node@24.3.0': + resolution: {integrity: sha512-aPTXCrfwnDLj4VvXrm+UUCQjNEvJgNA8s5F1cvwQU+3KNltTOkBm1j30uNLyqqPNe7gE3KFzImYoZEfLhp4Yow==} asynckit@0.4.0: resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} - axios@1.8.4: - resolution: {integrity: sha512-eBSYY4Y68NNlHbHBMdeDmKNtDgXWhQsJcGqzO3iLUM0GraQFSS9cVgPX5I9b3lbdFKyYoAEGAZF1DwhTaljNAw==} + axios@1.11.0: + resolution: {integrity: sha512-1Lx3WLFQWm3ooKDYZD1eXmoGO9fxYQjrycfHFC8P0sCfQVXyROp0p9PFWBehewBOdCwHc+f/b8I0fMto5eSfwA==} before-after-hook@3.0.2: resolution: {integrity: sha512-Nik3Sc0ncrMK4UUdXQmAnRtzmNQTAAXmXIopizwZ1W1t8QmfJj+zL4OA2I7XPTPW5z5TDqv4hRo/JzouDJnX3A==} @@ -226,8 +244,8 @@ packages: fast-content-type-parse@2.0.1: resolution: {integrity: 
sha512-nGqtvLrj5w0naR6tDPfB4cUmYCqouzyQiz6C5y/LtcDllJdrcc6WaWW6iXyIIOErTa/XRybj28aasdn4LkVk6Q==} - follow-redirects@1.15.9: - resolution: {integrity: sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==} + follow-redirects@1.15.11: + resolution: {integrity: sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==} engines: {node: '>=4.0'} peerDependencies: debug: '*' @@ -235,8 +253,8 @@ packages: debug: optional: true - form-data@4.0.2: - resolution: {integrity: sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==} + form-data@4.0.4: + resolution: {integrity: sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==} engines: {node: '>= 6'} function-bind@1.1.2: @@ -278,8 +296,8 @@ packages: resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} engines: {node: '>= 0.6'} - octokit@4.1.2: - resolution: {integrity: sha512-0kcTxJOK3yQrJsRb8wKa28hlTze4QOz4sLuUnfXXnhboDhFKgv8LxS86tFwbsafDW9JZ08ByuVAE8kQbYJIZkA==} + octokit@4.1.4: + resolution: {integrity: sha512-cRvxRte6FU3vAHRC9+PMSY3D+mRAs2Rd9emMoqp70UGRvJRM3sbAoim2IXRZNNsf8wVfn4sGxVBHRAP+JBVX/g==} engines: {node: '>= 18'} proxy-from-env@1.1.0: @@ -294,182 +312,198 @@ packages: engines: {node: '>=14.17'} hasBin: true - undici-types@6.20.0: - resolution: {integrity: sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==} + undici-types@7.10.0: + resolution: {integrity: sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag==} - universal-github-app-jwt@2.2.0: - resolution: {integrity: sha512-G5o6f95b5BggDGuUfKDApKaCgNYy2x7OdHY0zSMF081O0EJobw+1130VONhrA7ezGSV2FNOGyM+KQpQZAr9bIQ==} + universal-github-app-jwt@2.2.2: + resolution: {integrity: 
sha512-dcmbeSrOdTnsjGjUfAlqNDJrhxXizjAz94ija9Qw8YkZ1uu0d+GoZzyH+Jb9tIIqvGsadUfwg+22k5aDqqwzbw==} - universal-user-agent@7.0.2: - resolution: {integrity: sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q==} + universal-user-agent@7.0.3: + resolution: {integrity: sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A==} snapshots: - '@octokit/app@15.1.4': + '@octokit/app@15.1.6': dependencies: - '@octokit/auth-app': 7.1.5 - '@octokit/auth-unauthenticated': 6.1.2 - '@octokit/core': 6.1.4 + '@octokit/auth-app': 7.2.2 + '@octokit/auth-unauthenticated': 6.1.3 + '@octokit/core': 6.1.6 '@octokit/oauth-app': 7.1.6 - '@octokit/plugin-paginate-rest': 11.4.2(@octokit/core@6.1.4) - '@octokit/types': 13.8.0 - '@octokit/webhooks': 13.6.1 + '@octokit/plugin-paginate-rest': 12.0.0(@octokit/core@6.1.6) + '@octokit/types': 14.1.0 + '@octokit/webhooks': 13.9.1 - '@octokit/auth-app@7.1.5': + '@octokit/auth-app@7.2.2': dependencies: - '@octokit/auth-oauth-app': 8.1.3 - '@octokit/auth-oauth-user': 5.1.3 - '@octokit/request': 9.2.2 - '@octokit/request-error': 6.1.7 - '@octokit/types': 13.8.0 + '@octokit/auth-oauth-app': 8.1.4 + '@octokit/auth-oauth-user': 5.1.6 + '@octokit/request': 9.2.4 + '@octokit/request-error': 6.1.8 + '@octokit/types': 14.1.0 toad-cache: 3.7.0 - universal-github-app-jwt: 2.2.0 - universal-user-agent: 7.0.2 + universal-github-app-jwt: 2.2.2 + universal-user-agent: 7.0.3 - '@octokit/auth-oauth-app@8.1.3': + '@octokit/auth-oauth-app@8.1.4': dependencies: - '@octokit/auth-oauth-device': 7.1.3 - '@octokit/auth-oauth-user': 5.1.3 - '@octokit/request': 9.2.2 - '@octokit/types': 13.8.0 - universal-user-agent: 7.0.2 + '@octokit/auth-oauth-device': 7.1.5 + '@octokit/auth-oauth-user': 5.1.6 + '@octokit/request': 9.2.4 + '@octokit/types': 14.1.0 + universal-user-agent: 7.0.3 - '@octokit/auth-oauth-device@7.1.3': + '@octokit/auth-oauth-device@7.1.5': dependencies: - '@octokit/oauth-methods': 5.1.4 
- '@octokit/request': 9.2.2 - '@octokit/types': 13.8.0 - universal-user-agent: 7.0.2 + '@octokit/oauth-methods': 5.1.5 + '@octokit/request': 9.2.4 + '@octokit/types': 14.1.0 + universal-user-agent: 7.0.3 - '@octokit/auth-oauth-user@5.1.3': + '@octokit/auth-oauth-user@5.1.6': dependencies: - '@octokit/auth-oauth-device': 7.1.3 - '@octokit/oauth-methods': 5.1.4 - '@octokit/request': 9.2.2 - '@octokit/types': 13.8.0 - universal-user-agent: 7.0.2 + '@octokit/auth-oauth-device': 7.1.5 + '@octokit/oauth-methods': 5.1.5 + '@octokit/request': 9.2.4 + '@octokit/types': 14.1.0 + universal-user-agent: 7.0.3 '@octokit/auth-token@5.1.2': {} - '@octokit/auth-unauthenticated@6.1.2': + '@octokit/auth-unauthenticated@6.1.3': dependencies: - '@octokit/request-error': 6.1.7 - '@octokit/types': 13.8.0 + '@octokit/request-error': 6.1.8 + '@octokit/types': 14.1.0 - '@octokit/core@6.1.4': + '@octokit/core@6.1.6': dependencies: '@octokit/auth-token': 5.1.2 - '@octokit/graphql': 8.2.1 - '@octokit/request': 9.2.2 - '@octokit/request-error': 6.1.7 - '@octokit/types': 13.8.0 + '@octokit/graphql': 8.2.2 + '@octokit/request': 9.2.4 + '@octokit/request-error': 6.1.8 + '@octokit/types': 14.1.0 before-after-hook: 3.0.2 - universal-user-agent: 7.0.2 + universal-user-agent: 7.0.3 - '@octokit/endpoint@10.1.3': + '@octokit/endpoint@10.1.4': dependencies: - '@octokit/types': 13.8.0 - universal-user-agent: 7.0.2 + '@octokit/types': 14.1.0 + universal-user-agent: 7.0.3 - '@octokit/graphql@8.2.1': + '@octokit/graphql@8.2.2': dependencies: - '@octokit/request': 9.2.2 - '@octokit/types': 13.8.0 - universal-user-agent: 7.0.2 + '@octokit/request': 9.2.4 + '@octokit/types': 14.1.0 + universal-user-agent: 7.0.3 '@octokit/oauth-app@7.1.6': dependencies: - '@octokit/auth-oauth-app': 8.1.3 - '@octokit/auth-oauth-user': 5.1.3 - '@octokit/auth-unauthenticated': 6.1.2 - '@octokit/core': 6.1.4 + '@octokit/auth-oauth-app': 8.1.4 + '@octokit/auth-oauth-user': 5.1.6 + '@octokit/auth-unauthenticated': 6.1.3 + 
'@octokit/core': 6.1.6 '@octokit/oauth-authorization-url': 7.1.1 - '@octokit/oauth-methods': 5.1.4 - '@types/aws-lambda': 8.10.147 - universal-user-agent: 7.0.2 + '@octokit/oauth-methods': 5.1.5 + '@types/aws-lambda': 8.10.152 + universal-user-agent: 7.0.3 '@octokit/oauth-authorization-url@7.1.1': {} - '@octokit/oauth-methods@5.1.4': + '@octokit/oauth-methods@5.1.5': dependencies: '@octokit/oauth-authorization-url': 7.1.1 - '@octokit/request': 9.2.2 - '@octokit/request-error': 6.1.7 - '@octokit/types': 13.8.0 + '@octokit/request': 9.2.4 + '@octokit/request-error': 6.1.8 + '@octokit/types': 14.1.0 + + '@octokit/openapi-types@24.2.0': {} - '@octokit/openapi-types@23.0.1': {} + '@octokit/openapi-types@25.1.0': {} - '@octokit/openapi-webhooks-types@9.1.0': {} + '@octokit/openapi-webhooks-types@11.0.0': {} - '@octokit/plugin-paginate-graphql@5.2.4(@octokit/core@6.1.4)': + '@octokit/plugin-paginate-graphql@5.2.4(@octokit/core@6.1.6)': dependencies: - '@octokit/core': 6.1.4 + '@octokit/core': 6.1.6 - '@octokit/plugin-paginate-rest@11.4.2(@octokit/core@6.1.4)': + '@octokit/plugin-paginate-rest@11.6.0(@octokit/core@6.1.6)': dependencies: - '@octokit/core': 6.1.4 - '@octokit/types': 13.8.0 + '@octokit/core': 6.1.6 + '@octokit/types': 13.10.0 - '@octokit/plugin-request-log@5.3.1(@octokit/core@6.1.4)': + '@octokit/plugin-paginate-rest@12.0.0(@octokit/core@6.1.6)': dependencies: - '@octokit/core': 6.1.4 + '@octokit/core': 6.1.6 + '@octokit/types': 14.1.0 - '@octokit/plugin-rest-endpoint-methods@13.3.1(@octokit/core@6.1.4)': + '@octokit/plugin-request-log@5.3.1(@octokit/core@6.1.6)': dependencies: - '@octokit/core': 6.1.4 - '@octokit/types': 13.8.0 + '@octokit/core': 6.1.6 - '@octokit/plugin-retry@7.1.4(@octokit/core@6.1.4)': + '@octokit/plugin-rest-endpoint-methods@13.5.0(@octokit/core@6.1.6)': dependencies: - '@octokit/core': 6.1.4 - '@octokit/request-error': 6.1.7 - '@octokit/types': 13.8.0 + '@octokit/core': 6.1.6 + '@octokit/types': 13.10.0 + + 
'@octokit/plugin-rest-endpoint-methods@14.0.0(@octokit/core@6.1.6)': + dependencies: + '@octokit/core': 6.1.6 + '@octokit/types': 14.1.0 + + '@octokit/plugin-retry@7.2.1(@octokit/core@6.1.6)': + dependencies: + '@octokit/core': 6.1.6 + '@octokit/request-error': 6.1.8 + '@octokit/types': 14.1.0 bottleneck: 2.19.5 - '@octokit/plugin-throttling@9.4.0(@octokit/core@6.1.4)': + '@octokit/plugin-throttling@10.0.0(@octokit/core@6.1.6)': dependencies: - '@octokit/core': 6.1.4 - '@octokit/types': 13.8.0 + '@octokit/core': 6.1.6 + '@octokit/types': 14.1.0 bottleneck: 2.19.5 - '@octokit/request-error@6.1.7': + '@octokit/request-error@6.1.8': dependencies: - '@octokit/types': 13.8.0 + '@octokit/types': 14.1.0 - '@octokit/request@9.2.2': + '@octokit/request@9.2.4': dependencies: - '@octokit/endpoint': 10.1.3 - '@octokit/request-error': 6.1.7 - '@octokit/types': 13.8.0 + '@octokit/endpoint': 10.1.4 + '@octokit/request-error': 6.1.8 + '@octokit/types': 14.1.0 fast-content-type-parse: 2.0.1 - universal-user-agent: 7.0.2 + universal-user-agent: 7.0.3 '@octokit/rest@21.1.1': dependencies: - '@octokit/core': 6.1.4 - '@octokit/plugin-paginate-rest': 11.4.2(@octokit/core@6.1.4) - '@octokit/plugin-request-log': 5.3.1(@octokit/core@6.1.4) - '@octokit/plugin-rest-endpoint-methods': 13.3.1(@octokit/core@6.1.4) + '@octokit/core': 6.1.6 + '@octokit/plugin-paginate-rest': 11.6.0(@octokit/core@6.1.6) + '@octokit/plugin-request-log': 5.3.1(@octokit/core@6.1.6) + '@octokit/plugin-rest-endpoint-methods': 13.5.0(@octokit/core@6.1.6) + + '@octokit/types@13.10.0': + dependencies: + '@octokit/openapi-types': 24.2.0 - '@octokit/types@13.8.0': + '@octokit/types@14.1.0': dependencies: - '@octokit/openapi-types': 23.0.1 + '@octokit/openapi-types': 25.1.0 '@octokit/webhooks-methods@5.1.1': {} - '@octokit/webhooks@13.6.1': + '@octokit/webhooks@13.9.1': dependencies: - '@octokit/openapi-webhooks-types': 9.1.0 - '@octokit/request-error': 6.1.7 + '@octokit/openapi-webhooks-types': 11.0.0 + 
'@octokit/request-error': 6.1.8 '@octokit/webhooks-methods': 5.1.1 - '@slack/types@2.14.0': {} + '@slack/types@2.16.0': {} - '@slack/webhook@7.0.5': + '@slack/webhook@7.0.6': dependencies: - '@slack/types': 2.14.0 - '@types/node': 22.13.13 - axios: 1.8.4 + '@slack/types': 2.16.0 + '@types/node': 24.3.0 + axios: 1.11.0 transitivePeerDependencies: - debug @@ -477,18 +511,18 @@ snapshots: '@tsconfig/strictest@2.0.5': {} - '@types/aws-lambda@8.10.147': {} + '@types/aws-lambda@8.10.152': {} - '@types/node@22.13.13': + '@types/node@24.3.0': dependencies: - undici-types: 6.20.0 + undici-types: 7.10.0 asynckit@0.4.0: {} - axios@1.8.4: + axios@1.11.0: dependencies: - follow-redirects: 1.15.9 - form-data: 4.0.2 + follow-redirects: 1.15.11 + form-data: 4.0.4 proxy-from-env: 1.1.0 transitivePeerDependencies: - debug @@ -533,13 +567,14 @@ snapshots: fast-content-type-parse@2.0.1: {} - follow-redirects@1.15.9: {} + follow-redirects@1.15.11: {} - form-data@4.0.2: + form-data@4.0.4: dependencies: asynckit: 0.4.0 combined-stream: 1.0.8 es-set-tostringtag: 2.1.0 + hasown: 2.0.2 mime-types: 2.1.35 function-bind@1.1.2: {} @@ -582,18 +617,19 @@ snapshots: dependencies: mime-db: 1.52.0 - octokit@4.1.2: + octokit@4.1.4: dependencies: - '@octokit/app': 15.1.4 - '@octokit/core': 6.1.4 + '@octokit/app': 15.1.6 + '@octokit/core': 6.1.6 '@octokit/oauth-app': 7.1.6 - '@octokit/plugin-paginate-graphql': 5.2.4(@octokit/core@6.1.4) - '@octokit/plugin-paginate-rest': 11.4.2(@octokit/core@6.1.4) - '@octokit/plugin-rest-endpoint-methods': 13.3.1(@octokit/core@6.1.4) - '@octokit/plugin-retry': 7.1.4(@octokit/core@6.1.4) - '@octokit/plugin-throttling': 9.4.0(@octokit/core@6.1.4) - '@octokit/request-error': 6.1.7 - '@octokit/types': 13.8.0 + '@octokit/plugin-paginate-graphql': 5.2.4(@octokit/core@6.1.6) + '@octokit/plugin-paginate-rest': 12.0.0(@octokit/core@6.1.6) + '@octokit/plugin-rest-endpoint-methods': 14.0.0(@octokit/core@6.1.6) + '@octokit/plugin-retry': 7.2.1(@octokit/core@6.1.6) + 
'@octokit/plugin-throttling': 10.0.0(@octokit/core@6.1.6) + '@octokit/request-error': 6.1.8 + '@octokit/types': 14.1.0 + '@octokit/webhooks': 13.9.1 proxy-from-env@1.1.0: {} @@ -601,8 +637,8 @@ snapshots: typescript@5.7.3: {} - undici-types@6.20.0: {} + undici-types@7.10.0: {} - universal-github-app-jwt@2.2.0: {} + universal-github-app-jwt@2.2.2: {} - universal-user-agent@7.0.2: {} + universal-user-agent@7.0.3: {} From 39d41ed822af014ef28a1499f8313939a641b724 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Sun, 31 Aug 2025 13:29:29 +0300 Subject: [PATCH 39/54] Add another entry to show how to hide the Sign In button from the interface (#37260) Release Notes: - N/A --- docs/src/accounts.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/src/accounts.md b/docs/src/accounts.md index 1ce23cf902dc558de4163621d4ec886d2b719e15..af4c4c172f76ba1d491ddb4031714f60f848c3b6 100644 --- a/docs/src/accounts.md +++ b/docs/src/accounts.md @@ -30,3 +30,8 @@ To sign out of Zed, you can use either of these methods: Your Zed account's email address is the address provided by GitHub OAuth. If you have a public email address then it will be used, otherwise your primary GitHub email address will be used. Changes to your email address on GitHub can be synced to your Zed account by [signing in to zed.dev](https://zed.dev/sign_in). Stripe is used for billing, and will use your Zed account's email address when starting a subscription. Changes to your Zed account email address do not currently update the email address used in Stripe. See [Updating Billing Information](./ai/billing.md#updating-billing-info) for how to change this email address. + +## Hiding Sign In button from the interface + +In case the Sign In feature is not used, it's possible to hide that from the interface by using `show_sign_in` settings property. +Refer to [Visual Customization page](./visual-customization.md) for more details. 
From babc0c09f0f54c9b5d2df93de4430b0b4cac9e07 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Sun, 31 Aug 2025 20:56:23 +0300 Subject: [PATCH 40/54] Add a "mandatory PR contents" section in the contribution docs (#37259) The LLM part is inspired by (and paraphrased from) https://github.com/ghostty-org/ghostty?tab=contributing-ov-file#ai-assistance-notice Release Notes: - N/A --- CONTRIBUTING.md | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 91b1b75f8292f37b122c152d71fe1e38eeccf817..dd5bbdc2e1d7f7a98e42fdaba21a6189eb92c638 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -27,6 +27,22 @@ By effectively engaging with the Zed team and community early in your process, w We plan to set aside time each week to pair program with contributors on promising pull requests in Zed. This will be an experiment. We tend to prefer pairing over async code review on our team, and we'd like to see how well it works in an open source setting. If we're finding it difficult to get on the same page with async review, we may ask you to pair with us if you're open to it. The closer a contribution is to the goals outlined in our roadmap, the more likely we'll be to spend time pairing on it. +## Mandatory PR contents + +Please ensure the PR contains + +- Before & after screenshots, if there are visual adjustments introduced. + +Examples of visual adjustments: tree-sitter query updates, UI changes, etc. + +- A disclosure of the AI assistance usage, if any was used. + +Any kind of AI assistance must be disclosed in the PR, along with the extent to which AI assistance was used (e.g. docs only vs. code generation). + +If the PR responses are being generated by an AI, disclose that as well. + +As a small exception, trivial tab-completion doesn't need to be disclosed, as long as it's limited to single keywords or short phrases. 
+ ## Tips to improve the chances of your PR getting reviewed and merged - Discuss your plans ahead of time with the team From e48be30266a836640d74fbda086041863e35cc47 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Sun, 31 Aug 2025 20:39:26 +0200 Subject: [PATCH 41/54] vim: Fix `NormalBefore` with completions shown (#37272) Follow-up to https://github.com/zed-industries/zed/pull/35985 The `!menu` is actually not needed and breaks other keybinds from that context. Release Notes: - N/A --- assets/keymaps/vim.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index bd6eb3982cd9860b2635a3390d47484f1a6dbe55..fd33b888b742bff8ba6a3c1b1ff15b8dbe0c11f8 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -324,7 +324,7 @@ } }, { - "context": "vim_mode == insert && !menu", + "context": "vim_mode == insert", "bindings": { "ctrl-c": "vim::NormalBefore", "ctrl-[": "vim::NormalBefore", From 9c8c3966dfc2089d7ff340f2b5c0842e638b7344 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Sun, 31 Aug 2025 15:57:24 -0400 Subject: [PATCH 42/54] linux: Support ctrl-insert in markdown previews (#37273) Closes: https://github.com/zed-industries/zed/issues/37240 Release Notes: - Added support for copying in Markdown preview using `ctrl-insert` on Linux/Windows --- assets/keymaps/default-linux.json | 2 ++ 1 file changed, 2 insertions(+) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 2610f9b7051cbce74ce6df13d49699c74e870395..a60dc92844b337409e717b56975789073eb964fb 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -170,6 +170,7 @@ "context": "Markdown", "bindings": { "copy": "markdown::Copy", + "ctrl-insert": "markdown::Copy", "ctrl-c": "markdown::Copy" } }, @@ -258,6 +259,7 @@ "context": "AgentPanel > Markdown", "bindings": { "copy": "markdown::CopyAsMarkdown", + "ctrl-insert": "markdown::CopyAsMarkdown", "ctrl-c": 
"markdown::CopyAsMarkdown" } }, From 5abc398a0a1f486ba16743919a45955e721c9221 Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Sun, 31 Aug 2025 23:09:09 +0200 Subject: [PATCH 43/54] nix: Update flake, remove legacy Darwin SDK usage (#37254) `darwin.apple_sdk.frameworks` has been obsoleted and is no longer required to be specified explicitly as per [Nixpkgs Reference Manual](https://nixos.org/manual/nixpkgs/stable/#sec-darwin-legacy-frameworks). @P1n3appl3 not sure what the process for updating Nix is, so lemme know if this is desired/acceptable! Release Notes: - N/A --- flake.lock | 18 +++++++++--------- nix/build.nix | 1 - 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/flake.lock b/flake.lock index 80022f7b555900ad78dca230d37faeb04dd09c7d..d96f0a998ff47958a7b605d61e1bf539929555f5 100644 --- a/flake.lock +++ b/flake.lock @@ -2,11 +2,11 @@ "nodes": { "crane": { "locked": { - "lastModified": 1754269165, - "narHash": "sha256-0tcS8FHd4QjbCVoxN9jI+PjHgA4vc/IjkUSp+N3zy0U=", + "lastModified": 1755993354, + "narHash": "sha256-FCRRAzSaL/+umLIm3RU3O/+fJ2ssaPHseI2SSFL8yZU=", "owner": "ipetkov", "repo": "crane", - "rev": "444e81206df3f7d92780680e45858e31d2f07a08", + "rev": "25bd41b24426c7734278c2ff02e53258851db914", "type": "github" }, "original": { @@ -33,10 +33,10 @@ "nixpkgs": { "locked": { "lastModified": 315532800, - "narHash": "sha256-5VYevX3GccubYeccRGAXvCPA1ktrGmIX1IFC0icX07g=", - "rev": "a683adc19ff5228af548c6539dbc3440509bfed3", + "narHash": "sha256-E8CyvVDZuIsF7puIw+OLkrFmhj3qUV+iwPcNbBhdcxM=", + "rev": "a918bb3594dd243c2f8534b3be01b3cb4ed35fd1", "type": "tarball", - "url": "https://releases.nixos.org/nixpkgs/nixpkgs-25.11pre840248.a683adc19ff5/nixexprs.tar.xz" + "url": "https://releases.nixos.org/nixpkgs/nixpkgs-25.11pre854010.a918bb3594dd/nixexprs.tar.xz" }, "original": { "type": "tarball", @@ -58,11 +58,11 @@ ] }, "locked": { - "lastModified": 1754575663, - "narHash": "sha256-afOx8AG0KYtw7mlt6s6ahBBy7eEHZwws3iCRoiuRQS4=", + "lastModified": 
1756607787, + "narHash": "sha256-ciwAdgtlAN1PCaidWK6RuWsTBL8DVuyDCGM+X3ein5Q=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "6db0fb0e9cec2e9729dc52bf4898e6c135bb8a0f", + "rev": "f46d294b87ebb9f7124f1ce13aa2a5f5acc0f3eb", "type": "github" }, "original": { diff --git a/nix/build.nix b/nix/build.nix index 03403cc1c97f2dca0a42d7fb09bc5936d67e7cab..9012a47c1fa1a1874ec4283bb73eae96087d4529 100644 --- a/nix/build.nix +++ b/nix/build.nix @@ -145,7 +145,6 @@ let ] ++ lib.optionals stdenv'.hostPlatform.isDarwin [ apple-sdk_15 - darwin.apple_sdk.frameworks.System (darwinMinVersionHook "10.15") ]; From d74384f6e2a1d0b04f7788883d1b599e7e0b85fa Mon Sep 17 00:00:00 2001 From: tidely <43219534+tidely@users.noreply.github.com> Date: Mon, 1 Sep 2025 00:42:57 +0300 Subject: [PATCH 44/54] anthropic: Remove logging when no credentials are available (#37276) Removes excess log which got through on each start of Zed ``` ERROR [agent_ui::language_model_selector] Failed to authenticate provider: Anthropic: credentials not found ``` The `AnthropicLanguageModelProvider::api_key` method returned a `anyhow::Result` which would convert `AuthenticateError::CredentialsNotFound` into a generic error because of the implicit `Into` when using the `?` operator. This would then get converted into a `AuthenticateError::Other` later. By specifying the error type as `AuthenticateError`, we remove this implicit conversion and the log gets removed. 
Release Notes: - N/A --- crates/language_models/src/provider/anthropic.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/language_models/src/provider/anthropic.rs b/crates/language_models/src/provider/anthropic.rs index c492edeaf569fe5eeedadd840bd6338c073b48dd..6c003c4c3919a9f553024c6b1b56d03d410d984b 100644 --- a/crates/language_models/src/provider/anthropic.rs +++ b/crates/language_models/src/provider/anthropic.rs @@ -197,7 +197,7 @@ impl AnthropicLanguageModelProvider { }) } - pub fn api_key(cx: &mut App) -> Task> { + pub fn api_key(cx: &mut App) -> Task> { let credentials_provider = ::global(cx); let api_url = AllLanguageModelSettings::get_global(cx) .anthropic From c833f8905bbe63955b34d69c2fb3eca42aa6c17e Mon Sep 17 00:00:00 2001 From: Umesh Yadav <23421535+imumesh18@users.noreply.github.com> Date: Mon, 1 Sep 2025 04:21:17 +0530 Subject: [PATCH 45/54] language_models: Fix `grok-code-fast-1` support for Copilot (#37116) This PR fixes a deserialization issue in GitHub Copilot Chat that was causing warnings when encountering xAI models from the GitHub Copilot API and skipping the Grok model from model selector. Release Notes: - Fixed support for xAI models that are now available through GitHub Copilot Chat. 
--- crates/copilot/src/copilot_chat.rs | 2 ++ crates/language_models/src/provider/copilot_chat.rs | 10 +++++++++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/crates/copilot/src/copilot_chat.rs b/crates/copilot/src/copilot_chat.rs index bfddba0e2f8a41e3ed234b21ee52454d104c9dd2..9b9d6e19b8de86fd0ee7e6fe6bf57d6d91da19da 100644 --- a/crates/copilot/src/copilot_chat.rs +++ b/crates/copilot/src/copilot_chat.rs @@ -164,6 +164,8 @@ pub enum ModelVendor { OpenAI, Google, Anthropic, + #[serde(rename = "xAI")] + XAI, } #[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)] diff --git a/crates/language_models/src/provider/copilot_chat.rs b/crates/language_models/src/provider/copilot_chat.rs index d48c12aa4b5de713c0130320f7c9e61a733dc33e..bd284eb72b207dee90048f06dc44a8e21ae8d34f 100644 --- a/crates/language_models/src/provider/copilot_chat.rs +++ b/crates/language_models/src/provider/copilot_chat.rs @@ -32,6 +32,8 @@ use std::time::Duration; use ui::prelude::*; use util::debug_panic; +use crate::provider::x_ai::count_xai_tokens; + use super::anthropic::count_anthropic_tokens; use super::google::count_google_tokens; use super::open_ai::count_open_ai_tokens; @@ -228,7 +230,9 @@ impl LanguageModel for CopilotChatLanguageModel { ModelVendor::OpenAI | ModelVendor::Anthropic => { LanguageModelToolSchemaFormat::JsonSchema } - ModelVendor::Google => LanguageModelToolSchemaFormat::JsonSchemaSubset, + ModelVendor::Google | ModelVendor::XAI => { + LanguageModelToolSchemaFormat::JsonSchemaSubset + } } } @@ -256,6 +260,10 @@ impl LanguageModel for CopilotChatLanguageModel { match self.model.vendor() { ModelVendor::Anthropic => count_anthropic_tokens(request, cx), ModelVendor::Google => count_google_tokens(request, cx), + ModelVendor::XAI => { + let model = x_ai::Model::from_id(self.model.id()).unwrap_or_default(); + count_xai_tokens(request, model, cx) + } ModelVendor::OpenAI => { let model = open_ai::Model::from_id(self.model.id()).unwrap_or_default(); 
count_open_ai_tokens(request, model, cx) From 129bff83585f79e77a72e787728bcd000eeca679 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Sun, 31 Aug 2025 19:52:43 -0400 Subject: [PATCH 46/54] agent: Make it so delete_path tool needs user confirmation (#37191) Closes https://github.com/zed-industries/zed/issues/37048 Release Notes: - agent: Make delete_path tool require user confirmation by default --- crates/assistant_tools/src/delete_path_tool.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/assistant_tools/src/delete_path_tool.rs b/crates/assistant_tools/src/delete_path_tool.rs index b181eeff5ca0f1a45176921ed9e24973aae3839f..7c85f1ed7552931822500f76bb9f3b1b1f47fd0c 100644 --- a/crates/assistant_tools/src/delete_path_tool.rs +++ b/crates/assistant_tools/src/delete_path_tool.rs @@ -35,7 +35,7 @@ impl Tool for DeletePathTool { } fn needs_confirmation(&self, _: &serde_json::Value, _: &Entity, _: &App) -> bool { - false + true } fn may_perform_edits(&self) -> bool { From f290daf7eac29a1aafa89be1074b76feb78acebb Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Sun, 31 Aug 2025 20:08:17 -0400 Subject: [PATCH 47/54] docs: Improve Bedrock suggested IAM policy (#37278) Closes https://github.com/zed-industries/zed/issues/37251 H/T: @brandon-fryslie Release Notes: - N/A --- docs/src/ai/llm-providers.md | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/src/ai/llm-providers.md b/docs/src/ai/llm-providers.md index 5ef6081421240ae13ab53b27fd966aec64ca3b82..ecc4cb004befc199cf77708367e639a6dd6b029d 100644 --- a/docs/src/ai/llm-providers.md +++ b/docs/src/ai/llm-providers.md @@ -40,7 +40,6 @@ Ensure your credentials have the following permissions set up: - `bedrock:InvokeModelWithResponseStream` - `bedrock:InvokeModel` -- `bedrock:ConverseStream` Your IAM policy should look similar to: @@ -52,8 +51,7 @@ Your IAM policy should look similar to: "Effect": "Allow", "Action": [ "bedrock:InvokeModel", - 
"bedrock:InvokeModelWithResponseStream", - "bedrock:ConverseStream" + "bedrock:InvokeModelWithResponseStream" ], "Resource": "*" } From a852bcc09410b47dcabbe9b089725777024d125e Mon Sep 17 00:00:00 2001 From: Gaauwe Rombouts Date: Mon, 1 Sep 2025 02:24:00 +0200 Subject: [PATCH 48/54] Improve system window tabs visibility (#37244) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Follow up of https://github.com/zed-industries/zed/pull/33334 After chatting with @MrSubidubi we found out that he had an old defaults setting (most likely from when he encountered a previous window tabbing bug): ``` ❯ defaults read dev.zed.Zed-Nightly { NSNavPanelExpandedSizeForOpenMode = "{800, 448}"; NSNavPanelExpandedSizeForSaveMode = "{800, 448}"; NSNavPanelExpandedStateForSaveMode = 1; NSOSPLastRootDirectory = {length = 828, bytes = 0x626f6f6b 3c030000 00000410 30000000 ... dc010000 00000000 }; "NSWindow Frame NSNavPanelAutosaveName" = "557 1726 800 448 -323 982 2560 1440 "; "NSWindowTabbingShoudShowTabBarKey-GPUIWindow-GPUIWindow-(null)-HT-FS" = 1; } ``` > That suffix is AppKit’s fallback autosave name when no tabbing identifier is set. It encodes the NSWindow subclass (GPUIWindow), plus traits like HT (hidden titlebar) and FS (fullscreen). Which explains why it only happened on the Nightly build, since each bundle has it's own defaults. It also explains why the tabbar would disappear when he activated the `use_system_window_tabs` setting, because with that setting activated, the tabbing identifier becomes "zed" (instead of the default one when omitted) for which he didn't have the `NSWindowTabbingShoudShowTabBarKey` default. The original implementation was perhaps a bit naive and relied fully on macOS to determine if the tabbar should be shown. I've updated the code to always hide the tabbar, if the setting is turned off and there is only 1 tab entry. 
While testing, I also noticed that the menu's like 'merge all windows' wouldn't become active when the setting was turned on, only after a full workspace reload. So I added a setting observer as well, to immediately set the correct window properties to enable all the features without a reload. Release Notes: - N/A --- crates/gpui/src/platform.rs | 1 + crates/gpui/src/platform/mac/window.rs | 21 ++++++++++ crates/gpui/src/window.rs | 7 ++++ crates/title_bar/src/system_window_tabs.rs | 49 ++++++++++++++++++++-- crates/zed/src/main.rs | 2 +- 5 files changed, 76 insertions(+), 4 deletions(-) diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index eb1d73814388a26503e9ada782bc358dc712b53c..d3425c8835bb474ffbed6bc79371340d569d1bfb 100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -522,6 +522,7 @@ pub(crate) trait PlatformWindow: HasWindowHandle + HasDisplayHandle { fn merge_all_windows(&self) {} fn move_tab_to_new_window(&self) {} fn toggle_window_tab_overview(&self) {} + fn set_tabbing_identifier(&self, _identifier: Option) {} #[cfg(target_os = "windows")] fn get_raw_handle(&self) -> windows::HWND; diff --git a/crates/gpui/src/platform/mac/window.rs b/crates/gpui/src/platform/mac/window.rs index 0262cbb1213ca670cece780959c740f292764630..686cfb314e58c4e10e916a07931fb5f4248ea54e 100644 --- a/crates/gpui/src/platform/mac/window.rs +++ b/crates/gpui/src/platform/mac/window.rs @@ -781,6 +781,8 @@ impl MacWindow { if let Some(tabbing_identifier) = tabbing_identifier { let tabbing_id = NSString::alloc(nil).init_str(tabbing_identifier.as_str()); let _: () = msg_send![native_window, setTabbingIdentifier: tabbing_id]; + } else { + let _: () = msg_send![native_window, setTabbingIdentifier:nil]; } } WindowKind::PopUp => { @@ -1018,6 +1020,25 @@ impl PlatformWindow for MacWindow { } } + fn set_tabbing_identifier(&self, tabbing_identifier: Option) { + let native_window = self.0.lock().native_window; + unsafe { + let 
allows_automatic_window_tabbing = tabbing_identifier.is_some(); + if allows_automatic_window_tabbing { + let () = msg_send![class!(NSWindow), setAllowsAutomaticWindowTabbing: YES]; + } else { + let () = msg_send![class!(NSWindow), setAllowsAutomaticWindowTabbing: NO]; + } + + if let Some(tabbing_identifier) = tabbing_identifier { + let tabbing_id = NSString::alloc(nil).init_str(tabbing_identifier.as_str()); + let _: () = msg_send![native_window, setTabbingIdentifier: tabbing_id]; + } else { + let _: () = msg_send![native_window, setTabbingIdentifier:nil]; + } + } + } + fn scale_factor(&self) -> f32 { self.0.as_ref().lock().scale_factor() } diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 4504f512551b678b9304a4c180f54b15c34af956..c2719665d423a4431184d56a9b6bff16f8ad443b 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -4390,6 +4390,13 @@ impl Window { self.platform_window.toggle_window_tab_overview() } + /// Sets the tabbing identifier for the window. + /// This is macOS specific. + pub fn set_tabbing_identifier(&self, tabbing_identifier: Option) { + self.platform_window + .set_tabbing_identifier(tabbing_identifier) + } + /// Toggles the inspector mode on this window. 
#[cfg(any(feature = "inspector", debug_assertions))] pub fn toggle_inspector(&mut self, cx: &mut App) { diff --git a/crates/title_bar/src/system_window_tabs.rs b/crates/title_bar/src/system_window_tabs.rs index cc50fbc2b99b56c2d8dab95e0c56deb33da2bb4b..ba898da716f042573840f8f9c9f375747ac5cc04 100644 --- a/crates/title_bar/src/system_window_tabs.rs +++ b/crates/title_bar/src/system_window_tabs.rs @@ -1,4 +1,4 @@ -use settings::Settings; +use settings::{Settings, SettingsStore}; use gpui::{ AnyWindowHandle, Context, Hsla, InteractiveElement, MouseButton, ParentElement, ScrollHandle, @@ -11,7 +11,7 @@ use ui::{ LabelSize, Tab, h_flex, prelude::*, right_click_menu, }; use workspace::{ - CloseWindow, ItemSettings, Workspace, + CloseWindow, ItemSettings, Workspace, WorkspaceSettings, item::{ClosePosition, ShowCloseButton}, }; @@ -53,6 +53,46 @@ impl SystemWindowTabs { } pub fn init(cx: &mut App) { + let mut was_use_system_window_tabs = + WorkspaceSettings::get_global(cx).use_system_window_tabs; + + cx.observe_global::(move |cx| { + let use_system_window_tabs = WorkspaceSettings::get_global(cx).use_system_window_tabs; + if use_system_window_tabs == was_use_system_window_tabs { + return; + } + was_use_system_window_tabs = use_system_window_tabs; + + let tabbing_identifier = if use_system_window_tabs { + Some(String::from("zed")) + } else { + None + }; + + if use_system_window_tabs { + SystemWindowTabController::init(cx); + } + + cx.windows().iter().for_each(|handle| { + let _ = handle.update(cx, |_, window, cx| { + window.set_tabbing_identifier(tabbing_identifier.clone()); + if use_system_window_tabs { + let tabs = if let Some(tabs) = window.tabbed_windows() { + tabs + } else { + vec![SystemWindowTab::new( + SharedString::from(window.window_title()), + window.window_handle(), + )] + }; + + SystemWindowTabController::add_tab(cx, handle.window_id(), tabs); + } + }); + }); + }) + .detach(); + cx.observe_new(|workspace: &mut Workspace, _, _| { 
workspace.register_action_renderer(|div, _, window, cx| { let window_id = window.window_handle().window_id(); @@ -336,6 +376,7 @@ impl SystemWindowTabs { impl Render for SystemWindowTabs { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let use_system_window_tabs = WorkspaceSettings::get_global(cx).use_system_window_tabs; let active_background_color = cx.theme().colors().title_bar_background; let inactive_background_color = cx.theme().colors().tab_bar_background; let entity = cx.entity(); @@ -368,7 +409,9 @@ impl Render for SystemWindowTabs { .collect::>(); let number_of_tabs = tab_items.len().max(1); - if !window.tab_bar_visible() && !visible { + if (!window.tab_bar_visible() && !visible) + || (!use_system_window_tabs && number_of_tabs == 1) + { return h_flex().into_any_element(); } diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 79cf2bfa66fb217680dea86720eb46402f116958..d1d221fb37ddf4d76804f326f3d60ae7a09cdcbc 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -955,7 +955,7 @@ async fn installation_id() -> Result { async fn restore_or_create_workspace(app_state: Arc, cx: &mut AsyncApp) -> Result<()> { if let Some(locations) = restorable_workspace_locations(cx, &app_state).await { let use_system_window_tabs = cx - .update(|cx| WorkspaceSettings::get(None, cx).use_system_window_tabs) + .update(|cx| WorkspaceSettings::get_global(cx).use_system_window_tabs) .unwrap_or(false); let mut results: Vec> = Vec::new(); let mut tasks = Vec::new(); From 62083fe7963dd5bed4579bb12abac1b7800cdbaa Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 1 Sep 2025 09:49:52 +0200 Subject: [PATCH 49/54] gpui: Do not render ligatures between different styled text runs (#37175) Currently when we render text with differing styles adjacently we might form a ligature between the text, causing the ligature forming characters to take on one of the two styles. 
This can especially become confusing when a ligature is formed between actual text and inlay hints. Annoyingly, the only ways to prevent this with core text is to either render each run separately, or to insert a zero-width non-joiner to force core text to break the ligatures apart, as it otherwise will merge subsequent font runs of the same fonts. We currently do layouting on a per line basis and it is unlikely we want to change that as it would incur a lot of complexity and annoyances to merge things back into a line, so this goes with the other approach of inserting ZWNJ characters instead. Note that neither linux nor windows seem to currently render ligatures, so this only concerns macOS rendering at the moment. Release Notes: - Fixed ligatures forming between real text and inlay hints on macOS --- crates/gpui/src/platform/mac/text_system.rs | 170 ++++++++++++++++---- crates/gpui/src/text_system.rs | 102 +++++++----- crates/gpui/src/text_system/line_layout.rs | 15 +- 3 files changed, 205 insertions(+), 82 deletions(-) diff --git a/crates/gpui/src/platform/mac/text_system.rs b/crates/gpui/src/platform/mac/text_system.rs index 72a0f2e565d9937e3aaf4082b663c3e2ae6ac91d..ba7017b58f76f028a1c5c80959e9359bc379c0cb 100644 --- a/crates/gpui/src/platform/mac/text_system.rs +++ b/crates/gpui/src/platform/mac/text_system.rs @@ -43,7 +43,7 @@ use pathfinder_geometry::{ vector::{Vector2F, Vector2I}, }; use smallvec::SmallVec; -use std::{borrow::Cow, char, cmp, convert::TryFrom, sync::Arc}; +use std::{borrow::Cow, char, convert::TryFrom, sync::Arc}; use super::open_type::apply_features_and_fallbacks; @@ -67,6 +67,7 @@ struct MacTextSystemState { font_ids_by_postscript_name: HashMap, font_ids_by_font_key: HashMap>, postscript_names_by_font_id: HashMap, + zwnjs_scratch_space: Vec<(usize, usize)>, } impl MacTextSystem { @@ -79,6 +80,7 @@ impl MacTextSystem { font_ids_by_postscript_name: HashMap::default(), font_ids_by_font_key: HashMap::default(), postscript_names_by_font_id: 
HashMap::default(), + zwnjs_scratch_space: Vec::new(), })) } } @@ -424,29 +426,41 @@ impl MacTextSystemState { } fn layout_line(&mut self, text: &str, font_size: Pixels, font_runs: &[FontRun]) -> LineLayout { + const ZWNJ: char = '\u{200C}'; + const ZWNJ_STR: &str = "\u{200C}"; + const ZWNJ_SIZE_16: usize = ZWNJ.len_utf16(); + + self.zwnjs_scratch_space.clear(); // Construct the attributed string, converting UTF8 ranges to UTF16 ranges. let mut string = CFMutableAttributedString::new(); - { - string.replace_str(&CFString::new(text), CFRange::init(0, 0)); - let utf16_line_len = string.char_len() as usize; - let mut ix_converter = StringIndexConverter::new(text); + { + let mut ix_converter = StringIndexConverter::new(&text); + let mut last_font_run = None; for run in font_runs { - let utf8_end = ix_converter.utf8_ix + run.len; - let utf16_start = ix_converter.utf16_ix; - - if utf16_start >= utf16_line_len { - break; + let text = &text[ix_converter.utf8_ix..][..run.len]; + // if the fonts are the same, we need to disconnect the text with a ZWNJ + // to prevent core text from forming ligatures between them + let needs_zwnj = last_font_run.replace(run.font_id) == Some(run.font_id); + + let n_zwnjs = self.zwnjs_scratch_space.len(); + let utf16_start = ix_converter.utf16_ix + n_zwnjs * ZWNJ_SIZE_16; + ix_converter.advance_to_utf8_ix(ix_converter.utf8_ix + run.len); + + string.replace_str(&CFString::new(text), CFRange::init(utf16_start as isize, 0)); + if needs_zwnj { + let zwnjs_pos = string.char_len(); + self.zwnjs_scratch_space.push((n_zwnjs, zwnjs_pos as usize)); + string.replace_str( + &CFString::from_static_string(ZWNJ_STR), + CFRange::init(zwnjs_pos, 0), + ); } - - ix_converter.advance_to_utf8_ix(utf8_end); - let utf16_end = cmp::min(ix_converter.utf16_ix, utf16_line_len); + let utf16_end = string.char_len() as usize; let cf_range = CFRange::init(utf16_start as isize, (utf16_end - utf16_start) as isize); - - let font: &FontKitFont = &self.fonts[run.font_id.0]; - + 
let font = &self.fonts[run.font_id.0]; unsafe { string.set_attribute( cf_range, @@ -454,17 +468,12 @@ impl MacTextSystemState { &font.native_font().clone_with_font_size(font_size.into()), ); } - - if utf16_end == utf16_line_len { - break; - } } } - // Retrieve the glyphs from the shaped line, converting UTF16 offsets to UTF8 offsets. let line = CTLine::new_with_attributed_string(string.as_concrete_TypeRef()); let glyph_runs = line.glyph_runs(); - let mut runs = Vec::with_capacity(glyph_runs.len() as usize); + let mut runs = >::with_capacity(glyph_runs.len() as usize); let mut ix_converter = StringIndexConverter::new(text); for run in glyph_runs.into_iter() { let attributes = run.attributes().unwrap(); @@ -476,28 +485,44 @@ impl MacTextSystemState { }; let font_id = self.id_for_native_font(font); - let mut glyphs = Vec::with_capacity(run.glyph_count().try_into().unwrap_or(0)); - for ((glyph_id, position), glyph_utf16_ix) in run + let mut glyphs = match runs.last_mut() { + Some(run) if run.font_id == font_id => &mut run.glyphs, + _ => { + runs.push(ShapedRun { + font_id, + glyphs: Vec::with_capacity(run.glyph_count().try_into().unwrap_or(0)), + }); + &mut runs.last_mut().unwrap().glyphs + } + }; + for ((&glyph_id, position), &glyph_utf16_ix) in run .glyphs() .iter() .zip(run.positions().iter()) .zip(run.string_indices().iter()) { - let glyph_utf16_ix = usize::try_from(*glyph_utf16_ix).unwrap(); + let mut glyph_utf16_ix = usize::try_from(glyph_utf16_ix).unwrap(); + let r = self + .zwnjs_scratch_space + .binary_search_by(|&(_, it)| it.cmp(&glyph_utf16_ix)); + match r { + // this glyph is a ZWNJ, skip it + Ok(_) => continue, + // adjust the index to account for the ZWNJs we've inserted + Err(idx) => glyph_utf16_ix -= idx * ZWNJ_SIZE_16, + } if ix_converter.utf16_ix > glyph_utf16_ix { // We cannot reuse current index converter, as it can only seek forward. Restart the search. 
ix_converter = StringIndexConverter::new(text); } ix_converter.advance_to_utf16_ix(glyph_utf16_ix); glyphs.push(ShapedGlyph { - id: GlyphId(*glyph_id as u32), + id: GlyphId(glyph_id as u32), position: point(position.x as f32, position.y as f32).map(px), index: ix_converter.utf8_ix, is_emoji: self.is_emoji(font_id), }); } - - runs.push(ShapedRun { font_id, glyphs }); } let typographic_bounds = line.get_typographic_bounds(); LineLayout { @@ -696,4 +721,93 @@ mod tests { // There's no glyph for \u{feff} assert_eq!(layout.runs[0].glyphs[1].id, GlyphId(69u32)); // b } + + #[test] + fn test_layout_line_zwnj_insertion() { + let fonts = MacTextSystem::new(); + let font_id = fonts.font_id(&font("Helvetica")).unwrap(); + + let text = "hello world"; + let font_runs = &[ + FontRun { font_id, len: 5 }, // "hello" + FontRun { font_id, len: 6 }, // " world" + ]; + + let layout = fonts.layout_line(text, px(16.), font_runs); + assert_eq!(layout.len, text.len()); + + for run in &layout.runs { + for glyph in &run.glyphs { + assert!( + glyph.index < text.len(), + "Glyph index {} is out of bounds for text length {}", + glyph.index, + text.len() + ); + } + } + + // Test with different font runs - should not insert ZWNJ + let font_id2 = fonts.font_id(&font("Times")).unwrap_or(font_id); + let font_runs_different = &[ + FontRun { font_id, len: 5 }, // "hello" + // " world" + FontRun { + font_id: font_id2, + len: 6, + }, + ]; + + let layout2 = fonts.layout_line(text, px(16.), font_runs_different); + assert_eq!(layout2.len, text.len()); + + for run in &layout2.runs { + for glyph in &run.glyphs { + assert!( + glyph.index < text.len(), + "Glyph index {} is out of bounds for text length {}", + glyph.index, + text.len() + ); + } + } + } + + #[test] + fn test_layout_line_zwnj_edge_cases() { + let fonts = MacTextSystem::new(); + let font_id = fonts.font_id(&font("Helvetica")).unwrap(); + + let text = "hello"; + let font_runs = &[FontRun { font_id, len: 5 }]; + let layout = fonts.layout_line(text, 
px(16.), font_runs); + assert_eq!(layout.len, text.len()); + + let text = "abc"; + let font_runs = &[ + FontRun { font_id, len: 1 }, // "a" + FontRun { font_id, len: 1 }, // "b" + FontRun { font_id, len: 1 }, // "c" + ]; + let layout = fonts.layout_line(text, px(16.), font_runs); + assert_eq!(layout.len, text.len()); + + for run in &layout.runs { + for glyph in &run.glyphs { + assert!( + glyph.index < text.len(), + "Glyph index {} is out of bounds for text length {}", + glyph.index, + text.len() + ); + } + } + + // Test with empty text + let text = ""; + let font_runs = &[]; + let layout = fonts.layout_line(text, px(16.), font_runs); + assert_eq!(layout.len, 0); + assert!(layout.runs.is_empty()); + } } diff --git a/crates/gpui/src/text_system.rs b/crates/gpui/src/text_system.rs index 53991089da94c58d0035bff0d607ad3ab57a69bd..be34b9e2aac055bd9f17c2f69b3c72d24e392593 100644 --- a/crates/gpui/src/text_system.rs +++ b/crates/gpui/src/text_system.rs @@ -413,9 +413,10 @@ impl WindowTextSystem { let mut wrapped_lines = 0; let mut process_line = |line_text: SharedString| { + font_runs.clear(); let line_end = line_start + line_text.len(); - let mut last_font: Option = None; + let mut last_font: Option = None; let mut decoration_runs = SmallVec::<[DecorationRun; 32]>::new(); let mut run_start = line_start; while run_start < line_end { @@ -425,23 +426,14 @@ impl WindowTextSystem { let run_len_within_line = cmp::min(line_end, run_start + run.len) - run_start; - if last_font == Some(run.font.clone()) { - font_runs.last_mut().unwrap().len += run_len_within_line; - } else { - last_font = Some(run.font.clone()); - font_runs.push(FontRun { - len: run_len_within_line, - font_id: self.resolve_font(&run.font), - }); - } - - if decoration_runs.last().is_some_and(|last_run| { - last_run.color == run.color - && last_run.underline == run.underline - && last_run.strikethrough == run.strikethrough - && last_run.background_color == run.background_color - }) { - 
decoration_runs.last_mut().unwrap().len += run_len_within_line as u32; + let decoration_changed = if let Some(last_run) = decoration_runs.last_mut() + && last_run.color == run.color + && last_run.underline == run.underline + && last_run.strikethrough == run.strikethrough + && last_run.background_color == run.background_color + { + last_run.len += run_len_within_line as u32; + false } else { decoration_runs.push(DecorationRun { len: run_len_within_line as u32, @@ -450,6 +442,21 @@ impl WindowTextSystem { underline: run.underline, strikethrough: run.strikethrough, }); + true + }; + + if let Some(font_run) = font_runs.last_mut() + && Some(font_run.font_id) == last_font + && !decoration_changed + { + font_run.len += run_len_within_line; + } else { + let font_id = self.resolve_font(&run.font); + last_font = Some(font_id); + font_runs.push(FontRun { + len: run_len_within_line, + font_id, + }); } if run_len_within_line == run.len { @@ -484,8 +491,6 @@ impl WindowTextSystem { runs.next(); } } - - font_runs.clear(); }; let mut split_lines = text.split('\n'); @@ -519,37 +524,54 @@ impl WindowTextSystem { /// Subsets of the line can be styled independently with the `runs` parameter. /// Generally, you should prefer to use `TextLayout::shape_line` instead, which /// can be painted directly. 
- pub fn layout_line( + pub fn layout_line( &self, - text: Text, + text: &str, font_size: Pixels, runs: &[TextRun], force_width: Option, - ) -> Arc - where - Text: AsRef, - SharedString: From, - { + ) -> Arc { + let mut last_run = None::<&TextRun>; + let mut last_font: Option = None; let mut font_runs = self.font_runs_pool.lock().pop().unwrap_or_default(); + font_runs.clear(); + for run in runs.iter() { - let font_id = self.resolve_font(&run.font); - if let Some(last_run) = font_runs.last_mut() - && last_run.font_id == font_id + let decoration_changed = if let Some(last_run) = last_run + && last_run.color == run.color + && last_run.underline == run.underline + && last_run.strikethrough == run.strikethrough + // we do not consider differing background color relevant, as it does not affect glyphs + // && last_run.background_color == run.background_color { - last_run.len += run.len; - continue; + false + } else { + last_run = Some(run); + true + }; + + if let Some(font_run) = font_runs.last_mut() + && Some(font_run.font_id) == last_font + && !decoration_changed + { + font_run.len += run.len; + } else { + let font_id = self.resolve_font(&run.font); + last_font = Some(font_id); + font_runs.push(FontRun { + len: run.len, + font_id, + }); } - font_runs.push(FontRun { - len: run.len, - font_id, - }); } - let layout = - self.line_layout_cache - .layout_line_internal(text, font_size, &font_runs, force_width); + let layout = self.line_layout_cache.layout_line( + &SharedString::new(text), + font_size, + &font_runs, + force_width, + ); - font_runs.clear(); self.font_runs_pool.lock().push(font_runs); layout diff --git a/crates/gpui/src/text_system/line_layout.rs b/crates/gpui/src/text_system/line_layout.rs index 43694702a82566b8f84199dcfc4ff996da93588e..4ac1d258970802ed1c4fe86bd98f2971b78fbc04 100644 --- a/crates/gpui/src/text_system/line_layout.rs +++ b/crates/gpui/src/text_system/line_layout.rs @@ -501,7 +501,7 @@ impl LineLayoutCache { } else { drop(current_frame); let text = 
SharedString::from(text); - let unwrapped_layout = self.layout_line::<&SharedString>(&text, font_size, runs); + let unwrapped_layout = self.layout_line::<&SharedString>(&text, font_size, runs, None); let wrap_boundaries = if let Some(wrap_width) = wrap_width { unwrapped_layout.compute_wrap_boundaries(text.as_ref(), wrap_width, max_lines) } else { @@ -535,19 +535,6 @@ impl LineLayoutCache { text: Text, font_size: Pixels, runs: &[FontRun], - ) -> Arc - where - Text: AsRef, - SharedString: From, - { - self.layout_line_internal(text, font_size, runs, None) - } - - pub fn layout_line_internal( - &self, - text: Text, - font_size: Pixels, - runs: &[FontRun], force_width: Option, ) -> Arc where From 3315fd94d27fde9f4d327ce6a8c4a009fd605505 Mon Sep 17 00:00:00 2001 From: Ivan Trubach Date: Mon, 1 Sep 2025 11:21:55 +0300 Subject: [PATCH 50/54] editor: Add an option to disable rounded corners for text selection (#36987) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #19891 Similar to VSCode’s `editor.roundedSelection` option. #### Before/after
| | Enabled (default) | Disabled |
| --- | --- | --- |
| Editor-based UIs | image image | image image |
| Terminal | image | image |
Release Notes: - Added setting `rounded_selection` to disable rounded corners for text selection. --- assets/settings/default.json | 2 ++ crates/editor/src/editor_settings.rs | 6 ++++++ crates/editor/src/element.rs | 9 +++++++-- crates/terminal_view/src/terminal_element.rs | 9 +++++++-- docs/src/configuring-zed.md | 6 ++++++ 5 files changed, 28 insertions(+), 4 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index b15eb6e5ce8de85bb088108f065a31494b9087a1..2aec3aa7b9d56b3a04d2c8d1f80bb0d37c91b8cc 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -223,6 +223,8 @@ "current_line_highlight": "all", // Whether to highlight all occurrences of the selected text in an editor. "selection_highlight": true, + // Whether the text selection should have rounded corners. + "rounded_selection": true, // The debounce delay before querying highlights from the language // server based on the current cursor location. "lsp_highlight_debounce": 75, diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index c2baa9de024b1988f9acb77a529936f947103f56..084c4eb5c618cbf3d290b317b0035f1b8f307b3f 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -17,6 +17,7 @@ pub struct EditorSettings { pub cursor_shape: Option, pub current_line_highlight: CurrentLineHighlight, pub selection_highlight: bool, + pub rounded_selection: bool, pub lsp_highlight_debounce: u64, pub hover_popover_enabled: bool, pub hover_popover_delay: u64, @@ -441,6 +442,10 @@ pub struct EditorSettingsContent { /// /// Default: true pub selection_highlight: Option, + /// Whether the text selection should have rounded corners. + /// + /// Default: true + pub rounded_selection: Option, /// The debounce delay before querying highlights from the language /// server based on the current cursor location. 
/// @@ -794,6 +799,7 @@ impl Settings for EditorSettings { "editor.selectionHighlight", &mut current.selection_highlight, ); + vscode.bool_setting("editor.roundedSelection", &mut current.rounded_selection); vscode.bool_setting("editor.hover.enabled", &mut current.hover_popover_enabled); vscode.u64_setting("editor.hover.delay", &mut current.hover_popover_delay); diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index ca6eac080e6121880eae63b4dc60ca6d32c6da5d..f384afa1ae988d8d224f9ec3de70932543519571 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -6063,7 +6063,7 @@ impl EditorElement { }; self.paint_lines_background(layout, window, cx); - let invisible_display_ranges = self.paint_highlights(layout, window); + let invisible_display_ranges = self.paint_highlights(layout, window, cx); self.paint_document_colors(layout, window); self.paint_lines(&invisible_display_ranges, layout, window, cx); self.paint_redactions(layout, window); @@ -6085,6 +6085,7 @@ impl EditorElement { &mut self, layout: &mut EditorLayout, window: &mut Window, + cx: &mut App, ) -> SmallVec<[Range; 32]> { window.paint_layer(layout.position_map.text_hitbox.bounds, |window| { let mut invisible_display_ranges = SmallVec::<[Range; 32]>::new(); @@ -6101,7 +6102,11 @@ impl EditorElement { ); } - let corner_radius = 0.15 * layout.position_map.line_height; + let corner_radius = if EditorSettings::get_global(cx).rounded_selection { + 0.15 * layout.position_map.line_height + } else { + Pixels::ZERO + }; for (player_color, selections) in &layout.selections { for selection in selections.iter() { diff --git a/crates/terminal_view/src/terminal_element.rs b/crates/terminal_view/src/terminal_element.rs index 56715b604eeffe0b42302adcdf0d6fdd93919879..5bbf5ad36b3de89514d92ce9e305988817cec32f 100644 --- a/crates/terminal_view/src/terminal_element.rs +++ b/crates/terminal_view/src/terminal_element.rs @@ -1,4 +1,4 @@ -use editor::{CursorLayout, HighlightedRange, 
HighlightedRangeLine}; +use editor::{CursorLayout, EditorSettings, HighlightedRange, HighlightedRangeLine}; use gpui::{ AbsoluteLength, AnyElement, App, AvailableSpace, Bounds, ContentMask, Context, DispatchPhase, Element, ElementId, Entity, FocusHandle, Font, FontFeatures, FontStyle, FontWeight, @@ -1257,12 +1257,17 @@ impl Element for TerminalElement { if let Some((start_y, highlighted_range_lines)) = to_highlighted_range_lines(relative_highlighted_range, layout, origin) { + let corner_radius = if EditorSettings::get_global(cx).rounded_selection { + 0.15 * layout.dimensions.line_height + } else { + Pixels::ZERO + }; let hr = HighlightedRange { start_y, line_height: layout.dimensions.line_height, lines: highlighted_range_lines, color: *color, - corner_radius: 0.15 * layout.dimensions.line_height, + corner_radius: corner_radius, }; hr.paint(true, bounds, window); } diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 2b1d801f8010c8ad00f1295c38803bd80df1c282..e245b3ca2facecb097b315f28d98ef2ea5a20048 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -685,6 +685,12 @@ List of `string` values - Setting: `selection_highlight` - Default: `true` +## Rounded Selection + +- Description: Whether the text selection should have rounded corners. +- Setting: `rounded_selection` +- Default: `true` + ## Cursor Blink - Description: Whether or not the cursor blinks. 
From acff65ed3f70d9d48b1ad189e68b5ce136b0967b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BC=A0=E5=B0=8F=E7=99=BD?= <364772080@qq.com> Date: Mon, 1 Sep 2025 16:33:59 +0800 Subject: [PATCH 51/54] windows: Update documents about WSL (#37292) Release Notes: - N/A --- crates/cli/src/main.rs | 6 ++++-- crates/zed/src/main.rs | 6 ++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs index 151e96e3cf68ab94295a8386d2842539e6a986a2..d67843b4c93eb64b01fbdd6e26955d96a0c50e70 100644 --- a/crates/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -84,13 +84,15 @@ struct Args { /// Run zed in dev-server mode #[arg(long)] dev_server_token: Option, - /// The username and WSL distribution to use when opening paths. ,If not specified, + /// The username and WSL distribution to use when opening paths. If not specified, /// Zed will attempt to open the paths directly. /// /// The username is optional, and if not specified, the default user for the distribution /// will be used. /// - /// Example: `me@Ubuntu` or `Ubuntu` for default distribution. + /// Example: `me@Ubuntu` or `Ubuntu`. + /// + /// WARN: You should not fill in this field by hand. #[arg(long, value_name = "USER@DISTRO")] wsl: Option, /// Not supported in Zed CLI, only supported on Zed binary diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index d1d221fb37ddf4d76804f326f3d60ae7a09cdcbc..3a7baa1559d68cbce8cfbf96b0bf4384aa1f7e0b 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -1183,13 +1183,15 @@ struct Args { #[arg(long, value_name = "DIR")] user_data_dir: Option, - /// The username and WSL distribution to use when opening paths. ,If not specified, + /// The username and WSL distribution to use when opening paths. If not specified, /// Zed will attempt to open the paths directly. /// /// The username is optional, and if not specified, the default user for the distribution /// will be used. 
/// - /// Example: `me@Ubuntu` or `Ubuntu` for default distribution. + /// Example: `me@Ubuntu` or `Ubuntu`. + /// + /// WARN: You should not fill in this field by hand. #[arg(long, value_name = "USER@DISTRO")] wsl: Option, From 2790eb604a1de04107f2412dadc06d4f75415380 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=9B=B7=E7=94=B5=E6=A2=85?= <1554694323@qq.com> Date: Mon, 1 Sep 2025 16:49:09 +0800 Subject: [PATCH 52/54] deepseek: Fix API URL (#33905) Closes #33904 Release Notes: - Add support for custom API Urls for DeepSeek Provider --------- Co-authored-by: Peter Tripp --- assets/settings/default.json | 2 +- crates/deepseek/src/deepseek.rs | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 2aec3aa7b9d56b3a04d2c8d1f80bb0d37c91b8cc..623a4612d06975ca4681d75a775d061e594608b2 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -1776,7 +1776,7 @@ "api_url": "http://localhost:1234/api/v0" }, "deepseek": { - "api_url": "https://api.deepseek.com" + "api_url": "https://api.deepseek.com/v1" }, "mistral": { "api_url": "https://api.mistral.ai/v1" diff --git a/crates/deepseek/src/deepseek.rs b/crates/deepseek/src/deepseek.rs index c2554c67e93b4c1d3772e60a62063fdae0511f05..e09a9e0f7a19642253245b381abdc9fa05d0af00 100644 --- a/crates/deepseek/src/deepseek.rs +++ b/crates/deepseek/src/deepseek.rs @@ -9,7 +9,7 @@ use serde::{Deserialize, Serialize}; use serde_json::Value; use std::convert::TryFrom; -pub const DEEPSEEK_API_URL: &str = "https://api.deepseek.com"; +pub const DEEPSEEK_API_URL: &str = "https://api.deepseek.com/v1"; #[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)] #[serde(rename_all = "lowercase")] @@ -263,7 +263,7 @@ pub async fn stream_completion( api_key: &str, request: Request, ) -> Result>> { - let uri = format!("{api_url}/v1/chat/completions"); + let uri = format!("{api_url}/chat/completions"); let request_builder = 
HttpRequest::builder() .method(Method::POST) .uri(uri) From 61175ab9cdbe84feb647bddde84ee4766d627d47 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BC=A0=E5=B0=8F=E7=99=BD?= <364772080@qq.com> Date: Mon, 1 Sep 2025 23:26:25 +0800 Subject: [PATCH 53/54] =?UTF-8?q?windows:=20Don=E2=80=99t=20skip=20the=20t?= =?UTF-8?q?ypo=20check=20for=20the=20windows=20folder=20(#37314)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Try to narrow down the scope of typo checking Release Notes: - N/A --- crates/gpui/src/platform/windows/directx_renderer.rs | 2 +- crates/gpui/src/platform/windows/events.rs | 2 +- crates/gpui/src/platform/windows/vsync.rs | 2 +- typos.toml | 5 ++++- 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/crates/gpui/src/platform/windows/directx_renderer.rs b/crates/gpui/src/platform/windows/directx_renderer.rs index f84a1c1b6d0d158684e4c6cad6edbf72105425e0..c496d29a0338ec4d758e436e85a3066163705db6 100644 --- a/crates/gpui/src/platform/windows/directx_renderer.rs +++ b/crates/gpui/src/platform/windows/directx_renderer.rs @@ -1760,7 +1760,7 @@ mod amd { anyhow::bail!("Failed to initialize AMD AGS, error code: {}", result); } - // Vulkan acctually returns this as the driver version + // Vulkan actually returns this as the driver version let software_version = if !gpu_info.radeon_software_version.is_null() { std::ffi::CStr::from_ptr(gpu_info.radeon_software_version) .to_string_lossy() diff --git a/crates/gpui/src/platform/windows/events.rs b/crates/gpui/src/platform/windows/events.rs index 4def6a11a5f16f235b1d7018ecbbdec5565ab951..f4e3e5c3029936ce6bf9c10096fe0546376ff43c 100644 --- a/crates/gpui/src/platform/windows/events.rs +++ b/crates/gpui/src/platform/windows/events.rs @@ -708,7 +708,7 @@ impl WindowsWindowInner { .system_settings .auto_hide_taskbar_position { - // Fot the auto-hide taskbar, adjust in by 1 pixel on taskbar edge, + // For the auto-hide taskbar, adjust in by 1 pixel on taskbar edge, // so 
the window isn't treated as a "fullscreen app", which would cause // the taskbar to disappear. match taskbar_position { diff --git a/crates/gpui/src/platform/windows/vsync.rs b/crates/gpui/src/platform/windows/vsync.rs index 6d09b0960f11cefce007413066620b3b332e1ae9..5cbcb8e99e2741c4b37cad4d550e290c4cab869f 100644 --- a/crates/gpui/src/platform/windows/vsync.rs +++ b/crates/gpui/src/platform/windows/vsync.rs @@ -94,7 +94,7 @@ impl VSyncProvider { // DwmFlush and DCompositionWaitForCompositorClock returns very early // instead of waiting until vblank when the monitor goes to sleep or is // unplugged (nothing to present due to desktop occlusion). We use 1ms as - // a threshhold for the duration of the wait functions and fallback to + // a threshold for the duration of the wait functions and fallback to // Sleep() if it returns before that. This could happen during normal // operation for the first call after the vsync thread becomes non-idle, // but it shouldn't happen often. diff --git a/typos.toml b/typos.toml index e5f02b64159faddd165d6d4571b929c82ad5bed0..ab33d9ccb44701d6652a7916b01c51d59e82a23b 100644 --- a/typos.toml +++ b/typos.toml @@ -36,7 +36,10 @@ extend-exclude = [ # glsl isn't recognized by this tool. "extensions/glsl/languages/glsl/", # Windows likes its abbreviations. - "crates/gpui/src/platform/windows/", + "crates/gpui/src/platform/windows/directx_renderer.rs", + "crates/gpui/src/platform/windows/events.rs", + "crates/gpui/src/platform/windows/direct_write.rs", + "crates/gpui/src/platform/windows/window.rs", # Some typos in the base mdBook CSS. "docs/theme/css/", # Spellcheck triggers on `|Fixe[sd]|` regex part. 
From d910feac1dfef3b9b3228f25acc7bc7e0193d9ad Mon Sep 17 00:00:00 2001 From: localcc Date: Mon, 1 Sep 2025 20:07:45 +0200 Subject: [PATCH 54/54] Implement perceptual gamma / contrast correction (#37167) Closes #36023 This improves font rendering quality by doing perceptual gamma+contrast correction which makes font edges look nicer and more legible. A comparison image: (left is old, right is new) Screenshot 2025-08-29 140015 This is most noticeable on smaller fonts / low-dpi displays Release Notes: - Improved font rendering quality --- Cargo.toml | 1 + .../platform/windows/alpha_correction.hlsl | 28 ++++ .../platform/windows/color_text_raster.hlsl | 14 +- .../gpui/src/platform/windows/direct_write.rs | 141 +++++------------- .../src/platform/windows/directx_renderer.rs | 76 +++++++++- crates/gpui/src/platform/windows/platform.rs | 15 +- crates/gpui/src/platform/windows/shaders.hlsl | 9 +- 7 files changed, 157 insertions(+), 127 deletions(-) create mode 100644 crates/gpui/src/platform/windows/alpha_correction.hlsl diff --git a/Cargo.toml b/Cargo.toml index 48017d9c6b4858fb7e5415b92bd993e534d1fabb..b20b37edb9ea08f49c757cc2d8764ce62494d688 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -696,6 +696,7 @@ features = [ "Win32_Graphics_Dxgi_Common", "Win32_Graphics_Gdi", "Win32_Graphics_Imaging", + "Win32_Graphics_Hlsl", "Win32_Networking_WinSock", "Win32_Security", "Win32_Security_Credentials", diff --git a/crates/gpui/src/platform/windows/alpha_correction.hlsl b/crates/gpui/src/platform/windows/alpha_correction.hlsl new file mode 100644 index 0000000000000000000000000000000000000000..7844a15f48bb27a137b913c94cba3fdc6d1fada9 --- /dev/null +++ b/crates/gpui/src/platform/windows/alpha_correction.hlsl @@ -0,0 +1,28 @@ +float color_brightness(float3 color) { + // REC. 
601 luminance coefficients for perceived brightness + return dot(color, float3(0.30f, 0.59f, 0.11f)); +} + +float light_on_dark_contrast(float enhancedContrast, float3 color) { + float brightness = color_brightness(color); + float multiplier = saturate(4.0f * (0.75f - brightness)); + return enhancedContrast * multiplier; +} + +float enhance_contrast(float alpha, float k) { + return alpha * (k + 1.0f) / (alpha * k + 1.0f); +} + +float apply_alpha_correction(float a, float b, float4 g) { + float brightness_adjustment = g.x * b + g.y; + float correction = brightness_adjustment * a + (g.z * b + g.w); + return a + a * (1.0f - a) * correction; +} + +float apply_contrast_and_gamma_correction(float sample, float3 color, float enhanced_contrast_factor, float4 gamma_ratios) { + float enhanced_contrast = light_on_dark_contrast(enhanced_contrast_factor, color); + float brightness = color_brightness(color); + + float contrasted = enhance_contrast(sample, enhanced_contrast); + return apply_alpha_correction(contrasted, brightness, gamma_ratios); +} diff --git a/crates/gpui/src/platform/windows/color_text_raster.hlsl b/crates/gpui/src/platform/windows/color_text_raster.hlsl index ccc5fa26f00d57f2b69e85965a66b6ecea98a833..322c743a993f11e2324b6fdb45c019919329f612 100644 --- a/crates/gpui/src/platform/windows/color_text_raster.hlsl +++ b/crates/gpui/src/platform/windows/color_text_raster.hlsl @@ -1,3 +1,5 @@ +#include "alpha_correction.hlsl" + struct RasterVertexOutput { float4 position : SV_Position; float2 texcoord : TEXCOORD0; @@ -23,17 +25,19 @@ struct Bounds { int2 size; }; -Texture2D t_layer : register(t0); +Texture2D t_layer : register(t0); SamplerState s_layer : register(s0); cbuffer GlyphLayerTextureParams : register(b0) { Bounds bounds; float4 run_color; + float4 gamma_ratios; + float grayscale_enhanced_contrast; + float3 _pad; }; float4 emoji_rasterization_fragment(PixelInput input): SV_Target { - float3 sampled = t_layer.Sample(s_layer, input.texcoord.xy).rgb; - float 
alpha = (sampled.r + sampled.g + sampled.b) / 3; - - return float4(run_color.rgb, alpha); + float sample = t_layer.Sample(s_layer, input.texcoord.xy).r; + float alpha_corrected = apply_contrast_and_gamma_correction(sample, run_color.rgb, grayscale_enhanced_contrast, gamma_ratios); + return float4(run_color.rgb, alpha_corrected * run_color.a); } diff --git a/crates/gpui/src/platform/windows/direct_write.rs b/crates/gpui/src/platform/windows/direct_write.rs index a86a1fab62a404c4f49e785491bb2925a6f3cf61..e81b87c733bf277b8f534a3fda8d6db55ce34e36 100644 --- a/crates/gpui/src/platform/windows/direct_write.rs +++ b/crates/gpui/src/platform/windows/direct_write.rs @@ -10,12 +10,8 @@ use windows::{ Foundation::*, Globalization::GetUserDefaultLocaleName, Graphics::{ - Direct3D::D3D_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP, - Direct3D11::*, - DirectWrite::*, - Dxgi::Common::*, - Gdi::{IsRectEmpty, LOGFONTW}, - Imaging::*, + Direct3D::D3D_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP, Direct3D11::*, DirectWrite::*, + Dxgi::Common::*, Gdi::LOGFONTW, }, System::SystemServices::LOCALE_NAME_MAX_LENGTH, UI::WindowsAndMessaging::*, @@ -40,12 +36,10 @@ pub(crate) struct DirectWriteTextSystem(RwLock); struct DirectWriteComponent { locale: String, factory: IDWriteFactory5, - bitmap_factory: AgileReference, in_memory_loader: IDWriteInMemoryFontFileLoader, builder: IDWriteFontSetBuilder1, text_renderer: Arc, - render_params: IDWriteRenderingParams3, gpu_state: GPUState, } @@ -76,11 +70,10 @@ struct FontIdentifier { } impl DirectWriteComponent { - pub fn new(bitmap_factory: &IWICImagingFactory, gpu_context: &DirectXDevices) -> Result { + pub fn new(gpu_context: &DirectXDevices) -> Result { // todo: ideally this would not be a large unsafe block but smaller isolated ones for easier auditing unsafe { let factory: IDWriteFactory5 = DWriteCreateFactory(DWRITE_FACTORY_TYPE_SHARED)?; - let bitmap_factory = AgileReference::new(bitmap_factory)?; // The `IDWriteInMemoryFontFileLoader` here is supported starting from 
// Windows 10 Creators Update, which consequently requires the entire // `DirectWriteTextSystem` to run on `win10 1703`+. @@ -92,36 +85,14 @@ impl DirectWriteComponent { let locale = String::from_utf16_lossy(&locale_vec); let text_renderer = Arc::new(TextRendererWrapper::new(&locale)); - let render_params = { - let default_params: IDWriteRenderingParams3 = - factory.CreateRenderingParams()?.cast()?; - let gamma = default_params.GetGamma(); - let enhanced_contrast = default_params.GetEnhancedContrast(); - let gray_contrast = default_params.GetGrayscaleEnhancedContrast(); - let cleartype_level = default_params.GetClearTypeLevel(); - let grid_fit_mode = default_params.GetGridFitMode(); - - factory.CreateCustomRenderingParams( - gamma, - enhanced_contrast, - gray_contrast, - cleartype_level, - DWRITE_PIXEL_GEOMETRY_RGB, - DWRITE_RENDERING_MODE1_NATURAL_SYMMETRIC, - grid_fit_mode, - )? - }; - let gpu_state = GPUState::new(gpu_context)?; Ok(DirectWriteComponent { locale, factory, - bitmap_factory, in_memory_loader, builder, text_renderer, - render_params, gpu_state, }) } @@ -212,11 +183,8 @@ impl GPUState { } impl DirectWriteTextSystem { - pub(crate) fn new( - gpu_context: &DirectXDevices, - bitmap_factory: &IWICImagingFactory, - ) -> Result { - let components = DirectWriteComponent::new(bitmap_factory, gpu_context)?; + pub(crate) fn new(gpu_context: &DirectXDevices) -> Result { + let components = DirectWriteComponent::new(gpu_context)?; let system_font_collection = unsafe { let mut result = std::mem::zeroed(); components @@ -762,14 +730,14 @@ impl DirectWriteState { unsafe { font.font_face.GetRecommendedRenderingMode( params.font_size.0, - // The dpi here seems that it has the same effect with `Some(&transform)` - 1.0, - 1.0, + // Using 96 as scale is applied by the transform + 96.0, + 96.0, Some(&transform), false, DWRITE_OUTLINE_THRESHOLD_ANTIALIASED, DWRITE_MEASURING_MODE_NATURAL, - &self.components.render_params, + None, &mut rendering_mode, &mut grid_fit_mode, )?; 
@@ -782,8 +750,7 @@ impl DirectWriteState { rendering_mode, DWRITE_MEASURING_MODE_NATURAL, grid_fit_mode, - // We're using cleartype not grayscale for monochrome is because it provides better quality - DWRITE_TEXT_ANTIALIAS_MODE_CLEARTYPE, + DWRITE_TEXT_ANTIALIAS_MODE_GRAYSCALE, baseline_origin_x, baseline_origin_y, ) @@ -794,10 +761,14 @@ impl DirectWriteState { fn raster_bounds(&self, params: &RenderGlyphParams) -> Result> { let glyph_analysis = self.create_glyph_run_analysis(params)?; - let bounds = unsafe { glyph_analysis.GetAlphaTextureBounds(DWRITE_TEXTURE_CLEARTYPE_3x1)? }; - // Some glyphs cannot be drawn with ClearType, such as bitmap fonts. In that case - // GetAlphaTextureBounds() supposedly returns an empty RECT, but I haven't tested that yet. - if !unsafe { IsRectEmpty(&bounds) }.as_bool() { + let bounds = unsafe { glyph_analysis.GetAlphaTextureBounds(DWRITE_TEXTURE_ALIASED_1x1)? }; + + if bounds.right < bounds.left { + Ok(Bounds { + origin: point(0.into(), 0.into()), + size: size(0.into(), 0.into()), + }) + } else { Ok(Bounds { origin: point(bounds.left.into(), bounds.top.into()), size: size( @@ -805,25 +776,6 @@ impl DirectWriteState { (bounds.bottom - bounds.top).into(), ), }) - } else { - // If it's empty, retry with grayscale AA. - let bounds = - unsafe { glyph_analysis.GetAlphaTextureBounds(DWRITE_TEXTURE_ALIASED_1x1)? 
}; - - if bounds.right < bounds.left { - Ok(Bounds { - origin: point(0.into(), 0.into()), - size: size(0.into(), 0.into()), - }) - } else { - Ok(Bounds { - origin: point(bounds.left.into(), bounds.top.into()), - size: size( - (bounds.right - bounds.left).into(), - (bounds.bottom - bounds.top).into(), - ), - }) - } } } @@ -872,13 +824,12 @@ impl DirectWriteState { glyph_bounds: Bounds, ) -> Result> { let mut bitmap_data = - vec![0u8; glyph_bounds.size.width.0 as usize * glyph_bounds.size.height.0 as usize * 3]; + vec![0u8; glyph_bounds.size.width.0 as usize * glyph_bounds.size.height.0 as usize]; let glyph_analysis = self.create_glyph_run_analysis(params)?; unsafe { glyph_analysis.CreateAlphaTexture( - // We're using cleartype not grayscale for monochrome is because it provides better quality - DWRITE_TEXTURE_CLEARTYPE_3x1, + DWRITE_TEXTURE_ALIASED_1x1, &RECT { left: glyph_bounds.origin.x.0, top: glyph_bounds.origin.y.0, @@ -889,30 +840,6 @@ impl DirectWriteState { )?; } - let bitmap_factory = self.components.bitmap_factory.resolve()?; - let bitmap = unsafe { - bitmap_factory.CreateBitmapFromMemory( - glyph_bounds.size.width.0 as u32, - glyph_bounds.size.height.0 as u32, - &GUID_WICPixelFormat24bppRGB, - glyph_bounds.size.width.0 as u32 * 3, - &bitmap_data, - ) - }?; - - let grayscale_bitmap = - unsafe { WICConvertBitmapSource(&GUID_WICPixelFormat8bppGray, &bitmap) }?; - - let mut bitmap_data = - vec![0u8; glyph_bounds.size.width.0 as usize * glyph_bounds.size.height.0 as usize]; - unsafe { - grayscale_bitmap.CopyPixels( - std::ptr::null() as _, - glyph_bounds.size.width.0 as u32, - &mut bitmap_data, - ) - }?; - Ok(bitmap_data) } @@ -981,25 +908,24 @@ impl DirectWriteState { DWRITE_RENDERING_MODE1_NATURAL_SYMMETRIC, DWRITE_MEASURING_MODE_NATURAL, DWRITE_GRID_FIT_MODE_DEFAULT, - DWRITE_TEXT_ANTIALIAS_MODE_CLEARTYPE, + DWRITE_TEXT_ANTIALIAS_MODE_GRAYSCALE, baseline_origin_x, baseline_origin_y, ) }?; let color_bounds = - unsafe { 
color_analysis.GetAlphaTextureBounds(DWRITE_TEXTURE_CLEARTYPE_3x1) }?; + unsafe { color_analysis.GetAlphaTextureBounds(DWRITE_TEXTURE_ALIASED_1x1) }?; let color_size = size( color_bounds.right - color_bounds.left, color_bounds.bottom - color_bounds.top, ); if color_size.width > 0 && color_size.height > 0 { - let mut alpha_data = - vec![0u8; (color_size.width * color_size.height * 3) as usize]; + let mut alpha_data = vec![0u8; (color_size.width * color_size.height) as usize]; unsafe { color_analysis.CreateAlphaTexture( - DWRITE_TEXTURE_CLEARTYPE_3x1, + DWRITE_TEXTURE_ALIASED_1x1, &color_bounds, &mut alpha_data, ) @@ -1015,10 +941,6 @@ impl DirectWriteState { } }; let bounds = bounds(point(color_bounds.left, color_bounds.top), color_size); - let alpha_data = alpha_data - .chunks_exact(3) - .flat_map(|chunk| [chunk[0], chunk[1], chunk[2], 255]) - .collect::>(); glyph_layers.push(GlyphLayerTexture::new( &self.components.gpu_state, run_color, @@ -1135,10 +1057,18 @@ impl DirectWriteState { unsafe { device_context.PSSetSamplers(0, Some(&gpu_state.sampler)) }; unsafe { device_context.OMSetBlendState(&gpu_state.blend_state, None, 0xffffffff) }; + let crate::FontInfo { + gamma_ratios, + grayscale_enhanced_contrast, + } = DirectXRenderer::get_font_info(); + for layer in glyph_layers { let params = GlyphLayerTextureParams { run_color: layer.run_color, bounds: layer.bounds, + gamma_ratios: *gamma_ratios, + grayscale_enhanced_contrast: *grayscale_enhanced_contrast, + _pad: [0f32; 3], }; unsafe { let mut dest = std::mem::zeroed(); @@ -1298,7 +1228,7 @@ impl GlyphLayerTexture { Height: texture_size.height as u32, MipLevels: 1, ArraySize: 1, - Format: DXGI_FORMAT_R8G8B8A8_UNORM, + Format: DXGI_FORMAT_R8_UNORM, SampleDesc: DXGI_SAMPLE_DESC { Count: 1, Quality: 0, @@ -1334,7 +1264,7 @@ impl GlyphLayerTexture { 0, None, alpha_data.as_ptr() as _, - (texture_size.width * 4) as u32, + texture_size.width as u32, 0, ) }; @@ -1352,6 +1282,9 @@ impl GlyphLayerTexture { struct 
GlyphLayerTextureParams { bounds: Bounds, run_color: Rgba, + gamma_ratios: [f32; 4], + grayscale_enhanced_contrast: f32, + _pad: [f32; 3], } struct TextRendererWrapper(pub IDWriteTextRenderer); diff --git a/crates/gpui/src/platform/windows/directx_renderer.rs b/crates/gpui/src/platform/windows/directx_renderer.rs index c496d29a0338ec4d758e436e85a3066163705db6..0c092e22283d29ba1b522012a51f6cab77f51865 100644 --- a/crates/gpui/src/platform/windows/directx_renderer.rs +++ b/crates/gpui/src/platform/windows/directx_renderer.rs @@ -1,4 +1,7 @@ -use std::{mem::ManuallyDrop, sync::Arc}; +use std::{ + mem::ManuallyDrop, + sync::{Arc, OnceLock}, +}; use ::util::ResultExt; use anyhow::{Context, Result}; @@ -9,6 +12,7 @@ use windows::{ Direct3D::*, Direct3D11::*, DirectComposition::*, + DirectWrite::*, Dxgi::{Common::*, *}, }, }, @@ -27,6 +31,11 @@ const RENDER_TARGET_FORMAT: DXGI_FORMAT = DXGI_FORMAT_B8G8R8A8_UNORM; // This configuration is used for MSAA rendering on paths only, and it's guaranteed to be supported by DirectX 11. 
const PATH_MULTISAMPLE_COUNT: u32 = 4; +pub(crate) struct FontInfo { + pub gamma_ratios: [f32; 4], + pub grayscale_enhanced_contrast: f32, +} + pub(crate) struct DirectXRenderer { hwnd: HWND, atlas: Arc, @@ -35,6 +44,7 @@ pub(crate) struct DirectXRenderer { globals: DirectXGlobalElements, pipelines: DirectXRenderPipelines, direct_composition: Option, + font_info: &'static FontInfo, } /// Direct3D objects @@ -171,6 +181,7 @@ impl DirectXRenderer { globals, pipelines, direct_composition, + font_info: Self::get_font_info(), }) } @@ -183,10 +194,12 @@ impl DirectXRenderer { &self.devices.device_context, self.globals.global_params_buffer[0].as_ref().unwrap(), &[GlobalParams { + gamma_ratios: self.font_info.gamma_ratios, viewport_size: [ self.resources.viewport[0].Width, self.resources.viewport[0].Height, ], + grayscale_enhanced_contrast: self.font_info.grayscale_enhanced_contrast, _pad: 0, }], )?; @@ -617,6 +630,52 @@ impl DirectXRenderer { driver_info: driver_version, }) } + + pub(crate) fn get_font_info() -> &'static FontInfo { + static CACHED_FONT_INFO: OnceLock = OnceLock::new(); + CACHED_FONT_INFO.get_or_init(|| unsafe { + let factory: IDWriteFactory5 = DWriteCreateFactory(DWRITE_FACTORY_TYPE_SHARED).unwrap(); + let render_params: IDWriteRenderingParams1 = + factory.CreateRenderingParams().unwrap().cast().unwrap(); + FontInfo { + gamma_ratios: Self::get_gamma_ratios(render_params.GetGamma()), + grayscale_enhanced_contrast: render_params.GetGrayscaleEnhancedContrast(), + } + }) + } + + // Gamma ratios for brightening/darkening edges for better contrast + // https://github.com/microsoft/terminal/blob/1283c0f5b99a2961673249fa77c6b986efb5086c/src/renderer/atlas/dwrite.cpp#L50 + fn get_gamma_ratios(gamma: f32) -> [f32; 4] { + const GAMMA_INCORRECT_TARGET_RATIOS: [[f32; 4]; 13] = [ + [0.0000 / 4.0, 0.0000 / 4.0, 0.0000 / 4.0, 0.0000 / 4.0], // gamma = 1.0 + [0.0166 / 4.0, -0.0807 / 4.0, 0.2227 / 4.0, -0.0751 / 4.0], // gamma = 1.1 + [0.0350 / 4.0, -0.1760 / 4.0, 0.4325 / 
4.0, -0.1370 / 4.0], // gamma = 1.2 + [0.0543 / 4.0, -0.2821 / 4.0, 0.6302 / 4.0, -0.1876 / 4.0], // gamma = 1.3 + [0.0739 / 4.0, -0.3963 / 4.0, 0.8167 / 4.0, -0.2287 / 4.0], // gamma = 1.4 + [0.0933 / 4.0, -0.5161 / 4.0, 0.9926 / 4.0, -0.2616 / 4.0], // gamma = 1.5 + [0.1121 / 4.0, -0.6395 / 4.0, 1.1588 / 4.0, -0.2877 / 4.0], // gamma = 1.6 + [0.1300 / 4.0, -0.7649 / 4.0, 1.3159 / 4.0, -0.3080 / 4.0], // gamma = 1.7 + [0.1469 / 4.0, -0.8911 / 4.0, 1.4644 / 4.0, -0.3234 / 4.0], // gamma = 1.8 + [0.1627 / 4.0, -1.0170 / 4.0, 1.6051 / 4.0, -0.3347 / 4.0], // gamma = 1.9 + [0.1773 / 4.0, -1.1420 / 4.0, 1.7385 / 4.0, -0.3426 / 4.0], // gamma = 2.0 + [0.1908 / 4.0, -1.2652 / 4.0, 1.8650 / 4.0, -0.3476 / 4.0], // gamma = 2.1 + [0.2031 / 4.0, -1.3864 / 4.0, 1.9851 / 4.0, -0.3501 / 4.0], // gamma = 2.2 + ]; + + const NORM13: f32 = ((0x10000 as f64) / (255.0 * 255.0) * 4.0) as f32; + const NORM24: f32 = ((0x100 as f64) / (255.0) * 4.0) as f32; + + let index = ((gamma * 10.0).round() as usize).clamp(10, 22) - 10; + let ratios = GAMMA_INCORRECT_TARGET_RATIOS[index]; + + [ + ratios[0] * NORM13, + ratios[1] * NORM24, + ratios[2] * NORM13, + ratios[3] * NORM24, + ] + } } impl DirectXResources { @@ -822,8 +881,10 @@ impl DirectXGlobalElements { #[derive(Debug, Default)] #[repr(C)] struct GlobalParams { + gamma_ratios: [f32; 4], viewport_size: [f32; 2], - _pad: u64, + grayscale_enhanced_contrast: f32, + _pad: u32, } struct PipelineState { @@ -1544,6 +1605,10 @@ pub(crate) mod shader_resources { #[cfg(debug_assertions)] pub(super) fn build_shader_blob(entry: ShaderModule, target: ShaderTarget) -> Result { unsafe { + use windows::Win32::Graphics::{ + Direct3D::ID3DInclude, Hlsl::D3D_COMPILE_STANDARD_FILE_INCLUDE, + }; + let shader_name = if matches!(entry, ShaderModule::EmojiRasterization) { "color_text_raster.hlsl" } else { @@ -1572,10 +1637,15 @@ pub(crate) mod shader_resources { let entry_point = PCSTR::from_raw(entry.as_ptr()); let target_cstr = PCSTR::from_raw(target.as_ptr()); 
+ // really dirty trick because winapi bindings are unhappy otherwise + let include_handler = &std::mem::transmute::( + D3D_COMPILE_STANDARD_FILE_INCLUDE as usize, + ); + let ret = D3DCompileFromFile( &HSTRING::from(shader_path.to_str().unwrap()), None, - None, + include_handler, entry_point, target_cstr, D3DCOMPILE_DEBUG | D3DCOMPILE_SKIP_OPTIMIZATION, diff --git a/crates/gpui/src/platform/windows/platform.rs b/crates/gpui/src/platform/windows/platform.rs index 3a6ccff90f06156345a71482fe723c76d4c2ca39..b06f369aabb860d7b0de3603ecc7e8357571fd2c 100644 --- a/crates/gpui/src/platform/windows/platform.rs +++ b/crates/gpui/src/platform/windows/platform.rs @@ -1,7 +1,6 @@ use std::{ cell::RefCell, ffi::OsStr, - mem::ManuallyDrop, path::{Path, PathBuf}, rc::Rc, sync::Arc, @@ -18,10 +17,7 @@ use windows::{ UI::ViewManagement::UISettings, Win32::{ Foundation::*, - Graphics::{ - Gdi::*, - Imaging::{CLSID_WICImagingFactory, IWICImagingFactory}, - }, + Graphics::Gdi::*, Security::Credentials::*, System::{Com::*, LibraryLoader::*, Ole::*, SystemInformation::*, Threading::*}, UI::{Input::KeyboardAndMouse::*, Shell::*, WindowsAndMessaging::*}, @@ -41,7 +37,6 @@ pub(crate) struct WindowsPlatform { foreground_executor: ForegroundExecutor, text_system: Arc, windows_version: WindowsVersion, - bitmap_factory: ManuallyDrop, drop_target_helper: IDropTargetHelper, validation_number: usize, main_thread_id_win32: u32, @@ -101,12 +96,8 @@ impl WindowsPlatform { let foreground_executor = ForegroundExecutor::new(dispatcher); let directx_devices = DirectXDevices::new(disable_direct_composition) .context("Unable to init directx devices.")?; - let bitmap_factory = ManuallyDrop::new(unsafe { - CoCreateInstance(&CLSID_WICImagingFactory, None, CLSCTX_INPROC_SERVER) - .context("Error creating bitmap factory.")? 
- }); let text_system = Arc::new( - DirectWriteTextSystem::new(&directx_devices, &bitmap_factory) + DirectWriteTextSystem::new(&directx_devices) .context("Error creating DirectWriteTextSystem")?, ); let drop_target_helper: IDropTargetHelper = unsafe { @@ -128,7 +119,6 @@ impl WindowsPlatform { text_system, disable_direct_composition, windows_version, - bitmap_factory, drop_target_helper, validation_number, main_thread_id_win32, @@ -716,7 +706,6 @@ impl Platform for WindowsPlatform { impl Drop for WindowsPlatform { fn drop(&mut self) { unsafe { - ManuallyDrop::drop(&mut self.bitmap_factory); OleUninitialize(); } } diff --git a/crates/gpui/src/platform/windows/shaders.hlsl b/crates/gpui/src/platform/windows/shaders.hlsl index 6fabe859e3fe6de58c438642455964e135258860..2cef54ae6166e313795eb42210b5f07c1bc378fc 100644 --- a/crates/gpui/src/platform/windows/shaders.hlsl +++ b/crates/gpui/src/platform/windows/shaders.hlsl @@ -1,6 +1,10 @@ +#include "alpha_correction.hlsl" + cbuffer GlobalParams: register(b0) { + float4 gamma_ratios; float2 global_viewport_size; - uint2 _pad; + float grayscale_enhanced_contrast; + uint _pad; }; Texture2D t_sprite: register(t0); @@ -1098,7 +1102,8 @@ MonochromeSpriteVertexOutput monochrome_sprite_vertex(uint vertex_id: SV_VertexI float4 monochrome_sprite_fragment(MonochromeSpriteFragmentInput input): SV_Target { float sample = t_sprite.Sample(s_sprite, input.tile_position).r; - return float4(input.color.rgb, input.color.a * sample); + float alpha_corrected = apply_contrast_and_gamma_correction(sample, input.color.rgb, grayscale_enhanced_contrast, gamma_ratios); + return float4(input.color.rgb, input.color.a * alpha_corrected); } /*